src/eric7/DebugClients/Python/coverage/data.py

# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt

"""Coverage data for coverage.py.

This file had the 4.x JSON data support, which is now gone. This file still
has storage-agnostic helpers, and is kept to avoid changing too many imports.
CoverageData is now defined in sqldata.py, and imported here to keep the
imports working.

"""

import glob
import os.path

from coverage.exceptions import CoverageException, NoDataError
from coverage.misc import file_be_gone, human_sorted, plural
from coverage.sqldata import CoverageData


def line_counts(data, fullpath=False):
    """Return a dict summarizing the line coverage data.

    Keys are based on the file names, and values are the number of executed
    lines. If `fullpath` is true, then the keys are the full pathnames of
    the files, otherwise they are the basenames of the files.

    Returns a dict mapping file names to counts of lines.

    """
    summ = {}
    if fullpath:
        # pylint: disable=unnecessary-lambda-assignment
        filename_fn = lambda f: f
    else:
        filename_fn = os.path.basename
    for filename in data.measured_files():
        summ[filename_fn(filename)] = len(data.lines(filename))
    return summ
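
# Usage sketch: a minimal example, assuming a ".coverage" data file already
# exists in the current directory.
#
#     data = CoverageData()    # ".coverage" is the default basename
#     data.read()
#     line_counts(data)                   # e.g. {"data.py": 57, ...}
#     line_counts(data, fullpath=True)    # keys are full pathnames instead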


def add_data_to_hash(data, filename, hasher):
    """Contribute `filename`'s data to the `hasher`.

    `hasher` is a `coverage.misc.Hasher` instance to be updated with
    the file's data. It should only get the results data, not the run
    data.

    """
    if data.has_arcs():
        hasher.update(sorted(data.arcs(filename) or []))
    else:
        hasher.update(sorted(data.lines(filename) or []))
    hasher.update(data.file_tracer(filename))
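
# Usage sketch: fingerprinting one measured file, assuming `data` is a
# CoverageData that has been read and "src/module.py" (hypothetical) is one
# of its measured files. `Hasher` comes from coverage.misc and exposes
# update() and hexdigest().
#
#     from coverage.misc import Hasher
#     hasher = Hasher()
#     add_data_to_hash(data, "src/module.py", hasher)
#     fingerprint = hasher.hexdigest()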


def combinable_files(data_file, data_paths=None):
    """Make a list of data files to be combined.

    `data_file` is a path to a data file. `data_paths` is a list of files or
    directories of files.

    Returns a list of absolute file paths.
    """
    data_dir, local = os.path.split(os.path.abspath(data_file))

    data_paths = data_paths or [data_dir]
    files_to_combine = []
    for p in data_paths:
        if os.path.isfile(p):
            files_to_combine.append(os.path.abspath(p))
        elif os.path.isdir(p):
            pattern = os.path.join(os.path.abspath(p), f"{local}.*")
            files_to_combine.extend(glob.glob(pattern))
        else:
            raise NoDataError(f"Couldn't combine from non-existent path '{p}'")
    return files_to_combine
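
# Illustration: with data_file "/proj/.coverage" and no data_paths, this
# globs "/proj/.coverage.*", so parallel files written with suffixes (for
# example ".coverage.host.1234.567890") are picked up. A hypothetical call:
#
#     combinable_files("/proj/.coverage")
#     # -> ["/proj/.coverage.host.1234.567890", ...]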


def combine_parallel_data(
    data, aliases=None, data_paths=None, strict=False, keep=False, message=None,
):
    """Combine a number of data files together.

    `data` is a CoverageData.

    Treat `data.filename` as a file prefix, and combine the data from all
    of the data files starting with that prefix plus a dot.

    If `aliases` is provided, it's a `PathAliases` object that is used to
    re-map paths to match the local machine's.

    If `data_paths` is provided, it is a list of directories or files to
    combine. Directories are searched for files that start with
    `data.filename` plus dot as a prefix, and those files are combined.

    If `data_paths` is not provided, then the directory portion of
    `data.filename` is used as the directory to search for data files.

    Unless `keep` is True, every data file found and combined is then
    deleted from disk. If a file cannot be read, a warning will be issued,
    and the file will not be deleted.

    If `strict` is true, and no files are found to combine, an error is
    raised.

    """
    files_to_combine = combinable_files(data.base_filename(), data_paths)

    if strict and not files_to_combine:
        raise NoDataError("No data to combine")

    files_combined = 0
    for f in files_to_combine:
        if f == data.data_filename():
            # Sometimes we are combining into a file which is one of the
            # parallel files. Skip that file.
            if data._debug.should('dataio'):
                data._debug.write(f"Skipping combining ourself: {f!r}")
            continue
        if data._debug.should('dataio'):
            data._debug.write(f"Combining data file {f!r}")
        try:
            new_data = CoverageData(f, debug=data._debug)
            new_data.read()
        except CoverageException as exc:
            if data._warn:
                # The CoverageException has the file name in it, so just
                # use the message as the warning.
                data._warn(str(exc))
        else:
            data.update(new_data, aliases=aliases)
            files_combined += 1
            if message:
                message(f"Combined data file {os.path.relpath(f)}")
            if not keep:
                if data._debug.should('dataio'):
                    data._debug.write(f"Deleting combined data file {f!r}")
                file_be_gone(f)

    if strict and not files_combined:
        raise NoDataError("No usable data files")
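
# Usage sketch: the normal driver is the "coverage combine" command, but a
# direct call might look like this, assuming parallel ".coverage.*" files
# exist next to ".coverage":
#
#     data = CoverageData()
#     combine_parallel_data(data, keep=True, message=print)
#     # prints "Combined data file .coverage.host.1234.567890", etc.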


def debug_data_file(filename):
    """Implementation of 'coverage debug data'."""
    data = CoverageData(filename)
    filename = data.data_filename()
    print(f"path: {filename}")
    if not os.path.exists(filename):
        print("No data collected: file doesn't exist")
        return
    data.read()
    print(f"has_arcs: {data.has_arcs()!r}")
    summary = line_counts(data, fullpath=True)
    filenames = human_sorted(summary.keys())
    nfiles = len(filenames)
    print(f"{nfiles} file{plural(nfiles)}:")
    for f in filenames:
        line = f"{f}: {summary[f]} line{plural(summary[f])}"
        plugin = data.file_tracer(f)
        if plugin:
            line += f" [{plugin}]"
        print(line)
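
# Usage sketch: this function backs "coverage debug data"; called directly,
# it prints a summary along these lines (output is illustrative):
#
#     debug_data_file(".coverage")
#     # path: /proj/.coverage
#     # has_arcs: False
#     # 2 files:
#     # /proj/mod_a.py: 57 lines
#     # /proj/mod_b.py: 103 lines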
