src/eric7/DebugClients/Python/coverage/data.py

branch:    eric7
changeset: 9209:b99e7fd55fd3
parent:    8929:fcca2fa618bf
child:     9252:32dd11232e06
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt

"""Coverage data for coverage.py.

This file had the 4.x JSON data support, which is now gone. This file still
has storage-agnostic helpers, and is kept to avoid changing too many imports.
CoverageData is now defined in sqldata.py, and imported here to keep the
imports working.

"""

import glob
import os.path

from coverage.exceptions import CoverageException, NoDataError
from coverage.misc import file_be_gone, human_sorted, plural
from coverage.sqldata import CoverageData


def line_counts(data, fullpath=False):
    """Return a dict summarizing the line coverage data.

    Keys are based on the file names, and values are the number of executed
    lines. If `fullpath` is true, then the keys are the full pathnames of
    the files, otherwise they are the basenames of the files.

    Returns a dict mapping file names to counts of lines.

    """
    summ = {}
    if fullpath:
        filename_fn = lambda f: f
    else:
        filename_fn = os.path.basename
    for filename in data.measured_files():
        summ[filename_fn(filename)] = len(data.lines(filename))
    return summ
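# A minimal usage sketch (illustrative, not part of the upstream module):
# assumes a ".coverage" data file has already been written by a coverage run.
#
#     cov_data = CoverageData()  # the default data file is ".coverage"
#     cov_data.read()
#     for name, nlines in line_counts(cov_data).items():
#         print(f"{name}: {nlines} line{plural(nlines)}")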


def add_data_to_hash(data, filename, hasher):
    """Contribute `filename`'s data to the `hasher`.

    `hasher` is a `coverage.misc.Hasher` instance to be updated with
    the file's data. It should only get the results data, not the run
    data.

    """
    if data.has_arcs():
        hasher.update(sorted(data.arcs(filename) or []))
    else:
        hasher.update(sorted(data.lines(filename) or []))
    hasher.update(data.file_tracer(filename))
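# A minimal usage sketch (illustrative): `Hasher` lives in coverage.misc;
# the measured file name below is hypothetical and would normally come
# from data.measured_files().
#
#     from coverage.misc import Hasher
#
#     hasher = Hasher()
#     add_data_to_hash(cov_data, "/path/to/module.py", hasher)
#     print(hasher.hexdigest())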


def combinable_files(data_file, data_paths=None):
    """Make a list of data files to be combined.

    `data_file` is a path to a data file. `data_paths` is a list of files or
    directories of files.

    Returns a list of absolute file paths.
    """
    data_dir, local = os.path.split(os.path.abspath(data_file))

    data_paths = data_paths or [data_dir]
    files_to_combine = []
    for p in data_paths:
        if os.path.isfile(p):
            files_to_combine.append(os.path.abspath(p))
        elif os.path.isdir(p):
            pattern = os.path.join(os.path.abspath(p), f"{local}.*")
            files_to_combine.extend(glob.glob(pattern))
        else:
            raise NoDataError(f"Couldn't combine from non-existent path '{p}'")
    return files_to_combine
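# A minimal usage sketch (illustrative): with a prefix of ".coverage", this
# matches sibling files such as ".coverage.hostname.1234.567890" that
# parallel test runs leave behind.
#
#     for path in combinable_files(".coverage"):
#         print(path)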


def combine_parallel_data(
    data, aliases=None, data_paths=None, strict=False, keep=False, message=None,
):
    """Combine a number of data files together.

    `data` is a CoverageData.

    Treat `data.filename` as a file prefix, and combine the data from all
    of the data files starting with that prefix plus a dot.

    If `aliases` is provided, it's a `PathAliases` object that is used to
    re-map paths to match the local machine's.

    If `data_paths` is provided, it is a list of directories or files to
    combine. Directories are searched for files that start with
    `data.filename` plus dot as a prefix, and those files are combined.

    If `data_paths` is not provided, then the directory portion of
    `data.filename` is used as the directory to search for data files.
    Unless `keep` is True, every data file found and combined is then
    deleted from disk. If a file cannot be read, a warning will be issued,
    and the file will not be deleted.

    If `strict` is true, and no files are found to combine, an error is
    raised.

    """
    files_to_combine = combinable_files(data.base_filename(), data_paths)

    if strict and not files_to_combine:
        raise NoDataError("No data to combine")

    files_combined = 0
    for f in files_to_combine:
        if f == data.data_filename():
            # Sometimes we are combining into a file which is one of the
            # parallel files. Skip that file.
            if data._debug.should('dataio'):
                data._debug.write(f"Skipping combining ourself: {f!r}")
            continue
        if data._debug.should('dataio'):
            data._debug.write(f"Combining data file {f!r}")
        try:
            new_data = CoverageData(f, debug=data._debug)
            new_data.read()
        except CoverageException as exc:
            if data._warn:
                # The CoverageException has the file name in it, so just
                # use the message as the warning.
                data._warn(str(exc))
        else:
            data.update(new_data, aliases=aliases)
            files_combined += 1
            if message:
                message(f"Combined data file {os.path.relpath(f)}")
            if not keep:
                if data._debug.should('dataio'):
                    data._debug.write(f"Deleting combined data file {f!r}")
                file_be_gone(f)

    if strict and not files_combined:
        raise NoDataError("No usable data files")
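# A minimal usage sketch (illustrative): combine the parallel data files
# found next to ".coverage" into one database, keeping the originals on disk.
#
#     combined = CoverageData(".coverage")
#     combine_parallel_data(combined, keep=True, message=print)
#     combined.write()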


def debug_data_file(filename):
    """Implementation of 'coverage debug data'."""
    data = CoverageData(filename)
    filename = data.data_filename()
    print(f"path: {filename}")
    if not os.path.exists(filename):
        print("No data collected: file doesn't exist")
        return
    data.read()
    print(f"has_arcs: {data.has_arcs()!r}")
    summary = line_counts(data, fullpath=True)
    filenames = human_sorted(summary.keys())
    nfiles = len(filenames)
    print(f"{nfiles} file{plural(nfiles)}:")
    for f in filenames:
        line = f"{f}: {summary[f]} line{plural(summary[f])}"
        plugin = data.file_tracer(f)
        if plugin:
            line += f" [{plugin}]"
        print(line)
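# A minimal usage sketch (illustrative): this is the helper behind the
# "coverage debug data" command line, so calling it directly prints the
# same report.
#
#     debug_data_file(".coverage")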
