|
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt

"""Coverage data for coverage.py.

This file had the 4.x JSON data support, which is now gone. This file still
has storage-agnostic helpers, and is kept to avoid changing too many imports.
CoverageData is now defined in sqldata.py, and imported here to keep the
imports working.

"""

import glob
import os.path

from coverage.misc import CoverageException, file_be_gone
from coverage.sqldata import CoverageData


def line_counts(data, fullpath=False):
    """Return a dict summarizing the line coverage data.

    Keys are based on the file names, and values are the number of executed
    lines. If `fullpath` is true, then the keys are the full pathnames of
    the files, otherwise they are the basenames of the files.

    Returns a dict mapping file names to counts of lines.

    """
    summ = {}
    if fullpath:
        filename_fn = lambda f: f
    else:
        filename_fn = os.path.basename
    for filename in data.measured_files():
        summ[filename_fn(filename)] = len(data.lines(filename))
    return summ
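
# A minimal usage sketch for `line_counts` (illustrative only; assumes a
# ".coverage" data file has already been written by a measured run):
#
#     cov_data = CoverageData()
#     cov_data.read()
#     print(line_counts(cov_data, fullpath=True))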
|
|
def add_data_to_hash(data, filename, hasher):
    """Contribute `filename`'s data to the `hasher`.

    `hasher` is a `coverage.misc.Hasher` instance to be updated with
    the file's data. It should only get the results data, not the run
    data.

    """
    if data.has_arcs():
        hasher.update(sorted(data.arcs(filename) or []))
    else:
        hasher.update(sorted(data.lines(filename) or []))
    hasher.update(data.file_tracer(filename))
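
# Illustrative sketch of fingerprinting one measured file's results (hedged:
# the file name "mod.py" is hypothetical, and `Hasher` is coverage's internal
# hashing helper from coverage.misc):
#
#     from coverage.misc import Hasher
#     hasher = Hasher()
#     add_data_to_hash(cov_data, "mod.py", hasher)
#     print(hasher.hexdigest())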
|
|
def combine_parallel_data(data, aliases=None, data_paths=None, strict=False):
    """Combine a number of data files together.

    Treat `data.filename` as a file prefix, and combine the data from all
    of the data files starting with that prefix plus a dot.

    If `aliases` is provided, it's a `PathAliases` object that is used to
    re-map paths to match the local machine's.

    If `data_paths` is provided, it is a list of directories or files to
    combine. Directories are searched for files that start with
    `data.filename` plus dot as a prefix, and those files are combined.

    If `data_paths` is not provided, then the directory portion of
    `data.filename` is used as the directory to search for data files.

    Every data file found and combined is then deleted from disk. If a file
    cannot be read, a warning will be issued, and the file will not be
    deleted.

    If `strict` is true, and no files are found to combine, an error is
    raised.

    """
    # Because of the os.path.abspath in the constructor, data_dir will
    # never be an empty string.
    data_dir, local = os.path.split(data.base_filename())
    localdot = local + '.*'

    data_paths = data_paths or [data_dir]
    files_to_combine = []
    for p in data_paths:
        if os.path.isfile(p):
            files_to_combine.append(os.path.abspath(p))
        elif os.path.isdir(p):
            pattern = os.path.join(os.path.abspath(p), localdot)
            files_to_combine.extend(glob.glob(pattern))
        else:
            raise CoverageException("Couldn't combine from non-existent path '%s'" % (p,))

    if strict and not files_to_combine:
        raise CoverageException("No data to combine")

    files_combined = 0
    for f in files_to_combine:
        if f == data.data_filename():
            # Sometimes we are combining into a file which is one of the
            # parallel files. Skip that file.
            if data._debug.should('dataio'):
                data._debug.write("Skipping combining ourself: %r" % (f,))
            continue
        if data._debug.should('dataio'):
            data._debug.write("Combining data file %r" % (f,))
        try:
            new_data = CoverageData(f, debug=data._debug)
            new_data.read()
        except CoverageException as exc:
            if data._warn:
                # The CoverageException has the file name in it, so just
                # use the message as the warning.
                data._warn(str(exc))
        else:
            data.update(new_data, aliases=aliases)
            files_combined += 1
            if data._debug.should('dataio'):
                data._debug.write("Deleting combined data file %r" % (f,))
            file_be_gone(f)

    if strict and not files_combined:
        raise CoverageException("No usable data files")
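
# Illustrative sketch of combining parallel runs (assumes parallel data files
# such as ".coverage.hostname.1234.567890" exist alongside ".coverage"):
#
#     combined = CoverageData()
#     combine_parallel_data(combined, strict=True)
#     combined.write()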