
    # Our htmlfiles directory.
    os.path.join(os.path.dirname(__file__), "htmlfiles"),
]


def data_filename(fname, pkgdir=""):
    """Return the path to a data file of ours.

    The file is searched for on `STATIC_PATH`, and the first place it's found,
    is returned.

    Each directory in `STATIC_PATH` is searched as-is, and also, if `pkgdir`
    is provided, at that sub-directory.

    """
    tried = []
    for static_dir in STATIC_PATH:
        static_filename = os.path.join(static_dir, fname)
        if os.path.exists(static_filename):
            return static_filename
        else:
            tried.append(static_filename)
        if pkgdir:
            static_filename = os.path.join(static_dir, pkgdir, fname)
            if os.path.exists(static_filename):
                return static_filename
            else:
                tried.append(static_filename)
    raise CoverageException(
        "Couldn't find static file %r from %r, tried: %r" % (fname, os.getcwd(), tried)
    )
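# Illustrative usage (a sketch, not part of the module): with the bundled
# "htmlfiles" entry on STATIC_PATH above, each directory is tried as-is first,
# then with `pkgdir` appended:
#
#     data_filename("style.css")                # .../htmlfiles/style.css
#     data_filename("jquery.min.js", "jquery")  # .../htmlfiles/jquery.min.js,
#                                               # else .../htmlfiles/jquery/jquery.min.js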


def data(fname):
    """Return the contents of a data file of ours."""
    with open(data_filename(fname)) as data_file:
        return data_file.read()


class HtmlReporter(Reporter):
    """HTML reporting."""

    # These files will be copied from the htmlfiles directory to the output
    # directory.
    STATIC_FILES = [
        ("style.css", ""),
        ("jquery.min.js", "jquery"),
        ("jquery.debounce.min.js", "jquery-debounce"),
        ("jquery.hotkeys.js", "jquery-hotkeys"),
        ("jquery.isonscreen.js", "jquery-isonscreen"),
        ("jquery.tablesorter.min.js", "jquery-tablesorter"),
        ("coverage_html.js", ""),
        ("keybd_closed.png", ""),
        ("keybd_open.png", ""),
    ]
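    # Each (file name, package directory) pair above is resolved through
    # data_filename(static, pkgdir) in make_local_static_report_files() below,
    # so e.g. ("jquery.min.js", "jquery") may come from either htmlfiles/ or
    # htmlfiles/jquery/.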

    def __init__(self, cov, config):
        super(HtmlReporter, self).__init__(cov, config)
        self.directory = None
        title = self.config.html_title
        if env.PY2:
            title = title.decode("utf8")
        self.template_globals = {
            'escape': escape,
            'pair': pair,
            'title': title,
            '__url__': coverage.__url__,
            '__version__': coverage.__version__,
        }
        self.source_tmpl = Templite(
            data("pyfile.html"), self.template_globals
        )

        self.coverage = cov

        self.files = []
        self.has_arcs = self.coverage.data.has_arcs()
        self.status = HtmlStatus()
        self.extra_css = None
        self.totals = Numbers()
        self.time_stamp = datetime.datetime.now().strftime('%Y-%m-%d %H:%M')

    def report(self, morfs):
        """Generate an HTML report for `morfs`.

        `morfs` is a list of modules or file names.

        """
        assert self.config.html_dir, "must give a directory for html reporting"

        # Read the status data.
        self.status.read(self.config.html_dir)

        # Check that this run used the same settings as the last run.
        m = Hasher()
        m.update(self.config)
        these_settings = m.hexdigest()
        if self.status.settings_hash() != these_settings:
            self.status.reset()
            self.status.set_settings_hash(these_settings)
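        # Resetting discards the cached per-file hashes, so a settings change
        # forces every page to be regenerated on this run.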

        # The user may have extra CSS they want copied.
        if self.config.extra_css:
            self.extra_css = os.path.basename(self.config.extra_css)

        # Process all the files.
        self.report_files(self.html_file, morfs, self.config.html_dir)

        if not self.files:
            raise CoverageException("No data to report.")

        # Write the index file.
        self.index_file()

        self.make_local_static_report_files()
        return self.totals.n_statements and self.totals.pc_covered

    def make_local_static_report_files(self):
        """Make local instances of static files for HTML report."""
        # The files we provide must always be copied.
        for static, pkgdir in self.STATIC_FILES:
            shutil.copyfile(
                data_filename(static, pkgdir),
                os.path.join(self.directory, static)
            )

        # The user may have extra CSS they want copied.
        if self.extra_css:
            shutil.copyfile(
                self.config.extra_css,
                os.path.join(self.directory, self.extra_css)
            )

    def write_html(self, fname, html):
        """Write `html` to `fname`, properly encoded."""
        with open(fname, "wb") as fout:
            fout.write(html.encode('ascii', 'xmlcharrefreplace'))

    def file_hash(self, source, fr):
        """Compute a hash that changes if the file needs to be re-reported."""
        m = Hasher()
        m.update(source)
        self.coverage.data.add_to_hash(fr.filename, m)
        return m.hexdigest()

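    # Note on the incremental logic in html_file() below: the digest from
    # file_hash() covers both the source text and the recorded coverage data
    # for the file, so a page is rewritten only when either one has changed
    # since the last report.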
    def html_file(self, fr, analysis):
        """Generate an HTML file for one source file."""
        source = fr.source()

        # Find out if the file on disk is already correct.
        rootname = flat_rootname(fr.relative_filename())
        this_hash = self.file_hash(source.encode('utf-8'), fr)
        that_hash = self.status.file_hash(rootname)
        if this_hash == that_hash:
            # Nothing has changed to require the file to be reported again.
            self.files.append(self.status.index_info(rootname))
            return

        self.status.set_file_hash(rootname, this_hash)

        # Get the numbers for this file.
        nums = analysis.numbers

        if self.has_arcs:
            missing_branch_arcs = analysis.missing_branch_arcs()

        # These classes determine which lines are highlighted by default.
        c_run = "run hide_run"
        c_exc = "exc"
        c_mis = "mis"
        c_par = "par " + c_run

        lines = []

        for lineno, line in enumerate(fr.source_token_lines(), start=1):
            # Figure out how to mark this line.
            line_class = []
            annotate_html = ""
            annotate_title = ""
            if lineno in analysis.statements:
                line_class.append("stm")
            if lineno in analysis.excluded:
                line_class.append(c_exc)
            elif lineno in analysis.missing:
                line_class.append(c_mis)
            elif self.has_arcs and lineno in missing_branch_arcs:
                line_class.append(c_par)
                shorts = []
                longs = []
                for b in missing_branch_arcs[lineno]:
                    if b < 0:
                        shorts.append("exit")
                        longs.append("the function exit")
                    else:
                        shorts.append(b)
                        longs.append("line %d" % b)
                # 202F is NARROW NO-BREAK SPACE.
                # 219B is RIGHTWARDS ARROW WITH STROKE.
                short_fmt = "%s ↛ %s"
                annotate_html = ", ".join(short_fmt % (lineno, d) for d in shorts)
                annotate_html += " [?]"

                annotate_title = "Line %d was executed, but never jumped to " % lineno
                if len(longs) == 1:
                    annotate_title += longs[0]
                elif len(longs) == 2:
                    annotate_title += longs[0] + " or " + longs[1]
                else:
                    annotate_title += ", ".join(longs[:-1]) + ", or " + longs[-1]
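                # Illustrative example: if line 12 has missing branches to
                # line 15 and to the function exit, this yields annotate_html
                # "12 ↛ 15, 12 ↛ exit [?]" and annotate_title "Line 12 was
                # executed, but never jumped to line 15 or the function exit".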
            elif lineno in analysis.statements:
                line_class.append(c_run)

            # Build the HTML for the line.
            html = []
            for tok_type, tok_text in line:
                if tok_type == "ws":
                    html.append(escape(tok_text))
                else:
                    tok_html = escape(tok_text) or '&nbsp;'
                    html.append(
                        '<span class="%s">%s</span>' % (tok_type, tok_html)
                    )

            lines.append({
                'html': ''.join(html),
                'number': lineno,
                'class': ' '.join(line_class) or "pln",
                'annotate': annotate_html,
                'annotate_title': annotate_title,
            })

        # Write the HTML page for this file.
        template_values = {
            'c_exc': c_exc, 'c_mis': c_mis, 'c_par': c_par, 'c_run': c_run,
            'has_arcs': self.has_arcs, 'extra_css': self.extra_css,
            'fr': fr, 'nums': nums, 'lines': lines,
            'time_stamp': self.time_stamp,
        }
        html = spaceless(self.source_tmpl.render(template_values))

        html_filename = rootname + ".html"
        html_path = os.path.join(self.directory, html_filename)
        self.write_html(html_path, html)

        # Save this file's information for the index file.
        index_info = {
            'nums': nums,
            'html_filename': html_filename,
            'relative_filename': fr.relative_filename(),
        }
        self.files.append(index_info)
        self.status.set_index_info(rootname, index_info)

    def index_file(self):
        """Write the index.html file for this report."""
        index_tmpl = Templite(data("index.html"), self.template_globals)

        self.totals = sum(f['nums'] for f in self.files)

        html = index_tmpl.render({
            'has_arcs': self.has_arcs,
            'extra_css': self.extra_css,
            'files': self.files,
            'totals': self.totals,
            'time_stamp': self.time_stamp,
        })

        self.write_html(os.path.join(self.directory, "index.html"), html)

        # Write the latest hashes for next time.
        self.status.write(self.directory)


class HtmlStatus(object):
    """The status information we keep to support incremental reporting."""

    STATUS_FILE = "status.json"
    STATUS_FORMAT = 1

    # pylint: disable=wrong-spelling-in-comment,useless-suppression
    # The data looks like:
    #
    # {
    #     'format': 1,
    #     'settings': '540ee119c15d52a68a53fe6f0897346d',
    #     'version': '4.0a1',
    #     'files': {
    #         'cogapp___init__': {
    #             'hash': 'e45581a5b48f879f301c0f30bf77a50c',
    #             'index': {
    #                 'html_filename': 'cogapp___init__.html',
    #                 'name': 'cogapp/__init__',
    #                 'nums': <coverage.results.Numbers object at 0x10ab7ed0>,
    #             }
    #         },
    #         ...
    #         'cogapp_whiteutils': {
    #             'hash': '8504bb427fc488c4176809ded0277d51',
    #             'index': {
    #                 'html_filename': 'cogapp_whiteutils.html',
    #                 'name': 'cogapp/whiteutils',
    #                 'nums': <coverage.results.Numbers object at 0x10ab7d90>,
    #             }
    #         },
    #     },
    # }
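    #
    # A rough sketch (illustrative values, abridged) of the corresponding
    # status.json on disk: write() below swaps each Numbers object for its
    # init_args() list so it can be JSON-serialized, and read() rebuilds it
    # with Numbers(*...):
    #
    # {
    #     "format": 1,
    #     "version": "4.0a1",
    #     "settings": "540ee119c15d52a68a53fe6f0897346d",
    #     "files": {
    #         "cogapp___init__": {
    #             "hash": "e45581a5b48f879f301c0f30bf77a50c",
    #             "index": {
    #                 "html_filename": "cogapp___init__.html",
    #                 "nums": [1, 14, 0, 0, 0, 0, 0],
    #                 ...
    #             }
    #         },
    #         ...
    #     },
    # }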

    def __init__(self):
        self.reset()

    def reset(self):
        """Initialize to empty."""
        self.settings = ''
        self.files = {}

    def read(self, directory):
        """Read the last status in `directory`."""
        usable = False
        try:
            status_file = os.path.join(directory, self.STATUS_FILE)
            with open(status_file, "r") as fstatus:
                status = json.load(fstatus)
        except (IOError, ValueError):
            usable = False
        else:
            usable = True
            if status['format'] != self.STATUS_FORMAT:
                usable = False
            elif status['version'] != coverage.__version__:
                usable = False

        if usable:
            self.files = {}
            for filename, fileinfo in iitems(status['files']):
                fileinfo['index']['nums'] = Numbers(*fileinfo['index']['nums'])
                self.files[filename] = fileinfo
            self.settings = status['settings']
        else:
            self.reset()

    def write(self, directory):
        """Write the current status to `directory`."""
        status_file = os.path.join(directory, self.STATUS_FILE)
        files = {}
        for filename, fileinfo in iitems(self.files):
            fileinfo['index']['nums'] = fileinfo['index']['nums'].init_args()
            files[filename] = fileinfo

        status = {
            'format': self.STATUS_FORMAT,
            'version': coverage.__version__,
            'settings': self.settings,
            'files': files,
        }
        with open(status_file, "w") as fout:
            json.dump(status, fout)

        # Older versions of ShiningPanda look for the old name, status.dat.
        # Accommodate them if we are running under Jenkins.
        # https://issues.jenkins-ci.org/browse/JENKINS-28428
        if "JENKINS_URL" in os.environ:
            with open(os.path.join(directory, "status.dat"), "w") as dat:
                dat.write("https://issues.jenkins-ci.org/browse/JENKINS-28428\n")

    def settings_hash(self):
        """Get the hash of the coverage.py settings."""
        return self.settings
