@@ -64,9 +66,10 @@
         return data_file.read()
 
 
 def write_html(fname, html):
     """Write `html` to `fname`, properly encoded."""
+    html = re.sub(r"(\A\s+)|(\s+$)", "", html, flags=re.MULTILINE) + "\n"
     with open(fname, "wb") as fout:
         fout.write(html.encode('ascii', 'xmlcharrefreplace'))
 
 
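A quick aside on the two pieces of `write_html`: the new `re.sub` call trims leading whitespace at the top of the rendered page and trailing whitespace on every line, and the `xmlcharrefreplace` error handler turns every non-ASCII character into an HTML numeric entity so the page can be written as plain ASCII bytes. A standalone sketch of that behavior (the sample string is invented):

    import re

    page = u"  <p>caf\u00e9 \u219b done</p>   \nend  \n"
    # Same cleanup as write_html(): strip leading whitespace at the start of the
    # document and trailing whitespace on each line, then end with one newline.
    page = re.sub(r"(\A\s+)|(\s+$)", "", page, flags=re.MULTILINE) + "\n"
    print(page.encode('ascii', 'xmlcharrefreplace'))
    # b'<p>caf&#233; &#8603; done</p>\nend\n'
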
@@ -73,1 +76,78 @@
-class HtmlReporter(Reporter):
+class HtmlDataGeneration(object):
+    """Generate structured data to be turned into HTML reports."""
+
+    EMPTY = "(empty)"
+
+    def __init__(self, cov):
+        self.coverage = cov
+        self.config = self.coverage.config
+        data = self.coverage.get_data()
+        self.has_arcs = data.has_arcs()
+        if self.config.show_contexts:
+            if data.measured_contexts() == set([""]):
+                self.coverage._warn("No contexts were measured")
+        data.set_query_contexts(self.config.report_contexts)
+
+    def data_for_file(self, fr, analysis):
+        """Produce the data needed for one file's report."""
+        if self.has_arcs:
+            missing_branch_arcs = analysis.missing_branch_arcs()
+            arcs_executed = analysis.arcs_executed()
+
+        if self.config.show_contexts:
+            contexts_by_lineno = analysis.data.contexts_by_lineno(analysis.filename)
+
+        lines = []
+
+        for lineno, tokens in enumerate(fr.source_token_lines(), start=1):
+            # Figure out how to mark this line.
+            category = None
+            short_annotations = []
+            long_annotations = []
+
+            if lineno in analysis.excluded:
+                category = 'exc'
+            elif lineno in analysis.missing:
+                category = 'mis'
+            elif self.has_arcs and lineno in missing_branch_arcs:
+                category = 'par'
+                for b in missing_branch_arcs[lineno]:
+                    if b < 0:
+                        short_annotations.append("exit")
+                    else:
+                        short_annotations.append(b)
+                    long_annotations.append(fr.missing_arc_description(lineno, b, arcs_executed))
+            elif lineno in analysis.statements:
+                category = 'run'
+
+            contexts = contexts_label = None
+            context_list = None
+            if category and self.config.show_contexts:
+                contexts = sorted(c or self.EMPTY for c in contexts_by_lineno[lineno])
+                if contexts == [self.EMPTY]:
+                    contexts_label = self.EMPTY
+                else:
+                    contexts_label = "{} ctx".format(len(contexts))
+                    context_list = contexts
+
+            lines.append(SimpleNamespace(
+                tokens=tokens,
+                number=lineno,
+                category=category,
+                statement=(lineno in analysis.statements),
+                contexts=contexts,
+                contexts_label=contexts_label,
+                context_list=context_list,
+                short_annotations=short_annotations,
+                long_annotations=long_annotations,
+            ))
+
+        file_data = SimpleNamespace(
+            relative_filename=fr.relative_filename(),
+            nums=analysis.numbers,
+            lines=lines,
+        )
+
+        return file_data
+
+
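For reference, the records that `data_for_file()` builds are plain namespace objects, which is what lets the HTML pass further down attach more attributes (`html`, `annotate`, `css_class`, ...) to each line later. A rough illustration of the shape of one line record (the token values are invented, and `types.SimpleNamespace` stands in for whatever compatibility shim the module actually imports):

    from types import SimpleNamespace

    ldata = SimpleNamespace(
        tokens=[("key", "def"), ("ws", " "), ("nam", "frob")],  # invented sample tokens
        number=1,
        category='run',
        statement=True,
        contexts=None,
        contexts_label=None,
        context_list=None,
        short_annotations=[],
        long_annotations=[],
    )
    ldata.html = '<span class="key">def</span> ...'  # added later by HtmlReporter.html_file()
    print(ldata.number, ldata.category, ldata.statement)
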
@@ -74,5 +154,6 @@
+class HtmlReporter(object):
     """HTML reporting."""
 
     # These files will be copied from the htmlfiles directory to the output
     # directory.
     STATIC_FILES = [
85 ("coverage_html.js", ""), |
166 ("coverage_html.js", ""), |
86 ("keybd_closed.png", ""), |
167 ("keybd_closed.png", ""), |
87 ("keybd_open.png", ""), |
168 ("keybd_open.png", ""), |
88 ] |
169 ] |
89 |
170 |
90 def __init__(self, cov, config): |
171 def __init__(self, cov): |
91 super(HtmlReporter, self).__init__(cov, config) |
172 self.coverage = cov |
92 self.directory = None |
173 self.config = self.coverage.config |
|
174 self.directory = self.config.html_dir |
93 title = self.config.html_title |
175 title = self.config.html_title |
94 if env.PY2: |
176 if env.PY2: |
95 title = title.decode("utf8") |
177 title = title.decode("utf8") |
|
178 |
|
179 if self.config.extra_css: |
|
180 self.extra_css = os.path.basename(self.config.extra_css) |
|
181 else: |
|
182 self.extra_css = None |
|
183 |
|
184 self.data = self.coverage.get_data() |
|
185 self.has_arcs = self.data.has_arcs() |
|
186 |
|
187 self.file_summaries = [] |
|
188 self.all_files_nums = [] |
|
189 self.incr = IncrementalChecker(self.directory) |
|
190 self.datagen = HtmlDataGeneration(self.coverage) |
|
191 self.totals = Numbers() |
|
192 |
96 self.template_globals = { |
193 self.template_globals = { |
|
194 # Functions available in the templates. |
97 'escape': escape, |
195 'escape': escape, |
98 'pair': pair, |
196 'pair': pair, |
99 'title': title, |
197 'len': len, |
|
198 |
|
199 # Constants for this report. |
100 '__url__': coverage.__url__, |
200 '__url__': coverage.__url__, |
101 '__version__': coverage.__version__, |
201 '__version__': coverage.__version__, |
|
202 'title': title, |
|
203 'time_stamp': datetime.datetime.now().strftime('%Y-%m-%d %H:%M'), |
|
204 'extra_css': self.extra_css, |
|
205 'has_arcs': self.has_arcs, |
|
206 'show_contexts': self.config.show_contexts, |
|
207 |
|
208 # Constants for all reports. |
|
209 # These css classes determine which lines are highlighted by default. |
|
210 'category': { |
|
211 'exc': 'exc show_exc', |
|
212 'mis': 'mis show_mis', |
|
213 'par': 'par run show_par', |
|
214 'run': 'run', |
|
215 } |
102 } |
216 } |
103 self.source_tmpl = Templite(read_data("pyfile.html"), self.template_globals) |
217 self.pyfile_html_source = read_data("pyfile.html") |
104 |
218 self.source_tmpl = Templite(self.pyfile_html_source, self.template_globals) |
105 self.coverage = cov |
|
106 |
|
107 self.files = [] |
|
108 self.all_files_nums = [] |
|
109 self.has_arcs = self.coverage.data.has_arcs() |
|
110 self.status = HtmlStatus() |
|
111 self.extra_css = None |
|
112 self.totals = Numbers() |
|
113 self.time_stamp = datetime.datetime.now().strftime('%Y-%m-%d %H:%M') |
|
114 |
219 |
     def report(self, morfs):
         """Generate an HTML report for `morfs`.
 
         `morfs` is a list of modules or file names.
 
         """
-        assert self.config.html_dir, "must give a directory for html reporting"
-
-        # Read the status data.
-        self.status.read(self.config.html_dir)
-
-        # Check that this run used the same settings as the last run.
-        m = Hasher()
-        m.update(self.config)
-        these_settings = m.hexdigest()
-        if self.status.settings_hash() != these_settings:
-            self.status.reset()
-            self.status.set_settings_hash(these_settings)
-
-        # The user may have extra CSS they want copied.
-        if self.config.extra_css:
-            self.extra_css = os.path.basename(self.config.extra_css)
+        # Read the status data and check that this run used the same
+        # global data as the last run.
+        self.incr.read()
+        self.incr.check_global_data(self.config, self.pyfile_html_source)
 
         # Process all the files.
-        self.report_files(self.html_file, morfs, self.config.html_dir)
+        for fr, analysis in get_analysis_to_report(self.coverage, morfs):
+            self.html_file(fr, analysis)
 
         if not self.all_files_nums:
             raise CoverageException("No data to report.")
+
+        self.totals = sum(self.all_files_nums)
 
         # Write the index file.
         self.index_file()
 
         self.make_local_static_report_files()
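Note that `self.totals = sum(self.all_files_nums)` only works because the per-file `Numbers` objects are addable, including with the `0` that `sum()` starts from. A minimal sketch of that pattern with a toy class (not coverage.py's real `Numbers`):

    class Totals(object):
        """Toy stand-in for a summable per-file stats object."""

        def __init__(self, n_statements=0, n_missing=0):
            self.n_statements = n_statements
            self.n_missing = n_missing

        def __add__(self, other):
            return Totals(
                self.n_statements + other.n_statements,
                self.n_missing + other.n_missing,
            )

        def __radd__(self, other):
            # sum() starts from 0, so 0 + Totals(...) must be handled.
            if other == 0:
                return self
            return NotImplemented

    grand = sum([Totals(10, 2), Totals(5, 1)])
    print(grand.n_statements, grand.n_missing)  # 15 3
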
@@ -187,110 +277,73 @@
         if no_missing_lines and no_missing_branches:
             # If there's an existing file, remove it.
             file_be_gone(html_path)
             return
 
-        source = fr.source()
+        if self.config.skip_empty:
+            # Don't report on empty files.
+            if nums.n_statements == 0:
+                file_be_gone(html_path)
+                return
 
         # Find out if the file on disk is already correct.
-        this_hash = self.file_hash(source.encode('utf-8'), fr)
-        that_hash = self.status.file_hash(rootname)
-        if this_hash == that_hash:
-            # Nothing has changed to require the file to be reported again.
-            self.files.append(self.status.index_info(rootname))
+        if self.incr.can_skip_file(self.data, fr, rootname):
+            self.file_summaries.append(self.incr.index_info(rootname))
             return
 
-        self.status.set_file_hash(rootname, this_hash)
-
-        if self.has_arcs:
-            missing_branch_arcs = analysis.missing_branch_arcs()
-            arcs_executed = analysis.arcs_executed()
-
-        # These classes determine which lines are highlighted by default.
-        c_run = "run hide_run"
-        c_exc = "exc"
-        c_mis = "mis"
-        c_par = "par " + c_run
-
-        lines = []
-
-        for lineno, line in enumerate(fr.source_token_lines(), start=1):
-            # Figure out how to mark this line.
-            line_class = []
-            annotate_html = ""
-            annotate_long = ""
-            if lineno in analysis.statements:
-                line_class.append("stm")
-            if lineno in analysis.excluded:
-                line_class.append(c_exc)
-            elif lineno in analysis.missing:
-                line_class.append(c_mis)
-            elif self.has_arcs and lineno in missing_branch_arcs:
-                line_class.append(c_par)
-                shorts = []
-                longs = []
-                for b in missing_branch_arcs[lineno]:
-                    if b < 0:
-                        shorts.append("exit")
-                    else:
-                        shorts.append(b)
-                    longs.append(fr.missing_arc_description(lineno, b, arcs_executed))
-                # 202F is NARROW NO-BREAK SPACE.
-                # 219B is RIGHTWARDS ARROW WITH STROKE.
-                short_fmt = "%s ↛ %s"
-                annotate_html = ", ".join(short_fmt % (lineno, d) for d in shorts)
-
-                if len(longs) == 1:
-                    annotate_long = longs[0]
-                else:
-                    annotate_long = "%d missed branches: %s" % (
-                        len(longs),
-                        ", ".join("%d) %s" % (num, ann_long)
-                            for num, ann_long in enumerate(longs, start=1)),
-                    )
-            elif lineno in analysis.statements:
-                line_class.append(c_run)
-
+        # Write the HTML page for this file.
+        file_data = self.datagen.data_for_file(fr, analysis)
+        for ldata in file_data.lines:
             # Build the HTML for the line.
             html = []
-            for tok_type, tok_text in line:
+            for tok_type, tok_text in ldata.tokens:
                 if tok_type == "ws":
                     html.append(escape(tok_text))
                 else:
                     tok_html = escape(tok_text) or ' '
                     html.append(
-                        '<span class="%s">%s</span>' % (tok_type, tok_html)
+                        u'<span class="{}">{}</span>'.format(tok_type, tok_html)
                     )
-
-            lines.append({
-                'html': ''.join(html),
-                'number': lineno,
-                'class': ' '.join(line_class) or "pln",
-                'annotate': annotate_html,
-                'annotate_long': annotate_long,
-            })
-
-        # Write the HTML page for this file.
-        html = self.source_tmpl.render({
-            'c_exc': c_exc,
-            'c_mis': c_mis,
-            'c_par': c_par,
-            'c_run': c_run,
-            'has_arcs': self.has_arcs,
-            'extra_css': self.extra_css,
-            'fr': fr,
-            'nums': nums,
-            'lines': lines,
-            'time_stamp': self.time_stamp,
-        })
-
+            ldata.html = ''.join(html)
+
+            if ldata.short_annotations:
+                # 202F is NARROW NO-BREAK SPACE.
+                # 219B is RIGHTWARDS ARROW WITH STROKE.
+                ldata.annotate = u", ".join(
+                    u"{} ↛ {}".format(ldata.number, d)
+                    for d in ldata.short_annotations
+                )
+            else:
+                ldata.annotate = None
+
+            if ldata.long_annotations:
+                longs = ldata.long_annotations
+                if len(longs) == 1:
+                    ldata.annotate_long = longs[0]
+                else:
+                    ldata.annotate_long = u"{:d} missed branches: {}".format(
+                        len(longs),
+                        u", ".join(
+                            u"{:d}) {}".format(num, ann_long)
+                            for num, ann_long in enumerate(longs, start=1)
+                        ),
+                    )
+            else:
+                ldata.annotate_long = None
+
+            css_classes = []
+            if ldata.category:
+                css_classes.append(self.template_globals['category'][ldata.category])
+            ldata.css_class = ' '.join(css_classes) or "pln"
+
+        html = self.source_tmpl.render(file_data.__dict__)
         write_html(html_path, html)
 
         # Save this file's information for the index file.
         index_info = {
             'nums': nums,
             'html_filename': html_filename,
             'relative_filename': fr.relative_filename(),
         }
-        self.files.append(index_info)
-        self.status.set_index_info(rootname, index_info)
+        self.file_summaries.append(index_info)
+        self.incr.set_index_info(rootname, index_info)
 
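The per-token loop in `html_file` above is the whole HTML-rendering step for one source line: every non-whitespace token becomes a `<span>` whose CSS class is the token type. A standalone sketch of the idea (the token types are illustrative, and the stdlib `html.escape` stands in for the module's own escape helper):

    from html import escape  # stand-in for the escape() helper used in html.py

    tokens = [("key", "def"), ("ws", " "), ("nam", "frob"), ("op", "("), ("op", ")"), ("op", ":")]
    parts = []
    for tok_type, tok_text in tokens:
        if tok_type == "ws":
            parts.append(escape(tok_text))
        else:
            parts.append(u'<span class="{}">{}</span>'.format(tok_type, escape(tok_text)))
    print(''.join(parts))
    # <span class="key">def</span> <span class="nam">frob</span><span class="op">(</span>...
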
@@ -297,73 +350,69 @@
     def index_file(self):
         """Write the index.html file for this report."""
         index_tmpl = Templite(read_data("index.html"), self.template_globals)
 
-        self.totals = sum(self.all_files_nums)
-
         html = index_tmpl.render({
-            'has_arcs': self.has_arcs,
-            'extra_css': self.extra_css,
-            'files': self.files,
+            'files': self.file_summaries,
             'totals': self.totals,
-            'time_stamp': self.time_stamp,
         })
 
         write_html(os.path.join(self.directory, "index.html"), html)
 
         # Write the latest hashes for next time.
-        self.status.write(self.directory)
+        self.incr.write()
 
 
-class HtmlStatus(object):
-    """The status information we keep to support incremental reporting."""
+class IncrementalChecker(object):
+    """Logic and data to support incremental reporting."""
 
     STATUS_FILE = "status.json"
-    STATUS_FORMAT = 1
+    STATUS_FORMAT = 2
 
     # pylint: disable=wrong-spelling-in-comment,useless-suppression
     # The data looks like:
     #
     # {
-    #     'format': 1,
-    #     'settings': '540ee119c15d52a68a53fe6f0897346d',
-    #     'version': '4.0a1',
-    #     'files': {
-    #         'cogapp___init__': {
-    #             'hash': 'e45581a5b48f879f301c0f30bf77a50c',
-    #             'index': {
-    #                 'html_filename': 'cogapp___init__.html',
-    #                 'name': 'cogapp/__init__',
-    #                 'nums': <coverage.results.Numbers object at 0x10ab7ed0>,
+    #     "format": 2,
+    #     "globals": "540ee119c15d52a68a53fe6f0897346d",
+    #     "version": "4.0a1",
+    #     "files": {
+    #         "cogapp___init__": {
+    #             "hash": "e45581a5b48f879f301c0f30bf77a50c",
+    #             "index": {
+    #                 "html_filename": "cogapp___init__.html",
+    #                 "relative_filename": "cogapp/__init__",
+    #                 "nums": [ 1, 14, 0, 0, 0, 0, 0 ]
     #             }
     #         },
     #         ...
-    #         'cogapp_whiteutils': {
-    #             'hash': '8504bb427fc488c4176809ded0277d51',
-    #             'index': {
-    #                 'html_filename': 'cogapp_whiteutils.html',
-    #                 'name': 'cogapp/whiteutils',
-    #                 'nums': <coverage.results.Numbers object at 0x10ab7d90>,
+    #         "cogapp_whiteutils": {
+    #             "hash": "8504bb427fc488c4176809ded0277d51",
+    #             "index": {
+    #                 "html_filename": "cogapp_whiteutils.html",
+    #                 "relative_filename": "cogapp/whiteutils",
+    #                 "nums": [ 1, 59, 0, 1, 28, 2, 2 ]
     #             }
-    #         },
-    #     },
+    #         }
+    #     }
     # }
 
-    def __init__(self):
+    def __init__(self, directory):
+        self.directory = directory
         self.reset()
 
     def reset(self):
-        """Initialize to empty."""
-        self.settings = ''
+        """Initialize to empty. Causes all files to be reported."""
+        self.globals = ''
         self.files = {}
 
-    def read(self, directory):
-        """Read the last status in `directory`."""
+    def read(self):
+        """Read the information we stored last time."""
        usable = False
         try:
-            status_file = os.path.join(directory, self.STATUS_FILE)
-            with open(status_file, "r") as fstatus:
+            status_file = os.path.join(self.directory, self.STATUS_FILE)
+            with open(status_file) as fstatus:
                 status = json.load(fstatus)
         except (IOError, ValueError):
             usable = False
         else:
             usable = True
@@ -375,45 +424,60 @@
         if usable:
             self.files = {}
             for filename, fileinfo in iitems(status['files']):
                 fileinfo['index']['nums'] = Numbers(*fileinfo['index']['nums'])
                 self.files[filename] = fileinfo
-            self.settings = status['settings']
+            self.globals = status['globals']
         else:
             self.reset()
 
-    def write(self, directory):
-        """Write the current status to `directory`."""
-        status_file = os.path.join(directory, self.STATUS_FILE)
+    def write(self):
+        """Write the current status."""
+        status_file = os.path.join(self.directory, self.STATUS_FILE)
         files = {}
         for filename, fileinfo in iitems(self.files):
             fileinfo['index']['nums'] = fileinfo['index']['nums'].init_args()
             files[filename] = fileinfo
 
         status = {
             'format': self.STATUS_FORMAT,
             'version': coverage.__version__,
-            'settings': self.settings,
+            'globals': self.globals,
             'files': files,
         }
         with open(status_file, "w") as fout:
             json.dump(status, fout, separators=(',', ':'))
 
-        # Older versions of ShiningPanda look for the old name, status.dat.
-        # Accommodate them if we are running under Jenkins.
-        # https://issues.jenkins-ci.org/browse/JENKINS-28428
-        if "JENKINS_URL" in os.environ:
-            with open(os.path.join(directory, "status.dat"), "w") as dat:
-                dat.write("https://issues.jenkins-ci.org/browse/JENKINS-28428\n")
-
-    def settings_hash(self):
-        """Get the hash of the coverage.py settings."""
-        return self.settings
-
-    def set_settings_hash(self, settings):
-        """Set the hash of the coverage.py settings."""
-        self.settings = settings
+    def check_global_data(self, *data):
+        """Check the global data that can affect incremental reporting."""
+        m = Hasher()
+        for d in data:
+            m.update(d)
+        these_globals = m.hexdigest()
+        if self.globals != these_globals:
+            self.reset()
+            self.globals = these_globals
+
+    def can_skip_file(self, data, fr, rootname):
+        """Can we skip reporting this file?
+
+        `data` is a CoverageData object, `fr` is a `FileReporter`, and
+        `rootname` is the name being used for the file.
+        """
+        m = Hasher()
+        m.update(fr.source().encode('utf-8'))
+        add_data_to_hash(data, fr.filename, m)
+        this_hash = m.hexdigest()
+
+        that_hash = self.file_hash(rootname)
+
+        if this_hash == that_hash:
+            # Nothing has changed to require the file to be reported again.
+            return True
+        else:
+            self.set_file_hash(rootname, this_hash)
+            return False
 
     def file_hash(self, fname):
         """Get the hash of `fname`'s contents."""
         return self.files.get(fname, {}).get('hash', '')
 
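Finally, `write()` and `read()` round-trip each file's `Numbers` through plain JSON: `init_args()` flattens it to the list stored under "nums" in status.json, and `Numbers(*...)` rebuilds it on the next run. A rough sketch of that round trip with a hypothetical stand-in class:

    import json

    class Nums(object):
        """Hypothetical stand-in for coverage.results.Numbers."""

        def __init__(self, *args):
            self.args = args

        def init_args(self):
            # The flat list that ends up under "nums" in status.json.
            return list(self.args)

    nums = Nums(1, 14, 0, 0, 0, 0, 0)
    blob = json.dumps({"nums": nums.init_args()}, separators=(',', ':'))
    restored = Nums(*json.loads(blob)["nums"])
    print(blob)                         # {"nums":[1,14,0,0,0,0,0]}
    print(restored.args == nums.args)   # True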