DebugClients/Python/coverage/data.py

changeset:  5141:bc64243b7672
parents:    5126:d28b92dabc2b, 5140:01484c0afbc6
child:      5144:1ab536d25072
diff stats: equal deleted inserted replaced 5126:d28b92dabc2b 5141:bc64243b7672
1 # Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
2 # For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
3
4 """Coverage data for coverage.py."""
5
6 import glob
7 import itertools
8 import json
9 import optparse
10 import os
11 import os.path
12 import random
13 import re
14 import socket
15
16 from coverage import env
17 from coverage.backward import iitems, string_class
18 from coverage.debug import _TEST_NAME_FILE
19 from coverage.files import PathAliases
20 from coverage.misc import CoverageException, file_be_gone, isolate_module
21
# Replace the module-level `os` with the result of isolate_module().
# NOTE(review): presumably this protects coverage.py from programs that
# monkeypatch `os` while being measured -- confirm in coverage.misc.
os = isolate_module(os)
23
24
class CoverageData(object):
    """Manages collected coverage data, including file storage.

    This class is the public supported API to the data coverage.py collects
    during program execution.  It includes information about what code was
    executed.  It does not include information from the analysis phase, to
    determine what lines could have been executed, or what lines were not
    executed.

    .. note::

        The file format is not documented or guaranteed.  It will change in
        the future, in possibly complicated ways.  Do not read coverage.py
        data files directly.  Use this API to avoid disruption.

    There are a number of kinds of data that can be collected:

    * **lines**: the line numbers of source lines that were executed.
      These are always available.

    * **arcs**: pairs of source and destination line numbers for transitions
      between source lines.  These are only available if branch coverage was
      used.

    * **file tracer names**: the module names of the file tracer plugins that
      handled each file in the data.

    * **run information**: information about the program execution.  This is
      written during "coverage run", and then accumulated during "coverage
      combine".

    Lines, arcs, and file tracer names are stored for each source file.  File
    names in this API are case-sensitive, even on platforms with
    case-insensitive file systems.

    To read a coverage.py data file, use :meth:`read_file`, or
    :meth:`read_fileobj` if you have an already-opened file.  You can then
    access the line, arc, or file tracer data with :meth:`lines`, :meth:`arcs`,
    or :meth:`file_tracer`.  Run information is available with
    :meth:`run_infos`.

    The :meth:`has_arcs` method indicates whether arc data is available.  You
    can get a list of the files in the data with :meth:`measured_files`.
    A summary of the line data is available from :meth:`line_counts`.  As with
    most Python containers, you can determine if there is any data at all by
    using this object as a boolean value.

    Most data files will be created by coverage.py itself, but you can use
    methods here to create data files if you like.  The :meth:`add_lines`,
    :meth:`add_arcs`, and :meth:`add_file_tracers` methods add data, in ways
    that are convenient for coverage.py.  The :meth:`add_run_info` method adds
    key-value pairs to the run information.

    To add a file without any measured data, use :meth:`touch_file`.

    You write to a named file with :meth:`write_file`, or to an already opened
    file with :meth:`write_fileobj`.

    You can clear the data in memory with :meth:`erase`.  Two data collections
    can be combined by using :meth:`update` on one :class:`CoverageData`,
    passing it the other.

    """

    # The data file format is JSON, with these keys:
    #
    #     * lines: a dict mapping file names to lists of line numbers
    #       executed::
    #
    #         { "file1": [17,23,45], "file2": [1,2,3], ... }
    #
    #     * arcs: a dict mapping file names to lists of line number pairs::
    #
    #         { "file1": [[17,23], [17,25], [25,26]], ... }
    #
    #     * file_tracers: a dict mapping file names to plugin names::
    #
    #         { "file1": "django.coverage", ... }
    #
    #     * runs: a list of dicts of information about the coverage.py runs
    #       contributing to the data::
    #
    #         [ { "brief_sys": "CPython 2.7.10 Darwin" }, ... ]
    #
    # Only one of `lines` or `arcs` will be present: with branch coverage, data
    # is stored as arcs. Without branch coverage, it is stored as lines.  The
    # line data is easily recovered from the arcs: it is all the first elements
    # of the pairs that are greater than zero.

    def __init__(self, debug=None):
        """Create a CoverageData.

        `debug` is a `DebugControl` object for writing debug messages.

        """
        self._debug = debug

        # A map from canonical Python source file name to a dictionary in
        # which there's an entry for each line number that has been
        # executed:
        #
        #   { 'filename1.py': [12, 47, 1001], ... }
        #
        self._lines = None

        # A map from canonical Python source file name to a dictionary with an
        # entry for each pair of line numbers forming an arc:
        #
        #   { 'filename1.py': [(12,14), (47,48), ... ], ... }
        #
        self._arcs = None

        # A map from canonical source file name to a plugin module name:
        #
        #   { 'filename1.py': 'django.coverage', ... }
        #
        self._file_tracers = {}

        # A list of dicts of information about the coverage.py runs.
        self._runs = []

    def __repr__(self):
        return "<{klass} lines={lines} arcs={arcs} tracers={tracers} runs={runs}>".format(
            klass=self.__class__.__name__,
            lines="None" if self._lines is None else "{{{0}}}".format(len(self._lines)),
            arcs="None" if self._arcs is None else "{{{0}}}".format(len(self._arcs)),
            tracers="{{{0}}}".format(len(self._file_tracers)),
            runs="[{0}]".format(len(self._runs)),
        )

    ##
    ## Reading data
    ##

    def has_arcs(self):
        """Does this data have arcs?

        Arc data is only available if branch coverage was used during
        collection.

        Returns a boolean.

        """
        return self._has_arcs()

    def lines(self, filename):
        """Get the list of lines executed for a file.

        If the file was not measured, returns None.  A file might be measured,
        and have no lines executed, in which case an empty list is returned.

        If the file was executed, returns a list of integers, the line numbers
        executed in the file. The list is in no particular order.

        """
        if self._arcs is not None:
            arcs = self._arcs.get(filename)
            if arcs is not None:
                # Line data is recovered from the arcs: the positive elements
                # of the pairs are executed lines.
                all_lines = itertools.chain.from_iterable(arcs)
                return list(set(lineno for lineno in all_lines if lineno > 0))
        elif self._lines is not None:
            return self._lines.get(filename)
        return None

    def arcs(self, filename):
        """Get the list of arcs executed for a file.

        If the file was not measured, returns None.  A file might be measured,
        and have no arcs executed, in which case an empty list is returned.

        If the file was executed, returns a list of 2-tuples of integers. Each
        pair is a starting line number and an ending line number for a
        transition from one line to another. The list is in no particular
        order.

        Negative numbers have special meaning.  If the starting line number is
        -N, it represents an entry to the code object that starts at line N.
        If the ending line number is -N, it's an exit from the code object that
        starts at line N.

        """
        if self._arcs is not None:
            if filename in self._arcs:
                return self._arcs[filename]
        return None

    def file_tracer(self, filename):
        """Get the plugin name of the file tracer for a file.

        Returns the name of the plugin that handles this file.  If the file was
        measured, but didn't use a plugin, then "" is returned.  If the file
        was not measured, then None is returned.

        """
        # Because the vast majority of files involve no plugin, we don't store
        # them explicitly in self._file_tracers.  Check the measured data
        # instead to see if it was a known file with no plugin.
        if filename in (self._arcs or self._lines or {}):
            return self._file_tracers.get(filename, "")
        return None

    def run_infos(self):
        """Return the list of dicts of run information.

        For data collected during a single run, this will be a one-element
        list.  If data has been combined, there will be one element for each
        original data file.

        """
        return self._runs

    def measured_files(self):
        """A list of all files that have been measured."""
        return list(self._arcs or self._lines or {})

    def line_counts(self, fullpath=False):
        """Return a dict summarizing the line coverage data.

        Keys are based on the file names, and values are the number of executed
        lines.  If `fullpath` is true, then the keys are the full pathnames of
        the files, otherwise they are the basenames of the files.

        Returns a dict mapping file names to counts of lines.

        """
        summ = {}
        if fullpath:
            filename_fn = lambda f: f
        else:
            filename_fn = os.path.basename
        for filename in self.measured_files():
            summ[filename_fn(filename)] = len(self.lines(filename))
        return summ

    def __nonzero__(self):
        return bool(self._lines or self._arcs)

    __bool__ = __nonzero__

    def read_fileobj(self, file_obj):
        """Read the coverage data from the given file object.

        Should only be used on an empty CoverageData object.

        """
        data = self._read_raw_data(file_obj)

        self._lines = self._arcs = None

        if 'lines' in data:
            self._lines = data['lines']
        if 'arcs' in data:
            # JSON stores pairs as 2-element lists; convert back to tuples.
            self._arcs = dict(
                (fname, [tuple(pair) for pair in arcs])
                for fname, arcs in iitems(data['arcs'])
            )
        self._file_tracers = data.get('file_tracers', {})
        self._runs = data.get('runs', [])

        self._validate()

    def read_file(self, filename):
        """Read the coverage data from `filename` into this object."""
        if self._debug and self._debug.should('dataio'):
            self._debug.write("Reading data from %r" % (filename,))
        try:
            with self._open_for_reading(filename) as f:
                self.read_fileobj(f)
        except Exception as exc:
            raise CoverageException(
                "Couldn't read data from '%s': %s: %s" % (
                    filename, exc.__class__.__name__, exc,
                )
            )

    _GO_AWAY = "!coverage.py: This is a private format, don't read it directly!"

    @classmethod
    def _open_for_reading(cls, filename):
        """Open a file appropriately for reading data."""
        return open(filename, "r")

    @classmethod
    def _read_raw_data(cls, file_obj):
        """Read the raw data from a file object."""
        go_away = file_obj.read(len(cls._GO_AWAY))
        if go_away != cls._GO_AWAY:
            raise CoverageException("Doesn't seem to be a coverage.py data file")
        return json.load(file_obj)

    @classmethod
    def _read_raw_data_file(cls, filename):
        """Read the raw data from a file, for debugging."""
        with cls._open_for_reading(filename) as f:
            return cls._read_raw_data(f)

    ##
    ## Writing data
    ##

    def add_lines(self, line_data):
        """Add measured line data.

        `line_data` is a dictionary mapping file names to dictionaries::

            { filename: { lineno: None, ... }, ...}

        """
        if self._debug and self._debug.should('dataop'):
            self._debug.write("Adding lines: %d files, %d lines total" % (
                len(line_data), sum(len(lines) for lines in line_data.values())
            ))
        if self._has_arcs():
            raise CoverageException("Can't add lines to existing arc data")

        if self._lines is None:
            self._lines = {}
        for filename, linenos in iitems(line_data):
            if filename in self._lines:
                # Merge with the lines already recorded for this file.
                new_linenos = set(self._lines[filename])
                new_linenos.update(linenos)
                linenos = new_linenos
            self._lines[filename] = list(linenos)

        self._validate()

    def add_arcs(self, arc_data):
        """Add measured arc data.

        `arc_data` is a dictionary mapping file names to dictionaries::

            { filename: { (l1,l2): None, ... }, ...}

        """
        if self._debug and self._debug.should('dataop'):
            self._debug.write("Adding arcs: %d files, %d arcs total" % (
                len(arc_data), sum(len(arcs) for arcs in arc_data.values())
            ))
        if self._has_lines():
            raise CoverageException("Can't add arcs to existing line data")

        if self._arcs is None:
            self._arcs = {}
        for filename, arcs in iitems(arc_data):
            if filename in self._arcs:
                # Merge with the arcs already recorded for this file.
                new_arcs = set(self._arcs[filename])
                new_arcs.update(arcs)
                arcs = new_arcs
            self._arcs[filename] = list(arcs)

        self._validate()

    def add_file_tracers(self, file_tracers):
        """Add per-file plugin information.

        `file_tracers` is { filename: plugin_name, ... }

        """
        if self._debug and self._debug.should('dataop'):
            self._debug.write("Adding file tracers: %d files" % (len(file_tracers),))

        existing_files = self._arcs or self._lines or {}
        for filename, plugin_name in iitems(file_tracers):
            if filename not in existing_files:
                raise CoverageException(
                    "Can't add file tracer data for unmeasured file '%s'" % (filename,)
                )
            existing_plugin = self._file_tracers.get(filename)
            if existing_plugin is not None and plugin_name != existing_plugin:
                raise CoverageException(
                    "Conflicting file tracer name for '%s': %r vs %r" % (
                        filename, existing_plugin, plugin_name,
                    )
                )
            self._file_tracers[filename] = plugin_name

        self._validate()

    def add_run_info(self, **kwargs):
        """Add information about the run.

        Keywords are arbitrary, and are stored in the run dictionary. Values
        must be JSON serializable.  You may use this function more than once,
        but repeated keywords overwrite each other.

        """
        if self._debug and self._debug.should('dataop'):
            self._debug.write("Adding run info: %r" % (kwargs,))
        if not self._runs:
            self._runs = [{}]
        self._runs[0].update(kwargs)
        self._validate()

    def touch_file(self, filename):
        """Ensure that `filename` appears in the data, empty if needed."""
        if self._debug and self._debug.should('dataop'):
            self._debug.write("Touching %r" % (filename,))
        if not self._has_arcs() and not self._has_lines():
            raise CoverageException("Can't touch files in an empty CoverageData")

        if self._has_arcs():
            where = self._arcs
        else:
            where = self._lines
        where.setdefault(filename, [])

        self._validate()

    def write_fileobj(self, file_obj):
        """Write the coverage data to `file_obj`."""

        # Create the file data.
        file_data = {}

        if self._has_arcs():
            file_data['arcs'] = self._arcs

        if self._has_lines():
            file_data['lines'] = self._lines

        if self._file_tracers:
            file_data['file_tracers'] = self._file_tracers

        if self._runs:
            file_data['runs'] = self._runs

        # Write the data to the file.
        file_obj.write(self._GO_AWAY)
        json.dump(file_data, file_obj)

    def write_file(self, filename):
        """Write the coverage data to `filename`."""
        if self._debug and self._debug.should('dataio'):
            self._debug.write("Writing data to %r" % (filename,))
        with open(filename, 'w') as fdata:
            self.write_fileobj(fdata)

    def erase(self):
        """Erase the data in this object."""
        self._lines = None
        self._arcs = None
        self._file_tracers = {}
        self._runs = []
        self._validate()

    def update(self, other_data, aliases=None):
        """Update this data with data from another `CoverageData`.

        If `aliases` is provided, it's a `PathAliases` object that is used to
        re-map paths to match the local machine's.

        """
        if self._has_lines() and other_data._has_arcs():
            raise CoverageException("Can't combine arc data with line data")
        if self._has_arcs() and other_data._has_lines():
            raise CoverageException("Can't combine line data with arc data")

        aliases = aliases or PathAliases()

        # _file_tracers: only have a string, so they have to agree.
        # Have to do these first, so that our examination of self._arcs and
        # self._lines won't be confused by data updated from other_data.
        for filename in other_data.measured_files():
            other_plugin = other_data.file_tracer(filename)
            filename = aliases.map(filename)
            this_plugin = self.file_tracer(filename)
            if this_plugin is None:
                if other_plugin:
                    self._file_tracers[filename] = other_plugin
            elif this_plugin != other_plugin:
                raise CoverageException(
                    "Conflicting file tracer name for '%s': %r vs %r" % (
                        filename, this_plugin, other_plugin,
                    )
                )

        # _runs: add the new runs to these runs.
        self._runs.extend(other_data._runs)

        # _lines: merge dicts.
        if other_data._has_lines():
            if self._lines is None:
                self._lines = {}
            for filename, file_lines in iitems(other_data._lines):
                filename = aliases.map(filename)
                if filename in self._lines:
                    lines = set(self._lines[filename])
                    lines.update(file_lines)
                    file_lines = list(lines)
                self._lines[filename] = file_lines

        # _arcs: merge dicts.
        if other_data._has_arcs():
            if self._arcs is None:
                self._arcs = {}
            for filename, file_arcs in iitems(other_data._arcs):
                filename = aliases.map(filename)
                if filename in self._arcs:
                    arcs = set(self._arcs[filename])
                    arcs.update(file_arcs)
                    file_arcs = list(arcs)
                self._arcs[filename] = file_arcs

        self._validate()

    ##
    ## Miscellaneous
    ##

    def _validate(self):
        """If we are in paranoid mode, validate that everything is right."""
        if env.TESTING:
            self._validate_invariants()

    def _validate_invariants(self):
        """Validate internal invariants."""
        # Only one of _lines or _arcs should exist.
        assert not(self._has_lines() and self._has_arcs()), (
            "Shouldn't have both _lines and _arcs"
        )

        # _lines should be a dict of lists of ints.
        if self._has_lines():
            for fname, lines in iitems(self._lines):
                assert isinstance(fname, string_class), "Key in _lines shouldn't be %r" % (fname,)
                assert all(isinstance(x, int) for x in lines), (
                    "_lines[%r] shouldn't be %r" % (fname, lines)
                )

        # _arcs should be a dict of lists of pairs of ints.
        if self._has_arcs():
            for fname, arcs in iitems(self._arcs):
                assert isinstance(fname, string_class), "Key in _arcs shouldn't be %r" % (fname,)
                assert all(isinstance(x, int) and isinstance(y, int) for x, y in arcs), (
                    "_arcs[%r] shouldn't be %r" % (fname, arcs)
                )

        # _file_tracers should have only non-empty strings as values.
        for fname, plugin in iitems(self._file_tracers):
            assert isinstance(fname, string_class), (
                "Key in _file_tracers shouldn't be %r" % (fname,)
            )
            assert plugin and isinstance(plugin, string_class), (
                "_file_tracers[%r] shouldn't be %r" % (fname, plugin)
            )

        # _runs should be a list of dicts.
        for val in self._runs:
            assert isinstance(val, dict)
            for key in val:
                assert isinstance(key, string_class), "Key in _runs shouldn't be %r" % (key,)

    def add_to_hash(self, filename, hasher):
        """Contribute `filename`'s data to the `hasher`.

        `hasher` is a `coverage.misc.Hasher` instance to be updated with
        the file's data.  It should only get the results data, not the run
        data.

        """
        if self._has_arcs():
            hasher.update(sorted(self.arcs(filename) or []))
        else:
            hasher.update(sorted(self.lines(filename) or []))
        hasher.update(self.file_tracer(filename))

    ##
    ## Internal
    ##

    def _has_lines(self):
        """Do we have data in self._lines?"""
        return self._lines is not None

    def _has_arcs(self):
        """Do we have data in self._arcs?"""
        return self._arcs is not None
603
604
class CoverageDataFiles(object):
    """Manage the use of coverage data files."""

    def __init__(self, basename=None, warn=None):
        """Create a CoverageDataFiles to manage data files.

        `warn` is the warning function to use.

        `basename` is the name of the file to use for storing data.

        """
        self.warn = warn
        # All storage happens under this absolute file name.
        self.filename = os.path.abspath(basename or ".coverage")

    def erase(self, parallel=False):
        """Erase the data from the file storage.

        If `parallel` is true, then also deletes data files created from the
        basename by parallel-mode.

        """
        file_be_gone(self.filename)
        if parallel:
            directory, local = os.path.split(self.filename)
            pattern = os.path.join(os.path.abspath(directory), local + '.*')
            for data_file in glob.glob(pattern):
                file_be_gone(data_file)

    def read(self, data):
        """Read the coverage data from our file into `data`, if the file exists."""
        if os.path.exists(self.filename):
            data.read_file(self.filename)

    def write(self, data, suffix=None):
        """Write the collected coverage data to a file.

        `suffix` is a suffix to append to the base file name. This can be used
        for multiple or parallel execution, so that many coverage data files
        can exist simultaneously.  A dot will be used to join the base name and
        the suffix.

        """
        filename = self.filename
        if suffix is True:
            # A simple true value means: invent a suffix with plenty of
            # distinguishing information.  This is done here at the last
            # minute so that the pid will be correct even if the process
            # forks.
            extra = ""
            if _TEST_NAME_FILE:                         # pragma: debugging
                with open(_TEST_NAME_FILE) as f:
                    test_name = f.read()
                extra = "." + test_name
            suffix = "%s%s.%s.%06d" % (
                socket.gethostname(), extra, os.getpid(),
                random.randint(0, 999999)
            )

        if suffix:
            filename += "." + suffix
        data.write_file(filename)

    def combine_parallel_data(self, data, aliases=None, data_paths=None):
        """Combine a number of data files together.

        Treat `self.filename` as a file prefix, and combine the data from all
        of the data files starting with that prefix plus a dot.

        If `aliases` is provided, it's a `PathAliases` object that is used to
        re-map paths to match the local machine's.

        If `data_paths` is provided, it is a list of directories or files to
        combine.  Directories are searched for files that start with
        `self.filename` plus dot as a prefix, and those files are combined.

        If `data_paths` is not provided, then the directory portion of
        `self.filename` is used as the directory to search for data files.

        Every data file found and combined is then deleted from disk. If a file
        cannot be read, a warning will be issued, and the file will not be
        deleted.

        """
        # Because of the os.path.abspath in the constructor, the directory
        # part will never be an empty string.
        directory, local = os.path.split(self.filename)
        local_pattern = local + '.*'

        # Gather the files to combine: explicit files as-is, directories
        # searched for files matching our prefix.
        candidates = []
        for path in (data_paths or [directory]):
            if os.path.isfile(path):
                candidates.append(os.path.abspath(path))
            elif os.path.isdir(path):
                pattern = os.path.join(os.path.abspath(path), local_pattern)
                candidates.extend(glob.glob(pattern))
            else:
                raise CoverageException("Couldn't combine from non-existent path '%s'" % (path,))

        for data_file in candidates:
            other = CoverageData()
            try:
                other.read_file(data_file)
            except CoverageException as exc:
                if self.warn:
                    # The CoverageException has the file name in it, so just
                    # use the message as the warning.
                    self.warn(str(exc))
            else:
                data.update(other, aliases=aliases)
                file_be_gone(data_file)
718
719
def canonicalize_json_data(data):
    """Canonicalize our JSON data so it can be compared."""
    # Sort each per-file list in place, one section at a time, so two dumps
    # of the same coverage compare equal.
    for section in ('lines', 'arcs'):
        for fname, values in iitems(data.get(section, {})):
            data[section][fname] = sorted(values)
726
727
def pretty_data(data):
    """Format data as JSON, but as nicely as possible.

    Returns a string.

    """
    # A plain indented JSON dump is the starting point.
    dumped = json.dumps(data, indent=4, sort_keys=True)
    # json.dumps splits each pair of numbers across lines; fold them back
    # onto one line.
    dumped = re.sub(r"\[\s+(-?\d+),\s+(-?\d+)\s+]", r"[\1, \2]", dumped)
    # Trailing spaces mess with tests, get rid of them.
    return re.sub(r"(?m)\s+$", "", dumped)
741
742
def debug_main(args):
    """Dump the raw data from data files.

    Run this as::

        $ python -m coverage.data [FILE]

    """
    parser = optparse.OptionParser()
    parser.add_option(
        "-c", "--canonical", action="store_true",
        help="Sort data into a canonical order",
    )
    opts, filenames = parser.parse_args(args)

    # With no files named, fall back to the default data file.
    if not filenames:
        filenames = [".coverage"]
    for filename in filenames:
        print("--- {0} ------------------------------".format(filename))
        raw = CoverageData._read_raw_data_file(filename)
        if opts.canonical:
            canonicalize_json_data(raw)
        print(pretty_data(raw))
764
765
if __name__ == '__main__':
    # Script entry point: dump the named data files (or .coverage).
    import sys
    debug_main(sys.argv[1:])
769
770 #
771 # eflag: FileType = Python2

eric ide

mercurial