@@ -10,20 +10,22 @@
 import datetime
 import functools
 import glob
 import itertools
 import os
+import random
 import re
+import socket
 import sqlite3
 import sys
 import threading
 import zlib

 from coverage.debug import NoDebugging, SimpleReprMixin, clipped_repr
-from coverage.exceptions import CoverageException
+from coverage.exceptions import CoverageException, DataError
 from coverage.files import PathAliases
-from coverage.misc import contract, file_be_gone, filename_suffix, isolate_module
+from coverage.misc import contract, file_be_gone, isolate_module
 from coverage.numbits import numbits_to_nums, numbits_union, nums_to_numbits
 from coverage.version import __version__

 os = isolate_module(os)

@@ -189,11 +191,11 @@
     def __init__(self, basename=None, suffix=None, no_disk=False, warn=None, debug=None):
         """Create a :class:`CoverageData` object to hold coverage-measured data.

         Arguments:
             basename (str): the base name of the data file, defaulting to
-                ".coverage".
+                ".coverage". This can be a path to a file in another directory.
             suffix (str or bool): has the same meaning as the `data_suffix`
                 argument to :class:`coverage.Coverage`.
             no_disk (bool): if True, keep all data in memory, and don't
                 write any disk file.
             warn: a warning callback function, accepting a warning message
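
A quick usage sketch for the arguments documented above (the file names are hypothetical, not from this change):

    from coverage import CoverageData

    # On-disk data file with a non-default name; suffix=True appends a
    # per-process suffix, as with parallel measurement.
    data = CoverageData(basename="build/.coverage", suffix=True)

    # Purely in-memory data, handy in tests.
    mem_data = CoverageData(no_disk=True)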
@@ -285,18 +287,18 @@
         """Read the metadata from a database so that we are ready to use it."""
         with self._dbs[threading.get_ident()] as db:
             try:
                 schema_version, = db.execute_one("select version from coverage_schema")
             except Exception as exc:
-                raise CoverageException(
+                raise DataError(
                     "Data file {!r} doesn't seem to be a coverage data file: {}".format(
                         self._filename, exc
                     )
                 ) from exc
             else:
                 if schema_version != SCHEMA_VERSION:
-                    raise CoverageException(
+                    raise DataError(
                         "Couldn't use data file {!r}: wrong schema: {} instead of {}".format(
                             self._filename, schema_version, SCHEMA_VERSION
                         )
                     )

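For reference, the schema check above can be reproduced with plain sqlite3; this sketch assumes a `.coverage` data file already exists in the current directory:

    import sqlite3

    con = sqlite3.connect(".coverage")
    try:
        # Same query _read_db() runs; fails if this isn't a coverage data file.
        (schema_version,) = con.execute("select version from coverage_schema").fetchone()
        print("schema version:", schema_version)
    finally:
        con.close()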
@@ -314,22 +316,20 @@
                 self._open_db()
             else:
                 self._create_db()
         return self._dbs[threading.get_ident()]

-    def __nonzero__(self):
+    def __bool__(self):
         if (threading.get_ident() not in self._dbs and not os.path.exists(self._filename)):
             return False
         try:
             with self._connect() as con:
                 rows = con.execute("select * from file limit 1")
                 return bool(list(rows))
         except CoverageException:
             return False

-    __bool__ = __nonzero__
-
     @contract(returns="bytes")
     def dumps(self):
         """Serialize the current data to a byte string.

         The format of the serialized data is not documented. It is only
@@ -368,13 +368,13 @@

         """
         if self._debug.should("dataio"):
             self._debug.write(f"Loading data into data file {self._filename!r}")
         if data[:1] != b"z":
-            raise CoverageException(
+            raise DataError(
                 f"Unrecognized serialization: {data[:40]!r} (head of {len(data)} bytes)"
             )
         script = zlib.decompress(data[1:]).decode("utf-8")
         self._dbs[threading.get_ident()] = db = SqliteDb(self._filename, self._debug)
         with db:
             db.executescript(script)
         self._read_db()
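
A round-trip sketch of dumps()/loads() as implemented above (the measured file and line numbers are made up; in-memory data keeps it self-contained):

    from coverage import CoverageData

    original = CoverageData(no_disk=True)
    original.add_lines({"example.py": [1, 2, 3]})    # hypothetical measurement

    blob = original.dumps()
    assert blob[:1] == b"z"          # serialized form starts with the b"z" marker

    restored = CoverageData(no_disk=True)
    restored.loads(blob)
    print(restored.lines("example.py"))              # expect [1, 2, 3]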
@@ -511,13 +511,13 @@
     def _choose_lines_or_arcs(self, lines=False, arcs=False):
         """Force the data file to choose between lines and arcs."""
         assert lines or arcs
         assert not (lines and arcs)
         if lines and self._has_arcs:
-            raise CoverageException("Can't add line measurements to existing branch data")
+            raise DataError("Can't add line measurements to existing branch data")
         if arcs and self._has_lines:
-            raise CoverageException("Can't add branch measurements to existing line data")
+            raise DataError("Can't add branch measurements to existing line data")
         if not self._has_arcs and not self._has_lines:
             self._has_lines = lines
             self._has_arcs = arcs
             with self._connect() as con:
                 con.execute(
@@ -539,18 +539,18 @@
         self._start_using()
         with self._connect() as con:
             for filename, plugin_name in file_tracers.items():
                 file_id = self._file_id(filename)
                 if file_id is None:
-                    raise CoverageException(
+                    raise DataError(
                         f"Can't add file tracer data for unmeasured file '{filename}'"
                     )

                 existing_plugin = self.file_tracer(filename)
                 if existing_plugin:
                     if existing_plugin != plugin_name:
-                        raise CoverageException(
+                        raise DataError(
                             "Conflicting file tracer name for '{}': {!r} vs {!r}".format(
                                 filename, existing_plugin, plugin_name,
                             )
                         )
                 elif plugin_name:
@@ -576,11 +576,11 @@
         if self._debug.should("dataop"):
             self._debug.write(f"Touching {filenames!r}")
         self._start_using()
         with self._connect():  # Use this to get one transaction.
             if not self._has_arcs and not self._has_lines:
-                raise CoverageException("Can't touch files in an empty CoverageData")
+                raise DataError("Can't touch files in an empty CoverageData")

             for filename in filenames:
                 self._file_id(filename, add=True)
                 if plugin_name:
                     # Set the tracer for this file
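
The guard above means an empty data file can't be touched; a small sketch of the expected behavior (file names are illustrative):

    from coverage import CoverageData
    from coverage.exceptions import DataError

    data = CoverageData(no_disk=True)
    try:
        data.touch_file("never_measured.py")    # empty data: DataError expected
    except DataError as exc:
        print("expected:", exc)

    data.add_lines({"measured.py": [1]})
    data.touch_file("also_in_report.py")        # fine: recorded with no lines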
@@ -595,13 +595,13 @@
         if self._debug.should("dataop"):
             self._debug.write("Updating with data from {!r}".format(
                 getattr(other_data, "_filename", "???"),
             ))
         if self._has_lines and other_data._has_arcs:
-            raise CoverageException("Can't combine arc data with line data")
+            raise DataError("Can't combine arc data with line data")
         if self._has_arcs and other_data._has_lines:
-            raise CoverageException("Can't combine line data with arc data")
+            raise DataError("Can't combine line data with arc data")

         aliases = aliases or PathAliases()

         # Force the database we're writing to to exist before we start nesting
         # contexts.
@@ -690,11 +690,11 @@
             for path in files.values():
                 this_tracer = this_tracers.get(path)
                 other_tracer = tracers.get(path, "")
                 # If there is no tracer, there is always the None tracer.
                 if this_tracer is not None and this_tracer != other_tracer:
-                    raise CoverageException(
+                    raise DataError(
                         "Conflicting file tracer name for '{}': {!r} vs {!r}".format(
                             path, this_tracer, other_tracer
                         )
                     )
                 tracer_map[path] = other_tracer
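
A sketch of the update() call these checks guard, combining one data file into another while remapping paths; the alias pattern and file names are hypothetical:

    from coverage import CoverageData
    from coverage.files import PathAliases

    combined = CoverageData(basename=".coverage.combined")
    other = CoverageData(basename=".coverage.worker1")
    other.read()

    aliases = PathAliases()
    aliases.add("/ci/build/*/src", "src")    # map remote paths onto local ones

    combined.update(other, aliases=aliases)
    combined.write()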
@@ -1002,10 +1002,30 @@
             ("sqlite3_temp_store", temp_store),
             ("sqlite3_compile_options", copts),
         ]


+def filename_suffix(suffix):
+    """Compute a filename suffix for a data file.
+
+    If `suffix` is a string or None, simply return it. If `suffix` is True,
+    then build a suffix incorporating the hostname, process id, and a random
+    number.
+
+    Returns a string or None.
+
+    """
+    if suffix is True:
+        # If data_suffix was a simple true value, then make a suffix with
+        # plenty of distinguishing information. We do this here in
+        # `save()` at the last minute so that the pid will be correct even
+        # if the process forks.
+        dice = random.Random(os.urandom(8)).randint(0, 999999)
+        suffix = "%s.%s.%06d" % (socket.gethostname(), os.getpid(), dice)
+    return suffix
+
+
 class SqliteDb(SimpleReprMixin):
     """A simple abstraction over a SQLite database.

     Use as a context manager, then you can use it like a
     :class:`python:sqlite3.Connection` object::
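
The filename_suffix() helper added above behaves as this sketch suggests (the hostname, pid, and random digits shown are only examples):

    from coverage.sqldata import filename_suffix

    print(filename_suffix(None))          # None: no suffix at all
    print(filename_suffix("ci-job-7"))    # strings pass through unchanged
    print(filename_suffix(True))          # e.g. "myhost.12345.678901"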
@@ -1033,11 +1053,11 @@
         if self.debug:
             self.debug.write(f"Connecting to {self.filename!r}")
         try:
             self.con = sqlite3.connect(self.filename, check_same_thread=False)
         except sqlite3.Error as exc:
-            raise CoverageException(f"Couldn't use data file {self.filename!r}: {exc}") from exc
+            raise DataError(f"Couldn't use data file {self.filename!r}: {exc}") from exc

         self.con.create_function("REGEXP", 2, _regexp)

         # This pragma makes writing faster. It disables rollbacks, but we never need them.
         # PyPy needs the .close() calls here, or sqlite gets twisted up:
@@ -1066,11 +1086,11 @@
                 self.con.__exit__(exc_type, exc_value, traceback)
                 self.close()
             except Exception as exc:
                 if self.debug:
                     self.debug.write(f"EXCEPTION from __exit__: {exc}")
-                raise CoverageException(f"Couldn't end data file {self.filename!r}: {exc}") from exc
+                raise DataError(f"Couldn't end data file {self.filename!r}: {exc}") from exc

     def execute(self, sql, parameters=()):
         """Same as :meth:`python:sqlite3.Connection.execute`."""
         if self.debug:
             tail = f" with {parameters!r}" if parameters else ""
@@ -1097,11 +1117,11 @@
                         )
             except Exception:  # pragma: cant happen
                 pass
             if self.debug:
                 self.debug.write(f"EXCEPTION from execute: {msg}")
-            raise CoverageException(f"Couldn't use data file {self.filename!r}: {msg}") from exc
+            raise DataError(f"Couldn't use data file {self.filename!r}: {msg}") from exc

     def execute_one(self, sql, parameters=()):
         """Execute a statement and return the one row that results.

         This is like execute(sql, parameters).fetchone(), except it is