@@ -213,11 +213,11 @@
         self._file_map = {}
         # Maps thread ids to SqliteDb objects.
         self._dbs = {}
         self._pid = os.getpid()
         # Synchronize the operations used during collection.
-        self._lock = threading.Lock()
+        self._lock = threading.RLock()
 
         # Are we in sync with the data file?
         self._have_used = False
 
         self._has_lines = False
@@ -229,11 +229,15 @@
 
     def _locked(method):  # pylint: disable=no-self-argument
         """A decorator for methods that should hold self._lock."""
        @functools.wraps(method)
         def _wrapped(self, *args, **kwargs):
+            if self._debug.should("lock"):
+                self._debug.write(f"Locking {self._lock!r} for {method.__name__}")
             with self._lock:
+                if self._debug.should("lock"):
+                    self._debug.write(f"Locked {self._lock!r} for {method.__name__}")
                 # pylint: disable=not-callable
                 return method(self, *args, **kwargs)
         return _wrapped
 
     def _choose_filename(self):
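
Note on the two lock-related changes above: the data lock becomes a threading.RLock, and the _locked decorator gains optional "lock" debug output. The re-entrant lock matters because one _locked method may call another _locked method on the same instance; with a plain Lock that nested acquire would deadlock the owning thread. A minimal standalone sketch of the pattern (the Recorder class and its method names are invented for illustration, not part of coverage.py):

    import functools
    import threading

    class Recorder:
        def __init__(self):
            # Re-entrant: the thread that holds it may acquire it again.
            self._lock = threading.RLock()
            self.lines = set()

        def _locked(method):  # helper defined at class scope, used only as a decorator
            """Hold self._lock while the wrapped method runs."""
            @functools.wraps(method)
            def _wrapped(self, *args, **kwargs):
                with self._lock:
                    return method(self, *args, **kwargs)
            return _wrapped

        @_locked
        def add_line(self, lineno):
            self.lines.add(lineno)

        @_locked
        def add_lines(self, linenos):
            for lineno in linenos:
                # Nested call into another locked method: fine with RLock,
                # a deadlock with a plain threading.Lock.
                self.add_line(lineno)

    Recorder().add_lines([1, 2, 3])
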
@@ -254,30 +258,10 @@
         self._dbs = {}
         self._file_map = {}
         self._have_used = False
         self._current_context_id = None
 
-    def _create_db(self):
-        """Create a db file that doesn't exist yet.
-
-        Initializes the schema and certain metadata.
-        """
-        if self._debug.should("dataio"):
-            self._debug.write(f"Creating data file {self._filename!r}")
-        self._dbs[threading.get_ident()] = db = SqliteDb(self._filename, self._debug)
-        with db:
-            db.executescript(SCHEMA)
-            db.execute("insert into coverage_schema (version) values (?)", (SCHEMA_VERSION,))
-            db.executemany(
-                "insert into meta (key, value) values (?, ?)",
-                [
-                    ("sys_argv", str(getattr(sys, "argv", None))),
-                    ("version", __version__),
-                    ("when", datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")),
-                ]
-            )
-
     def _open_db(self):
         """Open an existing db file, and read its metadata."""
         if self._debug.should("dataio"):
             self._debug.write(f"Opening data file {self._filename!r}")
         self._dbs[threading.get_ident()] = SqliteDb(self._filename, self._debug)
287 """Read the metadata from a database so that we are ready to use it.""" |
271 """Read the metadata from a database so that we are ready to use it.""" |
288 with self._dbs[threading.get_ident()] as db: |
272 with self._dbs[threading.get_ident()] as db: |
289 try: |
273 try: |
290 schema_version, = db.execute_one("select version from coverage_schema") |
274 schema_version, = db.execute_one("select version from coverage_schema") |
291 except Exception as exc: |
275 except Exception as exc: |
292 raise DataError( |
276 if "no such table: coverage_schema" in str(exc): |
293 "Data file {!r} doesn't seem to be a coverage data file: {}".format( |
277 self._init_db(db) |
294 self._filename, exc |
278 else: |
295 ) |
279 raise DataError( |
296 ) from exc |
280 "Data file {!r} doesn't seem to be a coverage data file: {}".format( |
|
281 self._filename, exc |
|
282 ) |
|
283 ) from exc |
297 else: |
284 else: |
298 if schema_version != SCHEMA_VERSION: |
285 if schema_version != SCHEMA_VERSION: |
299 raise DataError( |
286 raise DataError( |
300 "Couldn't use data file {!r}: wrong schema: {} instead of {}".format( |
287 "Couldn't use data file {!r}: wrong schema: {} instead of {}".format( |
301 self._filename, schema_version, SCHEMA_VERSION |
288 self._filename, schema_version, SCHEMA_VERSION |
@@ -307,17 +294,29 @@
                 self._has_lines = not self._has_arcs
 
             for path, file_id in db.execute("select path, id from file"):
                 self._file_map[path] = file_id
 
+    def _init_db(self, db):
+        """Write the initial contents of the database."""
+        if self._debug.should("dataio"):
+            self._debug.write(f"Initing data file {self._filename!r}")
+        db.executescript(SCHEMA)
+        db.execute("insert into coverage_schema (version) values (?)", (SCHEMA_VERSION,))
+        db.executemany(
+            "insert or ignore into meta (key, value) values (?, ?)",
+            [
+                ("sys_argv", str(getattr(sys, "argv", None))),
+                ("version", __version__),
+                ("when", datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")),
+            ]
+        )
+
     def _connect(self):
         """Get the SqliteDb object to use."""
         if threading.get_ident() not in self._dbs:
-            if os.path.exists(self._filename):
-                self._open_db()
-            else:
-                self._create_db()
+            self._open_db()
         return self._dbs[threading.get_ident()]
 
     def __bool__(self):
         if (threading.get_ident() not in self._dbs and not os.path.exists(self._filename)):
             return False
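
The net effect of the _init_db and _connect changes in this hunk: _connect no longer branches on os.path.exists, because opening a data file that does not exist yet simply yields an empty SQLite database; the later "select version from coverage_schema" then fails with "no such table", and _read_db falls back to _init_db to write the schema. A rough sketch of that flow using the standard sqlite3 module directly (the table layout and version number here are placeholders, not coverage.py's real SCHEMA):

    import sqlite3

    def open_or_init(filename):
        con = sqlite3.connect(filename)   # an empty database appears if the file is missing
        try:
            con.execute("select version from coverage_schema").fetchone()
        except sqlite3.OperationalError as exc:
            if "no such table: coverage_schema" in str(exc):
                # Fresh, empty file: initialize it instead of treating it as corrupt.
                con.execute("create table coverage_schema (version integer)")
                con.execute("insert into coverage_schema (version) values (?)", (1,))  # placeholder version
                con.commit()
            else:
                raise
        return con
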
@@ -347,11 +346,12 @@
 
         """
         if self._debug.should("dataio"):
             self._debug.write(f"Dumping data from data file {self._filename!r}")
         with self._connect() as con:
-            return b"z" + zlib.compress(con.dump().encode("utf-8"))
+            script = con.dump()
+            return b"z" + zlib.compress(script.encode("utf-8"))
 
     @contract(data="bytes")
     def loads(self, data):
         """Deserialize data from :meth:`dumps`.
 
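
For reference, dumps above serializes the whole database as a one-byte b"z" marker followed by a zlib-compressed SQL dump. A hedged sketch of the inverse step, written against plain sqlite3 rather than coverage.py's SqliteDb wrapper, so it illustrates the format only and is not the actual loads implementation:

    import sqlite3
    import zlib

    def loads_sketch(data: bytes) -> sqlite3.Connection:
        if data[:1] != b"z":               # format marker written by dumps()
            raise ValueError("unrecognized serialization format")
        script = zlib.decompress(data[1:]).decode("utf-8")
        con = sqlite3.connect(":memory:")
        con.executescript(script)          # replay the SQL dump into a fresh database
        return con
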
@@ -499,10 +499,13 @@
             return
         with self._connect() as con:
             self._set_context_id()
             for filename, arcs in arc_data.items():
                 file_id = self._file_id(filename, add=True)
+                from coverage import env
+                if env.PYVERSION == (3, 11, 0, "alpha", 4, 0):
+                    arcs = [(a, b) for a, b in arcs if a is not None and b is not None]
                 data = [(file_id, self._current_context_id, fromno, tono) for fromno, tono in arcs]
                 con.executemany(
                     "insert or ignore into arc " +
                     "(file_id, context_id, fromno, tono) values (?, ?, ?, ?)",
                     data,
@@ -511,19 +514,23 @@
     def _choose_lines_or_arcs(self, lines=False, arcs=False):
         """Force the data file to choose between lines and arcs."""
         assert lines or arcs
         assert not (lines and arcs)
         if lines and self._has_arcs:
+            if self._debug.should("dataop"):
+                self._debug.write("Error: Can't add line measurements to existing branch data")
             raise DataError("Can't add line measurements to existing branch data")
         if arcs and self._has_lines:
+            if self._debug.should("dataop"):
+                self._debug.write("Error: Can't add branch measurements to existing line data")
             raise DataError("Can't add branch measurements to existing line data")
         if not self._has_arcs and not self._has_lines:
             self._has_lines = lines
             self._has_arcs = arcs
             with self._connect() as con:
                 con.execute(
-                    "insert into meta (key, value) values (?, ?)",
+                    "insert or ignore into meta (key, value) values (?, ?)",
                     ("has_arcs", str(int(arcs)))
                 )
 
     @_locked
     def add_file_tracers(self, file_tracers):
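
One more detail worth calling out: the meta writes in this hunk, and in _init_db earlier, switch from plain "insert" to "insert or ignore", so recording the has_arcs flag a second time becomes a no-op rather than a constraint error. This assumes the meta table declares its key as unique, which is not shown in this excerpt; a self-contained demonstration with sqlite3:

    import sqlite3

    con = sqlite3.connect(":memory:")
    con.execute("create table meta (key text primary key, value text)")  # stand-in for coverage's meta table
    con.execute("insert or ignore into meta (key, value) values (?, ?)", ("has_arcs", "1"))
    # The repeated write is silently skipped; a plain "insert" would raise sqlite3.IntegrityError.
    con.execute("insert or ignore into meta (key, value) values (?, ?)", ("has_arcs", "1"))
    print(con.execute("select value from meta where key = 'has_arcs'").fetchone())  # ('1',)
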