eric7/DebugClients/Python/coverage/sqldata.py

branch:    eric7
changeset: 8775:0802ae193343
parent:    8527:2bd1325d727e
child:     8929:fcca2fa618bf
--- a/eric7/DebugClients/Python/coverage/sqldata.py	Fri Nov 19 19:28:47 2021 +0100
+++ b/eric7/DebugClients/Python/coverage/sqldata.py	Sat Nov 20 16:47:38 2021 +0100
@@ -8,19 +8,20 @@
 
 import collections
 import datetime
+import functools
 import glob
 import itertools
 import os
 import re
 import sqlite3
 import sys
+import threading
 import zlib
 
-from coverage import env
-from coverage.backward import get_thread_id, iitems, to_bytes, to_string
 from coverage.debug import NoDebugging, SimpleReprMixin, clipped_repr
+from coverage.exceptions import CoverageException
 from coverage.files import PathAliases
-from coverage.misc import CoverageException, contract, file_be_gone, filename_suffix, isolate_module
+from coverage.misc import contract, file_be_gone, filename_suffix, isolate_module
 from coverage.numbits import numbits_to_nums, numbits_union, nums_to_numbits
 from coverage.version import __version__
 
@@ -179,6 +180,10 @@
     Data in a :class:`CoverageData` can be serialized and deserialized with
     :meth:`dumps` and :meth:`loads`.
 
+    The methods used during the coverage.py collection phase
+    (:meth:`add_lines`, :meth:`add_arcs`, :meth:`set_context`, and
+    :meth:`add_file_tracers`) are thread-safe.  Other methods may not be.
+
     """
 
     def __init__(self, basename=None, suffix=None, no_disk=False, warn=None, debug=None):
@@ -207,6 +212,8 @@
         # Maps thread ids to SqliteDb objects.
         self._dbs = {}
         self._pid = os.getpid()
+        # Synchronize the operations used during collection.
+        self._lock = threading.Lock()
 
         # Are we in sync with the data file?
         self._have_used = False
@@ -218,6 +225,15 @@
         self._current_context_id = None
         self._query_context_ids = None
 
+    def _locked(method):            # pylint: disable=no-self-argument
+        """A decorator for methods that should hold self._lock."""
+        @functools.wraps(method)
+        def _wrapped(self, *args, **kwargs):
+            with self._lock:
+                # pylint: disable=not-callable
+                return method(self, *args, **kwargs)
+        return _wrapped
+
     def _choose_filename(self):
         """Set self._filename based on inited attributes."""
         if self._no_disk:
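The `_locked` decorator above is defined directly in the class body and then applied to the collection-phase methods. A minimal standalone sketch of the same pattern outside coverage.py (the `Recorder` class is hypothetical):

```python
import functools
import threading

class Recorder:
    """Hypothetical class demonstrating the body-level _locked pattern."""

    def __init__(self):
        self._lock = threading.Lock()
        self._data = []

    def _locked(method):            # a plain function at class-definition time
        """Decorator: run the wrapped method while holding self._lock."""
        @functools.wraps(method)
        def _wrapped(self, *args, **kwargs):
            with self._lock:
                return method(self, *args, **kwargs)
        return _wrapped

    @_locked
    def add(self, item):
        # Concurrent callers serialize on self._lock.
        self._data.append(item)
```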
@@ -243,31 +259,31 @@
 
         Initializes the schema and certain metadata.
         """
-        if self._debug.should('dataio'):
-            self._debug.write("Creating data file {!r}".format(self._filename))
-        self._dbs[get_thread_id()] = db = SqliteDb(self._filename, self._debug)
+        if self._debug.should("dataio"):
+            self._debug.write(f"Creating data file {self._filename!r}")
+        self._dbs[threading.get_ident()] = db = SqliteDb(self._filename, self._debug)
         with db:
             db.executescript(SCHEMA)
             db.execute("insert into coverage_schema (version) values (?)", (SCHEMA_VERSION,))
             db.executemany(
                 "insert into meta (key, value) values (?, ?)",
                 [
-                    ('sys_argv', str(getattr(sys, 'argv', None))),
-                    ('version', __version__),
-                    ('when', datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')),
+                    ("sys_argv", str(getattr(sys, "argv", None))),
+                    ("version", __version__),
+                    ("when", datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")),
                 ]
             )
 
     def _open_db(self):
         """Open an existing db file, and read its metadata."""
-        if self._debug.should('dataio'):
-            self._debug.write("Opening data file {!r}".format(self._filename))
-        self._dbs[get_thread_id()] = SqliteDb(self._filename, self._debug)
+        if self._debug.should("dataio"):
+            self._debug.write(f"Opening data file {self._filename!r}")
+        self._dbs[threading.get_ident()] = SqliteDb(self._filename, self._debug)
         self._read_db()
 
     def _read_db(self):
         """Read the metadata from a database so that we are ready to use it."""
-        with self._dbs[get_thread_id()] as db:
+        with self._dbs[threading.get_ident()] as db:
             try:
                 schema_version, = db.execute_one("select version from coverage_schema")
             except Exception as exc:
@@ -275,7 +291,7 @@
                     "Data file {!r} doesn't seem to be a coverage data file: {}".format(
                         self._filename, exc
                     )
-                )
+                ) from exc
             else:
                 if schema_version != SCHEMA_VERSION:
                     raise CoverageException(
@@ -293,15 +309,15 @@
 
     def _connect(self):
         """Get the SqliteDb object to use."""
-        if get_thread_id() not in self._dbs:
+        if threading.get_ident() not in self._dbs:
             if os.path.exists(self._filename):
                 self._open_db()
             else:
                 self._create_db()
-        return self._dbs[get_thread_id()]
+        return self._dbs[threading.get_ident()]
 
     def __nonzero__(self):
-        if (get_thread_id() not in self._dbs and not os.path.exists(self._filename)):
+        if (threading.get_ident() not in self._dbs and not os.path.exists(self._filename)):
             return False
         try:
             with self._connect() as con:
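`_connect()` keys the `_dbs` cache by `threading.get_ident()`, so each thread lazily creates and then reuses its own `SqliteDb`. A hedged sketch of that per-thread caching, with a hypothetical stand-in resource:

```python
import threading

class PerThreadResource:
    """Hypothetical cache: one resource object per calling thread."""

    def __init__(self):
        self._instances = {}

    def get(self):
        tid = threading.get_ident()     # unique key for the running thread
        if tid not in self._instances:
            self._instances[tid] = self._make()
        return self._instances[tid]

    def _make(self):
        return object()                 # stand-in for SqliteDb(filename, debug)
```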
@@ -312,7 +328,7 @@
 
     __bool__ = __nonzero__
 
-    @contract(returns='bytes')
+    @contract(returns="bytes")
     def dumps(self):
         """Serialize the current data to a byte string.
 
@@ -320,38 +336,45 @@
         suitable for use with :meth:`loads` in the same version of
         coverage.py.
 
+        Note that this serialization is not what gets stored in coverage data
+        files.  This method is meant to produce bytes that can be transmitted
+        elsewhere and then deserialized with :meth:`loads`.
+
         Returns:
             A byte string of serialized data.
 
         .. versionadded:: 5.0
 
         """
-        if self._debug.should('dataio'):
-            self._debug.write("Dumping data from data file {!r}".format(self._filename))
+        if self._debug.should("dataio"):
+            self._debug.write(f"Dumping data from data file {self._filename!r}")
         with self._connect() as con:
-            return b'z' + zlib.compress(to_bytes(con.dump()))
+            return b"z" + zlib.compress(con.dump().encode("utf-8"))
 
-    @contract(data='bytes')
+    @contract(data="bytes")
     def loads(self, data):
-        """Deserialize data from :meth:`dumps`
+        """Deserialize data from :meth:`dumps`.
 
         Use with a newly-created empty :class:`CoverageData` object.  It's
         undefined what happens if the object already has data in it.
 
+        Note that this is not for reading data from a coverage data file.  It
+        is only for use on data you produced with :meth:`dumps`.
+
         Arguments:
             data: A byte string of serialized data produced by :meth:`dumps`.
 
         .. versionadded:: 5.0
 
         """
-        if self._debug.should('dataio'):
-            self._debug.write("Loading data into data file {!r}".format(self._filename))
-        if data[:1] != b'z':
+        if self._debug.should("dataio"):
+            self._debug.write(f"Loading data into data file {self._filename!r}")
+        if data[:1] != b"z":
             raise CoverageException(
-                "Unrecognized serialization: {!r} (head of {} bytes)".format(data[:40], len(data))
+                f"Unrecognized serialization: {data[:40]!r} (head of {len(data)} bytes)"
                 )
-        script = to_string(zlib.decompress(data[1:]))
-        self._dbs[get_thread_id()] = db = SqliteDb(self._filename, self._debug)
+        script = zlib.decompress(data[1:]).decode("utf-8")
+        self._dbs[threading.get_ident()] = db = SqliteDb(self._filename, self._debug)
         with db:
             db.executescript(script)
         self._read_db()
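A minimal round-trip sketch of `dumps()`/`loads()` as documented above. The file names are hypothetical, and this assumes the vendored package is importable as `coverage`:

```python
from coverage.sqldata import CoverageData

data = CoverageData(basename=".coverage.worker1")   # hypothetical name
data.add_lines({"example.py": {1, 2, 3}})
blob = data.dumps()            # b"z" + zlib-compressed SQL script

received = CoverageData(no_disk=True)               # fresh, empty object
received.loads(blob)           # only valid for bytes produced by dumps()
```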
@@ -381,6 +404,7 @@
             else:
                 return None
 
+    @_locked
     def set_context(self, context):
         """Set the current context for future :meth:`add_lines` etc.
 
@@ -390,8 +414,8 @@
         .. versionadded:: 5.0
 
         """
-        if self._debug.should('dataop'):
-            self._debug.write("Setting context: %r" % (context,))
+        if self._debug.should("dataop"):
+            self._debug.write(f"Setting context: {context!r}")
         self._current_context = context
         self._current_context_id = None
 
@@ -422,15 +446,16 @@
         """
         return self._filename
 
+    @_locked
     def add_lines(self, line_data):
         """Add measured line data.
 
-        `line_data` is a dictionary mapping file names to dictionaries::
+        `line_data` is a dictionary mapping file names to iterables of ints::
 
-            { filename: { lineno: None, ... }, ...}
+            { filename: { line1, line2, ... }, ...}
 
         """
-        if self._debug.should('dataop'):
+        if self._debug.should("dataop"):
             self._debug.write("Adding lines: %d files, %d lines total" % (
                 len(line_data), sum(len(lines) for lines in line_data.values())
             ))
@@ -440,7 +465,7 @@
             return
         with self._connect() as con:
             self._set_context_id()
-            for filename, linenos in iitems(line_data):
+            for filename, linenos in line_data.items():
                 linemap = nums_to_numbits(linenos)
                 file_id = self._file_id(filename, add=True)
                 query = "select numbits from line_bits where file_id = ? and context_id = ?"
@@ -449,20 +474,22 @@
                     linemap = numbits_union(linemap, existing[0][0])
 
                 con.execute(
-                    "insert or replace into line_bits "
+                    "insert or replace into line_bits " +
                     " (file_id, context_id, numbits) values (?, ?, ?)",
                     (file_id, self._current_context_id, linemap),
                 )
 
+    @_locked
     def add_arcs(self, arc_data):
         """Add measured arc data.
 
-        `arc_data` is a dictionary mapping file names to dictionaries::
+        `arc_data` is a dictionary mapping file names to iterables of pairs of
+        ints::
 
-            { filename: { (l1,l2): None, ... }, ...}
+            { filename: { (l1,l2), (l1,l2), ... }, ...}
 
         """
-        if self._debug.should('dataop'):
+        if self._debug.should("dataop"):
             self._debug.write("Adding arcs: %d files, %d arcs total" % (
                 len(arc_data), sum(len(arcs) for arcs in arc_data.values())
             ))
@@ -472,11 +499,11 @@
             return
         with self._connect() as con:
             self._set_context_id()
-            for filename, arcs in iitems(arc_data):
+            for filename, arcs in arc_data.items():
                 file_id = self._file_id(filename, add=True)
                 data = [(file_id, self._current_context_id, fromno, tono) for fromno, tono in arcs]
                 con.executemany(
-                    "insert or ignore into arc "
+                    "insert or ignore into arc " +
                     "(file_id, context_id, fromno, tono) values (?, ?, ?, ?)",
                     data,
                 )
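The reworded docstrings pin down the collection data shapes. A short sketch with hypothetical file names; a single `CoverageData` stores either lines or arcs, never both, and the numbits helpers shown are the encoding used in the `line_bits` table:

```python
from coverage.numbits import nums_to_numbits, numbits_to_nums, numbits_union
from coverage.sqldata import CoverageData

line_data = CoverageData(no_disk=True)
line_data.add_lines({"pkg/mod.py": {1, 2, 7}})        # iterables of ints

arc_data = CoverageData(no_disk=True)
arc_data.add_arcs({"pkg/mod.py": {(1, 2), (2, 7)}})   # iterables of int pairs

# Line sets are stored as compact "numbits" blobs and merged with a union:
nb = numbits_union(nums_to_numbits({1, 2, 7}), nums_to_numbits({7, 9}))
assert numbits_to_nums(nb) == [1, 2, 7, 9]
```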
@@ -495,33 +522,34 @@
             with self._connect() as con:
                 con.execute(
                     "insert into meta (key, value) values (?, ?)",
-                    ('has_arcs', str(int(arcs)))
+                    ("has_arcs", str(int(arcs)))
                 )
 
+    @_locked
     def add_file_tracers(self, file_tracers):
         """Add per-file plugin information.
 
         `file_tracers` is { filename: plugin_name, ... }
 
         """
-        if self._debug.should('dataop'):
+        if self._debug.should("dataop"):
             self._debug.write("Adding file tracers: %d files" % (len(file_tracers),))
         if not file_tracers:
             return
         self._start_using()
         with self._connect() as con:
-            for filename, plugin_name in iitems(file_tracers):
+            for filename, plugin_name in file_tracers.items():
                 file_id = self._file_id(filename)
                 if file_id is None:
                     raise CoverageException(
-                        "Can't add file tracer data for unmeasured file '%s'" % (filename,)
+                        f"Can't add file tracer data for unmeasured file '{filename}'"
                     )
 
                 existing_plugin = self.file_tracer(filename)
                 if existing_plugin:
                     if existing_plugin != plugin_name:
                         raise CoverageException(
-                            "Conflicting file tracer name for '%s': %r vs %r" % (
+                            "Conflicting file tracer name for '{}': {!r} vs {!r}".format(
                                 filename, existing_plugin, plugin_name,
                             )
                         )
@@ -545,8 +573,8 @@
         `plugin_name` is the name of the plugin responsible for these files. It is used
         to associate the right filereporter, etc.
         """
-        if self._debug.should('dataop'):
-            self._debug.write("Touching %r" % (filenames,))
+        if self._debug.should("dataop"):
+            self._debug.write(f"Touching {filenames!r}")
         self._start_using()
         with self._connect(): # Use this to get one transaction.
             if not self._has_arcs and not self._has_lines:
@@ -564,9 +592,9 @@
         If `aliases` is provided, it's a `PathAliases` object that is used to
         re-map paths to match the local machine's.
         """
-        if self._debug.should('dataop'):
-            self._debug.write("Updating with data from %r" % (
-                getattr(other_data, '_filename', '???'),
+        if self._debug.should("dataop"):
+            self._debug.write("Updating with data from {!r}".format(
+                getattr(other_data, "_filename", "???"),
             ))
         if self._has_lines and other_data._has_arcs:
             raise CoverageException("Can't combine arc data with line data")
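`update()` pushes every path from the other data file through the aliases map. A hedged sketch of `PathAliases` usage with hypothetical paths (patterns may contain wildcards, but must not end with one):

```python
from coverage.files import PathAliases

aliases = PathAliases()
aliases.add("/remote/ci/project/src", "src")    # remote prefix -> local tree
local = aliases.map("/remote/ci/project/src/pkg/mod.py")
# e.g. "src/pkg/mod.py", using the local OS path separator
```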
@@ -583,79 +611,76 @@
         other_data.read()
         with other_data._connect() as conn:
             # Get files data.
-            cur = conn.execute('select path from file')
+            cur = conn.execute("select path from file")
             files = {path: aliases.map(path) for (path,) in cur}
             cur.close()
 
             # Get contexts data.
-            cur = conn.execute('select context from context')
+            cur = conn.execute("select context from context")
             contexts = [context for (context,) in cur]
             cur.close()
 
             # Get arc data.
             cur = conn.execute(
-                'select file.path, context.context, arc.fromno, arc.tono '
-                'from arc '
-                'inner join file on file.id = arc.file_id '
-                'inner join context on context.id = arc.context_id'
+                "select file.path, context.context, arc.fromno, arc.tono " +
+                "from arc " +
+                "inner join file on file.id = arc.file_id " +
+                "inner join context on context.id = arc.context_id"
             )
             arcs = [(files[path], context, fromno, tono) for (path, context, fromno, tono) in cur]
             cur.close()
 
             # Get line data.
             cur = conn.execute(
-                'select file.path, context.context, line_bits.numbits '
-                'from line_bits '
-                'inner join file on file.id = line_bits.file_id '
-                'inner join context on context.id = line_bits.context_id'
+                "select file.path, context.context, line_bits.numbits " +
+                "from line_bits " +
+                "inner join file on file.id = line_bits.file_id " +
+                "inner join context on context.id = line_bits.context_id"
                 )
-            lines = {
-                (files[path], context): numbits
-                for (path, context, numbits) in cur
-                }
+            lines = {(files[path], context): numbits for (path, context, numbits) in cur}
             cur.close()
 
             # Get tracer data.
             cur = conn.execute(
-                'select file.path, tracer '
-                'from tracer '
-                'inner join file on file.id = tracer.file_id'
+                "select file.path, tracer " +
+                "from tracer " +
+                "inner join file on file.id = tracer.file_id"
             )
             tracers = {files[path]: tracer for (path, tracer) in cur}
             cur.close()
 
         with self._connect() as conn:
-            conn.con.isolation_level = 'IMMEDIATE'
+            conn.con.isolation_level = "IMMEDIATE"
 
             # Get all tracers in the DB. Files not in the tracers are assumed
             # to have an empty string tracer. Since Sqlite does not support
             # full outer joins, we have to make two queries to fill the
             # dictionary.
-            this_tracers = {path: '' for path, in conn.execute('select path from file')}
+            this_tracers = {path: "" for path, in conn.execute("select path from file")}
             this_tracers.update({
                 aliases.map(path): tracer
                 for path, tracer in conn.execute(
-                    'select file.path, tracer from tracer '
-                    'inner join file on file.id = tracer.file_id'
+                    "select file.path, tracer from tracer " +
+                    "inner join file on file.id = tracer.file_id"
                 )
             })
 
             # Create all file and context rows in the DB.
             conn.executemany(
-                'insert or ignore into file (path) values (?)',
+                "insert or ignore into file (path) values (?)",
                 ((file,) for file in files.values())
             )
             file_ids = {
                 path: id
-                for id, path in conn.execute('select id, path from file')
+                for id, path in conn.execute("select id, path from file")
             }
             conn.executemany(
-                'insert or ignore into context (context) values (?)',
+                "insert or ignore into context (context) values (?)",
                 ((context,) for context in contexts)
             )
             context_ids = {
                 context: id
-                for id, context in conn.execute('select id, context from context')
+                for id, context in conn.execute("select id, context from context")
             }
 
             # Prepare tracers and fail, if a conflict is found.
@@ -664,11 +689,11 @@
             tracer_map = {}
             for path in files.values():
                 this_tracer = this_tracers.get(path)
-                other_tracer = tracers.get(path, '')
+                other_tracer = tracers.get(path, "")
                 # If there is no tracer, there is always the None tracer.
                 if this_tracer is not None and this_tracer != other_tracer:
                     raise CoverageException(
-                        "Conflicting file tracer name for '%s': %r vs %r" % (
+                        "Conflicting file tracer name for '{}': {!r} vs {!r}".format(
                             path, this_tracer, other_tracer
                         )
                     )
@@ -684,10 +709,10 @@
 
             # Get line data.
             cur = conn.execute(
-                'select file.path, context.context, line_bits.numbits '
-                'from line_bits '
-                'inner join file on file.id = line_bits.file_id '
-                'inner join context on context.id = line_bits.context_id'
+                "select file.path, context.context, line_bits.numbits " +
+                "from line_bits " +
+                "inner join file on file.id = line_bits.file_id " +
+                "inner join context on context.id = line_bits.context_id"
                 )
             for path, context, numbits in cur:
                 key = (aliases.map(path), context)
@@ -701,8 +726,8 @@
 
                 # Write the combined data.
                 conn.executemany(
-                    'insert or ignore into arc '
-                    '(file_id, context_id, fromno, tono) values (?, ?, ?, ?)',
+                    "insert or ignore into arc " +
+                    "(file_id, context_id, fromno, tono) values (?, ?, ?, ?)",
                     arc_rows
                 )
 
@@ -710,7 +735,7 @@
                 self._choose_lines_or_arcs(lines=True)
                 conn.execute("delete from line_bits")
                 conn.executemany(
-                    "insert into line_bits "
+                    "insert into line_bits " +
                     "(file_id, context_id, numbits) values (?, ?, ?)",
                     [
                         (file_ids[file], context_ids[context], numbits)
@@ -718,7 +743,7 @@
                     ]
                 )
             conn.executemany(
-                'insert or ignore into tracer (file_id, tracer) values (?, ?)',
+                "insert or ignore into tracer (file_id, tracer) values (?, ?)",
                 ((file_ids[filename], tracer) for filename, tracer in tracer_map.items())
             )
 
@@ -736,16 +761,16 @@
         self._reset()
         if self._no_disk:
             return
-        if self._debug.should('dataio'):
-            self._debug.write("Erasing data file {!r}".format(self._filename))
+        if self._debug.should("dataio"):
+            self._debug.write(f"Erasing data file {self._filename!r}")
         file_be_gone(self._filename)
         if parallel:
             data_dir, local = os.path.split(self._filename)
-            localdot = local + '.*'
+            localdot = local + ".*"
             pattern = os.path.join(os.path.abspath(data_dir), localdot)
             for filename in glob.glob(pattern):
-                if self._debug.should('dataio'):
-                    self._debug.write("Erasing parallel data file {!r}".format(filename))
+                if self._debug.should("dataio"):
+                    self._debug.write(f"Erasing parallel data file {filename!r}")
                 file_be_gone(filename)
 
     def read(self):
@@ -836,14 +861,14 @@
         self._start_using()
         if contexts:
             with self._connect() as con:
-                context_clause = ' or '.join(['context regexp ?'] * len(contexts))
+                context_clause = " or ".join(["context regexp ?"] * len(contexts))
                 cur = con.execute("select id from context where " + context_clause, contexts)
                 self._query_context_ids = [row[0] for row in cur.fetchall()]
         else:
             self._query_context_ids = None
 
     def lines(self, filename):
-        """Get the list of lines executed for a file.
+        """Get the list of lines executed for a source file.
 
         If the file was not measured, returns None.  A file might be measured,
         and have no lines executed, in which case an empty list is returned.
@@ -867,7 +892,7 @@
                 query = "select numbits from line_bits where file_id = ?"
                 data = [file_id]
                 if self._query_context_ids is not None:
-                    ids_array = ', '.join('?' * len(self._query_context_ids))
+                    ids_array = ", ".join("?" * len(self._query_context_ids))
                     query += " and context_id in (" + ids_array + ")"
                     data += self._query_context_ids
                 bitmaps = list(con.execute(query, data))
@@ -902,7 +927,7 @@
                 query = "select distinct fromno, tono from arc where file_id = ?"
                 data = [file_id]
                 if self._query_context_ids is not None:
-                    ids_array = ', '.join('?' * len(self._query_context_ids))
+                    ids_array = ", ".join("?" * len(self._query_context_ids))
                     query += " and context_id in (" + ids_array + ")"
                     data += self._query_context_ids
                 arcs = con.execute(query, data)
@@ -917,43 +942,45 @@
         .. versionadded:: 5.0
 
         """
-        lineno_contexts_map = collections.defaultdict(list)
         self._start_using()
         with self._connect() as con:
             file_id = self._file_id(filename)
             if file_id is None:
-                return lineno_contexts_map
+                return {}
+
+            lineno_contexts_map = collections.defaultdict(set)
             if self.has_arcs():
                 query = (
-                    "select arc.fromno, arc.tono, context.context "
-                    "from arc, context "
+                    "select arc.fromno, arc.tono, context.context " +
+                    "from arc, context " +
                     "where arc.file_id = ? and arc.context_id = context.id"
                 )
                 data = [file_id]
                 if self._query_context_ids is not None:
-                    ids_array = ', '.join('?' * len(self._query_context_ids))
+                    ids_array = ", ".join("?" * len(self._query_context_ids))
                     query += " and arc.context_id in (" + ids_array + ")"
                     data += self._query_context_ids
                 for fromno, tono, context in con.execute(query, data):
-                    if context not in lineno_contexts_map[fromno]:
-                        lineno_contexts_map[fromno].append(context)
-                    if context not in lineno_contexts_map[tono]:
-                        lineno_contexts_map[tono].append(context)
+                    if fromno > 0:
+                        lineno_contexts_map[fromno].add(context)
+                    if tono > 0:
+                        lineno_contexts_map[tono].add(context)
             else:
                 query = (
-                    "select l.numbits, c.context from line_bits l, context c "
-                    "where l.context_id = c.id "
+                    "select l.numbits, c.context from line_bits l, context c " +
+                    "where l.context_id = c.id " +
                     "and file_id = ?"
                     )
                 data = [file_id]
                 if self._query_context_ids is not None:
-                    ids_array = ', '.join('?' * len(self._query_context_ids))
+                    ids_array = ", ".join("?" * len(self._query_context_ids))
                     query += " and l.context_id in (" + ids_array + ")"
                     data += self._query_context_ids
                 for numbits, context in con.execute(query, data):
                     for lineno in numbits_to_nums(numbits):
-                        lineno_contexts_map[lineno].append(context)
-        return lineno_contexts_map
+                        lineno_contexts_map[lineno].add(context)
+
+        return {lineno: list(contexts) for lineno, contexts in lineno_contexts_map.items()}
 
     @classmethod
     def sys_info(cls):
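The rewritten `contexts_by_lineno` accumulates into sets (deduplicating as it goes), drops the synthetic non-positive line numbers that arcs use for entry and exit, and returns a plain dict of lists. A hypothetical query against already-measured data:

```python
# Assumes `data` is a CoverageData with measured contexts for this file.
ctx_map = data.contexts_by_lineno("pkg/mod.py")   # hypothetical path
# e.g. {1: ["test_one"], 2: ["test_one", "test_two"]}
```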
@@ -964,13 +991,16 @@
         """
         with SqliteDb(":memory:", debug=NoDebugging()) as db:
             temp_store = [row[0] for row in db.execute("pragma temp_store")]
-            compile_options = [row[0] for row in db.execute("pragma compile_options")]
+            copts = [row[0] for row in db.execute("pragma compile_options")]
+            # Yes, this is overkill. I don't like the long list of options
+            # at the end of "debug sys", but I don't want to omit information.
+            copts = ["; ".join(copts[i:i + 3]) for i in range(0, len(copts), 3)]
 
         return [
-            ('sqlite3_version', sqlite3.version),
-            ('sqlite3_sqlite_version', sqlite3.sqlite_version),
-            ('sqlite3_temp_store', temp_store),
-            ('sqlite3_compile_options', compile_options),
+            ("sqlite3_version", sqlite3.version),
+            ("sqlite3_sqlite_version", sqlite3.sqlite_version),
+            ("sqlite3_temp_store", temp_store),
+            ("sqlite3_compile_options", copts),
         ]
 
 
@@ -985,7 +1015,7 @@
 
     """
     def __init__(self, filename, debug):
-        self.debug = debug if debug.should('sql') else None
+        self.debug = debug if debug.should("sql") else None
         self.filename = filename
         self.nest = 0
         self.con = None
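`sys_info()` above already shows this wrapper in action. A minimal sketch of driving `SqliteDb` directly with an in-memory database, assuming the vendored package imports as `coverage`:

```python
from coverage.debug import NoDebugging
from coverage.sqldata import SqliteDb

with SqliteDb(":memory:", debug=NoDebugging()) as db:
    db.executescript("create table t (x); insert into t values (1);")
    (value,) = db.execute_one("select x from t")
    assert value == 1
```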
@@ -995,29 +1025,19 @@
         if self.con is not None:
             return
 
-        # SQLite on Windows on py2 won't open a file if the filename argument
-        # has non-ascii characters in it.  Opening a relative file name avoids
-        # a problem if the current directory has non-ascii.
-        filename = self.filename
-        if env.WINDOWS and env.PY2:
-            try:
-                filename = os.path.relpath(self.filename)
-            except ValueError:
-                # ValueError can be raised under Windows when os.getcwd() returns a
-                # folder from a different drive than the drive of self.filename in
-                # which case we keep the original value of self.filename unchanged,
-                # hoping that we won't face the non-ascii directory problem.
-                pass
-
         # It can happen that Python switches threads while the tracer writes
         # data. The second thread will also try to write to the data,
         # effectively causing a nested context. However, given the idempotent
         # nature of the tracer operations, sharing a connection among threads
         # is not a problem.
         if self.debug:
-            self.debug.write("Connecting to {!r}".format(self.filename))
-        self.con = sqlite3.connect(filename, check_same_thread=False)
-        self.con.create_function('REGEXP', 2, _regexp)
+            self.debug.write(f"Connecting to {self.filename!r}")
+        try:
+            self.con = sqlite3.connect(self.filename, check_same_thread=False)
+        except sqlite3.Error as exc:
+            raise CoverageException(f"Couldn't use data file {self.filename!r}: {exc}") from exc
+
+        self.con.create_function("REGEXP", 2, _regexp)
 
         # This pragma makes writing faster. It disables rollbacks, but we never need them.
         # PyPy needs the .close() calls here, or sqlite gets twisted up:
@@ -1047,14 +1067,14 @@
                 self.close()
             except Exception as exc:
                 if self.debug:
-                    self.debug.write("EXCEPTION from __exit__: {}".format(exc))
-                raise
+                    self.debug.write(f"EXCEPTION from __exit__: {exc}")
+                raise CoverageException(f"Couldn't end data file {self.filename!r}: {exc}") from exc
 
     def execute(self, sql, parameters=()):
         """Same as :meth:`python:sqlite3.Connection.execute`."""
         if self.debug:
-            tail = " with {!r}".format(parameters) if parameters else ""
-            self.debug.write("Executing {!r}{}".format(sql, tail))
+            tail = f" with {parameters!r}" if parameters else ""
+            self.debug.write(f"Executing {sql!r}{tail}")
         try:
             try:
                 return self.con.execute(sql, parameters)
@@ -1072,14 +1092,14 @@
                     cov4_sig = b"!coverage.py: This is a private format"
                     if bad_file.read(len(cov4_sig)) == cov4_sig:
                         msg = (
-                            "Looks like a coverage 4.x data file. "
+                            "Looks like a coverage 4.x data file. " +
                             "Are you mixing versions of coverage?"
                         )
-            except Exception:
+            except Exception:   # pragma: cant happen
                 pass
             if self.debug:
-                self.debug.write("EXCEPTION from execute: {}".format(msg))
-            raise CoverageException("Couldn't use data file {!r}: {}".format(self.filename, msg))
+                self.debug.write(f"EXCEPTION from execute: {msg}")
+            raise CoverageException(f"Couldn't use data file {self.filename!r}: {msg}") from exc
 
     def execute_one(self, sql, parameters=()):
         """Execute a statement and return the one row that results.
@@ -1096,14 +1116,20 @@
         elif len(rows) == 1:
             return rows[0]
         else:
-            raise CoverageException("Sql {!r} shouldn't return {} rows".format(sql, len(rows)))
+            raise AssertionError(f"SQL {sql!r} shouldn't return {len(rows)} rows")
 
     def executemany(self, sql, data):
         """Same as :meth:`python:sqlite3.Connection.executemany`."""
         if self.debug:
             data = list(data)
-            self.debug.write("Executing many {!r} with {} rows".format(sql, len(data)))
-        return self.con.executemany(sql, data)
+            self.debug.write(f"Executing many {sql!r} with {len(data)} rows")
+        try:
+            return self.con.executemany(sql, data)
+        except Exception:   # pragma: cant happen
+            # In some cases, an error might happen that isn't really an
+            # error.  Try again immediately.
+            # https://github.com/nedbat/coveragepy/issues/1010
+            return self.con.executemany(sql, data)
 
     def executescript(self, script):
         """Same as :meth:`python:sqlite3.Connection.executescript`."""
