eric7/DebugClients/Python/coverage/sqldata.py

branch:     eric7
changeset:  9099:0e511e0e94a3
parent:     8991:2fc945191992
diff -r fb9351497cea -r 0e511e0e94a3 eric7/DebugClients/Python/coverage/sqldata.py
--- a/eric7/DebugClients/Python/coverage/sqldata.py	Tue May 24 10:22:46 2022 +0200
+++ b/eric7/DebugClients/Python/coverage/sqldata.py	Tue May 24 11:00:52 2022 +0200
@@ -1,10 +1,7 @@
 # Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
 # For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
 
-"""Sqlite coverage data."""
-
-# TODO: factor out dataop debugging to a wrapper class?
-# TODO: make sure all dataop debugging is in place somehow
+"""SQLite coverage data."""
 
 import collections
 import datetime
@@ -17,6 +14,7 @@
 import socket
 import sqlite3
 import sys
+import textwrap
 import threading
 import zlib
 
@@ -30,7 +28,7 @@
 os = isolate_module(os)
 
 # If you change the schema, increment the SCHEMA_VERSION, and update the
-# docs in docs/dbschema.rst also.
+# docs in docs/dbschema.rst by running "make cogdoc".
 
 SCHEMA_VERSION = 7
 
@@ -252,10 +250,10 @@
 
     def _reset(self):
         """Reset our attributes."""
-        if self._dbs:
+        if not self._no_disk:
             for db in self._dbs.values():
                 db.close()
-        self._dbs = {}
+            self._dbs = {}
         self._file_map = {}
         self._have_used = False
         self._current_context_id = None
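
A note on the _reset() change above: an in-memory SQLite database lives only as long as its connection, so in the _no_disk case the connections in self._dbs are now kept open rather than closed and discarded, which would throw away everything stored so far. A small stand-alone illustration of that sqlite3 behaviour (table name invented):

    import sqlite3

    con = sqlite3.connect(":memory:")
    con.execute("create table t (x)")
    con.close()                          # the whole in-memory database is gone

    con = sqlite3.connect(":memory:")    # this is a brand-new, empty database
    # con.execute("select * from t")     # would raise: no such table: t
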
@@ -293,7 +291,7 @@
                 self._has_arcs = bool(int(row[0]))
                 self._has_lines = not self._has_arcs
 
-            for path, file_id in db.execute("select path, id from file"):
+            for file_id, path in db.execute("select id, path from file"):
                 self._file_map[path] = file_id
 
     def _init_db(self, db):
@@ -389,8 +387,10 @@
         if filename not in self._file_map:
             if add:
                 with self._connect() as con:
-                    cur = con.execute("insert or replace into file (path) values (?)", (filename,))
-                    self._file_map[filename] = cur.lastrowid
+                    self._file_map[filename] = con.execute_for_rowid(
+                        "insert or replace into file (path) values (?)",
+                        (filename,)
+                    )
         return self._file_map.get(filename)
 
     def _context_id(self, context):
@@ -427,8 +427,10 @@
             self._current_context_id = context_id
         else:
             with self._connect() as con:
-                cur = con.execute("insert into context (context) values (?)", (context,))
-                self._current_context_id = cur.lastrowid
+                self._current_context_id = con.execute_for_rowid(
+                    "insert into context (context) values (?)",
+                    (context,)
+                )
 
     def base_filename(self):
         """The base filename for storing data.
@@ -501,9 +503,6 @@
             self._set_context_id()
             for filename, arcs in arc_data.items():
                 file_id = self._file_id(filename, add=True)
-                from coverage import env
-                if env.PYVERSION == (3, 11, 0, "alpha", 4, 0):
-                    arcs = [(a, b) for a, b in arcs if a is not None and b is not None]
                 data = [(file_id, self._current_context_id, fromno, tono) for fromno, tono in arcs]
                 con.executemany(
                     "insert or ignore into arc " +
@@ -616,19 +615,19 @@
 
         # Collector for all arcs, lines and tracers
         other_data.read()
-        with other_data._connect() as conn:
+        with other_data._connect() as con:
             # Get files data.
-            cur = conn.execute("select path from file")
+            cur = con.execute("select path from file")
             files = {path: aliases.map(path) for (path,) in cur}
             cur.close()
 
             # Get contexts data.
-            cur = conn.execute("select context from context")
+            cur = con.execute("select context from context")
             contexts = [context for (context,) in cur]
             cur.close()
 
             # Get arc data.
-            cur = conn.execute(
+            cur = con.execute(
                 "select file.path, context.context, arc.fromno, arc.tono " +
                 "from arc " +
                 "inner join file on file.id = arc.file_id " +
@@ -638,17 +637,17 @@
             cur.close()
 
             # Get line data.
-            cur = conn.execute(
+            cur = con.execute(
                 "select file.path, context.context, line_bits.numbits " +
                 "from line_bits " +
                 "inner join file on file.id = line_bits.file_id " +
                 "inner join context on context.id = line_bits.context_id"
-                )
+            )
             lines = {(files[path], context): numbits for (path, context, numbits) in cur}
             cur.close()
 
             # Get tracer data.
-            cur = conn.execute(
+            cur = con.execute(
                 "select file.path, tracer " +
                 "from tracer " +
                 "inner join file on file.id = tracer.file_id"
@@ -656,38 +655,39 @@
             tracers = {files[path]: tracer for (path, tracer) in cur}
             cur.close()
 
-        with self._connect() as conn:
-            conn.con.isolation_level = "IMMEDIATE"
+        with self._connect() as con:
+            con.con.isolation_level = "IMMEDIATE"
 
             # Get all tracers in the DB. Files not in the tracers are assumed
             # to have an empty string tracer. Since Sqlite does not support
             # full outer joins, we have to make two queries to fill the
             # dictionary.
-            this_tracers = {path: "" for path, in conn.execute("select path from file")}
+            this_tracers = {path: "" for path, in con.execute("select path from file")}
             this_tracers.update({
                 aliases.map(path): tracer
-                for path, tracer in conn.execute(
+                for path, tracer in con.execute(
                     "select file.path, tracer from tracer " +
                     "inner join file on file.id = tracer.file_id"
                 )
             })
 
             # Create all file and context rows in the DB.
-            conn.executemany(
+            con.executemany(
                 "insert or ignore into file (path) values (?)",
                 ((file,) for file in files.values())
             )
             file_ids = {
                 path: id
-                for id, path in conn.execute("select id, path from file")
+                for id, path in con.execute("select id, path from file")
             }
-            conn.executemany(
+            self._file_map.update(file_ids)
+            con.executemany(
                 "insert or ignore into context (context) values (?)",
                 ((context,) for context in contexts)
             )
             context_ids = {
                 context: id
-                for id, context in conn.execute("select id, context from context")
+                for id, context in con.execute("select id, context from context")
             }
 
             # Prepare tracers and fail, if a conflict is found.
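
The comment above about full outer joins refers to a common SQLite workaround: default every file to an empty tracer first, then overwrite the entries that actually have a tracer row. A stripped-down illustration of the same two-pass pattern, using a throwaway in-memory database and invented rows:

    import sqlite3

    con = sqlite3.connect(":memory:")
    con.executescript("""
        create table file (id integer primary key, path text);
        create table tracer (file_id integer, tracer text);
        insert into file (path) values ('a.py'), ('b.py');
        insert into tracer values (1, 'PyTracer');
    """)

    # Pass 1: assume an empty tracer for every known file.
    tracers = {path: "" for (path,) in con.execute("select path from file")}
    # Pass 2: overwrite the files that actually have a tracer row.
    tracers.update(dict(con.execute(
        "select file.path, tracer from tracer "
        "inner join file on file.id = tracer.file_id"
    )))
    # tracers == {'a.py': 'PyTracer', 'b.py': ''}
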
@@ -715,12 +715,12 @@
             )
 
             # Get line data.
-            cur = conn.execute(
+            cur = con.execute(
                 "select file.path, context.context, line_bits.numbits " +
                 "from line_bits " +
                 "inner join file on file.id = line_bits.file_id " +
                 "inner join context on context.id = line_bits.context_id"
-                )
+            )
             for path, context, numbits in cur:
                 key = (aliases.map(path), context)
                 if key in lines:
@@ -732,7 +732,7 @@
                 self._choose_lines_or_arcs(arcs=True)
 
                 # Write the combined data.
-                conn.executemany(
+                con.executemany(
                     "insert or ignore into arc " +
                     "(file_id, context_id, fromno, tono) values (?, ?, ?, ?)",
                     arc_rows
@@ -740,8 +740,8 @@
 
             if lines:
                 self._choose_lines_or_arcs(lines=True)
-                conn.execute("delete from line_bits")
-                conn.executemany(
+                con.execute("delete from line_bits")
+                con.executemany(
                     "insert into line_bits " +
                     "(file_id, context_id, numbits) values (?, ?, ?)",
                     [
@@ -749,14 +749,15 @@
                         for (file, context), numbits in lines.items()
                     ]
                 )
-            conn.executemany(
+            con.executemany(
                 "insert or ignore into tracer (file_id, tracer) values (?, ?)",
                 ((file_ids[filename], tracer) for filename, tracer in tracer_map.items())
             )
 
-        # Update all internal cache data.
-        self._reset()
-        self.read()
+        if not self._no_disk:
+            # Update all internal cache data.
+            self._reset()
+            self.read()
 
     def erase(self, parallel=False):
         """Erase the data in this object.
@@ -782,8 +783,9 @@
 
     def read(self):
         """Start using an existing data file."""
-        with self._connect():       # TODO: doesn't look right
-            self._have_used = True
+        if os.path.exists(self._filename):
+            with self._connect():
+                self._have_used = True
 
     def write(self):
         """Ensure the data is written to the data file."""
@@ -977,7 +979,7 @@
                     "select l.numbits, c.context from line_bits l, context c " +
                     "where l.context_id = c.id " +
                     "and file_id = ?"
-                    )
+                )
                 data = [file_id]
                 if self._query_context_ids is not None:
                     ids_array = ", ".join("?" * len(self._query_context_ids))
@@ -999,9 +1001,7 @@
         with SqliteDb(":memory:", debug=NoDebugging()) as db:
             temp_store = [row[0] for row in db.execute("pragma temp_store")]
             copts = [row[0] for row in db.execute("pragma compile_options")]
-            # Yes, this is overkill. I don't like the long list of options
-            # at the end of "debug sys", but I don't want to omit information.
-            copts = ["; ".join(copts[i:i + 3]) for i in range(0, len(copts), 3)]
+            copts = textwrap.wrap(", ".join(copts), width=75)
 
         return [
             ("sqlite3_version", sqlite3.version),
@@ -1042,7 +1042,7 @@
 
     """
     def __init__(self, filename, debug):
-        self.debug = debug if debug.should("sql") else None
+        self.debug = debug
         self.filename = filename
         self.nest = 0
         self.con = None
@@ -1057,7 +1057,7 @@
         # effectively causing a nested context. However, given the idempotent
         # nature of the tracer operations, sharing a connection among threads
         # is not a problem.
-        if self.debug:
+        if self.debug.should("sql"):
             self.debug.write(f"Connecting to {self.filename!r}")
         try:
             self.con = sqlite3.connect(self.filename, check_same_thread=False)
@@ -1093,13 +1093,13 @@
                 self.con.__exit__(exc_type, exc_value, traceback)
                 self.close()
             except Exception as exc:
-                if self.debug:
+                if self.debug.should("sql"):
                     self.debug.write(f"EXCEPTION from __exit__: {exc}")
                 raise DataError(f"Couldn't end data file {self.filename!r}: {exc}") from exc
 
     def execute(self, sql, parameters=()):
         """Same as :meth:`python:sqlite3.Connection.execute`."""
-        if self.debug:
+        if self.debug.should("sql"):
             tail = f" with {parameters!r}" if parameters else ""
             self.debug.write(f"Executing {sql!r}{tail}")
         try:
@@ -1124,10 +1124,18 @@
                         )
             except Exception:   # pragma: cant happen
                 pass
-            if self.debug:
+            if self.debug.should("sql"):
                 self.debug.write(f"EXCEPTION from execute: {msg}")
             raise DataError(f"Couldn't use data file {self.filename!r}: {msg}") from exc
 
+    def execute_for_rowid(self, sql, parameters=()):
+        """Like execute, but returns the lastrowid."""
+        con = self.execute(sql, parameters)
+        rowid = con.lastrowid
+        if self.debug.should("sqldata"):
+            self.debug.write(f"Row id result: {rowid!r}")
+        return rowid
+
     def execute_one(self, sql, parameters=()):
         """Execute a statement and return the one row that results.
 
@@ -1147,9 +1155,13 @@
 
     def executemany(self, sql, data):
         """Same as :meth:`python:sqlite3.Connection.executemany`."""
-        if self.debug:
+        if self.debug.should("sql"):
             data = list(data)
-            self.debug.write(f"Executing many {sql!r} with {len(data)} rows")
+            final = ":" if self.debug.should("sqldata") else ""
+            self.debug.write(f"Executing many {sql!r} with {len(data)} rows{final}")
+            if self.debug.should("sqldata"):
+                for i, row in enumerate(data):
+                    self.debug.write(f"{i:4d}: {row!r}")
         try:
             return self.con.executemany(sql, data)
         except Exception:   # pragma: cant happen
@@ -1160,7 +1172,7 @@
 
     def executescript(self, script):
         """Same as :meth:`python:sqlite3.Connection.executescript`."""
-        if self.debug:
+        if self.debug.should("sql"):
             self.debug.write("Executing script with {} chars: {}".format(
                 len(script), clipped_repr(script, 100),
             ))
