@@ -499,13 +501,10 @@ def add_arcs(self, arc_data):
             return
         with self._connect() as con:
             self._set_context_id()
             for filename, arcs in arc_data.items():
                 file_id = self._file_id(filename, add=True)
-                from coverage import env
-                if env.PYVERSION == (3, 11, 0, "alpha", 4, 0):
-                    arcs = [(a, b) for a, b in arcs if a is not None and b is not None]
                 data = [(file_id, self._current_context_id, fromno, tono) for fromno, tono in arcs]
                 con.executemany(
                     "insert or ignore into arc " +
                     "(file_id, context_id, fromno, tono) values (?, ?, ?, ?)",
                     data,
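For context: `arc_data` here maps file paths to collections of `(fromno, tono)` pairs, and the loop flattens each file's arcs into rows for one `executemany` call per file. A minimal sketch of the same flattening against a stand-in table (file names, ids, and the single context id are invented; the real table also has a uniqueness constraint behind "insert or ignore"):

    import sqlite3

    # Hypothetical input shaped like arc_data: {filename: {(fromno, tono), ...}}
    arc_data = {"mod.py": {(1, 2), (2, -1)}, "other.py": {(3, 4)}}

    con = sqlite3.connect(":memory:")
    con.execute("create table arc (file_id int, context_id int, fromno int, tono int)")
    for file_id, (filename, arcs) in enumerate(arc_data.items(), start=1):
        rows = [(file_id, 0, fromno, tono) for fromno, tono in arcs]
        con.executemany("insert into arc (file_id, context_id, fromno, tono) values (?, ?, ?, ?)", rows)
    print(con.execute("select count(*) from arc").fetchone()[0])  # 3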
@@ -614,82 +613,83 @@ def update(self, other_data, aliases=None):
         # contexts.
         self._start_using()
 
         # Collector for all arcs, lines and tracers
         other_data.read()
-        with other_data._connect() as conn:
+        with other_data._connect() as con:
             # Get files data.
-            cur = conn.execute("select path from file")
+            cur = con.execute("select path from file")
             files = {path: aliases.map(path) for (path,) in cur}
             cur.close()
 
             # Get contexts data.
-            cur = conn.execute("select context from context")
+            cur = con.execute("select context from context")
             contexts = [context for (context,) in cur]
             cur.close()
 
             # Get arc data.
-            cur = conn.execute(
+            cur = con.execute(
                 "select file.path, context.context, arc.fromno, arc.tono " +
                 "from arc " +
                 "inner join file on file.id = arc.file_id " +
                 "inner join context on context.id = arc.context_id"
             )
             arcs = [(files[path], context, fromno, tono) for (path, context, fromno, tono) in cur]
             cur.close()
 
             # Get line data.
-            cur = conn.execute(
+            cur = con.execute(
                 "select file.path, context.context, line_bits.numbits " +
                 "from line_bits " +
                 "inner join file on file.id = line_bits.file_id " +
                 "inner join context on context.id = line_bits.context_id"
             )
             lines = {(files[path], context): numbits for (path, context, numbits) in cur}
             cur.close()
 
             # Get tracer data.
-            cur = conn.execute(
+            cur = con.execute(
                 "select file.path, tracer " +
                 "from tracer " +
                 "inner join file on file.id = tracer.file_id"
             )
             tracers = {files[path]: tracer for (path, tracer) in cur}
             cur.close()
 
-        with self._connect() as conn:
-            conn.con.isolation_level = "IMMEDIATE"
+        with self._connect() as con:
+            con.con.isolation_level = "IMMEDIATE"
 
             # Get all tracers in the DB. Files not in the tracers are assumed
             # to have an empty string tracer. Since Sqlite does not support
             # full outer joins, we have to make two queries to fill the
             # dictionary.
-            this_tracers = {path: "" for path, in conn.execute("select path from file")}
+            this_tracers = {path: "" for path, in con.execute("select path from file")}
             this_tracers.update({
                 aliases.map(path): tracer
-                for path, tracer in conn.execute(
+                for path, tracer in con.execute(
                     "select file.path, tracer from tracer " +
                     "inner join file on file.id = tracer.file_id"
                 )
             })
 
             # Create all file and context rows in the DB.
-            conn.executemany(
+            con.executemany(
                 "insert or ignore into file (path) values (?)",
                 ((file,) for file in files.values())
             )
             file_ids = {
                 path: id
-                for id, path in conn.execute("select id, path from file")
+                for id, path in con.execute("select id, path from file")
             }
-            conn.executemany(
+            self._file_map.update(file_ids)
+            con.executemany(
                 "insert or ignore into context (context) values (?)",
                 ((context,) for context in contexts)
             )
             context_ids = {
                 context: id
-                for id, context in conn.execute("select id, context from context")
+                for id, context in con.execute("select id, context from context")
             }
 
             # Prepare tracers and fail, if a conflict is found.
             # tracer_paths is used to ensure consistency over the tracer data
             # and tracer_map tracks the tracers to be inserted.
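The two-step construction of `this_tracers` above is the workaround for SQLite's missing FULL OUTER JOIN: first default every path already in the database to an empty tracer, then overlay the rows that actually have one. The same merge, sketched with plain dictionaries and invented paths (path aliasing via `aliases.map` left out for brevity):

    # Stand-in for the two queries above: every known path defaults to "",
    # then rows from the tracer table override that default.
    paths_in_db = ["a.py", "b.py", "c.py"]            # "select path from file"
    tracer_rows = [("b.py", "pytracer")]              # tracer joined with file
    this_tracers = {path: "" for path in paths_in_db}
    this_tracers.update({path: tracer for path, tracer in tracer_rows})
    assert this_tracers == {"a.py": "", "b.py": "pytracer", "c.py": ""}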
@@ -713,52 +713,53 @@ def update(self, other_data, aliases=None):
                 (file_ids[file], context_ids[context], fromno, tono)
                 for file, context, fromno, tono in arcs
             )
 
             # Get line data.
-            cur = conn.execute(
+            cur = con.execute(
                 "select file.path, context.context, line_bits.numbits " +
                 "from line_bits " +
                 "inner join file on file.id = line_bits.file_id " +
                 "inner join context on context.id = line_bits.context_id"
             )
             for path, context, numbits in cur:
                 key = (aliases.map(path), context)
                 if key in lines:
                     numbits = numbits_union(lines[key], numbits)
                 lines[key] = numbits
             cur.close()
 
             if arcs:
                 self._choose_lines_or_arcs(arcs=True)
 
             # Write the combined data.
-            conn.executemany(
+            con.executemany(
                 "insert or ignore into arc " +
                 "(file_id, context_id, fromno, tono) values (?, ?, ?, ?)",
                 arc_rows
             )
 
             if lines:
                 self._choose_lines_or_arcs(lines=True)
-                conn.execute("delete from line_bits")
-                conn.executemany(
+                con.execute("delete from line_bits")
+                con.executemany(
                     "insert into line_bits " +
                     "(file_id, context_id, numbits) values (?, ?, ?)",
                     [
                         (file_ids[file], context_ids[context], numbits)
                         for (file, context), numbits in lines.items()
                     ]
                 )
-            conn.executemany(
+            con.executemany(
                 "insert or ignore into tracer (file_id, tracer) values (?, ?)",
                 ((file_ids[filename], tracer) for filename, tracer in tracer_map.items())
             )
 
-        # Update all internal cache data.
-        self._reset()
-        self.read()
+        if not self._no_disk:
+            # Update all internal cache data.
+            self._reset()
+            self.read()
 
     def erase(self, parallel=False):
         """Erase the data in this object.
 
         If `parallel` is true, then also deletes data files created from the
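In the line-merging loop of the hunk above, each `numbits` value is a packed bitmap of line numbers, and `numbits_union` (from coverage's numbits helpers) merges two of them, which is conceptually a bitwise OR. A rough stand-in using plain ints as the bitmaps, not the real byte-string implementation:

    # Bit n set means line n was executed; the union of two bitmaps is bitwise OR.
    def nums_to_bits(line_numbers):
        bits = 0
        for n in line_numbers:
            bits |= 1 << n
        return bits

    def bits_union(a, b):  # analogue of numbits_union
        return a | b

    merged = bits_union(nums_to_bits({1, 2, 5}), nums_to_bits({2, 7}))
    assert merged == nums_to_bits({1, 2, 5, 7})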
@@ -997,13 +999,11 @@ def sys_info(cls):
 
         """
         with SqliteDb(":memory:", debug=NoDebugging()) as db:
             temp_store = [row[0] for row in db.execute("pragma temp_store")]
             copts = [row[0] for row in db.execute("pragma compile_options")]
-            # Yes, this is overkill. I don't like the long list of options
-            # at the end of "debug sys", but I don't want to omit information.
-            copts = ["; ".join(copts[i:i + 3]) for i in range(0, len(copts), 3)]
+            copts = textwrap.wrap(", ".join(copts), width=75)
 
         return [
             ("sqlite3_version", sqlite3.version),
             ("sqlite3_sqlite_version", sqlite3.sqlite_version),
             ("sqlite3_temp_store", temp_store),
@@ -1055,11 +1055,11 @@ def _connect(self):
         # It can happen that Python switches threads while the tracer writes
         # data. The second thread will also try to write to the data,
         # effectively causing a nested context. However, given the idempotent
         # nature of the tracer operations, sharing a connection among threads
         # is not a problem.
-        if self.debug:
+        if self.debug.should("sql"):
             self.debug.write(f"Connecting to {self.filename!r}")
         try:
             self.con = sqlite3.connect(self.filename, check_same_thread=False)
         except sqlite3.Error as exc:
             raise DataError(f"Couldn't use data file {self.filename!r}: {exc}") from exc
@@ -1091,17 +1091,17 @@ def __exit__(self, exc_type, exc_value, traceback):
         if self.nest == 0:
             try:
                 self.con.__exit__(exc_type, exc_value, traceback)
                 self.close()
             except Exception as exc:
-                if self.debug:
+                if self.debug.should("sql"):
                     self.debug.write(f"EXCEPTION from __exit__: {exc}")
                 raise DataError(f"Couldn't end data file {self.filename!r}: {exc}") from exc
 
     def execute(self, sql, parameters=()):
         """Same as :meth:`python:sqlite3.Connection.execute`."""
-        if self.debug:
+        if self.debug.should("sql"):
             tail = f" with {parameters!r}" if parameters else ""
             self.debug.write(f"Executing {sql!r}{tail}")
         try:
             try:
                 return self.con.execute(sql, parameters)
1122 "Looks like a coverage 4.x data file. " + |
1122 "Looks like a coverage 4.x data file. " + |
1123 "Are you mixing versions of coverage?" |
1123 "Are you mixing versions of coverage?" |
1124 ) |
1124 ) |
1125 except Exception: # pragma: cant happen |
1125 except Exception: # pragma: cant happen |
1126 pass |
1126 pass |
1127 if self.debug: |
1127 if self.debug.should("sql"): |
1128 self.debug.write(f"EXCEPTION from execute: {msg}") |
1128 self.debug.write(f"EXCEPTION from execute: {msg}") |
1129 raise DataError(f"Couldn't use data file {self.filename!r}: {msg}") from exc |
1129 raise DataError(f"Couldn't use data file {self.filename!r}: {msg}") from exc |
|
1130 |
|
1131 def execute_for_rowid(self, sql, parameters=()): |
|
1132 """Like execute, but returns the lastrowid.""" |
|
1133 con = self.execute(sql, parameters) |
|
1134 rowid = con.lastrowid |
|
1135 if self.debug.should("sqldata"): |
|
1136 self.debug.write(f"Row id result: {rowid!r}") |
|
1137 return rowid |
1130 |
1138 |
1131 def execute_one(self, sql, parameters=()): |
1139 def execute_one(self, sql, parameters=()): |
1132 """Execute a statement and return the one row that results. |
1140 """Execute a statement and return the one row that results. |
1133 |
1141 |
1134 This is like execute(sql, parameters).fetchone(), except it is |
1142 This is like execute(sql, parameters).fetchone(), except it is |
@@ -1145,24 +1153,28 @@ def execute_one(self, sql, parameters=()):
         else:
             raise AssertionError(f"SQL {sql!r} shouldn't return {len(rows)} rows")
 
     def executemany(self, sql, data):
         """Same as :meth:`python:sqlite3.Connection.executemany`."""
-        if self.debug:
+        if self.debug.should("sql"):
             data = list(data)
-            self.debug.write(f"Executing many {sql!r} with {len(data)} rows")
+            final = ":" if self.debug.should("sqldata") else ""
+            self.debug.write(f"Executing many {sql!r} with {len(data)} rows{final}")
+            if self.debug.should("sqldata"):
+                for i, row in enumerate(data):
+                    self.debug.write(f"{i:4d}: {row!r}")
         try:
             return self.con.executemany(sql, data)
         except Exception:  # pragma: cant happen
             # In some cases, an error might happen that isn't really an
             # error. Try again immediately.
             # https://github.com/nedbat/coveragepy/issues/1010
             return self.con.executemany(sql, data)
 
     def executescript(self, script):
         """Same as :meth:`python:sqlite3.Connection.executescript`."""
-        if self.debug:
+        if self.debug.should("sql"):
             self.debug.write("Executing script with {} chars: {}".format(
                 len(script), clipped_repr(script, 100),
             ))
         self.con.executescript(script)
 
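These wrappers only assume a debug object with `should(option)` and `write(msg)`: the "sql" option turns on statement logging, and "sqldata" additionally dumps each row passed to `executemany`. A minimal stand-in (a hypothetical class, not coverage's real DebugControl) that exercises the same gating as the new `executemany` logging:

    # Hypothetical debug object with the should()/write() interface used above.
    class ListDebug:
        def __init__(self, options):
            self.options = set(options)
            self.messages = []

        def should(self, option):
            return option in self.options

        def write(self, msg):
            self.messages.append(msg)

    debug = ListDebug(["sql", "sqldata"])
    if debug.should("sql"):
        data = [(1, 0, 1, 2), (1, 0, 2, -1)]
        final = ":" if debug.should("sqldata") else ""
        debug.write(f"Executing many 'insert ...' with {len(data)} rows{final}")
        if debug.should("sqldata"):
            for i, row in enumerate(data):
                debug.write(f"{i:4d}: {row!r}")
    assert len(debug.messages) == 3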