[pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
This commit is contained in:
pre-commit-ci[bot] 2024-04-13 00:00:18 +00:00
parent 72ad6dc953
commit f4cd1ba0d6
813 changed files with 66015 additions and 58839 deletions

View file

@@ -1,6 +1,5 @@
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
"""Coverage data for coverage.py.
This file had the 4.x JSON data support, which is now gone. This file still
@@ -9,18 +8,21 @@ CoverageData is now defined in sqldata.py, and imported here to keep the
imports working.
"""
from __future__ import annotations
import glob
import hashlib
import os.path
from typing import Callable
from typing import Iterable
from typing import Callable, Iterable
from coverage.exceptions import CoverageException, NoDataError
from coverage.exceptions import CoverageException
from coverage.exceptions import NoDataError
from coverage.files import PathAliases
from coverage.misc import Hasher, file_be_gone, human_sorted, plural
from coverage.misc import file_be_gone
from coverage.misc import Hasher
from coverage.misc import human_sorted
from coverage.misc import plural
from coverage.sqldata import CoverageData
@@ -38,7 +40,7 @@ def line_counts(data: CoverageData, fullpath: bool = False) -> dict[str, int]:
filename_fn: Callable[[str], str]
if fullpath:
# pylint: disable=unnecessary-lambda-assignment
filename_fn = lambda f: f
def filename_fn(f): return f
else:
filename_fn = os.path.basename
for filename in data.measured_files():
@@ -79,14 +81,14 @@ def combinable_files(data_file: str, data_paths: Iterable[str] | None = None) ->
if os.path.isfile(p):
files_to_combine.append(os.path.abspath(p))
elif os.path.isdir(p):
pattern = glob.escape(os.path.join(os.path.abspath(p), local)) +".*"
pattern = glob.escape(os.path.join(os.path.abspath(p), local)) + '.*'
files_to_combine.extend(glob.glob(pattern))
else:
raise NoDataError(f"Couldn't combine from non-existent path '{p}'")
# SQLite might have made journal files alongside our database files.
# We never want to combine those.
files_to_combine = [fnm for fnm in files_to_combine if not fnm.endswith("-journal")]
files_to_combine = [fnm for fnm in files_to_combine if not fnm.endswith('-journal')]
# Sorting isn't usually needed, since it shouldn't matter what order files
# are combined, but sorting makes tests more predictable, and makes
@@ -132,7 +134,7 @@ def combine_parallel_data(
files_to_combine = combinable_files(data.base_filename(), data_paths)
if strict and not files_to_combine:
raise NoDataError("No data to combine")
raise NoDataError('No data to combine')
file_hashes = set()
combined_any = False
@@ -141,8 +143,8 @@ def combine_parallel_data(
if f == data.data_filename():
# Sometimes we are combining into a file which is one of the
# parallel files. Skip that file.
if data._debug.should("dataio"):
data._debug.write(f"Skipping combining ourself: {f!r}")
if data._debug.should('dataio'):
data._debug.write(f'Skipping combining ourself: {f!r}')
continue
try:
@@ -153,16 +155,16 @@ def combine_parallel_data(
# we print the original value of f instead of its relative path
rel_file_name = f
with open(f, "rb") as fobj:
hasher = hashlib.new("sha3_256")
with open(f, 'rb') as fobj:
hasher = hashlib.new('sha3_256')
hasher.update(fobj.read())
sha = hasher.digest()
combine_this_one = sha not in file_hashes
delete_this_one = not keep
if combine_this_one:
if data._debug.should("dataio"):
data._debug.write(f"Combining data file {f!r}")
if data._debug.should('dataio'):
data._debug.write(f'Combining data file {f!r}')
file_hashes.add(sha)
try:
new_data = CoverageData(f, debug=data._debug)
@@ -179,39 +181,39 @@ def combine_parallel_data(
data.update(new_data, aliases=aliases)
combined_any = True
if message:
message(f"Combined data file {rel_file_name}")
message(f'Combined data file {rel_file_name}')
else:
if message:
message(f"Skipping duplicate data {rel_file_name}")
message(f'Skipping duplicate data {rel_file_name}')
if delete_this_one:
if data._debug.should("dataio"):
data._debug.write(f"Deleting data file {f!r}")
if data._debug.should('dataio'):
data._debug.write(f'Deleting data file {f!r}')
file_be_gone(f)
if strict and not combined_any:
raise NoDataError("No usable data files")
raise NoDataError('No usable data files')
def debug_data_file(filename: str) -> None:
    """Implementation of 'coverage debug data'.

    Prints a human-readable summary of the coverage database at `filename`:
    its resolved path, whether it holds arc data, and a per-file line count
    (with the file-tracer plugin name where one was used).
    """
    # NOTE(review): this span was a diff rendering with old/new statement
    # pairs interleaved; the duplicated prints/appends are collapsed to a
    # single occurrence each, keeping the file's double-quote style.
    data = CoverageData(filename)
    # Re-resolve: CoverageData may adjust the path (suffixes, defaults).
    filename = data.data_filename()
    # The original literal was garbled to "(unknown)"; the resolved data
    # file path is the value this report is meant to show.
    print(f"path: {filename}")
    if not os.path.exists(filename):
        print("No data collected: file doesn't exist")
        return
    data.read()
    print(f"has_arcs: {data.has_arcs()!r}")
    summary = line_counts(data, fullpath=True)
    filenames = human_sorted(summary.keys())
    nfiles = len(filenames)
    print(f"{nfiles} file{plural(nfiles)}:")
    for f in filenames:
        line = f"{f}: {summary[f]} line{plural(summary[f])}"
        plugin = data.file_tracer(f)
        if plugin:
            # Append the plugin tag once (the diff artifact appended twice).
            line += f" [{plugin}]"
        print(line)