[pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
This commit is contained in:
pre-commit-ci[bot] 2024-04-13 00:00:18 +00:00
parent 72ad6dc953
commit f4cd1ba0d6
813 changed files with 66015 additions and 58839 deletions

View file

@@ -2,6 +2,8 @@
"""Implementation of the cache provider."""
# This plugin was not named "cache" to avoid conflicts with the external
# pytest-cache version.
from __future__ import annotations
import dataclasses
import json
import os
@@ -15,9 +17,6 @@ from typing import Optional
from typing import Set
from typing import Union
from .pathlib import resolve_from_str
from .pathlib import rm_rf
from .reports import CollectReport
from _pytest import nodes
from _pytest._io import TerminalWriter
from _pytest.config import Config
@@ -32,6 +31,10 @@ from _pytest.nodes import Directory
from _pytest.nodes import File
from _pytest.reports import TestReport
from .pathlib import resolve_from_str
from .pathlib import rm_rf
from .reports import CollectReport
README_CONTENT = """\
# pytest cache directory #
@@ -61,27 +64,27 @@ class Cache:
_config: Config = dataclasses.field(repr=False)
# Sub-directory under cache-dir for directories created by `mkdir()`.
_CACHE_PREFIX_DIRS = "d"
_CACHE_PREFIX_DIRS = 'd'
# Sub-directory under cache-dir for values created by `set()`.
_CACHE_PREFIX_VALUES = "v"
_CACHE_PREFIX_VALUES = 'v'
def __init__(
self, cachedir: Path, config: Config, *, _ispytest: bool = False
self, cachedir: Path, config: Config, *, _ispytest: bool = False,
) -> None:
check_ispytest(_ispytest)
self._cachedir = cachedir
self._config = config
@classmethod
def for_config(cls, config: Config, *, _ispytest: bool = False) -> "Cache":
def for_config(cls, config: Config, *, _ispytest: bool = False) -> Cache:
"""Create the Cache instance for a Config.
:meta private:
"""
check_ispytest(_ispytest)
cachedir = cls.cache_dir_from_config(config, _ispytest=True)
if config.getoption("cacheclear") and cachedir.is_dir():
if config.getoption('cacheclear') and cachedir.is_dir():
cls.clear_cache(cachedir, _ispytest=True)
return cls(cachedir, config, _ispytest=True)
@@ -104,7 +107,7 @@ class Cache:
:meta private:
"""
check_ispytest(_ispytest)
return resolve_from_str(config.getini("cache_dir"), config.rootpath)
return resolve_from_str(config.getini('cache_dir'), config.rootpath)
def warn(self, fmt: str, *, _ispytest: bool = False, **args: object) -> None:
"""Issue a cache warning.
@ -138,7 +141,7 @@ class Cache:
"""
path = Path(name)
if len(path.parts) > 1:
raise ValueError("name is not allowed to contain path separators")
raise ValueError('name is not allowed to contain path separators')
res = self._cachedir.joinpath(self._CACHE_PREFIX_DIRS, path)
res.mkdir(exist_ok=True, parents=True)
return res
@ -160,7 +163,7 @@ class Cache:
"""
path = self._getvaluepath(key)
try:
with path.open("r", encoding="UTF-8") as f:
with path.open('r', encoding='UTF-8') as f:
return json.load(f)
except (ValueError, OSError):
return default
@ -184,7 +187,7 @@ class Cache:
path.parent.mkdir(exist_ok=True, parents=True)
except OSError as exc:
self.warn(
f"could not create cache path {path}: {exc}",
f'could not create cache path {path}: {exc}',
_ispytest=True,
)
return
@ -192,10 +195,10 @@ class Cache:
self._ensure_supporting_files()
data = json.dumps(value, ensure_ascii=False, indent=2)
try:
f = path.open("w", encoding="UTF-8")
f = path.open('w', encoding='UTF-8')
except OSError as exc:
self.warn(
f"cache could not write path {path}: {exc}",
f'cache could not write path {path}: {exc}',
_ispytest=True,
)
else:
@ -204,25 +207,25 @@ class Cache:
def _ensure_supporting_files(self) -> None:
"""Create supporting files in the cache dir that are not really part of the cache."""
readme_path = self._cachedir / "README.md"
readme_path.write_text(README_CONTENT, encoding="UTF-8")
readme_path = self._cachedir / 'README.md'
readme_path.write_text(README_CONTENT, encoding='UTF-8')
gitignore_path = self._cachedir.joinpath(".gitignore")
msg = "# Created by pytest automatically.\n*\n"
gitignore_path.write_text(msg, encoding="UTF-8")
gitignore_path = self._cachedir.joinpath('.gitignore')
msg = '# Created by pytest automatically.\n*\n'
gitignore_path.write_text(msg, encoding='UTF-8')
cachedir_tag_path = self._cachedir.joinpath("CACHEDIR.TAG")
cachedir_tag_path = self._cachedir.joinpath('CACHEDIR.TAG')
cachedir_tag_path.write_bytes(CACHEDIR_TAG_CONTENT)
class LFPluginCollWrapper:
def __init__(self, lfplugin: "LFPlugin") -> None:
def __init__(self, lfplugin: LFPlugin) -> None:
self.lfplugin = lfplugin
self._collected_at_least_one_failure = False
@hookimpl(wrapper=True)
def pytest_make_collect_report(
self, collector: nodes.Collector
self, collector: nodes.Collector,
) -> Generator[None, CollectReport, CollectReport]:
res = yield
if isinstance(collector, (Session, Directory)):
@@ -230,7 +233,7 @@ class LFPluginCollWrapper:
lf_paths = self.lfplugin._last_failed_paths
# Use stable sort to prioritize last failed.
def sort_key(node: Union[nodes.Item, nodes.Collector]) -> bool:
def sort_key(node: nodes.Item | nodes.Collector) -> bool:
return node.path in lf_paths
res.result = sorted(
@@ -249,7 +252,7 @@ class LFPluginCollWrapper:
if not any(x.nodeid in lastfailed for x in result):
return res
self.lfplugin.config.pluginmanager.register(
LFPluginCollSkipfiles(self.lfplugin), "lfplugin-collskip"
LFPluginCollSkipfiles(self.lfplugin), 'lfplugin-collskip',
)
self._collected_at_least_one_failure = True
@@ -257,30 +260,30 @@ class LFPluginCollWrapper:
result[:] = [
x
for x in result
if x.nodeid in lastfailed
if x.nodeid in lastfailed or
# Include any passed arguments (not trivial to filter).
or session.isinitpath(x.path)
session.isinitpath(x.path) or
# Keep all sub-collectors.
or isinstance(x, nodes.Collector)
isinstance(x, nodes.Collector)
]
return res
class LFPluginCollSkipfiles:
def __init__(self, lfplugin: "LFPlugin") -> None:
def __init__(self, lfplugin: LFPlugin) -> None:
self.lfplugin = lfplugin
@hookimpl
def pytest_make_collect_report(
self, collector: nodes.Collector
) -> Optional[CollectReport]:
self, collector: nodes.Collector,
) -> CollectReport | None:
if isinstance(collector, File):
if collector.path not in self.lfplugin._last_failed_paths:
self.lfplugin._skipped_files += 1
return CollectReport(
collector.nodeid, "passed", longrepr=None, result=[]
collector.nodeid, 'passed', longrepr=None, result=[],
)
return None
@@ -290,44 +293,44 @@ class LFPlugin:
def __init__(self, config: Config) -> None:
self.config = config
active_keys = "lf", "failedfirst"
active_keys = 'lf', 'failedfirst'
self.active = any(config.getoption(key) for key in active_keys)
assert config.cache
self.lastfailed: Dict[str, bool] = config.cache.get("cache/lastfailed", {})
self._previously_failed_count: Optional[int] = None
self._report_status: Optional[str] = None
self.lastfailed: dict[str, bool] = config.cache.get('cache/lastfailed', {})
self._previously_failed_count: int | None = None
self._report_status: str | None = None
self._skipped_files = 0 # count skipped files during collection due to --lf
if config.getoption("lf"):
if config.getoption('lf'):
self._last_failed_paths = self.get_last_failed_paths()
config.pluginmanager.register(
LFPluginCollWrapper(self), "lfplugin-collwrapper"
LFPluginCollWrapper(self), 'lfplugin-collwrapper',
)
def get_last_failed_paths(self) -> Set[Path]:
def get_last_failed_paths(self) -> set[Path]:
"""Return a set with all Paths of the previously failed nodeids and
their parents."""
rootpath = self.config.rootpath
result = set()
for nodeid in self.lastfailed:
path = rootpath / nodeid.split("::")[0]
path = rootpath / nodeid.split('::')[0]
result.add(path)
result.update(path.parents)
return {x for x in result if x.exists()}
def pytest_report_collectionfinish(self) -> Optional[str]:
if self.active and self.config.getoption("verbose") >= 0:
return "run-last-failure: %s" % self._report_status
def pytest_report_collectionfinish(self) -> str | None:
if self.active and self.config.getoption('verbose') >= 0:
return 'run-last-failure: %s' % self._report_status
return None
def pytest_runtest_logreport(self, report: TestReport) -> None:
if (report.when == "call" and report.passed) or report.skipped:
if (report.when == 'call' and report.passed) or report.skipped:
self.lastfailed.pop(report.nodeid, None)
elif report.failed:
self.lastfailed[report.nodeid] = True
def pytest_collectreport(self, report: CollectReport) -> None:
passed = report.outcome in ("passed", "skipped")
passed = report.outcome in ('passed', 'skipped')
if passed:
if report.nodeid in self.lastfailed:
self.lastfailed.pop(report.nodeid)
@@ -337,7 +340,7 @@ class LFPlugin:
@hookimpl(wrapper=True, tryfirst=True)
def pytest_collection_modifyitems(
self, config: Config, items: List[nodes.Item]
self, config: Config, items: list[nodes.Item],
) -> Generator[None, None, None]:
res = yield
@@ -357,45 +360,45 @@ class LFPlugin:
if not previously_failed:
# Running a subset of all tests with recorded failures
# only outside of it.
self._report_status = "%d known failures not in selected tests" % (
self._report_status = '%d known failures not in selected tests' % (
len(self.lastfailed),
)
else:
if self.config.getoption("lf"):
if self.config.getoption('lf'):
items[:] = previously_failed
config.hook.pytest_deselected(items=previously_passed)
else: # --failedfirst
items[:] = previously_failed + previously_passed
noun = "failure" if self._previously_failed_count == 1 else "failures"
suffix = " first" if self.config.getoption("failedfirst") else ""
noun = 'failure' if self._previously_failed_count == 1 else 'failures'
suffix = ' first' if self.config.getoption('failedfirst') else ''
self._report_status = (
f"rerun previous {self._previously_failed_count} {noun}{suffix}"
f'rerun previous {self._previously_failed_count} {noun}{suffix}'
)
if self._skipped_files > 0:
files_noun = "file" if self._skipped_files == 1 else "files"
self._report_status += f" (skipped {self._skipped_files} {files_noun})"
files_noun = 'file' if self._skipped_files == 1 else 'files'
self._report_status += f' (skipped {self._skipped_files} {files_noun})'
else:
self._report_status = "no previously failed tests, "
if self.config.getoption("last_failed_no_failures") == "none":
self._report_status += "deselecting all items."
self._report_status = 'no previously failed tests, '
if self.config.getoption('last_failed_no_failures') == 'none':
self._report_status += 'deselecting all items.'
config.hook.pytest_deselected(items=items[:])
items[:] = []
else:
self._report_status += "not deselecting items."
self._report_status += 'not deselecting items.'
return res
def pytest_sessionfinish(self, session: Session) -> None:
config = self.config
if config.getoption("cacheshow") or hasattr(config, "workerinput"):
if config.getoption('cacheshow') or hasattr(config, 'workerinput'):
return
assert config.cache is not None
saved_lastfailed = config.cache.get("cache/lastfailed", {})
saved_lastfailed = config.cache.get('cache/lastfailed', {})
if saved_lastfailed != self.lastfailed:
config.cache.set("cache/lastfailed", self.lastfailed)
config.cache.set('cache/lastfailed', self.lastfailed)
class NFPlugin:
@@ -405,17 +408,17 @@ class NFPlugin:
self.config = config
self.active = config.option.newfirst
assert config.cache is not None
self.cached_nodeids = set(config.cache.get("cache/nodeids", []))
self.cached_nodeids = set(config.cache.get('cache/nodeids', []))
@hookimpl(wrapper=True, tryfirst=True)
def pytest_collection_modifyitems(
self, items: List[nodes.Item]
self, items: list[nodes.Item],
) -> Generator[None, None, None]:
res = yield
if self.active:
new_items: Dict[str, nodes.Item] = {}
other_items: Dict[str, nodes.Item] = {}
new_items: dict[str, nodes.Item] = {}
other_items: dict[str, nodes.Item] = {}
for item in items:
if item.nodeid not in self.cached_nodeids:
new_items[item.nodeid] = item
@@ -423,7 +426,7 @@ class NFPlugin:
other_items[item.nodeid] = item
items[:] = self._get_increasing_order(
new_items.values()
new_items.values(),
) + self._get_increasing_order(other_items.values())
self.cached_nodeids.update(new_items)
else:
@@ -431,84 +434,84 @@ class NFPlugin:
return res
def _get_increasing_order(self, items: Iterable[nodes.Item]) -> List[nodes.Item]:
def _get_increasing_order(self, items: Iterable[nodes.Item]) -> list[nodes.Item]:
return sorted(items, key=lambda item: item.path.stat().st_mtime, reverse=True) # type: ignore[no-any-return]
def pytest_sessionfinish(self) -> None:
config = self.config
if config.getoption("cacheshow") or hasattr(config, "workerinput"):
if config.getoption('cacheshow') or hasattr(config, 'workerinput'):
return
if config.getoption("collectonly"):
if config.getoption('collectonly'):
return
assert config.cache is not None
config.cache.set("cache/nodeids", sorted(self.cached_nodeids))
config.cache.set('cache/nodeids', sorted(self.cached_nodeids))
def pytest_addoption(parser: Parser) -> None:
group = parser.getgroup("general")
group = parser.getgroup('general')
group.addoption(
"--lf",
"--last-failed",
action="store_true",
dest="lf",
help="Rerun only the tests that failed "
"at the last run (or all if none failed)",
'--lf',
'--last-failed',
action='store_true',
dest='lf',
help='Rerun only the tests that failed '
'at the last run (or all if none failed)',
)
group.addoption(
"--ff",
"--failed-first",
action="store_true",
dest="failedfirst",
help="Run all tests, but run the last failures first. "
"This may re-order tests and thus lead to "
"repeated fixture setup/teardown.",
'--ff',
'--failed-first',
action='store_true',
dest='failedfirst',
help='Run all tests, but run the last failures first. '
'This may re-order tests and thus lead to '
'repeated fixture setup/teardown.',
)
group.addoption(
"--nf",
"--new-first",
action="store_true",
dest="newfirst",
help="Run tests from new files first, then the rest of the tests "
"sorted by file mtime",
'--nf',
'--new-first',
action='store_true',
dest='newfirst',
help='Run tests from new files first, then the rest of the tests '
'sorted by file mtime',
)
group.addoption(
"--cache-show",
action="append",
nargs="?",
dest="cacheshow",
'--cache-show',
action='append',
nargs='?',
dest='cacheshow',
help=(
"Show cache contents, don't perform collection or tests. "
"Optional argument: glob (default: '*')."
),
)
group.addoption(
"--cache-clear",
action="store_true",
dest="cacheclear",
help="Remove all cache contents at start of test run",
'--cache-clear',
action='store_true',
dest='cacheclear',
help='Remove all cache contents at start of test run',
)
cache_dir_default = ".pytest_cache"
if "TOX_ENV_DIR" in os.environ:
cache_dir_default = os.path.join(os.environ["TOX_ENV_DIR"], cache_dir_default)
parser.addini("cache_dir", default=cache_dir_default, help="Cache directory path")
cache_dir_default = '.pytest_cache'
if 'TOX_ENV_DIR' in os.environ:
cache_dir_default = os.path.join(os.environ['TOX_ENV_DIR'], cache_dir_default)
parser.addini('cache_dir', default=cache_dir_default, help='Cache directory path')
group.addoption(
"--lfnf",
"--last-failed-no-failures",
action="store",
dest="last_failed_no_failures",
choices=("all", "none"),
default="all",
help="With ``--lf``, determines whether to execute tests when there "
"are no previously (known) failures or when no "
"cached ``lastfailed`` data was found. "
"``all`` (the default) runs the full test suite again. "
"``none`` just emits a message about no known failures and exits successfully.",
'--lfnf',
'--last-failed-no-failures',
action='store',
dest='last_failed_no_failures',
choices=('all', 'none'),
default='all',
help='With ``--lf``, determines whether to execute tests when there '
'are no previously (known) failures or when no '
'cached ``lastfailed`` data was found. '
'``all`` (the default) runs the full test suite again. '
'``none`` just emits a message about no known failures and exits successfully.',
)
def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]:
def pytest_cmdline_main(config: Config) -> int | ExitCode | None:
if config.option.cacheshow and not config.option.help:
from _pytest.main import wrap_session
@@ -519,8 +522,8 @@ def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]:
@hookimpl(tryfirst=True)
def pytest_configure(config: Config) -> None:
config.cache = Cache.for_config(config, _ispytest=True)
config.pluginmanager.register(LFPlugin(config), "lfplugin")
config.pluginmanager.register(NFPlugin(config), "nfplugin")
config.pluginmanager.register(LFPlugin(config), 'lfplugin')
config.pluginmanager.register(NFPlugin(config), 'nfplugin')
@fixture
@@ -539,9 +542,9 @@ def cache(request: FixtureRequest) -> Cache:
return request.config.cache
def pytest_report_header(config: Config) -> Optional[str]:
def pytest_report_header(config: Config) -> str | None:
"""Display cachedir with --cache-show and if non-default."""
if config.option.verbose > 0 or config.getini("cache_dir") != ".pytest_cache":
if config.option.verbose > 0 or config.getini('cache_dir') != '.pytest_cache':
assert config.cache is not None
cachedir = config.cache._cachedir
# TODO: evaluate generating upward relative paths
@@ -551,7 +554,7 @@ def pytest_report_header(config: Config) -> Optional[str]:
displaypath = cachedir.relative_to(config.rootpath)
except ValueError:
displaypath = cachedir
return f"cachedir: {displaypath}"
return f'cachedir: {displaypath}'
return None
@@ -561,37 +564,37 @@ def cacheshow(config: Config, session: Session) -> int:
assert config.cache is not None
tw = TerminalWriter()
tw.line("cachedir: " + str(config.cache._cachedir))
tw.line('cachedir: ' + str(config.cache._cachedir))
if not config.cache._cachedir.is_dir():
tw.line("cache is empty")
tw.line('cache is empty')
return 0
glob = config.option.cacheshow[0]
if glob is None:
glob = "*"
glob = '*'
dummy = object()
basedir = config.cache._cachedir
vdir = basedir / Cache._CACHE_PREFIX_VALUES
tw.sep("-", "cache values for %r" % glob)
tw.sep('-', 'cache values for %r' % glob)
for valpath in sorted(x for x in vdir.rglob(glob) if x.is_file()):
key = str(valpath.relative_to(vdir))
val = config.cache.get(key, dummy)
if val is dummy:
tw.line("%s contains unreadable content, will be ignored" % key)
tw.line('%s contains unreadable content, will be ignored' % key)
else:
tw.line("%s contains:" % key)
tw.line('%s contains:' % key)
for line in pformat(val).splitlines():
tw.line(" " + line)
tw.line(' ' + line)
ddir = basedir / Cache._CACHE_PREFIX_DIRS
if ddir.is_dir():
contents = sorted(ddir.rglob(glob))
tw.sep("-", "cache directories for %r" % glob)
tw.sep('-', 'cache directories for %r' % glob)
for p in contents:
# if p.is_dir():
# print("%s/" % p.relative_to(basedir))
if p.is_file():
key = str(p.relative_to(basedir))
tw.line(f"{key} is a file of length {p.stat().st_size:d}")
tw.line(f'{key} is a file of length {p.stat().st_size:d}')
return 0