Update hooks and use autopep8 + add-trailing-comma instead of black

This commit is contained in:
Max R 2025-07-20 19:12:27 -04:00
parent 23d2a8517e
commit 5fab0d1887
33 changed files with 110 additions and 102 deletions

View file

@ -1,6 +1,10 @@
repos:
- repo: https://github.com/asottile/add-trailing-comma
rev: v3.2.0
hooks:
- id: add-trailing-comma
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
rev: v5.0.0
hooks:
- id: check-yaml
- id: debug-statements
@ -8,11 +12,11 @@ repos:
- id: trailing-whitespace
exclude: ^tests/fixtures/
- repo: https://github.com/asottile/setup-cfg-fmt
rev: v2.5.0
rev: v2.8.0
hooks:
- id: setup-cfg-fmt
- repo: https://github.com/asottile/reorder-python-imports
rev: v3.14.0
rev: v3.15.0
hooks:
- id: reorder-python-imports
args: [
@ -21,21 +25,20 @@ repos:
--add-import, 'from __future__ import annotations',
]
- repo: https://github.com/asottile/pyupgrade
rev: v3.19.1
rev: v3.20.0
hooks:
- id: pyupgrade
args: [--py39-plus]
- repo: https://github.com/psf/black
rev: 23.12.1
- repo: https://github.com/hhatto/autopep8
rev: v2.3.2
hooks:
- id: black
args: [--line-length=79]
- id: autopep8
- repo: https://github.com/PyCQA/flake8
rev: 7.0.0
rev: 7.3.0
hooks:
- id: flake8
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.15.0
rev: v1.17.0
hooks:
- id: mypy
exclude: ^(docs/|example-plugin/)

View file

@ -16,7 +16,6 @@ classifiers =
Environment :: Console
Framework :: Flake8
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Programming Language :: Python
Programming Language :: Python :: 3
Programming Language :: Python :: 3 :: Only

View file

@ -66,5 +66,5 @@ def configure_logging(
LOG.addHandler(handler)
LOG.setLevel(log_level)
LOG.debug(
"Added a %s logging handler to logger root at %s", filename, __name__
"Added a %s logging handler to logger root at %s", filename, __name__,
)

View file

@ -135,7 +135,7 @@ class StyleGuide:
stdin_display_name=self.options.stdin_display_name,
filename_patterns=self.options.filename,
exclude=self.options.exclude,
)
),
)
return not paths
@ -153,7 +153,7 @@ class StyleGuide:
if not issubclass(reporter, formatter.BaseFormatter):
raise ValueError(
"Report should be subclass of "
"flake8.formatter.BaseFormatter."
"flake8.formatter.BaseFormatter.",
)
self._application.formatter = reporter(self.options)
self._application.guide = None

View file

@ -50,7 +50,7 @@ _mp: tuple[Checkers, argparse.Namespace] | None = None
@contextlib.contextmanager
def _mp_prefork(
plugins: Checkers, options: argparse.Namespace
plugins: Checkers, options: argparse.Namespace,
) -> Generator[None]:
# we can save significant startup work w/ `fork` multiprocessing
global _mp
@ -77,7 +77,7 @@ def _mp_run(filename: str) -> tuple[str, Results, dict[str, int]]:
assert _mp is not None, _mp
plugins, options = _mp
return FileChecker(
filename=filename, plugins=plugins, options=options
filename=filename, plugins=plugins, options=options,
).run_checks()
@ -137,7 +137,7 @@ class Manager:
if utils.is_using_stdin(self.options.filenames):
LOG.warning(
"The --jobs option is not compatible with supplying "
"input using - . Ignoring --jobs arguments."
"input using - . Ignoring --jobs arguments.",
)
return 0
@ -252,7 +252,7 @@ class Manager:
stdin_display_name=self.options.stdin_display_name,
filename_patterns=self.options.filename,
exclude=self.exclude,
)
),
)
self.jobs = min(len(self.filenames), self.jobs)
@ -332,11 +332,11 @@ class FileChecker:
assert self.processor is not None, self.filename
try:
params = self.processor.keyword_arguments_for(
plugin.parameters, arguments
plugin.parameters, arguments,
)
except AttributeError as ae:
raise exceptions.PluginRequestedUnknownParameters(
plugin_name=plugin.display_name, exception=ae
plugin_name=plugin.display_name, exception=ae,
)
try:
return plugin.obj(**arguments, **params)
@ -548,7 +548,7 @@ class FileChecker:
self.run_logical_checks()
def check_physical_eol(
self, token: tokenize.TokenInfo, prev_physical: str
self, token: tokenize.TokenInfo, prev_physical: str,
) -> None:
"""Run physical checks if and only if it is at the end of the line."""
assert self.processor is not None
@ -598,7 +598,7 @@ def _try_initialize_processpool(
def find_offset(
offset: int, mapping: processor._LogicalMapping
offset: int, mapping: processor._LogicalMapping,
) -> tuple[int, int]:
"""Find the offset tuple for a single offset."""
if isinstance(offset, tuple):

View file

@ -110,7 +110,7 @@ class BaseFormatter:
The formatted error string.
"""
raise NotImplementedError(
"Subclass of BaseFormatter did not implement" " format."
"Subclass of BaseFormatter did not implement" " format.",
)
def show_statistics(self, statistics: Statistics) -> None:

View file

@ -76,7 +76,7 @@ class Application:
assert self.formatter is not None
assert self.options is not None
self.guide = style_guide.StyleGuideManager(
self.options, self.formatter
self.options, self.formatter,
)
def make_file_checker_manager(self, argv: Sequence[str]) -> None:

View file

@ -14,7 +14,7 @@ def information(version: str, plugins: Plugins) -> dict[str, Any]:
(loaded.plugin.package, loaded.plugin.version)
for loaded in plugins.all_plugins()
if loaded.plugin.package not in {"flake8", "local"}
}
},
)
return {
"version": version,

View file

@ -32,7 +32,7 @@ def stage1_arg_parser() -> argparse.ArgumentParser:
)
parser.add_argument(
"--output-file", default=None, help="Redirect report to a file."
"--output-file", default=None, help="Redirect report to a file.",
)
# Config file options

View file

@ -78,7 +78,7 @@ def load_config(
if config is not None:
if not cfg.read(config, encoding="UTF-8"):
raise exceptions.ExecutionError(
f"The specified config file does not exist: {config}"
f"The specified config file does not exist: {config}",
)
cfg_dir = os.path.dirname(config)
else:
@ -89,7 +89,7 @@ def load_config(
for filename in extra:
if not cfg.read(filename, encoding="UTF-8"):
raise exceptions.ExecutionError(
                    f"The specified config file does not exist: {filename}"
                    f"The specified config file does not exist: {filename}",
)
return cfg, cfg_dir
@ -131,7 +131,7 @@ def parse_config(
raise ValueError(
f"Error code {error_code!r} "
f"supplied to {option_name!r} option "
f"does not match {VALID_CODE_PREFIX.pattern!r}"
f"does not match {VALID_CODE_PREFIX.pattern!r}",
)
assert option.config_name is not None

View file

@ -165,7 +165,7 @@ class Option:
if long_option_name is _ARG.NO:
raise ValueError(
"When specifying parse_from_config=True, "
"a long_option_name must also be specified."
"a long_option_name must also be specified.",
)
self.config_name = long_option_name[2:].replace("-", "_")

View file

@ -83,8 +83,8 @@ class Plugins(NamedTuple):
f"{loaded.plugin.package}: {loaded.plugin.version}"
for loaded in self.all_plugins()
if loaded.plugin.package not in {"flake8", "local"}
}
)
},
),
)
@ -167,7 +167,7 @@ def _flake8_plugins(
# ideally pycodestyle's plugin entrypoints would exactly represent
# the codes they produce...
yield Plugin(
pycodestyle_meta["name"], pycodestyle_meta["version"], ep
pycodestyle_meta["name"], pycodestyle_meta["version"], ep,
)
else:
yield Plugin(name, version, ep)
@ -240,7 +240,7 @@ def _check_required_plugins(
f"required plugins were not installed!\n"
f"- installed: {', '.join(sorted(plugin_names))}\n"
f"- expected: {', '.join(sorted(expected_names))}\n"
f"- missing: {', '.join(sorted(missing_plugins))}"
f"- missing: {', '.join(sorted(missing_plugins))}",
)
@ -338,7 +338,7 @@ def _classify_plugins(
if not VALID_CODE_PREFIX.match(loaded.entry_name):
raise ExecutionError(
f"plugin code for `{loaded.display_name}` does not match "
f"{VALID_CODE_PREFIX.pattern}"
f"{VALID_CODE_PREFIX.pattern}",
)
return Plugins(

View file

@ -72,7 +72,7 @@ class FlakesChecker(pyflakes.checker.Checker):
def __init__(self, tree: ast.AST, filename: str) -> None:
"""Initialize the PyFlakes plugin with an AST tree and filename."""
super().__init__(
tree, filename=filename, withDoctest=self.with_doctest
tree, filename=filename, withDoctest=self.with_doctest,
)
@classmethod

View file

@ -21,7 +21,7 @@ LOG = logging.getLogger(__name__)
NEWLINE = frozenset([tokenize.NL, tokenize.NEWLINE])
SKIP_TOKENS = frozenset(
[tokenize.NL, tokenize.NEWLINE, tokenize.INDENT, tokenize.DEDENT]
[tokenize.NL, tokenize.NEWLINE, tokenize.INDENT, tokenize.DEDENT],
)
_LogicalMapping = list[tuple[int, tuple[int, int]]]
@ -173,7 +173,7 @@ class FileProcessor:
"""Update the checker_state attribute for the plugin."""
if "checker_state" in plugin.parameters:
self.checker_state = self._checker_states.setdefault(
plugin.entry_name, {}
plugin.entry_name, {},
)
def next_logical_line(self) -> None:
@ -280,7 +280,7 @@ class FileProcessor:
def _noqa_line_range(self, min_line: int, max_line: int) -> dict[int, str]:
line_range = range(min_line, max_line + 1)
joined = "".join(self.lines[min_line - 1 : max_line])
joined = "".join(self.lines[min_line - 1: max_line])
return dict.fromkeys(line_range, joined)
@functools.cached_property
@ -367,7 +367,7 @@ class FileProcessor:
elif any(defaults.NOQA_FILE.search(line) for line in self.lines):
LOG.warning(
"Detected `flake8: noqa` on line with code. To ignore an "
"error on a line use `noqa` instead."
"error on a line use `noqa` instead.",
)
return False
else:
@ -388,7 +388,7 @@ class FileProcessor:
def is_eol_token(token: tokenize.TokenInfo) -> bool:
"""Check if the token is an end-of-line token."""
return token[0] in NEWLINE or token[4][token[3][1] :].lstrip() == "\\\n"
return token[0] in NEWLINE or token[4][token[3][1]:].lstrip() == "\\\n"
def is_multiline_string(token: tokenize.TokenInfo) -> bool:

View file

@ -35,7 +35,7 @@ class Statistics:
self._store[key].increment()
def statistics_for(
self, prefix: str, filename: str | None = None
self, prefix: str, filename: str | None = None,
) -> Generator[Statistic]:
"""Generate statistics for the prefix and filename.
@ -108,7 +108,7 @@ class Statistic:
"""
def __init__(
self, error_code: str, filename: str, message: str, count: int
self, error_code: str, filename: str, message: str, count: int,
) -> None:
"""Initialize our Statistic."""
self.error_code = error_code

View file

@ -218,7 +218,7 @@ class StyleGuideManager:
self.decider = decider or DecisionEngine(options)
self.style_guides: list[StyleGuide] = []
self.default_style_guide = StyleGuide(
options, formatter, self.stats, decider=decider
options, formatter, self.stats, decider=decider,
)
self.style_guides = [
self.default_style_guide,
@ -228,7 +228,7 @@ class StyleGuideManager:
self.style_guide_for = functools.cache(self._style_guide_for)
def populate_style_guides_with(
self, options: argparse.Namespace
self, options: argparse.Namespace,
) -> Generator[StyleGuide]:
"""Generate style guides from the per-file-ignores option.
@ -240,7 +240,7 @@ class StyleGuideManager:
per_file = utils.parse_files_to_codes_mapping(options.per_file_ignores)
for filename, violations in per_file:
yield self.default_style_guide.copy(
filename=filename, extend_ignore_with=violations
filename=filename, extend_ignore_with=violations,
)
def _style_guide_for(self, filename: str) -> StyleGuide:
@ -288,7 +288,7 @@ class StyleGuideManager:
"""
guide = self.style_guide_for(filename)
return guide.handle_error(
code, filename, line_number, column_number, text, physical_line
code, filename, line_number, column_number, text, physical_line,
)
@ -330,7 +330,7 @@ class StyleGuide:
options.extend_ignore = options.extend_ignore or []
options.extend_ignore.extend(extend_ignore_with or [])
return StyleGuide(
options, self.formatter, self.stats, filename=filename
options, self.formatter, self.stats, filename=filename,
)
@contextlib.contextmanager

View file

@ -23,7 +23,7 @@ NORMALIZE_PACKAGE_NAME_RE = re.compile(r"[-_.]+")
def parse_comma_separated_list(
value: str, regexp: Pattern[str] = COMMA_SEPARATED_LIST_RE
value: str, regexp: Pattern[str] = COMMA_SEPARATED_LIST_RE,
) -> list[str]:
"""Parse a comma-separated list.
@ -115,7 +115,7 @@ def parse_files_to_codes_mapping( # noqa: C901
f"Expected `per-file-ignores` to be a mapping from file exclude "
f"patterns to ignore codes.\n\n"
f"Configured `per-file-ignores` setting:\n\n"
f"{textwrap.indent(value.strip(), ' ')}"
f"{textwrap.indent(value.strip(), ' ')}",
)
for token in _tokenize_files_to_codes_mapping(value):
@ -150,7 +150,7 @@ def parse_files_to_codes_mapping( # noqa: C901
def normalize_paths(
paths: Sequence[str], parent: str = os.curdir
paths: Sequence[str], parent: str = os.curdir,
) -> list[str]:
"""Normalize a list of paths relative to a parent directory.

View file

@ -64,6 +64,6 @@ class Violation(NamedTuple):
return True
LOG.debug(
"%r is not ignored inline with ``# noqa: %s``", self, codes_str
"%r is not ignored inline with ``# noqa: %s``", self, codes_str,
)
return False

View file

@ -97,7 +97,7 @@ def mock_file_checker_with_plugin(plugin_target):
# Prevent it from reading lines from stdin or somewhere else
with mock.patch(
"flake8.processor.FileProcessor.read_lines", return_value=["Line 1"]
"flake8.processor.FileProcessor.read_lines", return_value=["Line 1"],
):
file_checker = checker.FileChecker(
filename="-",
@ -325,12 +325,12 @@ def test_handling_syntaxerrors_across_pythons():
if sys.version_info < (3, 10): # pragma: no cover (<3.10)
# Python 3.9 or older
err = SyntaxError(
"invalid syntax", ("<unknown>", 2, 5, "bad python:\n")
"invalid syntax", ("<unknown>", 2, 5, "bad python:\n"),
)
expected = (2, 4)
else: # pragma: no cover (3.10+)
err = SyntaxError(
"invalid syntax", ("<unknown>", 2, 1, "bad python:\n", 2, 11)
"invalid syntax", ("<unknown>", 2, 1, "bad python:\n", 2, 11),
)
expected = (2, 1)
file_checker = checker.FileChecker(

View file

@ -314,7 +314,7 @@ def test_cli_config_option_respected(tmp_path):
"""\
[flake8]
ignore = F401
"""
""",
)
py_file = tmp_path / "t.py"
@ -330,7 +330,7 @@ def test_cli_isolated_overrides_config_option(tmp_path):
"""\
[flake8]
ignore = F401
"""
""",
)
py_file = tmp_path / "t.py"
@ -364,7 +364,7 @@ def test_output_file(tmpdir, capsys):
def test_early_keyboard_interrupt_does_not_crash(capsys):
with mock.patch.object(
config, "load_config", side_effect=KeyboardInterrupt
config, "load_config", side_effect=KeyboardInterrupt,
):
assert cli.main(["does-not-exist"]) == 1
out, err = capsys.readouterr()

View file

@ -86,7 +86,7 @@ def test_local_plugin_can_add_option(local_config):
stage1_args, rest = stage1_parser.parse_known_args(argv)
cfg, cfg_dir = config.load_config(
config=stage1_args.config, extra=[], isolated=False
config=stage1_args.config, extra=[], isolated=False,
)
opts = finder.parse_plugin_options(

View file

@ -42,7 +42,7 @@ def test_plugins_all_plugins():
logical_line_plugin = _loaded(parameters={"logical_line": True})
physical_line_plugin = _loaded(parameters={"physical_line": True})
report_plugin = _loaded(
plugin=_plugin(ep=_ep(name="R", group="flake8.report"))
plugin=_plugin(ep=_ep(name="R", group="flake8.report")),
)
plugins = finder.Plugins(
@ -200,14 +200,16 @@ def test_flake8_plugins(flake8_dist, mock_distribution):
"flake8",
"9001",
importlib.metadata.EntryPoint(
"default", "flake8.formatting.default:Default", "flake8.report"
"default",
"flake8.formatting.default:Default",
"flake8.report",
),
),
finder.Plugin(
"flake8",
"9001",
importlib.metadata.EntryPoint(
"pylint", "flake8.formatting.default:Pylint", "flake8.report"
"pylint", "flake8.formatting.default:Pylint", "flake8.report",
),
),
}
@ -270,7 +272,7 @@ unrelated = unrelated:main
"flake8-foo",
"1.2.3",
importlib.metadata.EntryPoint(
"Q", "flake8_foo:Plugin", "flake8.extension"
"Q", "flake8_foo:Plugin", "flake8.extension",
),
),
finder.Plugin(
@ -304,21 +306,23 @@ unrelated = unrelated:main
"flake8",
"9001",
importlib.metadata.EntryPoint(
"default", "flake8.formatting.default:Default", "flake8.report"
"default",
"flake8.formatting.default:Default",
"flake8.report",
),
),
finder.Plugin(
"flake8",
"9001",
importlib.metadata.EntryPoint(
"pylint", "flake8.formatting.default:Pylint", "flake8.report"
"pylint", "flake8.formatting.default:Pylint", "flake8.report",
),
),
finder.Plugin(
"flake8-foo",
"1.2.3",
importlib.metadata.EntryPoint(
"foo", "flake8_foo:Formatter", "flake8.report"
"foo", "flake8_foo:Formatter", "flake8.report",
),
),
}
@ -485,28 +489,30 @@ def test_find_plugins(
"flake8",
"9001",
importlib.metadata.EntryPoint(
"default", "flake8.formatting.default:Default", "flake8.report"
"default",
"flake8.formatting.default:Default",
"flake8.report",
),
),
finder.Plugin(
"flake8",
"9001",
importlib.metadata.EntryPoint(
"pylint", "flake8.formatting.default:Pylint", "flake8.report"
"pylint", "flake8.formatting.default:Pylint", "flake8.report",
),
),
finder.Plugin(
"flake8-foo",
"1.2.3",
importlib.metadata.EntryPoint(
"Q", "flake8_foo:Plugin", "flake8.extension"
"Q", "flake8_foo:Plugin", "flake8.extension",
),
),
finder.Plugin(
"flake8-foo",
"1.2.3",
importlib.metadata.EntryPoint(
"foo", "flake8_foo:Formatter", "flake8.report"
"foo", "flake8_foo:Formatter", "flake8.report",
),
),
finder.Plugin(
@ -518,7 +524,7 @@ def test_find_plugins(
"local",
"local",
importlib.metadata.EntryPoint(
"Y", "mod2:attr", "flake8.extension"
"Y", "mod2:attr", "flake8.extension",
),
),
finder.Plugin(
@ -723,7 +729,7 @@ def test_import_plugins_extends_sys_path():
def test_classify_plugins():
report_plugin = _loaded(
plugin=_plugin(ep=_ep(name="R", group="flake8.report"))
plugin=_plugin(ep=_ep(name="R", group="flake8.report")),
)
tree_plugin = _loaded(parameters={"tree": True})
logical_line_plugin = _loaded(parameters={"logical_line": True})

View file

@ -25,7 +25,7 @@ def reporters():
"flake8",
"123",
importlib.metadata.EntryPoint(
name, f"{cls.__module__}:{cls.__name__}", "flake8.report"
name, f"{cls.__module__}:{cls.__name__}", "flake8.report",
),
),
cls,
@ -72,5 +72,5 @@ def test_make_formatter_format_string(reporters, caplog):
"flake8.plugins.reporter",
30,
"'hi %(code)s' is an unknown formatter. Falling back to default.",
)
),
]

View file

@ -36,7 +36,7 @@ def application():
],
)
def test_application_exit_code(
result_count, catastrophic, exit_zero, value, application
result_count, catastrophic, exit_zero, value, application,
):
"""Verify Application.exit_code returns the correct value."""
application.result_count = result_count

View file

@ -50,7 +50,7 @@ def test_format_needs_to_be_implemented():
formatter = base.BaseFormatter(options())
with pytest.raises(NotImplementedError):
formatter.format(
Violation("A000", "file.py", 1, 1, "error text", None)
Violation("A000", "file.py", 1, 1, "error text", None),
)
@ -59,7 +59,7 @@ def test_show_source_returns_nothing_when_not_showing_source():
formatter = base.BaseFormatter(options(show_source=False))
assert (
formatter.show_source(
Violation("A000", "file.py", 1, 1, "error text", "line")
Violation("A000", "file.py", 1, 1, "error text", "line"),
)
== ""
)
@ -70,7 +70,7 @@ def test_show_source_returns_nothing_when_there_is_source():
formatter = base.BaseFormatter(options(show_source=True))
assert (
formatter.show_source(
Violation("A000", "file.py", 1, 1, "error text", None)
Violation("A000", "file.py", 1, 1, "error text", None),
)
== ""
)

View file

@ -14,7 +14,7 @@ def test_debug_information():
pkg,
version,
importlib.metadata.EntryPoint(
ep_name, "dne:dne", "flake8.extension"
ep_name, "dne:dne", "flake8.extension",
),
),
None,

View file

@ -35,7 +35,7 @@ def create_options(**kwargs):
def test_was_ignored_ignores_errors(ignore_list, extend_ignore, error_code):
"""Verify we detect users explicitly ignoring an error."""
decider = style_guide.DecisionEngine(
create_options(ignore=ignore_list, extend_ignore=extend_ignore)
create_options(ignore=ignore_list, extend_ignore=extend_ignore),
)
assert decider.was_ignored(error_code) is style_guide.Ignored.Explicitly
@ -53,11 +53,11 @@ def test_was_ignored_ignores_errors(ignore_list, extend_ignore, error_code):
],
)
def test_was_ignored_implicitly_selects_errors(
ignore_list, extend_ignore, error_code
ignore_list, extend_ignore, error_code,
):
"""Verify we detect users does not explicitly ignore an error."""
decider = style_guide.DecisionEngine(
create_options(ignore=ignore_list, extend_ignore=extend_ignore)
create_options(ignore=ignore_list, extend_ignore=extend_ignore),
)
assert decider.was_ignored(error_code) is style_guide.Selected.Implicitly
@ -179,7 +179,7 @@ def test_was_selected_excludes_errors(select_list, error_code):
],
)
def test_decision_for(
select_list, ignore_list, extend_ignore, error_code, expected
select_list, ignore_list, extend_ignore, error_code, expected,
):
"""Verify we decide when to report an error."""
decider = style_guide.DecisionEngine(
@ -187,7 +187,7 @@ def test_decision_for(
select=select_list,
ignore=ignore_list,
extend_ignore=extend_ignore,
)
),
)
assert decider.decision_for(error_code) is expected

View file

@ -47,7 +47,7 @@ def test_filenames_from_a_directory_with_a_predicate():
_filenames_from(
arg=_normpath("a/b/"),
predicate=lambda path: path.endswith(_normpath("b/c.py")),
)
),
)
# should not include c.py
expected = _normpaths(("a/b/d.py", "a/b/e/f.py"))
@ -61,7 +61,7 @@ def test_filenames_from_a_directory_with_a_predicate_from_the_current_dir():
_filenames_from(
arg=_normpath("./a/b"),
predicate=lambda path: path == "c.py",
)
),
)
# none should have matched the predicate so all returned
expected = _normpaths(("./a/b/c.py", "./a/b/d.py", "./a/b/e/f.py"))
@ -132,7 +132,7 @@ def _expand_paths(
stdin_display_name=stdin_display_name,
filename_patterns=filename_patterns,
exclude=exclude,
)
),
)

View file

@ -28,7 +28,7 @@ def _lines_from_file(tmpdir, contents, options):
def test_read_lines_universal_newlines(tmpdir, default_options):
r"""Verify that line endings are translated to \n."""
lines = _lines_from_file(
tmpdir, b"# coding: utf-8\r\nx = 1\r\n", default_options
tmpdir, b"# coding: utf-8\r\nx = 1\r\n", default_options,
)
assert lines == ["# coding: utf-8\n", "x = 1\n"]
@ -36,7 +36,7 @@ def test_read_lines_universal_newlines(tmpdir, default_options):
def test_read_lines_incorrect_utf_16(tmpdir, default_options):
"""Verify that an incorrectly encoded file is read as latin-1."""
lines = _lines_from_file(
tmpdir, b"# coding: utf16\nx = 1\n", default_options
tmpdir, b"# coding: utf16\nx = 1\n", default_options,
)
assert lines == ["# coding: utf16\n", "x = 1\n"]
@ -44,7 +44,7 @@ def test_read_lines_incorrect_utf_16(tmpdir, default_options):
def test_read_lines_unknown_encoding(tmpdir, default_options):
"""Verify that an unknown encoding is still read as latin-1."""
lines = _lines_from_file(
tmpdir, b"# coding: fake-encoding\nx = 1\n", default_options
tmpdir, b"# coding: fake-encoding\nx = 1\n", default_options,
)
assert lines == ["# coding: fake-encoding\n", "x = 1\n"]
@ -289,7 +289,7 @@ def test_processor_split_line(default_options):
def test_build_ast(default_options):
"""Verify the logic for how we build an AST for plugins."""
file_processor = processor.FileProcessor(
"-", default_options, lines=["a = 1\n"]
"-", default_options, lines=["a = 1\n"],
)
module = file_processor.build_ast()
@ -299,7 +299,7 @@ def test_build_ast(default_options):
def test_next_logical_line_updates_the_previous_logical_line(default_options):
"""Verify that we update our tracking of the previous logical line."""
file_processor = processor.FileProcessor(
"-", default_options, lines=["a = 1\n"]
"-", default_options, lines=["a = 1\n"],
)
file_processor.indent_level = 1
@ -315,7 +315,7 @@ def test_next_logical_line_updates_the_previous_logical_line(default_options):
def test_visited_new_blank_line(default_options):
"""Verify we update the number of blank lines seen."""
file_processor = processor.FileProcessor(
"-", default_options, lines=["a = 1\n"]
"-", default_options, lines=["a = 1\n"],
)
assert file_processor.blank_lines == 0

View file

@ -6,7 +6,7 @@ from flake8.main import options
def test_stage1_arg_parser():
stage1_parser = options.stage1_arg_parser()
opts, args = stage1_parser.parse_known_args(
["--foo", "--verbose", "src", "setup.py", "--statistics", "--version"]
["--foo", "--verbose", "src", "setup.py", "--statistics", "--version"],
)
assert opts.verbose

View file

@ -122,7 +122,7 @@ def test_parse_args_handles_comma_separated_defaults(optmanager):
assert optmanager.config_options_dict == {}
optmanager.add_option(
"--exclude", default="E123,W234", comma_separated_list=True
"--exclude", default="E123,W234", comma_separated_list=True,
)
options = optmanager.parse_args([])
@ -135,7 +135,7 @@ def test_parse_args_handles_comma_separated_lists(optmanager):
assert optmanager.config_options_dict == {}
optmanager.add_option(
"--exclude", default="E123,W234", comma_separated_list=True
"--exclude", default="E123,W234", comma_separated_list=True,
)
options = optmanager.parse_args(["--exclude", "E201,W111,F280"])
@ -148,11 +148,11 @@ def test_parse_args_normalize_paths(optmanager):
assert optmanager.config_options_dict == {}
optmanager.add_option(
"--extra-config", normalize_paths=True, comma_separated_list=True
"--extra-config", normalize_paths=True, comma_separated_list=True,
)
options = optmanager.parse_args(
["--extra-config", "../config.ini,tox.ini,flake8/some-other.cfg"]
["--extra-config", "../config.ini,tox.ini,flake8/some-other.cfg"],
)
assert options.extra_config == [
os.path.abspath("../config.ini"),

View file

@ -169,7 +169,7 @@ def test_load_extra_config_utf8(tmpdir):
@pytest.fixture
def opt_manager():
ret = OptionManager(
version="123", plugin_versions="", parents=[], formatter_names=[]
version="123", plugin_versions="", parents=[], formatter_names=[],
)
register_default_options(ret)
return ret
@ -213,7 +213,7 @@ def test_parse_config_ignores_unknowns(tmp_path, opt_manager, caplog):
"flake8.options.config",
10,
'Option "wat" is not registered. Ignoring.',
)
),
]

View file

@ -36,7 +36,7 @@ def test_handle_error_does_not_raise_type_errors():
)
assert 1 == guide.handle_error(
"T111", "file.py", 1, 1, "error found", "a = 1"
"T111", "file.py", 1, 1, "error found", "a = 1",
)
@ -110,7 +110,7 @@ def test_style_guide_manager_pre_file_ignores_parsing():
],
)
def test_style_guide_manager_pre_file_ignores(
ignores, violation, filename, handle_error_return
ignores, violation, filename, handle_error_return,
):
"""Verify how the StyleGuideManager creates a default style guide."""
formatter = mock.create_autospec(base.BaseFormatter, instance=True)