audit + string joining

This commit is contained in:
Anthony Sottile 2021-03-29 20:07:13 -07:00
parent e9a2a10183
commit edadccd8dc
6 changed files with 11 additions and 12 deletions

View file

@@ -275,9 +275,9 @@ class Application:
add_statistic = statistics.append add_statistic = statistics.append
for statistic in defaults.STATISTIC_NAMES + ("files",): for statistic in defaults.STATISTIC_NAMES + ("files",):
value = self.file_checker_manager.statistics[statistic] value = self.file_checker_manager.statistics[statistic]
total_description = "total " + statistic + " processed" total_description = f"total {statistic} processed"
add_statistic((total_description, value)) add_statistic((total_description, value))
per_second_description = statistic + " processed per second" per_second_description = f"{statistic} processed per second"
add_statistic((per_second_description, int(value / time_elapsed))) add_statistic((per_second_description, int(value / time_elapsed)))
self.formatter.show_benchmarks(statistics) self.formatter.show_benchmarks(statistics)

View file

@@ -51,7 +51,7 @@ class ConfigFileFinder:
self.user_config_file = self._user_config_file(program_name) self.user_config_file = self._user_config_file(program_name)
# List of filenames to find in the local/project directory # List of filenames to find in the local/project directory
self.project_filenames = ("setup.cfg", "tox.ini", "." + program_name) self.project_filenames = ("setup.cfg", "tox.ini", f".{program_name}")
self.local_directory = os.path.abspath(os.curdir) self.local_directory = os.path.abspath(os.curdir)
@@ -59,7 +59,7 @@ class ConfigFileFinder:
def _user_config_file(program_name: str) -> str: def _user_config_file(program_name: str) -> str:
if utils.is_windows(): if utils.is_windows():
home_dir = os.path.expanduser("~") home_dir = os.path.expanduser("~")
config_file_basename = "." + program_name config_file_basename = f".{program_name}"
else: else:
home_dir = os.environ.get( home_dir = os.environ.get(
"XDG_CONFIG_HOME", os.path.expanduser("~/.config") "XDG_CONFIG_HOME", os.path.expanduser("~/.config")

View file

@@ -145,7 +145,7 @@ class FlakesChecker(pyflakes.checker.Checker):
if included_file == "": if included_file == "":
continue continue
if not included_file.startswith((os.sep, "./", "~/")): if not included_file.startswith((os.sep, "./", "~/")):
included_files.append("./" + included_file) included_files.append(f"./{included_file}")
else: else:
included_files.append(included_file) included_files.append(included_file)
cls.include_in_doctest = utils.normalize_paths(included_files) cls.include_in_doctest = utils.normalize_paths(included_files)
@@ -155,7 +155,7 @@ class FlakesChecker(pyflakes.checker.Checker):
if excluded_file == "": if excluded_file == "":
continue continue
if not excluded_file.startswith((os.sep, "./", "~/")): if not excluded_file.startswith((os.sep, "./", "~/")):
excluded_files.append("./" + excluded_file) excluded_files.append(f"./{excluded_file}")
else: else:
excluded_files.append(excluded_file) excluded_files.append(excluded_file)
cls.exclude_from_doctest = utils.normalize_paths(excluded_files) cls.exclude_from_doctest = utils.normalize_paths(excluded_files)

View file

@@ -218,7 +218,7 @@ class FileProcessor:
if previous_text == "," or ( if previous_text == "," or (
previous_text not in "{[(" and text not in "}])" previous_text not in "{[(" and text not in "}])"
): ):
text = " " + text text = f" {text}"
elif previous_column != start_column: elif previous_column != start_column:
text = line[previous_column:start_column] + text text = line[previous_column:start_column] + text
logical.append(text) logical.append(text)

View file

@@ -9,6 +9,7 @@ import os
import platform import platform
import re import re
import sys import sys
import textwrap
import tokenize import tokenize
from typing import Callable from typing import Callable
from typing import Dict from typing import Dict
@@ -122,13 +123,11 @@ def parse_files_to_codes_mapping(  # noqa: C901
State.codes = [] State.codes = []
def _unexpected_token() -> exceptions.ExecutionError: def _unexpected_token() -> exceptions.ExecutionError:
def _indent(s: str) -> str:
return " " + s.strip().replace("\n", "\n ")
return exceptions.ExecutionError( return exceptions.ExecutionError(
f"Expected `per-file-ignores` to be a mapping from file exclude " f"Expected `per-file-ignores` to be a mapping from file exclude "
f"patterns to ignore codes.\n\n" f"patterns to ignore codes.\n\n"
f"Configured `per-file-ignores` setting:\n\n{_indent(value)}" f"Configured `per-file-ignores` setting:\n\n"
f"{textwrap.indent(value.strip(), ' ')}"
) )
for token in _tokenize_files_to_codes_mapping(value): for token in _tokenize_files_to_codes_mapping(value):

View file

@@ -134,7 +134,7 @@ def test_normalize_path(value, expected):
(["flake8", "pep8", "pyflakes", "mccabe"], (["flake8", "pep8", "pyflakes", "mccabe"],
["flake8", "pep8", "pyflakes", "mccabe"]), ["flake8", "pep8", "pyflakes", "mccabe"]),
(["../flake8", "../pep8", "../pyflakes", "../mccabe"], (["../flake8", "../pep8", "../pyflakes", "../mccabe"],
[os.path.abspath("../" + p) for p in RELATIVE_PATHS]), [os.path.abspath(f"../{p}") for p in RELATIVE_PATHS]),
]) ])
def test_normalize_paths(value, expected): def test_normalize_paths(value, expected):
"""Verify we normalizes a sequence of paths provided to the tool.""" """Verify we normalizes a sequence of paths provided to the tool."""