Merge pull request #1536 from asottile/physical-line-fix

use the actual line contents when processing physical lines
This commit is contained in:
Anthony Sottile 2022-01-22 15:45:15 -05:00 committed by GitHub
commit 5ecea41b6d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
4 changed files with 57 additions and 21 deletions

View file

@@ -581,7 +581,7 @@ class FileChecker:
line_no = token[2][0]
with self.processor.inside_multiline(line_number=line_no):
for line in self.processor.split_line(token):
self.run_physical_checks(line + "\n")
self.run_physical_checks(line)
def _pool_init() -> None:

View file

@@ -233,8 +233,10 @@ class FileProcessor:
This also auto-increments the line number for the caller.
"""
for line in token[1].split("\n")[:-1]:
yield line
# intentionally don't include the last line, that line will be
# terminated later by a future end-of-line
for line_no in range(token.start[0], token.end[0]):
yield self.lines[line_no - 1]
self.line_number += 1
def keyword_arguments_for(

View file

@@ -160,3 +160,37 @@ extension =
out, err = capsys.readouterr()
assert out == f"{t_py}:1:1: ABC123 error\n"
assert err == ""
def yields_physical_line(physical_line):
    """Trivial physical-line plugin: flag every line at column 0 as T001.

    The reported message embeds ``repr`` of the line received, so tests can
    assert exactly which text the plugin was handed.
    """
    message = f"T001 {physical_line!r}"
    yield 0, message
def test_physical_line_plugin_multiline_string(tmpdir, capsys):
    """Physical-line plugins must receive the actual file lines.

    Registers ``yields_physical_line`` as a local plugin, lints a file whose
    triple-quoted string spans three lines, and checks the plugin saw each
    real line (not a reconstruction of the token text).
    """
    # Register the plugin above as a local flake8 extension.
    config_text = f"""\
[flake8:local-plugins]
extension =
    T = {yields_physical_line.__module__}:{yields_physical_line.__name__}
"""
    config_file = tmpdir.join("tox.ini")
    config_file.write(config_text)

    # A multiline string whose token spans lines 1-3 of the file.
    source_text = '''\
x = "foo" + """
bar
"""
'''
    target = tmpdir.join("t.py")
    target.write_binary(source_text.encode())

    # One T001 per physical line, each echoing the line's real contents.
    expected_output = '''\
t.py:1:1: T001 'x = "foo" + """\\n'
t.py:2:1: T001 'bar\\n'
t.py:3:1: T001 '"""\\n'
'''

    with tmpdir.as_cwd():
        assert main(("t.py", "--config", str(config_file))) == 1

    out, err = capsys.readouterr()
    assert out == expected_output
    assert err == ""

View file

@@ -256,30 +256,30 @@ def test_keyword_arguments_for_does_not_handle_attribute_errors(
file_processor.keyword_arguments_for({"fake": True})
@pytest.mark.parametrize(
"unsplit_line, expected_lines",
[
("line", []),
("line 1\n", ["line 1"]),
("line 1\nline 2\n", ["line 1", "line 2"]),
("line 1\n\nline 2\n", ["line 1", "", "line 2"]),
],
)
def test_split_line(unsplit_line, expected_lines, default_options):
"""Verify the token line splitting."""
def test_processor_split_line(default_options):
file_processor = processor.FileProcessor(
"-",
default_options,
lines=[
"Line 1",
'x = """\n',
"contents\n",
'"""\n',
],
)
token = tokenize.TokenInfo(1, unsplit_line, (0, 0), (0, 0), "")
actual_lines = list(file_processor.split_line(token))
assert expected_lines == actual_lines
assert len(actual_lines) == file_processor.line_number
token = tokenize.TokenInfo(
3,
'"""\ncontents\n"""',
(1, 4),
(3, 3),
'x = """\ncontents\n"""\n',
)
expected = [('x = """\n', 0), ("contents\n", 1)]
actual = [
(line, file_processor.line_number)
for line in file_processor.split_line(token)
]
assert expected == actual
assert file_processor.line_number == 2
def test_build_ast(default_options):