mirror of https://github.com/PyCQA/flake8.git, synced 2026-04-10 14:54:17 +00:00

Merge pull request #1849 from PyCQA/handle-multiline-fstrings-in-312

    handle multiline fstrings in 3.12

commit d4d1552c5b: 5 changed files with 83 additions and 53 deletions
src/flake8/_compat.py (new file, 11 lines)

@@ -0,0 +1,11 @@
+from __future__ import annotations
+
+import sys
+import tokenize
+
+if sys.version_info >= (3, 12):
+    FSTRING_START = tokenize.FSTRING_START
+    FSTRING_MIDDLE = tokenize.FSTRING_MIDDLE
+    FSTRING_END = tokenize.FSTRING_END
+else:
+    FSTRING_START = FSTRING_MIDDLE = FSTRING_END = -1
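
A quick way to see why this shim exists (illustrative snippet, not part of the commit): on 3.12+ the tokenizer splits an f-string into FSTRING_START, FSTRING_MIDDLE, and FSTRING_END tokens, while older versions emit one plain STRING token, so the sentinel -1 makes the new comparisons always-false there.

    # prints FSTRING_* tokens on 3.12+, a single STRING token before that
    import io
    import tokenize

    src = 'x = f"""\nhello {y}\n"""\n'
    for tok in tokenize.generate_tokens(io.StringIO(src).readline):
        print(tokenize.tok_name[tok.type], repr(tok.string))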
src/flake8/checker.py

@@ -20,6 +20,7 @@ from flake8 import defaults
 from flake8 import exceptions
 from flake8 import processor
 from flake8 import utils
+from flake8._compat import FSTRING_START
 from flake8.discover_files import expand_paths
 from flake8.options.parse_args import parse_args
 from flake8.plugins.finder import Checkers
@@ -551,15 +552,17 @@ class FileChecker:
     ) -> None:
         """Run physical checks if and only if it is at the end of the line."""
         assert self.processor is not None
+        if token.type == FSTRING_START:  # pragma: >=3.12 cover
+            self.processor.fstring_start(token.start[0])
         # a newline token ends a single physical line.
-        if processor.is_eol_token(token):
+        elif processor.is_eol_token(token):
             # if the file does not end with a newline, the NEWLINE
             # token is inserted by the parser, but it does not contain
             # the previous physical line in `token[4]`
-            if token[4] == "":
+            if token.line == "":
                 self.run_physical_checks(prev_physical)
             else:
-                self.run_physical_checks(token[4])
+                self.run_physical_checks(token.line)
         elif processor.is_multiline_string(token):
             # Less obviously, a string that contains newlines is a
             # multiline string, either triple-quoted or with internal
@@ -572,10 +575,8 @@ class FileChecker:
             # - have to wind self.line_number back because initially it
             #   points to the last line of the string, and we want
             #   check_physical() to give accurate feedback
-            line_no = token[2][0]
-            with self.processor.inside_multiline(line_number=line_no):
-                for line in self.processor.split_line(token):
-                    self.run_physical_checks(line)
+            for line in self.processor.multiline_string(token):
+                self.run_physical_checks(line)


 def _try_initialize_processpool(
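
Together these two hunks handle the fact that on 3.12 the FSTRING_END token spans only the closing quotes, so by the time the multiline branch runs, the token alone no longer says where the string began; FSTRING_START stashes that line number up front. A small illustration of the token coordinates (not from the commit, prints only on 3.12+):

    import io
    import sys
    import tokenize

    if sys.version_info >= (3, 12):
        src = 'x = f"""\nhello {y}\n"""\n'
        for tok in tokenize.generate_tokens(io.StringIO(src).readline):
            if tok.type in (tokenize.FSTRING_START, tokenize.FSTRING_END):
                print(tokenize.tok_name[tok.type], tok.start, tok.end)
        # FSTRING_START (1, 4) (1, 8)
        # FSTRING_END   (3, 0) (3, 3)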
src/flake8/processor.py

@@ -3,9 +3,7 @@ from __future__ import annotations

 import argparse
 import ast
-import contextlib
 import logging
-import sys
 import tokenize
 from typing import Any
 from typing import Generator
@@ -14,6 +12,8 @@ from typing import Tuple

 from flake8 import defaults
 from flake8 import utils
+from flake8._compat import FSTRING_END
+from flake8._compat import FSTRING_MIDDLE
 from flake8.plugins.finder import LoadedPlugin

 LOG = logging.getLogger(__name__)
@@ -117,6 +117,7 @@ class FileProcessor:
         self._file_tokens: list[tokenize.TokenInfo] | None = None
         # map from line number to the line we'll search for `noqa` in
         self._noqa_line_mapping: dict[int, str] | None = None
+        self._fstring_start = -1

     @property
     def file_tokens(self) -> list[tokenize.TokenInfo]:
@@ -129,14 +130,26 @@

         return self._file_tokens

-    @contextlib.contextmanager
-    def inside_multiline(
-        self, line_number: int
-    ) -> Generator[None, None, None]:
-        """Context-manager to toggle the multiline attribute."""
-        self.line_number = line_number
+    def fstring_start(self, lineno: int) -> None:
+        """Signal the beginning of an fstring."""
+        self._fstring_start = lineno
+
+    def multiline_string(
+        self, token: tokenize.TokenInfo
+    ) -> Generator[str, None, None]:
+        """Iterate through the lines of a multiline string."""
+        if token.type == FSTRING_END:
+            start = self._fstring_start
+        else:
+            start = token.start[0]
+
         self.multiline = True
-        yield
+        self.line_number = start
+        # intentionally don't include the last line, that line will be
+        # terminated later by a future end-of-line
+        for _ in range(start, token.end[0]):
+            yield self.lines[self.line_number - 1]
+            self.line_number += 1
         self.multiline = False

     def reset_blank_before(self) -> None:
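
Condensed to its essentials, the new generator rewinds to the string's first physical line and yields every line except the last, which a later end-of-line token will terminate. A self-contained sketch of that accounting (the helper name and shape are mine, not the flake8 API):

    import io
    import tokenize

    def multiline_lines(lines, token, fstring_start=-1):
        # FSTRING_END covers only the closing quotes, so fall back to the
        # stashed start line for f-strings (assumption: -1 means "not an fstring")
        if token.type == getattr(tokenize, "FSTRING_END", -1):
            start = fstring_start
        else:
            start = token.start[0]
        for line_number in range(start, token.end[0]):
            yield lines[line_number - 1]

    lines = ['x = """\n', 'contents\n', '"""\n']
    tok = next(
        t
        for t in tokenize.generate_tokens(io.StringIO("".join(lines)).readline)
        if t.type == tokenize.STRING
    )
    print(list(multiline_lines(lines, tok)))  # ['x = """\n', 'contents\n']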
@@ -196,10 +209,7 @@
                 continue
             if token_type == tokenize.STRING:
                 text = mutate_string(text)
-            elif (
-                sys.version_info >= (3, 12)
-                and token_type == tokenize.FSTRING_MIDDLE
-            ):
+            elif token_type == FSTRING_MIDDLE:
                 text = "x" * len(text)
             if previous_row:
                 (start_row, start_column) = start
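
The "x" masking serves the same purpose here as mutate_string() does for regular strings: logical-line plugins see placeholder characters of the same length, so column offsets stay correct without exposing string contents. For example (the token value below is an assumption about how 3.12 splits f"hello {name}"):

    text = "hello "          # the FSTRING_MIDDLE fragment of f"hello {name}"
    assert "x" * len(text) == "xxxxxx"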
@@ -231,19 +241,6 @@
         self.statistics["logical lines"] += 1
         return joined_comments, self.logical_line, mapping_list

-    def split_line(
-        self, token: tokenize.TokenInfo
-    ) -> Generator[str, None, None]:
-        """Split a physical line's line based on new-lines.
-
-        This also auto-increments the line number for the caller.
-        """
-        # intentionally don't include the last line, that line will be
-        # terminated later by a future end-of-line
-        for line_no in range(token.start[0], token.end[0]):
-            yield self.lines[line_no - 1]
-            self.line_number += 1
-
     def keyword_arguments_for(
         self,
         parameters: dict[str, bool],
@@ -398,7 +395,9 @@ def is_eol_token(token: tokenize.TokenInfo) -> bool:

 def is_multiline_string(token: tokenize.TokenInfo) -> bool:
     """Check if this is a multiline string."""
-    return token[0] == tokenize.STRING and "\n" in token[1]
+    return token.type == FSTRING_END or (
+        token.type == tokenize.STRING and "\n" in token.string
+    )


 def token_is_newline(token: tokenize.TokenInfo) -> bool:
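
The extra FSTRING_END arm is what lets f-strings reach the multiline branch at all: on 3.12 no single f-string token is a STRING containing a newline, so the old check never fires for them. A quick check (not from the commit):

    import io
    import tokenize

    src = 'x = """\na\n"""\ny = f"""\nb\n"""\n'
    for tok in tokenize.generate_tokens(io.StringIO(src).readline):
        if tok.type == tokenize.STRING and "\n" in tok.string:
            print("multiline STRING:", repr(tok.string))
    # on 3.12+ only the plain triple-quoted string prints; before 3.12 the
    # f-string is itself a STRING token, so both print and the old check sufficed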
tests/integration/test_plugins.py

@@ -199,6 +199,38 @@ t.py:3:1: T001 '"""\\n'
     assert out == expected


+def test_physical_line_plugin_multiline_fstring(tmpdir, capsys):
+    cfg_s = f"""\
+[flake8:local-plugins]
+extension =
+    T = {yields_physical_line.__module__}:{yields_physical_line.__name__}
+"""
+
+    cfg = tmpdir.join("tox.ini")
+    cfg.write(cfg_s)
+
+    src = '''\
+y = 1
+x = f"""
+hello {y}
+"""
+'''
+    t_py = tmpdir.join("t.py")
+    t_py.write_binary(src.encode())
+
+    with tmpdir.as_cwd():
+        assert main(("t.py", "--config", str(cfg))) == 1
+
+    expected = '''\
+t.py:1:1: T001 'y = 1\\n'
+t.py:2:1: T001 'x = f"""\\n'
+t.py:3:1: T001 'hello {y}\\n'
+t.py:4:1: T001 '"""\\n'
+'''
+    out, err = capsys.readouterr()
+    assert out == expected
+
+
 def yields_logical_line(logical_line):
     yield 0, f"T001 {logical_line!r}"
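
For context, the local plugin the config points at is the yields_physical_line helper defined earlier in this test module (shape assumed from the T001 output format in the expected results):

    def yields_physical_line(physical_line):
        yield 0, f"T001 {physical_line!r}"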
tests/unit/test_file_processor.py

@@ -275,13 +275,15 @@ def test_processor_split_line(default_options):
         (3, 3),
         'x = """\ncontents\n"""\n',
     )
-    expected = [('x = """\n', 0), ("contents\n", 1)]
+    expected = [('x = """\n', 1, True), ("contents\n", 2, True)]
+    assert file_processor.multiline is False
     actual = [
-        (line, file_processor.line_number)
-        for line in file_processor.split_line(token)
+        (line, file_processor.line_number, file_processor.multiline)
+        for line in file_processor.multiline_string(token)
     ]
+    assert file_processor.multiline is False
     assert expected == actual
-    assert file_processor.line_number == 2
+    assert file_processor.line_number == 3


 def test_build_ast(default_options):
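
Why the expected values moved: the old split_line() yielded with line_number still at its pre-call value (0 here, since the processor had consumed no lines) and bumped it after each yield, while multiline_string() first rewinds line_number to the token's start line (1), so each recorded number is one higher and the processor finishes on line 3, the line holding the closing quotes. The new True entries verify that the multiline flag is set while iterating and restored afterwards.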
@@ -321,21 +323,6 @@ def test_visited_new_blank_line(default_options):
     assert file_processor.blank_lines == 1


-def test_inside_multiline(default_options):
-    """Verify we update the line number and reset multiline."""
-    file_processor = processor.FileProcessor(
-        "-", default_options, lines=["a = 1\n"]
-    )
-
-    assert file_processor.multiline is False
-    assert file_processor.line_number == 0
-    with file_processor.inside_multiline(10):
-        assert file_processor.multiline is True
-        assert file_processor.line_number == 10
-
-    assert file_processor.multiline is False
-
-
 @pytest.mark.parametrize(
     "string, expected",
     [