Merge pull request #1983 from PyCQA/py314

add support for t-strings
This commit is contained in:
Anthony Sottile 2025-05-23 18:45:49 -04:00 committed by GitHub
commit 23e4005c55
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 57 additions and 3 deletions

View file

@ -9,3 +9,10 @@ if sys.version_info >= (3, 12): # pragma: >=3.12 cover
FSTRING_END = tokenize.FSTRING_END
else: # pragma: <3.12 cover
FSTRING_START = FSTRING_MIDDLE = FSTRING_END = -1
if sys.version_info >= (3, 14):  # pragma: >=3.14 cover
    # t-string (PEP 750 template string) tokens exist only on 3.14+
    TSTRING_START = tokenize.TSTRING_START
    TSTRING_MIDDLE = tokenize.TSTRING_MIDDLE
    TSTRING_END = tokenize.TSTRING_END
else:  # pragma: <3.14 cover
    # -1 is an impossible token type, so `token.type == TSTRING_*`
    # comparisons are always False on older interpreters
    TSTRING_START = TSTRING_MIDDLE = TSTRING_END = -1

View file

@ -19,6 +19,7 @@ from flake8 import exceptions
from flake8 import processor
from flake8 import utils
from flake8._compat import FSTRING_START
from flake8._compat import TSTRING_START
from flake8.discover_files import expand_paths
from flake8.options.parse_args import parse_args
from flake8.plugins.finder import Checkers
@ -554,6 +555,8 @@ class FileChecker:
assert self.processor is not None
if token.type == FSTRING_START: # pragma: >=3.12 cover
self.processor.fstring_start(token.start[0])
elif token.type == TSTRING_START: # pragma: >=3.14 cover
self.processor.tstring_start(token.start[0])
# a newline token ends a single physical line.
elif processor.is_eol_token(token):
# if the file does not end with a newline, the NEWLINE

View file

@ -13,6 +13,8 @@ from flake8 import defaults
from flake8 import utils
from flake8._compat import FSTRING_END
from flake8._compat import FSTRING_MIDDLE
from flake8._compat import TSTRING_END
from flake8._compat import TSTRING_MIDDLE
from flake8.plugins.finder import LoadedPlugin
LOG = logging.getLogger(__name__)
@ -113,7 +115,7 @@ class FileProcessor:
self.verbose = options.verbose
#: Statistics dictionary
self.statistics = {"logical lines": 0}
self._fstring_start = -1
self._fstring_start = self._tstring_start = -1
@functools.cached_property
def file_tokens(self) -> list[tokenize.TokenInfo]:
@ -125,10 +127,16 @@ class FileProcessor:
"""Signal the beginning of an fstring."""
self._fstring_start = lineno
def tstring_start(self, lineno: int) -> None:  # pragma: >=3.14 cover
    """Signal the beginning of a tstring.

    :param lineno: physical line number where the t-string opens.
    """
    # remembered so multiline_string() can compute the full physical
    # span of the t-string once the TSTRING_END token is seen
    self._tstring_start = lineno
def multiline_string(self, token: tokenize.TokenInfo) -> Generator[str]:
"""Iterate through the lines of a multiline string."""
if token.type == FSTRING_END: # pragma: >=3.12 cover
start = self._fstring_start
elif token.type == TSTRING_END: # pragma: >=3.14 cover
start = self._tstring_start
else:
start = token.start[0]
@ -198,7 +206,10 @@ class FileProcessor:
continue
if token_type == tokenize.STRING:
text = mutate_string(text)
elif token_type == FSTRING_MIDDLE: # pragma: >=3.12 cover
elif token_type in {
FSTRING_MIDDLE,
TSTRING_MIDDLE,
}: # pragma: >=3.12 cover # noqa: E501
# A curly brace in an FSTRING_MIDDLE / TSTRING_MIDDLE token must
# be an escaped curly brace. Both 'text' and 'end' will account
# for the escaped version of the token (i.e. a single brace) rather
@ -382,7 +393,7 @@ def is_eol_token(token: tokenize.TokenInfo) -> bool:
def is_multiline_string(token: tokenize.TokenInfo) -> bool:
    """Check if this is a multiline string.

    f-strings and t-strings are tokenized piecewise with dedicated END
    tokens, so their END token (rather than a newline inside the token
    text) is what identifies them.

    :param token: the token to inspect.
    :returns: ``True`` if the token ends a multiline string.
    """
    # NOTE: the diff rendering retained both the pre- and post-change
    # `return` lines; only the t-string-aware version belongs here.
    return token.type in {FSTRING_END, TSTRING_END} or (
        token.type == tokenize.STRING and "\n" in token.string
    )

View file

@ -296,3 +296,36 @@ t.py:1:1: T001 "f'xxxxxxxxxxxxxxxxxxxxxxxx'"
"""
out, err = capsys.readouterr()
assert out == expected
@pytest.mark.xfail(sys.version_info < (3, 14), reason="3.14+")
def test_tstring_logical_line(tmpdir, capsys):  # pragma: >=3.14 cover
    """t-string contents are masked in the rendered logical line."""
    config_text = f"""\
[flake8]
extend-ignore = F
[flake8:local-plugins]
extension =
    T = {yields_logical_line.__module__}:{yields_logical_line.__name__}
"""
    source_text = """\
t'''
hello {world}
'''
t'{{"{hello}": "{world}"}}'
"""
    config_file = tmpdir.join("tox.ini")
    config_file.write(config_text)
    target = tmpdir.join("t.py")
    target.write_binary(source_text.encode())

    with tmpdir.as_cwd():
        ret = main(("t.py", "--config", str(config_file)))
    assert ret == 1

    out, err = capsys.readouterr()
    assert out == """\
t.py:1:1: T001 "t'''xxxxxxx{world}x'''"
t.py:4:1: T001 "t'xxx{hello}xxxx{world}xxx'"
"""