mirror of
https://github.com/pre-commit/pre-commit-hooks.git
synced 2026-03-29 18:16:52 +00:00
feat: process f-strings when the Python version is >= 3.12
This commit is contained in:
parent
0d20f18212
commit
8a5e9ade6c
2 changed files with 17 additions and 0 deletions
|
|
@ -48,6 +48,8 @@ def fix_strings(filename: str) -> int:
|
|||
splitcontents = list(contents)
|
||||
|
||||
fstring_depth = 0
|
||||
fstring_content = ''
|
||||
f_erow = f_ecol = -1
|
||||
|
||||
# Iterate in reverse so the offsets are always correct
|
||||
tokens_l = list(tokenize.generate_tokens(io.StringIO(contents).readline))
|
||||
|
|
@ -55,8 +57,17 @@ def fix_strings(filename: str) -> int:
|
|||
for token_type, token_text, (srow, scol), (erow, ecol), _ in tokens:
|
||||
if token_type == FSTRING_START: # pragma: >=3.12 cover
|
||||
fstring_depth += 1
|
||||
splitcontents[
|
||||
line_offsets[srow] + scol:
|
||||
line_offsets[f_erow] + f_ecol
|
||||
] = handle_match(token_text + fstring_content)
|
||||
fstring_content = ''
|
||||
elif token_type == FSTRING_END: # pragma: >=3.12 cover
|
||||
fstring_depth -= 1
|
||||
fstring_content = token_text + fstring_content
|
||||
f_erow, f_ecol = erow, ecol
|
||||
elif fstring_depth != 0: # pragma: >=3.12 cover
|
||||
fstring_content = token_text + fstring_content
|
||||
elif fstring_depth == 0 and token_type == tokenize.STRING:
|
||||
new_text = handle_match(token_text)
|
||||
splitcontents[
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue