From 8a5e9ade6ca655117d43563bad6c0d45fc16b89f Mon Sep 17 00:00:00 2001
From: ifuryst
Date: Mon, 15 Apr 2024 18:07:21 +0800
Subject: [PATCH] feat: process fstring when python version gte 3.12

---
 pre_commit_hooks/string_fixer.py | 11 +++++++++++
 tests/string_fixer_test.py       |  6 ++++++
 2 files changed, 17 insertions(+)

diff --git a/pre_commit_hooks/string_fixer.py b/pre_commit_hooks/string_fixer.py
index d1b1c4a..6cf1f7a 100644
--- a/pre_commit_hooks/string_fixer.py
+++ b/pre_commit_hooks/string_fixer.py
@@ -48,6 +48,8 @@ def fix_strings(filename: str) -> int:
     splitcontents = list(contents)
 
     fstring_depth = 0
+    fstring_content = ''
+    f_erow = f_ecol = -1
 
     # Iterate in reverse so the offsets are always correct
     tokens_l = list(tokenize.generate_tokens(io.StringIO(contents).readline))
@@ -55,8 +57,17 @@
     for token_type, token_text, (srow, scol), (erow, ecol), _ in tokens:
         if token_type == FSTRING_START:  # pragma: >=3.12 cover
             fstring_depth += 1
+            splitcontents[
+                line_offsets[srow] + scol:
+                line_offsets[f_erow] + f_ecol
+            ] = handle_match(token_text + fstring_content)
+            fstring_content = ''
         elif token_type == FSTRING_END:  # pragma: >=3.12 cover
             fstring_depth -= 1
+            fstring_content = token_text + fstring_content
+            f_erow, f_ecol = erow, ecol
+        elif fstring_depth != 0:  # pragma: >=3.12 cover
+            fstring_content = token_text + fstring_content
         elif fstring_depth == 0 and token_type == tokenize.STRING:
             new_text = handle_match(token_text)
             splitcontents[
diff --git a/tests/string_fixer_test.py b/tests/string_fixer_test.py
index 8eb164c..d554354 100644
--- a/tests/string_fixer_test.py
+++ b/tests/string_fixer_test.py
@@ -43,6 +43,12 @@ TESTS = (
         0,
         id='ignore nested fstrings',
     ),
+    pytest.param(
+        'f"Error during task loop"',
+        "f'Error during task loop'",
+        1,
+        id='process the fstrings when pyver is gte 3.12',
+    ),
 )
 
 