use tokenize.TokenInfo instead of _Token alias

This commit is contained in:
Anthony Sottile 2022-01-05 15:37:25 -05:00
parent 01e8376094
commit ec57d5e67c
4 changed files with 18 additions and 14 deletions

View file

@@ -275,7 +275,7 @@ def test_split_line(unsplit_line, expected_lines, default_options):
],
)
-token = (1, unsplit_line, (0, 0), (0, 0), "")
+token = tokenize.TokenInfo(1, unsplit_line, (0, 0), (0, 0), "")
actual_lines = list(file_processor.split_line(token))
assert expected_lines == actual_lines