processor: Catch SyntaxError also when generating tokens for a file

`tokenize.generate_tokens()` can also raise `SyntaxError` in addition to
`tokenize.TokenError`.
This commit is contained in:
Eric N. Vander Weele 2020-06-02 09:40:12 -04:00 committed by Anthony Sottile
parent 40716454a2
commit a68d4d0172
2 changed files with 12 additions and 1 deletion

View file

@@ -125,7 +125,7 @@ class FileProcessor(object):
self._file_tokens = list(
tokenize.generate_tokens(lambda: next(line_iter))
)
except tokenize.TokenError as exc:
except (tokenize.TokenError, SyntaxError) as exc:
raise exceptions.InvalidSyntax(exception=exc)
return self._file_tokens

View file

@@ -162,6 +162,17 @@ def test_tokenization_error_but_not_syntax_error(tmpdir, capsys):
assert err == ''
def test_tokenization_error_is_a_syntax_error(tmpdir, capsys):
    """Test when tokenize raises a SyntaxError."""
    # A dedent that matches no outer indentation level makes tokenize
    # raise IndentationError (a SyntaxError subclass) rather than
    # tokenize.TokenError.
    with tmpdir.as_cwd():
        tmpdir.join('t.py').write('if True:\n pass\n pass\n')
        _call_main(['t.py'], retv=1)

    expected = 't.py:1:1: E902 IndentationError: unindent does not match any outer indentation level\n'  # noqa: E501
    captured = capsys.readouterr()
    assert captured.out == expected
    assert captured.err == ''
def test_bug_report_successful(capsys):
    """Test that --bug-report does not crash."""
    # NOTE(review): this only checks that the call completes without
    # raising — presumably _call_main asserts a zero exit code; confirm
    # against its definition elsewhere in this file.
    _call_main(['--bug-report'])