Fix crash when a file fails to tokenize but still parses

This commit is contained in:
Anthony Sottile 2019-04-07 07:35:46 -07:00
parent 2b333fad1a
commit 38ab47702a
2 changed files with 15 additions and 2 deletions

View file

@ -597,6 +597,7 @@ class FileChecker(object):
"""Run checks against the file."""
try:
self.process_tokens()
self.run_ast_checks()
except exceptions.InvalidSyntax as exc:
self.report(
exc.error_code,
@ -605,8 +606,6 @@ class FileChecker(object):
exc.error_message,
)
self.run_ast_checks()
logical_lines = self.processor.statistics["logical lines"]
self.statistics["logical lines"] = logical_lines
return self.filename, self.results, self.statistics

View file

@ -85,3 +85,17 @@ Configured `per-file-ignores` setting:
incorrect/*
values/*
''' # noqa: E501
def test_tokenization_error_but_not_syntax_error(tmpdir, capsys):
    """A tokenizer failure on ast-parseable source reports E902, no crash."""
    # This source trips the tokenizer (EOF in multi-line statement) even
    # though `ast` can parse it without error.
    tmpdir.join('t.py').write("b'foo' \\\n")
    with tmpdir.as_cwd():
        app = application.Application()
        app.run(['t.py'])
    out, err = capsys.readouterr()
    assert out == 't.py:1:1: E902 TokenError: EOF in multi-line statement\n'
    assert err == ''