diff --git a/src/flake8/processor.py b/src/flake8/processor.py
index 0375ed9..b675c99 100644
--- a/src/flake8/processor.py
+++ b/src/flake8/processor.py
@@ -125,7 +125,7 @@ class FileProcessor(object):
                 self._file_tokens = list(
                     tokenize.generate_tokens(lambda: next(line_iter))
                 )
-            except tokenize.TokenError as exc:
+            except (tokenize.TokenError, SyntaxError) as exc:
                 raise exceptions.InvalidSyntax(exception=exc)
 
         return self._file_tokens
diff --git a/tests/integration/test_main.py b/tests/integration/test_main.py
index ab10bf9..ce8ad13 100644
--- a/tests/integration/test_main.py
+++ b/tests/integration/test_main.py
@@ -162,6 +162,17 @@ def test_tokenization_error_but_not_syntax_error(tmpdir, capsys):
     assert err == ''
 
 
+def test_tokenization_error_is_a_syntax_error(tmpdir, capsys):
+    """Test when tokenize raises a SyntaxError."""
+    with tmpdir.as_cwd():
+        tmpdir.join('t.py').write('if True:\n    pass\n pass\n')
+        _call_main(['t.py'], retv=1)
+
+    out, err = capsys.readouterr()
+    assert out == 't.py:1:1: E902 IndentationError: unindent does not match any outer indentation level\n'  # noqa: E501
+    assert err == ''
+
+
 def test_bug_report_successful(capsys):
     """Test that --bug-report does not crash."""
     _call_main(['--bug-report'])