Handle SyntaxErrors when tokenizing a file

Closes #205
This commit is contained in:
Ian Cordasco 2016-08-06 07:44:09 -05:00
parent 7730a790f4
commit 78100de8c6
No known key found for this signature in database
GPG key ID: 656D3395E4A9791A
2 changed files with 5 additions and 8 deletions

View file

@@ -240,13 +240,7 @@ class FileProcessor(object):
break
self.tokens.append(token)
yield token
# NOTE(sigmavirus24): pycodestyle was catching both a SyntaxError
# and a tokenize.TokenError. In looking at the source on Python 2 and
# Python 3, the SyntaxError should never arise from generate_tokens.
# If we were using tokenize.tokenize, we would have to catch that. Of
# course, I'm going to be unsurprised to be proven wrong at a later
# date.
except tokenize.TokenError as exc:
except (tokenize.TokenError, SyntaxError) as exc:
raise exceptions.InvalidSyntax(exception=exc)
def line_for(self, line_number):