diff --git a/flake8/checker.py b/flake8/checker.py
index 9f2154d..38a4634 100644
--- a/flake8/checker.py
+++ b/flake8/checker.py
@@ -1,4 +1,5 @@
 """Checker Manager and Checker classes."""
+import contextlib
 import io
 import logging
 import os
@@ -205,22 +206,57 @@ class FileChecker(object):
         error = (error_code, self.filename, line_number, column, text)
         self.results.append(error)
 
-    def run_check(self, plugin):
+    def run_check(self, plugin, **arguments):
         """Run the check in a single plugin."""
-        arguments = {}
-        for parameter in plugin.parameters:
-            arguments[parameter] = self.attributes[parameter]
+        self.processor.keyword_arguments_for(plugin.parameters, arguments)
         return plugin.execute(**arguments)
 
+    def run_physical_checks(self, physical_line):
+        for plugin in self.checks.physical_line_plugins:
+            result = self.run_check(plugin, physical_line=physical_line)
+            if result is not None:
+                column_offset, text = result
+                error_code, error_text = text.split(' ', 1)
+                self.report(
+                    error_code=error_code,
+                    line_number=self.processor.line_number,
+                    column=column_offset,
+                    text=error_text,
+                )
+
+                self.processor.check_physical_error(error_code, physical_line)
+
     def run_checks(self):
         """Run checks against the file."""
         try:
             for token in self.processor.generate_tokens():
-                pass
+                self.check_physical_eol(token)
         except exceptions.InvalidSyntax as exc:
             self.report(exc.error_code, exc.line_number, exc.column_number,
                         exc.error_message)
 
+    def check_physical_eol(self, token):
+        """Run physical checks if and only if it is at the end of the line."""
+        if utils.is_eol_token(token):
+            # Obviously, a newline token ends a single physical line.
+            self.run_physical_checks(token[4])
+        elif utils.is_multiline_string(token):
+            # Less obviously, a string that contains newlines is a
+            # multiline string, either triple-quoted or with internal
+            # newlines backslash-escaped. Check every physical line in the
+            # string *except* for the last one: its newline is outside of
+            # the multiline string, so we consider it a regular physical
+            # line, and will check it like any other physical line.
+            #
+            # Subtleties:
+            # - have to wind self.line_number back because initially it
+            #   points to the last line of the string, and we want
+            #   check_physical() to give accurate feedback
+            line_no = token[2][0]
+            with self.processor.inside_multiline(line_number=line_no):
+                for line in self.processor.split_line(token):
+                    self.run_physical_checks(line + '\n')
+
 
 class FileProcessor(object):
     """Processes a file and holdes state.
@@ -287,6 +323,37 @@ class FileProcessor(object):
         #: Verbosity level of Flake8
         self.verbosity = options.verbosity
 
+    @contextlib.contextmanager
+    def inside_multiline(self, line_number):
+        """Context-manager to toggle the multiline attribute."""
+        self.line_number = line_number
+        self.multiline = True
+        yield
+        self.multiline = False
+
+    def split_line(self, token):
+        """Split a physical line's line based on new-lines.
+
+        This also auto-increments the line number for the caller.
+        """
+        for line in token[1].split('\n')[:-1]:
+            yield line
+            self.line_number += 1
+
+    def keyword_arguments_for(self, parameters, arguments=None):
+        """Generate the keyword arguments for a list of parameters."""
+        if arguments is None:
+            arguments = {}
+        for param in parameters:
+            if param not in arguments:
+                arguments[param] = getattr(self, param)
+        return arguments
+
+    def check_physical_error(self, error_code, line):
+        """Update attributes based on error code and line."""
+        if error_code == 'E101':
+            self.indent_char = line[0]
+
     def generate_tokens(self):
         """Tokenize the file and yield the tokens.
 
diff --git a/flake8/style_guide.py b/flake8/style_guide.py
index bbfe658..51c3a8e 100644
--- a/flake8/style_guide.py
+++ b/flake8/style_guide.py
@@ -157,14 +157,16 @@ class StyleGuide(object):
         LOG.debug('"%s" will be "%s"', code, decision)
         return decision
 
-    def is_inline_ignored(self, error):
+    def is_inline_ignored(self, error, physical_line=None):
         # type: (Error) -> bool
         """Determine if an comment has been added to ignore this line."""
         # TODO(sigmavirus24): Determine how to handle stdin with linecache
         if self.options.disable_noqa:
             return False
 
-        physical_line = linecache.getline(error.filename, error.line_number)
+        if physical_line is None:
+            physical_line = linecache.getline(error.filename,
+                                              error.line_number)
         noqa_match = self.NOQA_INLINE_REGEXP.search(physical_line)
         if noqa_match is None:
             LOG.debug('%r is not inline ignored', error)
diff --git a/flake8/utils.py b/flake8/utils.py
index 7e08e41..a3be0ea 100644
--- a/flake8/utils.py
+++ b/flake8/utils.py
@@ -4,6 +4,7 @@ import inspect
 import io
 import os
 import sys
+import tokenize
 
 
 def parse_comma_separated_list(value):
@@ -183,3 +184,24 @@ def parameters_for(plugin):
         parameters.remove('self')
 
     return parameters
+
+NEWLINE = frozenset([tokenize.NL, tokenize.NEWLINE])
+# Work around Python < 2.6 behaviour, which does not generate NL after
+# a comment which is on a line by itself.
+COMMENT_WITH_NL = tokenize.generate_tokens(['#\n'].pop).send(None)[1] == '#\n'
+
+
+def is_eol_token(token):
+    """Check if the token is an end-of-line token."""
+    return token[0] in NEWLINE or token[4][token[3][1]:].lstrip() == '\\\n'
+
+if COMMENT_WITH_NL:  # If on Python 2.6
+    def is_eol_token(token, _is_eol_token=is_eol_token):
+        """Check if the token is an end-of-line token."""
+        return (_is_eol_token(token) or
+                (token[0] == tokenize.COMMENT and token[1] == token[4]))
+
+
+def is_multiline_string(token):
+    """Check if this is a multiline string."""
+    return token[0] == tokenize.STRING and '\n' in token[1]