mirror of
https://github.com/PyCQA/flake8.git
synced 2026-04-10 06:44:18 +00:00
Continue porting more logic from pep8
This commit is contained in:
parent
074739de27
commit
6ac955dfd4
3 changed files with 98 additions and 7 deletions
|
|
@ -1,4 +1,5 @@
|
||||||
"""Checker Manager and Checker classes."""
|
"""Checker Manager and Checker classes."""
|
||||||
|
import contextlib
|
||||||
import io
|
import io
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
|
@ -205,22 +206,57 @@ class FileChecker(object):
|
||||||
error = (error_code, self.filename, line_number, column, text)
|
error = (error_code, self.filename, line_number, column, text)
|
||||||
self.results.append(error)
|
self.results.append(error)
|
||||||
|
|
||||||
def run_check(self, plugin):
|
def run_check(self, plugin, **arguments):
|
||||||
"""Run the check in a single plugin."""
|
"""Run the check in a single plugin."""
|
||||||
arguments = {}
|
self.processor.keyword_arguments_for(plugin.parameters, arguments)
|
||||||
for parameter in plugin.parameters:
|
|
||||||
arguments[parameter] = self.attributes[parameter]
|
|
||||||
return plugin.execute(**arguments)
|
return plugin.execute(**arguments)
|
||||||
|
|
||||||
|
def run_physical_checks(self, physical_line):
|
||||||
|
for plugin in self.checks.physical_line_plugins:
|
||||||
|
result = self.run_check(plugin, physical_line=physical_line)
|
||||||
|
if result is not None:
|
||||||
|
column_offset, text = result
|
||||||
|
error_code, error_text = text.split(' ', 1)
|
||||||
|
self.report(
|
||||||
|
error_code=error_code,
|
||||||
|
line_number=self.processor.line_number,
|
||||||
|
column=column_offset,
|
||||||
|
text=error_text,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.processor.check_physical_error(error_code, physical_line)
|
||||||
|
|
||||||
def run_checks(self):
|
def run_checks(self):
|
||||||
"""Run checks against the file."""
|
"""Run checks against the file."""
|
||||||
try:
|
try:
|
||||||
for token in self.processor.generate_tokens():
|
for token in self.processor.generate_tokens():
|
||||||
pass
|
self.check_physical_eol(token)
|
||||||
except exceptions.InvalidSyntax as exc:
|
except exceptions.InvalidSyntax as exc:
|
||||||
self.report(exc.error_code, exc.line_number, exc.column_number,
|
self.report(exc.error_code, exc.line_number, exc.column_number,
|
||||||
exc.error_message)
|
exc.error_message)
|
||||||
|
|
||||||
|
def check_physical_eol(self, token):
|
||||||
|
"""Run physical checks if and only if it is at the end of the line."""
|
||||||
|
if utils.is_eol_token(token):
|
||||||
|
# Obviously, a newline token ends a single physical line.
|
||||||
|
self.run_physical_checks(token[4])
|
||||||
|
elif utils.is_multiline_string(token):
|
||||||
|
# Less obviously, a string that contains newlines is a
|
||||||
|
# multiline string, either triple-quoted or with internal
|
||||||
|
# newlines backslash-escaped. Check every physical line in the
|
||||||
|
# string *except* for the last one: its newline is outside of
|
||||||
|
# the multiline string, so we consider it a regular physical
|
||||||
|
# line, and will check it like any other physical line.
|
||||||
|
#
|
||||||
|
# Subtleties:
|
||||||
|
# - have to wind self.line_number back because initially it
|
||||||
|
# points to the last line of the string, and we want
|
||||||
|
# check_physical() to give accurate feedback
|
||||||
|
line_no = token[2][0]
|
||||||
|
with self.processor.inside_multiline(line_number=line_no):
|
||||||
|
for line in self.processor.split_line(token):
|
||||||
|
self.run_physical_checks(line + '\n')
|
||||||
|
|
||||||
|
|
||||||
class FileProcessor(object):
|
class FileProcessor(object):
|
||||||
"""Processes a file and holdes state.
|
"""Processes a file and holdes state.
|
||||||
|
|
@ -287,6 +323,37 @@ class FileProcessor(object):
|
||||||
#: Verbosity level of Flake8
|
#: Verbosity level of Flake8
|
||||||
self.verbosity = options.verbosity
|
self.verbosity = options.verbosity
|
||||||
|
|
||||||
|
@contextlib.contextmanager
|
||||||
|
def inside_multiline(self, line_number):
|
||||||
|
"""Context-manager to toggle the multiline attribute."""
|
||||||
|
self.line_number = line_number
|
||||||
|
self.multiline = True
|
||||||
|
yield
|
||||||
|
self.multiline = False
|
||||||
|
|
||||||
|
def split_line(self, token):
|
||||||
|
"""Split a physical line's line based on new-lines.
|
||||||
|
|
||||||
|
This also auto-increments the line number for the caller.
|
||||||
|
"""
|
||||||
|
for line in token[1].split('\n')[:-1]:
|
||||||
|
yield line
|
||||||
|
self.line_number += 1
|
||||||
|
|
||||||
|
def keyword_arguments_for(self, parameters, arguments=None):
|
||||||
|
"""Generate the keyword arguments for a list of parameters."""
|
||||||
|
if arguments is None:
|
||||||
|
arguments = {}
|
||||||
|
for param in parameters:
|
||||||
|
if param not in arguments:
|
||||||
|
arguments[param] = getattr(self, param)
|
||||||
|
return arguments
|
||||||
|
|
||||||
|
def check_physical_error(self, error_code, line):
|
||||||
|
"""Update attributes based on error code and line."""
|
||||||
|
if error_code == 'E101':
|
||||||
|
self.indent_char = line[0]
|
||||||
|
|
||||||
def generate_tokens(self):
|
def generate_tokens(self):
|
||||||
"""Tokenize the file and yield the tokens.
|
"""Tokenize the file and yield the tokens.
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -157,14 +157,16 @@ class StyleGuide(object):
|
||||||
LOG.debug('"%s" will be "%s"', code, decision)
|
LOG.debug('"%s" will be "%s"', code, decision)
|
||||||
return decision
|
return decision
|
||||||
|
|
||||||
def is_inline_ignored(self, error):
|
def is_inline_ignored(self, error, physical_line=None):
|
||||||
# type: (Error) -> bool
|
# type: (Error) -> bool
|
||||||
"""Determine if an comment has been added to ignore this line."""
|
"""Determine if an comment has been added to ignore this line."""
|
||||||
# TODO(sigmavirus24): Determine how to handle stdin with linecache
|
# TODO(sigmavirus24): Determine how to handle stdin with linecache
|
||||||
if self.options.disable_noqa:
|
if self.options.disable_noqa:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
physical_line = linecache.getline(error.filename, error.line_number)
|
if physical_line is None:
|
||||||
|
physical_line = linecache.getline(error.filename,
|
||||||
|
error.line_number)
|
||||||
noqa_match = self.NOQA_INLINE_REGEXP.search(physical_line)
|
noqa_match = self.NOQA_INLINE_REGEXP.search(physical_line)
|
||||||
if noqa_match is None:
|
if noqa_match is None:
|
||||||
LOG.debug('%r is not inline ignored', error)
|
LOG.debug('%r is not inline ignored', error)
|
||||||
|
|
|
||||||
|
|
@ -4,6 +4,7 @@ import inspect
|
||||||
import io
|
import io
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
import tokenize
|
||||||
|
|
||||||
|
|
||||||
def parse_comma_separated_list(value):
|
def parse_comma_separated_list(value):
|
||||||
|
|
@ -183,3 +184,24 @@ def parameters_for(plugin):
|
||||||
parameters.remove('self')
|
parameters.remove('self')
|
||||||
|
|
||||||
return parameters
|
return parameters
|
||||||
|
|
||||||
|
# Token types that terminate a logical or physical line.
NEWLINE = frozenset([tokenize.NL, tokenize.NEWLINE])
# Work around Python < 2.6 behaviour, which does not generate NL after
# a comment which is on a line by itself.
COMMENT_WITH_NL = tokenize.generate_tokens(['#\n'].pop).send(None)[1] == '#\n'


def is_eol_token(token):
    """Check if the token is an end-of-line token."""
    if token[0] in NEWLINE:
        return True
    # A physical line also ends at a backslash continuation: everything
    # after the token on its source line is just the escaped newline.
    remainder = token[4][token[3][1]:]
    return remainder.lstrip() == '\\\n'
|
if COMMENT_WITH_NL:  # If on Python 2.6
    def is_eol_token(token, _is_eol_token=is_eol_token):
        """Check if the token is an end-of-line token."""
        if _is_eol_token(token):
            return True
        # On 2.6 a comment alone on its line is not followed by an NL
        # token, so the comment token itself ends the physical line.
        return token[0] == tokenize.COMMENT and token[1] == token[4]
|
def is_multiline_string(token):
    """Check if this is a multiline string."""
    token_type, token_text = token[0], token[1]
    return token_type == tokenize.STRING and '\n' in token_text
|
||||||
Loading…
Add table
Add a link
Reference in a new issue