From 9cf8603e9494420ebd1a48bea33b31d4979525d7 Mon Sep 17 00:00:00 2001 From: Fabian Neundorf Date: Thu, 21 Jul 2016 11:55:41 +0000 Subject: [PATCH 1/3] Add support for tokens of a complete file The `tokens` property of the `FileProcessor` class only contains tokens of the current line but not all tokens. So for a plugin which is only executed once per file, that property is useless. To make the tokens also available to plugins, it is now possible to supply all the tokens of a file. It also updates the documentation to separate which parameters are static and which are changed on each line. Using the latter parameters on plugins which are only run once per file isn't very sensible. --- .../plugin-development/plugin-parameters.rst | 17 +++++++++++++---- src/flake8/processor.py | 14 ++++++++++++++ 2 files changed, 27 insertions(+), 4 deletions(-) diff --git a/docs/source/plugin-development/plugin-parameters.rst b/docs/source/plugin-development/plugin-parameters.rst index 3c4534a..e341572 100644 --- a/docs/source/plugin-development/plugin-parameters.rst +++ b/docs/source/plugin-development/plugin-parameters.rst @@ -34,18 +34,27 @@ a file, a plugin can ask for any of the following: - :attr:`~flake8.processor.FileProcessor.indent_level` - :attr:`~flake8.processor.FileProcessor.line_number` - :attr:`~flake8.processor.FileProcessor.logical_line` -- :attr:`~flake8.processor.FileProcessor.max_line_length` - :attr:`~flake8.processor.FileProcessor.multiline` - :attr:`~flake8.processor.FileProcessor.noqa` - :attr:`~flake8.processor.FileProcessor.previous_indent_level` - :attr:`~flake8.processor.FileProcessor.previous_logical` - :attr:`~flake8.processor.FileProcessor.tokens` + +Some properties are set once per file for plugins which iterate itself over +the data instead of being called on each physical or logical line. 
+ +- :attr:`~flake8.processor.FileProcessor.filename` +- :attr:`~flake8.processor.FileProcessor.file_tokens` +- :attr:`~flake8.processor.FileProcessor.lines` +- :attr:`~flake8.processor.FileProcessor.max_line_length` - :attr:`~flake8.processor.FileProcessor.total_lines` - :attr:`~flake8.processor.FileProcessor.verbose` -Alternatively, a plugin can accept ``tree`` and ``filename``. -``tree`` will be a parsed abstract syntax tree that will be used by plugins -like PyFlakes and McCabe. +These parameters can also be supplied to plugins working on each line +separately. Additionally, plugins called once per file can also accept ``tree`` +which is not supplied as a parameter of +:class:`~flake8.processor.FileProcessor`, which will be a parsed abstract +syntax tree. It is used by plugins like PyFlakes and McCabe. Registering Options diff --git a/src/flake8/processor.py b/src/flake8/processor.py index dee0b15..79844af 100644 --- a/src/flake8/processor.py +++ b/src/flake8/processor.py @@ -43,6 +43,7 @@ class FileProcessor(object): - :attr:`previous_indent_level` - :attr:`previous_logical` - :attr:`tokens` + - :attr:`file_tokens` - :attr:`total_lines` - :attr:`verbose` """ @@ -101,6 +102,19 @@ class FileProcessor(object): self.statistics = { 'logical lines': 0, } + self._file_tokens = None + + @property + def file_tokens(self): + if self._file_tokens is None: + line_iter = iter(self.lines) + try: + self._file_tokens = list(tokenize.generate_tokens( + lambda: next(line_iter))) + except tokenize.TokenError as exc: + raise exceptions.InvalidSyntax(exc.message, exception=exc) + + return self._file_tokens[:] @contextlib.contextmanager def inside_multiline(self, line_number): From 1cfc12f366c8b3408993374ddc5a73f5f66da43e Mon Sep 17 00:00:00 2001 From: Ian Cordasco Date: Tue, 25 Oct 2016 12:01:43 -0500 Subject: [PATCH 2/3] Trim trailing spaces from documentation --- docs/source/plugin-development/plugin-parameters.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) 
diff --git a/docs/source/plugin-development/plugin-parameters.rst b/docs/source/plugin-development/plugin-parameters.rst index 729f9c5..1625098 100644 --- a/docs/source/plugin-development/plugin-parameters.rst +++ b/docs/source/plugin-development/plugin-parameters.rst @@ -53,11 +53,11 @@ the data instead of being called on each physical or logical line. These parameters can also be supplied to plugins working on each line separately. -Plugins that depend on ``physical_line`` or ``logical_line`` are run on each -physical or logical line once. These parameters should be the first in the -list of arguments (with the exception of ``self``). Plugins that need an AST -(e.g., PyFlakes and McCabe) should depend on ``tree``. These plugins will run -once per file. The parameters listed above can be combined with +Plugins that depend on ``physical_line`` or ``logical_line`` are run on each +physical or logical line once. These parameters should be the first in the +list of arguments (with the exception of ``self``). Plugins that need an AST +(e.g., PyFlakes and McCabe) should depend on ``tree``. These plugins will run +once per file. The parameters listed above can be combined with ``physical_line``, ``logical_line``, and ``tree``. From 8dfe38e9e6541185e78a25d684ca1a4a4d9695b0 Mon Sep 17 00:00:00 2001 From: Ian Cordasco Date: Tue, 25 Oct 2016 12:02:00 -0500 Subject: [PATCH 3/3] Fix up FileProcessor.file_tokens property We opted to not copy the file_tokens attribute each time it's accessed in the merge request discussion but it was never reflected in the code. Further, the attribute had no documentation or docstring, so we've added that. Finally, we address a personal style nit that I otherwise wouldn't have picked at. 
--- src/flake8/processor.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/src/flake8/processor.py b/src/flake8/processor.py index 0e8c153..8490092 100644 --- a/src/flake8/processor.py +++ b/src/flake8/processor.py @@ -103,15 +103,22 @@ class FileProcessor(object): @property def file_tokens(self): + """The complete set of tokens for a file. + + Accessing this attribute *may* raise an InvalidSyntax exception. + + :raises: flake8.exceptions.InvalidSyntax + """ if self._file_tokens is None: line_iter = iter(self.lines) try: self._file_tokens = list(tokenize.generate_tokens( - lambda: next(line_iter))) + lambda: next(line_iter) + )) except tokenize.TokenError as exc: raise exceptions.InvalidSyntax(exc.message, exception=exc) - return self._file_tokens[:] + return self._file_tokens @contextlib.contextmanager def inside_multiline(self, line_number):