diff --git a/.gitignore b/.gitignore index c968761..baf560d 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,12 @@ *.pyc .tox +.eggs *.egg *.egg-info build dist *.zip +.cache +*.sw* +*.log +docs/build/html/* diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 0000000..f778dd4 --- /dev/null +++ b/.pylintrc @@ -0,0 +1,378 @@ +[MASTER] + +# Specify a configuration file. +#rcfile= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS,.git,flake8.egg-info + +# Pickle collected data for later comparisons. +persistent=yes + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins= + +# Use multiple processes to speed up Pylint. +jobs=4 + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code +extension-pkg-whitelist= + +# Allow optimization of some AST trees. This will activate a peephole AST +# optimizer, which will apply various small optimizations. For instance, it can +# be used to obtain the result of joining multiple strings with the addition +# operator. Joining a lot of strings can lead to a maximum recursion error in +# Pylint and this flag can prevent that. It has one side effect, the resulting +# AST will be different than the one from reality. +optimize-ast=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED +confidence=INFERENCE_FAILURE + +# Enable the message, report, category or checker with the given id(s). 
You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time. See also the "--disable" option for examples. +#enable= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once).You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use"--disable=all --enable=classes +# --disable=W" +disable=intern-builtin,nonzero-method,parameter-unpacking,backtick,raw_input-builtin,dict-view-method,filter-builtin-not-iterating,long-builtin,unichr-builtin,input-builtin,unicode-builtin,file-builtin,map-builtin-not-iterating,delslice-method,apply-builtin,cmp-method,setslice-method,coerce-method,long-suffix,raising-string,import-star-module-level,buffer-builtin,reload-builtin,unpacking-in-except,print-statement,hex-method,old-octal-literal,metaclass-assignment,dict-iter-method,range-builtin-not-iterating,using-cmp-argument,indexing-exception,no-absolute-import,coerce-builtin,getslice-method,suppressed-message,execfile-builtin,round-builtin,useless-suppression,reduce-builtin,old-raise-syntax,zip-builtin-not-iterating,cmp-builtin,xrange-builtin,standarderror-builtin,old-division,oct-method,next-method-called,old-ne-operator,basestring-builtin + + +[REPORTS] + +# Set the output format. Available formats are text, parseable, colorized, msvs +# (visual studio) and html. You can also give a reporter class, eg +# mypackage.mymodule.MyReporterClass. 
+output-format=text + +# Put messages in a separate file for each module / package specified on the +# command line instead of printing them on stdout. Reports (if any) will be +# written in a file name "pylint_global.[txt|html]". +files-output=no + +# Tells whether to display a full report or only the messages +reports=no + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details +#msg-template= + + +[BASIC] + +# List of builtins function names that should not be used, separated by a comma +bad-functions=map,filter + +# Good variable names which should always be accepted, separated by a comma +good-names=i,j,k,ex,Run,_ + +# Bad variable names which should always be refused, separated by a comma +bad-names=foo,bar,baz,toto,tutu,tata + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. 
+name-group= + +# Include a hint for the correct naming format with invalid-name +include-naming-hint=yes + +# Regular expression matching correct argument names +argument-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for argument names +argument-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct attribute names +attr-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for attribute names +attr-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct constant names +const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Naming hint for constant names +const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Regular expression matching correct class names +class-rgx=[A-Z_][a-zA-Z0-9]+$ + +# Naming hint for class names +class-name-hint=[A-Z_][a-zA-Z0-9]+$ + +# Regular expression matching correct inline iteration names +inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ + +# Naming hint for inline iteration names +inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ + +# Regular expression matching correct class attribute names +class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ + +# Naming hint for class attribute names +class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ + +# Regular expression matching correct function names +function-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for function names +function-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct module names +module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Naming hint for module names +module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Regular expression matching correct method names +method-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for method names +method-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct variable names +variable-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for variable names +variable-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match function or class names that do +# 
not require a docstring. +no-docstring-rgx=^_ + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + + +[ELIF] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + + +[FORMAT] + +# Maximum number of characters on a single line. +max-line-length=100 + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + +# List of optional constructs for which whitespace checking is disabled. `dict- +# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. +# `trailing-comma` allows a space between comma and closing bracket: (a, ). +# `empty-line` allows space-only lines. +no-space-check=trailing-comma,dict-separator + +# Maximum number of lines in a module +max-module-lines=1000 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + + +[LOGGING] + +# Logging modules to check that the string format arguments are in logging +# function parameter format +logging-modules=logging + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME,XXX,TODO + + +[SIMILARITIES] + +# Minimum lines number of a similarity. +min-similarity-lines=4 + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + + +[SPELLING] + +# Spelling dictionary name. Available dictionaries: none. To make it working +# install python-enchant package. 
+spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to indicated private dictionary in +# --spelling-private-dict-file option instead of raising a message. +spelling-store-unknown-words=no + + +[TYPECHECK] + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis. It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# List of classes names for which member attributes should not be checked +# (useful for classes with attributes dynamically set). This supports can work +# with qualified names. +ignored-classes= + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + + +[VARIABLES] + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# A regular expression matching the name of dummy variables (i.e. expectedly +# not used). +dummy-variables-rgx=_$|dummy + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +additional-builtins= + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_,_cb + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. 
+defining-attr-methods=__init__,__new__,setUp + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict,_fields,_replace,_source,_make + + +[DESIGN] + +# Maximum number of arguments for function / method +max-args=20 + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore +ignored-argument-names=_.* + +# Maximum number of locals for function / method body +max-locals=20 + +# Maximum number of return / yield for function / method body +max-returns=6 + +# Maximum number of branch for function / method body +max-branches=12 + +# Maximum number of statements in function / method body +max-statements=50 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of attributes for a class (see R0902). +max-attributes=10 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of boolean expressions in a if statement +max-bool-expr=5 + + +[IMPORTS] + +# Deprecated modules which should not be used, separated by a comma +deprecated-modules=optparse + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled) +import-graph= + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled) +ext-import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled) +int-import-graph= + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. 
Defaults to +# "Exception" +overgeneral-exceptions=Exception diff --git a/DESIGN.rst b/DESIGN.rst new file mode 100644 index 0000000..cac20f9 --- /dev/null +++ b/DESIGN.rst @@ -0,0 +1,185 @@ +============== + Design Goals +============== + +Outline +------- + +#. :ref:`plugins` + + #. :ref:`checking` + + #. :ref:`autofixing` + + #. :ref:`reporter-plugins` + + #. :ref:`options-passing` + + #. :ref:`plugin-default-ignore` + + #. :ref:`report-generation` + +#. :ref:`options` + + #. :ref:`better-select-ignore` + +#. :ref:`standard-in` + +#. :ref:`multiprocessing` + +.. _plugins: + +Better Plugins Support +---------------------- + +Currently, Flake8 has some rather excellent support for plugins. It currently +allows for the following: + +- Third-party packages to register checks + +- Checks to be disabled by default + +- Checks to accept an AST compiled tree, physical lines, or logical lines. + +- Flake8 handles running those checks in separate subprocesses as necessary + +That said, plugins cannot access the options passed on the command-line, or +options parsed from config files (without parsing them, themselves) and all +reporting is handled by pep8 instead of flake8 which reduces the flexibility +users have in aggregating reports. + +.. _checking: + +Support for Plugins that Only Run Checks +++++++++++++++++++++++++++++++++++++++++ + +Flake8 currently already supports plugins that only run checks. This support +needs to continue and should be trivial to continue. + +.. _autofixing: + +Support for Plugins that Autofix Errors ++++++++++++++++++++++++++++++++++++++++ + +Flake8 should enable people writing plugins for both core Flake8 checkers and +third-party checkers that allow the code to be automatically fixed. The trick +is in how to do this. 
+ +Once Flake8 has control over running plugins and treats pep8, flake8, and +mccabe as "plugins", it will aggregate the errors returned by all of the +plugins and be able to "notify" other plugins that have chosen to listen for +errors so those plugins can auto-fix the problems in the file. + +We should also be considerate of allowing these plugins to be composable. Each +plugin should have a way of defining its capabilities. + +See https://gitlab.com/pycqa/flake8/issues/84 + +.. note:: Will probably need a Trie implementation for this + +What we *might* want is for an autofix plugin to register something like + +:: + + 'flake8.listen': [ + 'E1 = my_fixer.E1Listener', + 'E2 = my_fixer.E2Listener', + ] + +This means that the notifier would need to take an error code like ``E111`` and +then notify anything listening for ``E111``, ``E11``, ``E1``, and ``E``. + +.. _reporter-plugins: + +Support for Plugins that Format Output +++++++++++++++++++++++++++++++++++++++ + +Flake8 currently supports formatting output via pep8's ``--format`` option. +This works but is fundamentally a bit limiting. Allowing users to replace or +compose formatters would allow for certain formatters to highlight more +important information over less important information as the user deems +necessary. + +:: + + 'flake8.format': [ + 'json = my_formatter.JsonFormatter', + 'xml = my_formatter.XMLFormatter', + ] + +See https://gitlab.com/pycqa/flake8/issues/66 + +.. _options-passing: + +Support for Plugins that Require Parsed Options ++++++++++++++++++++++++++++++++++++++++++++++++ + +Plugins currently are able to use ``add_options`` and ``parse_options`` +classmethods to register and retrieve options information. This is admittedly +a little awkward and could be improved, but should at least be preserved in +this rewrite. + +See potential improvements as a result of +https://gitlab.com/pycqa/flake8/issues/88 + +.. 
_plugin-default-ignore: + +Support for Plugins Specifying Default Ignore list +++++++++++++++++++++++++++++++++++++++++++++++++++ + +Plugins currently have no way of extending the default ignore list. This means +they have to hard-code checks to auto-ignore errors. + +.. _options: + +Better Options Support +---------------------- + +Currently there are some options handled by pep8 that are handled poorly. +Further, the way the options work is confusing to some, e.g., when specifying +``--ignore``, users do not expect it to override the ``DEFAULT_IGNORE`` list. +Users also don't expect ``--ignore`` and ``--select`` to step on each other's +toes. + +.. _better-select-ignore: + +Support for Better Select/Ignore Handling ++++++++++++++++++++++++++++++++++++++++++ + +Currently ``--select`` and ``--ignore`` cause one or the other to be ignored. +Users presently cannot specify both for granularity. This should be +significantly improved. + +Further, new tools have developed ``--add-select`` and ``--add-ignore`` which +allows an add-only interface. This seems to be a good direction to follow. +Flake8 should support this. + +See https://github.com/PyCQA/pep8/issues/390 + +.. _standard-in: + +Better stdin support +-------------------- + +Currently, flake8 accepts input from standard-in to check. It also currently +monkey-patches pep8 to cache that value. It would be better if there was one +way to retrieve the stdin input for plugins. Flake8 should provide this +directly instead of pep8 providing it. + +See +https://gitlab.com/pycqa/flake8/commit/41393c9b6de513ea169b61c175b71018e8a12336 + +.. _multiprocessing: + +Multiprocessing Support +----------------------- + +Flake8's existing multiprocessing support (and handling for different error +cases needs to persist through this redesign). 
+ +See: + +- https://gitlab.com/pycqa/flake8/issues/8 +- https://gitlab.com/pycqa/flake8/issues/17 +- https://gitlab.com/pycqa/flake8/issues/44 +- https://gitlab.com/pycqa/flake8/issues/74 diff --git a/docs/build/.keep b/docs/build/.keep new file mode 100644 index 0000000..e69de29 diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 0000000..0d10af2 --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,295 @@ +# -*- coding: utf-8 -*- +# +# flake8 documentation build configuration file, created by +# sphinx-quickstart on Tue Jan 19 07:14:10 2016. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +#sys.path.insert(0, os.path.abspath('.')) + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = '1.3' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.doctest', + 'sphinx.ext.intersphinx', + 'sphinx.ext.todo', + 'sphinx.ext.coverage', + 'sphinx.ext.viewcode', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The encoding of source files. 
+#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'flake8' +copyright = u'2016, Ian Cordasco' +author = u'Ian Cordasco' + +import flake8 +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = flake8.__version__ +# The full version, including alpha/beta/rc tags. +release = flake8.__version__ + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. 
+#keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'sphinx_rtd_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. 
+#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +#html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +#html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +#html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'flake8doc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { +# The paper size ('letterpaper' or 'a4paper'). 
+#'papersize': 'letterpaper', + +# The font size ('10pt', '11pt' or '12pt'). +#'pointsize': '10pt', + +# Additional stuff for the LaTeX preamble. +#'preamble': '', + +# Latex figure (float) alignment +#'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'flake8.tex', u'flake8 Documentation', + u'Ian Cordasco', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'flake8', u'flake8 Documentation', + [author], 1) +] + +# If true, show URL addresses after external links. +#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'flake8', u'flake8 Documentation', + author, 'flake8', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. 
+#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = {'python': ('https://docs.python.org/3.4', None)} diff --git a/docs/source/dev/.keep b/docs/source/dev/.keep new file mode 100644 index 0000000..e69de29 diff --git a/docs/source/dev/formatters.rst b/docs/source/dev/formatters.rst new file mode 100644 index 0000000..0b37abd --- /dev/null +++ b/docs/source/dev/formatters.rst @@ -0,0 +1,51 @@ +=========================================== + Developing a Formatting Plugin for Flake8 +=========================================== + +Flake8 added the ability to develop custom formatting plugins in version +3.0.0. Let's write a plugin together: + +.. code-block:: python + + from flake8.formatting import base + + + class Example(base.BaseFormatter): + """Flake8's example formatter.""" + + pass + +We notice, as soon as we start, that we inherit from Flake8's +:class:`~flake8.formatting.base.BaseFormatter` class. If we follow the +:ref:`instructions to register a plugin <register-a-plugin>` and try to use +our example formatter, e.g., ``flake8 --format=example`` then Flake8 will fail +because we did not implement the ``format`` method. Let's do that next. +
+.. code-block:: python + + class Example(base.BaseFormatter): + """Flake8's example formatter.""" + + def format(self, error): + return 'Example formatter: {0!r}'.format(error) + +With that we're done. Obviously this isn't a very useful formatter, but it +should highlight the simplicity of creating a formatter with Flake8. If we +wanted to instead create a formatter that aggregated the results and returned +XML, JSON, or subunit we could also do that. Flake8 interacts with the +formatter in two ways: + +#. 
It creates the formatter and provides it the options parsed from the + configuration files and command-line + +#. It uses the instance of the formatter and calls ``handle`` with the error. + +By default :meth:`flake8.formatting.base.BaseFormatter.handle` simply calls +the ``format`` method and then ``write``. Any extra handling you wish to do +for formatting purposes should override the ``handle`` method. + +API Documentation +================= + +.. autoclass:: flake8.formatting.base.BaseFormatter + :members: diff --git a/docs/source/dev/registering_plugins.rst b/docs/source/dev/registering_plugins.rst new file mode 100644 index 0000000..0cc18d9 --- /dev/null +++ b/docs/source/dev/registering_plugins.rst @@ -0,0 +1,115 @@ +.. _register-a-plugin: + +================================== + Registering a Plugin with Flake8 +================================== + +To register any kind of plugin with Flake8, you need a few things: + +#. You need a way to install the plugin (whether it is packaged on its own or + as part of something else). In this section, we will use a ``setup.py`` + written for an example plugin. + +#. A name for your plugin that will (ideally) be unique. + +#. A somewhat recent version of setuptools (newer than 0.7.0 but preferably as + recent as you can attain). + +Flake8 presently relies on a functionality provided by setuptools called +`Entry Points`_. These allow any package to register a plugin with Flake8 via +that package's ``setup.py`` file. + +Let's presume that we already have our plugin written and it's in a module +called ``flake8_example``. We might have a ``setup.py`` that looks something +like: + +.. code-block:: python + + from __future__ import with_statement + import setuptools + + requires = [ + "flake8 > 3.0.0", + ] + + flake8_entry_point = # ... 
+ + setuptools.setup( + name="flake8_example", + license="MIT", + version="0.1.0", + description="our extension to flake8", + author="Me", + author_email="example@example.com", + url="https://gitlab.com/me/flake8_example", + packages=[ + "flake8_example", + ], + install_requires=requires, + entry_points={ + flake8_entry_point: [ + 'X = flake8_example:ExamplePlugin', + ], + }, + classifiers=[ + "Environment :: Console", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 3", + "Topic :: Software Development :: Libraries :: Python Modules", + "Topic :: Software Development :: Quality Assurance", + ], + ) + +Note specifically these lines: + +.. code-block:: python + + flake8_entry_point = # ... + + setuptools.setup( + # snip ... + entry_points={ + flake8_entry_point: [ + 'X = flake8_example:ExamplePlugin', + ], + }, + # snip ... + ) + +We tell setuptools to register our entry point "X" inside the specific +grouping of entry-points that flake8 should look in. + +Flake8 presently looks at three groups: + +- ``flake8.extension`` + +- ``flake8.listen`` + +- ``flake8.report`` + +If your plugin is one that adds checks to Flake8, you will use +``flake8.extension``. If your plugin automatically fixes errors in code, you +will use ``flake8.listen``. Finally, if your plugin performs extra report +handling (formatting, filtering, etc.) it will use ``flake8.report``. + +If our ``ExamplePlugin`` is something that adds checks, our code would look +like: + +.. code-block:: python + + setuptools.setup( + # snip ... + entry_points={ + 'flake8.extension': [ + 'X = flake8_example:ExamplePlugin', + ], + }, + # snip ... + ) + + +.. 
_Entry Points: + https://pythonhosted.org/setuptools/pkg_resources.html#entry-points diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 0000000..c8a4f35 --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,40 @@ +.. flake8 documentation master file, created by + sphinx-quickstart on Tue Jan 19 07:14:10 2016. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Flake8: Your Tool For Style Guide Enforcement +============================================= + +User Guide +---------- + +.. toctree:: + :maxdepth: 2 + +Plugin Developer Guide +---------------------- + +.. toctree:: + :maxdepth: 2 + + dev/formatters + dev/registering_plugins + +Developer Guide +--------------- + +.. toctree:: + :maxdepth: 2 + + internal/formatters + internal/option_handling + internal/plugin_handling + internal/utils + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/source/internal/.keep b/docs/source/internal/.keep new file mode 100644 index 0000000..e69de29 diff --git a/docs/source/internal/formatters.rst b/docs/source/internal/formatters.rst new file mode 100644 index 0000000..d54cf87 --- /dev/null +++ b/docs/source/internal/formatters.rst @@ -0,0 +1,47 @@ +===================== + Built-in Formatters +===================== + +By default Flake8 has two formatters built-in, ``default`` and ``pylint``. +These correspond to two classes |DefaultFormatter| and |PylintFormatter|. + +In Flake8 2.0, pep8 handled formatting of errors and also allowed users to +specify an arbitrary format string as a parameter to ``--format``. In order +to allow for this backwards compatibility, Flake8 3.0 made two choices: + +#. To not limit a user's choices for ``--format`` to the format class names + +#. To make the default formatter attempt to use the string provided by the + user if it cannot find a formatter with that name. 
+ +Default Formatter +================= + +The |DefaultFormatter| continues to use the same default format string as +pep8: ``'%(path)s:%(row)d:%(col)d: %(code)s %(text)s'``. + +In order to provide the default functionality it overrides two methods: + +#. ``after_init`` + +#. ``format`` + +The former allows us to inspect the value provided to ``--format`` by the +user and alter our own format based on that value. The second simply uses +that format string to format the error. + +.. autoclass:: flake8.formatting.default.Default + :members: + +Pylint Formatter +================ + +The |PylintFormatter| simply defines the default Pylint format string from +pep8: ``'%(path)s:%(row)d: [%(code)s] %(text)s'``. + +.. autoclass:: flake8.formatting.default.Pylint + :members: + + +.. |DefaultFormatter| replace:: :class:`~flake8.formatting.default.Default` +.. |PylintFormatter| replace:: :class:`~flake8.formatting.default.Pylint` diff --git a/docs/source/internal/option_handling.rst b/docs/source/internal/option_handling.rst new file mode 100644 index 0000000..8ab1911 --- /dev/null +++ b/docs/source/internal/option_handling.rst @@ -0,0 +1,225 @@ +Option and Configuration Handling +================================= + +Option Management +----------------- + +Command-line options are often also set in configuration files for Flake8. +While not all options are meant to be parsed from configuration files, many +default options are also parsed from configuration files as are most plugin +options. + +In Flake8 2, plugins received a :class:`optparse.OptionParser` instance and +called :meth:`optparse.OptionParser.add_option` to register options. If the +plugin author also wanted to have that option parsed from config files they +also had to do something like: + +.. 
code-block:: python + + parser.config_options.append('my_config_option') + parser.config_options.extend(['config_opt1', 'config_opt2']) + +This was previously undocumented and led to a lot of confusion as to why +registered options were not automatically parsed from configuration files. + +Since Flake8 3 was rewritten from scratch, we decided to take a different +approach to configuration file parsing. Instead of needing to know about an +undocumented attribute that pep8 looks for, Flake8 3 now accepts a parameter +to ``add_option``, specifically ``parse_from_config`` which is a boolean +value. + +Flake8 does this by creating its own abstractions on top of :mod:`optparse`. +The first abstraction is the :class:`flake8.options.manager.Option` class. The +second is the :class:`flake8.options.manager.OptionManager`. In fact, we add +three new parameters: + +- ``parse_from_config`` + +- ``comma_separated_list`` + +- ``normalize_paths`` + +The last two are not specifically for configuration file handling, but they +do improve that dramatically. We found that there were options that when +specified in a configuration file, lent themselves to being split across +multiple lines and those options were almost always comma-separated. For +example, let's consider a user's list of ignored error codes for a project: + +.. code-block:: ini + + [flake8] + ignore = + E111, # Reasoning + E711, # Reasoning + E712, # Reasoning + E121, # Reasoning + E122, # Reasoning + E123, # Reasoning + E131, # Reasoning + E251 # Reasoning + +It makes sense here to allow users to specify the value this way, but, the +standard library's :class:`configparser.RawConfigParser` class returns a +string that looks like + +.. code-block:: python + + "\nE111, \nE711, \nE712, \nE121, \nE122, \nE123, \nE131, \nE251 " + +This means that a typical call to :meth:`str.split` with ``','`` will not be +sufficient here. 
Telling Flake8 that something is a comma-separated list +(e.g., ``comma_separated_list=True``) will handle this for you. Flake8 will +return: + +.. code-block:: python + + ["E111", "E711", "E712", "E121", "E122", "E123", "E131", "E251"] + +Next let's look at how users might like to specify their ``exclude`` list. +Presently OpenStack's Nova project has this line in their `tox.ini`_: + +.. code-block:: ini + + exclude = .venv,.git,.tox,dist,doc,*openstack/common/*,*lib/python*,*egg,build,tools/xenserver*,releasenotes + +I think we can all agree that this would be easier to read like this: + +.. code-block:: ini + + exclude = + .venv, + .git, + .tox, + dist, + doc, + *openstack/common/*, + *lib/python*, + *egg, + build, + tools/xenserver*, + releasenotes + +In this case, since these are actually intended to be paths, we would specify +both ``comma_separated_list=True`` and ``normalize_paths=True`` because we +want the paths to be provided to us with some consistency (either all absolute +paths or not). + +Now let's look at how this would actually be utilized. Most plugin developers +will receive an instance of :class:`~flake8.options.manager.OptionManager` so +to ease the transition we kept the same API as the +:class:`optparse.OptionParser` object. The only difference is that +:meth:`~flake8.options.manager.OptionManager.add_option` accepts the three +extra arguments we highlighted above. + +.. _tox.ini: + https://github.com/openstack/nova/blob/3eb190c4cfc0eefddac6c2cc1b94a699fb1687f8/tox.ini#L155 + +Configuration File Management +----------------------------- + +In Flake8 2, configuration file discovery and management was handled by pep8. +In pep8's 1.6 release series, it drastically broke how discovery and merging +worked (as a result of trying to improve it). To avoid a dependency breaking +Flake8 again in the future, we have created our own discovery and management. +As part of managing this ourselves, we decided to change management/discovery +for 3.0.0. 
We have done the following: + +- User files (files stored in a user's home directory or in the XDG directory + inside their home directory) are the first files read. For example, if the + user has a ``~/.flake8`` file, we will read that first. + +- Project files (files stored in the current directory) are read next and + merged on top of the user file. In other words, configuration in project + files takes precedence over configuration in user files. + +- **New in 3.0.0** The user can specify ``--append-config `` + repeatedly to include extra configuration files that should be read and + take precedence over user and project files. + +- **New in 3.0.0** The user can specify ``--config `` so this + file is the only configuration file used. This is a change from Flake8 2 + where pep8 would simply merge this configuration file into the configuration + generated by user and project files (where this takes precedence). + +- **New in 3.0.0** The user can specify ``--isolated`` to disable + configuration via discovered configuration files. + +To facilitate the configuration file management, we've taken a different +approach to discovery and management of files than pep8. In pep8 1.5, 1.6, and +1.7 configuration discovery and management was centralized in `66 lines of +very terse python`_ which was confusing and not very explicit. The terseness +of this function (Flake8's authors believe) caused the confusion and problems +with pep8's 1.6 series. As such, Flake8 has separated out discovery, +management, and merging into a module to make reasoning about each of these +pieces easier and more explicit (as well as easier to test). + +Configuration file discovery is managed by the +:class:`~flake8.options.config.ConfigFileFinder` object. This object needs to +know information about the program's name, any extra arguments passed to it, +and any configuration files that should be appended to the list of discovered +files. 
It provides methods for finding the files and similar methods for +parsing those files. For example, it provides +:meth:`~flake8.options.config.ConfigFileFinder.local_config_files` to find +known local config files (and append the extra configuration files) and it +also provides :meth:`~flake8.options.config.ConfigFileFinder.local_configs` +to parse those configuration files. + +.. note:: ``local_config_files`` also filters out non-existent files. + +Configuration file merging and management is controlled by the +:class:`~flake8.options.config.MergedConfigParser`. This requires the instance +of :class:`~flake8.options.manager.OptionManager` that the program is using, +the list of appended config files, and the list of extra arguments. This +object is currently the sole user of the +:class:`~flake8.options.config.ConfigFileFinder` object. It appropriately +initializes the object and uses it in each of + +- :meth:`~flake8.options.config.MergedConfigParser.parse_cli_config` +- :meth:`~flake8.options.config.MergedConfigParser.parse_local_config` +- :meth:`~flake8.options.config.MergedConfigParser.parse_user_config` + +Finally, +:meth:`~flake8.options.config.MergedConfigParser.merge_user_and_local_config` +takes the user and local configuration files that are parsed by +:meth:`~flake8.options.config.MergedConfigParser.parse_local_config` and +:meth:`~flake8.options.config.MergedConfigParser.parse_user_config`. The +main usage of the ``MergedConfigParser`` is in +:func:`~flake8.options.aggregator.aggregate_options`. + +Aggregating Configuration File and Command Line Arguments +--------------------------------------------------------- + +:func:`~flake8.options.aggregator.aggregate_options` accepts an instance of +:class:`~flake8.options.manager.OptionManager` and does the work to parse the +command-line arguments passed by the user necessary for creating an instance +of :class:`~flake8.options.config.MergedConfigParser`. 
+ +After parsing the configuration file, we determine the default ignore list. We +use the defaults from the OptionManager and update those with the parsed +configuration files. Finally we parse the user-provided options one last time +using the option defaults and configuration file values as defaults. The +parser merges on the command-line specified arguments for us so we have our +final, definitive, aggregated options. + +.. _66 lines of very terse python: + https://github.com/PyCQA/pep8/blob/b8088a2b6bc5b76bece174efad877f764529bc74/pep8.py#L1981..L2047 + +API Documentation +----------------- + +.. autofunction:: flake8.options.aggregator.aggregate_options + +.. autoclass:: flake8.options.manager.Option + :members: __init__, normalize, to_optparse + +.. autoclass:: flake8.options.manager.OptionManager + :members: + :special-members: + +.. autoclass:: flake8.options.config.ConfigFileFinder + :members: + :special-members: + +.. autoclass:: flake8.options.config.MergedConfigParser + :members: + :special-members: diff --git a/docs/source/internal/plugin_handling.rst b/docs/source/internal/plugin_handling.rst new file mode 100644 index 0000000..b3b9b0e --- /dev/null +++ b/docs/source/internal/plugin_handling.rst @@ -0,0 +1,114 @@ +Plugin Handling +=============== + +Plugin Management +----------------- + +Flake8 3.0 added support for two other plugins besides those which define new +checks. It now supports: + +- extra checks + +- alternative report formatters + +- listeners to auto-correct violations of checks + +To facilitate this, Flake8 needed a more mature way of managing plugins. As +such, we developed the |PluginManager| which accepts a namespace and will load +the plugins for that namespace. A |PluginManager| creates and manages many +|Plugin| instances. + +A |Plugin| lazily loads the underlying entry-point provided by setuptools. 
+The entry-point will be loaded either by calling +:meth:`~flake8.plugins.manager.Plugin.load_plugin` or accessing the ``plugin`` +attribute. We also use this abstraction to retrieve options that the plugin +wishes to register and parse. + +The only public method that the |PluginManager| provides is +:meth:`~flake8.plugins.manager.PluginManager.map`. This will accept a function +(or other callable) and call it with each plugin as the first parameter. + +We build atop the |PluginManager| with the |PTM|. It is expected that users of +the |PTM| will subclass it and specify the ``namespace``, e.g., + +.. code-block:: python + + class ExamplePluginType(flake8.plugin.manager.PluginTypeManager): + namespace = 'example-plugins' + +This provides a few extra methods via the |PluginManager|'s ``map`` method. + +Finally, we create three classes of plugins: + +- :class:`~flake8.plugins.manager.Checkers` + +- :class:`~flake8.plugins.manager.Listeners` + +- :class:`~flake8.plugins.manager.ReportFormatters` + +These are used to interact with each of the types of plugins individually. + +.. note:: + + Our inspiration for our plugin handling comes from the author's extensive + experience with ``stevedore``. + +Notifying Listener Plugins +-------------------------- + +One of the interesting challenges with allowing plugins to be notified each +time an error or warning is emitted by a checker is finding listeners quickly +and efficiently. It makes sense to allow a listener to listen for a certain +class of warnings or just a specific warning. As such, we need to allow all +plugins that listen to a specific warning or class to be notified. For +example, someone might register a listener for ``E1`` and another for ``E111`` +if ``E111`` is triggered by the code, both listeners should be notified. +If ``E112`` is returned, then only ``E1`` (and any other listeners) would be +notified. 
+ +To implement this goal, we needed an object to store listeners in that would +allow for efficient look up - a Trie (or Prefix Tree). Given that none of the +existing packages on PyPI allowed for storing data on each node of the trie, +it was left up to write our own as :class:`~flake8.plugins._trie.Trie`. On +top of that we layer our :class:`~flake8.plugins.notifier.Notifier` class. + +Now when Flake8 receives an error or warning, we can easily call the +:meth:`~flake8.plugins.notifier.Notifier.notify` method and let plugins act on +that knowledge. + +Default Plugins +--------------- + +Finally, Flake8 has always provided its own plugin shim for Pyflakes. As part +of that we carry our own shim in-tree and now store that in +:mod:`flake8.plugins.pyflakes`. + +API Documentation +----------------- + +.. autoclass:: flake8.plugins.manager.PluginManager + :members: + :special-members: __init__, __contains__, __getitem__ + +.. autoclass:: flake8.plugins.manager.Plugin + :members: + :special-members: __init__ + +.. autoclass:: flake8.plugins.manager.PluginTypeManager + :members: + +.. autoclass:: flake8.plugins.manager.Checkers + :members: + +.. autoclass:: flake8.plugins.manager.Listeners + :members: build_notifier + +.. autoclass:: flake8.plugins.manager.ReportFormatters + +.. autoclass:: flake8.plugins.notifier.Notifier + +.. autoclass:: flake8.plugins._trie.Trie + +.. |PluginManager| replace:: :class:`~flake8.plugins.manager.PluginManager` +.. |Plugin| replace:: :class:`~flake8.plugins.manager.Plugin` +.. |PTM| replace:: :class:`~flake8.plugins.manager.PluginTypeManager` diff --git a/docs/source/internal/utils.rst b/docs/source/internal/utils.rst new file mode 100644 index 0000000..d8adeac --- /dev/null +++ b/docs/source/internal/utils.rst @@ -0,0 +1,100 @@ +=================== + Utility Functions +=================== + +Flake8 has a few utility functions that it uses and provides to plugins. + +.. 
autofunction:: flake8.utils.parse_comma_separated_list + +:func:`~flake8.utils.parse_comma_separated_list` takes either a string like + +.. code-block:: python + + "E121,W123,F904" + "E121,\nW123,\nF804" + "E121,\n\tW123,\n\tF804" + +Or it will take a list of strings (potentially with whitespace) such as + +.. code-block:: python + + [" E121\n", "\t\nW123 ", "\n\tF904\n "] + +And converts it to a list that looks as follows + +.. code-block:: python + + ["E121", "W123", "F904"] + +This function helps normalize any kind of comma-separated input you or Flake8 +might receive. This is most helpful when taking advantage of Flake8's +additional parameters to :class:`~flake8.options.manager.Option`. + +.. autofunction:: flake8.utils.normalize_path + +This utility takes a string that represents a path and returns the absolute +path if the string has a ``/`` in it. It also removes trailing ``/``\ s. + +.. autofunction:: flake8.utils.normalize_paths + +This function utilizes :func:`~flake8.utils.parse_comma_separated_list` and +:func:`~flake8.utils.normalize_path` to normalize its input to a list of +strings that should be paths. + +.. autofunction:: flake8.utils.stdin_get_value + +This function retrieves and caches the value provided on ``sys.stdin``. This +allows plugins to use this to retrieve ``stdin`` if necessary. + +.. autofunction:: flake8.utils.is_windows + +This provides a convenient and explicitly named function that checks if we are +currently running on a Windows (or ``nt``) operating system. + +.. autofunction:: flake8.utils.is_using_stdin + +Another helpful function that is named only to be explicit given it is a very +trivial check, this checks if the user specified ``-`` in their arguments to +Flake8 to indicate we should read from stdin. + +.. autofunction:: flake8.utils.filenames_from + +When provided an argument to Flake8, we need to be able to traverse +directories in a convenient manner. For example, if someone runs + +.. 
code:: + + $ flake8 flake8/ + +Then they want us to check all of the files in the directory ``flake8/``. This +function will handle that while also handling the case where they specify a +file like: + +.. code:: + + $ flake8 flake8/__init__.py + + +.. autofunction:: flake8.utils.fnmatch + +The standard library's :func:`fnmatch.fnmatch` is excellent at deciding if a +filename matches a single pattern. In our use case, however, we typically have +a list of patterns and want to know if the filename matches any of them. This +function abstracts that logic away with a little extra logic. + +.. autofunction:: flake8.utils.parameters_for + +Flake8 analyzes the parameters to plugins to determine what input they are +expecting. Plugins may expect one of the following: + +- ``physical_line`` to receive the line as it appears in the file + +- ``logical_line`` to receive the logical line (not as it appears in the file) + +- ``tree`` to receive the abstract syntax tree (AST) for the file + +We also analyze the rest of the parameters to provide more detail to the +plugin. This function will return the parameters in a consistent way across +versions of Python and will handle both classes and functions that are used as +plugins. Further, if the plugin is a class, it will strip the ``self`` +argument so we can check the parameters of the plugin consistently. diff --git a/docs/source/user/.keep b/docs/source/user/.keep new file mode 100644 index 0000000..e69de29 diff --git a/flake8/__init__.py b/flake8/__init__.py new file mode 100644 index 0000000..40336ad --- /dev/null +++ b/flake8/__init__.py @@ -0,0 +1,81 @@ +"""Top-level module for Flake8. + +This module + +- initializes logging for the command-line tool +- tracks the version of the package +- provides a way to configure logging for the command-line tool + +.. 
autofunction:: flake8.configure_logging + +""" +import logging +try: + from logging import NullHandler +except ImportError: + class NullHandler(logging.Handler): + """Shim for version of Python < 2.7.""" + + def emit(self, record): + """Do nothing.""" + pass +import sys + +LOG = logging.getLogger(__name__) +LOG.addHandler(NullHandler()) + +# Clean up after LOG config +del NullHandler + +__version__ = '3.0.0a1' + + +# There is nothing lower than logging.DEBUG (10) in the logging library, +# but we want an extra level to avoid being too verbose when using -vv. +_EXTRA_VERBOSE = 5 +logging.addLevelName(_EXTRA_VERBOSE, 'VERBOSE') + +_VERBOSITY_TO_LOG_LEVEL = { + # output more than warnings but not debugging info + 1: logging.INFO, # INFO is a numerical level of 20 + # output debugging information + 2: logging.DEBUG, # DEBUG is a numerical level of 10 + # output extra verbose debugging information + 3: _EXTRA_VERBOSE, +} + +LOG_FORMAT = ('%(name)-25s %(processName)-11s %(relativeCreated)6d ' + '%(levelname)-8s %(message)s') + + +def configure_logging(verbosity, filename=None, logformat=LOG_FORMAT): + """Configure logging for flake8. + + :param int verbosity: + How verbose to be in logging information. + :param str filename: + Name of the file to append log information to. + If ``None`` this will log to ``sys.stderr``. + If the name is "stdout" or "stderr" this will log to the appropriate + stream. 
+ """ + if verbosity <= 0: + return + if verbosity > 3: + verbosity = 3 + + log_level = _VERBOSITY_TO_LOG_LEVEL[verbosity] + + if not filename or filename in ('stderr', 'stdout'): + fileobj = getattr(sys, filename or 'stderr') + handler_cls = logging.StreamHandler + else: + fileobj = filename + handler_cls = logging.FileHandler + + handler = handler_cls(fileobj) + handler.setFormatter(logging.Formatter(logformat)) + LOG.addHandler(handler) + LOG.setLevel(log_level) + LOG.debug('Added a %s logging handler to logger root at %s', + filename, __name__) diff --git a/flake8/__main__.py b/flake8/__main__.py new file mode 100644 index 0000000..42bc428 --- /dev/null +++ b/flake8/__main__.py @@ -0,0 +1,4 @@ +"""Module allowing for ``python -m flake8 ...``.""" +from flake8.main import cli + +cli.main() diff --git a/flake8/checker.py b/flake8/checker.py new file mode 100644 index 0000000..becde97 --- /dev/null +++ b/flake8/checker.py @@ -0,0 +1,495 @@ +"""Checker Manager and Checker classes.""" +import logging +import os +import sys +import tokenize + +try: + import multiprocessing +except ImportError: + multiprocessing = None + +from flake8 import exceptions +from flake8 import processor +from flake8 import utils + +LOG = logging.getLogger(__name__) + + +class Manager(object): + """Manage the parallelism and checker instances for each plugin and file. + + This class will be responsible for the following: + + - Determining the parallelism of Flake8, e.g.: + + * Do we use :mod:`multiprocessing` or is it unavailable? + + * Do we automatically decide on the number of jobs to use or did the + user provide that? + + - Falling back to a serial way of processing files if we run into an + OSError related to :mod:`multiprocessing` + + - Organizing the results of each checker so we can group the output + together and make our output deterministic. + """ + + def __init__(self, style_guide, arguments, checker_plugins): + """Initialize our Manager instance. 
+ + :param style_guide: + The instantiated style guide for this instance of Flake8. + :type style_guide: + flake8.style_guide.StyleGuide + :param list arguments: + The extra arguments parsed from the CLI (if any) + :param checker_plugins: + The plugins representing checks parsed from entry-points. + :type checker_plugins: + flake8.plugins.manager.Checkers + """ + self.arguments = arguments + self.style_guide = style_guide + self.options = style_guide.options + self.checks = checker_plugins + self.jobs = self._job_count() + self.process_queue = None + self.results_queue = None + self.using_multiprocessing = False + self.processes = [] + self.checkers = [] + + if self.jobs > 1: + self.using_multiprocessing = True + self.process_queue = multiprocessing.Queue() + self.results_queue = multiprocessing.Queue() + + @staticmethod + def _cleanup_queue(q): + while not q.empty(): + q.get_nowait() + + def _force_cleanup(self): + if self.using_multiprocessing: + for proc in self.processes: + proc.join(0.2) + self._cleanup_queue(self.process_queue) + self._cleanup_queue(self.results_queue) + + def _job_count(self): + # type: () -> Union[int, NoneType] + # First we walk through all of our error cases: + # - multiprocessing library is not present + # - we're running on windows in which case we know we have significant + # implemenation issues + # - the user provided stdin and that's not something we can handle + # well + # - we're processing a diff, which again does not work well with + # multiprocessing and which really shouldn't require multiprocessing + # - the user provided some awful input + if not multiprocessing: + LOG.warning('The multiprocessing module is not available. ' + 'Ignoring --jobs arguments.') + return 0 + + if utils.is_windows(): + LOG.warning('The --jobs option is not available on Windows. 
' + 'Ignoring --jobs arguments.') + return 0 + + if utils.is_using_stdin(self.arguments): + LOG.warning('The --jobs option is not compatible with supplying ' + 'input using - . Ignoring --jobs arguments.') + return 0 + + if self.options.diff: + LOG.warning('The --diff option was specified with --jobs but ' + 'they are not compatible. Ignoring --jobs arguments.') + return 0 + + jobs = self.options.jobs + if jobs != 'auto' and not jobs.isdigit(): + LOG.warning('"%s" is not a valid parameter to --jobs. Must be one ' + 'of "auto" or a numerical value, e.g., 4.', jobs) + return 0 + + # If the value is "auto", we want to let the multiprocessing library + # decide the number based on the number of CPUs. However, if that + # function is not implemented for this particular value of Python we + # default to 1 + if jobs == 'auto': + try: + return multiprocessing.cpu_count() + except NotImplementedError: + return 0 + + # Otherwise, we know jobs should be an integer and we can just convert + # it to an integer + return int(jobs) + + def _results(self): + seen_done = 0 + while True: + LOG.info('Retrieving results') + result = self.results_queue.get() + if result == 'DONE': + seen_done += 1 + if seen_done >= self.jobs: + break + continue + + yield result + + def _report_after_parallel(self): + style_guide = self.style_guide + final_results = {} + for (filename, results) in self._results(): + final_results[filename] = results + + for checker in self.checkers: + filename = checker.filename + results = sorted(final_results.get(filename, []), + key=lambda tup: (tup[1], tup[2])) + for (error_code, line_number, column, text, line) in results: + style_guide.handle_error( + code=error_code, + filename=filename, + line_number=line_number, + column_number=column, + text=text, + physical_line=line, + ) + + def _report_after_serial(self): + style_guide = self.style_guide + for checker in self.checkers: + results = sorted(checker.results, key=lambda tup: (tup[2], tup[3])) + filename = 
checker.filename + for (error_code, line_number, column, text, line) in results: + style_guide.handle_error( + code=error_code, + filename=filename, + line_number=line_number, + column_number=column, + text=text, + physical_line=line, + ) + + def _run_checks_from_queue(self): + LOG.info('Running checks in parallel') + for checker in iter(self.process_queue.get, 'DONE'): + LOG.debug('Running checker for file "%s"', checker.filename) + checker.run_checks(self.results_queue) + self.results_queue.put('DONE') + + def is_path_excluded(self, path): + # type: (str) -> bool + """Check if a path is excluded. + + :param str path: + Path to check against the exclude patterns. + :returns: + True if there are exclude patterns and the path matches, + otherwise False. + :rtype: + bool + """ + exclude = self.options.exclude + if not exclude: + return False + basename = os.path.basename(path) + if utils.fnmatch(basename, exclude): + LOG.info('"%s" has been excluded', basename) + return True + + absolute_path = os.path.abspath(path) + match = utils.fnmatch(absolute_path, exclude) + LOG.info('"%s" has %sbeen excluded', absolute_path, + '' if match else 'not ') + return match + + def make_checkers(self, paths=None): + # type: (List[str]) -> NoneType + """Create checkers for each file.""" + if paths is None: + paths = self.arguments + filename_patterns = self.options.filename + self.checkers = [ + FileChecker(filename, self.checks, self.style_guide) + for argument in paths + for filename in utils.filenames_from(argument, + self.is_path_excluded) + if utils.fnmatch(filename, filename_patterns) + ] + + def report(self): + """Report all of the errors found in the managed file checkers. + + This iterates over each of the checkers and reports the errors sorted + by line number. + """ + if not self.using_multiprocessing: + self._report_after_serial() + + def run(self): + """Run all the checkers. 
+ + This handles starting the process workers or just simply running all + of the checks in serial. + """ + if self.using_multiprocessing: + LOG.info('Starting %d process workers', self.jobs) + for i in range(self.jobs): + proc = multiprocessing.Process( + target=self._run_checks_from_queue + ) + proc.daemon = True + proc.start() + self.processes.append(proc) + proc = multiprocessing.Process(target=self._report_after_parallel) + proc.start() + LOG.info('Started process to report errors') + self.processes.append(proc) + else: + for checker in self.checkers: + checker.run_checks(self.results_queue) + + def start(self): + """Start checking files.""" + LOG.info('Making checkers') + self.make_checkers() + if not self.using_multiprocessing: + return + + LOG.info('Populating process queue') + for checker in self.checkers: + self.process_queue.put(checker) + + def stop(self): + """Stop checking files.""" + for i in range(self.jobs): + self.process_queue.put('DONE') + + for proc in self.processes: + proc.join() + + +class FileChecker(object): + """Manage running checks for a file and aggregate the results.""" + + def __init__(self, filename, checks, style_guide): + """Initialize our file checker. + + :param str filename: + Name of the file to check. + :param checks: + The plugins registered to check the file. + :type checks: + flake8.plugins.manager.Checkers + """ + self.filename = filename + self.checks = checks + self.style_guide = style_guide + self.results = [] + self.processor = self._make_processor() + + def _make_processor(self): + try: + return processor.FileProcessor(self.filename, + self.style_guide.options) + except IOError: + # If we can not read the file due to an IOError (e.g., the file + # does not exist or we do not have the permissions to open it) + # then we need to format that exception for the user. + # NOTE(sigmavirus24): Historically, pep8 has always reported this + # as an E902. We probably *want* a better error code for this + # going forward. 
+ (exc_type, exception) = sys.exc_info()[:2] + message = '{0}: {1}'.format(exc_type.__name__, exception) + self.report('E902', 0, 0, message) + return None + + def report(self, error_code, line_number, column, text): + # type: (str, int, int, str) -> str + """Report an error by storing it in the results list.""" + if error_code is None: + error_code, text = text.split(' ', 1) + physical_line = self.processor.line_for(line_number) + error = (error_code, line_number, column, text, physical_line) + self.results.append(error) + return error_code + + def run_check(self, plugin, **arguments): + """Run the check in a single plugin.""" + LOG.debug('Running %r with %r', plugin, arguments) + self.processor.keyword_arguments_for(plugin.parameters, arguments) + return plugin.execute(**arguments) + + def run_ast_checks(self): + """Run all checks expecting an abstract syntax tree.""" + try: + ast = self.processor.build_ast() + except (ValueError, SyntaxError, TypeError): + (exc_type, exception) = sys.exc_info()[:2] + if len(exception.args) > 1: + offset = exception.args[1] + if len(offset) > 2: + offset = offset[1:3] + else: + offset = (1, 0) + + self.report('E999', offset[0], offset[1], '%s: %s' % + (exc_type.__name__, exception.args[0])) + return + + for plugin in self.checks.ast_plugins: + checker = self.run_check(plugin, tree=ast) + # NOTE(sigmavirus24): If we want to allow for AST plugins that are + # not classes exclusively, we can do the following: + # retrieve_results = getattr(checker, 'run', lambda: checker) + # Otherwise, we just call run on the checker + for (line_number, offset, text, check) in checker.run(): + self.report( + error_code=None, + line_number=line_number, + column=offset, + text=text, + ) + + def run_logical_checks(self): + """Run all checks expecting a logical line.""" + comments, logical_line, mapping = self.processor.build_logical_line() + if not mapping: + return + self.processor.update_state(mapping) + + LOG.debug('Logical line: "%s"', 
logical_line.rstrip()) + + for plugin in self.checks.logical_line_plugins: + self.processor.update_checker_state_for(plugin) + results = self.run_check(plugin, logical_line=logical_line) or () + for offset, text in results: + offset = find_offset(offset, mapping) + line_number, column_offset = offset + self.report( + error_code=None, + line_number=line_number, + column=column_offset, + text=text, + ) + + self.processor.next_logical_line() + + def run_physical_checks(self, physical_line): + """Run all checks for a given physical line.""" + for plugin in self.checks.physical_line_plugins: + self.processor.update_checker_state_for(plugin) + result = self.run_check(plugin, physical_line=physical_line) + if result is not None: + column_offset, text = result + error_code = self.report( + error_code=None, + line_number=self.processor.line_number, + column=column_offset, + text=text, + ) + + self.processor.check_physical_error(error_code, physical_line) + + def process_tokens(self): + """Process tokens and trigger checks. + + This can raise a :class:`flake8.exceptions.InvalidSyntax` exception. + Instead of using this directly, you should use + :meth:`flake8.checker.FileChecker.run_checks`. 
+ """ + parens = 0 + file_processor = self.processor + for token in file_processor.generate_tokens(): + self.check_physical_eol(token) + token_type, text = token[0:2] + processor.log_token(LOG, token) + if token_type == tokenize.OP: + parens = processor.count_parentheses(parens, text) + elif parens == 0: + if processor.token_is_newline(token): + self.handle_newline(token_type) + elif (processor.token_is_comment(token) and + len(file_processor.tokens) == 1): + self.handle_comment(token, text) + + if file_processor.tokens: + # If any tokens are left over, process them + self.run_physical_checks(file_processor.lines[-1]) + self.run_logical_checks() + + def run_checks(self, results_queue): + """Run checks against the file.""" + if self.processor.should_ignore_file(): + return + + try: + self.process_tokens() + except exceptions.InvalidSyntax as exc: + self.report(exc.error_code, exc.line_number, exc.column_number, + exc.error_message) + + self.run_ast_checks() + + if results_queue is not None: + results_queue.put((self.filename, self.results)) + + def handle_comment(self, token, token_text): + """Handle the logic when encountering a comment token.""" + # The comment also ends a physical line + token = list(token) + token[1] = token_text.rstrip('\r\n') + token[3] = (token[2][0], token[2][1] + len(token[1])) + self.processor.tokens = [tuple(token)] + self.run_logical_checks() + + def handle_newline(self, token_type): + """Handle the logic when encountering a newline token.""" + if token_type == tokenize.NEWLINE: + self.run_logical_checks() + self.processor.reset_blank_before() + elif len(self.processor.tokens) == 1: + # The physical line contains only this token. 
+ self.processor.visited_new_blank_line() + self.processor.delete_first_token() + else: + self.run_logical_checks() + + def check_physical_eol(self, token): + """Run physical checks if and only if it is at the end of the line.""" + if processor.is_eol_token(token): + # Obviously, a newline token ends a single physical line. + self.run_physical_checks(token[4]) + elif processor.is_multiline_string(token): + # Less obviously, a string that contains newlines is a + # multiline string, either triple-quoted or with internal + # newlines backslash-escaped. Check every physical line in the + # string *except* for the last one: its newline is outside of + # the multiline string, so we consider it a regular physical + # line, and will check it like any other physical line. + # + # Subtleties: + # - have to wind self.line_number back because initially it + # points to the last line of the string, and we want + # check_physical() to give accurate feedback + line_no = token[2][0] + with self.processor.inside_multiline(line_number=line_no): + for line in self.processor.split_line(token): + self.run_physical_checks(line + '\n') + + +def find_offset(offset, mapping): + """Find the offset tuple for a single offset.""" + if isinstance(offset, tuple): + return offset + + for token_offset, position in mapping: + if offset <= token_offset: + break + return (position[0], position[1] + offset - token_offset) diff --git a/flake8/defaults.py b/flake8/defaults.py new file mode 100644 index 0000000..62939b5 --- /dev/null +++ b/flake8/defaults.py @@ -0,0 +1,8 @@ +"""Constants that define defaults.""" + +EXCLUDE = '.svn,CVS,.bzr,.hg,.git,__pycache__,.tox' +IGNORE = 'E121,E123,E126,E226,E24,E704' +MAX_LINE_LENGTH = 79 + +# Other consants +WHITESPACE = frozenset(' \t') diff --git a/flake8/exceptions.py b/flake8/exceptions.py new file mode 100644 index 0000000..18ee90c --- /dev/null +++ b/flake8/exceptions.py @@ -0,0 +1,43 @@ +"""Exception classes for all of Flake8.""" + + +class 
Flake8Exception(Exception): + """Plain Flake8 exception.""" + + pass + + +class FailedToLoadPlugin(Flake8Exception): + """Exception raised when a plugin fails to load.""" + + FORMAT = 'Flake8 failed to load plugin "%(name)s" due to %(exc)s.' + + def __init__(self, *args, **kwargs): + """Initialize our FailedToLoadPlugin exception.""" + self.plugin = kwargs.pop('plugin') + self.ep_name = self.plugin.name + self.original_exception = kwargs.pop('exception') + super(FailedToLoadPlugin, self).__init__(*args, **kwargs) + + def __str__(self): + """Return a nice string for our exception.""" + return self.FORMAT % {'name': self.ep_name, + 'exc': self.original_exception} + + +class InvalidSyntax(Flake8Exception): + """Exception raised when tokenizing a file fails.""" + + def __init__(self, *args, **kwargs): + """Initialize our InvalidSyntax exception.""" + self.original_exception = kwargs.pop('exception') + self.error_code = 'E902' + self.line_number = 1 + self.column_number = 0 + try: + self.error_message = self.original_exception.message + except AttributeError: + # On Python 3, the IOError is an OSError which has a + # strerror attribute instead of a message attribute + self.error_message = self.original_exception.strerror + super(InvalidSyntax, self).__init__(*args, **kwargs) diff --git a/flake8/formatting/__init__.py b/flake8/formatting/__init__.py new file mode 100644 index 0000000..bf44801 --- /dev/null +++ b/flake8/formatting/__init__.py @@ -0,0 +1 @@ +"""Submodule containing the default formatters for Flake8.""" diff --git a/flake8/formatting/base.py b/flake8/formatting/base.py new file mode 100644 index 0000000..e419470 --- /dev/null +++ b/flake8/formatting/base.py @@ -0,0 +1,105 @@ +"""The base class and interface for all formatting plugins.""" +from __future__ import print_function + + +class BaseFormatter(object): + """Class defining the formatter interface. + + .. attribute:: options + + The options parsed from both configuration files and the command-line. 
+ + .. attribute:: filename + + If specified by the user, the path to store the results of the run. + + .. attribute:: output_fd + + Initialized when the :meth:`start` is called. This will be a file + object opened for writing. + + .. attribute:: newline + + The string to add to the end of a line. This is only used when the + output filename has been specified. + """ + + def __init__(self, options): + """Initialize with the options parsed from config and cli. + + This also calls a hook, :meth:`after_init`, so subclasses do not need + to call super to call this method. + + :param optparse.Values options: + User specified configuration parsed from both configuration files + and the command-line interface. + """ + self.options = options + self.filename = options.output_file + self.output_fd = None + self.newline = '\n' + self.after_init() + + def after_init(self): + """Initialize the formatter further.""" + pass + + def start(self): + """Prepare the formatter to receive input. + + This defaults to initializing :attr:`output_fd` if :attr:`filename` + """ + if self.filename: + self.output_fd = open(self.filename, 'w') + + def handle(self, error): + """Handle an error reported by Flake8. + + This defaults to calling :meth:`format` and then :meth:`write`. To + extend how errors are handled, override this method. + + :param error: + This will be an instance of :class:`~flake8.style_guide.Error`. + :type error: + flake8.style_guide.Error + """ + line = self.format(error) + self.write(line) + + def format(self, error): + """Format an error reported by Flake8. + + This method **must** be implemented by subclasses. + + :param error: + This will be an instance of :class:`~flake8.style_guide.Error`. + :type error: + flake8.style_guide.Error + :returns: + The formatted error string. + :rtype: + str + """ + raise NotImplementedError('Subclass of BaseFormatter did not implement' + ' format.') + + def write(self, line): + """Write the line either to the output file or stdout. 
+ + This handles deciding whether to write to a file or print to standard + out for subclasses. Override this if you want behaviour that differs + from the default. + + :param str line: + The formatted string to print or write. + """ + if self.output_fd is not None: + self.output_fd.write(line + self.newline) + else: + print(line) + + def stop(self): + """Clean up after reporting is finished.""" + if self.output_fd is not None: + self.output_fd.close() + self.output_fd = None diff --git a/flake8/formatting/default.py b/flake8/formatting/default.py new file mode 100644 index 0000000..bef8c88 --- /dev/null +++ b/flake8/formatting/default.py @@ -0,0 +1,56 @@ +"""Default formatting class for Flake8.""" +from flake8.formatting import base + + +class SimpleFormatter(base.BaseFormatter): + """Simple abstraction for Default and Pylint formatter commonality. + + Sub-classes of this need to define an ``error_format`` attribute in order + to succeed. The ``format`` method relies on that attribute and expects the + ``error_format`` string to use the old-style formatting strings with named + parameters: + + * code + * text + * path + * row + * col + + """ + + error_format = None + + def format(self, error): + """Format and write error out. + + If an output filename is specified, write formatted errors to that + file. Otherwise, print the formatted error to standard out. + """ + return self.error_format % { + "code": error.code, + "text": error.text, + "path": error.filename, + "row": error.line_number, + "col": error.column_number, + } + + +class Default(SimpleFormatter): + """Default formatter for Flake8. + + This also handles backwards compatibility for people specifying a custom + format string. 
+ """ + + error_format = '%(path)s:%(row)d:%(col)d: %(code)s %(text)s' + + def after_init(self): + """Check for a custom format string.""" + if self.options.format.lower() != 'default': + self.error_format = self.options.format + + +class Pylint(SimpleFormatter): + """Pylint formatter for Flake8.""" + + error_format = '%(path)s:%(row)d: [%(code)s] %(text)s' diff --git a/flake8/main/__init__.py b/flake8/main/__init__.py new file mode 100644 index 0000000..d3aa1de --- /dev/null +++ b/flake8/main/__init__.py @@ -0,0 +1 @@ +"""Module containing the logic for the Flake8 entry-points.""" diff --git a/flake8/main/cli.py b/flake8/main/cli.py new file mode 100644 index 0000000..dbd9603 --- /dev/null +++ b/flake8/main/cli.py @@ -0,0 +1,305 @@ +"""Command-line implementation of flake8.""" +import logging + +import flake8 +from flake8 import checker +from flake8 import defaults +from flake8 import style_guide +from flake8.options import aggregator +from flake8.options import manager +from flake8.plugins import manager as plugin_manager + +LOG = logging.getLogger(__name__) + + +def register_default_options(option_manager): + """Register the default options on our OptionManager.""" + add_option = option_manager.add_option + + # pep8 options + add_option( + '-v', '--verbose', default=0, action='count', + parse_from_config=True, + help='Print more information about what is happening in flake8.' + ' This option is repeatable and will increase verbosity each ' + 'time it is repeated.', + ) + add_option( + '-q', '--quiet', default=0, action='count', + parse_from_config=True, + help='Report only file names, or nothing. 
This option is repeatable.', + ) + + add_option( + '--count', action='store_true', parse_from_config=True, + help='Print total number of errors and warnings to standard error and' + ' set the exit code to 1 if total is not empty.', + ) + + add_option( + '--diff', action='store_true', + help='Report changes only within line number ranges in the unified ' + 'diff provided on standard in by the user.', + ) + + add_option( + '--exclude', metavar='patterns', default=defaults.EXCLUDE, + comma_separated_list=True, parse_from_config=True, + normalize_paths=True, + help='Comma-separated list of files or directories to exclude.' + '(Default: %default)', + ) + + add_option( + '--filename', metavar='patterns', default='*.py', + parse_from_config=True, comma_separated_list=True, + help='Only check for filenames matching the patterns in this comma-' + 'separated list. (Default: %default)', + ) + + # TODO(sigmavirus24): Figure out --first/--repeat + + add_option( + '--format', metavar='format', default='default', + parse_from_config=True, + help='Format errors according to the chosen formatter.', + ) + + add_option( + '--hang-closing', action='store_true', parse_from_config=True, + help='Hang closing bracket instead of matching indentation of opening' + " bracket's line.", + ) + + add_option( + '--ignore', metavar='errors', default=defaults.IGNORE, + parse_from_config=True, comma_separated_list=True, + help='Comma-separated list of errors and warnings to ignore (or skip).' + ' For example, ``--ignore=E4,E51,W234``. (Default: %default)', + ) + + add_option( + '--max-line-length', type='int', metavar='n', + default=defaults.MAX_LINE_LENGTH, parse_from_config=True, + help='Maximum allowed line length for the entirety of this run. ' + '(Default: %default)', + ) + + add_option( + '--select', metavar='errors', default='', + parse_from_config=True, comma_separated_list=True, + help='Comma-separated list of errors and warnings to enable.' + ' For example, ``--select=E4,E51,W234``. 
(Default: %default)', + ) + + add_option( + '--disable-noqa', default=False, parse_from_config=True, + action='store_true', + help='Disable the effect of "# noqa". This will report errors on ' + 'lines with "# noqa" at the end.' + ) + + # TODO(sigmavirus24): Decide what to do about --show-pep8 + + add_option( + '--show-source', action='store_true', parse_from_config=True, + help='Show the source generate each error or warning.', + ) + + add_option( + '--statistics', action='store_true', parse_from_config=True, + help='Count errors and warnings.', + ) + + # Flake8 options + add_option( + '--enabled-extensions', default='', parse_from_config=True, + comma_separated_list=True, type='string', + help='Enable plugins and extensions that are otherwise disabled ' + 'by default', + ) + + add_option( + '--exit-zero', action='store_true', + help='Exit with status code "0" even if there are errors.', + ) + + add_option( + '-j', '--jobs', type='string', default='auto', parse_from_config=True, + help='Number of subprocesses to use to run checks in parallel. ' + 'This is ignored on Windows. The default, "auto", will ' + 'auto-detect the number of processors available to use.' + ' (Default: %default)', + ) + + add_option( + '--output-file', default=None, type='string', parse_from_config=True, + # callback=callbacks.redirect_stdout, + help='Redirect report to a file.', + ) + + # Config file options + + add_option( + '--append-config', action='append', + help='Provide extra config files to parse in addition to the files ' + 'found by Flake8 by default. These files are the last ones read ' + 'and so they take the highest precedence when multiple files ' + 'provide the same option.', + ) + + add_option( + '--config', default=None, + help='Path to the config file that will be the authoritative config ' + 'source. This will cause Flake8 to ignore all other ' + 'configuration files.' 
+ ) + + add_option( + '--isolated', default=False, action='store_true', + help='Ignore all found configuration files.', + ) + + +class Application(object): + """Abstract our application into a class.""" + + def __init__(self, program='flake8', version=flake8.__version__): + # type: (str, str) -> NoneType + """Initialize our application. + + :param str program: + The name of the program/application that we're executing. + :param str version: + The version of the program/application we're executing. + """ + self.program = program + self.version = version + self.option_manager = manager.OptionManager( + prog='flake8', version=flake8.__version__ + ) + register_default_options(self.option_manager) + + # Set the verbosity of the program + preliminary_opts, _ = self.option_manager.parse_args() + flake8.configure_logging(preliminary_opts.verbose, + preliminary_opts.output_file) + + self.check_plugins = None + self.listening_plugins = None + self.formatting_plugins = None + self.formatter = None + self.listener_trie = None + self.guide = None + self.file_checker_manager = None + + self.options = None + self.args = None + + def find_plugins(self): + # type: () -> NoneType + """Find and load the plugins for this application.""" + if self.check_plugins is None: + self.check_plugins = plugin_manager.Checkers() + + if self.listening_plugins is None: + self.listening_plugins = plugin_manager.Listeners() + + if self.formatting_plugins is None: + self.formatting_plugins = plugin_manager.ReportFormatters() + + def register_plugin_options(self): + # type: () -> NoneType + """Register options provided by plugins to our option manager.""" + self.check_plugins.register_options(self.option_manager) + self.listening_plugins.register_options(self.option_manager) + self.formatting_plugins.register_options(self.option_manager) + + def parse_configuration_and_cli(self, argv=None): + # type: (Union[NoneType, List[str]]) -> NoneType + """Parse configuration files and the CLI options. 
+ + :param list argv: + Command-line arguments passed in directly. + """ + if self.options is None and self.args is None: + self.options, self.args = aggregator.aggregate_options( + self.option_manager, argv + ) + + self.check_plugins.provide_options(self.option_manager, self.options, + self.args) + + def make_formatter(self): + # type: () -> NoneType + """Initialize a formatter based on the parsed options.""" + if self.formatter is None: + self.formatter = self.formatting_plugins.get( + self.options.format, self.formatting_plugins['default'] + ).execute(self.options) + + def make_notifier(self): + # type: () -> NoneType + """Initialize our listener Notifier.""" + if self.listener_trie is None: + self.listener_trie = self.listening_plugins.build_notifier() + + def make_guide(self): + # type: () -> NoneType + """Initialize our StyleGuide.""" + if self.guide is None: + self.guide = style_guide.StyleGuide( + self.options, self.listener_trie, self.formatter + ) + + def make_file_checker_manager(self): + # type: () -> NoneType + """Initialize our FileChecker Manager.""" + if self.file_checker_manager is None: + self.file_checker_manager = checker.Manager( + style_guide=self.guide, + arguments=self.args, + checker_plugins=self.check_plugins, + ) + + def run_checks(self): + # type: () -> NoneType + """Run the actual checks with the FileChecker Manager.""" + self.file_checker_manager.start() + self.file_checker_manager.run() + LOG.info('Finished running') + self.file_checker_manager.stop() + + def report_errors(self): + # type: () -> NoneType + """Report all the errors found by flake8 3.0.""" + LOG.info('Reporting errors') + self.file_checker_manager.report() + + def _run(self, argv): + self.find_plugins() + self.register_plugin_options() + self.parse_configuration_and_cli(argv) + self.make_formatter() + self.make_notifier() + self.make_guide() + self.make_file_checker_manager() + self.run_checks() + self.report_errors() + + def run(self, argv=None): + # type: 
(Union[NoneType, List[str]]) -> NoneType + """Run our application.""" + try: + self._run(argv) + except KeyboardInterrupt as exc: + LOG.critical('Caught keyboard interrupt from user') + LOG.exception(exc) + self.file_checker_manager._force_cleanup() + + +def main(argv=None): + # type: (Union[NoneType, List[str]]) -> NoneType + """Main entry-point for the flake8 command-line tool.""" + app = Application() + app.run(argv) diff --git a/flake8/options/__init__.py b/flake8/options/__init__.py new file mode 100644 index 0000000..cc20daa --- /dev/null +++ b/flake8/options/__init__.py @@ -0,0 +1,12 @@ +"""Package containing the option manager and config management logic. + +- :mod:`flake8.options.config` contains the logic for finding, parsing, and + merging configuration files. + +- :mod:`flake8.options.manager` contains the logic for managing customized + Flake8 command-line and configuration options. + +- :mod:`flake8.options.aggregator` uses objects from both of the above modules + to aggregate configuration into one object used by plugins and Flake8. + +""" diff --git a/flake8/options/aggregator.py b/flake8/options/aggregator.py new file mode 100644 index 0000000..99d0cfe --- /dev/null +++ b/flake8/options/aggregator.py @@ -0,0 +1,74 @@ +"""Aggregation function for CLI specified options and config file options. + +This holds the logic that uses the collected and merged config files and +applies the user-specified command-line configuration on top of it. +""" +import logging + +from flake8 import utils +from flake8.options import config + +LOG = logging.getLogger(__name__) + + +def aggregate_options(manager, arglist=None, values=None): + """Aggregate and merge CLI and config file options. + + :param flake8.option.manager.OptionManager manager: + The instance of the OptionManager that we're presently using. + :param list arglist: + The list of arguments to pass to ``manager.parse_args``. In most cases + this will be None so ``parse_args`` uses ``sys.argv``. 
This is mostly + available to make testing easier. + :param optparse.Values values: + Previously parsed set of parsed options. + :returns: + Tuple of the parsed options and extra arguments returned by + ``manager.parse_args``. + :rtype: + tuple(optparse.Values, list) + """ + # Get defaults from the option parser + default_values, _ = manager.parse_args([], values=values) + # Get original CLI values so we can find additional config file paths and + # see if --config was specified. + original_values, original_args = manager.parse_args(arglist) + extra_config_files = utils.normalize_paths(original_values.append_config) + + # Make our new configuration file mergerator + config_parser = config.MergedConfigParser( + option_manager=manager, + extra_config_files=extra_config_files, + args=original_args, + ) + + # Get the parsed config + parsed_config = config_parser.parse(original_values.config, + original_values.isolated) + + # Extend the default ignore value with the extended default ignore list, + # registered by plugins. 
+ extended_default_ignore = manager.extended_default_ignore.copy() + LOG.debug('Extended default ignore list: %s', + list(extended_default_ignore)) + extended_default_ignore.update(default_values.ignore) + default_values.ignore = list(extended_default_ignore) + LOG.debug('Merged default ignore list: %s', default_values.ignore) + + # Merge values parsed from config onto the default values returned + for config_name, value in parsed_config.items(): + dest_name = config_name + # If the config name is somehow different from the destination name, + # fetch the destination name from our Option + if not hasattr(default_values, config_name): + dest_name = config_parser.config_options[config_name].dest + + LOG.debug('Overriding default value of (%s) for "%s" with (%s)', + getattr(default_values, dest_name, None), + dest_name, + value) + # Override the default values with the config values + setattr(default_values, dest_name, value) + + # Finally parse the command-line options + return manager.parse_args(arglist, default_values) diff --git a/flake8/options/config.py b/flake8/options/config.py new file mode 100644 index 0000000..ecc40f7 --- /dev/null +++ b/flake8/options/config.py @@ -0,0 +1,277 @@ +"""Config handling logic for Flake8.""" +import logging +import os.path +import sys + +if sys.version_info < (3, 0): + import ConfigParser as configparser +else: + import configparser + +LOG = logging.getLogger(__name__) + +__all__ = ('ConfigFileFinder', 'MergedConfigParser') + + +class ConfigFileFinder(object): + """Encapsulate the logic for finding and reading config files.""" + + PROJECT_FILENAMES = ('setup.cfg', 'tox.ini') + + def __init__(self, program_name, args, extra_config_files): + """Initialize object to find config files. + + :param str program_name: + Name of the current program (e.g., flake8). + :param list args: + The extra arguments passed on the command-line. + :param list extra_config_files: + Extra configuration files specified by the user to read. 
+ """ + # The values of --append-config from the CLI + extra_config_files = extra_config_files or [] + self.extra_config_files = [ + # Ensure the paths are absolute paths for local_config_files + os.path.abspath(f) for f in extra_config_files + ] + + # Platform specific settings + self.is_windows = sys.platform == 'win32' + self.xdg_home = os.environ.get('XDG_CONFIG_HOME', + os.path.expanduser('~/.config')) + + # Look for '.' files + self.program_config = '.' + program_name + self.program_name = program_name + + # List of filenames to find in the local/project directory + self.project_filenames = ('setup.cfg', 'tox.ini', self.program_config) + + self.local_directory = os.path.abspath(os.curdir) + + if not args: + args = ['.'] + self.parent = self.tail = os.path.abspath(os.path.commonprefix(args)) + + @staticmethod + def _read_config(files): + config = configparser.RawConfigParser() + found_files = config.read(files) + return (config, found_files) + + def cli_config(self, files): + """Read and parse the config file specified on the command-line.""" + config, found_files = self._read_config(files) + if found_files: + LOG.debug('Found cli configuration files: %s', found_files) + return config + + def generate_possible_local_files(self): + """Find and generate all local config files.""" + tail = self.tail + parent = self.parent + local_dir = self.local_directory + while tail: + for project_filename in self.project_filenames: + filename = os.path.abspath(os.path.join(parent, + project_filename)) + yield filename + if parent == local_dir: + break + (parent, tail) = os.path.split(parent) + + def local_config_files(self): + """Find all local config files which actually exist. + + Filter results from + :meth:`~ConfigFileFinder.generate_possible_local_files` based + on whether the filename exists or not. + + :returns: + List of files that exist that are local project config files with + extra config files appended to that list (which also exist). 
+ :rtype: + [str] + """ + exists = os.path.exists + return [ + filename + for filename in self.generate_possible_local_files() + if os.path.exists(filename) + ] + [f for f in self.extra_config_files if exists(f)] + + def local_configs(self): + """Parse all local config files into one config object.""" + config, found_files = self._read_config(self.local_config_files()) + if found_files: + LOG.debug('Found local configuration files: %s', found_files) + return config + + def user_config_file(self): + """Find the user-level config file.""" + if self.is_windows: + return os.path.expanduser('~\\' + self.program_config) + return os.path.join(self.xdg_home, self.program_name) + + def user_config(self): + """Parse the user config file into a config object.""" + config, found_files = self._read_config(self.user_config_file()) + if found_files: + LOG.debug('Found user configuration files: %s', found_files) + return config + + +class MergedConfigParser(object): + """Encapsulate merging different types of configuration files. + + This parses out the options registered that were specified in the + configuration files, handles extra configuration files, and returns + dictionaries with the parsed values. + """ + + #: Set of types that should use the + #: :meth:`~configparser.RawConfigParser.getint` method. + GETINT_TYPES = set(['int', 'count']) + #: Set of actions that should use the + #: :meth:`~configparser.RawConfigParser.getbool` method. + GETBOOL_ACTIONS = set(['store_true', 'store_false']) + + def __init__(self, option_manager, extra_config_files=None, args=None): + """Initialize the MergedConfigParser instance. + + :param flake8.option.manager.OptionManager option_manager: + Initialized OptionManager. + :param list extra_config_files: + List of extra config files to parse. + :params list args: + The extra parsed arguments from the command-line. 
+ """ + #: Our instance of flake8.options.manager.OptionManager + self.option_manager = option_manager + #: The prog value for the cli parser + self.program_name = option_manager.program_name + #: Parsed extra arguments + self.args = args + #: Mapping of configuration option names to + #: :class:`~flake8.options.manager.Option` instances + self.config_options = option_manager.config_options_dict + #: List of extra config files + self.extra_config_files = extra_config_files or [] + #: Our instance of our :class:`~ConfigFileFinder` + self.config_finder = ConfigFileFinder(self.program_name, self.args, + self.extra_config_files) + + @staticmethod + def _normalize_value(option, value): + final_value = option.normalize(value) + LOG.debug('%r has been normalized to %r for option "%s"', + value, final_value, option.config_name) + return final_value + + def _parse_config(self, config_parser): + config_dict = {} + for option_name in config_parser.options(self.program_name): + if option_name not in self.config_options: + LOG.debug('Option "%s" is not registered. 
Ignoring.', + option_name) + continue + option = self.config_options[option_name] + + # Use the appropriate method to parse the config value + method = config_parser.get + if option.type in self.GETINT_TYPES: + method = config_parser.getint + elif option.action in self.GETBOOL_ACTIONS: + method = config_parser.getboolean + + value = method(self.program_name, option_name) + LOG.debug('Option "%s" returned value: %r', option_name, value) + + final_value = self._normalize_value(option, value) + config_dict[option_name] = final_value + + return config_dict + + def is_configured_by(self, config): + """Check if the specified config parser has an appropriate section.""" + return config.has_section(self.program_name) + + def parse_local_config(self): + """Parse and return the local configuration files.""" + config = self.config_finder.local_configs() + if not self.is_configured_by(config): + LOG.debug('Local configuration files have no %s section', + self.program_name) + return {} + + LOG.debug('Parsing local configuration files.') + return self._parse_config(config) + + def parse_user_config(self): + """Parse and return the user configuration files.""" + config = self.config_finder.user_config() + if not self.is_configured_by(config): + LOG.debug('User configuration files have no %s section', + self.program_name) + return {} + + LOG.debug('Parsing user configuration files.') + return self._parse_config(config) + + def parse_cli_config(self, config_path): + """Parse and return the file specified by --config.""" + config = self.config_finder.cli_config(config_path) + if not self.is_configured_by(config): + LOG.debug('CLI configuration files have no %s section', + self.program_name) + return {} + + LOG.debug('Parsing CLI configuration files.') + return self._parse_config(config) + + def merge_user_and_local_config(self): + """Merge the parsed user and local configuration files. + + :returns: + Dictionary of the parsed and merged configuration options. 
+ :rtype: + dict + """ + user_config = self.parse_user_config() + config = self.parse_local_config() + + for option, value in user_config.items(): + config.setdefault(option, value) + + return config + + def parse(self, cli_config=None, isolated=False): + """Parse and return the local and user config files. + + First this copies over the parsed local configuration and then + iterates over the options in the user configuration and sets them if + they were not set by the local configuration file. + + :param str cli_config: + Value of --config when specified at the command-line. Overrides + all other config files. + :param bool isolated: + Determines if we should parse configuration files at all or not. + If running in isolated mode, we ignore all configuration files + :returns: + Dictionary of parsed configuration options + :rtype: + dict + """ + if isolated: + LOG.debug('Refusing to parse configuration files due to user-' + 'requested isolation') + return {} + + if cli_config: + LOG.debug('Ignoring user and locally found configuration files. 
' + 'Reading only configuration from "%s" specified via ' + '--config by the user', cli_config) + return self.parse_cli_config(cli_config) + + return self.merge_user_and_local_config() diff --git a/flake8/options/manager.py b/flake8/options/manager.py new file mode 100644 index 0000000..cb4c831 --- /dev/null +++ b/flake8/options/manager.py @@ -0,0 +1,245 @@ +"""Option handling and Option management logic.""" +import logging +import optparse # pylint: disable=deprecated-module + +from flake8 import utils + +LOG = logging.getLogger(__name__) + + +class Option(object): + """Our wrapper around an optparse.Option object to add features.""" + + def __init__(self, short_option_name=None, long_option_name=None, + # Options below here are taken from the optparse.Option class + action=None, default=None, type=None, dest=None, + nargs=None, const=None, choices=None, callback=None, + callback_args=None, callback_kwargs=None, help=None, + metavar=None, + # Options below here are specific to Flake8 + parse_from_config=False, comma_separated_list=False, + normalize_paths=False): + """Initialize an Option instance wrapping optparse.Option. + + The following are all passed directly through to optparse. + + :param str short_option_name: + The short name of the option (e.g., ``-x``). This will be the + first argument passed to :class:`~optparse.Option`. + :param str long_option_name: + The long name of the option (e.g., ``--xtra-long-option``). This + will be the second argument passed to :class:`~optparse.Option`. + :param str action: + Any action allowed by :mod:`optparse`. + :param default: + Default value of the option. + :param type: + Any type allowed by :mod:`optparse`. + :param dest: + Attribute name to store parsed option value as. + :param nargs: + Number of arguments to parse for this option. + :param const: + Constant value to store on a common destination. Usually used in + conjuntion with ``action="store_const"``. 
+ :param iterable choices: + Possible values for the option. + :param callable callback: + Callback used if the action is ``"callback"``. + :param iterable callback_args: + Additional positional arguments to the callback callable. + :param dictionary callback_kwargs: + Keyword arguments to the callback callable. + :param str help: + Help text displayed in the usage information. + :param str metavar: + Name to use instead of the long option name for help text. + + The following parameters are for Flake8's option handling alone. + + :param bool parse_from_config: + Whether or not this option should be parsed out of config files. + :param bool comma_separated_list: + Whether the option is a comma separated list when parsing from a + config file. + :param bool normalize_paths: + Whether the option is expecting a path or list of paths and should + attempt to normalize the paths to absolute paths. + """ + self.short_option_name = short_option_name + self.long_option_name = long_option_name + self.option_args = [ + x for x in (short_option_name, long_option_name) if x is not None + ] + self.option_kwargs = { + 'action': action, + 'default': default, + 'type': type, + 'dest': self._make_dest(dest), + 'nargs': nargs, + 'const': const, + 'choices': choices, + 'callback': callback, + 'callback_args': callback_args, + 'callback_kwargs': callback_kwargs, + 'help': help, + 'metavar': metavar, + } + # Set attributes for our option arguments + for key, value in self.option_kwargs.items(): + setattr(self, key, value) + + # Set our custom attributes + self.parse_from_config = parse_from_config + self.comma_separated_list = comma_separated_list + self.normalize_paths = normalize_paths + + self.config_name = None + if parse_from_config: + if not long_option_name: + raise ValueError('When specifying parse_from_config=True, ' + 'a long_option_name must also be specified.') + self.config_name = long_option_name[2:].replace('-', '_') + + self._opt = None + + def __repr__(self): + 
"""Simple representation of an Option class.""" + return ( + 'Option({0}, {1}, action={action}, default={default}, ' + 'dest={dest}, type={type}, callback={callback}, help={help},' + ' callback={callback}, callback_args={callback_args}, ' + 'callback_kwargs={callback_kwargs}, metavar={metavar})' + ).format(self.short_option_name, self.long_option_name, + **self.option_kwargs) + + def _make_dest(self, dest): + if dest: + return dest + + if self.long_option_name: + return self.long_option_name[2:].replace('-', '_') + return self.short_option_name[1] + + def normalize(self, value): + """Normalize the value based on the option configuration.""" + if self.normalize_paths: + # Decide whether to parse a list of paths or a single path + normalize = utils.normalize_path + if self.comma_separated_list: + normalize = utils.normalize_paths + return normalize(value) + elif self.comma_separated_list: + return utils.parse_comma_separated_list(value) + return value + + def to_optparse(self): + """Convert a Flake8 Option to an optparse Option.""" + if self._opt is None: + self._opt = optparse.Option(*self.option_args, + **self.option_kwargs) + return self._opt + + +class OptionManager(object): + """Manage Options and OptionParser while adding post-processing.""" + + def __init__(self, prog=None, version=None, + usage='%prog [options] file file ...'): + """Initialize an instance of an OptionManager. + + :param str prog: + Name of the actual program (e.g., flake8). + :param str version: + Version string for the program. + :param str usage: + Basic usage string used by the OptionParser. 
+ """ + self.parser = optparse.OptionParser(prog=prog, version=version, + usage=usage) + self.config_options_dict = {} + self.options = [] + self.program_name = prog + self.version = version + self.registered_plugins = set() + self.extended_default_ignore = set() + + @staticmethod + def format_plugin(plugin_tuple): + """Convert a plugin tuple into a dictionary mapping name to value.""" + return dict(zip(["entry", "name", "version"], plugin_tuple)) + + def add_option(self, *args, **kwargs): + """Create and register a new option. + + See parameters for :class:`~flake8.options.manager.Option` for + acceptable arguments to this method. + + .. note:: + + ``short_option_name`` and ``long_option_name`` may be specified + positionally as they are with optparse normally. + """ + if len(args) == 1 and args[0].startswith('--'): + args = (None, args[0]) + option = Option(*args, **kwargs) + self.parser.add_option(option.to_optparse()) + self.options.append(option) + if option.parse_from_config: + self.config_options_dict[option.config_name] = option + LOG.debug('Registered option "%s".', option) + + def extend_default_ignore(self, error_codes): + """Extend the default ignore list with the error codes provided. + + :param list error_codes: + List of strings that are the error/warning codes with which to + extend the default ignore list. 
+ """ + LOG.debug('Extending default ignore list with %r', error_codes) + self.extended_default_ignore.update(error_codes) + + def generate_versions(self, format_str='%(name)s: %(version)s'): + """Generate a comma-separated list of versions of plugins.""" + return ', '.join( + format_str % self.format_plugin(plugin) + for plugin in self.registered_plugins + ) + + def update_version_string(self): + """Update the flake8 version string.""" + self.parser.version = (self.version + ' (' + + self.generate_versions() + ')') + + def generate_epilog(self): + """Create an epilog with the version and name of each of plugin.""" + plugin_version_format = '%(name)s(%(entry)s): %(version)s' + self.parser.epilog = 'Installed plugins: ' + self.generate_versions( + plugin_version_format + ) + + def parse_args(self, args=None, values=None): + """Simple proxy to calling the OptionParser's parse_args method.""" + self.generate_epilog() + self.update_version_string() + options, xargs = self.parser.parse_args(args, values) + for option in self.options: + old_value = getattr(options, option.dest) + setattr(options, option.dest, option.normalize(old_value)) + + return options, xargs + + def register_plugin(self, entry_point_name, name, version): + """Register a plugin relying on the OptionManager. + + :param str entry_point_name: + The name of the entry-point loaded with pkg_resources. For + example, if the entry-point looks like: ``C90 = mccabe.Checker`` + then the ``entry_point_name`` would be ``C90``. + :param str name: + The name of the checker itself. This will be the ``name`` + attribute of the class or function loaded from the entry-point. + :param str version: + The version of the checker that we're using. 
+ """ + self.registered_plugins.add((entry_point_name, name, version)) diff --git a/flake8/plugins/__init__.py b/flake8/plugins/__init__.py new file mode 100644 index 0000000..fda6a44 --- /dev/null +++ b/flake8/plugins/__init__.py @@ -0,0 +1 @@ +"""Submodule of built-in plugins and plugin managers.""" diff --git a/flake8/plugins/_trie.py b/flake8/plugins/_trie.py new file mode 100644 index 0000000..4871abb --- /dev/null +++ b/flake8/plugins/_trie.py @@ -0,0 +1,97 @@ +"""Independent implementation of a Trie tree.""" + +__all__ = ('Trie', 'TrieNode') + + +def _iterate_stringlike_objects(string): + for i in range(len(string)): + yield string[i:i + 1] + + +class Trie(object): + """The object that manages the trie nodes.""" + + def __init__(self): + """Initialize an empty trie.""" + self.root = TrieNode(None, None) + + def add(self, path, node_data): + """Add the node data to the path described.""" + node = self.root + for prefix in _iterate_stringlike_objects(path): + child = node.find_prefix(prefix) + if child is None: + child = node.add_child(prefix, []) + node = child + node.data.append(node_data) + + def find(self, path): + """Find a node based on the path provided.""" + node = self.root + for prefix in _iterate_stringlike_objects(path): + child = node.find_prefix(prefix) + if child is None: + return None + node = child + return node + + def traverse(self): + """Traverse this tree. + + This performs a depth-first pre-order traversal of children in this + tree. It returns the results consistently by first sorting the + children based on their prefix and then traversing them in + alphabetical order. 
+ """ + return self.root.traverse() + + +class TrieNode(object): + """The majority of the implementation details of a Trie.""" + + def __init__(self, prefix, data, children=None): + """Initialize a TrieNode with data and children.""" + self.children = children or {} + self.data = data + self.prefix = prefix + + def __repr__(self): + """Generate an easy to read representation of the node.""" + return 'TrieNode(prefix={0}, data={1})'.format( + self.prefix, self.data + ) + + def find_prefix(self, prefix): + """Find the prefix in the children of this node. + + :returns: A child matching the prefix or None. + :rtype: :class:`~TrieNode` or None + """ + return self.children.get(prefix, None) + + def add_child(self, prefix, data, children=None): + """Create and add a new child node. + + :returns: The newly created node + :rtype: :class:`~TrieNode` + """ + new_node = TrieNode(prefix, data, children) + self.children[prefix] = new_node + return new_node + + def traverse(self): + """Traverse children of this node. + + This performs a depth-first pre-order traversal of the remaining + children in this sub-tree. It returns the results consistently by + first sorting the children based on their prefix and then traversing + them in alphabetical order. 
+ """ + if not self.children: + return + + for prefix in sorted(self.children.keys()): + child = self.children[prefix] + yield child + for child in child.traverse(): + yield child diff --git a/flake8/plugins/manager.py b/flake8/plugins/manager.py new file mode 100644 index 0000000..872a4f4 --- /dev/null +++ b/flake8/plugins/manager.py @@ -0,0 +1,354 @@ +"""Plugin loading and management logic and classes.""" +import collections +import logging + +from flake8 import exceptions +from flake8 import utils +from flake8.plugins import notifier + +import pkg_resources + +LOG = logging.getLogger(__name__) + +__all__ = ( + 'Checkers', + 'Listeners', + 'Plugin', + 'PluginManager', + 'ReportFormatters', +) + + +class Plugin(object): + """Wrap an EntryPoint from setuptools and other logic.""" + + def __init__(self, name, entry_point): + """"Initialize our Plugin. + + :param str name: + Name of the entry-point as it was registered with setuptools. + :param entry_point: + EntryPoint returned by setuptools. + :type entry_point: + setuptools.EntryPoint + """ + self.name = name + self.entry_point = entry_point + self._plugin = None + self._parameters = None + + def __repr__(self): + """Provide an easy to read description of the current plugin.""" + return 'Plugin(name="{0}", entry_point="{1}")'.format( + self.name, self.entry_point + ) + + @property + def parameters(self): + """List of arguments that need to be passed to the plugin.""" + if self._parameters is None: + self._parameters = utils.parameters_for(self) + return self._parameters + + @property + def plugin(self): + """The loaded (and cached) plugin associated with the entry-point. + + This property implicitly loads the plugin and then caches it. 
+ """ + self.load_plugin() + return self._plugin + + @property + def version(self): + """Return the version attribute on the plugin.""" + return self.plugin.version + + def execute(self, *args, **kwargs): + r"""Call the plugin with \*args and \*\*kwargs.""" + return self.plugin(*args, **kwargs) # pylint: disable=not-callable + + def _load(self, verify_requirements): + # Avoid relying on hasattr() here. + resolve = getattr(self.entry_point, 'resolve', None) + require = getattr(self.entry_point, 'require', None) + if resolve and require: + if verify_requirements: + LOG.debug('Verifying plugin "%s"\'s requirements.', + self.name) + require() + self._plugin = resolve() + else: + self._plugin = self.entry_point.load( + require=verify_requirements + ) + + def load_plugin(self, verify_requirements=False): + """Retrieve the plugin for this entry-point. + + This loads the plugin, stores it on the instance and then returns it. + It does not reload it after the first time, it merely returns the + cached plugin. + + :param bool verify_requirements: + Whether or not to make setuptools verify that the requirements for + the plugin are satisfied. 
+ :returns: + Nothing + """ + if self._plugin is None: + LOG.info('Loading plugin "%s" from entry-point.', self.name) + try: + self._load(verify_requirements) + except Exception as load_exception: + LOG.exception(load_exception, exc_info=True) + failed_to_load = exceptions.FailedToLoadPlugin( + plugin=self, + exception=load_exception, + ) + LOG.critical(str(failed_to_load)) + raise failed_to_load + + def provide_options(self, optmanager, options, extra_args): + """Pass the parsed options and extra arguments to the plugin.""" + parse_options = getattr(self.plugin, 'parse_options', None) + if parse_options is not None: + LOG.debug('Providing options to plugin "%s".', self.name) + try: + parse_options(optmanager, options, extra_args) + except TypeError: + parse_options(options) + + def register_options(self, optmanager): + """Register the plugin's command-line options on the OptionManager. + + :param optmanager: + Instantiated OptionManager to register options on. + :type optmanager: + flake8.options.manager.OptionManager + :returns: + Nothing + """ + add_options = getattr(self.plugin, 'add_options', None) + if add_options is not None: + LOG.debug( + 'Registering options from plugin "%s" on OptionManager %r', + self.name, optmanager + ) + add_options(optmanager) + optmanager.register_plugin( + entry_point_name=self.name, + name=self.plugin.name, + version=self.plugin.version + ) + + +class PluginManager(object): # pylint: disable=too-few-public-methods + """Find and manage plugins consistently.""" + + def __init__(self, namespace, verify_requirements=False): + """Initialize the manager. + + :param str namespace: + Namespace of the plugins to manage, e.g., 'flake8.extension'. + :param bool verify_requirements: + Whether or not to make setuptools verify that the requirements for + the plugin are satisfied. 
+ """ + self.namespace = namespace + self.verify_requirements = verify_requirements + self.plugins = {} + self.names = [] + self._load_all_plugins() + + def _load_all_plugins(self): + LOG.info('Loading entry-points for "%s".', self.namespace) + for entry_point in pkg_resources.iter_entry_points(self.namespace): + name = entry_point.name + self.plugins[name] = Plugin(name, entry_point) + self.names.append(name) + LOG.debug('Loaded %r for plugin "%s".', self.plugins[name], name) + + def map(self, func, *args, **kwargs): + r"""Call ``func`` with the plugin and \*args and \**kwargs after. + + This yields the return value from ``func`` for each plugin. + + :param collections.Callable func: + Function to call with each plugin. Signature should at least be: + + .. code-block:: python + + def myfunc(plugin): + pass + + Any extra positional or keyword arguments specified with map will + be passed along to this function after the plugin. The plugin + passed is a :class:`~flake8.plugins.manager.Plugin`. + :param args: + Positional arguments to pass to ``func`` after each plugin. + :param kwargs: + Keyword arguments to pass to ``func`` after each plugin. + """ + for name in self.names: + yield func(self.plugins[name], *args, **kwargs) + + +class PluginTypeManager(object): + """Parent class for most of the specific plugin types.""" + + namespace = None + + def __init__(self): + """Initialize the plugin type's manager.""" + self.manager = PluginManager(self.namespace) + self.plugins_loaded = False + + def __contains__(self, name): + """Check if the entry-point name is in this plugin type manager.""" + LOG.debug('Checking for "%s" in plugin type manager.', name) + return name in self.plugins + + def __getitem__(self, name): + """Retrieve a plugin by its name.""" + LOG.debug('Retrieving plugin for "%s".', name) + return self.plugins[name] + + def get(self, name, default=None): + """Retrieve the plugin referred to by ``name`` or return the default. 
+ + :param str name: + Name of the plugin to retrieve. + :param default: + Default value to return. + :returns: + Plugin object referred to by name, if it exists. + :rtype: + :class:`Plugin` + """ + if name in self: + return self[name] + return default + + @property + def names(self): + """Proxy attribute to underlying manager.""" + return self.manager.names + + @property + def plugins(self): + """Proxy attribute to underlying manager.""" + return self.manager.plugins + + @staticmethod + def _generate_call_function(method_name, optmanager, *args, **kwargs): + def generated_function(plugin): + """Function that attempts to call a specific method on a plugin.""" + method = getattr(plugin, method_name, None) + if (method is not None and + isinstance(method, collections.Callable)): + return method(optmanager, *args, **kwargs) + return generated_function + + def load_plugins(self): + """Load all plugins of this type that are managed by this manager.""" + if self.plugins_loaded: + return + + def load_plugin(plugin): + """Call each plugin's load_plugin method.""" + return plugin.load_plugin() + + plugins = list(self.manager.map(load_plugin)) + # Do not set plugins_loaded if we run into an exception + self.plugins_loaded = True + return plugins + + def register_options(self, optmanager): + """Register all of the checkers' options to the OptionManager.""" + self.load_plugins() + call_register_options = self._generate_call_function( + 'register_options', optmanager, + ) + + list(self.manager.map(call_register_options)) + + def provide_options(self, optmanager, options, extra_args): + """Provide parsed options and extra arguments to the plugins.""" + call_provide_options = self._generate_call_function( + 'provide_options', optmanager, options, extra_args, + ) + + list(self.manager.map(call_provide_options)) + + +class NotifierBuilderMixin(object): # pylint: disable=too-few-public-methods + """Mixin class that builds a Notifier from a PluginManager.""" + + def 
build_notifier(self): + """Build a Notifier for our Listeners. + + :returns: + Object to notify our listeners of certain error codes and + warnings. + :rtype: + :class:`~flake8.notifier.Notifier` + """ + notifier_trie = notifier.Notifier() + for name in self.names: + notifier_trie.register_listener(name, self.manager[name]) + return notifier_trie + + +class Checkers(PluginTypeManager): + """All of the checkers registered through entry-ponits.""" + + namespace = 'flake8.extension' + + def checks_expecting(self, argument_name): + """Retrieve checks that expect an argument with the specified name. + + Find all checker plugins that are expecting a specific argument. + """ + for plugin in self.plugins.values(): + if argument_name == plugin.parameters[0]: + yield plugin + + @property + def ast_plugins(self): + """List of plugins that expect the AST tree.""" + plugins = getattr(self, '_ast_plugins', []) + if not plugins: + plugins = list(self.checks_expecting('tree')) + self._ast_plugins = plugins + return plugins + + @property + def logical_line_plugins(self): + """List of plugins that expect the logical lines.""" + plugins = getattr(self, '_logical_line_plugins', []) + if not plugins: + plugins = list(self.checks_expecting('logical_line')) + self._logical_line_plugins = plugins + return plugins + + @property + def physical_line_plugins(self): + """List of plugins that expect the physical lines.""" + plugins = getattr(self, '_physical_line_plugins', []) + if not plugins: + plugins = list(self.checks_expecting('physical_line')) + self._physical_line_plugins = plugins + return plugins + + +class Listeners(PluginTypeManager, NotifierBuilderMixin): + """All of the listeners registered through entry-points.""" + + namespace = 'flake8.listen' + + +class ReportFormatters(PluginTypeManager): + """All of the report formatters registered through entry-points.""" + + namespace = 'flake8.report' diff --git a/flake8/plugins/notifier.py b/flake8/plugins/notifier.py new file mode 
100644 index 0000000..dc255c4 --- /dev/null +++ b/flake8/plugins/notifier.py @@ -0,0 +1,46 @@ +"""Implementation of the class that registers and notifies listeners.""" +from flake8.plugins import _trie + + +class Notifier(object): + """Object that tracks and notifies listener objects.""" + + def __init__(self): + """Initialize an empty notifier object.""" + self.listeners = _trie.Trie() + + def listeners_for(self, error_code): + """Retrieve listeners for an error_code. + + There may be listeners registered for E1, E100, E101, E110, E112, and + E126. To get all the listeners for one of E100, E101, E110, E112, or + E126 you would also need to incorporate the listeners for E1 (since + they're all in the same class). + + Example usage: + + .. code-block:: python + + from flake8 import notifier + + n = notifier.Notifier() + # register listeners + for listener in n.listeners_for('W102'): + listener.notify(...) + """ + path = error_code + while path: + node = self.listeners.find(path) + listeners = getattr(node, 'data', []) + for listener in listeners: + yield listener + path = path[:-1] + + def notify(self, error_code, *args, **kwargs): + """Notify all listeners for the specified error code.""" + for listener in self.listeners_for(error_code): + listener.notify(error_code, *args, **kwargs) + + def register_listener(self, error_code, listener): + """Register a listener for a specific error_code.""" + self.listeners.add(error_code, listener) diff --git a/flake8/plugins/pyflakes.py b/flake8/plugins/pyflakes.py new file mode 100644 index 0000000..7025baa --- /dev/null +++ b/flake8/plugins/pyflakes.py @@ -0,0 +1,140 @@ +"""Plugin built-in to Flake8 to treat pyflakes as a plugin.""" +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +try: + # The 'demandimport' breaks pyflakes and flake8.plugins.pyflakes + from mercurial import demandimport +except ImportError: + pass +else: + demandimport.disable() +import os + +from flake8 import utils + +import pyflakes 
+import pyflakes.checker + + +def patch_pyflakes(): + """Add error codes to Pyflakes messages.""" + codes = dict([line.split()[::-1] for line in ( + 'F401 UnusedImport', + 'F402 ImportShadowedByLoopVar', + 'F403 ImportStarUsed', + 'F404 LateFutureImport', + 'F810 Redefined', + 'F811 RedefinedWhileUnused', + 'F812 RedefinedInListComp', + 'F821 UndefinedName', + 'F822 UndefinedExport', + 'F823 UndefinedLocal', + 'F831 DuplicateArgument', + 'F841 UnusedVariable', + )]) + + for name, obj in vars(pyflakes.messages).items(): + if name[0].isupper() and obj.message: + obj.flake8_msg = '%s %s' % (codes.get(name, 'F999'), obj.message) +patch_pyflakes() + + +class FlakesChecker(pyflakes.checker.Checker): + """Subclass the Pyflakes checker to conform with the flake8 API.""" + + name = 'pyflakes' + version = pyflakes.__version__ + + def __init__(self, tree, filename): + """Initialize the PyFlakes plugin with an AST tree and filename.""" + filename = utils.normalize_paths(filename)[0] + with_doctest = self.with_doctest + included_by = [include for include in self.include_in_doctest + if include != '' and filename.startswith(include)] + if included_by: + with_doctest = True + + for exclude in self.exclude_from_doctest: + if exclude != '' and filename.startswith(exclude): + with_doctest = False + overlaped_by = [include for include in included_by + if include.startswith(exclude)] + + if overlaped_by: + with_doctest = True + + super(FlakesChecker, self).__init__(tree, filename, + withDoctest=with_doctest) + + @classmethod + def add_options(cls, parser): + """Register options for PyFlakes on the Flake8 OptionManager.""" + parser.add_option( + '--builtins', parse_from_config=True, comma_separated_list=True, + help="define more built-ins, comma separated", + ) + parser.add_option( + '--doctests', default=False, action='store_true', + parse_from_config=True, + help="check syntax of the doctests", + ) + parser.add_option( + '--include-in-doctest', default='', + 
dest='include_in_doctest', parse_from_config=True, + comma_separated_list=True, normalize_paths=True, + help='Run doctests only on these files', + type='string', + ) + parser.add_option( + '--exclude-from-doctest', default='', + dest='exclude_from_doctest', parse_from_config=True, + comma_separated_list=True, normalize_paths=True, + help='Skip these files when running doctests', + type='string', + ) + + @classmethod + def parse_options(cls, options): + """Parse option values from Flake8's OptionManager.""" + if options.builtins: + cls.builtIns = cls.builtIns.union(options.builtins) + cls.with_doctest = options.doctests + + included_files = [] + for included_file in options.include_in_doctest: + if included_file == '': + continue + if not included_file.startswith((os.sep, './', '~/')): + included_files.append('./' + included_file) + else: + included_files.append(included_file) + cls.include_in_doctest = utils.normalize_paths(included_files) + + excluded_files = [] + for excluded_file in options.exclude_from_doctest: + if excluded_file == '': + continue + if not excluded_file.startswith((os.sep, './', '~/')): + excluded_files.append('./' + excluded_file) + else: + excluded_files.append(excluded_file) + cls.exclude_from_doctest = utils.normalize_paths(excluded_files) + + inc_exc = set(cls.include_in_doctest).intersection( + cls.exclude_from_doctest + ) + if inc_exc: + raise ValueError('"%s" was specified in both the ' + 'include-in-doctest and exclude-from-doctest ' + 'options. You are not allowed to specify it in ' + 'both for doctesting.' 
% inc_exc) + + def run(self): + """Run the plugin.""" + for message in self.messages: + col = getattr(message, 'col', 0) + yield (message.lineno, + col, + (message.flake8_msg % message.message_args), + message.__class__) diff --git a/flake8/processor.py b/flake8/processor.py new file mode 100644 index 0000000..1dc27a1 --- /dev/null +++ b/flake8/processor.py @@ -0,0 +1,408 @@ +"""Module containing our file processor that tokenizes a file for checks.""" +import contextlib +import io +import re +import sys +import tokenize + +import flake8 +from flake8 import defaults +from flake8 import exceptions +from flake8 import utils + +PyCF_ONLY_AST = 1024 +NEWLINE = frozenset([tokenize.NL, tokenize.NEWLINE]) +# Work around Python < 2.6 behaviour, which does not generate NL after +# a comment which is on a line by itself. +COMMENT_WITH_NL = tokenize.generate_tokens(['#\n'].pop).send(None)[1] == '#\n' + +SKIP_TOKENS = frozenset([tokenize.NL, tokenize.NEWLINE, tokenize.INDENT, + tokenize.DEDENT]) + + +class FileProcessor(object): + """Processes a file and holdes state. + + This processes a file by generating tokens, logical and physical lines, + and AST trees. This also provides a way of passing state about the file + to checks expecting that state. Any public attribute on this object can + be requested by a plugin. The known public attributes are: + + - blank_before + - blank_lines + - indect_char + - indent_level + - line_number + - logical_line + - max_line_length + - multiline + - noqa + - previous_indent_level + - previous_logical + - tokens + - total_lines + - verbose + """ + + NOQA_FILE = re.compile(r'\s*# flake8[:=]\s*noqa', re.I) + + def __init__(self, filename, options): + """Initialice our file processor. 
+ + :param str filename: + Name of the file to process + """ + self.filename = filename + self.lines = self.read_lines() + self.strip_utf_bom() + self.options = options + + # Defaults for public attributes + #: Number of preceding blank lines + self.blank_before = 0 + #: Number of blank lines + self.blank_lines = 0 + #: Checker states for each plugin? + self._checker_states = {} + #: Current checker state + self.checker_state = None + #: User provided option for hang closing + self.hang_closing = options.hang_closing + #: Character used for indentation + self.indent_char = None + #: Current level of indentation + self.indent_level = 0 + #: Line number in the file + self.line_number = 0 + #: Current logical line + self.logical_line = '' + #: Maximum line length as configured by the user + self.max_line_length = options.max_line_length + #: Whether the current physical line is multiline + self.multiline = False + #: Whether or not we're observing NoQA + self.noqa = False + #: Previous level of indentation + self.previous_indent_level = 0 + #: Previous logical line + self.previous_logical = '' + #: Current set of tokens + self.tokens = [] + #: Total number of lines in the file + self.total_lines = len(self.lines) + #: Verbosity level of Flake8 + self.verbose = options.verbose + + @contextlib.contextmanager + def inside_multiline(self, line_number): + """Context-manager to toggle the multiline attribute.""" + self.line_number = line_number + self.multiline = True + yield + self.multiline = False + + def reset_blank_before(self): + """Reset the blank_before attribute to zero.""" + self.blank_before = 0 + + def delete_first_token(self): + """Delete the first token in the list of tokens.""" + del self.tokens[0] + + def visited_new_blank_line(self): + """Note that we visited a new blank line.""" + self.blank_lines += 1 + + def update_state(self, mapping): + """Update the indent level based on the logical line mapping.""" + (start_row, start_col) = mapping[0][1] + 
start_line = self.lines[start_row - 1] + self.indent_level = expand_indent(start_line[:start_col]) + if self.blank_before < self.blank_lines: + self.blank_before = self.blank_lines + + def update_checker_state_for(self, plugin): + """Update the checker_state attribute for the plugin.""" + if 'checker_state' in plugin.parameters: + self.checker_state = self._checker_states.setdefault( + plugin.name, {} + ) + + def next_logical_line(self): + """Record the previous logical line. + + This also resets the tokens list and the blank_lines count. + """ + if self.logical_line: + self.previous_indent_level = self.indent_level + self.previous_logical = self.logical_line + self.blank_lines = 0 + self.tokens = [] + + def build_logical_line_tokens(self): + """Build the mapping, comments, and logical line lists.""" + logical = [] + comments = [] + length = 0 + previous_row = previous_column = mapping = None + for token_type, text, start, end, line in self.tokens: + if token_type in SKIP_TOKENS: + continue + if not mapping: + mapping = [(0, start)] + if token_type == tokenize.COMMENT: + comments.append(text) + continue + if token_type == tokenize.STRING: + text = mutate_string(text) + if previous_row: + (start_row, start_column) = start + if previous_row != start_row: + row_index = previous_row - 1 + column_index = previous_column - 1 + previous_text = self.lines[row_index][column_index] + if (previous_text == ',' or + (previous_text not in '{[(' and + text not in '}])')): + text = ' ' + text + elif previous_column != start_column: + text = line[previous_column:start_column] + text + logical.append(text) + length += len(text) + mapping.append((length, end)) + (previous_row, previous_column) = end + return comments, logical, mapping + + def build_ast(self): + """Build an abstract syntax tree from the list of lines.""" + return compile(''.join(self.lines), '', 'exec', PyCF_ONLY_AST) + + def build_logical_line(self): + """Build a logical line from the current tokens list.""" + 
comments, logical, mapping_list = self.build_logical_line_tokens() + self.logical_line = ''.join(logical) + return ''.join(comments), self.logical_line, mapping_list + + def split_line(self, token): + """Split a physical line's line based on new-lines. + + This also auto-increments the line number for the caller. + """ + for line in token[1].split('\n')[:-1]: + yield line + self.line_number += 1 + + def keyword_arguments_for(self, parameters, arguments=None): + """Generate the keyword arguments for a list of parameters.""" + if arguments is None: + arguments = {} + for param in parameters: + if param not in arguments: + arguments[param] = getattr(self, param) + return arguments + + def check_physical_error(self, error_code, line): + """Update attributes based on error code and line.""" + if error_code == 'E101': + self.indent_char = line[0] + + def generate_tokens(self): + """Tokenize the file and yield the tokens. + + :raises flake8.exceptions.InvalidSyntax: + If a :class:`tokenize.TokenError` is raised while generating + tokens. + """ + try: + for token in tokenize.generate_tokens(self.next_line): + if token[2][0] > self.total_lines: + break + self.tokens.append(token) + yield token + # NOTE(sigmavirus24): pycodestyle was catching both a SyntaxError + # and a tokenize.TokenError. In looking a the source on Python 2 and + # Python 3, the SyntaxError should never arise from generate_tokens. + # If we were using tokenize.tokenize, we would have to catch that. Of + # course, I'm going to be unsurprised to be proven wrong at a later + # date. 
+ except tokenize.TokenError as exc: + raise exceptions.InvalidSyntax(exc.message, exception=exc) + + def line_for(self, line_number): + """Retrieve the physical line at the specified line number.""" + return self.lines[line_number - 1] + + def next_line(self): + """Get the next line from the list.""" + if self.line_number >= self.total_lines: + return '' + line = self.lines[self.line_number] + self.line_number += 1 + if self.indent_char is None and line[:1] in defaults.WHITESPACE: + self.indent_char = line[0] + return line + + def read_lines(self): + # type: () -> List[str] + """Read the lines for this file checker.""" + if self.filename is None or self.filename == '-': + self.filename = 'stdin' + return self.read_lines_from_stdin() + return self.read_lines_from_filename() + + def _readlines_py2(self): + # type: () -> List[str] + with open(self.filename, 'rU') as fd: + return fd.readlines() + + def _readlines_py3(self): + # type: () -> List[str] + try: + with open(self.filename, 'rb') as fd: + (coding, lines) = tokenize.detect_encoding(fd.readline) + textfd = io.TextIOWrapper(fd, coding, line_buffering=True) + return ([l.decode(coding) for l in lines] + + textfd.readlines()) + except (LookupError, SyntaxError, UnicodeError): + # If we can't detect the codec with tokenize.detect_encoding, or + # the detected encoding is incorrect, just fallback to latin-1. 
+ with open(self.filename, encoding='latin-1') as fd: + return fd.readlines() + + def read_lines_from_filename(self): + # type: () -> List[str] + """Read the lines for a file.""" + if (2, 6) <= sys.version_info < (3, 0): + readlines = self._readlines_py2 + elif (3, 0) <= sys.version_info < (4, 0): + readlines = self._readlines_py3 + return readlines() + + def read_lines_from_stdin(self): + # type: () -> List[str] + """Read the lines from standard in.""" + return utils.stdin_get_value().splitlines(True) + + def should_ignore_file(self): + # type: () -> bool + """Check if ``# flake8: noqa`` is in the file to be ignored. + + :returns: + True if a line matches :attr:`FileProcessor.NOQA_FILE`, + otherwise False + :rtype: + bool + """ + ignore_file = self.NOQA_FILE.search + return any(ignore_file(line) for line in self.lines) + + def strip_utf_bom(self): + # type: () -> NoneType + """Strip the UTF bom from the lines of the file.""" + if not self.lines: + # If we have nothing to analyze quit early + return + + first_byte = ord(self.lines[0][0]) + if first_byte not in (0xEF, 0xFEFF): + return + + # If the first byte of the file is a UTF-8 BOM, strip it + if first_byte == 0xFEFF: + self.lines[0] = self.lines[0][1:] + elif self.lines[0][:3] == '\xEF\xBB\xBF': + self.lines[0] = self.lines[0][3:] + + +def is_eol_token(token): + """Check if the token is an end-of-line token.""" + return token[0] in NEWLINE or token[4][token[3][1]:].lstrip() == '\\\n' + +if COMMENT_WITH_NL: # If on Python 2.6 + def is_eol_token(token, _is_eol_token=is_eol_token): + """Check if the token is an end-of-line token.""" + return (_is_eol_token(token) or + (token[0] == tokenize.COMMENT and token[1] == token[4])) + + +def is_multiline_string(token): + """Check if this is a multiline string.""" + return token[0] == tokenize.STRING and '\n' in token[1] + + +def token_is_newline(token): + """Check if the token type is a newline token type.""" + return token[0] in NEWLINE + + +def token_is_comment(token): + 
"""Check if the token type is a comment.""" + return COMMENT_WITH_NL and token[0] == tokenize.COMMENT + + +def count_parentheses(current_parentheses_count, token_text): + """Count the number of parentheses.""" + current_parentheses_count = current_parentheses_count or 0 + if token_text in '([{': + return current_parentheses_count + 1 + elif token_text in '}])': + return current_parentheses_count - 1 + return current_parentheses_count + + +def log_token(log, token): + """Log a token to a provided logging object.""" + if token[2][0] == token[3][0]: + pos = '[%s:%s]' % (token[2][1] or '', token[3][1]) + else: + pos = 'l.%s' % token[3][0] + log.log(flake8._EXTRA_VERBOSE, 'l.%s\t%s\t%s\t%r' % + (token[2][0], pos, tokenize.tok_name[token[0]], + token[1])) + + +def expand_indent(line): + r"""Return the amount of indentation. + + Tabs are expanded to the next multiple of 8. + + >>> expand_indent(' ') + 4 + >>> expand_indent('\t') + 8 + >>> expand_indent(' \t') + 8 + >>> expand_indent(' \t') + 16 + """ + if '\t' not in line: + return len(line) - len(line.lstrip()) + result = 0 + for char in line: + if char == '\t': + result = result // 8 * 8 + 8 + elif char == ' ': + result += 1 + else: + break + return result + + +def mutate_string(text): + """Replace contents with 'xxx' to prevent syntax matching. + + >>> mute_string('"abc"') + '"xxx"' + >>> mute_string("'''abc'''") + "'''xxx'''" + >>> mute_string("r'abc'") + "r'xxx'" + """ + # String modifiers (e.g. 
u or r) + start = text.index(text[-1]) + 1 + end = len(text) - 1 + # Triple quotes + if text[-3:] in ('"""', "'''"): + start += 2 + end -= 2 + return text[:start] + 'x' * (end - start) + text[end:] diff --git a/flake8/style_guide.py b/flake8/style_guide.py new file mode 100644 index 0000000..2c18c9d --- /dev/null +++ b/flake8/style_guide.py @@ -0,0 +1,209 @@ +"""Implementation of the StyleGuide used by Flake8.""" +import collections +import enum +import linecache +import logging +import re + +from flake8 import utils + +__all__ = ( + 'StyleGuide', +) + +LOG = logging.getLogger(__name__) + + +# TODO(sigmavirus24): Determine if we need to use enum/enum34 +class Selected(enum.Enum): + """Enum representing an explicitly or implicitly selected code.""" + + Explicitly = 'explicitly selected' + Implicitly = 'implicitly selected' + + +class Ignored(enum.Enum): + """Enum representing an explicitly or implicitly ignored code.""" + + Explicitly = 'explicitly ignored' + Implicitly = 'implicitly ignored' + + +class Decision(enum.Enum): + """Enum representing whether a code should be ignored or selected.""" + + Ignored = 'ignored error' + Selected = 'selected error' + + +Error = collections.namedtuple('Error', ['code', + 'filename', + 'line_number', + 'column_number', + 'text']) + + +class StyleGuide(object): + """Manage a Flake8 user's style guide.""" + + NOQA_INLINE_REGEXP = re.compile( + # We're looking for items that look like this: + # ``# noqa`` + # ``# noqa: E123`` + # ``# noqa: E123,W451,F921`` + # ``# NoQA: E123,W451,F921`` + # ``# NOQA: E123,W451,F921`` + # We do not care about the ``: `` that follows ``noqa`` + # We do not care about the casing of ``noqa`` + # We want a comma-separated list of errors + '# noqa(?:: )?(?P[A-Z0-9,]+)?$', + re.IGNORECASE + ) + + def __init__(self, options, listener_trie, formatter): + """Initialize our StyleGuide. + + .. todo:: Add parameter documentation. 
+ """ + self.options = options + self.listener = listener_trie + self.formatter = formatter + self._selected = tuple(options.select) + self._ignored = tuple(options.ignore) + self._decision_cache = {} + + def is_user_selected(self, code): + # type: (str) -> Union[Selected, Ignored] + """Determine if the code has been selected by the user. + + :param str code: + The code for the check that has been run. + :returns: + Selected.Implicitly if the selected list is empty, + Selected.Explicitly if the selected list is not empty and a match + was found, + Ignored.Implicitly if the selected list is not empty but no match + was found. + """ + if not self._selected: + return Selected.Implicitly + + if code.startswith(self._selected): + return Selected.Explicitly + + return Ignored.Implicitly + + def is_user_ignored(self, code): + # type: (str) -> Union[Selected, Ignored] + """Determine if the code has been ignored by the user. + + :param str code: + The code for the check that has been run. + :returns: + Selected.Implicitly if the ignored list is empty, + Ignored.Explicitly if the ignored list is not empty and a match was + found, + Selected.Implicitly if the ignored list is not empty but no match + was found. + """ + if self._ignored and code.startswith(self._ignored): + return Ignored.Explicitly + + return Selected.Implicitly + + def _decision_for(self, code): + # type: (Error) -> Decision + startswith = code.startswith + selected = sorted([s for s in self._selected if startswith(s)])[0] + ignored = sorted([i for i in self._ignored if startswith(i)])[0] + + if selected.startswith(ignored): + return Decision.Selected + return Decision.Ignored + + def should_report_error(self, code): + # type: (str) -> Decision + """Determine if the error code should be reported or ignored. + + This method only cares about the select and ignore rules as specified + by the user in their configuration files and command-line flags. 
+ + This method does not look at whether the specific line is being + ignored in the file itself. + + :param str code: + The code for the check that has been run. + """ + decision = self._decision_cache.get(code) + if decision is None: + LOG.debug('Deciding if "%s" should be reported', code) + selected = self.is_user_selected(code) + ignored = self.is_user_ignored(code) + LOG.debug('The user configured "%s" to be "%s", "%s"', + code, selected, ignored) + + if ((selected is Selected.Explicitly or + selected is Selected.Implicitly) and + ignored is Selected.Implicitly): + decision = Decision.Selected + elif (selected is Selected.Explicitly and + ignored is Ignored.Explicitly): + decision = self._decision_for(code) + elif (selected is Ignored.Implicitly or + ignored is Ignored.Explicitly): + decision = Decision.Ignored # pylint: disable=R0204 + + self._decision_cache[code] = decision + LOG.debug('"%s" will be "%s"', code, decision) + return decision + + def is_inline_ignored(self, error, physical_line=None): + # type: (Error) -> bool + """Determine if an comment has been added to ignore this line.""" + # TODO(sigmavirus24): Determine how to handle stdin with linecache + if self.options.disable_noqa: + return False + + if physical_line is None: + physical_line = linecache.getline(error.filename, + error.line_number) + noqa_match = self.NOQA_INLINE_REGEXP.search(physical_line) + if noqa_match is None: + LOG.debug('%r is not inline ignored', error) + return False + + codes_str = noqa_match.groupdict()['codes'] + if codes_str is None: + LOG.debug('%r is ignored by a blanket ``# noqa``', error) + return True + + codes = set(utils.parse_comma_separated_list(codes_str)) + if error.code in codes or error.code.startswith(tuple(codes)): + LOG.debug('%r is ignored specifically inline with ``# noqa: %s``', + error, codes_str) + return True + + LOG.debug('%r is not ignored inline with ``# noqa: %s``', + error, codes_str) + return False + + def handle_error(self, code, filename, 
line_number, column_number, text, + physical_line=None): + # type: (str, str, int, int, str) -> NoneType + """Handle an error reported by a check.""" + error = Error(code, filename, line_number, column_number, text) + if (self.should_report_error(error.code) is Decision.Selected and + self.is_inline_ignored(error, physical_line) is False): + self.formatter.handle(error) + self.listener.notify(error.code, error) + +# Should separate style guide logic from code that runs checks +# StyleGuide should manage select/ignore logic as well as include/exclude +# logic. See also https://github.com/PyCQA/pep8/pull/433 + +# StyleGuide shoud dispatch check execution in a way that can use +# multiprocessing but also retry in serial. See also: +# https://gitlab.com/pycqa/flake8/issues/74 + +# StyleGuide should interface with Reporter and aggregate errors/notify +# listeners diff --git a/flake8/utils.py b/flake8/utils.py new file mode 100644 index 0000000..1ceb76f --- /dev/null +++ b/flake8/utils.py @@ -0,0 +1,185 @@ +"""Utility methods for flake8.""" +import fnmatch as _fnmatch +import inspect +import io +import os +import sys + + +def parse_comma_separated_list(value): + # type: (Union[Sequence[str], str]) -> List[str] + """Parse a comma-separated list. + + :param value: + String or list of strings to be parsed and normalized. + :returns: + List of values with whitespace stripped. + :rtype: + list + """ + if not value: + return [] + + if not isinstance(value, (list, tuple)): + value = value.split(',') + + return [item.strip() for item in value] + + +def normalize_paths(paths, parent=os.curdir): + # type: (Union[Sequence[str], str], str) -> List[str] + """Parse a comma-separated list of paths. + + :returns: + The normalized paths. + :rtype: + [str] + """ + return [normalize_path(p, parent) + for p in parse_comma_separated_list(paths)] + + +def normalize_path(path, parent=os.curdir): + # type: (str, str) -> str + """Normalize a single-path. + + :returns: + The normalized path. 
+ :rtype: + str + """ + if '/' in path: + path = os.path.abspath(os.path.join(parent, path)) + return path.rstrip('/') + + +def stdin_get_value(): + # type: () -> str + """Get and cache it so plugins can use it.""" + cached_value = getattr(stdin_get_value, 'cached_stdin', None) + if cached_value is None: + stdin_value = sys.stdin.read() + if sys.version_info < (3, 0): + cached_type = io.BytesIO + else: + cached_type = io.StringIO + stdin_get_value.cached_stdin = cached_type(stdin_value) + return cached_value.getvalue() + + +def is_windows(): + # type: () -> bool + """Determine if we're running on Windows. + + :returns: + True if running on Windows, otherwise False + :rtype: + bool + """ + return os.name == 'nt' + + +def is_using_stdin(paths): + # type: (List[str]) -> bool + """Determine if we're going to read from stdin. + + :param list paths: + The paths that we're going to check. + :returns: + True if stdin (-) is in the path, otherwise False + :rtype: + bool + """ + return '-' in paths + + +def _default_predicate(*args): + return False + + +def filenames_from(arg, predicate=None): + # type: (str, callable) -> Generator + """Generate filenames from an argument. + + :param str arg: + Parameter from the command-line. + :param callable predicate: + Predicate to use to filter out filenames. If the predicate + returns ``True`` we will exclude the filename, otherwise we + will yield it. By default, we include every filename + generated. + :returns: + Generator of paths + """ + if predicate is None: + predicate = _default_predicate + if os.path.isdir(arg): + for root, sub_directories, files in os.walk(arg): + for filename in files: + joined = os.path.join(root, filename) + if predicate(joined): + continue + yield joined + # NOTE(sigmavirus24): os.walk() will skip a directory if you + # remove it from the list of sub-directories. 
+ for directory in sub_directories: + if predicate(directory): + sub_directories.remove(directory) + else: + yield arg + + +def fnmatch(filename, patterns, default=True): + # type: (str, List[str], bool) -> bool + """Wrap :func:`fnmatch.fnmatch` to add some functionality. + + :param str filename: + Name of the file we're trying to match. + :param list patterns: + Patterns we're using to try to match the filename. + :param bool default: + The default value if patterns is empty + :returns: + True if a pattern matches the filename, False if it doesn't. + ``default`` if patterns is empty. + """ + if not patterns: + return default + return any(_fnmatch.fnmatch(filename, pattern) for pattern in patterns) + + +def parameters_for(plugin): + # type: (flake8.plugins.manager.Plugin) -> List[str] + """Return the parameters for the plugin. + + This will inspect the plugin and return either the function parameters + if the plugin is a function or the parameters for ``__init__`` after + ``self`` if the plugin is a class. + + :param plugin: + The internal plugin object. + :type plugin: + flake8.plugins.manager.Plugin + :returns: + Parameters to the plugin. 
+ :rtype: + list(str) + """ + func = plugin.plugin + is_class = not inspect.isfunction(func) + if is_class: # The plugin is a class + func = plugin.plugin.__init__ + + if sys.version_info < (3, 3): + parameters = inspect.getargspec(func)[0] + else: + parameters = [ + parameter.name + for parameter in inspect.signature(func).parameters.values() + if parameter.kind == parameter.POSITIONAL_OR_KEYWORD + ] + + if is_class: + parameters.remove('self') + + return parameters diff --git a/setup.cfg b/setup.cfg index 89f3026..e96761b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,9 @@ -[wheel] -universal = 1 +[aliases] +test=pytest -; Publish a universal wheel to PyPI: -; $ pip install -U pip wheel -; $ python setup.py sdist bdist_wheel upload +[bdist_wheel] +universal=1 + +[metadata] +requires-dist = + enum34; python_version<"3.4" diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..f0b9b83 --- /dev/null +++ b/setup.py @@ -0,0 +1,129 @@ +"""Packaging logic for Flake8.""" +# -*- coding: utf-8 -*- +from __future__ import with_statement + +import sys + +import setuptools + +import flake8 # noqa + +try: + # Work around a traceback with Nose on Python 2.6 + # http://bugs.python.org/issue15881#msg170215 + __import__('multiprocessing') +except ImportError: + pass + +try: + # Use https://docs.python.org/3/library/unittest.mock.html + from unittest import mock +except ImportError: + # < Python 3.3 + mock = None + + +tests_require = ['pytest'] +if mock is None: + tests_require.append('mock') + + +requires = [ + "pyflakes >= 0.8.1, < 1.1", + "pep8 >= 1.5.7, != 1.6.0, != 1.6.1, != 1.6.2", + # "mccabe >= 0.2.1, < 0.4", +] + +if sys.version_info < (3, 4): + requires.append("enum34") + + +def get_long_description(): + """Generate a long description from the README and CHANGES files.""" + descr = [] + for fname in ('README.rst', 'CHANGES.rst'): + with open(fname) as f: + descr.append(f.read()) + return '\n\n'.join(descr) + + +setuptools.setup( + name="flake8", + 
license="MIT", + version=flake8.__version__, + description="the modular source code checker: pep8, pyflakes and co", + # long_description=get_long_description(), + author="Tarek Ziade", + author_email="tarek@ziade.org", + maintainer="Ian Cordasco", + maintainer_email="graffatcolmingov@gmail.com", + url="https://gitlab.com/pycqa/flake8", + packages=[ + "flake8", + "flake8.formatting", + "flake8.main", + "flake8.options", + "flake8.plugins", + ], + install_requires=requires, + entry_points={ + 'distutils.commands': ['flake8 = flake8.main:Flake8Command'], + 'console_scripts': ['flake8 = flake8.main.cli:main'], + 'flake8.extension': [ + 'F = flake8.plugins.pyflakes:FlakesChecker', + # PEP-0008 checks provied by PyCQA/pycodestyle + 'pep8.tabs_or_spaces = pep8:tabs_or_spaces', + 'pep8.tabs_obsolete = pep8:tabs_obsolete', + 'pep8.trailing_whitespace = pep8:trailing_whitespace', + 'pep8.trailing_blank_lines = pep8:trailing_blank_lines', + 'pep8.maximum_line_length = pep8:maximum_line_length', + 'pep8.blank_lines = pep8:blank_lines', + 'pep8.extraneous_whitespace = pep8:extraneous_whitespace', + ('pep8.whitespace_around_keywords = ' + 'pep8:whitespace_around_keywords'), + 'pep8.missing_whitespace = pep8:missing_whitespace', + 'pep8.indentation = pep8:indentation', + 'pep8.continued_indentation = pep8:continued_indentation', + ('pep8.whitespace_before_parameters = ' + 'pep8:whitespace_before_parameters'), + ('pep8.whitespace_around_operator = ' + 'pep8:whitespace_around_operator'), + ('pep8.missing_whitespace_around_operator = ' + 'pep8:missing_whitespace_around_operator'), + 'pep8.whitespace_around_comma = pep8:whitespace_around_comma', + ('pep8.whitespace_around_named_parameter_equals = ' + 'pep8:whitespace_around_named_parameter_equals'), + 'pep8.whitespace_before_comment = pep8:whitespace_before_comment', + 'pep8.imports_on_separate_lines = pep8:imports_on_separate_lines', + ('pep8.module_imports_on_top_of_file = ' + 'pep8:module_imports_on_top_of_file'), + 
'pep8.compound_statements = pep8:compound_statements', + 'pep8.explicit_line_join = pep8:explicit_line_join', + ('pep8.break_around_binary_operator = ' + 'pep8:break_around_binary_operator'), + 'pep8.comparison_to_singleton = pep8:comparison_to_singleton', + 'pep8.comparison_negative = pep8:comparison_negative', + 'pep8.comparison_type = pep8:comparison_type', + 'pep8.python_3000_has_key = pep8:python_3000_has_key', + 'pep8.python_3000_raise_comma = pep8:python_3000_raise_comma', + 'pep8.python_3000_not_equal = pep8:python_3000_not_equal', + 'pep8.python_3000_backticks = pep8:python_3000_backticks', + ], + 'flake8.report': [ + 'default = flake8.formatting.default:Default', + 'pylint = flake8.formatting.default:Pylint', + ], + }, + classifiers=[ + "Environment :: Console", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 3", + "Topic :: Software Development :: Libraries :: Python Modules", + "Topic :: Software Development :: Quality Assurance", + ], + tests_require=tests_require, + setup_requires=['pytest-runner'], +) diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..9bf4f95 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,6 @@ +"""Test configuration for py.test.""" +import sys + +import flake8 + +flake8.configure_logging(2, 'test-logs-%s.%s.log' % sys.version_info[0:2]) diff --git a/tests/fixtures/config_files/README.rst b/tests/fixtures/config_files/README.rst new file mode 100644 index 0000000..b00adad --- /dev/null +++ b/tests/fixtures/config_files/README.rst @@ -0,0 +1,38 @@ +About this directory +==================== + +The files in this directory are test fixtures for unit and integration tests. +Their purpose is described below. Please note the list of file names that can +not be created as they are already used by tests. 
+
+New fixtures are preferred over updating existing fixtures unless existing
+tests will fail.
+
+Files that should not be created
+--------------------------------
+
+- ``tests/fixtures/config_files/missing.ini``
+
+Purposes of existing fixtures
+-----------------------------
+
+``tests/fixtures/config_files/cli-specified.ini``
+
+    This should only be used when providing config file(s) specified by the
+    user on the command-line.
+
+``tests/fixtures/config_files/local-config.ini``
+
+    This should be used when providing config files that would have been found
+    by looking for config files in the current working project directory.
+
+
+``tests/fixtures/config_files/no-flake8-section.ini``
+
+    This should be used when parsing an ini file without a ``[flake8]``
+    section.
+
+``tests/fixtures/config_files/user-config.ini``
+
+    This is an example configuration file that would be found in the user's
+    home directory (or XDG Configuration Directory).
diff --git a/tests/fixtures/config_files/cli-specified.ini b/tests/fixtures/config_files/cli-specified.ini
new file mode 100644
index 0000000..753604a
--- /dev/null
+++ b/tests/fixtures/config_files/cli-specified.ini
@@ -0,0 +1,9 @@
+[flake8]
+ignore =
+    E123,
+    W234,
+    E111
+exclude =
+    foo/,
+    bar/,
+    bogus/
diff --git a/tests/fixtures/config_files/local-config.ini b/tests/fixtures/config_files/local-config.ini
new file mode 100644
index 0000000..348751a
--- /dev/null
+++ b/tests/fixtures/config_files/local-config.ini
@@ -0,0 +1,3 @@
+[flake8]
+exclude = docs/
+select = E,W,F
diff --git a/tests/fixtures/config_files/no-flake8-section.ini b/tests/fixtures/config_files/no-flake8-section.ini
new file mode 100644
index 0000000..a85b709
--- /dev/null
+++ b/tests/fixtures/config_files/no-flake8-section.ini
@@ -0,0 +1,20 @@
+[tox]
+minversion=2.3.1
+envlist = py26,py27,py32,py33,py34,py35,flake8
+
+[testenv]
+deps =
+    mock
+    pytest
+commands =
+    py.test {posargs}
+
+[testenv:flake8]
+skipsdist = true
+skip_install = true
+use_develop = false +deps = + flake8 + flake8-docstrings +commands = + flake8 diff --git a/tests/fixtures/config_files/user-config.ini b/tests/fixtures/config_files/user-config.ini new file mode 100644 index 0000000..b06c24f --- /dev/null +++ b/tests/fixtures/config_files/user-config.ini @@ -0,0 +1,5 @@ +[flake8] +exclude = + tests/fixtures/, + docs/ +ignore = D203 diff --git a/tests/integration/test_aggregator.py b/tests/integration/test_aggregator.py new file mode 100644 index 0000000..2186c35 --- /dev/null +++ b/tests/integration/test_aggregator.py @@ -0,0 +1,48 @@ +"""Test aggregation of config files and command-line options.""" +import os + +from flake8.main import cli +from flake8.options import aggregator +from flake8.options import manager + +import pytest + +CLI_SPECIFIED_CONFIG = 'tests/fixtures/config_files/cli-specified.ini' + + +@pytest.fixture +def optmanager(): + """Create a new OptionManager.""" + option_manager = manager.OptionManager( + prog='flake8', + version='3.0.0', + ) + cli.register_default_options(option_manager) + return option_manager + + +def test_aggregate_options_with_config(optmanager): + """Verify we aggregate options and config values appropriately.""" + arguments = ['flake8', '--config', CLI_SPECIFIED_CONFIG, '--select', + 'E11,E34,E402,W,F', '--exclude', 'tests/*'] + options, args = aggregator.aggregate_options(optmanager, arguments) + + assert options.config == CLI_SPECIFIED_CONFIG + assert options.select == ['E11', 'E34', 'E402', 'W', 'F'] + assert options.ignore == ['E123', 'W234', 'E111'] + assert options.exclude == [os.path.abspath('tests/*')] + + +def test_aggregate_options_when_isolated(optmanager): + """Verify we aggregate options and config values appropriately.""" + arguments = ['flake8', '--isolated', '--select', 'E11,E34,E402,W,F', + '--exclude', 'tests/*'] + optmanager.extend_default_ignore(['E8']) + options, args = aggregator.aggregate_options(optmanager, arguments) + + assert options.isolated is True + assert 
options.select == ['E11', 'E34', 'E402', 'W', 'F'] + assert sorted(options.ignore) == [ + 'E121', 'E123', 'E126', 'E226', 'E24', 'E704', 'E8', + ] + assert options.exclude == [os.path.abspath('tests/*')] diff --git a/tests/unit/test_config_file_finder.py b/tests/unit/test_config_file_finder.py new file mode 100644 index 0000000..58009f7 --- /dev/null +++ b/tests/unit/test_config_file_finder.py @@ -0,0 +1,121 @@ +"""Tests for the ConfigFileFinder.""" +try: + import ConfigParser as configparser +except ImportError: + import configparser + +import os +import sys + +from flake8.options import config + +import mock + +import pytest + +CLI_SPECIFIED_FILEPATH = 'tests/fixtures/config_files/cli-specified.ini' + + +def test_uses_default_args(): + """Show that we default the args value.""" + finder = config.ConfigFileFinder('flake8', None, []) + assert finder.parent == os.path.abspath('.') + + +@pytest.mark.parametrize('platform,is_windows', [ + ('win32', True), + ('linux', False), + ('darwin', False), +]) +def test_windows_detection(platform, is_windows): + """Verify we detect Windows to the best of our knowledge.""" + with mock.patch.object(sys, 'platform', platform): + finder = config.ConfigFileFinder('flake8', None, []) + assert finder.is_windows is is_windows + + +def test_cli_config(): + """Verify opening and reading the file specified via the cli.""" + cli_filepath = CLI_SPECIFIED_FILEPATH + finder = config.ConfigFileFinder('flake8', None, []) + + parsed_config = finder.cli_config(cli_filepath) + assert parsed_config.has_section('flake8') + + +@pytest.mark.parametrize('args,expected', [ + # No arguments, common prefix of abspath('.') + ([], + [os.path.abspath('setup.cfg'), + os.path.abspath('tox.ini'), + os.path.abspath('.flake8')]), + # Common prefix of "flake8/" + (['flake8/options', 'flake8/'], + [os.path.abspath('flake8/setup.cfg'), + os.path.abspath('flake8/tox.ini'), + os.path.abspath('flake8/.flake8'), + os.path.abspath('setup.cfg'), + 
os.path.abspath('tox.ini'), + os.path.abspath('.flake8')]), + # Common prefix of "flake8/options" + (['flake8/options', 'flake8/options/sub'], + [os.path.abspath('flake8/options/setup.cfg'), + os.path.abspath('flake8/options/tox.ini'), + os.path.abspath('flake8/options/.flake8'), + os.path.abspath('flake8/setup.cfg'), + os.path.abspath('flake8/tox.ini'), + os.path.abspath('flake8/.flake8'), + os.path.abspath('setup.cfg'), + os.path.abspath('tox.ini'), + os.path.abspath('.flake8')]), +]) +def test_generate_possible_local_files(args, expected): + """Verify generation of all possible config paths.""" + finder = config.ConfigFileFinder('flake8', args, []) + + assert (list(finder.generate_possible_local_files()) == + expected) + + +@pytest.mark.parametrize('args,extra_config_files,expected', [ + # No arguments, common prefix of abspath('.') + ([], + [], + [os.path.abspath('setup.cfg'), + os.path.abspath('tox.ini')]), + # Common prefix of "flake8/" + (['flake8/options', 'flake8/'], + [], + [os.path.abspath('setup.cfg'), + os.path.abspath('tox.ini')]), + # Common prefix of "flake8/options" + (['flake8/options', 'flake8/options/sub'], + [], + [os.path.abspath('setup.cfg'), + os.path.abspath('tox.ini')]), + # Common prefix of "flake8/" with extra config files specified + (['flake8/'], + [CLI_SPECIFIED_FILEPATH], + [os.path.abspath('setup.cfg'), + os.path.abspath('tox.ini'), + os.path.abspath(CLI_SPECIFIED_FILEPATH)]), + # Common prefix of "flake8/" with missing extra config files specified + (['flake8/'], + [CLI_SPECIFIED_FILEPATH, + 'tests/fixtures/config_files/missing.ini'], + [os.path.abspath('setup.cfg'), + os.path.abspath('tox.ini'), + os.path.abspath(CLI_SPECIFIED_FILEPATH)]), +]) +def test_local_config_files(args, extra_config_files, expected): + """Verify discovery of local config files.""" + finder = config.ConfigFileFinder('flake8', args, extra_config_files) + + assert list(finder.local_config_files()) == expected + + +def test_local_configs(): + """Verify we 
return a ConfigParser.""" + finder = config.ConfigFileFinder('flake8', None, []) + + assert isinstance(finder.local_configs(), configparser.RawConfigParser) diff --git a/tests/unit/test_file_checker.py b/tests/unit/test_file_checker.py new file mode 100644 index 0000000..e26bc83 --- /dev/null +++ b/tests/unit/test_file_checker.py @@ -0,0 +1,26 @@ +"""Tests for the FileChecker class.""" +from flake8 import checker + +import pytest + + +def test_read_lines_splits_lines(): + """Verify that read_lines splits the lines of the file.""" + file_checker = checker.FileChecker(__file__, []) + lines = file_checker.read_lines() + assert len(lines) > 5 + assert '"""Tests for the FileChecker class."""\n' in lines + + +@pytest.mark.parametrize('first_line', [ + '\xEF\xBB\xBF"""Module docstring."""\n', + '\uFEFF"""Module docstring."""\n', +]) +def test_strip_utf_bom(first_line): + r"""Verify that we strip '\xEF\xBB\xBF' from the first line.""" + lines = [first_line] + file_checker = checker.FileChecker('stdin', []) + file_checker.lines = lines[:] + file_checker.strip_utf_bom() + assert file_checker.lines != lines + assert file_checker.lines[0] == '"""Module docstring."""\n' diff --git a/tests/unit/test_merged_config_parser.py b/tests/unit/test_merged_config_parser.py new file mode 100644 index 0000000..c64cae6 --- /dev/null +++ b/tests/unit/test_merged_config_parser.py @@ -0,0 +1,174 @@ +"""Unit tests for flake8.options.config.MergedConfigParser.""" +import os + +from flake8.options import config +from flake8.options import manager + +import mock + +import pytest + + +@pytest.fixture +def optmanager(): + """Generate an OptionManager with simple values.""" + return manager.OptionManager(prog='flake8', version='3.0.0a1') + + +@pytest.mark.parametrize('args,extra_config_files', [ + (None, None), + (None, []), + (None, ['foo.ini']), + ('flake8/', []), + ('flake8/', ['foo.ini']), +]) +def test_creates_its_own_config_file_finder(args, extra_config_files, + optmanager): + """Verify we 
create a ConfigFileFinder correctly.""" + class_path = 'flake8.options.config.ConfigFileFinder' + with mock.patch(class_path) as ConfigFileFinder: + parser = config.MergedConfigParser( + option_manager=optmanager, + extra_config_files=extra_config_files, + args=args, + ) + + assert parser.program_name == 'flake8' + ConfigFileFinder.assert_called_once_with( + 'flake8', + args, + extra_config_files or [], + ) + + +def test_parse_cli_config(optmanager): + """Parse the specified config file as a cli config file.""" + optmanager.add_option('--exclude', parse_from_config=True, + comma_separated_list=True, + normalize_paths=True) + optmanager.add_option('--ignore', parse_from_config=True, + comma_separated_list=True) + parser = config.MergedConfigParser(optmanager) + + parsed_config = parser.parse_cli_config( + 'tests/fixtures/config_files/cli-specified.ini' + ) + assert parsed_config == { + 'ignore': ['E123', 'W234', 'E111'], + 'exclude': [ + os.path.abspath('foo/'), + os.path.abspath('bar/'), + os.path.abspath('bogus/'), + ] + } + + +@pytest.mark.parametrize('filename,is_configured_by', [ + ('tests/fixtures/config_files/cli-specified.ini', True), + ('tests/fixtures/config_files/no-flake8-section.ini', False), +]) +def test_is_configured_by(filename, is_configured_by, optmanager): + """Verify the behaviour of the is_configured_by method.""" + parsed_config, _ = config.ConfigFileFinder._read_config(filename) + parser = config.MergedConfigParser(optmanager) + + assert parser.is_configured_by(parsed_config) is is_configured_by + + +def test_parse_user_config(optmanager): + """Verify parsing of user config files.""" + optmanager.add_option('--exclude', parse_from_config=True, + comma_separated_list=True, + normalize_paths=True) + optmanager.add_option('--ignore', parse_from_config=True, + comma_separated_list=True) + parser = config.MergedConfigParser(optmanager) + + with mock.patch.object(parser.config_finder, 'user_config_file') as usercf: + usercf.return_value = 
'tests/fixtures/config_files/cli-specified.ini' + parsed_config = parser.parse_user_config() + + assert parsed_config == { + 'ignore': ['E123', 'W234', 'E111'], + 'exclude': [ + os.path.abspath('foo/'), + os.path.abspath('bar/'), + os.path.abspath('bogus/'), + ] + } + + +def test_parse_local_config(optmanager): + """Verify parsing of local config files.""" + optmanager.add_option('--exclude', parse_from_config=True, + comma_separated_list=True, + normalize_paths=True) + optmanager.add_option('--ignore', parse_from_config=True, + comma_separated_list=True) + parser = config.MergedConfigParser(optmanager) + config_finder = parser.config_finder + + with mock.patch.object(config_finder, 'local_config_files') as localcfs: + localcfs.return_value = [ + 'tests/fixtures/config_files/cli-specified.ini' + ] + parsed_config = parser.parse_local_config() + + assert parsed_config == { + 'ignore': ['E123', 'W234', 'E111'], + 'exclude': [ + os.path.abspath('foo/'), + os.path.abspath('bar/'), + os.path.abspath('bogus/'), + ] + } + + +def test_merge_user_and_local_config(optmanager): + """Verify merging of parsed user and local config files.""" + optmanager.add_option('--exclude', parse_from_config=True, + comma_separated_list=True, + normalize_paths=True) + optmanager.add_option('--ignore', parse_from_config=True, + comma_separated_list=True) + optmanager.add_option('--select', parse_from_config=True, + comma_separated_list=True) + parser = config.MergedConfigParser(optmanager) + config_finder = parser.config_finder + + with mock.patch.object(config_finder, 'local_config_files') as localcfs: + localcfs.return_value = [ + 'tests/fixtures/config_files/local-config.ini' + ] + with mock.patch.object(config_finder, + 'user_config_file') as usercf: + usercf.return_value = ('tests/fixtures/config_files/' + 'user-config.ini') + parsed_config = parser.merge_user_and_local_config() + + assert parsed_config == { + 'exclude': [ + os.path.abspath('docs/') + ], + 'ignore': ['D203'], + 'select': 
['E', 'W', 'F'], + } + + +@mock.patch('flake8.options.config.ConfigFileFinder') +def test_parse_isolates_config(ConfigFileManager, optmanager): + """Verify behaviour of the parse method with isolated=True.""" + parser = config.MergedConfigParser(optmanager) + + assert parser.parse(isolated=True) == {} + assert parser.config_finder.local_configs.called is False + assert parser.config_finder.user_config.called is False + + +@mock.patch('flake8.options.config.ConfigFileFinder') +def test_parse_uses_cli_config(ConfigFileManager, optmanager): + """Verify behaviour of the parse method with a specified config.""" + parser = config.MergedConfigParser(optmanager) + + parser.parse(cli_config='foo.ini') + parser.config_finder.cli_config.assert_called_once_with('foo.ini') diff --git a/tests/unit/test_notifier.py b/tests/unit/test_notifier.py new file mode 100644 index 0000000..8c001da --- /dev/null +++ b/tests/unit/test_notifier.py @@ -0,0 +1,54 @@ +"""Unit tests for the Notifier object.""" +from flake8.plugins import notifier + +import pytest + + +class _Listener(object): + def __init__(self, error_code): + self.error_code = error_code + self.was_notified = False + + def notify(self, error_code, *args, **kwargs): + assert error_code.startswith(self.error_code) + self.was_notified = True + + +class TestNotifier(object): + """Notifier unit tests.""" + + @pytest.fixture(autouse=True) + def setup(self): + """Set up each TestNotifier instance.""" + self.notifier = notifier.Notifier() + self.listener_map = {} + + def add_listener(error_code): + listener = _Listener(error_code) + self.listener_map[error_code] = listener + self.notifier.register_listener(error_code, listener) + + for i in range(10): + add_listener('E{0}'.format(i)) + for j in range(30): + add_listener('E{0}{1:02d}'.format(i, j)) + + def test_notify(self): + """Show that we notify a specific error code.""" + self.notifier.notify('E111', 'extra', 'args') + assert self.listener_map['E111'].was_notified is True + assert 
self.listener_map['E1'].was_notified is True + + @pytest.mark.parametrize('code', ['W123', 'W12', 'W1', 'W']) + def test_no_listeners_for(self, code): + """Show that we return an empty list of listeners.""" + assert list(self.notifier.listeners_for(code)) == [] + + @pytest.mark.parametrize('code,expected', [ + ('E101', ['E101', 'E1']), + ('E211', ['E211', 'E2']), + ]) + def test_listeners_for(self, code, expected): + """Verify that we retrieve the correct listeners.""" + assert ([l.error_code for l in self.notifier.listeners_for(code)] == + expected) diff --git a/tests/unit/test_option.py b/tests/unit/test_option.py new file mode 100644 index 0000000..45f9be7 --- /dev/null +++ b/tests/unit/test_option.py @@ -0,0 +1,69 @@ +"""Unit tests for flake8.options.manager.Option.""" +from flake8.options import manager + +import mock + +import pytest + + +def test_to_optparse(): + """Test conversion to an optparse.Option class.""" + opt = manager.Option( + short_option_name='-t', + long_option_name='--test', + action='count', + parse_from_config=True, + normalize_paths=True, + ) + assert opt.normalize_paths is True + assert opt.parse_from_config is True + + optparse_opt = opt.to_optparse() + assert not hasattr(optparse_opt, 'parse_from_config') + assert not hasattr(optparse_opt, 'normalize_paths') + assert optparse_opt.action == 'count' + + +@mock.patch('optparse.Option') +def test_to_optparse_creates_an_option_as_we_expect(Option): + """Show that we pass all keyword args to optparse.Option.""" + opt = manager.Option('-t', '--test', action='count') + opt.to_optparse() + option_kwargs = { + 'action': 'count', + 'default': None, + 'type': None, + 'dest': 'test', + 'nargs': None, + 'const': None, + 'choices': None, + 'callback': None, + 'callback_args': None, + 'callback_kwargs': None, + 'help': None, + 'metavar': None, + } + + Option.assert_called_once_with( + '-t', '--test', **option_kwargs + ) + + +def test_config_name_generation(): + """Show that we generate the config name 
deterministically.""" + opt = manager.Option(long_option_name='--some-very-long-option-name', + parse_from_config=True) + + assert opt.config_name == 'some_very_long_option_name' + + +def test_config_name_needs_long_option_name(): + """Show that we error out if the Option should be parsed from config.""" + with pytest.raises(ValueError): + manager.Option('-s', parse_from_config=True) + + +def test_dest_is_not_overridden(): + """Show that we do not override custom destinations.""" + opt = manager.Option('-s', '--short', dest='something_not_short') + assert opt.dest == 'something_not_short' diff --git a/tests/unit/test_option_manager.py b/tests/unit/test_option_manager.py new file mode 100644 index 0000000..1ba5442 --- /dev/null +++ b/tests/unit/test_option_manager.py @@ -0,0 +1,196 @@ +"""Unit tests for flake.options.manager.OptionManager.""" +import optparse +import os + +from flake8.options import manager + +import pytest + +TEST_VERSION = '3.0.0b1' + + +@pytest.fixture +def optmanager(): + """Generate a simple OptionManager with default test arguments.""" + return manager.OptionManager(prog='flake8', version=TEST_VERSION) + + +def test_option_manager_creates_option_parser(optmanager): + """Verify that a new manager creates a new parser.""" + assert optmanager.parser is not None + assert isinstance(optmanager.parser, optparse.OptionParser) is True + + +def test_add_option_short_option_only(optmanager): + """Verify the behaviour of adding a short-option only.""" + assert optmanager.options == [] + assert optmanager.config_options_dict == {} + + optmanager.add_option('-s', help='Test short opt') + assert optmanager.options[0].short_option_name == '-s' + + +def test_add_option_long_option_only(optmanager): + """Verify the behaviour of adding a long-option only.""" + assert optmanager.options == [] + assert optmanager.config_options_dict == {} + + optmanager.add_option('--long', help='Test long opt') + assert optmanager.options[0].short_option_name is None + assert 
optmanager.options[0].long_option_name == '--long' + + +def test_add_short_and_long_option_names(optmanager): + """Verify the behaviour of using both short and long option names.""" + assert optmanager.options == [] + assert optmanager.config_options_dict == {} + + optmanager.add_option('-b', '--both', help='Test both opts') + assert optmanager.options[0].short_option_name == '-b' + assert optmanager.options[0].long_option_name == '--both' + + +def test_add_option_with_custom_args(optmanager): + """Verify that add_option handles custom Flake8 parameters.""" + assert optmanager.options == [] + assert optmanager.config_options_dict == {} + + optmanager.add_option('--parse', parse_from_config=True) + optmanager.add_option('--commas', comma_separated_list=True) + optmanager.add_option('--files', normalize_paths=True) + + attrs = ['parse_from_config', 'comma_separated_list', 'normalize_paths'] + for option, attr in zip(optmanager.options, attrs): + assert getattr(option, attr) is True + + +def test_parse_args_normalize_path(optmanager): + """Show that parse_args handles path normalization.""" + assert optmanager.options == [] + assert optmanager.config_options_dict == {} + + optmanager.add_option('--config', normalize_paths=True) + + options, args = optmanager.parse_args(['--config', '../config.ini']) + assert options.config == os.path.abspath('../config.ini') + + +def test_parse_args_handles_comma_separated_defaults(optmanager): + """Show that parse_args handles defaults that are comma-separated.""" + assert optmanager.options == [] + assert optmanager.config_options_dict == {} + + optmanager.add_option('--exclude', default='E123,W234', + comma_separated_list=True) + + options, args = optmanager.parse_args([]) + assert options.exclude == ['E123', 'W234'] + + +def test_parse_args_handles_comma_separated_lists(optmanager): + """Show that parse_args handles user-specified comma-separated lists.""" + assert optmanager.options == [] + assert optmanager.config_options_dict 
== {} + + optmanager.add_option('--exclude', default='E123,W234', + comma_separated_list=True) + + options, args = optmanager.parse_args(['--exclude', 'E201,W111,F280']) + assert options.exclude == ['E201', 'W111', 'F280'] + + +def test_parse_args_normalize_paths(optmanager): + """Verify parse_args normalizes a comma-separated list of paths.""" + assert optmanager.options == [] + assert optmanager.config_options_dict == {} + + optmanager.add_option('--extra-config', normalize_paths=True, + comma_separated_list=True) + + options, args = optmanager.parse_args([ + '--extra-config', '../config.ini,tox.ini,flake8/some-other.cfg' + ]) + assert options.extra_config == [ + os.path.abspath('../config.ini'), + 'tox.ini', + os.path.abspath('flake8/some-other.cfg'), + ] + + +def test_format_plugin(): + """Verify that format_plugin turns a tuple into a dictionary.""" + plugin = manager.OptionManager.format_plugin(('T101', 'Testing', '0.0.0')) + assert plugin['entry'] == 'T101' + assert plugin['name'] == 'Testing' + assert plugin['version'] == '0.0.0' + + +def test_generate_versions(optmanager): + """Verify a comma-separated string is generated of registered plugins.""" + optmanager.registered_plugins = [ + ('T100', 'Testing 100', '0.0.0'), + ('T101', 'Testing 101', '0.0.0'), + ('T300', 'Testing 300', '0.0.0'), + ] + assert (optmanager.generate_versions() == + 'Testing 100: 0.0.0, Testing 101: 0.0.0, Testing 300: 0.0.0') + + +def test_generate_versions_with_format_string(optmanager): + """Verify a comma-separated string is generated of registered plugins.""" + optmanager.registered_plugins = [ + ('T100', 'Testing', '0.0.0'), + ('T101', 'Testing', '0.0.0'), + ('T300', 'Testing', '0.0.0'), + ] + assert ( + optmanager.generate_versions('%(name)s(%(entry)s): %(version)s') == + 'Testing(T100): 0.0.0, Testing(T101): 0.0.0, Testing(T300): 0.0.0' + ) + + +def test_update_version_string(optmanager): + """Verify we update the version string idempotently.""" + assert optmanager.version == 
TEST_VERSION + assert optmanager.parser.version == TEST_VERSION + + optmanager.registered_plugins = [ + ('T100', 'Testing 100', '0.0.0'), + ('T101', 'Testing 101', '0.0.0'), + ('T300', 'Testing 300', '0.0.0'), + ] + + optmanager.update_version_string() + + assert optmanager.version == TEST_VERSION + assert (optmanager.parser.version == TEST_VERSION + ' (' + 'Testing 100: 0.0.0, Testing 101: 0.0.0, Testing 300: 0.0.0)') + + +def test_generate_epilog(optmanager): + """Verify how we generate the epilog for help text.""" + assert optmanager.parser.epilog is None + + optmanager.registered_plugins = [ + ('T100', 'Testing 100', '0.0.0'), + ('T101', 'Testing 101', '0.0.0'), + ('T300', 'Testing 300', '0.0.0'), + ] + + expected_value = ( + 'Installed plugins: Testing 100(T100): 0.0.0, Testing 101(T101): ' + '0.0.0, Testing 300(T300): 0.0.0' + ) + + optmanager.generate_epilog() + assert optmanager.parser.epilog == expected_value + + +def test_extend_default_ignore(optmanager): + """Verify that we update the extended default ignore list.""" + assert optmanager.extended_default_ignore == set() + + optmanager.extend_default_ignore(['T100', 'T101', 'T102']) + assert optmanager.extended_default_ignore == set(['T100', + 'T101', + 'T102']) diff --git a/tests/unit/test_plugin.py b/tests/unit/test_plugin.py new file mode 100644 index 0000000..0f6eec1 --- /dev/null +++ b/tests/unit/test_plugin.py @@ -0,0 +1,157 @@ +"""Tests for flake8.plugins.manager.Plugin.""" +from flake8 import exceptions +from flake8.plugins import manager + +import mock + +import pytest + + +def test_load_plugin_fallsback_on_old_setuptools(): + """Verify we fallback gracefully to on old versions of setuptools.""" + entry_point = mock.Mock(spec=['load']) + plugin = manager.Plugin('T000', entry_point) + + plugin.load_plugin() + entry_point.load.assert_called_once_with(require=False) + + +def test_load_plugin_avoids_deprecated_entry_point_methods(): + """Verify we use the preferred methods on new versions of 
setuptools.""" + entry_point = mock.Mock(spec=['require', 'resolve', 'load']) + plugin = manager.Plugin('T000', entry_point) + + plugin.load_plugin(verify_requirements=True) + assert entry_point.load.called is False + entry_point.require.assert_called_once_with() + entry_point.resolve.assert_called_once_with() + + +def test_load_plugin_is_idempotent(): + """Verify we use the preferred methods on new versions of setuptools.""" + entry_point = mock.Mock(spec=['require', 'resolve', 'load']) + plugin = manager.Plugin('T000', entry_point) + + plugin.load_plugin(verify_requirements=True) + plugin.load_plugin(verify_requirements=True) + plugin.load_plugin() + assert entry_point.load.called is False + entry_point.require.assert_called_once_with() + entry_point.resolve.assert_called_once_with() + + +def test_load_plugin_only_calls_require_when_verifying_requirements(): + """Verify we do not call require when verify_requirements is False.""" + entry_point = mock.Mock(spec=['require', 'resolve', 'load']) + plugin = manager.Plugin('T000', entry_point) + + plugin.load_plugin() + assert entry_point.load.called is False + assert entry_point.require.called is False + entry_point.resolve.assert_called_once_with() + + +def test_load_plugin_catches_and_reraises_exceptions(): + """Verify we raise our own FailedToLoadPlugin.""" + entry_point = mock.Mock(spec=['require', 'resolve']) + entry_point.resolve.side_effect = ValueError('Test failure') + plugin = manager.Plugin('T000', entry_point) + + with pytest.raises(exceptions.FailedToLoadPlugin): + plugin.load_plugin() + + +def test_plugin_property_loads_plugin_on_first_use(): + """Verify that we load our plugin when we first try to use it.""" + entry_point = mock.Mock(spec=['require', 'resolve', 'load']) + plugin = manager.Plugin('T000', entry_point) + + assert plugin.plugin is not None + entry_point.resolve.assert_called_once_with() + + +def test_execute_calls_plugin_with_passed_arguments(): + """Verify that we pass arguments directly 
to the plugin.""" + entry_point = mock.Mock(spec=['require', 'resolve', 'load']) + plugin_obj = mock.Mock() + plugin = manager.Plugin('T000', entry_point) + plugin._plugin = plugin_obj + + plugin.execute('arg1', 'arg2', kwarg1='value1', kwarg2='value2') + plugin_obj.assert_called_once_with( + 'arg1', 'arg2', kwarg1='value1', kwarg2='value2' + ) + + # Extra assertions + assert entry_point.load.called is False + assert entry_point.require.called is False + assert entry_point.resolve.called is False + + +def test_version_proxies_to_the_plugin(): + """Verify that we pass arguments directly to the plugin.""" + entry_point = mock.Mock(spec=['require', 'resolve', 'load']) + plugin_obj = mock.Mock(spec_set=['version']) + plugin_obj.version = 'a.b.c' + plugin = manager.Plugin('T000', entry_point) + plugin._plugin = plugin_obj + + assert plugin.version == 'a.b.c' + + +def test_register_options(): + """Verify we call add_options on the plugin only if it exists.""" + # Set up our mocks and Plugin object + entry_point = mock.Mock(spec=['require', 'resolve', 'load']) + plugin_obj = mock.Mock(spec_set=['name', 'version', 'add_options', + 'parse_options']) + option_manager = mock.Mock(spec=['register_plugin']) + plugin = manager.Plugin('T000', entry_point) + plugin._plugin = plugin_obj + + # Call the method we're testing. 
+ plugin.register_options(option_manager) + + # Assert that we call add_options + plugin_obj.add_options.assert_called_once_with(option_manager) + # Assert that we register the plugin + option_manager.register_plugin.assert_called_once_with( + entry_point_name='T000', + version=plugin_obj.version, + name=plugin_obj.name, + ) + + +def test_register_options_checks_plugin_for_method(): + """Verify we call add_options on the plugin only if it exists.""" + # Set up our mocks and Plugin object + entry_point = mock.Mock(spec=['require', 'resolve', 'load']) + plugin_obj = mock.Mock(spec_set=['name', 'version', 'parse_options']) + option_manager = mock.Mock(spec=['register_plugin']) + plugin = manager.Plugin('T000', entry_point) + plugin._plugin = plugin_obj + + # Call the method we're testing. + plugin.register_options(option_manager) + + # Assert that we register the plugin + assert option_manager.register_plugin.called is False + + +def test_provide_options(): + """Verify we call add_options on the plugin only if it exists.""" + # Set up our mocks and Plugin object + entry_point = mock.Mock(spec=['require', 'resolve', 'load']) + plugin_obj = mock.Mock(spec_set=['name', 'version', 'add_options', + 'parse_options']) + option_manager = mock.Mock() + plugin = manager.Plugin('T000', entry_point) + plugin._plugin = plugin_obj + + # Call the method we're testing. 
+ plugin.provide_options(option_manager, 'options', None) + + # Assert that we call add_options + plugin_obj.parse_options.assert_called_once_with( + option_manager, 'options', None + ) diff --git a/tests/unit/test_plugin_manager.py b/tests/unit/test_plugin_manager.py new file mode 100644 index 0000000..5a50386 --- /dev/null +++ b/tests/unit/test_plugin_manager.py @@ -0,0 +1,50 @@ +"""Tests for flake8.plugins.manager.PluginManager.""" +from flake8.plugins import manager + +import mock + + +def create_entry_point_mock(name): + """Create a mocked EntryPoint.""" + ep = mock.Mock(spec=['name']) + ep.name = name + return ep + + +@mock.patch('pkg_resources.iter_entry_points') +def test_calls_pkg_resources_on_instantiation(iter_entry_points): + """Verify that we call iter_entry_points when we create a manager.""" + iter_entry_points.return_value = [] + manager.PluginManager(namespace='testing.pkg_resources') + + iter_entry_points.assert_called_once_with('testing.pkg_resources') + + +@mock.patch('pkg_resources.iter_entry_points') +def test_calls_pkg_resources_creates_plugins_automaticaly(iter_entry_points): + """Verify that we create Plugins on instantiation.""" + iter_entry_points.return_value = [ + create_entry_point_mock('T100'), + create_entry_point_mock('T200'), + ] + plugin_mgr = manager.PluginManager(namespace='testing.pkg_resources') + + iter_entry_points.assert_called_once_with('testing.pkg_resources') + assert 'T100' in plugin_mgr.plugins + assert 'T200' in plugin_mgr.plugins + assert isinstance(plugin_mgr.plugins['T100'], manager.Plugin) + assert isinstance(plugin_mgr.plugins['T200'], manager.Plugin) + + +@mock.patch('pkg_resources.iter_entry_points') +def test_handles_mapping_functions_across_plugins(iter_entry_points): + """Verify we can use the PluginManager call functions on all plugins.""" + entry_point_mocks = [ + create_entry_point_mock('T100'), + create_entry_point_mock('T200'), + ] + iter_entry_points.return_value = entry_point_mocks + plugin_mgr = 
manager.PluginManager(namespace='testing.pkg_resources') + plugins = [plugin_mgr.plugins[name] for name in plugin_mgr.names] + + assert list(plugin_mgr.map(lambda x: x)) == plugins diff --git a/tests/unit/test_plugin_type_manager.py b/tests/unit/test_plugin_type_manager.py new file mode 100644 index 0000000..271ebc0 --- /dev/null +++ b/tests/unit/test_plugin_type_manager.py @@ -0,0 +1,229 @@ +"""Tests for flake8.plugins.manager.PluginTypeManager.""" +import collections + +from flake8 import exceptions +from flake8.plugins import manager + +import mock + +import pytest + +TEST_NAMESPACE = "testing.plugin-type-manager" + + +def create_plugin_mock(raise_exception=False): + """Create an auto-spec'd mock of a flake8 Plugin.""" + plugin = mock.create_autospec(manager.Plugin, instance=True) + if raise_exception: + plugin.load_plugin.side_effect = exceptions.FailedToLoadPlugin( + plugin=mock.Mock(name='T101'), + exception=ValueError('Test failure'), + ) + return plugin + + +def create_mapping_manager_mock(plugins): + """Create a mock for the PluginManager.""" + # Have a function that will actually call the method underneath + def fake_map(func): + for plugin in plugins: + yield func(plugin) + + # Mock out the PluginManager instance + manager_mock = mock.Mock(spec=['map']) + # Replace the map method + manager_mock.map = fake_map + return manager_mock + + +def create_manager_with_plugins(plugins): + """Create a fake PluginManager with a plugins dictionary.""" + manager_mock = mock.create_autospec(manager.PluginManager) + manager_mock.plugins = plugins + return manager_mock + + +class TestType(manager.PluginTypeManager): + """Fake PluginTypeManager.""" + + namespace = TEST_NAMESPACE + + +@mock.patch('flake8.plugins.manager.PluginManager') +def test_instantiates_a_manager(PluginManager): + """Verify we create a PluginManager on instantiation.""" + TestType() + + PluginManager.assert_called_once_with(TEST_NAMESPACE) + + +@mock.patch('flake8.plugins.manager.PluginManager') +def 
test_proxies_names_to_manager(PluginManager): + """Verify we proxy the names attribute.""" + PluginManager.return_value = mock.Mock(names=[ + 'T100', 'T200', 'T300' + ]) + type_mgr = TestType() + + assert type_mgr.names == ['T100', 'T200', 'T300'] + + +@mock.patch('flake8.plugins.manager.PluginManager') +def test_proxies_plugins_to_manager(PluginManager): + """Verify we proxy the plugins attribute.""" + PluginManager.return_value = mock.Mock(plugins=[ + 'T100', 'T200', 'T300' + ]) + type_mgr = TestType() + + assert type_mgr.plugins == ['T100', 'T200', 'T300'] + + +def test_generate_call_function(): + """Verify the function we generate.""" + optmanager = object() + plugin = mock.Mock(method_name=lambda x: x) + func = manager.PluginTypeManager._generate_call_function( + 'method_name', optmanager, + ) + + assert isinstance(func, collections.Callable) + assert func(plugin) is optmanager + + +@mock.patch('flake8.plugins.manager.PluginManager') +def test_load_plugins(PluginManager): + """Verify load plugins loads *every* plugin.""" + # Create a bunch of fake plugins + plugins = [create_plugin_mock(), create_plugin_mock(), + create_plugin_mock(), create_plugin_mock(), + create_plugin_mock(), create_plugin_mock(), + create_plugin_mock(), create_plugin_mock()] + # Return our PluginManager mock + PluginManager.return_value = create_mapping_manager_mock(plugins) + + type_mgr = TestType() + # Load the tests (do what we're actually testing) + assert len(type_mgr.load_plugins()) == 8 + # Assert that our closure does what we think it does + for plugin in plugins: + plugin.load_plugin.assert_called_once_with() + assert type_mgr.plugins_loaded is True + + +@mock.patch('flake8.plugins.manager.PluginManager') +def test_load_plugins_fails(PluginManager): + """Verify load plugins bubbles up exceptions.""" + plugins = [create_plugin_mock(), create_plugin_mock(True), + create_plugin_mock(), create_plugin_mock(), + create_plugin_mock(), create_plugin_mock(), + create_plugin_mock(), 
create_plugin_mock()] + # Return our PluginManager mock + PluginManager.return_value = create_mapping_manager_mock(plugins) + + type_mgr = TestType() + with pytest.raises(exceptions.FailedToLoadPlugin): + type_mgr.load_plugins() + + # Assert we didn't finish loading plugins + assert type_mgr.plugins_loaded is False + # Assert the first two plugins had their load_plugin method called + plugins[0].load_plugin.assert_called_once_with() + plugins[1].load_plugin.assert_called_once_with() + # Assert the rest of the plugins were not loaded + for plugin in plugins[2:]: + assert plugin.load_plugin.called is False + + +@mock.patch('flake8.plugins.manager.PluginManager') +def test_register_options(PluginManager): + """Test that we map over every plugin to register options.""" + plugins = [create_plugin_mock(), create_plugin_mock(), + create_plugin_mock(), create_plugin_mock(), + create_plugin_mock(), create_plugin_mock(), + create_plugin_mock(), create_plugin_mock()] + # Return our PluginManager mock + PluginManager.return_value = create_mapping_manager_mock(plugins) + optmanager = object() + + type_mgr = TestType() + type_mgr.register_options(optmanager) + + for plugin in plugins: + plugin.register_options.assert_called_with(optmanager) + + +@mock.patch('flake8.plugins.manager.PluginManager') +def test_provide_options(PluginManager): + """Test that we map over every plugin to provide parsed options.""" + plugins = [create_plugin_mock(), create_plugin_mock(), + create_plugin_mock(), create_plugin_mock(), + create_plugin_mock(), create_plugin_mock(), + create_plugin_mock(), create_plugin_mock()] + # Return our PluginManager mock + PluginManager.return_value = create_mapping_manager_mock(plugins) + optmanager = object() + options = object() + extra_args = [] + + type_mgr = TestType() + type_mgr.provide_options(optmanager, options, extra_args) + + for plugin in plugins: + plugin.provide_options.assert_called_with(optmanager, + options, + extra_args) + + 
+@mock.patch('flake8.plugins.manager.PluginManager') +def test_proxy_contains_to_managers_plugins_dict(PluginManager): + """Verify that we proxy __contains__ to the manager's dictionary.""" + plugins = {'T10%i' % i: create_plugin_mock() for i in range(8)} + # Return our PluginManager mock + PluginManager.return_value = create_manager_with_plugins(plugins) + + type_mgr = TestType() + for i in range(8): + key = 'T10%i' % i + assert key in type_mgr + + +@mock.patch('flake8.plugins.manager.PluginManager') +def test_proxies_getitem_to_managers_plugins_dictionary(PluginManager): + """Verify that we can use the PluginTypeManager like a dictionary.""" + plugins = {'T10%i' % i: create_plugin_mock() for i in range(8)} + # Return our PluginManager mock + PluginManager.return_value = create_manager_with_plugins(plugins) + + type_mgr = TestType() + for i in range(8): + key = 'T10%i' % i + assert type_mgr[key] is plugins[key] + + +class FakePluginTypeManager(manager.NotifierBuilderMixin): + """Provide an easy way to test the NotifierBuilderMixin.""" + + def __init__(self, manager): + """Initialize with our fake manager.""" + self.names = sorted(manager.keys()) + self.manager = manager + + +@pytest.fixture +def notifier_builder(): + """Create a fake plugin type manager.""" + return FakePluginTypeManager(manager={ + 'T100': object(), + 'T101': object(), + 'T110': object(), + }) + + +def test_build_notifier(notifier_builder): + """Verify we properly build a Notifier object.""" + notifier = notifier_builder.build_notifier() + for name in ('T100', 'T101', 'T110'): + assert list(notifier.listeners_for(name)) == [ + notifier_builder.manager[name] + ] diff --git a/tests/unit/test_style_guide.py b/tests/unit/test_style_guide.py new file mode 100644 index 0000000..973281b --- /dev/null +++ b/tests/unit/test_style_guide.py @@ -0,0 +1,203 @@ +"""Tests for the flake8.style_guide.StyleGuide class.""" +import optparse + +from flake8 import style_guide +from flake8.formatting import base +from 
flake8.plugins import notifier + +import mock + +import pytest + + +def create_options(**kwargs): + """Create and return an instance of optparse.Values.""" + kwargs.setdefault('select', []) + kwargs.setdefault('ignore', []) + kwargs.setdefault('disable_noqa', False) + return optparse.Values(kwargs) + + +@pytest.mark.parametrize('ignore_list,error_code', [ + (['E111', 'E121'], 'E111'), + (['E111', 'E121'], 'E121'), + (['E11', 'E12'], 'E121'), + (['E2', 'E12'], 'E121'), + (['E2', 'E12'], 'E211'), +]) +def test_is_user_ignored_ignores_errors(ignore_list, error_code): + """Verify we detect users explicitly ignoring an error.""" + guide = style_guide.StyleGuide(create_options(ignore=ignore_list), + listener_trie=None, + formatter=None) + + assert guide.is_user_ignored(error_code) is style_guide.Ignored.Explicitly + + +@pytest.mark.parametrize('ignore_list,error_code', [ + (['E111', 'E121'], 'E112'), + (['E111', 'E121'], 'E122'), + (['E11', 'E12'], 'W121'), + (['E2', 'E12'], 'E112'), + (['E2', 'E12'], 'E111'), +]) +def test_is_user_ignored_implicitly_selects_errors(ignore_list, error_code): + """Verify we detect users does not explicitly ignore an error.""" + guide = style_guide.StyleGuide(create_options(ignore=ignore_list), + listener_trie=None, + formatter=None) + + assert guide.is_user_ignored(error_code) is style_guide.Selected.Implicitly + + +@pytest.mark.parametrize('select_list,error_code', [ + (['E111', 'E121'], 'E111'), + (['E111', 'E121'], 'E121'), + (['E11', 'E12'], 'E121'), + (['E2', 'E12'], 'E121'), + (['E2', 'E12'], 'E211'), +]) +def test_is_user_selected_selects_errors(select_list, error_code): + """Verify we detect users explicitly selecting an error.""" + guide = style_guide.StyleGuide(create_options(select=select_list), + listener_trie=None, + formatter=None) + + assert (guide.is_user_selected(error_code) is + style_guide.Selected.Explicitly) + + +def test_is_user_selected_implicitly_selects_errors(): + """Verify we detect users implicitly selecting an 
error.""" + select_list = [] + error_code = 'E121' + guide = style_guide.StyleGuide(create_options(select=select_list), + listener_trie=None, + formatter=None) + + assert (guide.is_user_selected(error_code) is + style_guide.Selected.Implicitly) + + +@pytest.mark.parametrize('select_list,error_code', [ + (['E111', 'E121'], 'E112'), + (['E111', 'E121'], 'E122'), + (['E11', 'E12'], 'E132'), + (['E2', 'E12'], 'E321'), + (['E2', 'E12'], 'E410'), +]) +def test_is_user_selected_excludes_errors(select_list, error_code): + """Verify we detect users implicitly excludes an error.""" + guide = style_guide.StyleGuide(create_options(select=select_list), + listener_trie=None, + formatter=None) + + assert guide.is_user_selected(error_code) is style_guide.Ignored.Implicitly + + +@pytest.mark.parametrize('select_list,ignore_list,error_code,expected', [ + (['E111', 'E121'], [], 'E111', style_guide.Decision.Selected), + (['E111', 'E121'], [], 'E112', style_guide.Decision.Ignored), + (['E111', 'E121'], [], 'E121', style_guide.Decision.Selected), + (['E111', 'E121'], [], 'E122', style_guide.Decision.Ignored), + (['E11', 'E12'], [], 'E132', style_guide.Decision.Ignored), + (['E2', 'E12'], [], 'E321', style_guide.Decision.Ignored), + (['E2', 'E12'], [], 'E410', style_guide.Decision.Ignored), + (['E11', 'E121'], ['E1'], 'E112', style_guide.Decision.Selected), + (['E111', 'E121'], ['E2'], 'E122', style_guide.Decision.Ignored), + (['E11', 'E12'], ['E13'], 'E132', style_guide.Decision.Ignored), + (['E1', 'E3'], ['E32'], 'E321', style_guide.Decision.Ignored), + ([], ['E2', 'E12'], 'E410', style_guide.Decision.Selected), + (['E4'], ['E2', 'E12', 'E41'], 'E410', style_guide.Decision.Ignored), + (['E41'], ['E2', 'E12', 'E4'], 'E410', style_guide.Decision.Selected), +]) +def test_should_report_error(select_list, ignore_list, error_code, expected): + """Verify we decide when to report an error.""" + guide = style_guide.StyleGuide(create_options(select=select_list, + ignore=ignore_list), + 
listener_trie=None, + formatter=None) + + assert guide.should_report_error(error_code) is expected + + +@pytest.mark.parametrize('error_code,physical_line,expected_result', [ + ('E111', 'a = 1', False), + ('E121', 'a = 1 # noqa: E111', False), + ('E121', 'a = 1 # noqa: E111,W123,F821', False), + ('E111', 'a = 1 # noqa: E111,W123,F821', True), + ('W123', 'a = 1 # noqa: E111,W123,F821', True), + ('E111', 'a = 1 # noqa: E11,W123,F821', True), +]) +def test_is_inline_ignored(error_code, physical_line, expected_result): + """Verify that we detect inline usage of ``# noqa``.""" + guide = style_guide.StyleGuide(create_options(select=['E', 'W', 'F']), + listener_trie=None, + formatter=None) + error = style_guide.Error(error_code, 'filename.py', 1, 1, 'error text') + + with mock.patch('linecache.getline', return_value=physical_line): + assert guide.is_inline_ignored(error) is expected_result + + +def test_disable_is_inline_ignored(): + """Verify that is_inline_ignored exits immediately if disabling NoQA.""" + guide = style_guide.StyleGuide(create_options(disable_noqa=True), + listener_trie=None, + formatter=None) + error = style_guide.Error('E121', 'filename.py', 1, 1, 'error text') + + with mock.patch('linecache.getline') as getline: + assert guide.is_inline_ignored(error) is False + + assert getline.called is False + + +@pytest.mark.parametrize('select_list,ignore_list,error_code', [ + (['E111', 'E121'], [], 'E111'), + (['E111', 'E121'], [], 'E121'), + (['E11', 'E121'], ['E1'], 'E112'), + ([], ['E2', 'E12'], 'E410'), + (['E41'], ['E2', 'E12', 'E4'], 'E410'), +]) +def test_handle_error_notifies_listeners(select_list, ignore_list, error_code): + """Verify that error codes notify the listener trie appropriately.""" + listener_trie = mock.create_autospec(notifier.Notifier, instance=True) + formatter = mock.create_autospec(base.BaseFormatter, instance=True) + guide = style_guide.StyleGuide(create_options(select=select_list, + ignore=ignore_list), + listener_trie=listener_trie, 
+ formatter=formatter) + + with mock.patch('linecache.getline', return_value=''): + guide.handle_error(error_code, 'stdin', 1, 1, 'error found') + error = style_guide.Error(error_code, 'stdin', 1, 1, 'error found') + listener_trie.notify.assert_called_once_with(error_code, error) + formatter.handle.assert_called_once_with(error) + + +@pytest.mark.parametrize('select_list,ignore_list,error_code', [ + (['E111', 'E121'], [], 'E122'), + (['E11', 'E12'], [], 'E132'), + (['E2', 'E12'], [], 'E321'), + (['E2', 'E12'], [], 'E410'), + (['E111', 'E121'], ['E2'], 'E122'), + (['E11', 'E12'], ['E13'], 'E132'), + (['E1', 'E3'], ['E32'], 'E321'), + (['E4'], ['E2', 'E12', 'E41'], 'E410'), + (['E111', 'E121'], [], 'E112'), +]) +def test_handle_error_does_not_notify_listeners(select_list, ignore_list, + error_code): + """Verify that error codes notify the listener trie appropriately.""" + listener_trie = mock.create_autospec(notifier.Notifier, instance=True) + formatter = mock.create_autospec(base.BaseFormatter, instance=True) + guide = style_guide.StyleGuide(create_options(select=select_list, + ignore=ignore_list), + listener_trie=listener_trie, + formatter=formatter) + + with mock.patch('linecache.getline', return_value=''): + guide.handle_error(error_code, 'stdin', 1, 1, 'error found') + assert listener_trie.notify.called is False + assert formatter.handle.called is False diff --git a/tests/unit/test_trie.py b/tests/unit/test_trie.py new file mode 100644 index 0000000..152b5b6 --- /dev/null +++ b/tests/unit/test_trie.py @@ -0,0 +1,122 @@ +"""Unit test for the _trie module.""" +from flake8.plugins import _trie as trie + + +class TestTrie(object): + """Collection of tests for the Trie class.""" + + def test_traverse_without_data(self): + """Verify the behaviour when traversing an empty Trie.""" + tree = trie.Trie() + assert list(tree.traverse()) == [] + + def test_traverse_with_data(self): + """Verify that traversal of our Trie is depth-first and pre-order.""" + tree = trie.Trie() + 
tree.add('A', 'A') + tree.add('a', 'a') + tree.add('AB', 'B') + tree.add('Ab', 'b') + tree.add('AbC', 'C') + tree.add('Abc', 'c') + # The trie tree here should look something like + # + # + # / \ + # A a + # / | + # B b + # / \ + # C c + # + # And the traversal should look like: + # + # A B b C c a + expected_order = ['A', 'B', 'b', 'C', 'c', 'a'] + for expected, actual_node in zip(expected_order, tree.traverse()): + assert actual_node.prefix == expected + + def test_find(self): + """Exercise the Trie.find method.""" + tree = trie.Trie() + tree.add('A', 'A') + tree.add('a', 'a') + tree.add('AB', 'AB') + tree.add('Ab', 'Ab') + tree.add('AbC', 'AbC') + tree.add('Abc', 'Abc') + + assert tree.find('AB').data == ['AB'] + assert tree.find('AbC').data == ['AbC'] + assert tree.find('A').data == ['A'] + assert tree.find('X') is None + + +class TestTrieNode(object): + """Collection of tests for the TrieNode class.""" + + def test_add_child(self): + """Verify we add children appropriately.""" + node = trie.TrieNode('E', 'E is for Eat') + assert node.find_prefix('a') is None + added = node.add_child('a', 'a is for Apple') + assert node.find_prefix('a') is added + + def test_add_child_overrides_previous_child(self): + """Verify adding a child will replace the previous child.""" + node = trie.TrieNode('E', 'E is for Eat', children={ + 'a': trie.TrieNode('a', 'a is for Apple') + }) + previous = node.find_prefix('a') + assert previous is not None + + added = node.add_child('a', 'a is for Ascertain') + assert node.find_prefix('a') is added + + def test_find_prefix(self): + """Verify we can find a child with the specified prefix.""" + node = trie.TrieNode('E', 'E is for Eat', children={ + 'a': trie.TrieNode('a', 'a is for Apple') + }) + child = node.find_prefix('a') + assert child is not None + assert child.prefix == 'a' + assert child.data == 'a is for Apple' + + def test_find_prefix_returns_None_when_no_children_have_the_prefix(self): + """Verify we receive None from find_prefix 
for missing children.""" + node = trie.TrieNode('E', 'E is for Eat', children={ + 'a': trie.TrieNode('a', 'a is for Apple') + }) + assert node.find_prefix('b') is None + + def test_traverse_does_nothing_when_a_node_has_no_children(self): + """Verify traversing a node with no children does nothing.""" + node = trie.TrieNode('E', 'E is for Eat') + assert list(node.traverse()) == [] + + def test_traverse(self): + """Verify traversal is depth-first and pre-order.""" + root = trie.TrieNode(None, None) + node = root.add_child('A', 'A') + root.add_child('a', 'a') + node.add_child('B', 'B') + node = node.add_child('b', 'b') + node.add_child('C', 'C') + node.add_child('c', 'c') + # The sub-tree here should look something like + # + # + # / \ + # A a + # / | + # B b + # / \ + # C c + # + # And the traversal should look like: + # + # A B b C c a + expected_order = ['A', 'B', 'b', 'C', 'c', 'a'] + for expected, actual_node in zip(expected_order, root.traverse()): + assert actual_node.prefix == expected diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py new file mode 100644 index 0000000..d69d939 --- /dev/null +++ b/tests/unit/test_utils.py @@ -0,0 +1,117 @@ +"""Tests for flake8's utils module.""" +import os + +from flake8 import utils +from flake8.plugins import manager as plugin_manager + +import mock + +import pytest + + +RELATIVE_PATHS = ["flake8", "pep8", "pyflakes", "mccabe"] + + +@pytest.mark.parametrize("value,expected", [ + ("E123,\n\tW234,\n E206", ["E123", "W234", "E206"]), + ("E123,W234,E206", ["E123", "W234", "E206"]), + (["E123", "W234", "E206"], ["E123", "W234", "E206"]), + (["E123", "\n\tW234", "\n E206"], ["E123", "W234", "E206"]), +]) +def test_parse_comma_separated_list(value, expected): + """Verify that similar inputs produce identical outputs.""" + assert utils.parse_comma_separated_list(value) == expected + + +@pytest.mark.parametrize("value,expected", [ + ("flake8", "flake8"), + ("../flake8", os.path.abspath("../flake8")), + ("flake8/", 
os.path.abspath("flake8")), +]) +def test_normalize_path(value, expected): + """Verify that we normalize paths provided to the tool.""" + assert utils.normalize_path(value) == expected + + +@pytest.mark.parametrize("value,expected", [ + ("flake8,pep8,pyflakes,mccabe", ["flake8", "pep8", "pyflakes", "mccabe"]), + ("flake8,\n\tpep8,\n pyflakes,\n\n mccabe", + ["flake8", "pep8", "pyflakes", "mccabe"]), + ("../flake8,../pep8,../pyflakes,../mccabe", + [os.path.abspath("../" + p) for p in RELATIVE_PATHS]), +]) +def test_normalize_paths(value, expected): + """Verify we normalize comma-separated paths provided to the tool.""" + assert utils.normalize_paths(value) == expected + + +def test_is_windows_checks_for_nt(): + """Verify that we correctly detect Windows.""" + with mock.patch.object(os, 'name', 'nt'): + assert utils.is_windows() is True + + with mock.patch.object(os, 'name', 'posix'): + assert utils.is_windows() is False + + +@pytest.mark.parametrize('filename,patterns,expected', [ + ('foo.py', [], True), + ('foo.py', ['*.pyc'], False), + ('foo.pyc', ['*.pyc'], True), + ('foo.pyc', ['*.swp', '*.pyc', '*.py'], True), +]) +def test_fnmatch(filename, patterns, expected): + """Verify that our fnmatch wrapper works as expected.""" + assert utils.fnmatch(filename, patterns) is expected + + +def test_fnmatch_returns_the_default_with_empty_default(): + """The default parameter should be returned when no patterns are given.""" + sentinel = object() + assert utils.fnmatch('file.py', [], default=sentinel) is sentinel + + +def test_filenames_from_a_directory(): + """Verify that filenames_from walks a directory.""" + filenames = list(utils.filenames_from('flake8/')) + assert len(filenames) > 2 + assert 'flake8/__init__.py' in filenames + + +def test_filenames_from_a_directory_with_a_predicate(): + """Verify that predicates filter filenames_from.""" + filenames = list(utils.filenames_from( + arg='flake8/', + predicate=lambda filename: filename == 'flake8/__init__.py', + )) + 
assert len(filenames) > 2 + assert 'flake8/__init__.py' not in filenames + + +def test_filenames_from_a_single_file(): + """Verify that we simply yield that filename.""" + filenames = list(utils.filenames_from('flake8/__init__.py')) + + assert len(filenames) == 1 + assert ['flake8/__init__.py'] == filenames + + +def test_parameters_for_class_plugin(): + """Verify that we can retrieve the parameters for a class plugin.""" + class FakeCheck(object): + def __init__(self, tree): + pass + + plugin = plugin_manager.Plugin('plugin-name', object()) + plugin._plugin = FakeCheck + assert utils.parameters_for(plugin) == ['tree'] + + +def test_parameters_for_function_plugin(): + """Verify that we retrieve the parameters for a function plugin.""" + def fake_plugin(physical_line, self, tree): + pass + + plugin = plugin_manager.Plugin('plugin-name', object()) + plugin._plugin = fake_plugin + assert utils.parameters_for(plugin) == ['physical_line', 'self', 'tree'] diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..946d532 --- /dev/null +++ b/tox.ini @@ -0,0 +1,110 @@ +[tox] +minversion=2.3.1 +envlist = py26,py27,py32,py33,py34,py35,flake8 + +[testenv] +deps = + mock + pytest +commands = + py.test {posargs} + +[testenv:venv] +deps = + . 
+commands = {posargs} + +# Linters +[testenv:flake8] +skipsdist = true +skip_install = true +use_develop = false +deps = + flake8 + flake8-docstrings + flake8-import-order +commands = + flake8 + +[testenv:pylint] +basepython = python3 +skipsdist = true +skip_install = true +use_develop = false +deps = + pyflakes + pylint +commands = + pylint flake8 + +[testenv:doc8] +basepython = python3 +skipsdist = true +skip_install = true +use_develop = false +deps = + sphinx + doc8 +commands = + doc8 docs/source/ + +[testenv:mypy] +basepython = python3 +skipsdist = true +skip_install = true +use_develop = false +deps = + mypy-lang +commands = + mypy flake8 + +[testenv:linters] +basepython = python3 +skipsdist = true +skip_install = true +use_develop = false +deps = + {[testenv:flake8]deps} + {[testenv:pylint]deps} + {[testenv:doc8]deps} +commands = + {[testenv:flake8]commands} + {[testenv:pylint]commands} + {[testenv:doc8]commands} + +# Documentation +[testenv:docs] +deps = + sphinx>=1.3.0 +commands = + sphinx-build -E -W -c docs/source/ -b html docs/source/ docs/build/html + +[testenv:serve-docs] +basepython = python3.4 +skipsdist = true +skip_install = true +use_develop = false +changedir = docs/build/html +deps = +commands = + python -m http.server {posargs} + +[testenv:readme] +deps = + readme_renderer +commands = + python setup.py check -r -s + +# Flake8 Configuration +[flake8] +# Ignore some flake8-docstrings errors +# NOTE(sigmavirus24): While we're still using flake8 2.x, this ignore line +# defaults to selecting all other errors so we do not need select=E,F,W,I,D +# Once Flake8 3.0 is released and in a good state, we can use both and it will +# work well \o/ +ignore = D203 +# NOTE(sigmavirus24): Once we release 3.0.0 this exclude option can be specified +# across multiple lines. Presently it cannot be specified across multiple lines. +# :-( +exclude = .git,__pycache__,docs/source/conf.py +max-complexity = 10