[pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
This commit is contained in:
pre-commit-ci[bot] 2024-04-13 00:00:18 +00:00
parent 72ad6dc953
commit f4cd1ba0d6
813 changed files with 66015 additions and 58839 deletions

View file

@ -1,8 +1,6 @@
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
"""Code parsing for coverage.py."""
from __future__ import annotations
import ast
@ -12,21 +10,30 @@ import re
import sys
import token
import tokenize
from dataclasses import dataclass
from types import CodeType
from typing import (
cast, Any, Callable, Dict, Iterable, List, Optional, Protocol, Sequence,
Set, Tuple,
)
from typing import Any
from typing import Callable
from typing import cast
from typing import Dict
from typing import Iterable
from typing import List
from typing import Optional
from typing import Protocol
from typing import Sequence
from typing import Set
from typing import Tuple
from coverage import env
from coverage.bytecode import code_objects
from coverage.debug import short_stack
from coverage.exceptions import NoSource, NotPython
from coverage.misc import join_regex, nice_pair
from coverage.exceptions import NoSource
from coverage.exceptions import NotPython
from coverage.misc import join_regex
from coverage.misc import nice_pair
from coverage.phystokens import generate_tokens
from coverage.types import TArc, TLineNo
from coverage.types import TArc
from coverage.types import TLineNo
class PythonParser:
@ -36,6 +43,7 @@ class PythonParser:
involved.
"""
def __init__(
self,
text: str | None = None,
@ -48,8 +56,8 @@ class PythonParser:
`exclude`, a regex string.
"""
assert text or filename, "PythonParser needs either text or filename"
self.filename = filename or "<code>"
assert text or filename, 'PythonParser needs either text or filename'
self.filename = filename or '<code>'
if text is not None:
self.text: str = text
else:
@ -62,7 +70,7 @@ class PythonParser:
self.exclude = exclude
# The text lines of the parsed code.
self.lines: list[str] = self.text.split("\n")
self.lines: list[str] = self.text.split('\n')
# The normalized line numbers of the statements in the code. Exclusions
# are taken into account, and statements are adjusted to their first
@ -152,25 +160,27 @@ class PythonParser:
tokgen = generate_tokens(self.text)
for toktype, ttext, (slineno, _), (elineno, _), ltext in tokgen:
if self.show_tokens: # pragma: debugging
print("%10s %5s %-20r %r" % (
tokenize.tok_name.get(toktype, toktype),
nice_pair((slineno, elineno)), ttext, ltext,
))
print(
'%10s %5s %-20r %r' % (
tokenize.tok_name.get(toktype, toktype),
nice_pair((slineno, elineno)), ttext, ltext,
),
)
if toktype == token.INDENT:
indent += 1
elif toktype == token.DEDENT:
indent -= 1
elif toktype == token.NAME:
if ttext == "class":
if ttext == 'class':
# Class definitions look like branches in the bytecode, so
# we need to exclude them. The simplest way is to note the
# lines with the "class" keyword.
self.raw_classdefs.add(slineno)
elif toktype == token.OP:
if ttext == ":" and nesting == 0:
if ttext == ':' and nesting == 0:
should_exclude = (
self.raw_excluded.intersection(range(first_line, elineno + 1))
or excluding_decorators
self.raw_excluded.intersection(range(first_line, elineno + 1)) or
excluding_decorators
)
if not excluding and should_exclude:
# Start excluding a suite. We trigger off of the colon
@ -180,28 +190,28 @@ class PythonParser:
exclude_indent = indent
excluding = True
excluding_decorators = False
elif ttext == "@" and first_on_line:
elif ttext == '@' and first_on_line:
# A decorator.
if elineno in self.raw_excluded:
excluding_decorators = True
if excluding_decorators:
self.raw_excluded.add(elineno)
elif ttext in "([{":
elif ttext in '([{':
nesting += 1
elif ttext in ")]}":
elif ttext in ')]}':
nesting -= 1
elif toktype == token.STRING:
if prev_toktype == token.INDENT:
# Strings that are first on an indented line are docstrings.
# (a trick from trace.py in the stdlib.) This works for
# 99.9999% of cases.
self.raw_docstrings.update(range(slineno, elineno+1))
self.raw_docstrings.update(range(slineno, elineno + 1))
elif toktype == token.NEWLINE:
if first_line and elineno != first_line:
# We're at the end of a line, and we've ended on a
# different line than the first line of the statement,
# so record a multi-line range.
for l in range(first_line, elineno+1):
for l in range(first_line, elineno + 1):
self._multiline[l] = first_line
first_line = 0
first_on_line = True
@ -267,13 +277,13 @@ class PythonParser:
try:
self._raw_parse()
except (tokenize.TokenError, IndentationError, SyntaxError) as err:
if hasattr(err, "lineno"):
if hasattr(err, 'lineno'):
lineno = err.lineno # IndentationError
else:
lineno = err.args[1][0] # TokenError
raise NotPython(
f"Couldn't parse '{self.filename}' as Python source: " +
f"{err.args[0]!r} at line {lineno}",
f'{err.args[0]!r} at line {lineno}',
) from err
self.excluded = self.first_lines(self.raw_excluded)
@ -376,13 +386,13 @@ class PythonParser:
emsg = "didn't jump to line {lineno}"
emsg = emsg.format(lineno=end)
msg = f"line {actual_start} {emsg}"
msg = f'line {actual_start} {emsg}'
if smsg is not None:
msg += f", because {smsg.format(lineno=actual_start)}"
msg += f', because {smsg.format(lineno=actual_start)}'
msgs.append(msg)
return " or ".join(msgs)
return ' or '.join(msgs)
class ByteParser:
@ -400,7 +410,7 @@ class ByteParser:
else:
assert filename is not None
try:
self.code = compile(text, filename, "exec", dont_inherit=True)
self.code = compile(text, filename, 'exec', dont_inherit=True)
except SyntaxError as synerr:
raise NotPython(
"Couldn't parse '%s' as Python source: '%s' at line %d" % (
@ -422,7 +432,7 @@ class ByteParser:
Uses co_lnotab described in Python/compile.c to find the
line numbers. Produces a sequence: l0, l1, ...
"""
if hasattr(self.code, "co_lines"):
if hasattr(self.code, 'co_lines'):
# PYVERSIONS: new in 3.10
for _, _, line in self.code.co_lines():
if line:
@ -477,11 +487,12 @@ class ArcStart:
"""
lineno: TLineNo
cause: str = ""
cause: str = ''
class TAddArcFn(Protocol):
"""The type for AstArcAnalyzer.add_arc()."""
def __call__(
self,
start: TLineNo,
@ -491,8 +502,10 @@ class TAddArcFn(Protocol):
) -> None:
...
TArcFragments = Dict[TArc, List[Tuple[Optional[str], Optional[str]]]]
class Block:
"""
Blocks need to handle various exiting statements in their own ways.
@ -503,6 +516,7 @@ class Block:
stack.
"""
# pylint: disable=unused-argument
def process_break_exits(self, exits: set[ArcStart], add_arc: TAddArcFn) -> bool:
"""Process break exits."""
# Because break can only appear in loops, and most subclasses
@ -526,6 +540,7 @@ class Block:
class LoopBlock(Block):
"""A block on the block stack representing a `for` or `while` loop."""
def __init__(self, start: TLineNo) -> None:
# The line number where the loop starts.
self.start = start
@ -544,6 +559,7 @@ class LoopBlock(Block):
class FunctionBlock(Block):
"""A block on the block stack representing a function definition."""
def __init__(self, start: TLineNo, name: str) -> None:
# The line number where the function starts.
self.start = start
@ -569,6 +585,7 @@ class FunctionBlock(Block):
class TryBlock(Block):
"""A block on the block stack representing a `try` block."""
def __init__(self, handler_start: TLineNo | None, final_start: TLineNo | None) -> None:
# The line number of the first "except" handler, if any.
self.handler_start = handler_start
@ -612,6 +629,7 @@ class TryBlock(Block):
class WithBlock(Block):
"""A block on the block stack representing a `with` block."""
def __init__(self, start: TLineNo) -> None:
# We only ever use this block if it is needed, so that we don't have to
# check this setting in all the methods.
@ -659,6 +677,7 @@ class NodeList(ast.AST):
unconditional execution of one of the clauses.
"""
def __init__(self, body: Sequence[ast.AST]) -> None:
    # Mimic a real AST node just enough: keep the wrapped statements, and
    # take our line number from the first child so callers can read .lineno.
    self.body = body
    self.lineno = body[0].lineno
@ -667,8 +686,10 @@ class NodeList(ast.AST):
# TODO: the cause messages have too many commas.
# TODO: Shouldn't the cause messages join with "and" instead of "or"?
def _make_expression_code_method(noun: str) -> Callable[[AstArcAnalyzer, ast.AST], None]:
"""A function to make methods for expression-based callable _code_object__ methods."""
def _code_object__expression_callable(self: AstArcAnalyzer, node: ast.AST) -> None:
start = self.line_for_node(node)
self.add_arc(-start, start, None, f"didn't run the {noun} on line {start}")
@ -692,15 +713,15 @@ class AstArcAnalyzer:
# Turn on AST dumps with an environment variable.
# $set_env.py: COVERAGE_AST_DUMP - Dump the AST nodes when parsing code.
dump_ast = bool(int(os.getenv("COVERAGE_AST_DUMP", "0")))
dump_ast = bool(int(os.getenv('COVERAGE_AST_DUMP', '0')))
if dump_ast: # pragma: debugging
# Dump the AST so that failing tests have helpful output.
print(f"Statements: {self.statements}")
print(f"Multiline map: {self.multiline}")
print(f'Statements: {self.statements}')
print(f'Multiline map: {self.multiline}')
dumpkw: dict[str, Any] = {}
if sys.version_info >= (3, 9):
dumpkw["indent"] = 4
dumpkw['indent'] = 4
print(ast.dump(self.root_node, include_attributes=True, **dumpkw))
self.arcs: set[TArc] = set()
@ -714,7 +735,7 @@ class AstArcAnalyzer:
self.block_stack: list[Block] = []
# $set_env.py: COVERAGE_TRACK_ARCS - Trace possible arcs added while parsing code.
self.debug = bool(int(os.getenv("COVERAGE_TRACK_ARCS", "0")))
self.debug = bool(int(os.getenv('COVERAGE_TRACK_ARCS', '0')))
def analyze(self) -> None:
"""Examine the AST tree from `root_node` to determine possible arcs.
@ -725,7 +746,7 @@ class AstArcAnalyzer:
"""
for node in ast.walk(self.root_node):
node_name = node.__class__.__name__
code_object_handler = getattr(self, "_code_object__" + node_name, None)
code_object_handler = getattr(self, '_code_object__' + node_name, None)
if code_object_handler is not None:
code_object_handler(node)
@ -738,7 +759,7 @@ class AstArcAnalyzer:
) -> None:
"""Add an arc, including message fragments to use if it is missing."""
if self.debug: # pragma: debugging
print(f"\nAdding possible arc: ({start}, {end}): {smsg!r}, {emsg!r}")
print(f'\nAdding possible arc: ({start}, {end}): {smsg!r}, {emsg!r}')
print(short_stack())
self.arcs.add((start, end))
@ -758,7 +779,7 @@ class AstArcAnalyzer:
node_name = node.__class__.__name__
handler = cast(
Optional[Callable[[ast.AST], TLineNo]],
getattr(self, "_line__" + node_name, None),
getattr(self, '_line__' + node_name, None),
)
if handler is not None:
return handler(node)
@ -809,8 +830,8 @@ class AstArcAnalyzer:
# The node types that just flow to the next node with no complications.
OK_TO_DEFAULT = {
"AnnAssign", "Assign", "Assert", "AugAssign", "Delete", "Expr", "Global",
"Import", "ImportFrom", "Nonlocal", "Pass",
'AnnAssign', 'Assign', 'Assert', 'AugAssign', 'Delete', 'Expr', 'Global',
'Import', 'ImportFrom', 'Nonlocal', 'Pass',
}
def add_arcs(self, node: ast.AST) -> set[ArcStart]:
@ -832,7 +853,7 @@ class AstArcAnalyzer:
node_name = node.__class__.__name__
handler = cast(
Optional[Callable[[ast.AST], Set[ArcStart]]],
getattr(self, "_handle__" + node_name, None),
getattr(self, '_handle__' + node_name, None),
)
if handler is not None:
return handler(node)
@ -841,7 +862,7 @@ class AstArcAnalyzer:
# statement), or it's something we overlooked.
if env.TESTING:
if node_name not in self.OK_TO_DEFAULT:
raise RuntimeError(f"*** Unhandled: {node}") # pragma: only failure
raise RuntimeError(f'*** Unhandled: {node}') # pragma: only failure
# Default for simple statements: one exit from this node.
return {ArcStart(self.line_for_node(node))}
@ -898,7 +919,7 @@ class AstArcAnalyzer:
missing_fn = cast(
Optional[Callable[[ast.AST], Optional[ast.AST]]],
getattr(self, "_missing__" + node.__class__.__name__, None),
getattr(self, '_missing__' + node.__class__.__name__, None),
)
if missing_fn is not None:
ret_node = missing_fn(node)
@ -949,8 +970,8 @@ class AstArcAnalyzer:
new_while.lineno = body_nodes.lineno
new_while.test = ast.Name()
new_while.test.lineno = body_nodes.lineno
new_while.test.id = "True"
assert hasattr(body_nodes, "body")
new_while.test.id = 'True'
assert hasattr(body_nodes, 'body')
new_while.body = body_nodes.body
new_while.orelse = []
return new_while
@ -958,11 +979,11 @@ class AstArcAnalyzer:
def is_constant_expr(self, node: ast.AST) -> str | None:
    """Is this a compile-time constant?

    Returns:
        'Num' if `node` is a literal constant (Constant/NameConstant/Num),
        'Name' if it is one of the constant-valued identifiers
        True/False/None/__debug__, or None if it is not a compile-time
        constant.
    """
    # The diff rendering had both the pre- and post-reformat copies of the
    # two if/return pairs; this is the single de-duplicated version.
    node_name = node.__class__.__name__
    if node_name in ['Constant', 'NameConstant', 'Num']:
        return 'Num'
    elif isinstance(node, ast.Name):
        # These identifiers always evaluate to the same value.
        if node.id in ['True', 'False', 'None', '__debug__']:
            return 'Name'
    return None
# In the fullness of time, these might be good tests to write:
@ -1063,7 +1084,7 @@ class AstArcAnalyzer:
def _handle__For(self, node: ast.For) -> set[ArcStart]:
start = self.line_for_node(node.iter)
self.block_stack.append(LoopBlock(start=start))
from_start = ArcStart(start, cause="the loop on line {lineno} never started")
from_start = ArcStart(start, cause='the loop on line {lineno} never started')
exits = self.add_body_arcs(node.body, from_start=from_start)
# Any exit from the body will go back to the top of the loop.
for xit in exits:
@ -1087,9 +1108,9 @@ class AstArcAnalyzer:
def _handle__If(self, node: ast.If) -> set[ArcStart]:
    """Handle an `if` statement: arcs into the body and into the else clause.

    The test line branches to the body (taken when the condition is true)
    and to the orelse suite (taken when it is false); exits from both
    suites are the exits of the whole statement.
    """
    # The diff rendering duplicated each `from_start = ...` assignment
    # (pre- and post-reformat copies); this is the de-duplicated version.
    start = self.line_for_node(node.test)
    from_start = ArcStart(start, cause='the condition on line {lineno} was never true')
    exits = self.add_body_arcs(node.body, from_start=from_start)
    from_start = ArcStart(start, cause='the condition on line {lineno} was never false')
    exits |= self.add_body_arcs(node.orelse, from_start=from_start)
    return exits
@ -1106,16 +1127,16 @@ class AstArcAnalyzer:
pattern = pattern.patterns[-1]
if isinstance(pattern, ast.MatchAs):
had_wildcard = True
self.add_arc(last_start, case_start, "the pattern on line {lineno} always matched")
self.add_arc(last_start, case_start, 'the pattern on line {lineno} always matched')
from_start = ArcStart(
case_start,
cause="the pattern on line {lineno} never matched",
cause='the pattern on line {lineno} never matched',
)
exits |= self.add_body_arcs(case.body, from_start=from_start)
last_start = case_start
if not had_wildcard:
exits.add(
ArcStart(case_start, cause="the pattern on line {lineno} always matched"),
ArcStart(case_start, cause='the pattern on line {lineno} always matched'),
)
return exits
@ -1260,7 +1281,7 @@ class AstArcAnalyzer:
for start in sorted(starts):
if start.cause:
causes.append(start.cause.format(lineno=start.lineno))
cause = " or ".join(causes)
cause = ' or '.join(causes)
exits = {ArcStart(xit.lineno, cause) for xit in exits}
return exits
@ -1275,7 +1296,7 @@ class AstArcAnalyzer:
if top_is_body0:
to_top = self.line_for_node(node.body[0])
self.block_stack.append(LoopBlock(start=to_top))
from_start = ArcStart(start, cause="the condition on line {lineno} was never true")
from_start = ArcStart(start, cause='the condition on line {lineno} was never true')
exits = self.add_body_arcs(node.body, from_start=from_start)
for xit in exits:
self.add_arc(xit.lineno, to_top, xit.cause)
@ -1283,7 +1304,7 @@ class AstArcAnalyzer:
my_block = self.block_stack.pop()
assert isinstance(my_block, LoopBlock)
exits.update(my_block.break_exits)
from_start = ArcStart(start, cause="the condition on line {lineno} was never false")
from_start = ArcStart(start, cause='the condition on line {lineno} was never false')
if node.orelse:
else_exits = self.add_body_arcs(node.orelse, from_start=from_start)
exits |= else_exits
@ -1357,9 +1378,9 @@ class AstArcAnalyzer:
f"didn't exit the body of class {node.name!r}",
)
_code_object__Lambda = _make_expression_code_method("lambda")
_code_object__GeneratorExp = _make_expression_code_method("generator expression")
_code_object__Lambda = _make_expression_code_method('lambda')
_code_object__GeneratorExp = _make_expression_code_method('generator expression')
if env.PYBEHAVIOR.comprehensions_are_functions:
_code_object__DictComp = _make_expression_code_method("dictionary comprehension")
_code_object__SetComp = _make_expression_code_method("set comprehension")
_code_object__ListComp = _make_expression_code_method("list comprehension")
_code_object__DictComp = _make_expression_code_method('dictionary comprehension')
_code_object__SetComp = _make_expression_code_method('set comprehension')
_code_object__ListComp = _make_expression_code_method('list comprehension')