[pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
This commit is contained in:
pre-commit-ci[bot] 2024-04-13 00:00:18 +00:00
parent 72ad6dc953
commit f4cd1ba0d6
813 changed files with 66015 additions and 58839 deletions

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import email.feedparser
import email.header
import email.message
@ -5,22 +7,23 @@ import email.parser
import email.policy
import sys
import typing
from typing import (
Any,
Callable,
Dict,
Generic,
List,
Optional,
Tuple,
Type,
Union,
cast,
)
from typing import Any
from typing import Callable
from typing import cast
from typing import Dict
from typing import Generic
from typing import List
from typing import Optional
from typing import Tuple
from typing import Type
from typing import Union
from . import requirements, specifiers, utils, version as version_module
from . import requirements
from . import specifiers
from . import utils
from . import version as version_module
# Type variable threaded through _Validator so each descriptor knows the
# type of the value it produces.
T = typing.TypeVar('T')
if sys.version_info[:2] >= (3, 8): # pragma: no cover
from typing import Literal, TypedDict
else: # pragma: no cover
@ -52,14 +55,14 @@ except NameError: # pragma: no cover
"""
message: str
exceptions: List[Exception]
exceptions: list[Exception]
def __init__(self, message: str, exceptions: List[Exception]) -> None:
def __init__(self, message: str, exceptions: list[Exception]) -> None:
self.message = message
self.exceptions = exceptions
def __repr__(self) -> str:
return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})"
return f'{self.__class__.__name__}({self.message!r}, {self.exceptions!r})'
else: # pragma: no cover
ExceptionGroup = ExceptionGroup
@ -100,32 +103,32 @@ class RawMetadata(TypedDict, total=False):
metadata_version: str
name: str
version: str
platforms: List[str]
platforms: list[str]
summary: str
description: str
keywords: List[str]
keywords: list[str]
home_page: str
author: str
author_email: str
license: str
# Metadata 1.1 - PEP 314
supported_platforms: List[str]
supported_platforms: list[str]
download_url: str
classifiers: List[str]
requires: List[str]
provides: List[str]
obsoletes: List[str]
classifiers: list[str]
requires: list[str]
provides: list[str]
obsoletes: list[str]
# Metadata 1.2 - PEP 345
maintainer: str
maintainer_email: str
requires_dist: List[str]
provides_dist: List[str]
obsoletes_dist: List[str]
requires_dist: list[str]
provides_dist: list[str]
obsoletes_dist: list[str]
requires_python: str
requires_external: List[str]
project_urls: Dict[str, str]
requires_external: list[str]
project_urls: dict[str, str]
# Metadata 2.0
# PEP 426 attempted to completely revamp the metadata format
@ -138,10 +141,10 @@ class RawMetadata(TypedDict, total=False):
# Metadata 2.1 - PEP 566
description_content_type: str
provides_extra: List[str]
provides_extra: list[str]
# Metadata 2.2 - PEP 643
dynamic: List[str]
dynamic: list[str]
# Metadata 2.3 - PEP 685
# No new fields were added in PEP 685, just some edge case were
@ -149,48 +152,48 @@ class RawMetadata(TypedDict, total=False):
_STRING_FIELDS = {
"author",
"author_email",
"description",
"description_content_type",
"download_url",
"home_page",
"license",
"maintainer",
"maintainer_email",
"metadata_version",
"name",
"requires_python",
"summary",
"version",
'author',
'author_email',
'description',
'description_content_type',
'download_url',
'home_page',
'license',
'maintainer',
'maintainer_email',
'metadata_version',
'name',
'requires_python',
'summary',
'version',
}
_LIST_FIELDS = {
"classifiers",
"dynamic",
"obsoletes",
"obsoletes_dist",
"platforms",
"provides",
"provides_dist",
"provides_extra",
"requires",
"requires_dist",
"requires_external",
"supported_platforms",
'classifiers',
'dynamic',
'obsoletes',
'obsoletes_dist',
'platforms',
'provides',
'provides_dist',
'provides_extra',
'requires',
'requires_dist',
'requires_external',
'supported_platforms',
}
_DICT_FIELDS = {
"project_urls",
'project_urls',
}
def _parse_keywords(data: str) -> List[str]:
def _parse_keywords(data: str) -> list[str]:
"""Split a string of comma-separate keyboards into a list of keywords."""
return [k.strip() for k in data.split(",")]
return [k.strip() for k in data.split(',')]
def _parse_project_urls(data: List[str]) -> Dict[str, str]:
def _parse_project_urls(data: list[str]) -> dict[str, str]:
"""Parse a list of label/URL string pairings separated by a comma."""
urls = {}
for pair in data:
@ -211,8 +214,8 @@ def _parse_project_urls(data: List[str]) -> Dict[str, str]:
# answer with what to do in that case. As such, we'll do the only
# thing we can, which is treat the field as unparseable and add it
# to our list of unparsed fields.
parts = [p.strip() for p in pair.split(",", 1)]
parts.extend([""] * (max(0, 2 - len(parts)))) # Ensure 2 items
parts = [p.strip() for p in pair.split(',', 1)]
parts.extend([''] * (max(0, 2 - len(parts)))) # Ensure 2 items
# TODO: The spec doesn't say anything about if the keys should be
# considered case sensitive or not... logically they should
@ -224,13 +227,13 @@ def _parse_project_urls(data: List[str]) -> Dict[str, str]:
# The label already exists in our set of urls, so this field
# is unparseable, and we can just add the whole thing to our
# unparseable data and stop processing it.
raise KeyError("duplicate labels in project urls")
raise KeyError('duplicate labels in project urls')
urls[label] = url
return urls
def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str:
def _get_payload(msg: email.message.Message, source: bytes | str) -> str:
"""Get the body of the message."""
# If our source is a str, then our caller has managed encodings for us,
# and we don't need to deal with it.
@ -242,9 +245,9 @@ def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str:
else:
bpayload: bytes = msg.get_payload(decode=True)
try:
return bpayload.decode("utf8", "strict")
return bpayload.decode('utf8', 'strict')
except UnicodeDecodeError:
raise ValueError("payload in an invalid encoding")
raise ValueError('payload in an invalid encoding')
# The various parse_FORMAT functions here are intended to be as lenient as
@ -260,39 +263,39 @@ def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str:
# Map METADATA fields to RawMetadata.
_EMAIL_TO_RAW_MAPPING = {
"author": "author",
"author-email": "author_email",
"classifier": "classifiers",
"description": "description",
"description-content-type": "description_content_type",
"download-url": "download_url",
"dynamic": "dynamic",
"home-page": "home_page",
"keywords": "keywords",
"license": "license",
"maintainer": "maintainer",
"maintainer-email": "maintainer_email",
"metadata-version": "metadata_version",
"name": "name",
"obsoletes": "obsoletes",
"obsoletes-dist": "obsoletes_dist",
"platform": "platforms",
"project-url": "project_urls",
"provides": "provides",
"provides-dist": "provides_dist",
"provides-extra": "provides_extra",
"requires": "requires",
"requires-dist": "requires_dist",
"requires-external": "requires_external",
"requires-python": "requires_python",
"summary": "summary",
"supported-platform": "supported_platforms",
"version": "version",
'author': 'author',
'author-email': 'author_email',
'classifier': 'classifiers',
'description': 'description',
'description-content-type': 'description_content_type',
'download-url': 'download_url',
'dynamic': 'dynamic',
'home-page': 'home_page',
'keywords': 'keywords',
'license': 'license',
'maintainer': 'maintainer',
'maintainer-email': 'maintainer_email',
'metadata-version': 'metadata_version',
'name': 'name',
'obsoletes': 'obsoletes',
'obsoletes-dist': 'obsoletes_dist',
'platform': 'platforms',
'project-url': 'project_urls',
'provides': 'provides',
'provides-dist': 'provides_dist',
'provides-extra': 'provides_extra',
'requires': 'requires',
'requires-dist': 'requires_dist',
'requires-external': 'requires_external',
'requires-python': 'requires_python',
'summary': 'summary',
'supported-platform': 'supported_platforms',
'version': 'version',
}
_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()}
def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]:
def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]:
"""Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``).
This function returns a two-item tuple of dicts. The first dict is of
@ -308,8 +311,8 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
included in this dict.
"""
raw: Dict[str, Union[str, List[str], Dict[str, str]]] = {}
unparsed: Dict[str, List[str]] = {}
raw: dict[str, str | list[str] | dict[str, str]] = {}
unparsed: dict[str, list[str]] = {}
if isinstance(data, str):
parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data)
@ -357,16 +360,16 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
# The Header object stores it's data as chunks, and each chunk
# can be independently encoded, so we'll need to check each
# of them.
chunks: List[Tuple[bytes, Optional[str]]] = []
chunks: list[tuple[bytes, str | None]] = []
for bin, encoding in email.header.decode_header(h):
try:
bin.decode("utf8", "strict")
bin.decode('utf8', 'strict')
except UnicodeDecodeError:
# Enable mojibake.
encoding = "latin1"
encoding = 'latin1'
valid_encoding = False
else:
encoding = "utf8"
encoding = 'utf8'
chunks.append((bin, encoding))
# Turn our chunks back into a Header object, then let that
@ -416,7 +419,7 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
# but it conceptually is a list of strings, and is serialized using
# ", ".join(keywords), so we'll do some light data massaging to turn
# this into what it logically is.
elif raw_name == "keywords" and len(value) == 1:
elif raw_name == 'keywords' and len(value) == 1:
raw[raw_name] = _parse_keywords(value[0])
# Special Case: Project-URL
# The project urls is implemented in the metadata spec as a list of
@ -427,7 +430,7 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
#
# We will do a little light data massaging to turn this into a map as
# it logically should be.
elif raw_name == "project_urls":
elif raw_name == 'project_urls':
try:
raw[raw_name] = _parse_project_urls(value)
except KeyError:
@ -444,22 +447,22 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
try:
payload = _get_payload(parsed, data)
except ValueError:
unparsed.setdefault("description", []).append(
parsed.get_payload(decode=isinstance(data, bytes))
unparsed.setdefault('description', []).append(
parsed.get_payload(decode=isinstance(data, bytes)),
)
else:
if payload:
# Check to see if we've already got a description, if so then both
# it, and this body move to unparseable.
if "description" in raw:
description_header = cast(str, raw.pop("description"))
unparsed.setdefault("description", []).extend(
[description_header, payload]
if 'description' in raw:
description_header = cast(str, raw.pop('description'))
unparsed.setdefault('description', []).extend(
[description_header, payload],
)
elif "description" in unparsed:
unparsed["description"].append(payload)
elif 'description' in unparsed:
unparsed['description'].append(payload)
else:
raw["description"] = payload
raw['description'] = payload
# We need to cast our `raw` to a metadata, because a TypedDict only support
# literal key names, but we're computing our key names on purpose, but the
@ -472,10 +475,10 @@ _NOT_FOUND = object()
# Keep the two values in sync.
_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
_VALID_METADATA_VERSIONS = ['1.0', '1.1', '1.2', '2.1', '2.2', '2.3']
_MetadataVersion = Literal['1.0', '1.1', '1.2', '2.1', '2.2', '2.3']
_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"])
_REQUIRED_ATTRS = frozenset(['metadata_version', 'name', 'version'])
class _Validator(Generic[T]):
@ -495,15 +498,15 @@ class _Validator(Generic[T]):
def __init__(
self,
*,
added: _MetadataVersion = "1.0",
added: _MetadataVersion = '1.0',
) -> None:
self.added = added
def __set_name__(self, _owner: Metadata, name: str) -> None:
    # Remember both the attribute name and its email-header (raw) spelling;
    # the raw spelling is what error messages report.
    self.name = name
    self.raw_name = _RAW_TO_EMAIL_MAPPING[name]
def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T:
def __get__(self, instance: Metadata, _owner: type[Metadata]) -> T:
# With Python 3.8, the caching can be replaced with functools.cached_property().
# No need to check the cache as attribute lookup will resolve into the
# instance's __dict__ before __get__ is called.
@ -516,7 +519,7 @@ class _Validator(Generic[T]):
# converters never have to deal with the None union.
if self.name in _REQUIRED_ATTRS or value is not None:
try:
converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}")
converter: Callable[[Any], T] = getattr(self, f'_process_{self.name}')
except AttributeError:
pass
else:
@ -531,10 +534,10 @@ class _Validator(Generic[T]):
return cast(T, value)
def _invalid_metadata(self, msg: str, cause: Exception | None = None) -> InvalidMetadata:
    """Build an InvalidMetadata for this field.

    Any ``{field}`` placeholder in *msg* is replaced with the field's raw
    (email header) name; *cause* is attached as ``__cause__`` when given.
    """
    formatted = msg.format_map({'field': repr(self.raw_name)})
    exc = InvalidMetadata(self.raw_name, formatted)
    exc.__cause__ = cause
    return exc
@ -542,91 +545,91 @@ class _Validator(Generic[T]):
def _process_metadata_version(self, value: str) -> _MetadataVersion:
    # Rejecting unknown values implicitly makes Metadata-Version required.
    if value in _VALID_METADATA_VERSIONS:
        return cast(_MetadataVersion, value)
    raise self._invalid_metadata(f'{value!r} is not a valid metadata version')
def _process_name(self, value: str) -> str:
    """Validate the (required) Name field, returning it unmodified."""
    if not value:
        raise self._invalid_metadata('{field} is a required field')
    try:
        # Canonicalization is done purely for its validation side effect;
        # the original spelling is what gets returned.
        utils.canonicalize_name(value, validate=True)
    except utils.InvalidName as exc:
        raise self._invalid_metadata(f'{value!r} is invalid for {{field}}', cause=exc)
    return value
def _process_version(self, value: str) -> version_module.Version:
    """Parse the Version field, which must be present and parseable."""
    if value:
        try:
            return version_module.parse(value)
        except version_module.InvalidVersion as exc:
            raise self._invalid_metadata(
                f'{value!r} is invalid for {{field}}', cause=exc,
            )
    raise self._invalid_metadata('{field} is a required field')
def _process_summary(self, value: str) -> str:
"""Check the field contains no newlines."""
if "\n" in value:
raise self._invalid_metadata("{field} must be a single line")
if '\n' in value:
raise self._invalid_metadata('{field} must be a single line')
return value
def _process_description_content_type(self, value: str) -> str:
content_types = {"text/plain", "text/x-rst", "text/markdown"}
content_types = {'text/plain', 'text/x-rst', 'text/markdown'}
message = email.message.EmailMessage()
message["content-type"] = value
message['content-type'] = value
content_type, parameters = (
# Defaults to `text/plain` if parsing failed.
message.get_content_type().lower(),
message["content-type"].params,
message['content-type'].params,
)
# Check if content-type is valid or defaulted to `text/plain` and thus was
# not parseable.
if content_type not in content_types or content_type not in value.lower():
raise self._invalid_metadata(
f"{{field}} must be one of {list(content_types)}, not {value!r}"
f'{{field}} must be one of {list(content_types)}, not {value!r}',
)
charset = parameters.get("charset", "UTF-8")
if charset != "UTF-8":
charset = parameters.get('charset', 'UTF-8')
if charset != 'UTF-8':
raise self._invalid_metadata(
f"{{field}} can only specify the UTF-8 charset, not {list(charset)}"
f'{{field}} can only specify the UTF-8 charset, not {list(charset)}',
)
markdown_variants = {"GFM", "CommonMark"}
variant = parameters.get("variant", "GFM") # Use an acceptable default.
if content_type == "text/markdown" and variant not in markdown_variants:
markdown_variants = {'GFM', 'CommonMark'}
variant = parameters.get('variant', 'GFM') # Use an acceptable default.
if content_type == 'text/markdown' and variant not in markdown_variants:
raise self._invalid_metadata(
f"valid Markdown variants for {{field}} are {list(markdown_variants)}, "
f"not {variant!r}",
f'valid Markdown variants for {{field}} are {list(markdown_variants)}, '
f'not {variant!r}',
)
return value
def _process_dynamic(self, value: List[str]) -> List[str]:
def _process_dynamic(self, value: list[str]) -> list[str]:
for dynamic_field in map(str.lower, value):
if dynamic_field in {"name", "version", "metadata-version"}:
if dynamic_field in {'name', 'version', 'metadata-version'}:
raise self._invalid_metadata(
f"{value!r} is not allowed as a dynamic field"
f'{value!r} is not allowed as a dynamic field',
)
elif dynamic_field not in _EMAIL_TO_RAW_MAPPING:
raise self._invalid_metadata(f"{value!r} is not a valid dynamic field")
raise self._invalid_metadata(f'{value!r} is not a valid dynamic field')
return list(map(str.lower, value))
def _process_provides_extra(
self,
value: List[str],
) -> List[utils.NormalizedName]:
value: list[str],
) -> list[utils.NormalizedName]:
normalized_names = []
try:
for name in value:
normalized_names.append(utils.canonicalize_name(name, validate=True))
except utils.InvalidName as exc:
raise self._invalid_metadata(
f"{name!r} is invalid for {{field}}", cause=exc
f'{name!r} is invalid for {{field}}', cause=exc,
)
else:
return normalized_names
@ -636,19 +639,19 @@ class _Validator(Generic[T]):
return specifiers.SpecifierSet(value)
except specifiers.InvalidSpecifier as exc:
raise self._invalid_metadata(
f"{value!r} is invalid for {{field}}", cause=exc
f'{value!r} is invalid for {{field}}', cause=exc,
)
def _process_requires_dist(
self,
value: List[str],
) -> List[requirements.Requirement]:
value: list[str],
) -> list[requirements.Requirement]:
reqs = []
try:
for req in value:
reqs.append(requirements.Requirement(req))
except requirements.InvalidRequirement as exc:
raise self._invalid_metadata(f"{req!r} is invalid for {{field}}", cause=exc)
raise self._invalid_metadata(f'{req!r} is invalid for {{field}}', cause=exc)
else:
return reqs
@ -665,7 +668,7 @@ class Metadata:
_raw: RawMetadata
@classmethod
def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata":
def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> Metadata:
"""Create an instance from :class:`RawMetadata`.
If *validate* is true, all metadata will be validated. All exceptions
@ -675,7 +678,7 @@ class Metadata:
ins._raw = data.copy() # Mutations occur due to caching enriched values.
if validate:
exceptions: List[Exception] = []
exceptions: list[Exception] = []
try:
metadata_version = ins.metadata_version
metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version)
@ -687,7 +690,7 @@ class Metadata:
# fields (so their absence can be reported).
fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS
# Remove fields that have already been checked.
fields_to_check -= {"metadata_version"}
fields_to_check -= {'metadata_version'}
for key in fields_to_check:
try:
@ -697,18 +700,18 @@ class Metadata:
try:
field_metadata_version = cls.__dict__[key].added
except KeyError:
exc = InvalidMetadata(key, f"unrecognized field: {key!r}")
exc = InvalidMetadata(key, f'unrecognized field: {key!r}')
exceptions.append(exc)
continue
field_age = _VALID_METADATA_VERSIONS.index(
field_metadata_version
field_metadata_version,
)
if field_age > metadata_age:
field = _RAW_TO_EMAIL_MAPPING[key]
exc = InvalidMetadata(
field,
"{field} introduced in metadata version "
"{field_metadata_version}, not {metadata_version}",
'{field} introduced in metadata version '
'{field_metadata_version}, not {metadata_version}',
)
exceptions.append(exc)
continue
@ -717,14 +720,14 @@ class Metadata:
exceptions.append(exc)
if exceptions:
raise ExceptionGroup("invalid metadata", exceptions)
raise ExceptionGroup('invalid metadata', exceptions)
return ins
@classmethod
def from_email(
cls, data: Union[bytes, str], *, validate: bool = True
) -> "Metadata":
cls, data: bytes | str, *, validate: bool = True,
) -> Metadata:
"""Parse metadata from email headers.
If *validate* is true, the metadata will be validated. All exceptions
@ -736,19 +739,19 @@ class Metadata:
exceptions: list[Exception] = []
for unparsed_key in unparsed:
if unparsed_key in _EMAIL_TO_RAW_MAPPING:
message = f"{unparsed_key!r} has invalid data"
message = f'{unparsed_key!r} has invalid data'
else:
message = f"unrecognized field: {unparsed_key!r}"
message = f'unrecognized field: {unparsed_key!r}'
exceptions.append(InvalidMetadata(unparsed_key, message))
if exceptions:
raise ExceptionGroup("unparsed", exceptions)
raise ExceptionGroup('unparsed', exceptions)
try:
return cls.from_raw(raw, validate=validate)
except ExceptionGroup as exc_group:
raise ExceptionGroup(
"invalid or unparsed metadata", exc_group.exceptions
'invalid or unparsed metadata', exc_group.exceptions,
) from None
metadata_version: _Validator[_MetadataVersion] = _Validator()
@ -760,66 +763,66 @@ class Metadata:
*validate* parameter)"""
version: _Validator[version_module.Version] = _Validator()
""":external:ref:`core-metadata-version` (required)"""
dynamic: _Validator[list[str] | None] = _Validator(added='2.2')
""":external:ref:`core-metadata-dynamic`
(validated against core metadata field names and lowercased)"""
platforms: _Validator[list[str] | None] = _Validator()
""":external:ref:`core-metadata-platform`"""
supported_platforms: _Validator[list[str] | None] = _Validator(added='1.1')
""":external:ref:`core-metadata-supported-platform`"""
summary: _Validator[str | None] = _Validator()
""":external:ref:`core-metadata-summary` (validated to contain no newlines)"""
description: _Validator[str | None] = _Validator()  # TODO 2.1: can be in body
""":external:ref:`core-metadata-description`"""
description_content_type: _Validator[str | None] = _Validator(added='2.1')
""":external:ref:`core-metadata-description-content-type` (validated)"""
keywords: _Validator[list[str] | None] = _Validator()
""":external:ref:`core-metadata-keywords`"""
home_page: _Validator[str | None] = _Validator()
""":external:ref:`core-metadata-home-page`"""
download_url: _Validator[str | None] = _Validator(added='1.1')
""":external:ref:`core-metadata-download-url`"""
author: _Validator[str | None] = _Validator()
""":external:ref:`core-metadata-author`"""
author_email: _Validator[str | None] = _Validator()
""":external:ref:`core-metadata-author-email`"""
maintainer: _Validator[str | None] = _Validator(added='1.2')
""":external:ref:`core-metadata-maintainer`"""
maintainer_email: _Validator[str | None] = _Validator(added='1.2')
""":external:ref:`core-metadata-maintainer-email`"""
license: _Validator[str | None] = _Validator()
""":external:ref:`core-metadata-license`"""
classifiers: _Validator[list[str] | None] = _Validator(added='1.1')
""":external:ref:`core-metadata-classifier`"""
requires_dist: _Validator[list[requirements.Requirement] | None] = _Validator(added='1.2')
""":external:ref:`core-metadata-requires-dist`"""
requires_python: _Validator[specifiers.SpecifierSet | None] = _Validator(added='1.2')
""":external:ref:`core-metadata-requires-python`"""
# Because `Requires-External` allows for non-PEP 440 version specifiers, we
# don't do any processing on the values.
requires_external: _Validator[list[str] | None] = _Validator(added='1.2')
""":external:ref:`core-metadata-requires-external`"""
project_urls: _Validator[dict[str, str] | None] = _Validator(added='1.2')
""":external:ref:`core-metadata-project-url`"""
# PEP 685 lets us raise an error if an extra doesn't pass `Name` validation
# regardless of metadata version.
provides_extra: _Validator[list[utils.NormalizedName] | None] = _Validator(added='2.1')
""":external:ref:`core-metadata-provides-extra`"""
provides_dist: _Validator[list[str] | None] = _Validator(added='1.2')
""":external:ref:`core-metadata-provides-dist`"""
obsoletes_dist: _Validator[list[str] | None] = _Validator(added='1.2')
""":external:ref:`core-metadata-obsoletes-dist`"""
requires: _Validator[list[str] | None] = _Validator(added='1.1')
"""``Requires`` (deprecated)"""
provides: _Validator[list[str] | None] = _Validator(added='1.1')
"""``Provides`` (deprecated)"""
obsoletes: _Validator[list[str] | None] = _Validator(added='1.1')
"""``Obsoletes`` (deprecated)"""