Mirror of https://github.com/Tautulli/Tautulli.git, synced 2025-07-12 08:16:06 -07:00
Bump packaging from 23.1 to 24.0 (#2274)
* Bump packaging from 23.1 to 24.0

Bumps [packaging](https://github.com/pypa/packaging) from 23.1 to 24.0.
- [Release notes](https://github.com/pypa/packaging/releases)
- [Changelog](https://github.com/pypa/packaging/blob/main/CHANGELOG.rst)
- [Commits](https://github.com/pypa/packaging/compare/23.1...24.0)

---
updated-dependencies:
- dependency-name: packaging
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>

* Update packaging==24.0

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>

[skip ci]
This commit is contained in:
parent a0170a6f3d
commit 4398dfa821

11 changed files with 663 additions and 161 deletions
@@ -6,10 +6,10 @@ __title__ = "packaging"
 __summary__ = "Core utilities for Python packages"
 __uri__ = "https://github.com/pypa/packaging"
 
-__version__ = "23.1"
+__version__ = "24.0"
 
 __author__ = "Donald Stufft and individual contributors"
 __email__ = "donald@stufft.io"
 
 __license__ = "BSD-2-Clause or Apache-2.0"
-__copyright__ = "2014-2019 %s" % __author__
+__copyright__ = "2014 %s" % __author__

@@ -5,7 +5,7 @@ import os
 import re
 import sys
 import warnings
-from typing import Dict, Generator, Iterator, NamedTuple, Optional, Tuple
+from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple
 
 from ._elffile import EIClass, EIData, ELFFile, EMachine
 
@@ -50,12 +50,21 @@ def _is_linux_i686(executable: str) -> bool:
     )
 
 
-def _have_compatible_abi(executable: str, arch: str) -> bool:
-    if arch == "armv7l":
+def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
+    if "armv7l" in archs:
         return _is_linux_armhf(executable)
-    if arch == "i686":
+    if "i686" in archs:
         return _is_linux_i686(executable)
-    return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
+    allowed_archs = {
+        "x86_64",
+        "aarch64",
+        "ppc64",
+        "ppc64le",
+        "s390x",
+        "loongarch64",
+        "riscv64",
+    }
+    return any(arch in allowed_archs for arch in archs)
 
 
 # If glibc ever changes its major version, we need to know what the last
@@ -81,7 +90,7 @@ def _glibc_version_string_confstr() -> Optional[str]:
     # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
     try:
         # Should be a string like "glibc 2.17".
-        version_string: str = getattr(os, "confstr")("CS_GNU_LIBC_VERSION")
+        version_string: Optional[str] = os.confstr("CS_GNU_LIBC_VERSION")
         assert version_string is not None
        _, version = version_string.rsplit()
     except (AssertionError, AttributeError, OSError, ValueError):
@@ -167,13 +176,13 @@ def _get_glibc_version() -> Tuple[int, int]:
 
 
 # From PEP 513, PEP 600
-def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
+def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
     sys_glibc = _get_glibc_version()
     if sys_glibc < version:
         return False
     # Check for presence of _manylinux module.
     try:
-        import _manylinux  # noqa
+        import _manylinux
     except ImportError:
         return True
     if hasattr(_manylinux, "manylinux_compatible"):
@@ -203,12 +212,22 @@ _LEGACY_MANYLINUX_MAP = {
 }
 
 
-def platform_tags(linux: str, arch: str) -> Iterator[str]:
-    if not _have_compatible_abi(sys.executable, arch):
+def platform_tags(archs: Sequence[str]) -> Iterator[str]:
+    """Generate manylinux tags compatible to the current platform.
+
+    :param archs: Sequence of compatible architectures.
+        The first one shall be the closest to the actual architecture and be the part of
+        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
+        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
+        be manylinux-compatible.
+
+    :returns: An iterator of compatible manylinux tags.
+    """
+    if not _have_compatible_abi(sys.executable, archs):
         return
     # Oldest glibc to be supported regardless of architecture is (2, 17).
     too_old_glibc2 = _GLibCVersion(2, 16)
-    if arch in {"x86_64", "i686"}:
+    if set(archs) & {"x86_64", "i686"}:
         # On x86/i686 also oldest glibc to be supported is (2, 5).
         too_old_glibc2 = _GLibCVersion(2, 4)
     current_glibc = _GLibCVersion(*_get_glibc_version())
@@ -222,19 +241,20 @@ def platform_tags(archs: Sequence[str]) -> Iterator[str]:
     for glibc_major in range(current_glibc.major - 1, 1, -1):
         glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
         glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
-    for glibc_max in glibc_max_list:
-        if glibc_max.major == too_old_glibc2.major:
-            min_minor = too_old_glibc2.minor
-        else:
-            # For other glibc major versions oldest supported is (x, 0).
-            min_minor = -1
-        for glibc_minor in range(glibc_max.minor, min_minor, -1):
-            glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
-            tag = "manylinux_{}_{}".format(*glibc_version)
-            if _is_compatible(tag, arch, glibc_version):
-                yield linux.replace("linux", tag)
-            # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
-            if glibc_version in _LEGACY_MANYLINUX_MAP:
-                legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
-                if _is_compatible(legacy_tag, arch, glibc_version):
-                    yield linux.replace("linux", legacy_tag)
+    for arch in archs:
+        for glibc_max in glibc_max_list:
+            if glibc_max.major == too_old_glibc2.major:
+                min_minor = too_old_glibc2.minor
+            else:
+                # For other glibc major versions oldest supported is (x, 0).
+                min_minor = -1
+            for glibc_minor in range(glibc_max.minor, min_minor, -1):
+                glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
+                tag = "manylinux_{}_{}".format(*glibc_version)
+                if _is_compatible(arch, glibc_version):
+                    yield f"{tag}_{arch}"
+                # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
+                if glibc_version in _LEGACY_MANYLINUX_MAP:
+                    legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
+                    if _is_compatible(arch, glibc_version):
+                        yield f"{legacy_tag}_{arch}"

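The practical effect of the manylinux change above: `platform_tags()` now takes a sequence of architectures and yields one tag per compatible arch, which is what lets an armv8l interpreter fall back to armv7l wheels. A minimal sketch of calling it — note `_manylinux` is a private module and the printed tags depend entirely on the host's glibc:

```python
# Illustrative only: private API, host-dependent output (glibc-based Linux).
from packaging import _manylinux

tags = list(_manylinux.platform_tags(["x86_64"]))
print(tags[:2])  # e.g. ['manylinux_2_35_x86_64', 'manylinux_2_34_x86_64']
```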
@@ -8,7 +8,7 @@ import functools
 import re
 import subprocess
 import sys
-from typing import Iterator, NamedTuple, Optional
+from typing import Iterator, NamedTuple, Optional, Sequence
 
 from ._elffile import ELFFile
 
@@ -47,24 +47,27 @@ def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
         return None
     if ld is None or "musl" not in ld:
         return None
-    proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True)
+    proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True)
     return _parse_musl_version(proc.stderr)
 
 
-def platform_tags(arch: str) -> Iterator[str]:
+def platform_tags(archs: Sequence[str]) -> Iterator[str]:
     """Generate musllinux tags compatible to the current platform.
 
-    :param arch: Should be the part of platform tag after the ``linux_``
-        prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a
-        prerequisite for the current platform to be musllinux-compatible.
+    :param archs: Sequence of compatible architectures.
+        The first one shall be the closest to the actual architecture and be the part of
+        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
+        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
+        be musllinux-compatible.
 
     :returns: An iterator of compatible musllinux tags.
     """
     sys_musl = _get_musl_version(sys.executable)
     if sys_musl is None:  # Python not dynamically linked against musl.
         return
-    for minor in range(sys_musl.minor, -1, -1):
-        yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
+    for arch in archs:
+        for minor in range(sys_musl.minor, -1, -1):
+            yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
 
 
 if __name__ == "__main__":  # pragma: no cover

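The musllinux counterpart mirrors that signature change. A sketch of the expected behavior on a musl-based distribution such as Alpine (again a private API; the generator yields nothing unless the interpreter is linked against musl):

```python
# Illustrative only: empty output on glibc systems.
from packaging import _musllinux

print(list(_musllinux.platform_tags(["x86_64"])))
# e.g. ['musllinux_1_2_x86_64', 'musllinux_1_1_x86_64', 'musllinux_1_0_x86_64']
```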
@@ -252,7 +252,13 @@ def _parse_version_many(tokenizer: Tokenizer) -> str:
 # Recursive descent parser for marker expression
 # --------------------------------------------------------------------------------------
 def parse_marker(source: str) -> MarkerList:
-    return _parse_marker(Tokenizer(source, rules=DEFAULT_RULES))
+    return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES))
+
+
+def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList:
+    retval = _parse_marker(tokenizer)
+    tokenizer.expect("END", expected="end of marker expression")
+    return retval
 
 
 def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
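The new `_parse_full_marker()` wrapper makes the tokenizer assert that the whole input was consumed, so trailing garbage after a valid marker should now be rejected rather than silently ignored. A quick expected-behavior check (assuming, as in the existing code, that the parser error surfaces as `InvalidMarker` through the public class):

```python
from packaging.markers import InvalidMarker, Marker

Marker('python_version >= "3.8"')  # still parses fine
try:
    Marker('python_version >= "3.8" nonsense')  # trailing tokens
except InvalidMarker as exc:
    print(exc)  # expected: complaint about end of marker expression
```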
@@ -318,10 +324,7 @@ def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:
 
 
 def process_env_var(env_var: str) -> Variable:
-    if (
-        env_var == "platform_python_implementation"
-        or env_var == "python_implementation"
-    ):
+    if env_var in ("platform_python_implementation", "python_implementation"):
         return Variable("platform_python_implementation")
     else:
         return Variable(env_var)

@@ -5,23 +5,77 @@ import email.parser
 import email.policy
 import sys
 import typing
-from typing import Dict, List, Optional, Tuple, Union, cast
+from typing import (
+    Any,
+    Callable,
+    Dict,
+    Generic,
+    List,
+    Optional,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
 
-if sys.version_info >= (3, 8):  # pragma: no cover
-    from typing import TypedDict
+from . import requirements, specifiers, utils, version as version_module
+
+T = typing.TypeVar("T")
+if sys.version_info[:2] >= (3, 8):  # pragma: no cover
+    from typing import Literal, TypedDict
 else:  # pragma: no cover
     if typing.TYPE_CHECKING:
-        from typing_extensions import TypedDict
+        from typing_extensions import Literal, TypedDict
     else:
         try:
-            from typing_extensions import TypedDict
+            from typing_extensions import Literal, TypedDict
         except ImportError:
 
+            class Literal:
+                def __init_subclass__(*_args, **_kwargs):
+                    pass
+
             class TypedDict:
                 def __init_subclass__(*_args, **_kwargs):
                     pass
 
 
+try:
+    ExceptionGroup
+except NameError:  # pragma: no cover
+
+    class ExceptionGroup(Exception):  # noqa: N818
+        """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.
+
+        If :external:exc:`ExceptionGroup` is already defined by Python itself,
+        that version is used instead.
+        """
+
+        message: str
+        exceptions: List[Exception]
+
+        def __init__(self, message: str, exceptions: List[Exception]) -> None:
+            self.message = message
+            self.exceptions = exceptions
+
+        def __repr__(self) -> str:
+            return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})"
+
+else:  # pragma: no cover
+    ExceptionGroup = ExceptionGroup
+
+
+class InvalidMetadata(ValueError):
+    """A metadata field contains invalid data."""
+
+    field: str
+    """The name of the field that contains invalid data."""
+
+    def __init__(self, field: str, message: str) -> None:
+        self.field = field
+        super().__init__(message)
+
+
 # The RawMetadata class attempts to make as few assumptions about the underlying
 # serialization formats as possible. The idea is that as long as a serialization
 # formats offer some very basic primitives in *some* way then we can support
@@ -33,7 +87,8 @@ class RawMetadata(TypedDict, total=False):
     provided). The key is lower-case and underscores are used instead of dashes
     compared to the equivalent core metadata field. Any core metadata field that
     can be specified multiple times or can hold multiple values in a single
-    field have a key with a plural name.
+    field have a key with a plural name. See :class:`Metadata` whose attributes
+    match the keys of this dictionary.
 
     Core metadata fields that can be specified multiple times are stored as a
     list or dict depending on which is appropriate for the field. Any fields
@@ -77,7 +132,7 @@ class RawMetadata(TypedDict, total=False):
     # but got stuck without ever being able to build consensus on
     # it and ultimately ended up withdrawn.
     #
-    # However, a number of tools had started emiting METADATA with
+    # However, a number of tools had started emitting METADATA with
     # `2.0` Metadata-Version, so for historical reasons, this version
     # was skipped.
 
@@ -110,7 +165,7 @@ _STRING_FIELDS = {
     "version",
 }
 
-_LIST_STRING_FIELDS = {
+_LIST_FIELDS = {
     "classifiers",
     "dynamic",
     "obsoletes",
@@ -125,6 +180,10 @@ _LIST_STRING_FIELDS = {
     "supported_platforms",
 }
 
+_DICT_FIELDS = {
+    "project_urls",
+}
+
 
 def _parse_keywords(data: str) -> List[str]:
     """Split a string of comma-separate keyboards into a list of keywords."""
@@ -230,10 +289,11 @@ _EMAIL_TO_RAW_MAPPING = {
     "supported-platform": "supported_platforms",
     "version": "version",
 }
+_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()}
 
 
 def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]:
-    """Parse a distribution's metadata.
+    """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``).
 
     This function returns a two-item tuple of dicts. The first dict is of
     recognized fields from the core metadata specification. Fields that can be
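`parse_email()` keeps its two-dict contract: recognized fields (now routed through `_LIST_FIELDS` and `_DICT_FIELDS`) land in the first dict, anything unparseable in the second. Minimal usage, with a made-up project name:

```python
from packaging.metadata import parse_email

raw, unparsed = parse_email(
    "Metadata-Version: 2.1\nName: demo-pkg\nVersion: 1.0\n"
)
print(raw["name"], raw["version"])  # demo-pkg 1.0
print(unparsed)                     # {}
```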
@@ -267,7 +327,7 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]:
         # We use get_all() here, even for fields that aren't multiple use,
         # because otherwise someone could have e.g. two Name fields, and we
         # would just silently ignore it rather than doing something about it.
-        headers = parsed.get_all(name)
+        headers = parsed.get_all(name) or []
 
         # The way the email module works when parsing bytes is that it
         # unconditionally decodes the bytes as ascii using the surrogateescape
@@ -349,7 +409,7 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]:
         # If this is one of our list of string fields, then we can just assign
         # the value, since email *only* has strings, and our get_all() call
         # above ensures that this is a list.
-        elif raw_name in _LIST_STRING_FIELDS:
+        elif raw_name in _LIST_FIELDS:
             raw[raw_name] = value
         # Special Case: Keywords
         # The keywords field is implemented in the metadata spec as a str,
@@ -406,3 +466,360 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]:
     # way this function is implemented, our `TypedDict` can only have valid key
     # names.
     return cast(RawMetadata, raw), unparsed
+
+
+_NOT_FOUND = object()
+
+
+# Keep the two values in sync.
+_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
+_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
+
+_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"])
+
+
+class _Validator(Generic[T]):
+    """Validate a metadata field.
+
+    All _process_*() methods correspond to a core metadata field. The method is
+    called with the field's raw value. If the raw value is valid it is returned
+    in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field).
+    If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause
+    as appropriate).
+    """
+
+    name: str
+    raw_name: str
+    added: _MetadataVersion
+
+    def __init__(
+        self,
+        *,
+        added: _MetadataVersion = "1.0",
+    ) -> None:
+        self.added = added
+
+    def __set_name__(self, _owner: "Metadata", name: str) -> None:
+        self.name = name
+        self.raw_name = _RAW_TO_EMAIL_MAPPING[name]
+
+    def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T:
+        # With Python 3.8, the caching can be replaced with functools.cached_property().
+        # No need to check the cache as attribute lookup will resolve into the
+        # instance's __dict__ before __get__ is called.
+        cache = instance.__dict__
+        value = instance._raw.get(self.name)
+
+        # To make the _process_* methods easier, we'll check if the value is None
+        # and if this field is NOT a required attribute, and if both of those
+        # things are true, we'll skip the the converter. This will mean that the
+        # converters never have to deal with the None union.
+        if self.name in _REQUIRED_ATTRS or value is not None:
+            try:
+                converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}")
+            except AttributeError:
+                pass
+            else:
+                value = converter(value)
+
+        cache[self.name] = value
+        try:
+            del instance._raw[self.name]  # type: ignore[misc]
+        except KeyError:
+            pass
+
+        return cast(T, value)
+
+    def _invalid_metadata(
+        self, msg: str, cause: Optional[Exception] = None
+    ) -> InvalidMetadata:
+        exc = InvalidMetadata(
+            self.raw_name, msg.format_map({"field": repr(self.raw_name)})
+        )
+        exc.__cause__ = cause
+        return exc
+
+    def _process_metadata_version(self, value: str) -> _MetadataVersion:
+        # Implicitly makes Metadata-Version required.
+        if value not in _VALID_METADATA_VERSIONS:
+            raise self._invalid_metadata(f"{value!r} is not a valid metadata version")
+        return cast(_MetadataVersion, value)
+
+    def _process_name(self, value: str) -> str:
+        if not value:
+            raise self._invalid_metadata("{field} is a required field")
+        # Validate the name as a side-effect.
+        try:
+            utils.canonicalize_name(value, validate=True)
+        except utils.InvalidName as exc:
+            raise self._invalid_metadata(
+                f"{value!r} is invalid for {{field}}", cause=exc
+            )
+        else:
+            return value
+
+    def _process_version(self, value: str) -> version_module.Version:
+        if not value:
+            raise self._invalid_metadata("{field} is a required field")
+        try:
+            return version_module.parse(value)
+        except version_module.InvalidVersion as exc:
+            raise self._invalid_metadata(
+                f"{value!r} is invalid for {{field}}", cause=exc
+            )
+
+    def _process_summary(self, value: str) -> str:
+        """Check the field contains no newlines."""
+        if "\n" in value:
+            raise self._invalid_metadata("{field} must be a single line")
+        return value
+
+    def _process_description_content_type(self, value: str) -> str:
+        content_types = {"text/plain", "text/x-rst", "text/markdown"}
+        message = email.message.EmailMessage()
+        message["content-type"] = value
+
+        content_type, parameters = (
+            # Defaults to `text/plain` if parsing failed.
+            message.get_content_type().lower(),
+            message["content-type"].params,
+        )
+        # Check if content-type is valid or defaulted to `text/plain` and thus was
+        # not parseable.
+        if content_type not in content_types or content_type not in value.lower():
+            raise self._invalid_metadata(
+                f"{{field}} must be one of {list(content_types)}, not {value!r}"
+            )
+
+        charset = parameters.get("charset", "UTF-8")
+        if charset != "UTF-8":
+            raise self._invalid_metadata(
+                f"{{field}} can only specify the UTF-8 charset, not {list(charset)}"
+            )
+
+        markdown_variants = {"GFM", "CommonMark"}
+        variant = parameters.get("variant", "GFM")  # Use an acceptable default.
+        if content_type == "text/markdown" and variant not in markdown_variants:
+            raise self._invalid_metadata(
+                f"valid Markdown variants for {{field}} are {list(markdown_variants)}, "
+                f"not {variant!r}",
+            )
+        return value
+
+    def _process_dynamic(self, value: List[str]) -> List[str]:
+        for dynamic_field in map(str.lower, value):
+            if dynamic_field in {"name", "version", "metadata-version"}:
+                raise self._invalid_metadata(
+                    f"{value!r} is not allowed as a dynamic field"
+                )
+            elif dynamic_field not in _EMAIL_TO_RAW_MAPPING:
+                raise self._invalid_metadata(f"{value!r} is not a valid dynamic field")
+        return list(map(str.lower, value))
+
+    def _process_provides_extra(
+        self,
+        value: List[str],
+    ) -> List[utils.NormalizedName]:
+        normalized_names = []
+        try:
+            for name in value:
+                normalized_names.append(utils.canonicalize_name(name, validate=True))
+        except utils.InvalidName as exc:
+            raise self._invalid_metadata(
+                f"{name!r} is invalid for {{field}}", cause=exc
+            )
+        else:
+            return normalized_names
+
+    def _process_requires_python(self, value: str) -> specifiers.SpecifierSet:
+        try:
+            return specifiers.SpecifierSet(value)
+        except specifiers.InvalidSpecifier as exc:
+            raise self._invalid_metadata(
+                f"{value!r} is invalid for {{field}}", cause=exc
+            )
+
+    def _process_requires_dist(
+        self,
+        value: List[str],
+    ) -> List[requirements.Requirement]:
+        reqs = []
+        try:
+            for req in value:
+                reqs.append(requirements.Requirement(req))
+        except requirements.InvalidRequirement as exc:
+            raise self._invalid_metadata(f"{req!r} is invalid for {{field}}", cause=exc)
+        else:
+            return reqs
+
+
+class Metadata:
+    """Representation of distribution metadata.
+
+    Compared to :class:`RawMetadata`, this class provides objects representing
+    metadata fields instead of only using built-in types. Any invalid metadata
+    will cause :exc:`InvalidMetadata` to be raised (with a
+    :py:attr:`~BaseException.__cause__` attribute as appropriate).
+    """
+
+    _raw: RawMetadata
+
+    @classmethod
+    def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata":
+        """Create an instance from :class:`RawMetadata`.
+
+        If *validate* is true, all metadata will be validated. All exceptions
+        related to validation will be gathered and raised as an :class:`ExceptionGroup`.
+        """
+        ins = cls()
+        ins._raw = data.copy()  # Mutations occur due to caching enriched values.
+
+        if validate:
+            exceptions: List[Exception] = []
+            try:
+                metadata_version = ins.metadata_version
+                metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version)
+            except InvalidMetadata as metadata_version_exc:
+                exceptions.append(metadata_version_exc)
+                metadata_version = None
+
+            # Make sure to check for the fields that are present, the required
+            # fields (so their absence can be reported).
+            fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS
+            # Remove fields that have already been checked.
+            fields_to_check -= {"metadata_version"}
+
+            for key in fields_to_check:
+                try:
+                    if metadata_version:
+                        # Can't use getattr() as that triggers descriptor protocol which
+                        # will fail due to no value for the instance argument.
+                        try:
+                            field_metadata_version = cls.__dict__[key].added
+                        except KeyError:
+                            exc = InvalidMetadata(key, f"unrecognized field: {key!r}")
+                            exceptions.append(exc)
+                            continue
+                        field_age = _VALID_METADATA_VERSIONS.index(
+                            field_metadata_version
+                        )
+                        if field_age > metadata_age:
+                            field = _RAW_TO_EMAIL_MAPPING[key]
+                            exc = InvalidMetadata(
+                                field,
+                                "{field} introduced in metadata version "
+                                "{field_metadata_version}, not {metadata_version}",
+                            )
+                            exceptions.append(exc)
+                            continue
+                    getattr(ins, key)
+                except InvalidMetadata as exc:
+                    exceptions.append(exc)
+
+            if exceptions:
+                raise ExceptionGroup("invalid metadata", exceptions)
+
+        return ins
+
+    @classmethod
+    def from_email(
+        cls, data: Union[bytes, str], *, validate: bool = True
+    ) -> "Metadata":
+        """Parse metadata from email headers.
+
+        If *validate* is true, the metadata will be validated. All exceptions
+        related to validation will be gathered and raised as an :class:`ExceptionGroup`.
+        """
+        raw, unparsed = parse_email(data)
+
+        if validate:
+            exceptions: list[Exception] = []
+            for unparsed_key in unparsed:
+                if unparsed_key in _EMAIL_TO_RAW_MAPPING:
+                    message = f"{unparsed_key!r} has invalid data"
+                else:
+                    message = f"unrecognized field: {unparsed_key!r}"
+                exceptions.append(InvalidMetadata(unparsed_key, message))
+
+            if exceptions:
+                raise ExceptionGroup("unparsed", exceptions)
+
+        try:
+            return cls.from_raw(raw, validate=validate)
+        except ExceptionGroup as exc_group:
+            raise ExceptionGroup(
+                "invalid or unparsed metadata", exc_group.exceptions
+            ) from None
+
+    metadata_version: _Validator[_MetadataVersion] = _Validator()
+    """:external:ref:`core-metadata-metadata-version`
+    (required; validated to be a valid metadata version)"""
+    name: _Validator[str] = _Validator()
+    """:external:ref:`core-metadata-name`
+    (required; validated using :func:`~packaging.utils.canonicalize_name` and its
+    *validate* parameter)"""
+    version: _Validator[version_module.Version] = _Validator()
+    """:external:ref:`core-metadata-version` (required)"""
+    dynamic: _Validator[Optional[List[str]]] = _Validator(
+        added="2.2",
+    )
+    """:external:ref:`core-metadata-dynamic`
+    (validated against core metadata field names and lowercased)"""
+    platforms: _Validator[Optional[List[str]]] = _Validator()
+    """:external:ref:`core-metadata-platform`"""
+    supported_platforms: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    """:external:ref:`core-metadata-supported-platform`"""
+    summary: _Validator[Optional[str]] = _Validator()
+    """:external:ref:`core-metadata-summary` (validated to contain no newlines)"""
+    description: _Validator[Optional[str]] = _Validator()  # TODO 2.1: can be in body
+    """:external:ref:`core-metadata-description`"""
+    description_content_type: _Validator[Optional[str]] = _Validator(added="2.1")
+    """:external:ref:`core-metadata-description-content-type` (validated)"""
+    keywords: _Validator[Optional[List[str]]] = _Validator()
+    """:external:ref:`core-metadata-keywords`"""
+    home_page: _Validator[Optional[str]] = _Validator()
+    """:external:ref:`core-metadata-home-page`"""
+    download_url: _Validator[Optional[str]] = _Validator(added="1.1")
+    """:external:ref:`core-metadata-download-url`"""
+    author: _Validator[Optional[str]] = _Validator()
+    """:external:ref:`core-metadata-author`"""
+    author_email: _Validator[Optional[str]] = _Validator()
+    """:external:ref:`core-metadata-author-email`"""
+    maintainer: _Validator[Optional[str]] = _Validator(added="1.2")
+    """:external:ref:`core-metadata-maintainer`"""
+    maintainer_email: _Validator[Optional[str]] = _Validator(added="1.2")
+    """:external:ref:`core-metadata-maintainer-email`"""
+    license: _Validator[Optional[str]] = _Validator()
+    """:external:ref:`core-metadata-license`"""
+    classifiers: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    """:external:ref:`core-metadata-classifier`"""
+    requires_dist: _Validator[Optional[List[requirements.Requirement]]] = _Validator(
+        added="1.2"
+    )
+    """:external:ref:`core-metadata-requires-dist`"""
+    requires_python: _Validator[Optional[specifiers.SpecifierSet]] = _Validator(
+        added="1.2"
+    )
+    """:external:ref:`core-metadata-requires-python`"""
+    # Because `Requires-External` allows for non-PEP 440 version specifiers, we
+    # don't do any processing on the values.
+    requires_external: _Validator[Optional[List[str]]] = _Validator(added="1.2")
+    """:external:ref:`core-metadata-requires-external`"""
+    project_urls: _Validator[Optional[Dict[str, str]]] = _Validator(added="1.2")
+    """:external:ref:`core-metadata-project-url`"""
+    # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation
+    # regardless of metadata version.
+    provides_extra: _Validator[Optional[List[utils.NormalizedName]]] = _Validator(
+        added="2.1",
+    )
+    """:external:ref:`core-metadata-provides-extra`"""
+    provides_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
+    """:external:ref:`core-metadata-provides-dist`"""
+    obsoletes_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
+    """:external:ref:`core-metadata-obsoletes-dist`"""
+    requires: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    """``Requires`` (deprecated)"""
+    provides: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    """``Provides`` (deprecated)"""
+    obsoletes: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    """``Obsoletes`` (deprecated)"""

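The big addition above is the enriched `Metadata` class: each `_Validator` descriptor lazily converts a raw field on first access, caches it in the instance `__dict__`, and validation failures are gathered into an `ExceptionGroup` (built-in on 3.11+, shimmed here otherwise). A short usage sketch — the project name and versions are made up:

```python
from packaging.metadata import Metadata

EMAIL = """\
Metadata-Version: 2.1
Name: demo-pkg
Version: 1.0.0
Requires-Python: >=3.8
"""

md = Metadata.from_email(EMAIL)
print(md.name, md.version.release, md.requires_python)
# demo-pkg (1, 0, 0) >=3.8

try:
    Metadata.from_email("Metadata-Version: 2.1\nName: demo\nVersion: not-a-version\n")
except Exception as eg:  # ExceptionGroup; shimmed class on < 3.11
    print([str(e) for e in getattr(eg, "exceptions", [eg])])
```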
@@ -2,13 +2,13 @@
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
 
-import urllib.parse
-from typing import Any, List, Optional, Set
+from typing import Any, Iterator, Optional, Set
 
 from ._parser import parse_requirement as _parse_requirement
 from ._tokenizer import ParserSyntaxError
 from .markers import Marker, _normalize_extra_values
 from .specifiers import SpecifierSet
+from .utils import canonicalize_name
 
 
 class InvalidRequirement(ValueError):
@@ -37,57 +37,52 @@ class Requirement:
             raise InvalidRequirement(str(e)) from e
 
         self.name: str = parsed.name
-        if parsed.url:
-            parsed_url = urllib.parse.urlparse(parsed.url)
-            if parsed_url.scheme == "file":
-                if urllib.parse.urlunparse(parsed_url) != parsed.url:
-                    raise InvalidRequirement("Invalid URL given")
-            elif not (parsed_url.scheme and parsed_url.netloc) or (
-                not parsed_url.scheme and not parsed_url.netloc
-            ):
-                raise InvalidRequirement(f"Invalid URL: {parsed.url}")
-            self.url: Optional[str] = parsed.url
-        else:
-            self.url = None
-        self.extras: Set[str] = set(parsed.extras if parsed.extras else [])
+        self.url: Optional[str] = parsed.url or None
+        self.extras: Set[str] = set(parsed.extras or [])
         self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
         self.marker: Optional[Marker] = None
         if parsed.marker is not None:
             self.marker = Marker.__new__(Marker)
             self.marker._markers = _normalize_extra_values(parsed.marker)
 
-    def __str__(self) -> str:
-        parts: List[str] = [self.name]
+    def _iter_parts(self, name: str) -> Iterator[str]:
+        yield name
 
         if self.extras:
            formatted_extras = ",".join(sorted(self.extras))
-            parts.append(f"[{formatted_extras}]")
+            yield f"[{formatted_extras}]"
 
         if self.specifier:
-            parts.append(str(self.specifier))
+            yield str(self.specifier)
 
         if self.url:
-            parts.append(f"@ {self.url}")
+            yield f"@ {self.url}"
             if self.marker:
-                parts.append(" ")
+                yield " "
 
         if self.marker:
-            parts.append(f"; {self.marker}")
+            yield f"; {self.marker}"
 
-        return "".join(parts)
+    def __str__(self) -> str:
+        return "".join(self._iter_parts(self.name))
 
     def __repr__(self) -> str:
         return f"<Requirement('{self}')>"
 
     def __hash__(self) -> int:
-        return hash((self.__class__.__name__, str(self)))
+        return hash(
+            (
+                self.__class__.__name__,
+                *self._iter_parts(canonicalize_name(self.name)),
+            )
+        )
 
     def __eq__(self, other: Any) -> bool:
         if not isinstance(other, Requirement):
             return NotImplemented
 
         return (
-            self.name == other.name
+            canonicalize_name(self.name) == canonicalize_name(other.name)
             and self.extras == other.extras
             and self.specifier == other.specifier
             and self.url == other.url

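Equality and hashing now canonicalize the project name, so differently spelled but equivalent requirements compare (and hash) equal, while `str()` keeps the original spelling. A quick expected-behavior check with made-up names:

```python
from packaging.requirements import Requirement

a = Requirement("Foo.Bar[extra]==1.0")
b = Requirement("foo-bar[extra]==1.0")
print(a == b)              # True in 24.0 (False in 23.1)
print(hash(a) == hash(b))  # True in 24.0, so both collapse in sets/dicts
print(str(a))              # Foo.Bar[extra]==1.0 — original spelling preserved
```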
@@ -11,17 +11,7 @@
 import abc
 import itertools
 import re
-from typing import (
-    Callable,
-    Iterable,
-    Iterator,
-    List,
-    Optional,
-    Set,
-    Tuple,
-    TypeVar,
-    Union,
-)
+from typing import Callable, Iterable, Iterator, List, Optional, Tuple, TypeVar, Union
 
 from .utils import canonicalize_version
 from .version import Version
@@ -383,7 +373,7 @@ class Specifier(BaseSpecifier):
 
         # We want everything but the last item in the version, but we want to
         # ignore suffix segments.
-        prefix = ".".join(
+        prefix = _version_join(
            list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
         )
 
@@ -404,13 +394,13 @@ class Specifier(BaseSpecifier):
             )
             # Get the normalized version string ignoring the trailing .*
             normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False)
-            # Split the spec out by dots, and pretend that there is an implicit
-            # dot in between a release segment and a pre-release segment.
+            # Split the spec out by bangs and dots, and pretend that there is
+            # an implicit dot in between a release segment and a pre-release segment.
             split_spec = _version_split(normalized_spec)
 
-            # Split the prospective version out by dots, and pretend that there
-            # is an implicit dot in between a release segment and a pre-release
-            # segment.
+            # Split the prospective version out by bangs and dots, and pretend
+            # that there is an implicit dot in between a release segment and
+            # a pre-release segment.
             split_prospective = _version_split(normalized_prospective)
 
             # 0-pad the prospective version before shortening it to get the correct
@@ -644,8 +634,19 @@ _prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
 
 
 def _version_split(version: str) -> List[str]:
+    """Split version into components.
+
+    The split components are intended for version comparison. The logic does
+    not attempt to retain the original version string, so joining the
+    components back with :func:`_version_join` may not produce the original
+    version string.
+    """
     result: List[str] = []
-    for item in version.split("."):
+
+    epoch, _, rest = version.rpartition("!")
+    result.append(epoch or "0")
+
+    for item in rest.split("."):
         match = _prefix_regex.search(item)
         if match:
             result.extend(match.groups())
@@ -654,6 +655,17 @@ def _version_split(version: str) -> List[str]:
     return result
 
 
+def _version_join(components: List[str]) -> str:
+    """Join split version components into a version string.
+
+    This function assumes the input came from :func:`_version_split`, where the
+    first component must be the epoch (either empty or numeric), and all other
+    components numeric.
+    """
+    epoch, *rest = components
+    return f"{epoch}!{'.'.join(rest)}"
+
+
 def _is_not_suffix(segment: str) -> bool:
     return not any(
         segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
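`_version_split()` now pulls the epoch out explicitly (defaulting to `"0"`), and the new `_version_join()` reverses it; as the docstrings warn, the round trip is meant for comparison, not for reproducing the input. Expected behavior of these private helpers:

```python
from packaging.specifiers import _version_join, _version_split

print(_version_split("1!2.3"))         # ['1', '2', '3']
print(_version_split("2.3"))           # ['0', '2', '3'] (implicit epoch)
print(_version_join(["1", "2", "3"]))  # '1!2.3'
```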
@@ -675,7 +687,10 @@ def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
     left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
     right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
 
-    return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
+    return (
+        list(itertools.chain.from_iterable(left_split)),
+        list(itertools.chain.from_iterable(right_split)),
+    )
 
 
 class SpecifierSet(BaseSpecifier):
@@ -707,14 +722,8 @@ class SpecifierSet(BaseSpecifier):
         # strip each item to remove leading/trailing whitespace.
         split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
 
-        # Parsed each individual specifier, attempting first to make it a
-        # Specifier.
-        parsed: Set[Specifier] = set()
-        for specifier in split_specifiers:
-            parsed.add(Specifier(specifier))
-
-        # Turn our parsed specifiers into a frozen set and save them for later.
-        self._specs = frozenset(parsed)
+        # Make each individual specifier a Specifier and save in a frozen set for later.
+        self._specs = frozenset(map(Specifier, split_specifiers))
 
         # Store our prereleases value so we can use it later to determine if
         # we accept prereleases or not.

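The `SpecifierSet` constructor now builds its frozen set in a single `map()` pass; the public behavior it feeds is unchanged:

```python
from packaging.specifiers import SpecifierSet

specs = SpecifierSet(">=3.8, <4")  # comma-separated, whitespace-tolerant
print("3.12" in specs)  # True
print("4.1" in specs)   # False
```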
@@ -4,6 +4,8 @@
 
 import logging
 import platform
+import re
+import struct
 import subprocess
 import sys
 import sysconfig
@@ -37,7 +39,7 @@ INTERPRETER_SHORT_NAMES: Dict[str, str] = {
 }
 
 
-_32_BIT_INTERPRETER = sys.maxsize <= 2**32
+_32_BIT_INTERPRETER = struct.calcsize("P") == 4
 
 
 class Tag:
@@ -123,20 +125,37 @@ def _normalize_string(string: str) -> str:
     return string.replace(".", "_").replace("-", "_").replace(" ", "_")
 
 
-def _abi3_applies(python_version: PythonVersion) -> bool:
+def _is_threaded_cpython(abis: List[str]) -> bool:
+    """
+    Determine if the ABI corresponds to a threaded (`--disable-gil`) build.
+
+    The threaded builds are indicated by a "t" in the abiflags.
+    """
+    if len(abis) == 0:
+        return False
+    # expect e.g., cp313
+    m = re.match(r"cp\d+(.*)", abis[0])
+    if not m:
+        return False
+    abiflags = m.group(1)
+    return "t" in abiflags
+
+
+def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool:
     """
     Determine if the Python version supports abi3.
 
-    PEP 384 was first implemented in Python 3.2.
+    PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`)
+    builds do not support abi3.
     """
-    return len(python_version) > 1 and tuple(python_version) >= (3, 2)
+    return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading
 
 
 def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
     py_version = tuple(py_version)  # To allow for version comparison.
     abis = []
     version = _version_nodot(py_version[:2])
-    debug = pymalloc = ucs4 = ""
+    threading = debug = pymalloc = ucs4 = ""
     with_debug = _get_config_var("Py_DEBUG", warn)
     has_refcount = hasattr(sys, "gettotalrefcount")
     # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
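`_is_threaded_cpython()` infers a free-threaded (`--disable-gil`, PEP 703) build from a `t` in the abiflags of the first ABI string, and such builds opt out of abi3. A standalone re-implementation of that check, mirroring the regex logic added above so it runs anywhere:

```python
import re
from typing import List

def is_threaded(abis: List[str]) -> bool:
    # e.g. "cp313t" -> abiflags "t"; empty list means unknown, assume not threaded.
    if not abis:
        return False
    m = re.match(r"cp\d+(.*)", abis[0])
    return bool(m) and "t" in m.group(1)

print(is_threaded(["cp313t"]))  # True  -> abi3 does not apply
print(is_threaded(["cp312"]))   # False -> abi3 still applies
```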
@@ -145,6 +164,8 @@ def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
         has_ext = "_d.pyd" in EXTENSION_SUFFIXES
     if with_debug or (with_debug is None and (has_refcount or has_ext)):
         debug = "d"
+    if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn):
+        threading = "t"
     if py_version < (3, 8):
         with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
         if with_pymalloc or with_pymalloc is None:
@@ -158,13 +179,8 @@ def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
     elif debug:
         # Debug builds can also load "normal" extension modules.
         # We can also assume no UCS-4 or pymalloc requirement.
-        abis.append(f"cp{version}")
-    abis.insert(
-        0,
-        "cp{version}{debug}{pymalloc}{ucs4}".format(
-            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
-        ),
-    )
+        abis.append(f"cp{version}{threading}")
+    abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}")
     return abis
 
 
@@ -212,11 +228,14 @@ def cpython_tags(
     for abi in abis:
         for platform_ in platforms:
             yield Tag(interpreter, abi, platform_)
-    if _abi3_applies(python_version):
+
+    threading = _is_threaded_cpython(abis)
+    use_abi3 = _abi3_applies(python_version, threading)
+    if use_abi3:
         yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
     yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)
 
-    if _abi3_applies(python_version):
+    if use_abi3:
         for minor_version in range(python_version[1] - 1, 1, -1):
             for platform_ in platforms:
                 interpreter = "cp{version}".format(
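With the threading check wired into `cpython_tags()`, a free-threaded ABI suppresses the abi3 fallback tags. Expected output under 24.0 (all arguments explicit, so this should be deterministic):

```python
from packaging.tags import cpython_tags

for tag in cpython_tags((3, 13), abis=["cp313t"], platforms=["linux_x86_64"]):
    print(tag)
# cp313-cp313t-linux_x86_64
# cp313-none-linux_x86_64   (no cp3xx-abi3-* tags for a "t" build)
```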
@@ -406,7 +425,7 @@ def mac_platforms(
                 check=True,
                 env={"SYSTEM_VERSION_COMPAT": "0"},
                 stdout=subprocess.PIPE,
-                universal_newlines=True,
+                text=True,
             ).stdout
         version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
     else:
@@ -469,15 +488,21 @@ def mac_platforms(
 
 def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
     linux = _normalize_string(sysconfig.get_platform())
+    if not linux.startswith("linux_"):
+        # we should never be here, just yield the sysconfig one and return
+        yield linux
+        return
     if is_32bit:
         if linux == "linux_x86_64":
             linux = "linux_i686"
         elif linux == "linux_aarch64":
-            linux = "linux_armv7l"
+            linux = "linux_armv8l"
     _, arch = linux.split("_", 1)
-    yield from _manylinux.platform_tags(linux, arch)
-    yield from _musllinux.platform_tags(arch)
-    yield linux
+    archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch])
+    yield from _manylinux.platform_tags(archs)
+    yield from _musllinux.platform_tags(archs)
+    for arch in archs:
+        yield f"linux_{arch}"
 
 
 def _generic_platforms() -> Iterator[str]:

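`_linux_platforms()` now treats a 32-bit interpreter on an aarch64 kernel as `armv8l` with an `armv7l` fallback, yielding both tag families. A host-dependent sketch (on a 64-bit x86 host, `is_32bit=True` would exercise the `linux_i686` branch instead):

```python
# Illustrative only: output depends entirely on the machine running it.
from packaging.tags import _linux_platforms

print(list(_linux_platforms())[-1])  # e.g. 'linux_x86_64'
```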
@@ -12,6 +12,12 @@ BuildTag = Union[Tuple[()], Tuple[int, str]]
 NormalizedName = NewType("NormalizedName", str)
 
 
+class InvalidName(ValueError):
+    """
+    An invalid distribution name; users should refer to the packaging user guide.
+    """
+
+
 class InvalidWheelFilename(ValueError):
     """
     An invalid wheel filename was found, users should refer to PEP 427.
@@ -24,17 +30,28 @@ class InvalidSdistFilename(ValueError):
     """
 
 
+# Core metadata spec for `Name`
+_validate_regex = re.compile(
+    r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
+)
 _canonicalize_regex = re.compile(r"[-_.]+")
 _normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
 # PEP 427: The build number must start with a digit.
 _build_tag_regex = re.compile(r"(\d+)(.*)")
 
 
-def canonicalize_name(name: str) -> NormalizedName:
+def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
+    if validate and not _validate_regex.match(name):
+        raise InvalidName(f"name is invalid: {name!r}")
     # This is taken from PEP 503.
     value = _canonicalize_regex.sub("-", name).lower()
     return cast(NormalizedName, value)
 
 
 def is_normalized_name(name: str) -> bool:
     return _normalized_regex.match(name) is not None
 
 
 def canonicalize_version(
     version: Union[Version, str], *, strip_trailing_zero: bool = True
 ) -> str:
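`canonicalize_name()` grows an opt-in `validate` flag backed by the new `InvalidName` error; by default it still just folds runs of `-`, `_`, `.` and lowercases:

```python
from packaging.utils import InvalidName, canonicalize_name

print(canonicalize_name("Foo.Bar_baz"))  # foo-bar-baz
try:
    canonicalize_name("-invalid-", validate=True)  # leading '-' fails the regex
except InvalidName as exc:
    print(exc)  # name is invalid: '-invalid-'
```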
@@ -100,11 +117,18 @@ def parse_wheel_filename(
 
     parts = filename.split("-", dashes - 2)
     name_part = parts[0]
-    # See PEP 427 for the rules on escaping the project name
+    # See PEP 427 for the rules on escaping the project name.
     if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
         raise InvalidWheelFilename(f"Invalid project name: {filename}")
     name = canonicalize_name(name_part)
-    version = Version(parts[1])
+
+    try:
+        version = Version(parts[1])
+    except InvalidVersion as e:
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (invalid version): {filename}"
+        ) from e
+
     if dashes == 5:
         build_part = parts[2]
         build_match = _build_tag_regex.match(build_part)
@@ -137,5 +161,12 @@ def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
         raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")
 
     name = canonicalize_name(name_part)
-    version = Version(version_part)
+
+    try:
+        version = Version(version_part)
+    except InvalidVersion as e:
+        raise InvalidSdistFilename(
+            f"Invalid sdist filename (invalid version): {filename}"
+        ) from e
+
     return (name, version)

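Both filename parsers now translate `InvalidVersion` into their own exception types, so callers only need to catch one error per format:

```python
from packaging.utils import InvalidWheelFilename, parse_wheel_filename

name, version, build, tags = parse_wheel_filename("demo-1.0-py3-none-any.whl")
print(name, version)  # demo 1.0

try:
    parse_wheel_filename("demo-not.a.version-py3-none-any.whl")
except InvalidWheelFilename as exc:  # 23.1 leaked InvalidVersion here
    print(exc)
```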
@@ -7,37 +7,39 @@
     from packaging.version import parse, Version
 """
 
-import collections
 import itertools
 import re
-from typing import Any, Callable, Optional, SupportsInt, Tuple, Union
+from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union
 
 from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
 
 __all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
 
-InfiniteTypes = Union[InfinityType, NegativeInfinityType]
-PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
-SubLocalType = Union[InfiniteTypes, int, str]
-LocalType = Union[
+LocalType = Tuple[Union[int, str], ...]
+
+CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
+CmpLocalType = Union[
     NegativeInfinityType,
-    Tuple[
-        Union[
-            SubLocalType,
-            Tuple[SubLocalType, str],
-            Tuple[NegativeInfinityType, SubLocalType],
-        ],
-        ...,
-    ],
+    Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
 ]
 CmpKey = Tuple[
-    int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
+    int,
+    Tuple[int, ...],
+    CmpPrePostDevType,
+    CmpPrePostDevType,
+    CmpPrePostDevType,
+    CmpLocalType,
 ]
 VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
 
-_Version = collections.namedtuple(
-    "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
-)
+
+class _Version(NamedTuple):
+    epoch: int
+    release: Tuple[int, ...]
+    dev: Optional[Tuple[str, int]]
+    pre: Optional[Tuple[str, int]]
+    post: Optional[Tuple[str, int]]
+    local: Optional[LocalType]
 
 
 def parse(version: str) -> "Version":
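`_Version` moves from `collections.namedtuple` to a typed `NamedTuple`, which is what lets the property boilerplate in the hunks below collapse to plain returns. The public surface is untouched:

```python
from packaging.version import Version

v = Version("1!2.0.0rc1")
print(v.epoch, v.release, v.pre)  # 1 (2, 0, 0) ('rc', 1)
```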
@@ -117,7 +119,7 @@ _VERSION_PATTERN = r"""
         (?P<release>[0-9]+(?:\.[0-9]+)*)  # release segment
         (?P<pre>  # pre-release
             [-_\.]?
-            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
             [-_\.]?
             (?P<pre_n>[0-9]+)?
         )?
@@ -269,8 +271,7 @@ class Version(_BaseVersion):
         >>> Version("1!2.0.0").epoch
         1
         """
-        _epoch: int = self._version.epoch
-        return _epoch
+        return self._version.epoch
 
     @property
     def release(self) -> Tuple[int, ...]:
@@ -286,8 +287,7 @@ class Version(_BaseVersion):
         Includes trailing zeroes but not the epoch or any pre-release / development /
         post-release suffixes.
         """
-        _release: Tuple[int, ...] = self._version.release
-        return _release
+        return self._version.release
 
     @property
     def pre(self) -> Optional[Tuple[str, int]]:
@@ -302,8 +302,7 @@ class Version(_BaseVersion):
         >>> Version("1.2.3rc1").pre
         ('rc', 1)
         """
-        _pre: Optional[Tuple[str, int]] = self._version.pre
-        return _pre
+        return self._version.pre
 
     @property
     def post(self) -> Optional[int]:
@@ -451,7 +450,7 @@ class Version(_BaseVersion):
 
 
 def _parse_letter_version(
-    letter: str, number: Union[str, bytes, SupportsInt]
+    letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
 ) -> Optional[Tuple[str, int]]:
 
     if letter:
@@ -489,7 +488,7 @@ def _parse_letter_version(
 _local_version_separators = re.compile(r"[\._-]")
 
 
-def _parse_local_version(local: str) -> Optional[LocalType]:
+def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
     """
     Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
     """
@@ -507,7 +506,7 @@ def _cmpkey(
     pre: Optional[Tuple[str, int]],
     post: Optional[Tuple[str, int]],
     dev: Optional[Tuple[str, int]],
-    local: Optional[Tuple[SubLocalType]],
+    local: Optional[LocalType],
 ) -> CmpKey:
 
     # When we compare a release version, we want to compare it with all of the
@@ -524,7 +523,7 @@ def _cmpkey(
     # if there is not a pre or a post segment. If we have one of those then
     # the normal sorting rules will handle this case correctly.
     if pre is None and post is None and dev is not None:
-        _pre: PrePostDevType = NegativeInfinity
+        _pre: CmpPrePostDevType = NegativeInfinity
     # Versions without a pre-release (except as noted above) should sort after
     # those with one.
     elif pre is None:
@@ -534,21 +533,21 @@ def _cmpkey(
 
     # Versions without a post segment should sort before those with one.
     if post is None:
-        _post: PrePostDevType = NegativeInfinity
+        _post: CmpPrePostDevType = NegativeInfinity
 
     else:
         _post = post
 
     # Versions without a development segment should sort after those with one.
     if dev is None:
-        _dev: PrePostDevType = Infinity
+        _dev: CmpPrePostDevType = Infinity
 
     else:
         _dev = dev
 
     if local is None:
         # Versions without a local segment should sort before those with one.
-        _local: LocalType = NegativeInfinity
+        _local: CmpLocalType = NegativeInfinity
     else:
         # Versions with a local segment need that segment parsed to implement
         # the sorting rules in PEP440.

@@ -25,7 +25,7 @@ IPy==1.01
 Mako==1.3.2
 MarkupSafe==2.1.3
 musicbrainzngs==0.7.1
-packaging==23.1
+packaging==24.0
 paho-mqtt==1.6.1
 platformdirs==4.2.0
 plexapi==4.15.10