update setuptools libs

This commit is contained in:
Clinton Hall 2024-02-28 10:48:19 +13:00
commit fb248cf499
110 changed files with 13502 additions and 20981 deletions

View file

@@ -1,21 +1,15 @@
 # This file is dual licensed under the terms of the Apache License, Version
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
-from __future__ import absolute_import, division, print_function
-
-__all__ = [
-    "__title__", "__summary__", "__uri__", "__version__", "__author__",
-    "__email__", "__license__", "__copyright__",
-]
 
 __title__ = "packaging"
 __summary__ = "Core utilities for Python packages"
 __uri__ = "https://github.com/pypa/packaging"
 
-__version__ = "16.8"
+__version__ = "23.2"
 
 __author__ = "Donald Stufft and individual contributors"
 __email__ = "donald@stufft.io"
 
-__license__ = "BSD or Apache License, Version 2.0"
-__copyright__ = "Copyright 2014-2016 %s" % __author__
+__license__ = "BSD-2-Clause or Apache-2.0"
+__copyright__ = "2014 %s" % __author__
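The bumped metadata above is what downstream code observes at runtime. A quick check, assuming the vendored copy is importable as plain ``packaging`` (inside setuptools it may live under a vendored namespace instead):

import packaging

print(packaging.__version__)  # "23.2" after this update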

View file

@@ -0,0 +1,108 @@
"""
ELF file parser.
This provides a class ``ELFFile`` that parses an ELF executable with an
interface similar to ``ZipFile``. Only the read interface is implemented.
Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
"""
import enum
import os
import struct
from typing import IO, Optional, Tuple
class ELFInvalid(ValueError):
pass
class EIClass(enum.IntEnum):
C32 = 1
C64 = 2
class EIData(enum.IntEnum):
Lsb = 1
Msb = 2
class EMachine(enum.IntEnum):
I386 = 3
S390 = 22
Arm = 40
X8664 = 62
AArc64 = 183
class ELFFile:
"""
Representation of an ELF executable.
"""
def __init__(self, f: IO[bytes]) -> None:
self._f = f
try:
ident = self._read("16B")
except struct.error:
raise ELFInvalid("unable to parse identification")
magic = bytes(ident[:4])
if magic != b"\x7fELF":
raise ELFInvalid(f"invalid magic: {magic!r}")
self.capacity = ident[4] # Format for program header (bitness).
self.encoding = ident[5] # Data structure encoding (endianness).
try:
# e_fmt: Format for program header.
# p_fmt: Format for section header.
# p_idx: Indexes to find p_type, p_offset, and p_filesz.
e_fmt, self._p_fmt, self._p_idx = {
(1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)), # 32-bit LSB.
(1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)), # 32-bit MSB.
(2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)), # 64-bit LSB.
(2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)), # 64-bit MSB.
}[(self.capacity, self.encoding)]
except KeyError:
raise ELFInvalid(
f"unrecognized capacity ({self.capacity}) or "
f"encoding ({self.encoding})"
)
try:
(
_,
self.machine, # Architecture type.
_,
_,
self._e_phoff, # Offset of program header.
_,
self.flags, # Processor-specific flags.
_,
self._e_phentsize, # Size of section.
self._e_phnum, # Number of sections.
) = self._read(e_fmt)
except struct.error as e:
raise ELFInvalid("unable to parse machine and section information") from e
def _read(self, fmt: str) -> Tuple[int, ...]:
return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))
@property
def interpreter(self) -> Optional[str]:
"""
The path recorded in the ``PT_INTERP`` section header.
"""
for index in range(self._e_phnum):
self._f.seek(self._e_phoff + self._e_phentsize * index)
try:
data = self._read(self._p_fmt)
except struct.error:
continue
if data[self._p_idx[0]] != 3: # Not PT_INTERP.
continue
self._f.seek(data[self._p_idx[1]])
return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")
return None
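The class reads much like ``ZipFile``: wrap an open binary file and query attributes. A minimal sketch of exercising it against the running interpreter (``packaging._elffile`` is a private module, so the import path is an assumption about the vendored layout):

import sys

from packaging._elffile import ELFFile, ELFInvalid

try:
    with open(sys.executable, "rb") as f:
        elf = ELFFile(f)
        print("machine:", elf.machine)          # e.g. 62 == EMachine.X8664
        print("interpreter:", elf.interpreter)  # PT_INTERP loader path, or None
except (ELFInvalid, OSError):
    print("not a readable ELF executable")      # e.g. on macOS or Windows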

View file

@@ -0,0 +1,252 @@
import collections
import contextlib
import functools
import os
import re
import sys
import warnings
from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple
from ._elffile import EIClass, EIData, ELFFile, EMachine
EF_ARM_ABIMASK = 0xFF000000
EF_ARM_ABI_VER5 = 0x05000000
EF_ARM_ABI_FLOAT_HARD = 0x00000400
# `os.PathLike` was not a generic type until Python 3.9, so we stick with `str`
# as the type for `path` until then.
@contextlib.contextmanager
def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]:
try:
with open(path, "rb") as f:
yield ELFFile(f)
except (OSError, TypeError, ValueError):
yield None
def _is_linux_armhf(executable: str) -> bool:
# hard-float ABI can be detected from the ELF header of the running
# process
# https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
with _parse_elf(executable) as f:
return (
f is not None
and f.capacity == EIClass.C32
and f.encoding == EIData.Lsb
and f.machine == EMachine.Arm
and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5
and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD
)
def _is_linux_i686(executable: str) -> bool:
with _parse_elf(executable) as f:
return (
f is not None
and f.capacity == EIClass.C32
and f.encoding == EIData.Lsb
and f.machine == EMachine.I386
)
def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
if "armv7l" in archs:
return _is_linux_armhf(executable)
if "i686" in archs:
return _is_linux_i686(executable)
allowed_archs = {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x", "loongarch64"}
return any(arch in allowed_archs for arch in archs)
# If glibc ever changes its major version, we need to know what the last
# minor version was, so we can build the complete list of all versions.
# For now, guess what the highest minor version might be, assume it will
# be 50 for testing. Once this actually happens, update the dictionary
# with the actual value.
_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)
class _GLibCVersion(NamedTuple):
major: int
minor: int
def _glibc_version_string_confstr() -> Optional[str]:
"""
Primary implementation of glibc_version_string using os.confstr.
"""
# os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
# to be broken or missing. This strategy is used in the standard library
# platform module.
# https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
try:
# Should be a string like "glibc 2.17".
version_string: str = getattr(os, "confstr")("CS_GNU_LIBC_VERSION")
assert version_string is not None
_, version = version_string.rsplit()
except (AssertionError, AttributeError, OSError, ValueError):
# os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
return None
return version
def _glibc_version_string_ctypes() -> Optional[str]:
"""
Fallback implementation of glibc_version_string using ctypes.
"""
try:
import ctypes
except ImportError:
return None
# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
# manpage says, "If filename is NULL, then the returned handle is for the
# main program". This way we can let the linker do the work to figure out
# which libc our process is actually using.
#
# We must also handle the special case where the executable is not a
# dynamically linked executable. This can occur when using musl libc,
# for example. In this situation, dlopen() will error, leading to an
# OSError. Interestingly, at least in the case of musl, there is no
# errno set on the OSError. The single string argument used to construct
# OSError comes from libc itself and is therefore not portable to
# hard code here. In any case, failure to call dlopen() means we
# can proceed, so we bail on our attempt.
try:
process_namespace = ctypes.CDLL(None)
except OSError:
return None
try:
gnu_get_libc_version = process_namespace.gnu_get_libc_version
except AttributeError:
# Symbol doesn't exist -> therefore, we are not linked to
# glibc.
return None
# Call gnu_get_libc_version, which returns a string like "2.5"
gnu_get_libc_version.restype = ctypes.c_char_p
version_str: str = gnu_get_libc_version()
# py2 / py3 compatibility:
if not isinstance(version_str, str):
version_str = version_str.decode("ascii")
return version_str
def _glibc_version_string() -> Optional[str]:
"""Returns glibc version string, or None if not using glibc."""
return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
"""Parse glibc version.
We use a regexp instead of str.split because we want to discard any
random junk that might come after the minor version -- this might happen
in patched/forked versions of glibc (e.g. Linaro's version of glibc
uses version strings like "2.20-2014.11"). See gh-3588.
"""
m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
if not m:
warnings.warn(
f"Expected glibc version with 2 components major.minor,"
f" got: {version_str}",
RuntimeWarning,
)
return -1, -1
return int(m.group("major")), int(m.group("minor"))
@functools.lru_cache()
def _get_glibc_version() -> Tuple[int, int]:
version_str = _glibc_version_string()
if version_str is None:
return (-1, -1)
return _parse_glibc_version(version_str)
# From PEP 513, PEP 600
def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
sys_glibc = _get_glibc_version()
if sys_glibc < version:
return False
# Check for presence of _manylinux module.
try:
import _manylinux # noqa
except ImportError:
return True
if hasattr(_manylinux, "manylinux_compatible"):
result = _manylinux.manylinux_compatible(version[0], version[1], arch)
if result is not None:
return bool(result)
return True
if version == _GLibCVersion(2, 5):
if hasattr(_manylinux, "manylinux1_compatible"):
return bool(_manylinux.manylinux1_compatible)
if version == _GLibCVersion(2, 12):
if hasattr(_manylinux, "manylinux2010_compatible"):
return bool(_manylinux.manylinux2010_compatible)
if version == _GLibCVersion(2, 17):
if hasattr(_manylinux, "manylinux2014_compatible"):
return bool(_manylinux.manylinux2014_compatible)
return True
_LEGACY_MANYLINUX_MAP = {
# CentOS 7 w/ glibc 2.17 (PEP 599)
(2, 17): "manylinux2014",
# CentOS 6 w/ glibc 2.12 (PEP 571)
(2, 12): "manylinux2010",
# CentOS 5 w/ glibc 2.5 (PEP 513)
(2, 5): "manylinux1",
}
def platform_tags(archs: Sequence[str]) -> Iterator[str]:
"""Generate manylinux tags compatible to the current platform.
:param archs: Sequence of compatible architectures.
The first one shall be the closest to the actual architecture and be the part
of the platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
The ``linux_`` prefix is assumed as a prerequisite for the current platform to
be manylinux-compatible.
:returns: An iterator of compatible manylinux tags.
"""
if not _have_compatible_abi(sys.executable, archs):
return
# Oldest glibc to be supported regardless of architecture is (2, 17).
too_old_glibc2 = _GLibCVersion(2, 16)
if set(archs) & {"x86_64", "i686"}:
# On x86/i686 also oldest glibc to be supported is (2, 5).
too_old_glibc2 = _GLibCVersion(2, 4)
current_glibc = _GLibCVersion(*_get_glibc_version())
glibc_max_list = [current_glibc]
# We can assume compatibility across glibc major versions.
# https://sourceware.org/bugzilla/show_bug.cgi?id=24636
#
# Build a list of maximum glibc versions so that we can
# output the canonical list of all glibc from current_glibc
# down to too_old_glibc2, including all intermediary versions.
for glibc_major in range(current_glibc.major - 1, 1, -1):
glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
for arch in archs:
for glibc_max in glibc_max_list:
if glibc_max.major == too_old_glibc2.major:
min_minor = too_old_glibc2.minor
else:
# For other glibc major versions oldest supported is (x, 0).
min_minor = -1
for glibc_minor in range(glibc_max.minor, min_minor, -1):
glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
tag = "manylinux_{}_{}".format(*glibc_version)
if _is_compatible(arch, glibc_version):
yield f"{tag}_{arch}"
# Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
if glibc_version in _LEGACY_MANYLINUX_MAP:
legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
if _is_compatible(arch, glibc_version):
yield f"{legacy_tag}_{arch}"

View file

@@ -0,0 +1,83 @@
"""PEP 656 support.
This module implements logic to detect if the currently running Python is
linked against musl, and what musl version is used.
"""
import functools
import re
import subprocess
import sys
from typing import Iterator, NamedTuple, Optional, Sequence
from ._elffile import ELFFile
class _MuslVersion(NamedTuple):
major: int
minor: int
def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
lines = [n for n in (n.strip() for n in output.splitlines()) if n]
if len(lines) < 2 or lines[0][:4] != "musl":
return None
m = re.match(r"Version (\d+)\.(\d+)", lines[1])
if not m:
return None
return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
@functools.lru_cache()
def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
"""Detect currently-running musl runtime version.
This is done by checking the specified executable's dynamic linking
information, and invoking the loader to parse its output for a version
string. If the loader is musl, the output would be something like::
musl libc (x86_64)
Version 1.2.2
Dynamic Program Loader
"""
try:
with open(executable, "rb") as f:
ld = ELFFile(f).interpreter
except (OSError, TypeError, ValueError):
return None
if ld is None or "musl" not in ld:
return None
proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True)
return _parse_musl_version(proc.stderr)
def platform_tags(archs: Sequence[str]) -> Iterator[str]:
"""Generate musllinux tags compatible to the current platform.
:param archs: Sequence of compatible architectures.
The first one shall be the closest to the actual architecture and be the part
of the platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
The ``linux_`` prefix is assumed as a prerequisite for the current platform to
be musllinux-compatible.
:returns: An iterator of compatible musllinux tags.
"""
sys_musl = _get_musl_version(sys.executable)
if sys_musl is None: # Python not dynamically linked against musl.
return
for arch in archs:
for minor in range(sys_musl.minor, -1, -1):
yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
if __name__ == "__main__": # pragma: no cover
import sysconfig
plat = sysconfig.get_platform()
assert plat.startswith("linux-"), "not linux"
print("plat:", plat)
print("musl:", _get_musl_version(sys.executable))
print("tags:", end=" ")
for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])):
print(t, end="\n ")

View file

@@ -0,0 +1,359 @@
"""Handwritten parser of dependency specifiers.
The docstring for each __parse_* function contains EBNF-inspired grammar representing
the implementation.
"""
import ast
from typing import Any, List, NamedTuple, Optional, Tuple, Union
from ._tokenizer import DEFAULT_RULES, Tokenizer
class Node:
def __init__(self, value: str) -> None:
self.value = value
def __str__(self) -> str:
return self.value
def __repr__(self) -> str:
return f"<{self.__class__.__name__}('{self}')>"
def serialize(self) -> str:
raise NotImplementedError
class Variable(Node):
def serialize(self) -> str:
return str(self)
class Value(Node):
def serialize(self) -> str:
return f'"{self}"'
class Op(Node):
def serialize(self) -> str:
return str(self)
MarkerVar = Union[Variable, Value]
MarkerItem = Tuple[MarkerVar, Op, MarkerVar]
# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]]
# MarkerList = List[Union["MarkerList", MarkerAtom, str]]
# mypy does not support recursive type definition
# https://github.com/python/mypy/issues/731
MarkerAtom = Any
MarkerList = List[Any]
class ParsedRequirement(NamedTuple):
name: str
url: str
extras: List[str]
specifier: str
marker: Optional[MarkerList]
# --------------------------------------------------------------------------------------
# Recursive descent parser for dependency specifier
# --------------------------------------------------------------------------------------
def parse_requirement(source: str) -> ParsedRequirement:
return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES))
def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement:
"""
requirement = WS? IDENTIFIER WS? extras WS? requirement_details
"""
tokenizer.consume("WS")
name_token = tokenizer.expect(
"IDENTIFIER", expected="package name at the start of dependency specifier"
)
name = name_token.text
tokenizer.consume("WS")
extras = _parse_extras(tokenizer)
tokenizer.consume("WS")
url, specifier, marker = _parse_requirement_details(tokenizer)
tokenizer.expect("END", expected="end of dependency specifier")
return ParsedRequirement(name, url, extras, specifier, marker)
def _parse_requirement_details(
tokenizer: Tokenizer,
) -> Tuple[str, str, Optional[MarkerList]]:
"""
requirement_details = AT URL (WS requirement_marker?)?
| specifier WS? (requirement_marker)?
"""
specifier = ""
url = ""
marker = None
if tokenizer.check("AT"):
tokenizer.read()
tokenizer.consume("WS")
url_start = tokenizer.position
url = tokenizer.expect("URL", expected="URL after @").text
if tokenizer.check("END", peek=True):
return (url, specifier, marker)
tokenizer.expect("WS", expected="whitespace after URL")
# The input might end after whitespace.
if tokenizer.check("END", peek=True):
return (url, specifier, marker)
marker = _parse_requirement_marker(
tokenizer, span_start=url_start, after="URL and whitespace"
)
else:
specifier_start = tokenizer.position
specifier = _parse_specifier(tokenizer)
tokenizer.consume("WS")
if tokenizer.check("END", peek=True):
return (url, specifier, marker)
marker = _parse_requirement_marker(
tokenizer,
span_start=specifier_start,
after=(
"version specifier"
if specifier
else "name and no valid version specifier"
),
)
return (url, specifier, marker)
def _parse_requirement_marker(
tokenizer: Tokenizer, *, span_start: int, after: str
) -> MarkerList:
"""
requirement_marker = SEMICOLON marker WS?
"""
if not tokenizer.check("SEMICOLON"):
tokenizer.raise_syntax_error(
f"Expected end or semicolon (after {after})",
span_start=span_start,
)
tokenizer.read()
marker = _parse_marker(tokenizer)
tokenizer.consume("WS")
return marker
def _parse_extras(tokenizer: Tokenizer) -> List[str]:
"""
extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)?
"""
if not tokenizer.check("LEFT_BRACKET", peek=True):
return []
with tokenizer.enclosing_tokens(
"LEFT_BRACKET",
"RIGHT_BRACKET",
around="extras",
):
tokenizer.consume("WS")
extras = _parse_extras_list(tokenizer)
tokenizer.consume("WS")
return extras
def _parse_extras_list(tokenizer: Tokenizer) -> List[str]:
"""
extras_list = identifier (wsp* ',' wsp* identifier)*
"""
extras: List[str] = []
if not tokenizer.check("IDENTIFIER"):
return extras
extras.append(tokenizer.read().text)
while True:
tokenizer.consume("WS")
if tokenizer.check("IDENTIFIER", peek=True):
tokenizer.raise_syntax_error("Expected comma between extra names")
elif not tokenizer.check("COMMA"):
break
tokenizer.read()
tokenizer.consume("WS")
extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma")
extras.append(extra_token.text)
return extras
def _parse_specifier(tokenizer: Tokenizer) -> str:
"""
specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS
| WS? version_many WS?
"""
with tokenizer.enclosing_tokens(
"LEFT_PARENTHESIS",
"RIGHT_PARENTHESIS",
around="version specifier",
):
tokenizer.consume("WS")
parsed_specifiers = _parse_version_many(tokenizer)
tokenizer.consume("WS")
return parsed_specifiers
def _parse_version_many(tokenizer: Tokenizer) -> str:
"""
version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)?
"""
parsed_specifiers = ""
while tokenizer.check("SPECIFIER"):
span_start = tokenizer.position
parsed_specifiers += tokenizer.read().text
if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True):
tokenizer.raise_syntax_error(
".* suffix can only be used with `==` or `!=` operators",
span_start=span_start,
span_end=tokenizer.position + 1,
)
if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True):
tokenizer.raise_syntax_error(
"Local version label can only be used with `==` or `!=` operators",
span_start=span_start,
span_end=tokenizer.position,
)
tokenizer.consume("WS")
if not tokenizer.check("COMMA"):
break
parsed_specifiers += tokenizer.read().text
tokenizer.consume("WS")
return parsed_specifiers
# --------------------------------------------------------------------------------------
# Recursive descent parser for marker expression
# --------------------------------------------------------------------------------------
def parse_marker(source: str) -> MarkerList:
return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES))
def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList:
retval = _parse_marker(tokenizer)
tokenizer.expect("END", expected="end of marker expression")
return retval
def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
"""
marker = marker_atom (BOOLOP marker_atom)+
"""
expression = [_parse_marker_atom(tokenizer)]
while tokenizer.check("BOOLOP"):
token = tokenizer.read()
expr_right = _parse_marker_atom(tokenizer)
expression.extend((token.text, expr_right))
return expression
def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom:
"""
marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS?
| WS? marker_item WS?
"""
tokenizer.consume("WS")
if tokenizer.check("LEFT_PARENTHESIS", peek=True):
with tokenizer.enclosing_tokens(
"LEFT_PARENTHESIS",
"RIGHT_PARENTHESIS",
around="marker expression",
):
tokenizer.consume("WS")
marker: MarkerAtom = _parse_marker(tokenizer)
tokenizer.consume("WS")
else:
marker = _parse_marker_item(tokenizer)
tokenizer.consume("WS")
return marker
def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem:
"""
marker_item = WS? marker_var WS? marker_op WS? marker_var WS?
"""
tokenizer.consume("WS")
marker_var_left = _parse_marker_var(tokenizer)
tokenizer.consume("WS")
marker_op = _parse_marker_op(tokenizer)
tokenizer.consume("WS")
marker_var_right = _parse_marker_var(tokenizer)
tokenizer.consume("WS")
return (marker_var_left, marker_op, marker_var_right)
def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:
"""
marker_var = VARIABLE | QUOTED_STRING
"""
if tokenizer.check("VARIABLE"):
return process_env_var(tokenizer.read().text.replace(".", "_"))
elif tokenizer.check("QUOTED_STRING"):
return process_python_str(tokenizer.read().text)
else:
tokenizer.raise_syntax_error(
message="Expected a marker variable or quoted string"
)
def process_env_var(env_var: str) -> Variable:
if (
env_var == "platform_python_implementation"
or env_var == "python_implementation"
):
return Variable("platform_python_implementation")
else:
return Variable(env_var)
def process_python_str(python_str: str) -> Value:
value = ast.literal_eval(python_str)
return Value(str(value))
def _parse_marker_op(tokenizer: Tokenizer) -> Op:
"""
marker_op = IN | NOT IN | OP
"""
if tokenizer.check("IN"):
tokenizer.read()
return Op("in")
elif tokenizer.check("NOT"):
tokenizer.read()
tokenizer.expect("WS", expected="whitespace after 'not'")
tokenizer.expect("IN", expected="'in' after 'not'")
return Op("not in")
elif tokenizer.check("OP"):
return Op(tokenizer.read().text)
else:
return tokenizer.raise_syntax_error(
"Expected marker operator, one of "
"<=, <, !=, ==, >=, >, ~=, ===, in, not in"
)
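End to end, ``parse_requirement`` turns a PEP 508 string into the ``ParsedRequirement`` named tuple defined above. A sketch, assuming the private ``packaging._parser`` import path:

from packaging._parser import parse_requirement

parsed = parse_requirement('requests[security]>=2.8.1; python_version < "2.7"')
print(parsed.name)       # requests
print(parsed.extras)     # ['security']
print(parsed.specifier)  # >=2.8.1
print(parsed.marker)     # a [(Variable, Op, Value)] triple for the marker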

View file

@@ -0,0 +1,61 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
class InfinityType:
def __repr__(self) -> str:
return "Infinity"
def __hash__(self) -> int:
return hash(repr(self))
def __lt__(self, other: object) -> bool:
return False
def __le__(self, other: object) -> bool:
return False
def __eq__(self, other: object) -> bool:
return isinstance(other, self.__class__)
def __gt__(self, other: object) -> bool:
return True
def __ge__(self, other: object) -> bool:
return True
def __neg__(self: object) -> "NegativeInfinityType":
return NegativeInfinity
Infinity = InfinityType()
class NegativeInfinityType:
def __repr__(self) -> str:
return "-Infinity"
def __hash__(self) -> int:
return hash(repr(self))
def __lt__(self, other: object) -> bool:
return True
def __le__(self, other: object) -> bool:
return True
def __eq__(self, other: object) -> bool:
return isinstance(other, self.__class__)
def __gt__(self, other: object) -> bool:
return False
def __ge__(self, other: object) -> bool:
return False
def __neg__(self: object) -> InfinityType:
return Infinity
NegativeInfinity = NegativeInfinityType()
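These sentinels compare above and below every other value, which is what lets comparison keys of different lengths be padded safely (e.g. in version sorting). A quick sketch, assuming the private ``packaging._structures`` import path:

from packaging._structures import Infinity, NegativeInfinity

print(Infinity > 10**9)                      # True: sorts above anything
print((1, 0, NegativeInfinity) < (1, 0, 0))  # True: pads a shorter key low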

View file

@@ -0,0 +1,192 @@
import contextlib
import re
from dataclasses import dataclass
from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union
from .specifiers import Specifier
@dataclass
class Token:
name: str
text: str
position: int
class ParserSyntaxError(Exception):
"""The provided source text could not be parsed correctly."""
def __init__(
self,
message: str,
*,
source: str,
span: Tuple[int, int],
) -> None:
self.span = span
self.message = message
self.source = source
super().__init__()
def __str__(self) -> str:
marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^"
return "\n ".join([self.message, self.source, marker])
DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = {
"LEFT_PARENTHESIS": r"\(",
"RIGHT_PARENTHESIS": r"\)",
"LEFT_BRACKET": r"\[",
"RIGHT_BRACKET": r"\]",
"SEMICOLON": r";",
"COMMA": r",",
"QUOTED_STRING": re.compile(
r"""
(
('[^']*')
|
("[^"]*")
)
""",
re.VERBOSE,
),
"OP": r"(===|==|~=|!=|<=|>=|<|>)",
"BOOLOP": r"\b(or|and)\b",
"IN": r"\bin\b",
"NOT": r"\bnot\b",
"VARIABLE": re.compile(
r"""
\b(
python_version
|python_full_version
|os[._]name
|sys[._]platform
|platform_(release|system)
|platform[._](version|machine|python_implementation)
|python_implementation
|implementation_(name|version)
|extra
)\b
""",
re.VERBOSE,
),
"SPECIFIER": re.compile(
Specifier._operator_regex_str + Specifier._version_regex_str,
re.VERBOSE | re.IGNORECASE,
),
"AT": r"\@",
"URL": r"[^ \t]+",
"IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b",
"VERSION_PREFIX_TRAIL": r"\.\*",
"VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*",
"WS": r"[ \t]+",
"END": r"$",
}
class Tokenizer:
"""Context-sensitive token parsing.
Provides methods to examine the input stream to check whether the next token
matches.
"""
def __init__(
self,
source: str,
*,
rules: "Dict[str, Union[str, re.Pattern[str]]]",
) -> None:
self.source = source
self.rules: Dict[str, re.Pattern[str]] = {
name: re.compile(pattern) for name, pattern in rules.items()
}
self.next_token: Optional[Token] = None
self.position = 0
def consume(self, name: str) -> None:
"""Move beyond provided token name, if at current position."""
if self.check(name):
self.read()
def check(self, name: str, *, peek: bool = False) -> bool:
"""Check whether the next token has the provided name.
By default, if the check succeeds, the token *must* be read before
another check. If `peek` is set to `True`, the token is not loaded and
would need to be checked again.
"""
assert (
self.next_token is None
), f"Cannot check for {name!r}, already have {self.next_token!r}"
assert name in self.rules, f"Unknown token name: {name!r}"
expression = self.rules[name]
match = expression.match(self.source, self.position)
if match is None:
return False
if not peek:
self.next_token = Token(name, match[0], self.position)
return True
def expect(self, name: str, *, expected: str) -> Token:
"""Expect a certain token name next, failing with a syntax error otherwise.
The token is *not* read.
"""
if not self.check(name):
raise self.raise_syntax_error(f"Expected {expected}")
return self.read()
def read(self) -> Token:
"""Consume the next token and return it."""
token = self.next_token
assert token is not None
self.position += len(token.text)
self.next_token = None
return token
def raise_syntax_error(
self,
message: str,
*,
span_start: Optional[int] = None,
span_end: Optional[int] = None,
) -> NoReturn:
"""Raise ParserSyntaxError at the given position."""
span = (
self.position if span_start is None else span_start,
self.position if span_end is None else span_end,
)
raise ParserSyntaxError(
message,
source=self.source,
span=span,
)
@contextlib.contextmanager
def enclosing_tokens(
self, open_token: str, close_token: str, *, around: str
) -> Iterator[None]:
if self.check(open_token):
open_position = self.position
self.read()
else:
open_position = None
yield
if open_position is None:
return
if not self.check(close_token):
self.raise_syntax_error(
f"Expected matching {close_token} for {open_token}, after {around}",
span_start=open_position,
)
self.read()
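Driven by hand, the tokenizer looks like this (a sketch assuming the private ``packaging._tokenizer`` import path):

from packaging._tokenizer import DEFAULT_RULES, Tokenizer

tokenizer = Tokenizer("foo>=1.0", rules=DEFAULT_RULES)
name = tokenizer.expect("IDENTIFIER", expected="package name")
print(name.text)  # foo
if tokenizer.check("SPECIFIER"):
    print(tokenizer.read().text)  # >=1.0
tokenizer.expect("END", expected="end of input")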

View file

@@ -0,0 +1,252 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import operator
import os
import platform
import sys
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
from ._parser import (
MarkerAtom,
MarkerList,
Op,
Value,
Variable,
parse_marker as _parse_marker,
)
from ._tokenizer import ParserSyntaxError
from .specifiers import InvalidSpecifier, Specifier
from .utils import canonicalize_name
__all__ = [
"InvalidMarker",
"UndefinedComparison",
"UndefinedEnvironmentName",
"Marker",
"default_environment",
]
Operator = Callable[[str, str], bool]
class InvalidMarker(ValueError):
"""
An invalid marker was found, users should refer to PEP 508.
"""
class UndefinedComparison(ValueError):
"""
An invalid operation was attempted on a value that doesn't support it.
"""
class UndefinedEnvironmentName(ValueError):
"""
A name was used that does not exist in the environment.
"""
def _normalize_extra_values(results: Any) -> Any:
"""
Normalize extra values.
"""
if isinstance(results[0], tuple):
lhs, op, rhs = results[0]
if isinstance(lhs, Variable) and lhs.value == "extra":
normalized_extra = canonicalize_name(rhs.value)
rhs = Value(normalized_extra)
elif isinstance(rhs, Variable) and rhs.value == "extra":
normalized_extra = canonicalize_name(lhs.value)
lhs = Value(normalized_extra)
results[0] = lhs, op, rhs
return results
def _format_marker(
marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True
) -> str:
assert isinstance(marker, (list, tuple, str))
# Sometimes we have a structure like [[...]] which is a single item list
# where the single item is itself its own list. In that case we want to skip
# the rest of this function so that we don't get extraneous () on the
# outside.
if (
isinstance(marker, list)
and len(marker) == 1
and isinstance(marker[0], (list, tuple))
):
return _format_marker(marker[0])
if isinstance(marker, list):
inner = (_format_marker(m, first=False) for m in marker)
if first:
return " ".join(inner)
else:
return "(" + " ".join(inner) + ")"
elif isinstance(marker, tuple):
return " ".join([m.serialize() for m in marker])
else:
return marker
_operators: Dict[str, Operator] = {
"in": lambda lhs, rhs: lhs in rhs,
"not in": lambda lhs, rhs: lhs not in rhs,
"<": operator.lt,
"<=": operator.le,
"==": operator.eq,
"!=": operator.ne,
">=": operator.ge,
">": operator.gt,
}
def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
try:
spec = Specifier("".join([op.serialize(), rhs]))
except InvalidSpecifier:
pass
else:
return spec.contains(lhs, prereleases=True)
oper: Optional[Operator] = _operators.get(op.serialize())
if oper is None:
raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
return oper(lhs, rhs)
def _normalize(*values: str, key: str) -> Tuple[str, ...]:
# PEP 685 Comparison of extra names for optional distribution dependencies
# https://peps.python.org/pep-0685/
# > When comparing extra names, tools MUST normalize the names being
# > compared using the semantics outlined in PEP 503 for names
if key == "extra":
return tuple(canonicalize_name(v) for v in values)
# other environment markers don't have such standards
return values
def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
groups: List[List[bool]] = [[]]
for marker in markers:
assert isinstance(marker, (list, tuple, str))
if isinstance(marker, list):
groups[-1].append(_evaluate_markers(marker, environment))
elif isinstance(marker, tuple):
lhs, op, rhs = marker
if isinstance(lhs, Variable):
environment_key = lhs.value
lhs_value = environment[environment_key]
rhs_value = rhs.value
else:
lhs_value = lhs.value
environment_key = rhs.value
rhs_value = environment[environment_key]
lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
groups[-1].append(_eval_op(lhs_value, op, rhs_value))
else:
assert marker in ["and", "or"]
if marker == "or":
groups.append([])
return any(all(item) for item in groups)
def format_full_version(info: "sys._version_info") -> str:
version = "{0.major}.{0.minor}.{0.micro}".format(info)
kind = info.releaselevel
if kind != "final":
version += kind[0] + str(info.serial)
return version
def default_environment() -> Dict[str, str]:
iver = format_full_version(sys.implementation.version)
implementation_name = sys.implementation.name
return {
"implementation_name": implementation_name,
"implementation_version": iver,
"os_name": os.name,
"platform_machine": platform.machine(),
"platform_release": platform.release(),
"platform_system": platform.system(),
"platform_version": platform.version(),
"python_full_version": platform.python_version(),
"platform_python_implementation": platform.python_implementation(),
"python_version": ".".join(platform.python_version_tuple()[:2]),
"sys_platform": sys.platform,
}
class Marker:
def __init__(self, marker: str) -> None:
# Note: We create a Marker object without calling this constructor in
# packaging.requirements.Requirement. If any additional logic is
# added here, make sure to mirror/adapt Requirement.
try:
self._markers = _normalize_extra_values(_parse_marker(marker))
# The attribute `_markers` can be described in terms of a recursive type:
# MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
#
# For example, the following expression:
# python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
#
# is parsed into:
# [
# (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
# 'and',
# [
# (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
# 'or',
# (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
# ]
# ]
except ParserSyntaxError as e:
raise InvalidMarker(str(e)) from e
def __str__(self) -> str:
return _format_marker(self._markers)
def __repr__(self) -> str:
return f"<Marker('{self}')>"
def __hash__(self) -> int:
return hash((self.__class__.__name__, str(self)))
def __eq__(self, other: Any) -> bool:
if not isinstance(other, Marker):
return NotImplemented
return str(self) == str(other)
def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
"""Evaluate a marker.
Return the boolean from evaluating the given marker against the
environment. environment is an optional argument to override all or
part of the determined environment.
The environment is determined from the current Python process.
"""
current_environment = default_environment()
current_environment["extra"] = ""
if environment is not None:
current_environment.update(environment)
# The API used to allow setting extra to None. We need to handle this
# case for backwards compatibility.
if current_environment["extra"] is None:
current_environment["extra"] = ""
return _evaluate_markers(self._markers, current_environment)
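``Marker`` is part of packaging's public API, so its use can be shown directly:

from packaging.markers import Marker

marker = Marker('python_version >= "3.8" and os_name == "posix"')
print(marker.evaluate())                           # against the running interpreter
print(marker.evaluate({"python_version": "3.6"}))  # override part of the environment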

View file

@@ -0,0 +1,822 @@
import email.feedparser
import email.header
import email.message
import email.parser
import email.policy
import sys
import typing
from typing import (
Any,
Callable,
Dict,
Generic,
List,
Optional,
Tuple,
Type,
Union,
cast,
)
from . import requirements, specifiers, utils, version as version_module
T = typing.TypeVar("T")
if sys.version_info[:2] >= (3, 8): # pragma: no cover
from typing import Literal, TypedDict
else: # pragma: no cover
if typing.TYPE_CHECKING:
from typing_extensions import Literal, TypedDict
else:
try:
from typing_extensions import Literal, TypedDict
except ImportError:
class Literal:
def __init_subclass__(*_args, **_kwargs):
pass
class TypedDict:
def __init_subclass__(*_args, **_kwargs):
pass
try:
ExceptionGroup = __builtins__.ExceptionGroup # type: ignore[attr-defined]
except AttributeError:
class ExceptionGroup(Exception): # type: ignore[no-redef] # noqa: N818
"""A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.
If :external:exc:`ExceptionGroup` is already defined by Python itself,
that version is used instead.
"""
message: str
exceptions: List[Exception]
def __init__(self, message: str, exceptions: List[Exception]) -> None:
self.message = message
self.exceptions = exceptions
def __repr__(self) -> str:
return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})"
class InvalidMetadata(ValueError):
"""A metadata field contains invalid data."""
field: str
"""The name of the field that contains invalid data."""
def __init__(self, field: str, message: str) -> None:
self.field = field
super().__init__(message)
# The RawMetadata class attempts to make as few assumptions about the underlying
# serialization formats as possible. The idea is that as long as a serialization
# format offers some very basic primitives in *some* way, then we can support
# serializing to and from that format.
class RawMetadata(TypedDict, total=False):
"""A dictionary of raw core metadata.
Each field in core metadata maps to a key of this dictionary (when data is
provided). The key is lower-case and underscores are used instead of dashes
compared to the equivalent core metadata field. Any core metadata field that
can be specified multiple times or can hold multiple values in a single
field have a key with a plural name. See :class:`Metadata` whose attributes
match the keys of this dictionary.
Core metadata fields that can be specified multiple times are stored as a
list or dict depending on which is appropriate for the field. Any fields
which hold multiple values in a single field are stored as a list.
"""
# Metadata 1.0 - PEP 241
metadata_version: str
name: str
version: str
platforms: List[str]
summary: str
description: str
keywords: List[str]
home_page: str
author: str
author_email: str
license: str
# Metadata 1.1 - PEP 314
supported_platforms: List[str]
download_url: str
classifiers: List[str]
requires: List[str]
provides: List[str]
obsoletes: List[str]
# Metadata 1.2 - PEP 345
maintainer: str
maintainer_email: str
requires_dist: List[str]
provides_dist: List[str]
obsoletes_dist: List[str]
requires_python: str
requires_external: List[str]
project_urls: Dict[str, str]
# Metadata 2.0
# PEP 426 attempted to completely revamp the metadata format
# but got stuck without ever being able to build consensus on
# it and ultimately ended up withdrawn.
#
# However, a number of tools had started emitting METADATA with
# `2.0` Metadata-Version, so for historical reasons, this version
# was skipped.
# Metadata 2.1 - PEP 566
description_content_type: str
provides_extra: List[str]
# Metadata 2.2 - PEP 643
dynamic: List[str]
# Metadata 2.3 - PEP 685
# No new fields were added in PEP 685, just some edge cases were
# tightened up to provide better interoperability.
_STRING_FIELDS = {
"author",
"author_email",
"description",
"description_content_type",
"download_url",
"home_page",
"license",
"maintainer",
"maintainer_email",
"metadata_version",
"name",
"requires_python",
"summary",
"version",
}
_LIST_FIELDS = {
"classifiers",
"dynamic",
"obsoletes",
"obsoletes_dist",
"platforms",
"provides",
"provides_dist",
"provides_extra",
"requires",
"requires_dist",
"requires_external",
"supported_platforms",
}
_DICT_FIELDS = {
"project_urls",
}
def _parse_keywords(data: str) -> List[str]:
"""Split a string of comma-separate keyboards into a list of keywords."""
return [k.strip() for k in data.split(",")]
def _parse_project_urls(data: List[str]) -> Dict[str, str]:
"""Parse a list of label/URL string pairings separated by a comma."""
urls = {}
for pair in data:
# Our logic is slightly tricky here as we want to try and do
# *something* reasonable with malformed data.
#
# The main thing that we have to worry about, is data that does
# not have a ',' at all to split the label from the Value. There
# isn't a singular right answer here, and we will fail validation
# later on (if the caller is validating) so it doesn't *really*
# matter, but since the missing value has to be an empty str
# and our return value is dict[str, str], if we let the key
# be the missing value, then they'd have multiple '' values that
# overwrite each other in an accumulating dict.
#
# The other potential issue is that it's possible to have the
# same label multiple times in the metadata, with no solid "right"
# answer with what to do in that case. As such, we'll do the only
# thing we can, which is treat the field as unparseable and add it
# to our list of unparsed fields.
parts = [p.strip() for p in pair.split(",", 1)]
parts.extend([""] * (max(0, 2 - len(parts)))) # Ensure 2 items
# TODO: The spec doesn't say anything about if the keys should be
# considered case sensitive or not... logically they should
# be case-preserving and case-insensitive, but doing that
# would open up more cases where we might have duplicate
# entries.
label, url = parts
if label in urls:
# The label already exists in our set of urls, so this field
# is unparseable, and we can just add the whole thing to our
# unparseable data and stop processing it.
raise KeyError("duplicate labels in project urls")
urls[label] = url
return urls
def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str:
"""Get the body of the message."""
# If our source is a str, then our caller has managed encodings for us,
# and we don't need to deal with it.
if isinstance(source, str):
payload: str = msg.get_payload()
return payload
# If our source is a bytes, then we're managing the encoding and we need
# to deal with it.
else:
bpayload: bytes = msg.get_payload(decode=True)
try:
return bpayload.decode("utf8", "strict")
except UnicodeDecodeError:
raise ValueError("payload in an invalid encoding")
# The various parse_FORMAT functions here are intended to be as lenient as
# possible in their parsing, while still returning a correctly typed
# RawMetadata.
#
# To aid in this, we also generally want to do as little touching of the
# data as possible, except where there are possibly some historic holdovers
# that make valid data awkward to work with.
#
# While this is a lower level, intermediate format than our ``Metadata``
# class, some light touch ups can make a massive difference in usability.
# Map METADATA fields to RawMetadata.
_EMAIL_TO_RAW_MAPPING = {
"author": "author",
"author-email": "author_email",
"classifier": "classifiers",
"description": "description",
"description-content-type": "description_content_type",
"download-url": "download_url",
"dynamic": "dynamic",
"home-page": "home_page",
"keywords": "keywords",
"license": "license",
"maintainer": "maintainer",
"maintainer-email": "maintainer_email",
"metadata-version": "metadata_version",
"name": "name",
"obsoletes": "obsoletes",
"obsoletes-dist": "obsoletes_dist",
"platform": "platforms",
"project-url": "project_urls",
"provides": "provides",
"provides-dist": "provides_dist",
"provides-extra": "provides_extra",
"requires": "requires",
"requires-dist": "requires_dist",
"requires-external": "requires_external",
"requires-python": "requires_python",
"summary": "summary",
"supported-platform": "supported_platforms",
"version": "version",
}
_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()}
def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]:
"""Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``).
This function returns a two-item tuple of dicts. The first dict is of
recognized fields from the core metadata specification. Fields that can be
parsed and translated into Python's built-in types are converted
appropriately. All other fields are left as-is. Fields that are allowed to
appear multiple times are stored as lists.
The second dict contains all other fields from the metadata. This includes
any unrecognized fields. It also includes any fields which are expected to
be parsed into a built-in type but were not formatted appropriately. Finally,
any fields that are expected to appear only once but are repeated are
included in this dict.
"""
raw: Dict[str, Union[str, List[str], Dict[str, str]]] = {}
unparsed: Dict[str, List[str]] = {}
if isinstance(data, str):
parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data)
else:
parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data)
# We have to wrap parsed.keys() in a set, because in the case of multiple
# values for a key (a list), the key will appear multiple times in the
# list of keys, but we're avoiding that by using get_all().
for name in frozenset(parsed.keys()):
# Header names in RFC are case insensitive, so we'll normalize to all
# lower case to make comparisons easier.
name = name.lower()
# We use get_all() here, even for fields that aren't multiple use,
# because otherwise someone could have e.g. two Name fields, and we
# would just silently ignore it rather than doing something about it.
headers = parsed.get_all(name) or []
# The way the email module works when parsing bytes is that it
# unconditionally decodes the bytes as ascii using the surrogateescape
# handler. When you pull that data back out (such as with get_all()),
# it looks to see if the str has any surrogate escapes, and if it does
# it wraps it in a Header object instead of returning the string.
#
# As such, we'll look for those Header objects, and fix up the encoding.
value = []
# Flag if we have run into any issues processing the headers, thus
# signalling that the data belongs in 'unparsed'.
valid_encoding = True
for h in headers:
# It's unclear if this can return more types than just a Header or
# a str, so we'll just assert here to make sure.
assert isinstance(h, (email.header.Header, str))
# If it's a header object, we need to do our little dance to get
# the real data out of it. In cases where there is invalid data
# we're going to end up with mojibake, but there's no obvious, good
# way around that without reimplementing parts of the Header object
# ourselves.
#
# That should be fine since, if mojibake happens, this key is
# going into the unparsed dict anyways.
if isinstance(h, email.header.Header):
# The Header object stores its data as chunks, and each chunk
# can be independently encoded, so we'll need to check each
# of them.
chunks: List[Tuple[bytes, Optional[str]]] = []
for bin, encoding in email.header.decode_header(h):
try:
bin.decode("utf8", "strict")
except UnicodeDecodeError:
# Enable mojibake.
encoding = "latin1"
valid_encoding = False
else:
encoding = "utf8"
chunks.append((bin, encoding))
# Turn our chunks back into a Header object, then let that
# Header object do the right thing to turn them into a
# string for us.
value.append(str(email.header.make_header(chunks)))
# This is already a string, so just add it.
else:
value.append(h)
# We've processed all of our values to get them into a list of str,
# but we may have mojibake data, in which case this is an unparsed
# field.
if not valid_encoding:
unparsed[name] = value
continue
raw_name = _EMAIL_TO_RAW_MAPPING.get(name)
if raw_name is None:
# This is a bit of a weird situation, we've encountered a key that
# we don't know what it means, so we don't know whether it's meant
# to be a list or not.
#
# Since we can't really tell one way or another, we'll just leave it
# as a list, even though it may be a single item list, because that's
# what makes the most sense for email headers.
unparsed[name] = value
continue
# If this is one of our string fields, then we'll check to see if our
# value is a list of a single item. If it is then we'll assume that
# it was emitted as a single string, and unwrap the str from inside
# the list.
#
# If it's any other kind of data, then we haven't the faintest clue
# what we should parse it as, and we have to just add it to our list
# of unparsed stuff.
if raw_name in _STRING_FIELDS and len(value) == 1:
raw[raw_name] = value[0]
# If this is one of our list of string fields, then we can just assign
# the value, since email *only* has strings, and our get_all() call
# above ensures that this is a list.
elif raw_name in _LIST_FIELDS:
raw[raw_name] = value
# Special Case: Keywords
# The keywords field is implemented in the metadata spec as a str,
# but it conceptually is a list of strings, and is serialized using
# ", ".join(keywords), so we'll do some light data massaging to turn
# this into what it logically is.
elif raw_name == "keywords" and len(value) == 1:
raw[raw_name] = _parse_keywords(value[0])
# Special Case: Project-URL
# The project urls is implemented in the metadata spec as a list of
# specially-formatted strings that represent a key and a value, which
# is fundamentally a mapping, however the email format doesn't support
# mappings in a sane way, so it was crammed into a list of strings
# instead.
#
# We will do a little light data massaging to turn this into a map as
# it logically should be.
elif raw_name == "project_urls":
try:
raw[raw_name] = _parse_project_urls(value)
except KeyError:
unparsed[name] = value
# Nothing that we've done has managed to parse this, so it'll just
# throw it in our unparseable data and move on.
else:
unparsed[name] = value
# We need to support getting the Description from the message payload in
# addition to getting it from the headers. This does mean, though, there
# is the possibility of it being set both ways, in which case we put both
# in 'unparsed' since we don't know which is right.
try:
payload = _get_payload(parsed, data)
except ValueError:
unparsed.setdefault("description", []).append(
parsed.get_payload(decode=isinstance(data, bytes))
)
else:
if payload:
# Check to see if we've already got a description, if so then both
# it, and this body move to unparseable.
if "description" in raw:
description_header = cast(str, raw.pop("description"))
unparsed.setdefault("description", []).extend(
[description_header, payload]
)
elif "description" in unparsed:
unparsed["description"].append(payload)
else:
raw["description"] = payload
# We need to cast our `raw` to a RawMetadata because a TypedDict only supports
# literal key names, while we compute our key names dynamically. The way this
# function is implemented, though, means the `TypedDict` can only end up with
# valid key names.
return cast(RawMetadata, raw), unparsed
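# A sketch of driving parse_email (it is public in packaging.metadata):
#
#     raw, unparsed = parse_email(
#         b"Metadata-Version: 2.1\nName: example\nVersion: 1.0"
#     )
#     raw["name"]  # 'example'
#     unparsed     # {} when every field was recognized and well-formed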
_NOT_FOUND = object()
# Keep the two values in sync.
_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"])
class _Validator(Generic[T]):
"""Validate a metadata field.
All _process_*() methods correspond to a core metadata field. The method is
called with the field's raw value. If the raw value is valid it is returned
in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field).
If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause
as appropriate).
"""
name: str
raw_name: str
added: _MetadataVersion
def __init__(
self,
*,
added: _MetadataVersion = "1.0",
) -> None:
self.added = added
def __set_name__(self, _owner: "Metadata", name: str) -> None:
self.name = name
self.raw_name = _RAW_TO_EMAIL_MAPPING[name]
def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T:
# With Python 3.8, the caching can be replaced with functools.cached_property().
# No need to check the cache as attribute lookup will resolve into the
# instance's __dict__ before __get__ is called.
cache = instance.__dict__
try:
value = instance._raw[self.name] # type: ignore[literal-required]
except KeyError:
if self.name in _STRING_FIELDS:
value = ""
elif self.name in _LIST_FIELDS:
value = []
elif self.name in _DICT_FIELDS:
value = {}
else: # pragma: no cover
assert False
try:
converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}")
except AttributeError:
pass
else:
value = converter(value)
cache[self.name] = value
try:
del instance._raw[self.name] # type: ignore[misc]
except KeyError:
pass
return cast(T, value)
def _invalid_metadata(
self, msg: str, cause: Optional[Exception] = None
) -> InvalidMetadata:
exc = InvalidMetadata(
self.raw_name, msg.format_map({"field": repr(self.raw_name)})
)
exc.__cause__ = cause
return exc
def _process_metadata_version(self, value: str) -> _MetadataVersion:
# Implicitly makes Metadata-Version required.
if value not in _VALID_METADATA_VERSIONS:
raise self._invalid_metadata(f"{value!r} is not a valid metadata version")
return cast(_MetadataVersion, value)
def _process_name(self, value: str) -> str:
if not value:
raise self._invalid_metadata("{field} is a required field")
# Validate the name as a side-effect.
try:
utils.canonicalize_name(value, validate=True)
except utils.InvalidName as exc:
raise self._invalid_metadata(
f"{value!r} is invalid for {{field}}", cause=exc
)
else:
return value
def _process_version(self, value: str) -> version_module.Version:
if not value:
raise self._invalid_metadata("{field} is a required field")
try:
return version_module.parse(value)
except version_module.InvalidVersion as exc:
raise self._invalid_metadata(
f"{value!r} is invalid for {{field}}", cause=exc
)
def _process_summary(self, value: str) -> str:
"""Check the field contains no newlines."""
if "\n" in value:
raise self._invalid_metadata("{field} must be a single line")
return value
def _process_description_content_type(self, value: str) -> str:
content_types = {"text/plain", "text/x-rst", "text/markdown"}
message = email.message.EmailMessage()
message["content-type"] = value
content_type, parameters = (
# Defaults to `text/plain` if parsing failed.
message.get_content_type().lower(),
message["content-type"].params,
)
# Check if content-type is valid or defaulted to `text/plain` and thus was
# not parseable.
if content_type not in content_types or content_type not in value.lower():
raise self._invalid_metadata(
f"{{field}} must be one of {list(content_types)}, not {value!r}"
)
charset = parameters.get("charset", "UTF-8")
if charset != "UTF-8":
raise self._invalid_metadata(
f"{{field}} can only specify the UTF-8 charset, not {list(charset)}"
)
markdown_variants = {"GFM", "CommonMark"}
variant = parameters.get("variant", "GFM") # Use an acceptable default.
if content_type == "text/markdown" and variant not in markdown_variants:
raise self._invalid_metadata(
f"valid Markdown variants for {{field}} are {list(markdown_variants)}, "
f"not {variant!r}",
)
return value
def _process_dynamic(self, value: List[str]) -> List[str]:
for dynamic_field in map(str.lower, value):
if dynamic_field in {"name", "version", "metadata-version"}:
raise self._invalid_metadata(
f"{value!r} is not allowed as a dynamic field"
)
elif dynamic_field not in _EMAIL_TO_RAW_MAPPING:
raise self._invalid_metadata(f"{value!r} is not a valid dynamic field")
return list(map(str.lower, value))
def _process_provides_extra(
self,
value: List[str],
) -> List[utils.NormalizedName]:
normalized_names = []
try:
for name in value:
normalized_names.append(utils.canonicalize_name(name, validate=True))
except utils.InvalidName as exc:
raise self._invalid_metadata(
f"{name!r} is invalid for {{field}}", cause=exc
)
else:
return normalized_names
def _process_requires_python(self, value: str) -> specifiers.SpecifierSet:
try:
return specifiers.SpecifierSet(value)
except specifiers.InvalidSpecifier as exc:
raise self._invalid_metadata(
f"{value!r} is invalid for {{field}}", cause=exc
)
def _process_requires_dist(
self,
value: List[str],
) -> List[requirements.Requirement]:
reqs = []
try:
for req in value:
reqs.append(requirements.Requirement(req))
except requirements.InvalidRequirement as exc:
raise self._invalid_metadata(f"{req!r} is invalid for {{field}}", cause=exc)
else:
return reqs
class Metadata:
"""Representation of distribution metadata.
Compared to :class:`RawMetadata`, this class provides objects representing
metadata fields instead of only using built-in types. Any invalid metadata
will cause :exc:`InvalidMetadata` to be raised (with a
:py:attr:`~BaseException.__cause__` attribute as appropriate).
"""
_raw: RawMetadata
@classmethod
def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata":
"""Create an instance from :class:`RawMetadata`.
If *validate* is true, all metadata will be validated. All exceptions
related to validation will be gathered and raised as an :class:`ExceptionGroup`.
"""
ins = cls()
ins._raw = data.copy() # Mutations occur due to caching enriched values.
if validate:
exceptions: List[InvalidMetadata] = []
try:
metadata_version = ins.metadata_version
metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version)
except InvalidMetadata as metadata_version_exc:
exceptions.append(metadata_version_exc)
metadata_version = None
            # Check both the fields that are present and the required fields,
            # so that missing required fields can be reported.
fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS
# Remove fields that have already been checked.
fields_to_check -= {"metadata_version"}
for key in fields_to_check:
try:
if metadata_version:
# Can't use getattr() as that triggers descriptor protocol which
# will fail due to no value for the instance argument.
try:
field_metadata_version = cls.__dict__[key].added
except KeyError:
exc = InvalidMetadata(key, f"unrecognized field: {key!r}")
exceptions.append(exc)
continue
field_age = _VALID_METADATA_VERSIONS.index(
field_metadata_version
)
if field_age > metadata_age:
field = _RAW_TO_EMAIL_MAPPING[key]
                            exc = InvalidMetadata(
                                field,
                                f"{field} introduced in metadata version "
                                f"{field_metadata_version}, not {metadata_version}",
                            )
exceptions.append(exc)
continue
getattr(ins, key)
except InvalidMetadata as exc:
exceptions.append(exc)
if exceptions:
raise ExceptionGroup("invalid metadata", exceptions)
return ins
@classmethod
def from_email(
cls, data: Union[bytes, str], *, validate: bool = True
) -> "Metadata":
"""Parse metadata from email headers.
If *validate* is true, the metadata will be validated. All exceptions
related to validation will be gathered and raised as an :class:`ExceptionGroup`.
"""
        exceptions: List[InvalidMetadata] = []
raw, unparsed = parse_email(data)
if validate:
for unparsed_key in unparsed:
if unparsed_key in _EMAIL_TO_RAW_MAPPING:
message = f"{unparsed_key!r} has invalid data"
else:
message = f"unrecognized field: {unparsed_key!r}"
exceptions.append(InvalidMetadata(unparsed_key, message))
if exceptions:
raise ExceptionGroup("unparsed", exceptions)
try:
return cls.from_raw(raw, validate=validate)
except ExceptionGroup as exc_group:
exceptions.extend(exc_group.exceptions)
raise ExceptionGroup("invalid or unparsed metadata", exceptions) from None
metadata_version: _Validator[_MetadataVersion] = _Validator()
""":external:ref:`core-metadata-metadata-version`
(required; validated to be a valid metadata version)"""
name: _Validator[str] = _Validator()
""":external:ref:`core-metadata-name`
(required; validated using :func:`~packaging.utils.canonicalize_name` and its
*validate* parameter)"""
version: _Validator[version_module.Version] = _Validator()
""":external:ref:`core-metadata-version` (required)"""
dynamic: _Validator[List[str]] = _Validator(
added="2.2",
)
""":external:ref:`core-metadata-dynamic`
(validated against core metadata field names and lowercased)"""
platforms: _Validator[List[str]] = _Validator()
""":external:ref:`core-metadata-platform`"""
supported_platforms: _Validator[List[str]] = _Validator(added="1.1")
""":external:ref:`core-metadata-supported-platform`"""
summary: _Validator[str] = _Validator()
""":external:ref:`core-metadata-summary` (validated to contain no newlines)"""
description: _Validator[str] = _Validator() # TODO 2.1: can be in body
""":external:ref:`core-metadata-description`"""
description_content_type: _Validator[str] = _Validator(added="2.1")
""":external:ref:`core-metadata-description-content-type` (validated)"""
keywords: _Validator[List[str]] = _Validator()
""":external:ref:`core-metadata-keywords`"""
home_page: _Validator[str] = _Validator()
""":external:ref:`core-metadata-home-page`"""
download_url: _Validator[str] = _Validator(added="1.1")
""":external:ref:`core-metadata-download-url`"""
author: _Validator[str] = _Validator()
""":external:ref:`core-metadata-author`"""
author_email: _Validator[str] = _Validator()
""":external:ref:`core-metadata-author-email`"""
maintainer: _Validator[str] = _Validator(added="1.2")
""":external:ref:`core-metadata-maintainer`"""
maintainer_email: _Validator[str] = _Validator(added="1.2")
""":external:ref:`core-metadata-maintainer-email`"""
license: _Validator[str] = _Validator()
""":external:ref:`core-metadata-license`"""
classifiers: _Validator[List[str]] = _Validator(added="1.1")
""":external:ref:`core-metadata-classifier`"""
requires_dist: _Validator[List[requirements.Requirement]] = _Validator(added="1.2")
""":external:ref:`core-metadata-requires-dist`"""
requires_python: _Validator[specifiers.SpecifierSet] = _Validator(added="1.2")
""":external:ref:`core-metadata-requires-python`"""
# Because `Requires-External` allows for non-PEP 440 version specifiers, we
# don't do any processing on the values.
requires_external: _Validator[List[str]] = _Validator(added="1.2")
""":external:ref:`core-metadata-requires-external`"""
project_urls: _Validator[Dict[str, str]] = _Validator(added="1.2")
""":external:ref:`core-metadata-project-url`"""
# PEP 685 lets us raise an error if an extra doesn't pass `Name` validation
# regardless of metadata version.
provides_extra: _Validator[List[utils.NormalizedName]] = _Validator(
added="2.1",
)
""":external:ref:`core-metadata-provides-extra`"""
provides_dist: _Validator[List[str]] = _Validator(added="1.2")
""":external:ref:`core-metadata-provides-dist`"""
obsoletes_dist: _Validator[List[str]] = _Validator(added="1.2")
""":external:ref:`core-metadata-obsoletes-dist`"""
requires: _Validator[List[str]] = _Validator(added="1.1")
"""``Requires`` (deprecated)"""
provides: _Validator[List[str]] = _Validator(added="1.1")
"""``Provides`` (deprecated)"""
obsoletes: _Validator[List[str]] = _Validator(added="1.1")
"""``Obsoletes`` (deprecated)"""


@@ -0,0 +1,90 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from typing import Any, Iterator, Optional, Set
from ._parser import parse_requirement as _parse_requirement
from ._tokenizer import ParserSyntaxError
from .markers import Marker, _normalize_extra_values
from .specifiers import SpecifierSet
from .utils import canonicalize_name
class InvalidRequirement(ValueError):
"""
    An invalid requirement was found; users should refer to PEP 508.
"""
class Requirement:
"""Parse a requirement.
Parse a given requirement string into its parts, such as name, specifier,
URL, and extras. Raises InvalidRequirement on a badly-formed requirement
string.
"""
# TODO: Can we test whether something is contained within a requirement?
# If so how do we do that? Do we need to test against the _name_ of
# the thing as well as the version? What about the markers?
# TODO: Can we normalize the name and extra name?
def __init__(self, requirement_string: str) -> None:
try:
parsed = _parse_requirement(requirement_string)
except ParserSyntaxError as e:
raise InvalidRequirement(str(e)) from e
self.name: str = parsed.name
self.url: Optional[str] = parsed.url or None
self.extras: Set[str] = set(parsed.extras if parsed.extras else [])
self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
self.marker: Optional[Marker] = None
if parsed.marker is not None:
self.marker = Marker.__new__(Marker)
self.marker._markers = _normalize_extra_values(parsed.marker)
def _iter_parts(self, name: str) -> Iterator[str]:
yield name
if self.extras:
formatted_extras = ",".join(sorted(self.extras))
yield f"[{formatted_extras}]"
if self.specifier:
yield str(self.specifier)
        if self.url:
            yield f"@ {self.url}"
            if self.marker:
                yield " "
        if self.marker:
            yield f"; {self.marker}"
def __str__(self) -> str:
return "".join(self._iter_parts(self.name))
def __repr__(self) -> str:
return f"<Requirement('{self}')>"
def __hash__(self) -> int:
return hash(
(
self.__class__.__name__,
*self._iter_parts(canonicalize_name(self.name)),
)
)
def __eq__(self, other: Any) -> bool:
if not isinstance(other, Requirement):
return NotImplemented
return (
canonicalize_name(self.name) == canonicalize_name(other.name)
and self.extras == other.extras
and self.specifier == other.specifier
and self.url == other.url
and self.marker == other.marker
)
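# A minimal usage sketch (illustrative; not part of the vendored module);
# 'example' and its extra are placeholder names:
# >>> req = Requirement('example[extra]>=1.0; python_version >= "3.8"')
# >>> req.name, sorted(req.extras), str(req.specifier), str(req.marker)
# ('example', ['extra'], '>=1.0', 'python_version >= "3.8"')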

File diff suppressed because it is too large


@@ -0,0 +1,553 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import logging
import platform
import struct
import subprocess
import sys
import sysconfig
from importlib.machinery import EXTENSION_SUFFIXES
from typing import (
Dict,
FrozenSet,
Iterable,
Iterator,
List,
Optional,
Sequence,
Tuple,
Union,
cast,
)
from . import _manylinux, _musllinux
logger = logging.getLogger(__name__)
PythonVersion = Sequence[int]
MacVersion = Tuple[int, int]
INTERPRETER_SHORT_NAMES: Dict[str, str] = {
"python": "py", # Generic.
"cpython": "cp",
"pypy": "pp",
"ironpython": "ip",
"jython": "jy",
}
_32_BIT_INTERPRETER = struct.calcsize("P") == 4
class Tag:
"""
A representation of the tag triple for a wheel.
Instances are considered immutable and thus are hashable. Equality checking
is also supported.
"""
__slots__ = ["_interpreter", "_abi", "_platform", "_hash"]
def __init__(self, interpreter: str, abi: str, platform: str) -> None:
self._interpreter = interpreter.lower()
self._abi = abi.lower()
self._platform = platform.lower()
# The __hash__ of every single element in a Set[Tag] will be evaluated each time
        # that a set calls its `.isdisjoint()` method, which may be called hundreds of
# times when scanning a page of links for packages with tags matching that
# Set[Tag]. Pre-computing the value here produces significant speedups for
# downstream consumers.
self._hash = hash((self._interpreter, self._abi, self._platform))
@property
def interpreter(self) -> str:
return self._interpreter
@property
def abi(self) -> str:
return self._abi
@property
def platform(self) -> str:
return self._platform
def __eq__(self, other: object) -> bool:
if not isinstance(other, Tag):
return NotImplemented
return (
(self._hash == other._hash) # Short-circuit ASAP for perf reasons.
and (self._platform == other._platform)
and (self._abi == other._abi)
and (self._interpreter == other._interpreter)
)
def __hash__(self) -> int:
return self._hash
def __str__(self) -> str:
return f"{self._interpreter}-{self._abi}-{self._platform}"
def __repr__(self) -> str:
return f"<{self} @ {id(self)}>"
def parse_tag(tag: str) -> FrozenSet[Tag]:
"""
Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.
Returning a set is required due to the possibility that the tag is a
compressed tag set.
"""
tags = set()
interpreters, abis, platforms = tag.split("-")
for interpreter in interpreters.split("."):
for abi in abis.split("."):
for platform_ in platforms.split("."):
tags.add(Tag(interpreter, abi, platform_))
return frozenset(tags)
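# A minimal usage sketch (illustrative; not part of the vendored module),
# showing how a compressed tag set expands:
# >>> sorted(str(t) for t in parse_tag("cp38.cp39-abi3-manylinux1_x86_64"))
# ['cp38-abi3-manylinux1_x86_64', 'cp39-abi3-manylinux1_x86_64']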
def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
value: Union[int, str, None] = sysconfig.get_config_var(name)
if value is None and warn:
logger.debug(
"Config variable '%s' is unset, Python ABI tag may be incorrect", name
)
return value
def _normalize_string(string: str) -> str:
return string.replace(".", "_").replace("-", "_").replace(" ", "_")
def _abi3_applies(python_version: PythonVersion) -> bool:
"""
Determine if the Python version supports abi3.
PEP 384 was first implemented in Python 3.2.
"""
return len(python_version) > 1 and tuple(python_version) >= (3, 2)
def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
py_version = tuple(py_version) # To allow for version comparison.
abis = []
version = _version_nodot(py_version[:2])
debug = pymalloc = ucs4 = ""
with_debug = _get_config_var("Py_DEBUG", warn)
has_refcount = hasattr(sys, "gettotalrefcount")
# Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
# extension modules is the best option.
# https://github.com/pypa/pip/issues/3383#issuecomment-173267692
has_ext = "_d.pyd" in EXTENSION_SUFFIXES
if with_debug or (with_debug is None and (has_refcount or has_ext)):
debug = "d"
if py_version < (3, 8):
with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
if with_pymalloc or with_pymalloc is None:
pymalloc = "m"
if py_version < (3, 3):
unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
if unicode_size == 4 or (
unicode_size is None and sys.maxunicode == 0x10FFFF
):
ucs4 = "u"
elif debug:
# Debug builds can also load "normal" extension modules.
# We can also assume no UCS-4 or pymalloc requirement.
abis.append(f"cp{version}")
abis.insert(
0,
"cp{version}{debug}{pymalloc}{ucs4}".format(
version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
),
)
return abis
def cpython_tags(
python_version: Optional[PythonVersion] = None,
abis: Optional[Iterable[str]] = None,
platforms: Optional[Iterable[str]] = None,
*,
warn: bool = False,
) -> Iterator[Tag]:
"""
Yields the tags for a CPython interpreter.
The tags consist of:
- cp<python_version>-<abi>-<platform>
- cp<python_version>-abi3-<platform>
- cp<python_version>-none-<platform>
- cp<less than python_version>-abi3-<platform> # Older Python versions down to 3.2.
If python_version only specifies a major version then user-provided ABIs and
    the 'none' ABI tag will be used.
If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
their normal position and not at the beginning.
"""
if not python_version:
python_version = sys.version_info[:2]
interpreter = f"cp{_version_nodot(python_version[:2])}"
if abis is None:
if len(python_version) > 1:
abis = _cpython_abis(python_version, warn)
else:
abis = []
abis = list(abis)
# 'abi3' and 'none' are explicitly handled later.
for explicit_abi in ("abi3", "none"):
try:
abis.remove(explicit_abi)
except ValueError:
pass
platforms = list(platforms or platform_tags())
for abi in abis:
for platform_ in platforms:
yield Tag(interpreter, abi, platform_)
if _abi3_applies(python_version):
yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)
if _abi3_applies(python_version):
for minor_version in range(python_version[1] - 1, 1, -1):
for platform_ in platforms:
interpreter = "cp{version}".format(
version=_version_nodot((python_version[0], minor_version))
)
yield Tag(interpreter, "abi3", platform_)
def _generic_abi() -> List[str]:
"""
Return the ABI tag based on EXT_SUFFIX.
"""
# The following are examples of `EXT_SUFFIX`.
# We want to keep the parts which are related to the ABI and remove the
# parts which are related to the platform:
# - linux: '.cpython-310-x86_64-linux-gnu.so' => cp310
# - mac: '.cpython-310-darwin.so' => cp310
# - win: '.cp310-win_amd64.pyd' => cp310
# - win: '.pyd' => cp37 (uses _cpython_abis())
# - pypy: '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73
# - graalpy: '.graalpy-38-native-x86_64-darwin.dylib'
# => graalpy_38_native
ext_suffix = _get_config_var("EXT_SUFFIX", warn=True)
if not isinstance(ext_suffix, str) or ext_suffix[0] != ".":
raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')")
parts = ext_suffix.split(".")
if len(parts) < 3:
        # CPython 3.7 and earlier use ".pyd" on Windows.
return _cpython_abis(sys.version_info[:2])
soabi = parts[1]
if soabi.startswith("cpython"):
# non-windows
abi = "cp" + soabi.split("-")[1]
elif soabi.startswith("cp"):
# windows
abi = soabi.split("-")[0]
elif soabi.startswith("pypy"):
abi = "-".join(soabi.split("-")[:2])
elif soabi.startswith("graalpy"):
abi = "-".join(soabi.split("-")[:3])
elif soabi:
# pyston, ironpython, others?
abi = soabi
else:
return []
return [_normalize_string(abi)]
def generic_tags(
interpreter: Optional[str] = None,
abis: Optional[Iterable[str]] = None,
platforms: Optional[Iterable[str]] = None,
*,
warn: bool = False,
) -> Iterator[Tag]:
"""
Yields the tags for a generic interpreter.
The tags consist of:
- <interpreter>-<abi>-<platform>
The "none" ABI will be added if it was not explicitly provided.
"""
if not interpreter:
interp_name = interpreter_name()
interp_version = interpreter_version(warn=warn)
interpreter = "".join([interp_name, interp_version])
if abis is None:
abis = _generic_abi()
else:
abis = list(abis)
platforms = list(platforms or platform_tags())
if "none" not in abis:
abis.append("none")
for abi in abis:
for platform_ in platforms:
yield Tag(interpreter, abi, platform_)
def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
"""
Yields Python versions in descending order.
After the latest version, the major-only version will be yielded, and then
all previous versions of that major version.
"""
if len(py_version) > 1:
yield f"py{_version_nodot(py_version[:2])}"
yield f"py{py_version[0]}"
if len(py_version) > 1:
for minor in range(py_version[1] - 1, -1, -1):
yield f"py{_version_nodot((py_version[0], minor))}"
def compatible_tags(
python_version: Optional[PythonVersion] = None,
interpreter: Optional[str] = None,
platforms: Optional[Iterable[str]] = None,
) -> Iterator[Tag]:
"""
Yields the sequence of tags that are compatible with a specific version of Python.
The tags consist of:
- py*-none-<platform>
- <interpreter>-none-any # ... if `interpreter` is provided.
- py*-none-any
"""
if not python_version:
python_version = sys.version_info[:2]
platforms = list(platforms or platform_tags())
for version in _py_interpreter_range(python_version):
for platform_ in platforms:
yield Tag(version, "none", platform_)
if interpreter:
yield Tag(interpreter, "none", "any")
for version in _py_interpreter_range(python_version):
yield Tag(version, "none", "any")
def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
if not is_32bit:
return arch
if arch.startswith("ppc"):
return "ppc"
return "i386"
def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
formats = [cpu_arch]
if cpu_arch == "x86_64":
if version < (10, 4):
return []
formats.extend(["intel", "fat64", "fat32"])
elif cpu_arch == "i386":
if version < (10, 4):
return []
formats.extend(["intel", "fat32", "fat"])
elif cpu_arch == "ppc64":
# TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
if version > (10, 5) or version < (10, 4):
return []
formats.append("fat64")
elif cpu_arch == "ppc":
if version > (10, 6):
return []
formats.extend(["fat32", "fat"])
if cpu_arch in {"arm64", "x86_64"}:
formats.append("universal2")
if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
formats.append("universal")
return formats
def mac_platforms(
version: Optional[MacVersion] = None, arch: Optional[str] = None
) -> Iterator[str]:
"""
Yields the platform tags for a macOS system.
The `version` parameter is a two-item tuple specifying the macOS version to
generate platform tags for. The `arch` parameter is the CPU architecture to
generate platform tags for. Both parameters default to the appropriate value
for the current system.
"""
version_str, _, cpu_arch = platform.mac_ver()
if version is None:
version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
if version == (10, 16):
# When built against an older macOS SDK, Python will report macOS 10.16
# instead of the real version.
version_str = subprocess.run(
[
sys.executable,
"-sS",
"-c",
"import platform; print(platform.mac_ver()[0])",
],
check=True,
env={"SYSTEM_VERSION_COMPAT": "0"},
stdout=subprocess.PIPE,
text=True,
).stdout
version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
else:
version = version
if arch is None:
arch = _mac_arch(cpu_arch)
else:
arch = arch
if (10, 0) <= version and version < (11, 0):
# Prior to Mac OS 11, each yearly release of Mac OS bumped the
# "minor" version number. The major version was always 10.
for minor_version in range(version[1], -1, -1):
compat_version = 10, minor_version
binary_formats = _mac_binary_formats(compat_version, arch)
for binary_format in binary_formats:
yield "macosx_{major}_{minor}_{binary_format}".format(
major=10, minor=minor_version, binary_format=binary_format
)
if version >= (11, 0):
# Starting with Mac OS 11, each yearly release bumps the major version
# number. The minor versions are now the midyear updates.
for major_version in range(version[0], 10, -1):
compat_version = major_version, 0
binary_formats = _mac_binary_formats(compat_version, arch)
for binary_format in binary_formats:
yield "macosx_{major}_{minor}_{binary_format}".format(
major=major_version, minor=0, binary_format=binary_format
)
if version >= (11, 0):
# Mac OS 11 on x86_64 is compatible with binaries from previous releases.
# Arm64 support was introduced in 11.0, so no Arm binaries from previous
# releases exist.
#
# However, the "universal2" binary format can have a
# macOS version earlier than 11.0 when the x86_64 part of the binary supports
# that version of macOS.
if arch == "x86_64":
for minor_version in range(16, 3, -1):
compat_version = 10, minor_version
binary_formats = _mac_binary_formats(compat_version, arch)
for binary_format in binary_formats:
yield "macosx_{major}_{minor}_{binary_format}".format(
major=compat_version[0],
minor=compat_version[1],
binary_format=binary_format,
)
else:
for minor_version in range(16, 3, -1):
compat_version = 10, minor_version
binary_format = "universal2"
yield "macosx_{major}_{minor}_{binary_format}".format(
major=compat_version[0],
minor=compat_version[1],
binary_format=binary_format,
)
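# A minimal usage sketch (illustrative; not part of the vendored module):
# >>> list(mac_platforms((10, 6), "x86_64"))[:2]
# ['macosx_10_6_x86_64', 'macosx_10_6_intel']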
def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
linux = _normalize_string(sysconfig.get_platform())
if not linux.startswith("linux_"):
# we should never be here, just yield the sysconfig one and return
yield linux
return
if is_32bit:
if linux == "linux_x86_64":
linux = "linux_i686"
elif linux == "linux_aarch64":
linux = "linux_armv8l"
_, arch = linux.split("_", 1)
archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch])
yield from _manylinux.platform_tags(archs)
yield from _musllinux.platform_tags(archs)
for arch in archs:
yield f"linux_{arch}"
def _generic_platforms() -> Iterator[str]:
yield _normalize_string(sysconfig.get_platform())
def platform_tags() -> Iterator[str]:
"""
Provides the platform tags for this installation.
"""
if platform.system() == "Darwin":
return mac_platforms()
elif platform.system() == "Linux":
return _linux_platforms()
else:
return _generic_platforms()
def interpreter_name() -> str:
"""
Returns the name of the running interpreter.
Some implementations have a reserved, two-letter abbreviation which will
be returned when appropriate.
"""
name = sys.implementation.name
return INTERPRETER_SHORT_NAMES.get(name) or name
def interpreter_version(*, warn: bool = False) -> str:
"""
Returns the version of the running interpreter.
"""
version = _get_config_var("py_version_nodot", warn=warn)
if version:
version = str(version)
else:
version = _version_nodot(sys.version_info[:2])
return version
def _version_nodot(version: PythonVersion) -> str:
return "".join(map(str, version))
def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
"""
Returns the sequence of tag triples for the running interpreter.
The order of the sequence corresponds to priority order for the
interpreter, from most to least important.
"""
interp_name = interpreter_name()
if interp_name == "cp":
yield from cpython_tags(warn=warn)
else:
yield from generic_tags()
if interp_name == "pp":
interp = "pp3"
elif interp_name == "cp":
interp = "cp" + interpreter_version(warn=warn)
else:
interp = None
yield from compatible_tags(interpreter=interp)
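# A minimal usage sketch (illustrative; not part of the vendored module);
# the actual output depends on the running interpreter and platform:
# >>> first = next(sys_tags())
# >>> str(first)  # e.g. 'cp311-cp311-manylinux_2_17_x86_64' on one Linux box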


@@ -0,0 +1,172 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import re
from typing import FrozenSet, NewType, Tuple, Union, cast
from .tags import Tag, parse_tag
from .version import InvalidVersion, Version
BuildTag = Union[Tuple[()], Tuple[int, str]]
NormalizedName = NewType("NormalizedName", str)
class InvalidName(ValueError):
"""
An invalid distribution name; users should refer to the packaging user guide.
"""
class InvalidWheelFilename(ValueError):
"""
    An invalid wheel filename was found; users should refer to PEP 427.
"""
class InvalidSdistFilename(ValueError):
"""
    An invalid sdist filename was found; users should refer to the packaging user guide.
"""
# Core metadata spec for `Name`
_validate_regex = re.compile(
r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
)
_canonicalize_regex = re.compile(r"[-_.]+")
_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
# PEP 427: The build number must start with a digit.
_build_tag_regex = re.compile(r"(\d+)(.*)")
def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
if validate and not _validate_regex.match(name):
raise InvalidName(f"name is invalid: {name!r}")
# This is taken from PEP 503.
value = _canonicalize_regex.sub("-", name).lower()
return cast(NormalizedName, value)
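# A minimal usage sketch (illustrative; not part of the vendored module):
# >>> canonicalize_name("Friendly._.Bard")
# 'friendly-bard'
# >>> canonicalize_name("friendly bard", validate=True)  # raises InvalidName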
def is_normalized_name(name: str) -> bool:
return _normalized_regex.match(name) is not None
def canonicalize_version(
version: Union[Version, str], *, strip_trailing_zero: bool = True
) -> str:
"""
This is very similar to Version.__str__, but has one subtle difference
with the way it handles the release segment.
"""
if isinstance(version, str):
try:
parsed = Version(version)
except InvalidVersion:
# Legacy versions cannot be normalized
return version
else:
parsed = version
parts = []
# Epoch
if parsed.epoch != 0:
parts.append(f"{parsed.epoch}!")
# Release segment
release_segment = ".".join(str(x) for x in parsed.release)
if strip_trailing_zero:
# NB: This strips trailing '.0's to normalize
release_segment = re.sub(r"(\.0)+$", "", release_segment)
parts.append(release_segment)
# Pre-release
if parsed.pre is not None:
parts.append("".join(str(x) for x in parsed.pre))
# Post-release
if parsed.post is not None:
parts.append(f".post{parsed.post}")
# Development release
if parsed.dev is not None:
parts.append(f".dev{parsed.dev}")
# Local version segment
if parsed.local is not None:
parts.append(f"+{parsed.local}")
return "".join(parts)
def parse_wheel_filename(
filename: str,
) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
if not filename.endswith(".whl"):
raise InvalidWheelFilename(
f"Invalid wheel filename (extension must be '.whl'): {filename}"
)
filename = filename[:-4]
dashes = filename.count("-")
if dashes not in (4, 5):
raise InvalidWheelFilename(
f"Invalid wheel filename (wrong number of parts): {filename}"
)
parts = filename.split("-", dashes - 2)
name_part = parts[0]
# See PEP 427 for the rules on escaping the project name.
if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
raise InvalidWheelFilename(f"Invalid project name: {filename}")
name = canonicalize_name(name_part)
try:
version = Version(parts[1])
except InvalidVersion as e:
raise InvalidWheelFilename(
f"Invalid wheel filename (invalid version): {filename}"
) from e
if dashes == 5:
build_part = parts[2]
build_match = _build_tag_regex.match(build_part)
if build_match is None:
raise InvalidWheelFilename(
f"Invalid build number: {build_part} in '{filename}'"
)
build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
else:
build = ()
tags = parse_tag(parts[-1])
return (name, version, build, tags)
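# A minimal usage sketch (illustrative; not part of the vendored module):
# >>> name, ver, build, tags = parse_wheel_filename("packaging-23.2-py3-none-any.whl")
# >>> name, str(ver), build, sorted(str(t) for t in tags)
# ('packaging', '23.2', (), ['py3-none-any'])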
def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
if filename.endswith(".tar.gz"):
file_stem = filename[: -len(".tar.gz")]
elif filename.endswith(".zip"):
file_stem = filename[: -len(".zip")]
else:
raise InvalidSdistFilename(
f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
f" {filename}"
)
# We are requiring a PEP 440 version, which cannot contain dashes,
# so we split on the last dash.
name_part, sep, version_part = file_stem.rpartition("-")
if not sep:
raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")
name = canonicalize_name(name_part)
try:
version = Version(version_part)
except InvalidVersion as e:
raise InvalidSdistFilename(
f"Invalid sdist filename (invalid version): {filename}"
) from e
return (name, version)
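# A minimal usage sketch (illustrative; not part of the vendored module):
# >>> parse_sdist_filename("packaging-23.2.tar.gz")
# ('packaging', <Version('23.2')>)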


@@ -0,0 +1,563 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
"""
.. testsetup::
from packaging.version import parse, Version
"""
import itertools
import re
from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union
from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
LocalType = Tuple[Union[int, str], ...]
CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
CmpLocalType = Union[
NegativeInfinityType,
Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
]
CmpKey = Tuple[
int,
Tuple[int, ...],
CmpPrePostDevType,
CmpPrePostDevType,
CmpPrePostDevType,
CmpLocalType,
]
VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
class _Version(NamedTuple):
epoch: int
release: Tuple[int, ...]
dev: Optional[Tuple[str, int]]
pre: Optional[Tuple[str, int]]
post: Optional[Tuple[str, int]]
local: Optional[LocalType]
def parse(version: str) -> "Version":
"""Parse the given version string.
>>> parse('1.0.dev1')
<Version('1.0.dev1')>
:param version: The version string to parse.
:raises InvalidVersion: When the version string is not a valid version.
"""
return Version(version)
class InvalidVersion(ValueError):
"""Raised when a version string is not a valid version.
>>> Version("invalid")
Traceback (most recent call last):
...
packaging.version.InvalidVersion: Invalid version: 'invalid'
"""
class _BaseVersion:
_key: Tuple[Any, ...]
def __hash__(self) -> int:
return hash(self._key)
# Please keep the duplicated `isinstance` check
# in the six comparisons hereunder
# unless you find a way to avoid adding overhead function calls.
def __lt__(self, other: "_BaseVersion") -> bool:
if not isinstance(other, _BaseVersion):
return NotImplemented
return self._key < other._key
def __le__(self, other: "_BaseVersion") -> bool:
if not isinstance(other, _BaseVersion):
return NotImplemented
return self._key <= other._key
def __eq__(self, other: object) -> bool:
if not isinstance(other, _BaseVersion):
return NotImplemented
return self._key == other._key
def __ge__(self, other: "_BaseVersion") -> bool:
if not isinstance(other, _BaseVersion):
return NotImplemented
return self._key >= other._key
def __gt__(self, other: "_BaseVersion") -> bool:
if not isinstance(other, _BaseVersion):
return NotImplemented
return self._key > other._key
def __ne__(self, other: object) -> bool:
if not isinstance(other, _BaseVersion):
return NotImplemented
return self._key != other._key
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
_VERSION_PATTERN = r"""
v?
(?:
(?:(?P<epoch>[0-9]+)!)? # epoch
(?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
(?P<pre> # pre-release
[-_\.]?
(?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
[-_\.]?
(?P<pre_n>[0-9]+)?
)?
(?P<post> # post release
(?:-(?P<post_n1>[0-9]+))
|
(?:
[-_\.]?
(?P<post_l>post|rev|r)
[-_\.]?
(?P<post_n2>[0-9]+)?
)
)?
(?P<dev> # dev release
[-_\.]?
(?P<dev_l>dev)
[-_\.]?
(?P<dev_n>[0-9]+)?
)?
)
(?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
"""
VERSION_PATTERN = _VERSION_PATTERN
"""
A string containing the regular expression used to match a valid version.
The pattern is not anchored at either end, and is intended for embedding in larger
expressions (for example, matching a version number as part of a file name). The
regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
flags set.
:meta hide-value:
"""
class Version(_BaseVersion):
"""This class abstracts handling of a project's versions.
A :class:`Version` instance is comparison aware and can be compared and
sorted using the standard Python interfaces.
>>> v1 = Version("1.0a5")
>>> v2 = Version("1.0")
>>> v1
<Version('1.0a5')>
>>> v2
<Version('1.0')>
>>> v1 < v2
True
>>> v1 == v2
False
>>> v1 > v2
False
>>> v1 >= v2
False
>>> v1 <= v2
True
"""
_regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
_key: CmpKey
def __init__(self, version: str) -> None:
"""Initialize a Version object.
:param version:
The string representation of a version which will be parsed and normalized
before use.
:raises InvalidVersion:
If the ``version`` does not conform to PEP 440 in any way then this
exception will be raised.
"""
# Validate the version and parse it into pieces
match = self._regex.search(version)
if not match:
raise InvalidVersion(f"Invalid version: '{version}'")
# Store the parsed out pieces of the version
self._version = _Version(
epoch=int(match.group("epoch")) if match.group("epoch") else 0,
release=tuple(int(i) for i in match.group("release").split(".")),
pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
post=_parse_letter_version(
match.group("post_l"), match.group("post_n1") or match.group("post_n2")
),
dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
local=_parse_local_version(match.group("local")),
)
# Generate a key which will be used for sorting
self._key = _cmpkey(
self._version.epoch,
self._version.release,
self._version.pre,
self._version.post,
self._version.dev,
self._version.local,
)
def __repr__(self) -> str:
"""A representation of the Version that shows all internal state.
>>> Version('1.0.0')
<Version('1.0.0')>
"""
return f"<Version('{self}')>"
def __str__(self) -> str:
"""A string representation of the version that can be rounded-tripped.
>>> str(Version("1.0a5"))
'1.0a5'
"""
parts = []
# Epoch
if self.epoch != 0:
parts.append(f"{self.epoch}!")
# Release segment
parts.append(".".join(str(x) for x in self.release))
# Pre-release
if self.pre is not None:
parts.append("".join(str(x) for x in self.pre))
# Post-release
if self.post is not None:
parts.append(f".post{self.post}")
# Development release
if self.dev is not None:
parts.append(f".dev{self.dev}")
# Local version segment
if self.local is not None:
parts.append(f"+{self.local}")
return "".join(parts)
@property
def epoch(self) -> int:
"""The epoch of the version.
>>> Version("2.0.0").epoch
0
>>> Version("1!2.0.0").epoch
1
"""
return self._version.epoch
@property
def release(self) -> Tuple[int, ...]:
"""The components of the "release" segment of the version.
>>> Version("1.2.3").release
(1, 2, 3)
>>> Version("2.0.0").release
(2, 0, 0)
>>> Version("1!2.0.0.post0").release
(2, 0, 0)
Includes trailing zeroes but not the epoch or any pre-release / development /
post-release suffixes.
"""
return self._version.release
@property
def pre(self) -> Optional[Tuple[str, int]]:
"""The pre-release segment of the version.
>>> print(Version("1.2.3").pre)
None
>>> Version("1.2.3a1").pre
('a', 1)
>>> Version("1.2.3b1").pre
('b', 1)
>>> Version("1.2.3rc1").pre
('rc', 1)
"""
return self._version.pre
@property
def post(self) -> Optional[int]:
"""The post-release number of the version.
>>> print(Version("1.2.3").post)
None
>>> Version("1.2.3.post1").post
1
"""
return self._version.post[1] if self._version.post else None
@property
def dev(self) -> Optional[int]:
"""The development number of the version.
>>> print(Version("1.2.3").dev)
None
>>> Version("1.2.3.dev1").dev
1
"""
return self._version.dev[1] if self._version.dev else None
@property
def local(self) -> Optional[str]:
"""The local version segment of the version.
>>> print(Version("1.2.3").local)
None
>>> Version("1.2.3+abc").local
'abc'
"""
if self._version.local:
return ".".join(str(x) for x in self._version.local)
else:
return None
@property
def public(self) -> str:
"""The public portion of the version.
>>> Version("1.2.3").public
'1.2.3'
>>> Version("1.2.3+abc").public
'1.2.3'
>>> Version("1.2.3+abc.dev1").public
'1.2.3'
"""
return str(self).split("+", 1)[0]
@property
def base_version(self) -> str:
"""The "base version" of the version.
>>> Version("1.2.3").base_version
'1.2.3'
>>> Version("1.2.3+abc").base_version
'1.2.3'
>>> Version("1!1.2.3+abc.dev1").base_version
'1!1.2.3'
The "base version" is the public version of the project without any pre or post
release markers.
"""
parts = []
# Epoch
if self.epoch != 0:
parts.append(f"{self.epoch}!")
# Release segment
parts.append(".".join(str(x) for x in self.release))
return "".join(parts)
@property
def is_prerelease(self) -> bool:
"""Whether this version is a pre-release.
>>> Version("1.2.3").is_prerelease
False
>>> Version("1.2.3a1").is_prerelease
True
>>> Version("1.2.3b1").is_prerelease
True
>>> Version("1.2.3rc1").is_prerelease
True
>>> Version("1.2.3dev1").is_prerelease
True
"""
return self.dev is not None or self.pre is not None
@property
def is_postrelease(self) -> bool:
"""Whether this version is a post-release.
>>> Version("1.2.3").is_postrelease
False
>>> Version("1.2.3.post1").is_postrelease
True
"""
return self.post is not None
@property
def is_devrelease(self) -> bool:
"""Whether this version is a development release.
>>> Version("1.2.3").is_devrelease
False
>>> Version("1.2.3.dev1").is_devrelease
True
"""
return self.dev is not None
@property
def major(self) -> int:
"""The first item of :attr:`release` or ``0`` if unavailable.
>>> Version("1.2.3").major
1
"""
return self.release[0] if len(self.release) >= 1 else 0
@property
def minor(self) -> int:
"""The second item of :attr:`release` or ``0`` if unavailable.
>>> Version("1.2.3").minor
2
>>> Version("1").minor
0
"""
return self.release[1] if len(self.release) >= 2 else 0
@property
def micro(self) -> int:
"""The third item of :attr:`release` or ``0`` if unavailable.
>>> Version("1.2.3").micro
3
>>> Version("1").micro
0
"""
return self.release[2] if len(self.release) >= 3 else 0
def _parse_letter_version(
letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
) -> Optional[Tuple[str, int]]:
if letter:
# We consider there to be an implicit 0 in a pre-release if there is
# not a numeral associated with it.
if number is None:
number = 0
# We normalize any letters to their lower case form
letter = letter.lower()
# We consider some words to be alternate spellings of other words and
# in those cases we want to normalize the spellings to our preferred
# spelling.
if letter == "alpha":
letter = "a"
elif letter == "beta":
letter = "b"
elif letter in ["c", "pre", "preview"]:
letter = "rc"
elif letter in ["rev", "r"]:
letter = "post"
return letter, int(number)
if not letter and number:
# We assume if we are given a number, but we are not given a letter
# then this is using the implicit post release syntax (e.g. 1.0-1)
letter = "post"
return letter, int(number)
return None
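# A minimal sketch of the normalization above (illustrative; not part of the
# vendored module):
# >>> _parse_letter_version("alpha", None)
# ('a', 0)
# >>> _parse_letter_version(None, "1")  # implicit post release, e.g. "1.0-1"
# ('post', 1)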
_local_version_separators = re.compile(r"[\._-]")
def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
"""
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
"""
if local is not None:
return tuple(
part.lower() if not part.isdigit() else int(part)
for part in _local_version_separators.split(local)
)
return None
def _cmpkey(
epoch: int,
release: Tuple[int, ...],
pre: Optional[Tuple[str, int]],
post: Optional[Tuple[str, int]],
dev: Optional[Tuple[str, int]],
local: Optional[LocalType],
) -> CmpKey:
    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll reverse the list, drop all the now-leading
    # zeros until we come to something non-zero, then re-reverse the rest back
    # into the correct order, make it a tuple, and use that for our sorting key.
_release = tuple(
reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
)
# We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
# We'll do this by abusing the pre segment, but we _only_ want to do this
# if there is not a pre or a post segment. If we have one of those then
# the normal sorting rules will handle this case correctly.
if pre is None and post is None and dev is not None:
_pre: CmpPrePostDevType = NegativeInfinity
# Versions without a pre-release (except as noted above) should sort after
# those with one.
elif pre is None:
_pre = Infinity
else:
_pre = pre
# Versions without a post segment should sort before those with one.
if post is None:
_post: CmpPrePostDevType = NegativeInfinity
else:
_post = post
# Versions without a development segment should sort after those with one.
if dev is None:
_dev: CmpPrePostDevType = Infinity
else:
_dev = dev
if local is None:
# Versions without a local segment should sort before those with one.
_local: CmpLocalType = NegativeInfinity
else:
# Versions with a local segment need that segment parsed to implement
# the sorting rules in PEP440.
# - Alpha numeric segments sort before numeric segments
# - Alpha numeric segments sort lexicographically
# - Numeric segments sort numerically
# - Shorter versions sort before longer versions when the prefixes
# match exactly
_local = tuple(
(i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
)
return epoch, _release, _pre, _post, _dev, _local
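# A minimal sketch of the ordering these keys produce (illustrative; not part
# of the vendored module):
# >>> sorted(Version(v) for v in ["1.0", "1.0.dev0", "1.0a1", "1.0.post1"])
# [<Version('1.0.dev0')>, <Version('1.0a1')>, <Version('1.0')>, <Version('1.0.post1')>]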

File diff suppressed because it is too large


@@ -1,608 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2005-2010 ActiveState Software Inc.
# Copyright (c) 2013 Eddy Petrișor
"""Utilities for determining application-specific dirs.
See <http://github.com/ActiveState/appdirs> for details and usage.
"""
# Dev Notes:
# - MSDN on where to store app data files:
# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
__version_info__ = (1, 4, 3)
__version__ = '.'.join(map(str, __version_info__))
import sys
import os
PY3 = sys.version_info[0] == 3
if PY3:
unicode = str
if sys.platform.startswith('java'):
import platform
os_name = platform.java_ver()[3][0]
if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc.
system = 'win32'
elif os_name.startswith('Mac'): # "Mac OS X", etc.
system = 'darwin'
else: # "Linux", "SunOS", "FreeBSD", etc.
# Setting this to "linux2" is not ideal, but only Windows or Mac
# are actually checked for and the rest of the module expects
# *sys.platform* style strings.
system = 'linux2'
else:
system = sys.platform
def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
r"""Return full path to the user-specific data dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"roaming" (boolean, default False) can be set True to use the Windows
roaming appdata directory. That means that for users on a Windows
network setup for roaming profiles, this user data will be
sync'd on login. See
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
for a discussion of issues.
Typical user data directories are:
Mac OS X: ~/Library/Application Support/<AppName>
Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined
Win XP (not roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
Win XP (roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>
For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
That means, by default "~/.local/share/<AppName>".
"""
if system == "win32":
if appauthor is None:
appauthor = appname
const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
path = os.path.normpath(_get_win_folder(const))
if appname:
if appauthor is not False:
path = os.path.join(path, appauthor, appname)
else:
path = os.path.join(path, appname)
elif system == 'darwin':
path = os.path.expanduser('~/Library/Application Support/')
if appname:
path = os.path.join(path, appname)
else:
path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
if appname:
path = os.path.join(path, appname)
if appname and version:
path = os.path.join(path, version)
return path
def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
r"""Return full path to the user-shared data dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"multipath" is an optional parameter only applicable to *nix
which indicates that the entire list of data dirs should be
returned. By default, the first item from XDG_DATA_DIRS is
returned, or '/usr/local/share/<AppName>',
if XDG_DATA_DIRS is not set
Typical site data directories are:
Mac OS X: /Library/Application Support/<AppName>
Unix: /usr/local/share/<AppName> or /usr/share/<AppName>
Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7.
For Unix, this is using the $XDG_DATA_DIRS[0] default.
WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
"""
if system == "win32":
if appauthor is None:
appauthor = appname
path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
if appname:
if appauthor is not False:
path = os.path.join(path, appauthor, appname)
else:
path = os.path.join(path, appname)
elif system == 'darwin':
path = os.path.expanduser('/Library/Application Support')
if appname:
path = os.path.join(path, appname)
else:
# XDG default for $XDG_DATA_DIRS
# only first, if multipath is False
path = os.getenv('XDG_DATA_DIRS',
os.pathsep.join(['/usr/local/share', '/usr/share']))
pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
if appname:
if version:
appname = os.path.join(appname, version)
pathlist = [os.sep.join([x, appname]) for x in pathlist]
if multipath:
path = os.pathsep.join(pathlist)
else:
path = pathlist[0]
return path
if appname and version:
path = os.path.join(path, version)
return path
def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
r"""Return full path to the user-specific config dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"roaming" (boolean, default False) can be set True to use the Windows
roaming appdata directory. That means that for users on a Windows
network setup for roaming profiles, this user data will be
sync'd on login. See
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
for a discussion of issues.
Typical user config directories are:
Mac OS X: same as user_data_dir
Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined
Win *: same as user_data_dir
For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
That means, by default "~/.config/<AppName>".
"""
if system in ["win32", "darwin"]:
path = user_data_dir(appname, appauthor, None, roaming)
else:
path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
if appname:
path = os.path.join(path, appname)
if appname and version:
path = os.path.join(path, version)
return path
def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
r"""Return full path to the user-shared data dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"multipath" is an optional parameter only applicable to *nix
which indicates that the entire list of config dirs should be
returned. By default, the first item from XDG_CONFIG_DIRS is
returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set
Typical site config directories are:
Mac OS X: same as site_data_dir
Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
$XDG_CONFIG_DIRS
Win *: same as site_data_dir
Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False
WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
"""
if system in ["win32", "darwin"]:
path = site_data_dir(appname, appauthor)
if appname and version:
path = os.path.join(path, version)
else:
# XDG default for $XDG_CONFIG_DIRS
# only first, if multipath is False
path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
if appname:
if version:
appname = os.path.join(appname, version)
pathlist = [os.sep.join([x, appname]) for x in pathlist]
if multipath:
path = os.pathsep.join(pathlist)
else:
path = pathlist[0]
return path
def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
r"""Return full path to the user-specific cache dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"opinion" (boolean) can be False to disable the appending of
"Cache" to the base app data dir for Windows. See
discussion below.
Typical user cache directories are:
Mac OS X: ~/Library/Caches/<AppName>
Unix: ~/.cache/<AppName> (XDG default)
Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache
On Windows the only suggestion in the MSDN docs is that local settings go in
the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
app data dir (the default returned by `user_data_dir` above). Apps typically
put cache data somewhere *under* the given dir here. Some examples:
...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
...\Acme\SuperApp\Cache\1.0
OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
This can be disabled with the `opinion=False` option.
"""
if system == "win32":
if appauthor is None:
appauthor = appname
path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
if appname:
if appauthor is not False:
path = os.path.join(path, appauthor, appname)
else:
path = os.path.join(path, appname)
if opinion:
path = os.path.join(path, "Cache")
elif system == 'darwin':
path = os.path.expanduser('~/Library/Caches')
if appname:
path = os.path.join(path, appname)
else:
path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
if appname:
path = os.path.join(path, appname)
if appname and version:
path = os.path.join(path, version)
return path
def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
r"""Return full path to the user-specific state dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"roaming" (boolean, default False) can be set True to use the Windows
roaming appdata directory. That means that for users on a Windows
network setup for roaming profiles, this user data will be
sync'd on login. See
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
for a discussion of issues.
Typical user state directories are:
Mac OS X: same as user_data_dir
Unix: ~/.local/state/<AppName> # or in $XDG_STATE_HOME, if defined
Win *: same as user_data_dir
For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state>
to extend the XDG spec and support $XDG_STATE_HOME.
That means, by default "~/.local/state/<AppName>".
"""
if system in ["win32", "darwin"]:
path = user_data_dir(appname, appauthor, None, roaming)
else:
path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
if appname:
path = os.path.join(path, appname)
if appname and version:
path = os.path.join(path, version)
return path
def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
r"""Return full path to the user-specific log dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"opinion" (boolean) can be False to disable the appending of
"Logs" to the base app data dir for Windows, and "log" to the
base cache dir for Unix. See discussion below.
Typical user log directories are:
Mac OS X: ~/Library/Logs/<AppName>
Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined
Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs
On Windows the only suggestion in the MSDN docs is that local settings
go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
examples of what some windows apps use for a logs dir.)
OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
value for Windows and appends "log" to the user cache dir for Unix.
This can be disabled with the `opinion=False` option.
"""
if system == "darwin":
path = os.path.join(
os.path.expanduser('~/Library/Logs'),
appname)
elif system == "win32":
path = user_data_dir(appname, appauthor, version)
version = False
if opinion:
path = os.path.join(path, "Logs")
else:
path = user_cache_dir(appname, appauthor, version)
version = False
if opinion:
path = os.path.join(path, "log")
if appname and version:
path = os.path.join(path, version)
return path
class AppDirs(object):
"""Convenience wrapper for getting application dirs."""
def __init__(self, appname=None, appauthor=None, version=None,
roaming=False, multipath=False):
self.appname = appname
self.appauthor = appauthor
self.version = version
self.roaming = roaming
self.multipath = multipath
@property
def user_data_dir(self):
return user_data_dir(self.appname, self.appauthor,
version=self.version, roaming=self.roaming)
@property
def site_data_dir(self):
return site_data_dir(self.appname, self.appauthor,
version=self.version, multipath=self.multipath)
@property
def user_config_dir(self):
return user_config_dir(self.appname, self.appauthor,
version=self.version, roaming=self.roaming)
@property
def site_config_dir(self):
return site_config_dir(self.appname, self.appauthor,
version=self.version, multipath=self.multipath)
@property
def user_cache_dir(self):
return user_cache_dir(self.appname, self.appauthor,
version=self.version)
@property
def user_state_dir(self):
return user_state_dir(self.appname, self.appauthor,
version=self.version)
@property
def user_log_dir(self):
return user_log_dir(self.appname, self.appauthor,
version=self.version)
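# A minimal sketch of the wrapper (comment only), mirroring the self-test
# below; names and the resulting path are illustrative:
#
#     >>> dirs = AppDirs("MyApp", "MyCompany", version="1.0")
#     >>> dirs.user_cache_dir
#     '/home/alice/.cache/MyApp/1.0'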
#---- internal support stuff
def _get_win_folder_from_registry(csidl_name):
"""This is a fallback technique at best. I'm not sure if using the
registry for this guarantees us the correct answer for all CSIDL_*
names.
"""
if PY3:
import winreg as _winreg
else:
import _winreg
shell_folder_name = {
"CSIDL_APPDATA": "AppData",
"CSIDL_COMMON_APPDATA": "Common AppData",
"CSIDL_LOCAL_APPDATA": "Local AppData",
}[csidl_name]
key = _winreg.OpenKey(
_winreg.HKEY_CURRENT_USER,
r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
)
dir, type = _winreg.QueryValueEx(key, shell_folder_name)
return dir
def _get_win_folder_with_pywin32(csidl_name):
from win32com.shell import shellcon, shell
dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
# Try to make this a unicode path because SHGetFolderPath does
# not return unicode strings when there is unicode data in the
# path.
try:
dir = unicode(dir)
# Downgrade to short path name if have highbit chars. See
# <http://bugs.activestate.com/show_bug.cgi?id=85099>.
has_high_char = False
for c in dir:
if ord(c) > 255:
has_high_char = True
break
if has_high_char:
try:
import win32api
dir = win32api.GetShortPathName(dir)
except ImportError:
pass
except UnicodeError:
pass
return dir
def _get_win_folder_with_ctypes(csidl_name):
import ctypes
csidl_const = {
"CSIDL_APPDATA": 26,
"CSIDL_COMMON_APPDATA": 35,
"CSIDL_LOCAL_APPDATA": 28,
}[csidl_name]
buf = ctypes.create_unicode_buffer(1024)
ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
# Downgrade to short path name if have highbit chars. See
# <http://bugs.activestate.com/show_bug.cgi?id=85099>.
has_high_char = False
for c in buf:
if ord(c) > 255:
has_high_char = True
break
if has_high_char:
buf2 = ctypes.create_unicode_buffer(1024)
if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
buf = buf2
return buf.value
def _get_win_folder_with_jna(csidl_name):
import array
from com.sun import jna
from com.sun.jna.platform import win32
buf_size = win32.WinDef.MAX_PATH * 2
buf = array.zeros('c', buf_size)
shell = win32.Shell32.INSTANCE
shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
dir = jna.Native.toString(buf.tostring()).rstrip("\0")
# Downgrade to short path name if have highbit chars. See
# <http://bugs.activestate.com/show_bug.cgi?id=85099>.
has_high_char = False
for c in dir:
if ord(c) > 255:
has_high_char = True
break
if has_high_char:
buf = array.zeros('c', buf_size)
kernel = win32.Kernel32.INSTANCE
if kernel.GetShortPathName(dir, buf, buf_size):
dir = jna.Native.toString(buf.tostring()).rstrip("\0")
return dir
if system == "win32":
try:
import win32com.shell
_get_win_folder = _get_win_folder_with_pywin32
except ImportError:
try:
from ctypes import windll
_get_win_folder = _get_win_folder_with_ctypes
except ImportError:
try:
import com.sun.jna
_get_win_folder = _get_win_folder_with_jna
except ImportError:
_get_win_folder = _get_win_folder_from_registry
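# Whichever backend import succeeded above is exposed uniformly as
# _get_win_folder, e.g. (Windows only; the resulting path is illustrative):
#
#     >>> _get_win_folder("CSIDL_LOCAL_APPDATA")
#     'C:\\Users\\alice\\AppData\\Local'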
#---- self test code
if __name__ == "__main__":
appname = "MyApp"
appauthor = "MyCompany"
props = ("user_data_dir",
"user_config_dir",
"user_cache_dir",
"user_state_dir",
"user_log_dir",
"site_data_dir",
"site_config_dir")
print("-- app dirs %s --" % __version__)
print("-- app dirs (with optional 'version')")
dirs = AppDirs(appname, appauthor, version="1.0")
for prop in props:
print("%s: %s" % (prop, getattr(dirs, prop)))
print("\n-- app dirs (without optional 'version')")
dirs = AppDirs(appname, appauthor)
for prop in props:
print("%s: %s" % (prop, getattr(dirs, prop)))
print("\n-- app dirs (without optional 'appauthor')")
dirs = AppDirs(appname)
for prop in props:
print("%s: %s" % (prop, getattr(dirs, prop)))
print("\n-- app dirs (with disabled 'appauthor')")
dirs = AppDirs(appname, appauthor=False)
for prop in props:
print("%s: %s" % (prop, getattr(dirs, prop)))


@@ -1,14 +1,15 @@
 # This file is dual licensed under the terms of the Apache License, Version
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
-from __future__ import absolute_import, division, print_function
-
-from .__about__ import (
-    __author__, __copyright__, __email__, __license__, __summary__, __title__,
-    __uri__, __version__
-)
-
-__all__ = [
-    "__title__", "__summary__", "__uri__", "__version__", "__author__",
-    "__email__", "__license__", "__copyright__",
-]
+__title__ = "packaging"
+__summary__ = "Core utilities for Python packages"
+__uri__ = "https://github.com/pypa/packaging"
+
+__version__ = "23.1"
+
+__author__ = "Donald Stufft and individual contributors"
+__email__ = "donald@stufft.io"
+
+__license__ = "BSD-2-Clause or Apache-2.0"
+__copyright__ = "2014-2019 %s" % __author__


@@ -1,30 +0,0 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import sys
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
# flake8: noqa
if PY3:
string_types = str,
else:
string_types = basestring,
def with_metaclass(meta, *bases):
"""
Create a base class with a metaclass.
"""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})


@@ -1,68 +1,61 @@
 # This file is dual licensed under the terms of the Apache License, Version
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
-from __future__ import absolute_import, division, print_function
-
-class Infinity(object):
-    def __repr__(self):
+class InfinityType:
+    def __repr__(self) -> str:
         return "Infinity"
-    def __hash__(self):
+    def __hash__(self) -> int:
         return hash(repr(self))
-    def __lt__(self, other):
+    def __lt__(self, other: object) -> bool:
         return False
-    def __le__(self, other):
+    def __le__(self, other: object) -> bool:
         return False
-    def __eq__(self, other):
+    def __eq__(self, other: object) -> bool:
         return isinstance(other, self.__class__)
-    def __ne__(self, other):
-        return not isinstance(other, self.__class__)
-    def __gt__(self, other):
+    def __gt__(self, other: object) -> bool:
         return True
-    def __ge__(self, other):
+    def __ge__(self, other: object) -> bool:
         return True
-    def __neg__(self):
+    def __neg__(self: object) -> "NegativeInfinityType":
         return NegativeInfinity

-Infinity = Infinity()
+Infinity = InfinityType()

-class NegativeInfinity(object):
-    def __repr__(self):
+class NegativeInfinityType:
+    def __repr__(self) -> str:
         return "-Infinity"
-    def __hash__(self):
+    def __hash__(self) -> int:
         return hash(repr(self))
-    def __lt__(self, other):
+    def __lt__(self, other: object) -> bool:
         return True
-    def __le__(self, other):
+    def __le__(self, other: object) -> bool:
         return True
-    def __eq__(self, other):
+    def __eq__(self, other: object) -> bool:
         return isinstance(other, self.__class__)
-    def __ne__(self, other):
-        return not isinstance(other, self.__class__)
-    def __gt__(self, other):
+    def __gt__(self, other: object) -> bool:
         return False
-    def __ge__(self, other):
+    def __ge__(self, other: object) -> bool:
         return False
-    def __neg__(self):
+    def __neg__(self: object) -> InfinityType:
         return Infinity

-NegativeInfinity = NegativeInfinity()
+NegativeInfinity = NegativeInfinityType()
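The two singletons above exist only as comparison sentinels for building sort
keys. A minimal standalone sketch of that pattern (assuming the top-level
packaging distribution is installed; the tuples are illustrative):

    from packaging._structures import Infinity, NegativeInfinity

    # NegativeInfinity orders before any value, Infinity after any value.
    keys = [(1, Infinity), (1, ("a", 0)), (1, NegativeInfinity)]
    print(sorted(keys))  # -Infinity entry first, Infinity entry last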


@@ -1,26 +1,35 @@
 # This file is dual licensed under the terms of the Apache License, Version
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
-from __future__ import absolute_import, division, print_function
-
 import operator
 import os
 import platform
 import sys
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union

-from pkg_resources.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd
-from pkg_resources.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString
-from pkg_resources.extern.pyparsing import Literal as L  # noqa
-
-from ._compat import string_types
-from .specifiers import Specifier, InvalidSpecifier
+from ._parser import (
+    MarkerAtom,
+    MarkerList,
+    Op,
+    Value,
+    Variable,
+    parse_marker as _parse_marker,
+)
+from ._tokenizer import ParserSyntaxError
+from .specifiers import InvalidSpecifier, Specifier
+from .utils import canonicalize_name

 __all__ = [
-    "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName",
-    "Marker", "default_environment",
+    "InvalidMarker",
+    "UndefinedComparison",
+    "UndefinedEnvironmentName",
+    "Marker",
+    "default_environment",
 ]
+
+Operator = Callable[[str, str], bool]

 class InvalidMarker(ValueError):
     """
@@ -41,119 +50,37 @@ class UndefinedEnvironmentName(ValueError):
     """

-class Node(object):
-    def __init__(self, value):
-        self.value = value
-
-    def __str__(self):
-        return str(self.value)
-
-    def __repr__(self):
-        return "<{0}({1!r})>".format(self.__class__.__name__, str(self))
-
-    def serialize(self):
-        raise NotImplementedError
-
-class Variable(Node):
-    def serialize(self):
-        return str(self)
-
-class Value(Node):
-    def serialize(self):
-        return '"{0}"'.format(self)
-
-class Op(Node):
-    def serialize(self):
-        return str(self)
-
-VARIABLE = (
-    L("implementation_version") |
-    L("platform_python_implementation") |
-    L("implementation_name") |
-    L("python_full_version") |
-    L("platform_release") |
-    L("platform_version") |
-    L("platform_machine") |
-    L("platform_system") |
-    L("python_version") |
-    L("sys_platform") |
-    L("os_name") |
-    L("os.name") |  # PEP-345
-    L("sys.platform") |  # PEP-345
-    L("platform.version") |  # PEP-345
-    L("platform.machine") |  # PEP-345
-    L("platform.python_implementation") |  # PEP-345
-    L("python_implementation") |  # undocumented setuptools legacy
-    L("extra")
-)
-ALIASES = {
-    'os.name': 'os_name',
-    'sys.platform': 'sys_platform',
-    'platform.version': 'platform_version',
-    'platform.machine': 'platform_machine',
-    'platform.python_implementation': 'platform_python_implementation',
-    'python_implementation': 'platform_python_implementation'
-}
-VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
-
-VERSION_CMP = (
-    L("===") |
-    L("==") |
-    L(">=") |
-    L("<=") |
-    L("!=") |
-    L("~=") |
-    L(">") |
-    L("<")
-)
-
-MARKER_OP = VERSION_CMP | L("not in") | L("in")
-MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))
-
-MARKER_VALUE = QuotedString("'") | QuotedString('"')
-MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))
-
-BOOLOP = L("and") | L("or")
-
-MARKER_VAR = VARIABLE | MARKER_VALUE
-
-MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
-MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))
-
-LPAREN = L("(").suppress()
-RPAREN = L(")").suppress()
-
-MARKER_EXPR = Forward()
-MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
-MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)
-
-MARKER = stringStart + MARKER_EXPR + stringEnd
-
-def _coerce_parse_result(results):
-    if isinstance(results, ParseResults):
-        return [_coerce_parse_result(i) for i in results]
-    else:
-        return results
+def _normalize_extra_values(results: Any) -> Any:
+    """
+    Normalize extra values.
+    """
+    if isinstance(results[0], tuple):
+        lhs, op, rhs = results[0]
+        if isinstance(lhs, Variable) and lhs.value == "extra":
+            normalized_extra = canonicalize_name(rhs.value)
+            rhs = Value(normalized_extra)
+        elif isinstance(rhs, Variable) and rhs.value == "extra":
+            normalized_extra = canonicalize_name(lhs.value)
+            lhs = Value(normalized_extra)
+        results[0] = lhs, op, rhs
+    return results

-def _format_marker(marker, first=True):
-    assert isinstance(marker, (list, tuple, string_types))
+def _format_marker(
+    marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True
+) -> str:
+    assert isinstance(marker, (list, tuple, str))

     # Sometimes we have a structure like [[...]] which is a single item list
     # where the single item is itself it's own list. In that case we want skip
     # the rest of this function so that we don't get extraneous () on the
     # outside.
-    if (isinstance(marker, list) and len(marker) == 1 and
-            isinstance(marker[0], (list, tuple))):
+    if (
+        isinstance(marker, list)
+        and len(marker) == 1
+        and isinstance(marker[0], (list, tuple))
+    ):
         return _format_marker(marker[0])

     if isinstance(marker, list):
@@ -168,7 +95,7 @@ def _format_marker(marker, first=True):
     return marker

-_operators = {
+_operators: Dict[str, Operator] = {
     "in": lambda lhs, rhs: lhs in rhs,
     "not in": lambda lhs, rhs: lhs not in rhs,
     "<": operator.lt,
@@ -180,42 +107,38 @@ _operators = {
 }

-def _eval_op(lhs, op, rhs):
+def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
     try:
         spec = Specifier("".join([op.serialize(), rhs]))
     except InvalidSpecifier:
         pass
     else:
-        return spec.contains(lhs)
+        return spec.contains(lhs, prereleases=True)

-    oper = _operators.get(op.serialize())
+    oper: Optional[Operator] = _operators.get(op.serialize())
     if oper is None:
-        raise UndefinedComparison(
-            "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
-        )
+        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")

     return oper(lhs, rhs)

-_undefined = object()
-
-def _get_env(environment, name):
-    value = environment.get(name, _undefined)
-
-    if value is _undefined:
-        raise UndefinedEnvironmentName(
-            "{0!r} does not exist in evaluation environment.".format(name)
-        )
-
-    return value
+def _normalize(*values: str, key: str) -> Tuple[str, ...]:
+    # PEP 685 Comparison of extra names for optional distribution dependencies
+    # https://peps.python.org/pep-0685/
+    # > When comparing extra names, tools MUST normalize the names being
+    # > compared using the semantics outlined in PEP 503 for names
+    if key == "extra":
+        return tuple(canonicalize_name(v) for v in values)
+
+    # other environment markers don't have such standards
+    return values

-def _evaluate_markers(markers, environment):
-    groups = [[]]
+def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
+    groups: List[List[bool]] = [[]]

     for marker in markers:
-        assert isinstance(marker, (list, tuple, string_types))
+        assert isinstance(marker, (list, tuple, str))

         if isinstance(marker, list):
             groups[-1].append(_evaluate_markers(marker, environment))
@@ -223,12 +146,15 @@ def _evaluate_markers(markers, environment):
             lhs, op, rhs = marker

             if isinstance(lhs, Variable):
-                lhs_value = _get_env(environment, lhs.value)
+                environment_key = lhs.value
+                lhs_value = environment[environment_key]
                 rhs_value = rhs.value
             else:
                 lhs_value = lhs.value
-                rhs_value = _get_env(environment, rhs.value)
+                environment_key = rhs.value
+                rhs_value = environment[environment_key]

+            lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
             groups[-1].append(_eval_op(lhs_value, op, rhs_value))
         else:
             assert marker in ["and", "or"]
@@ -238,22 +164,17 @@ def _evaluate_markers(markers, environment):
     return any(all(item) for item in groups)

-def format_full_version(info):
-    version = '{0.major}.{0.minor}.{0.micro}'.format(info)
+def format_full_version(info: "sys._version_info") -> str:
+    version = "{0.major}.{0.minor}.{0.micro}".format(info)
     kind = info.releaselevel
-    if kind != 'final':
+    if kind != "final":
         version += kind[0] + str(info.serial)
     return version

-def default_environment():
-    if hasattr(sys, 'implementation'):
-        iver = format_full_version(sys.implementation.version)
-        implementation_name = sys.implementation.name
-    else:
-        iver = '0'
-        implementation_name = ''
-
+def default_environment() -> Dict[str, str]:
+    iver = format_full_version(sys.implementation.version)
+    implementation_name = sys.implementation.name
     return {
         "implementation_name": implementation_name,
         "implementation_version": iver,
@@ -264,28 +185,53 @@ def default_environment():
         "platform_version": platform.version(),
         "python_full_version": platform.python_version(),
         "platform_python_implementation": platform.python_implementation(),
-        "python_version": platform.python_version()[:3],
+        "python_version": ".".join(platform.python_version_tuple()[:2]),
         "sys_platform": sys.platform,
     }

-class Marker(object):
-
-    def __init__(self, marker):
+class Marker:
+    def __init__(self, marker: str) -> None:
+        # Note: We create a Marker object without calling this constructor in
+        #       packaging.requirements.Requirement. If any additional logic is
+        #       added here, make sure to mirror/adapt Requirement.
         try:
-            self._markers = _coerce_parse_result(MARKER.parseString(marker))
-        except ParseException as e:
-            err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
-                marker, marker[e.loc:e.loc + 8])
-            raise InvalidMarker(err_str)
+            self._markers = _normalize_extra_values(_parse_marker(marker))
+            # The attribute `_markers` can be described in terms of a recursive type:
+            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
+            #
+            # For example, the following expression:
+            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
+            #
+            # is parsed into:
+            # [
+            #     (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
+            #     'and',
+            #     [
+            #         (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
+            #         'or',
+            #         (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
+            #     ]
+            # ]
+        except ParserSyntaxError as e:
+            raise InvalidMarker(str(e)) from e

-    def __str__(self):
+    def __str__(self) -> str:
         return _format_marker(self._markers)

-    def __repr__(self):
-        return "<Marker({0!r})>".format(str(self))
+    def __repr__(self) -> str:
+        return f"<Marker('{self}')>"

-    def evaluate(self, environment=None):
+    def __hash__(self) -> int:
+        return hash((self.__class__.__name__, str(self)))
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, Marker):
+            return NotImplemented
+
+        return str(self) == str(other)
+
+    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
         """Evaluate a marker.

         Return the boolean from evaluating the given marker against the
@@ -295,7 +241,12 @@ class Marker(object):
         The environment is determined from the current Python process.
         """
         current_environment = default_environment()
+        current_environment["extra"] = ""
         if environment is not None:
             current_environment.update(environment)
+            # The API used to allow setting extra to None. We need to handle this
+            # case for backwards compatibility.
+            if current_environment["extra"] is None:
+                current_environment["extra"] = ""

         return _evaluate_markers(self._markers, current_environment)
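The rewrite keeps the public surface the same, so evaluation still looks like
this (a minimal sketch against the top-level packaging distribution, not the
vendored copy; the marker string is illustrative):

    from packaging.markers import Marker

    m = Marker('python_version >= "3.8" and extra == "socks"')
    print(m.evaluate({"extra": "socks"}))  # True on Python 3.8+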


@@ -1,18 +1,14 @@
 # This file is dual licensed under the terms of the Apache License, Version
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
-from __future__ import absolute_import, division, print_function
-
-import string
-import re
+import urllib.parse
+from typing import Any, List, Optional, Set

-from pkg_resources.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException
-from pkg_resources.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine
-from pkg_resources.extern.pyparsing import Literal as L  # noqa
-from pkg_resources.extern.six.moves.urllib import parse as urlparse
-
-from .markers import MARKER_EXPR, Marker
-from .specifiers import LegacySpecifier, Specifier, SpecifierSet
+from ._parser import parse_requirement as _parse_requirement
+from ._tokenizer import ParserSyntaxError
+from .markers import Marker, _normalize_extra_values
+from .specifiers import SpecifierSet

 class InvalidRequirement(ValueError):
@@ -21,58 +17,7 @@ class InvalidRequirement(ValueError):
     """

-ALPHANUM = Word(string.ascii_letters + string.digits)
-
-LBRACKET = L("[").suppress()
-RBRACKET = L("]").suppress()
-LPAREN = L("(").suppress()
-RPAREN = L(")").suppress()
-COMMA = L(",").suppress()
-SEMICOLON = L(";").suppress()
-AT = L("@").suppress()
-
-PUNCTUATION = Word("-_.")
-IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
-IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
-
-NAME = IDENTIFIER("name")
-EXTRA = IDENTIFIER
-
-URI = Regex(r'[^ ]+')("url")
-URL = (AT + URI)
-
-EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
-EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
-
-VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
-VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
-
-VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
-VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE),
-                       joinString=",", adjacent=False)("_raw_spec")
-_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
-_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '')
-
-VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
-VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
-
-MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
-MARKER_EXPR.setParseAction(
-    lambda s, l, t: Marker(s[t._original_start:t._original_end])
-)
-MARKER_SEPERATOR = SEMICOLON
-MARKER = MARKER_SEPERATOR + MARKER_EXPR
-
-VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
-URL_AND_MARKER = URL + Optional(MARKER)
-
-NAMED_REQUIREMENT = \
-    NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
-
-REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
-
-class Requirement(object):
+class Requirement:
     """Parse a requirement.

     Parse a given requirement string into its parts, such as name, specifier,
@@ -85,43 +30,66 @@ class Requirement(object):
     # the thing as well as the version? What about the markers?
     # TODO: Can we normalize the name and extra name?

-    def __init__(self, requirement_string):
+    def __init__(self, requirement_string: str) -> None:
         try:
-            req = REQUIREMENT.parseString(requirement_string)
-        except ParseException as e:
-            raise InvalidRequirement(
-                "Invalid requirement, parse error at \"{0!r}\"".format(
-                    requirement_string[e.loc:e.loc + 8]))
+            parsed = _parse_requirement(requirement_string)
+        except ParserSyntaxError as e:
+            raise InvalidRequirement(str(e)) from e

-        self.name = req.name
-        if req.url:
-            parsed_url = urlparse.urlparse(req.url)
-            if not (parsed_url.scheme and parsed_url.netloc) or (
-                    not parsed_url.scheme and not parsed_url.netloc):
-                raise InvalidRequirement("Invalid URL given")
-            self.url = req.url
+        self.name: str = parsed.name
+        if parsed.url:
+            parsed_url = urllib.parse.urlparse(parsed.url)
+            if parsed_url.scheme == "file":
+                if urllib.parse.urlunparse(parsed_url) != parsed.url:
+                    raise InvalidRequirement("Invalid URL given")
+            elif not (parsed_url.scheme and parsed_url.netloc) or (
+                not parsed_url.scheme and not parsed_url.netloc
+            ):
+                raise InvalidRequirement(f"Invalid URL: {parsed.url}")
+            self.url: Optional[str] = parsed.url
         else:
             self.url = None
-        self.extras = set(req.extras.asList() if req.extras else [])
-        self.specifier = SpecifierSet(req.specifier)
-        self.marker = req.marker if req.marker else None
+        self.extras: Set[str] = set(parsed.extras if parsed.extras else [])
+        self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
+        self.marker: Optional[Marker] = None
+        if parsed.marker is not None:
+            self.marker = Marker.__new__(Marker)
+            self.marker._markers = _normalize_extra_values(parsed.marker)

-    def __str__(self):
-        parts = [self.name]
+    def __str__(self) -> str:
+        parts: List[str] = [self.name]

         if self.extras:
-            parts.append("[{0}]".format(",".join(sorted(self.extras))))
+            formatted_extras = ",".join(sorted(self.extras))
+            parts.append(f"[{formatted_extras}]")

         if self.specifier:
             parts.append(str(self.specifier))

         if self.url:
-            parts.append("@ {0}".format(self.url))
+            parts.append(f"@ {self.url}")
+            if self.marker:
+                parts.append(" ")

         if self.marker:
-            parts.append("; {0}".format(self.marker))
+            parts.append(f"; {self.marker}")

         return "".join(parts)

-    def __repr__(self):
-        return "<Requirement({0!r})>".format(str(self))
+    def __repr__(self) -> str:
+        return f"<Requirement('{self}')>"
+
+    def __hash__(self) -> int:
+        return hash((self.__class__.__name__, str(self)))
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, Requirement):
+            return NotImplemented
+
+        return (
+            self.name == other.name
+            and self.extras == other.extras
+            and self.specifier == other.specifier
+            and self.url == other.url
+            and self.marker == other.marker
+        )
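With the pyparsing grammar gone, the parsing behaviour is easiest to see from
the public API (a minimal sketch against the top-level packaging distribution;
the requirement string is illustrative):

    from packaging.requirements import Requirement

    req = Requirement('requests[security]>=2.8.1; python_version < "3.12"')
    print(req.name, sorted(req.extras), str(req.specifier), req.marker)
    # requests ['security'] >=2.8.1 python_version < "3.12"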

File diff suppressed because it is too large


@@ -1,14 +1,141 @@
 # This file is dual licensed under the terms of the Apache License, Version
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
-from __future__ import absolute_import, division, print_function

 import re
+from typing import FrozenSet, NewType, Tuple, Union, cast
+
+from .tags import Tag, parse_tag
+from .version import InvalidVersion, Version
+
+BuildTag = Union[Tuple[()], Tuple[int, str]]
+NormalizedName = NewType("NormalizedName", str)
+
+class InvalidWheelFilename(ValueError):
+    """
+    An invalid wheel filename was found, users should refer to PEP 427.
+    """
+
+class InvalidSdistFilename(ValueError):
+    """
+    An invalid sdist filename was found, users should refer to the packaging user guide.
+    """

 _canonicalize_regex = re.compile(r"[-_.]+")
+# PEP 427: The build number must start with a digit.
+_build_tag_regex = re.compile(r"(\d+)(.*)")

-def canonicalize_name(name):
+def canonicalize_name(name: str) -> NormalizedName:
     # This is taken from PEP 503.
-    return _canonicalize_regex.sub("-", name).lower()
+    value = _canonicalize_regex.sub("-", name).lower()
+    return cast(NormalizedName, value)
+
+def canonicalize_version(
+    version: Union[Version, str], *, strip_trailing_zero: bool = True
+) -> str:
+    """
+    This is very similar to Version.__str__, but has one subtle difference
+    with the way it handles the release segment.
+    """
+    if isinstance(version, str):
+        try:
+            parsed = Version(version)
+        except InvalidVersion:
+            # Legacy versions cannot be normalized
+            return version
+    else:
+        parsed = version
+
+    parts = []
+
+    # Epoch
+    if parsed.epoch != 0:
+        parts.append(f"{parsed.epoch}!")
+
+    # Release segment
+    release_segment = ".".join(str(x) for x in parsed.release)
+    if strip_trailing_zero:
+        # NB: This strips trailing '.0's to normalize
+        release_segment = re.sub(r"(\.0)+$", "", release_segment)
+    parts.append(release_segment)
+
+    # Pre-release
+    if parsed.pre is not None:
+        parts.append("".join(str(x) for x in parsed.pre))
+
+    # Post-release
+    if parsed.post is not None:
+        parts.append(f".post{parsed.post}")
+
+    # Development release
+    if parsed.dev is not None:
+        parts.append(f".dev{parsed.dev}")
+
+    # Local version segment
+    if parsed.local is not None:
+        parts.append(f"+{parsed.local}")
+
+    return "".join(parts)
+
+def parse_wheel_filename(
+    filename: str,
+) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
+    if not filename.endswith(".whl"):
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (extension must be '.whl'): {filename}"
+        )
+
+    filename = filename[:-4]
+    dashes = filename.count("-")
+    if dashes not in (4, 5):
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (wrong number of parts): {filename}"
+        )
+
+    parts = filename.split("-", dashes - 2)
+    name_part = parts[0]
+    # See PEP 427 for the rules on escaping the project name
+    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
+        raise InvalidWheelFilename(f"Invalid project name: {filename}")
+    name = canonicalize_name(name_part)
+    version = Version(parts[1])
+    if dashes == 5:
+        build_part = parts[2]
+        build_match = _build_tag_regex.match(build_part)
+        if build_match is None:
+            raise InvalidWheelFilename(
+                f"Invalid build number: {build_part} in '{filename}'"
+            )
+        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
+    else:
+        build = ()
+    tags = parse_tag(parts[-1])
+    return (name, version, build, tags)
+
+def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
+    if filename.endswith(".tar.gz"):
+        file_stem = filename[: -len(".tar.gz")]
+    elif filename.endswith(".zip"):
+        file_stem = filename[: -len(".zip")]
+    else:
+        raise InvalidSdistFilename(
+            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
+            f" {filename}"
+        )
+
+    # We are requiring a PEP 440 version, which cannot contain dashes,
+    # so we split on the last dash.
+    name_part, sep, version_part = file_stem.rpartition("-")
+    if not sep:
+        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")
+
+    name = canonicalize_name(name_part)
+    version = Version(version_part)
+    return (name, version)
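A minimal sketch of the new helpers (top-level packaging distribution assumed;
the filenames are illustrative):

    from packaging.utils import canonicalize_name, parse_wheel_filename

    print(canonicalize_name("Django_Rest.Framework"))  # django-rest-framework
    name, version, build, tags = parse_wheel_filename(
        "packaging-23.1-py3-none-any.whl"
    )
    print(name, version, build)  # packaging 23.1 ()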


@@ -1,162 +1,116 @@
 # This file is dual licensed under the terms of the Apache License, Version
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
-from __future__ import absolute_import, division, print_function
+"""
+.. testsetup::
+
+    from packaging.version import parse, Version
+"""

 import collections
 import itertools
 import re
+from typing import Any, Callable, Optional, SupportsInt, Tuple, Union

-from ._structures import Infinity
+from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType

-__all__ = [
-    "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
-]
+__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
+
+InfiniteTypes = Union[InfinityType, NegativeInfinityType]
+PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
+SubLocalType = Union[InfiniteTypes, int, str]
+LocalType = Union[
+    NegativeInfinityType,
+    Tuple[
+        Union[
+            SubLocalType,
+            Tuple[SubLocalType, str],
+            Tuple[NegativeInfinityType, SubLocalType],
+        ],
+        ...,
+    ],
+]
+CmpKey = Tuple[
+    int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
+]
+VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]

 _Version = collections.namedtuple(
-    "_Version",
-    ["epoch", "release", "dev", "pre", "post", "local"],
+    "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
 )

-def parse(version):
-    """
-    Parse the given version string and return either a :class:`Version` object
-    or a :class:`LegacyVersion` object depending on if the given version is
-    a valid PEP 440 version or a legacy version.
-    """
-    try:
-        return Version(version)
-    except InvalidVersion:
-        return LegacyVersion(version)
+def parse(version: str) -> "Version":
+    """Parse the given version string.
+
+    >>> parse('1.0.dev1')
+    <Version('1.0.dev1')>
+
+    :param version: The version string to parse.
+    :raises InvalidVersion: When the version string is not a valid version.
+    """
+    return Version(version)

 class InvalidVersion(ValueError):
-    """
-    An invalid version was found, users should refer to PEP 440.
+    """Raised when a version string is not a valid version.
+
+    >>> Version("invalid")
+    Traceback (most recent call last):
+        ...
+    packaging.version.InvalidVersion: Invalid version: 'invalid'
     """

-class _BaseVersion(object):
-    def __hash__(self):
-        return hash(self._key)
-
-    def __lt__(self, other):
-        return self._compare(other, lambda s, o: s < o)
-
-    def __le__(self, other):
-        return self._compare(other, lambda s, o: s <= o)
-
-    def __eq__(self, other):
-        return self._compare(other, lambda s, o: s == o)
-
-    def __ge__(self, other):
-        return self._compare(other, lambda s, o: s >= o)
-
-    def __gt__(self, other):
-        return self._compare(other, lambda s, o: s > o)
-
-    def __ne__(self, other):
-        return self._compare(other, lambda s, o: s != o)
-
-    def _compare(self, other, method):
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return method(self._key, other._key)
-
-class LegacyVersion(_BaseVersion):
-    def __init__(self, version):
-        self._version = str(version)
-        self._key = _legacy_cmpkey(self._version)
-
-    def __str__(self):
-        return self._version
-
-    def __repr__(self):
-        return "<LegacyVersion({0})>".format(repr(str(self)))
-
-    @property
-    def public(self):
-        return self._version
-
-    @property
-    def base_version(self):
-        return self._version
-
-    @property
-    def local(self):
-        return None
-
-    @property
-    def is_prerelease(self):
-        return False
-
-    @property
-    def is_postrelease(self):
-        return False
-
-_legacy_version_component_re = re.compile(
-    r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
-)
-
-_legacy_version_replacement_map = {
-    "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
-}
-
-def _parse_version_parts(s):
-    for part in _legacy_version_component_re.split(s):
-        part = _legacy_version_replacement_map.get(part, part)
-
-        if not part or part == ".":
-            continue
-
-        if part[:1] in "0123456789":
-            # pad for numeric comparison
-            yield part.zfill(8)
-        else:
-            yield "*" + part
-
-    # ensure that alpha/beta/candidate are before final
-    yield "*final"
-
-def _legacy_cmpkey(version):
-    # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch
-    # greater than or equal to 0. This will effectively put the LegacyVersion,
-    # which uses the defacto standard originally implemented by setuptools,
-    # as before all PEP 440 versions.
-    epoch = -1
-
-    # This scheme is taken from pkg_resources.parse_version setuptools prior to
-    # it's adoption of the packaging library.
-    parts = []
-    for part in _parse_version_parts(version.lower()):
-        if part.startswith("*"):
-            # remove "-" before a prerelease tag
-            if part < "*final":
-                while parts and parts[-1] == "*final-":
-                    parts.pop()
-
-            # remove trailing zeros from each series of numeric parts
-            while parts and parts[-1] == "00000000":
-                parts.pop()
-
-        parts.append(part)
-
-    parts = tuple(parts)
-
-    return epoch, parts
+class _BaseVersion:
+    _key: Tuple[Any, ...]
+
+    def __hash__(self) -> int:
+        return hash(self._key)
+
+    # Please keep the duplicated `isinstance` check
+    # in the six comparisons hereunder
+    # unless you find a way to avoid adding overhead function calls.
+    def __lt__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+        return self._key < other._key
+
+    def __le__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+        return self._key <= other._key
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+        return self._key == other._key
+
+    def __ge__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+        return self._key >= other._key
+
+    def __gt__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+        return self._key > other._key
+
+    def __ne__(self, other: object) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+        return self._key != other._key

 # Deliberately not anchored to the start and end of the string, to make it
 # easier for 3rd party code to reuse
-VERSION_PATTERN = r"""
+_VERSION_PATTERN = r"""
     v?
     (?:
         (?:(?P<epoch>[0-9]+)!)?                           # epoch
@@ -187,36 +141,71 @@ VERSION_PATTERN = r"""
     (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
 """

+VERSION_PATTERN = _VERSION_PATTERN
+"""
+A string containing the regular expression used to match a valid version.
+
+The pattern is not anchored at either end, and is intended for embedding in larger
+expressions (for example, matching a version number as part of a file name). The
+regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
+flags set.
+
+:meta hide-value:
+"""

 class Version(_BaseVersion):
+    """This class abstracts handling of a project's versions.
+
+    A :class:`Version` instance is comparison aware and can be compared and
+    sorted using the standard Python interfaces.
+
+    >>> v1 = Version("1.0a5")
+    >>> v2 = Version("1.0")
+    >>> v1
+    <Version('1.0a5')>
+    >>> v2
+    <Version('1.0')>
+    >>> v1 < v2
+    True
+    >>> v1 == v2
+    False
+    >>> v1 > v2
+    False
+    >>> v1 >= v2
+    False
+    >>> v1 <= v2
+    True
+    """

-    _regex = re.compile(
-        r"^\s*" + VERSION_PATTERN + r"\s*$",
-        re.VERBOSE | re.IGNORECASE,
-    )
+    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+    _key: CmpKey

-    def __init__(self, version):
+    def __init__(self, version: str) -> None:
+        """Initialize a Version object.
+
+        :param version:
+            The string representation of a version which will be parsed and normalized
+            before use.
+        :raises InvalidVersion:
+            If the ``version`` does not conform to PEP 440 in any way then this
+            exception will be raised.
+        """

         # Validate the version and parse it into pieces
         match = self._regex.search(version)
         if not match:
-            raise InvalidVersion("Invalid version: '{0}'".format(version))
+            raise InvalidVersion(f"Invalid version: '{version}'")

         # Store the parsed out pieces of the version
         self._version = _Version(
             epoch=int(match.group("epoch")) if match.group("epoch") else 0,
             release=tuple(int(i) for i in match.group("release").split(".")),
-            pre=_parse_letter_version(
-                match.group("pre_l"),
-                match.group("pre_n"),
-            ),
+            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
             post=_parse_letter_version(
-                match.group("post_l"),
-                match.group("post_n1") or match.group("post_n2"),
-            ),
-            dev=_parse_letter_version(
-                match.group("dev_l"),
-                match.group("dev_n"),
+                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
             ),
+            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
             local=_parse_local_version(match.group("local")),
         )
@@ -230,72 +219,241 @@ class Version(_BaseVersion):
             self._version.local,
         )

-    def __repr__(self):
-        return "<Version({0})>".format(repr(str(self)))
+    def __repr__(self) -> str:
+        """A representation of the Version that shows all internal state.
+
+        >>> Version('1.0.0')
+        <Version('1.0.0')>
+        """
+        return f"<Version('{self}')>"

-    def __str__(self):
+    def __str__(self) -> str:
+        """A string representation of the version that can be rounded-tripped.
+
+        >>> str(Version("1.0a5"))
+        '1.0a5'
+        """
         parts = []

         # Epoch
-        if self._version.epoch != 0:
-            parts.append("{0}!".format(self._version.epoch))
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")

         # Release segment
-        parts.append(".".join(str(x) for x in self._version.release))
+        parts.append(".".join(str(x) for x in self.release))

         # Pre-release
-        if self._version.pre is not None:
-            parts.append("".join(str(x) for x in self._version.pre))
+        if self.pre is not None:
+            parts.append("".join(str(x) for x in self.pre))

         # Post-release
-        if self._version.post is not None:
-            parts.append(".post{0}".format(self._version.post[1]))
+        if self.post is not None:
+            parts.append(f".post{self.post}")

         # Development release
-        if self._version.dev is not None:
-            parts.append(".dev{0}".format(self._version.dev[1]))
+        if self.dev is not None:
+            parts.append(f".dev{self.dev}")

         # Local version segment
-        if self._version.local is not None:
-            parts.append(
-                "+{0}".format(".".join(str(x) for x in self._version.local))
-            )
+        if self.local is not None:
+            parts.append(f"+{self.local}")

         return "".join(parts)

+    @property
+    def epoch(self) -> int:
+        """The epoch of the version.
+
+        >>> Version("2.0.0").epoch
+        0
+        >>> Version("1!2.0.0").epoch
+        1
+        """
+        _epoch: int = self._version.epoch
+        return _epoch
+
+    @property
+    def release(self) -> Tuple[int, ...]:
+        """The components of the "release" segment of the version.
+
+        >>> Version("1.2.3").release
+        (1, 2, 3)
+        >>> Version("2.0.0").release
+        (2, 0, 0)
+        >>> Version("1!2.0.0.post0").release
+        (2, 0, 0)
+
+        Includes trailing zeroes but not the epoch or any pre-release / development /
+        post-release suffixes.
+        """
+        _release: Tuple[int, ...] = self._version.release
+        return _release
+
+    @property
+    def pre(self) -> Optional[Tuple[str, int]]:
+        """The pre-release segment of the version.
+
+        >>> print(Version("1.2.3").pre)
+        None
+        >>> Version("1.2.3a1").pre
+        ('a', 1)
+        >>> Version("1.2.3b1").pre
+        ('b', 1)
+        >>> Version("1.2.3rc1").pre
+        ('rc', 1)
+        """
+        _pre: Optional[Tuple[str, int]] = self._version.pre
+        return _pre
+
+    @property
+    def post(self) -> Optional[int]:
+        """The post-release number of the version.
+
+        >>> print(Version("1.2.3").post)
+        None
+        >>> Version("1.2.3.post1").post
+        1
+        """
+        return self._version.post[1] if self._version.post else None
+
+    @property
+    def dev(self) -> Optional[int]:
+        """The development number of the version.
+
+        >>> print(Version("1.2.3").dev)
+        None
+        >>> Version("1.2.3.dev1").dev
+        1
+        """
+        return self._version.dev[1] if self._version.dev else None
+
+    @property
+    def local(self) -> Optional[str]:
+        """The local version segment of the version.
+
+        >>> print(Version("1.2.3").local)
+        None
+        >>> Version("1.2.3+abc").local
+        'abc'
+        """
+        if self._version.local:
+            return ".".join(str(x) for x in self._version.local)
+        else:
+            return None
+
     @property
-    def public(self):
+    def public(self) -> str:
+        """The public portion of the version.
+
+        >>> Version("1.2.3").public
+        '1.2.3'
+        >>> Version("1.2.3+abc").public
+        '1.2.3'
+        >>> Version("1.2.3+abc.dev1").public
+        '1.2.3'
+        """
         return str(self).split("+", 1)[0]

     @property
-    def base_version(self):
+    def base_version(self) -> str:
+        """The "base version" of the version.
+
+        >>> Version("1.2.3").base_version
+        '1.2.3'
+        >>> Version("1.2.3+abc").base_version
+        '1.2.3'
+        >>> Version("1!1.2.3+abc.dev1").base_version
+        '1!1.2.3'
+
+        The "base version" is the public version of the project without any pre or post
+        release markers.
+        """
         parts = []

         # Epoch
-        if self._version.epoch != 0:
-            parts.append("{0}!".format(self._version.epoch))
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")

         # Release segment
-        parts.append(".".join(str(x) for x in self._version.release))
+        parts.append(".".join(str(x) for x in self.release))

         return "".join(parts)

     @property
-    def local(self):
-        version_string = str(self)
-        if "+" in version_string:
-            return version_string.split("+", 1)[1]
-
-    @property
-    def is_prerelease(self):
-        return bool(self._version.dev or self._version.pre)
+    def is_prerelease(self) -> bool:
+        """Whether this version is a pre-release.
+
+        >>> Version("1.2.3").is_prerelease
+        False
+        >>> Version("1.2.3a1").is_prerelease
+        True
+        >>> Version("1.2.3b1").is_prerelease
+        True
+        >>> Version("1.2.3rc1").is_prerelease
+        True
+        >>> Version("1.2.3dev1").is_prerelease
+        True
+        """
+        return self.dev is not None or self.pre is not None

     @property
-    def is_postrelease(self):
-        return bool(self._version.post)
+    def is_postrelease(self) -> bool:
+        """Whether this version is a post-release.
+
+        >>> Version("1.2.3").is_postrelease
+        False
+        >>> Version("1.2.3.post1").is_postrelease
+        True
+        """
+        return self.post is not None
+
+    @property
+    def is_devrelease(self) -> bool:
+        """Whether this version is a development release.
+
+        >>> Version("1.2.3").is_devrelease
+        False
+        >>> Version("1.2.3.dev1").is_devrelease
+        True
+        """
+        return self.dev is not None
+
+    @property
+    def major(self) -> int:
+        """The first item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").major
+        1
+        """
+        return self.release[0] if len(self.release) >= 1 else 0
+
+    @property
+    def minor(self) -> int:
+        """The second item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").minor
+        2
+        >>> Version("1").minor
+        0
+        """
+        return self.release[1] if len(self.release) >= 2 else 0
+
+    @property
+    def micro(self) -> int:
+        """The third item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").micro
+        3
+        >>> Version("1").micro
+        0
+        """
+        return self.release[2] if len(self.release) >= 3 else 0

-def _parse_letter_version(letter, number):
+def _parse_letter_version(
+    letter: str, number: Union[str, bytes, SupportsInt]
+) -> Optional[Tuple[str, int]]:
+
     if letter:
         # We consider there to be an implicit 0 in a pre-release if there is
         # not a numeral associated with it.
@@ -325,34 +483,40 @@ def _parse_letter_version(letter, number):
         return letter, int(number)

+    return None
+

-_local_version_seperators = re.compile(r"[\._-]")
+_local_version_separators = re.compile(r"[\._-]")

-def _parse_local_version(local):
+def _parse_local_version(local: str) -> Optional[LocalType]:
     """
     Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
     """
     if local is not None:
         return tuple(
             part.lower() if not part.isdigit() else int(part)
-            for part in _local_version_seperators.split(local)
+            for part in _local_version_separators.split(local)
         )
+    return None

-def _cmpkey(epoch, release, pre, post, dev, local):
+def _cmpkey(
+    epoch: int,
+    release: Tuple[int, ...],
+    pre: Optional[Tuple[str, int]],
+    post: Optional[Tuple[str, int]],
+    dev: Optional[Tuple[str, int]],
+    local: Optional[Tuple[SubLocalType]],
+) -> CmpKey:

     # When we compare a release version, we want to compare it with all of the
     # trailing zeros removed. So we'll use a reverse the list, drop all the now
     # leading zeros until we come to something non zero, then take the rest
     # re-reverse it back into the correct order and make it a tuple and use
     # that for our sorting key.
-    release = tuple(
-        reversed(list(
-            itertools.dropwhile(
-                lambda x: x == 0,
-                reversed(release),
-            )
-        ))
+    _release = tuple(
+        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
     )

     # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
@@ -360,23 +524,31 @@ def _cmpkey(epoch, release, pre, post, dev, local):
     # if there is not a pre or a post segment. If we have one of those then
     # the normal sorting rules will handle this case correctly.
     if pre is None and post is None and dev is not None:
-        pre = -Infinity
+        _pre: PrePostDevType = NegativeInfinity
     # Versions without a pre-release (except as noted above) should sort after
     # those with one.
     elif pre is None:
-        pre = Infinity
+        _pre = Infinity
+    else:
+        _pre = pre

     # Versions without a post segment should sort before those with one.
     if post is None:
-        post = -Infinity
+        _post: PrePostDevType = NegativeInfinity
+    else:
+        _post = post

     # Versions without a development segment should sort after those with one.
     if dev is None:
-        dev = Infinity
+        _dev: PrePostDevType = Infinity
+    else:
+        _dev = dev

     if local is None:
         # Versions without a local segment should sort before those with one.
-        local = -Infinity
+        _local: LocalType = NegativeInfinity
     else:
         # Versions with a local segment need that segment parsed to implement
         # the sorting rules in PEP440.
@@ -385,9 +557,8 @@ def _cmpkey(epoch, release, pre, post, dev, local):
         #   - Numeric segments sort numerically
        #   - Shorter versions sort before longer versions when the prefixes
         #     match exactly
-        local = tuple(
-            (i, "") if isinstance(i, int) else (-Infinity, i)
-            for i in local
+        _local = tuple(
+            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
         )

-    return epoch, release, pre, post, dev, local
+    return epoch, _release, _pre, _post, _dev, _local
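With LegacyVersion removed, every input must be PEP 440; ordering across
epochs, dev-, pre- and post-releases still follows the _cmpkey logic above
(a minimal sketch against the top-level packaging distribution):

    from packaging.version import Version, parse

    print(parse("1.0.dev1") < Version("1.0a1") < Version("1.0"))  # True
    print(Version("1!0.5") > Version("2.0"))  # True: epoch compares first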

File diff suppressed because it is too large


@@ -1,868 +0,0 @@
"""Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2015 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
import functools
import itertools
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.10.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PY34 = sys.version_info[0:2] >= (3, 4)
if PY3:
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result) # Invokes __set__.
try:
# This is a bit ugly, but it avoids running this again by
# removing this descriptor.
delattr(obj.__class__, self.name)
except AttributeError:
pass
return result
class MovedModule(_LazyDescr):
def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if new is None:
new = name
self.mod = new
else:
self.mod = old
def _resolve(self):
return _import_module(self.mod)
def __getattr__(self, attr):
_module = self._resolve()
value = getattr(_module, attr)
setattr(self, attr, value)
return value
class _LazyModule(types.ModuleType):
def __init__(self, name):
super(_LazyModule, self).__init__(name)
self.__doc__ = self.__class__.__doc__
def __dir__(self):
attrs = ["__doc__", "__name__"]
attrs += [attr.name for attr in self._moved_attributes]
return attrs
# Subclasses should override this
_moved_attributes = []
class MovedAttribute(_LazyDescr):
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr
def _resolve(self):
module = _import_module(self.mod)
return getattr(module, self.attr)
class _SixMetaPathImporter(object):
"""
A meta path importer to import six.moves and its submodules.
This class implements a PEP302 finder and loader. It should be compatible
with Python 2.5 and all existing versions of Python3
"""
def __init__(self, six_module_name):
self.name = six_module_name
self.known_modules = {}
def _add_module(self, mod, *fullnames):
for fullname in fullnames:
self.known_modules[self.name + "." + fullname] = mod
def _get_module(self, fullname):
return self.known_modules[self.name + "." + fullname]
def find_module(self, fullname, path=None):
if fullname in self.known_modules:
return self
return None
def __get_module(self, fullname):
try:
return self.known_modules[fullname]
except KeyError:
raise ImportError("This loader does not know module " + fullname)
def load_module(self, fullname):
try:
# in case of a reload
return sys.modules[fullname]
except KeyError:
pass
mod = self.__get_module(fullname)
if isinstance(mod, MovedModule):
mod = mod._resolve()
else:
mod.__loader__ = self
sys.modules[fullname] = mod
return mod
def is_package(self, fullname):
"""
Return true, if the named module is a package.
We need this method to get correct spec objects with
Python 3.4 (see PEP451)
"""
return hasattr(self.__get_module(fullname), "__path__")
def get_code(self, fullname):
"""Return None
Required, if is_package is implemented"""
self.__get_module(fullname) # eventually raises ImportError
return None
get_source = get_code # same as get_code
_importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
"""Lazy loading of moved objects"""
__path__ = [] # mark as package
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserDict", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
# Add windows specific modules.
if sys.platform == "win32":
_moved_attributes += [
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
_importer._add_module(attr, "moves." + attr.name)
del attr
_MovedItems._moved_attributes = _moved_attributes
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
MovedAttribute("quote", "urllib", "urllib.parse"),
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
"moves.urllib_parse", "moves.urllib.parse")
class Module_six_moves_urllib_error(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_error"""
_urllib_error_moved_attributes = [
MovedAttribute("URLError", "urllib2", "urllib.error"),
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
"moves.urllib_error", "moves.urllib.error")
class Module_six_moves_urllib_request(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_request"""
_urllib_request_moved_attributes = [
MovedAttribute("urlopen", "urllib2", "urllib.request"),
MovedAttribute("install_opener", "urllib2", "urllib.request"),
MovedAttribute("build_opener", "urllib2", "urllib.request"),
MovedAttribute("pathname2url", "urllib", "urllib.request"),
MovedAttribute("url2pathname", "urllib", "urllib.request"),
MovedAttribute("getproxies", "urllib", "urllib.request"),
MovedAttribute("Request", "urllib2", "urllib.request"),
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
"moves.urllib_request", "moves.urllib.request")
class Module_six_moves_urllib_response(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_response"""
_urllib_response_moved_attributes = [
MovedAttribute("addbase", "urllib", "urllib.response"),
MovedAttribute("addclosehook", "urllib", "urllib.response"),
MovedAttribute("addinfo", "urllib", "urllib.response"),
MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
"moves.urllib_response", "moves.urllib.response")
class Module_six_moves_urllib_robotparser(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
_urllib_robotparser_moved_attributes = [
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
"moves.urllib_robotparser", "moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
__path__ = [] # mark as package
parse = _importer._get_module("moves.urllib_parse")
error = _importer._get_module("moves.urllib_error")
request = _importer._get_module("moves.urllib_request")
response = _importer._get_module("moves.urllib_response")
robotparser = _importer._get_module("moves.urllib_robotparser")
def __dir__(self):
return ['parse', 'error', 'request', 'response', 'robotparser']
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
"moves.urllib")
def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move)
def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,))
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_closure = "__closure__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_closure = "func_closure"
_func_code = "func_code"
_func_defaults = "func_defaults"
_func_globals = "func_globals"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
def get_unbound_function(unbound):
return unbound
create_bound_method = types.MethodType
def create_unbound_method(func, cls):
return func
Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
def create_unbound_method(func, cls):
return types.MethodType(func, None, cls)
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(get_unbound_function,
"""Get the function out of a possibly unbound function""")
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
if PY3:
def iterkeys(d, **kw):
return iter(d.keys(**kw))
def itervalues(d, **kw):
return iter(d.values(**kw))
def iteritems(d, **kw):
return iter(d.items(**kw))
def iterlists(d, **kw):
return iter(d.lists(**kw))
viewkeys = operator.methodcaller("keys")
viewvalues = operator.methodcaller("values")
viewitems = operator.methodcaller("items")
else:
def iterkeys(d, **kw):
return d.iterkeys(**kw)
def itervalues(d, **kw):
return d.itervalues(**kw)
def iteritems(d, **kw):
return d.iteritems(**kw)
def iterlists(d, **kw):
return d.iterlists(**kw)
viewkeys = operator.methodcaller("viewkeys")
viewvalues = operator.methodcaller("viewvalues")
viewitems = operator.methodcaller("viewitems")
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
"Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
"Return an iterator over the (key, [values]) pairs of a dictionary.")
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
unichr = chr
import struct
int2byte = struct.Struct(">B").pack
del struct
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
_assertCountEqual = "assertCountEqual"
if sys.version_info[1] <= 1:
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
else:
_assertRaisesRegex = "assertRaisesRegex"
_assertRegex = "assertRegex"
else:
def b(s):
return s
# Workaround for standalone backslash
def u(s):
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr
def byte2int(bs):
return ord(bs[0])
def indexbytes(buf, i):
return ord(buf[i])
iterbytes = functools.partial(itertools.imap, ord)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_assertCountEqual = "assertItemsEqual"
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
def assertCountEqual(self, *args, **kwargs):
return getattr(self, _assertCountEqual)(*args, **kwargs)
def assertRaisesRegex(self, *args, **kwargs):
return getattr(self, _assertRaisesRegex)(*args, **kwargs)
def assertRegex(self, *args, **kwargs):
return getattr(self, _assertRegex)(*args, **kwargs)
if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")
if sys.version_info[:2] == (3, 2):
exec_("""def raise_from(value, from_value):
if from_value is None:
raise value
raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
exec_("""def raise_from(value, from_value):
raise value from from_value
""")
else:
def raise_from(value, from_value):
raise value
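# Illustrative usage (not part of six): raise_from gives one spelling for
# exception chaining; on Python 3 it compiles down to ``raise ... from ...``.
def _demo_raise_from():
    try:
        raise_from(ValueError("outer"), KeyError("inner"))
    except ValueError as exc:
        return exc  # on Py3, exc.__cause__ is the KeyError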
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):
"""The new-style print function for Python 2.4 and 2.5."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (isinstance(fp, file) and
isinstance(data, unicode) and
fp.encoding is not None):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
data = data.encode(fp.encoding, errors)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
if kwargs:
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
newline = unicode("\n")
space = unicode(" ")
else:
newline = "\n"
space = " "
if sep is None:
sep = space
if end is None:
end = newline
for i, arg in enumerate(args):
if i:
write(sep)
write(arg)
write(end)
if sys.version_info[:2] < (3, 3):
_print = print_
def print_(*args, **kwargs):
fp = kwargs.get("file", sys.stdout)
flush = kwargs.pop("flush", False)
_print(*args, **kwargs)
if flush and fp is not None:
fp.flush()
_add_doc(reraise, """Reraise an exception.""")
if sys.version_info[0:2] < (3, 4):
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES):
def wrapper(f):
f = functools.wraps(wrapped, assigned, updated)(f)
f.__wrapped__ = wrapped
return f
return wrapper
else:
wraps = functools.wraps
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})
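# Illustrative usage (not part of six): with_metaclass() bridges the
# incompatible Py2 ``__metaclass__`` attribute and Py3 ``metaclass=`` keyword.
def _demo_with_metaclass():
    class Meta(type):
        pass
    class Base(object):
        pass
    class Concrete(with_metaclass(Meta, Base)):
        pass
    return type(Concrete) is Meta  # True on both versions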
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
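# Illustrative usage (not part of six): the decorator form rebuilds the class
# through the metaclass, replaying the __slots__ handling above.
def _demo_add_metaclass():
    class Meta(type):
        pass
    @add_metaclass(Meta)
    class Concrete(object):
        pass
    return type(Concrete) is Meta  # True on both versions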
def python_2_unicode_compatible(klass):
"""
A decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
"""
if PY2:
if '__str__' not in klass.__dict__:
raise ValueError("@python_2_unicode_compatible cannot be applied "
"to %s because it doesn't define __str__()." %
klass.__name__)
klass.__unicode__ = klass.__str__
klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
return klass
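# Illustrative usage (not part of six): define __str__ once, returning text;
# on Python 2 the decorator rewires it into __unicode__ plus a UTF-8 __str__.
@python_2_unicode_compatible
class _DemoLabel(object):
    def __str__(self):
        return u("caf\u00e9")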
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = [] # required for PEP 302 and PEP 451
__package__ = __name__ # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
for i, importer in enumerate(sys.meta_path):
# Here's some real nastiness: Another "instance" of the six module might
# be floating around. Therefore, we can't use isinstance() to check for
# the six meta path importer, since the other six instance will have
# inserted an importer with different class.
if (type(importer).__name__ == "_SixMetaPathImporter" and
importer.name == __name__):
del sys.meta_path[i]
break
del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)

View file

@@ -1,3 +1,4 @@
+import importlib.util
 import sys
@@ -20,17 +21,10 @@ class VendorImporter:
         yield self.vendor_pkg + '.'
         yield ''

-    def find_module(self, fullname, path=None):
-        """
-        Return self when fullname starts with root_name and the
-        target module is one vendored through this importer.
-        """
+    def _module_matches_namespace(self, fullname):
+        """Figure out if the target module is vendored."""
         root, base, target = fullname.partition(self.root_name + '.')
-        if root:
-            return
-        if not any(map(target.startswith, self.vendored_names)):
-            return
-        return self
+        return not root and any(map(target.startswith, self.vendored_names))

     def load_module(self, fullname):
         """
@@ -43,13 +37,6 @@ class VendorImporter:
             __import__(extant)
             mod = sys.modules[extant]
             sys.modules[fullname] = mod
-            # mysterious hack:
-            # Remove the reference to the extant package/module
-            # on later Python versions to cause relative imports
-            # in the vendor package to resolve the same modules
-            # as those going through this importer.
-            if prefix and sys.version_info > (3, 3):
-                del sys.modules[extant]
             return mod
         except ImportError:
             pass
@@ -61,6 +48,20 @@ class VendorImporter:
             "distribution.".format(**locals())
         )

+    def create_module(self, spec):
+        return self.load_module(spec.name)
+
+    def exec_module(self, module):
+        pass
+
+    def find_spec(self, fullname, path=None, target=None):
+        """Return a module spec for vendored names."""
+        return (
+            importlib.util.spec_from_loader(fullname, self)
+            if self._module_matches_namespace(fullname)
+            else None
+        )
+
     def install(self):
         """
         Install this importer into sys.meta_path if not already present.
@@ -69,5 +70,11 @@ class VendorImporter:
             sys.meta_path.append(self)

-names = 'packaging', 'pyparsing', 'six', 'appdirs'
+names = (
+    'packaging',
+    'platformdirs',
+    'jaraco',
+    'importlib_resources',
+    'more_itertools',
+)
 VendorImporter(__name__, names).install()
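# Illustrative sketch (not part of the diff above; module names assume the
# standard setuptools layout): with find_spec() in place, importing a
# vendored name is transparently served from the _vendor package.
def _demo_vendored_import():
    from setuptools.extern import packaging  # resolved by VendorImporter
    return packaging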

View file

@@ -1,23 +0,0 @@
import os
import errno
import sys
from .extern import six
def _makedirs_31(path, exist_ok=False):
try:
os.makedirs(path)
except OSError as exc:
if not exist_ok or exc.errno != errno.EEXIST:
raise
# rely on compatibility behavior until mode considerations
# and exists_ok considerations are disentangled.
# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
needs_makedirs = (
six.PY2 or
(3, 4) <= sys.version_info < (3, 4, 1)
)
makedirs = _makedirs_31 if needs_makedirs else os.makedirs

View file

@@ -0,0 +1,625 @@
"""
Utilities for determining application-specific dirs. See <https://github.com/platformdirs/platformdirs> for details and
usage.
"""
from __future__ import annotations
import os
import sys
from typing import TYPE_CHECKING
from .api import PlatformDirsABC
from .version import __version__
from .version import __version_tuple__ as __version_info__
if TYPE_CHECKING:
from pathlib import Path
from typing import Literal
def _set_platform_dir_class() -> type[PlatformDirsABC]:
if sys.platform == "win32":
from platformdirs.windows import Windows as Result
elif sys.platform == "darwin":
from platformdirs.macos import MacOS as Result
else:
from platformdirs.unix import Unix as Result
if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system":
if os.getenv("SHELL") or os.getenv("PREFIX"):
return Result
from platformdirs.android import _android_folder
if _android_folder() is not None:
from platformdirs.android import Android
return Android # return to avoid redefinition of result
return Result
PlatformDirs = _set_platform_dir_class() #: Currently active platform
AppDirs = PlatformDirs #: Backwards compatibility with appdirs
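# Illustrative usage (not part of platformdirs): PlatformDirs is already bound
# to the class matching the running platform, so one call site works
# everywhere. The app name and author below are examples.
def _demo_platform_dirs() -> str:
    dirs = PlatformDirs(appname="MyApp", appauthor="MyCompany")
    return dirs.user_data_dir  # e.g. ~/.local/share/MyApp on Linux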
def user_data_dir(
appname: str | None = None,
appauthor: str | None | Literal[False] = None,
version: str | None = None,
roaming: bool = False, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
) -> str:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
:param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: data directory tied to the user
"""
return PlatformDirs(
appname=appname,
appauthor=appauthor,
version=version,
roaming=roaming,
ensure_exists=ensure_exists,
).user_data_dir
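# Illustrative usage (not part of platformdirs): the module-level helpers such
# as user_data_dir() above mirror the PlatformDirs properties for one-off
# lookups. Expected shapes (assumptions, they vary by OS):
#   Linux:   ~/.local/share/MyApp/1.0
#   Windows: C:\Users\<user>\AppData\Local\MyCompany\MyApp\1.0
def _demo_user_data_dir() -> str:
    return user_data_dir("MyApp", "MyCompany", version="1.0")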
def site_data_dir(
appname: str | None = None,
appauthor: str | None | Literal[False] = None,
version: str | None = None,
multipath: bool = False, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
) -> str:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
:param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: data directory shared by users
"""
return PlatformDirs(
appname=appname,
appauthor=appauthor,
version=version,
multipath=multipath,
ensure_exists=ensure_exists,
).site_data_dir
def user_config_dir(
appname: str | None = None,
appauthor: str | None | Literal[False] = None,
version: str | None = None,
roaming: bool = False, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
) -> str:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
:param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: config directory tied to the user
"""
return PlatformDirs(
appname=appname,
appauthor=appauthor,
version=version,
roaming=roaming,
ensure_exists=ensure_exists,
).user_config_dir
def site_config_dir(
appname: str | None = None,
appauthor: str | None | Literal[False] = None,
version: str | None = None,
multipath: bool = False, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
) -> str:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
:param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: config directory shared by the users
"""
return PlatformDirs(
appname=appname,
appauthor=appauthor,
version=version,
multipath=multipath,
ensure_exists=ensure_exists,
).site_config_dir
def user_cache_dir(
appname: str | None = None,
appauthor: str | None | Literal[False] = None,
version: str | None = None,
opinion: bool = True, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
) -> str:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
:param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: cache directory tied to the user
"""
return PlatformDirs(
appname=appname,
appauthor=appauthor,
version=version,
opinion=opinion,
ensure_exists=ensure_exists,
).user_cache_dir
def site_cache_dir(
appname: str | None = None,
appauthor: str | None | Literal[False] = None,
version: str | None = None,
opinion: bool = True, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
) -> str:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
:param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: cache directory shared by users
"""
return PlatformDirs(
appname=appname,
appauthor=appauthor,
version=version,
opinion=opinion,
ensure_exists=ensure_exists,
).site_cache_dir
def user_state_dir(
appname: str | None = None,
appauthor: str | None | Literal[False] = None,
version: str | None = None,
roaming: bool = False, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
) -> str:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
:param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: state directory tied to the user
"""
return PlatformDirs(
appname=appname,
appauthor=appauthor,
version=version,
roaming=roaming,
ensure_exists=ensure_exists,
).user_state_dir
def user_log_dir(
appname: str | None = None,
appauthor: str | None | Literal[False] = None,
version: str | None = None,
opinion: bool = True, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
) -> str:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
:param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: log directory tied to the user
"""
return PlatformDirs(
appname=appname,
appauthor=appauthor,
version=version,
opinion=opinion,
ensure_exists=ensure_exists,
).user_log_dir
def user_documents_dir() -> str:
""":returns: documents directory tied to the user"""
return PlatformDirs().user_documents_dir
def user_downloads_dir() -> str:
""":returns: downloads directory tied to the user"""
return PlatformDirs().user_downloads_dir
def user_pictures_dir() -> str:
""":returns: pictures directory tied to the user"""
return PlatformDirs().user_pictures_dir
def user_videos_dir() -> str:
""":returns: videos directory tied to the user"""
return PlatformDirs().user_videos_dir
def user_music_dir() -> str:
""":returns: music directory tied to the user"""
return PlatformDirs().user_music_dir
def user_desktop_dir() -> str:
""":returns: desktop directory tied to the user"""
return PlatformDirs().user_desktop_dir
def user_runtime_dir(
appname: str | None = None,
appauthor: str | None | Literal[False] = None,
version: str | None = None,
opinion: bool = True, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
) -> str:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
:param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: runtime directory tied to the user
"""
return PlatformDirs(
appname=appname,
appauthor=appauthor,
version=version,
opinion=opinion,
ensure_exists=ensure_exists,
).user_runtime_dir
def site_runtime_dir(
appname: str | None = None,
appauthor: str | None | Literal[False] = None,
version: str | None = None,
opinion: bool = True, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
) -> str:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
:param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: runtime directory shared by users
"""
return PlatformDirs(
appname=appname,
appauthor=appauthor,
version=version,
opinion=opinion,
ensure_exists=ensure_exists,
).site_runtime_dir
def user_data_path(
appname: str | None = None,
appauthor: str | None | Literal[False] = None,
version: str | None = None,
roaming: bool = False, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
) -> Path:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
:param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: data path tied to the user
"""
return PlatformDirs(
appname=appname,
appauthor=appauthor,
version=version,
roaming=roaming,
ensure_exists=ensure_exists,
).user_data_path
def site_data_path(
appname: str | None = None,
appauthor: str | None | Literal[False] = None,
version: str | None = None,
multipath: bool = False, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
) -> Path:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
:param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: data path shared by users
"""
return PlatformDirs(
appname=appname,
appauthor=appauthor,
version=version,
multipath=multipath,
ensure_exists=ensure_exists,
).site_data_path
def user_config_path(
appname: str | None = None,
appauthor: str | None | Literal[False] = None,
version: str | None = None,
roaming: bool = False, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
) -> Path:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
:param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: config path tied to the user
"""
return PlatformDirs(
appname=appname,
appauthor=appauthor,
version=version,
roaming=roaming,
ensure_exists=ensure_exists,
).user_config_path
def site_config_path(
appname: str | None = None,
appauthor: str | None | Literal[False] = None,
version: str | None = None,
multipath: bool = False, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
) -> Path:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
:param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: config path shared by the users
"""
return PlatformDirs(
appname=appname,
appauthor=appauthor,
version=version,
multipath=multipath,
ensure_exists=ensure_exists,
).site_config_path
def site_cache_path(
appname: str | None = None,
appauthor: str | None | Literal[False] = None,
version: str | None = None,
opinion: bool = True, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
) -> Path:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
:param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: cache path shared by users
"""
return PlatformDirs(
appname=appname,
appauthor=appauthor,
version=version,
opinion=opinion,
ensure_exists=ensure_exists,
).site_cache_path
def user_cache_path(
appname: str | None = None,
appauthor: str | None | Literal[False] = None,
version: str | None = None,
opinion: bool = True, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
) -> Path:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
:param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: cache path tied to the user
"""
return PlatformDirs(
appname=appname,
appauthor=appauthor,
version=version,
opinion=opinion,
ensure_exists=ensure_exists,
).user_cache_path
def user_state_path(
appname: str | None = None,
appauthor: str | None | Literal[False] = None,
version: str | None = None,
roaming: bool = False, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
) -> Path:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
:param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: state path tied to the user
"""
return PlatformDirs(
appname=appname,
appauthor=appauthor,
version=version,
roaming=roaming,
ensure_exists=ensure_exists,
).user_state_path
def user_log_path(
appname: str | None = None,
appauthor: str | None | Literal[False] = None,
version: str | None = None,
opinion: bool = True, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
) -> Path:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
:param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: log path tied to the user
"""
return PlatformDirs(
appname=appname,
appauthor=appauthor,
version=version,
opinion=opinion,
ensure_exists=ensure_exists,
).user_log_path
def user_documents_path() -> Path:
""":returns: documents path tied to the user"""
return PlatformDirs().user_documents_path
def user_downloads_path() -> Path:
""":returns: downloads path tied to the user"""
return PlatformDirs().user_downloads_path
def user_pictures_path() -> Path:
""":returns: pictures path tied to the user"""
return PlatformDirs().user_pictures_path
def user_videos_path() -> Path:
""":returns: videos path tied to the user"""
return PlatformDirs().user_videos_path
def user_music_path() -> Path:
""":returns: music path tied to the user"""
return PlatformDirs().user_music_path
def user_desktop_path() -> Path:
""":returns: desktop path tied to the user"""
return PlatformDirs().user_desktop_path
def user_runtime_path(
appname: str | None = None,
appauthor: str | None | Literal[False] = None,
version: str | None = None,
opinion: bool = True, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
) -> Path:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
:param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: runtime path tied to the user
"""
return PlatformDirs(
appname=appname,
appauthor=appauthor,
version=version,
opinion=opinion,
ensure_exists=ensure_exists,
).user_runtime_path
def site_runtime_path(
appname: str | None = None,
appauthor: str | None | Literal[False] = None,
version: str | None = None,
opinion: bool = True, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
) -> Path:
"""
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
:param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
:returns: runtime path shared by users
"""
return PlatformDirs(
appname=appname,
appauthor=appauthor,
version=version,
opinion=opinion,
ensure_exists=ensure_exists,
).site_runtime_path
__all__ = [
"__version__",
"__version_info__",
"PlatformDirs",
"AppDirs",
"PlatformDirsABC",
"user_data_dir",
"user_config_dir",
"user_cache_dir",
"user_state_dir",
"user_log_dir",
"user_documents_dir",
"user_downloads_dir",
"user_pictures_dir",
"user_videos_dir",
"user_music_dir",
"user_desktop_dir",
"user_runtime_dir",
"site_data_dir",
"site_config_dir",
"site_cache_dir",
"site_runtime_dir",
"user_data_path",
"user_config_path",
"user_cache_path",
"user_state_path",
"user_log_path",
"user_documents_path",
"user_downloads_path",
"user_pictures_path",
"user_videos_path",
"user_music_path",
"user_desktop_path",
"user_runtime_path",
"site_data_path",
"site_config_path",
"site_cache_path",
"site_runtime_path",
]

View file

@@ -0,0 +1,55 @@
"""Main entry point."""
from __future__ import annotations
from platformdirs import PlatformDirs, __version__
PROPS = (
"user_data_dir",
"user_config_dir",
"user_cache_dir",
"user_state_dir",
"user_log_dir",
"user_documents_dir",
"user_downloads_dir",
"user_pictures_dir",
"user_videos_dir",
"user_music_dir",
"user_runtime_dir",
"site_data_dir",
"site_config_dir",
"site_cache_dir",
"site_runtime_dir",
)
def main() -> None:
"""Run main entry point."""
app_name = "MyApp"
app_author = "MyCompany"
print(f"-- platformdirs {__version__} --") # noqa: T201
print("-- app dirs (with optional 'version')") # noqa: T201
dirs = PlatformDirs(app_name, app_author, version="1.0")
for prop in PROPS:
print(f"{prop}: {getattr(dirs, prop)}") # noqa: T201
print("\n-- app dirs (without optional 'version')") # noqa: T201
dirs = PlatformDirs(app_name, app_author)
for prop in PROPS:
print(f"{prop}: {getattr(dirs, prop)}") # noqa: T201
print("\n-- app dirs (without optional 'appauthor')") # noqa: T201
dirs = PlatformDirs(app_name)
for prop in PROPS:
print(f"{prop}: {getattr(dirs, prop)}") # noqa: T201
print("\n-- app dirs (with disabled 'appauthor')") # noqa: T201
dirs = PlatformDirs(app_name, appauthor=False)
for prop in PROPS:
print(f"{prop}: {getattr(dirs, prop)}") # noqa: T201
if __name__ == "__main__":
main()

View file

@@ -0,0 +1,221 @@
"""Android."""
from __future__ import annotations
import os
import re
import sys
from functools import lru_cache
from typing import cast
from .api import PlatformDirsABC
class Android(PlatformDirsABC):
"""
Follows the guidance `from here <https://android.stackexchange.com/a/216132>`_. Makes use of the
`appname <platformdirs.api.PlatformDirsABC.appname>`,
`version <platformdirs.api.PlatformDirsABC.version>`,
`ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
"""
@property
def user_data_dir(self) -> str:
""":return: data directory tied to the user, e.g. ``/data/user/<userid>/<packagename>/files/<AppName>``"""
return self._append_app_name_and_version(cast(str, _android_folder()), "files")
@property
def site_data_dir(self) -> str:
""":return: data directory shared by users, same as `user_data_dir`"""
return self.user_data_dir
@property
def user_config_dir(self) -> str:
"""
:return: config directory tied to the user, e.g. \
``/data/user/<userid>/<packagename>/shared_prefs/<AppName>``
"""
return self._append_app_name_and_version(cast(str, _android_folder()), "shared_prefs")
@property
def site_config_dir(self) -> str:
""":return: config directory shared by the users, same as `user_config_dir`"""
return self.user_config_dir
@property
def user_cache_dir(self) -> str:
""":return: cache directory tied to the user, e.g. e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>``"""
return self._append_app_name_and_version(cast(str, _android_folder()), "cache")
@property
def site_cache_dir(self) -> str:
""":return: cache directory shared by users, same as `user_cache_dir`"""
return self.user_cache_dir
@property
def user_state_dir(self) -> str:
""":return: state directory tied to the user, same as `user_data_dir`"""
return self.user_data_dir
@property
def user_log_dir(self) -> str:
"""
:return: log directory tied to the user, same as `user_cache_dir` if not opinionated else ``log`` in it,
e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>/log``
"""
path = self.user_cache_dir
if self.opinion:
path = os.path.join(path, "log") # noqa: PTH118
return path
@property
def user_documents_dir(self) -> str:
""":return: documents directory tied to the user e.g. ``/storage/emulated/0/Documents``"""
return _android_documents_folder()
@property
def user_downloads_dir(self) -> str:
""":return: downloads directory tied to the user e.g. ``/storage/emulated/0/Downloads``"""
return _android_downloads_folder()
@property
def user_pictures_dir(self) -> str:
""":return: pictures directory tied to the user e.g. ``/storage/emulated/0/Pictures``"""
return _android_pictures_folder()
@property
def user_videos_dir(self) -> str:
""":return: videos directory tied to the user e.g. ``/storage/emulated/0/DCIM/Camera``"""
return _android_videos_folder()
@property
def user_music_dir(self) -> str:
""":return: music directory tied to the user e.g. ``/storage/emulated/0/Music``"""
return _android_music_folder()
@property
def user_desktop_dir(self) -> str:
""":return: desktop directory tied to the user e.g. ``/storage/emulated/0/Desktop``"""
return "/storage/emulated/0/Desktop"
@property
def user_runtime_dir(self) -> str:
"""
:return: runtime directory tied to the user, same as `user_cache_dir` if not opinionated else ``tmp`` in it,
e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>/tmp``
"""
path = self.user_cache_dir
if self.opinion:
path = os.path.join(path, "tmp") # noqa: PTH118
return path
@property
def site_runtime_dir(self) -> str:
""":return: runtime directory shared by users, same as `user_runtime_dir`"""
return self.user_runtime_dir
@lru_cache(maxsize=1)
def _android_folder() -> str | None:
""":return: base folder for the Android OS or None if it cannot be found"""
try:
# First try to get path to android app via pyjnius
from jnius import autoclass
context = autoclass("android.content.Context")
result: str | None = context.getFilesDir().getParentFile().getAbsolutePath()
except Exception: # noqa: BLE001
# if fails find an android folder looking path on the sys.path
pattern = re.compile(r"/data/(data|user/\d+)/(.+)/files")
for path in sys.path:
if pattern.match(path):
result = path.split("/files")[0]
break
else:
result = None
return result
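# Illustrative check (not part of platformdirs) of the sys.path fallback
# above: on Android, app code typically runs from a path like the sample
# below, and the base folder is recovered by stripping everything from
# "/files" onward. The package name is an example.
def _demo_android_fallback() -> str | None:
    sample = "/data/user/0/com.example.app/files/app"
    if re.compile(r"/data/(data|user/\d+)/(.+)/files").match(sample):
        return sample.split("/files")[0]  # '/data/user/0/com.example.app'
    return None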
@lru_cache(maxsize=1)
def _android_documents_folder() -> str:
""":return: documents folder for the Android OS"""
# Get directories with pyjnius
try:
from jnius import autoclass
context = autoclass("android.content.Context")
environment = autoclass("android.os.Environment")
documents_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DOCUMENTS).getAbsolutePath()
except Exception: # noqa: BLE001
documents_dir = "/storage/emulated/0/Documents"
return documents_dir
@lru_cache(maxsize=1)
def _android_downloads_folder() -> str:
""":return: downloads folder for the Android OS"""
# Get directories with pyjnius
try:
from jnius import autoclass
context = autoclass("android.content.Context")
environment = autoclass("android.os.Environment")
downloads_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DOWNLOADS).getAbsolutePath()
except Exception: # noqa: BLE001
downloads_dir = "/storage/emulated/0/Downloads"
return downloads_dir
@lru_cache(maxsize=1)
def _android_pictures_folder() -> str:
""":return: pictures folder for the Android OS"""
# Get directories with pyjnius
try:
from jnius import autoclass
context = autoclass("android.content.Context")
environment = autoclass("android.os.Environment")
pictures_dir: str = context.getExternalFilesDir(environment.DIRECTORY_PICTURES).getAbsolutePath()
except Exception: # noqa: BLE001
pictures_dir = "/storage/emulated/0/Pictures"
return pictures_dir
@lru_cache(maxsize=1)
def _android_videos_folder() -> str:
""":return: videos folder for the Android OS"""
# Get directories with pyjnius
try:
from jnius import autoclass
context = autoclass("android.content.Context")
environment = autoclass("android.os.Environment")
videos_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DCIM).getAbsolutePath()
except Exception: # noqa: BLE001
videos_dir = "/storage/emulated/0/DCIM/Camera"
return videos_dir
@lru_cache(maxsize=1)
def _android_music_folder() -> str:
""":return: music folder for the Android OS"""
# Get directories with pyjnius
try:
from jnius import autoclass
context = autoclass("android.content.Context")
environment = autoclass("android.os.Environment")
music_dir: str = context.getExternalFilesDir(environment.DIRECTORY_MUSIC).getAbsolutePath()
except Exception: # noqa: BLE001
music_dir = "/storage/emulated/0/Music"
return music_dir
__all__ = [
"Android",
]

View file

@@ -0,0 +1,279 @@
"""Base API."""
from __future__ import annotations
import os
from abc import ABC, abstractmethod
from pathlib import Path
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import Iterator, Literal
class PlatformDirsABC(ABC):
"""Abstract base class for platform directories."""
def __init__( # noqa: PLR0913
self,
appname: str | None = None,
appauthor: str | None | Literal[False] = None,
version: str | None = None,
roaming: bool = False, # noqa: FBT001, FBT002
multipath: bool = False, # noqa: FBT001, FBT002
opinion: bool = True, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
) -> None:
"""
Create a new platform directory.
:param appname: See `appname`.
:param appauthor: See `appauthor`.
:param version: See `version`.
:param roaming: See `roaming`.
:param multipath: See `multipath`.
:param opinion: See `opinion`.
:param ensure_exists: See `ensure_exists`.
"""
self.appname = appname  #: The name of the application.
self.appauthor = appauthor
"""
The name of the app author or distributing body for this application. Typically, it is the owning company name.
Defaults to `appname`. You may pass ``False`` to disable it.
"""
self.version = version
"""
An optional version path element to append to the path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this would typically be ``<major>.<minor>``.
"""
self.roaming = roaming
"""
Whether to use the roaming appdata directory on Windows. That means that for users on a Windows network setup
for roaming profiles, this user data will be synced on login (see
`here <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>`_).
"""
self.multipath = multipath
"""
An optional parameter that indicates the entire list of data dirs should be returned.
By default, only the first item is returned.
"""
self.opinion = opinion  #: A flag indicating whether to use opinionated values.
self.ensure_exists = ensure_exists
"""
Optionally create the directory (and any missing parents) upon access if it does not exist.
By default, no directories are created.
"""
def _append_app_name_and_version(self, *base: str) -> str:
params = list(base[1:])
if self.appname:
params.append(self.appname)
if self.version:
params.append(self.version)
path = os.path.join(base[0], *params) # noqa: PTH118
self._optionally_create_directory(path)
return path
def _optionally_create_directory(self, path: str) -> None:
if self.ensure_exists:
Path(path).mkdir(parents=True, exist_ok=True)
@property
@abstractmethod
def user_data_dir(self) -> str:
""":return: data directory tied to the user"""
@property
@abstractmethod
def site_data_dir(self) -> str:
""":return: data directory shared by users"""
@property
@abstractmethod
def user_config_dir(self) -> str:
""":return: config directory tied to the user"""
@property
@abstractmethod
def site_config_dir(self) -> str:
""":return: config directory shared by the users"""
@property
@abstractmethod
def user_cache_dir(self) -> str:
""":return: cache directory tied to the user"""
@property
@abstractmethod
def site_cache_dir(self) -> str:
""":return: cache directory shared by users"""
@property
@abstractmethod
def user_state_dir(self) -> str:
""":return: state directory tied to the user"""
@property
@abstractmethod
def user_log_dir(self) -> str:
""":return: log directory tied to the user"""
@property
@abstractmethod
def user_documents_dir(self) -> str:
""":return: documents directory tied to the user"""
@property
@abstractmethod
def user_downloads_dir(self) -> str:
""":return: downloads directory tied to the user"""
@property
@abstractmethod
def user_pictures_dir(self) -> str:
""":return: pictures directory tied to the user"""
@property
@abstractmethod
def user_videos_dir(self) -> str:
""":return: videos directory tied to the user"""
@property
@abstractmethod
def user_music_dir(self) -> str:
""":return: music directory tied to the user"""
@property
@abstractmethod
def user_desktop_dir(self) -> str:
""":return: desktop directory tied to the user"""
@property
@abstractmethod
def user_runtime_dir(self) -> str:
""":return: runtime directory tied to the user"""
@property
@abstractmethod
def site_runtime_dir(self) -> str:
""":return: runtime directory shared by users"""
@property
def user_data_path(self) -> Path:
""":return: data path tied to the user"""
return Path(self.user_data_dir)
@property
def site_data_path(self) -> Path:
""":return: data path shared by users"""
return Path(self.site_data_dir)
@property
def user_config_path(self) -> Path:
""":return: config path tied to the user"""
return Path(self.user_config_dir)
@property
def site_config_path(self) -> Path:
""":return: config path shared by the users"""
return Path(self.site_config_dir)
@property
def user_cache_path(self) -> Path:
""":return: cache path tied to the user"""
return Path(self.user_cache_dir)
@property
def site_cache_path(self) -> Path:
""":return: cache path shared by users"""
return Path(self.site_cache_dir)
@property
def user_state_path(self) -> Path:
""":return: state path tied to the user"""
return Path(self.user_state_dir)
@property
def user_log_path(self) -> Path:
""":return: log path tied to the user"""
return Path(self.user_log_dir)
@property
def user_documents_path(self) -> Path:
""":return: documents path tied to the user"""
return Path(self.user_documents_dir)
@property
def user_downloads_path(self) -> Path:
""":return: downloads path tied to the user"""
return Path(self.user_downloads_dir)
@property
def user_pictures_path(self) -> Path:
""":return: pictures path tied to the user"""
return Path(self.user_pictures_dir)
@property
def user_videos_path(self) -> Path:
""":return: videos path tied to the user"""
return Path(self.user_videos_dir)
@property
def user_music_path(self) -> Path:
""":return: music path tied to the user"""
return Path(self.user_music_dir)
@property
def user_desktop_path(self) -> Path:
""":return: desktop path tied to the user"""
return Path(self.user_desktop_dir)
@property
def user_runtime_path(self) -> Path:
""":return: runtime path tied to the user"""
return Path(self.user_runtime_dir)
@property
def site_runtime_path(self) -> Path:
""":return: runtime path shared by users"""
return Path(self.site_runtime_dir)
def iter_config_dirs(self) -> Iterator[str]:
""":yield: all user and site configuration directories."""
yield self.user_config_dir
yield self.site_config_dir
def iter_data_dirs(self) -> Iterator[str]:
""":yield: all user and site data directories."""
yield self.user_data_dir
yield self.site_data_dir
def iter_cache_dirs(self) -> Iterator[str]:
""":yield: all user and site cache directories."""
yield self.user_cache_dir
yield self.site_cache_dir
def iter_runtime_dirs(self) -> Iterator[str]:
""":yield: all user and site runtime directories."""
yield self.user_runtime_dir
yield self.site_runtime_dir
def iter_config_paths(self) -> Iterator[Path]:
""":yield: all user and site configuration paths."""
for path in self.iter_config_dirs():
yield Path(path)
def iter_data_paths(self) -> Iterator[Path]:
""":yield: all user and site data paths."""
for path in self.iter_data_dirs():
yield Path(path)
def iter_cache_paths(self) -> Iterator[Path]:
""":yield: all user and site cache paths."""
for path in self.iter_cache_dirs():
yield Path(path)
def iter_runtime_paths(self) -> Iterator[Path]:
""":yield: all user and site runtime paths."""
for path in self.iter_runtime_dirs():
yield Path(path)
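In practice the abstract properties above are consumed through the package's platform-dispatching front end rather than through this base class directly. A minimal usage sketch, assuming the top-level ``PlatformDirs`` entry point and illustrative application names:

from platformdirs import PlatformDirs
dirs = PlatformDirs(appname="MyApp", appauthor="MyCompany", version="1.0")
print(dirs.user_data_dir)    # per-user data directory for the current platform
print(dirs.site_config_dir)  # configuration directory shared by users
for path in dirs.iter_config_paths():  # user directories first, then site directories
    print(path)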

@@ -0,0 +1,127 @@
"""macOS."""
from __future__ import annotations
import os.path
import sys
from .api import PlatformDirsABC
class MacOS(PlatformDirsABC):
"""
Platform directories for the macOS operating system. Follows the guidance from `Apple documentation
<https://developer.apple.com/library/archive/documentation/FileManagement/Conceptual/FileSystemProgrammingGuide/MacOSXDirectories/MacOSXDirectories.html>`_.
Makes use of the `appname <platformdirs.api.PlatformDirsABC.appname>`,
`version <platformdirs.api.PlatformDirsABC.version>`,
`ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
"""
@property
def user_data_dir(self) -> str:
""":return: data directory tied to the user, e.g. ``~/Library/Application Support/$appname/$version``"""
return self._append_app_name_and_version(os.path.expanduser("~/Library/Application Support")) # noqa: PTH111
@property
def site_data_dir(self) -> str:
"""
:return: data directory shared by users, e.g. ``/Library/Application Support/$appname/$version``.
If we're using a Python binary managed by `Homebrew <https://brew.sh>`_, the directory
will be under the Homebrew prefix, e.g. ``/opt/homebrew/share/$appname/$version``.
If `multipath <platformdirs.api.PlatformDirsABC.multipath>` is enabled and we're in Homebrew,
the response is a multi-path string separated by ":", e.g.
``/opt/homebrew/share/$appname/$version:/Library/Application Support/$appname/$version``
"""
is_homebrew = sys.prefix.startswith("/opt/homebrew")
path_list = [self._append_app_name_and_version("/opt/homebrew/share")] if is_homebrew else []
path_list.append(self._append_app_name_and_version("/Library/Application Support"))
if self.multipath:
return os.pathsep.join(path_list)
return path_list[0]
@property
def user_config_dir(self) -> str:
""":return: config directory tied to the user, same as `user_data_dir`"""
return self.user_data_dir
@property
def site_config_dir(self) -> str:
""":return: config directory shared by the users, same as `site_data_dir`"""
return self.site_data_dir
@property
def user_cache_dir(self) -> str:
""":return: cache directory tied to the user, e.g. ``~/Library/Caches/$appname/$version``"""
return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches")) # noqa: PTH111
@property
def site_cache_dir(self) -> str:
"""
:return: cache directory shared by users, e.g. ``/Library/Caches/$appname/$version``.
If we're using a Python binary managed by `Homebrew <https://brew.sh>`_, the directory
will be under the Homebrew prefix, e.g. ``/opt/homebrew/var/cache/$appname/$version``.
If `multipath <platformdirs.api.PlatformDirsABC.multipath>` is enabled and we're in Homebrew,
the response is a multi-path string separated by ":", e.g.
``/opt/homebrew/var/cache/$appname/$version:/Library/Caches/$appname/$version``
"""
is_homebrew = sys.prefix.startswith("/opt/homebrew")
path_list = [self._append_app_name_and_version("/opt/homebrew/var/cache")] if is_homebrew else []
path_list.append(self._append_app_name_and_version("/Library/Caches"))
if self.multipath:
return os.pathsep.join(path_list)
return path_list[0]
@property
def user_state_dir(self) -> str:
""":return: state directory tied to the user, same as `user_data_dir`"""
return self.user_data_dir
@property
def user_log_dir(self) -> str:
""":return: log directory tied to the user, e.g. ``~/Library/Logs/$appname/$version``"""
return self._append_app_name_and_version(os.path.expanduser("~/Library/Logs")) # noqa: PTH111
@property
def user_documents_dir(self) -> str:
""":return: documents directory tied to the user, e.g. ``~/Documents``"""
return os.path.expanduser("~/Documents") # noqa: PTH111
@property
def user_downloads_dir(self) -> str:
""":return: downloads directory tied to the user, e.g. ``~/Downloads``"""
return os.path.expanduser("~/Downloads") # noqa: PTH111
@property
def user_pictures_dir(self) -> str:
""":return: pictures directory tied to the user, e.g. ``~/Pictures``"""
return os.path.expanduser("~/Pictures") # noqa: PTH111
@property
def user_videos_dir(self) -> str:
""":return: videos directory tied to the user, e.g. ``~/Movies``"""
return os.path.expanduser("~/Movies") # noqa: PTH111
@property
def user_music_dir(self) -> str:
""":return: music directory tied to the user, e.g. ``~/Music``"""
return os.path.expanduser("~/Music") # noqa: PTH111
@property
def user_desktop_dir(self) -> str:
""":return: desktop directory tied to the user, e.g. ``~/Desktop``"""
return os.path.expanduser("~/Desktop") # noqa: PTH111
@property
def user_runtime_dir(self) -> str:
""":return: runtime directory tied to the user, e.g. ``~/Library/Caches/TemporaryItems/$appname/$version``"""
return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches/TemporaryItems")) # noqa: PTH111
@property
def site_runtime_dir(self) -> str:
""":return: runtime directory shared by users, same as `user_runtime_dir`"""
return self.user_runtime_dir
__all__ = [
"MacOS",
]
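A short sketch of the Homebrew-aware multipath behaviour documented in ``site_data_dir`` above; the application name is illustrative, and the Homebrew entry only appears when ``sys.prefix`` lives under ``/opt/homebrew``:

dirs = MacOS(appname="MyApp", version="1.0", multipath=True)
# On a Homebrew-managed Python:
#   /opt/homebrew/share/MyApp/1.0:/Library/Application Support/MyApp/1.0
# otherwise:
#   /Library/Application Support/MyApp/1.0
print(dirs.site_data_dir)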

@@ -0,0 +1,270 @@
"""Unix."""
from __future__ import annotations
import os
import sys
from configparser import ConfigParser
from pathlib import Path
from typing import Iterator
from .api import PlatformDirsABC
if sys.platform == "win32":
def getuid() -> int:
msg = "should only be used on Unix"
raise RuntimeError(msg)
else:
from os import getuid
class Unix(PlatformDirsABC):
"""
On Unix/Linux, we follow the
`XDG Basedir Spec <https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_. The spec allows
overriding directories with environment variables. The examples shown are the default values, alongside the name of
the environment variable that overrides them. Makes use of the
`appname <platformdirs.api.PlatformDirsABC.appname>`,
`version <platformdirs.api.PlatformDirsABC.version>`,
`multipath <platformdirs.api.PlatformDirsABC.multipath>`,
`opinion <platformdirs.api.PlatformDirsABC.opinion>`,
`ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
"""
@property
def user_data_dir(self) -> str:
"""
:return: data directory tied to the user, e.g. ``~/.local/share/$appname/$version`` or
``$XDG_DATA_HOME/$appname/$version``
"""
path = os.environ.get("XDG_DATA_HOME", "")
if not path.strip():
path = os.path.expanduser("~/.local/share") # noqa: PTH111
return self._append_app_name_and_version(path)
@property
def _site_data_dirs(self) -> list[str]:
path = os.environ.get("XDG_DATA_DIRS", "")
if not path.strip():
path = f"/usr/local/share{os.pathsep}/usr/share"
return [self._append_app_name_and_version(p) for p in path.split(os.pathsep)]
@property
def site_data_dir(self) -> str:
"""
:return: data directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>` is
enabled and ``XDG_DATA_DIRS`` is set and contains multiple paths, the response is also a multi-path string
separated by the OS path separator), e.g. ``/usr/local/share/$appname/$version`` or ``/usr/share/$appname/$version``
"""
# XDG default for $XDG_DATA_DIRS; only first, if multipath is False
dirs = self._site_data_dirs
if not self.multipath:
return dirs[0]
return os.pathsep.join(dirs)
@property
def user_config_dir(self) -> str:
"""
:return: config directory tied to the user, e.g. ``~/.config/$appname/$version`` or
``$XDG_CONFIG_HOME/$appname/$version``
"""
path = os.environ.get("XDG_CONFIG_HOME", "")
if not path.strip():
path = os.path.expanduser("~/.config") # noqa: PTH111
return self._append_app_name_and_version(path)
@property
def _site_config_dirs(self) -> list[str]:
path = os.environ.get("XDG_CONFIG_DIRS", "")
if not path.strip():
path = "/etc/xdg"
return [self._append_app_name_and_version(p) for p in path.split(os.pathsep)]
@property
def site_config_dir(self) -> str:
"""
:return: config directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>`
is enabled and ``XDG_CONFIG_DIRS`` is set and contains multiple paths, the response is also a multi-path
string separated by the OS path separator), e.g. ``/etc/xdg/$appname/$version``
"""
# XDG default for $XDG_CONFIG_DIRS only first, if multipath is False
dirs = self._site_config_dirs
if not self.multipath:
return dirs[0]
return os.pathsep.join(dirs)
@property
def user_cache_dir(self) -> str:
"""
:return: cache directory tied to the user, e.g. ``~/.cache/$appname/$version`` or
``$XDG_CACHE_HOME/$appname/$version``
"""
path = os.environ.get("XDG_CACHE_HOME", "")
if not path.strip():
path = os.path.expanduser("~/.cache") # noqa: PTH111
return self._append_app_name_and_version(path)
@property
def site_cache_dir(self) -> str:
""":return: cache directory shared by users, e.g. ``/var/cache/$appname/$version``"""
return self._append_app_name_and_version("/var/cache")
@property
def user_state_dir(self) -> str:
"""
:return: state directory tied to the user, e.g. ``~/.local/state/$appname/$version`` or
``$XDG_STATE_HOME/$appname/$version``
"""
path = os.environ.get("XDG_STATE_HOME", "")
if not path.strip():
path = os.path.expanduser("~/.local/state") # noqa: PTH111
return self._append_app_name_and_version(path)
@property
def user_log_dir(self) -> str:
""":return: log directory tied to the user, same as `user_state_dir` if not opinionated else ``log`` in it"""
path = self.user_state_dir
if self.opinion:
path = os.path.join(path, "log") # noqa: PTH118
self._optionally_create_directory(path)
return path
@property
def user_documents_dir(self) -> str:
""":return: documents directory tied to the user, e.g. ``~/Documents``"""
return _get_user_media_dir("XDG_DOCUMENTS_DIR", "~/Documents")
@property
def user_downloads_dir(self) -> str:
""":return: downloads directory tied to the user, e.g. ``~/Downloads``"""
return _get_user_media_dir("XDG_DOWNLOAD_DIR", "~/Downloads")
@property
def user_pictures_dir(self) -> str:
""":return: pictures directory tied to the user, e.g. ``~/Pictures``"""
return _get_user_media_dir("XDG_PICTURES_DIR", "~/Pictures")
@property
def user_videos_dir(self) -> str:
""":return: videos directory tied to the user, e.g. ``~/Videos``"""
return _get_user_media_dir("XDG_VIDEOS_DIR", "~/Videos")
@property
def user_music_dir(self) -> str:
""":return: music directory tied to the user, e.g. ``~/Music``"""
return _get_user_media_dir("XDG_MUSIC_DIR", "~/Music")
@property
def user_desktop_dir(self) -> str:
""":return: desktop directory tied to the user, e.g. ``~/Desktop``"""
return _get_user_media_dir("XDG_DESKTOP_DIR", "~/Desktop")
@property
def user_runtime_dir(self) -> str:
"""
:return: runtime directory tied to the user, e.g. ``/run/user/$(id -u)/$appname/$version`` or
``$XDG_RUNTIME_DIR/$appname/$version``.
On FreeBSD/OpenBSD/NetBSD, if ``$XDG_RUNTIME_DIR`` is not set, it returns
``/var/run/user/$(id -u)/$appname/$version`` when that directory exists, and
``/tmp/runtime-$(id -u)/$appname/$version`` otherwise.
"""
path = os.environ.get("XDG_RUNTIME_DIR", "")
if not path.strip():
if sys.platform.startswith(("freebsd", "openbsd", "netbsd")):
path = f"/var/run/user/{getuid()}"
if not Path(path).exists():
path = f"/tmp/runtime-{getuid()}" # noqa: S108
else:
path = f"/run/user/{getuid()}"
return self._append_app_name_and_version(path)
@property
def site_runtime_dir(self) -> str:
"""
:return: runtime directory shared by users, e.g. ``/run/$appname/$version`` or \
``$XDG_RUNTIME_DIR/$appname/$version``.
Note that this behaves almost exactly like `user_runtime_dir` if ``$XDG_RUNTIME_DIR`` is set, but will
fall back to paths associated to the root user instead of a regular logged-in user if it's not set.
If you wish to ensure that a logged-in root user path is returned e.g. ``/run/user/0``, use `user_runtime_dir`
instead.
For FreeBSD/OpenBSD/NetBSD, it would return ``/var/run/$appname/$version`` if ``$XDG_RUNTIME_DIR`` is not set.
"""
path = os.environ.get("XDG_RUNTIME_DIR", "")
if not path.strip():
if sys.platform.startswith(("freebsd", "openbsd", "netbsd")):
path = "/var/run"
else:
path = "/run"
return self._append_app_name_and_version(path)
@property
def site_data_path(self) -> Path:
""":return: data path shared by users. Only return first item, even if ``multipath`` is set to ``True``"""
return self._first_item_as_path_if_multipath(self.site_data_dir)
@property
def site_config_path(self) -> Path:
""":return: config path shared by the users. Only return first item, even if ``multipath`` is set to ``True``"""
return self._first_item_as_path_if_multipath(self.site_config_dir)
@property
def site_cache_path(self) -> Path:
""":return: cache path shared by users. Only return first item, even if ``multipath`` is set to ``True``"""
return self._first_item_as_path_if_multipath(self.site_cache_dir)
def _first_item_as_path_if_multipath(self, directory: str) -> Path:
if self.multipath:
# If multipath is True, the first path is returned.
directory = directory.split(os.pathsep)[0]
return Path(directory)
def iter_config_dirs(self) -> Iterator[str]:
""":yield: all user and site configuration directories."""
yield self.user_config_dir
yield from self._site_config_dirs
def iter_data_dirs(self) -> Iterator[str]:
""":yield: all user and site data directories."""
yield self.user_data_dir
yield from self._site_data_dirs
def _get_user_media_dir(env_var: str, fallback_tilde_path: str) -> str:
media_dir = _get_user_dirs_folder(env_var)
if media_dir is None:
media_dir = os.environ.get(env_var, "").strip()
if not media_dir:
media_dir = os.path.expanduser(fallback_tilde_path) # noqa: PTH111
return media_dir
def _get_user_dirs_folder(key: str) -> str | None:
"""Return directory from user-dirs.dirs config file. See https://freedesktop.org/wiki/Software/xdg-user-dirs/."""
user_dirs_config_path = Path(Unix().user_config_dir) / "user-dirs.dirs"
if user_dirs_config_path.exists():
parser = ConfigParser()
with user_dirs_config_path.open() as stream:
# Add fake section header, so ConfigParser doesn't complain
parser.read_string(f"[top]\n{stream.read()}")
if key not in parser["top"]:
return None
path = parser["top"][key].strip('"')
# Handle relative home paths
return path.replace("$HOME", os.path.expanduser("~")) # noqa: PTH111
return None
__all__ = [
"Unix",
]
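A sketch of the XDG override behaviour the docstrings above describe: a non-blank environment variable wins, and the tilde default applies otherwise (paths are illustrative):

import os
os.environ["XDG_CONFIG_HOME"] = "/tmp/xdg-config"
print(Unix(appname="MyApp").user_config_dir)  # /tmp/xdg-config/MyApp
del os.environ["XDG_CONFIG_HOME"]
print(Unix(appname="MyApp").user_config_dir)  # e.g. /home/user/.config/MyApp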

@@ -0,0 +1,16 @@
# file generated by setuptools_scm
# don't change, don't track in version control
TYPE_CHECKING = False
if TYPE_CHECKING:
from typing import Tuple, Union
VERSION_TUPLE = Tuple[Union[int, str], ...]
else:
VERSION_TUPLE = object
version: str
__version__: str
__version_tuple__: VERSION_TUPLE
version_tuple: VERSION_TUPLE
__version__ = version = '4.2.0'
__version_tuple__ = version_tuple = (4, 2, 0)

@@ -0,0 +1,267 @@
"""Windows."""
from __future__ import annotations
import ctypes
import os
import sys
from functools import lru_cache
from typing import TYPE_CHECKING
from .api import PlatformDirsABC
if TYPE_CHECKING:
from collections.abc import Callable
class Windows(PlatformDirsABC):
"""
`MSDN on where to store app data files
<http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120>`_.
Makes use of the
`appname <platformdirs.api.PlatformDirsABC.appname>`,
`appauthor <platformdirs.api.PlatformDirsABC.appauthor>`,
`version <platformdirs.api.PlatformDirsABC.version>`,
`roaming <platformdirs.api.PlatformDirsABC.roaming>`,
`opinion <platformdirs.api.PlatformDirsABC.opinion>`,
`ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
"""
@property
def user_data_dir(self) -> str:
"""
:return: data directory tied to the user, e.g.
``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname`` (not roaming) or
``%USERPROFILE%\\AppData\\Roaming\\$appauthor\\$appname`` (roaming)
"""
const = "CSIDL_APPDATA" if self.roaming else "CSIDL_LOCAL_APPDATA"
path = os.path.normpath(get_win_folder(const))
return self._append_parts(path)
def _append_parts(self, path: str, *, opinion_value: str | None = None) -> str:
params = []
if self.appname:
if self.appauthor is not False:
author = self.appauthor or self.appname
params.append(author)
params.append(self.appname)
if opinion_value is not None and self.opinion:
params.append(opinion_value)
if self.version:
params.append(self.version)
path = os.path.join(path, *params) # noqa: PTH118
self._optionally_create_directory(path)
return path
@property
def site_data_dir(self) -> str:
""":return: data directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname``"""
path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA"))
return self._append_parts(path)
@property
def user_config_dir(self) -> str:
""":return: config directory tied to the user, same as `user_data_dir`"""
return self.user_data_dir
@property
def site_config_dir(self) -> str:
""":return: config directory shared by the users, same as `site_data_dir`"""
return self.site_data_dir
@property
def user_cache_dir(self) -> str:
"""
:return: cache directory tied to the user (if opinionated with ``Cache`` folder within ``$appname``) e.g.
``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname\\Cache\\$version``
"""
path = os.path.normpath(get_win_folder("CSIDL_LOCAL_APPDATA"))
return self._append_parts(path, opinion_value="Cache")
@property
def site_cache_dir(self) -> str:
""":return: cache directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname\\Cache\\$version``"""
path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA"))
return self._append_parts(path, opinion_value="Cache")
@property
def user_state_dir(self) -> str:
""":return: state directory tied to the user, same as `user_data_dir`"""
return self.user_data_dir
@property
def user_log_dir(self) -> str:
""":return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``Logs`` in it"""
path = self.user_data_dir
if self.opinion:
path = os.path.join(path, "Logs") # noqa: PTH118
self._optionally_create_directory(path)
return path
@property
def user_documents_dir(self) -> str:
""":return: documents directory tied to the user e.g. ``%USERPROFILE%\\Documents``"""
return os.path.normpath(get_win_folder("CSIDL_PERSONAL"))
@property
def user_downloads_dir(self) -> str:
""":return: downloads directory tied to the user e.g. ``%USERPROFILE%\\Downloads``"""
return os.path.normpath(get_win_folder("CSIDL_DOWNLOADS"))
@property
def user_pictures_dir(self) -> str:
""":return: pictures directory tied to the user e.g. ``%USERPROFILE%\\Pictures``"""
return os.path.normpath(get_win_folder("CSIDL_MYPICTURES"))
@property
def user_videos_dir(self) -> str:
""":return: videos directory tied to the user e.g. ``%USERPROFILE%\\Videos``"""
return os.path.normpath(get_win_folder("CSIDL_MYVIDEO"))
@property
def user_music_dir(self) -> str:
""":return: music directory tied to the user e.g. ``%USERPROFILE%\\Music``"""
return os.path.normpath(get_win_folder("CSIDL_MYMUSIC"))
@property
def user_desktop_dir(self) -> str:
""":return: desktop directory tied to the user, e.g. ``%USERPROFILE%\\Desktop``"""
return os.path.normpath(get_win_folder("CSIDL_DESKTOPDIRECTORY"))
@property
def user_runtime_dir(self) -> str:
"""
:return: runtime directory tied to the user, e.g.
``%USERPROFILE%\\AppData\\Local\\Temp\\$appauthor\\$appname``
"""
path = os.path.normpath(os.path.join(get_win_folder("CSIDL_LOCAL_APPDATA"), "Temp")) # noqa: PTH118
return self._append_parts(path)
@property
def site_runtime_dir(self) -> str:
""":return: runtime directory shared by users, same as `user_runtime_dir`"""
return self.user_runtime_dir
def get_win_folder_from_env_vars(csidl_name: str) -> str:
"""Get folder from environment variables."""
result = get_win_folder_if_csidl_name_not_env_var(csidl_name)
if result is not None:
return result
env_var_name = {
"CSIDL_APPDATA": "APPDATA",
"CSIDL_COMMON_APPDATA": "ALLUSERSPROFILE",
"CSIDL_LOCAL_APPDATA": "LOCALAPPDATA",
}.get(csidl_name)
if env_var_name is None:
msg = f"Unknown CSIDL name: {csidl_name}"
raise ValueError(msg)
result = os.environ.get(env_var_name)
if result is None:
msg = f"Unset environment variable: {env_var_name}"
raise ValueError(msg)
return result
def get_win_folder_if_csidl_name_not_env_var(csidl_name: str) -> str | None:
"""Get folder for a CSIDL name that does not exist as an environment variable."""
if csidl_name == "CSIDL_PERSONAL":
return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Documents") # noqa: PTH118
if csidl_name == "CSIDL_DOWNLOADS":
return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Downloads") # noqa: PTH118
if csidl_name == "CSIDL_MYPICTURES":
return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Pictures") # noqa: PTH118
if csidl_name == "CSIDL_MYVIDEO":
return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Videos") # noqa: PTH118
if csidl_name == "CSIDL_MYMUSIC":
return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Music") # noqa: PTH118
return None
def get_win_folder_from_registry(csidl_name: str) -> str:
"""
Get folder from the registry.
This is a fallback technique at best. I'm not sure if using the registry for these guarantees us the correct answer
for all CSIDL_* names.
"""
shell_folder_name = {
"CSIDL_APPDATA": "AppData",
"CSIDL_COMMON_APPDATA": "Common AppData",
"CSIDL_LOCAL_APPDATA": "Local AppData",
"CSIDL_PERSONAL": "Personal",
"CSIDL_DOWNLOADS": "{374DE290-123F-4565-9164-39C4925E467B}",
"CSIDL_MYPICTURES": "My Pictures",
"CSIDL_MYVIDEO": "My Video",
"CSIDL_MYMUSIC": "My Music",
}.get(csidl_name)
if shell_folder_name is None:
msg = f"Unknown CSIDL name: {csidl_name}"
raise ValueError(msg)
if sys.platform != "win32": # only needed for mypy type checker to know that this code runs only on Windows
raise NotImplementedError
import winreg
key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders")
directory, _ = winreg.QueryValueEx(key, shell_folder_name)
return str(directory)
def get_win_folder_via_ctypes(csidl_name: str) -> str:
"""Get folder with ctypes."""
# There is no 'CSIDL_DOWNLOADS'.
# Use 'CSIDL_PROFILE' (40) and append the default folder 'Downloads' instead.
# https://learn.microsoft.com/en-us/windows/win32/shell/knownfolderid
csidl_const = {
"CSIDL_APPDATA": 26,
"CSIDL_COMMON_APPDATA": 35,
"CSIDL_LOCAL_APPDATA": 28,
"CSIDL_PERSONAL": 5,
"CSIDL_MYPICTURES": 39,
"CSIDL_MYVIDEO": 14,
"CSIDL_MYMUSIC": 13,
"CSIDL_DOWNLOADS": 40,
"CSIDL_DESKTOPDIRECTORY": 16,
}.get(csidl_name)
if csidl_const is None:
msg = f"Unknown CSIDL name: {csidl_name}"
raise ValueError(msg)
buf = ctypes.create_unicode_buffer(1024)
windll = getattr(ctypes, "windll") # noqa: B009 # using getattr to avoid false positive with mypy type checker
windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
# Downgrade to short path name if it has high-bit chars.
if any(ord(c) > 255 for c in buf): # noqa: PLR2004
buf2 = ctypes.create_unicode_buffer(1024)
if windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
buf = buf2
if csidl_name == "CSIDL_DOWNLOADS":
return os.path.join(buf.value, "Downloads") # noqa: PTH118
return buf.value
def _pick_get_win_folder() -> Callable[[str], str]:
if hasattr(ctypes, "windll"):
return get_win_folder_via_ctypes
try:
import winreg # noqa: F401
except ImportError:
return get_win_folder_from_env_vars
else:
return get_win_folder_from_registry
get_win_folder = lru_cache(maxsize=None)(_pick_get_win_folder())
__all__ = [
"Windows",
]
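A usage sketch, assuming a Windows host and illustrative names. ``get_win_folder`` resolves through ctypes when available, then the registry, then environment variables, and the selected resolver is memoised by ``lru_cache``:

dirs = Windows(appname="MyApp", appauthor="MyCompany")
print(dirs.user_data_dir)  # e.g. C:\Users\me\AppData\Local\MyCompany\MyApp
print(get_win_folder("CSIDL_COMMON_APPDATA"))  # shared base folder, e.g. C:\ProgramData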

@@ -1,129 +1,40 @@
"""Extensions to the 'distutils' for large or complex distributions""" """Extensions to the 'distutils' for large or complex distributions"""
import os
import sys
import functools import functools
import distutils.core import os
import distutils.filelist
import re import re
import _distutils_hack.override # noqa: F401
import distutils.core
from distutils.errors import DistutilsOptionError from distutils.errors import DistutilsOptionError
from distutils.util import convert_path from distutils.util import convert_path as _convert_path
from fnmatch import fnmatchcase
from ._deprecation_warning import SetuptoolsDeprecationWarning
from setuptools.extern.six import PY3, string_types
from setuptools.extern.six.moves import filter, map
import setuptools.version
from setuptools.extension import Extension
from setuptools.dist import Distribution, Feature
from setuptools.depends import Require
from . import monkey
__metaclass__ = type
from . import logging, monkey
from . import version as _version_module
from .depends import Require
from .discovery import PackageFinder, PEP420PackageFinder
from .dist import Distribution
from .extension import Extension
from .warnings import SetuptoolsDeprecationWarning
__all__ = [ __all__ = [
'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require', 'setup',
'Distribution',
'Command',
'Extension',
'Require',
'SetuptoolsDeprecationWarning', 'SetuptoolsDeprecationWarning',
'find_packages' 'find_packages',
'find_namespace_packages',
] ]
if PY3: __version__ = _version_module.__version__
__all__.append('find_namespace_packages')
__version__ = setuptools.version.__version__
bootstrap_install_from = None bootstrap_install_from = None
# If we run 2to3 on .py files, should we also convert docstrings?
# Default: yes; assume that we can detect doctests reliably
run_2to3_on_doctests = True
# Standard package names for fixer packages
lib2to3_fixer_packages = ['lib2to3.fixes']
class PackageFinder:
"""
Generate a list of all Python packages found within a directory
"""
@classmethod
def find(cls, where='.', exclude=(), include=('*',)):
"""Return a list all Python packages found within directory 'where'
'where' is the root directory which will be searched for packages. It
should be supplied as a "cross-platform" (i.e. URL-style) path; it will
be converted to the appropriate local path syntax.
'exclude' is a sequence of package names to exclude; '*' can be used
as a wildcard in the names, such that 'foo.*' will exclude all
subpackages of 'foo' (but not 'foo' itself).
'include' is a sequence of package names to include. If it's
specified, only the named packages will be included. If it's not
specified, all found packages will be included. 'include' can contain
shell style wildcard patterns just like 'exclude'.
"""
return list(cls._find_packages_iter(
convert_path(where),
cls._build_filter('ez_setup', '*__pycache__', *exclude),
cls._build_filter(*include)))
@classmethod
def _find_packages_iter(cls, where, exclude, include):
"""
All the packages found in 'where' that pass the 'include' filter, but
not the 'exclude' filter.
"""
for root, dirs, files in os.walk(where, followlinks=True):
# Copy dirs to iterate over it, then empty dirs.
all_dirs = dirs[:]
dirs[:] = []
for dir in all_dirs:
full_path = os.path.join(root, dir)
rel_path = os.path.relpath(full_path, where)
package = rel_path.replace(os.path.sep, '.')
# Skip directory trees that are not valid packages
if ('.' in dir or not cls._looks_like_package(full_path)):
continue
# Should this package be included?
if include(package) and not exclude(package):
yield package
# Keep searching subdirectories, as there may be more packages
# down there, even if the parent was excluded.
dirs.append(dir)
@staticmethod
def _looks_like_package(path):
"""Does a directory look like a package?"""
return os.path.isfile(os.path.join(path, '__init__.py'))
@staticmethod
def _build_filter(*patterns):
"""
Given a list of patterns, return a callable that will be true only if
the input matches at least one of the patterns.
"""
return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns)
class PEP420PackageFinder(PackageFinder):
@staticmethod
def _looks_like_package(path):
return True
find_packages = PackageFinder.find find_packages = PackageFinder.find
find_namespace_packages = PEP420PackageFinder.find
if PY3:
find_namespace_packages = PEP420PackageFinder.find
def _install_setup_requires(attrs): def _install_setup_requires(attrs):
@@ -134,13 +45,21 @@ def _install_setup_requires(attrs):
         A minimal version of a distribution for supporting the
         fetch_build_eggs interface.
         """
         def __init__(self, attrs):
             _incl = 'dependency_links', 'setup_requires'
-            filtered = {
-                k: attrs[k]
-                for k in set(_incl) & set(attrs)
-            }
-            distutils.core.Distribution.__init__(self, filtered)
+            filtered = {k: attrs[k] for k in set(_incl) & set(attrs)}
+            super().__init__(filtered)
+            # Prevent accidentally triggering discovery with incomplete set of attrs
+            self.set_defaults._disable()
+        def _get_project_config_files(self, filenames=None):
+            """Ignore ``pyproject.toml``, they are not related to setup_requires"""
+            try:
+                cfg, toml = super()._split_standard_project_metadata(filenames)
+                return cfg, ()
+            except Exception:
+                return filenames, ()
         def finalize_options(self):
             """
@@ -153,14 +72,37 @@ def _install_setup_requires(attrs):
         # Honor setup.cfg's options.
         dist.parse_config_files(ignore_option_errors=True)
         if dist.setup_requires:
-            dist.fetch_build_eggs(dist.setup_requires)
+            _fetch_build_eggs(dist)
+def _fetch_build_eggs(dist):
+    try:
+        dist.fetch_build_eggs(dist.setup_requires)
+    except Exception as ex:
+        msg = """
+        It is possible a package already installed in your system
+        contains a version that is invalid according to PEP 440.
+        You can try `pip install --use-pep517` as a workaround for this problem,
+        or rely on a new virtual environment.
+        If the problem refers to a package that is not installed yet,
+        please contact that package's maintainers or distributors.
+        """
+        if "InvalidVersion" in ex.__class__.__name__:
+            if hasattr(ex, "add_note"):
+                ex.add_note(msg)  # PEP 678
+            else:
+                dist.announce(f"\n{msg}\n")
+        raise
 def setup(**attrs):
     # Make sure we have any requirements needed to interpret 'attrs'.
+    logging.configure()
     _install_setup_requires(attrs)
     return distutils.core.setup(**attrs)
 setup.__doc__ = distutils.core.setup.__doc__
@@ -168,7 +110,59 @@ _Command = monkey.get_unpatched(distutils.core.Command)
 class Command(_Command):
-    __doc__ = _Command.__doc__
+    """
+    Setuptools internal actions are organized using a *command design pattern*.
+    This means that each action (or group of closely related actions) executed during
+    the build should be implemented as a ``Command`` subclass.
+    These commands are abstractions and do not necessarily correspond to a command that
+    can (or should) be executed via a terminal, in a CLI fashion (although historically
+    they would).
+    When creating a new command from scratch, custom defined classes **SHOULD** inherit
+    from ``setuptools.Command`` and implement a few mandatory methods.
+    Among these mandatory methods are:
+    .. method:: initialize_options(self)
+        Set or (reset) all options/attributes/caches used by the command
+        to their default values. Note that these values may be overwritten during
+        the build.
+    .. method:: finalize_options(self)
+        Set final values for all options/attributes used by the command.
+        Most of the time, each option/attribute/cache should only be set if it does not
+        have any value yet (e.g. ``if self.attr is None: self.attr = val``).
+    .. method:: run(self)
+        Execute the actions intended by the command.
+        (Side effects **SHOULD** only take place when ``run`` is executed,
+        for example, creating new files or writing to the terminal output).
+    A useful analogy for command classes is to think of them as subroutines with local
+    variables called "options". The options are "declared" in ``initialize_options()``
+    and "defined" (given their final values, aka "finalized") in ``finalize_options()``,
+    both of which must be defined by every command class. The "body" of the subroutine
+    (where it does all the work) is the ``run()`` method.
+    Between ``initialize_options()`` and ``finalize_options()``, ``setuptools`` may set
+    the values for options/attributes based on user's input (or circumstance),
+    which means that the implementation should be careful to not overwrite values in
+    ``finalize_options`` unless necessary.
+    Please note that other commands (or other parts of setuptools) may also overwrite
+    the values of the command's options/attributes multiple times during the build
+    process.
+    Therefore it is important to consistently implement ``initialize_options()`` and
+    ``finalize_options()``. For example, all derived attributes (or attributes that
+    depend on the value of other attributes) **SHOULD** be recomputed in
+    ``finalize_options``.
+    When overwriting existing commands, custom defined classes **MUST** abide by the
+    same APIs implemented by the original class. They also **SHOULD** inherit from the
+    original class.
+    """
     command_consumes_arguments = False
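The contract described in this docstring can be illustrated with a minimal, hypothetical command; the class name and option are invented for the example:

class hello(Command):
    description = "print a greeting (illustrative only)"
    user_options = [("name=", None, "who to greet")]

    def initialize_options(self):
        # declare every option with a default value
        self.name = None

    def finalize_options(self):
        # compute final values, but only if nothing set them earlier
        if self.name is None:
            self.name = "world"

    def run(self):
        # side effects belong here, not in the two methods above
        print(f"hello, {self.name}")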
@@ -177,7 +171,7 @@ class Command(_Command):
         Construct the command for dist, updating
         vars(self) with any keyword parameters.
         """
-        _Command.__init__(self, dist)
+        super().__init__(dist)
         vars(self).update(kw)
     def _ensure_stringlike(self, option, what, default=None):
@@ -185,9 +179,10 @@ class Command(_Command):
         if val is None:
             setattr(self, option, default)
             return default
-        elif not isinstance(val, string_types):
-            raise DistutilsOptionError("'%s' must be a %s (got `%s`)"
-                                       % (option, what, val))
+        elif not isinstance(val, str):
+            raise DistutilsOptionError(
+                "'%s' must be a %s (got `%s`)" % (option, what, val)
+            )
         return val
     def ensure_string_list(self, option):
@@ -195,21 +190,27 @@ class Command(_Command):
         currently a string, we split it either on /,\s*/ or /\s+/, so
         "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become
         ["foo", "bar", "baz"].
+        ..
+           TODO: This method seems to be similar to the one in ``distutils.cmd``
+           Probably it is just here for backward compatibility with old Python versions?
+        :meta private:
         """
         val = getattr(self, option)
         if val is None:
             return
-        elif isinstance(val, string_types):
+        elif isinstance(val, str):
             setattr(self, option, re.split(r',\s*|\s+', val))
         else:
             if isinstance(val, list):
-                ok = all(isinstance(v, string_types) for v in val)
+                ok = all(isinstance(v, str) for v in val)
             else:
                 ok = False
             if not ok:
                 raise DistutilsOptionError(
-                    "'%s' must be a list of strings (got %r)"
-                    % (option, val))
+                    "'%s' must be a list of strings (got %r)" % (option, val)
+                )
     def reinitialize_command(self, command, reinit_subcommands=0, **kw):
         cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
@@ -241,5 +242,25 @@ def findall(dir=os.curdir):
     return list(files)
+@functools.wraps(_convert_path)
+def convert_path(pathname):
+    SetuptoolsDeprecationWarning.emit(
+        "Access to implementation detail",
+        """
+        The function `convert_path` is not provided by setuptools itself,
+        and therefore not part of the public API.
+        Its direct usage by 3rd-party packages is considered improper and the function
+        may be removed in the future.
+        """,
+        due_date=(2023, 12, 13),  # initial deprecation 2022-03-25, see #3201
+    )
+    return _convert_path(pathname)
 class sic(str):
     """Treat this string as-is (https://en.wikipedia.org/wiki/Sic)"""
 # Apply monkey patches
 monkey.patch_all()
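The ``find_packages``/``find_namespace_packages`` helpers kept above retain the include/exclude wildcard semantics documented in the removed ``PackageFinder.find``; a short example with illustrative names:

from setuptools import find_packages, find_namespace_packages
find_packages(where="src", exclude=("tests", "tests.*"))   # skips tests and its subpackages
find_namespace_packages(where="src", include=("mypkg*",))  # PEP 420: no __init__.py required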

@@ -1,7 +0,0 @@
class SetuptoolsDeprecationWarning(Warning):
"""
Base class for warning deprecations in ``setuptools``
This class is not derived from ``DeprecationWarning``, and as such is
visible by default.
"""

@@ -7,7 +7,7 @@ import os
 import importlib.util
 import importlib.machinery
-from .py34compat import module_from_spec
+from importlib.util import module_from_spec
 PY_SOURCE = 1
@@ -17,9 +17,18 @@ C_BUILTIN = 6
 PY_FROZEN = 7
+def find_spec(module, paths):
+    finder = (
+        importlib.machinery.PathFinder().find_spec
+        if isinstance(paths, list)
+        else importlib.util.find_spec
+    )
+    return finder(module, paths)
 def find_module(module, paths=None):
     """Just like 'imp.find_module()', but with package support"""
-    spec = importlib.util.find_spec(module, paths)
+    spec = find_spec(module, paths)
     if spec is None:
         raise ImportError("Can't find %s" % module)
     if not spec.has_location and hasattr(spec, 'submodule_search_locations'):
@@ -28,16 +37,22 @@ def find_module(module, paths=None):
     kind = -1
     file = None
     static = isinstance(spec.loader, type)
-    if spec.origin == 'frozen' or static and issubclass(
-            spec.loader, importlib.machinery.FrozenImporter):
+    if (
+        spec.origin == 'frozen'
+        or static
+        and issubclass(spec.loader, importlib.machinery.FrozenImporter)
+    ):
         kind = PY_FROZEN
         path = None  # imp compabilty
-        suffix = mode = ''  # imp compability
+        suffix = mode = ''  # imp compatibility
-    elif spec.origin == 'built-in' or static and issubclass(
-            spec.loader, importlib.machinery.BuiltinImporter):
+    elif (
+        spec.origin == 'built-in'
+        or static
+        and issubclass(spec.loader, importlib.machinery.BuiltinImporter)
+    ):
         kind = C_BUILTIN
         path = None  # imp compabilty
-        suffix = mode = ''  # imp compability
+        suffix = mode = ''  # imp compatibility
     elif spec.has_location:
         path = spec.origin
         suffix = os.path.splitext(path)[1]
@@ -60,14 +75,14 @@ def find_module(module, paths=None):
 def get_frozen_object(module, paths=None):
-    spec = importlib.util.find_spec(module, paths)
+    spec = find_spec(module, paths)
     if not spec:
         raise ImportError("Can't find %s" % module)
     return spec.loader.get_code(module)
 def get_module(module, paths, info):
-    spec = importlib.util.find_spec(module, paths)
+    spec = find_spec(module, paths)
     if not spec:
         raise ImportError("Can't find %s" % module)
     return module_from_spec(spec)
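A small sketch of the shim above: with ``paths=None`` the helper falls back to ``importlib.util.find_spec``, so the lookup mirrors the old ``imp`` behaviour (the module name is chosen for illustration):

spec = find_spec("os", None)
print(spec.origin)  # filesystem location of the stdlib module
print(get_module("os", None, None).__name__)  # "os", built via module_from_spec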

@@ -1,27 +0,0 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
__all__ = [
"__title__",
"__summary__",
"__uri__",
"__version__",
"__author__",
"__email__",
"__license__",
"__copyright__",
]
__title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"
__version__ = "19.2"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "BSD or Apache License, Version 2.0"
__copyright__ = "Copyright 2014-2019 %s" % __author__

@@ -1,26 +1,15 @@
 # This file is dual licensed under the terms of the Apache License, Version
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
-from __future__ import absolute_import, division, print_function
-from .__about__ import (
-    __author__,
-    __copyright__,
-    __email__,
-    __license__,
-    __summary__,
-    __title__,
-    __uri__,
-    __version__,
-)
-__all__ = [
-    "__title__",
-    "__summary__",
-    "__uri__",
-    "__version__",
-    "__author__",
-    "__email__",
-    "__license__",
-    "__copyright__",
-]
+__title__ = "packaging"
+__summary__ = "Core utilities for Python packages"
+__uri__ = "https://github.com/pypa/packaging"
+__version__ = "23.1"
+__author__ = "Donald Stufft and individual contributors"
+__email__ = "donald@stufft.io"
+__license__ = "BSD-2-Clause or Apache-2.0"
+__copyright__ = "2014-2019 %s" % __author__

@@ -1,31 +0,0 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import sys
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
# flake8: noqa
if PY3:
string_types = (str,)
else:
string_types = (basestring,)
def with_metaclass(meta, *bases):
"""
Create a base class with a metaclass.
"""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, "temporary_class", (), {})

@@ -1,68 +1,61 @@
 # This file is dual licensed under the terms of the Apache License, Version
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
-from __future__ import absolute_import, division, print_function
-class Infinity(object):
-    def __repr__(self):
+class InfinityType:
+    def __repr__(self) -> str:
         return "Infinity"
-    def __hash__(self):
+    def __hash__(self) -> int:
         return hash(repr(self))
-    def __lt__(self, other):
+    def __lt__(self, other: object) -> bool:
         return False
-    def __le__(self, other):
+    def __le__(self, other: object) -> bool:
         return False
-    def __eq__(self, other):
+    def __eq__(self, other: object) -> bool:
         return isinstance(other, self.__class__)
-    def __ne__(self, other):
-        return not isinstance(other, self.__class__)
-    def __gt__(self, other):
+    def __gt__(self, other: object) -> bool:
         return True
-    def __ge__(self, other):
+    def __ge__(self, other: object) -> bool:
         return True
-    def __neg__(self):
+    def __neg__(self: object) -> "NegativeInfinityType":
         return NegativeInfinity
-Infinity = Infinity()
+Infinity = InfinityType()
-class NegativeInfinity(object):
-    def __repr__(self):
+class NegativeInfinityType:
+    def __repr__(self) -> str:
         return "-Infinity"
-    def __hash__(self):
+    def __hash__(self) -> int:
         return hash(repr(self))
-    def __lt__(self, other):
+    def __lt__(self, other: object) -> bool:
         return True
-    def __le__(self, other):
+    def __le__(self, other: object) -> bool:
         return True
-    def __eq__(self, other):
+    def __eq__(self, other: object) -> bool:
         return isinstance(other, self.__class__)
-    def __ne__(self, other):
-        return not isinstance(other, self.__class__)
-    def __gt__(self, other):
+    def __gt__(self, other: object) -> bool:
         return False
-    def __ge__(self, other):
+    def __ge__(self, other: object) -> bool:
         return False
-    def __neg__(self):
+    def __neg__(self: object) -> InfinityType:
         return Infinity
-NegativeInfinity = NegativeInfinity()
+NegativeInfinity = NegativeInfinityType()
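These singletons act as ordering sentinels: ``Infinity`` compares greater than, and ``NegativeInfinity`` less than, any other value, which is what makes them useful as padding when comparing version keys of unequal length. A brief illustration (the values are arbitrary):

assert Infinity > (99, "post") and NegativeInfinity < (0,)
assert sorted([(1, 2), Infinity, NegativeInfinity])[0] is NegativeInfinity
assert -Infinity is NegativeInfinity  # negation flips between the two singletons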

@@ -1,20 +1,24 @@
 # This file is dual licensed under the terms of the Apache License, Version
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
-from __future__ import absolute_import, division, print_function
 import operator
 import os
 import platform
 import sys
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union
-from setuptools.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd
-from setuptools.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString
-from setuptools.extern.pyparsing import Literal as L  # noqa
-from ._compat import string_types
-from .specifiers import Specifier, InvalidSpecifier
+from ._parser import (
+    MarkerAtom,
+    MarkerList,
+    Op,
+    Value,
+    Variable,
+    parse_marker as _parse_marker,
+)
+from ._tokenizer import ParserSyntaxError
+from .specifiers import InvalidSpecifier, Specifier
+from .utils import canonicalize_name
 __all__ = [
     "InvalidMarker",
@@ -24,6 +28,8 @@ __all__ = [
     "default_environment",
 ]
+Operator = Callable[[str, str], bool]
 class InvalidMarker(ValueError):
     """
@@ -44,101 +50,27 @@ class UndefinedEnvironmentName(ValueError):
     """
-class Node(object):
-    def __init__(self, value):
-        self.value = value
-    def __str__(self):
-        return str(self.value)
-    def __repr__(self):
-        return "<{0}({1!r})>".format(self.__class__.__name__, str(self))
-    def serialize(self):
-        raise NotImplementedError
-class Variable(Node):
-    def serialize(self):
-        return str(self)
-class Value(Node):
-    def serialize(self):
-        return '"{0}"'.format(self)
-class Op(Node):
-    def serialize(self):
-        return str(self)
-VARIABLE = (
-    L("implementation_version")
-    | L("platform_python_implementation")
-    | L("implementation_name")
-    | L("python_full_version")
-    | L("platform_release")
-    | L("platform_version")
-    | L("platform_machine")
-    | L("platform_system")
-    | L("python_version")
-    | L("sys_platform")
-    | L("os_name")
-    | L("os.name")
-    | L("sys.platform")  # PEP-345
-    | L("platform.version")  # PEP-345
-    | L("platform.machine")  # PEP-345
-    | L("platform.python_implementation")  # PEP-345
-    | L("python_implementation")  # PEP-345
-    | L("extra")  # undocumented setuptools legacy
-)
-ALIASES = {
-    "os.name": "os_name",
-    "sys.platform": "sys_platform",
-    "platform.version": "platform_version",
-    "platform.machine": "platform_machine",
-    "platform.python_implementation": "platform_python_implementation",
-    "python_implementation": "platform_python_implementation",
-}
-VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
-VERSION_CMP = (
-    L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<")
-)
-MARKER_OP = VERSION_CMP | L("not in") | L("in")
-MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))
-MARKER_VALUE = QuotedString("'") | QuotedString('"')
-MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))
-BOOLOP = L("and") | L("or")
-MARKER_VAR = VARIABLE | MARKER_VALUE
-MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
-MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))
-LPAREN = L("(").suppress()
-RPAREN = L(")").suppress()
-MARKER_EXPR = Forward()
-MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
-MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)
-MARKER = stringStart + MARKER_EXPR + stringEnd
-def _coerce_parse_result(results):
-    if isinstance(results, ParseResults):
-        return [_coerce_parse_result(i) for i in results]
-    else:
-        return results
-def _format_marker(marker, first=True):
-    assert isinstance(marker, (list, tuple, string_types))
+def _normalize_extra_values(results: Any) -> Any:
+    """
+    Normalize extra values.
+    """
+    if isinstance(results[0], tuple):
+        lhs, op, rhs = results[0]
+        if isinstance(lhs, Variable) and lhs.value == "extra":
+            normalized_extra = canonicalize_name(rhs.value)
+            rhs = Value(normalized_extra)
+        elif isinstance(rhs, Variable) and rhs.value == "extra":
+            normalized_extra = canonicalize_name(lhs.value)
+            lhs = Value(normalized_extra)
+        results[0] = lhs, op, rhs
+    return results
+def _format_marker(
+    marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True
+) -> str:
+    assert isinstance(marker, (list, tuple, str))
     # Sometimes we have a structure like [[...]] which is a single item list
     # where the single item is itself its own list. In that case we want skip
@@ -163,7 +95,7 @@ def _format_marker(marker, first=True):
     return marker
-_operators = {
+_operators: Dict[str, Operator] = {
     "in": lambda lhs, rhs: lhs in rhs,
     "not in": lambda lhs, rhs: lhs not in rhs,
     "<": operator.lt,
@@ -175,42 +107,38 @@ _operators = {
 }
-def _eval_op(lhs, op, rhs):
+def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
     try:
         spec = Specifier("".join([op.serialize(), rhs]))
     except InvalidSpecifier:
         pass
     else:
-        return spec.contains(lhs)
+        return spec.contains(lhs, prereleases=True)
-    oper = _operators.get(op.serialize())
+    oper: Optional[Operator] = _operators.get(op.serialize())
     if oper is None:
-        raise UndefinedComparison(
-            "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
-        )
+        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
     return oper(lhs, rhs)
-_undefined = object()
-def _get_env(environment, name):
-    value = environment.get(name, _undefined)
-    if value is _undefined:
-        raise UndefinedEnvironmentName(
-            "{0!r} does not exist in evaluation environment.".format(name)
-        )
-    return value
-def _evaluate_markers(markers, environment):
-    groups = [[]]
+def _normalize(*values: str, key: str) -> Tuple[str, ...]:
+    # PEP 685 Comparison of extra names for optional distribution dependencies
+    # https://peps.python.org/pep-0685/
+    # > When comparing extra names, tools MUST normalize the names being
+    # > compared using the semantics outlined in PEP 503 for names
+    if key == "extra":
+        return tuple(canonicalize_name(v) for v in values)
+    # other environment markers don't have such standards
+    return values
+def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
+    groups: List[List[bool]] = [[]]
     for marker in markers:
-        assert isinstance(marker, (list, tuple, string_types))
+        assert isinstance(marker, (list, tuple, str))
         if isinstance(marker, list):
             groups[-1].append(_evaluate_markers(marker, environment))
@@ -218,12 +146,15 @@ def _evaluate_markers(markers, environment):
             lhs, op, rhs = marker
             if isinstance(lhs, Variable):
-                lhs_value = _get_env(environment, lhs.value)
+                environment_key = lhs.value
+                lhs_value = environment[environment_key]
                 rhs_value = rhs.value
             else:
                 lhs_value = lhs.value
-                rhs_value = _get_env(environment, rhs.value)
+                environment_key = rhs.value
+                rhs_value = environment[environment_key]
+            lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
             groups[-1].append(_eval_op(lhs_value, op, rhs_value))
         else:
             assert marker in ["and", "or"]
@@ -233,7 +164,7 @@ def _evaluate_markers(markers, environment):
     return any(all(item) for item in groups)
-def format_full_version(info):
+def format_full_version(info: "sys._version_info") -> str:
     version = "{0.major}.{0.minor}.{0.micro}".format(info)
     kind = info.releaselevel
     if kind != "final":
@@ -241,14 +172,9 @@ def format_full_version(info):
     return version
-def default_environment():
-    if hasattr(sys, "implementation"):
-        iver = format_full_version(sys.implementation.version)
-        implementation_name = sys.implementation.name
-    else:
-        iver = "0"
-        implementation_name = ""
+def default_environment() -> Dict[str, str]:
+    iver = format_full_version(sys.implementation.version)
+    implementation_name = sys.implementation.name
     return {
         "implementation_name": implementation_name,
         "implementation_version": iver,
@@ -264,23 +190,48 @@ def default_environment():
     }
-class Marker(object):
-    def __init__(self, marker):
+class Marker:
+    def __init__(self, marker: str) -> None:
+        # Note: We create a Marker object without calling this constructor in
+        # packaging.requirements.Requirement. If any additional logic is
+        # added here, make sure to mirror/adapt Requirement.
         try:
-            self._markers = _coerce_parse_result(MARKER.parseString(marker))
-        except ParseException as e:
-            err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
-                marker, marker[e.loc : e.loc + 8]
-            )
-            raise InvalidMarker(err_str)
+            self._markers = _normalize_extra_values(_parse_marker(marker))
+            # The attribute `_markers` can be described in terms of a recursive type:
+            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
+            #
+            # For example, the following expression:
+            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
+            #
+            # is parsed into:
+            # [
+            #     (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
+            #     'or',
+            #     [
+            #         (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
+            #         'and',
+            #         (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
+            #     ]
+            # ]
+        except ParserSyntaxError as e:
+            raise InvalidMarker(str(e)) from e
-    def __str__(self):
+    def __str__(self) -> str:
         return _format_marker(self._markers)
-    def __repr__(self):
-        return "<Marker({0!r})>".format(str(self))
+    def __repr__(self) -> str:
+        return f"<Marker('{self}')>"
+    def __hash__(self) -> int:
+        return hash((self.__class__.__name__, str(self)))
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, Marker):
+            return NotImplemented
+        return str(self) == str(other)
-    def evaluate(self, environment=None):
+    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
         """Evaluate a marker.
         Return the boolean from evaluating the given marker against the
@@ -290,7 +241,12 @@ class Marker(object):
         The environment is determined from the current Python process.
         """
         current_environment = default_environment()
+        current_environment["extra"] = ""
         if environment is not None:
             current_environment.update(environment)
+            # The API used to allow setting extra to None. We need to handle this
+            # case for backwards compatibility.
+            if current_environment["extra"] is None:
+                current_environment["extra"] = ""
         return _evaluate_markers(self._markers, current_environment)
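A hedged usage sketch for the class above; the marker string is illustrative:

m = Marker('extra == "Testing"')
print(m)                                 # extra == "testing" (value canonicalized per PEP 685)
print(m.evaluate())                      # False: "extra" defaults to "" in the environment
print(m.evaluate({"extra": "TESTING"}))  # True: both sides normalize to "testing"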

@@ -1,18 +1,14 @@
 # This file is dual licensed under the terms of the Apache License, Version
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
-from __future__ import absolute_import, division, print_function
-import string
-import re
-from setuptools.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException
-from setuptools.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine
-from setuptools.extern.pyparsing import Literal as L  # noqa
-from setuptools.extern.six.moves.urllib import parse as urlparse
-from .markers import MARKER_EXPR, Marker
-from .specifiers import LegacySpecifier, Specifier, SpecifierSet
+import urllib.parse
+from typing import Any, List, Optional, Set
+from ._parser import parse_requirement as _parse_requirement
+from ._tokenizer import ParserSyntaxError
+from .markers import Marker, _normalize_extra_values
+from .specifiers import SpecifierSet
 class InvalidRequirement(ValueError):
@@ -21,61 +17,7 @@ class InvalidRequirement(ValueError):
""" """
ALPHANUM = Word(string.ascii_letters + string.digits) class Requirement:
LBRACKET = L("[").suppress()
RBRACKET = L("]").suppress()
LPAREN = L("(").suppress()
RPAREN = L(")").suppress()
COMMA = L(",").suppress()
SEMICOLON = L(";").suppress()
AT = L("@").suppress()
PUNCTUATION = Word("-_.")
IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
NAME = IDENTIFIER("name")
EXTRA = IDENTIFIER
URI = Regex(r"[^ ]+")("url")
URL = AT + URI
EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
VERSION_MANY = Combine(
VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False
)("_raw_spec")
_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "")
VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
MARKER_EXPR.setParseAction(
lambda s, l, t: Marker(s[t._original_start : t._original_end])
)
MARKER_SEPARATOR = SEMICOLON
MARKER = MARKER_SEPARATOR + MARKER_EXPR
VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
URL_AND_MARKER = URL + Optional(MARKER)
NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
# setuptools.extern.pyparsing isn't thread safe during initialization, so we do it eagerly, see
# issue #104
REQUIREMENT.parseString("x[]")
class Requirement(object):
"""Parse a requirement. """Parse a requirement.
Parse a given requirement string into its parts, such as name, specifier, Parse a given requirement string into its parts, such as name, specifier,
@@ -88,51 +30,66 @@ class Requirement(object):
# the thing as well as the version? What about the markers? # the thing as well as the version? What about the markers?
# TODO: Can we normalize the name and extra name? # TODO: Can we normalize the name and extra name?
def __init__(self, requirement_string): def __init__(self, requirement_string: str) -> None:
try: try:
req = REQUIREMENT.parseString(requirement_string) parsed = _parse_requirement(requirement_string)
except ParseException as e: except ParserSyntaxError as e:
raise InvalidRequirement( raise InvalidRequirement(str(e)) from e
'Parse error at "{0!r}": {1}'.format(
requirement_string[e.loc : e.loc + 8], e.msg
)
)
self.name = req.name self.name: str = parsed.name
if req.url: if parsed.url:
parsed_url = urlparse.urlparse(req.url) parsed_url = urllib.parse.urlparse(parsed.url)
if parsed_url.scheme == "file": if parsed_url.scheme == "file":
if urlparse.urlunparse(parsed_url) != req.url: if urllib.parse.urlunparse(parsed_url) != parsed.url:
raise InvalidRequirement("Invalid URL given") raise InvalidRequirement("Invalid URL given")
elif not (parsed_url.scheme and parsed_url.netloc) or ( elif not (parsed_url.scheme and parsed_url.netloc) or (
not parsed_url.scheme and not parsed_url.netloc not parsed_url.scheme and not parsed_url.netloc
): ):
raise InvalidRequirement("Invalid URL: {0}".format(req.url)) raise InvalidRequirement(f"Invalid URL: {parsed.url}")
self.url = req.url self.url: Optional[str] = parsed.url
else: else:
self.url = None self.url = None
self.extras = set(req.extras.asList() if req.extras else []) self.extras: Set[str] = set(parsed.extras if parsed.extras else [])
self.specifier = SpecifierSet(req.specifier) self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
self.marker = req.marker if req.marker else None self.marker: Optional[Marker] = None
if parsed.marker is not None:
self.marker = Marker.__new__(Marker)
self.marker._markers = _normalize_extra_values(parsed.marker)
def __str__(self): def __str__(self) -> str:
parts = [self.name] parts: List[str] = [self.name]
if self.extras: if self.extras:
parts.append("[{0}]".format(",".join(sorted(self.extras)))) formatted_extras = ",".join(sorted(self.extras))
parts.append(f"[{formatted_extras}]")
if self.specifier: if self.specifier:
parts.append(str(self.specifier)) parts.append(str(self.specifier))
if self.url: if self.url:
parts.append("@ {0}".format(self.url)) parts.append(f"@ {self.url}")
if self.marker: if self.marker:
parts.append(" ") parts.append(" ")
if self.marker: if self.marker:
parts.append("; {0}".format(self.marker)) parts.append(f"; {self.marker}")
return "".join(parts) return "".join(parts)
def __repr__(self): def __repr__(self) -> str:
return "<Requirement({0!r})>".format(str(self)) return f"<Requirement('{self}')>"
def __hash__(self) -> int:
return hash((self.__class__.__name__, str(self)))
def __eq__(self, other: Any) -> bool:
if not isinstance(other, Requirement):
return NotImplemented
return (
self.name == other.name
and self.extras == other.extras
and self.specifier == other.specifier
and self.url == other.url
and self.marker == other.marker
)
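A quick sketch of the resulting Requirement behavior (the requirement string is arbitrary):

from packaging.requirements import Requirement

req = Requirement('name[fred,bar]>=1.0,<2.0; python_version >= "3.8"')
print(req.name, sorted(req.extras), str(req.specifier))
print(req.marker)                    # A Marker built without re-parsing the string.
print(req == Requirement(str(req)))  # Field-by-field equality: True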
File diff suppressed because it is too large
@@ -2,25 +2,33 @@
# 2.0, and the BSD License. See the LICENSE file in the root of this repository # 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details. # for complete details.
from __future__ import absolute_import import logging
import distutils.util
try:
from importlib.machinery import EXTENSION_SUFFIXES
except ImportError: # pragma: no cover
import imp
EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()]
del imp
import platform import platform
import re import subprocess
import sys import sys
import sysconfig import sysconfig
import warnings from importlib.machinery import EXTENSION_SUFFIXES
from typing import (
Dict,
FrozenSet,
Iterable,
Iterator,
List,
Optional,
Sequence,
Tuple,
Union,
cast,
)
from . import _manylinux, _musllinux
INTERPRETER_SHORT_NAMES = { logger = logging.getLogger(__name__)
PythonVersion = Sequence[int]
MacVersion = Tuple[int, int]
INTERPRETER_SHORT_NAMES: Dict[str, str] = {
"python": "py", # Generic. "python": "py", # Generic.
"cpython": "cp", "cpython": "cp",
"pypy": "pp", "pypy": "pp",
@@ -29,48 +37,70 @@ INTERPRETER_SHORT_NAMES = {
} }
_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32 _32_BIT_INTERPRETER = sys.maxsize <= 2**32
class Tag(object): class Tag:
"""
A representation of the tag triple for a wheel.
__slots__ = ["_interpreter", "_abi", "_platform"] Instances are considered immutable and thus are hashable. Equality checking
is also supported.
"""
def __init__(self, interpreter, abi, platform): __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]
def __init__(self, interpreter: str, abi: str, platform: str) -> None:
self._interpreter = interpreter.lower() self._interpreter = interpreter.lower()
self._abi = abi.lower() self._abi = abi.lower()
self._platform = platform.lower() self._platform = platform.lower()
# The __hash__ of every single element in a Set[Tag] will be evaluated each time
# that a set calls its `.disjoint()` method, which may be called hundreds of
# times when scanning a page of links for packages with tags matching that
# Set[Tag]. Pre-computing the value here produces significant speedups for
# downstream consumers.
self._hash = hash((self._interpreter, self._abi, self._platform))
@property @property
def interpreter(self): def interpreter(self) -> str:
return self._interpreter return self._interpreter
@property @property
def abi(self): def abi(self) -> str:
return self._abi return self._abi
@property @property
def platform(self): def platform(self) -> str:
return self._platform return self._platform
def __eq__(self, other): def __eq__(self, other: object) -> bool:
if not isinstance(other, Tag):
return NotImplemented
return ( return (
(self.platform == other.platform) (self._hash == other._hash) # Short-circuit ASAP for perf reasons.
and (self.abi == other.abi) and (self._platform == other._platform)
and (self.interpreter == other.interpreter) and (self._abi == other._abi)
and (self._interpreter == other._interpreter)
) )
def __hash__(self): def __hash__(self) -> int:
return hash((self._interpreter, self._abi, self._platform)) return self._hash
def __str__(self): def __str__(self) -> str:
return "{}-{}-{}".format(self._interpreter, self._abi, self._platform) return f"{self._interpreter}-{self._abi}-{self._platform}"
def __repr__(self): def __repr__(self) -> str:
return "<{self} @ {self_id}>".format(self=self, self_id=id(self)) return f"<{self} @ {id(self)}>"
def parse_tag(tag): def parse_tag(tag: str) -> FrozenSet[Tag]:
"""
Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.
Returning a set is required due to the possibility that the tag is a
compressed tag set.
"""
tags = set() tags = set()
interpreters, abis, platforms = tag.split("-") interpreters, abis, platforms = tag.split("-")
for interpreter in interpreters.split("."): for interpreter in interpreters.split("."):
@@ -80,20 +110,34 @@ def parse_tag(tag):
return frozenset(tags) return frozenset(tags)
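Illustrative behavior of the reworked Tag and parse_tag (values are arbitrary):

from packaging.tags import Tag, parse_tag

tag = Tag("CP311", "None", "ANY")          # Fields are lowercased on construction.
print(tag)                                 # cp311-none-any
print(tag == Tag("cp311", "none", "any"))  # True; the hash is precomputed.

# A compressed tag set expands into a frozenset of Tag instances:
print(sorted(str(t) for t in parse_tag("py2.py3-none-any")))
# ['py2-none-any', 'py3-none-any']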
def _normalize_string(string): def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
return string.replace(".", "_").replace("-", "_") value: Union[int, str, None] = sysconfig.get_config_var(name)
if value is None and warn:
logger.debug(
"Config variable '%s' is unset, Python ABI tag may be incorrect", name
)
return value
def _cpython_interpreter(py_version): def _normalize_string(string: str) -> str:
# TODO: Is using py_version_nodot for interpreter version critical? return string.replace(".", "_").replace("-", "_").replace(" ", "_")
return "cp{major}{minor}".format(major=py_version[0], minor=py_version[1])
def _cpython_abis(py_version): def _abi3_applies(python_version: PythonVersion) -> bool:
"""
Determine if the Python version supports abi3.
PEP 384 was first implemented in Python 3.2.
"""
return len(python_version) > 1 and tuple(python_version) >= (3, 2)
def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
py_version = tuple(py_version) # To allow for version comparison.
abis = [] abis = []
version = "{}{}".format(*py_version[:2]) version = _version_nodot(py_version[:2])
debug = pymalloc = ucs4 = "" debug = pymalloc = ucs4 = ""
with_debug = sysconfig.get_config_var("Py_DEBUG") with_debug = _get_config_var("Py_DEBUG", warn)
has_refcount = hasattr(sys, "gettotalrefcount") has_refcount = hasattr(sys, "gettotalrefcount")
# Windows doesn't set Py_DEBUG, so checking for support of debug-compiled # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
# extension modules is the best option. # extension modules is the best option.
@@ -102,11 +146,11 @@ def _cpython_abis(py_version):
if with_debug or (with_debug is None and (has_refcount or has_ext)): if with_debug or (with_debug is None and (has_refcount or has_ext)):
debug = "d" debug = "d"
if py_version < (3, 8): if py_version < (3, 8):
with_pymalloc = sysconfig.get_config_var("WITH_PYMALLOC") with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
if with_pymalloc or with_pymalloc is None: if with_pymalloc or with_pymalloc is None:
pymalloc = "m" pymalloc = "m"
if py_version < (3, 3): if py_version < (3, 3):
unicode_size = sysconfig.get_config_var("Py_UNICODE_SIZE") unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
if unicode_size == 4 or ( if unicode_size == 4 or (
unicode_size is None and sys.maxunicode == 0x10FFFF unicode_size is None and sys.maxunicode == 0x10FFFF
): ):
@@ -114,7 +158,7 @@ def _cpython_abis(py_version):
elif debug: elif debug:
# Debug builds can also load "normal" extension modules. # Debug builds can also load "normal" extension modules.
# We can also assume no UCS-4 or pymalloc requirement. # We can also assume no UCS-4 or pymalloc requirement.
abis.append("cp{version}".format(version=version)) abis.append(f"cp{version}")
abis.insert( abis.insert(
0, 0,
"cp{version}{debug}{pymalloc}{ucs4}".format( "cp{version}{debug}{pymalloc}{ucs4}".format(
@@ -124,86 +168,176 @@ def _cpython_abis(py_version):
return abis return abis
def _cpython_tags(py_version, interpreter, abis, platforms): def cpython_tags(
python_version: Optional[PythonVersion] = None,
abis: Optional[Iterable[str]] = None,
platforms: Optional[Iterable[str]] = None,
*,
warn: bool = False,
) -> Iterator[Tag]:
"""
Yields the tags for a CPython interpreter.
The tags consist of:
- cp<python_version>-<abi>-<platform>
- cp<python_version>-abi3-<platform>
- cp<python_version>-none-<platform>
- cp<less than python_version>-abi3-<platform> # Older Python versions down to 3.2.
If python_version only specifies a major version then user-provided ABIs and
the 'none' ABI tag will be used.
If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
their normal position and not at the beginning.
"""
if not python_version:
python_version = sys.version_info[:2]
interpreter = f"cp{_version_nodot(python_version[:2])}"
if abis is None:
if len(python_version) > 1:
abis = _cpython_abis(python_version, warn)
else:
abis = []
abis = list(abis)
# 'abi3' and 'none' are explicitly handled later.
for explicit_abi in ("abi3", "none"):
try:
abis.remove(explicit_abi)
except ValueError:
pass
platforms = list(platforms or platform_tags())
for abi in abis: for abi in abis:
for platform_ in platforms: for platform_ in platforms:
yield Tag(interpreter, abi, platform_) yield Tag(interpreter, abi, platform_)
for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms): if _abi3_applies(python_version):
yield tag yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms): yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)
yield tag
# PEP 384 was first implemented in Python 3.2. if _abi3_applies(python_version):
for minor_version in range(py_version[1] - 1, 1, -1): for minor_version in range(python_version[1] - 1, 1, -1):
for platform_ in platforms: for platform_ in platforms:
interpreter = "cp{major}{minor}".format( interpreter = "cp{version}".format(
major=py_version[0], minor=minor_version version=_version_nodot((python_version[0], minor_version))
) )
yield Tag(interpreter, "abi3", platform_) yield Tag(interpreter, "abi3", platform_)
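A sketch of the new public generator with explicit, hypothetical inputs, showing the documented ordering:

from packaging.tags import cpython_tags

tags = cpython_tags(
    python_version=(3, 11),
    abis=["cp311"],
    platforms=["manylinux_2_17_x86_64"],
)
for tag in list(tags)[:4]:
    print(tag)
# cp311-cp311-manylinux_2_17_x86_64
# cp311-abi3-manylinux_2_17_x86_64
# cp311-none-manylinux_2_17_x86_64
# cp310-abi3-manylinux_2_17_x86_64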
def _pypy_interpreter(): def _generic_abi() -> List[str]:
return "pp{py_major}{pypy_major}{pypy_minor}".format(
py_major=sys.version_info[0],
pypy_major=sys.pypy_version_info.major,
pypy_minor=sys.pypy_version_info.minor,
)
def _generic_abi():
abi = sysconfig.get_config_var("SOABI")
if abi:
return _normalize_string(abi)
else:
return "none"
def _pypy_tags(py_version, interpreter, abi, platforms):
for tag in (Tag(interpreter, abi, platform) for platform in platforms):
yield tag
for tag in (Tag(interpreter, "none", platform) for platform in platforms):
yield tag
def _generic_tags(interpreter, py_version, abi, platforms):
for tag in (Tag(interpreter, abi, platform) for platform in platforms):
yield tag
if abi != "none":
tags = (Tag(interpreter, "none", platform_) for platform_ in platforms)
for tag in tags:
yield tag
def _py_interpreter_range(py_version):
""" """
Yield Python versions in descending order. Return the ABI tag based on EXT_SUFFIX.
"""
# The following are examples of `EXT_SUFFIX`.
# We want to keep the parts which are related to the ABI and remove the
# parts which are related to the platform:
# - linux: '.cpython-310-x86_64-linux-gnu.so' => cp310
# - mac: '.cpython-310-darwin.so' => cp310
# - win: '.cp310-win_amd64.pyd' => cp310
# - win: '.pyd' => cp37 (uses _cpython_abis())
# - pypy: '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73
# - graalpy: '.graalpy-38-native-x86_64-darwin.dylib'
# => graalpy_38_native
ext_suffix = _get_config_var("EXT_SUFFIX", warn=True)
if not isinstance(ext_suffix, str) or ext_suffix[0] != ".":
raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')")
parts = ext_suffix.split(".")
if len(parts) < 3:
# CPython3.7 and earlier uses ".pyd" on Windows.
return _cpython_abis(sys.version_info[:2])
soabi = parts[1]
if soabi.startswith("cpython"):
# non-windows
abi = "cp" + soabi.split("-")[1]
elif soabi.startswith("cp"):
# windows
abi = soabi.split("-")[0]
elif soabi.startswith("pypy"):
abi = "-".join(soabi.split("-")[:2])
elif soabi.startswith("graalpy"):
abi = "-".join(soabi.split("-")[:3])
elif soabi:
# pyston, ironpython, others?
abi = soabi
else:
return []
return [_normalize_string(abi)]
def generic_tags(
interpreter: Optional[str] = None,
abis: Optional[Iterable[str]] = None,
platforms: Optional[Iterable[str]] = None,
*,
warn: bool = False,
) -> Iterator[Tag]:
"""
Yields the tags for a generic interpreter.
The tags consist of:
- <interpreter>-<abi>-<platform>
The "none" ABI will be added if it was not explicitly provided.
"""
if not interpreter:
interp_name = interpreter_name()
interp_version = interpreter_version(warn=warn)
interpreter = "".join([interp_name, interp_version])
if abis is None:
abis = _generic_abi()
else:
abis = list(abis)
platforms = list(platforms or platform_tags())
if "none" not in abis:
abis.append("none")
for abi in abis:
for platform_ in platforms:
yield Tag(interpreter, abi, platform_)
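And the generic counterpart, with every input supplied so nothing is probed from the running interpreter (the names are hypothetical):

from packaging.tags import generic_tags

tags = generic_tags("graalpy238", ["graalpy_38_native"], ["linux_x86_64"])
print([str(t) for t in tags])
# ['graalpy238-graalpy_38_native-linux_x86_64', 'graalpy238-none-linux_x86_64']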
def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
"""
Yields Python versions in descending order.
After the latest version, the major-only version will be yielded, and then After the latest version, the major-only version will be yielded, and then
all following versions up to 'end'. all previous versions of that major version.
""" """
yield "py{major}{minor}".format(major=py_version[0], minor=py_version[1]) if len(py_version) > 1:
yield "py{major}".format(major=py_version[0]) yield f"py{_version_nodot(py_version[:2])}"
for minor in range(py_version[1] - 1, -1, -1): yield f"py{py_version[0]}"
yield "py{major}{minor}".format(major=py_version[0], minor=minor) if len(py_version) > 1:
for minor in range(py_version[1] - 1, -1, -1):
yield f"py{_version_nodot((py_version[0], minor))}"
def _independent_tags(interpreter, py_version, platforms): def compatible_tags(
python_version: Optional[PythonVersion] = None,
interpreter: Optional[str] = None,
platforms: Optional[Iterable[str]] = None,
) -> Iterator[Tag]:
""" """
Return the sequence of tags that are consistent across implementations. Yields the sequence of tags that are compatible with a specific version of Python.
The tags consist of: The tags consist of:
- py*-none-<platform> - py*-none-<platform>
- <interpreter>-none-any - <interpreter>-none-any # ... if `interpreter` is provided.
- py*-none-any - py*-none-any
""" """
for version in _py_interpreter_range(py_version): if not python_version:
python_version = sys.version_info[:2]
platforms = list(platforms or platform_tags())
for version in _py_interpreter_range(python_version):
for platform_ in platforms: for platform_ in platforms:
yield Tag(version, "none", platform_) yield Tag(version, "none", platform_)
yield Tag(interpreter, "none", "any") if interpreter:
for version in _py_interpreter_range(py_version): yield Tag(interpreter, "none", "any")
for version in _py_interpreter_range(python_version):
yield Tag(version, "none", "any") yield Tag(version, "none", "any")
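Sketch of compatible_tags with pinned, hypothetical inputs:

from packaging.tags import compatible_tags

for tag in list(compatible_tags((3, 10), "cp310", ["linux_x86_64"]))[:4]:
    print(tag)
# py310-none-linux_x86_64
# py3-none-linux_x86_64
# py39-none-linux_x86_64
# py38-none-linux_x86_64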
def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER): def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
if not is_32bit: if not is_32bit:
return arch return arch
@@ -213,7 +347,7 @@ def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER):
return "i386" return "i386"
def _mac_binary_formats(version, cpu_arch): def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
formats = [cpu_arch] formats = [cpu_arch]
if cpu_arch == "x86_64": if cpu_arch == "x86_64":
if version < (10, 4): if version < (10, 4):
@@ -236,169 +370,177 @@ def _mac_binary_formats(version, cpu_arch):
return [] return []
formats.extend(["fat32", "fat"]) formats.extend(["fat32", "fat"])
formats.append("universal") if cpu_arch in {"arm64", "x86_64"}:
formats.append("universal2")
if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
formats.append("universal")
return formats return formats
def _mac_platforms(version=None, arch=None): def mac_platforms(
version: Optional[MacVersion] = None, arch: Optional[str] = None
) -> Iterator[str]:
"""
Yields the platform tags for a macOS system.
The `version` parameter is a two-item tuple specifying the macOS version to
generate platform tags for. The `arch` parameter is the CPU architecture to
generate platform tags for. Both parameters default to the appropriate value
for the current system.
"""
version_str, _, cpu_arch = platform.mac_ver() version_str, _, cpu_arch = platform.mac_ver()
if version is None: if version is None:
version = tuple(map(int, version_str.split(".")[:2])) version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
if version == (10, 16):
# When built against an older macOS SDK, Python will report macOS 10.16
# instead of the real version.
version_str = subprocess.run(
[
sys.executable,
"-sS",
"-c",
"import platform; print(platform.mac_ver()[0])",
],
check=True,
env={"SYSTEM_VERSION_COMPAT": "0"},
stdout=subprocess.PIPE,
universal_newlines=True,
).stdout
version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
else:
version = version
if arch is None: if arch is None:
arch = _mac_arch(cpu_arch) arch = _mac_arch(cpu_arch)
platforms = [] else:
for minor_version in range(version[1], -1, -1): arch = arch
compat_version = version[0], minor_version
binary_formats = _mac_binary_formats(compat_version, arch) if (10, 0) <= version and version < (11, 0):
for binary_format in binary_formats: # Prior to Mac OS 11, each yearly release of Mac OS bumped the
platforms.append( # "minor" version number. The major version was always 10.
"macosx_{major}_{minor}_{binary_format}".format( for minor_version in range(version[1], -1, -1):
compat_version = 10, minor_version
binary_formats = _mac_binary_formats(compat_version, arch)
for binary_format in binary_formats:
yield "macosx_{major}_{minor}_{binary_format}".format(
major=10, minor=minor_version, binary_format=binary_format
)
if version >= (11, 0):
# Starting with Mac OS 11, each yearly release bumps the major version
# number. The minor versions are now the midyear updates.
for major_version in range(version[0], 10, -1):
compat_version = major_version, 0
binary_formats = _mac_binary_formats(compat_version, arch)
for binary_format in binary_formats:
yield "macosx_{major}_{minor}_{binary_format}".format(
major=major_version, minor=0, binary_format=binary_format
)
if version >= (11, 0):
# Mac OS 11 on x86_64 is compatible with binaries from previous releases.
# Arm64 support was introduced in 11.0, so no Arm binaries from previous
# releases exist.
#
# However, the "universal2" binary format can have a
# macOS version earlier than 11.0 when the x86_64 part of the binary supports
# that version of macOS.
if arch == "x86_64":
for minor_version in range(16, 3, -1):
compat_version = 10, minor_version
binary_formats = _mac_binary_formats(compat_version, arch)
for binary_format in binary_formats:
yield "macosx_{major}_{minor}_{binary_format}".format(
major=compat_version[0],
minor=compat_version[1],
binary_format=binary_format,
)
else:
for minor_version in range(16, 3, -1):
compat_version = 10, minor_version
binary_format = "universal2"
yield "macosx_{major}_{minor}_{binary_format}".format(
major=compat_version[0], major=compat_version[0],
minor=compat_version[1], minor=compat_version[1],
binary_format=binary_format, binary_format=binary_format,
) )
)
return platforms
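Illustrative output of the new generator for an assumed macOS 12 arm64 machine:

from packaging.tags import mac_platforms

print(list(mac_platforms(version=(12, 0), arch="arm64"))[:4])
# ['macosx_12_0_arm64', 'macosx_12_0_universal2',
#  'macosx_11_0_arm64', 'macosx_11_0_universal2']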
# From PEP 513. def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
def _is_manylinux_compatible(name, glibc_version): linux = _normalize_string(sysconfig.get_platform())
# Check for presence of _manylinux module. if is_32bit:
try: if linux == "linux_x86_64":
import _manylinux linux = "linux_i686"
elif linux == "linux_aarch64":
return bool(getattr(_manylinux, name + "_compatible")) linux = "linux_armv7l"
except (ImportError, AttributeError): _, arch = linux.split("_", 1)
# Fall through to heuristic check below. yield from _manylinux.platform_tags(linux, arch)
pass yield from _musllinux.platform_tags(arch)
yield linux
return _have_compatible_glibc(*glibc_version)
def _glibc_version_string(): def _generic_platforms() -> Iterator[str]:
# Returns glibc version string, or None if not using glibc. yield _normalize_string(sysconfig.get_platform())
import ctypes
# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
# manpage says, "If filename is NULL, then the returned handle is for the
# main program". This way we can let the linker do the work to figure out
# which libc our process is actually using.
process_namespace = ctypes.CDLL(None)
try:
gnu_get_libc_version = process_namespace.gnu_get_libc_version
except AttributeError:
# Symbol doesn't exist -> therefore, we are not linked to
# glibc.
return None
# Call gnu_get_libc_version, which returns a string like "2.5"
gnu_get_libc_version.restype = ctypes.c_char_p
version_str = gnu_get_libc_version()
# py2 / py3 compatibility:
if not isinstance(version_str, str):
version_str = version_str.decode("ascii")
return version_str
# Separated out from have_compatible_glibc for easier unit testing. def platform_tags() -> Iterator[str]:
def _check_glibc_version(version_str, required_major, minimum_minor): """
# Parse string and check against requested version. Provides the platform tags for this installation.
# """
# We use a regexp instead of str.split because we want to discard any if platform.system() == "Darwin":
# random junk that might come after the minor version -- this might happen return mac_platforms()
# in patched/forked versions of glibc (e.g. Linaro's version of glibc elif platform.system() == "Linux":
# uses version strings like "2.20-2014.11"). See gh-3588. return _linux_platforms()
m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
if not m:
warnings.warn(
"Expected glibc version with 2 components major.minor,"
" got: %s" % version_str,
RuntimeWarning,
)
return False
return (
int(m.group("major")) == required_major
and int(m.group("minor")) >= minimum_minor
)
def _have_compatible_glibc(required_major, minimum_minor):
version_str = _glibc_version_string()
if version_str is None:
return False
return _check_glibc_version(version_str, required_major, minimum_minor)
def _linux_platforms(is_32bit=_32_BIT_INTERPRETER):
linux = _normalize_string(distutils.util.get_platform())
if linux == "linux_x86_64" and is_32bit:
linux = "linux_i686"
manylinux_support = (
("manylinux2014", (2, 17)), # CentOS 7 w/ glibc 2.17 (PEP 599)
("manylinux2010", (2, 12)), # CentOS 6 w/ glibc 2.12 (PEP 571)
("manylinux1", (2, 5)), # CentOS 5 w/ glibc 2.5 (PEP 513)
)
manylinux_support_iter = iter(manylinux_support)
for name, glibc_version in manylinux_support_iter:
if _is_manylinux_compatible(name, glibc_version):
platforms = [linux.replace("linux", name)]
break
else: else:
platforms = [] return _generic_platforms()
# Support for a later manylinux implies support for an earlier version.
platforms += [linux.replace("linux", name) for name, _ in manylinux_support_iter]
platforms.append(linux)
return platforms
def _generic_platforms(): def interpreter_name() -> str:
platform = _normalize_string(distutils.util.get_platform()) """
return [platform] Returns the name of the running interpreter.
Some implementations have a reserved, two-letter abbreviation which will
def _interpreter_name(): be returned when appropriate.
name = platform.python_implementation().lower() """
name = sys.implementation.name
return INTERPRETER_SHORT_NAMES.get(name) or name return INTERPRETER_SHORT_NAMES.get(name) or name
def _generic_interpreter(name, py_version): def interpreter_version(*, warn: bool = False) -> str:
version = sysconfig.get_config_var("py_version_nodot") """
if not version: Returns the version of the running interpreter.
version = "".join(map(str, py_version[:2])) """
return "{name}{version}".format(name=name, version=version) version = _get_config_var("py_version_nodot", warn=warn)
if version:
version = str(version)
else:
version = _version_nodot(sys.version_info[:2])
return version
def sys_tags(): def _version_nodot(version: PythonVersion) -> str:
return "".join(map(str, version))
def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
""" """
Returns the sequence of tag triples for the running interpreter. Returns the sequence of tag triples for the running interpreter.
The order of the sequence corresponds to priority order for the The order of the sequence corresponds to priority order for the
interpreter, from most to least important. interpreter, from most to least important.
""" """
py_version = sys.version_info[:2]
interpreter_name = _interpreter_name()
if platform.system() == "Darwin":
platforms = _mac_platforms()
elif platform.system() == "Linux":
platforms = _linux_platforms()
else:
platforms = _generic_platforms()
if interpreter_name == "cp": interp_name = interpreter_name()
interpreter = _cpython_interpreter(py_version) if interp_name == "cp":
abis = _cpython_abis(py_version) yield from cpython_tags(warn=warn)
for tag in _cpython_tags(py_version, interpreter, abis, platforms):
yield tag
elif interpreter_name == "pp":
interpreter = _pypy_interpreter()
abi = _generic_abi()
for tag in _pypy_tags(py_version, interpreter, abi, platforms):
yield tag
else: else:
interpreter = _generic_interpreter(interpreter_name, py_version) yield from generic_tags()
abi = _generic_abi()
for tag in _generic_tags(interpreter, py_version, abi, platforms): if interp_name == "pp":
yield tag interp = "pp3"
for tag in _independent_tags(interpreter, py_version, platforms): elif interp_name == "cp":
yield tag interp = "cp" + interpreter_version(warn=warn)
else:
interp = None
yield from compatible_tags(interpreter=interp)
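The typical consumer pattern; actual values depend entirely on the running interpreter and platform:

from packaging.tags import sys_tags

supported = list(sys_tags())  # Ordered from most to least specific.
print(supported[0])   # e.g. cp311-cp311-manylinux_2_17_x86_64 (machine-dependent)
print(supported[-1])  # The least specific compatible tag, e.g. py30-none-any.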
@@ -1,57 +1,141 @@
# This file is dual licensed under the terms of the Apache License, Version # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository # 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details. # for complete details.
from __future__ import absolute_import, division, print_function
import re import re
from typing import FrozenSet, NewType, Tuple, Union, cast
from .tags import Tag, parse_tag
from .version import InvalidVersion, Version from .version import InvalidVersion, Version
BuildTag = Union[Tuple[()], Tuple[int, str]]
NormalizedName = NewType("NormalizedName", str)
class InvalidWheelFilename(ValueError):
"""
An invalid wheel filename was found, users should refer to PEP 427.
"""
class InvalidSdistFilename(ValueError):
"""
An invalid sdist filename was found, users should refer to the packaging user guide.
"""
_canonicalize_regex = re.compile(r"[-_.]+") _canonicalize_regex = re.compile(r"[-_.]+")
# PEP 427: The build number must start with a digit.
_build_tag_regex = re.compile(r"(\d+)(.*)")
def canonicalize_name(name): def canonicalize_name(name: str) -> NormalizedName:
# This is taken from PEP 503. # This is taken from PEP 503.
return _canonicalize_regex.sub("-", name).lower() value = _canonicalize_regex.sub("-", name).lower()
return cast(NormalizedName, value)
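PEP 503 normalization in action:

from packaging.utils import canonicalize_name

print(canonicalize_name("Friendly-Bard"))   # friendly-bard
print(canonicalize_name("FRIENDLY..BARD"))  # friendly-bard
print(canonicalize_name("friendly_bard"))   # friendly-bard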
def canonicalize_version(version): def canonicalize_version(
version: Union[Version, str], *, strip_trailing_zero: bool = True
) -> str:
""" """
This is very similar to Version.__str__, but has one subtle differences This is very similar to Version.__str__, but has one subtle difference
with the way it handles the release segment. with the way it handles the release segment.
""" """
if isinstance(version, str):
try: try:
version = Version(version) parsed = Version(version)
except InvalidVersion: except InvalidVersion:
# Legacy versions cannot be normalized # Legacy versions cannot be normalized
return version return version
else:
parsed = version
parts = [] parts = []
# Epoch # Epoch
if version.epoch != 0: if parsed.epoch != 0:
parts.append("{0}!".format(version.epoch)) parts.append(f"{parsed.epoch}!")
# Release segment # Release segment
# NB: This strips trailing '.0's to normalize release_segment = ".".join(str(x) for x in parsed.release)
parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in version.release))) if strip_trailing_zero:
# NB: This strips trailing '.0's to normalize
release_segment = re.sub(r"(\.0)+$", "", release_segment)
parts.append(release_segment)
# Pre-release # Pre-release
if version.pre is not None: if parsed.pre is not None:
parts.append("".join(str(x) for x in version.pre)) parts.append("".join(str(x) for x in parsed.pre))
# Post-release # Post-release
if version.post is not None: if parsed.post is not None:
parts.append(".post{0}".format(version.post)) parts.append(f".post{parsed.post}")
# Development release # Development release
if version.dev is not None: if parsed.dev is not None:
parts.append(".dev{0}".format(version.dev)) parts.append(f".dev{parsed.dev}")
# Local version segment # Local version segment
if version.local is not None: if parsed.local is not None:
parts.append("+{0}".format(version.local)) parts.append(f"+{parsed.local}")
return "".join(parts) return "".join(parts)
def parse_wheel_filename(
filename: str,
) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
if not filename.endswith(".whl"):
raise InvalidWheelFilename(
f"Invalid wheel filename (extension must be '.whl'): {filename}"
)
filename = filename[:-4]
dashes = filename.count("-")
if dashes not in (4, 5):
raise InvalidWheelFilename(
f"Invalid wheel filename (wrong number of parts): {filename}"
)
parts = filename.split("-", dashes - 2)
name_part = parts[0]
# See PEP 427 for the rules on escaping the project name
if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
raise InvalidWheelFilename(f"Invalid project name: {filename}")
name = canonicalize_name(name_part)
version = Version(parts[1])
if dashes == 5:
build_part = parts[2]
build_match = _build_tag_regex.match(build_part)
if build_match is None:
raise InvalidWheelFilename(
f"Invalid build number: {build_part} in '{filename}'"
)
build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
else:
build = ()
tags = parse_tag(parts[-1])
return (name, version, build, tags)
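Sketch of the new wheel-filename parser (the filename is arbitrary):

from packaging.utils import parse_wheel_filename

name, version, build, tags = parse_wheel_filename("packaging-23.2-py3-none-any.whl")
print(name, version, build, [str(t) for t in tags])
# packaging 23.2 () ['py3-none-any']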
def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
if filename.endswith(".tar.gz"):
file_stem = filename[: -len(".tar.gz")]
elif filename.endswith(".zip"):
file_stem = filename[: -len(".zip")]
else:
raise InvalidSdistFilename(
f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
f" {filename}"
)
# We are requiring a PEP 440 version, which cannot contain dashes,
# so we split on the last dash.
name_part, sep, version_part = file_stem.rpartition("-")
if not sep:
raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")
name = canonicalize_name(name_part)
version = Version(version_part)
return (name, version)
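And its sdist counterpart:

from packaging.utils import parse_sdist_filename

print(parse_sdist_filename("packaging-23.2.tar.gz"))
# ('packaging', <Version('23.2')>)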
@@ -1,184 +1,116 @@
# This file is dual licensed under the terms of the Apache License, Version # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository # 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details. # for complete details.
from __future__ import absolute_import, division, print_function """
.. testsetup::
from packaging.version import parse, Version
"""
import collections import collections
import itertools import itertools
import re import re
from typing import Any, Callable, Optional, SupportsInt, Tuple, Union
from ._structures import Infinity from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"] InfiniteTypes = Union[InfinityType, NegativeInfinityType]
PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
SubLocalType = Union[InfiniteTypes, int, str]
LocalType = Union[
NegativeInfinityType,
Tuple[
Union[
SubLocalType,
Tuple[SubLocalType, str],
Tuple[NegativeInfinityType, SubLocalType],
],
...,
],
]
CmpKey = Tuple[
int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
]
VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
_Version = collections.namedtuple( _Version = collections.namedtuple(
"_Version", ["epoch", "release", "dev", "pre", "post", "local"] "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
) )
def parse(version): def parse(version: str) -> "Version":
"""Parse the given version string.
>>> parse('1.0.dev1')
<Version('1.0.dev1')>
:param version: The version string to parse.
:raises InvalidVersion: When the version string is not a valid version.
""" """
Parse the given version string and return either a :class:`Version` object return Version(version)
or a :class:`LegacyVersion` object depending on if the given version is
a valid PEP 440 version or a legacy version.
"""
try:
return Version(version)
except InvalidVersion:
return LegacyVersion(version)
class InvalidVersion(ValueError): class InvalidVersion(ValueError):
""" """Raised when a version string is not a valid version.
An invalid version was found, users should refer to PEP 440.
>>> Version("invalid")
Traceback (most recent call last):
...
packaging.version.InvalidVersion: Invalid version: 'invalid'
""" """
class _BaseVersion(object): class _BaseVersion:
def __hash__(self): _key: Tuple[Any, ...]
def __hash__(self) -> int:
return hash(self._key) return hash(self._key)
def __lt__(self, other): # Please keep the duplicated `isinstance` check
return self._compare(other, lambda s, o: s < o) # in the six comparisons hereunder
# unless you find a way to avoid adding overhead function calls.
def __le__(self, other): def __lt__(self, other: "_BaseVersion") -> bool:
return self._compare(other, lambda s, o: s <= o)
def __eq__(self, other):
return self._compare(other, lambda s, o: s == o)
def __ge__(self, other):
return self._compare(other, lambda s, o: s >= o)
def __gt__(self, other):
return self._compare(other, lambda s, o: s > o)
def __ne__(self, other):
return self._compare(other, lambda s, o: s != o)
def _compare(self, other, method):
if not isinstance(other, _BaseVersion): if not isinstance(other, _BaseVersion):
return NotImplemented return NotImplemented
return method(self._key, other._key) return self._key < other._key
def __le__(self, other: "_BaseVersion") -> bool:
if not isinstance(other, _BaseVersion):
return NotImplemented
class LegacyVersion(_BaseVersion): return self._key <= other._key
def __init__(self, version):
self._version = str(version)
self._key = _legacy_cmpkey(self._version)
def __str__(self): def __eq__(self, other: object) -> bool:
return self._version if not isinstance(other, _BaseVersion):
return NotImplemented
def __repr__(self): return self._key == other._key
return "<LegacyVersion({0})>".format(repr(str(self)))
@property def __ge__(self, other: "_BaseVersion") -> bool:
def public(self): if not isinstance(other, _BaseVersion):
return self._version return NotImplemented
@property return self._key >= other._key
def base_version(self):
return self._version
@property def __gt__(self, other: "_BaseVersion") -> bool:
def epoch(self): if not isinstance(other, _BaseVersion):
return -1 return NotImplemented
@property return self._key > other._key
def release(self):
return None
@property def __ne__(self, other: object) -> bool:
def pre(self): if not isinstance(other, _BaseVersion):
return None return NotImplemented
@property return self._key != other._key
def post(self):
return None
@property
def dev(self):
return None
@property
def local(self):
return None
@property
def is_prerelease(self):
return False
@property
def is_postrelease(self):
return False
@property
def is_devrelease(self):
return False
_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
_legacy_version_replacement_map = {
"pre": "c",
"preview": "c",
"-": "final-",
"rc": "c",
"dev": "@",
}
def _parse_version_parts(s):
for part in _legacy_version_component_re.split(s):
part = _legacy_version_replacement_map.get(part, part)
if not part or part == ".":
continue
if part[:1] in "0123456789":
# pad for numeric comparison
yield part.zfill(8)
else:
yield "*" + part
# ensure that alpha/beta/candidate are before final
yield "*final"
def _legacy_cmpkey(version):
# We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch
# greater than or equal to 0. This will effectively put the LegacyVersion,
# which uses the defacto standard originally implemented by setuptools,
# as before all PEP 440 versions.
epoch = -1
# This scheme is taken from pkg_resources.parse_version setuptools prior to
# it's adoption of the packaging library.
parts = []
for part in _parse_version_parts(version.lower()):
if part.startswith("*"):
# remove "-" before a prerelease tag
if part < "*final":
while parts and parts[-1] == "*final-":
parts.pop()
# remove trailing zeros from each series of numeric parts
while parts and parts[-1] == "00000000":
parts.pop()
parts.append(part)
parts = tuple(parts)
return epoch, parts
# Deliberately not anchored to the start and end of the string, to make it # Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse # easier for 3rd party code to reuse
VERSION_PATTERN = r""" _VERSION_PATTERN = r"""
v? v?
(?: (?:
(?:(?P<epoch>[0-9]+)!)? # epoch (?:(?P<epoch>[0-9]+)!)? # epoch
@@ -209,16 +141,61 @@ VERSION_PATTERN = r"""
(?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
""" """
VERSION_PATTERN = _VERSION_PATTERN
"""
A string containing the regular expression used to match a valid version.
The pattern is not anchored at either end, and is intended for embedding in larger
expressions (for example, matching a version number as part of a file name). The
regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
flags set.
:meta hide-value:
"""
class Version(_BaseVersion): class Version(_BaseVersion):
"""This class abstracts handling of a project's versions.
A :class:`Version` instance is comparison aware and can be compared and
sorted using the standard Python interfaces.
>>> v1 = Version("1.0a5")
>>> v2 = Version("1.0")
>>> v1
<Version('1.0a5')>
>>> v2
<Version('1.0')>
>>> v1 < v2
True
>>> v1 == v2
False
>>> v1 > v2
False
>>> v1 >= v2
False
>>> v1 <= v2
True
"""
_regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE) _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
_key: CmpKey
def __init__(self, version: str) -> None:
"""Initialize a Version object.
:param version:
The string representation of a version which will be parsed and normalized
before use.
:raises InvalidVersion:
If the ``version`` does not conform to PEP 440 in any way then this
exception will be raised.
"""
def __init__(self, version):
# Validate the version and parse it into pieces # Validate the version and parse it into pieces
match = self._regex.search(version) match = self._regex.search(version)
if not match: if not match:
raise InvalidVersion("Invalid version: '{0}'".format(version)) raise InvalidVersion(f"Invalid version: '{version}'")
# Store the parsed out pieces of the version # Store the parsed out pieces of the version
self._version = _Version( self._version = _Version(
@@ -242,15 +219,25 @@ class Version(_BaseVersion):
self._version.local, self._version.local,
) )
def __repr__(self): def __repr__(self) -> str:
return "<Version({0})>".format(repr(str(self))) """A representation of the Version that shows all internal state.
def __str__(self): >>> Version('1.0.0')
<Version('1.0.0')>
"""
return f"<Version('{self}')>"
def __str__(self) -> str:
"""A string representation of the version that can be rounded-tripped.
>>> str(Version("1.0a5"))
'1.0a5'
"""
parts = [] parts = []
# Epoch # Epoch
if self.epoch != 0: if self.epoch != 0:
parts.append("{0}!".format(self.epoch)) parts.append(f"{self.epoch}!")
# Release segment # Release segment
parts.append(".".join(str(x) for x in self.release)) parts.append(".".join(str(x) for x in self.release))
@@ -261,56 +248,131 @@ class Version(_BaseVersion):
# Post-release # Post-release
if self.post is not None: if self.post is not None:
parts.append(".post{0}".format(self.post)) parts.append(f".post{self.post}")
# Development release # Development release
if self.dev is not None: if self.dev is not None:
parts.append(".dev{0}".format(self.dev)) parts.append(f".dev{self.dev}")
# Local version segment # Local version segment
if self.local is not None: if self.local is not None:
parts.append("+{0}".format(self.local)) parts.append(f"+{self.local}")
return "".join(parts) return "".join(parts)
@property @property
def epoch(self): def epoch(self) -> int:
return self._version.epoch """The epoch of the version.
>>> Version("2.0.0").epoch
0
>>> Version("1!2.0.0").epoch
1
"""
_epoch: int = self._version.epoch
return _epoch
@property @property
def release(self): def release(self) -> Tuple[int, ...]:
return self._version.release """The components of the "release" segment of the version.
>>> Version("1.2.3").release
(1, 2, 3)
>>> Version("2.0.0").release
(2, 0, 0)
>>> Version("1!2.0.0.post0").release
(2, 0, 0)
Includes trailing zeroes but not the epoch or any pre-release / development /
post-release suffixes.
"""
_release: Tuple[int, ...] = self._version.release
return _release
@property @property
def pre(self): def pre(self) -> Optional[Tuple[str, int]]:
return self._version.pre """The pre-release segment of the version.
>>> print(Version("1.2.3").pre)
None
>>> Version("1.2.3a1").pre
('a', 1)
>>> Version("1.2.3b1").pre
('b', 1)
>>> Version("1.2.3rc1").pre
('rc', 1)
"""
_pre: Optional[Tuple[str, int]] = self._version.pre
return _pre
@property @property
def post(self): def post(self) -> Optional[int]:
"""The post-release number of the version.
>>> print(Version("1.2.3").post)
None
>>> Version("1.2.3.post1").post
1
"""
return self._version.post[1] if self._version.post else None return self._version.post[1] if self._version.post else None
@property @property
def dev(self): def dev(self) -> Optional[int]:
"""The development number of the version.
>>> print(Version("1.2.3").dev)
None
>>> Version("1.2.3.dev1").dev
1
"""
return self._version.dev[1] if self._version.dev else None return self._version.dev[1] if self._version.dev else None
@property @property
def local(self): def local(self) -> Optional[str]:
"""The local version segment of the version.
>>> print(Version("1.2.3").local)
None
>>> Version("1.2.3+abc").local
'abc'
"""
if self._version.local: if self._version.local:
return ".".join(str(x) for x in self._version.local) return ".".join(str(x) for x in self._version.local)
else: else:
return None return None
@property @property
def public(self): def public(self) -> str:
"""The public portion of the version.
>>> Version("1.2.3").public
'1.2.3'
>>> Version("1.2.3+abc").public
'1.2.3'
>>> Version("1.2.3+abc.dev1").public
'1.2.3'
"""
return str(self).split("+", 1)[0] return str(self).split("+", 1)[0]
@property @property
def base_version(self): def base_version(self) -> str:
"""The "base version" of the version.
>>> Version("1.2.3").base_version
'1.2.3'
>>> Version("1.2.3+abc").base_version
'1.2.3'
>>> Version("1!1.2.3+abc.dev1").base_version
'1!1.2.3'
The "base version" is the public version of the project without any pre or post
release markers.
"""
parts = [] parts = []
# Epoch # Epoch
if self.epoch != 0: if self.epoch != 0:
parts.append("{0}!".format(self.epoch)) parts.append(f"{self.epoch}!")
# Release segment # Release segment
parts.append(".".join(str(x) for x in self.release)) parts.append(".".join(str(x) for x in self.release))
@@ -318,19 +380,80 @@ class Version(_BaseVersion):
return "".join(parts) return "".join(parts)
@property @property
def is_prerelease(self): def is_prerelease(self) -> bool:
"""Whether this version is a pre-release.
>>> Version("1.2.3").is_prerelease
False
>>> Version("1.2.3a1").is_prerelease
True
>>> Version("1.2.3b1").is_prerelease
True
>>> Version("1.2.3rc1").is_prerelease
True
>>> Version("1.2.3dev1").is_prerelease
True
"""
return self.dev is not None or self.pre is not None return self.dev is not None or self.pre is not None
@property @property
def is_postrelease(self): def is_postrelease(self) -> bool:
"""Whether this version is a post-release.
>>> Version("1.2.3").is_postrelease
False
>>> Version("1.2.3.post1").is_postrelease
True
"""
return self.post is not None return self.post is not None
@property @property
def is_devrelease(self): def is_devrelease(self) -> bool:
"""Whether this version is a development release.
>>> Version("1.2.3").is_devrelease
False
>>> Version("1.2.3.dev1").is_devrelease
True
"""
return self.dev is not None return self.dev is not None
@property
def major(self) -> int:
"""The first item of :attr:`release` or ``0`` if unavailable.
>>> Version("1.2.3").major
1
"""
return self.release[0] if len(self.release) >= 1 else 0
@property
def minor(self) -> int:
"""The second item of :attr:`release` or ``0`` if unavailable.
>>> Version("1.2.3").minor
2
>>> Version("1").minor
0
"""
return self.release[1] if len(self.release) >= 2 else 0
@property
def micro(self) -> int:
"""The third item of :attr:`release` or ``0`` if unavailable.
>>> Version("1.2.3").micro
3
>>> Version("1").micro
0
"""
return self.release[2] if len(self.release) >= 3 else 0
def _parse_letter_version(
letter: str, number: Union[str, bytes, SupportsInt]
) -> Optional[Tuple[str, int]]:
def _parse_letter_version(letter, number):
if letter: if letter:
# We consider there to be an implicit 0 in a pre-release if there is # We consider there to be an implicit 0 in a pre-release if there is
# not a numeral associated with it. # not a numeral associated with it.
@@ -360,11 +483,13 @@ def _parse_letter_version(letter, number):
return letter, int(number) return letter, int(number)
return None
_local_version_separators = re.compile(r"[\._-]") _local_version_separators = re.compile(r"[\._-]")
def _parse_local_version(local): def _parse_local_version(local: str) -> Optional[LocalType]:
""" """
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve"). Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
""" """
@@ -373,15 +498,24 @@ def _parse_local_version(local):
part.lower() if not part.isdigit() else int(part) part.lower() if not part.isdigit() else int(part)
for part in _local_version_separators.split(local) for part in _local_version_separators.split(local)
) )
return None
def _cmpkey(epoch, release, pre, post, dev, local): def _cmpkey(
epoch: int,
release: Tuple[int, ...],
pre: Optional[Tuple[str, int]],
post: Optional[Tuple[str, int]],
dev: Optional[Tuple[str, int]],
local: Optional[Tuple[SubLocalType]],
) -> CmpKey:
# When we compare a release version, we want to compare it with all of the # When we compare a release version, we want to compare it with all of the
# trailing zeros removed. So we'll use a reverse the list, drop all the now # trailing zeros removed. So we'll use a reverse the list, drop all the now
# leading zeros until we come to something non zero, then take the rest # leading zeros until we come to something non zero, then take the rest
# re-reverse it back into the correct order and make it a tuple and use # re-reverse it back into the correct order and make it a tuple and use
# that for our sorting key. # that for our sorting key.
release = tuple( _release = tuple(
reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release)))) reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
) )
@@ -390,23 +524,31 @@ def _cmpkey(epoch, release, pre, post, dev, local):
# if there is not a pre or a post segment. If we have one of those then # if there is not a pre or a post segment. If we have one of those then
# the normal sorting rules will handle this case correctly. # the normal sorting rules will handle this case correctly.
if pre is None and post is None and dev is not None: if pre is None and post is None and dev is not None:
pre = -Infinity _pre: PrePostDevType = NegativeInfinity
# Versions without a pre-release (except as noted above) should sort after # Versions without a pre-release (except as noted above) should sort after
# those with one. # those with one.
elif pre is None: elif pre is None:
pre = Infinity _pre = Infinity
else:
_pre = pre
# Versions without a post segment should sort before those with one. # Versions without a post segment should sort before those with one.
if post is None: if post is None:
post = -Infinity _post: PrePostDevType = NegativeInfinity
else:
_post = post
# Versions without a development segment should sort after those with one. # Versions without a development segment should sort after those with one.
if dev is None: if dev is None:
dev = Infinity _dev: PrePostDevType = Infinity
else:
_dev = dev
if local is None: if local is None:
# Versions without a local segment should sort before those with one. # Versions without a local segment should sort before those with one.
local = -Infinity _local: LocalType = NegativeInfinity
else: else:
# Versions with a local segment need that segment parsed to implement # Versions with a local segment need that segment parsed to implement
# the sorting rules in PEP440. # the sorting rules in PEP440.
@@ -415,6 +557,8 @@ def _cmpkey(epoch, release, pre, post, dev, local):
# - Numeric segments sort numerically # - Numeric segments sort numerically
# - Shorter versions sort before longer versions when the prefixes # - Shorter versions sort before longer versions when the prefixes
# match exactly # match exactly
local = tuple((i, "") if isinstance(i, int) else (-Infinity, i) for i in local) _local = tuple(
(i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
)
return epoch, release, pre, post, dev, local return epoch, _release, _pre, _post, _dev, _local
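A sorting sketch exercising the comparison key above:

from packaging.version import Version

versions = ["1.0.post1", "1.0", "1.0rc1", "1.0a1", "1.0.dev1", "1.1"]
print(sorted(versions, key=Version))
# ['1.0.dev1', '1.0a1', '1.0rc1', '1.0', '1.0.post1', '1.1']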
File diff suppressed because it is too large
@@ -1,868 +0,0 @@
"""Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2015 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
import functools
import itertools
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.10.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PY34 = sys.version_info[0:2] >= (3, 4)
if PY3:
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
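A note on the probe above: len() must return a value that fits in the C type Py_ssize_t, so returning 1 << 31 from __len__ raises OverflowError exactly on 32-bit interpreters. A standalone sketch of the same trick (an illustration, not part of the deleted file):

class _Probe(object):
    def __len__(self):
        return 1 << 31

try:
    len(_Probe())
    word_size = 64   # no overflow: Py_ssize_t is wider than 32 bits
except OverflowError:
    word_size = 32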
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result) # Invokes __set__.
try:
# This is a bit ugly, but it avoids running this again by
# removing this descriptor.
delattr(obj.__class__, self.name)
except AttributeError:
pass
return result
class MovedModule(_LazyDescr):
def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if new is None:
new = name
self.mod = new
else:
self.mod = old
def _resolve(self):
return _import_module(self.mod)
def __getattr__(self, attr):
_module = self._resolve()
value = getattr(_module, attr)
setattr(self, attr, value)
return value
class _LazyModule(types.ModuleType):
def __init__(self, name):
super(_LazyModule, self).__init__(name)
self.__doc__ = self.__class__.__doc__
def __dir__(self):
attrs = ["__doc__", "__name__"]
attrs += [attr.name for attr in self._moved_attributes]
return attrs
# Subclasses should override this
_moved_attributes = []
class MovedAttribute(_LazyDescr):
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr
def _resolve(self):
module = _import_module(self.mod)
return getattr(module, self.attr)
class _SixMetaPathImporter(object):
"""
A meta path importer to import six.moves and its submodules.
This class implements a PEP302 finder and loader. It should be compatible
with Python 2.5 and all existing versions of Python3
"""
def __init__(self, six_module_name):
self.name = six_module_name
self.known_modules = {}
def _add_module(self, mod, *fullnames):
for fullname in fullnames:
self.known_modules[self.name + "." + fullname] = mod
def _get_module(self, fullname):
return self.known_modules[self.name + "." + fullname]
def find_module(self, fullname, path=None):
if fullname in self.known_modules:
return self
return None
def __get_module(self, fullname):
try:
return self.known_modules[fullname]
except KeyError:
raise ImportError("This loader does not know module " + fullname)
def load_module(self, fullname):
try:
# in case of a reload
return sys.modules[fullname]
except KeyError:
pass
mod = self.__get_module(fullname)
if isinstance(mod, MovedModule):
mod = mod._resolve()
else:
mod.__loader__ = self
sys.modules[fullname] = mod
return mod
def is_package(self, fullname):
"""
Return true, if the named module is a package.
We need this method to get correct spec objects with
Python 3.4 (see PEP451)
"""
return hasattr(self.__get_module(fullname), "__path__")
def get_code(self, fullname):
"""Return None
Required, if is_package is implemented"""
self.__get_module(fullname) # eventually raises ImportError
return None
get_source = get_code # same as get_code
_importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
"""Lazy loading of moved objects"""
__path__ = [] # mark as package
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserDict", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
# Add windows specific modules.
if sys.platform == "win32":
_moved_attributes += [
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
_importer._add_module(attr, "moves." + attr.name)
del attr
_MovedItems._moved_attributes = _moved_attributes
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
MovedAttribute("quote", "urllib", "urllib.parse"),
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
"moves.urllib_parse", "moves.urllib.parse")
class Module_six_moves_urllib_error(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_error"""
_urllib_error_moved_attributes = [
MovedAttribute("URLError", "urllib2", "urllib.error"),
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
"moves.urllib_error", "moves.urllib.error")
class Module_six_moves_urllib_request(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_request"""
_urllib_request_moved_attributes = [
MovedAttribute("urlopen", "urllib2", "urllib.request"),
MovedAttribute("install_opener", "urllib2", "urllib.request"),
MovedAttribute("build_opener", "urllib2", "urllib.request"),
MovedAttribute("pathname2url", "urllib", "urllib.request"),
MovedAttribute("url2pathname", "urllib", "urllib.request"),
MovedAttribute("getproxies", "urllib", "urllib.request"),
MovedAttribute("Request", "urllib2", "urllib.request"),
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
"moves.urllib_request", "moves.urllib.request")
class Module_six_moves_urllib_response(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_response"""
_urllib_response_moved_attributes = [
MovedAttribute("addbase", "urllib", "urllib.response"),
MovedAttribute("addclosehook", "urllib", "urllib.response"),
MovedAttribute("addinfo", "urllib", "urllib.response"),
MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
"moves.urllib_response", "moves.urllib.response")
class Module_six_moves_urllib_robotparser(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
_urllib_robotparser_moved_attributes = [
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
"moves.urllib_robotparser", "moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
__path__ = [] # mark as package
parse = _importer._get_module("moves.urllib_parse")
error = _importer._get_module("moves.urllib_error")
request = _importer._get_module("moves.urllib_request")
response = _importer._get_module("moves.urllib_response")
robotparser = _importer._get_module("moves.urllib_robotparser")
def __dir__(self):
return ['parse', 'error', 'request', 'response', 'robotparser']
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
"moves.urllib")
def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move)
def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,))
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_closure = "__closure__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_closure = "func_closure"
_func_code = "func_code"
_func_defaults = "func_defaults"
_func_globals = "func_globals"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
def get_unbound_function(unbound):
return unbound
create_bound_method = types.MethodType
def create_unbound_method(func, cls):
return func
Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
def create_unbound_method(func, cls):
return types.MethodType(func, None, cls)
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(get_unbound_function,
"""Get the function out of a possibly unbound function""")
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
if PY3:
def iterkeys(d, **kw):
return iter(d.keys(**kw))
def itervalues(d, **kw):
return iter(d.values(**kw))
def iteritems(d, **kw):
return iter(d.items(**kw))
def iterlists(d, **kw):
return iter(d.lists(**kw))
viewkeys = operator.methodcaller("keys")
viewvalues = operator.methodcaller("values")
viewitems = operator.methodcaller("items")
else:
def iterkeys(d, **kw):
return d.iterkeys(**kw)
def itervalues(d, **kw):
return d.itervalues(**kw)
def iteritems(d, **kw):
return d.iteritems(**kw)
def iterlists(d, **kw):
return d.iterlists(**kw)
viewkeys = operator.methodcaller("viewkeys")
viewvalues = operator.methodcaller("viewvalues")
viewitems = operator.methodcaller("viewitems")
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
"Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
"Return an iterator over the (key, [values]) pairs of a dictionary.")
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
unichr = chr
import struct
int2byte = struct.Struct(">B").pack
del struct
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
_assertCountEqual = "assertCountEqual"
if sys.version_info[1] <= 1:
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
else:
_assertRaisesRegex = "assertRaisesRegex"
_assertRegex = "assertRegex"
else:
def b(s):
return s
# Workaround for standalone backslash
def u(s):
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr
def byte2int(bs):
return ord(bs[0])
def indexbytes(buf, i):
return ord(buf[i])
iterbytes = functools.partial(itertools.imap, ord)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_assertCountEqual = "assertItemsEqual"
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
def assertCountEqual(self, *args, **kwargs):
return getattr(self, _assertCountEqual)(*args, **kwargs)
def assertRaisesRegex(self, *args, **kwargs):
return getattr(self, _assertRaisesRegex)(*args, **kwargs)
def assertRegex(self, *args, **kwargs):
return getattr(self, _assertRegex)(*args, **kwargs)
if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")
if sys.version_info[:2] == (3, 2):
exec_("""def raise_from(value, from_value):
if from_value is None:
raise value
raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
exec_("""def raise_from(value, from_value):
raise value from from_value
""")
else:
def raise_from(value, from_value):
raise value
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):
"""The new-style print function for Python 2.4 and 2.5."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (isinstance(fp, file) and
isinstance(data, unicode) and
fp.encoding is not None):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
data = data.encode(fp.encoding, errors)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
if kwargs:
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
newline = unicode("\n")
space = unicode(" ")
else:
newline = "\n"
space = " "
if sep is None:
sep = space
if end is None:
end = newline
for i, arg in enumerate(args):
if i:
write(sep)
write(arg)
write(end)
if sys.version_info[:2] < (3, 3):
_print = print_
def print_(*args, **kwargs):
fp = kwargs.get("file", sys.stdout)
flush = kwargs.pop("flush", False)
_print(*args, **kwargs)
if flush and fp is not None:
fp.flush()
_add_doc(reraise, """Reraise an exception.""")
if sys.version_info[0:2] < (3, 4):
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES):
def wrapper(f):
f = functools.wraps(wrapped, assigned, updated)(f)
f.__wrapped__ = wrapped
return f
return wrapper
else:
wraps = functools.wraps
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
def python_2_unicode_compatible(klass):
"""
A decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
"""
if PY2:
if '__str__' not in klass.__dict__:
raise ValueError("@python_2_unicode_compatible cannot be applied "
"to %s because it doesn't define __str__()." %
klass.__name__)
klass.__unicode__ = klass.__str__
klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
return klass
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = [] # required for PEP 302 and PEP 451
__package__ = __name__ # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
for i, importer in enumerate(sys.meta_path):
# Here's some real nastiness: Another "instance" of the six module might
# be floating around. Therefore, we can't use isinstance() to check for
# the six meta path importer, since the other six instance will have
# inserted an importer with different class.
if (type(importer).__name__ == "_SixMetaPathImporter" and
importer.name == __name__):
del sys.meta_path[i]
break
del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
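For context, the module deleted above was typically consumed like this by 2/3-compatible code. A hypothetical usage sketch (it requires six to be installed, which is exactly what this commit stops vendoring):

from six import with_metaclass, add_metaclass, moves

class Meta(type):
    pass

class WithMeta(with_metaclass(Meta, object)):
    # Same metaclass on Python 2 and 3 without version-specific syntax.
    pass

@add_metaclass(Meta)
class AlsoWithMeta(object):
    pass

# moves resolves to urlparse on Python 2 and urllib.parse on Python 3.
urlparse = moves.urllib.parse.urlparse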
@@ -8,11 +8,16 @@ import posixpath
 import contextlib
 from distutils.errors import DistutilsError

-from pkg_resources import ensure_directory
+from ._path import ensure_directory

 __all__ = [
-    "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
-    "UnrecognizedFormat", "extraction_drivers", "unpack_directory",
+    "unpack_archive",
+    "unpack_zipfile",
+    "unpack_tarfile",
+    "default_filter",
+    "UnrecognizedFormat",
+    "extraction_drivers",
+    "unpack_directory",
 ]

@@ -25,8 +30,7 @@ def default_filter(src, dst):
     return dst


-def unpack_archive(filename, extract_dir, progress_filter=default_filter,
-                   drivers=None):
+def unpack_archive(filename, extract_dir, progress_filter=default_filter, drivers=None):
     """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``

     `progress_filter` is a function taking two arguments: a source path

@@ -55,13 +59,11 @@ def unpack_archive(filename, extract_dir, progress_filter=default_filter,
         else:
             return
     else:
-        raise UnrecognizedFormat(
-            "Not a recognized archive type: %s" % filename
-        )
+        raise UnrecognizedFormat("Not a recognized archive type: %s" % filename)


 def unpack_directory(filename, extract_dir, progress_filter=default_filter):
-    """"Unpack" a directory, using the same interface as for archives
+    """ "Unpack" a directory, using the same interface as for archives

     Raises ``UnrecognizedFormat`` if `filename` is not a directory
     """

@@ -99,29 +101,87 @@ def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
         raise UnrecognizedFormat("%s is not a zip file" % (filename,))

     with zipfile.ZipFile(filename) as z:
-        for info in z.infolist():
-            name = info.filename
-
-            # don't extract absolute paths or ones with .. in them
-            if name.startswith('/') or '..' in name.split('/'):
-                continue
-
-            target = os.path.join(extract_dir, *name.split('/'))
-            target = progress_filter(name, target)
-            if not target:
-                continue
-            if name.endswith('/'):
-                # directory
-                ensure_directory(target)
-            else:
-                # file
-                ensure_directory(target)
-                data = z.read(info.filename)
-                with open(target, 'wb') as f:
-                    f.write(data)
-            unix_attributes = info.external_attr >> 16
-            if unix_attributes:
-                os.chmod(target, unix_attributes)
+        _unpack_zipfile_obj(z, extract_dir, progress_filter)
+
+
+def _unpack_zipfile_obj(zipfile_obj, extract_dir, progress_filter=default_filter):
+    """Internal/private API used by other parts of setuptools.
+    Similar to ``unpack_zipfile``, but receives an already opened
+    :obj:`zipfile.ZipFile` object instead of a filename.
+    """
+    for info in zipfile_obj.infolist():
+        name = info.filename
+
+        # don't extract absolute paths or ones with .. in them
+        if name.startswith('/') or '..' in name.split('/'):
+            continue
+
+        target = os.path.join(extract_dir, *name.split('/'))
+        target = progress_filter(name, target)
+        if not target:
+            continue
+        if name.endswith('/'):
+            # directory
+            ensure_directory(target)
+        else:
+            # file
+            ensure_directory(target)
+            data = zipfile_obj.read(info.filename)
+            with open(target, 'wb') as f:
+                f.write(data)
+        unix_attributes = info.external_attr >> 16
+        if unix_attributes:
+            os.chmod(target, unix_attributes)
+
+
+def _resolve_tar_file_or_dir(tar_obj, tar_member_obj):
+    """Resolve any links and extract link targets as normal files."""
+    while tar_member_obj is not None and (
+        tar_member_obj.islnk() or tar_member_obj.issym()
+    ):
+        linkpath = tar_member_obj.linkname
+        if tar_member_obj.issym():
+            base = posixpath.dirname(tar_member_obj.name)
+            linkpath = posixpath.join(base, linkpath)
+            linkpath = posixpath.normpath(linkpath)
+        tar_member_obj = tar_obj._getmember(linkpath)
+
+    is_file_or_dir = tar_member_obj is not None and (
+        tar_member_obj.isfile() or tar_member_obj.isdir()
+    )
+    if is_file_or_dir:
+        return tar_member_obj
+
+    raise LookupError('Got unknown file type')
+
+
+def _iter_open_tar(tar_obj, extract_dir, progress_filter):
+    """Emit member-destination pairs from a tar archive."""
+    # don't do any chowning!
+    tar_obj.chown = lambda *args: None
+
+    with contextlib.closing(tar_obj):
+        for member in tar_obj:
+            name = member.name
+            # don't extract absolute paths or ones with .. in them
+            if name.startswith('/') or '..' in name.split('/'):
+                continue
+
+            prelim_dst = os.path.join(extract_dir, *name.split('/'))
+
+            try:
+                member = _resolve_tar_file_or_dir(tar_obj, member)
+            except LookupError:
+                continue
+
+            final_dst = progress_filter(name, prelim_dst)
+            if not final_dst:
+                continue
+
+            if final_dst.endswith(os.sep):
+                final_dst = final_dst[:-1]
+
+            yield member, final_dst


 def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):

@@ -133,41 +193,24 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
     """
     try:
         tarobj = tarfile.open(filename)
-    except tarfile.TarError:
+    except tarfile.TarError as e:
         raise UnrecognizedFormat(
             "%s is not a compressed or uncompressed tar file" % (filename,)
-        )
-    with contextlib.closing(tarobj):
-        # don't do any chowning!
-        tarobj.chown = lambda *args: None
-        for member in tarobj:
-            name = member.name
-            # don't extract absolute paths or ones with .. in them
-            if not name.startswith('/') and '..' not in name.split('/'):
-                prelim_dst = os.path.join(extract_dir, *name.split('/'))
-
-                # resolve any links and to extract the link targets as normal
-                # files
-                while member is not None and (member.islnk() or member.issym()):
-                    linkpath = member.linkname
-                    if member.issym():
-                        base = posixpath.dirname(member.name)
-                        linkpath = posixpath.join(base, linkpath)
-                        linkpath = posixpath.normpath(linkpath)
-                        member = tarobj._getmember(linkpath)
-
-                if member is not None and (member.isfile() or member.isdir()):
-                    final_dst = progress_filter(name, prelim_dst)
-                    if final_dst:
-                        if final_dst.endswith(os.sep):
-                            final_dst = final_dst[:-1]
-                        try:
-                            # XXX Ugh
-                            tarobj._extract_member(member, final_dst)
-                        except tarfile.ExtractError:
-                            # chown/chmod/mkfifo/mknode/makedev failed
-                            pass
-    return True
+        ) from e
+
+    for member, final_dst in _iter_open_tar(
+        tarobj,
+        extract_dir,
+        progress_filter,
+    ):
+        try:
+            # XXX Ugh
+            tarobj._extract_member(member, final_dst)
+        except tarfile.ExtractError:
+            # chown/chmod/mkfifo/mknode/makedev failed
+            pass
+
+    return True


 extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
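A hypothetical usage sketch for the drivers above (the file names are made up): the progress_filter contract is that returning a falsy value skips the member, while the returned path becomes the destination.

from setuptools.archive_util import unpack_archive

def skip_tests(src, dst):
    # Skip anything under tests/; extract the rest unchanged.
    return None if src.startswith('tests/') else dst

unpack_archive('example-1.0.tar.gz', 'build/example', progress_filter=skip_tests)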
@@ -28,25 +28,41 @@ Again, this is not a formal definition! Just a "taste" of the module.

 import io
 import os
+import shlex
 import sys
 import tokenize
 import shutil
 import contextlib
+import tempfile
+import warnings
+from pathlib import Path
+from typing import Dict, Iterator, List, Optional, Union

 import setuptools
 import distutils
-from setuptools.py31compat import TemporaryDirectory
+from . import errors
+from ._path import same_path
+from ._reqs import parse_strings
+from .warnings import SetuptoolsDeprecationWarning
+from distutils.util import strtobool

-from pkg_resources import parse_requirements
-from pkg_resources.py31compat import makedirs
-
-__all__ = ['get_requires_for_build_sdist',
-           'get_requires_for_build_wheel',
-           'prepare_metadata_for_build_wheel',
-           'build_wheel',
-           'build_sdist',
-           '__legacy__',
-           'SetupRequirementsError']
+__all__ = [
+    'get_requires_for_build_sdist',
+    'get_requires_for_build_wheel',
+    'prepare_metadata_for_build_wheel',
+    'build_wheel',
+    'build_sdist',
+    'get_requires_for_build_editable',
+    'prepare_metadata_for_build_editable',
+    'build_editable',
+    '__legacy__',
+    'SetupRequirementsError',
+]
+
+SETUPTOOLS_ENABLE_FEATURES = os.getenv("SETUPTOOLS_ENABLE_FEATURES", "").lower()
+LEGACY_EDITABLE = "legacy-editable" in SETUPTOOLS_ENABLE_FEATURES.replace("_", "-")


 class SetupRequirementsError(BaseException):
     def __init__(self, specifiers):

@@ -55,7 +71,7 @@ class SetupRequirementsError(BaseException):

 class Distribution(setuptools.dist.Distribution):
     def fetch_build_eggs(self, specifiers):
-        specifier_list = list(map(str, parse_requirements(specifiers)))
+        specifier_list = list(parse_strings(specifiers))

         raise SetupRequirementsError(specifier_list)

@@ -75,53 +91,205 @@ class Distribution(setuptools.dist.Distribution):
         distutils.core.Distribution = orig


-def _to_str(s):
-    """
-    Convert a filename to a string (on Python 2, explicitly
-    a byte string, not Unicode) as distutils checks for the
-    exact type str.
-    """
-    if sys.version_info[0] == 2 and not isinstance(s, str):
-        # Assume it's Unicode, as that's what the PEP says
-        # should be provided.
-        return s.encode(sys.getfilesystemencoding())
-    return s
+@contextlib.contextmanager
+def no_install_setup_requires():
+    """Temporarily disable installing setup_requires
+
+    Under PEP 517, the backend reports build dependencies to the frontend,
+    and the frontend is responsible for ensuring they're installed.
+    So setuptools (acting as a backend) should not try to install them.
+    """
+    orig = setuptools._install_setup_requires
+    setuptools._install_setup_requires = lambda attrs: None
+    try:
+        yield
+    finally:
+        setuptools._install_setup_requires = orig


 def _get_immediate_subdirectories(a_dir):
-    return [name for name in os.listdir(a_dir)
-            if os.path.isdir(os.path.join(a_dir, name))]
+    return [
+        name for name in os.listdir(a_dir) if os.path.isdir(os.path.join(a_dir, name))
+    ]


 def _file_with_extension(directory, extension):
-    matching = (
-        f for f in os.listdir(directory)
-        if f.endswith(extension)
-    )
-    file, = matching
+    matching = (f for f in os.listdir(directory) if f.endswith(extension))
+    try:
+        (file,) = matching
+    except ValueError:
+        raise ValueError(
+            'No distribution was found. Ensure that `setup.py` '
+            'is not empty and that it calls `setup()`.'
+        ) from None
     return file


 def _open_setup_script(setup_script):
     if not os.path.exists(setup_script):
         # Supply a default setup.py
-        return io.StringIO(u"from setuptools import setup; setup()")
+        return io.StringIO("from setuptools import setup; setup()")

-    return getattr(tokenize, 'open', open)(setup_script)
+    return tokenize.open(setup_script)


-class _BuildMetaBackend(object):
-
-    def _fix_config(self, config_settings):
-        config_settings = config_settings or {}
-        config_settings.setdefault('--global-option', [])
-        return config_settings
+@contextlib.contextmanager
+def suppress_known_deprecation():
+    with warnings.catch_warnings():
+        warnings.filterwarnings('ignore', 'setup.py install is deprecated')
+        yield
_ConfigSettings = Optional[Dict[str, Union[str, List[str], None]]]
"""
Currently the user can run::
pip install -e . --config-settings key=value
python -m build -C--key=value -C key=value
- pip will pass both key and value as strings and overwriting repeated keys
(pypa/pip#11059).
- build will accumulate values associated with repeated keys in a list.
It will also accept keys with no associated value.
This means that an option passed by build can be ``str | list[str] | None``.
- PEP 517 specifies that ``config_settings`` is an optional dict.
"""
class _ConfigSettingsTranslator:
"""Translate ``config_settings`` into distutils-style command arguments.
Only a limited number of options is currently supported.
"""
# See pypa/setuptools#1928 pypa/setuptools#2491
def _get_config(self, key: str, config_settings: _ConfigSettings) -> List[str]:
"""
Get the value of a specific key in ``config_settings`` as a list of strings.
>>> fn = _ConfigSettingsTranslator()._get_config
>>> fn("--global-option", None)
[]
>>> fn("--global-option", {})
[]
>>> fn("--global-option", {'--global-option': 'foo'})
['foo']
>>> fn("--global-option", {'--global-option': ['foo']})
['foo']
>>> fn("--global-option", {'--global-option': 'foo'})
['foo']
>>> fn("--global-option", {'--global-option': 'foo bar'})
['foo', 'bar']
"""
cfg = config_settings or {}
opts = cfg.get(key) or []
return shlex.split(opts) if isinstance(opts, str) else opts
def _global_args(self, config_settings: _ConfigSettings) -> Iterator[str]:
"""
Let the user specify ``verbose`` or ``quiet`` + escape hatch via
``--global-option``.
Note: ``-v``, ``-vv``, ``-vvv`` have similar effects in setuptools,
so we just have to cover the basic scenario ``-v``.
>>> fn = _ConfigSettingsTranslator()._global_args
>>> list(fn(None))
[]
>>> list(fn({"verbose": "False"}))
['-q']
>>> list(fn({"verbose": "1"}))
['-v']
>>> list(fn({"--verbose": None}))
['-v']
>>> list(fn({"verbose": "true", "--global-option": "-q --no-user-cfg"}))
['-v', '-q', '--no-user-cfg']
>>> list(fn({"--quiet": None}))
['-q']
"""
cfg = config_settings or {}
falsey = {"false", "no", "0", "off"}
if "verbose" in cfg or "--verbose" in cfg:
level = str(cfg.get("verbose") or cfg.get("--verbose") or "1")
yield ("-q" if level.lower() in falsey else "-v")
if "quiet" in cfg or "--quiet" in cfg:
level = str(cfg.get("quiet") or cfg.get("--quiet") or "1")
yield ("-v" if level.lower() in falsey else "-q")
yield from self._get_config("--global-option", config_settings)
def __dist_info_args(self, config_settings: _ConfigSettings) -> Iterator[str]:
"""
The ``dist_info`` command accepts ``tag-date`` and ``tag-build``.
.. warning::
We cannot use this yet as it requires the ``sdist`` and ``bdist_wheel``
commands run in ``build_sdist`` and ``build_wheel`` to reuse the egg-info
directory created in ``prepare_metadata_for_build_wheel``.
>>> fn = _ConfigSettingsTranslator()._ConfigSettingsTranslator__dist_info_args
>>> list(fn(None))
[]
>>> list(fn({"tag-date": "False"}))
['--no-date']
>>> list(fn({"tag-date": None}))
['--no-date']
>>> list(fn({"tag-date": "true", "tag-build": ".a"}))
['--tag-date', '--tag-build', '.a']
"""
cfg = config_settings or {}
if "tag-date" in cfg:
val = strtobool(str(cfg["tag-date"] or "false"))
yield ("--tag-date" if val else "--no-date")
if "tag-build" in cfg:
yield from ["--tag-build", str(cfg["tag-build"])]
def _editable_args(self, config_settings: _ConfigSettings) -> Iterator[str]:
"""
The ``editable_wheel`` command accepts ``editable-mode=strict``.
>>> fn = _ConfigSettingsTranslator()._editable_args
>>> list(fn(None))
[]
>>> list(fn({"editable-mode": "strict"}))
['--mode', 'strict']
"""
cfg = config_settings or {}
mode = cfg.get("editable-mode") or cfg.get("editable_mode")
if not mode:
return
yield from ["--mode", str(mode)]
def _arbitrary_args(self, config_settings: _ConfigSettings) -> Iterator[str]:
"""
Users may expect to pass arbitrary lists of arguments to a command
via "--global-option" (example provided in PEP 517 of a "escape hatch").
>>> fn = _ConfigSettingsTranslator()._arbitrary_args
>>> list(fn(None))
[]
>>> list(fn({}))
[]
>>> list(fn({'--build-option': 'foo'}))
['foo']
>>> list(fn({'--build-option': ['foo']}))
['foo']
>>> list(fn({'--build-option': 'foo'}))
['foo']
>>> list(fn({'--build-option': 'foo bar'}))
['foo', 'bar']
>>> list(fn({'--global-option': 'foo'}))
[]
"""
yield from self._get_config("--build-option", config_settings)
class _BuildMetaBackend(_ConfigSettingsTranslator):
     def _get_build_requires(self, config_settings, requirements):
-        config_settings = self._fix_config(config_settings)
-
-        sys.argv = sys.argv[:1] + ['egg_info'] + \
-            config_settings["--global-option"]
+        sys.argv = [
+            *sys.argv[:1],
+            *self._global_args(config_settings),
+            "egg_info",
+        ]
         try:
             with Distribution.patch():
                 self.run_setup()

@@ -133,65 +301,98 @@ class _BuildMetaBackend(object):

     def run_setup(self, setup_script='setup.py'):
         # Note that we can reuse our build directory between calls
         # Correctness comes first, then optimization later
-        __file__ = setup_script
+        __file__ = os.path.abspath(setup_script)
         __name__ = '__main__'

         with _open_setup_script(__file__) as f:
             code = f.read().replace(r'\r\n', r'\n')

-        exec(compile(code, __file__, 'exec'), locals())
+        try:
+            exec(code, locals())
+        except SystemExit as e:
+            if e.code:
+                raise
+            # We ignore exit code indicating success
+            SetuptoolsDeprecationWarning.emit(
+                "Running `setup.py` directly as CLI tool is deprecated.",
+                "Please avoid using `sys.exit(0)` or similar statements "
+                "that don't fit in the paradigm of a configuration file.",
+                see_url="https://blog.ganssle.io/articles/2021/10/"
+                "setup-py-deprecated.html",
+            )

     def get_requires_for_build_wheel(self, config_settings=None):
-        config_settings = self._fix_config(config_settings)
-
         return self._get_build_requires(config_settings, requirements=['wheel'])

     def get_requires_for_build_sdist(self, config_settings=None):
-        config_settings = self._fix_config(config_settings)
-
         return self._get_build_requires(config_settings, requirements=[])

-    def prepare_metadata_for_build_wheel(self, metadata_directory,
-                                         config_settings=None):
-        sys.argv = sys.argv[:1] + ['dist_info', '--egg-base',
-                                   _to_str(metadata_directory)]
-        self.run_setup()
-
-        dist_info_directory = metadata_directory
-        while True:
-            dist_infos = [f for f in os.listdir(dist_info_directory)
-                          if f.endswith('.dist-info')]
-
-            if (len(dist_infos) == 0 and
-                    len(_get_immediate_subdirectories(dist_info_directory)) == 1):
-
-                dist_info_directory = os.path.join(
-                    dist_info_directory, os.listdir(dist_info_directory)[0])
-                continue
-
-            assert len(dist_infos) == 1
-            break
-
-        # PEP 517 requires that the .dist-info directory be placed in the
-        # metadata_directory. To comply, we MUST copy the directory to the root
-        if dist_info_directory != metadata_directory:
-            shutil.move(
-                os.path.join(dist_info_directory, dist_infos[0]),
-                metadata_directory)
-            shutil.rmtree(dist_info_directory, ignore_errors=True)
-
-        return dist_infos[0]
+    def _bubble_up_info_directory(self, metadata_directory: str, suffix: str) -> str:
+        """
+        PEP 517 requires that the .dist-info directory be placed in the
+        metadata_directory. To comply, we MUST copy the directory to the root.
+
+        Returns the basename of the info directory, e.g. `proj-0.0.0.dist-info`.
+        """
+        info_dir = self._find_info_directory(metadata_directory, suffix)
+        if not same_path(info_dir.parent, metadata_directory):
+            shutil.move(str(info_dir), metadata_directory)
+            # PEP 517 allow other files and dirs to exist in metadata_directory
+        return info_dir.name
+
+    def _find_info_directory(self, metadata_directory: str, suffix: str) -> Path:
+        for parent, dirs, _ in os.walk(metadata_directory):
+            candidates = [f for f in dirs if f.endswith(suffix)]
+
+            if len(candidates) != 0 or len(dirs) != 1:
+                assert len(candidates) == 1, f"Multiple {suffix} directories found"
+                return Path(parent, candidates[0])
+
+        msg = f"No {suffix} directory found in {metadata_directory}"
+        raise errors.InternalError(msg)
+
+    def prepare_metadata_for_build_wheel(
+        self, metadata_directory, config_settings=None
+    ):
+        sys.argv = [
+            *sys.argv[:1],
+            *self._global_args(config_settings),
+            "dist_info",
+            "--output-dir",
+            metadata_directory,
+            "--keep-egg-info",
+        ]
+        with no_install_setup_requires():
+            self.run_setup()
+
+        self._bubble_up_info_directory(metadata_directory, ".egg-info")
+        return self._bubble_up_info_directory(metadata_directory, ".dist-info")

-    def _build_with_temp_dir(self, setup_command, result_extension,
-                             result_directory, config_settings):
-        config_settings = self._fix_config(config_settings)
+    def _build_with_temp_dir(
+        self,
+        setup_command,
+        result_extension,
+        result_directory,
+        config_settings,
+        arbitrary_args=(),
+    ):
         result_directory = os.path.abspath(result_directory)

         # Build in a temporary directory, then copy to the target.
-        makedirs(result_directory, exist_ok=True)
-        with TemporaryDirectory(dir=result_directory) as tmp_dist_dir:
-            sys.argv = (sys.argv[:1] + setup_command +
-                        ['--dist-dir', tmp_dist_dir] +
-                        config_settings["--global-option"])
-            self.run_setup()
+        os.makedirs(result_directory, exist_ok=True)
+        temp_opts = {"prefix": ".tmp-", "dir": result_directory}
+
+        with tempfile.TemporaryDirectory(**temp_opts) as tmp_dist_dir:
+            sys.argv = [
+                *sys.argv[:1],
+                *self._global_args(config_settings),
+                *setup_command,
+                "--dist-dir",
+                tmp_dist_dir,
+                *arbitrary_args,
+            ]
+            with no_install_setup_requires():
+                self.run_setup()

             result_basename = _file_with_extension(tmp_dist_dir, result_extension)
             result_path = os.path.join(result_directory, result_basename)
@@ -202,31 +403,75 @@ class _BuildMetaBackend(object):

         return result_basename

-    def build_wheel(self, wheel_directory, config_settings=None,
-                    metadata_directory=None):
-        return self._build_with_temp_dir(['bdist_wheel'], '.whl',
-                                         wheel_directory, config_settings)
+    def build_wheel(
+        self, wheel_directory, config_settings=None, metadata_directory=None
+    ):
+        with suppress_known_deprecation():
+            return self._build_with_temp_dir(
+                ['bdist_wheel'],
+                '.whl',
+                wheel_directory,
+                config_settings,
+                self._arbitrary_args(config_settings),
+            )

     def build_sdist(self, sdist_directory, config_settings=None):
-        return self._build_with_temp_dir(['sdist', '--formats', 'gztar'],
-                                         '.tar.gz', sdist_directory,
-                                         config_settings)
+        return self._build_with_temp_dir(
+            ['sdist', '--formats', 'gztar'], '.tar.gz', sdist_directory, config_settings
+        )
+
+    def _get_dist_info_dir(self, metadata_directory: Optional[str]) -> Optional[str]:
+        if not metadata_directory:
+            return None
+        dist_info_candidates = list(Path(metadata_directory).glob("*.dist-info"))
+        assert len(dist_info_candidates) <= 1
+        return str(dist_info_candidates[0]) if dist_info_candidates else None
+
+    if not LEGACY_EDITABLE:
+        # PEP660 hooks:
+        # build_editable
+        # get_requires_for_build_editable
+        # prepare_metadata_for_build_editable
+        def build_editable(
+            self, wheel_directory, config_settings=None, metadata_directory=None
+        ):
+            # XXX can or should we hide our editable_wheel command normally?
+            info_dir = self._get_dist_info_dir(metadata_directory)
+            opts = ["--dist-info-dir", info_dir] if info_dir else []
+            cmd = ["editable_wheel", *opts, *self._editable_args(config_settings)]
+            with suppress_known_deprecation():
+                return self._build_with_temp_dir(
+                    cmd, ".whl", wheel_directory, config_settings
+                )
+
+        def get_requires_for_build_editable(self, config_settings=None):
+            return self.get_requires_for_build_wheel(config_settings)
+
+        def prepare_metadata_for_build_editable(
+            self, metadata_directory, config_settings=None
+        ):
+            return self.prepare_metadata_for_build_wheel(
+                metadata_directory, config_settings
+            )
 class _BuildMetaLegacyBackend(_BuildMetaBackend):
     """Compatibility backend for setuptools

-    This is a version of setuptools.build_meta that endeavors to maintain
-    backwards compatibility with pre-PEP 517 modes of invocation. It exists
-    as a temporary bridge between the old packaging mechanism and the new
-    packaging mechanism, and will eventually be removed.
+    This is a version of setuptools.build_meta that endeavors
+    to maintain backwards
+    compatibility with pre-PEP 517 modes of invocation. It
+    exists as a temporary
+    bridge between the old packaging mechanism and the new
+    packaging mechanism,
+    and will eventually be removed.
     """

     def run_setup(self, setup_script='setup.py'):
         # In order to maintain compatibility with scripts assuming that
         # the setup.py script is in a directory on the PYTHONPATH, inject
         # '' into sys.path. (pypa/setuptools#1642)
         sys_path = list(sys.path)  # Save the original path

         script_dir = os.path.dirname(os.path.abspath(setup_script))
         if script_dir not in sys.path:

@@ -239,8 +484,7 @@ class _BuildMetaLegacyBackend(_BuildMetaBackend):
             sys.argv[0] = setup_script

         try:
-            super(_BuildMetaLegacyBackend,
-                  self).run_setup(setup_script=setup_script)
+            super().run_setup(setup_script=setup_script)
         finally:
             # While PEP 517 frontends should be calling each hook in a fresh
             # subprocess according to the standard (and thus it should not be

@@ -250,6 +494,7 @@ class _BuildMetaLegacyBackend(_BuildMetaBackend):
             sys.path[:] = sys_path
             sys.argv[0] = sys_argv_0


 # The primary backend
 _BACKEND = _BuildMetaBackend()

@@ -259,6 +504,11 @@ prepare_metadata_for_build_wheel = _BACKEND.prepare_metadata_for_build_wheel
 build_wheel = _BACKEND.build_wheel
 build_sdist = _BACKEND.build_sdist

+if not LEGACY_EDITABLE:
+    get_requires_for_build_editable = _BACKEND.get_requires_for_build_editable
+    prepare_metadata_for_build_editable = _BACKEND.prepare_metadata_for_build_editable
+    build_editable = _BACKEND.build_editable
+
 # The legacy backend
 __legacy__ = _BuildMetaLegacyBackend()
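A hypothetical front-end-style invocation of the hooks exported above (real front ends such as pip and build call them in a subprocess per PEP 517; this sketch assumes the working directory contains a setup.py/setup.cfg project):

import setuptools.build_meta as backend

reqs = backend.get_requires_for_build_wheel({})   # e.g. ['wheel'] plus any setup_requires
wheel = backend.build_wheel('dist', config_settings={"verbose": "1"})
sdist = backend.build_sdist('dist')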
Binary file not shown

Binary file not shown

Binary file not shown
@@ -1,17 +1,12 @@
-__all__ = [
-    'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop',
-    'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts',
-    'sdist', 'setopt', 'test', 'install_egg_info', 'install_scripts',
-    'bdist_wininst', 'upload_docs', 'build_clib', 'dist_info',
-]
-
 from distutils.command.bdist import bdist
 import sys

-from setuptools.command import install_scripts
-
 if 'egg' not in bdist.format_commands:
-    bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
-    bdist.format_commands.append('egg')
+    try:
+        bdist.format_commands['egg'] = ('bdist_egg', "Python .egg file")
+    except TypeError:
+        # For backward compatibility with older distutils (stdlib)
+        bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
+        bdist.format_commands.append('egg')

 del bdist, sys
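A sketch of the two container shapes that try/except is bridging (an illustration built from a plain dict and list, not distutils itself): the setuptools-local distutils makes format_commands a single mapping, while the old stdlib distutils used a list of names plus a parallel format_command dict.

new_style_commands = {'zip': ('bdist_dumb', "ZIP file")}
new_style_commands['egg'] = ('bdist_egg', "Python .egg file")  # plain assignment works

old_style_commands = ['rpm', 'gztar', 'zip']             # names only
old_style_command = {'zip': ('bdist_dumb', "ZIP file")}  # parallel help dict
try:
    old_style_commands['egg'] = ('bdist_egg', "Python .egg file")
except TypeError:  # list indices must be integers -> legacy layout
    old_style_command['egg'] = ('bdist_egg', "Python .egg file")
    old_style_commands.append('egg')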
@@ -1,7 +1,5 @@
 from distutils.errors import DistutilsOptionError

-from setuptools.extern.six.moves import map
-
 from setuptools.command.setopt import edit_config, option_base, config_file

@@ -51,7 +49,7 @@ class alias(option_base):
             return

         elif len(self.args) == 1:
-            alias, = self.args
+            (alias,) = self.args
             if self.remove:
                 command = None
             elif alias in aliases:
@@ -2,7 +2,6 @@
 Build .egg distributions"""

-from distutils.errors import DistutilsSetupError
 from distutils.dir_util import remove_tree, mkpath
 from distutils import log
 from types import CodeType

@@ -12,24 +11,15 @@ import re
 import textwrap
 import marshal

-from setuptools.extern import six
-
-from pkg_resources import get_build_platform, Distribution, ensure_directory
-from pkg_resources import EntryPoint
 from setuptools.extension import Library
 from setuptools import Command
+from .._path import ensure_directory

-try:
-    # Python 2.7 or >=3.2
-    from sysconfig import get_path, get_python_version
-
-    def _get_purelib():
-        return get_path("purelib")
-except ImportError:
-    from distutils.sysconfig import get_python_lib, get_python_version
-
-    def _get_purelib():
-        return get_python_lib(False)
+from sysconfig import get_path, get_python_version
+
+
+def _get_purelib():
+    return get_path("purelib")


 def strip_module(filename):

@@ -51,15 +41,19 @@ def sorted_walk(dir):

 def write_stub(resource, pyfile):
-    _stub_template = textwrap.dedent("""
+    _stub_template = textwrap.dedent(
+        """
         def __bootstrap__():
             global __bootstrap__, __loader__, __file__
-            import sys, pkg_resources, imp
+            import sys, pkg_resources, importlib.util
             __file__ = pkg_resources.resource_filename(__name__, %r)
             __loader__ = None; del __bootstrap__, __loader__
-            imp.load_dynamic(__name__,__file__)
+            spec = importlib.util.spec_from_file_location(__name__,__file__)
+            mod = importlib.util.module_from_spec(spec)
+            spec.loader.exec_module(mod)
         __bootstrap__()
-        """).lstrip()
+        """
+    ).lstrip()
     with open(pyfile, 'w') as f:
         f.write(_stub_template % resource)
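What the regenerated stub does at import time, as a standalone sketch of the importlib calls that replace the removed imp.load_dynamic ('_mymod' and its .so path are hypothetical):

import importlib.util

spec = importlib.util.spec_from_file_location('_mymod', '/tmp/_mymod.so')
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)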
@@ -68,24 +62,25 @@ class bdist_egg(Command):
     description = "create an \"egg\" distribution"

     user_options = [
-        ('bdist-dir=', 'b',
-         "temporary directory for creating the distribution"),
-        ('plat-name=', 'p', "platform name to embed in generated filenames "
-         "(default: %s)" % get_build_platform()),
-        ('exclude-source-files', None,
-         "remove all .py files from the generated egg"),
-        ('keep-temp', 'k',
-         "keep the pseudo-installation tree around after " +
-         "creating the distribution archive"),
-        ('dist-dir=', 'd',
-         "directory to put final built distributions in"),
-        ('skip-build', None,
-         "skip rebuilding everything (for testing/debugging)"),
+        ('bdist-dir=', 'b', "temporary directory for creating the distribution"),
+        (
+            'plat-name=',
+            'p',
+            "platform name to embed in generated filenames "
+            "(by default uses `pkg_resources.get_build_platform()`)",
+        ),
+        ('exclude-source-files', None, "remove all .py files from the generated egg"),
+        (
+            'keep-temp',
+            'k',
+            "keep the pseudo-installation tree around after "
+            + "creating the distribution archive",
+        ),
+        ('dist-dir=', 'd', "directory to put final built distributions in"),
+        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
     ]

-    boolean_options = [
-        'keep-temp', 'skip-build', 'exclude-source-files'
-    ]
+    boolean_options = ['keep-temp', 'skip-build', 'exclude-source-files']

     def initialize_options(self):
         self.bdist_dir = None

@@ -105,18 +100,18 @@ class bdist_egg(Command):
             self.bdist_dir = os.path.join(bdist_base, 'egg')

         if self.plat_name is None:
+            from pkg_resources import get_build_platform
+
             self.plat_name = get_build_platform()

         self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))

         if self.egg_output is None:
-
             # Compute filename of the output egg
-            basename = Distribution(
-                None, None, ei_cmd.egg_name, ei_cmd.egg_version,
-                get_python_version(),
-                self.distribution.has_ext_modules() and self.plat_name
-            ).egg_name()
+            basename = ei_cmd._get_egg_basename(
+                py_version=get_python_version(),
+                platform=self.distribution.has_ext_modules() and self.plat_name,
+            )

             self.egg_output = os.path.join(self.dist_dir, basename + '.egg')

@@ -135,7 +130,7 @@ class bdist_egg(Command):
             if normalized == site_packages or normalized.startswith(
                 site_packages + os.sep
             ):
-                item = realpath[len(site_packages) + 1:], item[1]
+                item = realpath[len(site_packages) + 1 :], item[1]
                 # XXX else: raise ???
             self.distribution.data_files.append(item)

@@ -158,7 +153,7 @@ class bdist_egg(Command):
         self.run_command(cmdname)
         return cmd

-    def run(self):
+    def run(self):  # noqa: C901  # is too complex (14)  # FIXME
         # Generate metadata first
         self.run_command("egg_info")
         # We run install_lib before install_data, because some data hacks

@@ -175,10 +170,9 @@ class bdist_egg(Command):
         all_outputs, ext_outputs = self.get_ext_outputs()
         self.stubs = []
         to_compile = []
-        for (p, ext_name) in enumerate(ext_outputs):
+        for p, ext_name in enumerate(ext_outputs):
             filename, ext = os.path.splitext(ext_name)
-            pyfile = os.path.join(self.bdist_dir, strip_module(filename) +
-                                  '.py')
+            pyfile = os.path.join(self.bdist_dir, strip_module(filename) + '.py')
             self.stubs.append(pyfile)
             log.info("creating stub loader for %s", ext_name)
             if not self.dry_run:

@@ -198,8 +192,7 @@ class bdist_egg(Command):
         if self.distribution.scripts:
             script_dir = os.path.join(egg_info, 'scripts')
             log.info("installing scripts to %s", script_dir)
-            self.call_command('install_scripts', install_dir=script_dir,
-                              no_ep=1)
+            self.call_command('install_scripts', install_dir=script_dir, no_ep=1)

         self.copy_metadata_to(egg_info)
         native_libs = os.path.join(egg_info, "native_libs.txt")

@@ -216,9 +209,7 @@ class bdist_egg(Command):
             if not self.dry_run:
                 os.unlink(native_libs)

-        write_safety_flag(
-            os.path.join(archive_root, 'EGG-INFO'), self.zip_safe()
-        )
+        write_safety_flag(os.path.join(archive_root, 'EGG-INFO'), self.zip_safe())

         if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
             log.warn(

@@ -230,14 +221,22 @@ class bdist_egg(Command):
             self.zap_pyfiles()

         # Make the archive
-        make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
-                     dry_run=self.dry_run, mode=self.gen_header())
+        make_zipfile(
+            self.egg_output,
+            archive_root,
+            verbose=self.verbose,
+            dry_run=self.dry_run,
+            mode=self.gen_header(),
+        )
         if not self.keep_temp:
             remove_tree(self.bdist_dir, dry_run=self.dry_run)

         # Add to 'Distribution.dist_files' so that the "upload" command works
-        getattr(self.distribution, 'dist_files', []).append(
-            ('bdist_egg', get_python_version(), self.egg_output))
+        getattr(self.distribution, 'dist_files', []).append((
+            'bdist_egg',
+            get_python_version(),
+            self.egg_output,
+        ))

     def zap_pyfiles(self):
         log.info("Removing .py files from temporary directory")
@ -254,11 +253,8 @@ class bdist_egg(Command):
pattern = r'(?P<name>.+)\.(?P<magic>[^.]+)\.pyc' pattern = r'(?P<name>.+)\.(?P<magic>[^.]+)\.pyc'
m = re.match(pattern, name) m = re.match(pattern, name)
path_new = os.path.join( path_new = os.path.join(base, os.pardir, m.group('name') + '.pyc')
base, os.pardir, m.group('name') + '.pyc') log.info("Renaming file from [%s] to [%s]" % (path_old, path_new))
log.info(
"Renaming file from [%s] to [%s]"
% (path_old, path_new))
try: try:
os.remove(path_new) os.remove(path_new)
except OSError: except OSError:
@ -273,43 +269,7 @@ class bdist_egg(Command):
return analyze_egg(self.bdist_dir, self.stubs) return analyze_egg(self.bdist_dir, self.stubs)
def gen_header(self): def gen_header(self):
epm = EntryPoint.parse_map(self.distribution.entry_points or '') return 'w'
ep = epm.get('setuptools.installation', {}).get('eggsecutable')
if ep is None:
return 'w' # not an eggsecutable, do it the usual way.
if not ep.attrs or ep.extras:
raise DistutilsSetupError(
"eggsecutable entry point (%r) cannot have 'extras' "
"or refer to a module" % (ep,)
)
pyver = '{}.{}'.format(*sys.version_info)
pkg = ep.module_name
full = '.'.join(ep.attrs)
base = ep.attrs[0]
basename = os.path.basename(self.egg_output)
header = (
"#!/bin/sh\n"
'if [ `basename $0` = "%(basename)s" ]\n'
'then exec python%(pyver)s -c "'
"import sys, os; sys.path.insert(0, os.path.abspath('$0')); "
"from %(pkg)s import %(base)s; sys.exit(%(full)s())"
'" "$@"\n'
'else\n'
' echo $0 is not the correct name for this egg file.\n'
' echo Please rename it back to %(basename)s and try again.\n'
' exec false\n'
'fi\n'
) % locals()
if not self.dry_run:
mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run)
f = open(self.egg_output, 'w')
f.write(header)
f.close()
return 'a'
def copy_metadata_to(self, target_dir): def copy_metadata_to(self, target_dir):
"Copy metadata (egg info) to the target_dir" "Copy metadata (egg info) to the target_dir"
@ -319,7 +279,7 @@ class bdist_egg(Command):
prefix = os.path.join(norm_egg_info, '') prefix = os.path.join(norm_egg_info, '')
for path in self.ei_cmd.filelist.files: for path in self.ei_cmd.filelist.files:
if path.startswith(prefix): if path.startswith(prefix):
target = os.path.join(target_dir, path[len(prefix):]) target = os.path.join(target_dir, path[len(prefix) :])
ensure_directory(target) ensure_directory(target)
self.copy_file(path, target) self.copy_file(path, target)
@ -335,8 +295,7 @@ class bdist_egg(Command):
if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS: if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
all_outputs.append(paths[base] + filename) all_outputs.append(paths[base] + filename)
for filename in dirs: for filename in dirs:
paths[os.path.join(base, filename)] = (paths[base] + paths[os.path.join(base, filename)] = paths[base] + filename + '/'
filename + '/')
if self.distribution.has_ext_modules(): if self.distribution.has_ext_modules():
build_cmd = self.get_finalized_command('build_ext') build_cmd = self.get_finalized_command('build_ext')
@ -362,8 +321,7 @@ def walk_egg(egg_dir):
if 'EGG-INFO' in dirs: if 'EGG-INFO' in dirs:
dirs.remove('EGG-INFO') dirs.remove('EGG-INFO')
yield base, dirs, files yield base, dirs, files
for bdf in walker: yield from walker
yield bdf
def analyze_egg(egg_dir, stubs): def analyze_egg(egg_dir, stubs):
@ -409,14 +367,9 @@ def scan_module(egg_dir, base, name, stubs):
filename = os.path.join(base, name) filename = os.path.join(base, name)
if filename[:-1] in stubs: if filename[:-1] in stubs:
return True # Extension module return True # Extension module
pkg = base[len(egg_dir) + 1:].replace(os.sep, '.') pkg = base[len(egg_dir) + 1 :].replace(os.sep, '.')
module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0] module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
if six.PY2: skip = 16 # skip magic & reserved? & date & file size
skip = 8 # skip magic & date
elif sys.version_info < (3, 7):
skip = 12 # skip magic & date & file size
else:
skip = 16 # skip magic & reserved? & date & file size
f = open(filename, 'rb') f = open(filename, 'rb')
f.read(skip) f.read(skip)
code = marshal.load(f) code = marshal.load(f)
@ -429,9 +382,17 @@ def scan_module(egg_dir, base, name, stubs):
safe = False safe = False
if 'inspect' in symbols: if 'inspect' in symbols:
for bad in [ for bad in [
'getsource', 'getabsfile', 'getsourcefile', 'getfile' 'getsource',
'getsourcelines', 'findsource', 'getcomments', 'getframeinfo', 'getabsfile',
'getinnerframes', 'getouterframes', 'stack', 'trace' 'getsourcefile',
'getfile',
'getsourcelines',
'findsource',
'getcomments',
'getframeinfo',
'getinnerframes',
'getouterframes',
'stack',
'trace',
]: ]:
if bad in symbols: if bad in symbols:
log.warn("%s: module MAY be using inspect.%s", module, bad) log.warn("%s: module MAY be using inspect.%s", module, bad)
@ -441,14 +402,12 @@ def scan_module(egg_dir, base, name, stubs):
def iter_symbols(code): def iter_symbols(code):
"""Yield names and strings used by `code` and its nested code objects""" """Yield names and strings used by `code` and its nested code objects"""
for name in code.co_names: yield from code.co_names
yield name
for const in code.co_consts: for const in code.co_consts:
if isinstance(const, six.string_types): if isinstance(const, str):
yield const yield const
elif isinstance(const, CodeType): elif isinstance(const, CodeType):
for name in iter_symbols(const): yield from iter_symbols(const)
yield name
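To see what the rewritten generator yields (names plus string constants, recursing into nested code objects), here is a self-contained copy with a quick check:

from types import CodeType

def iter_symbols(code):
    """Yield names and strings used by `code` and its nested code objects"""
    yield from code.co_names
    for const in code.co_consts:
        if isinstance(const, str):
            yield const
        elif isinstance(const, CodeType):
            yield from iter_symbols(const)

src = "def f():\n    import inspect\n    return inspect.stack()\n"
print(sorted(set(iter_symbols(compile(src, '<demo>', 'exec')))))
# includes 'inspect' and 'stack', which the zip-safety scan looks for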
def can_scan(): def can_scan():
@ -456,20 +415,20 @@ def can_scan():
# CPython, PyPy, etc. # CPython, PyPy, etc.
return True return True
log.warn("Unable to analyze compiled code on this platform.") log.warn("Unable to analyze compiled code on this platform.")
log.warn("Please ask the author to include a 'zip_safe'" log.warn(
" setting (either True or False) in the package's setup.py") "Please ask the author to include a 'zip_safe'"
" setting (either True or False) in the package's setup.py"
)
return False
# Attribute names of options for commands that might need to be convinced to # Attribute names of options for commands that might need to be convinced to
# install to the egg build directory # install to the egg build directory
INSTALL_DIRECTORY_ATTRS = [ INSTALL_DIRECTORY_ATTRS = ['install_lib', 'install_dir', 'install_data', 'install_base']
'install_lib', 'install_dir', 'install_data', 'install_base'
]
def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True, def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True, mode='w'):
mode='w'):
"""Create a zip file from all the files under 'base_dir'. The output """Create a zip file from all the files under 'base_dir'. The output
zip file will be named 'base_dir' + ".zip". Uses either the "zipfile" zip file will be named 'base_dir' + ".zip". Uses either the "zipfile"
Python module (if available) or the InfoZIP "zip" utility (if installed Python module (if available) or the InfoZIP "zip" utility (if installed
@ -485,7 +444,7 @@ def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True,
for name in names: for name in names:
path = os.path.normpath(os.path.join(dirname, name)) path = os.path.normpath(os.path.join(dirname, name))
if os.path.isfile(path): if os.path.isfile(path):
p = path[len(base_dir) + 1:] p = path[len(base_dir) + 1 :]
if not dry_run: if not dry_run:
z.write(path, p) z.write(path, p)
log.debug("adding '%s'", p) log.debug("adding '%s'", p)

View file

@ -1,5 +1,7 @@
import distutils.command.bdist_rpm as orig import distutils.command.bdist_rpm as orig
from ..warnings import SetuptoolsDeprecationWarning
class bdist_rpm(orig.bdist_rpm): class bdist_rpm(orig.bdist_rpm):
""" """
@ -8,36 +10,30 @@ class bdist_rpm(orig.bdist_rpm):
1. Run egg_info to ensure the name and version are properly calculated. 1. Run egg_info to ensure the name and version are properly calculated.
2. Always run 'install' using --single-version-externally-managed to 2. Always run 'install' using --single-version-externally-managed to
disable eggs in RPM distributions. disable eggs in RPM distributions.
3. Replace dash with underscore in the version numbers for better RPM
compatibility.
""" """
def run(self): def run(self):
SetuptoolsDeprecationWarning.emit(
"Deprecated command",
"""
bdist_rpm is deprecated and will be removed in a future version.
Use bdist_wheel (wheel packages) instead.
""",
see_url="https://github.com/pypa/setuptools/issues/1988",
due_date=(2023, 10, 30), # Deprecation introduced in 22 Oct 2021.
)
# ensure distro name is up-to-date # ensure distro name is up-to-date
self.run_command('egg_info') self.run_command('egg_info')
orig.bdist_rpm.run(self) orig.bdist_rpm.run(self)
def _make_spec_file(self): def _make_spec_file(self):
version = self.distribution.get_version()
rpmversion = version.replace('-', '_')
spec = orig.bdist_rpm._make_spec_file(self) spec = orig.bdist_rpm._make_spec_file(self)
line23 = '%define version ' + version return [
line24 = '%define version ' + rpmversion
spec = [
line.replace( line.replace(
"Source0: %{name}-%{version}.tar",
"Source0: %{name}-%{unmangled_version}.tar"
).replace(
"setup.py install ", "setup.py install ",
"setup.py install --single-version-externally-managed " "setup.py install --single-version-externally-managed ",
).replace( ).replace("%setup", "%setup -n %{name}-%{unmangled_version}")
"%setup",
"%setup -n %{name}-%{unmangled_version}"
).replace(line23, line24)
for line in spec for line in spec
] ]
insert_loc = spec.index(line24) + 1
unmangled_version = "%define unmangled_version " + version
spec.insert(insert_loc, unmangled_version)
return spec
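The rewritten ``_make_spec_file`` is now a single list comprehension of string replacements over whatever spec lines distutils generated. A hedged sketch of the same substitutions applied to made-up spec lines:

# Hypothetical lines from a distutils-generated .spec file.
spec = [
    "%setup",
    "python3 setup.py install --root=%{buildroot} --record=INSTALLED_FILES",
]
spec = [
    line.replace(
        "setup.py install ",
        "setup.py install --single-version-externally-managed ",
    ).replace("%setup", "%setup -n %{name}-%{unmangled_version}")
    for line in spec
]
print(spec)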

View file

@ -1,21 +0,0 @@
import distutils.command.bdist_wininst as orig
class bdist_wininst(orig.bdist_wininst):
def reinitialize_command(self, command, reinit_subcommands=0):
"""
Supplement reinitialize_command to work around
http://bugs.python.org/issue20819
"""
cmd = self.distribution.reinitialize_command(
command, reinit_subcommands)
if command in ('install', 'install_lib'):
cmd.install_lib = None
return cmd
def run(self):
self._is_running = True
try:
orig.bdist_wininst.run(self)
finally:
self._is_running = False

View file

@ -1,7 +1,12 @@
import distutils.command.build_clib as orig import distutils.command.build_clib as orig
from distutils.errors import DistutilsSetupError from distutils.errors import DistutilsSetupError
from distutils import log from distutils import log
from setuptools.dep_util import newer_pairwise_group
try:
from distutils._modified import newer_pairwise_group
except ImportError:
# fallback for SETUPTOOLS_USE_DISTUTILS=stdlib
from .._distutils._modified import newer_pairwise_group
class build_clib(orig.build_clib): class build_clib(orig.build_clib):
@ -21,14 +26,15 @@ class build_clib(orig.build_clib):
""" """
def build_libraries(self, libraries): def build_libraries(self, libraries):
for (lib_name, build_info) in libraries: for lib_name, build_info in libraries:
sources = build_info.get('sources') sources = build_info.get('sources')
if sources is None or not isinstance(sources, (list, tuple)): if sources is None or not isinstance(sources, (list, tuple)):
raise DistutilsSetupError( raise DistutilsSetupError(
"in 'libraries' option (library '%s'), " "in 'libraries' option (library '%s'), "
"'sources' must be present and must be " "'sources' must be present and must be "
"a list of source filenames" % lib_name) "a list of source filenames" % lib_name
sources = list(sources) )
sources = sorted(list(sources))
log.info("building '%s' library", lib_name) log.info("building '%s' library", lib_name)
@ -38,9 +44,10 @@ class build_clib(orig.build_clib):
obj_deps = build_info.get('obj_deps', dict()) obj_deps = build_info.get('obj_deps', dict())
if not isinstance(obj_deps, dict): if not isinstance(obj_deps, dict):
raise DistutilsSetupError( raise DistutilsSetupError(
"in 'libraries' option (library '%s'), " "in 'libraries' option (library '%s'), "
"'obj_deps' must be a dictionary of " "'obj_deps' must be a dictionary of "
"type 'source: list'" % lib_name) "type 'source: list'" % lib_name
)
dependencies = [] dependencies = []
# Get the global dependencies that are specified by the '' key. # Get the global dependencies that are specified by the '' key.
@ -48,9 +55,10 @@ class build_clib(orig.build_clib):
global_deps = obj_deps.get('', list()) global_deps = obj_deps.get('', list())
if not isinstance(global_deps, (list, tuple)): if not isinstance(global_deps, (list, tuple)):
raise DistutilsSetupError( raise DistutilsSetupError(
"in 'libraries' option (library '%s'), " "in 'libraries' option (library '%s'), "
"'obj_deps' must be a dictionary of " "'obj_deps' must be a dictionary of "
"type 'source: list'" % lib_name) "type 'source: list'" % lib_name
)
# Build the list to be used by newer_pairwise_group # Build the list to be used by newer_pairwise_group
# each source will be auto-added to its dependencies. # each source will be auto-added to its dependencies.
@ -60,16 +68,17 @@ class build_clib(orig.build_clib):
extra_deps = obj_deps.get(source, list()) extra_deps = obj_deps.get(source, list())
if not isinstance(extra_deps, (list, tuple)): if not isinstance(extra_deps, (list, tuple)):
raise DistutilsSetupError( raise DistutilsSetupError(
"in 'libraries' option (library '%s'), " "in 'libraries' option (library '%s'), "
"'obj_deps' must be a dictionary of " "'obj_deps' must be a dictionary of "
"type 'source: list'" % lib_name) "type 'source: list'" % lib_name
)
src_deps.extend(extra_deps) src_deps.extend(extra_deps)
dependencies.append(src_deps) dependencies.append(src_deps)
expected_objects = self.compiler.object_filenames( expected_objects = self.compiler.object_filenames(
sources, sources,
output_dir=self.build_temp output_dir=self.build_temp,
) )
if newer_pairwise_group(dependencies, expected_objects) != ([], []): if newer_pairwise_group(dependencies, expected_objects) != ([], []):
# First, compile the source code to object files in the library # First, compile the source code to object files in the library
@ -78,21 +87,18 @@ class build_clib(orig.build_clib):
macros = build_info.get('macros') macros = build_info.get('macros')
include_dirs = build_info.get('include_dirs') include_dirs = build_info.get('include_dirs')
cflags = build_info.get('cflags') cflags = build_info.get('cflags')
objects = self.compiler.compile( self.compiler.compile(
sources, sources,
output_dir=self.build_temp, output_dir=self.build_temp,
macros=macros, macros=macros,
include_dirs=include_dirs, include_dirs=include_dirs,
extra_postargs=cflags, extra_postargs=cflags,
debug=self.debug debug=self.debug,
) )
# Now "link" the object files together into a static library. # Now "link" the object files together into a static library.
# (On Unix at least, this isn't really linking -- it just # (On Unix at least, this isn't really linking -- it just
# builds an archive. Whatever.) # builds an archive. Whatever.)
self.compiler.create_static_lib( self.compiler.create_static_lib(
expected_objects, expected_objects, lib_name, output_dir=self.build_clib, debug=self.debug
lib_name, )
output_dir=self.build_clib,
debug=self.debug
)
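``newer_pairwise_group`` pairs each dependency group with its expected object file and returns the pairs that need rebuilding, so ``([], [])`` means everything is up to date. A hedged usage sketch with hypothetical paths (a missing target counts as stale):

try:
    from distutils._modified import newer_pairwise_group
except ImportError:
    # fallback for older layouts, mirroring the import dance above
    from setuptools.dep_util import newer_pairwise_group

# Each source/dependency group maps to one object file.
dependencies = [["a.c", "a.h"], ["b.c"]]
expected_objects = ["build/a.o", "build/b.o"]

stale = newer_pairwise_group(dependencies, expected_objects)
if stale != ([], []):
    print("needs rebuild:", stale)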

View file

@ -1,26 +1,23 @@
import os import os
import sys import sys
import itertools import itertools
from importlib.machinery import EXTENSION_SUFFIXES
from importlib.util import cache_from_source as _compiled_file_name
from typing import Dict, Iterator, List, Tuple
from pathlib import Path
from distutils.command.build_ext import build_ext as _du_build_ext from distutils.command.build_ext import build_ext as _du_build_ext
from distutils.file_util import copy_file
from distutils.ccompiler import new_compiler from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler, get_config_var from distutils.sysconfig import customize_compiler, get_config_var
from distutils.errors import DistutilsError
from distutils import log from distutils import log
from setuptools.extension import Library from setuptools.errors import BaseError
from setuptools.extern import six from setuptools.extension import Extension, Library
if six.PY2:
import imp
EXTENSION_SUFFIXES = [s for s, _, tp in imp.get_suffixes() if tp == imp.C_EXTENSION]
else:
from importlib.machinery import EXTENSION_SUFFIXES
try: try:
# Attempt to use Cython for building extensions, if available # Attempt to use Cython for building extensions, if available
from Cython.Distutils.build_ext import build_ext as _build_ext from Cython.Distutils.build_ext import build_ext as _build_ext
# Additionally, assert that the compiler module will load # Additionally, assert that the compiler module will load
# also. Ref #1229. # also. Ref #1229.
__import__('Cython.Compiler.Main') __import__('Cython.Compiler.Main')
@ -29,7 +26,7 @@ except ImportError:
# make sure _config_vars is initialized # make sure _config_vars is initialized
get_config_var("LDSHARED") get_config_var("LDSHARED")
from distutils.sysconfig import _config_vars as _CONFIG_VARS from distutils.sysconfig import _config_vars as _CONFIG_VARS # noqa
def _customize_compiler_for_shlib(compiler): def _customize_compiler_for_shlib(compiler):
@ -41,7 +38,8 @@ def _customize_compiler_for_shlib(compiler):
try: try:
# XXX Help! I don't have any idea whether these are right... # XXX Help! I don't have any idea whether these are right...
_CONFIG_VARS['LDSHARED'] = ( _CONFIG_VARS['LDSHARED'] = (
"gcc -Wl,-x -dynamiclib -undefined dynamic_lookup") "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup"
)
_CONFIG_VARS['CCSHARED'] = " -dynamiclib" _CONFIG_VARS['CCSHARED'] = " -dynamiclib"
_CONFIG_VARS['SO'] = ".dylib" _CONFIG_VARS['SO'] = ".dylib"
customize_compiler(compiler) customize_compiler(compiler)
@ -61,11 +59,14 @@ if sys.platform == "darwin":
elif os.name != 'nt': elif os.name != 'nt':
try: try:
import dl import dl
use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW') use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW')
except ImportError: except ImportError:
pass pass
if_dl = lambda s: s if have_rtld else ''
def if_dl(s):
return s if have_rtld else ''
def get_abi3_suffix(): def get_abi3_suffix():
@ -75,9 +76,13 @@ def get_abi3_suffix():
return suffix return suffix
elif suffix == '.pyd': # Windows elif suffix == '.pyd': # Windows
return suffix return suffix
return None
class build_ext(_build_ext): class build_ext(_build_ext):
editable_mode: bool = False
inplace: bool = False
def run(self): def run(self):
"""Build extensions in build directory, then copy if --inplace""" """Build extensions in build directory, then copy if --inplace"""
old_inplace, self.inplace = self.inplace, 0 old_inplace, self.inplace = self.inplace, 0
@ -86,41 +91,78 @@ class build_ext(_build_ext):
if old_inplace: if old_inplace:
self.copy_extensions_to_source() self.copy_extensions_to_source()
def _get_inplace_equivalent(self, build_py, ext: Extension) -> Tuple[str, str]:
fullname = self.get_ext_fullname(ext.name)
filename = self.get_ext_filename(fullname)
modpath = fullname.split('.')
package = '.'.join(modpath[:-1])
package_dir = build_py.get_package_dir(package)
inplace_file = os.path.join(package_dir, os.path.basename(filename))
regular_file = os.path.join(self.build_lib, filename)
return (inplace_file, regular_file)
def copy_extensions_to_source(self): def copy_extensions_to_source(self):
build_py = self.get_finalized_command('build_py') build_py = self.get_finalized_command('build_py')
for ext in self.extensions: for ext in self.extensions:
fullname = self.get_ext_fullname(ext.name) inplace_file, regular_file = self._get_inplace_equivalent(build_py, ext)
filename = self.get_ext_filename(fullname)
modpath = fullname.split('.')
package = '.'.join(modpath[:-1])
package_dir = build_py.get_package_dir(package)
dest_filename = os.path.join(package_dir,
os.path.basename(filename))
src_filename = os.path.join(self.build_lib, filename)
# Always copy, even if source is older than destination, to ensure # Always copy, even if source is older than destination, to ensure
# that the right extensions for the current Python/platform are # that the right extensions for the current Python/platform are
# used. # used.
copy_file( if os.path.exists(regular_file) or not ext.optional:
src_filename, dest_filename, verbose=self.verbose, self.copy_file(regular_file, inplace_file, level=self.verbose)
dry_run=self.dry_run
)
if ext._needs_stub: if ext._needs_stub:
self.write_stub(package_dir or os.curdir, ext, True) inplace_stub = self._get_equivalent_stub(ext, inplace_file)
self._write_stub_file(inplace_stub, ext, compile=True)
# Always compile stub and remove the original (leave the cache behind)
# (this behaviour was observed in previous iterations of the code)
def _get_equivalent_stub(self, ext: Extension, output_file: str) -> str:
dir_ = os.path.dirname(output_file)
_, _, name = ext.name.rpartition(".")
return f"{os.path.join(dir_, name)}.py"
def _get_output_mapping(self) -> Iterator[Tuple[str, str]]:
if not self.inplace:
return
build_py = self.get_finalized_command('build_py')
opt = self.get_finalized_command('install_lib').optimize or ""
for ext in self.extensions:
inplace_file, regular_file = self._get_inplace_equivalent(build_py, ext)
yield (regular_file, inplace_file)
if ext._needs_stub:
# This version of `build_ext` always builds artifacts in another dir;
# when "inplace=True" is given it just copies them back.
# This is done in the `copy_extensions_to_source` function, which
# always compiles stub files via `_compile_and_remove_stub`.
# At the end of the process, a `.pyc` stub file is created without the
# corresponding `.py`.
inplace_stub = self._get_equivalent_stub(ext, inplace_file)
regular_stub = self._get_equivalent_stub(ext, regular_file)
inplace_cache = _compiled_file_name(inplace_stub, optimization=opt)
output_cache = _compiled_file_name(regular_stub, optimization=opt)
yield (output_cache, inplace_cache)
def get_ext_filename(self, fullname): def get_ext_filename(self, fullname):
filename = _build_ext.get_ext_filename(self, fullname) so_ext = os.getenv('SETUPTOOLS_EXT_SUFFIX')
if so_ext:
filename = os.path.join(*fullname.split('.')) + so_ext
else:
filename = _build_ext.get_ext_filename(self, fullname)
so_ext = get_config_var('EXT_SUFFIX')
if fullname in self.ext_map: if fullname in self.ext_map:
ext = self.ext_map[fullname] ext = self.ext_map[fullname]
use_abi3 = ( use_abi3 = ext.py_limited_api and get_abi3_suffix()
not six.PY2
and getattr(ext, 'py_limited_api')
and get_abi3_suffix()
)
if use_abi3: if use_abi3:
so_ext = get_config_var('EXT_SUFFIX') filename = filename[: -len(so_ext)]
filename = filename[:-len(so_ext)] so_ext = get_abi3_suffix()
filename = filename + get_abi3_suffix() filename = filename + so_ext
if isinstance(ext, Library): if isinstance(ext, Library):
fn, ext = os.path.splitext(filename) fn, ext = os.path.splitext(filename)
return self.shlib_compiler.library_filename(fn, libtype) return self.shlib_compiler.library_filename(fn, libtype)
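The suffix handling above can be mirrored outside the command class. In this sketch, ``SETUPTOOLS_EXT_SUFFIX`` is the environment override honored by the new code, and the abi3 probe follows the same ``EXTENSION_SUFFIXES`` scan as ``get_abi3_suffix``; the extension name is a placeholder:

import os
from distutils.sysconfig import get_config_var
from importlib.machinery import EXTENSION_SUFFIXES

def abi3_suffix():
    # '.abi3.so' on Linux/macOS, '.pyd' on Windows, None elsewhere.
    return next((s for s in EXTENSION_SUFFIXES if '.abi3' in s or s == '.pyd'), None)

fullname = "mypkg._speedups"  # hypothetical extension name
so_ext = os.getenv('SETUPTOOLS_EXT_SUFFIX') or get_config_var('EXT_SUFFIX')
filename = os.path.join(*fullname.split('.')) + so_ext
print(filename)       # e.g. mypkg/_speedups.cpython-311-x86_64-linux-gnu.so
print(abi3_suffix())  # suffix applied when ext.py_limited_api is set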
@ -134,13 +176,13 @@ class build_ext(_build_ext):
self.shlib_compiler = None self.shlib_compiler = None
self.shlibs = [] self.shlibs = []
self.ext_map = {} self.ext_map = {}
self.editable_mode = False
def finalize_options(self): def finalize_options(self):
_build_ext.finalize_options(self) _build_ext.finalize_options(self)
self.extensions = self.extensions or [] self.extensions = self.extensions or []
self.check_extensions_list(self.extensions) self.check_extensions_list(self.extensions)
self.shlibs = [ext for ext in self.extensions self.shlibs = [ext for ext in self.extensions if isinstance(ext, Library)]
if isinstance(ext, Library)]
if self.shlibs: if self.shlibs:
self.setup_shlib_compiler() self.setup_shlib_compiler()
for ext in self.extensions: for ext in self.extensions:
@ -164,6 +206,9 @@ class build_ext(_build_ext):
if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs: if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
ext.runtime_library_dirs.append(os.curdir) ext.runtime_library_dirs.append(os.curdir)
if self.editable_mode:
self.inplace = True
def setup_shlib_compiler(self): def setup_shlib_compiler(self):
compiler = self.shlib_compiler = new_compiler( compiler = self.shlib_compiler = new_compiler(
compiler=self.compiler, dry_run=self.dry_run, force=self.force compiler=self.compiler, dry_run=self.dry_run, force=self.force
@ -174,7 +219,7 @@ class build_ext(_build_ext):
compiler.set_include_dirs(self.include_dirs) compiler.set_include_dirs(self.include_dirs)
if self.define is not None: if self.define is not None:
# 'define' option is a list of (name,value) tuples # 'define' option is a list of (name,value) tuples
for (name, value) in self.define: for name, value in self.define:
compiler.define_macro(name, value) compiler.define_macro(name, value)
if self.undef is not None: if self.undef is not None:
for macro in self.undef: for macro in self.undef:
@ -204,8 +249,8 @@ class build_ext(_build_ext):
self.compiler = self.shlib_compiler self.compiler = self.shlib_compiler
_build_ext.build_extension(self, ext) _build_ext.build_extension(self, ext)
if ext._needs_stub: if ext._needs_stub:
cmd = self.get_finalized_command('build_py').build_lib build_lib = self.get_finalized_command('build_py').build_lib
self.write_stub(cmd, ext) self.write_stub(build_lib, ext)
finally: finally:
self.compiler = _compiler self.compiler = _compiler
@ -218,8 +263,56 @@ class build_ext(_build_ext):
pkg = '.'.join(ext._full_name.split('.')[:-1] + ['']) pkg = '.'.join(ext._full_name.split('.')[:-1] + [''])
return any(pkg + libname in libnames for libname in ext.libraries) return any(pkg + libname in libnames for libname in ext.libraries)
def get_outputs(self): def get_source_files(self) -> List[str]:
return _build_ext.get_outputs(self) + self.__get_stubs_outputs() return [*_build_ext.get_source_files(self), *self._get_internal_depends()]
def _get_internal_depends(self) -> Iterator[str]:
"""Yield ``ext.depends`` that are contained by the project directory"""
project_root = Path(self.distribution.src_root or os.curdir).resolve()
depends = (dep for ext in self.extensions for dep in ext.depends)
def skip(orig_path: str, reason: str) -> None:
log.info(
"dependency %s won't be automatically "
"included in the manifest: the path %s",
orig_path,
reason,
)
for dep in depends:
path = Path(dep)
if path.is_absolute():
skip(dep, "must be relative")
continue
if ".." in path.parts:
skip(dep, "can't have `..` segments")
continue
try:
resolved = (project_root / path).resolve(strict=True)
except OSError:
skip(dep, "doesn't exist")
continue
try:
resolved.relative_to(project_root)
except ValueError:
skip(dep, "must be inside the project root")
continue
yield path.as_posix()
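The loop above encodes three rules for ``ext.depends`` entries: no absolute paths, no ``..`` segments, and the resolved file must exist inside the project root. The same predicate as a standalone, hedged helper:

import os
from pathlib import Path

def is_internal_dep(dep: str, project_root: str) -> bool:
    """True if `dep` is relative and resolves inside project_root."""
    root = Path(project_root).resolve()
    path = Path(dep)
    if path.is_absolute() or ".." in path.parts:
        return False
    try:
        (root / path).resolve(strict=True).relative_to(root)
    except (OSError, ValueError):  # missing file, or escapes via symlink
        return False
    return True

print(is_internal_dep("../outside.h", os.curdir))  # False: has a `..` segment
print(is_internal_dep("/etc/hosts", os.curdir))    # False: absolute path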
def get_outputs(self) -> List[str]:
if self.inplace:
return list(self.get_output_mapping().keys())
return sorted(_build_ext.get_outputs(self) + self.__get_stubs_outputs())
def get_output_mapping(self) -> Dict[str, str]:
"""See :class:`setuptools.commands.build.SubCommand`"""
mapping = self._get_output_mapping()
return dict(sorted(mapping, key=lambda x: x[0]))
def __get_stubs_outputs(self): def __get_stubs_outputs(self):
# assemble the base name for each extension that needs a stub # assemble the base name for each extension that needs a stub
@ -239,22 +332,22 @@ class build_ext(_build_ext):
yield '.pyo' yield '.pyo'
def write_stub(self, output_dir, ext, compile=False): def write_stub(self, output_dir, ext, compile=False):
log.info("writing stub loader for %s to %s", ext._full_name, stub_file = os.path.join(output_dir, *ext._full_name.split('.')) + '.py'
output_dir) self._write_stub_file(stub_file, ext, compile)
stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) +
'.py') def _write_stub_file(self, stub_file: str, ext: Extension, compile=False):
log.info("writing stub loader for %s to %s", ext._full_name, stub_file)
if compile and os.path.exists(stub_file): if compile and os.path.exists(stub_file):
raise DistutilsError(stub_file + " already exists! Please delete.") raise BaseError(stub_file + " already exists! Please delete.")
if not self.dry_run: if not self.dry_run:
f = open(stub_file, 'w') f = open(stub_file, 'w')
f.write( f.write(
'\n'.join([ '\n'.join([
"def __bootstrap__():", "def __bootstrap__():",
" global __bootstrap__, __file__, __loader__", " global __bootstrap__, __file__, __loader__",
" import sys, os, pkg_resources, imp" + if_dl(", dl"), " import sys, os, pkg_resources, importlib.util" + if_dl(", dl"),
" __file__ = pkg_resources.resource_filename" " __file__ = pkg_resources.resource_filename"
"(__name__,%r)" "(__name__,%r)" % os.path.basename(ext._file_name),
% os.path.basename(ext._file_name),
" del __bootstrap__", " del __bootstrap__",
" if '__loader__' in globals():", " if '__loader__' in globals():",
" del __loader__", " del __loader__",
@ -263,51 +356,87 @@ class build_ext(_build_ext):
" try:", " try:",
" os.chdir(os.path.dirname(__file__))", " os.chdir(os.path.dirname(__file__))",
if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"), if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"),
" imp.load_dynamic(__name__,__file__)", " spec = importlib.util.spec_from_file_location(",
" __name__, __file__)",
" mod = importlib.util.module_from_spec(spec)",
" spec.loader.exec_module(mod)",
" finally:", " finally:",
if_dl(" sys.setdlopenflags(old_flags)"), if_dl(" sys.setdlopenflags(old_flags)"),
" os.chdir(old_dir)", " os.chdir(old_dir)",
"__bootstrap__()", "__bootstrap__()",
"" # terminal \n "", # terminal \n
]) ])
) )
f.close() f.close()
if compile: if compile:
from distutils.util import byte_compile self._compile_and_remove_stub(stub_file)
byte_compile([stub_file], optimize=0, def _compile_and_remove_stub(self, stub_file: str):
force=True, dry_run=self.dry_run) from distutils.util import byte_compile
optimize = self.get_finalized_command('install_lib').optimize
if optimize > 0: byte_compile([stub_file], optimize=0, force=True, dry_run=self.dry_run)
byte_compile([stub_file], optimize=optimize, optimize = self.get_finalized_command('install_lib').optimize
force=True, dry_run=self.dry_run) if optimize > 0:
if os.path.exists(stub_file) and not self.dry_run: byte_compile(
os.unlink(stub_file) [stub_file], optimize=optimize, force=True, dry_run=self.dry_run
)
if os.path.exists(stub_file) and not self.dry_run:
os.unlink(stub_file)
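``_compile_and_remove_stub`` leans on ``distutils.util.byte_compile``: the ``.py`` stub is compiled into ``__pycache__`` and then removed so only bytecode ships. A runnable sketch with a throwaway stub standing in for the generated loader:

import os
from distutils.util import byte_compile

# Create a throwaway stub to compile (stands in for the generated loader).
stub_file = "demo_stub.py"
with open(stub_file, "w") as f:
    f.write("print('bootstrapped')\n")

byte_compile([stub_file], optimize=0, force=True, dry_run=0)
os.unlink(stub_file)  # keep only the __pycache__/*.pyc, as the command does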
if use_stubs or os.name == 'nt': if use_stubs or os.name == 'nt':
# Build shared libraries # Build shared libraries
# #
def link_shared_object( def link_shared_object(
self, objects, output_libname, output_dir=None, libraries=None, self,
library_dirs=None, runtime_library_dirs=None, export_symbols=None, objects,
debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, output_libname,
target_lang=None): output_dir=None,
libraries=None,
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
debug=0,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
target_lang=None,
):
self.link( self.link(
self.SHARED_LIBRARY, objects, output_libname, self.SHARED_LIBRARY,
output_dir, libraries, library_dirs, runtime_library_dirs, objects,
export_symbols, debug, extra_preargs, extra_postargs, output_libname,
build_temp, target_lang output_dir,
libraries,
library_dirs,
runtime_library_dirs,
export_symbols,
debug,
extra_preargs,
extra_postargs,
build_temp,
target_lang,
) )
else: else:
# Build static libraries everywhere else # Build static libraries everywhere else
libtype = 'static' libtype = 'static'
def link_shared_object( def link_shared_object(
self, objects, output_libname, output_dir=None, libraries=None, self,
library_dirs=None, runtime_library_dirs=None, export_symbols=None, objects,
debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, output_libname,
target_lang=None): output_dir=None,
libraries=None,
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
debug=0,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
target_lang=None,
):
# XXX we need to either disallow these attrs on Library instances, # XXX we need to either disallow these attrs on Library instances,
# or warn/abort here if set, or something... # or warn/abort here if set, or something...
# libraries=None, library_dirs=None, runtime_library_dirs=None, # libraries=None, library_dirs=None, runtime_library_dirs=None,
@ -322,6 +451,4 @@ else:
# a different prefix # a different prefix
basename = basename[3:] basename = basename[3:]
self.create_static_lib( self.create_static_lib(objects, basename, output_dir, debug, target_lang)
objects, basename, output_dir, debug, target_lang
)

View file

@ -1,26 +1,28 @@
from functools import partial
from glob import glob from glob import glob
from distutils.util import convert_path from distutils.util import convert_path
import distutils.command.build_py as orig import distutils.command.build_py as orig
import os import os
import fnmatch import fnmatch
import textwrap import textwrap
import io
import distutils.errors import distutils.errors
import itertools import itertools
import stat
from pathlib import Path
from typing import Dict, Iterable, Iterator, List, Optional, Tuple
from setuptools.extern import six from ..extern.more_itertools import unique_everseen
from setuptools.extern.six.moves import map, filter, filterfalse from ..warnings import SetuptoolsDeprecationWarning
try:
from setuptools.lib2to3_ex import Mixin2to3
except ImportError:
class Mixin2to3:
def run_2to3(self, files, doctests=True):
"do nothing"
class build_py(orig.build_py, Mixin2to3): _IMPLICIT_DATA_FILES = ('*.pyi', 'py.typed')
def make_writable(target):
os.chmod(target, os.stat(target).st_mode | stat.S_IWRITE)
class build_py(orig.build_py):
"""Enhanced 'build_py' command that includes data files with packages """Enhanced 'build_py' command that includes data files with packages
The data files are specified via a 'package_data' argument to 'setup()'. The data files are specified via a 'package_data' argument to 'setup()'.
@ -30,19 +32,31 @@ class build_py(orig.build_py, Mixin2to3):
'py_modules' and 'packages' in the same setup operation. 'py_modules' and 'packages' in the same setup operation.
""" """
editable_mode: bool = False
existing_egg_info_dir: Optional[str] = None #: Private API, internal use only.
def finalize_options(self): def finalize_options(self):
orig.build_py.finalize_options(self) orig.build_py.finalize_options(self)
self.package_data = self.distribution.package_data self.package_data = self.distribution.package_data
self.exclude_package_data = (self.distribution.exclude_package_data or self.exclude_package_data = self.distribution.exclude_package_data or {}
{})
if 'data_files' in self.__dict__: if 'data_files' in self.__dict__:
del self.__dict__['data_files'] del self.__dict__['data_files']
self.__updated_files = [] self.__updated_files = []
self.__doctests_2to3 = []
def copy_file(
self, infile, outfile, preserve_mode=1, preserve_times=1, link=None, level=1
):
# Overwrite base class to allow using links
if link:
infile = str(Path(infile).resolve())
outfile = str(Path(outfile).resolve())
return super().copy_file(
infile, outfile, preserve_mode, preserve_times, link, level
)
def run(self): def run(self):
"""Build modules, packages, and copy data files to build directory""" """Build modules, packages, and copy data files to build directory"""
if not self.py_modules and not self.packages: if not (self.py_modules or self.packages) or self.editable_mode:
return return
if self.py_modules: if self.py_modules:
@ -52,10 +66,6 @@ class build_py(orig.build_py, Mixin2to3):
self.build_packages() self.build_packages()
self.build_package_data() self.build_package_data()
self.run_2to3(self.__updated_files, False)
self.run_2to3(self.__updated_files, True)
self.run_2to3(self.__doctests_2to3, True)
# Only compile actual .py files, using our base class' idea of what our # Only compile actual .py files, using our base class' idea of what our
# output files are. # output files are.
self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0)) self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0))
@ -68,11 +78,7 @@ class build_py(orig.build_py, Mixin2to3):
return orig.build_py.__getattr__(self, attr) return orig.build_py.__getattr__(self, attr)
def build_module(self, module, module_file, package): def build_module(self, module, module_file, package):
if six.PY2 and isinstance(package, six.string_types): outfile, copied = orig.build_py.build_module(self, module, module_file, package)
# avoid errors on Python 2 when unicode is passed (#190)
package = package.split('.')
outfile, copied = orig.build_py.build_module(self, module, module_file,
package)
if copied: if copied:
self.__updated_files.append(outfile) self.__updated_files.append(outfile)
return outfile, copied return outfile, copied
@ -82,6 +88,16 @@ class build_py(orig.build_py, Mixin2to3):
self.analyze_manifest() self.analyze_manifest()
return list(map(self._get_pkg_data_files, self.packages or ())) return list(map(self._get_pkg_data_files, self.packages or ()))
def get_data_files_without_manifest(self):
"""
Generate list of ``(package,src_dir,build_dir,filenames)`` tuples,
but without triggering any attempt to analyze or build the manifest.
"""
# Prevent eventual errors from unset `manifest_files`
# (that would otherwise be set by `analyze_manifest`)
self.__dict__.setdefault('manifest_files', {})
return list(map(self._get_pkg_data_files, self.packages or ()))
def _get_pkg_data_files(self, package): def _get_pkg_data_files(self, package):
# Locate package source directory # Locate package source directory
src_dir = self.get_package_dir(package) src_dir = self.get_package_dir(package)
@ -102,8 +118,9 @@ class build_py(orig.build_py, Mixin2to3):
self.package_data, self.package_data,
package, package,
src_dir, src_dir,
extra_patterns=_IMPLICIT_DATA_FILES,
) )
globs_expanded = map(glob, patterns) globs_expanded = map(partial(glob, recursive=True), patterns)
# flatten the expanded globs into an iterable of matches # flatten the expanded globs into an iterable of matches
globs_matches = itertools.chain.from_iterable(globs_expanded) globs_matches = itertools.chain.from_iterable(globs_expanded)
glob_files = filter(os.path.isfile, globs_matches) glob_files = filter(os.path.isfile, globs_matches)
@ -113,18 +130,41 @@ class build_py(orig.build_py, Mixin2to3):
) )
return self.exclude_data_files(package, src_dir, files) return self.exclude_data_files(package, src_dir, files)
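Because the globs are now expanded with ``recursive=True``, a ``**`` inside ``package_data`` (or the implicit ``*.pyi``/``py.typed`` patterns) matches nested directories. The expansion step in isolation, with hypothetical patterns and source dir:

import itertools
import os
from functools import partial
from glob import glob

# Hypothetical package_data-style patterns, resolved against a source dir.
src_dir = "mypkg"
patterns = [os.path.join(src_dir, p) for p in ("*.pyi", "py.typed", "data/**/*.json")]

globs_expanded = map(partial(glob, recursive=True), patterns)
matches = itertools.chain.from_iterable(globs_expanded)
print([f for f in matches if os.path.isfile(f)])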
def build_package_data(self): def get_outputs(self, include_bytecode=1) -> List[str]:
"""Copy data files into build directory""" """See :class:`setuptools.commands.build.SubCommand`"""
if self.editable_mode:
return list(self.get_output_mapping().keys())
return super().get_outputs(include_bytecode)
def get_output_mapping(self) -> Dict[str, str]:
"""See :class:`setuptools.commands.build.SubCommand`"""
mapping = itertools.chain(
self._get_package_data_output_mapping(),
self._get_module_mapping(),
)
return dict(sorted(mapping, key=lambda x: x[0]))
def _get_module_mapping(self) -> Iterator[Tuple[str, str]]:
"""Iterate over all modules producing (dest, src) pairs."""
for package, module, module_file in self.find_all_modules():
package = package.split('.')
filename = self.get_module_outfile(self.build_lib, package, module)
yield (filename, module_file)
def _get_package_data_output_mapping(self) -> Iterator[Tuple[str, str]]:
"""Iterate over package data producing (dest, src) pairs."""
for package, src_dir, build_dir, filenames in self.data_files: for package, src_dir, build_dir, filenames in self.data_files:
for filename in filenames: for filename in filenames:
target = os.path.join(build_dir, filename) target = os.path.join(build_dir, filename)
self.mkpath(os.path.dirname(target))
srcfile = os.path.join(src_dir, filename) srcfile = os.path.join(src_dir, filename)
outf, copied = self.copy_file(srcfile, target) yield (target, srcfile)
srcfile = os.path.abspath(srcfile)
if (copied and def build_package_data(self):
srcfile in self.distribution.convert_2to3_doctests): """Copy data files into build directory"""
self.__doctests_2to3.append(outf) for target, srcfile in self._get_package_data_output_mapping():
self.mkpath(os.path.dirname(target))
_outf, _copied = self.copy_file(srcfile, target)
make_writable(target)
def analyze_manifest(self): def analyze_manifest(self):
self.manifest_files = mf = {} self.manifest_files = mf = {}
@ -135,9 +175,21 @@ class build_py(orig.build_py, Mixin2to3):
# Locate package source directory # Locate package source directory
src_dirs[assert_relative(self.get_package_dir(package))] = package src_dirs[assert_relative(self.get_package_dir(package))] = package
self.run_command('egg_info') if (
ei_cmd = self.get_finalized_command('egg_info') getattr(self, 'existing_egg_info_dir', None)
for path in ei_cmd.filelist.files: and Path(self.existing_egg_info_dir, "SOURCES.txt").exists()
):
egg_info_dir = self.existing_egg_info_dir
manifest = Path(egg_info_dir, "SOURCES.txt")
files = manifest.read_text(encoding="utf-8").splitlines()
else:
self.run_command('egg_info')
ei_cmd = self.get_finalized_command('egg_info')
egg_info_dir = ei_cmd.egg_info
files = ei_cmd.filelist.files
check = _IncludePackageDataAbuse()
for path in self._filter_build_files(files, egg_info_dir):
d, f = os.path.split(assert_relative(path)) d, f = os.path.split(assert_relative(path))
prev = None prev = None
oldf = f oldf = f
@ -146,10 +198,34 @@ class build_py(orig.build_py, Mixin2to3):
d, df = os.path.split(d) d, df = os.path.split(d)
f = os.path.join(df, f) f = os.path.join(df, f)
if d in src_dirs: if d in src_dirs:
if path.endswith('.py') and f == oldf: if f == oldf:
continue # it's a module, not data if check.is_module(f):
continue # it's a module, not data
else:
importable = check.importable_subpackage(src_dirs[d], f)
if importable:
check.warn(importable)
mf.setdefault(src_dirs[d], []).append(path) mf.setdefault(src_dirs[d], []).append(path)
def _filter_build_files(self, files: Iterable[str], egg_info: str) -> Iterator[str]:
"""
``build_meta`` may try to create egg_info outside of the project directory,
and this can be problematic for certain plugins (reported in issue #3500).
Extensions might also include, among their sources, files created in the
``build_lib`` and ``build_temp`` directories.
This function should filter these invalid files out.
"""
build = self.get_finalized_command("build")
build_dirs = (egg_info, self.build_lib, build.build_temp, build.build_base)
norm_dirs = [os.path.normpath(p) for p in build_dirs if p]
for file in files:
norm_path = os.path.normpath(file)
if not os.path.isabs(file) or all(d not in norm_path for d in norm_dirs):
yield file
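The predicate inside ``_filter_build_files`` keeps a path when it is relative, or when its normalized form does not land in any build directory. A standalone sketch with hypothetical directories:

import os

build_dirs = ["proj.egg-info", "build/lib", "build/temp", "build"]
norm_dirs = [os.path.normpath(p) for p in build_dirs if p]

def keep(file: str) -> bool:
    norm_path = os.path.normpath(file)
    return not os.path.isabs(file) or all(d not in norm_path for d in norm_dirs)

print(keep("src/mypkg/__init__.py"))          # True: relative path
print(keep("/tmp/out/build/lib/mypkg/x.py"))  # False: inside a build dir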
def get_data_files(self): def get_data_files(self):
pass # Lazily compute data files in _get_data_files() function. pass # Lazily compute data files in _get_data_files() function.
@ -172,7 +248,7 @@ class build_py(orig.build_py, Mixin2to3):
else: else:
return init_py return init_py
with io.open(init_py, 'rb') as f: with open(init_py, 'rb') as f:
contents = f.read() contents = f.read()
if b'declare_namespace' not in contents: if b'declare_namespace' not in contents:
raise distutils.errors.DistutilsError( raise distutils.errors.DistutilsError(
@ -186,6 +262,8 @@ class build_py(orig.build_py, Mixin2to3):
def initialize_options(self): def initialize_options(self):
self.packages_checked = {} self.packages_checked = {}
orig.build_py.initialize_options(self) orig.build_py.initialize_options(self)
self.editable_mode = False
self.existing_egg_info_dir = None
def get_package_dir(self, package): def get_package_dir(self, package):
res = orig.build_py.get_package_dir(self, package) res = orig.build_py.get_package_dir(self, package)
@ -201,23 +279,16 @@ class build_py(orig.build_py, Mixin2to3):
package, package,
src_dir, src_dir,
) )
match_groups = ( match_groups = (fnmatch.filter(files, pattern) for pattern in patterns)
fnmatch.filter(files, pattern)
for pattern in patterns
)
# flatten the groups of matches into an iterable of matches # flatten the groups of matches into an iterable of matches
matches = itertools.chain.from_iterable(match_groups) matches = itertools.chain.from_iterable(match_groups)
bad = set(matches) bad = set(matches)
keepers = ( keepers = (fn for fn in files if fn not in bad)
fn
for fn in files
if fn not in bad
)
# ditch dupes # ditch dupes
return list(_unique_everseen(keepers)) return list(unique_everseen(keepers))
@staticmethod @staticmethod
def _get_platform_patterns(spec, package, src_dir): def _get_platform_patterns(spec, package, src_dir, extra_patterns=()):
""" """
yield platform-specific path patterns (suitable for glob yield platform-specific path patterns (suitable for glob
or fn_match) from a glob-based spec (such as or fn_match) from a glob-based spec (such as
@ -225,6 +296,7 @@ class build_py(orig.build_py, Mixin2to3):
matching package in src_dir. matching package in src_dir.
""" """
raw_patterns = itertools.chain( raw_patterns = itertools.chain(
extra_patterns,
spec.get('', []), spec.get('', []),
spec.get(package, []), spec.get(package, []),
) )
@ -235,36 +307,87 @@ class build_py(orig.build_py, Mixin2to3):
) )
# from Python docs
def _unique_everseen(iterable, key=None):
"List unique elements, preserving order. Remember all elements ever seen."
# unique_everseen('AAAABBBCCDAABBB') --> A B C D
# unique_everseen('ABBCcAD', str.lower) --> A B C D
seen = set()
seen_add = seen.add
if key is None:
for element in filterfalse(seen.__contains__, iterable):
seen_add(element)
yield element
else:
for element in iterable:
k = key(element)
if k not in seen:
seen_add(k)
yield element
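The vendored ``more_itertools.unique_everseen`` that replaces the recipe above behaves identically; a quick check against the recipe's own doctest examples (using the public ``more-itertools`` distribution rather than the vendored copy):

# pip install more-itertools
from more_itertools import unique_everseen

print(list(unique_everseen('AAAABBBCCDAABBB')))     # ['A', 'B', 'C', 'D']
print(list(unique_everseen('ABBCcAD', str.lower)))  # ['A', 'B', 'C', 'D']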
def assert_relative(path): def assert_relative(path):
if not os.path.isabs(path): if not os.path.isabs(path):
return path return path
from distutils.errors import DistutilsSetupError from distutils.errors import DistutilsSetupError
msg = textwrap.dedent(""" msg = (
textwrap.dedent(
"""
Error: setup script specifies an absolute path: Error: setup script specifies an absolute path:
%s %s
setup() arguments must *always* be /-separated paths relative to the setup() arguments must *always* be /-separated paths relative to the
setup.py directory, *never* absolute paths. setup.py directory, *never* absolute paths.
""").lstrip() % path """
).lstrip()
% path
)
raise DistutilsSetupError(msg) raise DistutilsSetupError(msg)
class _IncludePackageDataAbuse:
"""Inform users that package or module is included as 'data file'"""
class _Warning(SetuptoolsDeprecationWarning):
_SUMMARY = """
Package {importable!r} is absent from the `packages` configuration.
"""
_DETAILS = """
############################
# Package would be ignored #
############################
Python recognizes {importable!r} as an importable package[^1],
but it is absent from setuptools' `packages` configuration.
This leads to an ambiguous overall configuration. If you want to distribute this
package, please make sure that {importable!r} is explicitly added
to the `packages` configuration field.
Alternatively, you can also rely on setuptools' discovery methods
(for example by using `find_namespace_packages(...)`/`find_namespace:`
instead of `find_packages(...)`/`find:`).
You can read more about "package discovery" on setuptools documentation page:
- https://setuptools.pypa.io/en/latest/userguide/package_discovery.html
If you don't want {importable!r} to be distributed and are
already explicitly excluding {importable!r} via
`find_namespace_packages(...)/find_namespace` or `find_packages(...)/find`,
you can try to use `exclude_package_data`, or `include-package-data=False` in
combination with a more fine-grained `package-data` configuration.
You can read more about "package data files" on setuptools documentation page:
- https://setuptools.pypa.io/en/latest/userguide/datafiles.html
[^1]: For Python, any directory (with suitable naming) can be imported,
even if it does not contain any `.py` files.
On the other hand, currently there is no concept of a package data
directory; all directories are treated like packages.
"""
# _DUE_DATE: still not defined as this is particularly controversial.
# Warning initially introduced in May 2022. See issue #3340 for discussion.
def __init__(self):
self._already_warned = set()
def is_module(self, file):
return file.endswith(".py") and file[: -len(".py")].isidentifier()
def importable_subpackage(self, parent, file):
pkg = Path(file).parent
parts = list(itertools.takewhile(str.isidentifier, pkg.parts))
if parts:
return ".".join([parent, *parts])
return None
def warn(self, importable):
if importable not in self._already_warned:
self._Warning.emit(importable=importable)
self._already_warned.add(importable)
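The two helpers above decide whether a manifest entry is really a module (a top-level identifier-named ``.py`` file) or data nested inside an importable-looking subpackage that is missing from ``packages``. Self-contained copies for illustration:

import itertools
from pathlib import Path

def is_module(file: str) -> bool:
    return file.endswith(".py") and file[: -len(".py")].isidentifier()

def importable_subpackage(parent: str, file: str):
    parts = list(itertools.takewhile(str.isidentifier, Path(file).parent.parts))
    return ".".join([parent, *parts]) if parts else None

print(is_module("utils.py"))                            # True
print(importable_subpackage("mypkg", "sub/data.json"))  # 'mypkg.sub'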

View file

@ -1,19 +1,15 @@
from distutils.util import convert_path from distutils.util import convert_path
from distutils import log from distutils import log
from distutils.errors import DistutilsError, DistutilsOptionError from distutils.errors import DistutilsOptionError
import os import os
import glob import glob
import io
from setuptools.extern import six
import pkg_resources
from setuptools.command.easy_install import easy_install from setuptools.command.easy_install import easy_install
from setuptools import _normalization
from setuptools import _path
from setuptools import namespaces from setuptools import namespaces
import setuptools import setuptools
__metaclass__ = type
class develop(namespaces.DevelopInstaller, easy_install): class develop(namespaces.DevelopInstaller, easy_install):
"""Set up package for development""" """Set up package for development"""
@ -46,11 +42,9 @@ class develop(namespaces.DevelopInstaller, easy_install):
self.always_copy_from = '.' # always copy eggs installed in curdir self.always_copy_from = '.' # always copy eggs installed in curdir
def finalize_options(self): def finalize_options(self):
import pkg_resources
         ei = self.get_finalized_command("egg_info")
-        if ei.broken_egg_info:
-            template = "Please rename %r to %r before using 'develop'"
-            args = ei.egg_info, ei.broken_egg_info
-            raise DistutilsError(template % args)
         self.args = [ei.egg_name]
 
         easy_install.finalize_options(self)
@@ -59,15 +53,16 @@ class develop(namespaces.DevelopInstaller, easy_install):
         # pick up setup-dir .egg files only: no .egg-info
         self.package_index.scan(glob.glob('*.egg'))
 
-        egg_link_fn = ei.egg_name + '.egg-link'
+        egg_link_fn = (
+            _normalization.filename_component_broken(ei.egg_name) + '.egg-link'
+        )
         self.egg_link = os.path.join(self.install_dir, egg_link_fn)
         self.egg_base = ei.egg_base
         if self.egg_path is None:
             self.egg_path = os.path.abspath(ei.egg_base)
 
-        target = pkg_resources.normalize_path(self.egg_base)
-        egg_path = pkg_resources.normalize_path(
-            os.path.join(self.install_dir, self.egg_path))
+        target = _path.normpath(self.egg_base)
+        egg_path = _path.normpath(os.path.join(self.install_dir, self.egg_path))
         if egg_path != target:
             raise DistutilsOptionError(
                 "--egg-path must be a relative path from the install"
@@ -78,7 +73,7 @@ class develop(namespaces.DevelopInstaller, easy_install):
         self.dist = pkg_resources.Distribution(
             target,
             pkg_resources.PathMetadata(target, os.path.abspath(ei.egg_info)),
-            project_name=ei.egg_name
+            project_name=ei.egg_name,
         )
 
         self.setup_path = self._resolve_setup_path(
@@ -97,49 +92,24 @@ class develop(namespaces.DevelopInstaller, easy_install):
         path_to_setup = egg_base.replace(os.sep, '/').rstrip('/')
         if path_to_setup != os.curdir:
             path_to_setup = '../' * (path_to_setup.count('/') + 1)
-        resolved = pkg_resources.normalize_path(
-            os.path.join(install_dir, egg_path, path_to_setup)
-        )
-        if resolved != pkg_resources.normalize_path(os.curdir):
+        resolved = _path.normpath(os.path.join(install_dir, egg_path, path_to_setup))
+        curdir = _path.normpath(os.curdir)
+        if resolved != curdir:
             raise DistutilsOptionError(
                 "Can't get a consistent path to setup script from"
-                " installation directory", resolved,
-                pkg_resources.normalize_path(os.curdir))
+                " installation directory",
+                resolved,
+                curdir,
+            )
         return path_to_setup
 
     def install_for_development(self):
-        if not six.PY2 and getattr(self.distribution, 'use_2to3', False):
-            # If we run 2to3 we can not do this inplace:
-
-            # Ensure metadata is up-to-date
-            self.reinitialize_command('build_py', inplace=0)
-            self.run_command('build_py')
-            bpy_cmd = self.get_finalized_command("build_py")
-            build_path = pkg_resources.normalize_path(bpy_cmd.build_lib)
-
-            # Build extensions
-            self.reinitialize_command('egg_info', egg_base=build_path)
-            self.run_command('egg_info')
-
-            self.reinitialize_command('build_ext', inplace=0)
-            self.run_command('build_ext')
-
-            # Fixup egg-link and easy-install.pth
-            ei_cmd = self.get_finalized_command("egg_info")
-            self.egg_path = build_path
-            self.dist.location = build_path
-            # XXX
-            self.dist._provider = pkg_resources.PathMetadata(
-                build_path, ei_cmd.egg_info)
-        else:
-            # Without 2to3 inplace works fine:
-            self.run_command('egg_info')
-
-            # Build extensions in-place
-            self.reinitialize_command('build_ext', inplace=1)
-            self.run_command('build_ext')
-
-        self.install_site_py()  # ensure that target dir is site-safe
+        self.run_command('egg_info')
+
+        # Build extensions in-place
+        self.reinitialize_command('build_ext', inplace=1)
+        self.run_command('build_ext')
+
         if setuptools.bootstrap_install_from:
             self.easy_install(setuptools.bootstrap_install_from)
             setuptools.bootstrap_install_from = None
@@ -161,8 +131,7 @@ class develop(namespaces.DevelopInstaller, easy_install):
         egg_link_file = open(self.egg_link)
         contents = [line.rstrip() for line in egg_link_file]
         egg_link_file.close()
-        if contents not in ([self.egg_path],
-                            [self.egg_path, self.setup_path]):
+        if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
             log.warn("Link points to %s: uninstall aborted", contents)
             return
         if not self.dry_run:
@@ -187,10 +156,12 @@ class develop(namespaces.DevelopInstaller, easy_install):
         for script_name in self.distribution.scripts or []:
             script_path = os.path.abspath(convert_path(script_name))
             script_name = os.path.basename(script_path)
-            with io.open(script_path) as strm:
+            with open(script_path) as strm:
                 script_text = strm.read()
             self.install_script(dist, script_name, script_text, script_path)
+        return None
 
     def install_wrapper_scripts(self, dist):
         dist = VersionlessRequirement(dist)
         return easy_install.install_wrapper_scripts(self, dist)
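A note on the `_path.normpath` swap above: the old code leaned on `pkg_resources.normalize_path`, and the new code performs the same equality check with setuptools' internal `_path` helper. A minimal sketch of the check, assuming `_path.normpath` behaves like a case-normalized, symlink-resolved absolute path (the `normpath` below is a hypothetical stand-in, not the setuptools API):

    import os

    def normpath(filename):
        # Assumed behavior: absolute, symlink-resolved, case-normalized.
        return os.path.normcase(os.path.realpath(os.path.abspath(filename)))

    # develop.finalize_options() requires that install_dir joined with
    # --egg-path resolves back to the project's egg_base; since egg_path
    # defaults to the absolute egg_base, os.path.join() discards install_dir
    # and the two sides compare equal:
    install_dir = "/tmp/site-packages"
    egg_base = os.path.abspath(".")
    assert normpath(os.path.join(install_dir, egg_base)) == normpath(egg_base)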
@@ -4,33 +4,101 @@ As defined in the wheel specification
 """
 
 import os
+import shutil
+from contextlib import contextmanager
+from distutils import log
 from distutils.core import Command
-from distutils import log
+from pathlib import Path
+
+from .. import _normalization
 
 
 class dist_info(Command):
+    """
+    This command is private and reserved for internal use of setuptools,
+    users should rely on ``setuptools.build_meta`` APIs.
+    """
 
-    description = 'create a .dist-info directory'
+    description = "DO NOT CALL DIRECTLY, INTERNAL ONLY: create .dist-info directory"
 
     user_options = [
-        ('egg-base=', 'e', "directory containing .egg-info directories"
-                           " (default: top of the source tree)"),
+        (
+            'output-dir=',
+            'o',
+            "directory inside of which the .dist-info will be "
+            "created (default: top of the source tree)",
+        ),
+        ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
+        ('tag-build=', 'b', "Specify explicit tag to add to version number"),
+        ('no-date', 'D', "Don't include date stamp [default]"),
+        ('keep-egg-info', None, "*TRANSITIONAL* will be removed in the future"),
     ]
 
+    boolean_options = ['tag-date', 'keep-egg-info']
+    negative_opt = {'no-date': 'tag-date'}
+
     def initialize_options(self):
-        self.egg_base = None
+        self.output_dir = None
+        self.name = None
+        self.dist_info_dir = None
+        self.tag_date = None
+        self.tag_build = None
+        self.keep_egg_info = False
 
     def finalize_options(self):
-        pass
+        dist = self.distribution
+        project_dir = dist.src_root or os.curdir
+        self.output_dir = Path(self.output_dir or project_dir)
+
+        egg_info = self.reinitialize_command("egg_info")
+        egg_info.egg_base = str(self.output_dir)
+
+        if self.tag_date:
+            egg_info.tag_date = self.tag_date
+        else:
+            self.tag_date = egg_info.tag_date
+
+        if self.tag_build:
+            egg_info.tag_build = self.tag_build
+        else:
+            self.tag_build = egg_info.tag_build
+
+        egg_info.finalize_options()
+        self.egg_info = egg_info
+
+        name = _normalization.safer_name(dist.get_name())
+        version = _normalization.safer_best_effort_version(dist.get_version())
+        self.name = f"{name}-{version}"
+        self.dist_info_dir = os.path.join(self.output_dir, f"{self.name}.dist-info")
+
+    @contextmanager
+    def _maybe_bkp_dir(self, dir_path: str, requires_bkp: bool):
+        if requires_bkp:
+            bkp_name = f"{dir_path}.__bkp__"
+            _rm(bkp_name, ignore_errors=True)
+            shutil.copytree(dir_path, bkp_name, dirs_exist_ok=True, symlinks=True)
+            try:
+                yield
+            finally:
+                _rm(dir_path, ignore_errors=True)
+                shutil.move(bkp_name, dir_path)
+        else:
+            yield
 
     def run(self):
-        egg_info = self.get_finalized_command('egg_info')
-        egg_info.egg_base = self.egg_base
-        egg_info.finalize_options()
-        egg_info.run()
-        dist_info_dir = egg_info.egg_info[:-len('.egg-info')] + '.dist-info'
-        log.info("creating '{}'".format(os.path.abspath(dist_info_dir)))
+        self.output_dir.mkdir(parents=True, exist_ok=True)
+        self.egg_info.run()
+        egg_info_dir = self.egg_info.egg_info
+        assert os.path.isdir(egg_info_dir), ".egg-info dir should have been created"
+
+        log.info("creating '{}'".format(os.path.abspath(self.dist_info_dir)))
         bdist_wheel = self.get_finalized_command('bdist_wheel')
-        bdist_wheel.egg2dist(egg_info.egg_info, dist_info_dir)
+
+        # TODO: if bdist_wheel is merged into setuptools, just add "keep_egg_info" there
+        with self._maybe_bkp_dir(egg_info_dir, self.keep_egg_info):
+            bdist_wheel.egg2dist(egg_info_dir, self.dist_info_dir)
+
+
+def _rm(dir_name, **opts):
+    if os.path.isdir(dir_name):
+        shutil.rmtree(dir_name, **opts)
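The `_maybe_bkp_dir` helper above is a backup-and-restore context manager: copy the `.egg-info` directory aside, let `egg2dist` consume the original, then move the backup back into place. A standalone sketch of the same pattern (the `preserved_dir` name is illustrative, not a setuptools internal):

    import shutil
    from contextlib import contextmanager

    @contextmanager
    def preserved_dir(dir_path: str):
        backup = f"{dir_path}.__bkp__"
        shutil.rmtree(backup, ignore_errors=True)
        shutil.copytree(dir_path, backup, dirs_exist_ok=True, symlinks=True)
        try:
            yield  # the caller may destroy or rewrite dir_path here
        finally:
            shutil.rmtree(dir_path, ignore_errors=True)
            shutil.move(backup, dir_path)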
(File diff suppressed because it is too large.)
@@ -8,32 +8,33 @@ from distutils.util import convert_path
 from distutils import log
 import distutils.errors
 import distutils.filelist
+import functools
 import os
 import re
 import sys
-import io
-import warnings
 import time
 import collections
 
-from setuptools.extern import six
-from setuptools.extern.six.moves import map
+from .._importlib import metadata
+from .. import _entry_points, _normalization
+from . import _requirestxt
 
 from setuptools import Command
 from setuptools.command.sdist import sdist
 from setuptools.command.sdist import walk_revctrl
 from setuptools.command.setopt import edit_config
 from setuptools.command import bdist_egg
-from pkg_resources import (
-    parse_requirements, safe_name, parse_version,
-    safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
 import setuptools.unicode_utils as unicode_utils
 from setuptools.glob import glob
 
 from setuptools.extern import packaging
-from setuptools import SetuptoolsDeprecationWarning
+from ..warnings import SetuptoolsDeprecationWarning
 
+PY_MAJOR = '{}.{}'.format(*sys.version_info)
 
-def translate_pattern(glob):
+
+def translate_pattern(glob):  # noqa: C901  # is too complex (14)  # FIXME
     """
     Translate a file path glob like '*.txt' into a regular expression.
     This differs from fnmatch.translate which allows wildcards to match
@@ -91,7 +92,7 @@ def translate_pattern(glob):
                 pat += re.escape(char)
             else:
                 # Grab the insides of the [brackets]
-                inner = chunk[i + 1:inner_i]
+                inner = chunk[i + 1 : inner_i]
                 char_class = ''
 
                 # Class negation
@@ -113,7 +114,7 @@ def translate_pattern(glob):
         pat += sep
 
     pat += r'\Z'
-    return re.compile(pat, flags=re.MULTILINE|re.DOTALL)
+    return re.compile(pat, flags=re.MULTILINE | re.DOTALL)
 
 
 class InfoCommon:
@@ -122,23 +123,44 @@ class InfoCommon:
     @property
     def name(self):
-        return safe_name(self.distribution.get_name())
+        return _normalization.safe_name(self.distribution.get_name())
 
     def tagged_version(self):
-        version = self.distribution.get_version()
-        # egg_info may be called more than once for a distribution,
-        # in which case the version string already contains all tags.
-        if self.vtags and version.endswith(self.vtags):
-            return safe_version(version)
-        return safe_version(version + self.vtags)
+        tagged = self._maybe_tag(self.distribution.get_version())
+        return _normalization.safe_version(tagged)
 
-    def tags(self):
+    def _maybe_tag(self, version):
+        """
+        egg_info may be called more than once for a distribution,
+        in which case the version string already contains all tags.
+        """
+        return (
+            version
+            if self.vtags and self._already_tagged(version)
+            else version + self.vtags
+        )
+
+    def _already_tagged(self, version: str) -> bool:
+        # Depending on their format, tags may change with version normalization.
+        # So in addition to the regular tags, we have to search for the normalized ones.
+        return version.endswith(self.vtags) or version.endswith(self._safe_tags())
+
+    def _safe_tags(self) -> str:
+        # To implement this we can rely on `safe_version` pretending to be version 0
+        # followed by tags. Then we simply discard the starting 0 (fake version number)
+        try:
+            return _normalization.safe_version(f"0{self.vtags}")[1:]
+        except packaging.version.InvalidVersion:
+            return _normalization.safe_name(self.vtags.replace(' ', '.'))
+
+    def tags(self) -> str:
         version = ''
         if self.tag_build:
             version += self.tag_build
         if self.tag_date:
-            version += time.strftime("-%Y%m%d")
+            version += time.strftime("%Y%m%d")
         return version
 
     vtags = property(tags)
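`_safe_tags` above relies on a small trick: PEP 440 normalization only applies to complete versions, so the tag is prefixed with a fake `0` release, normalized, and the leading `0` is then discarded. A sketch of the same idea using the public `packaging` library (which setuptools vendors):

    from packaging.version import Version

    def normalized_tag(vtags: str) -> str:
        # "0" + ".dev-20240228" parses as a valid version and normalizes
        # to "0.dev20240228"; dropping the fake "0" leaves the clean tag.
        return str(Version(f"0{vtags}"))[1:]

    print(normalized_tag(".dev-20240228"))  # -> .dev20240228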
@@ -146,8 +168,12 @@ class egg_info(InfoCommon, Command):
     description = "create a distribution's .egg-info directory"
 
     user_options = [
-        ('egg-base=', 'e', "directory containing .egg-info directories"
-                           " (default: top of the source tree)"),
+        (
+            'egg-base=',
+            'e',
+            "directory containing .egg-info directories"
+            " (default: top of the source tree)",
+        ),
         ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
         ('tag-build=', 'b', "Specify explicit tag to add to version number"),
         ('no-date', 'D', "Don't include date stamp [default]"),
@@ -163,7 +189,7 @@ class egg_info(InfoCommon, Command):
         self.egg_name = None
         self.egg_info = None
         self.egg_version = None
-        self.broken_egg_info = False
+        self.ignore_egg_info_in_manifest = False
 
     ####################################
     # allow the 'tag_svn_revision' to be detected and
@@ -175,6 +201,7 @@ class egg_info(InfoCommon, Command):
     @tag_svn_revision.setter
     def tag_svn_revision(self, value):
         pass
+
     ####################################
 
     def save_version_info(self, filename):
@@ -197,32 +224,26 @@ class egg_info(InfoCommon, Command):
         # repercussions.
         self.egg_name = self.name
         self.egg_version = self.tagged_version()
-        parsed_version = parse_version(self.egg_version)
+        parsed_version = packaging.version.Version(self.egg_version)
 
         try:
             is_version = isinstance(parsed_version, packaging.version.Version)
-            spec = (
-                "%s==%s" if is_version else "%s===%s"
-            )
-            list(
-                parse_requirements(spec % (self.egg_name, self.egg_version))
-            )
-        except ValueError:
+            spec = "%s==%s" if is_version else "%s===%s"
+            packaging.requirements.Requirement(spec % (self.egg_name, self.egg_version))
+        except ValueError as e:
             raise distutils.errors.DistutilsOptionError(
-                "Invalid distribution name or version syntax: %s-%s" %
-                (self.egg_name, self.egg_version)
-            )
+                "Invalid distribution name or version syntax: %s-%s"
+                % (self.egg_name, self.egg_version)
+            ) from e
 
         if self.egg_base is None:
             dirs = self.distribution.package_dir
             self.egg_base = (dirs or {}).get('', os.curdir)
 
         self.ensure_dirname('egg_base')
-        self.egg_info = to_filename(self.egg_name) + '.egg-info'
+        self.egg_info = _normalization.filename_component(self.egg_name) + '.egg-info'
         if self.egg_base != os.curdir:
             self.egg_info = os.path.join(self.egg_base, self.egg_info)
-        if '-' in self.egg_name:
-            self.check_broken_egg_info()
 
         # Set package version for the benefit of dumber commands
         # (e.g. sdist, bdist_wininst, etc.)
@@ -234,11 +255,16 @@ class egg_info(InfoCommon, Command):
         # to the version info
         #
         pd = self.distribution._patched_dist
-        if pd is not None and pd.key == self.egg_name.lower():
+        key = getattr(pd, "key", None) or getattr(pd, "name", None)
+        if pd is not None and key == self.egg_name.lower():
             pd._version = self.egg_version
-            pd._parsed_version = parse_version(self.egg_version)
+            pd._parsed_version = packaging.version.Version(self.egg_version)
             self.distribution._patched_dist = None
 
+    def _get_egg_basename(self, py_version=PY_MAJOR, platform=None):
+        """Compute filename of the output egg. Private API."""
+        return _egg_basename(self.egg_name, self.egg_version, py_version, platform)
+
     def write_or_delete_file(self, what, filename, data, force=False):
         """Write `data` to `filename` or delete if empty
@@ -252,9 +278,7 @@ class egg_info(InfoCommon, Command):
             self.write_file(what, filename, data)
         elif os.path.exists(filename):
             if data is None and not force:
-                log.warn(
-                    "%s not set in setup(), but %s exists", what, filename
-                )
+                log.warn("%s not set in setup(), but %s exists", what, filename)
                 return
             else:
                 self.delete_file(filename)
@@ -266,8 +290,7 @@ class egg_info(InfoCommon, Command):
         to the file.
         """
         log.info("writing %s to %s", what, filename)
-        if not six.PY2:
-            data = data.encode("utf-8")
+        data = data.encode("utf-8")
         if not self.dry_run:
             f = open(filename, 'wb')
             f.write(data)
@@ -281,11 +304,13 @@ class egg_info(InfoCommon, Command):
 
     def run(self):
         self.mkpath(self.egg_info)
-        os.utime(self.egg_info, None)
-        installer = self.distribution.fetch_build_egg
-        for ep in iter_entry_points('egg_info.writers'):
-            ep.require(installer=installer)
-            writer = ep.resolve()
+        try:
+            os.utime(self.egg_info, None)
+        except OSError as e:
+            msg = f"Cannot update time stamp of directory '{self.egg_info}'"
+            raise distutils.errors.DistutilsFileError(msg) from e
+        for ep in metadata.entry_points(group='egg_info.writers'):
+            writer = ep.load()
             writer(self, ep.name, os.path.join(self.egg_info, ep.name))
 
         # Get rid of native_libs.txt if it was put there by older bdist_egg
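The `run()` rewrite above replaces `pkg_resources.iter_entry_points` with the `importlib.metadata` API (reached through setuptools' internal `.._importlib` compatibility shim). On Python 3.10+ the equivalent stdlib call looks like this:

    from importlib import metadata

    # Discover and resolve every registered egg_info writer plugin.
    for ep in metadata.entry_points(group='egg_info.writers'):
        writer = ep.load()  # e.g. setuptools.command.egg_info:write_entries
        print(ep.name, writer)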
@@ -299,29 +324,19 @@ class egg_info(InfoCommon, Command):
         """Generate SOURCES.txt manifest file"""
         manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
         mm = manifest_maker(self.distribution)
+        mm.ignore_egg_info_dir = self.ignore_egg_info_in_manifest
         mm.manifest = manifest_filename
         mm.run()
         self.filelist = mm.filelist
 
-    def check_broken_egg_info(self):
-        bei = self.egg_name + '.egg-info'
-        if self.egg_base != os.curdir:
-            bei = os.path.join(self.egg_base, bei)
-        if os.path.exists(bei):
-            log.warn(
-                "-" * 78 + '\n'
-                "Note: Your current .egg-info directory has a '-' in its name;"
-                '\nthis will not work correctly with "setup.py develop".\n\n'
-                'Please rename %s to %s to correct this problem.\n' + '-' * 78,
-                bei, self.egg_info
-            )
-            self.broken_egg_info = self.egg_info
-            self.egg_info = bei  # make it work for now
-
 
 class FileList(_FileList):
     # Implementations of the various MANIFEST.in commands
+    def __init__(self, warn=None, debug_print=None, ignore_egg_info_dir=False):
+        super().__init__(warn, debug_print)
+        self.ignore_egg_info_dir = ignore_egg_info_dir
 
     def process_template_line(self, line):
         # Parse the line: split it up, make sure the right number of words
         # is there, and return the relevant words.  'action' is always
@@ -330,70 +345,67 @@ class FileList(_FileList):
         # patterns, (dir and patterns), or (dir_pattern).
         (action, patterns, dir, dir_pattern) = self._parse_template_line(line)
 
+        action_map = {
+            'include': self.include,
+            'exclude': self.exclude,
+            'global-include': self.global_include,
+            'global-exclude': self.global_exclude,
+            'recursive-include': functools.partial(
+                self.recursive_include,
+                dir,
+            ),
+            'recursive-exclude': functools.partial(
+                self.recursive_exclude,
+                dir,
+            ),
+            'graft': self.graft,
+            'prune': self.prune,
+        }
+        log_map = {
+            'include': "warning: no files found matching '%s'",
+            'exclude': ("warning: no previously-included files found " "matching '%s'"),
+            'global-include': (
+                "warning: no files found matching '%s' " "anywhere in distribution"
+            ),
+            'global-exclude': (
+                "warning: no previously-included files matching "
+                "'%s' found anywhere in distribution"
+            ),
+            'recursive-include': (
+                "warning: no files found matching '%s' " "under directory '%s'"
+            ),
+            'recursive-exclude': (
+                "warning: no previously-included files matching "
+                "'%s' found under directory '%s'"
+            ),
+            'graft': "warning: no directories found matching '%s'",
+            'prune': "no previously-included directories found matching '%s'",
+        }
+
+        try:
+            process_action = action_map[action]
+        except KeyError:
+            msg = f"Invalid MANIFEST.in: unknown action {action!r} in {line!r}"
+            raise DistutilsInternalError(msg) from None
+
         # OK, now we know that the action is valid and we have the
         # right number of words on the line for that action -- so we
         # can proceed with minimal error-checking.
-        if action == 'include':
-            self.debug_print("include " + ' '.join(patterns))
-            for pattern in patterns:
-                if not self.include(pattern):
-                    log.warn("warning: no files found matching '%s'", pattern)
-
-        elif action == 'exclude':
-            self.debug_print("exclude " + ' '.join(patterns))
-            for pattern in patterns:
-                if not self.exclude(pattern):
-                    log.warn(("warning: no previously-included files "
-                              "found matching '%s'"), pattern)
-
-        elif action == 'global-include':
-            self.debug_print("global-include " + ' '.join(patterns))
-            for pattern in patterns:
-                if not self.global_include(pattern):
-                    log.warn(("warning: no files found matching '%s' "
-                              "anywhere in distribution"), pattern)
-
-        elif action == 'global-exclude':
-            self.debug_print("global-exclude " + ' '.join(patterns))
-            for pattern in patterns:
-                if not self.global_exclude(pattern):
-                    log.warn(("warning: no previously-included files matching "
-                              "'%s' found anywhere in distribution"),
-                             pattern)
-
-        elif action == 'recursive-include':
-            self.debug_print("recursive-include %s %s" %
-                             (dir, ' '.join(patterns)))
-            for pattern in patterns:
-                if not self.recursive_include(dir, pattern):
-                    log.warn(("warning: no files found matching '%s' "
-                              "under directory '%s'"),
-                             pattern, dir)
-
-        elif action == 'recursive-exclude':
-            self.debug_print("recursive-exclude %s %s" %
-                             (dir, ' '.join(patterns)))
-            for pattern in patterns:
-                if not self.recursive_exclude(dir, pattern):
-                    log.warn(("warning: no previously-included files matching "
-                              "'%s' found under directory '%s'"),
-                             pattern, dir)
-
-        elif action == 'graft':
-            self.debug_print("graft " + dir_pattern)
-            if not self.graft(dir_pattern):
-                log.warn("warning: no directories found matching '%s'",
-                         dir_pattern)
-
-        elif action == 'prune':
-            self.debug_print("prune " + dir_pattern)
-            if not self.prune(dir_pattern):
-                log.warn(("no previously-included directories found "
-                          "matching '%s'"), dir_pattern)
-
-        else:
-            raise DistutilsInternalError(
-                "this cannot happen: invalid action '%s'" % action)
+        action_is_recursive = action.startswith('recursive-')
+        if action in {'graft', 'prune'}:
+            patterns = [dir_pattern]
+        extra_log_args = (dir,) if action_is_recursive else ()
+        log_tmpl = log_map[action]
+
+        self.debug_print(
+            ' '.join(
+                [action] + ([dir] if action_is_recursive else []) + patterns,
+            )
+        )
+        for pattern in patterns:
+            if not process_action(pattern):
+                log.warn(log_tmpl, pattern, *extra_log_args)
 
     def _remove_files(self, predicate):
         """
@@ -424,8 +436,7 @@ class FileList(_FileList):
         Include all files anywhere in 'dir/' that match the pattern.
         """
         full_pattern = os.path.join(dir, '**', pattern)
-        found = [f for f in glob(full_pattern, recursive=True)
-                 if not os.path.isdir(f)]
+        found = [f for f in glob(full_pattern, recursive=True) if not os.path.isdir(f)]
         self.extend(found)
         return bool(found)
 
@@ -507,6 +518,10 @@ class FileList(_FileList):
             return False
 
         try:
+            # ignore egg-info paths
+            is_egg_info = ".egg-info" in u_path or b".egg-info" in utf8_path
+            if self.ignore_egg_info_dir and is_egg_info:
+                return False
             # accept if either way checks out
             if os.path.exists(u_path) or os.path.exists(utf8_path):
                 return True
@@ -523,17 +538,20 @@ class manifest_maker(sdist):
         self.prune = 1
         self.manifest_only = 1
         self.force_manifest = 1
+        self.ignore_egg_info_dir = False
 
     def finalize_options(self):
         pass
 
     def run(self):
-        self.filelist = FileList()
+        self.filelist = FileList(ignore_egg_info_dir=self.ignore_egg_info_dir)
         if not os.path.exists(self.manifest):
             self.write_manifest()  # it must exist so it'll get in the list
         self.add_defaults()
         if os.path.exists(self.template):
             self.read_template()
+        self.add_license_files()
+        self._add_referenced_files()
         self.prune_file_list()
         self.filelist.sort()
         self.filelist.remove_duplicates()
@@ -568,7 +586,6 @@ class manifest_maker(sdist):
     def add_defaults(self):
         sdist.add_defaults(self)
-        self.check_license()
         self.filelist.append(self.template)
         self.filelist.append(self.manifest)
         rcfiles = list(walk_revctrl())
@@ -585,14 +602,53 @@ class manifest_maker(sdist):
         ei_cmd = self.get_finalized_command('egg_info')
         self.filelist.graft(ei_cmd.egg_info)
 
+    def add_license_files(self):
+        license_files = self.distribution.metadata.license_files or []
+        for lf in license_files:
+            log.info("adding license file '%s'", lf)
+        self.filelist.extend(license_files)
+
+    def _add_referenced_files(self):
+        """Add files referenced by the config (e.g. `file:` directive) to filelist"""
+        referenced = getattr(self.distribution, '_referenced_files', [])
+        # ^-- fallback if dist comes from distutils or is a custom class
+        for rf in referenced:
+            log.debug("adding file referenced by config '%s'", rf)
+        self.filelist.extend(referenced)
+
     def prune_file_list(self):
         build = self.get_finalized_command('build')
         base_dir = self.distribution.get_fullname()
         self.filelist.prune(build.build_base)
         self.filelist.prune(base_dir)
         sep = re.escape(os.sep)
-        self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
-                                      is_regex=1)
+        self.filelist.exclude_pattern(
+            r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep, is_regex=1
+        )
+
+    def _safe_data_files(self, build_py):
+        """
+        The parent class implementation of this method
+        (``sdist``) will try to include data files, which
+        might cause recursion problems when
+        ``include_package_data=True``.
+
+        Therefore, avoid triggering any attempt of
+        analyzing/building the manifest again.
+        """
+        if hasattr(build_py, 'get_data_files_without_manifest'):
+            return build_py.get_data_files_without_manifest()
+
+        SetuptoolsDeprecationWarning.emit(
+            "`build_py` command does not inherit from setuptools' `build_py`.",
+            """
+            Custom 'build_py' does not implement 'get_data_files_without_manifest'.
+            Please extend command classes from setuptools instead of distutils.
+            """,
+            see_url="https://peps.python.org/pep-0632/",
+            # due_date not defined yet, old projects might still do it?
+        )
+        return build_py.get_data_files()
 
 
 def write_file(filename, contents):
@@ -628,44 +684,24 @@ def write_pkg_info(cmd, basename, filename):
 
 def warn_depends_obsolete(cmd, basename, filename):
-    if os.path.exists(filename):
-        log.warn(
-            "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
-            "Use the install_requires/extras_require setup() args instead."
-        )
-
-
-def _write_requirements(stream, reqs):
-    lines = yield_lines(reqs or ())
-    append_cr = lambda line: line + '\n'
-    lines = map(append_cr, lines)
-    stream.writelines(lines)
-
-
-def write_requirements(cmd, basename, filename):
-    dist = cmd.distribution
-    data = six.StringIO()
-    _write_requirements(data, dist.install_requires)
-    extras_require = dist.extras_require or {}
-    for extra in sorted(extras_require):
-        data.write('\n[{extra}]\n'.format(**vars()))
-        _write_requirements(data, extras_require[extra])
-    cmd.write_or_delete_file("requirements", filename, data.getvalue())
-
-
-def write_setup_requirements(cmd, basename, filename):
-    data = io.StringIO()
-    _write_requirements(data, cmd.distribution.setup_requires)
-    cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())
+    """
+    Unused: left to avoid errors when updating (from source) from <= 67.8.
+    Old installations have a .dist-info directory with the entry-point
+    ``depends.txt = setuptools.command.egg_info:warn_depends_obsolete``.
+    This may trigger errors when running the first egg_info in build_meta.
+    TODO: Remove this function in a version sufficiently > 68.
+    """
+
+
+# Export API used in entry_points
+write_requirements = _requirestxt.write_requirements
+write_setup_requirements = _requirestxt.write_setup_requirements
 
 
 def write_toplevel_names(cmd, basename, filename):
-    pkgs = dict.fromkeys(
-        [
-            k.split('.', 1)[0]
-            for k in cmd.distribution.iter_distribution_names()
-        ]
-    )
+    pkgs = dict.fromkeys([
+        k.split('.', 1)[0] for k in cmd.distribution.iter_distribution_names()
+    ])
     cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n')
 
@@ -682,36 +718,20 @@ def write_arg(cmd, basename, filename, force=False):
 
 def write_entries(cmd, basename, filename):
-    ep = cmd.distribution.entry_points
-
-    if isinstance(ep, six.string_types) or ep is None:
-        data = ep
-    elif ep is not None:
-        data = []
-        for section, contents in sorted(ep.items()):
-            if not isinstance(contents, six.string_types):
-                contents = EntryPoint.parse_group(section, contents)
-                contents = '\n'.join(sorted(map(str, contents.values())))
-            data.append('[%s]\n%s\n\n' % (section, contents))
-        data = ''.join(data)
-
-    cmd.write_or_delete_file('entry points', filename, data, True)
-
-
-def get_pkg_info_revision():
-    """
-    Get a -r### off of PKG-INFO Version in case this is an sdist of
-    a subversion revision.
-    """
-    warnings.warn("get_pkg_info_revision is deprecated.", EggInfoDeprecationWarning)
-    if os.path.exists('PKG-INFO'):
-        with io.open('PKG-INFO') as f:
-            for line in f:
-                match = re.match(r"Version:.*-r(\d+)\s*$", line)
-                if match:
-                    return int(match.group(1))
-    return 0
+    eps = _entry_points.load(cmd.distribution.entry_points)
+    defn = _entry_points.render(eps)
+    cmd.write_or_delete_file('entry points', filename, defn, True)
+
+
+def _egg_basename(egg_name, egg_version, py_version=None, platform=None):
+    """Compute filename of the output egg. Private API."""
+    name = _normalization.filename_component(egg_name)
+    version = _normalization.filename_component(egg_version)
+    egg = f"{name}-{version}-py{py_version or PY_MAJOR}"
+    if platform:
+        egg += f"-{platform}"
+    return egg
 
 
 class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning):
-    """Class for warning about deprecations in eggInfo in setupTools. Not ignored by default, unlike DeprecationWarning."""
+    """Deprecated behavior warning for EggInfo, bypassing suppression."""
@@ -1,11 +1,11 @@
 from distutils.errors import DistutilsArgError
 import inspect
 import glob
-import warnings
 import platform
 import distutils.command.install as orig
 
 import setuptools
+from ..warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning
 
 # Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for
 # now. See https://github.com/pypa/setuptools/issues/199/
@@ -17,11 +17,15 @@ class install(orig.install):
 
     user_options = orig.install.user_options + [
         ('old-and-unmanageable', None, "Try not to use this!"),
-        ('single-version-externally-managed', None,
-         "used by system package builders to create 'flat' eggs"),
+        (
+            'single-version-externally-managed',
+            None,
+            "used by system package builders to create 'flat' eggs",
+        ),
     ]
     boolean_options = orig.install.boolean_options + [
-        'old-and-unmanageable', 'single-version-externally-managed',
+        'old-and-unmanageable',
+        'single-version-externally-managed',
     ]
     new_commands = [
         ('install_egg_info', lambda self: True),
@@ -30,6 +34,19 @@ class install(orig.install):
     _nc = dict(new_commands)
 
     def initialize_options(self):
+        SetuptoolsDeprecationWarning.emit(
+            "setup.py install is deprecated.",
+            """
+            Please avoid running ``setup.py`` directly.
+            Instead, use pypa/build, pypa/installer or other
+            standards-based tools.
+            """,
+            see_url="https://blog.ganssle.io/articles/2021/10/setup-py-deprecated.html",
+            # TODO: Document how to bootstrap setuptools without install
+            #       (e.g. by unzipping the wheel file)
+            #       and then add a due_date to this warning.
+        )
+
         orig.install.initialize_options(self)
         self.old_and_unmanageable = None
         self.single_version_externally_managed = None
@@ -54,6 +71,7 @@ class install(orig.install):
             # command without --root or --single-version-externally-managed
             self.path_file = None
             self.extra_dirs = ''
+        return None
 
     def run(self):
         # Explicit request for old-style install?  Just do it
@@ -66,6 +84,8 @@ class install(orig.install):
         else:
             self.do_egg_install()
 
+        return None
+
     @staticmethod
     def _called_from_setup(run_frame):
         """
@@ -79,26 +99,34 @@ class install(orig.install):
         """
         if run_frame is None:
             msg = "Call stack not available. bdist_* commands may fail."
-            warnings.warn(msg)
+            SetuptoolsWarning.emit(msg)
             if platform.python_implementation() == 'IronPython':
                 msg = "For best results, pass -X:Frames to enable call stack."
-                warnings.warn(msg)
+                SetuptoolsWarning.emit(msg)
             return True
-        res = inspect.getouterframes(run_frame)[2]
-        caller, = res[:1]
-        info = inspect.getframeinfo(caller)
-        caller_module = caller.f_globals.get('__name__', '')
-        return (
-            caller_module == 'distutils.dist'
-            and info.function == 'run_commands'
-        )
+
+        frames = inspect.getouterframes(run_frame)
+        for frame in frames[2:4]:
+            (caller,) = frame[:1]
+            info = inspect.getframeinfo(caller)
+            caller_module = caller.f_globals.get('__name__', '')
+
+            if caller_module == "setuptools.dist" and info.function == "run_command":
+                # Starting from v61.0.0 setuptools overwrites dist.run_command
+                continue
+
+            return caller_module == 'distutils.dist' and info.function == 'run_commands'
+
+        return False
 
     def do_egg_install(self):
         easy_install = self.distribution.get_command_class('easy_install')
 
         cmd = easy_install(
-            self.distribution, args="x", root=self.root, record=self.record,
+            self.distribution,
+            args="x",
+            root=self.root,
+            record=self.record,
         )
         cmd.ensure_finalized()  # finalize before bdist_egg munges install cmd
         cmd.always_copy_from = '.'  # make sure local-dir eggs get installed
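The new `_called_from_setup` inspects up to two frames because, since setuptools v61, `dist.run_command` may be setuptools' own wrapper rather than distutils'. A small sketch of the frame-walking primitive it builds on (the helper names are illustrative, not setuptools API):

    import inspect

    def immediate_caller() -> str:
        """Module name of whoever called the function invoking this helper."""
        frames = inspect.getouterframes(inspect.currentframe())
        # frames[0] is this helper, frames[1] the function using it,
        # frames[2] that function's caller (what _called_from_setup inspects).
        frame_info = frames[min(2, len(frames) - 1)]
        return frame_info.frame.f_globals.get('__name__', '')

    def some_command():
        return immediate_caller()

    print(some_command())  # -> '__main__' when run as a script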
@@ -119,7 +147,6 @@ class install(orig.install):
 
 # XXX Python 3.1 doesn't see _nc if this is inside the class
-install.sub_commands = (
-    [cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] +
-    install.new_commands
-)
+install.sub_commands = [
+    cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc
+] + install.new_commands
@@ -4,7 +4,7 @@ import os
 
 from setuptools import Command
 from setuptools import namespaces
 from setuptools.archive_util import unpack_archive
-import pkg_resources
+from .._path import ensure_directory
 
 
 class install_egg_info(namespaces.Installer, Command):
@@ -20,12 +20,9 @@ class install_egg_info(namespaces.Installer, Command):
         self.install_dir = None
 
     def finalize_options(self):
-        self.set_undefined_options('install_lib',
-                                   ('install_dir', 'install_dir'))
+        self.set_undefined_options('install_lib', ('install_dir', 'install_dir'))
         ei_cmd = self.get_finalized_command("egg_info")
-        basename = pkg_resources.Distribution(
-            None, None, ei_cmd.egg_name, ei_cmd.egg_version
-        ).egg_name() + '.egg-info'
+        basename = f"{ei_cmd._get_egg_basename()}.egg-info"
         self.source = ei_cmd.egg_info
         self.target = os.path.join(self.install_dir, basename)
         self.outputs = []
@@ -37,10 +34,8 @@ class install_egg_info(namespaces.Installer, Command):
         elif os.path.exists(self.target):
             self.execute(os.unlink, (self.target,), "Removing " + self.target)
         if not self.dry_run:
-            pkg_resources.ensure_directory(self.target)
-        self.execute(
-            self.copytree, (), "Copying %s to %s" % (self.source, self.target)
-        )
+            ensure_directory(self.target)
+        self.execute(self.copytree, (), "Copying %s to %s" % (self.source, self.target))
         self.install_namespaces()
 
     def get_outputs(self):
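`_get_egg_basename()` replaces the old trick of instantiating a throwaway `pkg_resources.Distribution` just to call `.egg_name()`. The filename follows the `name-version-pyX.Y[-platform]` convention; a sketch mirroring the private `_egg_basename` helper from the egg_info diff above (not public API):

    import sys

    PY_MAJOR = '{}.{}'.format(*sys.version_info)

    def egg_basename(name, version, py_version=None, platform=None):
        egg = f"{name}-{version}-py{py_version or PY_MAJOR}"
        if platform:
            egg += f"-{platform}"
        return egg

    print(egg_basename("demo", "1.0"))  # e.g. demo-1.0-py3.12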
@@ -84,8 +84,13 @@ class install_lib(orig.install_lib):
                 yield base + '.opt-2.pyc'
 
     def copy_tree(
-            self, infile, outfile,
-            preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
+        self,
+        infile,
+        outfile,
+        preserve_mode=1,
+        preserve_times=1,
+        preserve_symlinks=0,
+        level=1,
     ):
         assert preserve_mode and preserve_times and not preserve_symlinks
         exclude = self.get_exclusions()
@@ -102,8 +107,7 @@ class install_lib(orig.install_lib):
 
         def pf(src, dst):
             if dst in exclude:
-                log.warn("Skipping installation of %s (namespace package)",
-                         dst)
+                log.warn("Skipping installation of %s (namespace package)", dst)
                 return False
 
             log.info("copying %s -> %s", src, os.path.dirname(dst))
@@ -3,7 +3,7 @@ import distutils.command.install_scripts as orig
 import os
 import sys
 
-from pkg_resources import Distribution, PathMetadata, ensure_directory
+from .._path import ensure_directory
 
 
 class install_scripts(orig.install_scripts):
@@ -14,8 +14,6 @@ class install_scripts(orig.install_scripts):
         self.no_ep = False
 
     def run(self):
-        import setuptools.command.easy_install as ei
-
         self.run_command("egg_info")
         if self.distribution.scripts:
             orig.install_scripts.run(self)  # run first to set up self.outfiles
@@ -24,20 +22,23 @@ class install_scripts(orig.install_scripts):
         if self.no_ep:
             # don't install entry point scripts into .egg file!
             return
+        self._install_ep_scripts()
+
+    def _install_ep_scripts(self):
+        # Delay import side-effects
+        from pkg_resources import Distribution, PathMetadata
+        from . import easy_install as ei
 
         ei_cmd = self.get_finalized_command("egg_info")
         dist = Distribution(
-            ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
-            ei_cmd.egg_name, ei_cmd.egg_version,
+            ei_cmd.egg_base,
+            PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
+            ei_cmd.egg_name,
+            ei_cmd.egg_version,
         )
         bs_cmd = self.get_finalized_command('build_scripts')
         exec_param = getattr(bs_cmd, 'executable', None)
-        bw_cmd = self.get_finalized_command("bdist_wininst")
-        is_wininst = getattr(bw_cmd, '_is_running', False)
         writer = ei.ScriptWriter
-        if is_wininst:
-            exec_param = "python.exe"
-            writer = ei.WindowsScriptWriter
         if exec_param == sys.executable:
             # In case the path to the Python executable contains a space, wrap
             # it so it's not split up.
@@ -1,136 +0,0 @@
-import os
-from glob import glob
-from distutils.util import convert_path
-from distutils.command import sdist
-
-from setuptools.extern.six.moves import filter
-
-
-class sdist_add_defaults:
-    """
-    Mix-in providing forward-compatibility for functionality as found in
-    distutils on Python 3.7.
-
-    Do not edit the code in this class except to update functionality
-    as implemented in distutils. Instead, override in the subclass.
-    """
-
-    def add_defaults(self):
-        """Add all the default files to self.filelist:
-          - README or README.txt
-          - setup.py
-          - test/test*.py
-          - all pure Python modules mentioned in setup script
-          - all files pointed by package_data (build_py)
-          - all files defined in data_files.
-          - all files defined as scripts.
-          - all C sources listed as part of extensions or C libraries
-            in the setup script (doesn't catch C headers!)
-        Warns if (README or README.txt) or setup.py are missing; everything
-        else is optional.
-        """
-        self._add_defaults_standards()
-        self._add_defaults_optional()
-        self._add_defaults_python()
-        self._add_defaults_data_files()
-        self._add_defaults_ext()
-        self._add_defaults_c_libs()
-        self._add_defaults_scripts()
-
-    @staticmethod
-    def _cs_path_exists(fspath):
-        """
-        Case-sensitive path existence check
-
-        >>> sdist_add_defaults._cs_path_exists(__file__)
-        True
-        >>> sdist_add_defaults._cs_path_exists(__file__.upper())
-        False
-        """
-        if not os.path.exists(fspath):
-            return False
-        # make absolute so we always have a directory
-        abspath = os.path.abspath(fspath)
-        directory, filename = os.path.split(abspath)
-        return filename in os.listdir(directory)
-
-    def _add_defaults_standards(self):
-        standards = [self.READMES, self.distribution.script_name]
-        for fn in standards:
-            if isinstance(fn, tuple):
-                alts = fn
-                got_it = False
-                for fn in alts:
-                    if self._cs_path_exists(fn):
-                        got_it = True
-                        self.filelist.append(fn)
-                        break
-
-                if not got_it:
-                    self.warn("standard file not found: should have one of " +
-                              ', '.join(alts))
-            else:
-                if self._cs_path_exists(fn):
-                    self.filelist.append(fn)
-                else:
-                    self.warn("standard file '%s' not found" % fn)
-
-    def _add_defaults_optional(self):
-        optional = ['test/test*.py', 'setup.cfg']
-        for pattern in optional:
-            files = filter(os.path.isfile, glob(pattern))
-            self.filelist.extend(files)
-
-    def _add_defaults_python(self):
-        # build_py is used to get:
-        #  - python modules
-        #  - files defined in package_data
-        build_py = self.get_finalized_command('build_py')
-
-        # getting python files
-        if self.distribution.has_pure_modules():
-            self.filelist.extend(build_py.get_source_files())
-
-        # getting package_data files
-        # (computed in build_py.data_files by build_py.finalize_options)
-        for pkg, src_dir, build_dir, filenames in build_py.data_files:
-            for filename in filenames:
-                self.filelist.append(os.path.join(src_dir, filename))
-
-    def _add_defaults_data_files(self):
-        # getting distribution.data_files
-        if self.distribution.has_data_files():
-            for item in self.distribution.data_files:
-                if isinstance(item, str):
-                    # plain file
-                    item = convert_path(item)
-                    if os.path.isfile(item):
-                        self.filelist.append(item)
-                else:
-                    # a (dirname, filenames) tuple
-                    dirname, filenames = item
-                    for f in filenames:
-                        f = convert_path(f)
-                        if os.path.isfile(f):
-                            self.filelist.append(f)
-
-    def _add_defaults_ext(self):
-        if self.distribution.has_ext_modules():
-            build_ext = self.get_finalized_command('build_ext')
-            self.filelist.extend(build_ext.get_source_files())
-
-    def _add_defaults_c_libs(self):
-        if self.distribution.has_c_libraries():
-            build_clib = self.get_finalized_command('build_clib')
-            self.filelist.extend(build_clib.get_source_files())
-
-    def _add_defaults_scripts(self):
-        if self.distribution.has_scripts():
-            build_scripts = self.get_finalized_command('build_scripts')
-            self.filelist.extend(build_scripts.get_source_files())
-
-
-if hasattr(sdist.sdist, '_add_defaults_standards'):
-    # disable the functionality already available upstream
-    class sdist_add_defaults:
-        pass
@@ -4,8 +4,6 @@ from distutils.errors import DistutilsOptionError
 import os
 import shutil
 
-from setuptools.extern import six
-
 from setuptools import Command
 
 
@@ -36,12 +34,10 @@ class rotate(Command):
             raise DistutilsOptionError("Must specify number of files to keep")
         try:
             self.keep = int(self.keep)
-        except ValueError:
-            raise DistutilsOptionError("--keep must be an integer")
-        if isinstance(self.match, six.string_types):
-            self.match = [
-                convert_path(p.strip()) for p in self.match.split(',')
-            ]
+        except ValueError as e:
+            raise DistutilsOptionError("--keep must be an integer") from e
+        if isinstance(self.match, str):
+            self.match = [convert_path(p.strip()) for p in self.match.split(',')]
         self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
 
     def run(self):
@@ -56,8 +52,8 @@ class rotate(Command):
             files.reverse()
 
             log.info("%d file(s) matching %s", len(files), pattern)
-            files = files[self.keep:]
-            for (t, f) in files:
+            files = files[self.keep :]
+            for t, f in files:
                 log.info("Deleting %s", f)
                 if not self.dry_run:
                     if os.path.isdir(f):
@@ -11,7 +11,6 @@ class saveopts(option_base):
         settings = {}
 
         for cmd in dist.command_options:
-
             if cmd == 'saveopts':
                 continue  # don't save our own options!
@@ -1,38 +1,46 @@
 from distutils import log
 import distutils.command.sdist as orig
 import os
-import sys
-import io
 import contextlib
+from itertools import chain
 
-from setuptools.extern import six, ordered_set
-
-from .py36compat import sdist_add_defaults
-
-import pkg_resources
+from .._importlib import metadata
+from .build import _ORIGINAL_SUBCOMMANDS
 
 _default_revctrl = list
 
 
 def walk_revctrl(dirname=''):
     """Find all files under revision control"""
-    for ep in pkg_resources.iter_entry_points('setuptools.file_finders'):
-        for item in ep.load()(dirname):
-            yield item
+    for ep in metadata.entry_points(group='setuptools.file_finders'):
+        yield from ep.load()(dirname)
 
 
-class sdist(sdist_add_defaults, orig.sdist):
+class sdist(orig.sdist):
     """Smart sdist that finds anything supported by revision control"""
 
     user_options = [
-        ('formats=', None,
-         "formats for source distribution (comma-separated list)"),
-        ('keep-temp', 'k',
-         "keep the distribution tree around after creating " +
-         "archive file(s)"),
-        ('dist-dir=', 'd',
-         "directory to put the source distribution archive(s) in "
-         "[default: dist]"),
+        ('formats=', None, "formats for source distribution (comma-separated list)"),
+        (
+            'keep-temp',
+            'k',
+            "keep the distribution tree around after creating " + "archive file(s)",
+        ),
+        (
+            'dist-dir=',
+            'd',
+            "directory to put the source distribution archive(s) in " "[default: dist]",
+        ),
+        (
+            'owner=',
+            'u',
+            "Owner name used when creating a tar file [default: current user]",
+        ),
+        (
+            'group=',
+            'g',
+            "Group name used when creating a tar file [default: current group]",
+        ),
     ]
 
     negative_opt = {}
@@ -62,14 +70,6 @@ class sdist(orig.sdist):
 
     def initialize_options(self):
         orig.sdist.initialize_options(self)
 
-        self._default_to_gztar()
-
-    def _default_to_gztar(self):
-        # only needed on Python prior to 3.6.
-        if sys.version_info >= (3, 6, 0, 'beta', 1):
-            return
-        self.formats = ['gztar']
-
     def make_distribution(self):
         """
         Workaround for #516
@@ -96,36 +96,14 @@ class sdist(orig.sdist):
                 yield
             finally:
                 if orig_val is not NoValue:
-                    setattr(os, 'link', orig_val)
+                    os.link = orig_val
 
-    def __read_template_hack(self):
-        # This grody hack closes the template file (MANIFEST.in) if an
-        #  exception occurs during read_template.
-        # Doing so prevents an error when easy_install attempts to delete the
-        #  file.
-        try:
-            orig.sdist.read_template(self)
-        except Exception:
-            _, _, tb = sys.exc_info()
-            tb.tb_next.tb_frame.f_locals['template'].close()
-            raise
-
-    # Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle
-    #  has been fixed, so only override the method if we're using an earlier
-    #  Python.
-    has_leaky_handle = (
-        sys.version_info < (2, 7, 2)
-        or (3, 0) <= sys.version_info < (3, 1, 4)
-        or (3, 2) <= sys.version_info < (3, 2, 1)
-    )
-    if has_leaky_handle:
-        read_template = __read_template_hack
+    def add_defaults(self):
+        super().add_defaults()
+        self._add_defaults_build_sub_commands()
 
     def _add_defaults_optional(self):
-        if six.PY2:
-            sdist_add_defaults._add_defaults_optional(self)
-        else:
-            super()._add_defaults_optional()
+        super()._add_defaults_optional()
         if os.path.isfile('pyproject.toml'):
             self.filelist.append('pyproject.toml')
@@ -136,14 +114,25 @@ class sdist(orig.sdist):
             self.filelist.extend(build_py.get_source_files())
         self._add_data_files(self._safe_data_files(build_py))
 
+    def _add_defaults_build_sub_commands(self):
+        build = self.get_finalized_command("build")
+        missing_cmds = set(build.get_sub_commands()) - _ORIGINAL_SUBCOMMANDS
+        # ^-- the original built-in sub-commands are already handled by default.
+        cmds = (self.get_finalized_command(c) for c in missing_cmds)
+        files = (c.get_source_files() for c in cmds if hasattr(c, "get_source_files"))
+        self.filelist.extend(chain.from_iterable(files))
+
     def _safe_data_files(self, build_py):
         """
-        Extracting data_files from build_py is known to cause
-        infinite recursion errors when `include_package_data`
-        is enabled, so suppress it in that case.
+        Since the ``sdist`` class is also used to compute the MANIFEST
+        (via :obj:`setuptools.command.egg_info.manifest_maker`),
+        there might be recursion problems when trying to obtain the list of
+        data_files and ``include_package_data=True`` (which in turn depends on
+        the files included in the MANIFEST).
+
+        To avoid that, ``manifest_maker`` should be able to overwrite this
+        method and avoid recursive attempts to build/analyze the MANIFEST.
         """
-        if self.distribution.include_package_data:
-            return ()
         return build_py.data_files
 
     def _add_data_files(self, data_files):
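`_add_defaults_build_sub_commands` above asks every non-standard `build` sub-command for its sources, skipping commands that don't implement `get_source_files`. The flattening idiom in isolation (the command classes below are made up for the demo):

    from itertools import chain

    class BuildDocs:
        def get_source_files(self):
            return ['docs/conf.py', 'docs/index.rst']

    class NoSources:
        pass  # no get_source_files -> filtered out by hasattr()

    cmds = [BuildDocs(), NoSources()]
    files = (c.get_source_files() for c in cmds if hasattr(c, 'get_source_files'))
    print(list(chain.from_iterable(files)))  # ['docs/conf.py', 'docs/index.rst']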
@@ -158,10 +147,7 @@ class sdist(orig.sdist):
 
     def _add_defaults_data_files(self):
         try:
-            if six.PY2:
-                sdist_add_defaults._add_defaults_data_files(self)
-            else:
-                super()._add_defaults_data_files()
+            super()._add_defaults_data_files()
         except TypeError:
             log.warn("data_files contains unexpected objects")
@@ -171,8 +157,7 @@ class sdist(orig.sdist):
                 return
         else:
             self.warn(
-                "standard file not found: should have one of " +
-                ', '.join(self.READMES)
+                "standard file not found: should have one of " + ', '.join(self.READMES)
             )
 
     def make_release_tree(self, base_dir, files):
@@ -193,10 +178,9 @@ class sdist(orig.sdist):
         if not os.path.isfile(self.manifest):
             return False
 
-        with io.open(self.manifest, 'rb') as fp:
+        with open(self.manifest, 'rb') as fp:
             first_line = fp.readline()
-        return (first_line !=
-                '# file GENERATED by distutils, do NOT edit\n'.encode())
+        return first_line != b'# file GENERATED by distutils, do NOT edit\n'
 
     def read_manifest(self):
         """Read the manifest file (named by 'self.manifest') and use it to
@@ -207,46 +191,14 @@ class sdist(orig.sdist):
         manifest = open(self.manifest, 'rb')
         for line in manifest:
             # The manifest must contain UTF-8. See #303.
-            if not six.PY2:
-                try:
-                    line = line.decode('UTF-8')
-                except UnicodeDecodeError:
-                    log.warn("%r not UTF-8 decodable -- skipping" % line)
-                    continue
+            try:
+                line = line.decode('UTF-8')
+            except UnicodeDecodeError:
+                log.warn("%r not UTF-8 decodable -- skipping" % line)
+                continue
             # ignore comments and blank lines
             line = line.strip()
             if line.startswith('#') or not line:
                 continue
             self.filelist.append(line)
         manifest.close()
-
-    def check_license(self):
-        """Checks if license_file' or 'license_files' is configured and adds any
-        valid paths to 'self.filelist'.
-        """
-        files = ordered_set.OrderedSet()
-
-        opts = self.distribution.get_option_dict('metadata')
-
-        # ignore the source of the value
-        _, license_file = opts.get('license_file', (None, None))
-
-        if license_file is None:
-            log.debug("'license_file' option was not specified")
-        else:
-            files.add(license_file)
-
-        try:
-            files.update(self.distribution.metadata.license_files)
-        except TypeError:
-            log.warn("warning: 'license_files' option is malformed")
-
-        for f in files:
-            if not os.path.exists(f):
-                log.warn(
-                    "warning: Failed to find the configured license file '%s'",
-                    f)
-                files.remove(f)
-
-        self.filelist.extend(files)
@@ -3,8 +3,7 @@ from distutils import log
 from distutils.errors import DistutilsOptionError
 import distutils
 import os
-
-from setuptools.extern.six.moves import configparser
+import configparser
 
 from setuptools import Command
 
@@ -19,15 +18,11 @@ def config_file(kind="local"):
     if kind == 'local':
         return 'setup.cfg'
     if kind == 'global':
-        return os.path.join(
-            os.path.dirname(distutils.__file__), 'distutils.cfg'
-        )
+        return os.path.join(os.path.dirname(distutils.__file__), 'distutils.cfg')
     if kind == 'user':
         dot = os.name == 'posix' and '.' or ''
         return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
-    raise ValueError(
-        "config_file() type must be 'local', 'global', or 'user'", kind
-    )
+    raise ValueError("config_file() type must be 'local', 'global', or 'user'", kind)
 
 
 def edit_config(filename, settings, dry_run=False):
@@ -40,6 +35,7 @@ def edit_config(filename, settings, dry_run=False):
     """
     log.debug("Reading configuration from %s", filename)
     opts = configparser.RawConfigParser()
+    opts.optionxform = lambda x: x
     opts.read([filename])
     for section, options in settings.items():
         if options is None:
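The one-line `opts.optionxform = lambda x: x` addition matters because `RawConfigParser` lower-cases option names by default, which would corrupt case-sensitive keys when `setopt` rewrites `setup.cfg`:

    import configparser

    opts = configparser.RawConfigParser()
    opts.optionxform = lambda x: x  # preserve case instead of lower-casing
    opts.read_string("[metadata]\nAuthor_Email = dev@example.com\n")
    print(opts.options("metadata"))  # ['Author_Email'], not ['author_email']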
@ -51,19 +47,16 @@ def edit_config(filename, settings, dry_run=False):
opts.add_section(section) opts.add_section(section)
for option, value in options.items(): for option, value in options.items():
if value is None: if value is None:
log.debug( log.debug("Deleting %s.%s from %s", section, option, filename)
"Deleting %s.%s from %s",
section, option, filename
)
opts.remove_option(section, option) opts.remove_option(section, option)
if not opts.options(section): if not opts.options(section):
log.info("Deleting empty [%s] section from %s", log.info(
section, filename) "Deleting empty [%s] section from %s", section, filename
)
opts.remove_section(section) opts.remove_section(section)
else: else:
log.debug( log.debug(
"Setting %s.%s to %r in %s", "Setting %s.%s to %r in %s", section, option, value, filename
section, option, value, filename
) )
opts.set(section, option, value) opts.set(section, option, value)
@@ -77,16 +70,14 @@ class option_base(Command):
"""Abstract base class for commands that mess with config files""" """Abstract base class for commands that mess with config files"""
user_options = [ user_options = [
('global-config', 'g', ('global-config', 'g', "save options to the site-wide distutils.cfg file"),
"save options to the site-wide distutils.cfg file"), ('user-config', 'u', "save options to the current user's pydistutils.cfg file"),
('user-config', 'u', ('filename=', 'f', "configuration file to use (default=setup.cfg)"),
"save options to the current user's pydistutils.cfg file"),
('filename=', 'f',
"configuration file to use (default=setup.cfg)"),
] ]
boolean_options = [ boolean_options = [
'global-config', 'user-config', 'global-config',
'user-config',
] ]
def initialize_options(self): def initialize_options(self):
@@ -106,10 +97,9 @@ class option_base(Command):
filenames.append(config_file('local')) filenames.append(config_file('local'))
if len(filenames) > 1: if len(filenames) > 1:
raise DistutilsOptionError( raise DistutilsOptionError(
"Must specify only one configuration file option", "Must specify only one configuration file option", filenames
filenames
) )
self.filename, = filenames (self.filename,) = filenames
class setopt(option_base): class setopt(option_base):
@@ -142,8 +132,7 @@ class setopt(option_base):
def run(self): def run(self):
edit_config( edit_config(
self.filename, { self.filename,
self.command: {self.option.replace('-', '_'): self.set_value} {self.command: {self.option.replace('-', '_'): self.set_value}},
}, self.dry_run,
self.dry_run
) )
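The new `opts.optionxform = lambda x: x` line in edit_config() stops RawConfigParser from lower-casing option names, so names are written back exactly as given. A small demonstration of the stdlib behavior being overridden:

import configparser

lowering = configparser.RawConfigParser()  # default optionxform is str.lower
lowering.read_string('[metadata]\nLicense_File = LICENSE\n')
assert lowering.options('metadata') == ['license_file']

preserving = configparser.RawConfigParser()
preserving.optionxform = lambda x: x  # identity, as in edit_config() above
preserving.read_string('[metadata]\nLicense_File = LICENSE\n')
assert preserving.options('metadata') == ['License_File']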

View file

@@ -8,20 +8,22 @@ from distutils.errors import DistutilsError, DistutilsOptionError
from distutils import log from distutils import log
from unittest import TestLoader from unittest import TestLoader
from setuptools.extern import six from pkg_resources import (
from setuptools.extern.six.moves import map, filter resource_listdir,
resource_exists,
from pkg_resources import (resource_listdir, resource_exists, normalize_path, normalize_path,
working_set, _namespace_packages, evaluate_marker, working_set,
add_activation_listener, require, EntryPoint) evaluate_marker,
add_activation_listener,
require,
)
from .._importlib import metadata
from setuptools import Command from setuptools import Command
from .build_py import _unique_everseen from setuptools.extern.more_itertools import unique_everseen
from setuptools.extern.jaraco.functools import pass_none
__metaclass__ = type
class ScanningLoader(TestLoader): class ScanningLoader(TestLoader):
def __init__(self): def __init__(self):
TestLoader.__init__(self) TestLoader.__init__(self)
self._visited = set() self._visited = set()
@@ -78,8 +80,11 @@ class test(Command):
user_options = [ user_options = [
('test-module=', 'm', "Run 'test_suite' in specified module"), ('test-module=', 'm', "Run 'test_suite' in specified module"),
('test-suite=', 's', (
"Run single test, case or suite (e.g. 'module.test_suite')"), 'test-suite=',
's',
"Run single test, case or suite (e.g. 'module.test_suite')",
),
('test-runner=', 'r', "Test runner to use"), ('test-runner=', 'r', "Test runner to use"),
] ]
@@ -90,7 +95,6 @@ class test(Command):
self.test_runner = None self.test_runner = None
def finalize_options(self): def finalize_options(self):
if self.test_suite and self.test_module: if self.test_suite and self.test_module:
msg = "You may specify a module or a suite, but not both" msg = "You may specify a module or a suite, but not both"
raise DistutilsOptionError(msg) raise DistutilsOptionError(msg)
@@ -113,7 +117,7 @@ class test(Command):
return list(self._test_args()) return list(self._test_args())
def _test_args(self): def _test_args(self):
if not self.test_suite and sys.version_info >= (2, 7): if not self.test_suite:
yield 'discover' yield 'discover'
if self.verbose: if self.verbose:
yield '--verbose' yield '--verbose'
@@ -128,31 +132,12 @@ class test(Command):
func() func()
@contextlib.contextmanager @contextlib.contextmanager
def project_on_sys_path(self, include_dists=[]): def project_on_sys_path(self, include_dists=()):
with_2to3 = not six.PY2 and getattr(self.distribution, 'use_2to3', False) self.run_command('egg_info')
if with_2to3: # Build extensions in-place
# If we run 2to3 we can not do this inplace: self.reinitialize_command('build_ext', inplace=1)
self.run_command('build_ext')
# Ensure metadata is up-to-date
self.reinitialize_command('build_py', inplace=0)
self.run_command('build_py')
bpy_cmd = self.get_finalized_command("build_py")
build_path = normalize_path(bpy_cmd.build_lib)
# Build extensions
self.reinitialize_command('egg_info', egg_base=build_path)
self.run_command('egg_info')
self.reinitialize_command('build_ext', inplace=0)
self.run_command('build_ext')
else:
# Without 2to3 inplace works fine:
self.run_command('egg_info')
# Build extensions in-place
self.reinitialize_command('build_ext', inplace=1)
self.run_command('build_ext')
ei_cmd = self.get_finalized_command("egg_info") ei_cmd = self.get_finalized_command("egg_info")
@@ -187,7 +172,7 @@ class test(Command):
orig_pythonpath = os.environ.get('PYTHONPATH', nothing) orig_pythonpath = os.environ.get('PYTHONPATH', nothing)
current_pythonpath = os.environ.get('PYTHONPATH', '') current_pythonpath = os.environ.get('PYTHONPATH', '')
try: try:
prefix = os.pathsep.join(_unique_everseen(paths)) prefix = os.pathsep.join(unique_everseen(paths))
to_join = filter(None, [prefix, current_pythonpath]) to_join = filter(None, [prefix, current_pythonpath])
new_path = os.pathsep.join(to_join) new_path = os.pathsep.join(to_join)
if new_path: if new_path:
@@ -208,7 +193,8 @@ class test(Command):
ir_d = dist.fetch_build_eggs(dist.install_requires) ir_d = dist.fetch_build_eggs(dist.install_requires)
tr_d = dist.fetch_build_eggs(dist.tests_require or []) tr_d = dist.fetch_build_eggs(dist.tests_require or [])
er_d = dist.fetch_build_eggs( er_d = dist.fetch_build_eggs(
v for k, v in dist.extras_require.items() v
for k, v in dist.extras_require.items()
if k.startswith(':') and evaluate_marker(k[1:]) if k.startswith(':') and evaluate_marker(k[1:])
) )
return itertools.chain(ir_d, tr_d, er_d) return itertools.chain(ir_d, tr_d, er_d)
@@ -237,23 +223,10 @@ class test(Command):
self.run_tests() self.run_tests()
def run_tests(self): def run_tests(self):
# Purge modules under test from sys.modules. The test loader will
# re-import them from the build location. Required when 2to3 is used
# with namespace packages.
if not six.PY2 and getattr(self.distribution, 'use_2to3', False):
module = self.test_suite.split('.')[0]
if module in _namespace_packages:
del_modules = []
if module in sys.modules:
del_modules.append(module)
module += '.'
for name in sys.modules:
if name.startswith(module):
del_modules.append(name)
list(map(sys.modules.__delitem__, del_modules))
test = unittest.main( test = unittest.main(
None, None, self._argv, None,
None,
self._argv,
testLoader=self._resolve_as_ep(self.test_loader), testLoader=self._resolve_as_ep(self.test_loader),
testRunner=self._resolve_as_ep(self.test_runner), testRunner=self._resolve_as_ep(self.test_runner),
exit=False, exit=False,
@@ -268,12 +241,10 @@ class test(Command):
return ['unittest'] + self.test_args return ['unittest'] + self.test_args
@staticmethod @staticmethod
@pass_none
def _resolve_as_ep(val): def _resolve_as_ep(val):
""" """
Load the indicated attribute value, called, as if it were Load the indicated attribute value, called, as if it were
specified as an entry point. specified as an entry point.
""" """
if val is None: return metadata.EntryPoint(value=val, name=None, group=None).load()()
return
parsed = EntryPoint.parse("x=" + val)
return parsed.resolve()()
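The rewritten _resolve_as_ep() leans on importlib.metadata instead of pkg_resources.EntryPoint, with the pass_none decorator short-circuiting a None value. A rough equivalent, using 'unittest:TestLoader' purely as an illustrative entry-point value:

from importlib import metadata

ep = metadata.EntryPoint(name=None, value='unittest:TestLoader', group=None)
loader = ep.load()()  # import the target, then call it -> a TestLoader instance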

View file

@@ -1,31 +1,29 @@
# -*- coding: utf-8 -*-
"""upload_docs """upload_docs
Implements a Distutils 'upload_docs' subcommand (upload documentation to Implements a Distutils 'upload_docs' subcommand (upload documentation to
PyPI's pythonhosted.org). sites other than PyPI such as devpi).
""" """
from base64 import standard_b64encode from base64 import standard_b64encode
from distutils import log from distutils import log
from distutils.errors import DistutilsOptionError from distutils.errors import DistutilsOptionError
import os import os
import socket
import zipfile import zipfile
import tempfile import tempfile
import shutil import shutil
import itertools import itertools
import functools import functools
import http.client
import urllib.parse
from setuptools.extern import six from .._importlib import metadata
from setuptools.extern.six.moves import http_client, urllib from ..warnings import SetuptoolsDeprecationWarning
from pkg_resources import iter_entry_points
from .upload import upload from .upload import upload
def _encode(s): def _encode(s):
errors = 'strict' if six.PY2 else 'surrogateescape' return s.encode('utf-8', 'surrogateescape')
return s.encode('utf-8', errors)
class upload_docs(upload): class upload_docs(upload):
@@ -33,21 +31,24 @@ class upload_docs(upload):
# supported by Warehouse (and won't be). # supported by Warehouse (and won't be).
DEFAULT_REPOSITORY = 'https://pypi.python.org/pypi/' DEFAULT_REPOSITORY = 'https://pypi.python.org/pypi/'
description = 'Upload documentation to PyPI' description = 'Upload documentation to sites other than PyPI such as devpi'
user_options = [ user_options = [
('repository=', 'r', (
"url of repository [default: %s]" % upload.DEFAULT_REPOSITORY), 'repository=',
('show-response', None, 'r',
'display full response text from server'), "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY,
),
('show-response', None, 'display full response text from server'),
('upload-dir=', None, 'directory to upload'), ('upload-dir=', None, 'directory to upload'),
] ]
boolean_options = upload.boolean_options boolean_options = upload.boolean_options
def has_sphinx(self): def has_sphinx(self):
if self.upload_dir is None: return bool(
for ep in iter_entry_points('distutils.commands', 'build_sphinx'): self.upload_dir is None
return True and metadata.entry_points(group='distutils.commands', name='build_sphinx')
)
sub_commands = [('build_sphinx', has_sphinx)] sub_commands = [('build_sphinx', has_sphinx)]
@@ -57,19 +58,21 @@ class upload_docs(upload):
self.target_dir = None self.target_dir = None
def finalize_options(self): def finalize_options(self):
log.warn(
"Upload_docs command is deprecated. Use Read the Docs "
"(https://readthedocs.org) instead."
)
upload.finalize_options(self) upload.finalize_options(self)
if self.upload_dir is None: if self.upload_dir is None:
if self.has_sphinx(): if self.has_sphinx():
build_sphinx = self.get_finalized_command('build_sphinx') build_sphinx = self.get_finalized_command('build_sphinx')
self.target_dir = build_sphinx.builder_target_dir self.target_dir = dict(build_sphinx.builder_target_dirs)['html']
else: else:
build = self.get_finalized_command('build') build = self.get_finalized_command('build')
self.target_dir = os.path.join(build.build_base, 'docs') self.target_dir = os.path.join(build.build_base, 'docs')
else: else:
self.ensure_dirname('upload_dir') self.ensure_dirname('upload_dir')
self.target_dir = self.upload_dir self.target_dir = self.upload_dir
if 'pypi.python.org' in self.repository:
log.warn("Upload_docs command is deprecated. Use RTD instead.")
self.announce('Using upload directory %s' % self.target_dir) self.announce('Using upload directory %s' % self.target_dir)
def create_zipfile(self, filename): def create_zipfile(self, filename):
@@ -82,13 +85,23 @@ class upload_docs(upload):
raise DistutilsOptionError(tmpl % self.target_dir) raise DistutilsOptionError(tmpl % self.target_dir)
for name in files: for name in files:
full = os.path.join(root, name) full = os.path.join(root, name)
relative = root[len(self.target_dir):].lstrip(os.path.sep) relative = root[len(self.target_dir) :].lstrip(os.path.sep)
dest = os.path.join(relative, name) dest = os.path.join(relative, name)
zip_file.write(full, dest) zip_file.write(full, dest)
finally: finally:
zip_file.close() zip_file.close()
def run(self): def run(self):
SetuptoolsDeprecationWarning.emit(
"Deprecated command",
"""
upload_docs is deprecated and will be removed in a future version.
Instead, use tools like devpi and Read the Docs; or lower level tools like
httpie and curl to interact directly with your hosting service API.
""",
due_date=(2023, 9, 26), # warning introduced on 27 Jul 2022
)
# Run sub commands # Run sub commands
for cmd_name in self.get_sub_commands(): for cmd_name in self.get_sub_commands():
self.run_command(cmd_name) self.run_command(cmd_name)
@@ -127,10 +140,13 @@ class upload_docs(upload):
""" """
Build up the MIME payload for the POST data Build up the MIME payload for the POST data
""" """
boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
sep_boundary = b'\n--' + boundary sep_boundary = b'\n--' + boundary.encode('ascii')
end_boundary = sep_boundary + b'--' end_boundary = sep_boundary + b'--'
end_items = end_boundary, b"\n", end_items = (
end_boundary,
b"\n",
)
builder = functools.partial( builder = functools.partial(
cls._build_part, cls._build_part,
sep_boundary=sep_boundary, sep_boundary=sep_boundary,
@@ -138,7 +154,7 @@ class upload_docs(upload):
part_groups = map(builder, data.items()) part_groups = map(builder, data.items())
parts = itertools.chain.from_iterable(part_groups) parts = itertools.chain.from_iterable(part_groups)
body_items = itertools.chain(parts, end_items) body_items = itertools.chain(parts, end_items)
content_type = 'multipart/form-data; boundary=%s' % boundary.decode('ascii') content_type = 'multipart/form-data; boundary=%s' % boundary
return b''.join(body_items), content_type return b''.join(body_items), content_type
def upload_file(self, filename): def upload_file(self, filename):
@@ -152,9 +168,7 @@ class upload_docs(upload):
} }
# set up the authentication # set up the authentication
credentials = _encode(self.username + ':' + self.password) credentials = _encode(self.username + ':' + self.password)
credentials = standard_b64encode(credentials) credentials = standard_b64encode(credentials).decode('ascii')
if not six.PY2:
credentials = credentials.decode('ascii')
auth = "Basic " + credentials auth = "Basic " + credentials
body, ct = self._build_multipart(data) body, ct = self._build_multipart(data)
@@ -165,13 +179,14 @@ class upload_docs(upload):
# build the Request # build the Request
# We can't use urllib2 since we need to send the Basic # We can't use urllib2 since we need to send the Basic
# auth right with the first request # auth right with the first request
schema, netloc, url, params, query, fragments = \ schema, netloc, url, params, query, fragments = urllib.parse.urlparse(
urllib.parse.urlparse(self.repository) self.repository
)
assert not params and not query and not fragments assert not params and not query and not fragments
if schema == 'http': if schema == 'http':
conn = http_client.HTTPConnection(netloc) conn = http.client.HTTPConnection(netloc)
elif schema == 'https': elif schema == 'https':
conn = http_client.HTTPSConnection(netloc) conn = http.client.HTTPSConnection(netloc)
else: else:
raise AssertionError("unsupported schema " + schema) raise AssertionError("unsupported schema " + schema)
@@ -185,7 +200,7 @@ class upload_docs(upload):
conn.putheader('Authorization', auth) conn.putheader('Authorization', auth)
conn.endheaders() conn.endheaders()
conn.send(body) conn.send(body)
except socket.error as e: except OSError as e:
self.announce(str(e), log.ERROR) self.announce(str(e), log.ERROR)
return return
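The credentials handling above no longer needs the six branch because the bytes-to-str decode is unconditional on Python 3. A sketch of the resulting Basic-auth header, with 'user:secret' as a placeholder:

from base64 import standard_b64encode

credentials = 'user:secret'.encode('utf-8', 'surrogateescape')
auth = 'Basic ' + standard_b64encode(credentials).decode('ascii')
assert auth == 'Basic dXNlcjpzZWNyZXQ='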

View file

@@ -1,659 +0,0 @@
from __future__ import absolute_import, unicode_literals
import io
import os
import sys
import warnings
import functools
from collections import defaultdict
from functools import partial
from functools import wraps
from importlib import import_module
from distutils.errors import DistutilsOptionError, DistutilsFileError
from setuptools.extern.packaging.version import LegacyVersion, parse
from setuptools.extern.packaging.specifiers import SpecifierSet
from setuptools.extern.six import string_types, PY3
__metaclass__ = type
def read_configuration(
filepath, find_others=False, ignore_option_errors=False):
"""Read given configuration file and returns options from it as a dict.
:param str|unicode filepath: Path to configuration file
to get options from.
:param bool find_others: Whether to search for other configuration files
which could be on in various places.
:param bool ignore_option_errors: Whether to silently ignore
options, values of which could not be resolved (e.g. due to exceptions
in directives such as file:, attr:, etc.).
If False exceptions are propagated as expected.
:rtype: dict
"""
from setuptools.dist import Distribution, _Distribution
filepath = os.path.abspath(filepath)
if not os.path.isfile(filepath):
raise DistutilsFileError(
'Configuration file %s does not exist.' % filepath)
current_directory = os.getcwd()
os.chdir(os.path.dirname(filepath))
try:
dist = Distribution()
filenames = dist.find_config_files() if find_others else []
if filepath not in filenames:
filenames.append(filepath)
_Distribution.parse_config_files(dist, filenames=filenames)
handlers = parse_configuration(
dist, dist.command_options,
ignore_option_errors=ignore_option_errors)
finally:
os.chdir(current_directory)
return configuration_to_dict(handlers)
def _get_option(target_obj, key):
"""
Given a target object and option key, get that option from
the target object, either through a get_{key} method or
from an attribute directly.
"""
getter_name = 'get_{key}'.format(**locals())
by_attribute = functools.partial(getattr, target_obj, key)
getter = getattr(target_obj, getter_name, by_attribute)
return getter()
def configuration_to_dict(handlers):
"""Returns configuration data gathered by given handlers as a dict.
:param list[ConfigHandler] handlers: Handlers list,
usually from parse_configuration()
:rtype: dict
"""
config_dict = defaultdict(dict)
for handler in handlers:
for option in handler.set_options:
value = _get_option(handler.target_obj, option)
config_dict[handler.section_prefix][option] = value
return config_dict
def parse_configuration(
distribution, command_options, ignore_option_errors=False):
"""Performs additional parsing of configuration options
for a distribution.
Returns a list of used option handlers.
:param Distribution distribution:
:param dict command_options:
:param bool ignore_option_errors: Whether to silently ignore
options, values of which could not be resolved (e.g. due to exceptions
in directives such as file:, attr:, etc.).
If False exceptions are propagated as expected.
:rtype: list
"""
options = ConfigOptionsHandler(
distribution, command_options, ignore_option_errors)
options.parse()
meta = ConfigMetadataHandler(
distribution.metadata, command_options, ignore_option_errors,
distribution.package_dir)
meta.parse()
return meta, options
class ConfigHandler:
"""Handles metadata supplied in configuration files."""
section_prefix = None
"""Prefix for config sections handled by this handler.
Must be provided by class heirs.
"""
aliases = {}
"""Options aliases.
For compatibility with various packages. E.g.: d2to1 and pbr.
Note: `-` in keys is replaced with `_` by config parser.
"""
def __init__(self, target_obj, options, ignore_option_errors=False):
sections = {}
section_prefix = self.section_prefix
for section_name, section_options in options.items():
if not section_name.startswith(section_prefix):
continue
section_name = section_name.replace(section_prefix, '').strip('.')
sections[section_name] = section_options
self.ignore_option_errors = ignore_option_errors
self.target_obj = target_obj
self.sections = sections
self.set_options = []
@property
def parsers(self):
"""Metadata item name to parser function mapping."""
raise NotImplementedError(
'%s must provide .parsers property' % self.__class__.__name__)
def __setitem__(self, option_name, value):
unknown = tuple()
target_obj = self.target_obj
# Translate alias into real name.
option_name = self.aliases.get(option_name, option_name)
current_value = getattr(target_obj, option_name, unknown)
if current_value is unknown:
raise KeyError(option_name)
if current_value:
# Already inhabited. Skipping.
return
skip_option = False
parser = self.parsers.get(option_name)
if parser:
try:
value = parser(value)
except Exception:
skip_option = True
if not self.ignore_option_errors:
raise
if skip_option:
return
setter = getattr(target_obj, 'set_%s' % option_name, None)
if setter is None:
setattr(target_obj, option_name, value)
else:
setter(value)
self.set_options.append(option_name)
@classmethod
def _parse_list(cls, value, separator=','):
"""Represents value as a list.
Value is split either by separator (defaults to comma) or by lines.
:param value:
:param separator: List items separator character.
:rtype: list
"""
if isinstance(value, list): # _get_parser_compound case
return value
if '\n' in value:
value = value.splitlines()
else:
value = value.split(separator)
return [chunk.strip() for chunk in value if chunk.strip()]
@classmethod
def _parse_dict(cls, value):
"""Represents value as a dict.
:param value:
:rtype: dict
"""
separator = '='
result = {}
for line in cls._parse_list(value):
key, sep, val = line.partition(separator)
if sep != separator:
raise DistutilsOptionError(
'Unable to parse option value to dict: %s' % value)
result[key.strip()] = val.strip()
return result
@classmethod
def _parse_bool(cls, value):
"""Represents value as boolean.
:param value:
:rtype: bool
"""
value = value.lower()
return value in ('1', 'true', 'yes')
@classmethod
def _exclude_files_parser(cls, key):
"""Returns a parser function to make sure field inputs
are not files.
Parses a value after getting the key so error messages are
more informative.
:param key:
:rtype: callable
"""
def parser(value):
exclude_directive = 'file:'
if value.startswith(exclude_directive):
raise ValueError(
'Only strings are accepted for the {0} field, '
'files are not accepted'.format(key))
return value
return parser
@classmethod
def _parse_file(cls, value):
"""Represents value as a string, allowing including text
from nearest files using `file:` directive.
Directive is sandboxed and won't reach anything outside
directory with setup.py.
Examples:
file: README.rst, CHANGELOG.md, src/file.txt
:param str value:
:rtype: str
"""
include_directive = 'file:'
if not isinstance(value, string_types):
return value
if not value.startswith(include_directive):
return value
spec = value[len(include_directive):]
filepaths = (os.path.abspath(path.strip()) for path in spec.split(','))
return '\n'.join(
cls._read_file(path)
for path in filepaths
if (cls._assert_local(path) or True)
and os.path.isfile(path)
)
@staticmethod
def _assert_local(filepath):
if not filepath.startswith(os.getcwd()):
raise DistutilsOptionError(
'`file:` directive can not access %s' % filepath)
@staticmethod
def _read_file(filepath):
with io.open(filepath, encoding='utf-8') as f:
return f.read()
@classmethod
def _parse_attr(cls, value, package_dir=None):
"""Represents value as a module attribute.
Examples:
attr: package.attr
attr: package.module.attr
:param str value:
:rtype: str
"""
attr_directive = 'attr:'
if not value.startswith(attr_directive):
return value
attrs_path = value.replace(attr_directive, '').strip().split('.')
attr_name = attrs_path.pop()
module_name = '.'.join(attrs_path)
module_name = module_name or '__init__'
parent_path = os.getcwd()
if package_dir:
if attrs_path[0] in package_dir:
# A custom path was specified for the module we want to import
custom_path = package_dir[attrs_path[0]]
parts = custom_path.rsplit('/', 1)
if len(parts) > 1:
parent_path = os.path.join(os.getcwd(), parts[0])
module_name = parts[1]
else:
module_name = custom_path
elif '' in package_dir:
# A custom parent directory was specified for all root modules
parent_path = os.path.join(os.getcwd(), package_dir[''])
sys.path.insert(0, parent_path)
try:
module = import_module(module_name)
value = getattr(module, attr_name)
finally:
sys.path = sys.path[1:]
return value
@classmethod
def _get_parser_compound(cls, *parse_methods):
"""Returns parser function to represents value as a list.
Parses a value applying given methods one after another.
:param parse_methods:
:rtype: callable
"""
def parse(value):
parsed = value
for method in parse_methods:
parsed = method(parsed)
return parsed
return parse
@classmethod
def _parse_section_to_dict(cls, section_options, values_parser=None):
"""Parses section options into a dictionary.
Optionally applies a given parser to values.
:param dict section_options:
:param callable values_parser:
:rtype: dict
"""
value = {}
values_parser = values_parser or (lambda val: val)
for key, (_, val) in section_options.items():
value[key] = values_parser(val)
return value
def parse_section(self, section_options):
"""Parses configuration file section.
:param dict section_options:
"""
for (name, (_, value)) in section_options.items():
try:
self[name] = value
except KeyError:
pass # Keep silent for a new option may appear anytime.
def parse(self):
"""Parses configuration file items from one
or more related sections.
"""
for section_name, section_options in self.sections.items():
method_postfix = ''
if section_name: # [section.option] variant
method_postfix = '_%s' % section_name
section_parser_method = getattr(
self,
# Dots in section names are translated into dunderscores.
('parse_section%s' % method_postfix).replace('.', '__'),
None)
if section_parser_method is None:
raise DistutilsOptionError(
'Unsupported distribution option section: [%s.%s]' % (
self.section_prefix, section_name))
section_parser_method(section_options)
def _deprecated_config_handler(self, func, msg, warning_class):
""" this function will wrap around parameters that are deprecated
:param msg: deprecation message
:param warning_class: class of warning exception to be raised
:param func: function to be wrapped around
"""
@wraps(func)
def config_handler(*args, **kwargs):
warnings.warn(msg, warning_class)
return func(*args, **kwargs)
return config_handler
class ConfigMetadataHandler(ConfigHandler):
section_prefix = 'metadata'
aliases = {
'home_page': 'url',
'summary': 'description',
'classifier': 'classifiers',
'platform': 'platforms',
}
strict_mode = False
"""We need to keep it loose, to be partially compatible with
`pbr` and `d2to1` packages, which also use the `metadata` section.
"""
def __init__(self, target_obj, options, ignore_option_errors=False,
package_dir=None):
super(ConfigMetadataHandler, self).__init__(target_obj, options,
ignore_option_errors)
self.package_dir = package_dir
@property
def parsers(self):
"""Metadata item name to parser function mapping."""
parse_list = self._parse_list
parse_file = self._parse_file
parse_dict = self._parse_dict
exclude_files_parser = self._exclude_files_parser
return {
'platforms': parse_list,
'keywords': parse_list,
'provides': parse_list,
'requires': self._deprecated_config_handler(
parse_list,
"The requires parameter is deprecated, please use "
"install_requires for runtime dependencies.",
DeprecationWarning),
'obsoletes': parse_list,
'classifiers': self._get_parser_compound(parse_file, parse_list),
'license': exclude_files_parser('license'),
'license_files': parse_list,
'description': parse_file,
'long_description': parse_file,
'version': self._parse_version,
'project_urls': parse_dict,
}
def _parse_version(self, value):
"""Parses `version` option value.
:param value:
:rtype: str
"""
version = self._parse_file(value)
if version != value:
version = version.strip()
# Be strict about versions loaded from file because it's easy to
# accidentally include newlines and other unintended content
if isinstance(parse(version), LegacyVersion):
tmpl = (
'Version loaded from {value} does not '
'comply with PEP 440: {version}'
)
raise DistutilsOptionError(tmpl.format(**locals()))
return version
version = self._parse_attr(value, self.package_dir)
if callable(version):
version = version()
if not isinstance(version, string_types):
if hasattr(version, '__iter__'):
version = '.'.join(map(str, version))
else:
version = '%s' % version
return version
class ConfigOptionsHandler(ConfigHandler):
section_prefix = 'options'
@property
def parsers(self):
"""Metadata item name to parser function mapping."""
parse_list = self._parse_list
parse_list_semicolon = partial(self._parse_list, separator=';')
parse_bool = self._parse_bool
parse_dict = self._parse_dict
return {
'zip_safe': parse_bool,
'use_2to3': parse_bool,
'include_package_data': parse_bool,
'package_dir': parse_dict,
'use_2to3_fixers': parse_list,
'use_2to3_exclude_fixers': parse_list,
'convert_2to3_doctests': parse_list,
'scripts': parse_list,
'eager_resources': parse_list,
'dependency_links': parse_list,
'namespace_packages': parse_list,
'install_requires': parse_list_semicolon,
'setup_requires': parse_list_semicolon,
'tests_require': parse_list_semicolon,
'packages': self._parse_packages,
'entry_points': self._parse_file,
'py_modules': parse_list,
'python_requires': SpecifierSet,
}
def _parse_packages(self, value):
"""Parses `packages` option value.
:param value:
:rtype: list
"""
find_directives = ['find:', 'find_namespace:']
trimmed_value = value.strip()
if trimmed_value not in find_directives:
return self._parse_list(value)
findns = trimmed_value == find_directives[1]
if findns and not PY3:
raise DistutilsOptionError(
'find_namespace: directive is unsupported on Python < 3.3')
# Read function arguments from a dedicated section.
find_kwargs = self.parse_section_packages__find(
self.sections.get('packages.find', {}))
if findns:
from setuptools import find_namespace_packages as find_packages
else:
from setuptools import find_packages
return find_packages(**find_kwargs)
def parse_section_packages__find(self, section_options):
"""Parses `packages.find` configuration file section.
To be used in conjunction with _parse_packages().
:param dict section_options:
"""
section_data = self._parse_section_to_dict(
section_options, self._parse_list)
valid_keys = ['where', 'include', 'exclude']
find_kwargs = dict(
[(k, v) for k, v in section_data.items() if k in valid_keys and v])
where = find_kwargs.get('where')
if where is not None:
find_kwargs['where'] = where[0] # cast list to single val
return find_kwargs
def parse_section_entry_points(self, section_options):
"""Parses `entry_points` configuration file section.
:param dict section_options:
"""
parsed = self._parse_section_to_dict(section_options, self._parse_list)
self['entry_points'] = parsed
def _parse_package_data(self, section_options):
parsed = self._parse_section_to_dict(section_options, self._parse_list)
root = parsed.get('*')
if root:
parsed[''] = root
del parsed['*']
return parsed
def parse_section_package_data(self, section_options):
"""Parses `package_data` configuration file section.
:param dict section_options:
"""
self['package_data'] = self._parse_package_data(section_options)
def parse_section_exclude_package_data(self, section_options):
"""Parses `exclude_package_data` configuration file section.
:param dict section_options:
"""
self['exclude_package_data'] = self._parse_package_data(
section_options)
def parse_section_extras_require(self, section_options):
"""Parses `extras_require` configuration file section.
:param dict section_options:
"""
parse_list = partial(self._parse_list, separator=';')
self['extras_require'] = self._parse_section_to_dict(
section_options, parse_list)
def parse_section_data_files(self, section_options):
"""Parses `data_files` configuration file section.
:param dict section_options:
"""
parsed = self._parse_section_to_dict(section_options, self._parse_list)
self['data_files'] = [(k, v) for k, v in parsed.items()]
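Much of the deleted handler survives elsewhere in setuptools; its core list-parsing rule (split on newlines if any are present, otherwise on the separator, then strip empty chunks) is easy to state in isolation. A stand-alone rendition of that rule, not the shipped code:

def parse_list(value, separator=','):
    parts = value.splitlines() if '\n' in value else value.split(separator)
    return [chunk.strip() for chunk in parts if chunk.strip()]

assert parse_list('docs, tests') == ['docs', 'tests']
assert parse_list('docs\ntests\n') == ['docs', 'tests']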

View file

@@ -1,23 +1,16 @@
from distutils.dep_util import newer_group from ._distutils import _modified
from .warnings import SetuptoolsDeprecationWarning
# yes, this was almost entirely copy-pasted from
# 'newer_pairwise()', this is just another convenience
# function.
def newer_pairwise_group(sources_groups, targets):
"""Walk both arguments in parallel, testing if each source group is newer
than its corresponding target. Returns a pair of lists (sources_groups,
targets) where sources is newer than target, according to the semantics
of 'newer_group()'.
"""
if len(sources_groups) != len(targets):
raise ValueError("'sources_group' and 'targets' must be the same length")
# build a pair of lists (sources_groups, targets) where source is newer def __getattr__(name):
n_sources = [] if name not in ['newer_group', 'newer_pairwise_group']:
n_targets = [] raise AttributeError(name)
for i in range(len(sources_groups)): SetuptoolsDeprecationWarning.emit(
if newer_group(sources_groups[i], targets[i]): "dep_util is Deprecated. Use functions from setuptools.modified instead.",
n_sources.append(sources_groups[i]) "Please use `setuptools.modified` instead of `setuptools.dep_util`.",
n_targets.append(targets[i]) see_url="https://github.com/pypa/setuptools/pull/4069",
due_date=(2024, 5, 21),
return n_sources, n_targets # Warning added in v69.0.0 on 2023/11/20,
# See https://github.com/pypa/setuptools/discussions/4128
)
return getattr(_modified, name)
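The replacement file keeps the old names importable through a module-level __getattr__ (PEP 562), so the deprecated spelling warns and forwards while the documented one stays quiet. Assuming a setuptools release that ships setuptools.modified (v69+, per the comment above):

from setuptools.modified import newer_pairwise_group               # supported path
from setuptools.dep_util import newer_pairwise_group as _old      # same function, plus a warning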

View file

@@ -1,28 +1,25 @@
import sys import sys
import marshal import marshal
import contextlib import contextlib
from distutils.version import StrictVersion import dis
from .py33compat import Bytecode
from .py27compat import find_module, PY_COMPILED, PY_FROZEN, PY_SOURCE
from . import py27compat
__all__ = [ from . import _imp
'Require', 'find_module', 'get_module_constant', 'extract_constant' from ._imp import find_module, PY_COMPILED, PY_FROZEN, PY_SOURCE
] from .extern.packaging.version import Version
__all__ = ['Require', 'find_module', 'get_module_constant', 'extract_constant']
class Require: class Require:
"""A prerequisite to building or installing a distribution""" """A prerequisite to building or installing a distribution"""
def __init__( def __init__(
self, name, requested_version, module, homepage='', self, name, requested_version, module, homepage='', attribute=None, format=None
attribute=None, format=None): ):
if format is None and requested_version is not None: if format is None and requested_version is not None:
format = StrictVersion format = Version
if format is not None: if format is not None:
requested_version = format(requested_version) requested_version = format(requested_version)
@@ -40,8 +37,12 @@ class Require:
def version_ok(self, version): def version_ok(self, version):
"""Is 'version' sufficiently up-to-date?""" """Is 'version' sufficiently up-to-date?"""
return self.attribute is None or self.format is None or \ return (
str(version) != "unknown" and version >= self.requested_version self.attribute is None
or self.format is None
or str(version) != "unknown"
and self.format(version) >= self.requested_version
)
def get_version(self, paths=None, default="unknown"): def get_version(self, paths=None, default="unknown"):
"""Get version number of installed module, 'None', or 'default' """Get version number of installed module, 'None', or 'default'
@@ -79,7 +80,7 @@ class Require:
version = self.get_version(paths) version = self.get_version(paths)
if version is None: if version is None:
return False return False
return self.version_ok(version) return self.version_ok(str(version))
def maybe_close(f): def maybe_close(f):
@@ -87,6 +88,7 @@ def maybe_close(f):
def empty(): def empty():
yield yield
return return
if not f: if not f:
return empty() return empty()
@@ -111,12 +113,12 @@ def get_module_constant(module, symbol, default=-1, paths=None):
f.read(8) # skip magic & date f.read(8) # skip magic & date
code = marshal.load(f) code = marshal.load(f)
elif kind == PY_FROZEN: elif kind == PY_FROZEN:
code = py27compat.get_frozen_object(module, paths) code = _imp.get_frozen_object(module, paths)
elif kind == PY_SOURCE: elif kind == PY_SOURCE:
code = compile(f.read(), path, 'exec') code = compile(f.read(), path, 'exec')
else: else:
# Not something we can parse; we'll have to import it. :( # Not something we can parse; we'll have to import it. :(
imported = py27compat.get_module(module, paths, info) imported = _imp.get_module(module, paths, info)
return getattr(imported, symbol, None) return getattr(imported, symbol, None)
return extract_constant(code, symbol, default) return extract_constant(code, symbol, default)
@@ -140,13 +142,13 @@ def extract_constant(code, symbol, default=-1):
name_idx = list(code.co_names).index(symbol) name_idx = list(code.co_names).index(symbol)
STORE_NAME = 90 STORE_NAME = dis.opmap['STORE_NAME']
STORE_GLOBAL = 97 STORE_GLOBAL = dis.opmap['STORE_GLOBAL']
LOAD_CONST = 100 LOAD_CONST = dis.opmap['LOAD_CONST']
const = default const = default
for byte_code in Bytecode(code): for byte_code in dis.Bytecode(code):
op = byte_code.opcode op = byte_code.opcode
arg = byte_code.arg arg = byte_code.arg
@@ -157,6 +159,8 @@ def extract_constant(code, symbol, default=-1):
else: else:
const = default const = default
return None
def _update_globals(): def _update_globals():
""" """

File diff suppressed because it is too large

View file

@@ -3,10 +3,41 @@
Provides exceptions used by setuptools modules. Provides exceptions used by setuptools modules.
""" """
from distutils.errors import DistutilsError from distutils import errors as _distutils_errors
class RemovedCommandError(DistutilsError, RuntimeError): # Re-export errors from distutils to facilitate the migration to PEP632
ByteCompileError = _distutils_errors.DistutilsByteCompileError
CCompilerError = _distutils_errors.CCompilerError
ClassError = _distutils_errors.DistutilsClassError
CompileError = _distutils_errors.CompileError
ExecError = _distutils_errors.DistutilsExecError
FileError = _distutils_errors.DistutilsFileError
InternalError = _distutils_errors.DistutilsInternalError
LibError = _distutils_errors.LibError
LinkError = _distutils_errors.LinkError
ModuleError = _distutils_errors.DistutilsModuleError
OptionError = _distutils_errors.DistutilsOptionError
PlatformError = _distutils_errors.DistutilsPlatformError
PreprocessError = _distutils_errors.PreprocessError
SetupError = _distutils_errors.DistutilsSetupError
TemplateError = _distutils_errors.DistutilsTemplateError
UnknownFileError = _distutils_errors.UnknownFileError
# The root error class in the hierarchy
BaseError = _distutils_errors.DistutilsError
class InvalidConfigError(OptionError):
"""Error used for invalid configurations."""
class RemovedConfigError(OptionError):
"""Error used for configurations that were deprecated and removed."""
class RemovedCommandError(BaseError, RuntimeError):
"""Error used for commands that have been removed in setuptools. """Error used for commands that have been removed in setuptools.
Since ``setuptools`` is built on ``distutils``, simply removing a command Since ``setuptools`` is built on ``distutils``, simply removing a command
@@ -14,3 +45,22 @@ class RemovedCommandError(DistutilsError, RuntimeError):
error is raised if a command exists in ``distutils`` but has been actively error is raised if a command exists in ``distutils`` but has been actively
removed in ``setuptools``. removed in ``setuptools``.
""" """
class PackageDiscoveryError(BaseError, RuntimeError):
"""Impossible to perform automatic discovery of packages and/or modules.
The current project layout or given discovery options can lead to problems when
scanning the project directory.
Setuptools might also refuse to complete auto-discovery if an error prone condition
is detected (e.g. when a project is organised as a flat-layout but contains
multiple directories that can be taken as top-level packages inside a single
distribution [*]_). In these situations the users are encouraged to be explicit
about which packages to include or to make the discovery parameters more specific.
.. [*] Since multi-package distributions are uncommon it is very likely that the
developers did not intend for all the directories to be packaged, and are just
leaving auxiliary code in the repository top-level, such as maintenance-related
scripts.
"""

View file

@@ -4,8 +4,6 @@ import distutils.core
import distutils.errors import distutils.errors
import distutils.extension import distutils.extension
from setuptools.extern.six.moves import map
from .monkey import get_unpatched from .monkey import get_unpatched
@@ -30,13 +28,106 @@ _Extension = get_unpatched(distutils.core.Extension)
class Extension(_Extension): class Extension(_Extension):
"""Extension that uses '.c' files in place of '.pyx' files""" """
Describes a single extension module.
This means that all source files will be compiled into a single binary file
``<module path>.<suffix>`` (with ``<module path>`` derived from ``name`` and
``<suffix>`` defined by one of the values in
``importlib.machinery.EXTENSION_SUFFIXES``).
In the case ``.pyx`` files are passed as ``sources`` and ``Cython`` is **not**
installed in the build environment, ``setuptools`` may also try to look for the
equivalent ``.cpp`` or ``.c`` files.
:arg str name:
the full name of the extension, including any packages -- ie.
*not* a filename or pathname, but Python dotted name
:arg list[str] sources:
list of source filenames, relative to the distribution root
(where the setup script lives), in Unix form (slash-separated)
for portability. Source files may be C, C++, SWIG (.i),
platform-specific resource files, or whatever else is recognized
by the "build_ext" command as source for a Python extension.
:keyword list[str] include_dirs:
list of directories to search for C/C++ header files (in Unix
form for portability)
:keyword list[tuple[str, str|None]] define_macros:
list of macros to define; each macro is defined using a 2-tuple:
the first item corresponding to the name of the macro and the second
item either a string with its value or None to
define it without a particular value (equivalent of "#define
FOO" in source or -DFOO on Unix C compiler command line)
:keyword list[str] undef_macros:
list of macros to undefine explicitly
:keyword list[str] library_dirs:
list of directories to search for C/C++ libraries at link time
:keyword list[str] libraries:
list of library names (not filenames or paths) to link against
:keyword list[str] runtime_library_dirs:
list of directories to search for C/C++ libraries at run time
(for shared extensions, this is when the extension is loaded).
Setting this will cause an exception during build on Windows
platforms.
:keyword list[str] extra_objects:
list of extra files to link with (eg. object files not implied
by 'sources', static library that must be explicitly specified,
binary resource files, etc.)
:keyword list[str] extra_compile_args:
any extra platform- and compiler-specific information to use
when compiling the source files in 'sources'. For platforms and
compilers where "command line" makes sense, this is typically a
list of command-line arguments, but for other platforms it could
be anything.
:keyword list[str] extra_link_args:
any extra platform- and compiler-specific information to use
when linking object files together to create the extension (or
to create a new static Python interpreter). Similar
interpretation as for 'extra_compile_args'.
:keyword list[str] export_symbols:
list of symbols to be exported from a shared extension. Not
used on all platforms, and not generally necessary for Python
extensions, which typically export exactly one symbol: "init" +
extension_name.
:keyword list[str] swig_opts:
any extra options to pass to SWIG if a source file has the .i
extension.
:keyword list[str] depends:
list of files that the extension depends on
:keyword str language:
extension language (i.e. "c", "c++", "objc"). Will be detected
from the source extensions if not provided.
:keyword bool optional:
specifies that a build failure in the extension should not abort the
build process, but simply not install the failing extension.
:keyword bool py_limited_api:
opt-in flag for the usage of :doc:`Python's limited API <python:c-api/stable>`.
:raises setuptools.errors.PlatformError: if 'runtime_library_dirs' is
specified on Windows. (since v63)
"""
def __init__(self, name, sources, *args, **kw): def __init__(self, name, sources, *args, **kw):
# The *args is needed for compatibility as calls may use positional # The *args is needed for compatibility as calls may use positional
# arguments. py_limited_api may be set only via keyword. # arguments. py_limited_api may be set only via keyword.
self.py_limited_api = kw.pop("py_limited_api", False) self.py_limited_api = kw.pop("py_limited_api", False)
_Extension.__init__(self, name, sources, *args, **kw) super().__init__(name, sources, *args, **kw)
def _convert_pyx_sources_to_lang(self): def _convert_pyx_sources_to_lang(self):
""" """

View file

@@ -1,3 +1,4 @@
import importlib.util
import sys import sys
@@ -20,17 +21,10 @@ class VendorImporter:
yield self.vendor_pkg + '.' yield self.vendor_pkg + '.'
yield '' yield ''
def find_module(self, fullname, path=None): def _module_matches_namespace(self, fullname):
""" """Figure out if the target module is vendored."""
Return self when fullname starts with root_name and the
target module is one vendored through this importer.
"""
root, base, target = fullname.partition(self.root_name + '.') root, base, target = fullname.partition(self.root_name + '.')
if root: return not root and any(map(target.startswith, self.vendored_names))
return
if not any(map(target.startswith, self.vendored_names)):
return
return self
def load_module(self, fullname): def load_module(self, fullname):
""" """
@@ -43,13 +37,6 @@ class VendorImporter:
__import__(extant) __import__(extant)
mod = sys.modules[extant] mod = sys.modules[extant]
sys.modules[fullname] = mod sys.modules[fullname] = mod
# mysterious hack:
# Remove the reference to the extant package/module
# on later Python versions to cause relative imports
# in the vendor package to resolve the same modules
# as those going through this importer.
if sys.version_info >= (3, ):
del sys.modules[extant]
return mod return mod
except ImportError: except ImportError:
pass pass
@@ -61,6 +48,20 @@ class VendorImporter:
"distribution.".format(**locals()) "distribution.".format(**locals())
) )
def create_module(self, spec):
return self.load_module(spec.name)
def exec_module(self, module):
pass
def find_spec(self, fullname, path=None, target=None):
"""Return a module spec for vendored names."""
return (
importlib.util.spec_from_loader(fullname, self)
if self._module_matches_namespace(fullname)
else None
)
def install(self): def install(self):
""" """
Install this importer into sys.meta_path if not already present. Install this importer into sys.meta_path if not already present.
@@ -69,5 +70,15 @@ class VendorImporter:
sys.meta_path.append(self) sys.meta_path.append(self)
names = 'six', 'packaging', 'pyparsing', 'ordered_set', names = (
'packaging',
'ordered_set',
'more_itertools',
'importlib_metadata',
'zipp',
'importlib_resources',
'jaraco',
'typing_extensions',
'tomli',
)
VendorImporter(__name__, names, 'setuptools._vendor').install() VendorImporter(__name__, names, 'setuptools._vendor').install()
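The importer now speaks the modern finder protocol: find_spec() answers for vendored names and the loader half supplies create_module()/exec_module(). A bare-bones finder in the same shape, with 'my_alias' and the json target purely illustrative:

import importlib.util
import sys

class AliasFinder:
    def find_spec(self, fullname, path=None, target=None):
        if fullname != 'my_alias':
            return None  # not ours; let other finders try
        return importlib.util.spec_from_loader(fullname, self)

    def create_module(self, spec):
        __import__('json')
        return sys.modules['json']  # hand back an extant module, as load_module() above does

    def exec_module(self, module):
        pass  # already initialized; nothing to execute

sys.meta_path.append(AliasFinder())
import my_alias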

View file

@@ -47,6 +47,8 @@ def iglob(pathname, recursive=False):
def _iglob(pathname, recursive): def _iglob(pathname, recursive):
dirname, basename = os.path.split(pathname) dirname, basename = os.path.split(pathname)
glob_in_dir = glob2 if recursive and _isrecursive(basename) else glob1
if not has_magic(pathname): if not has_magic(pathname):
if basename: if basename:
if os.path.lexists(pathname): if os.path.lexists(pathname):
@@ -56,13 +58,9 @@ def _iglob(pathname, recursive):
if os.path.isdir(dirname): if os.path.isdir(dirname):
yield pathname yield pathname
return return
if not dirname: if not dirname:
if recursive and _isrecursive(basename): yield from glob_in_dir(dirname, basename)
for x in glob2(dirname, basename):
yield x
else:
for x in glob1(dirname, basename):
yield x
return return
# `os.path.split()` returns the argument itself as a dirname if it is a # `os.path.split()` returns the argument itself as a dirname if it is a
# drive or UNC path. Prevent an infinite recursion if a drive or UNC path # drive or UNC path. Prevent an infinite recursion if a drive or UNC path
@@ -71,12 +69,7 @@ def _iglob(pathname, recursive):
dirs = _iglob(dirname, recursive) dirs = _iglob(dirname, recursive)
else: else:
dirs = [dirname] dirs = [dirname]
if has_magic(basename): if not has_magic(basename):
if recursive and _isrecursive(basename):
glob_in_dir = glob2
else:
glob_in_dir = glob1
else:
glob_in_dir = glob0 glob_in_dir = glob0
for dirname in dirs: for dirname in dirs:
for name in glob_in_dir(dirname, basename): for name in glob_in_dir(dirname, basename):
@@ -120,8 +113,7 @@ def glob0(dirname, basename):
def glob2(dirname, pattern): def glob2(dirname, pattern):
assert _isrecursive(pattern) assert _isrecursive(pattern)
yield pattern[:0] yield pattern[:0]
for x in _rlistdir(dirname): yield from _rlistdir(dirname)
yield x
# Recursively yields relative pathnames inside a literal directory. # Recursively yields relative pathnames inside a literal directory.
@@ -133,7 +125,7 @@ def _rlistdir(dirname):
dirname = os.curdir dirname = os.curdir
try: try:
names = os.listdir(dirname) names = os.listdir(dirname)
except os.error: except OSError:
return return
for x in names: for x in names:
yield x yield x
@@ -162,13 +154,12 @@ def _isrecursive(pattern):
def escape(pathname): def escape(pathname):
"""Escape all special characters. """Escape all special characters."""
"""
# Escaping is done by wrapping any of "*?[" between square brackets. # Escaping is done by wrapping any of "*?[" between square brackets.
# Metacharacters do not work in the drive part and shouldn't be escaped. # Metacharacters do not work in the drive part and shouldn't be escaped.
drive, pathname = os.path.splitdrive(pathname) drive, pathname = os.path.splitdrive(pathname)
if isinstance(pathname, bytes): if isinstance(pathname, bytes):
pathname = magic_check_bytes.sub(br'[\1]', pathname) pathname = magic_check_bytes.sub(rb'[\1]', pathname)
else: else:
pathname = magic_check.sub(r'[\1]', pathname) pathname = magic_check.sub(r'[\1]', pathname)
return drive + pathname return drive + pathname
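The escape() docstring reflow above leaves the behavior untouched: each glob metacharacter is wrapped in brackets so it matches itself. The substitution, using the same pattern the module defines:

import re

magic_check = re.compile('([*?[])')
assert magic_check.sub(r'[\1]', 'file[1]?.txt') == 'file[[]1][?].txt'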

Binary file not shown.

Binary file not shown.

Binary file not shown.

View file

@@ -2,77 +2,52 @@ import glob
import os import os
import subprocess import subprocess
import sys import sys
import tempfile
from distutils import log from distutils import log
from distutils.errors import DistutilsError from distutils.errors import DistutilsError
from functools import partial
import pkg_resources from . import _reqs
from setuptools.command.easy_install import easy_install from .wheel import Wheel
from setuptools.extern import six from .warnings import SetuptoolsDeprecationWarning
from setuptools.wheel import Wheel
from .py31compat import TemporaryDirectory
def _fixup_find_links(find_links): def _fixup_find_links(find_links):
"""Ensure find-links option end-up being a list of strings.""" """Ensure find-links option end-up being a list of strings."""
if isinstance(find_links, six.string_types): if isinstance(find_links, str):
return find_links.split() return find_links.split()
assert isinstance(find_links, (tuple, list)) assert isinstance(find_links, (tuple, list))
return find_links return find_links
def _legacy_fetch_build_egg(dist, req):
"""Fetch an egg needed for building.
Legacy path using EasyInstall.
"""
tmp_dist = dist.__class__({'script_args': ['easy_install']})
opts = tmp_dist.get_option_dict('easy_install')
opts.clear()
opts.update(
(k, v)
for k, v in dist.get_option_dict('easy_install').items()
if k in (
# don't use any other settings
'find_links', 'site_dirs', 'index_url',
'optimize', 'site_dirs', 'allow_hosts',
))
if dist.dependency_links:
links = dist.dependency_links[:]
if 'find_links' in opts:
links = _fixup_find_links(opts['find_links'][1]) + links
opts['find_links'] = ('setup', links)
install_dir = dist.get_egg_cache_dir()
cmd = easy_install(
tmp_dist, args=["x"], install_dir=install_dir,
exclude_scripts=True,
always_copy=False, build_directory=None, editable=False,
upgrade=False, multi_version=True, no_report=True, user=False
)
cmd.ensure_finalized()
return cmd.easy_install(req)
def fetch_build_egg(dist, req): def fetch_build_egg(dist, req):
"""Fetch an egg needed for building. """Fetch an egg needed for building.
Use pip/wheel to fetch/build a wheel.""" Use pip/wheel to fetch/build a wheel."""
# Check pip is available. _DeprecatedInstaller.emit()
try: _warn_wheel_not_available(dist)
pkg_resources.get_distribution('pip') return _fetch_build_egg_no_warn(dist, req)
except pkg_resources.DistributionNotFound:
dist.announce(
'WARNING: The pip package is not available, falling back ' def _fetch_build_eggs(dist, requires):
'to EasyInstall for handling setup_requires/test_requires; ' import pkg_resources # Delay import to avoid unnecessary side-effects
'this is deprecated and will be removed in a future version.'
, log.WARN _DeprecatedInstaller.emit(stacklevel=3)
) _warn_wheel_not_available(dist)
return _legacy_fetch_build_egg(dist, req)
# Warn if wheel is not. resolved_dists = pkg_resources.working_set.resolve(
try: _reqs.parse(requires, pkg_resources.Requirement), # required for compatibility
pkg_resources.get_distribution('wheel') installer=partial(_fetch_build_egg_no_warn, dist), # avoid warning twice
except pkg_resources.DistributionNotFound: replace_conflicting=True,
dist.announce('WARNING: The wheel package is not available.', log.WARN) )
for dist in resolved_dists:
pkg_resources.working_set.add(dist, replace=True)
return resolved_dists
def _fetch_build_egg_no_warn(dist, req): # noqa: C901 # is too complex (16) # FIXME
import pkg_resources # Delay import to avoid unnecessary side-effects
# Ignore environment markers; if supplied, it is required. # Ignore environment markers; if supplied, it is required.
req = strip_marker(req) req = strip_marker(req)
# Take easy_install options into account, but do not override relevant # Take easy_install options into account, but do not override relevant
@@ -80,22 +55,20 @@ def fetch_build_egg(dist, req):
# take precedence. # take precedence.
opts = dist.get_option_dict('easy_install') opts = dist.get_option_dict('easy_install')
if 'allow_hosts' in opts: if 'allow_hosts' in opts:
raise DistutilsError('the `allow-hosts` option is not supported ' raise DistutilsError(
'when using pip to install requirements.') 'the `allow-hosts` option is not supported '
if 'PIP_QUIET' in os.environ or 'PIP_VERBOSE' in os.environ: 'when using pip to install requirements.'
quiet = False )
else: quiet = 'PIP_QUIET' not in os.environ and 'PIP_VERBOSE' not in os.environ
quiet = True
if 'PIP_INDEX_URL' in os.environ: if 'PIP_INDEX_URL' in os.environ:
index_url = None index_url = None
elif 'index_url' in opts: elif 'index_url' in opts:
index_url = opts['index_url'][1] index_url = opts['index_url'][1]
else: else:
index_url = None index_url = None
if 'find_links' in opts: find_links = (
find_links = _fixup_find_links(opts['find_links'][1])[:] _fixup_find_links(opts['find_links'][1])[:] if 'find_links' in opts else []
else: )
find_links = []
if dist.dependency_links: if dist.dependency_links:
find_links.extend(dist.dependency_links) find_links.extend(dist.dependency_links)
eggs_dir = os.path.realpath(dist.get_egg_cache_dir()) eggs_dir = os.path.realpath(dist.get_egg_cache_dir())
@@ -103,39 +76,40 @@ def fetch_build_egg(dist, req):
    for egg_dist in pkg_resources.find_distributions(eggs_dir):
        if egg_dist in req and environment.can_add(egg_dist):
            return egg_dist
-    with TemporaryDirectory() as tmpdir:
+    with tempfile.TemporaryDirectory() as tmpdir:
        cmd = [
-            sys.executable, '-m', 'pip',
+            sys.executable,
+            '-m',
+            'pip',
            '--disable-pip-version-check',
-            'wheel', '--no-deps',
-            '-w', tmpdir,
+            'wheel',
+            '--no-deps',
+            '-w',
+            tmpdir,
        ]
        if quiet:
            cmd.append('--quiet')
        if index_url is not None:
            cmd.extend(('--index-url', index_url))
-        if find_links is not None:
-            for link in find_links:
-                cmd.extend(('--find-links', link))
+        for link in find_links or []:
+            cmd.extend(('--find-links', link))
        # If requirement is a PEP 508 direct URL, directly pass
        # the URL to pip, as `req @ url` does not work on the
        # command line.
-        if req.url:
-            cmd.append(req.url)
-        else:
-            cmd.append(str(req))
+        cmd.append(req.url or str(req))
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError as e:
-            raise DistutilsError(str(e))
+            raise DistutilsError(str(e)) from e
        wheel = Wheel(glob.glob(os.path.join(tmpdir, '*.whl'))[0])
        dist_location = os.path.join(eggs_dir, wheel.egg_name())
        wheel.install_as_egg(dist_location)
        dist_metadata = pkg_resources.PathMetadata(
-            dist_location, os.path.join(dist_location, 'EGG-INFO'))
-        dist = pkg_resources.Distribution.from_filename(
-            dist_location, metadata=dist_metadata)
-        return dist
+            dist_location, os.path.join(dist_location, 'EGG-INFO')
+        )
+        return pkg_resources.Distribution.from_filename(
+            dist_location, metadata=dist_metadata
+        )


def strip_marker(req):
@@ -144,7 +118,27 @@ def strip_marker(req):
    calling pip with something like `babel; extra == "i18n"`, which
    would always be ignored.
    """
+    import pkg_resources  # Delay import to avoid unnecessary side-effects
+
    # create a copy to avoid mutating the input
    req = pkg_resources.Requirement.parse(str(req))
    req.marker = None
    return req
+
+
+def _warn_wheel_not_available(dist):
+    import pkg_resources  # Delay import to avoid unnecessary side-effects
+
+    try:
+        pkg_resources.get_distribution('wheel')
+    except pkg_resources.DistributionNotFound:
+        dist.announce('WARNING: The wheel package is not available.', log.WARN)
+
+
+class _DeprecatedInstaller(SetuptoolsDeprecationWarning):
+    _SUMMARY = "setuptools.installer and fetch_build_eggs are deprecated."
+    _DETAILS = """
+    Requirements should be satisfied by a PEP 517 installer.
+    If you are using pip, you can try `pip install --use-pep517`.
+    """
+    # _DUE_DATE not decided yet
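The marker stripping used by `strip_marker` above can be reproduced directly with pkg_resources; a small sketch (the requirement string is hypothetical):

import pkg_resources

req = pkg_resources.Requirement.parse('babel; extra == "i18n"')
req = pkg_resources.Requirement.parse(str(req))  # copy, don't mutate the input
req.marker = None
print(req)  # babel -- pip would otherwise ignore the extra-marked requirement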
View file
@@ -25,7 +25,8 @@ def run():
    sys.argv[:] = sys.argv[1:]

    open_ = getattr(tokenize, 'open', open)
-    script = open_(script_name).read()
+    with open_(script_name) as fid:
+        script = fid.read()
    norm_script = script.replace('\\r\\n', '\\n')
    code = compile(norm_script, script_name, 'exec')
    exec(code, namespace)
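The change above swaps a bare `.read()` for a with-block: `tokenize.open()` decodes the script per its PEP 263 coding declaration, and the context manager closes the handle instead of leaking it. A standalone sketch:

import tokenize

def read_script(script_name):
    # tokenize.open() honours the file's declared source encoding.
    with tokenize.open(script_name) as fid:
        return fid.read()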
View file
@@ -1,62 +0,0 @@
"""
Customized Mixin2to3 support:
- adds support for converting doctests
This module raises an ImportError on Python 2.
"""
from distutils.util import Mixin2to3 as _Mixin2to3
from distutils import log
from lib2to3.refactor import RefactoringTool, get_fixers_from_package
import setuptools
class DistutilsRefactoringTool(RefactoringTool):
def log_error(self, msg, *args, **kw):
log.error(msg, *args)
def log_message(self, msg, *args):
log.info(msg, *args)
def log_debug(self, msg, *args):
log.debug(msg, *args)
class Mixin2to3(_Mixin2to3):
def run_2to3(self, files, doctests=False):
# See of the distribution option has been set, otherwise check the
# setuptools default.
if self.distribution.use_2to3 is not True:
return
if not files:
return
log.info("Fixing " + " ".join(files))
self.__build_fixer_names()
self.__exclude_fixers()
if doctests:
if setuptools.run_2to3_on_doctests:
r = DistutilsRefactoringTool(self.fixer_names)
r.refactor(files, write=True, doctests_only=True)
else:
_Mixin2to3.run_2to3(self, files)
def __build_fixer_names(self):
if self.fixer_names:
return
self.fixer_names = []
for p in setuptools.lib2to3_fixer_packages:
self.fixer_names.extend(get_fixers_from_package(p))
if self.distribution.use_2to3_fixers is not None:
for p in self.distribution.use_2to3_fixers:
self.fixer_names.extend(get_fixers_from_package(p))
def __exclude_fixers(self):
excluded_fixers = getattr(self, 'exclude_fixers', [])
if self.distribution.use_2to3_exclude_fixers is not None:
excluded_fixers.extend(self.distribution.use_2to3_exclude_fixers)
for fixer_name in excluded_fixers:
if fixer_name in self.fixer_names:
self.fixer_names.remove(fixer_name)
View file
@@ -2,17 +2,15 @@
Monkey patching of distutils.
"""

-import sys
-import distutils.filelist
-import platform
-import types
import functools
-from importlib import import_module
import inspect
+import platform
+import sys
+import types
+from importlib import import_module

-from setuptools.extern import six
-
-import setuptools
+import distutils.filelist

__all__ = []
"""
@@ -37,9 +35,11 @@ def _get_mro(cls):
def get_unpatched(item):
    lookup = (
-        get_unpatched_class if isinstance(item, six.class_types) else
-        get_unpatched_function if isinstance(item, types.FunctionType) else
-        lambda item: None
+        get_unpatched_class
+        if isinstance(item, type)
+        else get_unpatched_function
+        if isinstance(item, types.FunctionType)
+        else lambda item: None
    )
    return lookup(item)
@@ -51,9 +51,7 @@ def get_unpatched_class(cls):
    first.
    """
    external_bases = (
-        cls
-        for cls in _get_mro(cls)
-        if not cls.__module__.startswith('setuptools')
+        cls for cls in _get_mro(cls) if not cls.__module__.startswith('setuptools')
    )
    base = next(external_bases)
    if not base.__module__.startswith('distutils'):
@@ -63,27 +61,11 @@ def get_unpatched_class(cls):
def patch_all():
+    import setuptools
+
    # we can't patch distutils.cmd, alas
    distutils.core.Command = setuptools.Command

-    has_issue_12885 = sys.version_info <= (3, 5, 3)
-
-    if has_issue_12885:
-        # fix findall bug in distutils (http://bugs.python.org/issue12885)
-        distutils.filelist.findall = setuptools.findall
-
-    needs_warehouse = (
-        sys.version_info < (2, 7, 13)
-        or
-        (3, 4) < sys.version_info < (3, 4, 6)
-        or
-        (3, 5) < sys.version_info <= (3, 5, 3)
-    )
-
-    if needs_warehouse:
-        warehouse = 'https://upload.pypi.org/legacy/'
-        distutils.config.PyPIRCCommand.DEFAULT_REPOSITORY = warehouse
-
    _patch_distribution_metadata()

    # Install Distribution throughout the distutils
@@ -94,17 +76,24 @@ def patch_all():
    distutils.core.Extension = setuptools.extension.Extension
    distutils.extension.Extension = setuptools.extension.Extension
    if 'distutils.command.build_ext' in sys.modules:
-        sys.modules['distutils.command.build_ext'].Extension = (
-            setuptools.extension.Extension
-        )
+        sys.modules[
+            'distutils.command.build_ext'
+        ].Extension = setuptools.extension.Extension

    patch_for_msvc_specialized_compiler()


def _patch_distribution_metadata():
+    from . import _core_metadata
+
    """Patch write_pkg_file and read_pkg_file for higher metadata standards"""
-    for attr in ('write_pkg_file', 'read_pkg_file', 'get_metadata_version'):
-        new_val = getattr(setuptools.dist, attr)
+    for attr in (
+        'write_pkg_info',
+        'write_pkg_file',
+        'read_pkg_file',
+        'get_metadata_version',
+    ):
+        new_val = getattr(_core_metadata, attr)
        setattr(distutils.dist.DistributionMetadata, attr, new_val)
@@ -126,7 +115,7 @@ def patch_func(replacement, target_mod, func_name):
def get_unpatched_function(candidate):
-    return getattr(candidate, 'unpatched')
+    return candidate.unpatched


def patch_for_msvc_specialized_compiler():
@@ -134,18 +123,17 @@ def patch_for_msvc_specialized_compiler():
    Patch functions in distutils to use standalone Microsoft Visual C++
    compilers.
    """
-    # import late to avoid circular imports on Python < 3.5
-    msvc = import_module('setuptools.msvc')
+    from . import msvc

    if platform.system() != 'Windows':
-        # Compilers only availables on Microsoft Windows
+        # Compilers only available on Microsoft Windows
        return

    def patch_params(mod_name, func_name):
        """
        Prepare the parameters for patch_func to patch indicated function.
        """
-        repl_prefix = 'msvc9_' if 'msvc9' in mod_name else 'msvc14_'
+        repl_prefix = 'msvc14_'
        repl_name = repl_prefix + func_name.lstrip('_')
        repl = getattr(msvc, repl_name)
        mod = import_module(mod_name)
@@ -153,27 +141,11 @@ def patch_for_msvc_specialized_compiler():
            raise ImportError(func_name)
        return repl, mod, func_name

-    # Python 2.7 to 3.4
-    msvc9 = functools.partial(patch_params, 'distutils.msvc9compiler')
-
    # Python 3.5+
    msvc14 = functools.partial(patch_params, 'distutils._msvccompiler')

-    try:
-        # Patch distutils.msvc9compiler
-        patch_func(*msvc9('find_vcvarsall'))
-        patch_func(*msvc9('query_vcvarsall'))
-    except ImportError:
-        pass
-
    try:
        # Patch distutils._msvccompiler._get_vc_env
        patch_func(*msvc14('_get_vc_env'))
    except ImportError:
        pass
-
-    try:
-        # Patch distutils._msvccompiler.gen_lib_options for Numpy
-        patch_func(*msvc14('gen_lib_options'))
-    except ImportError:
-        pass
View file
@@ -3,14 +3,6 @@ Improved support for Microsoft Visual C++ compilers.
Known supported compilers:
--------------------------
-Microsoft Visual C++ 9.0:
-    Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64)
-    Microsoft Windows SDK 6.1 (x86, x64, ia64)
-    Microsoft Windows SDK 7.0 (x86, x64, ia64)
-
-Microsoft Visual C++ 10.0:
-    Microsoft Windows SDK 7.1 (x86, x64, ia64)
-
Microsoft Visual C++ 14.X:
    Microsoft Visual C++ Build Tools 2015 (x86, x64, arm)
    Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64)
@@ -20,21 +12,18 @@ This may also support compilers shipped with compatible Visual Studio versions.
"""

import json
-from io import open
from os import listdir, pathsep
from os.path import join, isfile, isdir, dirname
-import sys
+from subprocess import CalledProcessError
+import contextlib
import platform
import itertools
+import subprocess
import distutils.errors
-from setuptools.extern.packaging.version import LegacyVersion
-from setuptools.extern.six.moves import filterfalse
-
-from .monkey import get_unpatched
+from setuptools.extern.more_itertools import unique_everseen

if platform.system() == 'Windows':
-    from setuptools.extern.six.moves import winreg
+    import winreg
    from os import environ
else:
    # Mock winreg and environ so the module can be imported on this platform.
@@ -47,99 +36,173 @@ else:
    environ = dict()


-_msvc9_suppress_errors = (
-    # msvc9compiler isn't available on some platforms
-    ImportError,
-
-    # msvc9compiler raises DistutilsPlatformError in some
-    # environments. See #1118.
-    distutils.errors.DistutilsPlatformError,
-)
-
-try:
-    from distutils.msvc9compiler import Reg
-except _msvc9_suppress_errors:
-    pass
-
-
-def msvc9_find_vcvarsall(version):
-    """
-    Patched "distutils.msvc9compiler.find_vcvarsall" to use the standalone
-    compiler build for Python
-    (VCForPython / Microsoft Visual C++ Compiler for Python 2.7).
-
-    Fall back to original behavior when the standalone compiler is not
-    available.
-
-    Redirect the path of "vcvarsall.bat".
-
-    Parameters
-    ----------
-    version: float
-        Required Microsoft Visual C++ version.
-
-    Return
-    ------
-    str
-        vcvarsall.bat path
-    """
-    vc_base = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f'
-    key = vc_base % ('', version)
-    try:
-        # Per-user installs register the compiler path here
-        productdir = Reg.get_value(key, "installdir")
-    except KeyError:
+def _msvc14_find_vc2015():
+    """Python 3.8 "distutils/_msvccompiler.py" backport"""
+    try:
+        key = winreg.OpenKey(
+            winreg.HKEY_LOCAL_MACHINE,
+            r"Software\Microsoft\VisualStudio\SxS\VC7",
+            0,
+            winreg.KEY_READ | winreg.KEY_WOW64_32KEY,
+        )
+    except OSError:
+        return None, None
+
+    best_version = 0
+    best_dir = None
+    with key:
+        for i in itertools.count():
+            try:
+                v, vc_dir, vt = winreg.EnumValue(key, i)
+            except OSError:
+                break
+            if v and vt == winreg.REG_SZ and isdir(vc_dir):
+                try:
+                    version = int(float(v))
+                except (ValueError, TypeError):
+                    continue
+                if version >= 14 and version > best_version:
+                    best_version, best_dir = version, vc_dir
+    return best_version, best_dir
+
+
+def _msvc14_find_vc2017():
+    """Python 3.8 "distutils/_msvccompiler.py" backport
+
+    Returns "15, path" based on the result of invoking vswhere.exe
+    If no install is found, returns "None, None"
+
+    The version is returned to avoid unnecessarily changing the function
+    result. It may be ignored when the path is not None.
+
+    If vswhere.exe is not available, by definition, VS 2017 is not
+    installed.
+    """
+    root = environ.get("ProgramFiles(x86)") or environ.get("ProgramFiles")
+    if not root:
+        return None, None
+
+    suitable_components = (
+        "Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
+        "Microsoft.VisualStudio.Workload.WDExpress",
+    )
+
+    for component in suitable_components:
+        # Workaround for `-requiresAny` (only available on VS 2017 > 15.6)
+        with contextlib.suppress(CalledProcessError, OSError, UnicodeDecodeError):
+            path = (
+                subprocess.check_output([
+                    join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"),
+                    "-latest",
+                    "-prerelease",
+                    "-requires",
+                    component,
+                    "-property",
+                    "installationPath",
+                    "-products",
+                    "*",
+                ])
+                .decode(encoding="mbcs", errors="strict")
+                .strip()
+            )
+
+            path = join(path, "VC", "Auxiliary", "Build")
+            if isdir(path):
+                return 15, path
+
+    return None, None  # no suitable component found
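The function added above shells out to vswhere.exe; the same query can be reproduced on its own (Windows-only, with the same path and component assumptions the code makes):

import os
import subprocess

def vs2017_installation_path():
    root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
    vswhere = os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe")
    out = subprocess.check_output([
        vswhere, "-latest", "-prerelease",
        "-requires", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
        "-property", "installationPath", "-products", "*",
    ])
    return out.decode("mbcs", errors="strict").strip()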
-        try:
-            # All-user installs on a 64-bit system register here
-            key = vc_base % ('Wow6432Node\\', version)
-            productdir = Reg.get_value(key, "installdir")
-        except KeyError:
-            productdir = None
-
-    if productdir:
-        vcvarsall = join(productdir, "vcvarsall.bat")
-        if isfile(vcvarsall):
-            return vcvarsall
-
-    return get_unpatched(msvc9_find_vcvarsall)(version)
-
+PLAT_SPEC_TO_RUNTIME = {
+    'x86': 'x86',
+    'x86_amd64': 'x64',
+    'x86_arm': 'arm',
+    'x86_arm64': 'arm64',
+}
+
+
+def _msvc14_find_vcvarsall(plat_spec):
+    """Python 3.8 "distutils/_msvccompiler.py" backport"""
+    _, best_dir = _msvc14_find_vc2017()
+    vcruntime = None
+
+    if plat_spec in PLAT_SPEC_TO_RUNTIME:
+        vcruntime_plat = PLAT_SPEC_TO_RUNTIME[plat_spec]
+    else:
+        vcruntime_plat = 'x64' if 'amd64' in plat_spec else 'x86'
+
+    if best_dir:
+        vcredist = join(
+            best_dir,
+            "..",
+            "..",
+            "redist",
+            "MSVC",
+            "**",
+            vcruntime_plat,
+            "Microsoft.VC14*.CRT",
+            "vcruntime140.dll",
+        )
+        try:
+            import glob
+
+            vcruntime = glob.glob(vcredist, recursive=True)[-1]
+        except (ImportError, OSError, LookupError):
+            vcruntime = None
+
+    if not best_dir:
+        best_version, best_dir = _msvc14_find_vc2015()
+        if best_version:
+            vcruntime = join(
+                best_dir,
+                'redist',
+                vcruntime_plat,
+                "Microsoft.VC140.CRT",
+                "vcruntime140.dll",
+            )
+
+    if not best_dir:
+        return None, None
+
+    vcvarsall = join(best_dir, "vcvarsall.bat")
+    if not isfile(vcvarsall):
+        return None, None
+
+    if not vcruntime or not isfile(vcruntime):
+        vcruntime = None
+
+    return vcvarsall, vcruntime
-def msvc9_query_vcvarsall(ver, arch='x86', *args, **kwargs):
-    """
-    Patched "distutils.msvc9compiler.query_vcvarsall" for support extra
-    Microsoft Visual C++ 9.0 and 10.0 compilers.
-
-    Set environment without use of "vcvarsall.bat".
-
-    Parameters
-    ----------
-    ver: float
-        Required Microsoft Visual C++ version.
-    arch: str
-        Target architecture.
-
-    Return
-    ------
-    dict
-        environment
-    """
-    # Try to get environment from vcvarsall.bat (Classical way)
-    try:
-        orig = get_unpatched(msvc9_query_vcvarsall)
-        return orig(ver, arch, *args, **kwargs)
-    except distutils.errors.DistutilsPlatformError:
-        # Pass error if Vcvarsall.bat is missing
-        pass
-    except ValueError:
-        # Pass error if environment not set after executing vcvarsall.bat
-        pass
-
-    # If error, try to set environment directly
-    try:
-        return EnvironmentInfo(arch, ver).return_env()
-    except distutils.errors.DistutilsPlatformError as exc:
-        _augment_exception(exc, ver, arch)
-        raise
+def _msvc14_get_vc_env(plat_spec):
+    """Python 3.8 "distutils/_msvccompiler.py" backport"""
+    if "DISTUTILS_USE_SDK" in environ:
+        return {key.lower(): value for key, value in environ.items()}
+
+    vcvarsall, vcruntime = _msvc14_find_vcvarsall(plat_spec)
+    if not vcvarsall:
+        raise distutils.errors.DistutilsPlatformError("Unable to find vcvarsall.bat")
+
+    try:
+        out = subprocess.check_output(
+            'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec),
+            stderr=subprocess.STDOUT,
+        ).decode('utf-16le', errors='replace')
+    except subprocess.CalledProcessError as exc:
+        raise distutils.errors.DistutilsPlatformError(
+            "Error executing {}".format(exc.cmd)
+        ) from exc
+
+    env = {
+        key.lower(): value
+        for key, _, value in (line.partition('=') for line in out.splitlines())
+        if key and value
+    }
+
+    if vcruntime:
+        env['py_vcruntime_redist'] = vcruntime
+
+    return env


def msvc14_get_vc_env(plat_spec):
@@ -159,34 +222,15 @@ def msvc14_get_vc_env(plat_spec):
    dict
        environment
    """
-    # Try to get environment from vcvarsall.bat (Classical way)
-    try:
-        return get_unpatched(msvc14_get_vc_env)(plat_spec)
-    except distutils.errors.DistutilsPlatformError:
-        # Pass error Vcvarsall.bat is missing
-        pass
-
-    # If error, try to set environment directly
+    # Always use backport from CPython 3.8
    try:
-        return EnvironmentInfo(plat_spec, vc_min_ver=14.0).return_env()
+        return _msvc14_get_vc_env(plat_spec)
    except distutils.errors.DistutilsPlatformError as exc:
        _augment_exception(exc, 14.0)
        raise
-
-def msvc14_gen_lib_options(*args, **kwargs):
-    """
-    Patched "distutils._msvccompiler.gen_lib_options" for fix
-    compatibility between "numpy.distutils" and "distutils._msvccompiler"
-    (for Numpy < 1.11.2)
-    """
-    if "numpy.distutils" in sys.modules:
-        import numpy as np
-
-        if LegacyVersion(np.__version__) < LegacyVersion('1.11.2'):
-            return np.distutils.ccompiler.gen_lib_options(*args, **kwargs)
-    return get_unpatched(msvc14_gen_lib_options)(*args, **kwargs)
-

def _augment_exception(exc, version, arch=''):
    """
    Add details to the exception message to help guide the user
@@ -197,7 +241,7 @@ def _augment_exception(exc, version, arch=''):
    if "vcvarsall" in message.lower() or "visual c" in message.lower():
        # Special error message if MSVC++ not installed
-        tmpl = 'Microsoft Visual C++ {version:0.1f} is required.'
+        tmpl = 'Microsoft Visual C++ {version:0.1f} or greater is required.'
        message = tmpl.format(**locals())
        msdownload = 'www.microsoft.com/download/details.aspx?id=%d'
        if version == 9.0:
@@ -217,10 +261,13 @@ def _augment_exception(exc, version, arch=''):
            message += msdownload % 8279
        elif version >= 14.0:
            # For VC++ 14.X Redirect user to latest Visual C++ Build Tools
-            message += (' Get it with "Build Tools for Visual Studio": '
-                        r'https://visualstudio.microsoft.com/downloads/')
+            message += (
+                ' Get it with "Microsoft C++ Build Tools": '
+                r'https://visualstudio.microsoft.com'
+                r'/visual-cpp-build-tools/'
+            )

-    exc.args = (message, )
+    exc.args = (message,)


class PlatformInfo:
@@ -232,6 +279,7 @@ class PlatformInfo:
    arch: str
        Target architecture.
    """
+
    current_cpu = environ.get('processor_architecture', '').lower()

    def __init__(self, arch):
@@ -247,7 +295,7 @@ class PlatformInfo:
        str
            Target CPU
        """
-        return self.arch[self.arch.find('_') + 1:]
+        return self.arch[self.arch.find('_') + 1 :]

    def target_is_x86(self):
        """
@@ -288,9 +336,11 @@ class PlatformInfo:
            subfolder: '\target', or '' (see hidex86 parameter)
        """
        return (
-            '' if (self.current_cpu == 'x86' and hidex86) else
-            r'\x64' if (self.current_cpu == 'amd64' and x64) else
-            r'\%s' % self.current_cpu
+            ''
+            if (self.current_cpu == 'x86' and hidex86)
+            else r'\x64'
+            if (self.current_cpu == 'amd64' and x64)
+            else r'\%s' % self.current_cpu
        )

    def target_dir(self, hidex86=False, x64=False):
@@ -310,9 +360,11 @@ class PlatformInfo:
            subfolder: '\current', or '' (see hidex86 parameter)
        """
        return (
-            '' if (self.target_cpu == 'x86' and hidex86) else
-            r'\x64' if (self.target_cpu == 'amd64' and x64) else
-            r'\%s' % self.target_cpu
+            ''
+            if (self.target_cpu == 'x86' and hidex86)
+            else r'\x64'
+            if (self.target_cpu == 'amd64' and x64)
+            else r'\%s' % self.target_cpu
        )

    def cross_dir(self, forcex86=False):
@@ -333,8 +385,9 @@ class PlatformInfo:
        """
        current = 'x86' if forcex86 else self.current_cpu
        return (
-            '' if self.target_cpu == current else
-            self.target_dir().replace('\\', '\\%s_' % current)
+            ''
+            if self.target_cpu == current
+            else self.target_dir().replace('\\', '\\%s_' % current)
        )
@@ -347,10 +400,13 @@ class RegistryInfo:
    platform_info: PlatformInfo
        "PlatformInfo" instance.
    """
-    HKEYS = (winreg.HKEY_USERS,
-             winreg.HKEY_CURRENT_USER,
-             winreg.HKEY_LOCAL_MACHINE,
-             winreg.HKEY_CLASSES_ROOT)
+
+    HKEYS = (
+        winreg.HKEY_USERS,
+        winreg.HKEY_CURRENT_USER,
+        winreg.HKEY_LOCAL_MACHINE,
+        winreg.HKEY_CLASSES_ROOT,
+    )

    def __init__(self, platform_info):
        self.pi = platform_info
@@ -500,22 +556,28 @@ class RegistryInfo:
        """
        key_read = winreg.KEY_READ
        openkey = winreg.OpenKey
+        closekey = winreg.CloseKey
        ms = self.microsoft
        for hkey in self.HKEYS:
+            bkey = None
            try:
                bkey = openkey(hkey, ms(key), 0, key_read)
-            except (OSError, IOError):
+            except OSError:
                if not self.pi.current_is_x86():
                    try:
                        bkey = openkey(hkey, ms(key, True), 0, key_read)
-                    except (OSError, IOError):
+                    except OSError:
                        continue
                else:
                    continue
            try:
                return winreg.QueryValueEx(bkey, name)[0]
-            except (OSError, IOError):
+            except OSError:
                pass
+            finally:
+                if bkey:
+                    closekey(bkey)
+        return None


class SystemInfo:
@@ -543,8 +605,7 @@ class SystemInfo:
        self.known_vs_paths = self.find_programdata_vs_vers()

        # Except for VS15+, VC version is aligned with VS version
-        self.vs_ver = self.vc_ver = (
-            vc_ver or self._find_latest_available_vs_ver())
+        self.vs_ver = self.vc_ver = vc_ver or self._find_latest_available_vs_ver()

    def _find_latest_available_vs_ver(self):
        """
@@ -559,7 +620,8 @@
        if not (reg_vc_vers or self.known_vs_paths):
            raise distutils.errors.DistutilsPlatformError(
-                'No Microsoft Visual C++ version found')
+                'No Microsoft Visual C++ version found'
+            )

        vc_vers = set(reg_vc_vers)
        vc_vers.update(self.known_vs_paths)
@@ -577,27 +639,23 @@
        ms = self.ri.microsoft
        vckeys = (self.ri.vc, self.ri.vc_for_python, self.ri.vs)
        vs_vers = []
-        for hkey in self.ri.HKEYS:
-            for key in vckeys:
-                try:
-                    bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ)
-                except (OSError, IOError):
-                    continue
-                subkeys, values, _ = winreg.QueryInfoKey(bkey)
-                for i in range(values):
-                    try:
-                        ver = float(winreg.EnumValue(bkey, i)[0])
-                        if ver not in vs_vers:
-                            vs_vers.append(ver)
-                    except ValueError:
-                        pass
-                for i in range(subkeys):
-                    try:
-                        ver = float(winreg.EnumKey(bkey, i))
-                        if ver not in vs_vers:
-                            vs_vers.append(ver)
-                    except ValueError:
-                        pass
+        for hkey, key in itertools.product(self.ri.HKEYS, vckeys):
+            try:
+                bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ)
+            except OSError:
+                continue
+            with bkey:
+                subkeys, values, _ = winreg.QueryInfoKey(bkey)
+                for i in range(values):
+                    with contextlib.suppress(ValueError):
+                        ver = float(winreg.EnumValue(bkey, i)[0])
+                        if ver not in vs_vers:
+                            vs_vers.append(ver)
+                for i in range(subkeys):
+                    with contextlib.suppress(ValueError):
+                        ver = float(winreg.EnumKey(bkey, i))
+                        if ver not in vs_vers:
+                            vs_vers.append(ver)
        return sorted(vs_vers)
    def find_programdata_vs_vers(self):
@@ -611,13 +669,12 @@ class SystemInfo:
            float version as key, path as value.
        """
        vs_versions = {}
-        instances_dir = \
-            r'C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances'
+        instances_dir = r'C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances'

        try:
            hashed_names = listdir(instances_dir)
-        except (OSError, IOError):
+        except OSError:
            # Directory not exists with all Visual Studio versions
            return vs_versions
@@ -633,10 +690,11 @@ class SystemInfo:
                    listdir(join(vs_path, r'VC\Tools\MSVC'))

                    # Store version and path
-                    vs_versions[self._as_float_version(
-                        state['installationVersion'])] = vs_path
+                    vs_versions[self._as_float_version(state['installationVersion'])] = (
+                        vs_path
+                    )

-            except (OSError, IOError, KeyError):
+            except (OSError, KeyError):
                # Skip if "state.json" file is missing or bad format
                continue
@@ -670,8 +728,9 @@ class SystemInfo:
            path
        """
        # Default path
-        default = join(self.ProgramFilesx86,
-                       'Microsoft Visual Studio %0.1f' % self.vs_ver)
+        default = join(
+            self.ProgramFilesx86, 'Microsoft Visual Studio %0.1f' % self.vs_ver
+        )

        # Try to get path from registry, if fail use default path
        return self.ri.lookup(self.ri.vs, '%0.1f' % self.vs_ver) or default
@@ -721,7 +780,7 @@ class SystemInfo:
            vc_ver = listdir(guess_vc)[-1]
            self.vc_ver = self._as_float_version(vc_ver)
            return join(guess_vc, vc_ver)
-        except (OSError, IOError, IndexError):
+        except (OSError, IndexError):
            return ''

    def _guess_vc_legacy(self):
@@ -733,8 +792,9 @@ class SystemInfo:
        str
            path
        """
-        default = join(self.ProgramFilesx86,
-                       r'Microsoft Visual Studio %0.1f\VC' % self.vs_ver)
+        default = join(
+            self.ProgramFilesx86, r'Microsoft Visual Studio %0.1f\VC' % self.vs_ver
+        )

        # Try to get "VC++ for Python" path from registry as default path
        reg_path = join(self.ri.vc_for_python, '%0.1f' % self.vs_ver)
@@ -764,6 +824,7 @@ class SystemInfo:
            return '8.1', '8.1a'
        elif self.vs_ver >= 14.0:
            return '10.0', '8.1'
+        return None
    @property
    def WindowsSdkLastVersion(self):
@@ -777,8 +838,8 @@ class SystemInfo:
        """
        return self._use_last_dir_name(join(self.WindowsSdkDir, 'lib'))

-    @property
-    def WindowsSdkDir(self):
+    @property  # noqa: C901
+    def WindowsSdkDir(self):  # noqa: C901  # is too complex (12)  # FIXME
        """
        Microsoft Windows SDK directory.
@@ -803,7 +864,7 @@ class SystemInfo:
        if not sdkdir or not isdir(sdkdir):
            # If fail, use default new path
            for ver in self.WindowsSdkVersion:
-                intver = ver[:ver.rfind('.')]
+                intver = ver[: ver.rfind('.')]
                path = r'Microsoft SDKs\Windows Kits\%s' % intver
                d = join(self.ProgramFiles, path)
                if isdir(d):
@@ -855,6 +916,8 @@ class SystemInfo:
                if execpath:
                    return execpath

+        return None
+
    @property
    def FSharpInstallDir(self):
        """
@@ -883,11 +946,12 @@ class SystemInfo:

        # Find path of the more recent Kit
        for ver in vers:
-            sdkdir = self.ri.lookup(self.ri.windows_kits_roots,
-                                    'kitsroot%s' % ver)
+            sdkdir = self.ri.lookup(self.ri.windows_kits_roots, 'kitsroot%s' % ver)
            if sdkdir:
                return sdkdir or ''

+        return None
+
    @property
    def UniversalCRTSdkLastVersion(self):
        """
@@ -911,10 +975,11 @@ class SystemInfo:
            versions
        """
        # Set FxSdk versions for specified VS version
-        return (('4.7.2', '4.7.1', '4.7',
-                 '4.6.2', '4.6.1', '4.6',
-                 '4.5.2', '4.5.1', '4.5')
-                if self.vs_ver >= 14.0 else ())
+        return (
+            ('4.7.2', '4.7.1', '4.7', '4.6.2', '4.6.1', '4.6', '4.5.2', '4.5.1', '4.5')
+            if self.vs_ver >= 14.0
+            else ()
+        )

    @property
    def NetFxSdkDir(self):
@@ -1039,8 +1104,7 @@ class SystemInfo:
        matching_dirs = (
            dir_name
            for dir_name in reversed(listdir(path))
-            if isdir(join(path, dir_name)) and
-            dir_name.startswith(prefix)
+            if isdir(join(path, dir_name)) and dir_name.startswith(prefix)
        )
        return next(matching_dirs, None) or ''
@@ -1132,8 +1196,10 @@ class EnvironmentInfo:
        list of str
            paths
        """
-        return [join(self.si.VCInstallDir, 'Include'),
-                join(self.si.VCInstallDir, r'ATLMFC\Include')]
+        return [
+            join(self.si.VCInstallDir, 'Include'),
+            join(self.si.VCInstallDir, r'ATLMFC\Include'),
+        ]

    @property
    def VCLibraries(self):
@@ -1193,14 +1259,15 @@
            tools += [join(si.VCInstallDir, path)]

        elif self.vs_ver >= 15.0:
-            host_dir = (r'bin\HostX86%s' if self.pi.current_is_x86() else
-                        r'bin\HostX64%s')
-            tools += [join(
-                si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))]
+            host_dir = (
+                r'bin\HostX86%s' if self.pi.current_is_x86() else r'bin\HostX64%s'
+            )
+            tools += [join(si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))]

            if self.pi.current_cpu != self.pi.target_cpu:
-                tools += [join(
-                    si.VCInstallDir, host_dir % self.pi.current_dir(x64=True))]
+                tools += [
+                    join(si.VCInstallDir, host_dir % self.pi.current_dir(x64=True))
+                ]

        else:
            tools += [join(si.VCInstallDir, 'Bin')]
@@ -1225,7 +1292,7 @@ class EnvironmentInfo:
            arch_subdir = self.pi.target_dir(x64=True)
            lib = join(self.si.WindowsSdkDir, 'lib')
            libver = self._sdk_subdir
-            return [join(lib, '%sum%s' % (libver , arch_subdir))]
+            return [join(lib, '%sum%s' % (libver, arch_subdir))]

    @property
    def OSIncludes(self):
@@ -1247,9 +1314,11 @@
                sdkver = self._sdk_subdir
            else:
                sdkver = ''
-            return [join(include, '%sshared' % sdkver),
-                    join(include, '%sum' % sdkver),
-                    join(include, '%swinrt' % sdkver)]
+            return [
+                join(include, '%sshared' % sdkver),
+                join(include, '%sum' % sdkver),
+                join(include, '%swinrt' % sdkver),
+            ]

    @property
    def OSLibpath(self):
@@ -1276,11 +1345,16 @@
                join(self.si.WindowsSdkDir, 'UnionMetadata'),
                join(ref, 'Windows.Foundation.UniversalApiContract', '1.0.0.0'),
                join(ref, 'Windows.Foundation.FoundationContract', '1.0.0.0'),
-                join(ref,'Windows.Networking.Connectivity.WwanContract',
-                     '1.0.0.0'),
-                join(self.si.WindowsSdkDir, 'ExtensionSDKs', 'Microsoft.VCLibs',
-                     '%0.1f' % self.vs_ver, 'References', 'CommonConfiguration',
-                     'neutral'),
+                join(ref, 'Windows.Networking.Connectivity.WwanContract', '1.0.0.0'),
+                join(
+                    self.si.WindowsSdkDir,
+                    'ExtensionSDKs',
+                    'Microsoft.VCLibs',
+                    '%0.1f' % self.vs_ver,
+                    'References',
+                    'CommonConfiguration',
+                    'neutral',
+                ),
            ]
        return libpath
@@ -1381,11 +1455,9 @@
        tools = []
        if include32:
-            tools += [join(si.FrameworkDir32, ver)
-                      for ver in si.FrameworkVersion32]
+            tools += [join(si.FrameworkDir32, ver) for ver in si.FrameworkVersion32]
        if include64:
-            tools += [join(si.FrameworkDir64, ver)
-                      for ver in si.FrameworkVersion64]
+            tools += [join(si.FrameworkDir64, ver) for ver in si.FrameworkVersion64]
        return tools

    @property
@@ -1561,9 +1633,11 @@
            prefixes += [join(tools_path, 'redist')]  # VS14 legacy path

        # CRT directory
-        crt_dirs = ('Microsoft.VC%d.CRT' % (self.vc_ver * 10),
-                    # Sometime store in directory with VS version instead of VC
-                    'Microsoft.VC%d.CRT' % (int(self.vs_ver) * 10))
+        crt_dirs = (
+            'Microsoft.VC%d.CRT' % (self.vc_ver * 10),
+            # Sometime store in directory with VS version instead of VC
+            'Microsoft.VC%d.CRT' % (int(self.vs_ver) * 10),
+        )

        # vcruntime path
        for prefix, crt_dir in itertools.product(prefixes, crt_dirs):
@@ -1586,36 +1660,47 @@
            environment
        """
        env = dict(
-            include=self._build_paths('include',
-                                      [self.VCIncludes,
-                                       self.OSIncludes,
-                                       self.UCRTIncludes,
-                                       self.NetFxSDKIncludes],
-                                      exists),
-            lib=self._build_paths('lib',
-                                  [self.VCLibraries,
-                                   self.OSLibraries,
-                                   self.FxTools,
-                                   self.UCRTLibraries,
-                                   self.NetFxSDKLibraries],
-                                  exists),
-            libpath=self._build_paths('libpath',
-                                      [self.VCLibraries,
-                                       self.FxTools,
-                                       self.VCStoreRefs,
-                                       self.OSLibpath],
-                                      exists),
-            path=self._build_paths('path',
-                                   [self.VCTools,
-                                    self.VSTools,
-                                    self.VsTDb,
-                                    self.SdkTools,
-                                    self.SdkSetup,
-                                    self.FxTools,
-                                    self.MSBuild,
-                                    self.HTMLHelpWorkshop,
-                                    self.FSharp],
-                                   exists),
+            include=self._build_paths(
+                'include',
+                [
+                    self.VCIncludes,
+                    self.OSIncludes,
+                    self.UCRTIncludes,
+                    self.NetFxSDKIncludes,
+                ],
+                exists,
+            ),
+            lib=self._build_paths(
+                'lib',
+                [
+                    self.VCLibraries,
+                    self.OSLibraries,
+                    self.FxTools,
+                    self.UCRTLibraries,
+                    self.NetFxSDKLibraries,
+                ],
+                exists,
+            ),
+            libpath=self._build_paths(
+                'libpath',
+                [self.VCLibraries, self.FxTools, self.VCStoreRefs, self.OSLibpath],
+                exists,
+            ),
+            path=self._build_paths(
+                'path',
+                [
+                    self.VCTools,
+                    self.VSTools,
+                    self.VsTDb,
+                    self.SdkTools,
+                    self.SdkSetup,
+                    self.FxTools,
+                    self.MSBuild,
+                    self.HTMLHelpWorkshop,
+                    self.FSharp,
+                ],
+                exists,
+            ),
        )
        if self.vs_ver >= 14 and isfile(self.VCRuntimeRedist):
            env['py_vcruntime_redist'] = self.VCRuntimeRedist
@@ -1651,29 +1736,5 @@
        if not extant_paths:
            msg = "%s environment variable is empty" % name.upper()
            raise distutils.errors.DistutilsPlatformError(msg)
-        unique_paths = self._unique_everseen(extant_paths)
+        unique_paths = unique_everseen(extant_paths)
        return pathsep.join(unique_paths)
-
-    # from Python docs
-    @staticmethod
-    def _unique_everseen(iterable, key=None):
-        """
-        List unique elements, preserving order.
-        Remember all elements ever seen.
-
-        _unique_everseen('AAAABBBCCDAABBB') --> A B C D
-
-        _unique_everseen('ABBCcAD', str.lower) --> A B C D
-        """
-        seen = set()
-        seen_add = seen.add
-        if key is None:
-            for element in filterfalse(seen.__contains__, iterable):
-                seen_add(element)
-                yield element
-        else:
-            for element in iterable:
-                k = key(element)
-                if k not in seen:
-                    seen_add(k)
-                    yield element
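The removed helper is superseded by `more_itertools.unique_everseen`, which has the same order-preserving dedupe semantics; a plain-library equivalent for reference:

def unique_everseen(iterable, key=None):
    seen = set()
    for element in iterable:
        k = element if key is None else key(element)
        if k not in seen:
            seen.add(k)
            yield element

assert list(unique_everseen('AAAABBBCCDAABBB')) == list('ABCD')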
View file
@@ -2,22 +2,18 @@ import os
from distutils import log
import itertools

-from setuptools.extern.six.moves import map
-
flatten = itertools.chain.from_iterable


class Installer:

    nspkg_ext = '-nspkg.pth'

    def install_namespaces(self):
        nsp = self._get_all_ns_packages()
        if not nsp:
            return
-        filename, ext = os.path.splitext(self._get_target())
-        filename += self.nspkg_ext
+        filename = self._get_nspkg_file()
        self.outputs.append(filename)
        log.info("Installing %s", filename)
        lines = map(self._gen_nspkg_line, nsp)
@@ -31,45 +27,44 @@ class Installer:
            f.writelines(lines)

    def uninstall_namespaces(self):
-        filename, ext = os.path.splitext(self._get_target())
-        filename += self.nspkg_ext
+        filename = self._get_nspkg_file()
        if not os.path.exists(filename):
            return
        log.info("Removing %s", filename)
        os.remove(filename)

+    def _get_nspkg_file(self):
+        filename, _ = os.path.splitext(self._get_target())
+        return filename + self.nspkg_ext
+
    def _get_target(self):
        return self.target

    _nspkg_tmpl = (
        "import sys, types, os",
-        "has_mfs = sys.version_info > (3, 5)",
        "p = os.path.join(%(root)s, *%(pth)r)",
-        "importlib = has_mfs and __import__('importlib.util')",
-        "has_mfs and __import__('importlib.machinery')",
-        "m = has_mfs and "
-        "sys.modules.setdefault(%(pkg)r, "
-        "importlib.util.module_from_spec("
-        "importlib.machinery.PathFinder.find_spec(%(pkg)r, "
-        "[os.path.dirname(p)])))",
-        "m = m or "
-        "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
+        "importlib = __import__('importlib.util')",
+        "__import__('importlib.machinery')",
+        (
+            "m = "
+            "sys.modules.setdefault(%(pkg)r, "
+            "importlib.util.module_from_spec("
+            "importlib.machinery.PathFinder.find_spec(%(pkg)r, "
+            "[os.path.dirname(p)])))"
+        ),
+        ("m = m or " "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))"),
        "mp = (m or []) and m.__dict__.setdefault('__path__',[])",
        "(p not in mp) and mp.append(p)",
    )
    "lines for the namespace installer"

-    _nspkg_tmpl_multi = (
-        'm and setattr(sys.modules[%(parent)r], %(child)r, m)',
-    )
+    _nspkg_tmpl_multi = ('m and setattr(sys.modules[%(parent)r], %(child)r, m)',)
    "additional line(s) when a parent package is indicated"

    def _get_root(self):
        return "sys._getframe(1).f_locals['sitedir']"

    def _gen_nspkg_line(self, pkg):
-        # ensure pkg is not a unicode string under Python 2.7
-        pkg = str(pkg)
        pth = tuple(pkg.split('.'))
        root = self._get_root()
        tmpl_lines = self._nspkg_tmpl
@@ -81,7 +76,7 @@ class Installer:
    def _get_all_ns_packages(self):
        """Return sorted list of all package namespaces"""
        pkgs = self.distribution.namespace_packages or []
-        return sorted(flatten(map(self._pkg_names, pkgs)))
+        return sorted(set(flatten(map(self._pkg_names, pkgs))))

    @staticmethod
    def _pkg_names(pkg):
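The `_nspkg_tmpl` lines above are joined into one statement per namespace package and written to a `-nspkg.pth` file. De-obfuscated, the generated line does roughly the following (an illustrative helper; 'zope' and the site dir would come from the real install):

import importlib.machinery
import importlib.util
import os
import sys
import types

def ensure_namespace(pkg, sitedir):
    # Mirror of the generated .pth line: register pkg as a namespace module
    # rooted at sitedir/pkg, creating a stub module if nothing importable exists.
    p = os.path.join(sitedir, *pkg.split('.'))
    spec = importlib.machinery.PathFinder.find_spec(pkg, [os.path.dirname(p)])
    m = spec and sys.modules.setdefault(pkg, importlib.util.module_from_spec(spec))
    m = m or sys.modules.setdefault(pkg, types.ModuleType(pkg))
    mp = m.__dict__.setdefault('__path__', [])
    if p not in mp:
        mp.append(p)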
View file
@@ -1,33 +1,45 @@
-"""PyPI and direct package downloading"""
+"""PyPI and direct package downloading."""
+
import sys
import os
import re
+import io
import shutil
import socket
import base64
import hashlib
import itertools
-import warnings
+import configparser
+import html
+import http.client
+import urllib.parse
+import urllib.request
+import urllib.error
from functools import wraps

-from setuptools.extern import six
-from setuptools.extern.six.moves import urllib, http_client, configparser, map
-
import setuptools
from pkg_resources import (
-    CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST,
-    Environment, find_distributions, safe_name, safe_version,
-    to_filename, Requirement, DEVELOP_DIST, EGG_DIST,
+    CHECKOUT_DIST,
+    Distribution,
+    BINARY_DIST,
+    normalize_path,
+    SOURCE_DIST,
+    Environment,
+    find_distributions,
+    safe_name,
+    safe_version,
+    to_filename,
+    Requirement,
+    DEVELOP_DIST,
+    EGG_DIST,
+    parse_version,
)
-from setuptools import ssl_support
from distutils import log
from distutils.errors import DistutilsError
from fnmatch import translate
-from setuptools.py27compat import get_all_headers
-from setuptools.py33compat import unescape
from setuptools.wheel import Wheel
+from setuptools.extern.more_itertools import unique_everseen

-__metaclass__ = type

EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.+!]+)$')
HREF = re.compile(r"""href\s*=\s*['"]?([^'"> ]+)""", re.I)
@@ -39,23 +51,27 @@ URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):', re.I).match
EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()

__all__ = [
-    'PackageIndex', 'distros_for_url', 'parse_bdist_wininst',
+    'PackageIndex',
+    'distros_for_url',
+    'parse_bdist_wininst',
    'interpret_distro_name',
]

_SOCKET_TIMEOUT = 15

_tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}"
-user_agent = _tmpl.format(py_major='{}.{}'.format(*sys.version_info), setuptools=setuptools)
+user_agent = _tmpl.format(
+    py_major='{}.{}'.format(*sys.version_info), setuptools=setuptools
+)


def parse_requirement_arg(spec):
    try:
        return Requirement.parse(spec)
-    except ValueError:
+    except ValueError as e:
        raise DistutilsError(
            "Not a URL, existing file, or requirement spec: %r" % (spec,)
-        )
+        ) from e


def parse_bdist_wininst(name):
@@ -96,15 +112,13 @@ def egg_info_for_url(url):
def distros_for_url(url, metadata=None):
    """Yield egg or source distribution objects that might be found at a URL"""
    base, fragment = egg_info_for_url(url)
-    for dist in distros_for_location(url, base, metadata):
-        yield dist
+    yield from distros_for_location(url, base, metadata)
    if fragment:
        match = EGG_FRAGMENT.match(fragment)
        if match:
-            for dist in interpret_distro_name(
-                url, match.group(1), metadata, precedence=CHECKOUT_DIST
-            ):
-                yield dist
+            yield from interpret_distro_name(
+                url, match.group(1), metadata, precedence=CHECKOUT_DIST
+            )


def distros_for_location(location, basename, metadata=None):
@@ -118,13 +132,15 @@ def distros_for_location(location, basename, metadata=None):
        wheel = Wheel(basename)
        if not wheel.is_compatible():
            return []
-        return [Distribution(
-            location=location,
-            project_name=wheel.project_name,
-            version=wheel.version,
-            # Increase priority over eggs.
-            precedence=EGG_DIST + 1,
-        )]
+        return [
+            Distribution(
+                location=location,
+                project_name=wheel.project_name,
+                version=wheel.version,
+                # Increase priority over eggs.
+                precedence=EGG_DIST + 1,
+            )
+        ]
    if basename.endswith('.exe'):
        win_base, py_ver, platform = parse_bdist_wininst(basename)
        if win_base is not None:
@@ -135,7 +151,7 @@ def distros_for_location(location, basename, metadata=None):
    #
    for ext in EXTENSIONS:
        if basename.endswith(ext):
-            basename = basename[:-len(ext)]
+            basename = basename[: -len(ext)]
            return interpret_distro_name(location, basename, metadata)
    return []  # no extension matched
@@ -148,57 +164,37 @@ def distros_for_filename(filename, metadata=None):
def interpret_distro_name(
-    location, basename, metadata, py_version=None, precedence=SOURCE_DIST,
-    platform=None
+    location, basename, metadata, py_version=None, precedence=SOURCE_DIST, platform=None
):
-    """Generate alternative interpretations of a source distro name
+    """Generate the interpretation of a source distro name

    Note: if `location` is a filesystem filename, you should call
    ``pkg_resources.normalize_path()`` on it before passing it to this
    routine!
    """
-    # Generate alternative interpretations of a source distro name
-    # Because some packages are ambiguous as to name/versions split
-    # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc.
-    # So, we generate each possible interepretation (e.g. "adns, python-1.1.0"
-    # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice,
-    # the spurious interpretations should be ignored, because in the event
-    # there's also an "adns" package, the spurious "python-1.1.0" version will
-    # compare lower than any numeric version number, and is therefore unlikely
-    # to match a request for it. It's still a potential problem, though, and
-    # in the long run PyPI and the distutils should go for "safe" names and
-    # versions in distribution archive names (sdist and bdist).
-
    parts = basename.split('-')
    if not py_version and any(re.match(r'py\d\.\d$', p) for p in parts[2:]):
        # it is a bdist_dumb, not an sdist -- bail out
        return
-    for p in range(1, len(parts) + 1):
-        yield Distribution(
-            location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]),
-            py_version=py_version, precedence=precedence,
-            platform=platform
-        )
-
-
-# From Python 2.7 docs
-def unique_everseen(iterable, key=None):
-    "List unique elements, preserving order. Remember all elements ever seen."
-    # unique_everseen('AAAABBBCCDAABBB') --> A B C D
-    # unique_everseen('ABBCcAD', str.lower) --> A B C D
-    seen = set()
-    seen_add = seen.add
-    if key is None:
-        for element in six.moves.filterfalse(seen.__contains__, iterable):
-            seen_add(element)
-            yield element
-    else:
-        for element in iterable:
-            k = key(element)
-            if k not in seen:
-                seen_add(k)
-                yield element
+    # find the pivot (p) that splits the name from the version.
+    # infer the version as the first item that has a digit.
+    for p in range(len(parts)):
+        if parts[p][:1].isdigit():
+            break
+    else:
+        p = len(parts)
+
+    yield Distribution(
+        location,
+        metadata,
+        '-'.join(parts[:p]),
+        '-'.join(parts[p:]),
+        py_version=py_version,
+        precedence=precedence,
+        platform=platform,
+    )
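The rewritten loop replaces the old "try every split" behaviour with a single pivot: the version starts at the first dash-separated part that opens with a digit. A standalone illustration of the same rule:

def split_name_version(basename):
    parts = basename.split('-')
    for p in range(len(parts)):
        if parts[p][:1].isdigit():
            break
    else:
        p = len(parts)
    return '-'.join(parts[:p]), '-'.join(parts[p:])

assert split_name_version('adns-python-1.1.0') == ('adns-python', '1.1.0')
assert split_name_version('egenix-mx-commercial') == ('egenix-mx-commercial', '')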
def unique_values(func):
@@ -214,8 +210,10 @@ def unique_values(func):
    return wrapper


-REL = re.compile(r"""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
-# this line is here to fix emacs' cruddy broken syntax highlighting
+REL = re.compile(r"""<([^>]*\srel\s{0,10}=\s{0,10}['"]?([^'" >]+)[^>]*)>""", re.I)
+"""
+Regex for an HTML tag with 'rel="val"' attributes.
+"""


@unique_values
@@ -299,27 +297,33 @@ class PackageIndex(Environment):
    """A distribution index that scans web pages for download URLs"""

    def __init__(
-        self, index_url="https://pypi.org/simple/", hosts=('*',),
-        ca_bundle=None, verify_ssl=True, *args, **kw
+        self,
+        index_url="https://pypi.org/simple/",
+        hosts=('*',),
+        ca_bundle=None,
+        verify_ssl=True,
+        *args,
+        **kw,
    ):
-        Environment.__init__(self, *args, **kw)
-        self.index_url = index_url + "/" [:not index_url.endswith('/')]
+        super().__init__(*args, **kw)
+        self.index_url = index_url + "/"[: not index_url.endswith('/')]
        self.scanned_urls = {}
        self.fetched_urls = {}
        self.package_pages = {}
        self.allows = re.compile('|'.join(map(translate, hosts))).match
        self.to_scan = []
-        use_ssl = (
-            verify_ssl
-            and ssl_support.is_available
-            and (ca_bundle or ssl_support.find_ca_bundle())
-        )
-        if use_ssl:
-            self.opener = ssl_support.opener_for(ca_bundle)
-        else:
-            self.opener = urllib.request.urlopen
+        self.opener = urllib.request.urlopen
+
+    def add(self, dist):
+        # ignore invalid versions
+        try:
+            parse_version(dist.version)
+        except Exception:
+            return None
+        return super().add(dist)
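The new `add()` override drops candidates whose version string no longer parses (packaging 23.x raises instead of returning a LegacyVersion); a quick illustration:

from pkg_resources import parse_version

for candidate in ('1.0', '2.0.dev1', 'not-a-version!'):
    try:
        parse_version(candidate)
    except Exception:
        print(candidate, '-> ignored')
    else:
        print(candidate, '-> indexed')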
+    # FIXME: 'PackageIndex.process_url' is too complex (14)
-    def process_url(self, url, retrieve=False):
+    def process_url(self, url, retrieve=False):  # noqa: C901
        """Evaluate a URL as a possible download, and maybe retrieve it"""
        if url in self.scanned_urls and not retrieve:
            return
@@ -348,6 +352,8 @@ class PackageIndex(Environment):
        f = self.open_url(url, tmpl % url)
        if f is None:
            return
+        if isinstance(f, urllib.error.HTTPError) and f.code == 401:
+            self.info("Authentication error: %s" % f.msg)
        self.fetched_urls[f.url] = True
        if 'html' not in f.headers.get('content-type', '').lower():
            f.close()  # not html, we can't process it
@@ -393,11 +399,14 @@
            return True
        msg = (
            "\nNote: Bypassing %s (disallowed host; see "
-            "http://bit.ly/2hrImnY for details).\n")
+            "https://setuptools.pypa.io/en/latest/deprecated/"
+            "easy_install.html#restricting-downloads-with-allow-hosts for details).\n"
+        )
        if fatal:
            raise DistutilsError(msg % url)
        else:
            self.warn(msg, url)
+        return False

    def scan_egg_links(self, search_path):
        dirs = filter(os.path.isdir, search_path)
@@ -425,62 +434,63 @@
            dist.precedence = SOURCE_DIST
            self.add(dist)

+    def _scan(self, link):
+        # Process a URL to see if it's for a package page
+        NO_MATCH_SENTINEL = None, None
+        if not link.startswith(self.index_url):
+            return NO_MATCH_SENTINEL
+
+        parts = list(map(urllib.parse.unquote, link[len(self.index_url) :].split('/')))
+        if len(parts) != 2 or '#' in parts[1]:
+            return NO_MATCH_SENTINEL
+
+        # it's a package page, sanitize and index it
+        pkg = safe_name(parts[0])
+        ver = safe_version(parts[1])
+        self.package_pages.setdefault(pkg.lower(), {})[link] = True
+        return to_filename(pkg), to_filename(ver)
+
    def process_index(self, url, page):
        """Process the contents of a PyPI page"""

-        def scan(link):
-            # Process a URL to see if it's for a package page
-            if link.startswith(self.index_url):
-                parts = list(map(
-                    urllib.parse.unquote, link[len(self.index_url):].split('/')
-                ))
-                if len(parts) == 2 and '#' not in parts[1]:
-                    # it's a package page, sanitize and index it
-                    pkg = safe_name(parts[0])
-                    ver = safe_version(parts[1])
-                    self.package_pages.setdefault(pkg.lower(), {})[link] = True
-                    return to_filename(pkg), to_filename(ver)
-            return None, None
-
        # process an index page into the package-page index
        for match in HREF.finditer(page):
            try:
-                scan(urllib.parse.urljoin(url, htmldecode(match.group(1))))
+                self._scan(urllib.parse.urljoin(url, htmldecode(match.group(1))))
            except ValueError:
                pass

-        pkg, ver = scan(url)  # ensure this page is in the page index
-        if pkg:
-            # process individual package page
-            for new_url in find_external_links(url, page):
-                # Process the found URL
-                base, frag = egg_info_for_url(new_url)
-                if base.endswith('.py') and not frag:
-                    if ver:
-                        new_url += '#egg=%s-%s' % (pkg, ver)
-                    else:
-                        self.need_version_info(url)
-                self.scan_url(new_url)
-
-            return PYPI_MD5.sub(
-                lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1, 3, 2), page
-            )
-        else:
+        pkg, ver = self._scan(url)  # ensure this page is in the page index
+        if not pkg:
            return ""  # no sense double-scanning non-package pages

+        # process individual package page
+        for new_url in find_external_links(url, page):
+            # Process the found URL
+            base, frag = egg_info_for_url(new_url)
+            if base.endswith('.py') and not frag:
+                if ver:
+                    new_url += '#egg=%s-%s' % (pkg, ver)
+                else:
+                    self.need_version_info(url)
+            self.scan_url(new_url)
+
+        return PYPI_MD5.sub(
+            lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1, 3, 2), page
+        )
    def need_version_info(self, url):
        self.scan_all(
            "Page at %s links to .py file(s) without version info; an index "
-            "scan is required.", url
+            "scan is required.",
+            url,
        )

    def scan_all(self, msg=None, *args):
        if self.index_url not in self.fetched_urls:
            if msg:
                self.warn(msg, *args)
-            self.info(
-                "Scanning index of all packages (this may take a while)"
-            )
+            self.info("Scanning index of all packages (this may take a while)")
        self.scan_url(self.index_url)

    def find_packages(self, requirement):
@@ -505,15 +515,13 @@
            if dist in requirement:
                return dist
            self.debug("%s does not match %s", requirement, dist)
-        return super(PackageIndex, self).obtain(requirement, installer)
+        return super().obtain(requirement, installer)

    def check_hash(self, checker, filename, tfp):
        """
        checker is a ContentChecker
        """
-        checker.report(
-            self.debug,
-            "Validating %%s checksum for %s" % filename)
+        checker.report(self.debug, "Validating %%s checksum for %s" % filename)
        if not checker.is_valid():
            tfp.close()
            os.unlink(filename)
@@ -550,7 +558,8 @@
            else:  # no distros seen for this name, might be misspelled
                meth, msg = (
                    self.warn,
-                    "Couldn't find index page for %r (maybe misspelled?)")
+                    "Couldn't find index page for %r (maybe misspelled?)",
+                )
            meth(msg, requirement.unsafe_name)
            self.scan_all()
@ -588,9 +597,15 @@ class PackageIndex(Environment):
spec = parse_requirement_arg(spec) spec = parse_requirement_arg(spec)
return getattr(self.fetch_distribution(spec, tmpdir), 'location', None) return getattr(self.fetch_distribution(spec, tmpdir), 'location', None)
def fetch_distribution( def fetch_distribution( # noqa: C901 # is too complex (14) # FIXME
self, requirement, tmpdir, force_scan=False, source=False, self,
develop_ok=False, local_index=None): requirement,
tmpdir,
force_scan=False,
source=False,
develop_ok=False,
local_index=None,
):
"""Obtain a distribution suitable for fulfilling `requirement` """Obtain a distribution suitable for fulfilling `requirement`
`requirement` must be a ``pkg_resources.Requirement`` instance. `requirement` must be a ``pkg_resources.Requirement`` instance.
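A hedged usage sketch for the reflowed signature; the package name and tmpdir are made up, and a live index would be consulted at call time:

    from pkg_resources import Requirement
    from setuptools.package_index import PackageIndex

    index = PackageIndex()
    req = Requirement.parse('examplepkg>=1.0')
    # source=True restricts matches to source distributions;
    # the method returns None when nothing suitable is found.
    dist = index.fetch_distribution(req, '/tmp/build', source=True)
    if dist is not None:
        print(dist.location)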
@ -618,25 +633,24 @@ class PackageIndex(Environment):
# Find a matching distribution; may be called more than once # Find a matching distribution; may be called more than once
for dist in env[req.key]: for dist in env[req.key]:
if dist.precedence == DEVELOP_DIST and not develop_ok: if dist.precedence == DEVELOP_DIST and not develop_ok:
if dist not in skipped: if dist not in skipped:
self.warn( self.warn(
"Skipping development or system egg: %s", dist, "Skipping development or system egg: %s",
dist,
) )
skipped[dist] = 1 skipped[dist] = 1
continue continue
test = ( test = dist in req and (dist.precedence <= SOURCE_DIST or not source)
dist in req
and (dist.precedence <= SOURCE_DIST or not source)
)
if test: if test:
loc = self.download(dist.location, tmpdir) loc = self.download(dist.location, tmpdir)
dist.download_location = loc dist.download_location = loc
if os.path.exists(dist.download_location): if os.path.exists(dist.download_location):
return dist return dist
return None
if force_scan: if force_scan:
self.prescan() self.prescan()
self.find_packages(requirement) self.find_packages(requirement)
@ -660,6 +674,7 @@ class PackageIndex(Environment):
(source and "a source distribution of " or ""), (source and "a source distribution of " or ""),
requirement, requirement,
) )
return None
else: else:
self.info("Best match: %s", dist) self.info("Best match: %s", dist)
return dist.clone(location=dist.download_location) return dist.clone(location=dist.download_location)
@ -679,10 +694,15 @@ class PackageIndex(Environment):
def gen_setup(self, filename, fragment, tmpdir): def gen_setup(self, filename, fragment, tmpdir):
match = EGG_FRAGMENT.match(fragment) match = EGG_FRAGMENT.match(fragment)
dists = match and [ dists = (
d for d in match
interpret_distro_name(filename, match.group(1), None) if d.version and [
] or [] d
for d in interpret_distro_name(filename, match.group(1), None)
if d.version
]
or []
)
if len(dists) == 1: # unambiguous ``#egg`` fragment if len(dists) == 1: # unambiguous ``#egg`` fragment
basename = os.path.basename(filename) basename = os.path.basename(filename)
@ -690,8 +710,7 @@ class PackageIndex(Environment):
# Make sure the file has been downloaded to the temp dir. # Make sure the file has been downloaded to the temp dir.
if os.path.dirname(filename) != tmpdir: if os.path.dirname(filename) != tmpdir:
dst = os.path.join(tmpdir, basename) dst = os.path.join(tmpdir, basename)
from setuptools.command.easy_install import samefile if not (os.path.exists(dst) and os.path.samefile(filename, dst)):
if not samefile(filename, dst):
shutil.copy2(filename, dst) shutil.copy2(filename, dst)
filename = dst filename = dst
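The replacement inlines the old `easy_install.samefile` helper; the `os.path.exists` guard matters because `os.path.samefile` raises `FileNotFoundError` when the destination does not exist yet. A standalone sketch of the same copy-if-needed logic (function name is illustrative):

    import os
    import shutil

    def copy_into(src, dst_dir):
        dst = os.path.join(dst_dir, os.path.basename(src))
        # samefile() would raise if dst were missing, so check existence first
        if not (os.path.exists(dst) and os.path.samefile(src, dst)):
            shutil.copy2(src, dst)
        return dst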
@ -700,8 +719,9 @@ class PackageIndex(Environment):
"from setuptools import setup\n" "from setuptools import setup\n"
"setup(name=%r, version=%r, py_modules=[%r])\n" "setup(name=%r, version=%r, py_modules=[%r])\n"
% ( % (
dists[0].project_name, dists[0].version, dists[0].project_name,
os.path.splitext(basename)[0] dists[0].version,
os.path.splitext(basename)[0],
) )
) )
return filename return filename
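For a hypothetical download `Foo-1.0.py#egg=Foo-1.0`, the shim this writes would come out roughly as (module name keeps the full basename, version and all):

    from setuptools import setup
    setup(name='Foo', version='1.0', py_modules=['Foo-1.0'])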
@ -737,7 +757,7 @@ class PackageIndex(Environment):
size = -1 size = -1
if "content-length" in headers: if "content-length" in headers:
# Some servers return multiple Content-Length headers :( # Some servers return multiple Content-Length headers :(
sizes = get_all_headers(headers, 'Content-Length') sizes = headers.get_all('Content-Length')
size = max(map(int, sizes)) size = max(map(int, sizes))
self.reporthook(url, filename, blocknum, bs, size) self.reporthook(url, filename, blocknum, bs, size)
with open(filename, 'wb') as tfp: with open(filename, 'wb') as tfp:
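The `get_all_headers` compat shim is gone; the stdlib message API covers it directly. A runnable illustration of taking the max when a buggy server repeats Content-Length:

    from email.message import Message

    headers = Message()
    headers['Content-Length'] = '1024'
    headers['Content-Length'] = '2048'  # duplicate from a misbehaving server
    size = max(map(int, headers.get_all('Content-Length')))
    assert size == 2048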
@ -759,40 +779,40 @@ class PackageIndex(Environment):
def reporthook(self, url, filename, blocknum, blksize, size): def reporthook(self, url, filename, blocknum, blksize, size):
pass # no-op pass # no-op
def open_url(self, url, warning=None): # FIXME:
def open_url(self, url, warning=None): # noqa: C901 # is too complex (12)
if url.startswith('file:'): if url.startswith('file:'):
return local_open(url) return local_open(url)
try: try:
return open_with_auth(url, self.opener) return open_with_auth(url, self.opener)
except (ValueError, http_client.InvalidURL) as v: except (ValueError, http.client.InvalidURL) as v:
msg = ' '.join([str(arg) for arg in v.args]) msg = ' '.join([str(arg) for arg in v.args])
if warning: if warning:
self.warn(warning, msg) self.warn(warning, msg)
else: else:
raise DistutilsError('%s %s' % (url, msg)) raise DistutilsError('%s %s' % (url, msg)) from v
except urllib.error.HTTPError as v: except urllib.error.HTTPError as v:
return v return v
except urllib.error.URLError as v: except urllib.error.URLError as v:
if warning: if warning:
self.warn(warning, v.reason) self.warn(warning, v.reason)
else: else:
raise DistutilsError("Download error for %s: %s" raise DistutilsError(
% (url, v.reason)) "Download error for %s: %s" % (url, v.reason)
except http_client.BadStatusLine as v: ) from v
except http.client.BadStatusLine as v:
if warning: if warning:
self.warn(warning, v.line) self.warn(warning, v.line)
else: else:
raise DistutilsError( raise DistutilsError(
'%s returned a bad status line. The server might be ' '%s returned a bad status line. The server might be '
'down, %s' % 'down, %s' % (url, v.line)
(url, v.line) ) from v
) except (http.client.HTTPException, OSError) as v:
except (http_client.HTTPException, socket.error) as v:
if warning: if warning:
self.warn(warning, v) self.warn(warning, v)
else: else:
raise DistutilsError("Download error for %s: %s" raise DistutilsError("Download error for %s: %s" % (url, v)) from v
% (url, v))
def _download_url(self, scheme, url, tmpdir): def _download_url(self, scheme, url, tmpdir):
# Determine download filename # Determine download filename
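Every error branch in `open_url` now chains with `raise ... from v`, so the original network error survives as `__cause__` on the `DistutilsError`. A self-contained sketch of the pattern (the OSError is simulated, not a real download failure):

    from distutils.errors import DistutilsError

    def fetch(url):
        try:
            raise OSError('connection refused')  # stand-in for a failed download
        except OSError as v:
            raise DistutilsError('Download error for %s: %s' % (url, v)) from v

    try:
        fetch('https://example.invalid/pkg')
    except DistutilsError as e:
        assert isinstance(e.__cause__, OSError)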
@ -829,46 +849,16 @@ class PackageIndex(Environment):
def _attempt_download(self, url, filename): def _attempt_download(self, url, filename):
headers = self._download_to(url, filename) headers = self._download_to(url, filename)
if 'html' in headers.get('content-type', '').lower(): if 'html' in headers.get('content-type', '').lower():
return self._download_html(url, headers, filename) return self._invalid_download_html(url, headers, filename)
else: else:
return filename return filename
def _download_html(self, url, headers, filename): def _invalid_download_html(self, url, headers, filename):
file = open(filename)
for line in file:
if line.strip():
# Check for a subversion index page
if re.search(r'<title>([^- ]+ - )?Revision \d+:', line):
# it's a subversion index page:
file.close()
os.unlink(filename)
return self._download_svn(url, filename)
break # not an index page
file.close()
os.unlink(filename) os.unlink(filename)
raise DistutilsError("Unexpected HTML page found at " + url) raise DistutilsError(f"Unexpected HTML page found at {url}")
def _download_svn(self, url, filename): def _download_svn(self, url, _filename):
warnings.warn("SVN download support is deprecated", UserWarning) raise DistutilsError(f"Invalid config, SVN download is not supported: {url}")
url = url.split('#', 1)[0] # remove any fragment for svn's sake
creds = ''
if url.lower().startswith('svn:') and '@' in url:
scheme, netloc, path, p, q, f = urllib.parse.urlparse(url)
if not netloc and path.startswith('//') and '/' in path[2:]:
netloc, path = path[2:].split('/', 1)
auth, host = _splituser(netloc)
if auth:
if ':' in auth:
user, pw = auth.split(':', 1)
creds = " --username=%s --password=%s" % (user, pw)
else:
creds = " --username=" + auth
netloc = host
parts = scheme, netloc, url, p, q, f
url = urllib.parse.urlunparse(parts)
self.info("Doing subversion checkout from %s to %s", url, filename)
os.system("svn checkout%s -q %s %s" % (creds, url, filename))
return filename
@staticmethod @staticmethod
def _vcs_split_rev_from_url(url, pop_prefix=False): def _vcs_split_rev_from_url(url, pop_prefix=False):
@ -897,10 +887,13 @@ class PackageIndex(Environment):
if rev is not None: if rev is not None:
self.info("Checking out %s", rev) self.info("Checking out %s", rev)
os.system("git -C %s checkout --quiet %s" % ( os.system(
filename, "git -C %s checkout --quiet %s"
rev, % (
)) filename,
rev,
)
)
return filename return filename
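The git and hg hunks only reflow the `os.system()` calls. Purely as an aside, the same checkout expressed with `subprocess` sidesteps shell interpolation entirely; this is illustrative, not the committed code:

    import subprocess

    def git_checkout(repo_dir, rev):
        # argument-list form: no shell, no quoting pitfalls
        subprocess.run(['git', '-C', repo_dir, 'checkout', '--quiet', rev], check=True)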
@ -913,10 +906,13 @@ class PackageIndex(Environment):
if rev is not None: if rev is not None:
self.info("Updating to %s", rev) self.info("Updating to %s", rev)
os.system("hg --cwd %s up -C -r %s -q" % ( os.system(
filename, "hg --cwd %s up -C -r %s -q"
rev, % (
)) filename,
rev,
)
)
return filename return filename
@ -937,7 +933,7 @@ entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub
def decode_entity(match): def decode_entity(match):
what = match.group(0) what = match.group(0)
return unescape(what) return html.unescape(what)
def htmldecode(text): def htmldecode(text):
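`decode_entity` now defers entirely to the stdlib. A quick check of what that delegation handles, including numeric and hex character references:

    import html

    assert html.unescape('&lt;a href=&quot;x&quot;&gt;') == '<a href="x">'
    assert html.unescape('&#65;&#x42;') == 'AB'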
@ -969,8 +965,7 @@ def socket_timeout(timeout=15):
def _encode_auth(auth): def _encode_auth(auth):
""" """
A function compatible with Python 2.3-3.3 that will encode Encode auth from a URL suitable for an HTTP header.
auth from a URL suitable for an HTTP header.
>>> str(_encode_auth('username%3Apassword')) >>> str(_encode_auth('username%3Apassword'))
'dXNlcm5hbWU6cGFzc3dvcmQ=' 'dXNlcm5hbWU6cGFzc3dvcmQ='
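The doctest value above is plain URL-unquote followed by base64, reproduced standalone:

    import base64
    import urllib.parse

    auth = urllib.parse.unquote('username%3Apassword')
    encoded = base64.standard_b64encode(auth.encode()).decode()
    assert encoded == 'dXNlcm5hbWU6cGFzc3dvcmQ='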
@ -1012,7 +1007,7 @@ class PyPIConfig(configparser.RawConfigParser):
Load from ~/.pypirc Load from ~/.pypirc
""" """
defaults = dict.fromkeys(['username', 'password', 'repository'], '') defaults = dict.fromkeys(['username', 'password', 'repository'], '')
configparser.RawConfigParser.__init__(self, defaults) super().__init__(defaults)
rc = os.path.join(os.path.expanduser('~'), '.pypirc') rc = os.path.join(os.path.expanduser('~'), '.pypirc')
if os.path.exists(rc): if os.path.exists(rc):
@ -1021,7 +1016,8 @@ class PyPIConfig(configparser.RawConfigParser):
@property @property
def creds_by_repository(self): def creds_by_repository(self):
sections_with_repositories = [ sections_with_repositories = [
section for section in self.sections() section
for section in self.sections()
if self.get(section, 'repository').strip() if self.get(section, 'repository').strip()
] ]
@ -1042,6 +1038,7 @@ class PyPIConfig(configparser.RawConfigParser):
for repository, cred in self.creds_by_repository.items(): for repository, cred in self.creds_by_repository.items():
if url.startswith(repository): if url.startswith(repository):
return cred return cred
return None
def open_with_auth(url, opener=urllib.request.urlopen): def open_with_auth(url, opener=urllib.request.urlopen):
@ -1050,10 +1047,10 @@ def open_with_auth(url, opener=urllib.request.urlopen):
parsed = urllib.parse.urlparse(url) parsed = urllib.parse.urlparse(url)
scheme, netloc, path, params, query, frag = parsed scheme, netloc, path, params, query, frag = parsed
# Double scheme does not raise on Mac OS X as revealed by a # Double scheme does not raise on macOS as revealed by a
# failing test. We would expect "nonnumeric port". Refs #20. # failing test. We would expect "nonnumeric port". Refs #20.
if netloc.endswith(':'): if netloc.endswith(':'):
raise http_client.InvalidURL("nonnumeric port: ''") raise http.client.InvalidURL("nonnumeric port: ''")
if scheme in ('http', 'https'): if scheme in ('http', 'https'):
auth, address = _splituser(netloc) auth, address = _splituser(netloc)
@ -1092,7 +1089,8 @@ def open_with_auth(url, opener=urllib.request.urlopen):
# copy of urllib.parse._splituser from Python 3.8 # copy of urllib.parse._splituser from Python 3.8
def _splituser(host): def _splituser(host):
"""splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" """splituser('user[:passwd]@host[:port]')
--> 'user[:passwd]', 'host[:port]'."""
user, delim, host = host.rpartition('@') user, delim, host = host.rpartition('@')
return (user if delim else None), host return (user if delim else None), host
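Only the docstring wraps here; the body is unchanged. Runnable standalone, using the implementation exactly as it appears in the diff (the hostnames are made up):

    def _splituser(host):
        user, delim, host = host.rpartition('@')
        return (user if delim else None), host

    assert _splituser('alice:s3cret@pypi.example.org:443') == ('alice:s3cret', 'pypi.example.org:443')
    assert _splituser('pypi.example.org') == (None, 'pypi.example.org')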
@ -1124,13 +1122,13 @@ def local_open(url):
files.append('<a href="{name}">{name}</a>'.format(name=f)) files.append('<a href="{name}">{name}</a>'.format(name=f))
else: else:
tmpl = ( tmpl = (
"<html><head><title>{url}</title>" "<html><head><title>{url}</title>" "</head><body>{files}</body></html>"
"</head><body>{files}</body></html>") )
body = tmpl.format(url=url, files='\n'.join(files)) body = tmpl.format(url=url, files='\n'.join(files))
status, message = 200, "OK" status, message = 200, "OK"
else: else:
status, message, body = 404, "Path not found", "Not found" status, message, body = 404, "Path not found", "Not found"
headers = {'content-type': 'text/html'} headers = {'content-type': 'text/html'}
body_stream = six.StringIO(body) body_stream = io.StringIO(body)
return urllib.error.HTTPError(url, status, message, headers, body_stream) return urllib.error.HTTPError(url, status, message, headers, body_stream)
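Why returning an `HTTPError` works for `local_open`: given a body stream, it behaves as a file-like response, which is all the callers need. The hunk only swaps `six.StringIO` for `io.StringIO`; a small check of the resulting object:

    import io
    import urllib.error

    err = urllib.error.HTTPError(
        'file:///nonexistent', 404, 'Path not found',
        {'content-type': 'text/html'}, io.StringIO('Not found'),
    )
    assert err.code == 404
    assert err.read() == 'Not found'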

View file

@ -1,60 +0,0 @@
"""
Compatibility Support for Python 2.7 and earlier
"""
import sys
import platform
from setuptools.extern import six
def get_all_headers(message, key):
"""
Given an HTTPMessage, return all headers matching a given key.
"""
return message.get_all(key)
if six.PY2:
def get_all_headers(message, key):
return message.getheaders(key)
linux_py2_ascii = (
platform.system() == 'Linux' and
six.PY2
)
rmtree_safe = str if linux_py2_ascii else lambda x: x
"""Workaround for http://bugs.python.org/issue24672"""
try:
from ._imp import find_module, PY_COMPILED, PY_FROZEN, PY_SOURCE
from ._imp import get_frozen_object, get_module
except ImportError:
import imp
from imp import PY_COMPILED, PY_FROZEN, PY_SOURCE # noqa
def find_module(module, paths=None):
"""Just like 'imp.find_module()', but with package support"""
parts = module.split('.')
while parts:
part = parts.pop(0)
f, path, (suffix, mode, kind) = info = imp.find_module(part, paths)
if kind == imp.PKG_DIRECTORY:
parts = parts or ['__init__']
paths = [path]
elif parts:
raise ImportError("Can't find %r in %s" % (parts, module))
return info
def get_frozen_object(module, paths):
return imp.get_frozen_object(module)
def get_module(module, paths, info):
imp.load_module(module, *info)
return sys.modules[module]

Some files were not shown because too many files have changed in this diff