Merge branch 'nightly' into dependabot/pip/nightly/importlib-metadata-5.2.0

This commit is contained in:
JonnyWong16 2022-12-21 16:02:16 -08:00 committed by GitHub
commit dbd254f0ca
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
93 changed files with 2039 additions and 2017 deletions

View file

@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Stale
uses: actions/stale@v6
uses: actions/stale@v7
with:
stale-issue-message: >
This issue is stale because it has been open for 30 days with no activity.
@ -30,7 +30,7 @@ jobs:
days-before-close: 5
- name: Invalid Template
uses: actions/stale@v6
uses: actions/stale@v7
with:
stale-issue-message: >
Invalid issues template.

View file

@ -10,6 +10,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Label Issues
uses: dessant/label-actions@v2
uses: dessant/label-actions@v3
with:
github-token: ${{ github.token }}

View file

@ -13,7 +13,7 @@ jobs:
if: ${{ !contains(github.event.head_commit.message, '[skip ci]') }}
steps:
- name: Checkout Code
uses: actions/checkout@v3.1.0
uses: actions/checkout@v3.2.0
- name: Prepare
id: prepare
@ -47,7 +47,7 @@ jobs:
version: latest
- name: Cache Docker Layers
uses: actions/cache@v3.0.11
uses: actions/cache@v3.2.0
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}

View file

@ -24,7 +24,7 @@ jobs:
steps:
- name: Checkout Code
uses: actions/checkout@v3.1.0
uses: actions/checkout@v3.2.0
- name: Set Release Version
id: get_version
@ -52,7 +52,7 @@ jobs:
echo $GITHUB_SHA > version.txt
- name: Set Up Python
uses: actions/setup-python@v4.3.0
uses: actions/setup-python@v4.3.1
with:
python-version: '3.9'
cache: pip
@ -103,7 +103,7 @@ jobs:
uses: technote-space/workflow-conclusion-action@v3.0
- name: Checkout Code
uses: actions/checkout@v3.1.0
uses: actions/checkout@v3.2.0
- name: Set Release Version
id: get_version

View file

@ -20,7 +20,7 @@ jobs:
- armhf
steps:
- name: Checkout Code
uses: actions/checkout@v3.1.0
uses: actions/checkout@v3.2.0
- name: Prepare
id: prepare
@ -38,7 +38,7 @@ jobs:
uses: docker/setup-qemu-action@v2
- name: Build Snap Package
uses: diddlesnaps/snapcraft-multiarch-action@v1.5.0
uses: diddlesnaps/snapcraft-multiarch-action@v1
id: build
with:
architecture: ${{ matrix.architecture }}

View file

@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout Code
uses: actions/checkout@v3.1.0
uses: actions/checkout@v3.2.0
- name: Comment on Pull Request
uses: mshick/add-pr-comment@v2

View file

@ -24,6 +24,7 @@
<html>
<head>
<meta name="viewport" content="width=device-width">
<meta name="color-scheme" content="light dark">
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Tautulli Newsletter - ${subject}</title>
<link rel="shortcut icon" href="${base_url_image + 'images/favicon/favicon.ico' if base_url_image else 'https://tautulli.com/images/favicon.ico'}">

View file

@ -24,6 +24,7 @@
<html>
<head>
<meta name="viewport" content="width=device-width"/>
<meta name="color-scheme" content="light dark">
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<title>Tautulli Newsletter - ${subject}</title>
<link rel="shortcut icon" href="${base_url_image + 'images/favicon/favicon.ico' if base_url_image else 'https://tautulli.com/images/favicon.ico'}">

View file

@ -1,4 +1,4 @@
from .core import contents, where
__all__ = ["contents", "where"]
__version__ = "2022.09.24"
__version__ = "2022.12.07"

View file

@ -636,37 +636,6 @@ BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
ZQ==
-----END CERTIFICATE-----
# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
# Label: "Network Solutions Certificate Authority"
# Serial: 116697915152937497490437556386812487904
# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e
# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce
# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c
-----BEGIN CERTIFICATE-----
MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp
dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV
UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO
ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG
SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz
c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP
OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl
mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF
BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4
qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw
gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB
BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu
bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp
dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8
6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/
h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH
/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv
wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
-----END CERTIFICATE-----
# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
# Label: "COMODO ECC Certification Authority"
@ -2204,46 +2173,6 @@ KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg
xwy8p2Fp8fc74SrL+SvzZpA3
-----END CERTIFICATE-----
# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
# Label: "Staat der Nederlanden EV Root CA"
# Serial: 10000013
# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba
# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb
# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a
-----BEGIN CERTIFICATE-----
MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO
TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh
dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y
MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg
TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS
b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS
M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC
UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d
Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p
rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l
pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb
j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC
KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS
/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X
cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH
1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP
px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB
/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7
MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI
eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u
2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS
v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC
wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy
CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e
vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6
Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa
Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL
eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8
FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc
7uzXLg==
-----END CERTIFICATE-----
# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
# Label: "IdenTrust Commercial Root CA 1"
@ -2851,116 +2780,6 @@ T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe
MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g==
-----END CERTIFICATE-----
# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
# Label: "TrustCor RootCert CA-1"
# Serial: 15752444095811006489
# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45
# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a
# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c
-----BEGIN CERTIFICATE-----
MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD
VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y
IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB
pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h
IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG
A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU
cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid
RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V
seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme
9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV
EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW
hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/
DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw
DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD
ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I
/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf
ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ
yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts
L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN
zl/HHk484IkzlQsPpTLWPFp5LBk=
-----END CERTIFICATE-----
# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
# Label: "TrustCor RootCert CA-2"
# Serial: 2711694510199101698
# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64
# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0
# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65
-----BEGIN CERTIFICATE-----
MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV
BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig
Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk
MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg
Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD
VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy
dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+
QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq
1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp
2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK
DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape
az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF
3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88
oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM
g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3
mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh
8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd
BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U
nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw
DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX
dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+
MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL
/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX
CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa
ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW
2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7
N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3
Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB
As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp
5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu
1uwJ
-----END CERTIFICATE-----
# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
# Label: "TrustCor ECA-1"
# Serial: 9548242946988625984
# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c
# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd
# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c
-----BEGIN CERTIFICATE-----
MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD
VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y
IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV
BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig
RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb
3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA
BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5
3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou
owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/
wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF
ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf
BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/
MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv
civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2
AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F
hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50
soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI
WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi
tJ/X5g==
-----END CERTIFICATE-----
# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
# Label: "SSL.com Root Certification Authority RSA"

View file

@ -1,15 +1,12 @@
"""High-performance, pure-Python HTTP server used by CherryPy."""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
try:
import pkg_resources
from importlib import metadata
except ImportError:
pass
import importlib_metadata as metadata # noqa: WPS440
try:
__version__ = pkg_resources.get_distribution('cheroot').version
__version__ = metadata.version('cheroot')
except Exception:
__version__ = 'unknown'

View file

@ -1,19 +1,9 @@
# pylint: disable=unused-import
"""Compatibility code for using Cheroot with various versions of Python."""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os
import platform
import re
import six
try:
import selectors # lgtm [py/unused-import]
except ImportError:
import selectors2 as selectors # noqa: F401 # lgtm [py/unused-import]
try:
import ssl
@ -22,20 +12,6 @@ try:
except ImportError:
IS_ABOVE_OPENSSL10 = None
# contextlib.suppress was added in Python 3.4
try:
from contextlib import suppress
except ImportError:
from contextlib import contextmanager
@contextmanager
def suppress(*exceptions):
"""Return a context manager that suppresses the `exceptions`."""
try:
yield
except exceptions:
pass
IS_CI = bool(os.getenv('CI'))
IS_GITHUB_ACTIONS_WORKFLOW = bool(os.getenv('GITHUB_WORKFLOW'))
@ -53,53 +29,23 @@ PLATFORM_ARCH = platform.machine()
IS_PPC = PLATFORM_ARCH.startswith('ppc')
if not six.PY2:
def ntob(n, encoding='ISO-8859-1'):
"""Return the native string as bytes in the given encoding."""
assert_native(n)
# In Python 3, the native string type is unicode
return n.encode(encoding)
def ntob(n, encoding='ISO-8859-1'):
"""Return the native string as bytes in the given encoding."""
assert_native(n)
# In Python 3, the native string type is unicode
return n.encode(encoding)
def ntou(n, encoding='ISO-8859-1'):
"""Return the native string as Unicode with the given encoding."""
assert_native(n)
# In Python 3, the native string type is unicode
return n
def bton(b, encoding='ISO-8859-1'):
"""Return the byte string as native string in the given encoding."""
return b.decode(encoding)
else:
# Python 2
def ntob(n, encoding='ISO-8859-1'):
"""Return the native string as bytes in the given encoding."""
assert_native(n)
# In Python 2, the native string type is bytes. Assume it's already
# in the given encoding, which for ISO-8859-1 is almost always what
# was intended.
return n
def ntou(n, encoding='ISO-8859-1'):
"""Return the native string as Unicode with the given encoding."""
assert_native(n)
# In Python 3, the native string type is unicode
return n
def ntou(n, encoding='ISO-8859-1'):
"""Return the native string as Unicode with the given encoding."""
assert_native(n)
# In Python 2, the native string type is bytes.
# First, check for the special encoding 'escape'. The test suite uses
# this to signal that it wants to pass a string with embedded \uXXXX
# escapes, but without having to prefix it with u'' for Python 2,
# but no prefix for Python 3.
if encoding == 'escape':
return re.sub(
r'\\u([0-9a-zA-Z]{4})',
lambda m: six.unichr(int(m.group(1), 16)),
n.decode('ISO-8859-1'),
)
# Assume it's already in the given encoding, which for ISO-8859-1
# is almost always what was intended.
return n.decode(encoding)
def bton(b, encoding='ISO-8859-1'):
"""Return the byte string as native string in the given encoding."""
return b
def bton(b, encoding='ISO-8859-1'):
"""Return the byte string as native string in the given encoding."""
return b.decode(encoding)
def assert_native(n):
@ -113,17 +59,6 @@ def assert_native(n):
raise TypeError('n must be a native str (got %s)' % type(n).__name__)
if not six.PY2:
"""Python 3 has :py:class:`memoryview` builtin."""
# Python 2.7 has it backported, but socket.write() does
# str(memoryview(b'0' * 100)) -> <memory at 0x7fb6913a5588>
# instead of accessing it correctly.
memoryview = memoryview
else:
"""Link :py:class:`memoryview` to buffer under Python 2."""
memoryview = buffer # noqa: F821
def extract_bytes(mv):
r"""Retrieve bytes out of the given input buffer.
@ -138,7 +73,7 @@ def extract_bytes(mv):
or :py:class:`bytes`
"""
if isinstance(mv, memoryview):
return bytes(mv) if six.PY2 else mv.tobytes()
return mv.tobytes()
if isinstance(mv, bytes):
return mv

21
lib/cheroot/_compat.pyi Normal file
View file

@ -0,0 +1,21 @@
from typing import Any, ContextManager, Optional, Type, Union
def suppress(*exceptions: Type[BaseException]) -> ContextManager[None]: ...
IS_ABOVE_OPENSSL10: Optional[bool]
IS_CI: bool
IS_GITHUB_ACTIONS_WORKFLOW: bool
IS_PYPY: bool
SYS_PLATFORM: str
IS_WINDOWS: bool
IS_LINUX: bool
IS_MACOS: bool
PLATFORM_ARCH: str
IS_PPC: bool
def ntob(n: str, encoding: str = ...) -> bytes: ...
def ntou(n: str, encoding: str = ...) -> str: ...
def bton(b: bytes, encoding: str = ...) -> str: ...
def assert_native(n: str) -> None: ...
def extract_bytes(mv: Union[memoryview, bytes]) -> bytes: ...

View file

@ -28,18 +28,14 @@ Basic usage:
"""
import argparse
from importlib import import_module
import os
import sys
import six
import urllib.parse # noqa: WPS301
from importlib import import_module
from contextlib import suppress
from . import server
from . import wsgi
from ._compat import suppress
__metaclass__ = type
class BindLocation:
@ -143,7 +139,7 @@ def parse_wsgi_bind_location(bind_addr_string):
return AbstractSocket(bind_addr_string[1:])
# try and match for an IP/hostname and port
match = six.moves.urllib.parse.urlparse(
match = urllib.parse.urlparse(
'//{addr}'.format(addr=bind_addr_string),
)
try:

View file

@ -1,22 +1,17 @@
"""Utilities to manage open connections."""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import io
import os
import socket
import threading
import time
import selectors
from contextlib import suppress
from . import errors
from ._compat import selectors
from ._compat import suppress
from ._compat import IS_WINDOWS
from .makefile import MakeFile
import six
try:
import fcntl
except ImportError:
@ -310,8 +305,7 @@ class ConnectionManager:
msg,
]
sock_to_make = s if not six.PY2 else s._sock
wfile = mf(sock_to_make, 'wb', io.DEFAULT_BUFFER_SIZE)
wfile = mf(s, 'wb', io.DEFAULT_BUFFER_SIZE)
try:
wfile.write(''.join(buf).encode('ISO-8859-1'))
except socket.error as ex:
@ -327,10 +321,7 @@ class ConnectionManager:
conn = self.server.ConnectionClass(self.server, s, mf)
if not isinstance(
self.server.bind_addr,
(six.text_type, six.binary_type),
):
if not isinstance(self.server.bind_addr, (str, bytes)):
# optional values
# Until we do DNS lookups, omit REMOTE_HOST
if addr is None: # sometimes this can happen

View file

@ -1,17 +1,14 @@
# -*- coding: utf-8 -*-
"""Collection of exceptions raised and/or processed by Cheroot."""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import errno
import sys
class MaxSizeExceeded(Exception):
"""Exception raised when a client sends more data then acceptable within limit.
"""Exception raised when a client sends more data then allowed under limit.
Depends on ``request.body.maxbytes`` config option if used within CherryPy
Depends on ``request.body.maxbytes`` config option if used within CherryPy.
"""

View file

@ -1,4 +1,4 @@
from typing import Any, List, Set, Tuple
from typing import List, Set, Tuple, Type
class MaxSizeExceeded(Exception): ...
class NoSSLError(Exception): ...
@ -10,4 +10,4 @@ socket_error_eintr: List[int]
socket_errors_to_ignore: List[int]
socket_errors_nonblocking: List[int]
acceptable_sock_shutdown_error_codes: Set[int]
acceptable_sock_shutdown_exceptions: Tuple[Exception]
acceptable_sock_shutdown_exceptions: Tuple[Type[Exception], ...]

View file

@ -1,21 +1,9 @@
"""Socket file object."""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import socket
try:
# prefer slower Python-based io module
import _pyio as io
except ImportError:
# Python 2.6
import io
import six
from . import errors
from ._compat import extract_bytes, memoryview
# prefer slower Python-based io module
import _pyio as io
# Write only 16K at a time to sockets
@ -48,400 +36,41 @@ class BufferedWriter(io.BufferedWriter):
del self._write_buf[:n]
class MakeFile_PY2(getattr(socket, '_fileobject', object)):
"""Faux file object attached to a socket object."""
class StreamReader(io.BufferedReader):
"""Socket stream reader."""
def __init__(self, *args, **kwargs):
"""Initialize faux file object."""
def __init__(self, sock, mode='r', bufsize=io.DEFAULT_BUFFER_SIZE):
"""Initialize socket stream reader."""
super().__init__(socket.SocketIO(sock, mode), bufsize)
self.bytes_read = 0
def read(self, *args, **kwargs):
"""Capture bytes read."""
val = super().read(*args, **kwargs)
self.bytes_read += len(val)
return val
def has_data(self):
"""Return true if there is buffered data to read."""
return len(self._read_buf) > self._read_pos
class StreamWriter(BufferedWriter):
"""Socket stream writer."""
def __init__(self, sock, mode='w', bufsize=io.DEFAULT_BUFFER_SIZE):
"""Initialize socket stream writer."""
super().__init__(socket.SocketIO(sock, mode), bufsize)
self.bytes_written = 0
socket._fileobject.__init__(self, *args, **kwargs)
self._refcount = 0
def _reuse(self):
self._refcount += 1
def _drop(self):
if self._refcount < 0:
self.close()
else:
self._refcount -= 1
def write(self, data):
"""Send entire data contents for non-blocking sockets."""
bytes_sent = 0
data_mv = memoryview(data)
payload_size = len(data_mv)
while bytes_sent < payload_size:
try:
bytes_sent += self.send(
data_mv[bytes_sent:bytes_sent + SOCK_WRITE_BLOCKSIZE],
)
except socket.error as e:
if e.args[0] not in errors.socket_errors_nonblocking:
raise
def send(self, data):
"""Send some part of message to the socket."""
bytes_sent = self._sock.send(extract_bytes(data))
self.bytes_written += bytes_sent
return bytes_sent
def flush(self):
"""Write all data from buffer to socket and reset write buffer."""
if self._wbuf:
buffer = ''.join(self._wbuf)
self._wbuf = []
self.write(buffer)
def recv(self, size):
"""Receive message of a size from the socket."""
while True:
try:
data = self._sock.recv(size)
self.bytes_read += len(data)
return data
except socket.error as e:
what = (
e.args[0] not in errors.socket_errors_nonblocking
and e.args[0] not in errors.socket_error_eintr
)
if what:
raise
class FauxSocket:
"""Faux socket with the minimal interface required by pypy."""
def _reuse(self):
pass
_fileobject_uses_str_type = six.PY2 and isinstance(
socket._fileobject(FauxSocket())._rbuf, six.string_types,
)
# FauxSocket is no longer needed
del FauxSocket
if not _fileobject_uses_str_type: # noqa: C901 # FIXME
def read(self, size=-1):
"""Read data from the socket to buffer."""
# Use max, disallow tiny reads in a loop as they are very
# inefficient.
# We never leave read() with any leftover data from a new recv()
# call in our internal buffer.
rbufsize = max(self._rbufsize, self.default_bufsize)
# Our use of StringIO rather than lists of string objects returned
# by recv() minimizes memory usage and fragmentation that occurs
# when rbufsize is large compared to the typical return value of
# recv().
buf = self._rbuf
buf.seek(0, 2) # seek end
if size < 0:
# Read until EOF
# reset _rbuf. we consume it via buf.
self._rbuf = io.BytesIO()
while True:
data = self.recv(rbufsize)
if not data:
break
buf.write(data)
return buf.getvalue()
else:
# Read until size bytes or EOF seen, whichever comes first
buf_len = buf.tell()
if buf_len >= size:
# Already have size bytes in our buffer? Extract and
# return.
buf.seek(0)
rv = buf.read(size)
self._rbuf = io.BytesIO()
self._rbuf.write(buf.read())
return rv
# reset _rbuf. we consume it via buf.
self._rbuf = io.BytesIO()
while True:
left = size - buf_len
# recv() will malloc the amount of memory given as its
# parameter even though it often returns much less data
# than that. The returned data string is short lived
# as we copy it into a StringIO and free it. This avoids
# fragmentation issues on many platforms.
data = self.recv(left)
if not data:
break
n = len(data)
if n == size and not buf_len:
# Shortcut. Avoid buffer data copies when:
# - We have no data in our buffer.
# AND
# - Our call to recv returned exactly the
# number of bytes we were asked to read.
return data
if n == left:
buf.write(data)
del data # explicit free
break
assert n <= left, 'recv(%d) returned %d bytes' % (left, n)
buf.write(data)
buf_len += n
del data # explicit free
# assert buf_len == buf.tell()
return buf.getvalue()
def readline(self, size=-1):
"""Read line from the socket to buffer."""
buf = self._rbuf
buf.seek(0, 2) # seek end
if buf.tell() > 0:
# check if we already have it in our buffer
buf.seek(0)
bline = buf.readline(size)
if bline.endswith('\n') or len(bline) == size:
self._rbuf = io.BytesIO()
self._rbuf.write(buf.read())
return bline
del bline
if size < 0:
# Read until \n or EOF, whichever comes first
if self._rbufsize <= 1:
# Speed up unbuffered case
buf.seek(0)
buffers = [buf.read()]
# reset _rbuf. we consume it via buf.
self._rbuf = io.BytesIO()
data = None
recv = self.recv
while data != '\n':
data = recv(1)
if not data:
break
buffers.append(data)
return ''.join(buffers)
buf.seek(0, 2) # seek end
# reset _rbuf. we consume it via buf.
self._rbuf = io.BytesIO()
while True:
data = self.recv(self._rbufsize)
if not data:
break
nl = data.find('\n')
if nl >= 0:
nl += 1
buf.write(data[:nl])
self._rbuf.write(data[nl:])
del data
break
buf.write(data)
return buf.getvalue()
else:
# Read until size bytes or \n or EOF seen, whichever comes
# first
buf.seek(0, 2) # seek end
buf_len = buf.tell()
if buf_len >= size:
buf.seek(0)
rv = buf.read(size)
self._rbuf = io.BytesIO()
self._rbuf.write(buf.read())
return rv
# reset _rbuf. we consume it via buf.
self._rbuf = io.BytesIO()
while True:
data = self.recv(self._rbufsize)
if not data:
break
left = size - buf_len
# did we just receive a newline?
nl = data.find('\n', 0, left)
if nl >= 0:
nl += 1
# save the excess data to _rbuf
self._rbuf.write(data[nl:])
if buf_len:
buf.write(data[:nl])
break
else:
# Shortcut. Avoid data copy through buf when
# returning a substring of our first recv().
return data[:nl]
n = len(data)
if n == size and not buf_len:
# Shortcut. Avoid data copy through buf when
# returning exactly all of our first recv().
return data
if n >= left:
buf.write(data[:left])
self._rbuf.write(data[left:])
break
buf.write(data)
buf_len += n
# assert buf_len == buf.tell()
return buf.getvalue()
def has_data(self):
"""Return true if there is buffered data to read."""
return bool(self._rbuf.getvalue())
else:
def read(self, size=-1):
"""Read data from the socket to buffer."""
if size < 0:
# Read until EOF
buffers = [self._rbuf]
self._rbuf = ''
if self._rbufsize <= 1:
recv_size = self.default_bufsize
else:
recv_size = self._rbufsize
while True:
data = self.recv(recv_size)
if not data:
break
buffers.append(data)
return ''.join(buffers)
else:
# Read until size bytes or EOF seen, whichever comes first
data = self._rbuf
buf_len = len(data)
if buf_len >= size:
self._rbuf = data[size:]
return data[:size]
buffers = []
if data:
buffers.append(data)
self._rbuf = ''
while True:
left = size - buf_len
recv_size = max(self._rbufsize, left)
data = self.recv(recv_size)
if not data:
break
buffers.append(data)
n = len(data)
if n >= left:
self._rbuf = data[left:]
buffers[-1] = data[:left]
break
buf_len += n
return ''.join(buffers)
def readline(self, size=-1):
"""Read line from the socket to buffer."""
data = self._rbuf
if size < 0:
# Read until \n or EOF, whichever comes first
if self._rbufsize <= 1:
# Speed up unbuffered case
assert data == ''
buffers = []
while data != '\n':
data = self.recv(1)
if not data:
break
buffers.append(data)
return ''.join(buffers)
nl = data.find('\n')
if nl >= 0:
nl += 1
self._rbuf = data[nl:]
return data[:nl]
buffers = []
if data:
buffers.append(data)
self._rbuf = ''
while True:
data = self.recv(self._rbufsize)
if not data:
break
buffers.append(data)
nl = data.find('\n')
if nl >= 0:
nl += 1
self._rbuf = data[nl:]
buffers[-1] = data[:nl]
break
return ''.join(buffers)
else:
# Read until size bytes or \n or EOF seen, whichever comes
# first
nl = data.find('\n', 0, size)
if nl >= 0:
nl += 1
self._rbuf = data[nl:]
return data[:nl]
buf_len = len(data)
if buf_len >= size:
self._rbuf = data[size:]
return data[:size]
buffers = []
if data:
buffers.append(data)
self._rbuf = ''
while True:
data = self.recv(self._rbufsize)
if not data:
break
buffers.append(data)
left = size - buf_len
nl = data.find('\n', 0, left)
if nl >= 0:
nl += 1
self._rbuf = data[nl:]
buffers[-1] = data[:nl]
break
n = len(data)
if n >= left:
self._rbuf = data[left:]
buffers[-1] = data[:left]
break
buf_len += n
return ''.join(buffers)
def has_data(self):
    """Report whether the read buffer currently holds unread bytes."""
    buffered = self._rbuf
    return bool(buffered)
def write(self, val, *args, **kwargs):
    """Capture bytes written.

    Delegates to the parent writer, then counts ``len(val)`` bytes
    against ``self.bytes_written`` before returning the parent's
    result.
    """
    written = super().write(val, *args, **kwargs)
    self.bytes_written += len(val)
    return written
# Python 3 gets thin io.Buffered* subclasses that count traffic;
# Python 2 falls back to the hand-rolled MakeFile_PY2 above.
if not six.PY2:
    class StreamReader(io.BufferedReader):
        """Socket stream reader."""

        def __init__(self, sock, mode='r', bufsize=io.DEFAULT_BUFFER_SIZE):
            """Initialize socket stream reader."""
            super().__init__(socket.SocketIO(sock, mode), bufsize)
            # Running count of bytes returned by read().
            self.bytes_read = 0

        def read(self, *args, **kwargs):
            """Capture bytes read."""
            val = super().read(*args, **kwargs)
            self.bytes_read += len(val)
            return val

        def has_data(self):
            """Return true if there is buffered data to read."""
            # Peeks at BufferedReader internals (_read_buf/_read_pos);
            # NOTE(review): these are CPython implementation details.
            return len(self._read_buf) > self._read_pos

    class StreamWriter(BufferedWriter):
        """Socket stream writer."""

        def __init__(self, sock, mode='w', bufsize=io.DEFAULT_BUFFER_SIZE):
            """Initialize socket stream writer."""
            super().__init__(socket.SocketIO(sock, mode), bufsize)
            # Running count of bytes passed to write().
            self.bytes_written = 0

        def write(self, val, *args, **kwargs):
            """Capture bytes written."""
            res = super().write(val, *args, **kwargs)
            self.bytes_written += len(val)
            return res

    def MakeFile(sock, mode='r', bufsize=io.DEFAULT_BUFFER_SIZE):
        """File object attached to a socket object."""
        cls = StreamReader if 'r' in mode else StreamWriter
        return cls(sock, mode, bufsize)
else:
    # Python 2: one class plays reader, writer, and factory.
    StreamReader = StreamWriter = MakeFile = MakeFile_PY2
def MakeFile(sock, mode='r', bufsize=io.DEFAULT_BUFFER_SIZE):
    """File object attached to a socket object."""
    # Any mode containing 'r' reads; everything else writes.
    if 'r' in mode:
        stream_cls = StreamReader
    else:
        stream_cls = StreamWriter
    return stream_cls(sock, mode, bufsize)

View file

@@ -5,19 +5,6 @@ SOCK_WRITE_BLOCKSIZE: int
class BufferedWriter(io.BufferedWriter):
def write(self, b): ...
class MakeFile_PY2:
bytes_read: int
bytes_written: int
def __init__(self, *args, **kwargs) -> None: ...
def write(self, data) -> None: ...
def send(self, data): ...
def flush(self) -> None: ...
def recv(self, size): ...
class FauxSocket: ...
def read(self, size: int = ...): ...
def readline(self, size: int = ...): ...
def has_data(self): ...
class StreamReader(io.BufferedReader):
bytes_read: int
def __init__(self, sock, mode: str = ..., bufsize=...) -> None: ...

View file

@@ -65,9 +65,6 @@ And now for a trivial doctest to exercise the test suite
True
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os
import io
import re
@@ -78,20 +75,14 @@ import time
import traceback as traceback_
import logging
import platform
import queue
import contextlib
import threading
try:
from functools import lru_cache
except ImportError:
from backports.functools_lru_cache import lru_cache
import six
from six.moves import queue
from six.moves import urllib
import urllib.parse
from functools import lru_cache
from . import connections, errors, __version__
from ._compat import bton, ntou
from ._compat import bton
from ._compat import IS_PPC
from .workers import threadpool
from .makefile import MakeFile, StreamWriter
@ -606,8 +597,8 @@ class ChunkedRFile:
def read_trailer_lines(self):
"""Read HTTP headers and yield them.
Returns:
Generator: yields CRLF separated lines.
:yields: CRLF separated lines
:ytype: bytes
"""
if not self.closed:
@ -817,10 +808,6 @@ class HTTPRequest:
return False
try:
if six.PY2: # FIXME: Figure out better way to do this
# Ref: https://stackoverflow.com/a/196392/595220 (like this?)
"""This is a dummy check for unicode in URI."""
ntou(bton(uri, 'ascii'), 'ascii')
scheme, authority, path, qs, fragment = urllib.parse.urlsplit(uri)
except UnicodeError:
self.simple_response('400 Bad Request', 'Malformed Request-URI')
@ -1120,7 +1107,7 @@ class HTTPRequest:
buf.append(CRLF)
if msg:
if isinstance(msg, six.text_type):
if isinstance(msg, str):
msg = msg.encode('ISO-8859-1')
buf.append(msg)
@ -1422,10 +1409,7 @@ class HTTPConnection:
https://github.com/daveti/tcpSockHack
msdn.microsoft.com/en-us/commandline/wsl/release_notes#build-15025
"""
six.raise_from( # 3.6+: raise RuntimeError from socket_err
RuntimeError,
socket_err,
)
raise RuntimeError from socket_err
else:
pid, uid, gid = struct.unpack(PEERCRED_STRUCT_DEF, peer_creds)
return pid, uid, gid
@ -1589,7 +1573,7 @@ class HTTPServer:
"""
keep_alive_conn_limit = 10
"""The maximum number of waiting keep-alive connections that will be kept open.
"""Maximum number of waiting keep-alive connections that will be kept open.
Default is 10. Set to None to have unlimited connections."""
@ -1762,13 +1746,13 @@ class HTTPServer:
if os.getenv('LISTEN_PID', None):
# systemd socket activation
self.socket = socket.fromfd(3, socket.AF_INET, socket.SOCK_STREAM)
elif isinstance(self.bind_addr, (six.text_type, six.binary_type)):
elif isinstance(self.bind_addr, (str, bytes)):
# AF_UNIX socket
try:
self.bind_unix_socket(self.bind_addr)
except socket.error as serr:
msg = '%s -- (%s: %s)' % (msg, self.bind_addr, serr)
six.raise_from(socket.error(msg), serr)
raise socket.error(msg) from serr
else:
# AF_INET or AF_INET6 socket
# Get the correct address family for our host (allows IPv6
@ -2007,10 +1991,7 @@ class HTTPServer:
* https://gavv.github.io/blog/ephemeral-port-reuse/
"""
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if nodelay and not isinstance(
bind_addr,
(six.text_type, six.binary_type),
):
if nodelay and not isinstance(bind_addr, (str, bytes)):
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
if ssl_adapter is not None:
@ -2059,7 +2040,7 @@ class HTTPServer:
"""
return bind_addr[:2]
if isinstance(bind_addr, six.binary_type):
if isinstance(bind_addr, bytes):
bind_addr = bton(bind_addr)
return bind_addr
@ -2109,10 +2090,7 @@ class HTTPServer:
sock = getattr(self, 'socket', None)
if sock:
if not isinstance(
self.bind_addr,
(six.text_type, six.binary_type),
):
if not isinstance(self.bind_addr, (str, bytes)):
# Touch our own socket to make accept() return immediately.
try:
host, port = sock.getsockname()[:2]
@ -2179,7 +2157,7 @@ ssl_adapters = {
def get_ssl_adapter_class(name='builtin'):
"""Return an SSL adapter class for the given name."""
adapter = ssl_adapters[name.lower()]
if isinstance(adapter, six.string_types):
if isinstance(adapter, str):
last_dot = adapter.rfind('.')
attr_name = adapter[last_dot + 1:]
mod_path = adapter[:last_dot]

View file

@ -1,15 +1,9 @@
"""Implementation of the SSL adapter base interface."""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from abc import ABCMeta, abstractmethod
from six import add_metaclass
@add_metaclass(ABCMeta)
class Adapter:
class Adapter(metaclass=ABCMeta):
"""Base class for SSL driver library adapters.
Required methods:

View file

@ -7,12 +7,10 @@ To use this module, set ``HTTPServer.ssl_adapter`` to an instance of
``BuiltinSSLAdapter``.
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import socket
import sys
import threading
from contextlib import suppress
try:
import ssl
@ -27,18 +25,13 @@ except ImportError:
except ImportError:
DEFAULT_BUFFER_SIZE = -1
import six
from . import Adapter
from .. import errors
from .._compat import IS_ABOVE_OPENSSL10, suppress
from .._compat import IS_ABOVE_OPENSSL10
from ..makefile import StreamReader, StreamWriter
from ..server import HTTPServer
if six.PY2:
generic_socket_error = socket.error
else:
generic_socket_error = OSError
generic_socket_error = OSError
def _assert_ssl_exc_contains(exc, *msgs):

View file

@ -1,7 +1,6 @@
from typing import Any
from . import Adapter
generic_socket_error: OSError
DEFAULT_BUFFER_SIZE: int
class BuiltinSSLAdapter(Adapter):
@ -14,5 +13,5 @@ class BuiltinSSLAdapter(Adapter):
def context(self, context) -> None: ...
def bind(self, sock): ...
def wrap(self, sock): ...
def get_environ(self): ...
def get_environ(self, sock): ...
def makefile(self, sock, mode: str = ..., bufsize: int = ...): ...

View file

@ -50,16 +50,11 @@ will be read, and the context will be automatically created from them.
pyopenssl
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import socket
import sys
import threading
import time
import six
try:
import OpenSSL.version
from OpenSSL import SSL
@ -229,8 +224,7 @@ class SSLConnectionProxyMeta:
return type(name, bases, nmspc)
@six.add_metaclass(SSLConnectionProxyMeta)
class SSLConnection:
class SSLConnection(metaclass=SSLConnectionProxyMeta):
r"""A thread-safe wrapper for an ``SSL.Connection``.
:param tuple args: the arguments to create the wrapped \

View file

@ -1,9 +1,9 @@
from . import Adapter
from ..makefile import StreamReader, StreamWriter
from OpenSSL import SSL
from typing import Any
from typing import Any, Type
ssl_conn_type: SSL.Connection
ssl_conn_type: Type[SSL.Connection]
class SSLFileobjectMixin:
ssl_timeout: int
@ -13,13 +13,13 @@ class SSLFileobjectMixin:
def sendall(self, *args, **kwargs): ...
def send(self, *args, **kwargs): ...
class SSLFileobjectStreamReader(SSLFileobjectMixin, StreamReader): ... # type:ignore
class SSLFileobjectStreamWriter(SSLFileobjectMixin, StreamWriter): ... # type:ignore
class SSLFileobjectStreamReader(SSLFileobjectMixin, StreamReader): ... # type:ignore[misc]
class SSLFileobjectStreamWriter(SSLFileobjectMixin, StreamWriter): ... # type:ignore[misc]
class SSLConnectionProxyMeta:
def __new__(mcl, name, bases, nmspc): ...
class SSLConnection():
class SSLConnection:
def __init__(self, *args) -> None: ...
class pyOpenSSLAdapter(Adapter):
@ -28,3 +28,4 @@ class pyOpenSSLAdapter(Adapter):
def wrap(self, sock): ...
def get_environ(self): ...
def makefile(self, sock, mode: str = ..., bufsize: int = ...): ...
def get_context(self) -> SSL.Context: ...

View file

@ -8,6 +8,7 @@ from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
import six
pytest_version = tuple(map(int, pytest.__version__.split('.')))
@ -43,8 +44,17 @@ def pytest_load_initial_conftests(early_config, parser, args):
'<socket.socket fd=-1, family=AF_INET6, '
'type=SocketKind.SOCK_STREAM, proto=.:'
'pytest.PytestUnraisableExceptionWarning:_pytest.unraisableexception',
'ignore:Exception ignored in. '
'<ssl.SSLSocket fd=-1, family=AddressFamily.AF_UNIX, '
'type=SocketKind.SOCK_STREAM, proto=.:'
'pytest.PytestUnraisableExceptionWarning:_pytest.unraisableexception',
))
if six.PY2:
return
# NOTE: `ResourceWarning` does not exist under Python 2 and so using
# NOTE: it in warning filters results in an `_OptionError` exception
# NOTE: being raised.
early_config._inicache['filterwarnings'].extend((
# FIXME: Try to figure out what causes this and ensure that the socket
# FIXME: gets closed.
'ignore:unclosed <socket.socket fd=:ResourceWarning',
'ignore:unclosed <ssl.SSLSocket fd=:ResourceWarning',
))

View file

@ -4,14 +4,12 @@ Contains fixtures, which are tightly bound to the Cheroot framework
itself, useless for end-users' app testing.
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type # pylint: disable=invalid-name
import threading
import time
import pytest
from .._compat import IS_MACOS, IS_WINDOWS # noqa: WPS436
from ..server import Gateway, HTTPServer
from ..testing import ( # noqa: F401 # pylint: disable=unused-import
native_server, wsgi_server,
@ -19,6 +17,20 @@ from ..testing import ( # noqa: F401 # pylint: disable=unused-import
from ..testing import get_server_client
@pytest.fixture
def http_request_timeout():
"""Return a common HTTP request timeout for tests with queries."""
computed_timeout = 0.1
if IS_MACOS:
computed_timeout *= 2
if IS_WINDOWS:
computed_timeout *= 10
return computed_timeout
@pytest.fixture
# pylint: disable=redefined-outer-name
def wsgi_server_client(wsgi_server): # noqa: F811

View file

@ -1,8 +1,5 @@
"""A library of helper functions for the Cheroot test suite."""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import datetime
import logging
import os
@ -10,10 +7,7 @@ import sys
import time
import threading
import types
from six.moves import http_client
import six
import http.client
import cheroot.server
import cheroot.wsgi
@ -60,14 +54,14 @@ class CherootWebCase(webtest.WebCase):
cls.scheme = 'http'
else:
ssl = ' (ssl)'
cls.HTTP_CONN = http_client.HTTPSConnection
cls.HTTP_CONN = http.client.HTTPSConnection
cls.scheme = 'https'
v = sys.version.split()[0]
log.info('Python version used to run this test script: %s' % v)
log.info('Cheroot version: %s' % cheroot.__version__)
log.info('HTTP server version: %s%s' % (cls.httpserver.protocol, ssl))
log.info('PID: %s' % os.getpid())
log.info('Python version used to run this test script: %s', v)
log.info('Cheroot version: %s', cheroot.__version__)
log.info('HTTP server version: %s%s', cls.httpserver.protocol, ssl)
log.info('PID: %s', os.getpid())
if hasattr(cls, 'setup_server'):
# Clear the wsgi server so that
@ -135,9 +129,9 @@ class Response:
"""Generate iterable response body object."""
if self.body is None:
return []
elif isinstance(self.body, six.text_type):
elif isinstance(self.body, str):
return [self.body.encode('iso-8859-1')]
elif isinstance(self.body, six.binary_type):
elif isinstance(self.body, bytes):
return [self.body]
else:
return [x.encode('iso-8859-1') for x in self.body]

View file

@ -1,13 +1,8 @@
# -*- coding: utf-8 -*-
"""Test suite for cross-python compatibility helpers."""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
import six
from cheroot._compat import extract_bytes, memoryview, ntob, ntou, bton
from cheroot._compat import extract_bytes, ntob, ntou, bton
@pytest.mark.parametrize(
@ -32,7 +27,7 @@ def test_compat_functions_positive(func, inp, out):
)
def test_compat_functions_negative_nonnative(func):
"""Check that compatibility functions fail loudly for incorrect input."""
non_native_test_str = u'bar' if six.PY2 else b'bar'
non_native_test_str = b'bar'
with pytest.raises(TypeError):
func(non_native_test_str, encoding='utf-8')

View file

@ -4,11 +4,8 @@
cli
"""
# -*- coding: utf-8 -*-
# vim: set fileencoding=utf-8 :
import sys
import six
import pytest
from cheroot.cli import (
@ -69,12 +66,6 @@ def wsgi_app(monkeypatch):
app = WSGIAppMock()
# patch sys.modules, to include the an instance of WSGIAppMock
# under a specific namespace
if six.PY2:
# python2 requires the previous namespaces to be part of sys.modules
# (e.g. for 'a.b.c' we need to insert 'a', 'a.b' and 'a.b.c')
# otherwise it fails, we're setting the same instance on each level,
# we don't really care about those, just the last one.
monkeypatch.setitem(sys.modules, 'mypkg', app)
monkeypatch.setitem(sys.modules, 'mypkg.wsgi', app)
return app

View file

@ -1,18 +1,14 @@
"""Tests for TCP connection handling, including proper and timely close."""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import errno
import socket
import time
import logging
import traceback as traceback_
from collections import namedtuple
import http.client
import urllib.request
from six.moves import range, http_client, urllib
import six
import pytest
from jaraco.text import trim, unwrap
@ -94,8 +90,6 @@ class Controller(helper.Controller):
WSGI 1.0 is a mess around unicode. Create endpoints
that match the PATH_INFO that it produces.
"""
if six.PY2:
return string
return string.encode('utf-8').decode('latin-1')
handlers = {
@ -242,7 +236,7 @@ def test_HTTP11_persistent_connections(test_client):
assert header_has_value('Connection', 'close', actual_headers)
# Make another request on the same connection, which should error.
with pytest.raises(http_client.NotConnected):
with pytest.raises(http.client.NotConnected):
test_client.get('/pov', http_conn=http_connection)
@ -309,7 +303,7 @@ def test_streaming_11(test_client, set_cl):
# Make another request on the same connection, which should
# error.
with pytest.raises(http_client.NotConnected):
with pytest.raises(http.client.NotConnected):
test_client.get('/pov', http_conn=http_connection)
# Try HEAD.
@ -324,6 +318,9 @@ def test_streaming_11(test_client, set_cl):
assert actual_resp_body == b''
assert not header_exists('Transfer-Encoding', actual_headers)
# Prevent the resource warnings:
http_connection.close()
@pytest.mark.parametrize(
'set_cl',
@ -389,7 +386,7 @@ def test_streaming_10(test_client, set_cl):
assert not header_exists('Transfer-Encoding', actual_headers)
# Make another request on the same connection, which should error.
with pytest.raises(http_client.NotConnected):
with pytest.raises(http.client.NotConnected):
test_client.get(
'/pov', http_conn=http_connection,
protocol='HTTP/1.0',
@ -397,6 +394,9 @@ def test_streaming_10(test_client, set_cl):
test_client.server_instance.protocol = original_server_protocol
# Prevent the resource warnings:
http_connection.close()
@pytest.mark.parametrize(
'http_server_protocol',
@ -466,6 +466,9 @@ def test_keepalive(test_client, http_server_protocol):
test_client.server_instance.protocol = original_server_protocol
# Prevent the resource warnings:
http_connection.close()
def test_keepalive_conn_management(test_client):
"""Test management of Keep-Alive connections."""
@ -511,9 +514,9 @@ def test_keepalive_conn_management(test_client):
)
disconnect_errors = (
http_client.BadStatusLine,
http_client.CannotSendRequest,
http_client.NotConnected,
http.client.BadStatusLine,
http.client.CannotSendRequest,
http.client.NotConnected,
)
# Make a new connection.
@ -565,6 +568,11 @@ def test_keepalive_conn_management(test_client):
# Restore original timeout.
test_client.server_instance.timeout = timeout
# Prevent the resource warnings:
c1.close()
c2.close()
c3.close()
@pytest.mark.parametrize(
('simulated_exception', 'error_number', 'exception_leaks'),
@ -597,7 +605,6 @@ def test_keepalive_conn_management(test_client):
pytest.param(RuntimeError, 666, True, id='RuntimeError(666)'),
pytest.param(socket.error, -1, True, id='socket.error(-1)'),
) + (
() if six.PY2 else (
pytest.param(
ConnectionResetError, errno.ECONNRESET, False,
id='ConnectionResetError(ECONNRESET)',
@ -610,7 +617,6 @@ def test_keepalive_conn_management(test_client):
BrokenPipeError, errno.ESHUTDOWN, False,
id='BrokenPipeError(ESHUTDOWN)',
),
)
),
)
def test_broken_connection_during_tcp_fin(
@ -765,7 +771,7 @@ def test_HTTP11_Timeout_after_request(test_client):
response = conn.response_class(conn.sock, method='GET')
try:
response.begin()
except (socket.error, http_client.BadStatusLine):
except (socket.error, http.client.BadStatusLine):
pass
except Exception as ex:
pytest.fail(fail_msg % ex)
@ -795,7 +801,7 @@ def test_HTTP11_Timeout_after_request(test_client):
response = conn.response_class(conn.sock, method='GET')
try:
response.begin()
except (socket.error, http_client.BadStatusLine):
except (socket.error, http.client.BadStatusLine):
pass
except Exception as ex:
pytest.fail(fail_msg % ex)
@ -845,8 +851,7 @@ def test_HTTP11_pipelining(test_client):
# ``conn.sock``. Until that bug get's fixed we will
# monkey patch the ``response`` instance.
# https://bugs.python.org/issue23377
if not six.PY2:
response.fp = conn.sock.makefile('rb', 0)
response.fp = conn.sock.makefile('rb', 0)
response.begin()
body = response.read(13)
assert response.status == 200
@ -1026,6 +1031,9 @@ def test_No_Message_Body(test_client):
assert actual_resp_body == b''
assert not header_exists('Connection', actual_headers)
# Prevent the resource warnings:
http_connection.close()
@pytest.mark.xfail(
reason=unwrap(

View file

@ -1,16 +1,10 @@
"""Tests for managing HTTP issues (malformed requests, etc)."""
# -*- coding: utf-8 -*-
# vim: set fileencoding=utf-8 :
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import errno
import socket
import urllib.parse # noqa: WPS301
import pytest
import six
from six.moves import urllib
from cheroot.test import helper
@ -54,8 +48,6 @@ class HelloController(helper.Controller):
WSGI 1.0 is a mess around unicode. Create endpoints
that match the PATH_INFO that it produces.
"""
if six.PY2:
return string
return string.encode('utf-8').decode('latin-1')
handlers = {
@ -63,7 +55,13 @@ class HelloController(helper.Controller):
'/no_body': hello,
'/body_required': body_required,
'/query_string': query_string,
# FIXME: Unignore the pylint rules in pylint >= 2.15.4.
# Refs:
# * https://github.com/PyCQA/pylint/issues/6592
# * https://github.com/PyCQA/pylint/pull/7395
# pylint: disable-next=too-many-function-args
_munge('/привіт'): hello,
# pylint: disable-next=too-many-function-args
_munge('/Юххууу'): hello,
'/\xa0Ðblah key 0 900 4 data': hello,
'/*': asterisk,
@ -151,7 +149,6 @@ def test_parse_acceptable_uri(test_client, uri):
assert actual_status == HTTP_OK
@pytest.mark.xfail(six.PY2, reason='Fails on Python 2')
def test_parse_uri_unsafe_uri(test_client):
"""Test that malicious URI does not allow HTTP injection.
@ -263,6 +260,8 @@ def test_no_content_length(test_client):
assert actual_status == HTTP_OK
assert actual_resp_body == b'Hello world!'
c.close() # deal with the resource warning
def test_content_length_required(test_client):
"""Test POST query with body failing because of missing Content-Length."""
@ -278,6 +277,8 @@ def test_content_length_required(test_client):
actual_status = response.status
assert actual_status == HTTP_LENGTH_REQUIRED
c.close() # deal with the resource warning
@pytest.mark.xfail(
reason='https://github.com/cherrypy/cheroot/issues/106',
@ -350,6 +351,8 @@ def test_malformed_http_method(test_client):
actual_resp_body = response.read(21)
assert actual_resp_body == b'Malformed method name'
c.close() # deal with the resource warning
def test_malformed_header(test_client):
"""Check that broken HTTP header results in Bad Request."""
@ -366,6 +369,8 @@ def test_malformed_header(test_client):
actual_resp_body = response.read(20)
assert actual_resp_body == b'Illegal header line.'
c.close() # deal with the resource warning
def test_request_line_split_issue_1220(test_client):
"""Check that HTTP request line of exactly 256 chars length is OK."""

View file

@ -1,8 +1,4 @@
"""Tests for the HTTP server."""
# -*- coding: utf-8 -*-
# vim: set fileencoding=utf-8 :
from __future__ import absolute_import, division, print_function
from cheroot.wsgi import PathInfoDispatcher

View file

@ -3,9 +3,6 @@
from cheroot import makefile
__metaclass__ = type
class MockSocket:
"""A mock socket."""

View file

@ -1,23 +1,18 @@
"""Tests for the HTTP server."""
# -*- coding: utf-8 -*-
# vim: set fileencoding=utf-8 :
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os
import queue
import socket
import tempfile
import threading
import uuid
import urllib.parse # noqa: WPS301
import pytest
import requests
import requests_unixsocket
import six
from pypytools.gc.custom import DefaultGc
from six.moves import queue, urllib
from .._compat import bton, ntob
from .._compat import IS_LINUX, IS_MACOS, IS_WINDOWS, SYS_PLATFORM
@ -259,12 +254,12 @@ def peercreds_enabled_server(http_server, unix_sock_file):
@unix_only_sock_test
@non_macos_sock_test
def test_peercreds_unix_sock(peercreds_enabled_server):
def test_peercreds_unix_sock(http_request_timeout, peercreds_enabled_server):
"""Check that ``PEERCRED`` lookup works when enabled."""
httpserver = peercreds_enabled_server
bind_addr = httpserver.bind_addr
if isinstance(bind_addr, six.binary_type):
if isinstance(bind_addr, bytes):
bind_addr = bind_addr.decode()
# pylint: disable=possibly-unused-variable
@ -275,11 +270,17 @@ def test_peercreds_unix_sock(peercreds_enabled_server):
expected_peercreds = '|'.join(map(str, expected_peercreds))
with requests_unixsocket.monkeypatch():
peercreds_resp = requests.get(unix_base_uri + PEERCRED_IDS_URI)
peercreds_resp = requests.get(
unix_base_uri + PEERCRED_IDS_URI,
timeout=http_request_timeout,
)
peercreds_resp.raise_for_status()
assert peercreds_resp.text == expected_peercreds
peercreds_text_resp = requests.get(unix_base_uri + PEERCRED_TEXTS_URI)
peercreds_text_resp = requests.get(
unix_base_uri + PEERCRED_TEXTS_URI,
timeout=http_request_timeout,
)
assert peercreds_text_resp.status_code == 500
@ -290,14 +291,17 @@ def test_peercreds_unix_sock(peercreds_enabled_server):
)
@unix_only_sock_test
@non_macos_sock_test
def test_peercreds_unix_sock_with_lookup(peercreds_enabled_server):
def test_peercreds_unix_sock_with_lookup(
http_request_timeout,
peercreds_enabled_server,
):
"""Check that ``PEERCRED`` resolution works when enabled."""
httpserver = peercreds_enabled_server
httpserver.peercreds_resolve_enabled = True
bind_addr = httpserver.bind_addr
if isinstance(bind_addr, six.binary_type):
if isinstance(bind_addr, bytes):
bind_addr = bind_addr.decode()
# pylint: disable=possibly-unused-variable
@ -312,7 +316,10 @@ def test_peercreds_unix_sock_with_lookup(peercreds_enabled_server):
)
expected_textcreds = '!'.join(map(str, expected_textcreds))
with requests_unixsocket.monkeypatch():
peercreds_text_resp = requests.get(unix_base_uri + PEERCRED_TEXTS_URI)
peercreds_text_resp = requests.get(
unix_base_uri + PEERCRED_TEXTS_URI,
timeout=http_request_timeout,
)
peercreds_text_resp.raise_for_status()
assert peercreds_text_resp.text == expected_textcreds
@ -363,7 +370,10 @@ def test_high_number_of_file_descriptors(native_server_client, resource_limit):
assert any(fn >= resource_limit for fn in native_process_conn.filenos)
if not IS_WINDOWS:
ISSUE511 = IS_MACOS
if not IS_WINDOWS and not ISSUE511:
test_high_number_of_file_descriptors = pytest.mark.forked(
test_high_number_of_file_descriptors,
)

View file

@ -1,9 +1,4 @@
"""Tests for TLS support."""
# -*- coding: utf-8 -*-
# vim: set fileencoding=utf-8 :
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import functools
import json
@ -14,11 +9,11 @@ import sys
import threading
import time
import traceback
import http.client
import OpenSSL.SSL
import pytest
import requests
import six
import trustme
from .._compat import bton, ntob, ntou
@ -49,9 +44,6 @@ IS_PYOPENSSL_SSL_VERSION_1_0 = (
OpenSSL.SSL.SSLeay_version(OpenSSL.SSL.SSLEAY_VERSION).
startswith(b'OpenSSL 1.0.')
)
PY27 = sys.version_info[:2] == (2, 7)
PY34 = sys.version_info[:2] == (3, 4)
PY3 = not six.PY2
PY310_PLUS = sys.version_info[:2] >= (3, 10)
@ -64,13 +56,12 @@ _stdlib_to_openssl_verify = {
fails_under_py3 = pytest.mark.xfail(
not six.PY2,
reason='Fails under Python 3+',
)
fails_under_py3_in_pypy = pytest.mark.xfail(
not six.PY2 and IS_PYPY,
IS_PYPY,
reason='Fails under PyPy3',
)
@ -213,6 +204,7 @@ def thread_exceptions():
),
)
def test_ssl_adapters(
http_request_timeout,
tls_http_server, adapter_type,
tls_certificate,
tls_certificate_chain_pem_path,
@ -241,6 +233,7 @@ def test_ssl_adapters(
resp = requests.get(
'https://{host!s}:{port!s}/'.format(host=interface, port=port),
timeout=http_request_timeout,
verify=tls_ca_certificate_pem_path,
)
@ -276,8 +269,9 @@ def test_ssl_adapters(
reason='Fails under PyPy in CI for unknown reason',
strict=False,
)
def test_tls_client_auth( # noqa: C901 # FIXME
def test_tls_client_auth( # noqa: C901, WPS213 # FIXME
# FIXME: remove twisted logic, separate tests
http_request_timeout,
mocker,
tls_http_server, adapter_type,
ca,
@ -331,6 +325,9 @@ def test_tls_client_auth( # noqa: C901 # FIXME
requests.get,
'https://{host!s}:{port!s}/'.format(host=interface, port=port),
# Don't wait for the first byte forever:
timeout=http_request_timeout,
# Server TLS certificate verification:
verify=tls_ca_certificate_pem_path,
@ -348,12 +345,13 @@ def test_tls_client_auth( # noqa: C901 # FIXME
and tls_verify_mode == ssl.CERT_REQUIRED
and tls_client_identity == 'localhost'
and is_trusted_cert
) or PY34:
):
pytest.xfail(
'OpenSSL 1.0 has problems with verifying client certs',
)
assert is_req_successful
assert resp.text == 'Hello world!'
resp.close()
return
# xfail some flaky tests
@ -366,29 +364,16 @@ def test_tls_client_auth( # noqa: C901 # FIXME
if issue_237:
pytest.xfail('Test sometimes fails')
expected_ssl_errors = (
requests.exceptions.SSLError,
OpenSSL.SSL.Error,
) if PY34 else (
requests.exceptions.SSLError,
)
expected_ssl_errors = requests.exceptions.SSLError,
if IS_WINDOWS or IS_GITHUB_ACTIONS_WORKFLOW:
expected_ssl_errors += requests.exceptions.ConnectionError,
with pytest.raises(expected_ssl_errors) as ssl_err:
make_https_request()
if PY34 and isinstance(ssl_err, OpenSSL.SSL.Error):
pytest.xfail(
'OpenSSL behaves wierdly under Python 3.4 '
'because of an outdated urllib3',
)
make_https_request().close()
try:
err_text = ssl_err.value.args[0].reason.args[0].args[0]
except AttributeError:
if PY34:
pytest.xfail('OpenSSL behaves wierdly under Python 3.4')
elif IS_WINDOWS or IS_GITHUB_ACTIONS_WORKFLOW:
if IS_WINDOWS or IS_GITHUB_ACTIONS_WORKFLOW:
err_text = str(ssl_err.value)
else:
raise
@ -400,9 +385,8 @@ def test_tls_client_auth( # noqa: C901 # FIXME
'sslv3 alert bad certificate' if IS_LIBRESSL_BACKEND
else 'tlsv1 alert unknown ca',
)
if not six.PY2:
if IS_MACOS and IS_PYPY and adapter_type == 'pyopenssl':
expected_substrings = ('tlsv1 alert unknown ca',)
if IS_MACOS and IS_PYPY and adapter_type == 'pyopenssl':
expected_substrings = ('tlsv1 alert unknown ca',)
if (
tls_verify_mode in (
ssl.CERT_REQUIRED,
@ -469,9 +453,9 @@ def test_tls_client_auth( # noqa: C901 # FIXME
pytest.param(
'builtin',
marks=pytest.mark.xfail(
IS_GITHUB_ACTIONS_WORKFLOW and IS_MACOS and PY310_PLUS,
IS_MACOS and PY310_PLUS,
reason='Unclosed TLS resource warnings happen on macOS '
'under Python 3.10',
'under Python 3.10 (#508)',
strict=False,
),
),
@ -492,6 +476,7 @@ def test_ssl_env( # noqa: C901 # FIXME
thread_exceptions,
recwarn,
mocker,
http_request_timeout,
tls_http_server, adapter_type,
ca, tls_verify_mode, tls_certificate,
tls_certificate_chain_pem_path,
@ -532,13 +517,10 @@ def test_ssl_env( # noqa: C901 # FIXME
resp = requests.get(
'https://' + interface + ':' + str(port) + '/env',
timeout=http_request_timeout,
verify=tls_ca_certificate_pem_path,
cert=cl_pem if use_client_cert else None,
)
if PY34 and resp.status_code != 200:
pytest.xfail(
'Python 3.4 has problems with verifying client certs',
)
env = json.loads(resp.content.decode('utf-8'))
@ -620,7 +602,7 @@ def test_https_over_http_error(http_server, ip_addr):
httpserver = http_server.send((ip_addr, EPHEMERAL_PORT))
interface, _host, port = _get_conn_data(httpserver.bind_addr)
with pytest.raises(ssl.SSLError) as ssl_err:
six.moves.http_client.HTTPSConnection(
http.client.HTTPSConnection(
'{interface}:{port}'.format(
interface=interface,
port=port,
@ -633,20 +615,10 @@ def test_https_over_http_error(http_server, ip_addr):
assert expected_substring in ssl_err.value.args[-1]
http_over_https_error_builtin_marks = []
if IS_WINDOWS and six.PY2:
http_over_https_error_builtin_marks.append(
pytest.mark.flaky(reruns=5, reruns_delay=2),
)
@pytest.mark.parametrize(
'adapter_type',
(
pytest.param(
'builtin',
marks=http_over_https_error_builtin_marks,
),
'builtin',
'pyopenssl',
),
)
@ -657,7 +629,9 @@ if IS_WINDOWS and six.PY2:
pytest.param(ANY_INTERFACE_IPV6, marks=missing_ipv6),
),
)
@pytest.mark.flaky(reruns=3, reruns_delay=2)
def test_http_over_https_error(
http_request_timeout,
tls_http_server, adapter_type,
ca, ip_addr,
tls_certificate,
@ -697,36 +671,12 @@ def test_http_over_https_error(
expect_fallback_response_over_plain_http = (
(
adapter_type == 'pyopenssl'
and (IS_ABOVE_OPENSSL10 or not six.PY2)
)
or PY27
) or (
IS_GITHUB_ACTIONS_WORKFLOW
and IS_WINDOWS
and six.PY2
and not IS_WIN2016
)
if (
IS_GITHUB_ACTIONS_WORKFLOW
and IS_WINDOWS
and six.PY2
and IS_WIN2016
and adapter_type == 'builtin'
and ip_addr is ANY_INTERFACE_IPV6
):
expect_fallback_response_over_plain_http = True
if (
IS_GITHUB_ACTIONS_WORKFLOW
and IS_WINDOWS
and six.PY2
and not IS_WIN2016
and adapter_type == 'builtin'
and ip_addr is not ANY_INTERFACE_IPV6
):
expect_fallback_response_over_plain_http = False
if expect_fallback_response_over_plain_http:
resp = requests.get(
'http://{host!s}:{port!s}/'.format(host=fqdn, port=port),
timeout=http_request_timeout,
)
assert resp.status_code == 400
assert resp.text == (
@ -738,6 +688,7 @@ def test_http_over_https_error(
with pytest.raises(requests.exceptions.ConnectionError) as ssl_err:
requests.get( # FIXME: make stdlib ssl behave like PyOpenSSL
'http://{host!s}:{port!s}/'.format(host=fqdn, port=port),
timeout=http_request_timeout,
)
if IS_LINUX:

View file

@ -37,6 +37,7 @@ def simple_wsgi_server():
yield locals()
@pytest.mark.flaky(reruns=3, reruns_delay=2)
def test_connection_keepalive(simple_wsgi_server):
"""Test the connection keepalive works (duh)."""
session = Session(base_url=simple_wsgi_server['url'])
@ -59,6 +60,7 @@ def test_connection_keepalive(simple_wsgi_server):
]
failures = sum(task.result() for task in tasks)
session.close()
assert not failures

View file

@ -15,9 +15,6 @@ the traceback to stdout, and keep any assertions you have from running
be of further significance to your tests).
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pprint
import re
import socket
@ -29,9 +26,8 @@ import json
import unittest # pylint: disable=deprecated-module,preferred-module
import warnings
import functools
from six.moves import http_client, map, urllib_parse
import six
import http.client
import urllib.parse
from more_itertools.more import always_iterable
import jaraco.functools
@ -105,7 +101,7 @@ class WebCase(unittest.TestCase):
HOST = '127.0.0.1'
PORT = 8000
HTTP_CONN = http_client.HTTPConnection
HTTP_CONN = http.client.HTTPConnection
PROTOCOL = 'HTTP/1.1'
scheme = 'http'
@ -127,7 +123,7 @@ class WebCase(unittest.TestCase):
* from :py:mod:`python:http.client`.
"""
cls_name = '{scheme}Connection'.format(scheme=self.scheme.upper())
return getattr(http_client, cls_name)
return getattr(http.client, cls_name)
def get_conn(self, auto_open=False):
"""Return a connection to our HTTP server."""
@ -201,9 +197,9 @@ class WebCase(unittest.TestCase):
"""
ServerError.on = False
if isinstance(url, six.text_type):
if isinstance(url, str):
url = url.encode('utf-8')
if isinstance(body, six.text_type):
if isinstance(body, str):
body = body.encode('utf-8')
# for compatibility, support raise_subcls is None
@ -386,7 +382,7 @@ class WebCase(unittest.TestCase):
def assertBody(self, value, msg=None):
"""Fail if value != self.body."""
if isinstance(value, six.text_type):
if isinstance(value, str):
value = value.encode(self.encoding)
if value != self.body:
if msg is None:
@ -397,7 +393,7 @@ class WebCase(unittest.TestCase):
def assertInBody(self, value, msg=None):
"""Fail if value not in self.body."""
if isinstance(value, six.text_type):
if isinstance(value, str):
value = value.encode(self.encoding)
if value not in self.body:
if msg is None:
@ -406,7 +402,7 @@ class WebCase(unittest.TestCase):
def assertNotInBody(self, value, msg=None):
"""Fail if value in self.body."""
if isinstance(value, six.text_type):
if isinstance(value, str):
value = value.encode(self.encoding)
if value in self.body:
if msg is None:
@ -415,7 +411,7 @@ class WebCase(unittest.TestCase):
def assertMatchesBody(self, pattern, msg=None, flags=0):
"""Fail if value (a regex pattern) is not in self.body."""
if isinstance(pattern, six.text_type):
if isinstance(pattern, str):
pattern = pattern.encode(self.encoding)
if re.search(pattern, self.body, flags) is None:
if msg is None:
@ -464,25 +460,7 @@ def shb(response):
"""Return status, headers, body the way we like from a response."""
resp_status_line = '%s %s' % (response.status, response.reason)
if not six.PY2:
return resp_status_line, response.getheaders(), response.read()
h = []
key, value = None, None
for line in response.msg.headers:
if line:
if line[0] in ' \t':
value += line.strip()
else:
if key and value:
h.append((key, value))
key, value = line.split(':', 1)
key = key.strip()
value = value.strip()
if key and value:
h.append((key, value))
return resp_status_line, h, response.read()
return resp_status_line, response.getheaders(), response.read()
# def openURL(*args, raise_subcls=(), **kwargs):
@ -514,7 +492,7 @@ def openURL(*args, **kwargs):
def _open_url_once(
url, headers=None, method='GET', body=None,
host='127.0.0.1', port=8000, http_conn=http_client.HTTPConnection,
host='127.0.0.1', port=8000, http_conn=http.client.HTTPConnection,
protocol='HTTP/1.1', ssl_context=None,
):
"""Open the given HTTP resource and return status, headers, and body."""
@ -530,7 +508,7 @@ def _open_url_once(
conn = http_conn(interface(host), port, **kw)
conn._http_vsn_str = protocol
conn._http_vsn = int(''.join([x for x in protocol if x.isdigit()]))
if not six.PY2 and isinstance(url, bytes):
if isinstance(url, bytes):
url = url.decode()
conn.putrequest(
method.upper(), url, skip_host=True,
@ -572,10 +550,10 @@ def strip_netloc(url):
>>> strip_netloc('/foo/bar?bing#baz')
'/foo/bar?bing'
"""
parsed = urllib_parse.urlparse(url)
parsed = urllib.parse.urlparse(url)
_scheme, _netloc, path, params, query, _fragment = parsed
stripped = '', '', path, params, query, ''
return urllib_parse.urlunparse(stripped)
return urllib.parse.urlunparse(stripped)
# Add any exceptions which your web framework handles

View file

@ -1,16 +1,13 @@
"""Pytest fixtures and other helpers for doing testing by end-users."""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from contextlib import closing
from contextlib import closing, contextmanager
import errno
import socket
import threading
import time
import http.client
import pytest
from six.moves import http_client
import cheroot.server
from cheroot.test import webtest
@ -33,6 +30,7 @@ config = {
}
@contextmanager
def cheroot_server(server_factory):
"""Set up and tear down a Cheroot server instance."""
conf = config[server_factory].copy()
@ -64,14 +62,14 @@ def cheroot_server(server_factory):
@pytest.fixture
def wsgi_server():
"""Set up and tear down a Cheroot WSGI server instance."""
for srv in cheroot_server(cheroot.wsgi.Server):
with cheroot_server(cheroot.wsgi.Server) as srv:
yield srv
@pytest.fixture
def native_server():
"""Set up and tear down a Cheroot HTTP server instance."""
for srv in cheroot_server(cheroot.server.HTTPServer):
with cheroot_server(cheroot.server.HTTPServer) as srv:
yield srv
@ -89,9 +87,9 @@ class _TestClient:
port=self._port,
)
conn_cls = (
http_client.HTTPConnection
http.client.HTTPConnection
if self.server_instance.ssl_adapter is None else
http_client.HTTPSConnection
http.client.HTTPSConnection
)
return conn_cls(name)

View file

@ -5,17 +5,12 @@
joinable
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import collections
import threading
import time
import socket
import warnings
from six.moves import queue
import queue
from jaraco.functools import pass_none
@ -178,7 +173,7 @@ class ThreadPool:
for worker in self._threads:
worker.name = (
'CP Server {worker_name!s}'.
format(worker_name=worker.name),
format(worker_name=worker.name)
)
worker.start()
for worker in self._threads:
@ -228,7 +223,7 @@ class ThreadPool:
worker = WorkerThread(self.server)
worker.name = (
'CP Server {worker_name!s}'.
format(worker_name=worker.name),
format(worker_name=worker.name)
)
worker.start()
return worker

View file

@ -25,14 +25,8 @@ as you want in one instance by using a PathInfoDispatcher::
server = wsgi.Server(addr, d)
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import sys
import six
from six.moves import filter
from . import server
from .workers import threadpool
from ._compat import ntob, bton
@ -140,7 +134,7 @@ class Gateway(server.Gateway):
response = self.req.server.wsgi_app(self.env, self.start_response)
try:
for chunk in filter(None, response):
if not isinstance(chunk, six.binary_type):
if not isinstance(chunk, bytes):
raise ValueError('WSGI Applications must yield bytes')
self.write(chunk)
finally:
@ -149,7 +143,7 @@ class Gateway(server.Gateway):
if hasattr(response, 'close'):
response.close()
def start_response(self, status, headers, exc_info=None):
def start_response(self, status, headers, exc_info=None): # noqa: WPS238
"""WSGI callable to begin the HTTP response."""
# "The application may call start_response more than once,
# if and only if the exc_info argument is provided."
@ -164,10 +158,8 @@ class Gateway(server.Gateway):
# sent, start_response must raise an error, and should raise the
# exc_info tuple."
if self.req.sent_headers:
try:
six.reraise(*exc_info)
finally:
exc_info = None
value = exc_info[1]
raise value
self.req.status = self._encode_status(status)
@ -196,8 +188,6 @@ class Gateway(server.Gateway):
must be of type "str" but are restricted to code points in the
"Latin-1" set.
"""
if six.PY2:
return status
if not isinstance(status, str):
raise TypeError('WSGI response status is not of type str.')
return status.encode('ISO-8859-1')
@ -273,7 +263,7 @@ class Gateway_10(Gateway):
'wsgi.version': self.version,
}
if isinstance(req.server.bind_addr, six.string_types):
if isinstance(req.server.bind_addr, str):
# AF_UNIX. This isn't really allowed by WSGI, which doesn't
# address unix domain sockets. But it's better than nothing.
env['SERVER_PORT'] = ''
@ -332,10 +322,10 @@ class Gateway_u0(Gateway_10):
"""Return a new environ dict targeting the given wsgi.version."""
req = self.req
env_10 = super(Gateway_u0, self).get_environ()
env = dict(map(self._decode_key, env_10.items()))
env = dict(env_10.items())
# Request-URI
enc = env.setdefault(six.u('wsgi.url_encoding'), six.u('utf-8'))
enc = env.setdefault('wsgi.url_encoding', 'utf-8')
try:
env['PATH_INFO'] = req.path.decode(enc)
env['QUERY_STRING'] = req.qs.decode(enc)
@ -345,25 +335,10 @@ class Gateway_u0(Gateway_10):
env['PATH_INFO'] = env_10['PATH_INFO']
env['QUERY_STRING'] = env_10['QUERY_STRING']
env.update(map(self._decode_value, env.items()))
env.update(env.items())
return env
@staticmethod
def _decode_key(item):
k, v = item
if six.PY2:
k = k.decode('ISO-8859-1')
return k, v
@staticmethod
def _decode_value(item):
k, v = item
skip_keys = 'REQUEST_URI', 'wsgi.input'
if not six.PY2 or not isinstance(v, bytes) or k in skip_keys:
return k, v
return k, v.decode('ISO-8859-1')
wsgi_gateways = Gateway.gateway_map()

View file

@ -40,3 +40,10 @@ class PathInfoDispatcher:
apps: Any
def __init__(self, apps): ...
def __call__(self, environ, start_response): ...
WSGIServer = Server
WSGIGateway = Gateway
WSGIGateway_u0 = Gateway_u0
WSGIGateway_10 = Gateway_10
WSGIPathInfoDispatcher = PathInfoDispatcher

View file

@ -203,5 +203,6 @@ def _write_contents(target, source):
for item in source.iterdir():
_write_contents(child, item)
else:
child.open('wb').write(source.read_bytes())
with child.open('wb') as fp:
fp.write(source.read_bytes())
return child

0
lib/more_itertools/more.py Normal file → Executable file
View file

View file

@ -17,7 +17,7 @@ __title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"
__version__ = "21.3"
__version__ = "22.0"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"

108
lib/packaging/_elffile.py Normal file
View file

@ -0,0 +1,108 @@
"""
ELF file parser.
This provides a class ``ELFFile`` that parses an ELF executable in a similar
interface to ``ZipFile``. Only the read interface is implemented.
Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
"""
import enum
import os
import struct
from typing import IO, Optional, Tuple
class ELFInvalid(ValueError):
    """Raised when the input is not a valid/parsable ELF executable."""
    pass


class EIClass(enum.IntEnum):
    """ELF identification byte 4 (``EI_CLASS``): 32- vs 64-bit format."""
    C32 = 1
    C64 = 2


class EIData(enum.IntEnum):
    """ELF identification byte 5 (``EI_DATA``): byte order of file data."""
    Lsb = 1
    Msb = 2


class EMachine(enum.IntEnum):
    """``e_machine`` architecture values this module cares about."""
    I386 = 3
    S390 = 22
    Arm = 40
    X8664 = 62
    AArc64 = 183


class ELFFile:
    """
    Representation of an ELF executable.

    Parses just enough of the ELF header on construction to expose the
    architecture (``machine``), processor flags (``flags``), bitness
    (``capacity``) and endianness (``encoding``), plus the ``interpreter``
    property.  Raises :class:`ELFInvalid` on malformed input.
    """

    def __init__(self, f: IO[bytes]) -> None:
        # ``f`` must be a binary file object positioned at the start of
        # the ELF image; it is kept open and re-read by ``interpreter``.
        self._f = f

        try:
            ident = self._read("16B")
        except struct.error:
            raise ELFInvalid("unable to parse identification")
        magic = bytes(ident[:4])
        if magic != b"\x7fELF":
            raise ELFInvalid(f"invalid magic: {magic!r}")

        self.capacity = ident[4]  # Format for program header (bitness).
        self.encoding = ident[5]  # Data structure encoding (endianness).

        try:
            # e_fmt: Format for program header.
            # p_fmt: Format for section header.
            # p_idx: Indexes to find p_type, p_offset, and p_filesz.
            e_fmt, self._p_fmt, self._p_idx = {
                (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)),  # 32-bit LSB.
                (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)),  # 32-bit MSB.
                (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)),  # 64-bit LSB.
                (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
            }[(self.capacity, self.encoding)]
        except KeyError:
            raise ELFInvalid(
                f"unrecognized capacity ({self.capacity}) or "
                f"encoding ({self.encoding})"
            )

        try:
            (
                _,
                self.machine,  # Architecture type.
                _,
                _,
                self._e_phoff,  # Offset of program header.
                _,
                self.flags,  # Processor-specific flags.
                _,
                self._e_phentsize,  # Size of section.
                self._e_phnum,  # Number of sections.
            ) = self._read(e_fmt)
        except struct.error as e:
            raise ELFInvalid("unable to parse machine and section information") from e

    def _read(self, fmt: str) -> Tuple[int, ...]:
        # Read exactly ``struct.calcsize(fmt)`` bytes at the current file
        # position and unpack them with ``fmt``.
        return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))

    @property
    def interpreter(self) -> Optional[str]:
        """
        The path recorded in the ``PT_INTERP`` section header.

        Returns ``None`` when no interpreter section is present or a
        section entry cannot be parsed.
        """
        for index in range(self._e_phnum):
            self._f.seek(self._e_phoff + self._e_phentsize * index)
            try:
                data = self._read(self._p_fmt)
            except struct.error:
                continue
            if data[self._p_idx[0]] != 3:  # Not PT_INTERP.
                continue
            self._f.seek(data[self._p_idx[1]])
            return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")
        return None

View file

@ -1,121 +1,58 @@
import collections
import contextlib
import functools
import os
import re
import struct
import sys
import warnings
from typing import IO, Dict, Iterator, NamedTuple, Optional, Tuple
from typing import Dict, Generator, Iterator, NamedTuple, Optional, Tuple
from ._elffile import EIClass, EIData, ELFFile, EMachine
EF_ARM_ABIMASK = 0xFF000000
EF_ARM_ABI_VER5 = 0x05000000
EF_ARM_ABI_FLOAT_HARD = 0x00000400
# Python does not provide platform information at sufficient granularity to
# identify the architecture of the running executable in some cases, so we
# determine it dynamically by reading the information from the running
# process. This only applies on Linux, which uses the ELF format.
class _ELFFileHeader:
# https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
class _InvalidELFFileHeader(ValueError):
"""
An invalid ELF file header was found.
"""
ELF_MAGIC_NUMBER = 0x7F454C46
ELFCLASS32 = 1
ELFCLASS64 = 2
ELFDATA2LSB = 1
ELFDATA2MSB = 2
EM_386 = 3
EM_S390 = 22
EM_ARM = 40
EM_X86_64 = 62
EF_ARM_ABIMASK = 0xFF000000
EF_ARM_ABI_VER5 = 0x05000000
EF_ARM_ABI_FLOAT_HARD = 0x00000400
def __init__(self, file: IO[bytes]) -> None:
def unpack(fmt: str) -> int:
try:
data = file.read(struct.calcsize(fmt))
result: Tuple[int, ...] = struct.unpack(fmt, data)
except struct.error:
raise _ELFFileHeader._InvalidELFFileHeader()
return result[0]
self.e_ident_magic = unpack(">I")
if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
raise _ELFFileHeader._InvalidELFFileHeader()
self.e_ident_class = unpack("B")
if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
raise _ELFFileHeader._InvalidELFFileHeader()
self.e_ident_data = unpack("B")
if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
raise _ELFFileHeader._InvalidELFFileHeader()
self.e_ident_version = unpack("B")
self.e_ident_osabi = unpack("B")
self.e_ident_abiversion = unpack("B")
self.e_ident_pad = file.read(7)
format_h = "<H" if self.e_ident_data == self.ELFDATA2LSB else ">H"
format_i = "<I" if self.e_ident_data == self.ELFDATA2LSB else ">I"
format_q = "<Q" if self.e_ident_data == self.ELFDATA2LSB else ">Q"
format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
self.e_type = unpack(format_h)
self.e_machine = unpack(format_h)
self.e_version = unpack(format_i)
self.e_entry = unpack(format_p)
self.e_phoff = unpack(format_p)
self.e_shoff = unpack(format_p)
self.e_flags = unpack(format_i)
self.e_ehsize = unpack(format_h)
self.e_phentsize = unpack(format_h)
self.e_phnum = unpack(format_h)
self.e_shentsize = unpack(format_h)
self.e_shnum = unpack(format_h)
self.e_shstrndx = unpack(format_h)
def _get_elf_header() -> Optional[_ELFFileHeader]:
@contextlib.contextmanager
def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]:
try:
with open(sys.executable, "rb") as f:
elf_header = _ELFFileHeader(f)
except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader):
return None
return elf_header
with open(path, "rb") as f:
yield ELFFile(f)
except (OSError, TypeError, ValueError):
yield None
def _is_linux_armhf() -> bool:
def _is_linux_armhf(executable: str) -> bool:
# hard-float ABI can be detected from the ELF header of the running
# process
# https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
elf_header = _get_elf_header()
if elf_header is None:
return False
result = elf_header.e_ident_class == elf_header.ELFCLASS32
result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
result &= elf_header.e_machine == elf_header.EM_ARM
result &= (
elf_header.e_flags & elf_header.EF_ARM_ABIMASK
) == elf_header.EF_ARM_ABI_VER5
result &= (
elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD
) == elf_header.EF_ARM_ABI_FLOAT_HARD
return result
with _parse_elf(executable) as f:
return (
f is not None
and f.capacity == EIClass.C32
and f.encoding == EIData.Lsb
and f.machine == EMachine.Arm
and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5
and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD
)
def _is_linux_i686() -> bool:
elf_header = _get_elf_header()
if elf_header is None:
return False
result = elf_header.e_ident_class == elf_header.ELFCLASS32
result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
result &= elf_header.e_machine == elf_header.EM_386
return result
def _is_linux_i686(executable: str) -> bool:
with _parse_elf(executable) as f:
return (
f is not None
and f.capacity == EIClass.C32
and f.encoding == EIData.Lsb
and f.machine == EMachine.I386
)
def _have_compatible_abi(arch: str) -> bool:
def _have_compatible_abi(executable: str, arch: str) -> bool:
if arch == "armv7l":
return _is_linux_armhf()
return _is_linux_armhf(executable)
if arch == "i686":
return _is_linux_i686()
return _is_linux_i686(executable)
return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
@ -141,10 +78,10 @@ def _glibc_version_string_confstr() -> Optional[str]:
# platform module.
# https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
try:
# os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17".
version_string = os.confstr("CS_GNU_LIBC_VERSION")
# Should be a string like "glibc 2.17".
version_string: str = getattr(os, "confstr")("CS_GNU_LIBC_VERSION")
assert version_string is not None
_, version = version_string.split()
_, version = version_string.rsplit()
except (AssertionError, AttributeError, OSError, ValueError):
# os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
return None
@ -211,8 +148,8 @@ def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
if not m:
warnings.warn(
"Expected glibc version with 2 components major.minor,"
" got: %s" % version_str,
f"Expected glibc version with 2 components major.minor,"
f" got: {version_str}",
RuntimeWarning,
)
return -1, -1
@ -265,7 +202,7 @@ _LEGACY_MANYLINUX_MAP = {
def platform_tags(linux: str, arch: str) -> Iterator[str]:
if not _have_compatible_abi(arch):
if not _have_compatible_abi(sys.executable, arch):
return
# Oldest glibc to be supported regardless of architecture is (2, 17).
too_old_glibc2 = _GLibCVersion(2, 16)

View file

@ -4,68 +4,13 @@ This module implements logic to detect if the currently running Python is
linked against musl, and what musl version is used.
"""
import contextlib
import functools
import operator
import os
import re
import struct
import subprocess
import sys
from typing import IO, Iterator, NamedTuple, Optional, Tuple
from typing import Iterator, NamedTuple, Optional
def _read_unpacked(f: IO[bytes], fmt: str) -> Tuple[int, ...]:
return struct.unpack(fmt, f.read(struct.calcsize(fmt)))
def _parse_ld_musl_from_elf(f: IO[bytes]) -> Optional[str]:
"""Detect musl libc location by parsing the Python executable.
Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
"""
f.seek(0)
try:
ident = _read_unpacked(f, "16B")
except struct.error:
return None
if ident[:4] != tuple(b"\x7fELF"): # Invalid magic, not ELF.
return None
f.seek(struct.calcsize("HHI"), 1) # Skip file type, machine, and version.
try:
# e_fmt: Format for program header.
# p_fmt: Format for section header.
# p_idx: Indexes to find p_type, p_offset, and p_filesz.
e_fmt, p_fmt, p_idx = {
1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)), # 32-bit.
2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)), # 64-bit.
}[ident[4]]
except KeyError:
return None
else:
p_get = operator.itemgetter(*p_idx)
# Find the interpreter section and return its content.
try:
_, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt)
except struct.error:
return None
for i in range(e_phnum + 1):
f.seek(e_phoff + e_phentsize * i)
try:
p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt))
except struct.error:
return None
if p_type != 3: # Not PT_INTERP.
continue
f.seek(p_offset)
interpreter = os.fsdecode(f.read(p_filesz)).strip("\0")
if "musl" not in interpreter:
return None
return interpreter
return None
from ._elffile import ELFFile
class _MuslVersion(NamedTuple):
@ -95,13 +40,12 @@ def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
Version 1.2.2
Dynamic Program Loader
"""
with contextlib.ExitStack() as stack:
try:
f = stack.enter_context(open(executable, "rb"))
except OSError:
return None
ld = _parse_ld_musl_from_elf(f)
if not ld:
try:
with open(executable, "rb") as f:
ld = ELFFile(f).interpreter
except (OSError, TypeError, ValueError):
return None
if ld is None or "musl" not in ld:
return None
proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True)
return _parse_musl_version(proc.stderr)

333
lib/packaging/_parser.py Normal file
View file

@ -0,0 +1,333 @@
"""Handwritten parser of dependency specifiers.
The docstring for each _parse_* function contains EBNF-inspired grammar representing
the implementation.
"""
import ast
from typing import Any, List, NamedTuple, Optional, Tuple, Union
from ._tokenizer import DEFAULT_RULES, Tokenizer
class Node:
    """Base class for nodes of a parsed marker/requirement expression."""

    def __init__(self, value: str) -> None:
        self.value = value

    def __str__(self) -> str:
        return self.value

    def __repr__(self) -> str:
        return "<{}('{}')>".format(type(self).__name__, self)

    def serialize(self) -> str:
        raise NotImplementedError


class Variable(Node):
    """An environment-marker variable name such as ``os_name``."""

    def serialize(self) -> str:
        return str(self)


class Value(Node):
    """A literal value; serialized wrapped in double quotes."""

    def serialize(self) -> str:
        return '"{}"'.format(self)


class Op(Node):
    """A comparison operator such as ``==`` or ``not in``."""

    def serialize(self) -> str:
        return str(self)


MarkerVar = Union[Variable, Value]
MarkerItem = Tuple[MarkerVar, Op, MarkerVar]
# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]]
# MarkerList = List[Union["MarkerList", MarkerAtom, str]]
# mypy does not support recursive type definition
# https://github.com/python/mypy/issues/731
MarkerAtom = Any
MarkerList = List[Any]


class ParsedRequirement(NamedTuple):
    """Structured result of parsing one dependency specifier."""

    name: str
    url: str
    extras: List[str]
    specifier: str
    marker: Optional[MarkerList]
# --------------------------------------------------------------------------------------
# Recursive descent parser for dependency specifier
# --------------------------------------------------------------------------------------
def parse_requirement(source: str) -> ParsedRequirement:
    """Parse a dependency specifier string into a ``ParsedRequirement``."""
    return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES))


def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement:
    """
    requirement = WS? IDENTIFIER WS? extras WS? requirement_details
    """
    tokenizer.consume("WS")

    name_token = tokenizer.expect(
        "IDENTIFIER", expected="package name at the start of dependency specifier"
    )
    name = name_token.text
    tokenizer.consume("WS")

    extras = _parse_extras(tokenizer)
    tokenizer.consume("WS")

    url, specifier, marker = _parse_requirement_details(tokenizer)
    # Anything left unconsumed at this point is a syntax error.
    tokenizer.expect("END", expected="end of dependency specifier")

    return ParsedRequirement(name, url, extras, specifier, marker)


def _parse_requirement_details(
    tokenizer: Tokenizer,
) -> Tuple[str, str, Optional[MarkerList]]:
    """
    requirement_details = AT URL (WS requirement_marker)?
                        | specifier WS? (requirement_marker)?
    """
    specifier = ""
    url = ""
    marker = None

    if tokenizer.check("AT"):
        # URL form: ``name @ <url> [; marker]``.
        tokenizer.read()
        tokenizer.consume("WS")

        url_start = tokenizer.position
        url = tokenizer.expect("URL", expected="URL after @").text
        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        tokenizer.expect("WS", expected="whitespace after URL")

        marker = _parse_requirement_marker(
            tokenizer, span_start=url_start, after="URL and whitespace"
        )
    else:
        # Version form: ``name <specifiers> [; marker]``.
        specifier_start = tokenizer.position
        specifier = _parse_specifier(tokenizer)
        tokenizer.consume("WS")

        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        marker = _parse_requirement_marker(
            tokenizer,
            span_start=specifier_start,
            after=(
                "version specifier"
                if specifier
                else "name and no valid version specifier"
            ),
        )

    return (url, specifier, marker)


def _parse_requirement_marker(
    tokenizer: Tokenizer, *, span_start: int, after: str
) -> MarkerList:
    """
    requirement_marker = SEMICOLON marker WS?
    """
    if not tokenizer.check("SEMICOLON"):
        tokenizer.raise_syntax_error(
            f"Expected end or semicolon (after {after})",
            span_start=span_start,
        )
    else:
        tokenizer.read()

    marker = _parse_marker(tokenizer)
    tokenizer.consume("WS")

    return marker
def _parse_extras(tokenizer: Tokenizer) -> List[str]:
    """
    extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)?
    """
    # Extras are optional; bail out early when there is no "[".
    if not tokenizer.check("LEFT_BRACKET", peek=True):
        return []

    with tokenizer.enclosing_tokens("LEFT_BRACKET", "RIGHT_BRACKET"):
        tokenizer.consume("WS")
        extras = _parse_extras_list(tokenizer)
        tokenizer.consume("WS")

    return extras


def _parse_extras_list(tokenizer: Tokenizer) -> List[str]:
    """
    extras_list = identifier (wsp* ',' wsp* identifier)*
    """
    extras: List[str] = []

    if not tokenizer.check("IDENTIFIER"):
        return extras

    extras.append(tokenizer.read().text)

    while True:
        tokenizer.consume("WS")
        if tokenizer.check("IDENTIFIER", peek=True):
            # Two names in a row without a separating comma.
            tokenizer.raise_syntax_error("Expected comma between extra names")
        elif not tokenizer.check("COMMA"):
            break

        tokenizer.read()
        tokenizer.consume("WS")

        extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma")
        extras.append(extra_token.text)

    return extras


def _parse_specifier(tokenizer: Tokenizer) -> str:
    """
    specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS
              | WS? version_many WS?
    """
    with tokenizer.enclosing_tokens("LEFT_PARENTHESIS", "RIGHT_PARENTHESIS"):
        tokenizer.consume("WS")
        parsed_specifiers = _parse_version_many(tokenizer)
        tokenizer.consume("WS")

    return parsed_specifiers


def _parse_version_many(tokenizer: Tokenizer) -> str:
    """
    version_many = (OP VERSION (COMMA OP VERSION)*)?
    """
    parsed_specifiers = ""
    while tokenizer.check("OP"):
        parsed_specifiers += tokenizer.read().text

        # We intentionally do not consume whitespace here, since the regular expression
        # for `VERSION` uses a lookback for the operator, to determine what
        # corresponding syntax is permitted.
        version_token = tokenizer.expect("VERSION", expected="version after operator")
        parsed_specifiers += version_token.text
        tokenizer.consume("WS")

        if not tokenizer.check("COMMA"):
            break
        parsed_specifiers += tokenizer.read().text
        tokenizer.consume("WS")

    return parsed_specifiers
# --------------------------------------------------------------------------------------
# Recursive descent parser for marker expression
# --------------------------------------------------------------------------------------
def parse_marker(source: str) -> MarkerList:
    """Parse an environment-marker expression string into a ``MarkerList``."""
    return _parse_marker(Tokenizer(source, rules=DEFAULT_RULES))


def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
    """
    marker = marker_atom (BOOLOP marker_atom)+
    """
    expression = [_parse_marker_atom(tokenizer)]
    while tokenizer.check("BOOLOP"):
        token = tokenizer.read()
        expr_right = _parse_marker_atom(tokenizer)
        # Flat alternating list: [atom, "and"/"or", atom, ...].
        expression.extend((token.text, expr_right))
    return expression


def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom:
    """
    marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS?
                | WS? marker_item WS?
    """
    tokenizer.consume("WS")
    if tokenizer.check("LEFT_PARENTHESIS", peek=True):
        # Parenthesized sub-expression becomes a nested MarkerList.
        with tokenizer.enclosing_tokens("LEFT_PARENTHESIS", "RIGHT_PARENTHESIS"):
            tokenizer.consume("WS")
            marker: MarkerAtom = _parse_marker(tokenizer)
            tokenizer.consume("WS")
    else:
        marker = _parse_marker_item(tokenizer)
    tokenizer.consume("WS")
    return marker


def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem:
    """
    marker_item = WS? marker_var WS? marker_op WS? marker_var WS?
    """
    tokenizer.consume("WS")
    marker_var_left = _parse_marker_var(tokenizer)
    tokenizer.consume("WS")
    marker_op = _parse_marker_op(tokenizer)
    tokenizer.consume("WS")
    marker_var_right = _parse_marker_var(tokenizer)
    tokenizer.consume("WS")
    return (marker_var_left, marker_op, marker_var_right)


def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:
    """
    marker_var = VARIABLE | QUOTED_STRING
    """
    if tokenizer.check("VARIABLE"):
        # Dots in variable names (e.g. ``os.name``) normalize to underscores.
        return process_env_var(tokenizer.read().text.replace(".", "_"))
    elif tokenizer.check("QUOTED_STRING"):
        return process_python_str(tokenizer.read().text)
    else:
        tokenizer.raise_syntax_error(
            message="Expected a marker variable or quoted string"
        )
def process_env_var(env_var: str) -> Variable:
    """Wrap a marker variable name in a ``Variable`` node.

    Both spellings of the implementation marker collapse to the canonical
    ``platform_python_implementation``.
    """
    if env_var in ("platform_python_implementation", "python_implementation"):
        return Variable("platform_python_implementation")
    return Variable(env_var)
def process_python_str(python_str: str) -> Value:
    """Evaluate a quoted Python string literal and wrap it as a ``Value``."""
    literal = ast.literal_eval(python_str)
    return Value(str(literal))
def _parse_marker_op(tokenizer: Tokenizer) -> Op:
    """
    marker_op = IN | NOT IN | OP
    """
    if tokenizer.check("IN"):
        tokenizer.read()
        return Op("in")
    elif tokenizer.check("NOT"):
        # "not" must be followed by whitespace and "in".
        tokenizer.read()
        tokenizer.expect("WS", expected="whitespace after 'not'")
        tokenizer.expect("IN", expected="'in' after 'not'")
        return Op("not in")
    elif tokenizer.check("OP"):
        return Op(tokenizer.read().text)
    else:
        return tokenizer.raise_syntax_error(
            "Expected marker operator, one of "
            "<=, <, !=, ==, >=, >, ~=, ===, in, not in"
        )

186
lib/packaging/_tokenizer.py Normal file
View file

@ -0,0 +1,186 @@
import contextlib
import re
from dataclasses import dataclass
from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union
from .specifiers import Specifier
@dataclass
class Token:
    """A single lexical token produced by the tokenizer."""

    name: str  # Name of the matching rule (a key of the rules mapping).
    text: str  # The exact matched source text.
    position: int  # Offset of the match within the source string.
class ParserSyntaxError(Exception):
    """The provided source text could not be parsed correctly."""

    def __init__(
        self,
        message: str,
        *,
        source: str,
        span: Tuple[int, int],
    ) -> None:
        # ``span`` is a (start, end) pair of offsets into ``source``.
        self.span = span
        self.message = message
        self.source = source

        super().__init__()

    def __str__(self) -> str:
        # Underline the offending span with "~" characters, ending in "^".
        marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^"
        return "\n ".join([self.message, self.source, marker])
DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = {
"LPAREN": r"\s*\(",
"LEFT_PARENTHESIS": r"\(",
"RIGHT_PARENTHESIS": r"\)",
"LEFT_BRACKET": r"\[",
"RIGHT_BRACKET": r"\]",
"SEMICOLON": r";",
"COMMA": r",",
"QUOTED_STRING": re.compile(
r"""
(
('[^']*')
|
("[^"]*")
)
""",
re.VERBOSE,
),
"OP": r"(===|==|~=|!=|<=|>=|<|>)",
"BOOLOP": r"\b(or|and)\b",
"IN": r"\bin\b",
"NOT": r"\bnot\b",
"VARIABLE": re.compile(
r"""
\b(
python_version
|python_full_version
|os[._]name
|sys[._]platform
|platform_(release|system)
|platform[._](version|machine|python_implementation)
|python_implementation
|implementation_(name|version)
|extra
)\b
""",
re.VERBOSE,
),
"VERSION": re.compile(Specifier._version_regex_str, re.VERBOSE | re.IGNORECASE),
"AT": r"\@",
"URL": r"[^ \t]+",
"IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b",
"WS": r"[ \t]+",
"END": r"$",
}
class Tokenizer:
    """Context-sensitive token parsing.

    Provides methods to examine the input stream to check whether the next token
    matches.
    """

    def __init__(
        self,
        source: str,
        *,
        rules: "Dict[str, Union[str, re.Pattern[str]]]",
    ) -> None:
        self.source = source
        # Accept both raw pattern strings and pre-compiled patterns;
        # re.compile() passes already-compiled patterns through unchanged.
        self.rules: Dict[str, re.Pattern[str]] = {
            token_name: re.compile(pattern) for token_name, pattern in rules.items()
        }
        self.next_token: Optional[Token] = None
        self.position = 0

    def consume(self, name: str) -> None:
        """Move beyond provided token name, if at current position."""
        if self.check(name):
            self.read()

    def check(self, name: str, *, peek: bool = False) -> bool:
        """Check whether the next token has the provided name.

        By default, if the check succeeds, the token *must* be read before
        another check. If `peek` is set to `True`, the token is not loaded and
        would need to be checked again.
        """
        assert (
            self.next_token is None
        ), f"Cannot check for {name!r}, already have {self.next_token!r}"
        assert name in self.rules, f"Unknown token name: {name!r}"

        matched = self.rules[name].match(self.source, self.position)
        if matched is None:
            return False
        if not peek:
            # Stash the token; read() consumes it and advances position.
            self.next_token = Token(name, matched[0], self.position)
        return True

    def expect(self, name: str, *, expected: str) -> Token:
        """Expect a certain token name next, failing with a syntax error otherwise.

        The token is *not* read.
        """
        if not self.check(name):
            raise self.raise_syntax_error(f"Expected {expected}")
        return self.read()

    def read(self) -> Token:
        """Consume the next token and return it."""
        current = self.next_token
        assert current is not None

        self.position += len(current.text)
        self.next_token = None
        return current

    def raise_syntax_error(
        self,
        message: str,
        *,
        span_start: Optional[int] = None,
        span_end: Optional[int] = None,
    ) -> NoReturn:
        """Raise ParserSyntaxError at the given position."""
        start = self.position if span_start is None else span_start
        end = self.position if span_end is None else span_end
        raise ParserSyntaxError(
            message,
            source=self.source,
            span=(start, end),
        )

    @contextlib.contextmanager
    def enclosing_tokens(self, open_token: str, close_token: str) -> Iterator[bool]:
        # Remember where the opening token started so the error (if any)
        # can point back at it.
        open_position: Optional[int] = None
        if self.check(open_token):
            open_position = self.position
            self.read()

        yield open_position is not None

        if open_position is None:
            return

        if not self.check(close_token):
            self.raise_syntax_error(
                f"Expected closing {close_token}",
                span_start=open_position,
            )

        self.read()

View file

@ -8,19 +8,10 @@ import platform
import sys
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
from pyparsing import ( # noqa: N817
Forward,
Group,
Literal as L,
ParseException,
ParseResults,
QuotedString,
ZeroOrMore,
stringEnd,
stringStart,
)
from ._parser import MarkerAtom, MarkerList, Op, Value, Variable, parse_marker
from ._tokenizer import ParserSyntaxError
from .specifiers import InvalidSpecifier, Specifier
from .utils import canonicalize_name
__all__ = [
"InvalidMarker",
@ -52,101 +43,24 @@ class UndefinedEnvironmentName(ValueError):
"""
class Node:
def __init__(self, value: Any) -> None:
self.value = value
def __str__(self) -> str:
return str(self.value)
def __repr__(self) -> str:
return f"<{self.__class__.__name__}('{self}')>"
def serialize(self) -> str:
raise NotImplementedError
class Variable(Node):
def serialize(self) -> str:
return str(self)
class Value(Node):
def serialize(self) -> str:
return f'"{self}"'
class Op(Node):
def serialize(self) -> str:
return str(self)
VARIABLE = (
L("implementation_version")
| L("platform_python_implementation")
| L("implementation_name")
| L("python_full_version")
| L("platform_release")
| L("platform_version")
| L("platform_machine")
| L("platform_system")
| L("python_version")
| L("sys_platform")
| L("os_name")
| L("os.name") # PEP-345
| L("sys.platform") # PEP-345
| L("platform.version") # PEP-345
| L("platform.machine") # PEP-345
| L("platform.python_implementation") # PEP-345
| L("python_implementation") # undocumented setuptools legacy
| L("extra") # PEP-508
)
ALIASES = {
"os.name": "os_name",
"sys.platform": "sys_platform",
"platform.version": "platform_version",
"platform.machine": "platform_machine",
"platform.python_implementation": "platform_python_implementation",
"python_implementation": "platform_python_implementation",
}
VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
VERSION_CMP = (
L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<")
)
MARKER_OP = VERSION_CMP | L("not in") | L("in")
MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))
MARKER_VALUE = QuotedString("'") | QuotedString('"')
MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))
BOOLOP = L("and") | L("or")
MARKER_VAR = VARIABLE | MARKER_VALUE
MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))
LPAREN = L("(").suppress()
RPAREN = L(")").suppress()
MARKER_EXPR = Forward()
MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)
MARKER = stringStart + MARKER_EXPR + stringEnd
def _coerce_parse_result(results: Union[ParseResults, List[Any]]) -> List[Any]:
if isinstance(results, ParseResults):
return [_coerce_parse_result(i) for i in results]
else:
return results
def _normalize_extra_values(results: Any) -> Any:
"""
Normalize extra values.
"""
if isinstance(results[0], tuple):
lhs, op, rhs = results[0]
if isinstance(lhs, Variable) and lhs.value == "extra":
normalized_extra = canonicalize_name(rhs.value)
rhs = Value(normalized_extra)
elif isinstance(rhs, Variable) and rhs.value == "extra":
normalized_extra = canonicalize_name(lhs.value)
lhs = Value(normalized_extra)
results[0] = lhs, op, rhs
return results
def _format_marker(
marker: Union[List[str], Tuple[Node, ...], str], first: Optional[bool] = True
marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True
) -> str:
assert isinstance(marker, (list, tuple, str))
@ -192,7 +106,7 @@ def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
except InvalidSpecifier:
pass
else:
return spec.contains(lhs)
return spec.contains(lhs, prereleases=True)
oper: Optional[Operator] = _operators.get(op.serialize())
if oper is None:
@ -201,25 +115,19 @@ def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
return oper(lhs, rhs)
class Undefined:
pass
def _normalize(*values: str, key: str) -> Tuple[str, ...]:
# PEP 685 Comparison of extra names for optional distribution dependencies
# https://peps.python.org/pep-0685/
# > When comparing extra names, tools MUST normalize the names being
# > compared using the semantics outlined in PEP 503 for names
if key == "extra":
return tuple(canonicalize_name(v) for v in values)
# other environment markers don't have such standards
return values
_undefined = Undefined()
def _get_env(environment: Dict[str, str], name: str) -> str:
value: Union[str, Undefined] = environment.get(name, _undefined)
if isinstance(value, Undefined):
raise UndefinedEnvironmentName(
f"{name!r} does not exist in evaluation environment."
)
return value
def _evaluate_markers(markers: List[Any], environment: Dict[str, str]) -> bool:
def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
groups: List[List[bool]] = [[]]
for marker in markers:
@ -231,12 +139,15 @@ def _evaluate_markers(markers: List[Any], environment: Dict[str, str]) -> bool:
lhs, op, rhs = marker
if isinstance(lhs, Variable):
lhs_value = _get_env(environment, lhs.value)
environment_key = lhs.value
lhs_value = environment[environment_key]
rhs_value = rhs.value
else:
lhs_value = lhs.value
rhs_value = _get_env(environment, rhs.value)
environment_key = rhs.value
rhs_value = environment[environment_key]
lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
groups[-1].append(_eval_op(lhs_value, op, rhs_value))
else:
assert marker in ["and", "or"]
@ -274,13 +185,29 @@ def default_environment() -> Dict[str, str]:
class Marker:
def __init__(self, marker: str) -> None:
# Note: We create a Marker object without calling this constructor in
# packaging.requirements.Requirement. If any additional logic is
# added here, make sure to mirror/adapt Requirement.
try:
self._markers = _coerce_parse_result(MARKER.parseString(marker))
except ParseException as e:
raise InvalidMarker(
f"Invalid marker: {marker!r}, parse error at "
f"{marker[e.loc : e.loc + 8]!r}"
)
self._markers = _normalize_extra_values(parse_marker(marker))
# The attribute `_markers` can be described in terms of a recursive type:
# MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
#
# For example, the following expression:
# python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
#
# is parsed into:
# [
# (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
# 'and',
# [
# (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
# 'or',
# (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
# ]
# ]
except ParserSyntaxError as e:
raise InvalidMarker(str(e)) from e
def __str__(self) -> str:
return _format_marker(self._markers)
@ -288,6 +215,15 @@ class Marker:
def __repr__(self) -> str:
return f"<Marker('{self}')>"
def __hash__(self) -> int:
return hash((self.__class__.__name__, str(self)))
def __eq__(self, other: Any) -> bool:
if not isinstance(other, Marker):
return NotImplemented
return str(self) == str(other)
def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
"""Evaluate a marker.
@ -298,6 +234,7 @@ class Marker:
The environment is determined from the current Python process.
"""
current_environment = default_environment()
current_environment["extra"] = ""
if environment is not None:
current_environment.update(environment)

View file

@ -2,26 +2,13 @@
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import re
import string
import urllib.parse
from typing import List, Optional as TOptional, Set
from typing import Any, List, Optional, Set
from pyparsing import ( # noqa
Combine,
Literal as L,
Optional,
ParseException,
Regex,
Word,
ZeroOrMore,
originalTextFor,
stringEnd,
stringStart,
)
from .markers import MARKER_EXPR, Marker
from .specifiers import LegacySpecifier, Specifier, SpecifierSet
from ._parser import parse_requirement
from ._tokenizer import ParserSyntaxError
from .markers import Marker, _normalize_extra_values
from .specifiers import SpecifierSet
class InvalidRequirement(ValueError):
@ -30,60 +17,6 @@ class InvalidRequirement(ValueError):
"""
ALPHANUM = Word(string.ascii_letters + string.digits)
LBRACKET = L("[").suppress()
RBRACKET = L("]").suppress()
LPAREN = L("(").suppress()
RPAREN = L(")").suppress()
COMMA = L(",").suppress()
SEMICOLON = L(";").suppress()
AT = L("@").suppress()
PUNCTUATION = Word("-_.")
IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
NAME = IDENTIFIER("name")
EXTRA = IDENTIFIER
URI = Regex(r"[^ ]+")("url")
URL = AT + URI
EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
VERSION_MANY = Combine(
VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False
)("_raw_spec")
_VERSION_SPEC = Optional((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)
_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "")
VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
MARKER_EXPR.setParseAction(
lambda s, l, t: Marker(s[t._original_start : t._original_end])
)
MARKER_SEPARATOR = SEMICOLON
MARKER = MARKER_SEPARATOR + MARKER_EXPR
VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
URL_AND_MARKER = URL + Optional(MARKER)
NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
# pyparsing isn't thread safe during initialization, so we do it eagerly, see
# issue #104
REQUIREMENT.parseString("x[]")
class Requirement:
"""Parse a requirement.
@ -99,28 +32,29 @@ class Requirement:
def __init__(self, requirement_string: str) -> None:
try:
req = REQUIREMENT.parseString(requirement_string)
except ParseException as e:
raise InvalidRequirement(
f'Parse error at "{ requirement_string[e.loc : e.loc + 8]!r}": {e.msg}'
)
parsed = parse_requirement(requirement_string)
except ParserSyntaxError as e:
raise InvalidRequirement(str(e)) from e
self.name: str = req.name
if req.url:
parsed_url = urllib.parse.urlparse(req.url)
self.name: str = parsed.name
if parsed.url:
parsed_url = urllib.parse.urlparse(parsed.url)
if parsed_url.scheme == "file":
if urllib.parse.urlunparse(parsed_url) != req.url:
if urllib.parse.urlunparse(parsed_url) != parsed.url:
raise InvalidRequirement("Invalid URL given")
elif not (parsed_url.scheme and parsed_url.netloc) or (
not parsed_url.scheme and not parsed_url.netloc
):
raise InvalidRequirement(f"Invalid URL: {req.url}")
self.url: TOptional[str] = req.url
raise InvalidRequirement(f"Invalid URL: {parsed.url}")
self.url: Optional[str] = parsed.url
else:
self.url = None
self.extras: Set[str] = set(req.extras.asList() if req.extras else [])
self.specifier: SpecifierSet = SpecifierSet(req.specifier)
self.marker: TOptional[Marker] = req.marker if req.marker else None
self.extras: Set[str] = set(parsed.extras if parsed.extras else [])
self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
self.marker: Optional[Marker] = None
if parsed.marker is not None:
self.marker = Marker.__new__(Marker)
self.marker._markers = _normalize_extra_values(parsed.marker)
def __str__(self) -> str:
parts: List[str] = [self.name]
@ -144,3 +78,18 @@ class Requirement:
def __repr__(self) -> str:
return f"<Requirement('{self}')>"
def __hash__(self) -> int:
return hash((self.__class__.__name__, str(self)))
def __eq__(self, other: Any) -> bool:
if not isinstance(other, Requirement):
return NotImplemented
return (
self.name == other.name
and self.extras == other.extras
and self.specifier == other.specifier
and self.url == other.url
and self.marker == other.marker
)

File diff suppressed because it is too large Load diff

View file

@ -4,6 +4,7 @@
import logging
import platform
import subprocess
import sys
import sysconfig
from importlib.machinery import EXTENSION_SUFFIXES
@ -36,7 +37,7 @@ INTERPRETER_SHORT_NAMES: Dict[str, str] = {
}
_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32
_32_BIT_INTERPRETER = sys.maxsize <= 2**32
class Tag:
@ -356,6 +357,22 @@ def mac_platforms(
version_str, _, cpu_arch = platform.mac_ver()
if version is None:
version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
if version == (10, 16):
# When built against an older macOS SDK, Python will report macOS 10.16
# instead of the real version.
version_str = subprocess.run(
[
sys.executable,
"-sS",
"-c",
"import platform; print(platform.mac_ver()[0])",
],
check=True,
env={"SYSTEM_VERSION_COMPAT": "0"},
stdout=subprocess.PIPE,
universal_newlines=True,
).stdout
version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
else:
version = version
if arch is None:
@ -482,6 +499,9 @@ def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
yield from generic_tags()
if interp_name == "pp":
yield from compatible_tags(interpreter="pp3")
interp = "pp3"
elif interp_name == "cp":
interp = "cp" + interpreter_version(warn=warn)
else:
yield from compatible_tags()
interp = None
yield from compatible_tags(interpreter=interp)

View file

@ -35,7 +35,9 @@ def canonicalize_name(name: str) -> NormalizedName:
return cast(NormalizedName, value)
def canonicalize_version(version: Union[Version, str]) -> str:
def canonicalize_version(
version: Union[Version, str], *, strip_trailing_zero: bool = True
) -> str:
"""
This is very similar to Version.__str__, but has one subtle difference
with the way it handles the release segment.
@ -56,8 +58,11 @@ def canonicalize_version(version: Union[Version, str]) -> str:
parts.append(f"{parsed.epoch}!")
# Release segment
# NB: This strips trailing '.0's to normalize
parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in parsed.release)))
release_segment = ".".join(str(x) for x in parsed.release)
if strip_trailing_zero:
# NB: This strips trailing '.0's to normalize
release_segment = re.sub(r"(\.0)+$", "", release_segment)
parts.append(release_segment)
# Pre-release
if parsed.pre is not None:

View file

@ -1,16 +1,20 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
"""
.. testsetup::
from packaging.version import parse, Version
"""
import collections
import itertools
import re
import warnings
from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union
from typing import Callable, Optional, SupportsInt, Tuple, Union
from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]
__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
InfiniteTypes = Union[InfinityType, NegativeInfinityType]
PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
@ -29,36 +33,37 @@ LocalType = Union[
CmpKey = Tuple[
int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
]
LegacyCmpKey = Tuple[int, Tuple[str, ...]]
VersionComparisonMethod = Callable[
[Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool
]
VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
_Version = collections.namedtuple(
"_Version", ["epoch", "release", "dev", "pre", "post", "local"]
)
def parse(version: str) -> Union["LegacyVersion", "Version"]:
def parse(version: str) -> "Version":
"""Parse the given version string.
>>> parse('1.0.dev1')
<Version('1.0.dev1')>
:param version: The version string to parse.
:raises InvalidVersion: When the version string is not a valid version.
"""
Parse the given version string and return either a :class:`Version` object
or a :class:`LegacyVersion` object depending on if the given version is
a valid PEP 440 version or a legacy version.
"""
try:
return Version(version)
except InvalidVersion:
return LegacyVersion(version)
return Version(version)
class InvalidVersion(ValueError):
"""
An invalid version was found, users should refer to PEP 440.
"""Raised when a version string is not a valid version.
>>> Version("invalid")
Traceback (most recent call last):
...
packaging.version.InvalidVersion: Invalid version: 'invalid'
"""
class _BaseVersion:
_key: Union[CmpKey, LegacyCmpKey]
_key: CmpKey
def __hash__(self) -> int:
return hash(self._key)
@ -103,126 +108,9 @@ class _BaseVersion:
return self._key != other._key
class LegacyVersion(_BaseVersion):
def __init__(self, version: str) -> None:
self._version = str(version)
self._key = _legacy_cmpkey(self._version)
warnings.warn(
"Creating a LegacyVersion has been deprecated and will be "
"removed in the next major release",
DeprecationWarning,
)
def __str__(self) -> str:
return self._version
def __repr__(self) -> str:
return f"<LegacyVersion('{self}')>"
@property
def public(self) -> str:
return self._version
@property
def base_version(self) -> str:
return self._version
@property
def epoch(self) -> int:
return -1
@property
def release(self) -> None:
return None
@property
def pre(self) -> None:
return None
@property
def post(self) -> None:
return None
@property
def dev(self) -> None:
return None
@property
def local(self) -> None:
return None
@property
def is_prerelease(self) -> bool:
return False
@property
def is_postrelease(self) -> bool:
return False
@property
def is_devrelease(self) -> bool:
return False
_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
_legacy_version_replacement_map = {
"pre": "c",
"preview": "c",
"-": "final-",
"rc": "c",
"dev": "@",
}
def _parse_version_parts(s: str) -> Iterator[str]:
for part in _legacy_version_component_re.split(s):
part = _legacy_version_replacement_map.get(part, part)
if not part or part == ".":
continue
if part[:1] in "0123456789":
# pad for numeric comparison
yield part.zfill(8)
else:
yield "*" + part
# ensure that alpha/beta/candidate are before final
yield "*final"
def _legacy_cmpkey(version: str) -> LegacyCmpKey:
# We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch
# greater than or equal to 0. This will effectively put the LegacyVersion,
# which uses the defacto standard originally implemented by setuptools,
# as before all PEP 440 versions.
epoch = -1
# This scheme is taken from pkg_resources.parse_version setuptools prior to
# it's adoption of the packaging library.
parts: List[str] = []
for part in _parse_version_parts(version.lower()):
if part.startswith("*"):
# remove "-" before a prerelease tag
if part < "*final":
while parts and parts[-1] == "*final-":
parts.pop()
# remove trailing zeros from each series of numeric parts
while parts and parts[-1] == "00000000":
parts.pop()
parts.append(part)
return epoch, tuple(parts)
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
VERSION_PATTERN = r"""
_VERSION_PATTERN = r"""
v?
(?:
(?:(?P<epoch>[0-9]+)!)? # epoch
@ -253,12 +141,55 @@ VERSION_PATTERN = r"""
(?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
"""
VERSION_PATTERN = _VERSION_PATTERN
"""
A string containing the regular expression used to match a valid version.
The pattern is not anchored at either end, and is intended for embedding in larger
expressions (for example, matching a version number as part of a file name). The
regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
flags set.
:meta hide-value:
"""
class Version(_BaseVersion):
"""This class abstracts handling of a project's versions.
A :class:`Version` instance is comparison aware and can be compared and
sorted using the standard Python interfaces.
>>> v1 = Version("1.0a5")
>>> v2 = Version("1.0")
>>> v1
<Version('1.0a5')>
>>> v2
<Version('1.0')>
>>> v1 < v2
True
>>> v1 == v2
False
>>> v1 > v2
False
>>> v1 >= v2
False
>>> v1 <= v2
True
"""
_regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
def __init__(self, version: str) -> None:
"""Initialize a Version object.
:param version:
The string representation of a version which will be parsed and normalized
before use.
:raises InvalidVersion:
If the ``version`` does not conform to PEP 440 in any way then this
exception will be raised.
"""
# Validate the version and parse it into pieces
match = self._regex.search(version)
@ -288,9 +219,19 @@ class Version(_BaseVersion):
)
def __repr__(self) -> str:
"""A representation of the Version that shows all internal state.
>>> Version('1.0.0')
<Version('1.0.0')>
"""
return f"<Version('{self}')>"
def __str__(self) -> str:
"""A string representation of the version that can be rounded-tripped.
>>> str(Version("1.0a5"))
'1.0a5'
"""
parts = []
# Epoch
@ -320,29 +261,80 @@ class Version(_BaseVersion):
@property
def epoch(self) -> int:
"""The epoch of the version.
>>> Version("2.0.0").epoch
0
>>> Version("1!2.0.0").epoch
1
"""
_epoch: int = self._version.epoch
return _epoch
@property
def release(self) -> Tuple[int, ...]:
"""The components of the "release" segment of the version.
>>> Version("1.2.3").release
(1, 2, 3)
>>> Version("2.0.0").release
(2, 0, 0)
>>> Version("1!2.0.0.post0").release
(2, 0, 0)
Includes trailing zeroes but not the epoch or any pre-release / development /
post-release suffixes.
"""
_release: Tuple[int, ...] = self._version.release
return _release
@property
def pre(self) -> Optional[Tuple[str, int]]:
"""The pre-release segment of the version.
>>> print(Version("1.2.3").pre)
None
>>> Version("1.2.3a1").pre
('a', 1)
>>> Version("1.2.3b1").pre
('b', 1)
>>> Version("1.2.3rc1").pre
('rc', 1)
"""
_pre: Optional[Tuple[str, int]] = self._version.pre
return _pre
@property
def post(self) -> Optional[int]:
"""The post-release number of the version.
>>> print(Version("1.2.3").post)
None
>>> Version("1.2.3.post1").post
1
"""
return self._version.post[1] if self._version.post else None
@property
def dev(self) -> Optional[int]:
"""The development number of the version.
>>> print(Version("1.2.3").dev)
None
>>> Version("1.2.3.dev1").dev
1
"""
return self._version.dev[1] if self._version.dev else None
@property
def local(self) -> Optional[str]:
"""The local version segment of the version.
>>> print(Version("1.2.3").local)
None
>>> Version("1.2.3+abc").local
'abc'
"""
if self._version.local:
return ".".join(str(x) for x in self._version.local)
else:
@ -350,10 +342,31 @@ class Version(_BaseVersion):
@property
def public(self) -> str:
"""The public portion of the version.
>>> Version("1.2.3").public
'1.2.3'
>>> Version("1.2.3+abc").public
'1.2.3'
>>> Version("1.2.3+abc.dev1").public
'1.2.3'
"""
return str(self).split("+", 1)[0]
@property
def base_version(self) -> str:
"""The "base version" of the version.
>>> Version("1.2.3").base_version
'1.2.3'
>>> Version("1.2.3+abc").base_version
'1.2.3'
>>> Version("1!1.2.3+abc.dev1").base_version
'1!1.2.3'
The "base version" is the public version of the project without any pre or post
release markers.
"""
parts = []
# Epoch
@ -367,26 +380,72 @@ class Version(_BaseVersion):
@property
def is_prerelease(self) -> bool:
"""Whether this version is a pre-release.
>>> Version("1.2.3").is_prerelease
False
>>> Version("1.2.3a1").is_prerelease
True
>>> Version("1.2.3b1").is_prerelease
True
>>> Version("1.2.3rc1").is_prerelease
True
>>> Version("1.2.3dev1").is_prerelease
True
"""
return self.dev is not None or self.pre is not None
@property
def is_postrelease(self) -> bool:
"""Whether this version is a post-release.
>>> Version("1.2.3").is_postrelease
False
>>> Version("1.2.3.post1").is_postrelease
True
"""
return self.post is not None
@property
def is_devrelease(self) -> bool:
"""Whether this version is a development release.
>>> Version("1.2.3").is_devrelease
False
>>> Version("1.2.3.dev1").is_devrelease
True
"""
return self.dev is not None
@property
def major(self) -> int:
"""The first item of :attr:`release` or ``0`` if unavailable.
>>> Version("1.2.3").major
1
"""
return self.release[0] if len(self.release) >= 1 else 0
@property
def minor(self) -> int:
"""The second item of :attr:`release` or ``0`` if unavailable.
>>> Version("1.2.3").minor
2
>>> Version("1").minor
0
"""
return self.release[1] if len(self.release) >= 2 else 0
@property
def micro(self) -> int:
"""The third item of :attr:`release` or ``0`` if unavailable.
>>> Version("1.2.3").micro
3
>>> Version("1").micro
0
"""
return self.release[2] if len(self.release) >= 3 else 0

View file

@ -6,6 +6,10 @@ import re
import numbers
import functools
import contextlib
from numbers import Number
from typing import Union, Tuple, Iterable
from typing import cast
from jaraco.functools import once
@ -33,7 +37,7 @@ hours_per_month = hours_per_day * days_per_year / 12
@once
def _needs_year_help():
def _needs_year_help() -> bool:
"""
Some versions of Python render %Y with only three characters :(
https://bugs.python.org/issue39103
@ -41,14 +45,19 @@ def _needs_year_help():
return len(datetime.date(900, 1, 1).strftime('%Y')) != 4
def ensure_datetime(ob):
AnyDatetime = Union[datetime.datetime, datetime.date, datetime.time]
StructDatetime = Union[Tuple[int, ...], time.struct_time]
def ensure_datetime(ob: AnyDatetime) -> datetime.datetime:
"""
Given a datetime or date or time object from the ``datetime``
module, always return a datetime using default values.
"""
if isinstance(ob, datetime.datetime):
return ob
date = time = ob
date = cast(datetime.date, ob)
time = cast(datetime.time, ob)
if isinstance(ob, datetime.date):
time = datetime.time()
if isinstance(ob, datetime.time):
@ -56,7 +65,13 @@ def ensure_datetime(ob):
return datetime.datetime.combine(date, time)
def strftime(fmt, t):
def infer_datetime(ob: Union[AnyDatetime, StructDatetime]) -> datetime.datetime:
if isinstance(ob, (time.struct_time, tuple)):
ob = datetime.datetime(*ob[:6]) # type: ignore
return ensure_datetime(ob)
def strftime(fmt: str, t: Union[AnyDatetime, tuple, time.struct_time]) -> str:
"""
Portable strftime.
@ -115,15 +130,11 @@ def strftime(fmt, t):
>>> strftime('%Y', datetime.time())
'1900'
"""
if isinstance(t, (time.struct_time, tuple)):
t = datetime.datetime(*t[:6])
t = ensure_datetime(t)
t = infer_datetime(t)
subs = (
('%s', '%03d' % (t.microsecond // 1000)),
('%µ', '%03d' % (t.microsecond % 1000)),
)
if _needs_year_help(): # pragma: nocover
subs += (('%Y', '%04d' % t.year),)
) + (('%Y', '%04d' % t.year),) * _needs_year_help()
def doSub(s, sub):
return s.replace(*sub)
@ -324,10 +335,10 @@ def calculate_prorated_values():
"""
rate = input("Enter the rate (3/hour, 50/month)> ")
for period, value in _prorated_values(rate):
print("per {period}: {value}".format(**locals()))
print(f"per {period}: {value}")
def _prorated_values(rate):
def _prorated_values(rate: str) -> Iterable[Tuple[str, Number]]:
"""
Given a rate (a string in units per unit time), and return that same
rate for various time periods.
@ -341,7 +352,8 @@ def _prorated_values(rate):
year: 175316.333
"""
res = re.match(r'(?P<value>[\d.]+)/(?P<period>\w+)$', rate).groupdict()
match = re.match(r'(?P<value>[\d.]+)/(?P<period>\w+)$', rate)
res = cast(re.Match, match).groupdict()
value = float(res['value'])
value_per_second = value / get_period_seconds(res['period'])
for period in ('minute', 'hour', 'day', 'month', 'year'):

View file

@ -130,7 +130,7 @@ class PeriodicCommand(DelayedCommand):
raise ValueError(
"A PeriodicCommand must have a positive, " "non-zero delay."
)
super(PeriodicCommand, self).__setattr__(key, value)
super().__setattr__(key, value)
class PeriodicCommandFixedDelay(PeriodicCommand):

View file

@ -115,7 +115,7 @@ class Timer(Stopwatch):
def __init__(self, target=float('Inf')):
self.target = self._accept(target)
super(Timer, self).__init__()
super().__init__()
@staticmethod
def _accept(target):

View file

@ -1,6 +1,6 @@
# IANA versions like 2020a are not valid PEP 440 identifiers; the recommended
# way to translate the version is to use YYYY.n where `n` is a 0-based index.
__version__ = "2022.6"
__version__ = "2022.7"
# This exposes the original IANA version number.
IANA_VERSION = "2022f"
IANA_VERSION = "2022g"

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View file

@ -3,13 +3,13 @@
# This file is in the public domain, so clarified as of
# 2009-05-17 by Arthur David Olson.
#
# From Paul Eggert (2015-05-02):
# From Paul Eggert (2022-11-18):
# This file contains a table of two-letter country codes. Columns are
# separated by a single tab. Lines beginning with '#' are comments.
# All text uses UTF-8 encoding. The columns of the table are as follows:
#
# 1. ISO 3166-1 alpha-2 country code, current as of
# ISO 3166-1 N976 (2018-11-06). See: Updates on ISO 3166-1
# ISO 3166-1 N1087 (2022-09-02). See: Updates on ISO 3166-1
# https://isotc.iso.org/livelink/livelink/Open/16944257
# 2. The usual English name for the coded region,
# chosen so that alphabetic sorting of subsets produces helpful lists.
@ -238,7 +238,7 @@ SY Syria
SZ Eswatini (Swaziland)
TC Turks & Caicos Is
TD Chad
TF French Southern & Antarctic Lands
TF French Southern Territories
TG Togo
TH Thailand
TJ Tajikistan

View file

@ -1,4 +1,4 @@
# version 2022f
# version 2022g
# This zic input file is in the public domain.
R d 1916 o - Jun 14 23s 1 S
R d 1916 1919 - O Su>=1 23s 0 -
@ -1040,7 +1040,7 @@ Z Asia/Singapore 6:55:25 - LMT 1901
7:20 - +0720 1941 S
7:30 - +0730 1942 F 16
9 - +09 1945 S 12
7:30 - +0730 1982
7:30 - +0730 1981 D 31 16u
8 - +08
Z Asia/Colombo 5:19:24 - LMT 1880
5:19:32 - MMT 1906
@ -1754,7 +1754,8 @@ Z America/Scoresbysund -1:27:52 - LMT 1916 Jul 28
-1 E -01/+00
Z America/Nuuk -3:26:56 - LMT 1916 Jul 28
-3 - -03 1980 Ap 6 2
-3 E -03/-02
-3 E -03/-02 2023 Mar 25 22
-2 - -02
Z America/Thule -4:35:8 - LMT 1916 Jul 28
-4 Th A%sT
Z Europe/Tallinn 1:39 - LMT 1880
@ -3044,16 +3045,11 @@ R Y 1919 o - N 1 0 0 S
R Y 1942 o - F 9 2 1 W
R Y 1945 o - Au 14 23u 1 P
R Y 1945 o - S 30 2 0 S
R Y 1965 o - Ap lastSu 0 2 DD
R Y 1965 o - O lastSu 2 0 S
R Y 1980 1986 - Ap lastSu 2 1 D
R Y 1980 2006 - O lastSu 2 0 S
R Y 1972 1986 - Ap lastSu 2 1 D
R Y 1972 2006 - O lastSu 2 0 S
R Y 1987 2006 - Ap Su>=1 2 1 D
Z America/Pangnirtung 0 - -00 1921
-4 Y A%sT 1995 Ap Su>=1 2
-5 C E%sT 1999 O 31 2
-6 C C%sT 2000 O 29 2
-5 C E%sT
R Yu 1965 o - Ap lastSu 0 2 DD
R Yu 1965 o - O lastSu 2 0 S
Z America/Iqaluit 0 - -00 1942 Au
-5 Y E%sT 1999 O 31 2
-6 C C%sT 2000 O 29 2
@ -3082,13 +3078,15 @@ Z America/Inuvik 0 - -00 1953
-7 Y M%sT 1980
-7 C M%sT
Z America/Whitehorse -9:0:12 - LMT 1900 Au 20
-9 Y Y%sT 1967 May 28
-8 Y P%sT 1980
-9 Y Y%sT 1965
-9 Yu Y%sT 1966 F 27
-8 - PST 1980
-8 C P%sT 2020 N
-7 - MST
Z America/Dawson -9:17:40 - LMT 1900 Au 20
-9 Y Y%sT 1973 O 28
-8 Y P%sT 1980
-9 Y Y%sT 1965
-9 Yu Y%sT 1973 O 28
-8 - PST 1980
-8 C P%sT 2020 N
-7 - MST
R m 1931 o - May 1 23 1 D
@ -3132,6 +3130,17 @@ Z America/Mexico_City -6:36:36 - LMT 1922 Ja 1 7u
-6 m C%sT 2001 S 30 2
-6 - CST 2002 F 20
-6 m C%sT
Z America/Ciudad_Juarez -7:5:56 - LMT 1922 Ja 1 7u
-7 - MST 1927 Jun 10 23
-6 - CST 1930 N 15
-7 m M%sT 1932 Ap
-6 - CST 1996
-6 m C%sT 1998
-6 - CST 1998 Ap Su>=1 3
-7 m M%sT 2010
-7 u M%sT 2022 O 30 2
-6 - CST 2022 N 30
-7 u M%sT
Z America/Ojinaga -6:57:40 - LMT 1922 Ja 1 7u
-7 - MST 1927 Jun 10 23
-6 - CST 1930 N 15
@ -3141,7 +3150,8 @@ Z America/Ojinaga -6:57:40 - LMT 1922 Ja 1 7u
-6 - CST 1998 Ap Su>=1 3
-7 m M%sT 2010
-7 u M%sT 2022 O 30 2
-6 - CST
-6 - CST 2022 N 30
-6 u C%sT
Z America/Chihuahua -7:4:20 - LMT 1922 Ja 1 7u
-7 - MST 1927 Jun 10 23
-6 - CST 1930 N 15
@ -3771,7 +3781,7 @@ Z Antarctica/Palmer 0 - -00 1965
-4 x -04/-03 2016 D 4
-3 - -03
R CO 1992 o - May 3 0 1 -
R CO 1993 o - Ap 4 0 0 -
R CO 1993 o - F 6 24 0 -
Z America/Bogota -4:56:16 - LMT 1884 Mar 13
-4:56:16 - BMT 1914 N 23
-5 CO -05/-04
@ -4154,6 +4164,7 @@ L America/Tijuana America/Ensenada
L America/Indiana/Indianapolis America/Fort_Wayne
L America/Toronto America/Montreal
L America/Toronto America/Nipigon
L America/Iqaluit America/Pangnirtung
L America/Rio_Branco America/Porto_Acre
L America/Winnipeg America/Rainy_River
L America/Argentina/Cordoba America/Rosario

View file

@ -114,8 +114,7 @@ CA +4606-06447 America/Moncton Atlantic - New Brunswick
CA +5320-06025 America/Goose_Bay Atlantic - Labrador (most areas)
CA +5125-05707 America/Blanc-Sablon AST - QC (Lower North Shore)
CA +4339-07923 America/Toronto Eastern - ON, QC (most areas)
CA +6344-06828 America/Iqaluit Eastern - NU (most east areas)
CA +6608-06544 America/Pangnirtung Eastern - NU (Pangnirtung)
CA +6344-06828 America/Iqaluit Eastern - NU (most areas)
CA +484531-0913718 America/Atikokan EST - ON (Atikokan); NU (Coral H)
CA +4953-09709 America/Winnipeg Central - ON (west); Manitoba
CA +744144-0944945 America/Resolute Central - NU (Resolute)
@ -277,17 +276,18 @@ MT +3554+01431 Europe/Malta
MU -2010+05730 Indian/Mauritius
MV +0410+07330 Indian/Maldives
MW -1547+03500 Africa/Blantyre
MX +1924-09909 America/Mexico_City Central Time
MX +2105-08646 America/Cancun Eastern Standard Time - Quintana Roo
MX +2058-08937 America/Merida Central Time - Campeche, Yucatan
MX +2540-10019 America/Monterrey Central Time - Durango; Coahuila, Nuevo Leon, Tamaulipas (most areas)
MX +2550-09730 America/Matamoros Central Time US - Coahuila, Nuevo Leon, Tamaulipas (US border)
MX +2313-10625 America/Mazatlan Mountain Time - Baja California Sur, Nayarit, Sinaloa
MX +2838-10605 America/Chihuahua Mountain Time - Chihuahua (most areas)
MX +2934-10425 America/Ojinaga Mountain Time US - Chihuahua (US border)
MX +2904-11058 America/Hermosillo Mountain Standard Time - Sonora
MX +3232-11701 America/Tijuana Pacific Time US - Baja California
MX +2048-10515 America/Bahia_Banderas Central Time - Bahia de Banderas
MX +1924-09909 America/Mexico_City Central Mexico
MX +2105-08646 America/Cancun Quintana Roo
MX +2058-08937 America/Merida Campeche, Yucatan
MX +2540-10019 America/Monterrey Durango; Coahuila, Nuevo Leon, Tamaulipas (most areas)
MX +2550-09730 America/Matamoros Coahuila, Nuevo Leon, Tamaulipas (US border)
MX +2838-10605 America/Chihuahua Chihuahua (most areas)
MX +3144-10629 America/Ciudad_Juarez Chihuahua (US border - west)
MX +2934-10425 America/Ojinaga Chihuahua (US border - east)
MX +2313-10625 America/Mazatlan Baja California Sur, Nayarit (most areas), Sinaloa
MX +2048-10515 America/Bahia_Banderas Bahia de Banderas
MX +2904-11058 America/Hermosillo Sonora
MX +3232-11701 America/Tijuana Baja California
MY +0310+10142 Asia/Kuala_Lumpur Malaysia (peninsula)
MY +0133+11020 Asia/Kuching Sabah, Sarawak
MZ -2558+03235 Africa/Maputo

View file

@ -102,8 +102,7 @@ CA +4612-05957 America/Glace_Bay Atlantic - NS (Cape Breton)
CA +4606-06447 America/Moncton Atlantic - New Brunswick
CA +5320-06025 America/Goose_Bay Atlantic - Labrador (most areas)
CA,BS +4339-07923 America/Toronto Eastern - ON, QC (most areas), Bahamas
CA +6344-06828 America/Iqaluit Eastern - NU (most east areas)
CA +6608-06544 America/Pangnirtung Eastern - NU (Pangnirtung)
CA +6344-06828 America/Iqaluit Eastern - NU (most areas)
CA +4953-09709 America/Winnipeg Central - ON (west); Manitoba
CA +744144-0944945 America/Resolute Central - NU (Resolute)
CA +624900-0920459 America/Rankin_Inlet Central - NU (central)
@ -214,17 +213,18 @@ MQ +1436-06105 America/Martinique
MT +3554+01431 Europe/Malta
MU -2010+05730 Indian/Mauritius
MV,TF +0410+07330 Indian/Maldives Maldives, Kerguelen, St Paul I, Amsterdam I
MX +1924-09909 America/Mexico_City Central Time
MX +2105-08646 America/Cancun Eastern Standard Time - Quintana Roo
MX +2058-08937 America/Merida Central Time - Campeche, Yucatán
MX +2540-10019 America/Monterrey Central Time - Durango; Coahuila, Nuevo León, Tamaulipas (most areas)
MX +2550-09730 America/Matamoros Central Time US - Coahuila, Nuevo León, Tamaulipas (US border)
MX +2313-10625 America/Mazatlan Mountain Time - Baja California Sur, Nayarit, Sinaloa
MX +2838-10605 America/Chihuahua Mountain Time - Chihuahua (most areas)
MX +2934-10425 America/Ojinaga Mountain Time US - Chihuahua (US border)
MX +2904-11058 America/Hermosillo Mountain Standard Time - Sonora
MX +3232-11701 America/Tijuana Pacific Time US - Baja California
MX +2048-10515 America/Bahia_Banderas Central Time - Bahía de Banderas
MX +1924-09909 America/Mexico_City Central Mexico
MX +2105-08646 America/Cancun Quintana Roo
MX +2058-08937 America/Merida Campeche, Yucatán
MX +2540-10019 America/Monterrey Durango; Coahuila, Nuevo León, Tamaulipas (most areas)
MX +2550-09730 America/Matamoros Coahuila, Nuevo León, Tamaulipas (US border)
MX +2838-10605 America/Chihuahua Chihuahua (most areas)
MX +3144-10629 America/Ciudad_Juarez Chihuahua (US border - west)
MX +2934-10425 America/Ojinaga Chihuahua (US border - east)
MX +2313-10625 America/Mazatlan Baja California Sur, Nayarit (most areas), Sinaloa
MX +2048-10515 America/Bahia_Banderas Bahía de Banderas
MX +2904-11058 America/Hermosillo Sonora
MX +3232-11701 America/Tijuana Baja California
MY,BN +0133+11020 Asia/Kuching Sabah, Sarawak, Brunei
MZ,BI,BW,CD,MW,RW,ZM,ZW -2558+03235 Africa/Maputo Central Africa Time
NA -2234+01706 Africa/Windhoek

View file

@ -239,7 +239,6 @@ America/Edmonton
America/Vancouver
America/Dawson_Creek
America/Fort_Nelson
America/Pangnirtung
America/Iqaluit
America/Resolute
America/Rankin_Inlet
@ -253,6 +252,7 @@ America/Merida
America/Matamoros
America/Monterrey
America/Mexico_City
America/Ciudad_Juarez
America/Ojinaga
America/Chihuahua
America/Hermosillo
@ -554,6 +554,7 @@ America/Ensenada
America/Fort_Wayne
America/Montreal
America/Nipigon
America/Pangnirtung
America/Porto_Acre
America/Rainy_River
America/Rosario

View file

@ -1,2 +1,2 @@
# This file is protected via CODEOWNERS
__version__ = "1.26.12"
__version__ = "1.26.13"

View file

@ -862,7 +862,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
)
# Check if we should retry the HTTP response.
has_retry_after = bool(response.getheader("Retry-After"))
has_retry_after = bool(response.headers.get("Retry-After"))
if retries.is_retry(method, response.status, has_retry_after):
try:
retries = retries.increment(method, url, response=response, _pool=self)

View file

@ -47,10 +47,10 @@ compression in Python 2 (see `CRIME attack`_).
"""
from __future__ import absolute_import
import OpenSSL.crypto
import OpenSSL.SSL
from cryptography import x509
from cryptography.hazmat.backends.openssl import backend as openssl_backend
from cryptography.hazmat.backends.openssl.x509 import _Certificate
try:
from cryptography.x509 import UnsupportedExtension
@ -228,9 +228,8 @@ def get_subj_alt_name(peer_cert):
if hasattr(peer_cert, "to_cryptography"):
cert = peer_cert.to_cryptography()
else:
# This is technically using private APIs, but should work across all
# relevant versions before PyOpenSSL got a proper API for this.
cert = _Certificate(openssl_backend, peer_cert._x509)
der = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, peer_cert)
cert = x509.load_der_x509_certificate(der, openssl_backend)
# We want to find the SAN extension. Ask Cryptography to locate it (it's
# faster than looping in Python)

View file

@ -3,6 +3,7 @@ from __future__ import absolute_import
import io
import logging
import sys
import warnings
import zlib
from contextlib import contextmanager
from socket import error as SocketError
@ -663,9 +664,21 @@ class HTTPResponse(io.IOBase):
# Backwards-compatibility methods for http.client.HTTPResponse
def getheaders(self):
warnings.warn(
"HTTPResponse.getheaders() is deprecated and will be removed "
"in urllib3 v2.1.0. Instead access HTTResponse.headers directly.",
category=DeprecationWarning,
stacklevel=2,
)
return self.headers
def getheader(self, name, default=None):
warnings.warn(
"HTTPResponse.getheader() is deprecated and will be removed "
"in urllib3 v2.1.0. Instead use HTTResponse.headers.get(name, default).",
category=DeprecationWarning,
stacklevel=2,
)
return self.headers.get(name, default)
# Backwards compatibility for http.cookiejar

View file

@ -394,7 +394,7 @@ class Retry(object):
def get_retry_after(self, response):
"""Get the value of Retry-After in seconds."""
retry_after = response.getheader("Retry-After")
retry_after = response.headers.get("Retry-After")
if retry_after is None:
return None

View file

@ -63,7 +63,7 @@ IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT + "$")
BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$")
ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$")
_HOST_PORT_PAT = ("^(%s|%s|%s)(?::([0-9]{0,5}))?$") % (
_HOST_PORT_PAT = ("^(%s|%s|%s)(?::0*([0-9]{0,5}))?$") % (
REG_NAME_PAT,
IPV4_PAT,
IPV6_ADDRZ_PAT,

View file

@ -4,6 +4,8 @@ import zipfile
import itertools
import contextlib
import pathlib
import re
import fnmatch
from .py310compat import text_encoding
@ -243,6 +245,18 @@ class Path:
self.root = FastLookup.make(root)
self.at = at
def __eq__(self, other):
"""
>>> Path(zipfile.ZipFile(io.BytesIO(), 'w')) == 'foo'
False
"""
if self.__class__ is not other.__class__:
return NotImplemented
return (self.root, self.at) == (other.root, other.at)
def __hash__(self):
return hash((self.root, self.at))
def open(self, mode='r', *args, pwd=None, **kwargs):
"""
Open this entry as text or binary following the semantics
@ -313,6 +327,38 @@ class Path:
subs = map(self._next, self.root.namelist())
return filter(self._is_child, subs)
def match(self, path_pattern):
return pathlib.Path(self.at).match(path_pattern)
def is_symlink(self):
"""
Return whether this path is a symlink. Always false (python/cpython#82102).
"""
return False
def _descendants(self):
for child in self.iterdir():
yield child
if child.is_dir():
yield from child._descendants()
def glob(self, pattern):
if not pattern:
raise ValueError("Unacceptable pattern: {!r}".format(pattern))
matches = re.compile(fnmatch.translate(pattern)).fullmatch
return (
child
for child in self._descendants()
if matches(str(child.relative_to(self)))
)
def rglob(self, pattern):
return self.glob(f'**/{pattern}')
def relative_to(self, other, *extra):
return posixpath.relpath(str(self), str(other.joinpath(*extra)))
def __str__(self):
return posixpath.join(self.root.filename, self.at)

View file

@ -1,6 +1,6 @@
apscheduler==3.9.1.post1
importlib-metadata==5.2.0
importlib-resources==5.10.0
importlib-resources==5.10.1
pyinstaller==5.6.2
pyopenssl==22.1.0
pycryptodomex==3.15.0

View file

@ -400,6 +400,12 @@ NOTIFICATION_PARAMETERS = [
{'name': 'Player', 'type': 'str', 'value': 'player', 'description': 'The name of the player being used for playback.'},
{'name': 'Initial Stream', 'type': 'int', 'value': 'initial_stream', 'description': 'If the stream is the initial stream of a continuous streaming session.', 'example': '0 or 1'},
{'name': 'IP Address', 'type': 'str', 'value': 'ip_address', 'description': 'The IP address of the device being used for playback.'},
{'name': 'Started Datestamp', 'type': 'str', 'value': 'started_datestamp', 'description': 'The date (in date format) when the stream started.'},
{'name': 'Started Timestamp', 'type': 'str', 'value': 'started_timestamp', 'description': 'The time (in time format) when the stream started.'},
{'name': 'Started Unix Time', 'type': 'int', 'value': 'started_unixtime', 'description': 'The unix timestamp when the stream started.'},
{'name': 'Stopped Datestamp', 'type': 'str', 'value': 'stopped_datestamp', 'description': 'The date (in date format) when the stream stopped.'},
{'name': 'Stopped Timestamp', 'type': 'str', 'value': 'stopped_timestamp', 'description': 'The time (in time format) when the stream stopped.'},
{'name': 'Stopped Unix Time', 'type': 'int', 'value': 'stopped_unixtime', 'description': 'The unix timestamp when the stream stopped.'},
{'name': 'Stream Duration', 'type': 'int', 'value': 'stream_duration', 'description': 'The duration (in minutes) for the stream.'},
{'name': 'Stream Duration (sec)', 'type': 'int', 'value': 'stream_duration_sec', 'description': 'The duration (in seconds) for the stream.'},
{'name': 'Stream Time', 'type': 'str', 'value': 'stream_time', 'description': 'The duration (in time format) of the stream.'},

View file

@ -985,6 +985,12 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
'product': notify_params['product'],
'player': notify_params['player'],
'ip_address': notify_params.get('ip_address', 'N/A'),
'started_datestamp': arrow.get(notify_params['started']).format(date_format),
'started_timestamp': arrow.get(notify_params['started']).format(time_format),
'started_unixtime': notify_params['started'],
'stopped_datestamp': arrow.get(notify_params['stopped']).format(date_format) if notify_params['stopped'] else '',
'stopped_timestamp': arrow.get(notify_params['stopped']).format(time_format) if notify_params['stopped'] else '',
'stopped_unixtime': notify_params['stopped'],
'stream_duration': stream_duration,
'stream_duration_sec': stream_duration_sec,
'stream_time': arrow.get(stream_duration_sec).format(duration_format),

View file

@ -6,8 +6,8 @@ backports.functools-lru-cache==1.6.4
backports.zoneinfo==0.2.1
beautifulsoup4==4.11.1
bleach==5.0.1
certifi==2022.9.24
cheroot==8.6.0
certifi==2022.12.7
cheroot==9.0.0
cherrypy==18.8.0
cloudinary==1.30.0
distro==1.8.0
@ -19,13 +19,13 @@ html5lib==1.1
httpagentparser==1.9.5
idna==3.4
importlib-metadata==5.2.0
importlib-resources==5.10.0
importlib-resources==5.10.1
git+https://github.com/Tautulli/ipwhois.git@master#egg=ipwhois
IPy==1.01
Mako==1.2.4
MarkupSafe==2.1.1
musicbrainzngs==0.7.1
packaging==21.3
packaging==22.0
paho-mqtt==1.6.1
plexapi==4.13.1
portend==3.1.0
@ -41,15 +41,15 @@ rumps==0.4.0; platform_system == "Darwin"
simplejson==3.18.0
six==1.16.0
soupsieve==2.3.2.post1
tempora==5.0.2
tempora==5.1.0
tokenize-rt==5.0.0
tzdata==2022.6
tzdata==2022.7
tzlocal==4.2
urllib3==1.26.12
urllib3==1.26.13
webencodings==0.5.1
websocket-client==1.4.2
xmltodict==0.13.0
zipp==3.10.0
zipp==3.11.0
# configobj==5.1.0
# sgmllib3k==1.0.0