diff --git a/lib/certifi/__init__.py b/lib/certifi/__init__.py index f61d77fa..bf83fa93 100644 --- a/lib/certifi/__init__.py +++ b/lib/certifi/__init__.py @@ -1,4 +1,4 @@ from .core import contents, where __all__ = ["contents", "where"] -__version__ = "2024.08.30" +__version__ = "2025.04.26" diff --git a/lib/certifi/cacert.pem b/lib/certifi/cacert.pem index 3c165a1b..b1d0cfd8 100644 --- a/lib/certifi/cacert.pem +++ b/lib/certifi/cacert.pem @@ -1,95 +1,4 @@ -# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Label: "GlobalSign Root CA" -# Serial: 4835703278459707669005204 -# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a -# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c -# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 ------BEGIN CERTIFICATE----- -MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG -A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv -b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw -MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i -YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT -aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ -jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp -xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp -1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG -snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ -U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 -9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E -BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B -AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz -yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE -38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP -AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad -DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME -HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== ------END CERTIFICATE----- - -# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited -# Label: "Entrust.net Premium 2048 Secure Server CA" -# Serial: 946069240 -# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 -# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 -# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML -RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp -bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 -IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 -MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 -LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp -YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG -A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq -K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe -sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX -MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT -XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ -HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH -4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub -j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo -U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf -zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b -u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ -bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er -fF6adulZkMV8gzURZVE= ------END CERTIFICATE----- - -# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Label: "Baltimore CyberTrust Root" -# Serial: 33554617 -# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 -# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 -# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ -RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD -VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX -DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y -ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy -VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr -mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr -IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK -mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu -XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy -dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye -jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 -BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 -DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 -9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx -jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 -Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz 
-ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS -R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp ------END CERTIFICATE----- - # Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. # Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. # Label: "Entrust Root Certification Authority" @@ -125,39 +34,6 @@ eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m 0vdXcDazv/wor3ElhVsT/h5/WrQ8 -----END CERTIFICATE----- -# Issuer: CN=AAA Certificate Services O=Comodo CA Limited -# Subject: CN=AAA Certificate Services O=Comodo CA Limited -# Label: "Comodo AAA Services root" -# Serial: 1 -# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 -# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 -# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 ------BEGIN CERTIFICATE----- -MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj -YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM -GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua -BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe -3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 -YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR -rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm -ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU -oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF -MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v -QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t -b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF -AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q -GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz -Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 -G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi -l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 -smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== ------END CERTIFICATE----- - # Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited # Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited # Label: "QuoVadis Root CA 2" @@ -245,103 +121,6 @@ mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK 4SVhM7JZG+Ju1zdXtg2pEto= -----END CERTIFICATE----- -# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Label: "XRamp Global CA Root" -# Serial: 107108908803651509692980124233745014957 -# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 -# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 -# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 ------BEGIN CERTIFICATE----- -MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB 
-gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk -MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY -UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx -NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 -dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy -dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 -38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP -KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q -DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 -qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa -JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi -PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P -BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs -jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 -eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD -ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR -vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt -qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa -IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy -i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ -O+7ETPTsJ3xCwnR8gooJybQDJbw= ------END CERTIFICATE----- - -# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Label: "Go Daddy Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 -# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 -# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 ------BEGIN CERTIFICATE----- -MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh -MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE -YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 -MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo -ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg -MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN -ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA -PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w -wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi -EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY -avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ -YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE -sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h -/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 -IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD -ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy -OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P -TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ -HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER -dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf -ReYNnyicsbkqWletNw+vHX/bvZ8= ------END CERTIFICATE----- - -# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority -# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority -# Label: "Starfield Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 -# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a -# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 ------BEGIN CERTIFICATE----- -MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl -MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp -U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw -NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE -ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp -ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 -DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf -8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN -+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 -X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa -K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA -1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G -A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR -zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 -YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD -bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w -DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 -L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D -eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl -xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp -VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY -WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= ------END CERTIFICATE----- - # Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com # Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com # Label: "DigiCert Assured ID Root CA" @@ -474,47 +253,6 @@ ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ -----END CERTIFICATE----- -# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Label: "SwissSign Silver CA - G2" -# Serial: 5700383053117599563 -# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 -# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb -# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 ------BEGIN CERTIFICATE----- -MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE -BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu -IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow -RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY -U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A -MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv -Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br -YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF -nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH -6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt -eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ -c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ -MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH 
-HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf -jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 -5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB -rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU -F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c -wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 -cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB -AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp -WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 -xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ -2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ -IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 -aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X -em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR -dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ -OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ -hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy -tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u ------END CERTIFICATE----- - # Issuer: CN=SecureTrust CA O=SecureTrust Corporation # Subject: CN=SecureTrust CA O=SecureTrust Corporation # Label: "SecureTrust CA" @@ -763,35 +501,6 @@ uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= -----END CERTIFICATE----- -# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. -# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. -# Label: "SecureSign RootCA11" -# Serial: 1 -# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 -# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 -# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 ------BEGIN CERTIFICATE----- -MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr -MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG -A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 -MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp -Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD -QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz -i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 -h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV -MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 -UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni -8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC -h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD -VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB -AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm -KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ -X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr -QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 -pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN -QSdJQO7e5iNEOdyhIta6A/I= ------END CERTIFICATE----- - # Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. # Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
# Label: "Microsec e-Szigno Root CA 2009" @@ -3100,50 +2809,6 @@ LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG mpv0 -----END CERTIFICATE----- -# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only -# Label: "Entrust Root Certification Authority - G4" -# Serial: 289383649854506086828220374796556676440 -# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 -# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 -# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 ------BEGIN CERTIFICATE----- -MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw -gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL -Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg -MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw -BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 -MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT -MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 -c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ -bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg -Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B -AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ -2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E -T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j -5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM -C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T -DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX -wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A -2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm -nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 -dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl -N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj -c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD -VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS -5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS -Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr -hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ -B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI -AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw -H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ -b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk -2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol -IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk -5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY -n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== ------END CERTIFICATE----- - # Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation # Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation # Label: "Microsoft ECC Root Certificate Authority 2017" @@ -3485,6 +3150,46 @@ DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ +RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= -----END CERTIFICATE----- +# Issuer: 
CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Label: "GLOBALTRUST 2020" +# Serial: 109160994242082918454945253 +# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8 +# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2 +# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a +-----BEGIN CERTIFICATE----- +MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG +A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw +FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx +MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u +aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq +hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b +RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z +YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3 +QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw +yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+ +BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ +SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH +r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0 +4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me +dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw +q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2 +nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu +H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA +VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC +XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd +6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf ++I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi +kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7 +wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB +TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C +MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn +4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I +aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy +qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg== +-----END CERTIFICATE----- + # Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz # Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz # Label: "ANF Secure Server Root CA" @@ -4214,46 +3919,6 @@ ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR -----END CERTIFICATE----- -# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. -# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. 
-# Label: "Security Communication RootCA3" -# Serial: 16247922307909811815 -# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26 -# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a -# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94 ------BEGIN CERTIFICATE----- -MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNV -BAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScw -JQYDVQQDEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2 -MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc -U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UEAxMeU2VjdXJpdHkg -Q29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4r -CmDvu20rhvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzA -lrenfna84xtSGc4RHwsENPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MG -TfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF7 -9+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGmnpjKIG58u4iFW/vAEGK7 -8vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtYXLVqAvO4 -g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3we -GVPKp7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst -+3A7caoreyYn8xrC3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M -0V9hvqG8OmpI6iZVIhZdXw3/JzOfGAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQ -T9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0VcwCBEF/VfR2ccCAwEAAaNCMEAw -HQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB/wQEAwIBBjAP -BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS -YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PA -FNr0Y/Dq9HHuTofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd -9XbXv8S2gVj/yP9kaWJ5rW4OH3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQI -UYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASxYfQAW0q3nHE3GYV5v4GwxxMOdnE+ -OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZXSEIx2C/pHF7uNke -gr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml+LLf -iAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUV -nuiZIesnKwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD -2NCcnWXL0CsnMQMeNuE9dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI// -1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm6Vwdp6POXiUyK+OVrCoHzrQoeIY8Laad -TdJ0MN1kURXbg4NR16/9M51NZg== ------END CERTIFICATE----- - # Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. # Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. 
# Label: "Security Communication ECC RootCA1" @@ -4927,3 +4592,85 @@ Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT 4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6 bkU6iYAZezKYVWOr62Nuk22rGwlgMU4= -----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 2 2023" +# Serial: 153168538924886464690566649552453098598 +# MD5 Fingerprint: e1:09:ed:d3:60:d4:56:1b:47:1f:b7:0c:5f:1b:5f:85 +# SHA1 Fingerprint: 2d:b0:70:ee:71:94:af:69:68:17:db:79:ce:58:9f:a0:6b:96:f7:87 +# SHA256 Fingerprint: 05:52:e6:f8:3f:df:65:e8:fa:96:70:e6:66:df:28:a4:e2:13:40:b5:10:cb:e5:25:66:f9:7c:4f:b9:4b:2b:d1 +-----BEGIN CERTIFICATE----- +MIIFqTCCA5GgAwIBAgIQczswBEhb2U14LnNLyaHcZjANBgkqhkiG9w0BAQ0FADBI +MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE +LVRSVVNUIEJSIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA4NTYzMVoXDTM4MDUw +OTA4NTYzMFowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi +MCAGA1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAK7/CVmRgApKaOYkP7in5Mg6CjoWzckjYaCTcfKr +i3OPoGdlYNJUa2NRb0kz4HIHE304zQaSBylSa053bATTlfrdTIzZXcFhfUvnKLNE +gXtRr90zsWh81k5M/itoucpmacTsXld/9w3HnDY25QdgrMBM6ghs7wZ8T1soegj8 +k12b9py0i4a6Ibn08OhZWiihNIQaJZG2tY/vsvmA+vk9PBFy2OMvhnbFeSzBqZCT +Rphny4NqoFAjpzv2gTng7fC5v2Xx2Mt6++9zA84A9H3X4F07ZrjcjrqDy4d2A/wl +2ecjbwb9Z/Pg/4S8R7+1FhhGaRTMBffb00msa8yr5LULQyReS2tNZ9/WtT5PeB+U +cSTq3nD88ZP+npNa5JRal1QMNXtfbO4AHyTsA7oC9Xb0n9Sa7YUsOCIvx9gvdhFP +/Wxc6PWOJ4d/GUohR5AdeY0cW/jPSoXk7bNbjb7EZChdQcRurDhaTyN0dKkSw/bS +uREVMweR2Ds3OmMwBtHFIjYoYiMQ4EbMl6zWK11kJNXuHA7e+whadSr2Y23OC0K+ +0bpwHJwh5Q8xaRfX/Aq03u2AnMuStIv13lmiWAmlY0cL4UEyNEHZmrHZqLAbWt4N +DfTisl01gLmB1IRpkQLLddCNxbU9CZEJjxShFHR5PtbJFR2kWVki3PaKRT08EtY+ +XTIvAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUZ5Dw1t61 +GNVGKX5cq/ieCLxklRAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG +OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfYnJfcm9vdF9jYV8y +XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQA097N3U9swFrktpSHxQCF16+tI +FoE9c+CeJyrrd6kTpGoKWloUMz1oH4Guaf2Mn2VsNELZLdB/eBaxOqwjMa1ef67n +riv6uvw8l5VAk1/DLQOj7aRvU9f6QA4w9QAgLABMjDu0ox+2v5Eyq6+SmNMW5tTR +VFxDWy6u71cqqLRvpO8NVhTaIasgdp4D/Ca4nj8+AybmTNudX0KEPUUDAxxZiMrc +LmEkWqTqJwtzEr5SswrPMhfiHocaFpVIbVrg0M8JkiZmkdijYQ6qgYF/6FKC0ULn +4B0Y+qSFNueG4A3rvNTJ1jxD8V1Jbn6Bm2m1iWKPiFLY1/4nwSPFyysCu7Ff/vtD +hQNGvl3GyiEm/9cCnnRK3PgTFbGBVzbLZVzRHTF36SXDw7IyN9XxmAnkbWOACKsG +koHU6XCPpz+y7YaMgmo1yEJagtFSGkUPFaUA8JR7ZSdXOUPPfH/mvTWze/EZTN46 +ls/pdu4D58JDUjxqgejBWoC9EV2Ta/vH5mQ/u2kc6d0li690yVRAysuTEwrt+2aS +Ecr1wPrYg1UDfNPFIkZ1cGt5SAYqgpq/5usWDiJFAbzdNpQ0qTUmiteXue4Icr80 +knCDgKs4qllo3UCkGJCy89UDyibK79XH4I9TjvAA46jtn/mtd+ArY0+ew+43u3gJ +hJ65bvspmZDogNOfJA== +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 2 2023" +# Serial: 139766439402180512324132425437959641711 +# MD5 Fingerprint: 96:b4:78:09:f0:09:cb:77:eb:bb:1b:4d:6f:36:bc:b6 +# SHA1 Fingerprint: a5:5b:d8:47:6c:8f:19:f7:4c:f4:6d:6b:b6:c2:79:82:22:df:54:8b +# SHA256 Fingerprint: 8e:82:21:b2:e7:d4:00:78:36:a1:67:2f:0d:cc:29:9c:33:bc:07:d3:16:f1:32:fa:1a:20:6d:58:71:50:f1:ce +-----BEGIN CERTIFICATE----- +MIIFqTCCA5GgAwIBAgIQaSYJfoBLTKCnjHhiU19abzANBgkqhkiG9w0BAQ0FADBI +MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE +LVRSVVNUIEVWIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA5MTAzM1oXDTM4MDUw +OTA5MTAzMlowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi 
+MCAGA1UEAxMZRC1UUlVTVCBFViBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBANiOo4mAC7JXUtypU0w3uX9jFxPvp1sjW2l1sJkK +F8GLxNuo4MwxusLyzV3pt/gdr2rElYfXR8mV2IIEUD2BCP/kPbOx1sWy/YgJ25yE +7CUXFId/MHibaljJtnMoPDT3mfd/06b4HEV8rSyMlD/YZxBTfiLNTiVR8CUkNRFe +EMbsh2aJgWi6zCudR3Mfvc2RpHJqnKIbGKBv7FD0fUDCqDDPvXPIEysQEx6Lmqg6 +lHPTGGkKSv/BAQP/eX+1SH977ugpbzZMlWGG2Pmic4ruri+W7mjNPU0oQvlFKzIb +RlUWaqZLKfm7lVa/Rh3sHZMdwGWyH6FDrlaeoLGPaxK3YG14C8qKXO0elg6DpkiV +jTujIcSuWMYAsoS0I6SWhjW42J7YrDRJmGOVxcttSEfi8i4YHtAxq9107PncjLgc +jmgjutDzUNzPZY9zOjLHfP7KgiJPvo5iR2blzYfi6NUPGJ/lBHJLRjwQ8kTCZFZx +TnXonMkmdMV9WdEKWw9t/p51HBjGGjp82A0EzM23RWV6sY+4roRIPrN6TagD4uJ+ +ARZZaBhDM7DS3LAaQzXupdqpRlyuhoFBAUp0JuyfBr/CBTdkdXgpaP3F9ev+R/nk +hbDhezGdpn9yo7nELC7MmVcOIQxFAZRl62UJxmMiCzNJkkg8/M3OsD6Onov4/knF +NXJHAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUqvyREBuH +kV8Wub9PS5FeAByxMoAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG +OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfZXZfcm9vdF9jYV8y +XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQCTy6UfmRHsmg1fLBWTxj++EI14 +QvBukEdHjqOSMo1wj/Zbjb6JzkcBahsgIIlbyIIQbODnmaprxiqgYzWRaoUlrRc4 +pZt+UPJ26oUFKidBK7GB0aL2QHWpDsvxVUjY7NHss+jOFKE17MJeNRqrphYBBo7q +3C+jisosketSjl8MmxfPy3MHGcRqwnNU73xDUmPBEcrCRbH0O1P1aa4846XerOhU +t7KR/aypH/KH5BfGSah82ApB9PI+53c0BFLd6IHyTS9URZ0V4U/M5d40VxDJI3IX +cI1QcB9WbMy5/zpaT2N6w25lBx2Eof+pDGOJbbJAiDnXH3dotfyc1dZnaVuodNv8 +ifYbMvekJKZ2t0dT741Jj6m2g1qllpBFYfXeA08mD6iL8AOWsKwV0HFaanuU5nCT +2vFp4LJiTZ6P/4mdm13NRemUAiKN4DV/6PEEeXFsVIP4M7kFMhtYVRFP0OUnR3Hs +7dpn1mKmS00PaaLJvOwiS5THaJQXfuKOKD62xur1NGyfN4gHONuGcfrNlUhDbqNP +gofXNJhuS5N5YHVpD/Aa1VP6IQzCP+k/HxiMkl14p3ZnGbuy6n/pcAlWVqOwDAst +Nl7F6cTVg8uGF5csbBNvh1qvSaYd2804BC5f4ko1Di1L+KIkBI3Y4WNeApI02phh +XBxvWHZks/wCuPWdCg== +-----END CERTIFICATE----- diff --git a/lib/charset_normalizer/__init__.py b/lib/charset_normalizer/__init__.py index 55991fc3..0d3a3799 100644 --- a/lib/charset_normalizer/__init__.py +++ b/lib/charset_normalizer/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Charset-Normalizer ~~~~~~~~~~~~~~ @@ -19,6 +18,9 @@ at . :copyright: (c) 2021 by Ahmed TAHRI :license: MIT, see LICENSE for more details. 
""" + +from __future__ import annotations + import logging from .api import from_bytes, from_fp, from_path, is_binary diff --git a/lib/charset_normalizer/__main__.py b/lib/charset_normalizer/__main__.py index beae2ef7..e0e76f7b 100644 --- a/lib/charset_normalizer/__main__.py +++ b/lib/charset_normalizer/__main__.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from .cli import cli_detect if __name__ == "__main__": diff --git a/lib/charset_normalizer/api.py b/lib/charset_normalizer/api.py index e3f2283b..2c8c0618 100644 --- a/lib/charset_normalizer/api.py +++ b/lib/charset_normalizer/api.py @@ -1,6 +1,8 @@ +from __future__ import annotations + import logging from os import PathLike -from typing import BinaryIO, List, Optional, Set, Union +from typing import BinaryIO from .cd import ( coherence_ratio, @@ -21,8 +23,6 @@ from .utils import ( should_strip_sig_or_bom, ) -# Will most likely be controversial -# logging.addLevelName(TRACE, "TRACE") logger = logging.getLogger("charset_normalizer") explain_handler = logging.StreamHandler() explain_handler.setFormatter( @@ -31,12 +31,12 @@ explain_handler.setFormatter( def from_bytes( - sequences: Union[bytes, bytearray], + sequences: bytes | bytearray, steps: int = 5, chunk_size: int = 512, threshold: float = 0.2, - cp_isolation: Optional[List[str]] = None, - cp_exclusion: Optional[List[str]] = None, + cp_isolation: list[str] | None = None, + cp_exclusion: list[str] | None = None, preemptive_behaviour: bool = True, explain: bool = False, language_threshold: float = 0.1, @@ -62,7 +62,7 @@ def from_bytes( if not isinstance(sequences, (bytearray, bytes)): raise TypeError( - "Expected object of type bytes or bytearray, got: {0}".format( + "Expected object of type bytes or bytearray, got: {}".format( type(sequences) ) ) @@ -76,7 +76,7 @@ def from_bytes( if length == 0: logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.") - if explain: + if explain: # Defensive: ensure exit path clean handler logger.removeHandler(explain_handler) logger.setLevel(previous_logger_level or logging.WARNING) return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")]) @@ -135,9 +135,9 @@ def from_bytes( ), ) - prioritized_encodings: List[str] = [] + prioritized_encodings: list[str] = [] - specified_encoding: Optional[str] = ( + specified_encoding: str | None = ( any_specified_encoding(sequences) if preemptive_behaviour else None ) @@ -149,13 +149,13 @@ def from_bytes( specified_encoding, ) - tested: Set[str] = set() - tested_but_hard_failure: List[str] = [] - tested_but_soft_failure: List[str] = [] + tested: set[str] = set() + tested_but_hard_failure: list[str] = [] + tested_but_soft_failure: list[str] = [] - fallback_ascii: Optional[CharsetMatch] = None - fallback_u8: Optional[CharsetMatch] = None - fallback_specified: Optional[CharsetMatch] = None + fallback_ascii: CharsetMatch | None = None + fallback_u8: CharsetMatch | None = None + fallback_specified: CharsetMatch | None = None results: CharsetMatches = CharsetMatches() @@ -189,7 +189,7 @@ def from_bytes( tested.add(encoding_iana) - decoded_payload: Optional[str] = None + decoded_payload: str | None = None bom_or_sig_available: bool = sig_encoding == encoding_iana strip_sig_or_bom: bool = bom_or_sig_available and should_strip_sig_or_bom( encoding_iana @@ -292,7 +292,7 @@ def from_bytes( early_stop_count: int = 0 lazy_str_hard_failure = False - md_chunks: List[str] = [] + md_chunks: list[str] = [] md_ratios = [] try: @@ -397,7 +397,7 @@ def from_bytes( ) if not 
is_multi_byte_decoder: - target_languages: List[str] = encoding_languages(encoding_iana) + target_languages: list[str] = encoding_languages(encoding_iana) else: target_languages = mb_encoding_languages(encoding_iana) @@ -462,7 +462,7 @@ def from_bytes( "Encoding detection: %s is most likely the one.", current_match.encoding, ) - if explain: + if explain: # Defensive: ensure exit path clean handler logger.removeHandler(explain_handler) logger.setLevel(previous_logger_level) return CharsetMatches([current_match]) @@ -480,7 +480,7 @@ def from_bytes( "Encoding detection: %s is most likely the one.", probable_result.encoding, ) - if explain: + if explain: # Defensive: ensure exit path clean handler logger.removeHandler(explain_handler) logger.setLevel(previous_logger_level) @@ -492,7 +492,7 @@ def from_bytes( "the beginning of the sequence.", encoding_iana, ) - if explain: + if explain: # Defensive: ensure exit path clean handler logger.removeHandler(explain_handler) logger.setLevel(previous_logger_level) return CharsetMatches([results[encoding_iana]]) @@ -546,8 +546,8 @@ def from_fp( steps: int = 5, chunk_size: int = 512, threshold: float = 0.20, - cp_isolation: Optional[List[str]] = None, - cp_exclusion: Optional[List[str]] = None, + cp_isolation: list[str] | None = None, + cp_exclusion: list[str] | None = None, preemptive_behaviour: bool = True, explain: bool = False, language_threshold: float = 0.1, @@ -572,12 +572,12 @@ def from_fp( def from_path( - path: Union[str, bytes, PathLike], # type: ignore[type-arg] + path: str | bytes | PathLike, # type: ignore[type-arg] steps: int = 5, chunk_size: int = 512, threshold: float = 0.20, - cp_isolation: Optional[List[str]] = None, - cp_exclusion: Optional[List[str]] = None, + cp_isolation: list[str] | None = None, + cp_exclusion: list[str] | None = None, preemptive_behaviour: bool = True, explain: bool = False, language_threshold: float = 0.1, @@ -603,12 +603,12 @@ def from_path( def is_binary( - fp_or_path_or_payload: Union[PathLike, str, BinaryIO, bytes], # type: ignore[type-arg] + fp_or_path_or_payload: PathLike | str | BinaryIO | bytes, # type: ignore[type-arg] steps: int = 5, chunk_size: int = 512, threshold: float = 0.20, - cp_isolation: Optional[List[str]] = None, - cp_exclusion: Optional[List[str]] = None, + cp_isolation: list[str] | None = None, + cp_exclusion: list[str] | None = None, preemptive_behaviour: bool = True, explain: bool = False, language_threshold: float = 0.1, diff --git a/lib/charset_normalizer/cd.py b/lib/charset_normalizer/cd.py index 4ea6760c..71a3ed51 100644 --- a/lib/charset_normalizer/cd.py +++ b/lib/charset_normalizer/cd.py @@ -1,8 +1,10 @@ +from __future__ import annotations + import importlib from codecs import IncrementalDecoder from collections import Counter from functools import lru_cache -from typing import Counter as TypeCounter, Dict, List, Optional, Tuple +from typing import Counter as TypeCounter from .constant import ( FREQUENCIES, @@ -22,26 +24,24 @@ from .utils import ( ) -def encoding_unicode_range(iana_name: str) -> List[str]: +def encoding_unicode_range(iana_name: str) -> list[str]: """ Return associated unicode ranges in a single byte code page. 
""" if is_multi_byte_encoding(iana_name): - raise IOError("Function not supported on multi-byte code page") + raise OSError("Function not supported on multi-byte code page") - decoder = importlib.import_module( - "encodings.{}".format(iana_name) - ).IncrementalDecoder + decoder = importlib.import_module(f"encodings.{iana_name}").IncrementalDecoder p: IncrementalDecoder = decoder(errors="ignore") - seen_ranges: Dict[str, int] = {} + seen_ranges: dict[str, int] = {} character_count: int = 0 for i in range(0x40, 0xFF): chunk: str = p.decode(bytes([i])) if chunk: - character_range: Optional[str] = unicode_range(chunk) + character_range: str | None = unicode_range(chunk) if character_range is None: continue @@ -61,11 +61,11 @@ def encoding_unicode_range(iana_name: str) -> List[str]: ) -def unicode_range_languages(primary_range: str) -> List[str]: +def unicode_range_languages(primary_range: str) -> list[str]: """ Return inferred languages used with a unicode range. """ - languages: List[str] = [] + languages: list[str] = [] for language, characters in FREQUENCIES.items(): for character in characters: @@ -77,13 +77,13 @@ def unicode_range_languages(primary_range: str) -> List[str]: @lru_cache() -def encoding_languages(iana_name: str) -> List[str]: +def encoding_languages(iana_name: str) -> list[str]: """ Single-byte encoding language association. Some code page are heavily linked to particular language(s). This function does the correspondence. """ - unicode_ranges: List[str] = encoding_unicode_range(iana_name) - primary_range: Optional[str] = None + unicode_ranges: list[str] = encoding_unicode_range(iana_name) + primary_range: str | None = None for specified_range in unicode_ranges: if "Latin" not in specified_range: @@ -97,7 +97,7 @@ def encoding_languages(iana_name: str) -> List[str]: @lru_cache() -def mb_encoding_languages(iana_name: str) -> List[str]: +def mb_encoding_languages(iana_name: str) -> list[str]: """ Multi-byte encoding language association. Some code page are heavily linked to particular language(s). This function does the correspondence. @@ -118,7 +118,7 @@ def mb_encoding_languages(iana_name: str) -> List[str]: @lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT) -def get_target_features(language: str) -> Tuple[bool, bool]: +def get_target_features(language: str) -> tuple[bool, bool]: """ Determine main aspects from a supported language if it contains accents and if is pure Latin. """ @@ -135,12 +135,12 @@ def get_target_features(language: str) -> Tuple[bool, bool]: def alphabet_languages( - characters: List[str], ignore_non_latin: bool = False -) -> List[str]: + characters: list[str], ignore_non_latin: bool = False +) -> list[str]: """ Return associated languages associated to given characters. """ - languages: List[Tuple[str, float]] = [] + languages: list[tuple[str, float]] = [] source_have_accents = any(is_accentuated(character) for character in characters) @@ -170,7 +170,7 @@ def alphabet_languages( def characters_popularity_compare( - language: str, ordered_characters: List[str] + language: str, ordered_characters: list[str] ) -> float: """ Determine if a ordered characters list (by occurrence from most appearance to rarest) match a particular language. @@ -178,7 +178,7 @@ def characters_popularity_compare( Beware that is function is not strict on the match in order to ease the detection. (Meaning close match is 1.) 
""" if language not in FREQUENCIES: - raise ValueError("{} not available".format(language)) + raise ValueError(f"{language} not available") character_approved_count: int = 0 FREQUENCIES_language_set = set(FREQUENCIES[language]) @@ -214,14 +214,14 @@ def characters_popularity_compare( character_approved_count += 1 continue - characters_before_source: List[str] = FREQUENCIES[language][ + characters_before_source: list[str] = FREQUENCIES[language][ 0:character_rank_in_language ] - characters_after_source: List[str] = FREQUENCIES[language][ + characters_after_source: list[str] = FREQUENCIES[language][ character_rank_in_language: ] - characters_before: List[str] = ordered_characters[0:character_rank] - characters_after: List[str] = ordered_characters[character_rank:] + characters_before: list[str] = ordered_characters[0:character_rank] + characters_after: list[str] = ordered_characters[character_rank:] before_match_count: int = len( set(characters_before) & set(characters_before_source) @@ -249,24 +249,24 @@ def characters_popularity_compare( return character_approved_count / len(ordered_characters) -def alpha_unicode_split(decoded_sequence: str) -> List[str]: +def alpha_unicode_split(decoded_sequence: str) -> list[str]: """ Given a decoded text sequence, return a list of str. Unicode range / alphabet separation. Ex. a text containing English/Latin with a bit a Hebrew will return two items in the resulting list; One containing the latin letters and the other hebrew. """ - layers: Dict[str, str] = {} + layers: dict[str, str] = {} for character in decoded_sequence: if character.isalpha() is False: continue - character_range: Optional[str] = unicode_range(character) + character_range: str | None = unicode_range(character) if character_range is None: continue - layer_target_range: Optional[str] = None + layer_target_range: str | None = None for discovered_range in layers: if ( @@ -288,12 +288,12 @@ def alpha_unicode_split(decoded_sequence: str) -> List[str]: return list(layers.values()) -def merge_coherence_ratios(results: List[CoherenceMatches]) -> CoherenceMatches: +def merge_coherence_ratios(results: list[CoherenceMatches]) -> CoherenceMatches: """ This function merge results previously given by the function coherence_ratio. The return type is the same as coherence_ratio. """ - per_language_ratios: Dict[str, List[float]] = {} + per_language_ratios: dict[str, list[float]] = {} for result in results: for sub_result in result: language, ratio = sub_result @@ -321,7 +321,7 @@ def filter_alt_coherence_matches(results: CoherenceMatches) -> CoherenceMatches: We shall NOT return "English—" in CoherenceMatches because it is an alternative of "English". This function only keeps the best match and remove the em-dash in it. """ - index_results: Dict[str, List[float]] = dict() + index_results: dict[str, list[float]] = dict() for result in results: language, ratio = result @@ -345,14 +345,14 @@ def filter_alt_coherence_matches(results: CoherenceMatches) -> CoherenceMatches: @lru_cache(maxsize=2048) def coherence_ratio( - decoded_sequence: str, threshold: float = 0.1, lg_inclusion: Optional[str] = None + decoded_sequence: str, threshold: float = 0.1, lg_inclusion: str | None = None ) -> CoherenceMatches: """ Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers. A layer = Character extraction by alphabets/ranges. 
""" - results: List[Tuple[str, float]] = [] + results: list[tuple[str, float]] = [] ignore_non_latin: bool = False sufficient_match_count: int = 0 @@ -371,7 +371,7 @@ def coherence_ratio( if character_count <= TOO_SMALL_SEQUENCE: continue - popular_character_ordered: List[str] = [c for c, o in most_common] + popular_character_ordered: list[str] = [c for c, o in most_common] for language in lg_inclusion_list or alphabet_languages( popular_character_ordered, ignore_non_latin diff --git a/lib/charset_normalizer/cli/__init__.py b/lib/charset_normalizer/cli/__init__.py index d95fedfe..543a5a4d 100644 --- a/lib/charset_normalizer/cli/__init__.py +++ b/lib/charset_normalizer/cli/__init__.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from .__main__ import cli_detect, query_yes_no __all__ = ( diff --git a/lib/charset_normalizer/cli/__main__.py b/lib/charset_normalizer/cli/__main__.py index e7edd0fc..cb64156a 100644 --- a/lib/charset_normalizer/cli/__main__.py +++ b/lib/charset_normalizer/cli/__main__.py @@ -1,9 +1,11 @@ +from __future__ import annotations + import argparse import sys +import typing from json import dumps from os.path import abspath, basename, dirname, join, realpath from platform import python_version -from typing import List, Optional from unicodedata import unidata_version import charset_normalizer.md as md_module @@ -42,10 +44,69 @@ def query_yes_no(question: str, default: str = "yes") -> bool: elif choice in valid: return valid[choice] else: - sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n") + sys.stdout.write("Please respond with 'yes' or 'no' (or 'y' or 'n').\n") -def cli_detect(argv: Optional[List[str]] = None) -> int: +class FileType: + """Factory for creating file object types + + Instances of FileType are typically passed as type= arguments to the + ArgumentParser add_argument() method. + + Keyword Arguments: + - mode -- A string indicating how the file is to be opened. Accepts the + same values as the builtin open() function. + - bufsize -- The file's desired buffer size. Accepts the same values as + the builtin open() function. + - encoding -- The file's encoding. Accepts the same values as the + builtin open() function. + - errors -- A string indicating how encoding and decoding errors are to + be handled. Accepts the same value as the builtin open() function. 
+ + Backported from CPython 3.12 + """ + + def __init__( + self, + mode: str = "r", + bufsize: int = -1, + encoding: str | None = None, + errors: str | None = None, + ): + self._mode = mode + self._bufsize = bufsize + self._encoding = encoding + self._errors = errors + + def __call__(self, string: str) -> typing.IO: # type: ignore[type-arg] + # the special argument "-" means sys.std{in,out} + if string == "-": + if "r" in self._mode: + return sys.stdin.buffer if "b" in self._mode else sys.stdin + elif any(c in self._mode for c in "wax"): + return sys.stdout.buffer if "b" in self._mode else sys.stdout + else: + msg = f'argument "-" with mode {self._mode}' + raise ValueError(msg) + + # all other arguments are used as file names + try: + return open(string, self._mode, self._bufsize, self._encoding, self._errors) + except OSError as e: + message = f"can't open '{string}': {e}" + raise argparse.ArgumentTypeError(message) + + def __repr__(self) -> str: + args = self._mode, self._bufsize + kwargs = [("encoding", self._encoding), ("errors", self._errors)] + args_str = ", ".join( + [repr(arg) for arg in args if arg != -1] + + [f"{kw}={arg!r}" for kw, arg in kwargs if arg is not None] + ) + return f"{type(self).__name__}({args_str})" + + +def cli_detect(argv: list[str] | None = None) -> int: """ CLI assistant using ARGV and ArgumentParser :param argv: @@ -58,7 +119,7 @@ def cli_detect(argv: Optional[List[str]] = None) -> int: ) parser.add_argument( - "files", type=argparse.FileType("rb"), nargs="+", help="File(s) to be analysed" + "files", type=FileType("rb"), nargs="+", help="File(s) to be analysed" ) parser.add_argument( "-v", @@ -124,7 +185,7 @@ def cli_detect(argv: Optional[List[str]] = None) -> int: default=0.2, type=float, dest="threshold", - help="Define a custom maximum amount of chaos allowed in decoded content. 0. <= chaos <= 1.", + help="Define a custom maximum amount of noise allowed in decoded content. 0. 
<= noise <= 1.", ) parser.add_argument( "--version", @@ -259,7 +320,7 @@ def cli_detect(argv: Optional[List[str]] = None) -> int: dir_path = dirname(realpath(my_file.name)) file_name = basename(realpath(my_file.name)) - o_: List[str] = file_name.split(".") + o_: list[str] = file_name.split(".") if args.replace is False: o_.insert(-1, best_guess.encoding) @@ -284,7 +345,7 @@ def cli_detect(argv: Optional[List[str]] = None) -> int: with open(x_[0].unicode_path, "wb") as fp: fp.write(best_guess.output()) - except IOError as e: + except OSError as e: print(str(e), file=sys.stderr) if my_file.closed is False: my_file.close() diff --git a/lib/charset_normalizer/constant.py b/lib/charset_normalizer/constant.py index f8f2a811..cc71a019 100644 --- a/lib/charset_normalizer/constant.py +++ b/lib/charset_normalizer/constant.py @@ -1,11 +1,12 @@ -# -*- coding: utf-8 -*- +from __future__ import annotations + from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE from encodings.aliases import aliases -from re import IGNORECASE, compile as re_compile -from typing import Dict, List, Set, Union +from re import IGNORECASE +from re import compile as re_compile # Contain for each eligible encoding a list of/item bytes SIG/BOM -ENCODING_MARKS: Dict[str, Union[bytes, List[bytes]]] = { +ENCODING_MARKS: dict[str, bytes | list[bytes]] = { "utf_8": BOM_UTF8, "utf_7": [ b"\x2b\x2f\x76\x38", @@ -25,7 +26,7 @@ TOO_BIG_SEQUENCE: int = int(10e6) UTF8_MAXIMAL_ALLOCATION: int = 1_112_064 # Up-to-date Unicode ucd/15.0.0 -UNICODE_RANGES_COMBINED: Dict[str, range] = { +UNICODE_RANGES_COMBINED: dict[str, range] = { "Control character": range(32), "Basic Latin": range(32, 128), "Latin-1 Supplement": range(128, 256), @@ -357,7 +358,7 @@ UNICODE_RANGES_COMBINED: Dict[str, range] = { } -UNICODE_SECONDARY_RANGE_KEYWORD: List[str] = [ +UNICODE_SECONDARY_RANGE_KEYWORD: list[str] = [ "Supplement", "Extended", "Extensions", @@ -392,7 +393,7 @@ IANA_NO_ALIASES = [ "koi8_u", ] -IANA_SUPPORTED: List[str] = sorted( +IANA_SUPPORTED: list[str] = sorted( filter( lambda x: x.endswith("_codec") is False and x not in {"rot_13", "tactis", "mbcs"}, @@ -403,7 +404,7 @@ IANA_SUPPORTED: List[str] = sorted( IANA_SUPPORTED_COUNT: int = len(IANA_SUPPORTED) # pre-computed code page that are similar using the function cp_similarity. 
-IANA_SUPPORTED_SIMILAR: Dict[str, List[str]] = { +IANA_SUPPORTED_SIMILAR: dict[str, list[str]] = { "cp037": ["cp1026", "cp1140", "cp273", "cp500"], "cp1026": ["cp037", "cp1140", "cp273", "cp500"], "cp1125": ["cp866"], @@ -492,7 +493,7 @@ IANA_SUPPORTED_SIMILAR: Dict[str, List[str]] = { } -CHARDET_CORRESPONDENCE: Dict[str, str] = { +CHARDET_CORRESPONDENCE: dict[str, str] = { "iso2022_kr": "ISO-2022-KR", "iso2022_jp": "ISO-2022-JP", "euc_kr": "EUC-KR", @@ -528,7 +529,7 @@ CHARDET_CORRESPONDENCE: Dict[str, str] = { } -COMMON_SAFE_ASCII_CHARACTERS: Set[str] = { +COMMON_SAFE_ASCII_CHARACTERS: set[str] = { "<", ">", "=", @@ -548,9 +549,26 @@ COMMON_SAFE_ASCII_CHARACTERS: Set[str] = { ")", } +# Sample character sets — replace with full lists if needed +COMMON_CHINESE_CHARACTERS = "的一是在不了有和人这中大为上个国我以要他时来用们生到作地于出就分对成会可主发年动同工也能下过子说产种面而方后多定行学法所民得经十三之进着等部度家电力里如水化高自二理起小物现实加量都两体制机当使点从业本去把性好应开它合还因由其些然前外天政四日那社义事平形相全表间样与关各重新线内数正心反你明看原又么利比或但质气第向道命此变条只没结解问意建月公无系军很情者最立代想已通并提直题党程展五果料象员革位入常文总次品式活设及管特件长求老头基资边流路级少图山统接知较将组见计别她手角期根论运农指几九区强放决西被干做必战先回则任取据处队南给色光门即保治北造百规热领七海口东导器压志世金增争济阶油思术极交受联什认六共权收证改清己美再采转更单风切打白教速花带安场身车例真务具万每目至达走积示议声报斗完类八离华名确才科张信马节话米整空元况今集温传土许步群广石记需段研界拉林律叫且究观越织装影算低持音众书布复容儿须际商非验连断深难近矿千周委素技备半办青省列习响约支般史感劳便团往酸历市克何除消构府太准精值号率族维划选标写存候毛亲快效斯院查江型眼王按格养易置派层片始却专状育厂京识适属圆包火住调满县局照参红细引听该铁价严龙飞" -KO_NAMES: Set[str] = {"johab", "cp949", "euc_kr"} -ZH_NAMES: Set[str] = {"big5", "cp950", "big5hkscs", "hz"} +COMMON_JAPANESE_CHARACTERS = "日一国年大十二本中長出三時行見月分後前生五間上東四今金九入学高円子外八六下来気小七山話女北午百書先名川千水半男西電校語土木聞食車何南万毎白天母火右読友左休父雨" + +COMMON_KOREAN_CHARACTERS = "一二三四五六七八九十百千萬上下左右中人女子大小山川日月火水木金土父母天地國名年時文校學生" + +# Combine all into a set +COMMON_CJK_CHARACTERS = set( + "".join( + [ + COMMON_CHINESE_CHARACTERS, + COMMON_JAPANESE_CHARACTERS, + COMMON_KOREAN_CHARACTERS, + ] + ) +) + +KO_NAMES: set[str] = {"johab", "cp949", "euc_kr"} +ZH_NAMES: set[str] = {"big5", "cp950", "big5hkscs", "hz"} # Logging LEVEL below DEBUG TRACE: int = 5 @@ -558,7 +576,7 @@ TRACE: int = 5 # Language label that contain the em dash "—" # character are to be considered alternative seq to origin -FREQUENCIES: Dict[str, List[str]] = { +FREQUENCIES: dict[str, list[str]] = { "English": [ "e", "a", diff --git a/lib/charset_normalizer/legacy.py b/lib/charset_normalizer/legacy.py index 3f6d4907..e221beca 100644 --- a/lib/charset_normalizer/legacy.py +++ b/lib/charset_normalizer/legacy.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any from warnings import warn from .api import from_bytes @@ -11,9 +11,9 @@ if TYPE_CHECKING: from typing_extensions import TypedDict class ResultDict(TypedDict): - encoding: Optional[str] + encoding: str | None language: str - confidence: Optional[float] + confidence: float | None def detect( @@ -37,8 +37,7 @@ def detect( if not isinstance(byte_str, (bytearray, bytes)): raise TypeError( # pragma: nocover - "Expected object of type bytes or bytearray, got: " - "{0}".format(type(byte_str)) + f"Expected object of type bytes or bytearray, got: {type(byte_str)}" ) if isinstance(byte_str, bytearray): diff --git a/lib/charset_normalizer/md.py b/lib/charset_normalizer/md.py index d834db0e..12ce024b 100644 --- a/lib/charset_normalizer/md.py +++ b/lib/charset_normalizer/md.py @@ -1,6 +1,7 @@ +from __future__ import annotations + from functools import lru_cache from logging import getLogger -from typing import List, Optional from .constant import ( COMMON_SAFE_ASCII_CHARACTERS, @@ -25,6 +26,7 @@ from .utils import ( is_unprintable, remove_accent, unicode_range, + 
is_cjk_uncommon, ) @@ -68,7 +70,7 @@ class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin): self._symbol_count: int = 0 self._character_count: int = 0 - self._last_printable_char: Optional[str] = None + self._last_printable_char: str | None = None self._frenzy_symbol_in_word: bool = False def eligible(self, character: str) -> bool: @@ -92,7 +94,7 @@ class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin): self._last_printable_char = character - def reset(self) -> None: # pragma: no cover + def reset(self) -> None: # Abstract self._punctuation_count = 0 self._character_count = 0 self._symbol_count = 0 @@ -123,7 +125,7 @@ class TooManyAccentuatedPlugin(MessDetectorPlugin): if is_accentuated(character): self._accentuated_count += 1 - def reset(self) -> None: # pragma: no cover + def reset(self) -> None: # Abstract self._character_count = 0 self._accentuated_count = 0 @@ -149,7 +151,7 @@ class UnprintablePlugin(MessDetectorPlugin): self._unprintable_count += 1 self._character_count += 1 - def reset(self) -> None: # pragma: no cover + def reset(self) -> None: # Abstract self._unprintable_count = 0 @property @@ -165,7 +167,7 @@ class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin): self._successive_count: int = 0 self._character_count: int = 0 - self._last_latin_character: Optional[str] = None + self._last_latin_character: str | None = None def eligible(self, character: str) -> bool: return character.isalpha() and is_latin(character) @@ -184,7 +186,7 @@ class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin): self._successive_count += 1 self._last_latin_character = character - def reset(self) -> None: # pragma: no cover + def reset(self) -> None: # Abstract self._successive_count = 0 self._character_count = 0 self._last_latin_character = None @@ -201,7 +203,7 @@ class SuspiciousRange(MessDetectorPlugin): def __init__(self) -> None: self._suspicious_successive_range_count: int = 0 self._character_count: int = 0 - self._last_printable_seen: Optional[str] = None + self._last_printable_seen: str | None = None def eligible(self, character: str) -> bool: return character.isprintable() @@ -221,15 +223,15 @@ class SuspiciousRange(MessDetectorPlugin): self._last_printable_seen = character return - unicode_range_a: Optional[str] = unicode_range(self._last_printable_seen) - unicode_range_b: Optional[str] = unicode_range(character) + unicode_range_a: str | None = unicode_range(self._last_printable_seen) + unicode_range_b: str | None = unicode_range(character) if is_suspiciously_successive_range(unicode_range_a, unicode_range_b): self._suspicious_successive_range_count += 1 self._last_printable_seen = character - def reset(self) -> None: # pragma: no cover + def reset(self) -> None: # Abstract self._character_count = 0 self._suspicious_successive_range_count = 0 self._last_printable_seen = None @@ -346,7 +348,7 @@ class SuperWeirdWordPlugin(MessDetectorPlugin): self._is_current_word_bad = True self._buffer += character - def reset(self) -> None: # pragma: no cover + def reset(self) -> None: # Abstract self._buffer = "" self._is_current_word_bad = False self._foreign_long_watch = False @@ -364,35 +366,39 @@ class SuperWeirdWordPlugin(MessDetectorPlugin): return self._bad_character_count / self._character_count -class CjkInvalidStopPlugin(MessDetectorPlugin): +class CjkUncommonPlugin(MessDetectorPlugin): """ - GB(Chinese) based encoding often render the stop incorrectly when the content does not fit and - can be easily detected. Searching for the overuse of '丅' and '丄'. 
+ Detect messy CJK text that probably means nothing. """ def __init__(self) -> None: - self._wrong_stop_count: int = 0 - self._cjk_character_count: int = 0 + self._character_count: int = 0 + self._uncommon_count: int = 0 def eligible(self, character: str) -> bool: - return True + return is_cjk(character) def feed(self, character: str) -> None: - if character in {"丅", "丄"}: - self._wrong_stop_count += 1 - return - if is_cjk(character): - self._cjk_character_count += 1 + self._character_count += 1 - def reset(self) -> None: # pragma: no cover - self._wrong_stop_count = 0 - self._cjk_character_count = 0 + if is_cjk_uncommon(character): + self._uncommon_count += 1 + return + + def reset(self) -> None: # Abstract + self._character_count = 0 + self._uncommon_count = 0 @property def ratio(self) -> float: - if self._cjk_character_count < 16: + if self._character_count < 8: return 0.0 - return self._wrong_stop_count / self._cjk_character_count + + uncommon_form_usage: float = self._uncommon_count / self._character_count + + # we can be pretty sure it's garbage when uncommon characters are widely + # used. otherwise it could just be traditional chinese for example. + return uncommon_form_usage / 10 if uncommon_form_usage > 0.5 else 0.0 class ArchaicUpperLowerPlugin(MessDetectorPlugin): @@ -406,7 +412,7 @@ class ArchaicUpperLowerPlugin(MessDetectorPlugin): self._character_count: int = 0 - self._last_alpha_seen: Optional[str] = None + self._last_alpha_seen: str | None = None self._current_ascii_only: bool = True def eligible(self, character: str) -> bool: @@ -454,7 +460,7 @@ class ArchaicUpperLowerPlugin(MessDetectorPlugin): self._character_count_since_last_sep += 1 self._last_alpha_seen = character - def reset(self) -> None: # pragma: no cover + def reset(self) -> None: # Abstract self._character_count = 0 self._character_count_since_last_sep = 0 self._successive_upper_lower_count = 0 @@ -476,7 +482,7 @@ class ArabicIsolatedFormPlugin(MessDetectorPlugin): self._character_count: int = 0 self._isolated_form_count: int = 0 - def reset(self) -> None: # pragma: no cover + def reset(self) -> None: # Abstract self._character_count = 0 self._isolated_form_count = 0 @@ -501,7 +507,7 @@ class ArabicIsolatedFormPlugin(MessDetectorPlugin): @lru_cache(maxsize=1024) def is_suspiciously_successive_range( - unicode_range_a: Optional[str], unicode_range_b: Optional[str] + unicode_range_a: str | None, unicode_range_b: str | None ) -> bool: """ Determine if two Unicode range seen next to each other can be considered as suspicious. @@ -525,9 +531,10 @@ def is_suspiciously_successive_range( ): return False - keywords_range_a, keywords_range_b = unicode_range_a.split( - " " - ), unicode_range_b.split(" ") + keywords_range_a, keywords_range_b = ( + unicode_range_a.split(" "), + unicode_range_b.split(" "), + ) for el in keywords_range_a: if el in UNICODE_SECONDARY_RANGE_KEYWORD: @@ -580,7 +587,7 @@ def mess_ratio( Compute a mess ratio given a decoded bytes sequence. The maximum threshold does stop the computation earlier. 
""" - detectors: List[MessDetectorPlugin] = [ + detectors: list[MessDetectorPlugin] = [ md_class() for md_class in MessDetectorPlugin.__subclasses__() ] @@ -622,7 +629,7 @@ def mess_ratio( logger.log(TRACE, f"Starting with: {decoded_sequence[:16]}") logger.log(TRACE, f"Ending with: {decoded_sequence[-16::]}") - for dt in detectors: # pragma: nocover + for dt in detectors: logger.log(TRACE, f"{dt.__class__}: {dt.ratio}") return round(mean_mess_ratio, 3) diff --git a/lib/charset_normalizer/models.py b/lib/charset_normalizer/models.py index 6f6b86b3..1042758f 100644 --- a/lib/charset_normalizer/models.py +++ b/lib/charset_normalizer/models.py @@ -1,8 +1,10 @@ +from __future__ import annotations + from encodings.aliases import aliases from hashlib import sha256 from json import dumps from re import sub -from typing import Any, Dict, Iterator, List, Optional, Tuple, Union +from typing import Any, Iterator, List, Tuple from .constant import RE_POSSIBLE_ENCODING_INDICATION, TOO_BIG_SEQUENCE from .utils import iana_name, is_multi_byte_encoding, unicode_range @@ -15,9 +17,9 @@ class CharsetMatch: guessed_encoding: str, mean_mess_ratio: float, has_sig_or_bom: bool, - languages: "CoherenceMatches", - decoded_payload: Optional[str] = None, - preemptive_declaration: Optional[str] = None, + languages: CoherenceMatches, + decoded_payload: str | None = None, + preemptive_declaration: str | None = None, ): self._payload: bytes = payload @@ -25,17 +27,17 @@ class CharsetMatch: self._mean_mess_ratio: float = mean_mess_ratio self._languages: CoherenceMatches = languages self._has_sig_or_bom: bool = has_sig_or_bom - self._unicode_ranges: Optional[List[str]] = None + self._unicode_ranges: list[str] | None = None - self._leaves: List[CharsetMatch] = [] + self._leaves: list[CharsetMatch] = [] self._mean_coherence_ratio: float = 0.0 - self._output_payload: Optional[bytes] = None - self._output_encoding: Optional[str] = None + self._output_payload: bytes | None = None + self._output_encoding: str | None = None - self._string: Optional[str] = decoded_payload + self._string: str | None = decoded_payload - self._preemptive_declaration: Optional[str] = preemptive_declaration + self._preemptive_declaration: str | None = preemptive_declaration def __eq__(self, other: object) -> bool: if not isinstance(other, CharsetMatch): @@ -77,9 +79,9 @@ class CharsetMatch: return self._string def __repr__(self) -> str: - return "".format(self.encoding, self.fingerprint) + return f"" - def add_submatch(self, other: "CharsetMatch") -> None: + def add_submatch(self, other: CharsetMatch) -> None: if not isinstance(other, CharsetMatch) or other == self: raise ValueError( "Unable to add instance <{}> as a submatch of a CharsetMatch".format( @@ -95,11 +97,11 @@ class CharsetMatch: return self._encoding @property - def encoding_aliases(self) -> List[str]: + def encoding_aliases(self) -> list[str]: """ Encoding name are known by many name, using this could help when searching for IBM855 when it's listed as CP855. """ - also_known_as: List[str] = [] + also_known_as: list[str] = [] for u, p in aliases.items(): if self.encoding == u: also_known_as.append(p) @@ -116,7 +118,7 @@ class CharsetMatch: return self._has_sig_or_bom @property - def languages(self) -> List[str]: + def languages(self) -> list[str]: """ Return the complete list of possible languages found in decoded sequence. Usually not really useful. Returned list may be empty even if 'language' property return something != 'Unknown'. 
@@ -177,7 +179,7 @@ class CharsetMatch: return self._payload @property - def submatch(self) -> List["CharsetMatch"]: + def submatch(self) -> list[CharsetMatch]: return self._leaves @property @@ -185,19 +187,17 @@ class CharsetMatch: return len(self._leaves) > 0 @property - def alphabets(self) -> List[str]: + def alphabets(self) -> list[str]: if self._unicode_ranges is not None: return self._unicode_ranges # list detected ranges - detected_ranges: List[Optional[str]] = [ - unicode_range(char) for char in str(self) - ] + detected_ranges: list[str | None] = [unicode_range(char) for char in str(self)] # filter and sort self._unicode_ranges = sorted(list({r for r in detected_ranges if r})) return self._unicode_ranges @property - def could_be_from_charset(self) -> List[str]: + def could_be_from_charset(self) -> list[str]: """ The complete list of encoding that output the exact SAME str result and therefore could be the originating encoding. @@ -221,10 +221,11 @@ class CharsetMatch: patched_header = sub( RE_POSSIBLE_ENCODING_INDICATION, lambda m: m.string[m.span()[0] : m.span()[1]].replace( - m.groups()[0], iana_name(self._output_encoding) # type: ignore[arg-type] + m.groups()[0], + iana_name(self._output_encoding).replace("_", "-"), # type: ignore[arg-type] ), decoded_string[:8192], - 1, + count=1, ) decoded_string = patched_header + decoded_string[8192:] @@ -247,13 +248,13 @@ class CharsetMatches: Act like a list(iterable) but does not implements all related methods. """ - def __init__(self, results: Optional[List[CharsetMatch]] = None): - self._results: List[CharsetMatch] = sorted(results) if results else [] + def __init__(self, results: list[CharsetMatch] | None = None): + self._results: list[CharsetMatch] = sorted(results) if results else [] def __iter__(self) -> Iterator[CharsetMatch]: yield from self._results - def __getitem__(self, item: Union[int, str]) -> CharsetMatch: + def __getitem__(self, item: int | str) -> CharsetMatch: """ Retrieve a single item either by its position or encoding name (alias may be used here). Raise KeyError upon invalid index or encoding not present in results. @@ -293,7 +294,7 @@ class CharsetMatches: self._results.append(item) self._results = sorted(self._results) - def best(self) -> Optional["CharsetMatch"]: + def best(self) -> CharsetMatch | None: """ Simply return the first match. Strict equivalent to matches[0]. """ @@ -301,7 +302,7 @@ class CharsetMatches: return None return self._results[0] - def first(self) -> Optional["CharsetMatch"]: + def first(self) -> CharsetMatch | None: """ Redundant method, call the method best(). Kept for BC reasons. 
""" @@ -316,31 +317,31 @@ class CliDetectionResult: def __init__( self, path: str, - encoding: Optional[str], - encoding_aliases: List[str], - alternative_encodings: List[str], + encoding: str | None, + encoding_aliases: list[str], + alternative_encodings: list[str], language: str, - alphabets: List[str], + alphabets: list[str], has_sig_or_bom: bool, chaos: float, coherence: float, - unicode_path: Optional[str], + unicode_path: str | None, is_preferred: bool, ): self.path: str = path - self.unicode_path: Optional[str] = unicode_path - self.encoding: Optional[str] = encoding - self.encoding_aliases: List[str] = encoding_aliases - self.alternative_encodings: List[str] = alternative_encodings + self.unicode_path: str | None = unicode_path + self.encoding: str | None = encoding + self.encoding_aliases: list[str] = encoding_aliases + self.alternative_encodings: list[str] = alternative_encodings self.language: str = language - self.alphabets: List[str] = alphabets + self.alphabets: list[str] = alphabets self.has_sig_or_bom: bool = has_sig_or_bom self.chaos: float = chaos self.coherence: float = coherence self.is_preferred: bool = is_preferred @property - def __dict__(self) -> Dict[str, Any]: # type: ignore + def __dict__(self) -> dict[str, Any]: # type: ignore return { "path": self.path, "encoding": self.encoding, diff --git a/lib/charset_normalizer/utils.py b/lib/charset_normalizer/utils.py index e5cbbf4c..6bf0384c 100644 --- a/lib/charset_normalizer/utils.py +++ b/lib/charset_normalizer/utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import importlib import logging import unicodedata @@ -5,9 +7,11 @@ from codecs import IncrementalDecoder from encodings.aliases import aliases from functools import lru_cache from re import findall -from typing import Generator, List, Optional, Set, Tuple, Union +from typing import Generator -from _multibytecodec import MultibyteIncrementalDecoder +from _multibytecodec import ( # type: ignore[import-not-found,import] + MultibyteIncrementalDecoder, +) from .constant import ( ENCODING_MARKS, @@ -16,6 +20,7 @@ from .constant import ( UNICODE_RANGES_COMBINED, UNICODE_SECONDARY_RANGE_KEYWORD, UTF8_MAXIMAL_ALLOCATION, + COMMON_CJK_CHARACTERS, ) @@ -23,7 +28,7 @@ from .constant import ( def is_accentuated(character: str) -> bool: try: description: str = unicodedata.name(character) - except ValueError: + except ValueError: # Defensive: unicode database outdated? return False return ( "WITH GRAVE" in description @@ -43,13 +48,13 @@ def remove_accent(character: str) -> str: if not decomposed: return character - codes: List[str] = decomposed.split(" ") + codes: list[str] = decomposed.split(" ") return chr(int(codes[0], 16)) @lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def unicode_range(character: str) -> Optional[str]: +def unicode_range(character: str) -> str | None: """ Retrieve the Unicode range official name from a single character. """ @@ -66,7 +71,7 @@ def unicode_range(character: str) -> Optional[str]: def is_latin(character: str) -> bool: try: description: str = unicodedata.name(character) - except ValueError: + except ValueError: # Defensive: unicode database outdated? 
return False return "LATIN" in description @@ -78,7 +83,7 @@ def is_punctuation(character: str) -> bool: if "P" in character_category: return True - character_range: Optional[str] = unicode_range(character) + character_range: str | None = unicode_range(character) if character_range is None: return False @@ -93,7 +98,7 @@ def is_symbol(character: str) -> bool: if "S" in character_category or "N" in character_category: return True - character_range: Optional[str] = unicode_range(character) + character_range: str | None = unicode_range(character) if character_range is None: return False @@ -103,7 +108,7 @@ def is_symbol(character: str) -> bool: @lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) def is_emoticon(character: str) -> bool: - character_range: Optional[str] = unicode_range(character) + character_range: str | None = unicode_range(character) if character_range is None: return False @@ -130,7 +135,7 @@ def is_case_variable(character: str) -> bool: def is_cjk(character: str) -> bool: try: character_name = unicodedata.name(character) - except ValueError: + except ValueError: # Defensive: unicode database outdated? return False return "CJK" in character_name @@ -140,7 +145,7 @@ def is_cjk(character: str) -> bool: def is_hiragana(character: str) -> bool: try: character_name = unicodedata.name(character) - except ValueError: + except ValueError: # Defensive: unicode database outdated? return False return "HIRAGANA" in character_name @@ -150,7 +155,7 @@ def is_hiragana(character: str) -> bool: def is_katakana(character: str) -> bool: try: character_name = unicodedata.name(character) - except ValueError: + except ValueError: # Defensive: unicode database outdated? return False return "KATAKANA" in character_name @@ -160,7 +165,7 @@ def is_katakana(character: str) -> bool: def is_hangul(character: str) -> bool: try: character_name = unicodedata.name(character) - except ValueError: + except ValueError: # Defensive: unicode database outdated? return False return "HANGUL" in character_name @@ -170,7 +175,7 @@ def is_hangul(character: str) -> bool: def is_thai(character: str) -> bool: try: character_name = unicodedata.name(character) - except ValueError: + except ValueError: # Defensive: unicode database outdated? return False return "THAI" in character_name @@ -180,7 +185,7 @@ def is_thai(character: str) -> bool: def is_arabic(character: str) -> bool: try: character_name = unicodedata.name(character) - except ValueError: + except ValueError: # Defensive: unicode database outdated? return False return "ARABIC" in character_name @@ -190,12 +195,17 @@ def is_arabic(character: str) -> bool: def is_arabic_isolated_form(character: str) -> bool: try: character_name = unicodedata.name(character) - except ValueError: + except ValueError: # Defensive: unicode database outdated? return False return "ARABIC" in character_name and "ISOLATED FORM" in character_name +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_cjk_uncommon(character: str) -> bool: + return character not in COMMON_CJK_CHARACTERS + + @lru_cache(maxsize=len(UNICODE_RANGES_COMBINED)) def is_unicode_range_secondary(range_name: str) -> bool: return any(keyword in range_name for keyword in UNICODE_SECONDARY_RANGE_KEYWORD) @@ -206,13 +216,13 @@ def is_unprintable(character: str) -> bool: return ( character.isspace() is False # includes \n \t \r \v and character.isprintable() is False - and character != "\x1A" # Why? Its the ASCII substitute character. + and character != "\x1a" # Why? Its the ASCII substitute character. 
and character != "\ufeff" # bug discovered in Python, # Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space. ) -def any_specified_encoding(sequence: bytes, search_zone: int = 8192) -> Optional[str]: +def any_specified_encoding(sequence: bytes, search_zone: int = 8192) -> str | None: """ Extract using ASCII-only decoder any specified encoding in the first n-bytes. """ @@ -221,7 +231,7 @@ def any_specified_encoding(sequence: bytes, search_zone: int = 8192) -> Optional seq_len: int = len(sequence) - results: List[str] = findall( + results: list[str] = findall( RE_POSSIBLE_ENCODING_INDICATION, sequence[: min(seq_len, search_zone)].decode("ascii", errors="ignore"), ) @@ -260,18 +270,18 @@ def is_multi_byte_encoding(name: str) -> bool: "utf_32_be", "utf_7", } or issubclass( - importlib.import_module("encodings.{}".format(name)).IncrementalDecoder, + importlib.import_module(f"encodings.{name}").IncrementalDecoder, MultibyteIncrementalDecoder, ) -def identify_sig_or_bom(sequence: bytes) -> Tuple[Optional[str], bytes]: +def identify_sig_or_bom(sequence: bytes) -> tuple[str | None, bytes]: """ Identify and extract SIG/BOM in given sequence. """ for iana_encoding in ENCODING_MARKS: - marks: Union[bytes, List[bytes]] = ENCODING_MARKS[iana_encoding] + marks: bytes | list[bytes] = ENCODING_MARKS[iana_encoding] if isinstance(marks, bytes): marks = [marks] @@ -288,6 +298,7 @@ def should_strip_sig_or_bom(iana_encoding: str) -> bool: def iana_name(cp_name: str, strict: bool = True) -> str: + """Returns the Python normalized encoding name (Not the IANA official name).""" cp_name = cp_name.lower().replace("-", "_") encoding_alias: str @@ -298,35 +309,17 @@ def iana_name(cp_name: str, strict: bool = True) -> str: return encoding_iana if strict: - raise ValueError("Unable to retrieve IANA for '{}'".format(cp_name)) + raise ValueError(f"Unable to retrieve IANA for '{cp_name}'") return cp_name -def range_scan(decoded_sequence: str) -> List[str]: - ranges: Set[str] = set() - - for character in decoded_sequence: - character_range: Optional[str] = unicode_range(character) - - if character_range is None: - continue - - ranges.add(character_range) - - return list(ranges) - - def cp_similarity(iana_name_a: str, iana_name_b: str) -> float: if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b): return 0.0 - decoder_a = importlib.import_module( - "encodings.{}".format(iana_name_a) - ).IncrementalDecoder - decoder_b = importlib.import_module( - "encodings.{}".format(iana_name_b) - ).IncrementalDecoder + decoder_a = importlib.import_module(f"encodings.{iana_name_a}").IncrementalDecoder + decoder_b = importlib.import_module(f"encodings.{iana_name_b}").IncrementalDecoder id_a: IncrementalDecoder = decoder_a(errors="ignore") id_b: IncrementalDecoder = decoder_b(errors="ignore") @@ -374,7 +367,7 @@ def cut_sequence_chunks( strip_sig_or_bom: bool, sig_payload: bytes, is_multi_byte_decoder: bool, - decoded_payload: Optional[str] = None, + decoded_payload: str | None = None, ) -> Generator[str, None, None]: if decoded_payload and is_multi_byte_decoder is False: for i in offsets: diff --git a/lib/charset_normalizer/version.py b/lib/charset_normalizer/version.py index 699990ee..e5687e3c 100644 --- a/lib/charset_normalizer/version.py +++ b/lib/charset_normalizer/version.py @@ -2,5 +2,7 @@ Expose version """ -__version__ = "3.4.0" +from __future__ import annotations + +__version__ = "3.4.2" VERSION = __version__.split(".") diff --git a/lib/plexapi/audio.py 
b/lib/plexapi/audio.py index 05d38a9c..3bc6f514 100644 --- a/lib/plexapi/audio.py +++ b/lib/plexapi/audio.py @@ -8,7 +8,7 @@ from urllib.parse import quote_plus from typing import Any, Dict, List, Optional, TypeVar from plexapi import media, utils -from plexapi.base import Playable, PlexPartialObject, PlexHistory, PlexSession +from plexapi.base import Playable, PlexPartialObject, PlexHistory, PlexSession, cached_data_property from plexapi.exceptions import BadRequest from plexapi.mixins import ( AdvancedSettingsMixin, SplitMergeMixin, UnmatchMatchMixin, ExtrasMixin, HubsMixin, PlayedUnplayedMixin, RatingMixin, @@ -59,14 +59,11 @@ class Audio(PlexPartialObject, PlayedUnplayedMixin): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.addedAt = utils.toDatetime(data.attrib.get('addedAt')) self.art = data.attrib.get('art') self.artBlurHash = data.attrib.get('artBlurHash') self.distance = utils.cast(float, data.attrib.get('distance')) - self.fields = self.findItems(data, media.Field) self.guid = data.attrib.get('guid') - self.images = self.findItems(data, media.Image) self.index = utils.cast(int, data.attrib.get('index')) self.key = data.attrib.get('key', '') self.lastRatedAt = utils.toDatetime(data.attrib.get('lastRatedAt')) @@ -75,7 +72,6 @@ class Audio(PlexPartialObject, PlayedUnplayedMixin): self.librarySectionKey = data.attrib.get('librarySectionKey') self.librarySectionTitle = data.attrib.get('librarySectionTitle') self.listType = 'audio' - self.moods = self.findItems(data, media.Mood) self.musicAnalysisVersion = utils.cast(int, data.attrib.get('musicAnalysisVersion')) self.ratingKey = utils.cast(int, data.attrib.get('ratingKey')) self.summary = data.attrib.get('summary') @@ -88,6 +84,18 @@ class Audio(PlexPartialObject, PlayedUnplayedMixin): self.userRating = utils.cast(float, data.attrib.get('userRating')) self.viewCount = utils.cast(int, data.attrib.get('viewCount', 0)) + @cached_data_property + def fields(self): + return self.findItems(self._data, media.Field) + + @cached_data_property + def images(self): + return self.findItems(self._data, media.Image) + + @cached_data_property + def moods(self): + return self.findItems(self._data, media.Mood) + def url(self, part): """ Returns the full URL for the audio item. Typically used for getting a specific track. 
""" return self._server.url(part, includeToken=True) if part else None @@ -205,18 +213,45 @@ class Artist( Audio._loadData(self, data) self.albumSort = utils.cast(int, data.attrib.get('albumSort', '-1')) self.audienceRating = utils.cast(float, data.attrib.get('audienceRating')) - self.collections = self.findItems(data, media.Collection) - self.countries = self.findItems(data, media.Country) - self.genres = self.findItems(data, media.Genre) - self.guids = self.findItems(data, media.Guid) self.key = self.key.replace('/children', '') # FIX_BUG_50 - self.labels = self.findItems(data, media.Label) - self.locations = self.listAttrs(data, 'path', etag='Location') self.rating = utils.cast(float, data.attrib.get('rating')) - self.similar = self.findItems(data, media.Similar) - self.styles = self.findItems(data, media.Style) self.theme = data.attrib.get('theme') - self.ultraBlurColors = self.findItem(data, media.UltraBlurColors) + + @cached_data_property + def collections(self): + return self.findItems(self._data, media.Collection) + + @cached_data_property + def countries(self): + return self.findItems(self._data, media.Country) + + @cached_data_property + def genres(self): + return self.findItems(self._data, media.Genre) + + @cached_data_property + def guids(self): + return self.findItems(self._data, media.Guid) + + @cached_data_property + def labels(self): + return self.findItems(self._data, media.Label) + + @cached_data_property + def locations(self): + return self.listAttrs(self._data, 'path', etag='Location') + + @cached_data_property + def similar(self): + return self.findItems(self._data, media.Similar) + + @cached_data_property + def styles(self): + return self.findItems(self._data, media.Style) + + @cached_data_property + def ultraBlurColors(self): + return self.findItem(self._data, media.UltraBlurColors) def __iter__(self): for album in self.albums(): @@ -355,12 +390,7 @@ class Album( """ Load attribute values from Plex XML response. 
""" Audio._loadData(self, data) self.audienceRating = utils.cast(float, data.attrib.get('audienceRating')) - self.collections = self.findItems(data, media.Collection) - self.formats = self.findItems(data, media.Format) - self.genres = self.findItems(data, media.Genre) - self.guids = self.findItems(data, media.Guid) self.key = self.key.replace('/children', '') # FIX_BUG_50 - self.labels = self.findItems(data, media.Label) self.leafCount = utils.cast(int, data.attrib.get('leafCount')) self.loudnessAnalysisVersion = utils.cast(int, data.attrib.get('loudnessAnalysisVersion')) self.originallyAvailableAt = utils.toDatetime(data.attrib.get('originallyAvailableAt'), '%Y-%m-%d') @@ -372,12 +402,41 @@ class Album( self.parentTitle = data.attrib.get('parentTitle') self.rating = utils.cast(float, data.attrib.get('rating')) self.studio = data.attrib.get('studio') - self.styles = self.findItems(data, media.Style) - self.subformats = self.findItems(data, media.Subformat) - self.ultraBlurColors = self.findItem(data, media.UltraBlurColors) self.viewedLeafCount = utils.cast(int, data.attrib.get('viewedLeafCount')) self.year = utils.cast(int, data.attrib.get('year')) + @cached_data_property + def collections(self): + return self.findItems(self._data, media.Collection) + + @cached_data_property + def formats(self): + return self.findItems(self._data, media.Format) + + @cached_data_property + def genres(self): + return self.findItems(self._data, media.Genre) + + @cached_data_property + def guids(self): + return self.findItems(self._data, media.Guid) + + @cached_data_property + def labels(self): + return self.findItems(self._data, media.Label) + + @cached_data_property + def styles(self): + return self.findItems(self._data, media.Style) + + @cached_data_property + def subformats(self): + return self.findItems(self._data, media.Subformat) + + @cached_data_property + def ultraBlurColors(self): + return self.findItem(self._data, media.UltraBlurColors) + def __iter__(self): for track in self.tracks(): yield track @@ -495,11 +554,8 @@ class Track( Audio._loadData(self, data) Playable._loadData(self, data) self.audienceRating = utils.cast(float, data.attrib.get('audienceRating')) - self.chapters = self.findItems(data, media.Chapter) self.chapterSource = data.attrib.get('chapterSource') - self.collections = self.findItems(data, media.Collection) self.duration = utils.cast(int, data.attrib.get('duration')) - self.genres = self.findItems(data, media.Genre) self.grandparentArt = data.attrib.get('grandparentArt') self.grandparentGuid = data.attrib.get('grandparentGuid') self.grandparentKey = data.attrib.get('grandparentKey') @@ -507,9 +563,6 @@ class Track( self.grandparentTheme = data.attrib.get('grandparentTheme') self.grandparentThumb = data.attrib.get('grandparentThumb') self.grandparentTitle = data.attrib.get('grandparentTitle') - self.guids = self.findItems(data, media.Guid) - self.labels = self.findItems(data, media.Label) - self.media = self.findItems(data, media.Media) self.originalTitle = data.attrib.get('originalTitle') self.parentGuid = data.attrib.get('parentGuid') self.parentIndex = utils.cast(int, data.attrib.get('parentIndex')) @@ -525,6 +578,30 @@ class Track( self.viewOffset = utils.cast(int, data.attrib.get('viewOffset', 0)) self.year = utils.cast(int, data.attrib.get('year')) + @cached_data_property + def chapters(self): + return self.findItems(self._data, media.Chapter) + + @cached_data_property + def collections(self): + return self.findItems(self._data, media.Collection) + + @cached_data_property + 
def genres(self): + return self.findItems(self._data, media.Genre) + + @cached_data_property + def guids(self): + return self.findItems(self._data, media.Guid) + + @cached_data_property + def labels(self): + return self.findItems(self._data, media.Label) + + @cached_data_property + def media(self): + return self.findItems(self._data, media.Media) + @property def locations(self): """ This does not exist in plex xml response but is added to have a common diff --git a/lib/plexapi/base.py b/lib/plexapi/base.py index 675ac5d9..fc519bb1 100644 --- a/lib/plexapi/base.py +++ b/lib/plexapi/base.py @@ -39,7 +39,42 @@ OPERATORS = { } -class PlexObject: +class cached_data_property(cached_property): + """Caching for PlexObject data properties. + + This decorator creates properties that cache their values with + automatic invalidation on data changes. + """ + + def __set_name__(self, owner, name): + """Register the annotated property in the parent class's _cached_data_properties set.""" + super().__set_name__(owner, name) + if not hasattr(owner, '_cached_data_properties'): + owner._cached_data_properties = set() + owner._cached_data_properties.add(name) + + +class PlexObjectMeta(type): + """Metaclass for PlexObject to handle cached_data_properties.""" + def __new__(mcs, name, bases, attrs): + cached_data_props = set() + + # Merge all _cached_data_properties from parent classes + for base in bases: + if hasattr(base, '_cached_data_properties'): + cached_data_props.update(base._cached_data_properties) + + # Find all properties annotated with cached_data_property in the current class + for attr_name, attr_value in attrs.items(): + if isinstance(attr_value, cached_data_property): + cached_data_props.add(attr_name) + + attrs['_cached_data_properties'] = cached_data_props + + return super().__new__(mcs, name, bases, attrs) + + +class PlexObject(metaclass=PlexObjectMeta): """ Base class for all Plex objects. Parameters: @@ -387,7 +422,7 @@ class PlexObject: return results def reload(self, key=None, **kwargs): - """ Reload the data for this object from self.key. + """ Reload the data for this object. Parameters: key (string, optional): Override the key to reload. @@ -435,7 +470,7 @@ class PlexObject: self._initpath = key data = self._server.query(key) self._overwriteNone = _overwriteNone - self._loadData(data[0]) + self._invalidateCacheAndLoadData(data[0]) self._overwriteNone = True return self @@ -497,9 +532,35 @@ class PlexObject: return float(value) return value + def _invalidateCacheAndLoadData(self, data): + """Load attribute values from Plex XML response and invalidate cached properties.""" + old_data_id = id(getattr(self, '_data', None)) + self._data = data + + # If the data's object ID has changed, invalidate cached properties + if id(data) != old_data_id: + self._invalidateCachedProperties() + + self._loadData(data) + + def _invalidateCachedProperties(self): + """Invalidate all cached data property values.""" + cached_props = getattr(self.__class__, '_cached_data_properties', set()) + + for prop_name in cached_props: + if prop_name in self.__dict__: + del self.__dict__[prop_name] + def _loadData(self, data): + """ Load attribute values from Plex XML response. """ raise NotImplementedError('Abstract method not implemented.') + def _findAndLoadElem(self, data, **kwargs): + """ Find and load the first element in the data that matches the specified attributes. 
""" + for elem in data: + if self._checkAttrs(elem, **kwargs): + self._invalidateCacheAndLoadData(elem) + @property def _searchType(self): return self.TYPE @@ -754,7 +815,7 @@ class PlexPartialObject(PlexObject): class Playable: - """ This is a general place to store functions specific to media that is Playable. + """ This is a mixin to store functions specific to media that is Playable. Things were getting mixed up a bit when dealing with Shows, Season, Artists, Albums which are all not playable. @@ -764,6 +825,7 @@ class Playable: """ def _loadData(self, data): + """ Load attribute values from Plex XML response. """ self.playlistItemID = utils.cast(int, data.attrib.get('playlistItemID')) # playlist self.playQueueItemID = utils.cast(int, data.attrib.get('playQueueItemID')) # playqueue @@ -931,8 +993,8 @@ class Playable: return self -class PlexSession(object): - """ This is a general place to store functions specific to media that is a Plex Session. +class PlexSession: + """ This is a mixin to store functions specific to media that is a Plex Session. Attributes: live (bool): True if this is a live tv session. @@ -945,23 +1007,44 @@ class PlexSession(object): """ def _loadData(self, data): + """ Load attribute values from Plex XML response. """ self.live = utils.cast(bool, data.attrib.get('live', '0')) - self.player = self.findItem(data, etag='Player') - self.session = self.findItem(data, etag='Session') self.sessionKey = utils.cast(int, data.attrib.get('sessionKey')) - self.transcodeSession = self.findItem(data, etag='TranscodeSession') user = data.find('User') self._username = user.attrib.get('title') self._userId = utils.cast(int, user.attrib.get('id')) # For backwards compatibility - self.players = [self.player] if self.player else [] - self.sessions = [self.session] if self.session else [] - self.transcodeSessions = [self.transcodeSession] if self.transcodeSession else [] self.usernames = [self._username] if self._username else [] + # `players`, `sessions`, and `transcodeSessions` are returned with properties + # to support lazy loading. See PR #1510 - @cached_property + @cached_data_property + def player(self): + return self.findItem(self._data, etag='Player') + + @cached_data_property + def session(self): + return self.findItem(self._data, etag='Session') + + @cached_data_property + def transcodeSession(self): + return self.findItem(self._data, etag='TranscodeSession') + + @property + def players(self): + return [self.player] if self.player else [] + + @property + def sessions(self): + return [self.session] if self.session else [] + + @property + def transcodeSessions(self): + return [self.transcodeSession] if self.transcodeSession else [] + + @cached_data_property def user(self): """ Returns the :class:`~plexapi.myplex.MyPlexAccount` object (for admin) or :class:`~plexapi.myplex.MyPlexUser` object (for users) for this session. @@ -978,18 +1061,11 @@ class PlexSession(object): """ return self._reload() - def _reload(self, _autoReload=False, **kwargs): - """ Perform the actual reload. """ - # Do not auto reload sessions - if _autoReload: - return self - + def _reload(self, **kwargs): + """ Reload the data for the session. 
""" key = self._initpath data = self._server.query(key) - for elem in data: - if elem.attrib.get('sessionKey') == str(self.sessionKey): - self._loadData(elem) - break + self._findAndLoadElem(data, sessionKey=str(self.sessionKey)) return self def source(self): @@ -1010,8 +1086,8 @@ class PlexSession(object): return self._server.query(key, params=params) -class PlexHistory(object): - """ This is a general place to store functions specific to media that is a Plex history item. +class PlexHistory: + """ This is a mixin to store functions specific to media that is a Plex history item. Attributes: accountID (int): The associated :class:`~plexapi.server.SystemAccount` ID. @@ -1021,6 +1097,7 @@ class PlexHistory(object): """ def _loadData(self, data): + """ Load attribute values from Plex XML response. """ self.accountID = utils.cast(int, data.attrib.get('accountID')) self.deviceID = utils.cast(int, data.attrib.get('deviceID')) self.historyKey = data.attrib.get('historyKey') @@ -1124,7 +1201,7 @@ class MediaContainer( setattr(self, key, getattr(__iterable, key)) def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.allowSync = utils.cast(int, data.attrib.get('allowSync')) self.augmentationKey = data.attrib.get('augmentationKey') self.identifier = data.attrib.get('identifier') diff --git a/lib/plexapi/client.py b/lib/plexapi/client.py index 3d89e3dc..9a904b40 100644 --- a/lib/plexapi/client.py +++ b/lib/plexapi/client.py @@ -115,7 +115,7 @@ class PlexClient(PlexObject): ) else: client = data[0] - self._loadData(client) + self._invalidateCacheAndLoadData(client) return self def reload(self): @@ -124,7 +124,6 @@ class PlexClient(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.deviceClass = data.attrib.get('deviceClass') self.machineIdentifier = data.attrib.get('machineIdentifier') self.product = data.attrib.get('product') @@ -197,8 +196,7 @@ class PlexClient(PlexObject): raise NotFound(message) else: raise BadRequest(message) - data = utils.cleanXMLString(response.text).encode('utf8') - return ElementTree.fromstring(data) if data.strip() else None + return utils.parseXMLString(response.text) def sendCommand(self, command, proxy=None, **params): """ Convenience wrapper around :func:`~plexapi.client.PlexClient.query` to more easily @@ -222,7 +220,7 @@ class PlexClient(PlexObject): proxy = self._proxyThroughServer if proxy is None else proxy query = self._server.query if proxy else self.query - # Workaround for ptp. See https://github.com/pkkid/python-plexapi/issues/244 + # Workaround for ptp. See https://github.com/pushingkarmaorg/python-plexapi/issues/244 t = time.time() if command == 'timeline/poll': self._last_call = t @@ -606,7 +604,7 @@ class ClientTimeline(PlexObject): key = 'timeline/poll' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. 
""" self.address = data.attrib.get('address') self.audioStreamId = utils.cast(int, data.attrib.get('audioStreamId')) self.autoPlay = utils.cast(bool, data.attrib.get('autoPlay')) diff --git a/lib/plexapi/collection.py b/lib/plexapi/collection.py index 63ea8373..308604a0 100644 --- a/lib/plexapi/collection.py +++ b/lib/plexapi/collection.py @@ -3,7 +3,7 @@ from pathlib import Path from urllib.parse import quote_plus from plexapi import media, utils -from plexapi.base import PlexPartialObject +from plexapi.base import PlexPartialObject, cached_data_property from plexapi.exceptions import BadRequest, NotFound, Unsupported from plexapi.library import LibrarySection, ManagedHub from plexapi.mixins import ( @@ -69,7 +69,7 @@ class Collection( TYPE = 'collection' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.addedAt = utils.toDatetime(data.attrib.get('addedAt')) self.art = data.attrib.get('art') self.artBlurHash = data.attrib.get('artBlurHash') @@ -81,12 +81,9 @@ class Collection( self.collectionSort = utils.cast(int, data.attrib.get('collectionSort', '0')) self.content = data.attrib.get('content') self.contentRating = data.attrib.get('contentRating') - self.fields = self.findItems(data, media.Field) self.guid = data.attrib.get('guid') - self.images = self.findItems(data, media.Image) self.index = utils.cast(int, data.attrib.get('index')) self.key = data.attrib.get('key', '').replace('/children', '') # FIX_BUG_50 - self.labels = self.findItems(data, media.Label) self.lastRatedAt = utils.toDatetime(data.attrib.get('lastRatedAt')) self.librarySectionID = utils.cast(int, data.attrib.get('librarySectionID')) self.librarySectionKey = data.attrib.get('librarySectionKey') @@ -105,12 +102,24 @@ class Collection( self.title = data.attrib.get('title') self.titleSort = data.attrib.get('titleSort', self.title) self.type = data.attrib.get('type') - self.ultraBlurColors = self.findItem(data, media.UltraBlurColors) self.updatedAt = utils.toDatetime(data.attrib.get('updatedAt')) self.userRating = utils.cast(float, data.attrib.get('userRating')) - self._items = None # cache for self.items - self._section = None # cache for self.section - self._filters = None # cache for self.filters + + @cached_data_property + def fields(self): + return self.findItems(self._data, media.Field) + + @cached_data_property + def images(self): + return self.findItems(self._data, media.Image) + + @cached_data_property + def labels(self): + return self.findItems(self._data, media.Label) + + @cached_data_property + def ultraBlurColors(self): + return self.findItem(self._data, media.UltraBlurColors) def __len__(self): # pragma: no cover return len(self.items()) @@ -162,20 +171,26 @@ class Collection( def children(self): return self.items() + @cached_data_property + def _filters(self): + """ Cache for filters. """ + return self._parseFilters(self.content) + def filters(self): """ Returns the search filter dict for smart collection. The filter dict be passed back into :func:`~plexapi.library.LibrarySection.search` to get the list of items. """ - if self.smart and self._filters is None: - self._filters = self._parseFilters(self.content) return self._filters + @cached_data_property + def _section(self): + """ Cache for section. """ + return super(Collection, self).section() + def section(self): """ Returns the :class:`~plexapi.library.LibrarySection` this collection belongs to. 
""" - if self._section is None: - self._section = super(Collection, self).section() return self._section def item(self, title): @@ -192,12 +207,14 @@ class Collection( return item raise NotFound(f'Item with title "{title}" not found in the collection') + @cached_data_property + def _items(self): + """ Cache for the items. """ + key = f'{self.key}/children' + return self.fetchItems(key) + def items(self): """ Returns a list of all items in the collection. """ - if self._items is None: - key = f'{self.key}/children' - items = self.fetchItems(key) - self._items = items return self._items def visibility(self): diff --git a/lib/plexapi/const.py b/lib/plexapi/const.py index bc3e81aa..5b88be9c 100644 --- a/lib/plexapi/const.py +++ b/lib/plexapi/const.py @@ -3,7 +3,7 @@ # Library version MAJOR_VERSION = 4 -MINOR_VERSION = 16 -PATCH_VERSION = 1 +MINOR_VERSION = 17 +PATCH_VERSION = 0 __short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__ = f"{__short_version__}.{PATCH_VERSION}" diff --git a/lib/plexapi/library.py b/lib/plexapi/library.py index 93801a1d..5d54b6a8 100644 --- a/lib/plexapi/library.py +++ b/lib/plexapi/library.py @@ -6,11 +6,10 @@ from typing import Any, TYPE_CHECKING import warnings from collections import defaultdict from datetime import datetime -from functools import cached_property from urllib.parse import parse_qs, quote_plus, urlencode, urlparse from plexapi import log, media, utils -from plexapi.base import OPERATORS, PlexObject +from plexapi.base import OPERATORS, PlexObject, cached_data_property from plexapi.exceptions import BadRequest, NotFound from plexapi.mixins import ( MovieEditMixins, ShowEditMixins, SeasonEditMixins, EpisodeEditMixins, @@ -39,14 +38,13 @@ class Library(PlexObject): key = '/library' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.identifier = data.attrib.get('identifier') self.mediaTagVersion = data.attrib.get('mediaTagVersion') self.title1 = data.attrib.get('title1') self.title2 = data.attrib.get('title2') - self._sectionsByID = {} # cached sections by key - self._sectionsByTitle = {} # cached sections by title + @cached_data_property def _loadSections(self): """ Loads and caches all the library sections. """ key = '/library/sections' @@ -64,15 +62,23 @@ class Library(PlexObject): sectionsByID[section.key] = section sectionsByTitle[section.title.lower().strip()].append(section) - self._sectionsByID = sectionsByID - self._sectionsByTitle = dict(sectionsByTitle) + return sectionsByID, dict(sectionsByTitle) + + @property + def _sectionsByID(self): + """ Returns a dictionary of all library sections by ID. """ + return self._loadSections[0] + + @property + def _sectionsByTitle(self): + """ Returns a dictionary of all library sections by title. """ + return self._loadSections[1] def sections(self): """ Returns a list of all media sections in this library. Library sections may be any of :class:`~plexapi.library.MovieSection`, :class:`~plexapi.library.ShowSection`, :class:`~plexapi.library.MusicSection`, :class:`~plexapi.library.PhotoSection`. """ - self._loadSections() return list(self._sectionsByID.values()) def section(self, title): @@ -87,8 +93,6 @@ class Library(PlexObject): :exc:`~plexapi.exceptions.NotFound`: The library section title is not found on the server. 
""" normalized_title = title.lower().strip() - if not self._sectionsByTitle or normalized_title not in self._sectionsByTitle: - self._loadSections() try: sections = self._sectionsByTitle[normalized_title] except KeyError: @@ -110,8 +114,6 @@ class Library(PlexObject): Raises: :exc:`~plexapi.exceptions.NotFound`: The library section ID is not found on the server. """ - if not self._sectionsByID or sectionID not in self._sectionsByID: - self._loadSections() try: return self._sectionsByID[sectionID] except KeyError: @@ -385,7 +387,9 @@ class Library(PlexObject): if kwargs: prefs_params = {f'prefs[{k}]': v for k, v in kwargs.items()} part += f'&{urlencode(prefs_params)}' - return self._server.query(part, method=self._server._session.post) + data = self._server.query(part, method=self._server._session.post) + self._invalidateCachedProperties() + return data def history(self, maxresults=None, mindate=None): """ Get Play History for all library Sections for the owner. @@ -432,7 +436,7 @@ class LibrarySection(PlexObject): """ def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.agent = data.attrib.get('agent') self.allowSync = utils.cast(bool, data.attrib.get('allowSync')) self.art = data.attrib.get('art') @@ -441,7 +445,6 @@ class LibrarySection(PlexObject): self.filters = utils.cast(bool, data.attrib.get('filters')) self.key = utils.cast(int, data.attrib.get('key')) self.language = data.attrib.get('language') - self.locations = self.listAttrs(data, 'path', etag='Location') self.refreshing = utils.cast(bool, data.attrib.get('refreshing')) self.scanner = data.attrib.get('scanner') self.thumb = data.attrib.get('thumb') @@ -449,14 +452,12 @@ class LibrarySection(PlexObject): self.type = data.attrib.get('type') self.updatedAt = utils.toDatetime(data.attrib.get('updatedAt')) self.uuid = data.attrib.get('uuid') - # Private attrs as we don't want a reload. - self._filterTypes = None - self._fieldTypes = None - self._totalViewSize = None - self._totalDuration = None - self._totalStorage = None - @cached_property + @cached_data_property + def locations(self): + return self.listAttrs(self._data, 'path', etag='Location') + + @cached_data_property def totalSize(self): """ Returns the total number of items in the library for the default library type. """ return self.totalViewSize(includeCollections=False) @@ -464,16 +465,12 @@ class LibrarySection(PlexObject): @property def totalDuration(self): """ Returns the total duration (in milliseconds) of items in the library. """ - if self._totalDuration is None: - self._getTotalDurationStorage() - return self._totalDuration + return self._getTotalDurationStorage[0] @property def totalStorage(self): """ Returns the total storage (in bytes) of items in the library. """ - if self._totalStorage is None: - self._getTotalDurationStorage() - return self._totalStorage + return self._getTotalDurationStorage[1] def __getattribute__(self, attr): # Intercept to call EditFieldMixin and EditTagMixin methods @@ -489,6 +486,7 @@ class LibrarySection(PlexObject): ) return value + @cached_data_property def _getTotalDurationStorage(self): """ Queries the Plex server for the total library duration and storage and caches the values. 
""" data = self._server.query('/media/providers?includeStorage=1') @@ -499,8 +497,10 @@ class LibrarySection(PlexObject): ) directory = next(iter(data.findall(xpath)), None) if directory: - self._totalDuration = utils.cast(int, directory.attrib.get('durationTotal')) - self._totalStorage = utils.cast(int, directory.attrib.get('storageTotal')) + totalDuration = utils.cast(int, directory.attrib.get('durationTotal')) + totalStorage = utils.cast(int, directory.attrib.get('storageTotal')) + return totalDuration, totalStorage + return None, None def totalViewSize(self, libtype=None, includeCollections=True): """ Returns the total number of items in the library for a specified libtype. @@ -531,18 +531,20 @@ class LibrarySection(PlexObject): def delete(self): """ Delete a library section. """ try: - return self._server.query(f'/library/sections/{self.key}', method=self._server._session.delete) + data = self._server.query(f'/library/sections/{self.key}', method=self._server._session.delete) + self._server.library._invalidateCachedProperties() + return data except BadRequest: # pragma: no cover msg = f'Failed to delete library {self.key}' msg += 'You may need to allow this permission in your Plex settings.' log.error(msg) raise - def reload(self): + def _reload(self, **kwargs): """ Reload the data for the library section. """ - self._server.library._loadSections() - newLibrary = self._server.library.sectionByID(self.key) - self.__dict__.update(newLibrary.__dict__) + key = self._initpath + data = self._server.query(key) + self._findAndLoadElem(data, key=str(self.key)) return self def edit(self, agent=None, **kwargs): @@ -871,6 +873,7 @@ class LibrarySection(PlexObject): self._server.query(key, method=self._server._session.delete) return self + @cached_data_property def _loadFilters(self): """ Retrieves and caches the list of :class:`~plexapi.library.FilteringType` and list of :class:`~plexapi.library.FilteringFieldType` for this library section. @@ -880,23 +883,23 @@ class LibrarySection(PlexObject): key = _key.format(key=self.key, filter='all') data = self._server.query(key) - self._filterTypes = self.findItems(data, FilteringType, rtag='Meta') - self._fieldTypes = self.findItems(data, FilteringFieldType, rtag='Meta') + filterTypes = self.findItems(data, FilteringType, rtag='Meta') + fieldTypes = self.findItems(data, FilteringFieldType, rtag='Meta') if self.TYPE != 'photo': # No collections for photo library key = _key.format(key=self.key, filter='collections') data = self._server.query(key) - self._filterTypes.extend(self.findItems(data, FilteringType, rtag='Meta')) + filterTypes.extend(self.findItems(data, FilteringType, rtag='Meta')) # Manually add guid field type, only allowing "is" operator guidFieldType = '' - self._fieldTypes.append(self._manuallyLoadXML(guidFieldType, FilteringFieldType)) + fieldTypes.append(self._manuallyLoadXML(guidFieldType, FilteringFieldType)) + + return filterTypes, fieldTypes def filterTypes(self): """ Returns a list of available :class:`~plexapi.library.FilteringType` for this library section. """ - if self._filterTypes is None: - self._loadFilters() - return self._filterTypes + return self._loadFilters[0] def getFilterType(self, libtype=None): """ Returns a :class:`~plexapi.library.FilteringType` for a specified libtype. @@ -918,9 +921,7 @@ class LibrarySection(PlexObject): def fieldTypes(self): """ Returns a list of available :class:`~plexapi.library.FilteringFieldType` for this library section. 
""" - if self._fieldTypes is None: - self._loadFilters() - return self._fieldTypes + return self._loadFilters[1] def getFieldType(self, fieldType): """ Returns a :class:`~plexapi.library.FilteringFieldType` for a specified fieldType. @@ -1969,7 +1970,7 @@ class MusicSection(LibrarySection, ArtistEditMixins, AlbumEditMixins, TrackEditM def stations(self): """ Returns a list of :class:`~plexapi.playlist.Playlist` stations in this section. """ - return next((hub.items for hub in self.hubs() if hub.context == 'hub.music.stations'), None) + return next((hub._partialItems for hub in self.hubs() if hub.context == 'hub.music.stations'), None) def searchArtists(self, **kwargs): """ Search for an artist. See :func:`~plexapi.library.LibrarySection.search` for usage. """ @@ -2165,7 +2166,6 @@ class LibraryTimeline(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.size = utils.cast(int, data.attrib.get('size')) self.allowSync = utils.cast(bool, data.attrib.get('allowSync')) self.art = data.attrib.get('art') @@ -2194,7 +2194,6 @@ class Location(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.id = utils.cast(int, data.attrib.get('id')) self.path = data.attrib.get('path') @@ -2208,9 +2207,10 @@ class Hub(PlexObject): context (str): The context of the hub. hubKey (str): API URL for these specific hub items. hubIdentifier (str): The identifier of the hub. - items (list): List of items in the hub. + items (list): List of items in the hub (automatically loads all items if more is True). key (str): API URL for the hub. - more (bool): True if there are more items to load (call reload() to fetch all items). + random (bool): True if the items in the hub are randomized. + more (bool): True if there are more items to load (call items to fetch all items). size (int): The number of items in the hub. style (str): The style of the hub. title (str): The title of the hub. @@ -2220,36 +2220,57 @@ class Hub(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.context = data.attrib.get('context') self.hubKey = data.attrib.get('hubKey') self.hubIdentifier = data.attrib.get('hubIdentifier') - self.items = self.findItems(data) self.key = data.attrib.get('key') self.more = utils.cast(bool, data.attrib.get('more')) + self.random = utils.cast(bool, data.attrib.get('random', '0')) self.size = utils.cast(int, data.attrib.get('size')) self.style = data.attrib.get('style') self.title = data.attrib.get('title') self.type = data.attrib.get('type') - self._section = None # cache for self.section def __len__(self): return self.size - def reload(self): - """ Reloads the hub to fetch all items in the hub. """ - if self.more and self.key: - self.items = self.fetchItems(self.key) + @cached_data_property + def _partialItems(self): + """ Cache for partial items. """ + return self.findItems(self._data) + + @cached_data_property + def _items(self): + """ Cache for items. """ + if self.more and self.key: # If there are more items to load, fetch them + items = self.fetchItems(self.key) self.more = False - self.size = len(self.items) + self.size = len(items) + return items + # Otherwise, all the data is in the initial _data XML response + return self._partialItems + + def items(self): + """ Returns a list of all items in the hub. """ + return self._items + + @cached_data_property + def _section(self): + """ Cache for section. 
""" + return self._server.library.sectionByID(self.librarySectionID) def section(self): """ Returns the :class:`~plexapi.library.LibrarySection` this hub belongs to. """ - if self._section is None: - self._section = self._server.library.sectionByID(self.librarySectionID) return self._section + def _reload(self, **kwargs): + """ Reload the data for the hub. """ + key = self._initpath + data = self._server.query(key) + self._findAndLoadElem(data, hubIdentifier=self.hubIdentifier) + return self + class LibraryMediaTag(PlexObject): """ Base class of library media tags. @@ -2279,7 +2300,6 @@ class LibraryMediaTag(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.count = utils.cast(int, data.attrib.get('count')) self.filter = data.attrib.get('filter') self.id = utils.cast(int, data.attrib.get('id')) @@ -2668,22 +2688,25 @@ class FilteringType(PlexObject): return f"<{':'.join([p for p in [self.__class__.__name__, _type] if p])}>" def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.active = utils.cast(bool, data.attrib.get('active', '0')) - self.fields = self.findItems(data, FilteringField) - self.filters = self.findItems(data, FilteringFilter) self.key = data.attrib.get('key') - self.sorts = self.findItems(data, FilteringSort) self.title = data.attrib.get('title') self.type = data.attrib.get('type') self._librarySectionID = self._parent().key - # Add additional manual filters, sorts, and fields which are available - # but not exposed on the Plex server - self.filters += self._manualFilters() - self.sorts += self._manualSorts() - self.fields += self._manualFields() + @cached_data_property + def fields(self): + return self.findItems(self._data, FilteringField) + self._manualFields() + + @cached_data_property + def filters(self): + return self.findItems(self._data, FilteringFilter) + self._manualFilters() + + @cached_data_property + def sorts(self): + return self.findItems(self._data, FilteringSort) + self._manualSorts() def _manualFilters(self): """ Manually add additional filters which are available @@ -2863,7 +2886,7 @@ class FilteringFilter(PlexObject): TAG = 'Filter' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.filter = data.attrib.get('filter') self.filterType = data.attrib.get('filterType') self.key = data.attrib.get('key') @@ -2889,7 +2912,6 @@ class FilteringSort(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.active = utils.cast(bool, data.attrib.get('active', '0')) self.activeDirection = data.attrib.get('activeDirection') self.default = data.attrib.get('default') @@ -2914,7 +2936,6 @@ class FilteringField(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.key = data.attrib.get('key') self.title = data.attrib.get('title') self.type = data.attrib.get('type') @@ -2937,9 +2958,11 @@ class FilteringFieldType(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. 
""" - self._data = data self.type = data.attrib.get('type') - self.operators = self.findItems(data, FilteringOperator) + + @cached_data_property + def operators(self): + return self.findItems(self._data, FilteringOperator) class FilteringOperator(PlexObject): @@ -2976,7 +2999,6 @@ class FilterChoice(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.fastKey = data.attrib.get('fastKey') self.key = data.attrib.get('key') self.thumb = data.attrib.get('thumb') @@ -3006,7 +3028,6 @@ class ManagedHub(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.deletable = utils.cast(bool, data.attrib.get('deletable', True)) self.homeVisibility = data.attrib.get('homeVisibility', 'none') self.identifier = data.attrib.get('identifier') @@ -3020,11 +3041,11 @@ class ManagedHub(PlexObject): parent = self._parent() self.librarySectionID = parent.key if isinstance(parent, LibrarySection) else parent.librarySectionID - def reload(self): + def _reload(self, **kwargs): """ Reload the data for this managed hub. """ key = f'/hubs/sections/{self.librarySectionID}/manage' - hub = self.fetchItem(key, self.__class__, identifier=self.identifier) - self.__dict__.update(hub.__dict__) + data = self._server.query(key) + self._findAndLoadElem(data, identifier=self.identifier) return self def move(self, after=None): @@ -3170,7 +3191,6 @@ class FirstCharacter(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.key = data.attrib.get('key') self.size = data.attrib.get('size') self.title = data.attrib.get('title') @@ -3191,6 +3211,7 @@ class Path(PlexObject): TAG = 'Path' def _loadData(self, data): + """ Load attribute values from Plex XML response. """ self.home = utils.cast(bool, data.attrib.get('home')) self.key = data.attrib.get('key') self.network = utils.cast(bool, data.attrib.get('network')) @@ -3220,6 +3241,7 @@ class File(PlexObject): TAG = 'File' def _loadData(self, data): + """ Load attribute values from Plex XML response. """ self.key = data.attrib.get('key') self.path = data.attrib.get('path') self.title = data.attrib.get('title') @@ -3268,41 +3290,83 @@ class Common(PlexObject): TAG = 'Common' def _loadData(self, data): - self._data = data - self.collections = self.findItems(data, media.Collection) + """ Load attribute values from Plex XML response. 
""" self.contentRating = data.attrib.get('contentRating') - self.countries = self.findItems(data, media.Country) - self.directors = self.findItems(data, media.Director) self.editionTitle = data.attrib.get('editionTitle') - self.fields = self.findItems(data, media.Field) - self.genres = self.findItems(data, media.Genre) self.grandparentRatingKey = utils.cast(int, data.attrib.get('grandparentRatingKey')) self.grandparentTitle = data.attrib.get('grandparentTitle') self.guid = data.attrib.get('guid') - self.guids = self.findItems(data, media.Guid) self.index = utils.cast(int, data.attrib.get('index')) self.key = data.attrib.get('key') - self.labels = self.findItems(data, media.Label) self.mixedFields = data.attrib.get('mixedFields').split(',') - self.moods = self.findItems(data, media.Mood) self.originallyAvailableAt = utils.toDatetime(data.attrib.get('originallyAvailableAt')) self.parentRatingKey = utils.cast(int, data.attrib.get('parentRatingKey')) self.parentTitle = data.attrib.get('parentTitle') - self.producers = self.findItems(data, media.Producer) self.ratingKey = utils.cast(int, data.attrib.get('ratingKey')) - self.ratings = self.findItems(data, media.Rating) - self.roles = self.findItems(data, media.Role) self.studio = data.attrib.get('studio') - self.styles = self.findItems(data, media.Style) self.summary = data.attrib.get('summary') self.tagline = data.attrib.get('tagline') - self.tags = self.findItems(data, media.Tag) self.title = data.attrib.get('title') self.titleSort = data.attrib.get('titleSort') self.type = data.attrib.get('type') - self.writers = self.findItems(data, media.Writer) self.year = utils.cast(int, data.attrib.get('year')) + @cached_data_property + def collections(self): + return self.findItems(self._data, media.Collection) + + @cached_data_property + def countries(self): + return self.findItems(self._data, media.Country) + + @cached_data_property + def directors(self): + return self.findItems(self._data, media.Director) + + @cached_data_property + def fields(self): + return self.findItems(self._data, media.Field) + + @cached_data_property + def genres(self): + return self.findItems(self._data, media.Genre) + + @cached_data_property + def guids(self): + return self.findItems(self._data, media.Guid) + + @cached_data_property + def labels(self): + return self.findItems(self._data, media.Label) + + @cached_data_property + def moods(self): + return self.findItems(self._data, media.Mood) + + @cached_data_property + def producers(self): + return self.findItems(self._data, media.Producer) + + @cached_data_property + def ratings(self): + return self.findItems(self._data, media.Rating) + + @cached_data_property + def roles(self): + return self.findItems(self._data, media.Role) + + @cached_data_property + def styles(self): + return self.findItems(self._data, media.Style) + + @cached_data_property + def tags(self): + return self.findItems(self._data, media.Tag) + + @cached_data_property + def writers(self): + return self.findItems(self._data, media.Writer) + def __repr__(self): return '<%s:%s:%s>' % ( self.__class__.__name__, diff --git a/lib/plexapi/media.py b/lib/plexapi/media.py index 9c6e3115..c1530959 100644 --- a/lib/plexapi/media.py +++ b/lib/plexapi/media.py @@ -1,10 +1,10 @@ # -*- coding: utf-8 -*- -import xml from pathlib import Path from urllib.parse import quote_plus +from xml.etree import ElementTree from plexapi import log, settings, utils -from plexapi.base import PlexObject +from plexapi.base import PlexObject, cached_data_property from plexapi.exceptions 
import BadRequest from plexapi.utils import deprecated @@ -51,7 +51,6 @@ class Media(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.aspectRatio = utils.cast(float, data.attrib.get('aspectRatio')) self.audioChannels = utils.cast(int, data.attrib.get('audioChannels')) self.audioCodec = data.attrib.get('audioCodec') @@ -64,7 +63,6 @@ class Media(PlexObject): self.has64bitOffsets = utils.cast(bool, data.attrib.get('has64bitOffsets')) self.hasVoiceActivity = utils.cast(bool, data.attrib.get('hasVoiceActivity', '0')) self.optimizedForStreaming = utils.cast(bool, data.attrib.get('optimizedForStreaming')) - self.parts = self.findItems(data, MediaPart) self.proxyType = utils.cast(int, data.attrib.get('proxyType')) self.selected = utils.cast(bool, data.attrib.get('selected')) self.target = data.attrib.get('target') @@ -87,6 +85,10 @@ class Media(PlexObject): parent = self._parent() self._parentKey = parent.key + @cached_data_property + def parts(self): + return self.findItems(self._data, MediaPart) + @property def isOptimizedVersion(self): """ Returns True if the media is a Plex optimized version. """ @@ -138,7 +140,6 @@ class MediaPart(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.accessible = utils.cast(bool, data.attrib.get('accessible')) self.audioProfile = data.attrib.get('audioProfile') self.container = data.attrib.get('container') @@ -268,7 +269,6 @@ class MediaPartStream(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.bitrate = utils.cast(int, data.attrib.get('bitrate')) self.codec = data.attrib.get('codec') self.decision = data.attrib.get('decision') @@ -386,6 +386,7 @@ class AudioStream(MediaPartStream): profile (str): The profile of the audio stream. samplingRate (int): The sampling rate of the audio stream (ex: xxx) streamIdentifier (int): The stream identifier of the audio stream. + visualImpaired (bool): True if this is a visually impaired (AD) audio stream. Track_only_attributes: The following attributes are only available for tracks. @@ -413,6 +414,7 @@ class AudioStream(MediaPartStream): self.profile = data.attrib.get('profile') self.samplingRate = utils.cast(int, data.attrib.get('samplingRate')) self.streamIdentifier = utils.cast(int, data.attrib.get('streamIdentifier')) + self.visualImpaired = utils.cast(bool, data.attrib.get('visualImpaired', '0')) # Track only attributes self.albumGain = utils.cast(float, data.attrib.get('albumGain')) @@ -523,6 +525,7 @@ class Session(PlexObject): TAG = 'Session' def _loadData(self, data): + """ Load attribute values from Plex XML response. """ self.id = data.attrib.get('id') self.bandwidth = utils.cast(int, data.attrib.get('bandwidth')) self.location = data.attrib.get('location') @@ -569,7 +572,6 @@ class TranscodeSession(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.audioChannels = utils.cast(int, data.attrib.get('audioChannels')) self.audioCodec = data.attrib.get('audioCodec') self.audioDecision = data.attrib.get('audioDecision') @@ -610,7 +612,7 @@ class TranscodeJob(PlexObject): TAG = 'TranscodeJob' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. 
""" self.generatorID = data.attrib.get('generatorID') self.key = data.attrib.get('key') self.progress = data.attrib.get('progress') @@ -629,7 +631,7 @@ class Optimized(PlexObject): TAG = 'Item' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.id = data.attrib.get('id') self.composite = data.attrib.get('composite') self.title = data.attrib.get('title') @@ -667,7 +669,7 @@ class Conversion(PlexObject): TAG = 'Video' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.addedAt = data.attrib.get('addedAt') self.art = data.attrib.get('art') self.chapterSource = data.attrib.get('chapterSource') @@ -743,7 +745,6 @@ class MediaTag(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.filter = data.attrib.get('filter') self.id = utils.cast(int, data.attrib.get('id')) self.key = data.attrib.get('key') @@ -954,7 +955,6 @@ class Guid(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.id = data.attrib.get('id') @@ -972,7 +972,6 @@ class Image(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.alt = data.attrib.get('alt') self.type = data.attrib.get('type') self.url = data.attrib.get('url') @@ -994,7 +993,6 @@ class Rating(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.image = data.attrib.get('image') self.type = data.attrib.get('type') self.value = utils.cast(float, data.attrib.get('value')) @@ -1017,7 +1015,7 @@ class Review(PlexObject): TAG = 'Review' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.filter = data.attrib.get('filter') self.id = utils.cast(int, data.attrib.get('id', 0)) self.image = data.attrib.get('image') @@ -1042,7 +1040,6 @@ class UltraBlurColors(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.bottomLeft = data.attrib.get('bottomLeft') self.bottomRight = data.attrib.get('bottomRight') self.topLeft = data.attrib.get('topLeft') @@ -1063,7 +1060,7 @@ class BaseResource(PlexObject): """ def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.key = data.attrib.get('key') self.provider = data.attrib.get('provider') self.ratingKey = data.attrib.get('ratingKey') @@ -1075,7 +1072,7 @@ class BaseResource(PlexObject): data = f'{key}?url={quote_plus(self.ratingKey)}' try: self._server.query(data, method=self._server._session.put) - except xml.etree.ElementTree.ParseError: + except ElementTree.ParseError: pass @property @@ -1138,7 +1135,7 @@ class Chapter(PlexObject): return f"<{':'.join([self.__class__.__name__, name, offsets])}>" def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.end = utils.cast(int, data.attrib.get('endTimeOffset')) self.filter = data.attrib.get('filter') self.id = utils.cast(int, data.attrib.get('id', 0)) @@ -1172,7 +1169,7 @@ class Marker(PlexObject): return f"<{':'.join([self.__class__.__name__, name, offsets])}>" def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. 
""" self.end = utils.cast(int, data.attrib.get('endTimeOffset')) self.final = utils.cast(bool, data.attrib.get('final')) self.id = utils.cast(int, data.attrib.get('id')) @@ -1206,7 +1203,7 @@ class Field(PlexObject): TAG = 'Field' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.locked = utils.cast(bool, data.attrib.get('locked')) self.name = data.attrib.get('name') @@ -1226,7 +1223,7 @@ class SearchResult(PlexObject): return f"<{':'.join([p for p in [self.__class__.__name__, name, score] if p])}>" def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.guid = data.attrib.get('guid') self.lifespanEnded = data.attrib.get('lifespanEnded') self.name = data.attrib.get('name') @@ -1248,7 +1245,7 @@ class Agent(PlexObject): return f"<{':'.join([p for p in [self.__class__.__name__, uid] if p])}>" def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.hasAttribution = data.attrib.get('hasAttribution') self.hasPrefs = data.attrib.get('hasPrefs') self.identifier = data.attrib.get('identifier') @@ -1256,12 +1253,17 @@ class Agent(PlexObject): self.primary = data.attrib.get('primary') self.shortIdentifier = self.identifier.rsplit('.', 1)[1] + @cached_data_property + def languageCodes(self): if 'mediaType' in self._initpath: - self.languageCodes = self.listAttrs(data, 'code', etag='Language') - self.mediaTypes = [] - else: - self.languageCodes = [] - self.mediaTypes = self.findItems(data, cls=AgentMediaType) + return self.listAttrs(self._data, 'code', etag='Language') + return [] + + @cached_data_property + def mediaTypes(self): + if 'mediaType' not in self._initpath: + return self.findItems(self._data, cls=AgentMediaType) + return [] @property @deprecated('use "languageCodes" instead') @@ -1291,10 +1293,14 @@ class AgentMediaType(Agent): return f"<{':'.join([p for p in [self.__class__.__name__, uid] if p])}>" def _loadData(self, data): - self.languageCodes = self.listAttrs(data, 'code', etag='Language') + """ Load attribute values from Plex XML response. """ self.mediaType = utils.cast(int, data.attrib.get('mediaType')) self.name = data.attrib.get('name') + @cached_data_property + def languageCodes(self): + return self.listAttrs(self._data, 'code', etag='Language') + @property @deprecated('use "languageCodes" instead') def languageCode(self): @@ -1325,7 +1331,7 @@ class Availability(PlexObject): return f'<{self.__class__.__name__}:{self.platform}:{self.offerType}>' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. 
""" self.country = data.attrib.get('country') self.offerType = data.attrib.get('offerType') self.platform = data.attrib.get('platform') diff --git a/lib/plexapi/myplex.py b/lib/plexapi/myplex.py index 448a2649..411d5618 100644 --- a/lib/plexapi/myplex.py +++ b/lib/plexapi/myplex.py @@ -4,13 +4,12 @@ import html import threading import time from urllib.parse import parse_qsl, urlencode, urlsplit, urlunsplit -from xml.etree import ElementTree import requests from plexapi import (BASE_HEADERS, CONFIG, TIMEOUT, X_PLEX_ENABLE_FAST_CONNECT, X_PLEX_IDENTIFIER, log, logfilter, utils) -from plexapi.base import PlexObject +from plexapi.base import PlexObject, cached_data_property from plexapi.client import PlexClient from plexapi.exceptions import BadRequest, NotFound, Unauthorized, TwoFactorRequired from plexapi.library import LibrarySection @@ -144,7 +143,6 @@ class MyPlexAccount(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self._token = logfilter.add_secret(data.attrib.get('authToken')) self._webhooks = [] @@ -185,7 +183,6 @@ class MyPlexAccount(PlexObject): subscription = data.find('subscription') self.subscriptionActive = utils.cast(bool, subscription.attrib.get('active')) self.subscriptionDescription = data.attrib.get('subscriptionDescription') - self.subscriptionFeatures = self.listAttrs(subscription, 'id', rtag='features', etag='feature') self.subscriptionPaymentService = subscription.attrib.get('paymentService') self.subscriptionPlan = subscription.attrib.get('plan') self.subscriptionStatus = subscription.attrib.get('status') @@ -201,21 +198,31 @@ class MyPlexAccount(PlexObject): self.profileDefaultSubtitleAccessibility = utils.cast(int, profile.attrib.get('defaultSubtitleAccessibility')) self.profileDefaultSubtitleForces = utils.cast(int, profile.attrib.get('defaultSubtitleForces')) - self.entitlements = self.listAttrs(data, 'id', rtag='entitlements', etag='entitlement') - self.roles = self.listAttrs(data, 'id', rtag='roles', etag='role') - # TODO: Fetch missing MyPlexAccount services self.services = None + @cached_data_property + def subscriptionFeatures(self): + subscription = self._data.find('subscription') + return self.listAttrs(subscription, 'id', rtag='features', etag='feature') + + @cached_data_property + def entitlements(self): + return self.listAttrs(self._data, 'id', rtag='entitlements', etag='entitlement') + + @cached_data_property + def roles(self): + return self.listAttrs(self._data, 'id', rtag='roles', etag='role') + @property def authenticationToken(self): """ Returns the authentication token for the account. Alias for ``authToken``. """ return self.authToken - def _reload(self, key=None, **kwargs): + def _reload(self, **kwargs): """ Perform the actual reload. """ data = self.query(self.key) - self._loadData(data) + self._invalidateCacheAndLoadData(data) return self def _headers(self, **kwargs): @@ -250,8 +257,7 @@ class MyPlexAccount(PlexObject): return response.json() elif 'text/plain' in response.headers.get('Content-Type', ''): return response.text.strip() - data = utils.cleanXMLString(response.text).encode('utf8') - return ElementTree.fromstring(data) if data.strip() else None + return utils.parseXMLString(response.text) def ping(self): """ Ping the Plex.tv API. @@ -1206,7 +1212,6 @@ class MyPlexUser(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. 
""" - self._data = data self.friend = self._initpath == self.key self.allowCameraUpload = utils.cast(bool, data.attrib.get('allowCameraUpload')) self.allowChannels = utils.cast(bool, data.attrib.get('allowChannels')) @@ -1225,10 +1230,13 @@ class MyPlexUser(PlexObject): self.thumb = data.attrib.get('thumb') self.title = data.attrib.get('title', '') self.username = data.attrib.get('username', '') - self.servers = self.findItems(data, MyPlexServerShare) for server in self.servers: server.accountID = self.id + @cached_data_property + def servers(self): + return self.findItems(self._data, MyPlexServerShare) + def get_token(self, machineIdentifier): try: for item in self._server.query(self._server.FRIENDINVITE.format(machineId=machineIdentifier)): @@ -1283,7 +1291,6 @@ class MyPlexInvite(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.createdAt = utils.toDatetime(data.attrib.get('createdAt')) self.email = data.attrib.get('email') self.friend = utils.cast(bool, data.attrib.get('friend')) @@ -1291,12 +1298,15 @@ class MyPlexInvite(PlexObject): self.home = utils.cast(bool, data.attrib.get('home')) self.id = utils.cast(int, data.attrib.get('id')) self.server = utils.cast(bool, data.attrib.get('server')) - self.servers = self.findItems(data, MyPlexServerShare) self.thumb = data.attrib.get('thumb') self.username = data.attrib.get('username', '') for server in self.servers: server.accountID = self.id + @cached_data_property + def servers(self): + return self.findItems(self._data, MyPlexServerShare) + class Section(PlexObject): """ This refers to a shared section. The raw xml for the data presented here @@ -1314,7 +1324,7 @@ class Section(PlexObject): TAG = 'Section' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.id = utils.cast(int, data.attrib.get('id')) self.key = utils.cast(int, data.attrib.get('key')) self.shared = utils.cast(bool, data.attrib.get('shared', '0')) @@ -1353,7 +1363,6 @@ class MyPlexServerShare(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.id = utils.cast(int, data.attrib.get('id')) self.accountID = utils.cast(int, data.attrib.get('accountID')) self.serverId = utils.cast(int, data.attrib.get('serverId')) @@ -1437,10 +1446,9 @@ class MyPlexResource(PlexObject): DEFAULT_SCHEME_ORDER = ['https', 'http'] def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.accessToken = logfilter.add_secret(data.attrib.get('accessToken')) self.clientIdentifier = data.attrib.get('clientIdentifier') - self.connections = self.findItems(data, ResourceConnection, rtag='connections') self.createdAt = utils.toDatetime(data.attrib.get('createdAt'), "%Y-%m-%dT%H:%M:%SZ") self.device = data.attrib.get('device') self.dnsRebindingProtection = utils.cast(bool, data.attrib.get('dnsRebindingProtection')) @@ -1462,6 +1470,10 @@ class MyPlexResource(PlexObject): self.sourceTitle = data.attrib.get('sourceTitle') self.synced = utils.cast(bool, data.attrib.get('synced')) + @cached_data_property + def connections(self): + return self.findItems(self._data, ResourceConnection, rtag='connections') + def preferred_connections( self, ssl=None, @@ -1555,7 +1567,7 @@ class ResourceConnection(PlexObject): TAG = 'connection' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. 
""" self.address = data.attrib.get('address') self.ipv6 = utils.cast(bool, data.attrib.get('IPv6')) self.local = utils.cast(bool, data.attrib.get('local')) @@ -1598,7 +1610,7 @@ class MyPlexDevice(PlexObject): key = 'https://plex.tv/devices.xml' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.name = data.attrib.get('name') self.publicAddress = data.attrib.get('publicAddress') self.product = data.attrib.get('product') @@ -1617,7 +1629,10 @@ class MyPlexDevice(PlexObject): self.screenDensity = data.attrib.get('screenDensity') self.createdAt = utils.toDatetime(data.attrib.get('createdAt')) self.lastSeenAt = utils.toDatetime(data.attrib.get('lastSeenAt')) - self.connections = self.listAttrs(data, 'uri', etag='Connection') + + @cached_data_property + def connections(self): + return self.listAttrs(self._data, 'uri', etag='Connection') def connect(self, timeout=None): """ Returns a new :class:`~plexapi.client.PlexClient` or :class:`~plexapi.server.PlexServer` @@ -1879,8 +1894,7 @@ class MyPlexPinLogin: codename = codes.get(response.status_code)[0] errtext = response.text.replace('\n', ' ') raise BadRequest(f'({response.status_code}) {codename} {response.url}; {errtext}') - data = response.text.encode('utf8') - return ElementTree.fromstring(data) if data.strip() else None + return utils.parseXMLString(response.text) def _connect(cls, url, token, session, timeout, results, i, job_is_done_event=None): @@ -1939,6 +1953,7 @@ class AccountOptOut(PlexObject): CHOICES = {'opt_in', 'opt_out', 'opt_out_managed'} def _loadData(self, data): + """ Load attribute values from Plex XML response. """ self.key = data.attrib.get('key') self.value = data.attrib.get('value') @@ -1997,6 +2012,7 @@ class UserState(PlexObject): return f'<{self.__class__.__name__}:{self.ratingKey}>' def _loadData(self, data): + """ Load attribute values from Plex XML response. """ self.lastViewedAt = utils.toDatetime(data.attrib.get('lastViewedAt')) self.ratingKey = data.attrib.get('ratingKey') self.type = data.attrib.get('type') @@ -2026,7 +2042,7 @@ class GeoLocation(PlexObject): TAG = 'location' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. 
""" self.city = data.attrib.get('city') self.code = data.attrib.get('code') self.continentCode = data.attrib.get('continent_code') diff --git a/lib/plexapi/photo.py b/lib/plexapi/photo.py index 4347f31a..e7c7239e 100644 --- a/lib/plexapi/photo.py +++ b/lib/plexapi/photo.py @@ -4,7 +4,7 @@ from pathlib import Path from urllib.parse import quote_plus from plexapi import media, utils, video -from plexapi.base import Playable, PlexPartialObject, PlexSession +from plexapi.base import Playable, PlexPartialObject, PlexSession, cached_data_property from plexapi.exceptions import BadRequest from plexapi.mixins import ( RatingMixin, @@ -56,9 +56,7 @@ class Photoalbum( self.addedAt = utils.toDatetime(data.attrib.get('addedAt')) self.art = data.attrib.get('art') self.composite = data.attrib.get('composite') - self.fields = self.findItems(data, media.Field) self.guid = data.attrib.get('guid') - self.images = self.findItems(data, media.Image) self.index = utils.cast(int, data.attrib.get('index')) self.key = data.attrib.get('key', '').replace('/children', '') # FIX_BUG_50 self.lastRatedAt = utils.toDatetime(data.attrib.get('lastRatedAt')) @@ -75,6 +73,14 @@ class Photoalbum( self.updatedAt = utils.toDatetime(data.attrib.get('updatedAt')) self.userRating = utils.cast(float, data.attrib.get('userRating')) + @cached_data_property + def fields(self): + return self.findItems(self._data, media.Field) + + @cached_data_property + def images(self): + return self.findItems(self._data, media.Image) + def album(self, title): """ Returns the :class:`~plexapi.photo.Photoalbum` that matches the specified title. @@ -205,9 +211,7 @@ class Photo( self.addedAt = utils.toDatetime(data.attrib.get('addedAt')) self.createdAtAccuracy = data.attrib.get('createdAtAccuracy') self.createdAtTZOffset = utils.cast(int, data.attrib.get('createdAtTZOffset')) - self.fields = self.findItems(data, media.Field) self.guid = data.attrib.get('guid') - self.images = self.findItems(data, media.Image) self.index = utils.cast(int, data.attrib.get('index')) self.key = data.attrib.get('key', '') self.lastRatedAt = utils.toDatetime(data.attrib.get('lastRatedAt')) @@ -215,7 +219,6 @@ class Photo( self.librarySectionKey = data.attrib.get('librarySectionKey') self.librarySectionTitle = data.attrib.get('librarySectionTitle') self.listType = 'photo' - self.media = self.findItems(data, media.Media) self.originallyAvailableAt = utils.toDatetime(data.attrib.get('originallyAvailableAt'), '%Y-%m-%d') self.parentGuid = data.attrib.get('parentGuid') self.parentIndex = utils.cast(int, data.attrib.get('parentIndex')) @@ -226,7 +229,6 @@ class Photo( self.ratingKey = utils.cast(int, data.attrib.get('ratingKey')) self.sourceURI = data.attrib.get('source') # remote playlist item self.summary = data.attrib.get('summary') - self.tags = self.findItems(data, media.Tag) self.thumb = data.attrib.get('thumb') self.title = data.attrib.get('title') self.titleSort = data.attrib.get('titleSort', self.title) @@ -235,6 +237,22 @@ class Photo( self.userRating = utils.cast(float, data.attrib.get('userRating')) self.year = utils.cast(int, data.attrib.get('year')) + @cached_data_property + def fields(self): + return self.findItems(self._data, media.Field) + + @cached_data_property + def images(self): + return self.findItems(self._data, media.Image) + + @cached_data_property + def media(self): + return self.findItems(self._data, media.Media) + + @cached_data_property + def tags(self): + return self.findItems(self._data, media.Tag) + def _prettyfilename(self): """ Returns a filename 
for use in download. """ if self.parentTitle: diff --git a/lib/plexapi/playlist.py b/lib/plexapi/playlist.py index e2c4da63..0fc79bf5 100644 --- a/lib/plexapi/playlist.py +++ b/lib/plexapi/playlist.py @@ -5,7 +5,7 @@ from pathlib import Path from urllib.parse import quote_plus, unquote from plexapi import media, utils -from plexapi.base import Playable, PlexPartialObject +from plexapi.base import Playable, PlexPartialObject, cached_data_property from plexapi.exceptions import BadRequest, NotFound, Unsupported from plexapi.library import LibrarySection, MusicSection from plexapi.mixins import SmartFilterMixin, ArtMixin, PosterMixin, PlaylistEditMixins @@ -60,7 +60,6 @@ class Playlist( self.content = data.attrib.get('content') self.duration = utils.cast(int, data.attrib.get('duration')) self.durationInSeconds = utils.cast(int, data.attrib.get('durationInSeconds')) - self.fields = self.findItems(data, media.Field) self.guid = data.attrib.get('guid') self.icon = data.attrib.get('icon') self.key = data.attrib.get('key', '').replace('/items', '') # FIX_BUG_50 @@ -77,9 +76,10 @@ class Playlist( self.titleSort = data.attrib.get('titleSort', self.title) self.type = data.attrib.get('type') self.updatedAt = utils.toDatetime(data.attrib.get('updatedAt')) - self._items = None # cache for self.items - self._section = None # cache for self.section - self._filters = None # cache for self.filters + + @cached_data_property + def fields(self): + return self.findItems(self._data, media.Field) def __len__(self): # pragma: no cover return len(self.items()) @@ -133,15 +133,36 @@ class Playlist( return _item.playlistItemID raise NotFound(f'Item with title "{item.title}" not found in the playlist') + @cached_data_property + def _filters(self): + """ Cache for filters. """ + return self._parseFilters(self.content) + def filters(self): """ Returns the search filter dict for smart playlist. The filter dict be passed back into :func:`~plexapi.library.LibrarySection.search` to get the list of items. """ - if self.smart and self._filters is None: - self._filters = self._parseFilters(self.content) return self._filters + @cached_data_property + def _section(self): + """ Cache for section. """ + if not self.smart: + raise BadRequest('Regular playlists are not associated with a library.') + + # Try to parse the library section from the content URI string + match = re.search(r'/library/sections/(\d+)/all', unquote(self.content or '')) + if match: + sectionKey = int(match.group(1)) + return self._server.library.sectionByID(sectionKey) + + # Try to get the library section from the first item in the playlist + if self.items(): + return self.items()[0].section() + + raise Unsupported('Unable to determine the library section') + def section(self): """ Returns the :class:`~plexapi.library.LibrarySection` this smart playlist belongs to. @@ -149,24 +170,6 @@ class Playlist( :class:`plexapi.exceptions.BadRequest`: When trying to get the section for a regular playlist. :class:`plexapi.exceptions.Unsupported`: When unable to determine the library section. 
""" - if not self.smart: - raise BadRequest('Regular playlists are not associated with a library.') - - if self._section is None: - # Try to parse the library section from the content URI string - match = re.search(r'/library/sections/(\d+)/all', unquote(self.content or '')) - if match: - sectionKey = int(match.group(1)) - self._section = self._server.library.sectionByID(sectionKey) - return self._section - - # Try to get the library section from the first item in the playlist - if self.items(): - self._section = self.items()[0].section() - return self._section - - raise Unsupported('Unable to determine the library section') - return self._section def item(self, title): @@ -183,28 +186,32 @@ class Playlist( return item raise NotFound(f'Item with title "{title}" not found in the playlist') - def items(self): - """ Returns a list of all items in the playlist. """ + @cached_data_property + def _items(self): + """ Cache for items. """ if self.radio: return [] - if self._items is None: - key = f'{self.key}/items' - items = self.fetchItems(key) - # Cache server connections to avoid reconnecting for each item - _servers = {} - for item in items: - if item.sourceURI: - serverID = item.sourceURI.split('/')[2] - if serverID not in _servers: - try: - _servers[serverID] = self._server.myPlexAccount().resource(serverID).connect() - except NotFound: - # Override the server connection with None if the server is not found - _servers[serverID] = None - item._server = _servers[serverID] + key = f'{self.key}/items' + items = self.fetchItems(key) - self._items = items + # Cache server connections to avoid reconnecting for each item + _servers = {} + for item in items: + if item.sourceURI: + serverID = item.sourceURI.split('/')[2] + if serverID not in _servers: + try: + _servers[serverID] = self._server.myPlexAccount().resource(serverID).connect() + except NotFound: + # Override the server connection with None if the server is not found + _servers[serverID] = None + item._server = _servers[serverID] + + return items + + def items(self): + """ Returns a list of all items in the playlist. """ return self._items def get(self, title): diff --git a/lib/plexapi/playqueue.py b/lib/plexapi/playqueue.py index 9835c0dd..8875ef07 100644 --- a/lib/plexapi/playqueue.py +++ b/lib/plexapi/playqueue.py @@ -2,7 +2,7 @@ from urllib.parse import quote_plus from plexapi import utils -from plexapi.base import PlexObject +from plexapi.base import PlexObject, cached_data_property from plexapi.exceptions import BadRequest @@ -36,7 +36,7 @@ class PlayQueue(PlexObject): TYPE = "playqueue" def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. 
""" self.identifier = data.attrib.get("identifier") self.mediaTagPrefix = data.attrib.get("mediaTagPrefix") self.mediaTagVersion = utils.cast(int, data.attrib.get("mediaTagVersion")) @@ -62,9 +62,12 @@ class PlayQueue(PlexObject): ) self.playQueueVersion = utils.cast(int, data.attrib.get("playQueueVersion")) self.size = utils.cast(int, data.attrib.get("size", 0)) - self.items = self.findItems(data) self.selectedItem = self[self.playQueueSelectedItemOffset] + @cached_data_property + def items(self): + return self.findItems(self._data) + def __getitem__(self, key): if not self.items: return None @@ -254,7 +257,7 @@ class PlayQueue(PlexObject): path = f"/playQueues/{self.playQueueID}{utils.joinArgs(args)}" data = self._server.query(path, method=self._server._session.put) - self._loadData(data) + self._invalidateCacheAndLoadData(data) return self def moveItem(self, item, after=None, refresh=True): @@ -283,7 +286,7 @@ class PlayQueue(PlexObject): path = f"/playQueues/{self.playQueueID}/items/{item.playQueueItemID}/move{utils.joinArgs(args)}" data = self._server.query(path, method=self._server._session.put) - self._loadData(data) + self._invalidateCacheAndLoadData(data) return self def removeItem(self, item, refresh=True): @@ -301,19 +304,19 @@ class PlayQueue(PlexObject): path = f"/playQueues/{self.playQueueID}/items/{item.playQueueItemID}" data = self._server.query(path, method=self._server._session.delete) - self._loadData(data) + self._invalidateCacheAndLoadData(data) return self def clear(self): """Remove all items from the PlayQueue.""" path = f"/playQueues/{self.playQueueID}/items" data = self._server.query(path, method=self._server._session.delete) - self._loadData(data) + self._invalidateCacheAndLoadData(data) return self def refresh(self): """Refresh the PlayQueue from the Plex server.""" path = f"/playQueues/{self.playQueueID}" data = self._server.query(path, method=self._server._session.get) - self._loadData(data) + self._invalidateCacheAndLoadData(data) return self diff --git a/lib/plexapi/server.py b/lib/plexapi/server.py index 8cd110d8..239036c4 100644 --- a/lib/plexapi/server.py +++ b/lib/plexapi/server.py @@ -1,15 +1,13 @@ # -*- coding: utf-8 -*- import os -from functools import cached_property from urllib.parse import urlencode -from xml.etree import ElementTree import requests from plexapi import BASE_HEADERS, CONFIG, TIMEOUT, log, logfilter from plexapi import utils from plexapi.alert import AlertListener -from plexapi.base import PlexObject +from plexapi.base import PlexObject, cached_data_property from plexapi.client import PlexClient from plexapi.collection import Collection from plexapi.exceptions import BadRequest, NotFound, Unauthorized @@ -110,15 +108,11 @@ class PlexServer(PlexObject): self._showSecrets = CONFIG.get('log.show_secrets', '').lower() == 'true' self._session = session or requests.Session() self._timeout = timeout or TIMEOUT - self._myPlexAccount = None # cached myPlexAccount - self._systemAccounts = None # cached list of SystemAccount - self._systemDevices = None # cached list of SystemDevice data = self.query(self.key, timeout=self._timeout) super(PlexServer, self).__init__(self, data, self.key) def _loadData(self, data): """ Load attribute values from Plex XML response. 
""" - self._data = data self.allowCameraUpload = utils.cast(bool, data.attrib.get('allowCameraUpload')) self.allowChannelAccess = utils.cast(bool, data.attrib.get('allowChannelAccess')) self.allowMediaDeletion = utils.cast(bool, data.attrib.get('allowMediaDeletion')) @@ -172,7 +166,7 @@ class PlexServer(PlexObject): def _uriRoot(self): return f'server://{self.machineIdentifier}/com.plexapp.plugins.library' - @cached_property + @cached_data_property def library(self): """ Library to browse or search your media. """ try: @@ -183,7 +177,7 @@ class PlexServer(PlexObject): data = self.query('/library/sections/') return Library(self, data) - @cached_property + @cached_data_property def settings(self): """ Returns a list of all server settings. """ data = self.query(Settings.key) @@ -276,11 +270,14 @@ class PlexServer(PlexObject): timeout = self._timeout return PlexServer(self._baseurl, token=userToken, session=session, timeout=timeout) + @cached_data_property + def _systemAccounts(self): + """ Cache for systemAccounts. """ + key = '/accounts' + return self.fetchItems(key, SystemAccount) + def systemAccounts(self): """ Returns a list of :class:`~plexapi.server.SystemAccount` objects this server contains. """ - if self._systemAccounts is None: - key = '/accounts' - self._systemAccounts = self.fetchItems(key, SystemAccount) return self._systemAccounts def systemAccount(self, accountID): @@ -294,11 +291,14 @@ class PlexServer(PlexObject): except StopIteration: raise NotFound(f'Unknown account with accountID={accountID}') from None + @cached_data_property + def _systemDevices(self): + """ Cache for systemDevices. """ + key = '/devices' + return self.fetchItems(key, SystemDevice) + def systemDevices(self): """ Returns a list of :class:`~plexapi.server.SystemDevice` objects this server contains. """ - if self._systemDevices is None: - key = '/devices' - self._systemDevices = self.fetchItems(key, SystemDevice) return self._systemDevices def systemDevice(self, deviceID): @@ -312,21 +312,24 @@ class PlexServer(PlexObject): except StopIteration: raise NotFound(f'Unknown device with deviceID={deviceID}') from None + @cached_data_property + def _myPlexAccount(self): + """ Cache for myPlexAccount. """ + from plexapi.myplex import MyPlexAccount + return MyPlexAccount(token=self._token, session=self._session) + def myPlexAccount(self): """ Returns a :class:`~plexapi.myplex.MyPlexAccount` object using the same token to access this server. If you are not the owner of this PlexServer you're likely to receive an authentication error calling this. """ - if self._myPlexAccount is None: - from plexapi.myplex import MyPlexAccount - self._myPlexAccount = MyPlexAccount(token=self._token, session=self._session) return self._myPlexAccount def _myPlexClientPorts(self): """ Sometimes the PlexServer does not properly advertise port numbers required to connect. This attempts to look up device port number from plex.tv. See issue #126: Make PlexServer.clients() more user friendly. 
- https://github.com/pkkid/python-plexapi/issues/126 + https://github.com/pushingkarmaorg/python-plexapi/issues/126 """ try: ports = {} @@ -768,8 +771,7 @@ class PlexServer(PlexObject): raise NotFound(message) else: raise BadRequest(message) - data = utils.cleanXMLString(response.text).encode('utf8') - return ElementTree.fromstring(data) if data.strip() else None + return utils.parseXMLString(response.text) def search(self, query, mediatype=None, limit=None, sectionId=None): """ Returns a list of media items or filter categories from the resulting @@ -804,9 +806,9 @@ class PlexServer(PlexObject): for hub in self.fetchItems(key, Hub): if mediatype: if hub.type == mediatype: - return hub.items + return hub._partialItems else: - results += hub.items + results += hub._partialItems return results def continueWatching(self): @@ -1093,7 +1095,7 @@ class Account(PlexObject): key = '/myplex/account' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.authToken = data.attrib.get('authToken') self.username = data.attrib.get('username') self.mappingState = data.attrib.get('mappingState') @@ -1114,7 +1116,7 @@ class Activity(PlexObject): key = '/activities' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.cancellable = utils.cast(bool, data.attrib.get('cancellable')) self.progress = utils.cast(int, data.attrib.get('progress')) self.title = data.attrib.get('title') @@ -1129,6 +1131,7 @@ class Release(PlexObject): key = '/updater/status' def _loadData(self, data): + """ Load attribute values from Plex XML response. """ self.download_key = data.attrib.get('key') self.version = data.attrib.get('version') self.added = data.attrib.get('added') @@ -1154,7 +1157,7 @@ class SystemAccount(PlexObject): TAG = 'Account' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.autoSelectAudio = utils.cast(bool, data.attrib.get('autoSelectAudio')) self.defaultAudioLanguage = data.attrib.get('defaultAudioLanguage') self.defaultSubtitleLanguage = data.attrib.get('defaultSubtitleLanguage') @@ -1183,7 +1186,7 @@ class SystemDevice(PlexObject): TAG = 'Device' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.clientIdentifier = data.attrib.get('clientIdentifier') self.createdAt = utils.toDatetime(data.attrib.get('createdAt')) self.id = utils.cast(int, data.attrib.get('id')) @@ -1209,7 +1212,7 @@ class StatisticsBandwidth(PlexObject): TAG = 'StatisticsBandwidth' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.accountID = utils.cast(int, data.attrib.get('accountID')) self.at = utils.toDatetime(data.attrib.get('at')) self.bytes = utils.cast(int, data.attrib.get('bytes')) @@ -1251,7 +1254,7 @@ class StatisticsResources(PlexObject): TAG = 'StatisticsResources' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.at = utils.toDatetime(data.attrib.get('at')) self.hostCpuUtilization = utils.cast(float, data.attrib.get('hostCpuUtilization')) self.hostMemoryUtilization = utils.cast(float, data.attrib.get('hostMemoryUtilization')) @@ -1279,7 +1282,7 @@ class ButlerTask(PlexObject): TAG = 'ButlerTask' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. 
""" self.description = data.attrib.get('description') self.enabled = utils.cast(bool, data.attrib.get('enabled')) self.interval = utils.cast(int, data.attrib.get('interval')) @@ -1301,7 +1304,7 @@ class Identity(PlexObject): return f"<{self.__class__.__name__}:{self.machineIdentifier}>" def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.claimed = utils.cast(bool, data.attrib.get('claimed')) self.machineIdentifier = data.attrib.get('machineIdentifier') self.version = data.attrib.get('version') diff --git a/lib/plexapi/settings.py b/lib/plexapi/settings.py index c191e368..4e016e32 100644 --- a/lib/plexapi/settings.py +++ b/lib/plexapi/settings.py @@ -34,11 +34,10 @@ class Settings(PlexObject): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data for elem in data: id = utils.lowerFirst(elem.attrib['id']) if id in self._settings: - self._settings[id]._loadData(elem) + self._settings[id]._invalidateCacheAndLoadData(elem) continue self._settings[id] = Setting(self._server, elem, self._initpath) diff --git a/lib/plexapi/sonos.py b/lib/plexapi/sonos.py index 8f1295f4..1b61fd56 100644 --- a/lib/plexapi/sonos.py +++ b/lib/plexapi/sonos.py @@ -47,7 +47,6 @@ class PlexSonosClient(PlexClient): """ def __init__(self, account, data, timeout=None): - self._data = data self.deviceClass = data.attrib.get("deviceClass") self.machineIdentifier = data.attrib.get("machineIdentifier") self.product = data.attrib.get("product") diff --git a/lib/plexapi/sync.py b/lib/plexapi/sync.py index f57e89d9..3b00653d 100644 --- a/lib/plexapi/sync.py +++ b/lib/plexapi/sync.py @@ -63,7 +63,7 @@ class SyncItem(PlexObject): self.clientIdentifier = clientIdentifier def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.id = plexapi.utils.cast(int, data.attrib.get('id')) self.version = plexapi.utils.cast(int, data.attrib.get('version')) self.rootTitle = data.attrib.get('rootTitle') @@ -118,7 +118,7 @@ class SyncList(PlexObject): TAG = 'SyncList' def _loadData(self, data): - self._data = data + """ Load attribute values from Plex XML response. """ self.clientId = data.attrib.get('clientIdentifier') self.items = [] diff --git a/lib/plexapi/utils.py b/lib/plexapi/utils.py index dd1cfc9c..bbff6a8e 100644 --- a/lib/plexapi/utils.py +++ b/lib/plexapi/utils.py @@ -17,12 +17,12 @@ from getpass import getpass from hashlib import sha1 from threading import Event, Thread from urllib.parse import quote +from xml.etree import ElementTree import requests from requests.status_codes import _codes as codes from plexapi.exceptions import BadRequest, NotFound, Unauthorized - try: from tqdm import tqdm except ImportError: @@ -718,3 +718,14 @@ _illegal_XML_re = re.compile(fr'[{"".join(_illegal_XML_ranges)}]') def cleanXMLString(s): return _illegal_XML_re.sub('', s) + + +def parseXMLString(s: str): + """ Parse an XML string and return an ElementTree object. 
""" + if not s.strip(): + return None + try: # Attempt to parse the string as-is without cleaning (which is expensive) + return ElementTree.fromstring(s.encode('utf-8')) + except ElementTree.ParseError: # If it fails, clean the string and try again + cleaned_s = cleanXMLString(s).encode('utf-8') + return ElementTree.fromstring(cleaned_s) if cleaned_s.strip() else None diff --git a/lib/plexapi/video.py b/lib/plexapi/video.py index 9e4201b8..597bbca7 100644 --- a/lib/plexapi/video.py +++ b/lib/plexapi/video.py @@ -1,11 +1,10 @@ # -*- coding: utf-8 -*- import os -from functools import cached_property from pathlib import Path from urllib.parse import quote_plus from plexapi import media, utils -from plexapi.base import Playable, PlexPartialObject, PlexHistory, PlexSession +from plexapi.base import Playable, PlexPartialObject, PlexHistory, PlexSession, cached_data_property from plexapi.exceptions import BadRequest from plexapi.mixins import ( AdvancedSettingsMixin, SplitMergeMixin, UnmatchMatchMixin, ExtrasMixin, HubsMixin, PlayedUnplayedMixin, RatingMixin, @@ -48,13 +47,10 @@ class Video(PlexPartialObject, PlayedUnplayedMixin): def _loadData(self, data): """ Load attribute values from Plex XML response. """ - self._data = data self.addedAt = utils.toDatetime(data.attrib.get('addedAt')) self.art = data.attrib.get('art') self.artBlurHash = data.attrib.get('artBlurHash') - self.fields = self.findItems(data, media.Field) self.guid = data.attrib.get('guid') - self.images = self.findItems(data, media.Image) self.key = data.attrib.get('key', '') self.lastRatedAt = utils.toDatetime(data.attrib.get('lastRatedAt')) self.lastViewedAt = utils.toDatetime(data.attrib.get('lastViewedAt')) @@ -73,6 +69,14 @@ class Video(PlexPartialObject, PlayedUnplayedMixin): self.userRating = utils.cast(float, data.attrib.get('userRating')) self.viewCount = utils.cast(int, data.attrib.get('viewCount', 0)) + @cached_data_property + def fields(self): + return self.findItems(self._data, media.Field) + + @cached_data_property + def images(self): + return self.findItems(self._data, media.Image) + def url(self, part): """ Returns the full url for something. Typically used for getting a specific image. 
""" return self._server.url(part, includeToken=True) if part else None @@ -394,41 +398,86 @@ class Movie( Playable._loadData(self, data) self.audienceRating = utils.cast(float, data.attrib.get('audienceRating')) self.audienceRatingImage = data.attrib.get('audienceRatingImage') - self.chapters = self.findItems(data, media.Chapter) self.chapterSource = data.attrib.get('chapterSource') - self.collections = self.findItems(data, media.Collection) self.contentRating = data.attrib.get('contentRating') - self.countries = self.findItems(data, media.Country) - self.directors = self.findItems(data, media.Director) self.duration = utils.cast(int, data.attrib.get('duration')) self.editionTitle = data.attrib.get('editionTitle') self.enableCreditsMarkerGeneration = utils.cast(int, data.attrib.get('enableCreditsMarkerGeneration', '-1')) - self.genres = self.findItems(data, media.Genre) - self.guids = self.findItems(data, media.Guid) - self.labels = self.findItems(data, media.Label) self.languageOverride = data.attrib.get('languageOverride') - self.markers = self.findItems(data, media.Marker) - self.media = self.findItems(data, media.Media) self.originallyAvailableAt = utils.toDatetime(data.attrib.get('originallyAvailableAt'), '%Y-%m-%d') self.originalTitle = data.attrib.get('originalTitle') self.primaryExtraKey = data.attrib.get('primaryExtraKey') - self.producers = self.findItems(data, media.Producer) self.rating = utils.cast(float, data.attrib.get('rating')) self.ratingImage = data.attrib.get('ratingImage') - self.ratings = self.findItems(data, media.Rating) - self.roles = self.findItems(data, media.Role) self.slug = data.attrib.get('slug') - self.similar = self.findItems(data, media.Similar) self.sourceURI = data.attrib.get('source') # remote playlist item self.studio = data.attrib.get('studio') self.tagline = data.attrib.get('tagline') self.theme = data.attrib.get('theme') - self.ultraBlurColors = self.findItem(data, media.UltraBlurColors) self.useOriginalTitle = utils.cast(int, data.attrib.get('useOriginalTitle', '-1')) self.viewOffset = utils.cast(int, data.attrib.get('viewOffset', 0)) - self.writers = self.findItems(data, media.Writer) self.year = utils.cast(int, data.attrib.get('year')) + @cached_data_property + def chapters(self): + return self.findItems(self._data, media.Chapter) + + @cached_data_property + def collections(self): + return self.findItems(self._data, media.Collection) + + @cached_data_property + def countries(self): + return self.findItems(self._data, media.Country) + + @cached_data_property + def directors(self): + return self.findItems(self._data, media.Director) + + @cached_data_property + def genres(self): + return self.findItems(self._data, media.Genre) + + @cached_data_property + def guids(self): + return self.findItems(self._data, media.Guid) + + @cached_data_property + def labels(self): + return self.findItems(self._data, media.Label) + + @cached_data_property + def markers(self): + return self.findItems(self._data, media.Marker) + + @cached_data_property + def media(self): + return self.findItems(self._data, media.Media) + + @cached_data_property + def producers(self): + return self.findItems(self._data, media.Producer) + + @cached_data_property + def ratings(self): + return self.findItems(self._data, media.Rating) + + @cached_data_property + def roles(self): + return self.findItems(self._data, media.Role) + + @cached_data_property + def similar(self): + return self.findItems(self._data, media.Similar) + + @cached_data_property + def ultraBlurColors(self): + return 
self.findItem(self._data, media.UltraBlurColors) + + @cached_data_property + def writers(self): + return self.findItems(self._data, media.Writer) + @property def actors(self): """ Alias to self.roles. """ @@ -573,40 +622,67 @@ class Show( self.autoDeletionItemPolicyWatchedLibrary = utils.cast( int, data.attrib.get('autoDeletionItemPolicyWatchedLibrary', '0')) self.childCount = utils.cast(int, data.attrib.get('childCount')) - self.collections = self.findItems(data, media.Collection) self.contentRating = data.attrib.get('contentRating') self.duration = utils.cast(int, data.attrib.get('duration')) self.enableCreditsMarkerGeneration = utils.cast(int, data.attrib.get('enableCreditsMarkerGeneration', '-1')) self.episodeSort = utils.cast(int, data.attrib.get('episodeSort', '-1')) self.flattenSeasons = utils.cast(int, data.attrib.get('flattenSeasons', '-1')) - self.genres = self.findItems(data, media.Genre) - self.guids = self.findItems(data, media.Guid) self.index = utils.cast(int, data.attrib.get('index')) self.key = self.key.replace('/children', '') # FIX_BUG_50 - self.labels = self.findItems(data, media.Label) self.languageOverride = data.attrib.get('languageOverride') self.leafCount = utils.cast(int, data.attrib.get('leafCount')) - self.locations = self.listAttrs(data, 'path', etag='Location') self.network = data.attrib.get('network') self.originallyAvailableAt = utils.toDatetime(data.attrib.get('originallyAvailableAt'), '%Y-%m-%d') self.originalTitle = data.attrib.get('originalTitle') self.rating = utils.cast(float, data.attrib.get('rating')) - self.ratings = self.findItems(data, media.Rating) - self.roles = self.findItems(data, media.Role) self.seasonCount = utils.cast(int, data.attrib.get('seasonCount', self.childCount)) self.showOrdering = data.attrib.get('showOrdering') - self.similar = self.findItems(data, media.Similar) self.slug = data.attrib.get('slug') self.studio = data.attrib.get('studio') self.subtitleLanguage = data.attrib.get('subtitleLanguage', '') self.subtitleMode = utils.cast(int, data.attrib.get('subtitleMode', '-1')) self.tagline = data.attrib.get('tagline') self.theme = data.attrib.get('theme') - self.ultraBlurColors = self.findItem(data, media.UltraBlurColors) self.useOriginalTitle = utils.cast(int, data.attrib.get('useOriginalTitle', '-1')) self.viewedLeafCount = utils.cast(int, data.attrib.get('viewedLeafCount')) self.year = utils.cast(int, data.attrib.get('year')) + @cached_data_property + def collections(self): + return self.findItems(self._data, media.Collection) + + @cached_data_property + def genres(self): + return self.findItems(self._data, media.Genre) + + @cached_data_property + def guids(self): + return self.findItems(self._data, media.Guid) + + @cached_data_property + def labels(self): + return self.findItems(self._data, media.Label) + + @cached_data_property + def locations(self): + return self.listAttrs(self._data, 'path', etag='Location') + + @cached_data_property + def ratings(self): + return self.findItems(self._data, media.Rating) + + @cached_data_property + def roles(self): + return self.findItems(self._data, media.Role) + + @cached_data_property + def similar(self): + return self.findItems(self._data, media.Similar) + + @cached_data_property + def ultraBlurColors(self): + return self.findItem(self._data, media.UltraBlurColors) + def __iter__(self): for season in self.seasons(): yield season @@ -759,11 +835,8 @@ class Season( Video._loadData(self, data) self.audienceRating = utils.cast(float, data.attrib.get('audienceRating')) self.audioLanguage = 
data.attrib.get('audioLanguage', '') - self.collections = self.findItems(data, media.Collection) - self.guids = self.findItems(data, media.Guid) self.index = utils.cast(int, data.attrib.get('index')) self.key = self.key.replace('/children', '') # FIX_BUG_50 - self.labels = self.findItems(data, media.Label) self.leafCount = utils.cast(int, data.attrib.get('leafCount')) self.parentGuid = data.attrib.get('parentGuid') self.parentIndex = utils.cast(int, data.attrib.get('parentIndex')) @@ -775,13 +848,31 @@ class Season( self.parentThumb = data.attrib.get('parentThumb') self.parentTitle = data.attrib.get('parentTitle') self.rating = utils.cast(float, data.attrib.get('rating')) - self.ratings = self.findItems(data, media.Rating) self.subtitleLanguage = data.attrib.get('subtitleLanguage', '') self.subtitleMode = utils.cast(int, data.attrib.get('subtitleMode', '-1')) - self.ultraBlurColors = self.findItem(data, media.UltraBlurColors) self.viewedLeafCount = utils.cast(int, data.attrib.get('viewedLeafCount')) self.year = utils.cast(int, data.attrib.get('year')) + @cached_data_property + def collections(self): + return self.findItems(self._data, media.Collection) + + @cached_data_property + def guids(self): + return self.findItems(self._data, media.Guid) + + @cached_data_property + def labels(self): + return self.findItems(self._data, media.Label) + + @cached_data_property + def ratings(self): + return self.findItems(self._data, media.Rating) + + @cached_data_property + def ultraBlurColors(self): + return self.findItem(self._data, media.UltraBlurColors) + def __iter__(self): for episode in self.episodes(): yield episode @@ -942,11 +1033,8 @@ class Episode( Playable._loadData(self, data) self.audienceRating = utils.cast(float, data.attrib.get('audienceRating')) self.audienceRatingImage = data.attrib.get('audienceRatingImage') - self.chapters = self.findItems(data, media.Chapter) self.chapterSource = data.attrib.get('chapterSource') - self.collections = self.findItems(data, media.Collection) self.contentRating = data.attrib.get('contentRating') - self.directors = self.findItems(data, media.Director) self.duration = utils.cast(int, data.attrib.get('duration')) self.grandparentArt = data.attrib.get('grandparentArt') self.grandparentGuid = data.attrib.get('grandparentGuid') @@ -956,25 +1044,16 @@ class Episode( self.grandparentTheme = data.attrib.get('grandparentTheme') self.grandparentThumb = data.attrib.get('grandparentThumb') self.grandparentTitle = data.attrib.get('grandparentTitle') - self.guids = self.findItems(data, media.Guid) self.index = utils.cast(int, data.attrib.get('index')) - self.labels = self.findItems(data, media.Label) - self.markers = self.findItems(data, media.Marker) - self.media = self.findItems(data, media.Media) self.originallyAvailableAt = utils.toDatetime(data.attrib.get('originallyAvailableAt'), '%Y-%m-%d') self.parentGuid = data.attrib.get('parentGuid') self.parentIndex = utils.cast(int, data.attrib.get('parentIndex')) self.parentTitle = data.attrib.get('parentTitle') self.parentYear = utils.cast(int, data.attrib.get('parentYear')) - self.producers = self.findItems(data, media.Producer) self.rating = utils.cast(float, data.attrib.get('rating')) - self.ratings = self.findItems(data, media.Rating) - self.roles = self.findItems(data, media.Role) self.skipParent = utils.cast(bool, data.attrib.get('skipParent', '0')) self.sourceURI = data.attrib.get('source') # remote playlist item - self.ultraBlurColors = self.findItem(data, media.UltraBlurColors) self.viewOffset = utils.cast(int, 
data.attrib.get('viewOffset', 0)) - self.writers = self.findItems(data, media.Writer) self.year = utils.cast(int, data.attrib.get('year')) # If seasons are hidden, parentKey and parentRatingKey are missing from the XML response. @@ -984,7 +1063,55 @@ class Episode( self._parentRatingKey = utils.cast(int, data.attrib.get('parentRatingKey')) self._parentThumb = data.attrib.get('parentThumb') - @cached_property + @cached_data_property + def chapters(self): + return self.findItems(self._data, media.Chapter) + + @cached_data_property + def collections(self): + return self.findItems(self._data, media.Collection) + + @cached_data_property + def directors(self): + return self.findItems(self._data, media.Director) + + @cached_data_property + def guids(self): + return self.findItems(self._data, media.Guid) + + @cached_data_property + def labels(self): + return self.findItems(self._data, media.Label) + + @cached_data_property + def markers(self): + return self.findItems(self._data, media.Marker) + + @cached_data_property + def media(self): + return self.findItems(self._data, media.Media) + + @cached_data_property + def producers(self): + return self.findItems(self._data, media.Producer) + + @cached_data_property + def ratings(self): + return self.findItems(self._data, media.Rating) + + @cached_data_property + def roles(self): + return self.findItems(self._data, media.Role) + + @cached_data_property + def writers(self): + return self.findItems(self._data, media.Writer) + + @cached_data_property + def ultraBlurColors(self): + return self.findItem(self._data, media.UltraBlurColors) + + @cached_data_property def parentKey(self): """ Returns the parentKey. Refer to the Episode attributes. """ if self._parentKey: @@ -993,7 +1120,7 @@ class Episode( return f'/library/metadata/{self.parentRatingKey}' return None - @cached_property + @cached_data_property def parentRatingKey(self): """ Returns the parentRatingKey. Refer to the Episode attributes. """ if self._parentRatingKey is not None: @@ -1006,7 +1133,7 @@ class Episode( return self._season.ratingKey return None - @cached_property + @cached_data_property def parentThumb(self): """ Returns the parentThumb. Refer to the Episode attributes. """ if self._parentThumb: @@ -1015,7 +1142,7 @@ class Episode( return self._season.thumb return None - @cached_property + @cached_data_property def _season(self): """ Returns the :class:`~plexapi.video.Season` object by querying for the show's children. """ if self.grandparentKey and self.parentIndex is not None: @@ -1055,7 +1182,7 @@ class Episode( """ Returns the episode number. """ return self.index - @cached_property + @cached_data_property def seasonNumber(self): """ Returns the episode's season number. """ if isinstance(self.parentIndex, int): @@ -1149,12 +1276,10 @@ class Clip( """ Load attribute values from Plex XML response. 
""" Video._loadData(self, data) Playable._loadData(self, data) - self._data = data self.addedAt = utils.toDatetime(data.attrib.get('addedAt')) self.duration = utils.cast(int, data.attrib.get('duration')) self.extraType = utils.cast(int, data.attrib.get('extraType')) self.index = utils.cast(int, data.attrib.get('index')) - self.media = self.findItems(data, media.Media) self.originallyAvailableAt = utils.toDatetime( data.attrib.get('originallyAvailableAt'), '%Y-%m-%d') self.skipDetails = utils.cast(int, data.attrib.get('skipDetails')) @@ -1163,6 +1288,10 @@ class Clip( self.viewOffset = utils.cast(int, data.attrib.get('viewOffset', 0)) self.year = utils.cast(int, data.attrib.get('year')) + @cached_data_property + def media(self): + return self.findItems(self._data, media.Media) + @property def locations(self): """ This does not exist in plex xml response but is added to have a common diff --git a/requirements.txt b/requirements.txt index a34ae985..bb47f167 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,7 @@ apscheduler==3.10.1 arrow==1.3.0 beautifulsoup4==4.12.3 bleach==6.2.0 -certifi==2024.8.30 +certifi==2025.04.26 cheroot==10.0.1 cherrypy==18.10.0 cherrypy-cors==1.7.0