Mirror of https://github.com/Tautulli/Tautulli.git (synced 2025-08-20 21:33:18 -07:00)

Compare commits (261 commits)
Commits (SHA1):

1144bba580 443eb8da15 5ecd570f95 b8589513c1 d46710962c 5921a7d83f
9a6253d775 43fc7eebfe c4f8a81190 f6bffe1850 3cb71f94a3 ff5edc06fe
cc88cffc1f e735294e1c 889026b092 76f6a2da6b d2a14ea6c0 e6c0a12dd5
24dd403a72 a876e006d6 74786f0ed1 99e575383c 3e784c7495 68dc095c83
ad2ec0e2bf 09c28e434d cfc7b817b3 b3aa29c677 e4d181ba5b 53e5f89725
0879b848b9 c70381c3ff f23d3eb81c 2ed603f288 a96fd23d72 65dc466c07
0a4730625c 67fa4ca645 078c293bd7 85e9237608 f9b3631745 8f03e27617
63fe386057 b7c4f2eefe 37ef098718 78864d7a97 62a05712f8 ca0e1c321d
b9cb7102c4 6e6fe1fb65 9c473c6528 5c38de0dfb ea66f6713b dd9a35df51
feca713b76 0836fb902c eb2c372d82 be2e63e7e0 2fe3f039cc baf926e5db
85b63fb61a afc29604cc 5b47cebdc7 d9f38f9390 86d775a586 ddb4f6131b
599e52de6a 84be60cb36 d9a87f9726 9289ead996 af752e0acc 86abd130b0
fc2c7cc871 025e8bcf58 bf07912711 48b1c7b522 e69852fa0e 01589cb8b0
f3a2c02e96 d3f7eef84f 2f3d24a0e7 2d3271376b 940c2ae6cd 1cdfd5f30a
e3f4851883 1353247b55 3cf6560de3 9ca8d59372 921a3a0af9 3bb53f480e
6979a4025f cc1a325eac de697cb2ca 596cf57d61 ac32297160 330b8a3a82
5cf39cb097 14c9c7a393 cf8fb2e65d 623a9f2919 3fb46a9ab7 cfd81684b7
fb4f0046f3 7d4efac75d 509d18801b da501df846 43cb027592 2e6f541ec2
822d5a452c 7696d031d3 50ced86ba5 e934d09eff 96c5cb216c 2ee2ab652c
193b82c54a 7d00383d1c 6f84ce8048 709db66b10 2f1607b96b 28ad2716ba
f1a8164b94 3bc94cad6c a528f052b9 5e977c044a afa25d45f6 43e71d836a
55573d26ea f1d44c051d a3af8ed362 912fd75a2f 5778672dab dcdf5a2992
73cfa8e0c0 795d568df2 8396a04ce8 8419eee4b2 c505e26656 37ffe68ce2
dc9e778111 68bf1c70f7 ee0b4c0602 5c115dec68 1d77f32665 af01b8c6cc
dd9d3b97a2 96c20ad893 5e90f3bb31 dab46249f2 5d0ba8b222 3e8a5663a3
6414a0ba12 bcac5b7897 de3393d62b dcec1f6f5f 65905a6647 5de2cf85c3
a7660d5c03 5e02db897f 52e2950aa9 acf1b2384a 07166ae6b2 80984bd296
6a9e532805 2258a88168 282810e9ca 4582ff4a56 10e62ca42d e3245bc126
3dc6d226f8 177962d626 5e4f656155 980fc8a43f 458e89b8d7 b8185afdf9
85519b1b45 653a6d5c12 b2a8601c70 938b48c5aa cdfffda877 b6eca379fa
aa2006c2cc 26358427ce 1d96e0f859 0d1d2a3e6b 452a4afdcf bfc4f66739
f82aecb88c 75a1750a4e 51196a7fb1 2fc618c01f fcd8ef11f4 c737161164
de3121cba9 9fe58a6d86 cfdb6975f0 41693ee5f1 7a11b10947 4430c14374
e248c13c15 b01b21ae05 6c6fa34ba4 24fff60ed4 4398dfa821 a0170a6f3d
faef9a94c4 cfefa928be aca7e72715 b7836102a9 13eb0fd6db a4cfc6323d
4468c3e4af 52819f7da6 24b6d37bbe dbffb519f5 e307796475 c1b8be0227
d3504e8a3c 344f19c9d6 901c484f89 149c7fa7a0 b73a2e9acc b48e9f4074
36fbff5503 355be99512 a397b90c23 6a19beb476 71e6ea00c9 7e4f7c0c56
36ed4bfa3c 7ee2c59075 82089fdb7b cc070cfc6b 78a7a48587 f403fdcc5b
50dba6bf19 23591a0435 60f29b0fa4 b1c0972077 040972bcba c172965ec8
d0c07326ab d019efcf91 fe7a59c7f9 e3113ebd30 5525b9851c 8cb74f7480
98ceb0a81d 325271a88e ddc8a08fc7 d0c1e467bd 380cbd232c 98c363f559
5abdfd7377 8fd62e30b3 89aad6952b 2da3714dd1 ab5836a65b ae17d2dde0
f1c12c0bbe 32cf26884b dd380b583f
969 changed files with 95318 additions and 43662 deletions
.github/workflows/codeql.yml (vendored, 4 lines changed)
```diff
@@ -27,12 +27,12 @@ jobs:
         uses: actions/checkout@v4
 
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v2
+        uses: github/codeql-action/init@v3
         with:
           config-file: ./.github/codeql-config.yml
           languages: ${{ matrix.language }}
 
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v2
+        uses: github/codeql-action/analyze@v3
         with:
           category: "/language:${{matrix.language}}"
```
.github/workflows/issues-stale.yml (vendored, 4 lines changed)
```diff
@@ -10,7 +10,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Stale
-        uses: actions/stale@v8
+        uses: actions/stale@v9
        with:
          stale-issue-message: >
            This issue is stale because it has been open for 30 days with no activity.
@@ -30,7 +30,7 @@ jobs:
          days-before-close: 5
 
       - name: Invalid Template
-        uses: actions/stale@v8
+        uses: actions/stale@v9
        with:
          stale-issue-message: >
            Invalid issues template.
```
.github/workflows/issues.yml (vendored, 2 lines changed)
```diff
@@ -10,6 +10,6 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Label Issues
-        uses: dessant/label-actions@v3
+        uses: dessant/label-actions@v4
        with:
          github-token: ${{ github.token }}
```
.github/workflows/publish-docker.yml (vendored, 30 lines changed)
```diff
@@ -33,7 +33,6 @@ jobs:
             echo "branch=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT
           fi
           echo "commit=${GITHUB_SHA}" >> $GITHUB_OUTPUT
           echo "build_date=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_OUTPUT
           echo "docker_platforms=linux/amd64,linux/arm64/v8,linux/arm/v7,linux/arm/v6" >> $GITHUB_OUTPUT
           echo "docker_image=${{ secrets.DOCKER_REPO }}/tautulli" >> $GITHUB_OUTPUT
 
@@ -47,7 +46,7 @@ jobs:
           version: latest
 
       - name: Cache Docker Layers
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         with:
           path: /tmp/.buildx-cache
           key: ${{ runner.os }}-buildx-${{ github.sha }}
@@ -59,7 +58,7 @@ jobs:
         if: success()
         with:
           username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
+          password: ${{ secrets.DOCKER_TOKEN }}
 
       - name: Login to GitHub Container Registry
         uses: docker/login-action@v3
@@ -69,8 +68,14 @@ jobs:
           username: ${{ secrets.DOCKER_USERNAME }}
           password: ${{ secrets.GHCR_TOKEN }}
 
+      - name: Extract Docker Metadata
+        id: metadata
+        uses: docker/metadata-action@v5
+        with:
+          images: ${{ steps.prepare.outputs.docker_image }}
+
       - name: Docker Build and Push
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v6
         if: success()
         with:
           context: .
@@ -81,10 +86,10 @@ jobs:
             TAG=${{ steps.prepare.outputs.tag }}
             BRANCH=${{ steps.prepare.outputs.branch }}
             COMMIT=${{ steps.prepare.outputs.commit }}
             BUILD_DATE=${{ steps.prepare.outputs.build_date }}
           tags: |
             ${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.tag }}
             ghcr.io/${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.tag }}
+          labels: ${{ steps.metadata.outputs.labels }}
           cache-from: type=local,src=/tmp/.buildx-cache
           cache-to: type=local,dest=/tmp/.buildx-cache
 
@@ -94,23 +99,10 @@ jobs:
     if: always() && !contains(github.event.head_commit.message, '[skip ci]')
     runs-on: ubuntu-latest
     steps:
-      - name: Get Build Job Status
-        uses: technote-space/workflow-conclusion-action@v3
-
-      - name: Combine Job Status
-        id: status
-        run: |
-          failures=(neutral, skipped, timed_out, action_required)
-          if [[ ${array[@]} =~ $WORKFLOW_CONCLUSION ]]; then
-            echo "status=failure" >> $GITHUB_OUTPUT
-          else
-            echo "status=$WORKFLOW_CONCLUSION" >> $GITHUB_OUTPUT
-          fi
-
       - name: Post Status to Discord
         uses: sarisia/actions-status-discord@v1
         with:
           webhook: ${{ secrets.DISCORD_WEBHOOK }}
-          status: ${{ steps.status.outputs.status }}
+          status: ${{ needs.build-docker.result == 'success' && 'success' || contains(needs.*.result, 'failure') && 'failure' || 'cancelled' }}
           title: ${{ github.workflow }}
           nofail: true
```
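The new `status` line above drops the third-party workflow-conclusion action in favor of a plain GitHub Actions expression. The trick it relies on is that `&&` and `||` in workflow expressions return operand values rather than booleans, so the chain acts like a ternary. A minimal Python sketch of the same evaluation, with illustrative names that are not part of the workflow itself:

```python
def combine_status(build_result: str, any_failure: bool) -> str:
    # Python's `and`/`or` also return operand values, so this mirrors
    # `result == 'success' && 'success' || failure && 'failure' || 'cancelled'`.
    return (
        (build_result == "success" and "success")
        or (any_failure and "failure")
        or "cancelled"
    )

assert combine_status("success", False) == "success"
assert combine_status("cancelled", True) == "failure"
assert combine_status("cancelled", False) == "cancelled"
```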
.github/workflows/publish-installers.yml (vendored, 95 lines changed)
```diff
@@ -6,10 +6,13 @@ on:
     branches: [master, beta, nightly]
     tags: [v*]
 
+env:
+  PYTHON_VERSION: '3.11'
+
 jobs:
   build-installer:
     name: Build ${{ matrix.os_upper }} Installer
-    runs-on: ${{ matrix.os }}-latest
+    runs-on: ${{ matrix.os }}-${{ matrix.os_version }}
     if: ${{ !contains(github.event.head_commit.message, '[skip ci]') }}
     strategy:
       fail-fast: false
@@ -17,9 +20,13 @@ jobs:
       include:
         - os: 'windows'
           os_upper: 'Windows'
+          os_version: 'latest'
+          arch: 'x64'
           ext: 'exe'
         - os: 'macos'
           os_upper: 'MacOS'
+          os_version: '14'
+          arch: 'universal'
           ext: 'pkg'
 
     steps:
@@ -52,29 +59,29 @@ jobs:
           echo $GITHUB_SHA > version.txt
 
       - name: Set Up Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
-          python-version: '3.9'
+          python-version: ${{ env.PYTHON_VERSION }}
           cache: pip
           cache-dependency-path: '**/requirements*.txt'
 
       - name: Install Dependencies
         run: |
           python -m pip install --upgrade pip
-          pip install -r package/requirements-package.txt
+          pip install -r package/requirements-package.txt --no-binary cffi
 
       - name: Build Package
         run: |
           pyinstaller -y ./package/Tautulli-${{ matrix.os }}.spec
 
       - name: Create Windows Installer
-        uses: joncloud/makensis-action@v4
+        uses: joncloud/makensis-action@v4.1
         if: matrix.os == 'windows'
         with:
           script-file: ./package/Tautulli.nsi
           arguments: >
             /DVERSION=${{ steps.get_version.outputs.VERSION_NSIS }}
-            /DINSTALLER_NAME=..\Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.exe
+            /DINSTALLER_NAME=..\Tautulli-${{ matrix.os }}-${{ steps.get_version.outputs.RELEASE_VERSION }}-${{ matrix.arch }}.${{ matrix.ext }}
           additional-plugin-paths: package/nsis-plugins
 
       - name: Create MacOS Installer
@@ -85,13 +92,31 @@ jobs:
             --version ${{ steps.get_version.outputs.VERSION }} \
             --component ./dist/Tautulli.app \
             --scripts ./package/macos-scripts \
-            Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.pkg
+            Tautulli-${{ matrix.os }}-${{ steps.get_version.outputs.RELEASE_VERSION }}-${{ matrix.arch }}.${{ matrix.ext }}
 
       - name: Upload Installer
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: Tautulli-${{ matrix.os }}-installer
-          path: Tautulli-${{ matrix.os }}-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.${{ matrix.ext }}
+          path: Tautulli-${{ matrix.os }}-${{ steps.get_version.outputs.RELEASE_VERSION }}-${{ matrix.arch }}.${{ matrix.ext }}
 
+  virus-total:
+    name: VirusTotal Scan
+    needs: build-installer
+    if: needs.build-installer.result == 'success' && !contains(github.event.head_commit.message, '[skip ci]')
+    runs-on: ubuntu-latest
+    steps:
+      - name: Download Installers
+        if: needs.build-installer.result == 'success'
+        uses: actions/download-artifact@v4
+
+      - name: Upload to VirusTotal
+        uses: crazy-max/ghaction-virustotal@v4
+        with:
+          vt_api_key: ${{ secrets.VT_API_KEY }}
+          files: |
+            Tautulli-windows-installer/Tautulli-windows-*-x64.exe
+            Tautulli-macos-installer/Tautulli-macos-*-universal.pkg
+
   release:
     name: Release Installers
@@ -99,9 +124,6 @@ jobs:
     if: always() && startsWith(github.ref, 'refs/tags/') && !contains(github.event.head_commit.message, '[skip ci]')
     runs-on: ubuntu-latest
     steps:
-      - name: Get Build Job Status
-        uses: technote-space/workflow-conclusion-action@v3
-
       - name: Checkout Code
         uses: actions/checkout@v4
 
@@ -111,8 +133,8 @@ jobs:
           echo "RELEASE_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
 
       - name: Download Installers
-        if: env.WORKFLOW_CONCLUSION == 'success'
-        uses: actions/download-artifact@v3
+        if: needs.build-installer.result == 'success'
+        uses: actions/download-artifact@v4
 
       - name: Get Changelog
         id: get_changelog
@@ -125,41 +147,21 @@ jobs:
           echo "$EOF" >> $GITHUB_OUTPUT
 
       - name: Create Release
-        uses: actions/create-release@v1
+        uses: softprops/action-gh-release@v2
         id: create_release
         env:
           GITHUB_TOKEN: ${{ secrets.GHACTIONS_TOKEN }}
         with:
           tag_name: ${{ steps.get_version.outputs.RELEASE_VERSION }}
-          release_name: Tautulli ${{ steps.get_version.outputs.RELEASE_VERSION }}
+          name: Tautulli ${{ steps.get_version.outputs.RELEASE_VERSION }}
           body: |
             ## Changelog
 
             ##${{ steps.get_changelog.outputs.CHANGELOG }}
           draft: false
           prerelease: ${{ endsWith(steps.get_version.outputs.RELEASE_VERSION, '-beta') }}
-
-      - name: Upload Windows Installer
-        uses: actions/upload-release-asset@v1
-        if: env.WORKFLOW_CONCLUSION == 'success'
-        env:
-          GITHUB_TOKEN: ${{ secrets.GHACTIONS_TOKEN }}
-        with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: Tautulli-windows-installer/Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.exe
-          asset_name: Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.exe
-          asset_content_type: application/vnd.microsoft.portable-executable
-
-      - name: Upload MacOS Installer
-        uses: actions/upload-release-asset@v1
-        if: env.WORKFLOW_CONCLUSION == 'success'
-        env:
-          GITHUB_TOKEN: ${{ secrets.GHACTIONS_TOKEN }}
-        with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: Tautulli-macos-installer/Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.pkg
-          asset_name: Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.pkg
-          asset_content_type: application/vnd.apple.installer+xml
+          files: |
+            Tautulli-windows-installer/Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.exe
+            Tautulli-macos-installer/Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-universal.pkg
 
   discord:
     name: Discord Notification
@@ -167,23 +169,10 @@ jobs:
     if: always() && !contains(github.event.head_commit.message, '[skip ci]')
     runs-on: ubuntu-latest
     steps:
-      - name: Get Build Job Status
-        uses: technote-space/workflow-conclusion-action@v3
-
-      - name: Combine Job Status
-        id: status
-        run: |
-          failures=(neutral, skipped, timed_out, action_required)
-          if [[ ${array[@]} =~ $WORKFLOW_CONCLUSION ]]; then
-            echo "status=failure" >> $GITHUB_OUTPUT
-          else
-            echo "status=$WORKFLOW_CONCLUSION" >> $GITHUB_OUTPUT
-          fi
-
       - name: Post Status to Discord
         uses: sarisia/actions-status-discord@v1
         with:
           webhook: ${{ secrets.DISCORD_WEBHOOK }}
-          status: ${{ steps.status.outputs.status }}
+          status: ${{ needs.build-installer.result == 'success' && 'success' || contains(needs.*.result, 'failure') && 'failure' || 'cancelled' }}
           title: ${{ github.workflow }}
           nofail: true
```
.github/workflows/publish-snap.yml (vendored, 21 lines changed)
```diff
@@ -38,19 +38,19 @@ jobs:
         uses: docker/setup-qemu-action@v3
 
       - name: Build Snap Package
-        uses: diddlesnaps/snapcraft-multiarch-action@v1
+        uses: diddlesnaps/snapcraft-multiarch-action@master
         id: build
         with:
           architecture: ${{ matrix.architecture }}
 
       - name: Upload Snap Package
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: Tautulli-snap-package-${{ matrix.architecture }}
           path: ${{ steps.build.outputs.snap }}
 
       - name: Review Snap Package
-        uses: diddlesnaps/snapcraft-review-tools-action@v1
+        uses: diddlesnaps/snapcraft-review-tools-action@master
         with:
           snap: ${{ steps.build.outputs.snap }}
 
@@ -69,23 +69,10 @@ jobs:
     if: always() && !contains(github.event.head_commit.message, '[skip ci]')
     runs-on: ubuntu-latest
    steps:
-      - name: Get Build Job Status
-        uses: technote-space/workflow-conclusion-action@v3
-
-      - name: Combine Job Status
-        id: status
-        run: |
-          failures=(neutral, skipped, timed_out, action_required)
-          if [[ ${array[@]} =~ $WORKFLOW_CONCLUSION ]]; then
-            echo "status=failure" >> $GITHUB_OUTPUT
-          else
-            echo "status=$WORKFLOW_CONCLUSION" >> $GITHUB_OUTPUT
-          fi
-
       - name: Post Status to Discord
         uses: sarisia/actions-status-discord@v1
         with:
           webhook: ${{ secrets.DISCORD_WEBHOOK }}
-          status: ${{ steps.status.outputs.status }}
+          status: ${{ needs.build-snap.result == 'success' && 'success' || contains(needs.*.result, 'failure') && 'failure' || 'cancelled' }}
           title: ${{ github.workflow }}
           nofail: true
```
.github/workflows/submit-winget.yml (vendored, 19 lines changed)
```diff
@@ -11,6 +11,11 @@ jobs:
     runs-on: windows-latest
     if: ${{ !github.event.release.prerelease }}
     steps:
+      - name: Sync Winget Fork
+        run: gh repo sync ${{ secrets.WINGET_USERNAME }}/winget-pkgs -b master
+        env:
+          GH_TOKEN: ${{ secrets.WINGET_TOKEN }}
+
       - name: Submit package to Windows Package Manager Community Repository
         run: |
           $wingetPackage = "Tautulli.Tautulli"
@@ -23,3 +28,17 @@ jobs:
           # getting latest wingetcreate file
           iwr https://aka.ms/wingetcreate/latest -OutFile wingetcreate.exe
           .\wingetcreate.exe update $wingetPackage -s -v $version -u $installerUrl -t $gitToken
+
+  virus-total:
+    name: VirusTotal Scan
+    runs-on: ubuntu-latest
+    steps:
+      - name: Upload to VirusTotal
+        uses: crazy-max/ghaction-virustotal@v4
+        with:
+          vt_api_key: ${{ secrets.VT_API_KEY }}
+          github_token: ${{ secrets.GHACTIONS_TOKEN }}
+          update_release_body: true
+          files: |
+            .exe$
+            .pkg$
```
CHANGELOG.md (180 lines changed)
```diff
@@ -1,6 +1,185 @@
 # Changelog
 
+## v2.15.3 (2025-08-03)
+
+* Exporter:
+  * New: Added hearingImpaired for subtitles and visualImpaired for audio attributes to exporter fields.
+* Graphs:
+  * Fix: Remove duplicate "Total" entry in graph tooltips. (Thanks @zdimension) (#2534)
+* UI:
+  * Fix: Failing to retrieve collections / playlists with over 1000 items.
+  * Fix: Scrollbar not showing on macosx and webkit browsers. (#2221)
+  * Fix: Incorrect rounding of minutes in global stats play duration.
+  * Fix: Disable browser autocomplete for notification agent and newsletter agent configurations. (#2557)
+* API:
+  * New: Added ability to return svg files using pms_image_proxy API command.
+* Other:
+  * New: Added ability to set config values using environment variables. (Thanks @komuw) (#2309, #2543)
+
+
+## v2.15.2 (2025-04-12)
+
+* Activity:
+  * New: Added link to library by clicking media type icon.
+  * New: Added stream count to tab title on homepage. (#2517)
+* History:
+  * Fix: Check stream watched status before stream stopped status. (#2506)
+* Notifications:
+  * Fix: ntfy notifications failing to send if provider link is blank.
+  * Fix: Check Pushover notification attachment is under 5MB limit. (#2396)
+  * Fix: Track URLs redirecting to the correct media page. (#2513)
+  * New: Added audio profile notification parameters.
+  * New: Added PATCH method for Webhook notifications.
+* Graphs:
+  * New: Added Total line to daily streams graph. (Thanks @zdimension) (#2497)
+* UI:
+  * Fix: Do not redirect API requests to the login page. (#2490)
+  * Change: Swap source and stream columns in stream info modal.
+* Other:
+  * Fix: Various typos. (Thanks @luzpaz) (#2520)
+  * Fix: CherryPy CORS response header not being set correctly. (#2279)
+
+
+## v2.15.1 (2025-01-11)
+
+* Activity:
+  * Fix: Detection of HDR transcodes. (Thanks @cdecker08) (#2412, #2466)
+* Newsletters:
+  * Fix: Disable basic authentication for /newsletter and /image endpoints. (#2472)
+* Exporter:
+  * New: Added logos to season and episode exports.
+* Other:
+  * Fix: Docker container https health check.
+
+
+## v2.15.0 (2024-11-24)
+
+* Notes:
+  * Support for Python 3.8 has been dropped. The minimum Python version is now 3.9.
+* Notifications:
+  * New: Allow Telegram blockquote and tg-emoji HTML tags. (Thanks @MythodeaLoL) (#2427)
+  * New: Added Plex slug and Plex Watch URL notification parameters. (#2420)
+  * Change: Update OneSignal API calls to use the new API endpoint for Tautulli Remote App notifications.
+* Newsletters:
+  * Fix: Dumping custom dates in raw newsletter json.
+* History:
+  * Fix: Unable to fix match for artists. (#2429)
+* Exporter:
+  * New: Added movie and episode hasVoiceActivity attribute to exporter fields.
+  * New: Added subtitle canAutoSync attribute to exporter fields.
+  * New: Added logos to the exporter fields.
+* UI:
+  * New: Add friendly name to the top bar of config modals. (Thanks @peagravel) (#2432)
+* API:
+  * New: Added plex slugs to metadata in the get_metadata API command.
+* Other:
+  * Fix: Tautulli failing to start with Python 3.13. (#2426)
+
+
+## v2.14.6 (2024-10-12)
+
+* Newsletters:
+  * Fix: Allow formatting newsletter date parameters.
+  * Change: Support apscheduler compatible cron expressions.
+* UI:
+  * Fix: Round runtime before converting to human duration.
+  * Fix: Make recently added/watched rows touch scrollable.
+* Other:
+  * Fix: Auto-updater not running.
+
+
+## v2.14.5 (2024-09-20)
+
+* Activity:
+  * Fix: Display of 2k resolution on activity card.
+* Notifications:
+  * Fix: ntfy notifications with special characters failing to send.
+* Other:
+  * Fix: Memory leak with database closing. (#2404)
+
+
+## v2.14.4 (2024-08-10)
+
+* Notifications:
+  * Fix: Update Slack notification info card.
+  * New: Added ntfy notification agent. (Thanks @nwithan8) (#2356, #2000)
+* UI:
+  * Fix: macOS platform capitalization.
+* Other:
+  * Fix: Remove deprecated getdefaultlocale. (Thanks @teodorstelian) (#2364, #2345)
+
+
+## v2.14.3 (2024-06-19)
+
+* Graphs:
+  * Fix: History table not loading when clicking on the graphs in some instances.
+* UI:
+  * Fix: Scheduled tasks table not loading when certain tasks are disabled.
+  * Removed: Unnecessary Remote Server checkbox from the settings page.
+* Other:
+  * Fix: Webserver not restarting after the setup wizard.
+  * Fix: Workaround webserver crashing in some instances.
+
+
+## v2.14.2 (2024-05-18)
+
+* History:
+  * Fix: Live TV activity not logging to history.
+  * Fix: Incorrect grouping of live TV history.
+* Notifications:
+  * Fix: Pushover configuration settings refreshing after entering a token.
+  * Fix: Plex remote access down notifications not triggering.
+  * Fix: Deleting all images from Cloudinary only deleting 1000 images.
+  * New: Added platform version and product version notification parameters. (#2244)
+  * New: Added LAN streams and WAN streams notification parameters. (#2276)
+  * New: Added Dolby Vision notification parameters. (#2240)
+  * New: Added live TV channel notification parameters.
+  * Change: Improved Tautulli Remote App notification encryption method.
+    * Note: Requires Tautulli Remote App version 3.2.4.
+* Exporter:
+  * New: Added slug attribute to exporter fields.
+  * New: Added track genres to exporter fields.
+  * New: Added playlist source URI to exporter fields.
+  * New: Added artProvider and thumbProvider to exporter fields.
+* UI:
+  * Fix: Mask deleted usernames in the logs.
+  * Fix: Live TV watch stats not showing on the media info page.
+  * Fix: Users without access to Plex server not showing as inactive.
+  * Removed: Deprecated synced item pages.
+  * Removed: Anonymous redirect settings. Links now use browser no-referrer policy instead.
+* API:
+  * New: Added Dolby Vision info to the get_metadata API command.
+  * New: Added before and after parameters to the get_home_stats API command. (#2231)
+* Packages:
+  * New: Universal binary for macOS for Apple silicon.
+  * New: Bump Snap package to core22.
+* Other:
+  * Change: Login cookie expires changed to max-age.
+  * Change: Improved key generation for login password. It is recommended to reenter your HTTP Password in the settings after upgrading.
+  * Removed: Python 2 compatibility code. (#2098, #2226) (Thanks @zdimension)
+
+
+## v2.13.4 (2023-12-07)
+
+* UI:
+  * Fix: Tautulli configuration settings page not loading when system language is None.
+  * Fix: Login cookie expiring too quickly.
+
+
+## v2.13.3 (2023-12-03)
+
+* Notifications:
+  * New: Added duration_time notification parameter.
+  * New: Added file_size_bytes notification parameter.
+  * New: Added time formats notification text modifiers.
+  * New: Added support for thetvdb_url for movies.
+* UI:
+  * Fix: Activity card overflowing due to screen scaling. (#2033)
+  * Fix: Stream duration on activity card not being updated on track changes in some cases. (#2206)
+
+
 ## v2.13.2 (2023-10-26)
 
 * History:
   * New: Added quarter values icons for history watch status. (#2179, #2156) (Thanks @herby2212)
 * Graphs:
@@ -15,6 +194,7 @@
 
 
 ## v2.13.1 (2023-08-25)
 
 * Notes:
   * Support for Python 3.7 has been dropped. The minimum Python version is now 3.8.
 * Other:
```
```diff
@@ -25,4 +25,4 @@ CMD [ "python", "Tautulli.py", "--datadir", "/config" ]
 ENTRYPOINT [ "./start.sh" ]
 
 EXPOSE 8181
-HEALTHCHECK --start-period=90s CMD curl -ILfSs http://localhost:8181/status > /dev/null || curl -ILfkSs https://localhost:8181/status > /dev/null || exit 1
+HEALTHCHECK --start-period=90s CMD curl -ILfks https://localhost:8181/status > /dev/null || curl -ILfs http://localhost:8181/status > /dev/null || exit 1
```
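The updated HEALTHCHECK probes HTTPS first, with curl's `-k` accepting Tautulli's self-signed certificate, and only then falls back to plain HTTP, matching the v2.15.1 changelog line "Fix: Docker container https health check." A hedged Python sketch of the same try-HTTPS-then-HTTP probe (the URL and timeout are illustrative, not taken from the Dockerfile):

```python
import ssl
import urllib.request

def probe(url: str) -> bool:
    # Accept self-signed certificates, mirroring `curl -k`.
    ctx = ssl.create_default_context()
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE
    try:
        with urllib.request.urlopen(url, timeout=10, context=ctx) as resp:
            return resp.status < 400
    except OSError:
        return False

# Try HTTPS first, then fall back to HTTP, like the updated HEALTHCHECK.
healthy = probe("https://localhost:8181/status") or probe("http://localhost:8181/status")
```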
```diff
@@ -36,7 +36,7 @@ and [PlexWatchWeb](https://github.com/ecleese/plexWatchWeb).
 [![Docker Stars][badge-docker-stars]][DockerHub]
 [![Downloads][badge-downloads]][Releases Latest]
 
-[badge-python]: https://img.shields.io/badge/python->=3.8-blue?style=flat-square
+[badge-python]: https://img.shields.io/badge/python->=3.9-blue?style=flat-square
 [badge-docker-pulls]: https://img.shields.io/docker/pulls/tautulli/tautulli?style=flat-square
 [badge-docker-stars]: https://img.shields.io/docker/stars/tautulli/tautulli?style=flat-square
 [badge-downloads]: https://img.shields.io/github/downloads/Tautulli/Tautulli/total?style=flat-square
@@ -129,7 +129,7 @@ This is free software under the GPL v3 open source license. Feel free to do with
 but any modification must be open sourced. A copy of the license is included.
 
 This software includes Highsoft software libraries which you may freely distribute for
-non-commercial use. Commerical users must licence this software, for more information visit
+non-commercial use. Commercial users must licence this software, for more information visit
 https://shop.highsoft.com/faq/non-commercial#non-commercial-redistribution.
```
Tautulli.py (30 lines changed)
```diff
@@ -23,18 +23,18 @@ import sys
 # Ensure lib added to path, before any other imports
 sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'lib'))
 
-from future.builtins import str
 
-import appdirs
 import argparse
 import datetime
 import locale
+import platformdirs
 import pytz
 import signal
 import shutil
 import time
 import threading
 import tzlocal
+import ctypes
 
 import plexpy
 from plexpy import common, config, database, helpers, logger, webstart
@@ -70,8 +70,26 @@ def main():
     plexpy.SYS_ENCODING = None
 
     try:
         locale.setlocale(locale.LC_ALL, "")
-        plexpy.SYS_LANGUAGE, plexpy.SYS_ENCODING = locale.getdefaultlocale()
+
+        # Attempt to get the system's locale settings
+        language_code, encoding = locale.getlocale()
+
+        # Special handling for Windows platform
+        if sys.platform == 'win32':
+            # Get the user's current language settings on Windows
+            windll = ctypes.windll.kernel32
+            lang_id = windll.GetUserDefaultLCID()
+
+            # Map Windows language ID to locale identifier
+            language_code = locale.windows_locale.get(lang_id, '')
+
+            # Get the preferred encoding
+            encoding = locale.getpreferredencoding()
+
+        # Assign values to application-specific variable
+        plexpy.SYS_LANGUAGE = language_code
+        plexpy.SYS_ENCODING = encoding
+
     except (locale.Error, IOError):
         pass
 
@@ -111,7 +129,7 @@ def main():
     if args.quiet:
         plexpy.QUIET = True
 
-    # Do an intial setup of the logger.
+    # Do an initial setup of the logger.
     # Require verbose for pre-initilization to see critical errors
     logger.initLogger(console=not plexpy.QUIET, log_dir=False, verbose=True)
 
@@ -186,7 +204,7 @@ def main():
     if args.datadir:
         plexpy.DATA_DIR = args.datadir
     elif plexpy.FROZEN:
-        plexpy.DATA_DIR = appdirs.user_data_dir("Tautulli", False)
+        plexpy.DATA_DIR = platformdirs.user_data_dir("Tautulli", False)
     else:
         plexpy.DATA_DIR = plexpy.PROG_DIR
 
```
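The locale hunk above replaces `locale.getdefaultlocale()`, which has been deprecated since Python 3.11, with `locale.getlocale()` plus a Windows-specific LCID lookup; this corresponds to the changelog entries "Remove deprecated getdefaultlocale" and "Fix: Tautulli failing to start with Python 3.13." A minimal standalone sketch of the same pattern — `system_locale()` is a hypothetical helper name, not a function in the Tautulli codebase:

```python
import ctypes
import locale
import sys

def system_locale():
    """Best-effort replacement for the deprecated locale.getdefaultlocale()."""
    locale.setlocale(locale.LC_ALL, "")
    language_code, encoding = locale.getlocale()
    if sys.platform == "win32":
        # Windows may report names like "English_United States", so map the
        # user's LCID to an RFC-style identifier instead, as the diff does.
        lang_id = ctypes.windll.kernel32.GetUserDefaultLCID()
        language_code = locale.windows_locale.get(lang_id, "")
        encoding = locale.getpreferredencoding()
    return language_code, encoding
```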
```diff
@@ -13,6 +13,7 @@
     <meta name="viewport" content="width=device-width, initial-scale=1.0">
     <meta name="description" content="">
     <meta name="author" content="">
+    <meta name="referrer" content="no-referrer">
     <link href="${http_root}css/bootstrap3/bootstrap.min.css" rel="stylesheet">
     <link href="${http_root}css/pnotify.custom.min.css" rel="stylesheet" />
     <link href="${http_root}css/selectize.bootstrap3.css" rel="stylesheet" />
@@ -123,11 +124,6 @@
     % else:
     <li><a href="graphs">Graphs</a></li>
     % endif
-    % if title == "Synced Items":
-    <li class="active"><a href="sync">Synced Items</a></li>
-    % else:
-    <li><a href="sync">Synced Items</a></li>
-    % endif
     % if title == "Settings":
     <li class="dropdown active">
     % else:
@@ -238,7 +234,7 @@ ${next.modalIncludes()}
     <li><a href="#patreon-donation" role="tab" data-toggle="tab">Patreon</a></li>
     <li><a href="#stripe-donation" role="tab" data-toggle="tab">Stripe</a></li>
     <li><a href="#paypal-donation" role="tab" data-toggle="tab">PayPal</a></li>
-    <li><a href="#crypto-donation" role="tab" data-toggle="tab">Crypto</a></li>
+    <li><a href="#crypto-donation" role="tab" data-toggle="tab" id="crypto-donation-tab">Crypto</a></li>
 </ul>
 <div class="tab-content">
     <div role="tabpanel" class="tab-pane active" id="github-donation" style="text-align: center">
@@ -287,7 +283,16 @@ ${next.modalIncludes()}
     </div>
     <div role="tabpanel" class="tab-pane" id="crypto-donation" style="text-align: center">
         <p>
-            Click the button below to continue to Coinbase.
+            Select a cryptocurrency.
         </p>
+        <select class="form-control" id="crypto-select"></select>
+        <div id="crypto-qrcode"></div>
+        <div id="crypto-address" class="form-group">
+            <label>Address:</label>
+            <span class="inline-pre" id="crypto-address-value"></span>
+        </div>
+        <p>
+            Or click the button below to continue to Coinbase.
+        </p>
         <a href="${anon_url('https://commerce.coinbase.com/checkout/8a9fa08c-8a38-409e-9220-868124c4ba0c')}" target="_blank" rel="noreferrer" class="donate-with-crypto">
             <span>Donate with Crypto</span>
@@ -335,6 +340,7 @@ ${next.modalIncludes()}
 <script src="${http_root}js/blurhash_pure_js_port.min.js"></script>
 <script src="${http_root}js/script.js${cache_param}"></script>
 <script src="${http_root}js/ajaxNotifications.js"></script>
+<script src="${http_root}js/kjua.min.js"></script>
 <script>
 % if _session['user_group'] == 'admin':
     $('body').on('click', '#updateDismiss', function() {
@@ -408,6 +414,42 @@ ${next.modalIncludes()}
         checkUpdate(function () { $('#nav-update').html('<i class="fa fa-fw fa-arrow-alt-circle-up"></i> Check for Updates'); });
     });
 
+    $('#crypto-donation-tab').one('shown.bs.tab', function (e) {
+        $.ajax({
+            url: 'https://tautulli.com/donate/crypto-addresses.json',
+            type: 'GET',
+            dataType: 'json',
+            cache: false,
+            async: true,
+            success: function (data) {
+                $('#crypto-select').empty().append('<option selected disabled>Select Cryptocurrency</option>');
+                $.each(data, function (index, crypto) {
+                    $('<option/>', {
+                        text: crypto.name + ' (' + crypto.symbol + ')',
+                        value: crypto.address
+                    }).appendTo('#crypto-select');
+                });
+            },
+            error: function () {
+                $('#crypto-select').empty().append('<option selected disabled>Error: Unable to load addresses</option>');
+            }
+        });
+    });
+
+    $('#crypto-select').change(function() {
+        var address = $(this).val();
+        $('#crypto-qrcode').empty().kjua({
+            text: address,
+            render: 'canvas',
+            ecLevel: 'H',
+            size: 256,
+            fill: '#000',
+            back: '#eee'
+        }).show();
+        $('#crypto-address-value').text(address);
+        $('#crypto-address').show();
+    })
+
 % endif
 
     $('.dropdown-toggle').click(function (e) {
```
```diff
@@ -11,6 +11,7 @@ DOCUMENTATION :: END
 
 <%!
 import os
+import sqlite3
 import sys
 import plexpy
 from plexpy import common, logger
@@ -71,10 +72,18 @@ DOCUMENTATION :: END
     <td>System Timezone:</td>
     <td>${str(plexpy.SYS_TIMEZONE)} (${'UTC{}'.format(plexpy.SYS_UTC_OFFSET)})
 </tr>
+<tr>
+    <td>System Language:</td>
+    <td>${plexpy.SYS_LANGUAGE}${' (override {})'.format(plexpy.CONFIG.PMS_LANGUAGE) if plexpy.CONFIG.PMS_LANGUAGE else ''}</td>
+</tr>
 <tr>
     <td>Python Version:</td>
     <td>${sys.version}</td>
 </tr>
+<tr>
+    <td>SQLite Version:</td>
+    <td>${sqlite3.sqlite_version}</td>
+</tr>
 <tr>
     <td class="top-line">Resources:</td>
     <td class="top-line">
```
```diff
@@ -965,7 +965,7 @@ a .users-poster-face:hover {
     font-size: 10px;
     text-align: right;
     text-transform: uppercase;
-    line-height: 14px;
+    line-height: 10px;
     -webkit-flex-shrink: 0;
     flex-shrink: 0;
 }
@@ -1478,7 +1478,8 @@ a:hover .dashboard-stats-square {
     text-align: center;
     position: relative;
     z-index: 0;
-    overflow: hidden;
+    overflow: auto;
+    scrollbar-width: none;
 }
 .dashboard-recent-media {
     width: 100%;
@@ -4324,6 +4325,10 @@ a:hover .overlay-refresh-image:hover {
 .stream-info tr:nth-child(even) td {
     background-color: rgba(255,255,255,0.010);
 }
+.stream-info td:nth-child(3),
+.stream-info th:nth-child(3) {
+    width: 25px;
+}
 .number-input {
     margin: 0 !important;
     width: 55px !important;
@@ -4570,12 +4575,32 @@ a.donate-with-crypto::after {
     top: 0;
     left: 0;
 }
+#crypto-select {
+    width: 280px;
+    margin: 15px auto;
+}
+#crypto-qrcode {
+    width: 258px;
+    padding: 0;
+    margin: 15px auto;
+    line-height: 0;
+    text-align: center;
+    background-color: #eee;
+    border: 1px solid #ccc;
+    border-radius: 4px;
+    display: none;
+}
+#crypto-address {
+    margin: 15px auto;
+    text-align: center;
+    display: none;
+}
 
 #api_qr_code {
     width: 100%;
     padding: 0;
     margin: 0 0 10px;
-    line-height: 1;
+    line-height: 0;
     text-align: center;
     background-color: #eee;
     border: 1px solid #ccc;
```
```diff
@@ -74,6 +74,7 @@ DOCUMENTATION :: END
 parent_href = page('info', data['parent_rating_key'])
 grandparent_href = page('info', data['grandparent_rating_key'])
 user_href = page('user', data['user_id']) if data['user_id'] else '#'
+library_href = page('library', data['section_id']) if data['section_id'] else '#'
 season = short_season(data['parent_title'])
 %>
 <div class="dashboard-activity-instance" id="activity-instance-${sk}" data-key="${sk}" data-id="${data['session_id']}"
@@ -369,7 +370,7 @@ DOCUMENTATION :: END
 % if data['media_type'] != 'photo':
 <div class="dashboard-activity-info-time">
     % if data['live']:
-    <br /><span class="thumb-tooltip dashboard-activity-info-channel" data-toggle="popover" data-img="${data['channel_thumb']}" data-height="40" data-width="40">${data['channel_call_sign']} ${data['channel_identifier']}</span>
+    <br /><span class="thumb-tooltip dashboard-activity-info-channel" data-toggle="popover" data-img="${data['channel_thumb']}" data-height="40" data-width="40">${data['channel_title'] or (data['channel_vcn'] + ' ' + data['channel_call_sign'])}</span>
     % elif data['view_offset']:
     ETA:
     <span id="stream-eta-${sk}">
@@ -463,21 +464,27 @@ DOCUMENTATION :: END
 <div class="dashboard-activity-metadata-subtitle-container">
     % if data['live']:
     <div id="media-type-${sk}" class="dashboard-activity-metadata-media_type-icon" title="Live TV">
-        <i class="fa fa-fw fa-broadcast-tower"></i>
+        <a href="${library_href}">
+            <i class="fa fa-fw fa-broadcast-tower"></i>
+        </a>
     </div>
     % elif data['channel_stream'] == 0:
     <div id="media-type-${sk}" class="dashboard-activity-metadata-media_type-icon" title="${data['media_type'].capitalize()}">
-        % if data['media_type'] == 'movie':
-        <i class="fa fa-fw fa-film"></i>
-        % elif data['media_type'] == 'episode':
-        <i class="fa fa-fw fa-television"></i>
-        % elif data['media_type'] == 'track':
-        <i class="fa fa-fw fa-music"></i>
-        % elif data['media_type'] == 'photo':
-        <i class="fa fa-fw fa-picture-o"></i>
-        % elif data['media_type'] == 'clip':
-        <i class="fa fa-fw fa-video-camera"></i>
-        % endif
+        <a href="${library_href}">
+            % if data['media_type'] == 'movie':
+            <i class="fa fa-fw fa-film"></i>
+            % elif data['media_type'] == 'episode':
+            <i class="fa fa-fw fa-television"></i>
+            % elif data['media_type'] == 'track':
+            <i class="fa fa-fw fa-music"></i>
+            % elif data['media_type'] == 'photo':
+            <i class="fa fa-fw fa-picture-o"></i>
+            % elif data['media_type'] == 'clip':
+            <i class="fa fa-fw fa-video-camera"></i>
+            % else:
+            <i class="fa fa-fw fa-question-circle"></i>
+            % endif
+        </a>
     </div>
     % else:
     <div id="media-type-${sk}" class="dashboard-activity-metadata-media_type-icon" title="Channel">
```
```diff
@@ -20,6 +20,7 @@ DOCUMENTATION :: END
 export = exporter.Export()
 thumb_media_types = ', '.join([export.PLURAL_MEDIA_TYPES[k] for k, v in export.MEDIA_TYPES.items() if v[0]])
 art_media_types = ', '.join([export.PLURAL_MEDIA_TYPES[k] for k, v in export.MEDIA_TYPES.items() if v[1]])
+logo_media_types = ', '.join([export.PLURAL_MEDIA_TYPES[k] for k, v in export.MEDIA_TYPES.items() if v[2]])
 %>
 <div class="modal-dialog" role="document">
     <div class="modal-content">
@@ -144,6 +145,22 @@ DOCUMENTATION :: END
         Select the level to export background artwork image files.<br>Note: Only applies to ${art_media_types}.
     </p>
 </div>
+<div class="form-group">
+    <label for="export_logo_level">Logo Image Export Level</label>
+    <div class="row">
+        <div class="col-md-12">
+            <select class="form-control" id="export_logo_level" name="export_logo_level">
+                <option value="0" selected>Level 0 - None / Custom</option>
+                <option value="1">Level 1 - Uploaded and Selected Logos Only</option>
+                <option value="2">Level 2 - Selected and Locked Logos Only</option>
+                <option value="9">Level 9 - All Selected Logos</option>
+            </select>
+        </div>
+    </div>
+    <p class="help-block">
+        Select the level to export logo image files.<br>Note: Only applies to ${logo_media_types}.
+    </p>
+</div>
 <p class="help-block">
     Warning: Exporting images may take a long time! Images will be saved to a folder alongside the data file.
 </p>
@@ -231,6 +248,7 @@ DOCUMENTATION :: END
 $('#export_media_info_level').prop('disabled', true);
 $("#export_thumb_level").prop('disabled', true);
 $("#export_art_level").prop('disabled', true);
+$("#export_logo_level").prop('disabled', true);
 export_custom_metadata_fields.disable();
 export_custom_media_info_fields.disable();
 } else {
@@ -238,6 +256,7 @@ DOCUMENTATION :: END
 $('#export_media_info_level').prop('disabled', false);
 $("#export_thumb_level").prop('disabled', false);
 $("#export_art_level").prop('disabled', false);
+$("#export_logo_level").prop('disabled', false);
 export_custom_metadata_fields.enable();
 export_custom_media_info_fields.enable();
 }
@@ -252,6 +271,7 @@ DOCUMENTATION :: END
 var file_format = $('#export_file_format option:selected').val();
 var thumb_level = $("#export_thumb_level option:selected").val();
 var art_level = $("#export_art_level option:selected").val();
+var logo_level = $("#export_logo_level option:selected").val();
 var custom_fields = [
     $('#export_custom_metadata_fields').val(),
     $('#export_custom_media_info_fields').val()
@@ -270,6 +290,7 @@ DOCUMENTATION :: END
 file_format: file_format,
 thumb_level: thumb_level,
 art_level: art_level,
+logo_level: logo_level,
 custom_fields: custom_fields,
 export_type: export_type,
 individual_files: individual_files
```
```diff
@@ -301,6 +301,10 @@
         return obj;
     }, {});
 
+    if (!("Total" in chart_visibility)) {
+        chart_visibility["Total"] = false;
+    }
+
     return data_series.map(function(s) {
         var obj = Object.assign({}, s);
         obj.visible = (chart_visibility[s.name] !== false);
@@ -327,7 +331,8 @@
         'Direct Play': '#E5A00D',
         'Direct Stream': '#FFFFFF',
         'Transcode': '#F06464',
-        'Max. Concurrent Streams': '#96C83C'
+        'Max. Concurrent Streams': '#96C83C',
+        'Total': '#96C83C'
     };
     var series_colors = [];
     $.each(data_series, function(index, series) {
@@ -755,6 +760,7 @@
     if (this.points.length > 1) {
         var total = 0;
         $.each(this.points, function(i, point) {
+            if (point.series.name === 'Total') return;
             s += '<br/>'+point.series.name+': '+point.y;
             total += point.y;
         });
@@ -781,6 +787,7 @@
     if (this.points.length > 1) {
         var total = 0;
         $.each(this.points, function(i, point) {
+            if (point.series.name === 'Total') return;
             s += '<br/>'+point.series.name+': '+moment.duration(point.y, 'hours').format('D [days] H [hrs] m [mins]');
             total += point.y;
         });
```
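The two tooltip hunks above implement the v2.15.3 fix "Remove duplicate 'Total' entry in graph tooltips" (#2534): the synthetic "Total" series added to the daily streams graph must be skipped when the tooltip sums the per-category values, or it gets counted twice. A small Python sketch of the same guard; the dict shape is illustrative, not the Highcharts point object:

```python
def tooltip_lines(points):
    # Skip the synthetic "Total" series so the summed footer
    # does not double-count it.
    lines, total = [], 0
    for point in points:
        if point["series"] == "Total":
            continue
        lines.append(f"{point['series']}: {point['y']}")
        total += point["y"]
    lines.append(f"Total: {total}")
    return lines
```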
```diff
@@ -92,10 +92,10 @@
 <h3 class="pull-left"><span id="recently-added-xml">Recently Added</span></h3>
 <ul class="nav nav-header nav-dashboard pull-right" style="margin-top: -3px;">
     <li>
-        <a href="#" id="recently-added-page-left" class="paginate btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a>
+        <a href="#" id="recently-added-page-left" class="paginate-added btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
     </li>
     <li>
-        <a href="#" id="recently-added-page-right" class="paginate btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a>
+        <a href="#" id="recently-added-page-right" class="paginate-added btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
     </li>
 </ul>
 <div class="button-bar">
@@ -212,28 +212,6 @@
         </div>
     </div>
 </div>
-<% from plexpy.helpers import anon_url %>
-<div id="python2-modal" class="modal fade wide" tabindex="-1" role="dialog" aria-labelledby="python2-modal">
-    <div class="modal-dialog" role="document">
-        <div class="modal-content">
-            <div class="modal-header">
-                <button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
-                <h4 class="modal-title">Unable to Update</h4>
-            </div>
-            <div class="modal-body" style="text-align: center;">
-                <p>Tautulli is still running using Python 2 and cannot be updated past v2.6.3.</p>
-                <p>Python 3 is required to continue receiving updates.</p>
-                <p>
-                    <strong>Please see the <a href="${anon_url('https://github.com/Tautulli/Tautulli/wiki/Upgrading-to-Python-3-%28Tautulli-v2.5%29')}" target="_blank" rel="noreferrer">wiki</a>
-                    for instructions on how to upgrade to Python 3.</strong>
-                </p>
-            </div>
-            <div class="modal-footer">
-                <input type="button" class="btn btn-bright" data-dismiss="modal" value="Close">
-            </div>
-        </div>
-    </div>
-</div>
 % endif
 
 <div class="modal fade" id="ip-info-modal" tabindex="-1" role="dialog" aria-labelledby="ip-info-modal">
@@ -320,6 +298,8 @@
 
 $('#currentActivityHeader-bandwidth-tooltip').tooltip({ container: 'body', placement: 'right', delay: 50 });
 
+var title = document.title;
+
 function getCurrentActivity() {
     activity_ready = false;
 
@@ -390,6 +370,8 @@
 
     $('#currentActivityHeader').show();
 
+    document.title = stream_count + ' stream' + (stream_count > 1 ? 's' : '') + ' | ' + title;
+
     sessions.forEach(function (session) {
         var s = (typeof Proxy === "function") ? new Proxy(session, defaultHandler) : session;
         var key = s.session_key;
@@ -584,6 +566,7 @@
 
     // Update the stream progress times
+    $('#stream-eta-' + key).html(moment().add(parseInt(s.duration) - parseInt(s.view_offset), 'milliseconds').format(time_format));
     $('#stream-duration-' + key).html(millisecondsToMinutes(parseInt(s.stream_duration), false));
     var stream_view_offset = $('#stream-view-offset-' + key);
     stream_view_offset.data('state', s.state);
     if (stream_view_offset.data('last_view_offset') !== s.view_offset) {
@@ -621,6 +604,8 @@
 } else {
     $('#currentActivityHeader').hide();
     $('#currentActivity').html('<div id="dashboard-no-activity" class="text-muted">Nothing is currently being played.</div>');
+
+    document.title = title;
 }
 
 activity_ready = true;
@@ -957,10 +942,14 @@
         count: recently_added_count,
         media_type: recently_added_type
     },
+    beforeSend: function () {
+        $(".dashboard-recent-media-row").animate({ scrollLeft: 0 }, 1000);
+    },
     complete: function (xhr, status) {
         $("#recentlyAdded").html(xhr.responseText);
         $('#ajaxMsg').fadeOut();
-        highlightAddedScrollerButton();
+        highlightScrollerButton("#recently-added");
+        paginateScroller("#recently-added", ".paginate-added");
     }
 });
 }
@@ -976,57 +965,11 @@
     recentlyAdded(recently_added_count, recently_added_type);
 }
 
-function highlightAddedScrollerButton() {
-    var scroller = $("#recently-added-row-scroller");
-    var numElems = scroller.find("li:visible").length;
-    scroller.width(numElems * 175);
-    if (scroller.width() > $("body").find(".container-fluid").width()) {
-        $("#recently-added-page-right").removeClass("disabled");
-    } else {
-        $("#recently-added-page-right").addClass("disabled");
-    }
-}
-
-$(window).resize(function () {
-    highlightAddedScrollerButton();
-});
-
-function resetScroller() {
-    leftTotal = 0;
-    $("#recently-added-row-scroller").animate({ left: leftTotal }, 1000);
-    $("#recently-added-page-left").addClass("disabled").blur();
-}
-
-var leftTotal = 0;
-$(".paginate").click(function (e) {
-    e.preventDefault();
-    var scroller = $("#recently-added-row-scroller");
-    var containerWidth = $("body").find(".container-fluid").width();
-    var scrollAmount = $(this).data("id") * parseInt((containerWidth - 15) / 175) * 175;
-    var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
-
-    leftTotal = Math.max(Math.min(leftTotal + scrollAmount, 0), leftMax);
-    scroller.animate({ left: leftTotal }, 250);
-
-    if (leftTotal === 0) {
-        $("#recently-added-page-left").addClass("disabled").blur();
-    } else {
-        $("#recently-added-page-left").removeClass("disabled");
-    }
-
-    if (leftTotal === leftMax) {
-        $("#recently-added-page-right").addClass("disabled").blur();
-    } else {
-        $("#recently-added-page-right").removeClass("disabled");
-    }
-});
-
 $('#recently-added-toggles').on('change', function () {
     $('#recently-added-toggles > label').removeClass('active');
     selected_filter = $('input[name=recently-added-toggle]:checked', '#recently-added-toggles');
     $(selected_filter).closest('label').addClass('active');
     recently_added_type = $(selected_filter).val();
-    resetScroller();
     setLocalStorage('home_stats_recently_added_type', recently_added_type);
     recentlyAdded(recently_added_count, recently_added_type);
 });
@@ -1034,7 +977,6 @@
 $('#recently-added-count').change(function () {
     forceMinMax($(this));
     recently_added_count = $(this).val();
-    resetScroller();
     setLocalStorage('home_stats_recently_added_count', recently_added_count);
     recentlyAdded(recently_added_count, recently_added_type);
 });
@@ -1066,16 +1008,4 @@
 });
 </script>
 % endif
-% if _session['user_group'] == 'admin':
-<script>
-const queryString = window.location.search;
-const urlParams = new URLSearchParams(queryString);
-if (urlParams.get('update') === 'python2') {
-    $("#python2-modal").modal({
-        backdrop: 'static',
-        keyboard: false
-    });
-}
-</script>
-% endif
 </%def>
```
```diff
@@ -408,8 +408,8 @@ DOCUMENTATION :: END
     % endif
 </div>
 <div class="summary-content-details-tag" id="channel-icon">
-    % if media_info['channel_identifier']:
-    Channel <strong> <span class="thumb-tooltip" data-toggle="popover" data-img="${media_info['channel_thumb']}" data-height="40" data-width="40">${media_info['channel_call_sign']} ${media_info['channel_identifier']}</span> </strong>
+    % if media_info['channel_vcn']:
+    Channel <strong> <span class="thumb-tooltip" data-toggle="popover" data-img="${media_info['channel_thumb']}" data-height="40" data-width="40">${media_info['channel_title'] or (media_info['channel_vcn'] + ' ' + media_info['channel_call_sign'])}</span> </strong>
     % endif
 </div>
 </div>
@@ -878,7 +878,7 @@ DOCUMENTATION :: END
 transcode_decision: transcode_decision,
 user_id: "${history_user_id}",
 % if data['live']:
-guid: "${data['guid']}
+guid: "${data['guid']}"
 % elif data['media_type'] in ('show', 'artist'):
 grandparent_rating_key: "${data['rating_key']}"
 % elif data['media_type'] in ('season', 'album'):
@@ -947,8 +947,12 @@ DOCUMENTATION :: END
 url: 'item_watch_time_stats',
 async: true,
 data: {
+    % if data['live']:
+    guid: "${data['guid']}"
+    % else:
     rating_key: "${data['rating_key']}",
-    media_type: "${data['media_type']}"
+    media_type: "${data['media_type']}"
+    % endif
 },
 complete: function(xhr, status) {
     $("#watch-time-stats").html(xhr.responseText);
@@ -959,8 +963,12 @@ DOCUMENTATION :: END
 url: 'item_user_stats',
 async: true,
 data: {
+    % if data['live']:
+    guid: "${data['guid']}"
+    % else:
     rating_key: "${data['rating_key']}",
-    media_type: "${data['media_type']}"
+    media_type: "${data['media_type']}"
+    % endif
 },
 complete: function(xhr, status) {
     $("#user-stats").html(xhr.responseText);
```
File diff suppressed because one or more lines are too long
|
@@ -288,23 +288,10 @@ function isPrivateIP(ip_address) {
}

function humanTime(seconds) {
    var d = Math.floor(moment.duration(seconds, 'seconds').asDays());
    var h = Math.floor(moment.duration((seconds % 86400), 'seconds').asHours());
    var m = Math.round(moment.duration(((seconds % 86400) % 3600), 'seconds').asMinutes());

    var text = '';
    if (d > 0) {
        text = '<h3>' + d + '</h3><p> day' + ((d > 1) ? 's' : '') + '</p>'
            + '<h3>' + h + '</h3><p> hr' + ((h > 1) ? 's' : '') + '</p>'
            + '<h3>' + m + '</h3><p> min' + ((m > 1) ? 's' : '') + '</p>';
    } else if (h > 0) {
        text = '<h3>' + h + '</h3><p> hr' + ((h > 1) ? 's' : '') + '</p>'
            + '<h3>' + m + '</h3><p> min' + ((m > 1) ? 's' : '') + '</p>';
    } else {
        text = '<h3>' + m + '</h3><p> min' + ((m > 1) ? 's' : '') + '</p>';
    if (seconds > 0) {
        return humanDuration(seconds * 1000).replaceAll(/(\d+) (\w+)/g, '<h3>$1</h3><p>$2</p>')
    }

    return text
    return "<h3>0</h3><p>mins</p>";
}

String.prototype.toProperCase = function () {

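The rewrite delegates to humanDuration and converts its plain-text output into the dashboard's <h3>/<p> markup with a single regex pass. A minimal sketch of that transformation, with a stand-in for humanDuration (its exact output format is assumed from the surrounding code):

    // Stand-in for humanDuration(): assumed to return e.g. "1 day 2 hrs 5 mins".
    function humanDurationStub(ms) {
        return '1 day 2 hrs 5 mins';
    }

    // Wrap each "<number> <unit>" pair in the stats markup, as the new humanTime does.
    const markup = humanDurationStub(93900000)
        .replaceAll(/(\d+) (\w+)/g, '<h3>$1</h3><p>$2</p>');

    console.log(markup);
    // "<h3>1</h3><p>day</p> <h3>2</h3><p>hrs</p> <h3>5</h3><p>mins</p>"
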
@@ -360,7 +347,8 @@ function humanDuration(ms, sig='dhm', units='ms', return_seconds=300000) {
        sig = 'dhms'
    }

    ms = ms * factors[units];
    r = factors[sig.slice(-1)];
    ms = Math.round(ms * factors[units] / r) * r;

    h = ms % factors['d'];
    d = Math.trunc(ms / factors['d']);

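The new code rounds the millisecond total to the smallest significant unit before splitting it into days/hours/minutes, presumably so minute rounding can no longer overflow into values like "60 mins". A small worked sketch of that rounding, with an assumed factors table matching the names in the hunk:

    // Assumed unit factors in milliseconds, matching the names used in humanDuration.
    const factors = { d: 86400000, h: 3600000, m: 60000, s: 1000, ms: 1 };

    // Round 1 hr 59 min 45 s to the smallest significant unit ('m' from sig = 'dhm').
    const sig = 'dhm';
    const r = factors[sig.slice(-1)];          // 60000 ms per minute
    const ms = Math.round(7185000 / r) * r;    // 7185000 -> 7200000 (exactly 2 hrs)

    console.log(ms / factors.h);               // 2
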
@@ -929,3 +917,50 @@ $('.modal').on('hide.bs.modal', function (e) {
$.fn.hasScrollBar = function() {
    return this.get(0).scrollHeight > this.get(0).clientHeight;
}

function paginateScroller(scrollerId, buttonClass) {
    $(buttonClass).click(function (e) {
        e.preventDefault();
        var scroller = $(scrollerId + "-row-scroller");
        var scrollerParent = scroller.parent();
        var containerWidth = scrollerParent.width();
        var scrollCurrent = scrollerParent.scrollLeft();
        var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
        var scrollMax = scroller.width() - Math.abs(scrollAmount);
        var scrollTotal = Math.min(parseInt(scrollCurrent / 175) * 175 + scrollAmount, scrollMax);
        scrollerParent.animate({ scrollLeft: scrollTotal }, 250);
    });
}

function highlightScrollerButton(scrollerId) {
    var scroller = $(scrollerId + "-row-scroller");
    var scrollerParent = scroller.parent();
    var buttonLeft = $(scrollerId + "-page-left");
    var buttonRight = $(scrollerId + "-page-right");

    var numElems = scroller.find("li").length;
    scroller.width(numElems * 175);
    $(buttonLeft).addClass("disabled").blur();
    if (scroller.width() > scrollerParent.width()) {
        $(buttonRight).removeClass("disabled");
    } else {
        $(buttonRight).addClass("disabled");
    }

    scrollerParent.scroll(function () {
        var scrollCurrent = $(this).scrollLeft();
        var scrollMax = scroller.width() - $(this).width();

        if (scrollCurrent == 0) {
            $(buttonLeft).addClass("disabled").blur();
        } else {
            $(buttonLeft).removeClass("disabled");
        }

        if (scrollCurrent >= scrollMax) {
            $(buttonRight).addClass("disabled").blur();
        } else {
            $(buttonRight).removeClass("disabled");
        }
    });
}

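These two generic helpers replace the per-page scroller functions that are deleted further down in this diff. A short usage sketch, assuming the markup conventions the helpers rely on (a #<id>-row-scroller list inside a scrollable parent, and page buttons named #<id>-page-left/right):

    // Wire up a "Recently Watched" card once its HTML fragment has been loaded.
    // The selectors follow the naming convention the helpers expect; the element
    // IDs and the endpoint here are examples, not part of the helpers themselves.
    $.ajax({
        url: 'library_recently_watched',        // hypothetical endpoint for the fragment
        complete: function (xhr) {
            $("#library-recently-watched").html(xhr.responseText);
            highlightScrollerButton("#recently-watched");               // set initial button state
            paginateScroller("#recently-watched", ".paginate-watched"); // bind the page buttons
        }
    });
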
@@ -100,7 +100,7 @@ export_table_options = {
"createdCell": function (td, cellData, rowData, row, col) {
    if (cellData !== '') {
        var images = '';
        if (rowData['thumb_level'] || rowData['art_level']) {
        if (rowData['thumb_level'] || rowData['art_level'] || rowData['logo_level']) {
            images = ' + images';
        }
        $(td).html(cellData + images);

@@ -161,14 +161,14 @@ export_table_options = {
    if (cellData === 1 && rowData['exists']) {
        var tooltip_title = '';
        var icon = '';
        if (rowData['thumb_level'] || rowData['art_level'] || rowData['individual_files']) {
            tooltip_title = 'Zip Archive';
        if (rowData['thumb_level'] || rowData['art_level'] || rowData['logo_level'] || rowData['individual_files']) {
            tooltip_title = 'ZIP Archive';
            icon = 'fa-file-archive';
        } else {
            tooltip_title = rowData['file_format'].toUpperCase() + ' File';
            icon = 'fa-file-download';
        }
        var icon = (rowData['thumb_level'] || rowData['art_level'] || rowData['individual_files']) ? 'fa-file-archive' : 'fa-file-download';
        var icon = (rowData['thumb_level'] || rowData['art_level'] || rowData['logo_level'] || rowData['individual_files']) ? 'fa-file-archive' : 'fa-file-download';
        $(td).html('<button class="btn btn-xs btn-success pull-left" data-id="' + rowData['export_id'] + '"><span data-toggle="tooltip" data-placement="left" title="' + tooltip_title + '"><i class="fa ' + icon + ' fa-fw"></i> Download</span></button>');
    } else if (cellData === 0) {
        var percent = Math.min(getPercent(rowData['exported_items'], rowData['total_items']), 99)

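The change threads the new logo_level flag through every place that decides between a ZIP archive and a single-file download. A condensed sketch of that decision, assuming a plain row object with the same field names:

    // Decide how the export row's download button should present itself.
    // Any image level (thumb, art, or the newly added logo) forces a ZIP archive.
    function downloadPresentation(row) {
        const zipped = row.thumb_level || row.art_level || row.logo_level || row.individual_files;
        return {
            tooltip: zipped ? 'ZIP Archive' : row.file_format.toUpperCase() + ' File',
            icon: zipped ? 'fa-file-archive' : 'fa-file-download'
        };
    }

    console.log(downloadPresentation({ logo_level: 1, file_format: 'json' }));
    // { tooltip: 'ZIP Archive', icon: 'fa-file-archive' }
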
@@ -149,10 +149,10 @@ DOCUMENTATION :: END
<div class="table-card-header">
    <ul class="nav nav-header nav-dashboard pull-right">
        <li>
            <a href="#" id="recently-watched-page-left" class="paginate-watched btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a>
            <a href="#" id="recently-watched-page-left" class="paginate-watched btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
        </li>
        <li>
            <a href="#" id="recently-watched-page-right" class="paginate-watched btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a>
            <a href="#" id="recently-watched-page-right" class="paginate-watched btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
        </li>
    </ul>
    <div class="header-bar">

@@ -175,10 +175,10 @@ DOCUMENTATION :: END
<div class="table-card-header">
    <ul class="nav nav-header nav-dashboard pull-right">
        <li>
            <a href="#" id="recently-added-page-left" class="paginate-added btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a>
            <a href="#" id="recently-added-page-left" class="paginate-added btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
        </li>
        <li>
            <a href="#" id="recently-added-page-right" class="paginate-added btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a>
            <a href="#" id="recently-added-page-right" class="paginate-added btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
        </li>
    </ul>
    <div class="header-bar">

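The data-id signs flip here, apparently because paging now drives the parent's scrollLeft (where positive values move content toward items further right) instead of animating a negative CSS left offset. A sketch of how the sign feeds the scroll amount, with a hypothetical 700px container:

    // data-id is the paging direction: -1 for the left button, +1 for the right.
    // With a 700px container and 175px tiles, one page is 4 tiles = 700px.
    const direction = +1;                                   // right button clicked
    const containerWidth = 700;                             // hypothetical width
    const scrollAmount = direction * parseInt(containerWidth / 175) * 175;

    console.log(scrollAmount);  // 700 -> scrollLeft grows, revealing items to the right
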
@@ -690,7 +690,8 @@ DOCUMENTATION :: END
    },
    complete: function(xhr, status) {
        $("#library-recently-watched").html(xhr.responseText);
        highlightWatchedScrollerButton();
        highlightScrollerButton("#recently-watched");
        paginateScroller("#recently-watched", ".paginate-watched");
    }
});
}

@@ -706,7 +707,8 @@ DOCUMENTATION :: END
    },
    complete: function(xhr, status) {
        $("#library-recently-added").html(xhr.responseText);
        highlightAddedScrollerButton();
        highlightScrollerButton("#recently-added");
        paginateScroller("#recently-added", ".paginate-added");
    }
});
}

@@ -716,83 +718,8 @@ DOCUMENTATION :: END
    recentlyAdded();
% endif

function highlightWatchedScrollerButton() {
    var scroller = $("#recently-watched-row-scroller");
    var numElems = scroller.find("li").length;
    scroller.width(numElems * 175);
    if (scroller.width() > $("#library-recently-watched").width()) {
        $("#recently-watched-page-right").removeClass("disabled");
    } else {
        $("#recently-watched-page-right").addClass("disabled");
    }
}

function highlightAddedScrollerButton() {
    var scroller = $("#recently-added-row-scroller");
    var numElems = scroller.find("li").length;
    scroller.width(numElems * 175);
    if (scroller.width() > $("#library-recently-added").width()) {
        $("#recently-added-page-right").removeClass("disabled");
    } else {
        $("#recently-added-page-right").addClass("disabled");
    }
}

$(window).resize(function() {
    highlightWatchedScrollerButton();
    highlightAddedScrollerButton();
});

$('div.art-face').animate({ opacity: 0.2 }, { duration: 1000 });

var leftTotalWatched = 0;
$(".paginate-watched").click(function (e) {
    e.preventDefault();
    var scroller = $("#recently-watched-row-scroller");
    var containerWidth = $("#library-recently-watched").width();
    var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
    var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);

    leftTotalWatched = Math.max(Math.min(leftTotalWatched + scrollAmount, 0), leftMax);
    scroller.animate({ left: leftTotalWatched }, 250);

    if (leftTotalWatched == 0) {
        $("#recently-watched-page-left").addClass("disabled").blur();
    } else {
        $("#recently-watched-page-left").removeClass("disabled");
    }

    if (leftTotalWatched == leftMax) {
        $("#recently-watched-page-right").addClass("disabled").blur();
    } else {
        $("#recently-watched-page-right").removeClass("disabled");
    }
});

var leftTotalAdded = 0;
$(".paginate-added").click(function (e) {
    e.preventDefault();
    var scroller = $("#recently-added-row-scroller");
    var containerWidth = $("#library-recently-added").width();
    var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
    var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);

    leftTotalAdded = Math.max(Math.min(leftTotalAdded + scrollAmount, 0), leftMax);
    scroller.animate({ left: leftTotalAdded }, 250);

    if (leftTotalAdded == 0) {
        $("#recently-added-page-left").addClass("disabled").blur();
    } else {
        $("#recently-added-page-left").removeClass("disabled");
    }

    if (leftTotalAdded == leftMax) {
        $("#recently-added-page-right").addClass("disabled").blur();
    } else {
        $("#recently-added-page-right").removeClass("disabled");
    }
});

$(document).ready(function () {

    // Javascript to enable link to tab

@@ -36,7 +36,7 @@ DOCUMENTATION :: END
%>
<div class="dashboard-recent-media-row">
    <div id="recently-added-row-scroller" style="left: 0;">
    <div id="recently-added-row-scroller">
        <ul class="dashboard-recent-media list-unstyled">
            % for item in data:
            <li>

@@ -11,6 +11,7 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="description" content="">
<meta name="author" content="">
<meta name="referrer" content="no-referrer">
<link href="${http_root}css/bootstrap3/bootstrap.min.css" rel="stylesheet">
<link href="${http_root}css/tautulli.css${cache_param}" rel="stylesheet">
<link href="${http_root}css/opensans.min.css" rel="stylesheet">

@@ -3,7 +3,7 @@
<div class="modal-content">
    <div class="modal-header">
        <button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
        <h4 class="modal-title" id="mobile-device-config-modal-header">${device['device_name']} Settings <small><span class="device_id">(Device ID: ${device['id']})</span></small></h4>
        <h4 class="modal-title" id="mobile-device-config-modal-header">${device['device_name']} Settings <small><span class="device_id">(Device ID: ${device['id']}${' - ' + device['friendly_name'] if device['friendly_name'] else ''})</span></small></h4>
    </div>
    <div class="modal-body">
        <div class="container-fluid">

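The same pattern is applied to the newsletter and notifier modal headers below: a Mako inline-if appends " - <friendly name>" to the ID only when a description is set. The equivalent display logic in JavaScript, with a made-up device object:

    // Append the optional friendly name to the ID, as the Mako inline-if does.
    function deviceIdLabel(device) {
        return '(Device ID: ' + device.id + (device.friendlyName ? ' - ' + device.friendlyName : '') + ')';
    }

    console.log(deviceIdLabel({ id: 7, friendlyName: "Sam's Phone" })); // "(Device ID: 7 - Sam's Phone)"
    console.log(deviceIdLabel({ id: 7, friendlyName: '' }));            // "(Device ID: 7)"
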
@@ -13,7 +13,7 @@
<div class="modal-content">
    <div class="modal-header">
        <button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
        <h4 class="modal-title" id="newsletter-config-modal-header">${newsletter['agent_label']} Newsletter Settings <small><span class="newsletter_id">(Newsletter ID: ${newsletter['id']})</span></small></h4>
        <h4 class="modal-title" id="newsletter-config-modal-header">${newsletter['agent_label']} Newsletter Settings <small><span class="newsletter_id">(Newsletter ID: ${newsletter['id']}${' - ' + newsletter['friendly_name'] if newsletter['friendly_name'] else ''})</span></small></h4>
    </div>
    <div class="modal-body">
        <div class="container-fluid">

@@ -32,7 +32,7 @@
<div class="col-md-12">
    <div class="checkbox" style="margin-bottom: 20px;">
        <label>
            <input type="checkbox" data-id="active_value" class="checkboxes" value="1" ${checked(newsletter['active'])}> Enable the Newsletter
            <input type="checkbox" data-id="active_value" class="checkboxes" value="1" autocomplete="off" ${checked(newsletter['active'])}> Enable the Newsletter
        </label>
        <input type="hidden" id="active_value" name="active" value="${newsletter['active']}">
    </div>

@@ -40,17 +40,20 @@
<label for="custom_cron">Schedule</label>
<div class="row">
    <div class="col-md-12">
        <select class="form-control" id="custom_cron" name="newsletter_config_custom_cron">
        <select class="form-control" id="custom_cron" name="newsletter_config_custom_cron" autocomplete="off">
            <option value="0" ${'selected' if newsletter['config']['custom_cron'] == 0 else ''}>Simple</option>
            <option value="1" ${'selected' if newsletter['config']['custom_cron'] == 1 else ''}>Custom</option>
        </select>
        <input type="text" id="cron_value" name="cron" value="${newsletter['cron']}" />
        <input type="text" id="cron_value" name="cron" value="${newsletter['cron']}" autocomplete="off" />
        <div id="cron-widget"></div>
    </div>
</div>
<p class="help-block">
    <span id="simple_cron_message">Set the schedule for the newsletter.</span>
    <span id="custom_cron_message">Set the schedule for the newsletter using a <a href="${anon_url('https://crontab.guru')}" target="_blank" rel="noreferrer">custom crontab</a>. Only standard cron values are valid.</span>
    <span id="custom_cron_message">
        Set the schedule for the newsletter using a <a href="${anon_url('https://crontab.guru')}" target="_blank" rel="noreferrer">custom crontab</a>.
        <a href="${anon_url('https://apscheduler.readthedocs.io/en/3.x/modules/triggers/cron.html#expression-types')}" target="_blank" rel="noreferrer">Click here</a> for a list of supported expressions.
    </span>
</p>
</div>
<div class="form-group">

@@ -59,8 +62,8 @@
<div class="col-md-5">
    <div class="input-group newsletter-time_frame">
        <span class="input-group-addon form-control btn-dark inactive">Last</span>
        <input type="number" class="form-control" id="newsletter_config_time_frame" name="newsletter_config_time_frame" value="${newsletter['config']['time_frame']}">
        <select class="form-control" id="newsletter_config_time_frame_units" name="newsletter_config_time_frame_units">
        <input type="number" class="form-control" id="newsletter_config_time_frame" name="newsletter_config_time_frame" value="${newsletter['config']['time_frame']}" autocomplete="off">
        <select class="form-control" id="newsletter_config_time_frame_units" name="newsletter_config_time_frame_units" autocomplete="off">
            <option value="months" ${'selected' if newsletter['config']['time_frame_units'] == 'months' else ''}>months</option>
            <option value="days" ${'selected' if newsletter['config']['time_frame_units'] == 'days' else ''}>days</option>
            <option value="hours" ${'selected' if newsletter['config']['time_frame_units'] == 'hours' else ''}>hours</option>

@@ -85,7 +88,7 @@
<label for="${item['name']}">${item['label']}</label>
<div class="row">
    <div class="col-md-12">
        <input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" ${'readonly' if item.get('readonly') else ''}>
        <input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" autocomplete="off" ${'readonly' if item.get('readonly') else ''}>
    </div>
</div>
<p class="help-block">${item['description'] | n}</p>

@@ -95,7 +98,7 @@
<label for="${item['name']}">${item['label']}</label>
<div class="row">
    <div class="col-md-3">
        <input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30">
        <input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" autocomplete="off">
    </div>
</div>
<p class="help-block">${item['description'] | n}</p>

@@ -113,7 +116,7 @@
% elif item['input_type'] == 'checkbox':
<div class="checkbox">
    <label>
        <input type="checkbox" data-id="${item['name']}" class="checkboxes" value="1" ${checked(item['value'])}> ${item['label']}
        <input type="checkbox" data-id="${item['name']}" class="checkboxes" value="1" autocomplete="off" ${checked(item['value'])}> ${item['label']}
    </label>
    <p class="help-block">${item['description'] | n}</p>
    <input type="hidden" id="${item['name']}" name="${item['name']}" value="${item['value']}">

@@ -123,7 +126,7 @@
<label for="${item['name']}">${item['label']}</label>
<div class="row">
    <div class="col-md-12">
        <select class="form-control" id="${item['name']}" name="${item['name']}">
        <select class="form-control" id="${item['name']}" name="${item['name']}" autocomplete="off">
            % for key, value in sorted(item['select_options'].items()):
            % if key == item['value']:
            <option value="${key}" selected>${value}</option>

@@ -141,7 +144,7 @@
<label for="${item['name']}">${item['label']}</label>
<div class="row">
    <div class="col-md-12">
        <select class="form-control" id="${item['name']}" name="${item['name']}">
        <select class="form-control" id="${item['name']}" name="${item['name']}" autocomplete="off">
            % if item['select_all']:
            <option value="select-all">Select All</option>
            <option value="remove-all">Remove All</option>

@@ -175,7 +178,7 @@
<label for="id_name">Unique ID Name</label>
<div class="row">
    <div class="col-md-12">
        <input type="text" class="form-control" id="id_name" name="id_name" value="${newsletter['id_name']}" size="30">
        <input type="text" class="form-control" id="id_name" name="id_name" value="${newsletter['id_name']}" size="30" autocomplete="off">
    </div>
</div>
<p class="help-block">

@@ -188,7 +191,7 @@
<label for="friendly_name">Description</label>
<div class="row">
    <div class="col-md-12">
        <input type="text" class="form-control" id="friendly_name" name="friendly_name" value="${newsletter['friendly_name']}" size="30">
        <input type="text" class="form-control" id="friendly_name" name="friendly_name" value="${newsletter['friendly_name']}" size="30" autocomplete="off">
    </div>
</div>
<p class="help-block">Optional: Enter a description to help identify this newsletter in the newsletters list.</p>

@@ -202,7 +205,7 @@
<label>Saving</label>
<div class="checkbox">
    <label>
        <input type="checkbox" id="newsletter_config_save_only_checkbox" data-id="newsletter_config_save_only" class="checkboxes" value="1" ${checked(newsletter['config']['save_only'])}> Save HTML File Only
        <input type="checkbox" id="newsletter_config_save_only_checkbox" data-id="newsletter_config_save_only" class="checkboxes" value="1" autocomplete="off" ${checked(newsletter['config']['save_only'])}> Save HTML File Only
    </label>
    <p class="help-block">Enable to save the newsletter HTML file without sending it to any notification agent.</p>
    <input type="hidden" id="newsletter_config_save_only" name="newsletter_config_save_only" value="${newsletter['config']['save_only']}">

@@ -211,7 +214,7 @@
<label for="newsletter_config_filename">HTML File Name</label>
<div class="row">
    <div class="col-md-12">
        <input type="text" class="form-control" id="newsletter_config_filename" name="newsletter_config_filename" value="${newsletter['config']['filename']}" size="30">
        <input type="text" class="form-control" id="newsletter_config_filename" name="newsletter_config_filename" value="${newsletter['config']['filename']}" size="30" autocomplete="off">
    </div>
</div>
<p class="help-block">Optional: Enter the file name to use when saving the newsletter (ending with <span class="inline-pre">.html</span>). You may use any of the <a href="#newsletter-text-sub-modal" data-toggle="modal">newsletter text parameters</a>. Leave blank for default.</p>

@@ -221,7 +224,7 @@
<label>Sending</label>
<div class="checkbox">
    <label>
        <input type="checkbox" id="newsletter_config_formatted_checkbox" data-id="newsletter_config_formatted" class="checkboxes" value="1" ${checked(newsletter['config']['formatted'])}> Send Newsletter as an HTML Formatted Email
        <input type="checkbox" id="newsletter_config_formatted_checkbox" data-id="newsletter_config_formatted" class="checkboxes" value="1" autocomplete="off" ${checked(newsletter['config']['formatted'])}> Send Newsletter as an HTML Formatted Email
    </label>
    <p class="help-block">Enable to send the newsletter as an HTML formatted Email. Disable to only send a subject and body message to a different notification agent.</p>
    <input type="hidden" id="newsletter_config_formatted" name="newsletter_config_formatted" value="${newsletter['config']['formatted']}">

@@ -229,7 +232,7 @@
<div class="form-group" id="email_notifier_select">
    <div class="checkbox">
        <label>
            <input type="checkbox" id="newsletter_config_threaded_checkbox" data-id="newsletter_config_threaded" class="checkboxes" value="1" ${checked(newsletter['config']['threaded'])}> Enable Grouped Email Thread
            <input type="checkbox" id="newsletter_config_threaded_checkbox" data-id="newsletter_config_threaded" class="checkboxes" value="1" autocomplete="off" ${checked(newsletter['config']['threaded'])}> Enable Grouped Email Thread
        </label>
        <p class="help-block">Enable to group this newsletter together in a single Email thread. Disable to send a new Email for each newsletter.</p>
        <input type="hidden" id="newsletter_config_threaded" name="newsletter_config_threaded" value="${newsletter['config']['threaded']}">

@@ -237,7 +240,7 @@
<label for="newsletter_email_notifier_id">Email Notification Agent</label>
<div class="row">
    <div class="col-md-12">
        <select class="form-control" id="newsletter_email_notifier_id" name="newsletter_email_notifier_id">
        <select class="form-control" id="newsletter_email_notifier_id" name="newsletter_email_notifier_id" autocomplete="off">
            % for notifier in email_notifiers:
            <% selected = 'selected' if notifier['id'] == newsletter['email_config']['notifier_id'] else '' %>
            % if notifier['friendly_name']:

@@ -260,7 +263,7 @@
<label for="newsletter_config_notifier_id">Notification Agent</label>
<div class="row">
    <div class="col-md-12">
        <select class="form-control" id="newsletter_config_notifier_id" name="newsletter_config_notifier_id">
        <select class="form-control" id="newsletter_config_notifier_id" name="newsletter_config_notifier_id" autocomplete="off">
            % for notifier in other_notifiers:
            <% selected = 'selected' if notifier['id'] == newsletter['config']['notifier_id'] else '' %>
            % if notifier['friendly_name']:

@@ -291,7 +294,7 @@
<label for="${item['name']}">${item['label']}</label>
<div class="row">
    <div class="col-md-12">
        <input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" ${'readonly' if item.get('readonly') else ''}>
        <input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" autocomplete="off" ${'readonly' if item.get('readonly') else ''}>
    </div>
</div>
<p class="help-block">${item['description'] | n}</p>

@@ -301,7 +304,7 @@
<label for="${item['name']}">${item['label']}</label>
<div class="row">
    <div class="col-md-3">
        <input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30">
        <input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" autocomplete="off">
    </div>
</div>
<p class="help-block">${item['description'] | n}</p>

@@ -319,7 +322,7 @@
% elif item['input_type'] == 'checkbox' and item['name'] != 'newsletter_email_html_support':
<div class="checkbox">
    <label>
        <input type="checkbox" data-id="${item['name']}" class="checkboxes" value="1" ${checked(item['value'])}> ${item['label']}
        <input type="checkbox" data-id="${item['name']}" class="checkboxes" value="1" autocomplete="off" ${checked(item['value'])}> ${item['label']}
    </label>
    <p class="help-block">${item['description'] | n}</p>
    <input type="hidden" id="${item['name']}" name="${item['name']}" value="${item['value']}">

@@ -329,7 +332,7 @@
<label for="${item['name']}">${item['label']}</label>
<div class="row">
    <div class="col-md-12">
        <select class="form-control" id="${item['name']}" name="${item['name']}">
        <select class="form-control" id="${item['name']}" name="${item['name']}" autocomplete="off">
            % for key, value in sorted(item['select_options'].items()):
            % if isinstance(value, list):
            <optgroup label="${key}">

@@ -351,7 +354,7 @@
<label for="${item['name']}">${item['label']}</label>
<div class="row">
    <div class="col-md-12">
        <select class="form-control" id="${item['name']}" name="${item['name']}">
        <select class="form-control" id="${item['name']}" name="${item['name']}" autocomplete="off">
            <option value="select-all">Select All</option>
            <option value="remove-all">Remove All</option>
            % if isinstance(item['select_options'], dict):

@@ -396,7 +399,7 @@
<label for="subject">Subject</label>
<div class="row">
    <div class="col-md-12">
        <input type="text" class="form-control" id="subject" name="subject" value="${newsletter['subject']}" size="30">
        <input type="text" class="form-control" id="subject" name="subject" value="${newsletter['subject']}" size="30" autocomplete="off">
    </div>
</div>
<p class="help-block">

@@ -407,7 +410,7 @@
<label for="body">Body</label>
<div class="row">
    <div class="col-md-12">
        <textarea class="form-control" id="body" name="body" data-autoresize>${newsletter['body']}</textarea>
        <textarea class="form-control" id="body" name="body" autocomplete="off" data-autoresize>${newsletter['body']}</textarea>
    </div>
</div>
<p class="help-block">

@@ -418,7 +421,7 @@
<label for="message">Message</label>
<div class="row">
    <div class="col-md-12">
        <textarea class="form-control" id="message" name="message" data-autoresize>${newsletter['message']}</textarea>
        <textarea class="form-control" id="message" name="message" autocomplete="off" data-autoresize>${newsletter['message']}</textarea>
    </div>
</div>
<p class="help-block">

@@ -481,7 +484,7 @@
});

if (${newsletter['config']['custom_cron']}) {
    $('#cron_value').val('${newsletter['cron']}');
    $('#cron_value').val('${newsletter['cron'] | n}');
} else {
    try {
        cron_widget.cron('value', '${newsletter['cron']}');

@@ -1,5 +1,5 @@
<%
from six.moves.urllib.parse import urlencode
from urllib.parse import urlencode
%>
<!doctype html>

@@ -12,7 +12,7 @@
<div class="modal-content">
    <div class="modal-header">
        <button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
        <h4 class="modal-title" id="notifier-config-modal-header">${notifier['agent_label']} Settings <small><span class="notifier_id">(Notifier ID: ${notifier['id']})</span></small></h4>
        <h4 class="modal-title" id="notifier-config-modal-header">${notifier['agent_label']} Settings <small><span class="notifier_id">(Notifier ID: ${notifier['id']}${' - ' + notifier['friendly_name'] if notifier['friendly_name'] else ''})</span></small></h4>
    </div>
    <div class="modal-body">
        <div class="container-fluid">

@@ -51,13 +51,13 @@
<div class="col-md-12">
    % if notifier['agent_name'] == 'scripts' and item['name'] == 'scripts_script_folder':
    <div class="input-group">
        <input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" ${'readonly' if item.get('readonly') else ''}>
        <input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" autocomplete="off" ${'readonly' if item.get('readonly') else ''}>
        <span class="input-group-btn">
            <button class="btn btn-form" type="button" id="${item['name']}_browse" data-toggle="browse" data-filter=".folderonly" data-target="#${item['name']}">Browse</button>
        </span>
    </div>
    % else:
    <input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" ${'readonly' if item.get('readonly') else ''}>
    <input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" autocomplete="off" ${'readonly' if item.get('readonly') else ''}>
    % endif
</div>
</div>

@@ -72,7 +72,7 @@
<span class="input-group-btn">
    <button class="btn btn-form reveal-token" type="button"><i class="fa fa-eye-slash"></i></button>
</span>
<input type="password" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" ${'readonly' if item.get('readonly') else ''}>
<input type="password" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" autocomplete="off" ${'readonly' if item.get('readonly') else ''}>
</div>
</div>
</div>

@@ -83,7 +83,7 @@
<label for="${item['name']}">${item['label']}</label>
<div class="row">
    <div class="col-md-3">
        <input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30">
        <input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" autocomplete="off">
    </div>
</div>
<p class="help-block">${item['description'] | n}</p>

@@ -101,7 +101,7 @@
% elif item['input_type'] == 'checkbox':
<div class="checkbox">
    <label>
        <input type="checkbox" data-id="${item['name']}" class="checkboxes" value="1" ${checked(item['value'])}> ${item['label']}
        <input type="checkbox" data-id="${item['name']}" class="checkboxes" value="1" autocomplete="off" ${checked(item['value'])}> ${item['label']}
    </label>
    <p class="help-block">${item['description'] | n}</p>
    <input type="hidden" id="${item['name']}" name="${item['name']}" value="${item['value']}">

@@ -111,7 +111,7 @@
<label for="${item['name']}">${item['label']}</label>
<div class="row">
    <div class="col-md-12">
        <select class="form-control" id="${item['name']}" name="${item['name']}">
        <select class="form-control" id="${item['name']}" name="${item['name']}" autocomplete="off">
            % for key, value in sorted(item['select_options'].items()):
            % if isinstance(value, list):
            <optgroup label="${key}">

@@ -133,7 +133,7 @@
<label for="${item['name']}">${item['label']}</label>
<div class="row">
    <div class="col-md-12">
        <select class="form-control" id="${item['name']}" name="${item['name']}">
        <select class="form-control" id="${item['name']}" name="${item['name']}" autocomplete="off">
            % if item['select_all']:
            <option value="select-all">Select All</option>
            <option value="remove-all">Remove All</option>

@@ -167,7 +167,7 @@
<label for="friendly_name">Description</label>
<div class="row">
    <div class="col-md-12">
        <input type="text" class="form-control" id="friendly_name" name="friendly_name" value="${notifier['friendly_name']}" size="30">
        <input type="text" class="form-control" id="friendly_name" name="friendly_name" value="${notifier['friendly_name']}" size="30" autocomplete="off">
    </div>
</div>
<p class="help-block">Optional: Enter a description to help identify this agent in the notification agents list.</p>

@@ -185,7 +185,7 @@
% for action in available_notification_actions:
<div class="checkbox">
    <label>
        <input type="checkbox" data-id="${action['name']}" class="checkboxes" value="1" ${checked(notifier['actions'][action['name']])}> ${action['label']}
        <input type="checkbox" data-id="${action['name']}" class="checkboxes" value="1" autocomplete="off" ${checked(notifier['actions'][action['name']])}> ${action['label']}
    </label>
    <p class="help-block">${action['description'] | n}</p>
    <input type="hidden" id="${action['name']}" name="${action['name']}" value="${notifier['actions'][action['name']]}">

@@ -208,7 +208,7 @@

<div class="form-group">
    <label for="custom_conditions_logic">Condition Logic</label>
    <input type="text" class="form-control" name="custom_conditions_logic" id="custom_conditions_logic" value="${notifier['custom_conditions_logic']}" />
    <input type="text" class="form-control" name="custom_conditions_logic" id="custom_conditions_logic" value="${notifier['custom_conditions_logic']}" autocomplete="off" />
    <div id="custom_conditions_logic_error" class="alert alert-danger" role="alert" style="padding-top: 5px; padding-bottom: 5px; margin: 0; display: none;"><i class="fa fa-exclamation-triangle" style="color: #a94442;"></i> <span></span></div>
    <p class="help-block">
        Optional: Enter custom logic to use when evaluating the conditions (e.g. <span class="inline-pre">{1} and ({2} or {3})</span>).

@@ -254,7 +254,7 @@
<li>
    <div class="form-group">
        <label for="${action['name']}_subject">Script Arguments</label>
        <input class="form-control" type="text" id="${action['name']}_subject" name="${action['name']}_subject" value="${notifier['notify_text'][action['name']]['subject']}" data-parsley-trigger="change" required>
        <input class="form-control" type="text" id="${action['name']}_subject" name="${action['name']}_subject" value="${notifier['notify_text'][action['name']]['subject']}" autocomplete="off" data-parsley-trigger="change" required>
        <p class="help-block">Set custom arguments passed to the script.</p>
    </div>
    <div class="form-group">

@@ -280,12 +280,12 @@
<li>
    <div class="form-group">
        <label for="${action['name']}_subject">JSON Headers</label>
        <textarea class="form-control" id="${action['name']}_subject" name="${action['name']}_subject" data-parsley-trigger="change" data-autoresize required>${notifier['notify_text'][action['name']]['subject']}</textarea>
        <textarea class="form-control" id="${action['name']}_subject" name="${action['name']}_subject" data-parsley-trigger="change" autocomplete="off" data-autoresize required>${notifier['notify_text'][action['name']]['subject']}</textarea>
        <p class="help-block">Set custom JSON headers.</p>
    </div>
    <div class="form-group">
        <label for="${action['name']}_body">JSON Data</label>
        <textarea class="form-control" id="${action['name']}_body" name="${action['name']}_body" data-parsley-trigger="change" data-autoresize required>${notifier['notify_text'][action['name']]['body']}</textarea>
        <textarea class="form-control" id="${action['name']}_body" name="${action['name']}_body" data-parsley-trigger="change" autocomplete="off" data-autoresize required>${notifier['notify_text'][action['name']]['body']}</textarea>
        <p class="help-block">Set custom JSON data.</p>
    </div>
    <div class="form-group">

@@ -311,12 +311,12 @@
<li>
    <div class="form-group">
        <label for="${action['name']}_subject">Subject Line</label>
        <input class="form-control" type="text" id="${action['name']}_subject" name="${action['name']}_subject" value="${notifier['notify_text'][action['name']]['subject']}" data-parsley-trigger="change" required>
        <input class="form-control" type="text" id="${action['name']}_subject" name="${action['name']}_subject" value="${notifier['notify_text'][action['name']]['subject']}" autocomplete="off" data-parsley-trigger="change" required>
        <p class="help-block">Set a custom subject line.</p>
    </div>
    <div class="form-group">
        <label for="${action['name']}_body">Message Body</label>
        <textarea class="form-control" id="${action['name']}_body" name="${action['name']}_body" data-parsley-trigger="change" data-autoresize required>${notifier['notify_text'][action['name']]['body']}</textarea>
        <textarea class="form-control" id="${action['name']}_body" name="${action['name']}_body" data-parsley-trigger="change" autocomplete="off" data-autoresize required>${notifier['notify_text'][action['name']]['body']}</textarea>
        <p class="help-block">Set a custom body.</p>
    </div>
    <div class="form-group">

@@ -347,7 +347,7 @@
<label for="test_script">Script</label>
<div class="row">
    <div class="col-md-12">
        <select class="form-control" id="test_script" name="test_script">
        <select class="form-control" id="test_script" name="test_script" autocomplete="off">
            % for key, value in sorted(notifier['config_options'][2]['select_options'].items()):
            <option value="${key}">${value}</option>
            % endfor

@@ -360,7 +360,7 @@
<label for="test_script_args">Script Arguments</label>
<div class="row">
    <div class="col-md-12">
        <input class="form-control" type="text" id="test_script_args" name="test_script_args" value="">
        <input class="form-control" type="text" id="test_script_args" name="test_script_args" value="" autocomplete="off">
    </div>
</div>
<p class="help-block">Set custom arguments passed to the script.</p>

@@ -370,7 +370,7 @@
<label for="test_subject">JSON Headers</label>
<div class="row">
    <div class="col-md-12">
        <textarea class="form-control" id="test_subject" name="test_subject" data-autoresize></textarea>
        <textarea class="form-control" id="test_subject" name="test_subject" autocomplete="off" data-autoresize></textarea>
    </div>
</div>
<p class="help-block">Set custom JSON headers sent to the webhook.</p>

@@ -379,7 +379,7 @@
<label for="test_body">JSON Data</label>
<div class="row">
    <div class="col-md-12">
        <textarea class="form-control" id="test_body" name="test_body" data-autoresize></textarea>
        <textarea class="form-control" id="test_body" name="test_body" autocomplete="off" data-autoresize></textarea>
    </div>
</div>
<p class="help-block">Set custom JSON data sent to the webhook.</p>

@@ -389,7 +389,7 @@
<label for="test_subject">Subject Line</label>
<div class="row">
    <div class="col-md-12">
        <input class="form-control" type="text" id="test_subject" name="test_subject" value="Tautulli">
        <input class="form-control" type="text" id="test_subject" name="test_subject" value="Tautulli" autocomplete="off">
    </div>
</div>
<p class="help-block">Set a custom subject line.</p>

@@ -398,7 +398,7 @@
<label for="test_body">Message Body</label>
<div class="row">
    <div class="col-md-12">
        <textarea class="form-control" id="test_body" name="test_body" data-autoresize>Test Notification</textarea>
        <textarea class="form-control" id="test_body" name="test_body" autocomplete="off" data-autoresize>Test Notification</textarea>
    </div>
</div>
<p class="help-block">Set a custom body.</p>

@@ -36,7 +36,7 @@ DOCUMENTATION :: END
%>
% if data:
<div class="dashboard-recent-media-row">
    <div id="recently-added-row-scroller" style="left: 0;">
    <div id="recently-added-row-scroller">
        <ul class="dashboard-recent-media list-unstyled">
            % for item in data:
            <div class="dashboard-recent-media-instance">

@@ -13,8 +13,6 @@ DOCUMENTATION :: END
import datetime
import plexpy
from plexpy import common, helpers

scheduled_jobs = [j.id for j in plexpy.SCHED.get_jobs()]
%>

<table class="config-scheduler-table small-muted">

@@ -29,16 +27,15 @@ DOCUMENTATION :: END
</thead>
<tbody>
% for job, job_type in common.SCHEDULER_LIST.items():
% if job in scheduled_jobs:
<%
sched_job = plexpy.SCHED.get_job(job)
now = datetime.datetime.now(sched_job.next_run_time.tzinfo)
%>
% if sched_job:
<tr>
    <td>${sched_job.id}</td>
    <td><i class="fa fa-sm fa-fw fa-check"></i> Active</td>
    <td>${helpers.format_timedelta_Hms(sched_job.trigger.interval)}</td>
    <td>${helpers.format_timedelta_Hms(sched_job.next_run_time - now)}</td>
    <td>${helpers.format_timedelta_Hms(sched_job.next_run_time - datetime.datetime.now(sched_job.next_run_time.tzinfo))}</td>
    <td>${sched_job.next_run_time.astimezone(plexpy.SYS_TIMEZONE).strftime('%Y-%m-%d %H:%M:%S')}</td>
</tr>
% elif job_type == 'websocket' and plexpy.WS_CONNECTED:

@@ -676,27 +676,6 @@
</div>
</div>

<div class="checkbox advanced-setting">
    <label>
        <input type="checkbox" name="anon_redirect_dynamic" id="anon_redirect_dynamic" value="1" ${config['anon_redirect_dynamic']} /> Use Dynamic Anonymous Redirect Service
    </label>
    <p class="help-block">
        Allow Tautulli to use the dynamic anonymous redirect service listed <a href="${anon_url('https://tautulli.com/anonymizer.txt')}" target="_blank" rel="noreferrer">here</a>.
        Disable to specify your own service.
    </p>
</div>
<div id="anon_redirect_options">
    <div class="form-group advanced-setting">
        <label for="anon_redirect">Anonymous Redirect Service</label>
        <div class="row">
            <div class="col-md-4">
                <input type="text" class="form-control" id="anon_redirect" name="anon_redirect" value="${config['anon_redirect']}" size="30">
            </div>
        </div>
        <p class="help-block">Backlink protection via anonymizer service, must end in "?". Leave blank to disable.</p>
    </div>
</div>

<div class="padded-header">
    <h3>Authentication</h3>
</div>

@@ -788,7 +767,6 @@
data-identifier="${config['pms_identifier']}"
data-ip="${config['pms_ip']}"
data-port="${config['pms_port']}"
data-local="${int(not int(config['pms_is_remote']))}"
data-ssl="${config['pms_ssl']}"
data-is_cloud="${config['pms_is_cloud']}"
data-label="${config['pms_name'] or 'Local'}"

@@ -821,13 +799,6 @@
</label>
<p class="help-block">Connect to your Plex server using HTTPS if you have <a href="${anon_url('https://support.plex.tv/articles/206225077-how-to-use-secure-server-connections')}" target="_blank" rel="noreferrer">secure connections</a> enabled.</p>
</div>
<div class="checkbox">
    <label>
        <input type="checkbox" id="pms_is_remote_checkbox" class="checkbox-toggle pms-settings" data-id="pms_is_remote" value="1" ${checked(config['pms_is_remote'])}> Remote Server
        <input type="hidden" id="pms_is_remote" name="pms_is_remote" value="${config['pms_is_remote']}">
    </label>
    <p class="help-block">Check this if your Plex Server is not on the same local network as Tautulli.</p>
</div>
<div class="form-group">
    <label for="pms_url">Plex Server URL</label>
    <div class="row">

@@ -1923,6 +1894,21 @@
<p><strong style="color: #fff;">Example:</strong></p>
<pre>{media_type} --> movie
{media_type!c} --> Movie</pre>
</div>
<div>
    <h4>Time Formats</h4>
</div>
<div style="padding-bottom: 10px;">
    <p class="help-block">
        Notification parameters which are "in date format" or "in time format" can be formatted using the
        <a href="javascript:void(0)" data-target="#dateTimeOptionsModal" data-toggle="modal">Date & Time Format Options</a>
        by adding a <span class="inline-pre">:format</span> specifier.
        If no format is specified, the default Date Format and Time Format under Settings > General will be used.
    </p>
    <p><strong style="color: #fff;">Example:</strong></p>
    <pre>{started_datestamp:ddd, MMMM DD, YYYY} --> Mon, December 25, 2023
{stopped_timestamp:h:mm a} --> 9:56 pm
{duration_time:HH:mm:ss} --> 01:42:20</pre>
</div>
<div>
    <h4>List Slicing</h4>

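The added documentation uses moment-style format tokens for the date/time parameters. A quick sketch of what those example specifiers produce, shown here with moment.js (which Tautulli's frontend already bundles; the timestamp below is made up, and server-side rendering may use a different library):

    const moment = require('moment'); // or the globally loaded moment in Tautulli's pages

    const started = moment('2023-12-25T21:56:00');
    console.log(started.format('ddd, MMMM DD, YYYY')); // "Mon, December 25, 2023"
    console.log(started.format('h:mm a'));             // "9:56 pm"
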
@@ -1994,7 +1980,7 @@ Rating: {rating}/10 --> Rating: /10
<li>Evaluation</li>
<li>Parameter</li>
<li>Case Modifier</li>
<li>List Slicing</li>
<li>Time Formats / List Slicing</li>
<li>Suffix</li>
</ol>
<p><strong style="color: #fff;">Example:</strong></p>

@@ -2169,7 +2155,6 @@ Rating: {rating}/10 --> Rating: /10
<script src="${http_root}js/parsley.min.js"></script>
<script src="${http_root}js/Sortable.min.js"></script>
<script src="${http_root}js/jquery.inputaffix.min.js"></script>
<script src="${http_root}js/kjua.min.js"></script>
<script>
function getConfigurationTable() {
    $.ajax({

@@ -2401,7 +2386,6 @@ $(document).ready(function() {

initConfigCheckbox('#api_enabled');
initConfigCheckbox('#enable_https');
initConfigCheckbox('#anon_redirect_dynamic', null, true);
initConfigCheckbox('#https_create_cert');
initConfigCheckbox('#check_github');
initConfigCheckbox('#monitor_pms_updates');

@@ -2604,7 +2588,6 @@ $(document).ready(function() {
return '<div data-identifier="' + item.clientIdentifier +
    '" data-ip="' + item.ip +
    '" data-port="' + item.port +
    '" data-local="' + item.local +
    '" data-ssl="' + item.httpsRequired +
    '" data-is_cloud="' + item.is_cloud +
    '" data-label="' + item.label + '">' +

@@ -2618,7 +2601,6 @@ $(document).ready(function() {
return '<div data-identifier="' + item.clientIdentifier +
    '" data-ip="' + item.ip +
    '" data-port="' + item.port +
    '" data-local="' + item.local +
    '" data-ssl="' + item.httpsRequired +
    '" data-is_cloud="' + item.is_cloud +
    '" data-label="' + item.label + '">' +

@@ -2641,7 +2623,6 @@ $(document).ready(function() {
var identifier = $(pms_ip_selected).data('identifier');
var ip = $(pms_ip_selected).data('ip');
var port = $(pms_ip_selected).data('port');
var local = $(pms_ip_selected).data('local');
var ssl = $(pms_ip_selected).data('ssl');
var is_cloud = $(pms_ip_selected).data('is_cloud');
var value = $(pms_ip_selected).data('value');

@@ -2649,8 +2630,6 @@ $(document).ready(function() {
$("#pms_identifier").val(identifier !== 'undefined' ? identifier : '');
$('#pms_ip').val(ip !== 'undefined' ? ip : value);
$('#pms_port').val(port !== 'undefined' ? port : 32400);
$('#pms_is_remote_checkbox').prop('checked', (local !== 'undefined' && local === 0));
$('#pms_is_remote').val(local !== 'undefined' && local === 0 ? 1 : 0);
$('#pms_ssl_checkbox').prop('checked', (ssl !== 'undefined' && ssl === 1));
$('#pms_ssl').val(ssl !== 'undefined' && ssl === 1 ? 1 : 0);
$('#pms_is_cloud').val(is_cloud !== 'undefined' && is_cloud === true ? 1 : 0);

@@ -2688,7 +2667,6 @@ $(document).ready(function() {
var pms_port = $("#pms_port").val();
var pms_identifier = $("#pms_identifier").val();
var pms_ssl = $("#pms_ssl").val();
var pms_is_remote = $("#pms_is_remote").val();
var pms_url_manual = $("#pms_url_manual").is(':checked') ? 1 : 0;

if (($("#pms_ip").val() !== '') || ($("#pms_port").val() !== '')) {

@@ -2700,7 +2678,6 @@ $(document).ready(function() {
hostname: pms_ip,
port: pms_port,
ssl: pms_ssl,
remote: pms_is_remote,
manual: pms_url_manual,
get_url: true,
test_websocket: true

@@ -68,14 +68,14 @@ DOCUMENTATION :: END
<table class="stream-info" style="margin-top: 0;">
    <thead>
        <tr>
            <th>
            </th>
            <th class="heading">
                Stream Details
            </th>
            <th></th>
            <th class="heading">
                Source Details
            </th>
            <th><i class="fa fa-long-arrow-right"></i></th>
            <th class="heading">
                Stream Details
            </th>
        </tr>
    </thead>
</table>

@@ -85,38 +85,46 @@ DOCUMENTATION :: END
<th>
    Media
</th>
<th></th>
<th></th>
<th></th>
</tr>
</thead>
<tbody>
<tr>
    <td>Bitrate</td>
    <td>${data['stream_bitrate']} ${'kbps' if data['stream_bitrate'] else ''}</td>
    <td>${data['bitrate']} ${'kbps' if data['bitrate'] else ''}</td>
    <td><i class="fa fa-long-arrow-right"></i></td>
    <td>${data['stream_bitrate']} ${'kbps' if data['stream_bitrate'] else ''}</td>
</tr>
% if data['media_type'] != 'track':
<tr>
    <td>Resolution</td>
    <td>${data['stream_video_full_resolution']}</td>
    <td>${data['video_full_resolution']}</td>
    <td><i class="fa fa-long-arrow-right"></i></td>
    <td>${data['stream_video_full_resolution']}</td>
</tr>
% endif
<tr>
    <td>Quality</td>
    <td>${data['quality_profile']}</td>
    <td>-</td>
    <td></td>
    <td>${data['quality_profile']}</td>
</tr>
% if data['optimized_version'] == 1:
<tr>
    <td>Optimized Version</td>
    <td>-</td>
    <td>${data['optimized_version_profile']}<br>(${data['optimized_version_title']})</td>
    <td></td>
    <td>-</td>
</tr>
% endif
% if data['synced_version'] == 1:
<tr>
    <td>Synced Version</td>
    <td>-</td>
    <td>${data['synced_version_profile']}</td>
    <td></td>
    <td>-</td>
</tr>
% endif
</tbody>

@@ -127,6 +135,8 @@ DOCUMENTATION :: END
<th>
    Container
</th>
<th></th>
<th></th>
<th>
    ${data['stream_container_decision']}
</th>

@@ -135,8 +145,9 @@ DOCUMENTATION :: END
<tbody>
<tr>
    <td>Container</td>
    <td>${data['stream_container'].upper()}</td>
    <td>${data['container'].upper()}</td>
    <td><i class="fa fa-long-arrow-right"></i></td>
    <td>${data['stream_container'].upper()}</td>
</tr>
</tbody>
</table>

@@ -147,6 +158,8 @@ DOCUMENTATION :: END
<th>
    Video
</th>
<th></th>
<th></th>
<th>
    ${data['stream_video_decision']}
</th>

@@ -155,38 +168,45 @@ DOCUMENTATION :: END
<tbody>
<tr>
    <td>Codec</td>
    <td>${data['stream_video_codec'].upper()} ${'(HW)' if data['transcode_hw_encoding'] else ''}</td>
    <td>${data['video_codec'].upper()} ${'(HW)' if data['transcode_hw_decoding'] else ''}</td>
    <td><i class="fa fa-long-arrow-right"></i></td>
    <td>${data['stream_video_codec'].upper()} ${'(HW)' if data['transcode_hw_encoding'] else ''}</td>
</tr>
<tr>
    <td>Bitrate</td>
    <td>${data['stream_video_bitrate']} ${'kbps' if data['stream_video_bitrate'] else ''}</td>
    <td>${data['video_bitrate']} ${'kbps' if data['video_bitrate'] else ''}</td>
    <td><i class="fa fa-long-arrow-right"></i></td>
    <td>${data['stream_video_bitrate']} ${'kbps' if data['stream_video_bitrate'] else ''}</td>
</tr>
<tr>
    <td>Width</td>
    <td>${data['stream_video_width']}</td>
    <td>${data['video_width']}</td>
    <td><i class="fa fa-long-arrow-right"></i></td>
    <td>${data['stream_video_width']}</td>
</tr>
<tr>
    <td>Height</td>
    <td>${data['stream_video_height']}</td>
    <td>${data['video_height']}</td>
    <td><i class="fa fa-long-arrow-right"></i></td>
    <td>${data['stream_video_height']}</td>
</tr>
<tr>
    <td>Framerate</td>
    <td>${data['stream_video_framerate']}</td>
    <td>${data['video_framerate']}</td>
    <td><i class="fa fa-long-arrow-right"></i></td>
    <td>${data['stream_video_framerate']}</td>
</tr>
<tr>
    <td>Dynamic Range</td>
    <td>${data['stream_video_dynamic_range']}</td>
    <td>${data['video_dynamic_range']}</td>
    <td><i class="fa fa-long-arrow-right"></i></td>
    <td>${data['stream_video_dynamic_range']}</td>
</tr>
<tr>
    <td>Aspect Ratio</td>
    <td>-</td>
    <td>${data['aspect_ratio']}</td>
    <td></td>
    <td>-</td>
</tr>
</tbody>
</table>

@@ -197,6 +217,8 @@ DOCUMENTATION :: END
<th>
    Audio
</th>
<th></th>
<th></th>
<th>
    ${data['stream_audio_decision']}
</th>

@@ -205,23 +227,27 @@ DOCUMENTATION :: END
<tbody>
<tr>
    <td>Codec</td>
    <td>${AUDIO_CODEC_OVERRIDES.get(data['stream_audio_codec'], data['stream_audio_codec'].upper())}</td>
    <td>${AUDIO_CODEC_OVERRIDES.get(data['audio_codec'], data['audio_codec'].upper())}</td>
    <td><i class="fa fa-long-arrow-right"></i></td>
    <td>${AUDIO_CODEC_OVERRIDES.get(data['stream_audio_codec'], data['stream_audio_codec'].upper())}</td>
</tr>
<tr>
    <td>Bitrate</td>
    <td>${data['stream_audio_bitrate']} ${'kbps' if data['stream_audio_bitrate'] else ''}</td>
    <td>${data['audio_bitrate']} ${'kbps' if data['audio_bitrate'] else ''}</td>
    <td><i class="fa fa-long-arrow-right"></i></td>
    <td>${data['stream_audio_bitrate']} ${'kbps' if data['stream_audio_bitrate'] else ''}</td>
</tr>
<tr>
    <td>Channels</td>
    <td>${data['stream_audio_channels']}</td>
    <td>${data['audio_channels']}</td>
    <td><i class="fa fa-long-arrow-right"></i></td>
    <td>${data['stream_audio_channels']}</td>
</tr>
<tr>
    <td>Language</td>
    <td>-</td>
    <td>${data['audio_language'] or 'Unknown'}</td>
    <td></td>
    <td>-</td>
</tr>

</tbody>

@@ -233,6 +259,8 @@ DOCUMENTATION :: END
<th>
    Subtitles
</th>
<th></th>
<th></th>
<th>
    ${'direct play' if data['stream_subtitle_decision'] not in ('transcode', 'copy', 'burn') else data['stream_subtitle_decision']}
</th>

@@ -241,19 +269,22 @@ DOCUMENTATION :: END
<tbody>
<tr>
    <td>Codec</td>
    <td>${data['stream_subtitle_codec'].upper() or '-'}</td>
    <td>${data['subtitle_codec'].upper()}</td>
    <td><i class="fa fa-long-arrow-right"></i></td>
    <td>${data['stream_subtitle_codec'].upper() or '-'}</td>
</tr>
<tr>
    <td>Language</td>
    <td>-</td>
    <td>${data['subtitle_language'] or 'Unknown'}</td>
    <td></td>
    <td>-</td>
</tr>
% if data['subtitle_forced']:
<tr>
    <td>Forced</td>
    <td>-</td>
    <td>${bool(data['subtitle_forced'])}</td>
    <td></td>
    <td>-</td>
</tr>
% endif
</tbody>

@@ -30,7 +30,7 @@
<div class="iframe-container">
<iframe class="iframe" allowfullscreen="true" id="support-iframe" data-name="Tautulli-Support" data-src="https://support.tautulli.com"
        sandbox="allow-presentation allow-forms allow-same-origin allow-pointer-lock allow-scripts allow-popups allow-modals allow-top-navigation"
-       style="display: none;">
+       style="display: none;" rel="noreferrer">
</iframe>
<div class="iframe-overlay">
<div class="iframe-button-container">
@@ -76,7 +76,6 @@ DOCUMENTATION :: END
% if _session['user_group'] == 'admin':
<li><a id="nav-tabs-export" href="#tabs-export" role="tab" data-toggle="tab">Export</a></li>
% endif
-<li><a id="nav-tabs-synceditems" href="#tabs-synceditems" role="tab" data-toggle="tab">Synced Items</a></li>
<li><a id="nav-tabs-ipaddresses" href="#tabs-ipaddresses" role="tab" data-toggle="tab">IP Addresses</a></li>
<li><a id="nav-tabs-tautullilogins" href="#tabs-tautullilogins" role="tab" data-toggle="tab">Tautulli Logins</a></li>
</ul>

@@ -126,10 +125,10 @@ DOCUMENTATION :: END
<div class="table-card-header">
<ul class="nav nav-header nav-dashboard pull-right">
<li>
-<a href="#" id="recently-watched-page-left" class="paginate btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a>
+<a href="#" id="recently-watched-page-left" class="paginate-watched btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
</li>
<li>
-<a href="#" id="recently-watched-page-right" class="paginate btn-gray" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a>
+<a href="#" id="recently-watched-page-right" class="paginate-watched btn-gray" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
</li>
</ul>
<div class="header-bar">

@@ -316,57 +315,6 @@ DOCUMENTATION :: END
</div>
</div>
% endif
-<div role="tabpanel" class="tab-pane" id="tabs-synceditems">
-<div class="container-fluid">
-<div class="row">
-<div class="col-md-12">
-<div class='table-card-header'>
-<div class="header-bar">
-<span>
-<i class="fa fa-cloud-download"></i> Synced Items for <strong>
-<span class="set-username">${data['friendly_name']}</span>
-</strong>
-</span>
-</div>
-<div class="button-bar">
-% if _session['user_group'] == 'admin':
-<div class="btn-group">
-<button class="btn btn-danger btn-edit" data-toggle="button" aria-pressed="false" autocomplete="off" id="sync-row-edit-mode">
-<i class="fa fa-trash-o"></i> Delete mode
-</button>
-</div>
-% endif
-<div class="btn-group">
-<button class="btn btn-dark refresh-syncs-button" id="refresh-syncs-list"><i class="fa fa-refresh"></i> Refresh synced items</button>
-</div>
-<div class="btn-group colvis-button-bar" id="button-bar-sync"></div>
-</div>
-</div>
-<div class="table-card-back">
-<table class="display sync_table" id="sync_table-UID-${data['user_id']}" width="100%">
-<thead>
-<tr>
-<th align="left" id="delete_row">Delete</th>
-<th align="left" id="state">State</th>
-<th align="left" id="username">Username</th>
-<th align="left" id="sync_title">Title</th>
-<th align="left" id="type">Type</th>
-<th align="left" id="sync_platform">Platform</th>
-<th align="left" id="device">Device</th>
-<th align="left" id="size">Total Size</th>
-<th align="left" id="items">Total Items</th>
-<th align="left" id="converted">Converted</th>
-<th align="left" id="downloaded">Downloaded</th>
-<th align="left" id="sync_percent_complete">Complete</th>
-</tr>
-</thead>
-<tbody></tbody>
-</table>
-</div>
-</div>
-</div>
-</div>
-</div>
<div role="tabpanel" class="tab-pane" id="tabs-ipaddresses">
<div class="container-fluid">
<div class="row">

@@ -642,12 +590,6 @@ DOCUMENTATION :: END
clearSearchButton('sync_table-UID-${data["user_id"]}', sync_table);
}

-$('#nav-tabs-synceditems').on('shown.bs.tab', function() {
-    if (typeof(sync_table) === 'undefined') {
-        loadSyncTable(user_id);
-    }
-});
-
$("#refresh-syncs-list").click(function() {
    sync_table.ajax.reload();
});

@@ -724,52 +666,14 @@ DOCUMENTATION :: END
},
complete: function(xhr, status) {
    $("#user-recently-watched").html(xhr.responseText);
-   highlightWatchedScrollerButton();
+   highlightScrollerButton("#recently-watched");
+   paginateScroller("#recently-watched", ".paginate-watched");
}
});
}

recentlyWatched();

-function highlightWatchedScrollerButton() {
-    var scroller = $("#recently-watched-row-scroller");
-    var numElems = scroller.find("li").length;
-    scroller.width(numElems * 175);
-    if (scroller.width() > $("#user-recently-watched").width()) {
-        $("#recently-watched-page-right").removeClass("disabled");
-    } else {
-        $("#recently-watched-page-right").addClass("disabled");
-    }
-}
-
-$(window).resize(function() {
-    highlightWatchedScrollerButton();
-});
-
-var leftTotal = 0;
-$(".paginate").click(function (e) {
-    e.preventDefault();
-    var scroller = $("#recently-watched-row-scroller");
-    var containerWidth = $("#user-recently-watched").width();
-    var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
-    var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
-
-    leftTotal = Math.max(Math.min(leftTotal + scrollAmount, 0), leftMax);
-    scroller.animate({ left: leftTotal }, 250);
-
-    if (leftTotal == 0) {
-        $("#recently-watched-page-left").addClass("disabled").blur();
-    } else {
-        $("#recently-watched-page-left").removeClass("disabled");
-    }
-
-    if (leftTotal == leftMax) {
-        $("#recently-watched-page-right").addClass("disabled").blur();
-    } else {
-        $("#recently-watched-page-right").removeClass("disabled");
-    }
-});
-
$(document).ready(function () {
    // Javascript to enable link to tab
    var hash = document.location.hash;
@@ -31,7 +31,7 @@ DOCUMENTATION :: END
from plexpy.helpers import page, short_season
%>
<div class="dashboard-recent-media-row">
-<div id="recently-watched-row-scroller" style="left: 0;">
+<div id="recently-watched-row-scroller">
<ul class="dashboard-recent-media list-unstyled">
% for item in data:
<li>
@@ -12,6 +12,7 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="description" content="">
<meta name="author" content="">
+<meta name="referrer" content="no-referrer">
<link href="${http_root}css/bootstrap3/bootstrap.min.css" rel="stylesheet">
<link href="${http_root}css/bootstrap-wizard.css" rel="stylesheet">
<link href="${http_root}css/tautulli.css${cache_param}" rel="stylesheet">

@@ -134,7 +135,6 @@
data-identifier="${config['pms_identifier']}"
data-ip="${config['pms_ip']}"
data-port="${config['pms_port']}"
-data-local="${int(not int(config['pms_is_remote']))}"
data-ssl="${config['pms_ssl']}"
data-is_cloud="${config['pms_is_cloud']}"
data-label="${config['pms_name'] or 'Local'}"

@@ -150,7 +150,7 @@
<div class="col-xs-3">
<input type="text" class="form-control pms-settings" name="pms_port" id="pms_port" placeholder="32400" value="${config['pms_port']}" required>
</div>
-<div class="col-xs-4">
+<div class="col-xs-9">
<div class="checkbox">
<label>
<input type="checkbox" id="pms_ssl_checkbox" class="checkbox-toggle pms-settings" data-id="pms_ssl" value="1" ${helpers.checked(config['pms_ssl'])}> Use Secure Connection

@@ -158,14 +158,6 @@
</label>
</div>
</div>
-<div class="col-xs-4">
-<div class="checkbox">
-<label>
-<input type="checkbox" id="pms_is_remote_checkbox" class="checkbox-toggle pms-settings" data-id="pms_is_remote" value="1" ${helpers.checked(config['pms_is_remote'])}> Remote Server
-<input type="hidden" id="pms_is_remote" name="pms_is_remote" value="${config['pms_is_remote']}">
-</label>
-</div>
-</div>
</div>
</div>
<input type="hidden" id="pms_valid" data-validate="validatePMSip" value="">
@@ -390,7 +382,6 @@ $(document).ready(function() {
return '<div data-identifier="' + item.clientIdentifier +
    '" data-ip="' + item.ip +
    '" data-port="' + item.port +
-   '" data-local="' + item.local +
    '" data-ssl="' + item.httpsRequired +
    '" data-is_cloud="' + item.is_cloud +
    '" data-label="' + item.label + '">' +

@@ -404,7 +395,6 @@ $(document).ready(function() {
return '<div data-identifier="' + item.clientIdentifier +
    '" data-ip="' + item.ip +
    '" data-port="' + item.port +
-   '" data-local="' + item.local +
    '" data-ssl="' + item.httpsRequired +
    '" data-is_cloud="' + item.is_cloud +
    '" data-label="' + item.label + '">' +

@@ -427,7 +417,6 @@ $(document).ready(function() {
var identifier = $(pms_ip_selected).data('identifier');
var ip = $(pms_ip_selected).data('ip');
var port = $(pms_ip_selected).data('port');
-var local = $(pms_ip_selected).data('local');
var ssl = $(pms_ip_selected).data('ssl');
var is_cloud = $(pms_ip_selected).data('is_cloud');
var value = $(pms_ip_selected).data('value');

@@ -438,19 +427,15 @@ $(document).ready(function() {
$("#pms_identifier").val(identifier !== 'undefined' ? identifier : '');
$('#pms_ip').val(ip !== 'undefined' ? ip : value);
$('#pms_port').val(port !== 'undefined' ? port : 32400);
-$('#pms_is_remote_checkbox').prop('checked', (local !== 'undefined' && local === 0));
-$('#pms_is_remote').val(local !== 'undefined' && local === 0 ? 1 : 0);
$('#pms_ssl_checkbox').prop('checked', (ssl !== 'undefined' && ssl === 1));
$('#pms_ssl').val(ssl !== 'undefined' && ssl === 1 ? 1 : 0);
$('#pms_is_cloud').val(is_cloud !== 'undefined' && is_cloud === true ? 1 : 0);

if (is_cloud === true) {
    $('#pms_port').prop('readonly', true);
-   $('#pms_is_remote_checkbox').prop('disabled', true);
    $('#pms_ssl_checkbox').prop('disabled', true);
} else {
    $('#pms_port').prop('readonly', false);
-   $('#pms_is_remote_checkbox').prop('disabled', false);
    $('#pms_ssl_checkbox').prop('disabled', false);
}
},

@@ -487,7 +472,6 @@ $(document).ready(function() {
var pms_port = $("#pms_port").val().trim();
var pms_identifier = $("#pms_identifier").val();
var pms_ssl = $("#pms_ssl").val();
-var pms_is_remote = $("#pms_is_remote").val();
if ((pms_ip !== '') || (pms_port !== '')) {
    $("#pms-verify-status").html('<i class="fa fa-refresh fa-spin"></i> Verifying server...');
    $('#pms-verify-status').fadeIn('fast');

@@ -497,8 +481,7 @@ $(document).ready(function() {
hostname: pms_ip,
port: pms_port,
identifier: pms_identifier,
-ssl: pms_ssl,
-remote: pms_is_remote
+ssl: pms_ssl
},
cache: true,
async: true,
lib/annotated_types/__init__.py (new file, 396 lines)
@@ -0,0 +1,396 @@
import math
import sys
from dataclasses import dataclass
from datetime import timezone
from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, SupportsFloat, SupportsIndex, TypeVar, Union

if sys.version_info < (3, 8):
    from typing_extensions import Protocol, runtime_checkable
else:
    from typing import Protocol, runtime_checkable

if sys.version_info < (3, 9):
    from typing_extensions import Annotated, Literal
else:
    from typing import Annotated, Literal

if sys.version_info < (3, 10):
    EllipsisType = type(Ellipsis)
    KW_ONLY = {}
    SLOTS = {}
else:
    from types import EllipsisType

    KW_ONLY = {"kw_only": True}
    SLOTS = {"slots": True}


__all__ = (
    'BaseMetadata',
    'GroupedMetadata',
    'Gt',
    'Ge',
    'Lt',
    'Le',
    'Interval',
    'MultipleOf',
    'MinLen',
    'MaxLen',
    'Len',
    'Timezone',
    'Predicate',
    'LowerCase',
    'UpperCase',
    'IsDigits',
    'IsFinite',
    'IsNotFinite',
    'IsNan',
    'IsNotNan',
    'IsInfinite',
    'IsNotInfinite',
    'doc',
    'DocInfo',
    '__version__',
)

__version__ = '0.6.0'


T = TypeVar('T')


# arguments that start with __ are considered
# positional only
# see https://peps.python.org/pep-0484/#positional-only-arguments


class SupportsGt(Protocol):
    def __gt__(self: T, __other: T) -> bool:
        ...


class SupportsGe(Protocol):
    def __ge__(self: T, __other: T) -> bool:
        ...


class SupportsLt(Protocol):
    def __lt__(self: T, __other: T) -> bool:
        ...


class SupportsLe(Protocol):
    def __le__(self: T, __other: T) -> bool:
        ...


class SupportsMod(Protocol):
    def __mod__(self: T, __other: T) -> T:
        ...


class SupportsDiv(Protocol):
    def __div__(self: T, __other: T) -> T:
        ...


class BaseMetadata:
    """Base class for all metadata.

    This exists mainly so that implementers
    can do `isinstance(..., BaseMetadata)` while traversing field annotations.
    """

    __slots__ = ()


@dataclass(frozen=True, **SLOTS)
class Gt(BaseMetadata):
    """Gt(gt=x) implies that the value must be greater than x.

    It can be used with any type that supports the ``>`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    gt: SupportsGt


@dataclass(frozen=True, **SLOTS)
class Ge(BaseMetadata):
    """Ge(ge=x) implies that the value must be greater than or equal to x.

    It can be used with any type that supports the ``>=`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    ge: SupportsGe


@dataclass(frozen=True, **SLOTS)
class Lt(BaseMetadata):
    """Lt(lt=x) implies that the value must be less than x.

    It can be used with any type that supports the ``<`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    lt: SupportsLt


@dataclass(frozen=True, **SLOTS)
class Le(BaseMetadata):
    """Le(le=x) implies that the value must be less than or equal to x.

    It can be used with any type that supports the ``<=`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    le: SupportsLe
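
# Illustrative sketch, not part of annotated_types: the single-bound classes
# above are plain frozen dataclasses, so a consumer can read them back out of
# metadata and enforce them. `check_bounds` is a hypothetical helper using
# only names defined in this file.
def check_bounds(value: Any, *metadata: BaseMetadata) -> bool:
    # Reject the value as soon as any bound it carries is violated.
    for meta in metadata:
        if isinstance(meta, Gt) and not value > meta.gt:
            return False
        if isinstance(meta, Ge) and not value >= meta.ge:
            return False
        if isinstance(meta, Lt) and not value < meta.lt:
            return False
        if isinstance(meta, Le) and not value <= meta.le:
            return False
    return True

assert check_bounds(5, Gt(0), Le(10))       # within both bounds
assert not check_bounds(11, Gt(0), Le(10))  # violates Le(10)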

@runtime_checkable
class GroupedMetadata(Protocol):
    """A grouping of multiple BaseMetadata objects.

    `GroupedMetadata` on its own is not metadata and has no meaning.
    All of the constraints and metadata should be fully expressible
    in terms of the `BaseMetadata`'s returned by `GroupedMetadata.__iter__()`.

    Concrete implementations should override `GroupedMetadata.__iter__()`
    to add their own metadata.
    For example:

    >>> @dataclass
    >>> class Field(GroupedMetadata):
    >>>     gt: float | None = None
    >>>     description: str | None = None
    ...
    >>>     def __iter__(self) -> Iterable[BaseMetadata]:
    >>>         if self.gt is not None:
    >>>             yield Gt(self.gt)
    >>>         if self.description is not None:
    >>>             yield Description(self.description)

    Also see the implementation of `Interval` below for an example.

    Parsers should recognize this and unpack it so that it can be used
    both with and without unpacking:

    - `Annotated[int, Field(...)]` (parser must unpack Field)
    - `Annotated[int, *Field(...)]` (PEP-646)
    """  # noqa: trailing-whitespace

    @property
    def __is_annotated_types_grouped_metadata__(self) -> Literal[True]:
        return True

    def __iter__(self) -> Iterator[BaseMetadata]:
        ...

    if not TYPE_CHECKING:
        __slots__ = ()  # allow subclasses to use slots

        def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None:
            # Basic ABC like functionality without the complexity of an ABC
            super().__init_subclass__(*args, **kwargs)
            if cls.__iter__ is GroupedMetadata.__iter__:
                raise TypeError("Can't subclass GroupedMetadata without implementing __iter__")

        def __iter__(self) -> Iterator[BaseMetadata]:  # noqa: F811
            raise NotImplementedError  # more helpful than "None has no attribute..." type errors


@dataclass(frozen=True, **KW_ONLY, **SLOTS)
class Interval(GroupedMetadata):
    """Interval can express inclusive or exclusive bounds with a single object.

    It accepts keyword arguments ``gt``, ``ge``, ``lt``, and/or ``le``, which
    are interpreted the same way as the single-bound constraints.
    """

    gt: Union[SupportsGt, None] = None
    ge: Union[SupportsGe, None] = None
    lt: Union[SupportsLt, None] = None
    le: Union[SupportsLe, None] = None

    def __iter__(self) -> Iterator[BaseMetadata]:
        """Unpack an Interval into zero or more single-bounds."""
        if self.gt is not None:
            yield Gt(self.gt)
        if self.ge is not None:
            yield Ge(self.ge)
        if self.lt is not None:
            yield Lt(self.lt)
        if self.le is not None:
            yield Le(self.le)
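
# Illustrative sketch, not part of the library: because Interval is a
# GroupedMetadata, iterating it yields the equivalent single bounds, so
# Annotated[int, Interval(gt=4, lt=10)] carries the same information as
# Annotated[int, Gt(4), Lt(10)] once unpacked.
assert list(Interval(gt=4, lt=10)) == [Gt(4), Lt(10)]
assert list(Interval()) == []  # no bounds set, nothing to yield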

@dataclass(frozen=True, **SLOTS)
class MultipleOf(BaseMetadata):
    """MultipleOf(multiple_of=x) might be interpreted in two ways:

    1. Python semantics, implying ``value % multiple_of == 0``, or
    2. JSONschema semantics, where ``int(value / multiple_of) == value / multiple_of``

    We encourage users to be aware of these two common interpretations,
    and libraries to carefully document which they implement.
    """

    multiple_of: Union[SupportsDiv, SupportsMod]
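
# Illustrative sketch, not part of the library: the two interpretations named
# in the docstring above, side by side. A validating library should pick one
# and document the choice; the two can disagree once floats are involved.
def is_multiple_python(value, multiple_of):
    # Python semantics: the remainder is exactly zero.
    return value % multiple_of == 0

def is_multiple_jsonschema(value, multiple_of):
    # JSON Schema semantics: the quotient is an integral number.
    quotient = value / multiple_of
    return int(quotient) == quotient

assert is_multiple_python(9, 3) and is_multiple_jsonschema(9, 3)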

@dataclass(frozen=True, **SLOTS)
class MinLen(BaseMetadata):
    """
    MinLen() implies minimum inclusive length,
    e.g. ``len(value) >= min_length``.
    """

    min_length: Annotated[int, Ge(0)]


@dataclass(frozen=True, **SLOTS)
class MaxLen(BaseMetadata):
    """
    MaxLen() implies maximum inclusive length,
    e.g. ``len(value) <= max_length``.
    """

    max_length: Annotated[int, Ge(0)]


@dataclass(frozen=True, **SLOTS)
class Len(GroupedMetadata):
    """
    Len() implies that ``min_length <= len(value) <= max_length``.

    Upper bound may be omitted or ``None`` to indicate no upper length bound.
    """

    min_length: Annotated[int, Ge(0)] = 0
    max_length: Optional[Annotated[int, Ge(0)]] = None

    def __iter__(self) -> Iterator[BaseMetadata]:
        """Unpack a Len into zero or more single-bounds."""
        if self.min_length > 0:
            yield MinLen(self.min_length)
        if self.max_length is not None:
            yield MaxLen(self.max_length)
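
# Illustrative sketch, not part of the library: Len unpacks to at most two
# single bounds; a zero minimum and an omitted maximum are simply dropped.
assert list(Len(3)) == [MinLen(3)]                # no upper bound -> no MaxLen
assert list(Len(0, 4)) == [MaxLen(4)]             # zero minimum -> no MinLen
assert list(Len(3, 5)) == [MinLen(3), MaxLen(5)]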

@dataclass(frozen=True, **SLOTS)
class Timezone(BaseMetadata):
    """Timezone(tz=...) requires a datetime to be aware (or ``tz=None``, naive).

    ``Annotated[datetime, Timezone(None)]`` must be a naive datetime.
    ``Timezone[...]`` (the ellipsis literal) expresses that the datetime must be
    tz-aware but any timezone is allowed.

    You may also pass a specific timezone string or timezone object such as
    ``Timezone(timezone.utc)`` or ``Timezone("Africa/Abidjan")`` to express that
    you only allow a specific timezone, though we note that this is often
    a symptom of poor design.
    """

    tz: Union[str, timezone, EllipsisType, None]


@dataclass(frozen=True, **SLOTS)
class Predicate(BaseMetadata):
    """``Predicate(func: Callable)`` implies `func(value)` is truthy for valid values.

    Users should prefer statically inspectable metadata, but if you need the full
    power and flexibility of arbitrary runtime predicates... here it is.

    We provide a few predefined predicates for common string constraints:
    ``IsLower = Predicate(str.islower)``, ``IsUpper = Predicate(str.isupper)``, and
    ``IsDigit = Predicate(str.isdigit)``. Users are encouraged to use methods which
    can be given special handling, and avoid indirection like ``lambda s: s.lower()``.

    Some libraries might have special logic to handle certain predicates, e.g. by
    checking for `str.isdigit` and using its presence to both call custom logic to
    enforce digit-only strings, and customise some generated external schema.

    We do not specify what behaviour should be expected for predicates that raise
    an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
    skip invalid constraints, or statically raise an error; or it might try calling it
    and then propagate or discard the resulting exception.
    """

    func: Callable[[Any], bool]
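
# Illustrative sketch, not part of the library: one way a consumer might apply
# Predicate metadata at runtime. `validate` is a hypothetical helper built
# only on typing.get_args and the classes defined in this file.
from typing import get_args

def validate(value: Any, annotated_type: Any) -> bool:
    # get_args(Annotated[T, m1, m2, ...]) returns (T, m1, m2, ...).
    for meta in get_args(annotated_type)[1:]:
        if isinstance(meta, Predicate) and not meta.func(value):
            return False
    return True

assert validate("abc", Annotated[str, Predicate(str.islower)])
assert not validate("ABC", Annotated[str, Predicate(str.islower)])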

@dataclass
class Not:
    func: Callable[[Any], bool]

    def __call__(self, __v: Any) -> bool:
        return not self.func(__v)


_StrType = TypeVar("_StrType", bound=str)

LowerCase = Annotated[_StrType, Predicate(str.islower)]
"""
Return True if the string is a lowercase string, False otherwise.

A string is lowercase if all cased characters in the string are lowercase and there is at least one cased character in the string.
"""  # noqa: E501
UpperCase = Annotated[_StrType, Predicate(str.isupper)]
"""
Return True if the string is an uppercase string, False otherwise.

A string is uppercase if all cased characters in the string are uppercase and there is at least one cased character in the string.
"""  # noqa: E501
IsDigits = Annotated[_StrType, Predicate(str.isdigit)]
"""
Return True if the string is a digit string, False otherwise.

A string is a digit string if all characters in the string are digits and there is at least one character in the string.
"""  # noqa: E501
IsAscii = Annotated[_StrType, Predicate(str.isascii)]
"""
Return True if all characters in the string are ASCII, False otherwise.

ASCII characters have code points in the range U+0000-U+007F. Empty string is ASCII too.
"""

_NumericType = TypeVar('_NumericType', bound=Union[SupportsFloat, SupportsIndex])
IsFinite = Annotated[_NumericType, Predicate(math.isfinite)]
"""Return True if x is neither an infinity nor a NaN, and False otherwise."""
IsNotFinite = Annotated[_NumericType, Predicate(Not(math.isfinite))]
"""Return True if x is one of infinity or NaN, and False otherwise."""
IsNan = Annotated[_NumericType, Predicate(math.isnan)]
"""Return True if x is a NaN (not a number), and False otherwise."""
IsNotNan = Annotated[_NumericType, Predicate(Not(math.isnan))]
"""Return True if x is anything but NaN (not a number), and False otherwise."""
IsInfinite = Annotated[_NumericType, Predicate(math.isinf)]
"""Return True if x is a positive or negative infinity, and False otherwise."""
IsNotInfinite = Annotated[_NumericType, Predicate(Not(math.isinf))]
"""Return True if x is neither a positive nor a negative infinity, and False otherwise."""

try:
    from typing_extensions import DocInfo, doc  # type: ignore [attr-defined]
except ImportError:

    @dataclass(frozen=True, **SLOTS)
    class DocInfo:  # type: ignore [no-redef]
        """
        The return value of doc(), mainly to be used by tools that want to extract the
        Annotated documentation at runtime.
        """

        documentation: str
        """The documentation string passed to doc()."""

    def doc(
        documentation: str,
    ) -> DocInfo:
        """
        Add documentation to a type annotation inside of Annotated.

        For example:

        >>> def hi(name: Annotated[int, doc("The name of the user")]) -> None: ...
        """
        return DocInfo(documentation)
lib/annotated_types/test_cases.py (new file, 147 lines)
@@ -0,0 +1,147 @@
import math
import sys
from datetime import date, datetime, timedelta, timezone
from decimal import Decimal
from typing import Any, Dict, Iterable, Iterator, List, NamedTuple, Set, Tuple

if sys.version_info < (3, 9):
    from typing_extensions import Annotated
else:
    from typing import Annotated

import annotated_types as at


class Case(NamedTuple):
    """
    A test case for `annotated_types`.
    """

    annotation: Any
    valid_cases: Iterable[Any]
    invalid_cases: Iterable[Any]


def cases() -> Iterable[Case]:
    # Gt, Ge, Lt, Le
    yield Case(Annotated[int, at.Gt(4)], (5, 6, 1000), (4, 0, -1))
    yield Case(Annotated[float, at.Gt(0.5)], (0.6, 0.7, 0.8, 0.9), (0.5, 0.0, -0.1))
    yield Case(
        Annotated[datetime, at.Gt(datetime(2000, 1, 1))],
        [datetime(2000, 1, 2), datetime(2000, 1, 3)],
        [datetime(2000, 1, 1), datetime(1999, 12, 31)],
    )
    yield Case(
        Annotated[datetime, at.Gt(date(2000, 1, 1))],
        [date(2000, 1, 2), date(2000, 1, 3)],
        [date(2000, 1, 1), date(1999, 12, 31)],
    )
    yield Case(
        Annotated[datetime, at.Gt(Decimal('1.123'))],
        [Decimal('1.1231'), Decimal('123')],
        [Decimal('1.123'), Decimal('0')],
    )

    yield Case(Annotated[int, at.Ge(4)], (4, 5, 6, 1000, 4), (0, -1))
    yield Case(Annotated[float, at.Ge(0.5)], (0.5, 0.6, 0.7, 0.8, 0.9), (0.4, 0.0, -0.1))
    yield Case(
        Annotated[datetime, at.Ge(datetime(2000, 1, 1))],
        [datetime(2000, 1, 2), datetime(2000, 1, 3)],
        [datetime(1998, 1, 1), datetime(1999, 12, 31)],
    )

    yield Case(Annotated[int, at.Lt(4)], (0, -1), (4, 5, 6, 1000, 4))
    yield Case(Annotated[float, at.Lt(0.5)], (0.4, 0.0, -0.1), (0.5, 0.6, 0.7, 0.8, 0.9))
    yield Case(
        Annotated[datetime, at.Lt(datetime(2000, 1, 1))],
        [datetime(1999, 12, 31), datetime(1999, 12, 31)],
        [datetime(2000, 1, 2), datetime(2000, 1, 3)],
    )

    yield Case(Annotated[int, at.Le(4)], (4, 0, -1), (5, 6, 1000))
    yield Case(Annotated[float, at.Le(0.5)], (0.5, 0.0, -0.1), (0.6, 0.7, 0.8, 0.9))
    yield Case(
        Annotated[datetime, at.Le(datetime(2000, 1, 1))],
        [datetime(2000, 1, 1), datetime(1999, 12, 31)],
        [datetime(2000, 1, 2), datetime(2000, 1, 3)],
    )

    # Interval
    yield Case(Annotated[int, at.Interval(gt=4)], (5, 6, 1000), (4, 0, -1))
    yield Case(Annotated[int, at.Interval(gt=4, lt=10)], (5, 6), (4, 10, 1000, 0, -1))
    yield Case(Annotated[float, at.Interval(ge=0.5, le=1)], (0.5, 0.9, 1), (0.49, 1.1))
    yield Case(
        Annotated[datetime, at.Interval(gt=datetime(2000, 1, 1), le=datetime(2000, 1, 3))],
        [datetime(2000, 1, 2), datetime(2000, 1, 3)],
        [datetime(2000, 1, 1), datetime(2000, 1, 4)],
    )

    yield Case(Annotated[int, at.MultipleOf(multiple_of=3)], (0, 3, 9), (1, 2, 4))
    yield Case(Annotated[float, at.MultipleOf(multiple_of=0.5)], (0, 0.5, 1, 1.5), (0.4, 1.1))

    # lengths

    yield Case(Annotated[str, at.MinLen(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
    yield Case(Annotated[str, at.Len(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
    yield Case(Annotated[List[int], at.MinLen(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))
    yield Case(Annotated[List[int], at.Len(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))

    yield Case(Annotated[str, at.MaxLen(4)], ('', '1234'), ('12345', 'x' * 10))
    yield Case(Annotated[str, at.Len(0, 4)], ('', '1234'), ('12345', 'x' * 10))
    yield Case(Annotated[List[str], at.MaxLen(4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))
    yield Case(Annotated[List[str], at.Len(0, 4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))

    yield Case(Annotated[str, at.Len(3, 5)], ('123', '12345'), ('', '1', '12', '123456', 'x' * 10))
    yield Case(Annotated[str, at.Len(3, 3)], ('123',), ('12', '1234'))

    yield Case(Annotated[Dict[int, int], at.Len(2, 3)], [{1: 1, 2: 2}], [{}, {1: 1}, {1: 1, 2: 2, 3: 3, 4: 4}])
    yield Case(Annotated[Set[int], at.Len(2, 3)], ({1, 2}, {1, 2, 3}), (set(), {1}, {1, 2, 3, 4}))
    yield Case(Annotated[Tuple[int, ...], at.Len(2, 3)], ((1, 2), (1, 2, 3)), ((), (1,), (1, 2, 3, 4)))

    # Timezone

    yield Case(
        Annotated[datetime, at.Timezone(None)], [datetime(2000, 1, 1)], [datetime(2000, 1, 1, tzinfo=timezone.utc)]
    )
    yield Case(
        Annotated[datetime, at.Timezone(...)], [datetime(2000, 1, 1, tzinfo=timezone.utc)], [datetime(2000, 1, 1)]
    )
    yield Case(
        Annotated[datetime, at.Timezone(timezone.utc)],
        [datetime(2000, 1, 1, tzinfo=timezone.utc)],
        [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
    )
    yield Case(
        Annotated[datetime, at.Timezone('Europe/London')],
        [datetime(2000, 1, 1, tzinfo=timezone(timedelta(0), name='Europe/London'))],
        [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
    )

    # predicate types

    yield Case(at.LowerCase[str], ['abc', 'foobar'], ['', 'A', 'Boom'])
    yield Case(at.UpperCase[str], ['ABC', 'DEFO'], ['', 'a', 'abc', 'AbC'])
    yield Case(at.IsDigits[str], ['123'], ['', 'ab', 'a1b2'])
    yield Case(at.IsAscii[str], ['123', 'foo bar'], ['£100', '😊', 'whatever 👀'])

    yield Case(Annotated[int, at.Predicate(lambda x: x % 2 == 0)], [0, 2, 4], [1, 3, 5])

    yield Case(at.IsFinite[float], [1.23], [math.nan, math.inf, -math.inf])
    yield Case(at.IsNotFinite[float], [math.nan, math.inf], [1.23])
    yield Case(at.IsNan[float], [math.nan], [1.23, math.inf])
    yield Case(at.IsNotNan[float], [1.23, math.inf], [math.nan])
    yield Case(at.IsInfinite[float], [math.inf], [math.nan, 1.23])
    yield Case(at.IsNotInfinite[float], [math.nan, 1.23], [math.inf])

    # check stacked predicates
    yield Case(at.IsInfinite[Annotated[float, at.Predicate(lambda x: x > 0)]], [math.inf], [-math.inf, 1.23, math.nan])

    # doc
    yield Case(Annotated[int, at.doc("A number")], [1, 2], [])

    # custom GroupedMetadata
    class MyCustomGroupedMetadata(at.GroupedMetadata):
        def __iter__(self) -> Iterator[at.Predicate]:
            yield at.Predicate(lambda x: float(x).is_integer())

    yield Case(Annotated[float, MyCustomGroupedMetadata()], [0, 2.0], [0.01, 1.5])
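
# Illustrative sketch, not part of the file: a toy consumer of cases(). A real
# implementation would plug in its own checker; this one only understands the
# single-bound classes, unpacking any GroupedMetadata first, so it is run
# against the valid cases only.
from typing import get_args

def _toy_check(value: Any, md: Any) -> bool:
    if isinstance(md, at.GroupedMetadata):
        return all(_toy_check(value, m) for m in md)
    if isinstance(md, at.Gt):
        return value > md.gt
    if isinstance(md, at.Ge):
        return value >= md.ge
    if isinstance(md, at.Lt):
        return value < md.lt
    if isinstance(md, at.Le):
        return value <= md.le
    return True  # metadata this toy checker does not understand

if __name__ == "__main__":
    for case in cases():
        metadata = get_args(case.annotation)[1:]
        for v in case.valid_cases:
            assert all(_toy_check(v, m) for m in metadata), (case.annotation, v)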
lib/appdirs.py (file deleted, 608 lines)
@@ -1,608 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2005-2010 ActiveState Software Inc.
# Copyright (c) 2013 Eddy Petrișor

"""Utilities for determining application-specific dirs.

See <http://github.com/ActiveState/appdirs> for details and usage.
"""
# Dev Notes:
# - MSDN on where to store app data files:
#   http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html

__version__ = "1.4.4"
__version_info__ = tuple(int(segment) for segment in __version__.split("."))


import sys
import os

PY3 = sys.version_info[0] == 3

if PY3:
    unicode = str

if sys.platform.startswith('java'):
    import platform
    os_name = platform.java_ver()[3][0]
    if os_name.startswith('Windows'):  # "Windows XP", "Windows 7", etc.
        system = 'win32'
    elif os_name.startswith('Mac'):  # "Mac OS X", etc.
        system = 'darwin'
    else:  # "Linux", "SunOS", "FreeBSD", etc.
        # Setting this to "linux2" is not ideal, but only Windows or Mac
        # are actually checked for and the rest of the module expects
        # *sys.platform* style strings.
        system = 'linux2'
else:
    system = sys.platform


def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        Mac OS X:               ~/Library/Application Support/<AppName>
        Unix:                   ~/.local/share/<AppName>    # or in $XDG_DATA_HOME, if defined
        Win XP (not roaming):   C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
        Win XP (roaming):       C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
        Win 7  (not roaming):   C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
        Win 7  (roaming):       C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
        path = os.path.normpath(_get_win_folder(const))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Application Support/')
        if appname:
            path = os.path.join(path, appname)
    else:
        path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path
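
# Illustrative only (not part of the module, which this commit deletes):
# typical call shapes for the function above, per its docstring. The app name
# "Tautulli" is assumed for the example.
#
#     user_data_dir("Tautulli")                  # e.g. ~/.local/share/Tautulli on Linux
#     user_data_dir("Tautulli", roaming=True)    # roaming AppData dir on Windows
#     user_data_dir("Tautulli", version="1.0")   # appends a version segment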

def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
    r"""Return full path to the user-shared data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "multipath" is an optional parameter only applicable to *nix
            which indicates that the entire list of data dirs should be
            returned. By default, the first item from XDG_DATA_DIRS is
            returned, or '/usr/local/share/<AppName>',
            if XDG_DATA_DIRS is not set

    Typical site data directories are:
        Mac OS X:   /Library/Application Support/<AppName>
        Unix:       /usr/local/share/<AppName> or /usr/share/<AppName>
        Win XP:     C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
        Win 7:      C:\ProgramData\<AppAuthor>\<AppName>   # Hidden, but writeable on Win 7.

    For Unix, this is using the $XDG_DATA_DIRS[0] default.

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
    elif system == 'darwin':
        path = os.path.expanduser('/Library/Application Support')
        if appname:
            path = os.path.join(path, appname)
    else:
        # XDG default for $XDG_DATA_DIRS
        # only first, if multipath is False
        path = os.getenv('XDG_DATA_DIRS',
                         os.pathsep.join(['/usr/local/share', '/usr/share']))
        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
        if appname:
            if version:
                appname = os.path.join(appname, version)
            pathlist = [os.sep.join([x, appname]) for x in pathlist]

        if multipath:
            path = os.pathsep.join(pathlist)
        else:
            path = pathlist[0]
        return path

    if appname and version:
        path = os.path.join(path, version)
    return path


def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific config dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user config directories are:
        Mac OS X:               same as user_data_dir
        Unix:                   ~/.config/<AppName>     # or in $XDG_CONFIG_HOME, if defined
        Win *:                  same as user_data_dir

    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by default "~/.config/<AppName>".
    """
    if system in ["win32", "darwin"]:
        path = user_data_dir(appname, appauthor, None, roaming)
    else:
        path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
    r"""Return full path to the user-shared data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "multipath" is an optional parameter only applicable to *nix
            which indicates that the entire list of config dirs should be
            returned. By default, the first item from XDG_CONFIG_DIRS is
            returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set

    Typical site config directories are:
        Mac OS X:   same as site_data_dir
        Unix:       /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
                    $XDG_CONFIG_DIRS
        Win *:      same as site_data_dir
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)

    For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    """
    if system in ["win32", "darwin"]:
        path = site_data_dir(appname, appauthor)
        if appname and version:
            path = os.path.join(path, version)
    else:
        # XDG default for $XDG_CONFIG_DIRS
        # only first, if multipath is False
        path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
        if appname:
            if version:
                appname = os.path.join(appname, version)
            pathlist = [os.sep.join([x, appname]) for x in pathlist]

        if multipath:
            path = os.pathsep.join(pathlist)
        else:
            path = pathlist[0]
    return path


def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific cache dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "opinion" (boolean) can be False to disable the appending of
            "Cache" to the base app data dir for Windows. See
            discussion below.

    Typical user cache directories are:
        Mac OS X:   ~/Library/Caches/<AppName>
        Unix:       ~/.cache/<AppName> (XDG default)
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache

    On Windows the only suggestion in the MSDN docs is that local settings go in
    the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
    app data dir (the default returned by `user_data_dir` above). Apps typically
    put cache data somewhere *under* the given dir here. Some examples:
        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
        ...\Acme\SuperApp\Cache\1.0
    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
    This can be disabled with the `opinion=False` option.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
            if opinion:
                path = os.path.join(path, "Cache")
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Caches')
        if appname:
            path = os.path.join(path, appname)
    else:
        path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific state dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user state directories are:
        Mac OS X:  same as user_data_dir
        Unix:      ~/.local/state/<AppName>   # or in $XDG_STATE_HOME, if defined
        Win *:     same as user_data_dir

    For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state>
    to extend the XDG spec and support $XDG_STATE_HOME.

    That means, by default "~/.local/state/<AppName>".
    """
    if system in ["win32", "darwin"]:
        path = user_data_dir(appname, appauthor, None, roaming)
    else:
        path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific log dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "opinion" (boolean) can be False to disable the appending of
            "Logs" to the base app data dir for Windows, and "log" to the
            base cache dir for Unix. See discussion below.

    Typical user log directories are:
        Mac OS X:   ~/Library/Logs/<AppName>
        Unix:       ~/.cache/<AppName>/log  # or under $XDG_CACHE_HOME if defined
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs

    On Windows the only suggestion in the MSDN docs is that local settings
    go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
    examples of what some windows apps use for a logs dir.)

    OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
    value for Windows and appends "log" to the user cache dir for Unix.
    This can be disabled with the `opinion=False` option.
    """
    if system == "darwin":
        path = os.path.join(
            os.path.expanduser('~/Library/Logs'),
            appname)
    elif system == "win32":
        path = user_data_dir(appname, appauthor, version)
        version = False
        if opinion:
            path = os.path.join(path, "Logs")
    else:
        path = user_cache_dir(appname, appauthor, version)
        version = False
        if opinion:
            path = os.path.join(path, "log")
    if appname and version:
        path = os.path.join(path, version)
    return path


class AppDirs(object):
    """Convenience wrapper for getting application dirs."""
    def __init__(self, appname=None, appauthor=None, version=None,
                 roaming=False, multipath=False):
        self.appname = appname
        self.appauthor = appauthor
        self.version = version
        self.roaming = roaming
        self.multipath = multipath

    @property
    def user_data_dir(self):
        return user_data_dir(self.appname, self.appauthor,
                             version=self.version, roaming=self.roaming)

    @property
    def site_data_dir(self):
        return site_data_dir(self.appname, self.appauthor,
                             version=self.version, multipath=self.multipath)

    @property
    def user_config_dir(self):
        return user_config_dir(self.appname, self.appauthor,
                               version=self.version, roaming=self.roaming)

    @property
    def site_config_dir(self):
        return site_config_dir(self.appname, self.appauthor,
                               version=self.version, multipath=self.multipath)

    @property
    def user_cache_dir(self):
        return user_cache_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_state_dir(self):
        return user_state_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_log_dir(self):
        return user_log_dir(self.appname, self.appauthor,
                            version=self.version)


#---- internal support stuff

def _get_win_folder_from_registry(csidl_name):
    """This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    """
    if PY3:
        import winreg as _winreg
    else:
        import _winreg

    shell_folder_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]

    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    dir, type = _winreg.QueryValueEx(key, shell_folder_name)
    return dir


def _get_win_folder_with_pywin32(csidl_name):
    from win32com.shell import shellcon, shell
    dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
    # Try to make this a unicode path because SHGetFolderPath does
    # not return unicode strings when there is unicode data in the
    # path.
    try:
        dir = unicode(dir)

        # Downgrade to short path name if have highbit chars. See
        # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
        has_high_char = False
        for c in dir:
            if ord(c) > 255:
                has_high_char = True
                break
        if has_high_char:
            try:
                import win32api
                dir = win32api.GetShortPathName(dir)
            except ImportError:
                pass
    except UnicodeError:
        pass
    return dir


def _get_win_folder_with_ctypes(csidl_name):
    import ctypes

    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in buf:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf2 = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
            buf = buf2

    return buf.value

def _get_win_folder_with_jna(csidl_name):
    import array
    from com.sun import jna
    from com.sun.jna.platform import win32

    buf_size = win32.WinDef.MAX_PATH * 2
    buf = array.zeros('c', buf_size)
    shell = win32.Shell32.INSTANCE
    shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
    dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in dir:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf = array.zeros('c', buf_size)
        kernel = win32.Kernel32.INSTANCE
        if kernel.GetShortPathName(dir, buf, buf_size):
            dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    return dir

if system == "win32":
    try:
        import win32com.shell
        _get_win_folder = _get_win_folder_with_pywin32
    except ImportError:
        try:
            from ctypes import windll
            _get_win_folder = _get_win_folder_with_ctypes
        except ImportError:
            try:
                import com.sun.jna
                _get_win_folder = _get_win_folder_with_jna
            except ImportError:
                _get_win_folder = _get_win_folder_from_registry


#---- self test code

if __name__ == "__main__":
    appname = "MyApp"
    appauthor = "MyCompany"

    props = ("user_data_dir",
             "user_config_dir",
             "user_cache_dir",
             "user_state_dir",
             "user_log_dir",
             "site_data_dir",
             "site_config_dir")

    print("-- app dirs %s --" % __version__)

    print("-- app dirs (with optional 'version')")
    dirs = AppDirs(appname, appauthor, version="1.0")
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'version')")
    dirs = AppDirs(appname, appauthor)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'appauthor')")
    dirs = AppDirs(appname)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (with disabled 'appauthor')")
    dirs = AppDirs(appname, appauthor=False)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))
@ -1 +1 @@
|
|||
__version__ = "1.2.3"
|
||||
__version__ = "1.3.0"
|
||||
|
|
|
@ -168,9 +168,9 @@ class Arrow:
|
|||
isinstance(tzinfo, dt_tzinfo)
|
||||
and hasattr(tzinfo, "localize")
|
||||
and hasattr(tzinfo, "zone")
|
||||
and tzinfo.zone # type: ignore[attr-defined]
|
||||
and tzinfo.zone
|
||||
):
|
||||
tzinfo = parser.TzinfoParser.parse(tzinfo.zone) # type: ignore[attr-defined]
|
||||
tzinfo = parser.TzinfoParser.parse(tzinfo.zone)
|
||||
elif isinstance(tzinfo, str):
|
||||
tzinfo = parser.TzinfoParser.parse(tzinfo)
|
||||
|
||||
|
@ -495,7 +495,7 @@ class Arrow:
|
|||
yield current
|
||||
|
||||
values = [getattr(current, f) for f in cls._ATTRS]
|
||||
current = cls(*values, tzinfo=tzinfo).shift( # type: ignore
|
||||
current = cls(*values, tzinfo=tzinfo).shift( # type: ignore[misc]
|
||||
**{frame_relative: relative_steps}
|
||||
)
|
||||
|
||||
|
@ -578,7 +578,7 @@ class Arrow:
|
|||
for _ in range(3 - len(values)):
|
||||
values.append(1)
|
||||
|
||||
floor = self.__class__(*values, tzinfo=self.tzinfo) # type: ignore
|
||||
floor = self.__class__(*values, tzinfo=self.tzinfo) # type: ignore[misc]
|
||||
|
||||
if frame_absolute == "week":
|
||||
# if week_start is greater than self.isoweekday() go back one week by setting delta = 7
|
||||
|
@ -792,7 +792,6 @@ class Arrow:
|
|||
return self._datetime.isoformat()
|
||||
|
||||
def __format__(self, formatstr: str) -> str:
|
||||
|
||||
if len(formatstr) > 0:
|
||||
return self.format(formatstr)
|
||||
|
||||
|
@ -804,7 +803,6 @@ class Arrow:
|
|||
# attributes and properties
|
||||
|
||||
def __getattr__(self, name: str) -> int:
|
||||
|
||||
if name == "week":
|
||||
return self.isocalendar()[1]
|
||||
|
||||
|
@ -965,7 +963,6 @@ class Arrow:
|
|||
absolute_kwargs = {}
|
||||
|
||||
for key, value in kwargs.items():
|
||||
|
||||
if key in self._ATTRS:
|
||||
absolute_kwargs[key] = value
|
||||
elif key in ["week", "quarter"]:
|
||||
|
@ -1022,7 +1019,6 @@ class Arrow:
|
|||
additional_attrs = ["weeks", "quarters", "weekday"]
|
||||
|
||||
for key, value in kwargs.items():
|
||||
|
||||
if key in self._ATTRS_PLURAL or key in additional_attrs:
|
||||
relative_kwargs[key] = value
|
||||
else:
|
||||
|
@ -1259,11 +1255,10 @@ class Arrow:
|
|||
)
|
||||
|
||||
if trunc(abs(delta)) != 1:
|
||||
granularity += "s" # type: ignore
|
||||
granularity += "s" # type: ignore[assignment]
|
||||
return locale.describe(granularity, delta, only_distance=only_distance)
|
||||
|
||||
else:
|
||||
|
||||
if not granularity:
|
||||
raise ValueError(
|
||||
"Empty granularity list provided. "
|
||||
|
@ -1314,7 +1309,7 @@ class Arrow:
|
|||
|
||||
def dehumanize(self, input_string: str, locale: str = "en_us") -> "Arrow":
|
||||
"""Returns a new :class:`Arrow <arrow.arrow.Arrow>` object, that represents
|
||||
the time difference relative to the attrbiutes of the
|
||||
the time difference relative to the attributes of the
|
||||
:class:`Arrow <arrow.arrow.Arrow>` object.
|
||||
|
||||
:param timestring: a ``str`` representing a humanized relative time.
|
||||
|
@ -1367,7 +1362,6 @@ class Arrow:
|
|||
|
||||
# Search input string for each time unit within locale
|
||||
for unit, unit_object in locale_obj.timeframes.items():
|
||||
|
||||
# Need to check the type of unit_object to create the correct dictionary
|
||||
if isinstance(unit_object, Mapping):
|
||||
strings_to_search = unit_object
|
||||
|
@ -1378,7 +1372,6 @@ class Arrow:
|
|||
# Needs to cycle all through strings as some locales have strings that
|
||||
# could overlap in a regex match, since input validation isn't being performed.
|
||||
for time_delta, time_string in strings_to_search.items():
|
||||
|
||||
# Replace {0} with regex \d representing digits
|
||||
search_string = str(time_string)
|
||||
search_string = search_string.format(r"\d+")
|
||||
|
@ -1419,7 +1412,7 @@ class Arrow:
|
|||
# Assert error if string does not modify any units
|
||||
if not any([True for k, v in unit_visited.items() if v]):
|
||||
raise ValueError(
|
||||
"Input string not valid. Note: Some locales do not support the week granulairty in Arrow. "
|
||||
"Input string not valid. Note: Some locales do not support the week granularity in Arrow. "
|
||||
"If you are attempting to use the week granularity on an unsupported locale, this could be the cause of this error."
|
||||
)
|
||||
|
||||
|
@ -1718,7 +1711,6 @@ class Arrow:
|
|||
# math
|
||||
|
||||
def __add__(self, other: Any) -> "Arrow":
|
||||
|
||||
if isinstance(other, (timedelta, relativedelta)):
|
||||
return self.fromdatetime(self._datetime + other, self._datetime.tzinfo)
|
||||
|
||||
|
@ -1736,7 +1728,6 @@ class Arrow:
|
|||
pass # pragma: no cover
|
||||
|
||||
def __sub__(self, other: Any) -> Union[timedelta, "Arrow"]:
|
||||
|
||||
if isinstance(other, (timedelta, relativedelta)):
|
||||
return self.fromdatetime(self._datetime - other, self._datetime.tzinfo)
|
||||
|
||||
|
@ -1749,7 +1740,6 @@ class Arrow:
|
|||
return NotImplemented
|
||||
|
||||
def __rsub__(self, other: Any) -> timedelta:
|
||||
|
||||
if isinstance(other, dt_datetime):
|
||||
return other - self._datetime
|
||||
|
||||
|
@ -1758,42 +1748,36 @@ class Arrow:
|
|||
# comparisons
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
|
||||
if not isinstance(other, (Arrow, dt_datetime)):
|
||||
return False
|
||||
|
||||
return self._datetime == self._get_datetime(other)
|
||||
|
||||
def __ne__(self, other: Any) -> bool:
|
||||
|
||||
if not isinstance(other, (Arrow, dt_datetime)):
|
||||
return True
|
||||
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __gt__(self, other: Any) -> bool:
|
||||
|
||||
if not isinstance(other, (Arrow, dt_datetime)):
|
||||
return NotImplemented
|
||||
|
||||
return self._datetime > self._get_datetime(other)
|
||||
|
||||
def __ge__(self, other: Any) -> bool:
|
||||
|
||||
if not isinstance(other, (Arrow, dt_datetime)):
|
||||
return NotImplemented
|
||||
|
||||
return self._datetime >= self._get_datetime(other)
|
||||
|
||||
def __lt__(self, other: Any) -> bool:
|
||||
|
||||
if not isinstance(other, (Arrow, dt_datetime)):
|
||||
return NotImplemented
|
||||
|
||||
return self._datetime < self._get_datetime(other)
|
||||
|
||||
def __le__(self, other: Any) -> bool:
|
||||
|
||||
if not isinstance(other, (Arrow, dt_datetime)):
|
||||
return NotImplemented
|
||||
|
||||
|
@ -1865,7 +1849,6 @@ class Arrow:
|
|||
def _get_iteration_params(cls, end: Any, limit: Optional[int]) -> Tuple[Any, int]:
|
||||
"""Sets default end and limit values for range method."""
|
||||
if end is None:
|
||||
|
||||
if limit is None:
|
||||
raise ValueError("One of 'end' or 'limit' is required.")
|
||||
|
||||
|
|
|
@ -267,11 +267,9 @@ class ArrowFactory:
|
|||
raise TypeError(f"Cannot parse single argument of type {type(arg)!r}.")
|
||||
|
||||
elif arg_count == 2:
|
||||
|
||||
arg_1, arg_2 = args[0], args[1]
|
||||
|
||||
if isinstance(arg_1, datetime):
|
||||
|
||||
# (datetime, tzinfo/str) -> fromdatetime @ tzinfo
|
||||
if isinstance(arg_2, (dt_tzinfo, str)):
|
||||
return self.type.fromdatetime(arg_1, tzinfo=arg_2)
|
||||
|
@ -281,7 +279,6 @@ class ArrowFactory:
|
|||
)
|
||||
|
||||
elif isinstance(arg_1, date):
|
||||
|
||||
# (date, tzinfo/str) -> fromdate @ tzinfo
|
||||
if isinstance(arg_2, (dt_tzinfo, str)):
|
||||
return self.type.fromdate(arg_1, tzinfo=arg_2)
|
||||
|
|
|
@ -29,7 +29,6 @@ FORMAT_W3C: Final[str] = "YYYY-MM-DD HH:mm:ssZZ"
|
|||
|
||||
|
||||
class DateTimeFormatter:
|
||||
|
||||
# This pattern matches characters enclosed in square brackets are matched as
|
||||
# an atomic group. For more info on atomic groups and how to they are
|
||||
# emulated in Python's re library, see https://stackoverflow.com/a/13577411/2701578
|
||||
|
@ -41,18 +40,15 @@ class DateTimeFormatter:
|
|||
locale: locales.Locale
|
||||
|
||||
def __init__(self, locale: str = DEFAULT_LOCALE) -> None:
|
||||
|
||||
self.locale = locales.get_locale(locale)
|
||||
|
||||
def format(cls, dt: datetime, fmt: str) -> str:
|
||||
|
||||
# FIXME: _format_token() is nullable
|
||||
return cls._FORMAT_RE.sub(
|
||||
lambda m: cast(str, cls._format_token(dt, m.group(0))), fmt
|
||||
)
|
||||
|
||||
def _format_token(self, dt: datetime, token: Optional[str]) -> Optional[str]:
|
||||
|
||||
if token and token.startswith("[") and token.endswith("]"):
|
||||
return token[1:-1]
|
||||
|
||||
|
|
|
@ -129,7 +129,6 @@ class Locale:
|
|||
_locale_map[locale_name.lower().replace("_", "-")] = cls
|
||||
|
||||
def __init__(self) -> None:
|
||||
|
||||
self._month_name_to_ordinal = None
|
||||
|
||||
def describe(
|
||||
|
@ -174,7 +173,7 @@ class Locale:
|
|||
# Needed to determine the correct relative string to use
|
||||
timeframe_value = 0
|
||||
|
||||
for _unit_name, unit_value in timeframes:
|
||||
for _, unit_value in timeframes:
|
||||
if trunc(unit_value) != 0:
|
||||
timeframe_value = trunc(unit_value)
|
||||
break
|
||||
|
@ -285,7 +284,6 @@ class Locale:
|
|||
timeframe: TimeFrameLiteral,
|
||||
delta: Union[float, int],
|
||||
) -> str:
|
||||
|
||||
if timeframe == "now":
|
||||
return humanized
|
||||
|
||||
|
@ -425,7 +423,7 @@ class ItalianLocale(Locale):
|
|||
"hours": "{0} ore",
|
||||
"day": "un giorno",
|
||||
"days": "{0} giorni",
|
||||
"week": "una settimana,",
|
||||
"week": "una settimana",
|
||||
"weeks": "{0} settimane",
|
||||
"month": "un mese",
|
||||
"months": "{0} mesi",
|
||||
|
@ -867,14 +865,16 @@ class FinnishLocale(Locale):
|
|||
|
||||
timeframes: ClassVar[Mapping[TimeFrameLiteral, Union[str, Mapping[str, str]]]] = {
|
||||
"now": "juuri nyt",
|
||||
"second": "sekunti",
|
||||
"seconds": {"past": "{0} muutama sekunti", "future": "{0} muutaman sekunnin"},
|
||||
"second": {"past": "sekunti", "future": "sekunnin"},
|
||||
"seconds": {"past": "{0} sekuntia", "future": "{0} sekunnin"},
|
||||
"minute": {"past": "minuutti", "future": "minuutin"},
|
||||
"minutes": {"past": "{0} minuuttia", "future": "{0} minuutin"},
|
||||
"hour": {"past": "tunti", "future": "tunnin"},
|
||||
"hours": {"past": "{0} tuntia", "future": "{0} tunnin"},
|
||||
"day": "päivä",
|
||||
"day": {"past": "päivä", "future": "päivän"},
|
||||
"days": {"past": "{0} päivää", "future": "{0} päivän"},
|
||||
"week": {"past": "viikko", "future": "viikon"},
|
||||
"weeks": {"past": "{0} viikkoa", "future": "{0} viikon"},
|
||||
"month": {"past": "kuukausi", "future": "kuukauden"},
|
||||
"months": {"past": "{0} kuukautta", "future": "{0} kuukauden"},
|
||||
"year": {"past": "vuosi", "future": "vuoden"},
|
||||
|
@ -1887,7 +1887,7 @@ class GermanBaseLocale(Locale):
|
|||
future = "in {0}"
|
||||
and_word = "und"
|
||||
|
||||
timeframes = {
|
||||
timeframes: ClassVar[Dict[TimeFrameLiteral, str]] = {
|
||||
"now": "gerade eben",
|
||||
"second": "einer Sekunde",
|
||||
"seconds": "{0} Sekunden",
|
||||
|
@ -1982,7 +1982,9 @@ class GermanBaseLocale(Locale):
|
|||
return super().describe(timeframe, delta, only_distance)
|
||||
|
||||
# German uses a different case without 'in' or 'ago'
|
||||
humanized = self.timeframes_only_distance[timeframe].format(trunc(abs(delta)))
|
||||
humanized: str = self.timeframes_only_distance[timeframe].format(
|
||||
trunc(abs(delta))
|
||||
)
|
||||
|
||||
return humanized
|
||||
|
||||
|
@ -2547,6 +2549,8 @@ class ArabicLocale(Locale):
|
|||
"hours": {"2": "ساعتين", "ten": "{0} ساعات", "higher": "{0} ساعة"},
|
||||
"day": "يوم",
|
||||
"days": {"2": "يومين", "ten": "{0} أيام", "higher": "{0} يوم"},
|
||||
"week": "اسبوع",
|
||||
"weeks": {"2": "اسبوعين", "ten": "{0} أسابيع", "higher": "{0} اسبوع"},
|
||||
"month": "شهر",
|
||||
"months": {"2": "شهرين", "ten": "{0} أشهر", "higher": "{0} شهر"},
|
||||
"year": "سنة",
|
||||
|
@ -3709,6 +3713,8 @@ class HungarianLocale(Locale):
|
|||
"hours": {"past": "{0} órával", "future": "{0} óra"},
|
||||
"day": {"past": "egy nappal", "future": "egy nap"},
|
||||
"days": {"past": "{0} nappal", "future": "{0} nap"},
|
||||
"week": {"past": "egy héttel", "future": "egy hét"},
|
||||
"weeks": {"past": "{0} héttel", "future": "{0} hét"},
|
||||
"month": {"past": "egy hónappal", "future": "egy hónap"},
|
||||
"months": {"past": "{0} hónappal", "future": "{0} hónap"},
|
||||
"year": {"past": "egy évvel", "future": "egy év"},
|
||||
|
@ -3934,7 +3940,6 @@ class ThaiLocale(Locale):
|
|||
|
||||
|
||||
class LaotianLocale(Locale):
|
||||
|
||||
names = ["lo", "lo-la"]
|
||||
|
||||
past = "{0} ກ່ອນຫນ້ານີ້"
|
||||
|
@ -4119,6 +4124,7 @@ class BengaliLocale(Locale):
|
|||
return f"{n}র্থ"
|
||||
if n == 6:
|
||||
return f"{n}ষ্ঠ"
|
||||
return ""
|
||||
|
||||
|
||||
class RomanshLocale(Locale):
|
||||
|
@ -4137,6 +4143,8 @@ class RomanshLocale(Locale):
|
|||
"hours": "{0} ura",
|
||||
"day": "in di",
|
||||
"days": "{0} dis",
|
||||
"week": "in'emna",
|
||||
"weeks": "{0} emnas",
|
||||
"month": "in mais",
|
||||
"months": "{0} mais",
|
||||
"year": "in onn",
|
||||
|
@ -5399,7 +5407,7 @@ class LuxembourgishLocale(Locale):
|
|||
future = "an {0}"
|
||||
and_word = "an"
|
||||
|
||||
timeframes = {
|
||||
timeframes: ClassVar[Dict[TimeFrameLiteral, str]] = {
|
||||
"now": "just elo",
|
||||
"second": "enger Sekonn",
|
||||
"seconds": "{0} Sekonnen",
|
||||
|
@ -5487,7 +5495,9 @@ class LuxembourgishLocale(Locale):
|
|||
return super().describe(timeframe, delta, only_distance)
|
||||
|
||||
# Luxembourgish uses a different case without 'in' or 'ago'
|
||||
humanized = self.timeframes_only_distance[timeframe].format(trunc(abs(delta)))
|
||||
humanized: str = self.timeframes_only_distance[timeframe].format(
|
||||
trunc(abs(delta))
|
||||
)
|
||||
|
||||
return humanized
|
||||
|
||||
|
|
|
@ -159,7 +159,6 @@ class DateTimeParser:
|
|||
_input_re_map: Dict[_FORMAT_TYPE, Pattern[str]]
|
||||
|
||||
def __init__(self, locale: str = DEFAULT_LOCALE, cache_size: int = 0) -> None:
|
||||
|
||||
self.locale = locales.get_locale(locale)
|
||||
self._input_re_map = self._BASE_INPUT_RE_MAP.copy()
|
||||
self._input_re_map.update(
|
||||
|
@ -196,7 +195,6 @@ class DateTimeParser:
|
|||
def parse_iso(
|
||||
self, datetime_string: str, normalize_whitespace: bool = False
|
||||
) -> datetime:
|
||||
|
||||
if normalize_whitespace:
|
||||
datetime_string = re.sub(r"\s+", " ", datetime_string.strip())
|
||||
|
||||
|
@ -236,13 +234,14 @@ class DateTimeParser:
|
|||
]
|
||||
|
||||
if has_time:
|
||||
|
||||
if has_space_divider:
|
||||
date_string, time_string = datetime_string.split(" ", 1)
|
||||
else:
|
||||
date_string, time_string = datetime_string.split("T", 1)
|
||||
|
||||
time_parts = re.split(r"[\+\-Z]", time_string, 1, re.IGNORECASE)
|
||||
time_parts = re.split(
|
||||
r"[\+\-Z]", time_string, maxsplit=1, flags=re.IGNORECASE
|
||||
)
|
||||
|
||||
time_components: Optional[Match[str]] = self._TIME_RE.match(time_parts[0])
|
||||
|
||||
|
@ -303,7 +302,6 @@ class DateTimeParser:
|
|||
fmt: Union[List[str], str],
|
||||
normalize_whitespace: bool = False,
|
||||
) -> datetime:
|
||||
|
||||
if normalize_whitespace:
|
||||
datetime_string = re.sub(r"\s+", " ", datetime_string)
|
||||
|
||||
|
@ -341,12 +339,11 @@ class DateTimeParser:
|
|||
f"Unable to find a match group for the specified token {token!r}."
|
||||
)
|
||||
|
||||
self._parse_token(token, value, parts) # type: ignore
|
||||
self._parse_token(token, value, parts) # type: ignore[arg-type]
|
||||
|
||||
return self._build_datetime(parts)
|
||||
|
||||
def _generate_pattern_re(self, fmt: str) -> Tuple[List[_FORMAT_TYPE], Pattern[str]]:
|
||||
|
||||
# fmt is a string of tokens like 'YYYY-MM-DD'
|
||||
# we construct a new string by replacing each
|
||||
# token by its pattern:
|
||||
|
@ -498,7 +495,6 @@ class DateTimeParser:
|
|||
value: Any,
|
||||
parts: _Parts,
|
||||
) -> None:
|
||||
|
||||
if token == "YYYY":
|
||||
parts["year"] = int(value)
|
||||
|
||||
|
@ -508,7 +504,7 @@ class DateTimeParser:
|
|||
|
||||
elif token in ["MMMM", "MMM"]:
|
||||
# FIXME: month_number() is nullable
|
||||
parts["month"] = self.locale.month_number(value.lower()) # type: ignore
|
||||
parts["month"] = self.locale.month_number(value.lower()) # type: ignore[typeddict-item]
|
||||
|
||||
elif token in ["MM", "M"]:
|
||||
parts["month"] = int(value)
|
||||
|
@ -588,7 +584,6 @@ class DateTimeParser:
|
|||
weekdate = parts.get("weekdate")
|
||||
|
||||
if weekdate is not None:
|
||||
|
||||
year, week = int(weekdate[0]), int(weekdate[1])
|
||||
|
||||
if weekdate[2] is not None:
|
||||
|
@ -712,7 +707,6 @@ class DateTimeParser:
|
|||
)
|
||||
|
||||
def _parse_multiformat(self, string: str, formats: Iterable[str]) -> datetime:
|
||||
|
||||
_datetime: Optional[datetime] = None
|
||||
|
||||
for fmt in formats:
|
||||
|
@ -740,12 +734,11 @@ class DateTimeParser:
|
|||
|
||||
class TzinfoParser:
|
||||
_TZINFO_RE: ClassVar[Pattern[str]] = re.compile(
|
||||
r"^([\+\-])?(\d{2})(?:\:?(\d{2}))?$"
|
||||
r"^(?:\(UTC)*([\+\-])?(\d{2})(?:\:?(\d{2}))?"
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def parse(cls, tzinfo_string: str) -> dt_tzinfo:
|
||||
|
||||
tzinfo: Optional[dt_tzinfo] = None
|
||||
|
||||
if tzinfo_string == "local":
|
||||
|
@ -755,7 +748,6 @@ class TzinfoParser:
|
|||
tzinfo = tz.tzutc()
|
||||
|
||||
else:
|
||||
|
||||
iso_match = cls._TZINFO_RE.match(tzinfo_string)
|
||||
|
||||
if iso_match:
|
||||
|
|
|
@ -20,7 +20,7 @@ from functools import wraps
|
|||
from inspect import signature
|
||||
|
||||
|
||||
def _launch_forever_coro(coro, args, kwargs, loop):
|
||||
async def _run_forever_coro(coro, args, kwargs, loop):
|
||||
'''
|
||||
This helper function launches an async main function that was tagged with
|
||||
forever=True. There are two possibilities:
|
||||
|
@ -48,7 +48,7 @@ def _launch_forever_coro(coro, args, kwargs, loop):
|
|||
# forever=True feature from autoasync at some point in the future.
|
||||
thing = coro(*args, **kwargs)
|
||||
if iscoroutine(thing):
|
||||
loop.create_task(thing)
|
||||
await thing
|
||||
|
||||
|
||||
def autoasync(coro=None, *, loop=None, forever=False, pass_loop=False):
|
||||
|
@ -127,7 +127,9 @@ def autoasync(coro=None, *, loop=None, forever=False, pass_loop=False):
|
|||
args, kwargs = bound_args.args, bound_args.kwargs
|
||||
|
||||
if forever:
|
||||
_launch_forever_coro(coro, args, kwargs, local_loop)
|
||||
local_loop.create_task(_run_forever_coro(
|
||||
coro, args, kwargs, local_loop
|
||||
))
|
||||
local_loop.run_forever()
|
||||
else:
|
||||
return local_loop.run_until_complete(coro(*args, **kwargs))
|
||||
|
|
|
@ -1,979 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""A port of Python 3's csv module to Python 2.
|
||||
|
||||
The API of the csv module in Python 2 is drastically different from
|
||||
the csv module in Python 3. This is due, for the most part, to the
|
||||
difference between str in Python 2 and Python 3.
|
||||
|
||||
The semantics of Python 3's version are more useful because they support
|
||||
unicode natively, while Python 2's csv does not.
|
||||
"""
|
||||
from __future__ import unicode_literals, absolute_import
|
||||
|
||||
__all__ = [ "QUOTE_MINIMAL", "QUOTE_ALL", "QUOTE_NONNUMERIC", "QUOTE_NONE",
|
||||
"Error", "Dialect", "__doc__", "excel", "excel_tab",
|
||||
"field_size_limit", "reader", "writer",
|
||||
"register_dialect", "get_dialect", "list_dialects", "Sniffer",
|
||||
"unregister_dialect", "__version__", "DictReader", "DictWriter" ]
|
||||
|
||||
import re
|
||||
import numbers
|
||||
from io import StringIO
|
||||
from csv import (
|
||||
QUOTE_MINIMAL, QUOTE_ALL, QUOTE_NONNUMERIC, QUOTE_NONE,
|
||||
__version__, __doc__, Error, field_size_limit,
|
||||
)
|
||||
|
||||
# Stuff needed from six
|
||||
import sys
|
||||
PY3 = sys.version_info[0] == 3
|
||||
if PY3:
|
||||
string_types = str
|
||||
text_type = str
|
||||
binary_type = bytes
|
||||
unichr = chr
|
||||
else:
|
||||
string_types = basestring
|
||||
text_type = unicode
|
||||
binary_type = str
|
||||
|
||||
|
||||
class QuoteStrategy(object):
|
||||
quoting = None
|
||||
|
||||
def __init__(self, dialect):
|
||||
if self.quoting is not None:
|
||||
assert dialect.quoting == self.quoting
|
||||
self.dialect = dialect
|
||||
self.setup()
|
||||
|
||||
escape_pattern_quoted = r'({quotechar})'.format(
|
||||
quotechar=re.escape(self.dialect.quotechar or '"'))
|
||||
escape_pattern_unquoted = r'([{specialchars}])'.format(
|
||||
specialchars=re.escape(self.specialchars))
|
||||
|
||||
self.escape_re_quoted = re.compile(escape_pattern_quoted)
|
||||
self.escape_re_unquoted = re.compile(escape_pattern_unquoted)
|
||||
|
||||
def setup(self):
|
||||
"""Optional method for strategy-wide optimizations."""
|
||||
|
||||
def quoted(self, field=None, raw_field=None, only=None):
|
||||
"""Determine whether this field should be quoted."""
|
||||
raise NotImplementedError(
|
||||
'quoted must be implemented by a subclass')
|
||||
|
||||
@property
|
||||
def specialchars(self):
|
||||
"""The special characters that need to be escaped."""
|
||||
raise NotImplementedError(
|
||||
'specialchars must be implemented by a subclass')
|
||||
|
||||
def escape_re(self, quoted=None):
|
||||
if quoted:
|
||||
return self.escape_re_quoted
|
||||
return self.escape_re_unquoted
|
||||
|
||||
def escapechar(self, quoted=None):
|
||||
if quoted and self.dialect.doublequote:
|
||||
return self.dialect.quotechar
|
||||
return self.dialect.escapechar
|
||||
|
||||
def prepare(self, raw_field, only=None):
|
||||
field = text_type(raw_field if raw_field is not None else '')
|
||||
quoted = self.quoted(field=field, raw_field=raw_field, only=only)
|
||||
|
||||
escape_re = self.escape_re(quoted=quoted)
|
||||
escapechar = self.escapechar(quoted=quoted)
|
||||
|
||||
if escape_re.search(field):
|
||||
escapechar = '\\\\' if escapechar == '\\' else escapechar
|
||||
if not escapechar:
|
||||
raise Error('No escapechar is set')
|
||||
escape_replace = r'{escapechar}\1'.format(escapechar=escapechar)
|
||||
field = escape_re.sub(escape_replace, field)
|
||||
|
||||
if quoted:
|
||||
field = '{quotechar}{field}{quotechar}'.format(
|
||||
quotechar=self.dialect.quotechar, field=field)
|
||||
|
||||
return field
|
||||
|
||||
|
||||
class QuoteMinimalStrategy(QuoteStrategy):
|
||||
quoting = QUOTE_MINIMAL
|
||||
|
||||
def setup(self):
|
||||
self.quoted_re = re.compile(r'[{specialchars}]'.format(
|
||||
specialchars=re.escape(self.specialchars)))
|
||||
|
||||
@property
|
||||
def specialchars(self):
|
||||
return (
|
||||
self.dialect.lineterminator +
|
||||
self.dialect.quotechar +
|
||||
self.dialect.delimiter +
|
||||
(self.dialect.escapechar or '')
|
||||
)
|
||||
|
||||
def quoted(self, field, only, **kwargs):
|
||||
if field == self.dialect.quotechar and not self.dialect.doublequote:
|
||||
# If the only character in the field is the quotechar, and
|
||||
# doublequote is false, then just escape without outer quotes.
|
||||
return False
|
||||
return field == '' and only or bool(self.quoted_re.search(field))
|
||||
|
||||
|
||||
class QuoteAllStrategy(QuoteStrategy):
|
||||
quoting = QUOTE_ALL
|
||||
|
||||
@property
|
||||
def specialchars(self):
|
||||
return self.dialect.quotechar
|
||||
|
||||
def quoted(self, **kwargs):
|
||||
return True
|
||||
|
||||
|
||||
class QuoteNonnumericStrategy(QuoteStrategy):
|
||||
quoting = QUOTE_NONNUMERIC
|
||||
|
||||
@property
|
||||
def specialchars(self):
|
||||
return (
|
||||
self.dialect.lineterminator +
|
||||
self.dialect.quotechar +
|
||||
self.dialect.delimiter +
|
||||
(self.dialect.escapechar or '')
|
||||
)
|
||||
|
||||
def quoted(self, raw_field, **kwargs):
|
||||
return not isinstance(raw_field, numbers.Number)
|
||||
|
||||
|
||||
class QuoteNoneStrategy(QuoteStrategy):
|
||||
quoting = QUOTE_NONE
|
||||
|
||||
@property
|
||||
def specialchars(self):
|
||||
return (
|
||||
self.dialect.lineterminator +
|
||||
(self.dialect.quotechar or '') +
|
||||
self.dialect.delimiter +
|
||||
(self.dialect.escapechar or '')
|
||||
)
|
||||
|
||||
def quoted(self, field, only, **kwargs):
|
||||
if field == '' and only:
|
||||
raise Error('single empty field record must be quoted')
|
||||
return False
|
||||
|
||||
|
||||
class writer(object):
|
||||
def __init__(self, fileobj, dialect='excel', **fmtparams):
|
||||
if fileobj is None:
|
||||
raise TypeError('fileobj must be file-like, not None')
|
||||
|
||||
self.fileobj = fileobj
|
||||
|
||||
if isinstance(dialect, text_type):
|
||||
dialect = get_dialect(dialect)
|
||||
|
||||
try:
|
||||
self.dialect = Dialect.combine(dialect, fmtparams)
|
||||
except Error as e:
|
||||
raise TypeError(*e.args)
|
||||
|
||||
strategies = {
|
||||
QUOTE_MINIMAL: QuoteMinimalStrategy,
|
||||
QUOTE_ALL: QuoteAllStrategy,
|
||||
QUOTE_NONNUMERIC: QuoteNonnumericStrategy,
|
||||
QUOTE_NONE: QuoteNoneStrategy,
|
||||
}
|
||||
self.strategy = strategies[self.dialect.quoting](self.dialect)
|
||||
|
||||
def writerow(self, row):
|
||||
if row is None:
|
||||
raise Error('row must be an iterable')
|
||||
|
||||
row = list(row)
|
||||
only = len(row) == 1
|
||||
row = [self.strategy.prepare(field, only=only) for field in row]
|
||||
|
||||
line = self.dialect.delimiter.join(row) + self.dialect.lineterminator
|
||||
return self.fileobj.write(line)
|
||||
|
||||
def writerows(self, rows):
|
||||
for row in rows:
|
||||
self.writerow(row)
|
||||
|
||||
|
||||
START_RECORD = 0
|
||||
START_FIELD = 1
|
||||
ESCAPED_CHAR = 2
|
||||
IN_FIELD = 3
|
||||
IN_QUOTED_FIELD = 4
|
||||
ESCAPE_IN_QUOTED_FIELD = 5
|
||||
QUOTE_IN_QUOTED_FIELD = 6
|
||||
EAT_CRNL = 7
|
||||
AFTER_ESCAPED_CRNL = 8
|
||||
|
||||
|
||||
class reader(object):
|
||||
def __init__(self, fileobj, dialect='excel', **fmtparams):
|
||||
self.input_iter = iter(fileobj)
|
||||
|
||||
if isinstance(dialect, text_type):
|
||||
dialect = get_dialect(dialect)
|
||||
|
||||
try:
|
||||
self.dialect = Dialect.combine(dialect, fmtparams)
|
||||
except Error as e:
|
||||
raise TypeError(*e.args)
|
||||
|
||||
self.fields = None
|
||||
self.field = None
|
||||
self.line_num = 0
|
||||
|
||||
def parse_reset(self):
|
||||
self.fields = []
|
||||
self.field = []
|
||||
self.state = START_RECORD
|
||||
self.numeric_field = False
|
||||
|
||||
def parse_save_field(self):
|
||||
field = ''.join(self.field)
|
||||
self.field = []
|
||||
if self.numeric_field:
|
||||
field = float(field)
|
||||
self.numeric_field = False
|
||||
self.fields.append(field)
|
||||
|
||||
def parse_add_char(self, c):
|
||||
if len(self.field) >= field_size_limit():
|
||||
raise Error('field size limit exceeded')
|
||||
self.field.append(c)
|
||||
|
||||
def parse_process_char(self, c):
|
||||
switch = {
|
||||
START_RECORD: self._parse_start_record,
|
||||
START_FIELD: self._parse_start_field,
|
||||
ESCAPED_CHAR: self._parse_escaped_char,
|
||||
AFTER_ESCAPED_CRNL: self._parse_after_escaped_crnl,
|
||||
IN_FIELD: self._parse_in_field,
|
||||
IN_QUOTED_FIELD: self._parse_in_quoted_field,
|
||||
ESCAPE_IN_QUOTED_FIELD: self._parse_escape_in_quoted_field,
|
||||
QUOTE_IN_QUOTED_FIELD: self._parse_quote_in_quoted_field,
|
||||
EAT_CRNL: self._parse_eat_crnl,
|
||||
}
|
||||
return switch[self.state](c)
|
||||
|
||||
def _parse_start_record(self, c):
|
||||
if c == '\0':
|
||||
return
|
||||
elif c == '\n' or c == '\r':
|
||||
self.state = EAT_CRNL
|
||||
return
|
||||
|
||||
self.state = START_FIELD
|
||||
return self._parse_start_field(c)
|
||||
|
||||
def _parse_start_field(self, c):
|
||||
if c == '\n' or c == '\r' or c == '\0':
|
||||
self.parse_save_field()
|
||||
self.state = START_RECORD if c == '\0' else EAT_CRNL
|
||||
elif (c == self.dialect.quotechar and
|
||||
self.dialect.quoting != QUOTE_NONE):
|
||||
self.state = IN_QUOTED_FIELD
|
||||
elif c == self.dialect.escapechar:
|
||||
self.state = ESCAPED_CHAR
|
||||
elif c == ' ' and self.dialect.skipinitialspace:
|
||||
pass # Ignore space at start of field
|
||||
elif c == self.dialect.delimiter:
|
||||
# Save empty field
|
||||
self.parse_save_field()
|
||||
else:
|
||||
# Begin new unquoted field
|
||||
if self.dialect.quoting == QUOTE_NONNUMERIC:
|
||||
self.numeric_field = True
|
||||
self.parse_add_char(c)
|
||||
self.state = IN_FIELD
|
||||
|
||||
def _parse_escaped_char(self, c):
|
||||
if c == '\n' or c == '\r':
|
||||
self.parse_add_char(c)
|
||||
self.state = AFTER_ESCAPED_CRNL
|
||||
return
|
||||
if c == '\0':
|
||||
c = '\n'
|
||||
self.parse_add_char(c)
|
||||
self.state = IN_FIELD
|
||||
|
||||
def _parse_after_escaped_crnl(self, c):
|
||||
if c == '\0':
|
||||
return
|
||||
return self._parse_in_field(c)
|
||||
|
||||
def _parse_in_field(self, c):
|
||||
# In unquoted field
|
||||
if c == '\n' or c == '\r' or c == '\0':
|
||||
# End of line - return [fields]
|
||||
self.parse_save_field()
|
||||
self.state = START_RECORD if c == '\0' else EAT_CRNL
|
||||
elif c == self.dialect.escapechar:
|
||||
self.state = ESCAPED_CHAR
|
||||
elif c == self.dialect.delimiter:
|
||||
self.parse_save_field()
|
||||
self.state = START_FIELD
|
||||
else:
|
||||
# Normal character - save in field
|
||||
self.parse_add_char(c)
|
||||
|
||||
def _parse_in_quoted_field(self, c):
|
||||
if c == '\0':
|
||||
pass
|
||||
elif c == self.dialect.escapechar:
|
||||
self.state = ESCAPE_IN_QUOTED_FIELD
|
||||
elif (c == self.dialect.quotechar and
|
||||
self.dialect.quoting != QUOTE_NONE):
|
||||
if self.dialect.doublequote:
|
||||
self.state = QUOTE_IN_QUOTED_FIELD
|
||||
else:
|
||||
self.state = IN_FIELD
|
||||
else:
|
||||
self.parse_add_char(c)
|
||||
|
||||
def _parse_escape_in_quoted_field(self, c):
|
||||
if c == '\0':
|
||||
c = '\n'
|
||||
|
||||
self.parse_add_char(c)
|
||||
self.state = IN_QUOTED_FIELD
|
||||
|
||||
def _parse_quote_in_quoted_field(self, c):
|
||||
if (self.dialect.quoting != QUOTE_NONE and
|
||||
c == self.dialect.quotechar):
|
||||
# save "" as "
|
||||
self.parse_add_char(c)
|
||||
self.state = IN_QUOTED_FIELD
|
||||
elif c == self.dialect.delimiter:
|
||||
self.parse_save_field()
|
||||
self.state = START_FIELD
|
||||
elif c == '\n' or c == '\r' or c == '\0':
|
||||
# End of line = return [fields]
|
||||
self.parse_save_field()
|
||||
self.state = START_RECORD if c == '\0' else EAT_CRNL
|
||||
elif not self.dialect.strict:
|
||||
self.parse_add_char(c)
|
||||
self.state = IN_FIELD
|
||||
else:
|
||||
# illegal
|
||||
raise Error("{delimiter}' expected after '{quotechar}".format(
|
||||
delimiter=self.dialect.delimiter,
|
||||
quotechar=self.dialect.quotechar,
|
||||
))
|
||||
|
||||
def _parse_eat_crnl(self, c):
|
||||
if c == '\n' or c == '\r':
|
||||
pass
|
||||
elif c == '\0':
|
||||
self.state = START_RECORD
|
||||
else:
|
||||
raise Error('new-line character seen in unquoted field - do you '
|
||||
'need to open the file in universal-newline mode?')
|
||||
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def __next__(self):
|
||||
self.parse_reset()
|
||||
|
||||
while True:
|
||||
try:
|
||||
lineobj = next(self.input_iter)
|
||||
except StopIteration:
|
||||
if len(self.field) != 0 or self.state == IN_QUOTED_FIELD:
|
||||
if self.dialect.strict:
|
||||
raise Error('unexpected end of data')
|
||||
self.parse_save_field()
|
||||
if self.fields:
|
||||
break
|
||||
raise
|
||||
|
||||
if not isinstance(lineobj, text_type):
|
||||
typ = type(lineobj)
|
||||
typ_name = 'bytes' if typ == bytes else typ.__name__
|
||||
err_str = ('iterator should return strings, not {0}'
|
||||
' (did you open the file in text mode?)')
|
||||
raise Error(err_str.format(typ_name))
|
||||
|
||||
self.line_num += 1
|
||||
for c in lineobj:
|
||||
if c == '\0':
|
||||
raise Error('line contains NULL byte')
|
||||
self.parse_process_char(c)
|
||||
|
||||
self.parse_process_char('\0')
|
||||
|
||||
if self.state == START_RECORD:
|
||||
break
|
||||
|
||||
fields = self.fields
|
||||
self.fields = None
|
||||
return fields
|
||||
|
||||
next = __next__
|
||||
|
||||
|
||||
_dialect_registry = {}
|
||||
def register_dialect(name, dialect='excel', **fmtparams):
|
||||
if not isinstance(name, text_type):
|
||||
raise TypeError('"name" must be a string')
|
||||
|
||||
dialect = Dialect.extend(dialect, fmtparams)
|
||||
|
||||
try:
|
||||
Dialect.validate(dialect)
|
||||
except:
|
||||
raise TypeError('dialect is invalid')
|
||||
|
||||
assert name not in _dialect_registry
|
||||
_dialect_registry[name] = dialect
|
||||
|
||||
def unregister_dialect(name):
|
||||
try:
|
||||
_dialect_registry.pop(name)
|
||||
except KeyError:
|
||||
raise Error('"{name}" not a registered dialect'.format(name=name))
|
||||
|
||||
def get_dialect(name):
|
||||
try:
|
||||
return _dialect_registry[name]
|
||||
except KeyError:
|
||||
raise Error('Could not find dialect {0}'.format(name))
|
||||
|
||||
def list_dialects():
|
||||
return list(_dialect_registry)
|
||||
|
||||
|
||||
class Dialect(object):
|
||||
"""Describe a CSV dialect.
|
||||
This must be subclassed (see csv.excel). Valid attributes are:
|
||||
delimiter, quotechar, escapechar, doublequote, skipinitialspace,
|
||||
lineterminator, quoting, strict.
|
||||
"""
|
||||
_name = ""
|
||||
_valid = False
|
||||
# placeholders
|
||||
delimiter = None
|
||||
quotechar = None
|
||||
escapechar = None
|
||||
doublequote = None
|
||||
skipinitialspace = None
|
||||
lineterminator = None
|
||||
quoting = None
|
||||
strict = None
|
||||
|
||||
def __init__(self):
|
||||
self.validate(self)
|
||||
if self.__class__ != Dialect:
|
||||
self._valid = True
|
||||
|
||||
@classmethod
|
||||
def validate(cls, dialect):
|
||||
dialect = cls.extend(dialect)
|
||||
|
||||
if not isinstance(dialect.quoting, int):
|
||||
raise Error('"quoting" must be an integer')
|
||||
|
||||
if dialect.delimiter is None:
|
||||
raise Error('delimiter must be set')
|
||||
cls.validate_text(dialect, 'delimiter')
|
||||
|
||||
if dialect.lineterminator is None:
|
||||
raise Error('lineterminator must be set')
|
||||
if not isinstance(dialect.lineterminator, text_type):
|
||||
raise Error('"lineterminator" must be a string')
|
||||
|
||||
if dialect.quoting not in [
|
||||
QUOTE_NONE, QUOTE_MINIMAL, QUOTE_NONNUMERIC, QUOTE_ALL]:
|
||||
raise Error('Invalid quoting specified')
|
||||
|
||||
if dialect.quoting != QUOTE_NONE:
|
||||
if dialect.quotechar is None and dialect.escapechar is None:
|
||||
raise Error('quotechar must be set if quoting enabled')
|
||||
if dialect.quotechar is not None:
|
||||
cls.validate_text(dialect, 'quotechar')
|
||||
|
||||
@staticmethod
|
||||
def validate_text(dialect, attr):
|
||||
val = getattr(dialect, attr)
|
||||
if not isinstance(val, text_type):
|
||||
if type(val) == bytes:
|
||||
raise Error('"{0}" must be string, not bytes'.format(attr))
|
||||
raise Error('"{0}" must be string, not {1}'.format(
|
||||
attr, type(val).__name__))
|
||||
|
||||
if len(val) != 1:
|
||||
raise Error('"{0}" must be a 1-character string'.format(attr))
|
||||
|
||||
@staticmethod
|
||||
def defaults():
|
||||
return {
|
||||
'delimiter': ',',
|
||||
'doublequote': True,
|
||||
'escapechar': None,
|
||||
'lineterminator': '\r\n',
|
||||
'quotechar': '"',
|
||||
'quoting': QUOTE_MINIMAL,
|
||||
'skipinitialspace': False,
|
||||
'strict': False,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def extend(cls, dialect, fmtparams=None):
|
||||
if isinstance(dialect, string_types):
|
||||
dialect = get_dialect(dialect)
|
||||
|
||||
if fmtparams is None:
|
||||
return dialect
|
||||
|
||||
defaults = cls.defaults()
|
||||
|
||||
if any(param not in defaults for param in fmtparams):
|
||||
raise TypeError('Invalid fmtparam')
|
||||
|
||||
specified = dict(
|
||||
(attr, getattr(dialect, attr, None))
|
||||
for attr in cls.defaults()
|
||||
)
|
||||
|
||||
specified.update(fmtparams)
|
||||
return type(str('ExtendedDialect'), (cls,), specified)
|
||||
|
||||
@classmethod
|
||||
def combine(cls, dialect, fmtparams):
|
||||
"""Create a new dialect with defaults and added parameters."""
|
||||
dialect = cls.extend(dialect, fmtparams)
|
||||
defaults = cls.defaults()
|
||||
specified = dict(
|
||||
(attr, getattr(dialect, attr, None))
|
||||
for attr in defaults
|
||||
if getattr(dialect, attr, None) is not None or
|
||||
attr in ['quotechar', 'delimiter', 'lineterminator', 'quoting']
|
||||
)
|
||||
|
||||
defaults.update(specified)
|
||||
dialect = type(str('CombinedDialect'), (cls,), defaults)
|
||||
cls.validate(dialect)
|
||||
return dialect()
|
||||
|
||||
def __delattr__(self, attr):
|
||||
if self._valid:
|
||||
raise AttributeError('dialect is immutable.')
|
||||
super(Dialect, self).__delattr__(attr)
|
||||
|
||||
def __setattr__(self, attr, value):
|
||||
if self._valid:
|
||||
raise AttributeError('dialect is immutable.')
|
||||
super(Dialect, self).__setattr__(attr, value)
|
||||
|
||||
|
||||
class excel(Dialect):
|
||||
"""Describe the usual properties of Excel-generated CSV files."""
|
||||
delimiter = ','
|
||||
quotechar = '"'
|
||||
doublequote = True
|
||||
skipinitialspace = False
|
||||
lineterminator = '\r\n'
|
||||
quoting = QUOTE_MINIMAL
|
||||
register_dialect("excel", excel)
|
||||
|
||||
class excel_tab(excel):
|
||||
"""Describe the usual properties of Excel-generated TAB-delimited files."""
|
||||
delimiter = '\t'
|
||||
register_dialect("excel-tab", excel_tab)
|
||||
|
||||
class unix_dialect(Dialect):
|
||||
"""Describe the usual properties of Unix-generated CSV files."""
|
||||
delimiter = ','
|
||||
quotechar = '"'
|
||||
doublequote = True
|
||||
skipinitialspace = False
|
||||
lineterminator = '\n'
|
||||
quoting = QUOTE_ALL
|
||||
register_dialect("unix", unix_dialect)
|
||||
|
||||
|
||||
class DictReader(object):
|
||||
def __init__(self, f, fieldnames=None, restkey=None, restval=None,
|
||||
dialect="excel", *args, **kwds):
|
||||
self._fieldnames = fieldnames # list of keys for the dict
|
||||
self.restkey = restkey # key to catch long rows
|
||||
self.restval = restval # default value for short rows
|
||||
self.reader = reader(f, dialect, *args, **kwds)
|
||||
self.dialect = dialect
|
||||
self.line_num = 0
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
@property
|
||||
def fieldnames(self):
|
||||
if self._fieldnames is None:
|
||||
try:
|
||||
self._fieldnames = next(self.reader)
|
||||
except StopIteration:
|
||||
pass
|
||||
self.line_num = self.reader.line_num
|
||||
return self._fieldnames
|
||||
|
||||
@fieldnames.setter
|
||||
def fieldnames(self, value):
|
||||
self._fieldnames = value
|
||||
|
||||
def __next__(self):
|
||||
if self.line_num == 0:
|
||||
# Used only for its side effect.
|
||||
self.fieldnames
|
||||
row = next(self.reader)
|
||||
self.line_num = self.reader.line_num
|
||||
|
||||
# unlike the basic reader, we prefer not to return blanks,
|
||||
# because we will typically wind up with a dict full of None
|
||||
# values
|
||||
while row == []:
|
||||
row = next(self.reader)
|
||||
d = dict(zip(self.fieldnames, row))
|
||||
lf = len(self.fieldnames)
|
||||
lr = len(row)
|
||||
if lf < lr:
|
||||
d[self.restkey] = row[lf:]
|
||||
elif lf > lr:
|
||||
for key in self.fieldnames[lr:]:
|
||||
d[key] = self.restval
|
||||
return d
|
||||
|
||||
next = __next__
|
||||
|
||||
|
||||
class DictWriter(object):
|
||||
def __init__(self, f, fieldnames, restval="", extrasaction="raise",
|
||||
dialect="excel", *args, **kwds):
|
||||
self.fieldnames = fieldnames # list of keys for the dict
|
||||
self.restval = restval # for writing short dicts
|
||||
if extrasaction.lower() not in ("raise", "ignore"):
|
||||
raise ValueError("extrasaction (%s) must be 'raise' or 'ignore'"
|
||||
% extrasaction)
|
||||
self.extrasaction = extrasaction
|
||||
self.writer = writer(f, dialect, *args, **kwds)
|
||||
|
||||
def writeheader(self):
|
||||
header = dict(zip(self.fieldnames, self.fieldnames))
|
||||
self.writerow(header)
|
||||
|
||||
def _dict_to_list(self, rowdict):
|
||||
if self.extrasaction == "raise":
|
||||
wrong_fields = [k for k in rowdict if k not in self.fieldnames]
|
||||
if wrong_fields:
|
||||
raise ValueError("dict contains fields not in fieldnames: "
|
||||
+ ", ".join([repr(x) for x in wrong_fields]))
|
||||
return (rowdict.get(key, self.restval) for key in self.fieldnames)
|
||||
|
||||
def writerow(self, rowdict):
|
||||
return self.writer.writerow(self._dict_to_list(rowdict))
|
||||
|
||||
def writerows(self, rowdicts):
|
||||
return self.writer.writerows(map(self._dict_to_list, rowdicts))
|
||||
|
||||
# Guard Sniffer's type checking against builds that exclude complex()
|
||||
try:
|
||||
complex
|
||||
except NameError:
|
||||
complex = float
|
||||
|
||||
class Sniffer(object):
|
||||
'''
|
||||
"Sniffs" the format of a CSV file (i.e. delimiter, quotechar)
|
||||
Returns a Dialect object.
|
||||
'''
|
||||
def __init__(self):
|
||||
# in case there is more than one possible delimiter
|
||||
self.preferred = [',', '\t', ';', ' ', ':']
|
||||
|
||||
|
||||
def sniff(self, sample, delimiters=None):
|
||||
"""
|
||||
Returns a dialect (or None) corresponding to the sample
|
||||
"""
|
||||
|
||||
quotechar, doublequote, delimiter, skipinitialspace = \
|
||||
self._guess_quote_and_delimiter(sample, delimiters)
|
||||
if not delimiter:
|
||||
delimiter, skipinitialspace = self._guess_delimiter(sample,
|
||||
delimiters)
|
||||
|
||||
if not delimiter:
|
||||
raise Error("Could not determine delimiter")
|
||||
|
||||
class dialect(Dialect):
|
||||
_name = "sniffed"
|
||||
lineterminator = '\r\n'
|
||||
quoting = QUOTE_MINIMAL
|
||||
# escapechar = ''
|
||||
|
||||
dialect.doublequote = doublequote
|
||||
dialect.delimiter = delimiter
|
||||
# _csv.reader won't accept a quotechar of ''
|
||||
dialect.quotechar = quotechar or '"'
|
||||
dialect.skipinitialspace = skipinitialspace
|
||||
|
||||
return dialect
|
||||
|
||||
|
||||
def _guess_quote_and_delimiter(self, data, delimiters):
|
||||
"""
|
||||
Looks for text enclosed between two identical quotes
|
||||
(the probable quotechar) which are preceded and followed
|
||||
by the same character (the probable delimiter).
|
||||
For example:
|
||||
,'some text',
|
||||
The quote with the most wins, same with the delimiter.
|
||||
If there is no quotechar the delimiter can't be determined
|
||||
this way.
|
||||
"""
|
||||
|
||||
matches = []
|
||||
for restr in ('(?P<delim>[^\w\n"\'])(?P<space> ?)(?P<quote>["\']).*?(?P=quote)(?P=delim)', # ,".*?",
|
||||
'(?:^|\n)(?P<quote>["\']).*?(?P=quote)(?P<delim>[^\w\n"\'])(?P<space> ?)', # ".*?",
|
||||
'(?P<delim>>[^\w\n"\'])(?P<space> ?)(?P<quote>["\']).*?(?P=quote)(?:$|\n)', # ,".*?"
|
||||
'(?:^|\n)(?P<quote>["\']).*?(?P=quote)(?:$|\n)'): # ".*?" (no delim, no space)
|
||||
regexp = re.compile(restr, re.DOTALL | re.MULTILINE)
|
||||
matches = regexp.findall(data)
|
||||
if matches:
|
||||
break
|
||||
|
||||
if not matches:
|
||||
# (quotechar, doublequote, delimiter, skipinitialspace)
|
||||
return ('', False, None, 0)
|
||||
quotes = {}
|
||||
delims = {}
|
||||
spaces = 0
|
||||
groupindex = regexp.groupindex
|
||||
for m in matches:
|
||||
n = groupindex['quote'] - 1
|
||||
key = m[n]
|
||||
if key:
|
||||
quotes[key] = quotes.get(key, 0) + 1
|
||||
try:
|
||||
n = groupindex['delim'] - 1
|
||||
key = m[n]
|
||||
except KeyError:
|
||||
continue
|
||||
if key and (delimiters is None or key in delimiters):
|
||||
delims[key] = delims.get(key, 0) + 1
|
||||
try:
|
||||
n = groupindex['space'] - 1
|
||||
except KeyError:
|
||||
continue
|
||||
if m[n]:
|
||||
spaces += 1
|
||||
|
||||
quotechar = max(quotes, key=quotes.get)
|
||||
|
||||
if delims:
|
||||
delim = max(delims, key=delims.get)
|
||||
skipinitialspace = delims[delim] == spaces
|
||||
if delim == '\n': # most likely a file with a single column
|
||||
delim = ''
|
||||
else:
|
||||
# there is *no* delimiter, it's a single column of quoted data
|
||||
delim = ''
|
||||
skipinitialspace = 0
|
||||
|
||||
# if we see an extra quote between delimiters, we've got a
|
||||
# double quoted format
|
||||
dq_regexp = re.compile(
|
||||
r"((%(delim)s)|^)\W*%(quote)s[^%(delim)s\n]*%(quote)s[^%(delim)s\n]*%(quote)s\W*((%(delim)s)|$)" % \
|
||||
{'delim':re.escape(delim), 'quote':quotechar}, re.MULTILINE)
|
||||
|
||||
|
||||
|
||||
if dq_regexp.search(data):
|
||||
doublequote = True
|
||||
else:
|
||||
doublequote = False
|
||||
|
||||
return (quotechar, doublequote, delim, skipinitialspace)
|
||||
|
||||
|
||||
def _guess_delimiter(self, data, delimiters):
|
||||
"""
|
||||
The delimiter /should/ occur the same number of times on
|
||||
each row. However, due to malformed data, it may not. We don't want
|
||||
an all or nothing approach, so we allow for small variations in this
|
||||
number.
|
||||
1) build a table of the frequency of each character on every line.
|
||||
2) build a table of frequencies of this frequency (meta-frequency?),
|
||||
e.g. 'x occurred 5 times in 10 rows, 6 times in 1000 rows,
|
||||
7 times in 2 rows'
|
||||
3) use the mode of the meta-frequency to determine the /expected/
|
||||
frequency for that character
|
||||
4) find out how often the character actually meets that goal
|
||||
5) the character that best meets its goal is the delimiter
|
||||
For performance reasons, the data is evaluated in chunks, so it can
|
||||
try and evaluate the smallest portion of the data possible, evaluating
|
||||
additional chunks as necessary.
|
||||
"""
|
||||
|
||||
data = list(filter(None, data.split('\n')))
|
||||
|
||||
ascii = [unichr(c) for c in range(127)] # 7-bit ASCII
|
||||
|
||||
# build frequency tables
|
||||
chunkLength = min(10, len(data))
|
||||
iteration = 0
|
||||
charFrequency = {}
|
||||
modes = {}
|
||||
delims = {}
|
||||
start, end = 0, min(chunkLength, len(data))
|
||||
while start < len(data):
|
||||
iteration += 1
|
||||
for line in data[start:end]:
|
||||
for char in ascii:
|
||||
metaFrequency = charFrequency.get(char, {})
|
||||
# must count even if frequency is 0
|
||||
freq = line.count(char)
|
||||
# value is the mode
|
||||
metaFrequency[freq] = metaFrequency.get(freq, 0) + 1
|
||||
charFrequency[char] = metaFrequency
|
||||
|
||||
for char in charFrequency.keys():
|
||||
items = list(charFrequency[char].items())
|
||||
if len(items) == 1 and items[0][0] == 0:
|
||||
continue
|
||||
# get the mode of the frequencies
|
||||
if len(items) > 1:
|
||||
modes[char] = max(items, key=lambda x: x[1])
|
||||
# adjust the mode - subtract the sum of all
|
||||
# other frequencies
|
||||
items.remove(modes[char])
|
||||
modes[char] = (modes[char][0], modes[char][1]
|
||||
- sum(item[1] for item in items))
|
||||
else:
|
||||
modes[char] = items[0]
|
||||
|
||||
# build a list of possible delimiters
|
||||
modeList = modes.items()
|
||||
total = float(chunkLength * iteration)
|
||||
# (rows of consistent data) / (number of rows) = 100%
|
||||
consistency = 1.0
|
||||
# minimum consistency threshold
|
||||
threshold = 0.9
|
||||
while len(delims) == 0 and consistency >= threshold:
|
||||
for k, v in modeList:
|
||||
if v[0] > 0 and v[1] > 0:
|
||||
if ((v[1]/total) >= consistency and
|
||||
(delimiters is None or k in delimiters)):
|
||||
delims[k] = v
|
||||
consistency -= 0.01
|
||||
|
||||
if len(delims) == 1:
|
||||
delim = list(delims.keys())[0]
|
||||
skipinitialspace = (data[0].count(delim) ==
|
||||
data[0].count("%c " % delim))
|
||||
return (delim, skipinitialspace)
|
||||
|
||||
# analyze another chunkLength lines
|
||||
start = end
|
||||
end += chunkLength
|
||||
|
||||
if not delims:
|
||||
return ('', 0)
|
||||
|
||||
# if there's more than one, fall back to a 'preferred' list
|
||||
if len(delims) > 1:
|
||||
for d in self.preferred:
|
||||
if d in delims.keys():
|
||||
skipinitialspace = (data[0].count(d) ==
|
||||
data[0].count("%c " % d))
|
||||
return (d, skipinitialspace)
|
||||
|
||||
# nothing else indicates a preference, pick the character that
|
||||
# dominates(?)
|
||||
items = [(v,k) for (k,v) in delims.items()]
|
||||
items.sort()
|
||||
delim = items[-1][1]
|
||||
|
||||
skipinitialspace = (data[0].count(delim) ==
|
||||
data[0].count("%c " % delim))
|
||||
return (delim, skipinitialspace)
|
||||
|
||||
|
||||
def has_header(self, sample):
|
||||
# Creates a dictionary of types of data in each column. If any
|
||||
# column is of a single type (say, integers), *except* for the first
|
||||
# row, then the first row is presumed to be labels. If the type
|
||||
# can't be determined, it is assumed to be a string in which case
|
||||
# the length of the string is the determining factor: if all of the
|
||||
# rows except for the first are the same length, it's a header.
|
||||
# Finally, a 'vote' is taken at the end for each column, adding or
|
||||
# subtracting from the likelihood of the first row being a header.
|
||||
|
||||
rdr = reader(StringIO(sample), self.sniff(sample))
|
||||
|
||||
header = next(rdr) # assume first row is header
|
||||
|
||||
columns = len(header)
|
||||
columnTypes = {}
|
||||
for i in range(columns): columnTypes[i] = None
|
||||
|
||||
checked = 0
|
||||
for row in rdr:
|
||||
# arbitrary number of rows to check, to keep it sane
|
||||
if checked > 20:
|
||||
break
|
||||
checked += 1
|
||||
|
||||
if len(row) != columns:
|
||||
continue # skip rows that have irregular number of columns
|
||||
|
||||
for col in list(columnTypes.keys()):
|
||||
|
||||
for thisType in [int, float, complex]:
|
||||
try:
|
||||
thisType(row[col])
|
||||
break
|
||||
except (ValueError, OverflowError):
|
||||
pass
|
||||
else:
|
||||
# fallback to length of string
|
||||
thisType = len(row[col])
|
||||
|
||||
if thisType != columnTypes[col]:
|
||||
if columnTypes[col] is None: # add new column type
|
||||
columnTypes[col] = thisType
|
||||
else:
|
||||
# type is inconsistent, remove column from
|
||||
# consideration
|
||||
del columnTypes[col]
|
||||
|
||||
# finally, compare results against first row and "vote"
|
||||
# on whether it's a header
|
||||
hasHeader = 0
|
||||
for col, colType in columnTypes.items():
|
||||
if type(colType) == type(0): # it's a length
|
||||
if len(header[col]) != colType:
|
||||
hasHeader += 1
|
||||
else:
|
||||
hasHeader -= 1
|
||||
else: # attempt typecast
|
||||
try:
|
||||
colType(header[col])
|
||||
except (ValueError, TypeError):
|
||||
hasHeader += 1
|
||||
else:
|
||||
hasHeader -= 1
|
||||
|
||||
return hasHeader > 0
|
|
@ -1,195 +0,0 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
import functools
|
||||
from collections import namedtuple
|
||||
from threading import RLock
|
||||
|
||||
_CacheInfo = namedtuple("_CacheInfo", ["hits", "misses", "maxsize", "currsize"])
|
||||
|
||||
|
||||
@functools.wraps(functools.update_wrapper)
|
||||
def update_wrapper(
|
||||
wrapper,
|
||||
wrapped,
|
||||
assigned=functools.WRAPPER_ASSIGNMENTS,
|
||||
updated=functools.WRAPPER_UPDATES,
|
||||
):
|
||||
"""
|
||||
Patch two bugs in functools.update_wrapper.
|
||||
"""
|
||||
# workaround for http://bugs.python.org/issue3445
|
||||
assigned = tuple(attr for attr in assigned if hasattr(wrapped, attr))
|
||||
wrapper = functools.update_wrapper(wrapper, wrapped, assigned, updated)
|
||||
# workaround for https://bugs.python.org/issue17482
|
||||
wrapper.__wrapped__ = wrapped
|
||||
return wrapper
|
||||
|
||||
|
||||
class _HashedSeq(list):
|
||||
__slots__ = 'hashvalue'
|
||||
|
||||
def __init__(self, tup, hash=hash):
|
||||
self[:] = tup
|
||||
self.hashvalue = hash(tup)
|
||||
|
||||
def __hash__(self):
|
||||
return self.hashvalue
|
||||
|
||||
|
||||
def _make_key(
|
||||
args,
|
||||
kwds,
|
||||
typed,
|
||||
kwd_mark=(object(),),
|
||||
fasttypes=set([int, str, frozenset, type(None)]),
|
||||
sorted=sorted,
|
||||
tuple=tuple,
|
||||
type=type,
|
||||
len=len,
|
||||
):
|
||||
'Make a cache key from optionally typed positional and keyword arguments'
|
||||
key = args
|
||||
if kwds:
|
||||
sorted_items = sorted(kwds.items())
|
||||
key += kwd_mark
|
||||
for item in sorted_items:
|
||||
key += item
|
||||
if typed:
|
||||
key += tuple(type(v) for v in args)
|
||||
if kwds:
|
||||
key += tuple(type(v) for k, v in sorted_items)
|
||||
elif len(key) == 1 and type(key[0]) in fasttypes:
|
||||
return key[0]
|
||||
return _HashedSeq(key)
|
||||
|
||||
|
||||
def lru_cache(maxsize=100, typed=False): # noqa: C901
|
||||
"""Least-recently-used cache decorator.
|
||||
|
||||
If *maxsize* is set to None, the LRU features are disabled and the cache
|
||||
can grow without bound.
|
||||
|
||||
If *typed* is True, arguments of different types will be cached separately.
|
||||
For example, f(3.0) and f(3) will be treated as distinct calls with
|
||||
distinct results.
|
||||
|
||||
Arguments to the cached function must be hashable.
|
||||
|
||||
View the cache statistics named tuple (hits, misses, maxsize, currsize) with
|
||||
f.cache_info(). Clear the cache and statistics with f.cache_clear().
|
||||
Access the underlying function with f.__wrapped__.
|
||||
|
||||
See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
|
||||
|
||||
"""
|
||||
|
||||
# Users should only access the lru_cache through its public API:
|
||||
# cache_info, cache_clear, and f.__wrapped__
|
||||
# The internals of the lru_cache are encapsulated for thread safety and
|
||||
# to allow the implementation to change (including a possible C version).
    def decorating_function(user_function):

        cache = dict()
        stats = [0, 0]                  # make statistics updateable non-locally
        HITS, MISSES = 0, 1             # names for the stats fields
        make_key = _make_key
        cache_get = cache.get           # bound method to lookup key or return None
        _len = len                      # localize the global len() function
        lock = RLock()                  # because linkedlist updates aren't threadsafe
        root = []                       # root of the circular doubly linked list
        root[:] = [root, root, None, None]      # initialize by pointing to self
        nonlocal_root = [root]          # make updateable non-locally
        PREV, NEXT, KEY, RESULT = 0, 1, 2, 3    # names for the link fields

        if maxsize == 0:

            def wrapper(*args, **kwds):
                # no caching, just do a statistics update after a successful call
                result = user_function(*args, **kwds)
                stats[MISSES] += 1
                return result

        elif maxsize is None:

            def wrapper(*args, **kwds):
                # simple caching without ordering or size limit
                key = make_key(args, kwds, typed)
                result = cache_get(
                    key, root
                )  # root used here as a unique not-found sentinel
                if result is not root:
                    stats[HITS] += 1
                    return result
                result = user_function(*args, **kwds)
                cache[key] = result
                stats[MISSES] += 1
                return result

        else:

            def wrapper(*args, **kwds):
                # size limited caching that tracks accesses by recency
                key = make_key(args, kwds, typed) if kwds or typed else args
                with lock:
                    link = cache_get(key)
                    if link is not None:
                        # record recent use of the key by moving it
                        # to the front of the list
                        (root,) = nonlocal_root
                        link_prev, link_next, key, result = link
                        link_prev[NEXT] = link_next
                        link_next[PREV] = link_prev
                        last = root[PREV]
                        last[NEXT] = root[PREV] = link
                        link[PREV] = last
                        link[NEXT] = root
                        stats[HITS] += 1
                        return result
                result = user_function(*args, **kwds)
                with lock:
                    (root,) = nonlocal_root
                    if key in cache:
                        # getting here means that this same key was added to the
                        # cache while the lock was released. since the link
                        # update is already done, we need only return the
                        # computed result and update the count of misses.
                        pass
                    elif _len(cache) >= maxsize:
                        # use the old root to store the new key and result
                        oldroot = root
                        oldroot[KEY] = key
                        oldroot[RESULT] = result
                        # empty the oldest link and make it the new root
                        root = nonlocal_root[0] = oldroot[NEXT]
                        oldkey = root[KEY]
                        root[KEY] = root[RESULT] = None
                        # now update the cache dictionary for the new links
                        del cache[oldkey]
                        cache[key] = oldroot
                    else:
                        # put result in a new link at the front of the list
                        last = root[PREV]
                        link = [last, root, key, result]
                        last[NEXT] = root[PREV] = cache[key] = link
                    stats[MISSES] += 1
                return result

        def cache_info():
            """Report cache statistics"""
            with lock:
                return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache))

        def cache_clear():
            """Clear the cache and cache statistics"""
            with lock:
                cache.clear()
                root = nonlocal_root[0]
                root[:] = [root, root, None, None]
                stats[:] = [0, 0]

        wrapper.__wrapped__ = user_function
        wrapper.cache_info = cache_info
        wrapper.cache_clear = cache_clear
        return update_wrapper(wrapper, user_function)

    return decorating_function
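For context, a minimal sketch of how the backported decorator above is used. The import path is illustrative only; the exact module name for this vendored copy may differ:

# Hypothetical import path for the vendored backport shown above.
from backports.functools_lru_cache import lru_cache

@lru_cache(maxsize=128)
def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)

fib(30)
print(fib.cache_info())   # CacheInfo(hits=28, misses=31, maxsize=128, currsize=31)
fib.cache_clear()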
lib/backports/tarfile/__init__.py (new file, 2937 lines; diff suppressed because it is too large)

lib/backports/tarfile/__main__.py (new file, 5 lines)
@@ -0,0 +1,5 @@
from . import main


if __name__ == '__main__':
    main()
lib/backports/tarfile/compat/py38.py (new file, 24 lines)
@@ -0,0 +1,24 @@
import sys


if sys.version_info < (3, 9):

    def removesuffix(self, suffix):
        # suffix='' should not call self[:-0].
        if suffix and self.endswith(suffix):
            return self[: -len(suffix)]
        else:
            return self[:]

    def removeprefix(self, prefix):
        if self.startswith(prefix):
            return self[len(prefix) :]
        else:
            return self[:]

else:

    def removesuffix(self, suffix):
        return self.removesuffix(suffix)

    def removeprefix(self, prefix):
        return self.removeprefix(prefix)
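A brief illustration of how these shims behave. They take the string as an explicit first argument so that the same call works on Python versions before 3.9; the import path here is illustrative:

# Hypothetical import path for the compat module shown above.
from backports.tarfile.compat import py38

print(py38.removesuffix("archive.tar.gz", ".gz"))   # archive.tar
print(py38.removeprefix("./member/file", "./"))     # member/file
print(py38.removesuffix("archive", ".gz"))          # archive (returned unchanged)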
@@ -1,49 +0,0 @@
__all__ = [
    "ZoneInfo",
    "reset_tzpath",
    "available_timezones",
    "TZPATH",
    "ZoneInfoNotFoundError",
    "InvalidTZPathWarning",
]

import sys

from . import _tzpath
from ._common import ZoneInfoNotFoundError
from ._version import __version__

try:
    from ._czoneinfo import ZoneInfo
except ImportError:  # pragma: nocover
    from ._zoneinfo import ZoneInfo

reset_tzpath = _tzpath.reset_tzpath
available_timezones = _tzpath.available_timezones
InvalidTZPathWarning = _tzpath.InvalidTZPathWarning

if sys.version_info < (3, 7):
    # Module-level __getattr__ was added in Python 3.7, so instead of lazily
    # populating TZPATH on every access, we will register a callback with
    # reset_tzpath to update the top-level tuple.
    TZPATH = _tzpath.TZPATH

    def _tzpath_callback(new_tzpath):
        global TZPATH
        TZPATH = new_tzpath

    _tzpath.TZPATH_CALLBACKS.append(_tzpath_callback)
    del _tzpath_callback

else:

    def __getattr__(name):
        if name == "TZPATH":
            return _tzpath.TZPATH
        else:
            raise AttributeError(
                f"module {__name__!r} has no attribute {name!r}"
            )

    def __dir__():
        return sorted(list(globals()) + ["TZPATH"])
@@ -1,45 +0,0 @@
import os
import typing
from datetime import datetime, tzinfo
from typing import (
    Any,
    Iterable,
    Optional,
    Protocol,
    Sequence,
    Set,
    Type,
    Union,
)

_T = typing.TypeVar("_T", bound="ZoneInfo")

class _IOBytes(Protocol):
    def read(self, __size: int) -> bytes: ...
    def seek(self, __size: int, __whence: int = ...) -> Any: ...

class ZoneInfo(tzinfo):
    @property
    def key(self) -> str: ...
    def __init__(self, key: str) -> None: ...
    @classmethod
    def no_cache(cls: Type[_T], key: str) -> _T: ...
    @classmethod
    def from_file(
        cls: Type[_T], __fobj: _IOBytes, key: Optional[str] = ...
    ) -> _T: ...
    @classmethod
    def clear_cache(cls, *, only_keys: Iterable[str] = ...) -> None: ...

# Note: Both here and in clear_cache, the types allow the use of `str` where
# a sequence of strings is required. This should be remedied if a solution
# to this typing bug is found: https://github.com/python/typing/issues/256
def reset_tzpath(
    to: Optional[Sequence[Union[os.PathLike, str]]] = ...
) -> None: ...
def available_timezones() -> Set[str]: ...

TZPATH: Sequence[str]

class ZoneInfoNotFoundError(KeyError): ...
class InvalidTZPathWarning(RuntimeWarning): ...
@@ -1,171 +0,0 @@
import struct


def load_tzdata(key):
    try:
        import importlib.resources as importlib_resources
    except ImportError:
        import importlib_resources

    components = key.split("/")
    package_name = ".".join(["tzdata.zoneinfo"] + components[:-1])
    resource_name = components[-1]

    try:
        return importlib_resources.open_binary(package_name, resource_name)
    except (ImportError, FileNotFoundError, UnicodeEncodeError):
        # There are three types of exception that can be raised that all amount
        # to "we cannot find this key":
        #
        # ImportError: If package_name doesn't exist (e.g. if tzdata is not
        #   installed, or if there's an error in the folder name like
        #   Amrica/New_York)
        # FileNotFoundError: If resource_name doesn't exist in the package
        #   (e.g. Europe/Krasnoy)
        # UnicodeEncodeError: If package_name or resource_name are not UTF-8,
        #   such as keys containing a surrogate character.
        raise ZoneInfoNotFoundError(f"No time zone found with key {key}")


def load_data(fobj):
    header = _TZifHeader.from_file(fobj)

    if header.version == 1:
        time_size = 4
        time_type = "l"
    else:
        # Version 2+ has 64-bit integer transition times
        time_size = 8
        time_type = "q"

        # Version 2+ also starts with a Version 1 header and data, which
        # we need to skip now
        skip_bytes = (
            header.timecnt * 5  # Transition times and types
            + header.typecnt * 6  # Local time type records
            + header.charcnt  # Time zone designations
            + header.leapcnt * 8  # Leap second records
            + header.isstdcnt  # Standard/wall indicators
            + header.isutcnt  # UT/local indicators
        )

        fobj.seek(skip_bytes, 1)

        # Now we need to read the second header, which is not the same
        # as the first
        header = _TZifHeader.from_file(fobj)

    typecnt = header.typecnt
    timecnt = header.timecnt
    charcnt = header.charcnt

    # The data portion starts with timecnt transitions and indices
    if timecnt:
        trans_list_utc = struct.unpack(
            f">{timecnt}{time_type}", fobj.read(timecnt * time_size)
        )
        trans_idx = struct.unpack(f">{timecnt}B", fobj.read(timecnt))
    else:
        trans_list_utc = ()
        trans_idx = ()

    # Read the ttinfo struct, (utoff, isdst, abbrind)
    if typecnt:
        utcoff, isdst, abbrind = zip(
            *(struct.unpack(">lbb", fobj.read(6)) for i in range(typecnt))
        )
    else:
        utcoff = ()
        isdst = ()
        abbrind = ()

    # Now read the abbreviations. They are null-terminated strings, indexed
    # not by position in the array but by position in the unsplit
    # abbreviation string. I suppose this makes more sense in C, which uses
    # null to terminate the strings, but it's inconvenient here...
    abbr_vals = {}
    abbr_chars = fobj.read(charcnt)

    def get_abbr(idx):
        # Gets a string starting at idx and running until the next \x00
        #
        # We cannot pre-populate abbr_vals by splitting on \x00 because there
        # are some zones that use subsets of longer abbreviations, like so:
        #
        #  LMT\x00AHST\x00HDT\x00
        #
        # Where the idx to abbr mapping should be:
        #
        # {0: "LMT", 4: "AHST", 5: "HST", 9: "HDT"}
        if idx not in abbr_vals:
            span_end = abbr_chars.find(b"\x00", idx)
            abbr_vals[idx] = abbr_chars[idx:span_end].decode()

        return abbr_vals[idx]

    abbr = tuple(get_abbr(idx) for idx in abbrind)

    # The remainder of the file consists of leap seconds (currently unused) and
    # the standard/wall and ut/local indicators, which are metadata we don't need.
    # In version 2 files, we need to skip the unnecessary data to get at the TZ string:
    if header.version >= 2:
        # Each leap second record has size (time_size + 4)
        skip_bytes = header.isutcnt + header.isstdcnt + header.leapcnt * 12
        fobj.seek(skip_bytes, 1)

        c = fobj.read(1)  # Should be \n
        assert c == b"\n", c

        tz_bytes = b""
        while True:
            c = fobj.read(1)
            if c == b"\n":
                break
            tz_bytes += c

        tz_str = tz_bytes
    else:
        tz_str = None

    return trans_idx, trans_list_utc, utcoff, isdst, abbr, tz_str


class _TZifHeader:
    __slots__ = [
        "version",
        "isutcnt",
        "isstdcnt",
        "leapcnt",
        "timecnt",
        "typecnt",
        "charcnt",
    ]

    def __init__(self, *args):
        assert len(self.__slots__) == len(args)
        for attr, val in zip(self.__slots__, args):
            setattr(self, attr, val)

    @classmethod
    def from_file(cls, stream):
        # The header starts with a 4-byte "magic" value
        if stream.read(4) != b"TZif":
            raise ValueError("Invalid TZif file: magic not found")

        _version = stream.read(1)
        if _version == b"\x00":
            version = 1
        else:
            version = int(_version)
        stream.read(15)

        args = (version,)

        # Slots are defined in the order that the bytes are arranged
        args = args + struct.unpack(">6l", stream.read(24))

        return cls(*args)


class ZoneInfoNotFoundError(KeyError):
    """Exception raised when a ZoneInfo key is not found."""
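The abbreviation-index behavior that get_abbr implements can be demonstrated standalone. This is a sketch using the example from the comment above, not part of the vendored module:

abbr_chars = b"LMT\x00AHST\x00HDT\x00"

def get_abbr(idx, _cache={}):
    # Scan from idx to the next NUL byte, so overlapping suffixes such as
    # "HST" (a tail of "AHST") resolve correctly.
    if idx not in _cache:
        _cache[idx] = abbr_chars[idx:abbr_chars.find(b"\x00", idx)].decode()
    return _cache[idx]

print({i: get_abbr(i) for i in (0, 4, 5, 9)})
# {0: 'LMT', 4: 'AHST', 5: 'HST', 9: 'HDT'}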
@@ -1,207 +0,0 @@
import os
import sys

PY36 = sys.version_info < (3, 7)


def reset_tzpath(to=None):
    global TZPATH

    tzpaths = to
    if tzpaths is not None:
        if isinstance(tzpaths, (str, bytes)):
            raise TypeError(
                f"tzpaths must be a list or tuple, "
                + f"not {type(tzpaths)}: {tzpaths!r}"
            )

        if not all(map(os.path.isabs, tzpaths)):
            raise ValueError(_get_invalid_paths_message(tzpaths))
        base_tzpath = tzpaths
    else:
        env_var = os.environ.get("PYTHONTZPATH", None)
        if env_var is not None:
            base_tzpath = _parse_python_tzpath(env_var)
        elif sys.platform != "win32":
            base_tzpath = [
                "/usr/share/zoneinfo",
                "/usr/lib/zoneinfo",
                "/usr/share/lib/zoneinfo",
                "/etc/zoneinfo",
            ]

            base_tzpath.sort(key=lambda x: not os.path.exists(x))
        else:
            base_tzpath = ()

    TZPATH = tuple(base_tzpath)

    if TZPATH_CALLBACKS:
        for callback in TZPATH_CALLBACKS:
            callback(TZPATH)


def _parse_python_tzpath(env_var):
    if not env_var:
        return ()

    raw_tzpath = env_var.split(os.pathsep)
    new_tzpath = tuple(filter(os.path.isabs, raw_tzpath))

    # If anything has been filtered out, we will warn about it
    if len(new_tzpath) != len(raw_tzpath):
        import warnings

        msg = _get_invalid_paths_message(raw_tzpath)

        warnings.warn(
            "Invalid paths specified in PYTHONTZPATH environment variable."
            + msg,
            InvalidTZPathWarning,
        )

    return new_tzpath


def _get_invalid_paths_message(tzpaths):
    invalid_paths = (path for path in tzpaths if not os.path.isabs(path))

    prefix = "\n    "
    indented_str = prefix + prefix.join(invalid_paths)

    return (
        "Paths should be absolute but found the following relative paths:"
        + indented_str
    )


if sys.version_info < (3, 8):

    def _isfile(path):
        # bpo-33721: In Python 3.8 non-UTF8 paths return False rather than
        # raising an error. See https://bugs.python.org/issue33721
        try:
            return os.path.isfile(path)
        except ValueError:
            return False


else:
    _isfile = os.path.isfile


def find_tzfile(key):
    """Retrieve the path to a TZif file from a key."""
    _validate_tzfile_path(key)
    for search_path in TZPATH:
        filepath = os.path.join(search_path, key)
        if _isfile(filepath):
            return filepath

    return None


_TEST_PATH = os.path.normpath(os.path.join("_", "_"))[:-1]


def _validate_tzfile_path(path, _base=_TEST_PATH):
    if os.path.isabs(path):
        raise ValueError(
            f"ZoneInfo keys may not be absolute paths, got: {path}"
        )

    # We only care about the kinds of path normalizations that would change the
    # length of the key - e.g. a/../b -> a/b, or a/b/ -> a/b. On Windows,
    # normpath will also change from a/b to a\b, but that would still preserve
    # the length.
    new_path = os.path.normpath(path)
    if len(new_path) != len(path):
        raise ValueError(
            f"ZoneInfo keys must be normalized relative paths, got: {path}"
        )

    resolved = os.path.normpath(os.path.join(_base, new_path))
    if not resolved.startswith(_base):
        raise ValueError(
            f"ZoneInfo keys must refer to subdirectories of TZPATH, got: {path}"
        )


del _TEST_PATH


def available_timezones():
    """Returns a set containing all available time zones.

    .. caution::

        This may attempt to open a large number of files, since the best way
        to determine whether a given file on the time zone search path is a
        valid time zone is to open it and check for the "magic string" at the
        beginning.
    """
    try:
        from importlib import resources
    except ImportError:
        import importlib_resources as resources

    valid_zones = set()

    # Start with loading from the tzdata package if it exists: this has a
    # pre-assembled list of zones that only requires opening one file.
    try:
        with resources.open_text("tzdata", "zones") as f:
            for zone in f:
                zone = zone.strip()
                if zone:
                    valid_zones.add(zone)
    except (ImportError, FileNotFoundError):
        pass

    def valid_key(fpath):
        try:
            with open(fpath, "rb") as f:
                return f.read(4) == b"TZif"
        except Exception:  # pragma: nocover
            return False

    for tz_root in TZPATH:
        if not os.path.exists(tz_root):
            continue

        for root, dirnames, files in os.walk(tz_root):
            if root == tz_root:
                # right/ and posix/ are special directories and shouldn't be
                # included in the output of available zones
                if "right" in dirnames:
                    dirnames.remove("right")
                if "posix" in dirnames:
                    dirnames.remove("posix")

            for file in files:
                fpath = os.path.join(root, file)

                key = os.path.relpath(fpath, start=tz_root)
                if os.sep != "/":  # pragma: nocover
                    key = key.replace(os.sep, "/")

                if not key or key in valid_zones:
                    continue

                if valid_key(fpath):
                    valid_zones.add(key)

    if "posixrules" in valid_zones:
        # posixrules is a special symlink-only time zone where it exists, it
        # should not be included in the output
        valid_zones.remove("posixrules")

    return valid_zones


class InvalidTZPathWarning(RuntimeWarning):
    """Warning raised if an invalid path is specified in PYTHONTZPATH."""


TZPATH = ()
TZPATH_CALLBACKS = []
reset_tzpath()
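A short sketch of the search-path behavior implemented above. The paths and the import line are illustrative; the backport's package name may differ in this vendored tree:

import os

# PYTHONTZPATH entries must be absolute; relative entries are filtered out
# with an InvalidTZPathWarning, mirroring _parse_python_tzpath above.
os.environ["PYTHONTZPATH"] = os.pathsep.join(
    ["/usr/share/zoneinfo", "relative/zoneinfo"]  # the second entry is dropped
)
# With the backport importable (hypothetical path):
#   from backports.zoneinfo import _tzpath
#   _tzpath.reset_tzpath()
#   print(_tzpath.TZPATH)  # ('/usr/share/zoneinfo',)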
@@ -1 +0,0 @@
__version__ = "0.2.1"
@@ -1,754 +0,0 @@
import bisect
import calendar
import collections
import functools
import re
import weakref
from datetime import datetime, timedelta, tzinfo

from . import _common, _tzpath

EPOCH = datetime(1970, 1, 1)
EPOCHORDINAL = datetime(1970, 1, 1).toordinal()


# It is relatively expensive to construct new timedelta objects, and in most
# cases we're looking at the same deltas, like integer numbers of hours, etc.
# To improve speed and memory use, we'll keep a dictionary with references
# to the ones we've already used so far.
#
# Loading every time zone in the 2020a version of the time zone database
# requires 447 timedeltas, which requires approximately the amount of space
# that ZoneInfo("America/New_York") with 236 transitions takes up, so we will
# set the cache size to 512 so that in the common case we always get cache
# hits, but specifically crafted ZoneInfo objects don't leak arbitrary amounts
# of memory.
@functools.lru_cache(maxsize=512)
def _load_timedelta(seconds):
    return timedelta(seconds=seconds)


class ZoneInfo(tzinfo):
    _strong_cache_size = 8
    _strong_cache = collections.OrderedDict()
    _weak_cache = weakref.WeakValueDictionary()
    __module__ = "backports.zoneinfo"

    def __init_subclass__(cls):
        cls._strong_cache = collections.OrderedDict()
        cls._weak_cache = weakref.WeakValueDictionary()

    def __new__(cls, key):
        instance = cls._weak_cache.get(key, None)
        if instance is None:
            instance = cls._weak_cache.setdefault(key, cls._new_instance(key))
            instance._from_cache = True

        # Update the "strong" cache
        cls._strong_cache[key] = cls._strong_cache.pop(key, instance)

        if len(cls._strong_cache) > cls._strong_cache_size:
            cls._strong_cache.popitem(last=False)

        return instance

    @classmethod
    def no_cache(cls, key):
        obj = cls._new_instance(key)
        obj._from_cache = False

        return obj

    @classmethod
    def _new_instance(cls, key):
        obj = super().__new__(cls)
        obj._key = key
        obj._file_path = obj._find_tzfile(key)

        if obj._file_path is not None:
            file_obj = open(obj._file_path, "rb")
        else:
            file_obj = _common.load_tzdata(key)

        with file_obj as f:
            obj._load_file(f)

        return obj

    @classmethod
    def from_file(cls, fobj, key=None):
        obj = super().__new__(cls)
        obj._key = key
        obj._file_path = None
        obj._load_file(fobj)
        obj._file_repr = repr(fobj)

        # Disable pickling for objects created from files
        obj.__reduce__ = obj._file_reduce

        return obj

    @classmethod
    def clear_cache(cls, *, only_keys=None):
        if only_keys is not None:
            for key in only_keys:
                cls._weak_cache.pop(key, None)
                cls._strong_cache.pop(key, None)

        else:
            cls._weak_cache.clear()
            cls._strong_cache.clear()

    @property
    def key(self):
        return self._key

    def utcoffset(self, dt):
        return self._find_trans(dt).utcoff

    def dst(self, dt):
        return self._find_trans(dt).dstoff

    def tzname(self, dt):
        return self._find_trans(dt).tzname

    def fromutc(self, dt):
        """Convert from datetime in UTC to datetime in local time"""

        if not isinstance(dt, datetime):
            raise TypeError("fromutc() requires a datetime argument")
        if dt.tzinfo is not self:
            raise ValueError("dt.tzinfo is not self")

        timestamp = self._get_local_timestamp(dt)
        num_trans = len(self._trans_utc)

        if num_trans >= 1 and timestamp < self._trans_utc[0]:
            tti = self._tti_before
            fold = 0
        elif (
            num_trans == 0 or timestamp > self._trans_utc[-1]
        ) and not isinstance(self._tz_after, _ttinfo):
            tti, fold = self._tz_after.get_trans_info_fromutc(
                timestamp, dt.year
            )
        elif num_trans == 0:
            tti = self._tz_after
            fold = 0
        else:
            idx = bisect.bisect_right(self._trans_utc, timestamp)

            if num_trans > 1 and timestamp >= self._trans_utc[1]:
                tti_prev, tti = self._ttinfos[idx - 2 : idx]
            elif timestamp > self._trans_utc[-1]:
                tti_prev = self._ttinfos[-1]
                tti = self._tz_after
            else:
                tti_prev = self._tti_before
                tti = self._ttinfos[0]

            # Detect fold
            shift = tti_prev.utcoff - tti.utcoff
            fold = shift.total_seconds() > timestamp - self._trans_utc[idx - 1]
        dt += tti.utcoff
        if fold:
            return dt.replace(fold=1)
        else:
            return dt

    def _find_trans(self, dt):
        if dt is None:
            if self._fixed_offset:
                return self._tz_after
            else:
                return _NO_TTINFO

        ts = self._get_local_timestamp(dt)

        lt = self._trans_local[dt.fold]

        num_trans = len(lt)

        if num_trans and ts < lt[0]:
            return self._tti_before
        elif not num_trans or ts > lt[-1]:
            if isinstance(self._tz_after, _TZStr):
                return self._tz_after.get_trans_info(ts, dt.year, dt.fold)
            else:
                return self._tz_after
        else:
            # idx is the transition that occurs after this timestamp, so we
            # subtract off 1 to get the current ttinfo
            idx = bisect.bisect_right(lt, ts) - 1
            assert idx >= 0
            return self._ttinfos[idx]

    def _get_local_timestamp(self, dt):
        return (
            (dt.toordinal() - EPOCHORDINAL) * 86400
            + dt.hour * 3600
            + dt.minute * 60
            + dt.second
        )

    def __str__(self):
        if self._key is not None:
            return f"{self._key}"
        else:
            return repr(self)

    def __repr__(self):
        if self._key is not None:
            return f"{self.__class__.__name__}(key={self._key!r})"
        else:
            return f"{self.__class__.__name__}.from_file({self._file_repr})"

    def __reduce__(self):
        return (self.__class__._unpickle, (self._key, self._from_cache))

    def _file_reduce(self):
        import pickle

        raise pickle.PicklingError(
            "Cannot pickle a ZoneInfo file created from a file stream."
        )

    @classmethod
    def _unpickle(cls, key, from_cache):
        if from_cache:
            return cls(key)
        else:
            return cls.no_cache(key)

    def _find_tzfile(self, key):
        return _tzpath.find_tzfile(key)

    def _load_file(self, fobj):
        # Retrieve all the data as it exists in the zoneinfo file
        trans_idx, trans_utc, utcoff, isdst, abbr, tz_str = _common.load_data(
            fobj
        )

        # Infer the DST offsets (needed for .dst()) from the data
        dstoff = self._utcoff_to_dstoff(trans_idx, utcoff, isdst)

        # Convert all the transition times (UTC) into "seconds since 1970-01-01 local time"
        trans_local = self._ts_to_local(trans_idx, trans_utc, utcoff)

        # Construct `_ttinfo` objects for each transition in the file
        _ttinfo_list = [
            _ttinfo(
                _load_timedelta(utcoffset), _load_timedelta(dstoffset), tzname
            )
            for utcoffset, dstoffset, tzname in zip(utcoff, dstoff, abbr)
        ]

        self._trans_utc = trans_utc
        self._trans_local = trans_local
        self._ttinfos = [_ttinfo_list[idx] for idx in trans_idx]

        # Find the first non-DST transition
        for i in range(len(isdst)):
            if not isdst[i]:
                self._tti_before = _ttinfo_list[i]
                break
        else:
            if self._ttinfos:
                self._tti_before = self._ttinfos[0]
            else:
                self._tti_before = None

        # Set the "fallback" time zone
        if tz_str is not None and tz_str != b"":
            self._tz_after = _parse_tz_str(tz_str.decode())
        else:
            if not self._ttinfos and not _ttinfo_list:
                raise ValueError("No time zone information found.")

            if self._ttinfos:
                self._tz_after = self._ttinfos[-1]
            else:
                self._tz_after = _ttinfo_list[-1]

        # Determine if this is a "fixed offset" zone, meaning that the output
        # of the utcoffset, dst and tzname functions does not depend on the
        # specific datetime passed.
        #
        # We make three simplifying assumptions here:
        #
        # 1. If _tz_after is not a _ttinfo, it has transitions that might
        #    actually occur (it is possible to construct TZ strings that
        #    specify STD and DST but no transitions ever occur, such as
        #    AAA0BBB,0/0,J365/25).
        # 2. If _ttinfo_list contains more than one _ttinfo object, the objects
        #    represent different offsets.
        # 3. _ttinfo_list contains no unused _ttinfos (in which case an
        #    otherwise fixed-offset zone with extra _ttinfos defined may
        #    appear to *not* be a fixed offset zone).
        #
        # Violations to these assumptions would be fairly exotic, and exotic
        # zones should almost certainly not be used with datetime.time (the
        # only thing that would be affected by this).
        if len(_ttinfo_list) > 1 or not isinstance(self._tz_after, _ttinfo):
            self._fixed_offset = False
        elif not _ttinfo_list:
            self._fixed_offset = True
        else:
            self._fixed_offset = _ttinfo_list[0] == self._tz_after

    @staticmethod
    def _utcoff_to_dstoff(trans_idx, utcoffsets, isdsts):
        # Now we must transform our ttis and abbrs into `_ttinfo` objects,
        # but there is an issue: .dst() must return a timedelta with the
        # difference between utcoffset() and the "standard" offset, but
        # the "base offset" and "DST offset" are not encoded in the file;
        # we can infer what they are from the isdst flag, but it is not
        # sufficient to just look at the last standard offset, because
        # occasionally countries will shift both DST offset and base offset.

        typecnt = len(isdsts)
        dstoffs = [0] * typecnt  # Provisionally assign all to 0.
        dst_cnt = sum(isdsts)
        dst_found = 0

        for i in range(1, len(trans_idx)):
            if dst_cnt == dst_found:
                break

            idx = trans_idx[i]

            dst = isdsts[idx]

            # We're only going to look at daylight saving time
            if not dst:
                continue

            # Skip any offsets that have already been assigned
            if dstoffs[idx] != 0:
                continue

            dstoff = 0
            utcoff = utcoffsets[idx]

            comp_idx = trans_idx[i - 1]

            if not isdsts[comp_idx]:
                dstoff = utcoff - utcoffsets[comp_idx]

            if not dstoff and idx < (typecnt - 1):
                comp_idx = trans_idx[i + 1]

                # If the following transition is also DST and we couldn't
                # find the DST offset by this point, we're going to have to
                # skip it and hope this transition gets assigned later
                if isdsts[comp_idx]:
                    continue

                dstoff = utcoff - utcoffsets[comp_idx]

            if dstoff:
                dst_found += 1
                dstoffs[idx] = dstoff
        else:
            # If we didn't find a valid value for a given index, we'll end up
            # with dstoff = 0 for something where `isdst=1`. This is obviously
            # wrong - one hour will be a much better guess than 0
            for idx in range(typecnt):
                if not dstoffs[idx] and isdsts[idx]:
                    dstoffs[idx] = 3600

        return dstoffs

    @staticmethod
    def _ts_to_local(trans_idx, trans_list_utc, utcoffsets):
        """Generate number of seconds since 1970 *in the local time*.

        This is necessary to easily find the transition times in local time"""
        if not trans_list_utc:
            return [[], []]

        # Start with the timestamps and modify in-place
        trans_list_wall = [list(trans_list_utc), list(trans_list_utc)]

        if len(utcoffsets) > 1:
            offset_0 = utcoffsets[0]
            offset_1 = utcoffsets[trans_idx[0]]
            if offset_1 > offset_0:
                offset_1, offset_0 = offset_0, offset_1
        else:
            offset_0 = offset_1 = utcoffsets[0]

        trans_list_wall[0][0] += offset_0
        trans_list_wall[1][0] += offset_1

        for i in range(1, len(trans_idx)):
            offset_0 = utcoffsets[trans_idx[i - 1]]
            offset_1 = utcoffsets[trans_idx[i]]

            if offset_1 > offset_0:
                offset_1, offset_0 = offset_0, offset_1

            trans_list_wall[0][i] += offset_0
            trans_list_wall[1][i] += offset_1

        return trans_list_wall


class _ttinfo:
    __slots__ = ["utcoff", "dstoff", "tzname"]

    def __init__(self, utcoff, dstoff, tzname):
        self.utcoff = utcoff
        self.dstoff = dstoff
        self.tzname = tzname

    def __eq__(self, other):
        return (
            self.utcoff == other.utcoff
            and self.dstoff == other.dstoff
            and self.tzname == other.tzname
        )

    def __repr__(self):  # pragma: nocover
        return (
            f"{self.__class__.__name__}"
            + f"({self.utcoff}, {self.dstoff}, {self.tzname})"
        )


_NO_TTINFO = _ttinfo(None, None, None)


class _TZStr:
    __slots__ = (
        "std",
        "dst",
        "start",
        "end",
        "get_trans_info",
        "get_trans_info_fromutc",
        "dst_diff",
    )

    def __init__(
        self, std_abbr, std_offset, dst_abbr, dst_offset, start=None, end=None
    ):
        self.dst_diff = dst_offset - std_offset
        std_offset = _load_timedelta(std_offset)
        self.std = _ttinfo(
            utcoff=std_offset, dstoff=_load_timedelta(0), tzname=std_abbr
        )

        self.start = start
        self.end = end

        dst_offset = _load_timedelta(dst_offset)
        delta = _load_timedelta(self.dst_diff)
        self.dst = _ttinfo(utcoff=dst_offset, dstoff=delta, tzname=dst_abbr)

        # These are assertions because the constructor should only be called
        # by functions that would fail before passing start or end
        assert start is not None, "No transition start specified"
        assert end is not None, "No transition end specified"

        self.get_trans_info = self._get_trans_info
        self.get_trans_info_fromutc = self._get_trans_info_fromutc

    def transitions(self, year):
        start = self.start.year_to_epoch(year)
        end = self.end.year_to_epoch(year)
        return start, end

    def _get_trans_info(self, ts, year, fold):
        """Get the information about the current transition - tti"""
        start, end = self.transitions(year)

        # With fold = 0, the period (denominated in local time) with the
        # smaller offset starts at the end of the gap and ends at the end of
        # the fold; with fold = 1, it runs from the start of the gap to the
        # beginning of the fold.
        #
        # So in order to determine the DST boundaries we need to know both
        # the fold and whether DST is positive or negative (rare), and it
        # turns out that this boils down to fold XOR is_positive.
        if fold == (self.dst_diff >= 0):
            end -= self.dst_diff
        else:
            start += self.dst_diff

        if start < end:
            isdst = start <= ts < end
        else:
            isdst = not (end <= ts < start)

        return self.dst if isdst else self.std

    def _get_trans_info_fromutc(self, ts, year):
        start, end = self.transitions(year)
        start -= self.std.utcoff.total_seconds()
        end -= self.dst.utcoff.total_seconds()

        if start < end:
            isdst = start <= ts < end
        else:
            isdst = not (end <= ts < start)

        # For positive DST, the ambiguous period is one dst_diff after the end
        # of DST; for negative DST, the ambiguous period is one dst_diff before
        # the start of DST.
        if self.dst_diff > 0:
            ambig_start = end
            ambig_end = end + self.dst_diff
        else:
            ambig_start = start
            ambig_end = start - self.dst_diff

        fold = ambig_start <= ts < ambig_end

        return (self.dst if isdst else self.std, fold)


def _post_epoch_days_before_year(year):
    """Get the number of days between 1970-01-01 and YEAR-01-01"""
    y = year - 1
    return y * 365 + y // 4 - y // 100 + y // 400 - EPOCHORDINAL


class _DayOffset:
    __slots__ = ["d", "julian", "hour", "minute", "second"]

    def __init__(self, d, julian, hour=2, minute=0, second=0):
        if not (0 + julian) <= d <= 365:
            min_day = 0 + julian
            raise ValueError(f"d must be in [{min_day}, 365], not: {d}")

        self.d = d
        self.julian = julian
        self.hour = hour
        self.minute = minute
        self.second = second

    def year_to_epoch(self, year):
        days_before_year = _post_epoch_days_before_year(year)

        d = self.d
        if self.julian and d >= 59 and calendar.isleap(year):
            d += 1

        epoch = (days_before_year + d) * 86400
        epoch += self.hour * 3600 + self.minute * 60 + self.second

        return epoch


class _CalendarOffset:
    __slots__ = ["m", "w", "d", "hour", "minute", "second"]

    _DAYS_BEFORE_MONTH = (
        -1,
        0,
        31,
        59,
        90,
        120,
        151,
        181,
        212,
        243,
        273,
        304,
        334,
    )

    def __init__(self, m, w, d, hour=2, minute=0, second=0):
        if not 0 < m <= 12:
            raise ValueError("m must be in (0, 12]")

        if not 0 < w <= 5:
            raise ValueError("w must be in (0, 5]")

        if not 0 <= d <= 6:
            raise ValueError("d must be in [0, 6]")

        self.m = m
        self.w = w
        self.d = d
        self.hour = hour
        self.minute = minute
        self.second = second

    @classmethod
    def _ymd2ord(cls, year, month, day):
        return (
            _post_epoch_days_before_year(year)
            + cls._DAYS_BEFORE_MONTH[month]
            + (month > 2 and calendar.isleap(year))
            + day
        )

    # TODO: These are not actually epoch dates as they are expressed in local time
    def year_to_epoch(self, year):
        """Calculates the datetime of the occurrence from the year"""
        # We know year and month, we need to convert w, d into day of month
        #
        # Week 1 is the first week in which day `d` (where 0 = Sunday) appears.
        # Week 5 represents the last occurrence of day `d`, so we need to know
        # the range of the month.
        first_day, days_in_month = calendar.monthrange(year, self.m)

        # This equation seems magical, so I'll break it down:
        # 1. calendar says 0 = Monday, POSIX says 0 = Sunday
        #    so we need first_day + 1 to get 1 = Monday -> 7 = Sunday,
        #    which is still equivalent because this math is mod 7
        # 2. Get first day - desired day mod 7: -1 % 7 = 6, so we don't need
        #    to do anything to adjust negative numbers.
        # 3. Add 1 because month days are a 1-based index.
        month_day = (self.d - (first_day + 1)) % 7 + 1

        # Now use a 0-based index version of `w` to calculate the w-th
        # occurrence of `d`
        month_day += (self.w - 1) * 7

        # month_day will only be > days_in_month if w was 5, and `w` means
        # "last occurrence of `d`", so now we just check if we over-shot the
        # end of the month and if so knock off 1 week.
        if month_day > days_in_month:
            month_day -= 7

        ordinal = self._ymd2ord(year, self.m, month_day)
        epoch = ordinal * 86400
        epoch += self.hour * 3600 + self.minute * 60 + self.second
        return epoch


def _parse_tz_str(tz_str):
    # The tz string has the format:
    #
    # std[offset[dst[offset],start[/time],end[/time]]]
    #
    # std and dst must be 3 or more characters long and must not contain
    # a leading colon, embedded digits, commas, nor a plus or minus signs;
    # The spaces between "std" and "offset" are only for display and are
    # not actually present in the string.
    #
    # The format of the offset is ``[+|-]hh[:mm[:ss]]``

    offset_str, *start_end_str = tz_str.split(",", 1)

    # fmt: off
    parser_re = re.compile(
        r"(?P<std>[^<0-9:.+-]+|<[a-zA-Z0-9+\-]+>)" +
        r"((?P<stdoff>[+-]?\d{1,2}(:\d{2}(:\d{2})?)?)" +
            r"((?P<dst>[^0-9:.+-]+|<[a-zA-Z0-9+\-]+>)" +
                r"((?P<dstoff>[+-]?\d{1,2}(:\d{2}(:\d{2})?)?))?" +
            r")?" +  # dst
        r")?$"  # stdoff
    )
    # fmt: on

    m = parser_re.match(offset_str)

    if m is None:
        raise ValueError(f"{tz_str} is not a valid TZ string")

    std_abbr = m.group("std")
    dst_abbr = m.group("dst")
    dst_offset = None

    std_abbr = std_abbr.strip("<>")

    if dst_abbr:
        dst_abbr = dst_abbr.strip("<>")

    std_offset = m.group("stdoff")
    if std_offset:
        try:
            std_offset = _parse_tz_delta(std_offset)
        except ValueError as e:
            raise ValueError(f"Invalid STD offset in {tz_str}") from e
    else:
        std_offset = 0

    if dst_abbr is not None:
        dst_offset = m.group("dstoff")
        if dst_offset:
            try:
                dst_offset = _parse_tz_delta(dst_offset)
            except ValueError as e:
                raise ValueError(f"Invalid DST offset in {tz_str}") from e
        else:
            dst_offset = std_offset + 3600

        if not start_end_str:
            raise ValueError(f"Missing transition rules: {tz_str}")

        start_end_strs = start_end_str[0].split(",", 1)
        try:
            start, end = (_parse_dst_start_end(x) for x in start_end_strs)
        except ValueError as e:
            raise ValueError(f"Invalid TZ string: {tz_str}") from e

        return _TZStr(std_abbr, std_offset, dst_abbr, dst_offset, start, end)
    elif start_end_str:
        raise ValueError(f"Transition rule present without DST: {tz_str}")
    else:
        # This is a static ttinfo, don't return _TZStr
        return _ttinfo(
            _load_timedelta(std_offset), _load_timedelta(0), std_abbr
        )


def _parse_dst_start_end(dststr):
    date, *time = dststr.split("/")
    if date[0] == "M":
        n_is_julian = False
        m = re.match(r"M(\d{1,2})\.(\d).(\d)$", date)
        if m is None:
            raise ValueError(f"Invalid dst start/end date: {dststr}")
        date_offset = tuple(map(int, m.groups()))
        offset = _CalendarOffset(*date_offset)
    else:
        if date[0] == "J":
            n_is_julian = True
            date = date[1:]
        else:
            n_is_julian = False

        doy = int(date)
        offset = _DayOffset(doy, n_is_julian)

    if time:
        time_components = list(map(int, time[0].split(":")))
        n_components = len(time_components)
        if n_components < 3:
            time_components.extend([0] * (3 - n_components))
        offset.hour, offset.minute, offset.second = time_components

    return offset


def _parse_tz_delta(tz_delta):
    match = re.match(
        r"(?P<sign>[+-])?(?P<h>\d{1,2})(:(?P<m>\d{2})(:(?P<s>\d{2}))?)?",
        tz_delta,
    )
    # Anything passed to this function should already have hit an equivalent
    # regular expression to find the section to parse.
    assert match is not None, tz_delta

    h, m, s = (
        int(v) if v is not None else 0
        for v in map(match.group, ("h", "m", "s"))
    )

    total = h * 3600 + m * 60 + s

    if not -86400 < total < 86400:
        raise ValueError(
            "Offset must be strictly between -24h and +24h:" + tz_delta
        )

    # Yes, +5 maps to an offset of -5h
    if match.group("sign") != "-":
        total *= -1

    return total
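For readers tracing the TZ-string and fold logic above, the observable behavior can be checked against the standard-library zoneinfo on Python 3.9+, which this backport mirrors. A sketch (requires system tzdata, or the tzdata package on Windows):

from datetime import datetime
from zoneinfo import ZoneInfo

tz = ZoneInfo("America/New_York")
winter = datetime(2024, 1, 15, 12, tzinfo=tz)
summer = datetime(2024, 7, 15, 12, tzinfo=tz)
print(winter.tzname(), winter.dst())  # EST 0:00:00
print(summer.tzname(), summer.dst())  # EDT 1:00:00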
@@ -11,9 +11,9 @@ from bleach.sanitizer import (


 # yyyymmdd
-__releasedate__ = "20230123"
+__releasedate__ = "20241029"
 # x.y.z or x.y.z.dev0 -- semver
-__version__ = "6.0.0"
+__version__ = "6.2.0"


 __all__ = ["clean", "linkify"]

@@ -1,7 +1,7 @@
 from __future__ import absolute_import, division, unicode_literals

-from six import text_type
-from six.moves import http_client, urllib
+from bleach.six_shim import text_type
+from bleach.six_shim import http_client, urllib

 import codecs
 import re

@@ -1,6 +1,6 @@
 from __future__ import absolute_import, division, unicode_literals

-from six import unichr as chr
+from bleach.six_shim import unichr as chr

 from collections import deque, OrderedDict
 from sys import version_info

@@ -1,5 +1,5 @@
 from __future__ import absolute_import, division, unicode_literals
-from six import text_type
+from bleach.six_shim import text_type

 from bisect import bisect_left

@@ -7,7 +7,7 @@ try:
 except ImportError:
     from collections import Mapping

-from six import text_type, PY3
+from bleach.six_shim import text_type, PY3

 if PY3:
     import xml.etree.ElementTree as default_etree

@@ -1,6 +1,6 @@
 from __future__ import absolute_import, division, unicode_literals

-from six import text_type
+from bleach.six_shim import text_type

 from . import base
 from ..constants import namespaces, voidElements

@@ -12,7 +12,7 @@ import re
 import warnings
 from xml.sax.saxutils import escape, unescape

-from six.moves import urllib_parse as urlparse
+from bleach.six_shim import urllib_parse as urlparse

 from . import base
 from ..constants import namespaces, prefixes

@@ -1,5 +1,5 @@
 from __future__ import absolute_import, division, unicode_literals
-from six import with_metaclass, viewkeys
+from bleach.six_shim import viewkeys

 import types

@@ -423,7 +423,7 @@ def getPhases(debug):
         return type

     # pylint:disable=unused-argument
-    class Phase(with_metaclass(getMetaclass(debug, log))):
+    class Phase(metaclass=getMetaclass(debug, log)):
         """Base class for helper object that implements each phase of processing
         """
         __slots__ = ("parser", "tree", "__startTagCache", "__endTagCache")

@@ -1,5 +1,5 @@
 from __future__ import absolute_import, division, unicode_literals
-from six import text_type
+from bleach.six_shim import text_type

 import re

@@ -1,5 +1,5 @@
 from __future__ import absolute_import, division, unicode_literals
-from six import text_type
+from bleach.six_shim import text_type

 from ..constants import scopingElements, tableInsertModeElements, namespaces

@@ -1,7 +1,7 @@
 from __future__ import absolute_import, division, unicode_literals
 # pylint:disable=protected-access

-from six import text_type
+from bleach.six_shim import text_type

 import re

@@ -28,7 +28,7 @@ from . import etree as etree_builders
 from .. import _ihatexml

 import lxml.etree as etree
-from six import PY3, binary_type
+from bleach.six_shim import PY3, binary_type


 fullTree = True

@@ -3,7 +3,7 @@ from __future__ import absolute_import, division, unicode_literals
 from collections import OrderedDict
 import re

-from six import string_types
+from bleach.six_shim import string_types

 from . import base
 from .._utils import moduleFactoryFactory

@@ -1,5 +1,5 @@
 from __future__ import absolute_import, division, unicode_literals
-from six import text_type
+from bleach.six_shim import text_type

 from collections import OrderedDict

@@ -7,8 +7,12 @@ set -o pipefail
 BLEACH_VENDOR_DIR=${BLEACH_VENDOR_DIR:-"."}
 DEST=${DEST:-"."}

 # Install with no dependencies
 pip install --no-binary all --no-compile --no-deps -r "${BLEACH_VENDOR_DIR}/vendor.txt" --target "${DEST}"

+# Apply patches
+(cd "${DEST}" && patch -p2 < 01_html5lib_six.patch)
+
 # install Python 3.6.14 urllib.urlparse for #536
 curl --proto '=https' --tlsv1.2 -o "${DEST}/parse.py" https://raw.githubusercontent.com/python/cpython/v3.6.14/Lib/urllib/parse.py
 (cd "${DEST}" && sha256sum parse.py > parse.py.SHA256SUM)

@@ -395,10 +395,26 @@ class BleachHTMLTokenizer(HTMLTokenizer):
                 # followed by a series of characters. It's treated as a tag
                 # name that abruptly ends, but we should treat that like
                 # character data
-                yield {
-                    "type": TAG_TOKEN_TYPE_CHARACTERS,
-                    "data": "<" + self.currentToken["name"],
-                }
+                yield {"type": TAG_TOKEN_TYPE_CHARACTERS, "data": self.stream.get_tag()}
+
+            elif last_error_token["data"] in (
+                "duplicate-attribute",
+                "eof-in-attribute-name",
+                "eof-in-attribute-value-no-quotes",
+                "expected-end-of-tag-but-got-eof",
+            ):
+                # Handle the case where the text being parsed ends with <
+                # followed by characters and then space and then:
+                #
+                # * more characters
+                # * more characters repeated with a space between (e.g. "abc abc")
+                # * more characters and then a space and then an EOF (e.g. "abc def ")
+                #
+                # These cases are treated as a tag name followed by an
+                # attribute that abruptly ends, but we should treat that like
+                # character data instead.
+                yield {"type": TAG_TOKEN_TYPE_CHARACTERS, "data": self.stream.get_tag()}

             else:
                 yield last_error_token

@@ -45,8 +45,8 @@ def build_url_re(tlds=TLDS, protocols=html5lib_shim.allowed_protocols):
     r"""\(*  # Match any opening parentheses.
     \b(?<![@.])(?:(?:{0}):/{{0,3}}(?:(?:\w+:)?\w+@)?)?  # http://
     ([\w-]+\.)+(?:{1})(?:\:[0-9]+)?(?!\.\w)\b   # xx.yy.tld(:##)?
-    (?:[/?][^\s\{{\}}\|\\\^\[\]`<>"]*)?
-        # /path/zz (excluding "unsafe" chars from RFC 1738,
+    (?:[/?][^\s\{{\}}\|\\\^`<>"]*)?
+        # /path/zz (excluding "unsafe" chars from RFC 3986,
         # except for # and ~, which happen in practice)
     """.format(
         "|".join(sorted(protocols)), "|".join(sorted(tlds))
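The RFC 1738 to RFC 3986 change above stops excluding [ and ] from the path portion of matched URLs, so bracketed path segments stay inside the generated link. A quick sketch of the effect; treat the exact output as illustrative for bleach 6.2:

import bleach

# With the updated character class, the "[1]" segment no longer terminates
# the URL match early.
print(bleach.linkify("see http://example.com/a[1].html"))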
@@ -591,7 +591,7 @@ class LinkifyFilter(html5lib_shim.Filter):
                 in_a = False
                 token_buffer = []
             else:
-                token_buffer.append(token)
+                token_buffer.extend(list(self.extract_entities(token)))
                 continue

         if token["type"] in ["StartTag", "EmptyTag"]:
lib/bleach/six_shim.py (new file, 19 lines)
@@ -0,0 +1,19 @@
"""
Replacement module for what html5lib uses six for.
"""

import http.client
import operator
import urllib


PY3 = True
binary_type = bytes
string_types = (str,)
text_type = str
unichr = chr
viewkeys = operator.methodcaller("keys")

http_client = http.client
urllib = urllib
urllib_parse = urllib.parse
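The shim lets the vendored html5lib drop its six dependency with one-line import changes, as in the hunks above. A sketch of the equivalences it provides (assumes a bleach version that ships six_shim, per the diff):

from bleach.six_shim import text_type, unichr, urllib_parse

assert text_type is str           # what six.text_type resolves to on Python 3
assert unichr(0x20AC) == "\u20ac" # six.unichr is just chr
print(urllib_parse.urlsplit("https://example.com/a?b=1").query)  # b=1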
@@ -15,8 +15,8 @@ documentation: http://www.crummy.com/software/BeautifulSoup/bs4/doc/
 """

 __author__ = "Leonard Richardson (leonardr@segfault.org)"
-__version__ = "4.12.2"
-__copyright__ = "Copyright (c) 2004-2023 Leonard Richardson"
+__version__ = "4.12.3"
+__copyright__ = "Copyright (c) 2004-2024 Leonard Richardson"
 # Use of this source code is governed by the MIT license.
 __license__ = "MIT"

@@ -514,15 +514,19 @@ class DetectsXMLParsedAsHTML(object):
     XML_PREFIX_B = b'<?xml'

     @classmethod
-    def warn_if_markup_looks_like_xml(cls, markup):
+    def warn_if_markup_looks_like_xml(cls, markup, stacklevel=3):
         """Perform a check on some markup to see if it looks like XML
         that's not XHTML. If so, issue a warning.

         This is much less reliable than doing the check while parsing,
         but some of the tree builders can't do that.

+        :param stacklevel: The stacklevel of the code calling this
+        function.
+
         :return: True if the markup looks like non-XHTML XML, False
         otherwise.
         """
         if isinstance(markup, bytes):
             prefix = cls.XML_PREFIX_B

@@ -535,15 +539,16 @@ class DetectsXMLParsedAsHTML(object):
             and markup.startswith(prefix)
             and not looks_like_html.search(markup[:500])
         ):
-            cls._warn()
+            cls._warn(stacklevel=stacklevel+2)
             return True
         return False

     @classmethod
-    def _warn(cls):
+    def _warn(cls, stacklevel=5):
         """Issue a warning about XML being parsed as HTML."""
         warnings.warn(
-            XMLParsedAsHTMLWarning.MESSAGE, XMLParsedAsHTMLWarning
+            XMLParsedAsHTMLWarning.MESSAGE, XMLParsedAsHTMLWarning,
+            stacklevel=stacklevel
         )

     def _initialize_xml_detector(self):
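The stacklevel plumbing added above exists so the XMLParsedAsHTMLWarning is attributed to the caller's code rather than to bs4 internals. A self-contained sketch of the mechanism, separate from the bs4 code itself:

import warnings

def _warn(stacklevel=2):
    warnings.warn("XML parsed as HTML", RuntimeWarning, stacklevel=stacklevel)

def parse(markup):
    # stacklevel=3: skip _warn() and parse() so the warning points at the
    # line that called parse().
    _warn(stacklevel=3)

parse("<?xml version='1.0'?>")  # warning is reported against this line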
|
@@ -77,7 +77,9 @@ class HTML5TreeBuilder(HTMLTreeBuilder):

         # html5lib only parses HTML, so if it's given XML that's worth
         # noting.
-        DetectsXMLParsedAsHTML.warn_if_markup_looks_like_xml(markup)
+        DetectsXMLParsedAsHTML.warn_if_markup_looks_like_xml(
+            markup, stacklevel=3
+        )

         yield (markup, None, None, False)

@@ -378,10 +378,10 @@ class HTMLParserTreeBuilder(HTMLTreeBuilder):
         parser.soup = self.soup
         try:
             parser.feed(markup)
-            parser.close()
         except AssertionError as e:
             # html.parser raises AssertionError in rare cases to
             # indicate a fatal problem with the markup, especially
             # when there's an error in the doctype declaration.
             raise ParserRejectedMarkup(e)
+        parser.close()
         parser.already_closed_empty_element = []

@@ -179,7 +179,9 @@ class LXMLTreeBuilderForXML(TreeBuilder):
             self.processing_instruction_class = ProcessingInstruction
             # We're in HTML mode, so if we're given XML, that's worth
             # noting.
-            DetectsXMLParsedAsHTML.warn_if_markup_looks_like_xml(markup)
+            DetectsXMLParsedAsHTML.warn_if_markup_looks_like_xml(
+                markup, stacklevel=3
+            )
         else:
             self.processing_instruction_class = XMLProcessingInstruction

@@ -1356,7 +1356,7 @@ class Tag(PageElement):
         This is the first step in the deepcopy process.
         """
         clone = type(self)(
-            None, self.builder, self.name, self.namespace,
+            None, None, self.name, self.namespace,
             self.prefix, self.attrs, is_xml=self._is_xml,
             sourceline=self.sourceline, sourcepos=self.sourcepos,
             can_be_empty_element=self.can_be_empty_element,

@@ -1845,6 +1845,11 @@ class Tag(PageElement):
         return space_before + s + space_after

     def _format_tag(self, eventual_encoding, formatter, opening):
+        if self.hidden:
+            # A hidden tag is invisible, although its contents
+            # are visible.
+            return ''
+
         # A tag starts with the < character (see below).

         # Then the / character, if this is a closing tag.

@@ -51,7 +51,7 @@ class Formatter(EntitySubstitution):
             void_element_close_prefix='/', cdata_containing_tags=None,
             empty_attributes_are_booleans=False, indent=1,
     ):
-        """Constructor.
+        r"""Constructor.

         :param language: This should be Formatter.XML if you are formatting
         XML markup and Formatter.HTML if you are formatting HTML markup.

@@ -76,7 +76,7 @@ class Formatter(EntitySubstitution):
         negative, or "" will only insert newlines. Using a
         positive integer indent indents that many spaces per
         level. If indent is a string (such as "\t"), that string
-        is used to indent each level. The default behavior to
+        is used to indent each level. The default behavior is to
         indent one space per level.
         """
         self.language = language

@@ -1105,7 +1105,7 @@ class XMLTreeBuilderSmokeTest(TreeBuilderSmokeTest):
         doc = """<?xml version="1.0" encoding="utf-8"?>
 <Document xmlns="http://example.com/ns0"
     xmlns:ns1="http://example.com/ns1"
-    xmlns:ns2="http://example.com/ns2"
+    xmlns:ns2="http://example.com/ns2">
 <ns1:tag>foo</ns1:tag>
 <ns1:tag>bar</ns1:tag>
 <ns2:tag key="value">baz</ns2:tag>
|
@ -0,0 +1 @@
|
|||
<20><> <20> <css
|
Binary file not shown.
Binary file not shown.
|
@ -0,0 +1 @@
|
|||
[fuzz test case file: a single very long line of nested <applet> tags ending in <meta charset=utf-8>; contents omitted]
Binary file not shown.
@@ -0,0 +1 @@
+- \xff\xff <math>\x10<select><mi><select><select>t
[non-ASCII bytes shown escaped]
Binary file not shown.
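The <applet> run above is typical of the inputs behind the deeply-nested-document tests in the diff below. A minimal sketch (not part of the diff; the depth is an invented, illustrative value) of the failure mode those tests guard against:

    from bs4 import BeautifulSoup

    # Deeply nested markup in the spirit of the <applet> test case above.
    # Converting the parsed tree back to a string used to recurse once per
    # nesting level; [bug=1471755] fixed the resulting stack overflow.
    markup = "<applet>" * 5000
    soup = BeautifulSoup(markup, "html.parser")
    try:
        soup.encode()  # the round trip alone exercises the old overflow
        print("encoded without overflowing the stack")
    except RecursionError:
        print("stack overflow on a version without the fix")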
@@ -14,30 +14,75 @@ from bs4 import (
     BeautifulSoup,
     ParserRejectedMarkup,
 )
+try:
+    from soupsieve.util import SelectorSyntaxError
+    import lxml
+    import html5lib
+    fully_fuzzable = True
+except ImportError:
+    fully_fuzzable = False
 
 
+@pytest.mark.skipif(not fully_fuzzable, reason="Prerequisites for fuzz tests are not installed.")
 class TestFuzz(object):
 
     # Test case markup files from fuzzers are given this extension so
     # they can be included in builds.
     TESTCASE_SUFFIX = ".testcase"
 
+    # Copied 20230512 from
+    # https://github.com/google/oss-fuzz/blob/4ac6a645a197a695fe76532251feb5067076b3f3/projects/bs4/bs4_fuzzer.py
+    #
+    # Copying the code lets us precisely duplicate the behavior of
+    # oss-fuzz. The downside is that this code changes over time, so
+    # multiple copies of the code must be kept around to run against
+    # older tests. I'm not sure what to do about this, but I may
+    # retire old tests after a time.
+    def fuzz_test_with_css(self, filename):
+        data = self.__markup(filename)
+        parsers = ['lxml-xml', 'html5lib', 'html.parser', 'lxml']
+        try:
+            idx = int(data[0]) % len(parsers)
+        except ValueError:
+            return
+
+        css_selector, data = data[1:10], data[10:]
+
+        try:
+            soup = BeautifulSoup(data[1:], features=parsers[idx])
+        except ParserRejectedMarkup:
+            return
+        except ValueError:
+            return
+
+        list(soup.find_all(True))
+        try:
+            soup.css.select(css_selector.decode('utf-8', 'replace'))
+        except SelectorSyntaxError:
+            return
+        soup.prettify()
 
     # This class of error has been fixed by catching a less helpful
     # exception from html.parser and raising ParserRejectedMarkup
     # instead.
     @pytest.mark.parametrize(
         "filename", [
             "clusterfuzz-testcase-minimized-bs4_fuzzer-5703933063462912",
             "crash-ffbdfa8a2b26f13537b68d3794b0478a4090ee4a",
         ]
     )
     def test_rejected_markup(self, filename):
         markup = self.__markup(filename)
         with pytest.raises(ParserRejectedMarkup):
             BeautifulSoup(markup, 'html.parser')
 
     # This class of error has to do with very deeply nested documents
     # which overflow the Python call stack when the tree is converted
     # to a string. This is an issue with Beautiful Soup which was fixed
     # as part of [bug=1471755].
     #
+    # These test cases are in the older format that doesn't specify
+    # which parser to use or give a CSS selector.
     @pytest.mark.parametrize(
         "filename", [
             "clusterfuzz-testcase-minimized-bs4_fuzzer-5984173902397440",

@@ -46,18 +91,44 @@ class TestFuzz(object):
             "clusterfuzz-testcase-minimized-bs4_fuzzer-6450958476902400",
         ]
     )
-    def test_deeply_nested_document(self, filename):
+    def test_deeply_nested_document_without_css(self, filename):
         # Parsing the document and encoding it back to a string is
         # sufficient to demonstrate that the overflow problem has
         # been fixed.
         markup = self.__markup(filename)
         BeautifulSoup(markup, 'html.parser').encode()
 
+    # This class of error has to do with very deeply nested documents
+    # which overflow the Python call stack when the tree is converted
+    # to a string. This is an issue with Beautiful Soup which was fixed
+    # as part of [bug=1471755].
+    @pytest.mark.parametrize(
+        "filename", [
+            "clusterfuzz-testcase-minimized-bs4_fuzzer-5000587759190016",
+            "clusterfuzz-testcase-minimized-bs4_fuzzer-5375146639360000",
+            "clusterfuzz-testcase-minimized-bs4_fuzzer-5492400320282624",
+        ]
+    )
+    def test_deeply_nested_document(self, filename):
+        self.fuzz_test_with_css(filename)
+
+    @pytest.mark.parametrize(
+        "filename", [
+            "clusterfuzz-testcase-minimized-bs4_fuzzer-4670634698080256",
+            "clusterfuzz-testcase-minimized-bs4_fuzzer-5270998950477824",
+        ]
+    )
+    def test_soupsieve_errors(self, filename):
+        self.fuzz_test_with_css(filename)
+
     # This class of error represents problems with html5lib's parser,
     # not Beautiful Soup. I use
     # https://github.com/html5lib/html5lib-python/issues/568 to notify
     # the html5lib developers of these issues.
-    @pytest.mark.skip("html5lib problems")
+    #
+    # These test cases are in the older format that doesn't specify
+    # which parser to use or give a CSS selector.
+    @pytest.mark.skip(reason="html5lib-specific problems")
     @pytest.mark.parametrize(
         "filename", [
             # b"""ÿ<!DOCTyPEV PUBLIC'''Ð'"""

@@ -68,7 +139,7 @@ class TestFuzz(object):
 
             # b'-<math><sElect><mi><sElect><sElect>'
             "clusterfuzz-testcase-minimized-bs4_fuzzer-5843991618256896",
 
             # b'ñ<table><svg><html>'
             "clusterfuzz-testcase-minimized-bs4_fuzzer-6241471367348224",
 
@@ -79,10 +150,24 @@ class TestFuzz(object):
             "crash-0d306a50c8ed8bcd0785b67000fcd5dea1d33f08"
         ]
     )
-    def test_html5lib_parse_errors(self, filename):
+    def test_html5lib_parse_errors_without_css(self, filename):
         markup = self.__markup(filename)
         print(BeautifulSoup(markup, 'html5lib').encode())
 
+    # This class of error represents problems with html5lib's parser,
+    # not Beautiful Soup. I use
+    # https://github.com/html5lib/html5lib-python/issues/568 to notify
+    # the html5lib developers of these issues.
+    @pytest.mark.skip(reason="html5lib-specific problems")
+    @pytest.mark.parametrize(
+        "filename", [
+            # b'- \xff\xff <math>\x10<select><mi><select><select>t'
+            "clusterfuzz-testcase-minimized-bs4_fuzzer-6306874195312640",
+        ]
+    )
+    def test_html5lib_parse_errors(self, filename):
+        self.fuzz_test_with_css(filename)
+
     def __markup(self, filename):
         if not filename.endswith(self.TESTCASE_SUFFIX):
             filename += self.TESTCASE_SUFFIX
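For reference, fuzz_test_with_css above treats each .testcase file as raw bytes: byte 0 picks the parser, bytes 1-9 hold a CSS selector, and the remainder (minus one more discarded byte, mirroring the oss-fuzz harness) is the markup. A hand-built payload (invented here for illustration; not a real clusterfuzz test case) traced through the same steps:

    from bs4 import BeautifulSoup

    PARSERS = ['lxml-xml', 'html5lib', 'html.parser', 'lxml']

    # flag byte + 9 selector bytes + 1 discarded byte + markup
    payload = b"2" + b"span     " + b"X<div><span>hello</span></div>"

    idx = int(payload[0]) % len(PARSERS)  # ord('2') == 50; 50 % 4 == 2 -> 'html.parser'
    css_selector, data = payload[1:10], payload[10:]

    soup = BeautifulSoup(data[1:], features=PARSERS[idx])
    print(soup.css.select(css_selector.decode('utf-8', 'replace').strip()))
    # [<span>hello</span>]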
@@ -219,3 +219,16 @@ class TestMultiValuedAttributes(SoupTest):
         )
         assert soup.a['class'] == 'foo'
         assert soup.a['id'] == ['bar']
+
+    def test_hidden_tag_is_invisible(self):
+        # Setting .hidden on a tag makes it invisible in output, but
+        # leaves its contents visible.
+        #
+        # This is not a documented or supported feature of Beautiful
+        # Soup (e.g. NavigableString doesn't support .hidden even
+        # though it could), but some people use it and it's not
+        # hurting anything to verify that it keeps working.
+        #
+        soup = self.soup('<div id="1"><span id="2">a string</span></div>')
+        soup.span.hidden = True
+        assert '<div id="1">a string</div>' == str(soup.div)
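Outside the test harness, the behavior checked above can be reproduced directly; this standalone snippet simply restates the body of test_hidden_tag_is_invisible with plain BeautifulSoup:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<div id="1"><span id="2">a string</span></div>', 'html.parser')
    soup.span.hidden = True
    print(soup.div)  # <div id="1">a string</div> -- the span tag vanishes, its text stays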
@@ -1,4 +1,4 @@
 from .core import contents, where
 
 __all__ = ["contents", "where"]
-__version__ = "2023.07.22"
+__version__ = "2025.04.26"
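The two names re-exported here, contents and where, are effectively certifi's whole public API; a minimal sketch of how the bundle diffed below is normally consumed:

    import ssl
    import certifi

    # where() returns the filesystem path of the bundled cacert.pem;
    # contents() returns the same PEM data as text.
    ctx = ssl.create_default_context(cafile=certifi.where())
    print(certifi.where())
    print(certifi.contents().count("BEGIN CERTIFICATE"), "root certificates in the bundle")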
@@ -1,95 +1,4 @@
-# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
-# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
-# Label: "GlobalSign Root CA"
-# Serial: 4835703278459707669005204
-# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
-# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
-# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
-[certificate PEM data omitted]
-
-# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Label: "Entrust.net Premium 2048 Secure Server CA"
-# Serial: 946069240
-# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90
-# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31
-# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77
-[certificate PEM data omitted]
-
-# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
-# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
-# Label: "Baltimore CyberTrust Root"
-# Serial: 33554617
-# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
-# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
-# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
-[certificate PEM data omitted]
 # Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
 # Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
 # Label: "Entrust Root Certification Authority"

@@ -125,39 +34,6 @@ eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
 0vdXcDazv/wor3ElhVsT/h5/WrQ8
 -----END CERTIFICATE-----
-
-# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
-# Subject: CN=AAA Certificate Services O=Comodo CA Limited
-# Label: "Comodo AAA Services root"
-# Serial: 1
-# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
-# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
-# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
-[certificate PEM data omitted]
 # Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited
 # Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited
 # Label: "QuoVadis Root CA 2"

@@ -245,131 +121,6 @@ mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
 4SVhM7JZG+Ju1zdXtg2pEto=
 -----END CERTIFICATE-----
-
-# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1
-# Subject: O=SECOM Trust.net OU=Security Communication RootCA1
-# Label: "Security Communication Root CA"
-# Serial: 0
-# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a
-# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7
-# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c
-[certificate PEM data omitted]
-
-# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
-# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
-# Label: "XRamp Global CA Root"
-# Serial: 107108908803651509692980124233745014957
-# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
-# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
-# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
-[certificate PEM data omitted]
-
-# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
-# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
-# Label: "Go Daddy Class 2 CA"
-# Serial: 0
-# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
-# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
-# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
-[certificate PEM data omitted]
-
-# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
-# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
-# Label: "Starfield Class 2 CA"
-# Serial: 0
-# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
-# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
-# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
-[certificate PEM data omitted]
 # Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
 # Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
 # Label: "DigiCert Assured ID Root CA"

@@ -502,47 +253,6 @@ ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt
 Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ
 -----END CERTIFICATE-----
-
-# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG
-# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG
-# Label: "SwissSign Silver CA - G2"
-# Serial: 5700383053117599563
-# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13
-# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb
-# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5
-[certificate PEM data omitted]
 # Issuer: CN=SecureTrust CA O=SecureTrust Corporation
 # Subject: CN=SecureTrust CA O=SecureTrust Corporation
 # Label: "SecureTrust CA"

@@ -791,35 +501,6 @@ uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
 XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
 -----END CERTIFICATE-----
-
-# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
-# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
-# Label: "SecureSign RootCA11"
-# Serial: 1
-# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26
-# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3
-# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12
-[certificate PEM data omitted]
 # Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
 # Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
 # Label: "Microsec e-Szigno Root CA 2009"

@@ -881,49 +562,6 @@ Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
 WD9f
 -----END CERTIFICATE-----
-
-# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
-# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
-# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
-# Serial: 6047274297262753887
-# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3
-# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa
-# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef
-[certificate PEM data omitted]
 # Issuer: CN=Izenpe.com O=IZENPE S.A.
 # Subject: CN=Izenpe.com O=IZENPE S.A.
 # Label: "Izenpe.com"

@@ -3171,50 +2809,6 @@ LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG
 mpv0
 -----END CERTIFICATE-----
-
-# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only
-# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only
-# Label: "Entrust Root Certification Authority - G4"
-# Serial: 289383649854506086828220374796556676440
-# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88
-# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01
-# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88
-[certificate PEM data omitted]
 # Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation
 # Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation
 # Label: "Microsoft ECC Root Certificate Authority 2017"

@@ -4325,46 +3919,6 @@ ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG
 BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR
 -----END CERTIFICATE-----
-
-# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD.
-# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD.
-# Label: "Security Communication RootCA3"
-# Serial: 16247922307909811815
-# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26
-# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a
-# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94
-[certificate PEM data omitted]
 # Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD.
 # Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD.
 # Label: "Security Communication ECC RootCA1"
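The fingerprint headers in this bundle are digests of each certificate's DER encoding. A sketch (standard library plus certifi only) that recomputes the SHA256 line for every root in the installed bundle, useful for spot-checking entries like the ones added below:

    import hashlib
    import ssl
    import certifi

    for block in certifi.contents().split("-----END CERTIFICATE-----"):
        start = block.find("-----BEGIN CERTIFICATE-----")
        if start == -1:
            continue
        pem = block[start:] + "-----END CERTIFICATE-----\n"
        der = ssl.PEM_cert_to_DER_cert(pem)  # strip the PEM armor, decode the base64
        digest = hashlib.sha256(der).hexdigest()
        # colon-separated, matching the "# SHA256 Fingerprint:" header format
        print(":".join(digest[i:i + 2] for i in range(0, len(digest), 2)))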
@@ -4633,3 +4187,490 @@ o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5
 dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE
 oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ==
 -----END CERTIFICATE-----
+
+# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc.
+# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc.
+# Label: "TrustAsia Global Root CA G3"
+# Serial: 576386314500428537169965010905813481816650257167
+# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04
+# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7
+# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08
+[certificate PEM data omitted]
+
+# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc.
+# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc.
+# Label: "TrustAsia Global Root CA G4"
+# Serial: 451799571007117016466790293371524403291602933463
+# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb
+# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a
+# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c
+[certificate PEM data omitted]
+
+# Issuer: CN=CommScope Public Trust ECC Root-01 O=CommScope
+# Subject: CN=CommScope Public Trust ECC Root-01 O=CommScope
+# Label: "CommScope Public Trust ECC Root-01"
+# Serial: 385011430473757362783587124273108818652468453534
+# MD5 Fingerprint: 3a:40:a7:fc:03:8c:9c:38:79:2f:3a:a2:6c:b6:0a:16
+# SHA1 Fingerprint: 07:86:c0:d8:dd:8e:c0:80:98:06:98:d0:58:7a:ef:de:a6:cc:a2:5d
+# SHA256 Fingerprint: 11:43:7c:da:7b:b4:5e:41:36:5f:45:b3:9a:38:98:6b:0d:e0:0d:ef:34:8e:0c:7b:b0:87:36:33:80:0b:c3:8b
+[certificate PEM data omitted]
+
+# Issuer: CN=CommScope Public Trust ECC Root-02 O=CommScope
+# Subject: CN=CommScope Public Trust ECC Root-02 O=CommScope
+# Label: "CommScope Public Trust ECC Root-02"
+# Serial: 234015080301808452132356021271193974922492992893
+# MD5 Fingerprint: 59:b0:44:d5:65:4d:b8:5c:55:19:92:02:b6:d1:94:b2
+# SHA1 Fingerprint: 3c:3f:ef:57:0f:fe:65:93:86:9e:a0:fe:b0:f6:ed:8e:d1:13:c7:e5
+# SHA256 Fingerprint: 2f:fb:7f:81:3b:bb:b3:c8:9a:b4:e8:16:2d:0f:16:d7:15:09:a8:30:cc:9d:73:c2:62:e5:14:08:75:d1:ad:4a
+[certificate PEM data omitted]
+
+# Issuer: CN=CommScope Public Trust RSA Root-01 O=CommScope
+# Subject: CN=CommScope Public Trust RSA Root-01 O=CommScope
+# Label: "CommScope Public Trust RSA Root-01"
+# Serial: 354030733275608256394402989253558293562031411421
+# MD5 Fingerprint: 0e:b4:15:bc:87:63:5d:5d:02:73:d4:26:38:68:73:d8
+# SHA1 Fingerprint: 6d:0a:5f:f7:b4:23:06:b4:85:b3:b7:97:64:fc:ac:75:f5:33:f2:93
+# SHA256 Fingerprint: 02:bd:f9:6e:2a:45:dd:9b:f1:8f:c7:e1:db:df:21:a0:37:9b:a3:c9:c2:61:03:44:cf:d8:d6:06:fe:c1:ed:81
+[certificate PEM data omitted]
+
+# Issuer: CN=CommScope Public Trust RSA Root-02 O=CommScope
+# Subject: CN=CommScope Public Trust RSA Root-02 O=CommScope
+# Label: "CommScope Public Trust RSA Root-02"
+# Serial: 480062499834624527752716769107743131258796508494
+# MD5 Fingerprint: e1:29:f9:62:7b:76:e2:96:6d:f3:d4:d7:0f:ae:1f:aa
+# SHA1 Fingerprint: ea:b0:e2:52:1b:89:93:4c:11:68:f2:d8:9a:ac:22:4c:a3:8a:57:ae
+# SHA256 Fingerprint: ff:e9:43:d7:93:42:4b:4f:7c:44:0c:1c:3d:64:8d:53:63:f3:4b:82:dc:87:aa:7a:9f:11:8f:c5:de:e1:01:f1
+[certificate PEM data omitted]
+
+# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH
+# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH
+# Label: "Telekom Security TLS ECC Root 2020"
+# Serial: 72082518505882327255703894282316633856
+# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd
+# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec
+# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1
+[certificate PEM data omitted]
+
+# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH
+# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH
+# Label: "Telekom Security TLS RSA Root 2023"
+# Serial: 44676229530606711399881795178081572759
+# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2
+# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93
+# SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46
+[certificate PEM data omitted]
+
+# Issuer: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA
+# Subject: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA
+# Label: "FIRMAPROFESIONAL CA ROOT-A WEB"
+# Serial: 65916896770016886708751106294915943533
+# MD5 Fingerprint: 82:b2:ad:45:00:82:b0:66:63:f8:5f:c3:67:4e:ce:a3
+# SHA1 Fingerprint: a8:31:11:74:a6:14:15:0d:ca:77:dd:0e:e4:0c:5d:58:fc:a0:72:a5
+# SHA256 Fingerprint: be:f2:56:da:f2:6e:9c:69:bd:ec:16:02:35:97:98:f3:ca:f7:18:21:a0:3e:01:82:57:c5:3c:65:61:7f:3d:4a
+[certificate PEM data omitted]
+
+# Issuer: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA CYBER Root CA"
+# Serial: 85076849864375384482682434040119489222
+# MD5 Fingerprint: 0b:33:a0:97:52:95:d4:a9:fd:bb:db:6e:a3:55:5b:51
|
||||
# SHA1 Fingerprint: f6:b1:1c:1a:83:38:e9:7b:db:b3:a8:c8:33:24:e0:2d:9c:7f:26:66
|
||||
# SHA256 Fingerprint: 3f:63:bb:28:14:be:17:4e:c8:b6:43:9c:f0:8d:6d:56:f0:b7:c4:05:88:3a:56:48:a3:34:42:4d:6b:3e:c5:58
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFjTCCA3WgAwIBAgIQQAE0jMIAAAAAAAAAATzyxjANBgkqhkiG9w0BAQwFADBQ
|
||||
MQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FOLUNBMRAwDgYDVQQLEwdSb290
|
||||
IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3QgQ0EwHhcNMjIxMTIyMDY1NDI5
|
||||
WhcNNDcxMTIyMTU1OTU5WjBQMQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FO
|
||||
LUNBMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3Qg
|
||||
Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDG+Moe2Qkgfh1sTs6P
|
||||
40czRJzHyWmqOlt47nDSkvgEs1JSHWdyKKHfi12VCv7qze33Kc7wb3+szT3vsxxF
|
||||
avcokPFhV8UMxKNQXd7UtcsZyoC5dc4pztKFIuwCY8xEMCDa6pFbVuYdHNWdZsc/
|
||||
34bKS1PE2Y2yHer43CdTo0fhYcx9tbD47nORxc5zb87uEB8aBs/pJ2DFTxnk684i
|
||||
JkXXYJndzk834H/nY62wuFm40AZoNWDTNq5xQwTxaWV4fPMf88oon1oglWa0zbfu
|
||||
j3ikRRjpJi+NmykosaS3Om251Bw4ckVYsV7r8Cibt4LK/c/WMw+f+5eesRycnupf
|
||||
Xtuq3VTpMCEobY5583WSjCb+3MX2w7DfRFlDo7YDKPYIMKoNM+HvnKkHIuNZW0CP
|
||||
2oi3aQiotyMuRAlZN1vH4xfyIutuOVLF3lSnmMlLIJXcRolftBL5hSmO68gnFSDA
|
||||
S9TMfAxsNAwmmyYxpjyn9tnQS6Jk/zuZQXLB4HCX8SS7K8R0IrGsayIyJNN4KsDA
|
||||
oS/xUgXJP+92ZuJF2A09rZXIx4kmyA+upwMu+8Ff+iDhcK2wZSA3M2Cw1a/XDBzC
|
||||
kHDXShi8fgGwsOsVHkQGzaRP6AzRwyAQ4VRlnrZR0Bp2a0JaWHY06rc3Ga4udfmW
|
||||
5cFZ95RXKSWNOkyrTZpB0F8mAwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYD
|
||||
VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSdhWEUfMFib5do5E83QOGt4A1WNzAd
|
||||
BgNVHQ4EFgQUnYVhFHzBYm+XaORPN0DhreANVjcwDQYJKoZIhvcNAQEMBQADggIB
|
||||
AGSPesRiDrWIzLjHhg6hShbNcAu3p4ULs3a2D6f/CIsLJc+o1IN1KriWiLb73y0t
|
||||
tGlTITVX1olNc79pj3CjYcya2x6a4CD4bLubIp1dhDGaLIrdaqHXKGnK/nZVekZn
|
||||
68xDiBaiA9a5F/gZbG0jAn/xX9AKKSM70aoK7akXJlQKTcKlTfjF/biBzysseKNn
|
||||
TKkHmvPfXvt89YnNdJdhEGoHK4Fa0o635yDRIG4kqIQnoVesqlVYL9zZyvpoBJ7t
|
||||
RCT5dEA7IzOrg1oYJkK2bVS1FmAwbLGg+LhBoF1JSdJlBTrq/p1hvIbZv97Tujqx
|
||||
f36SNI7JAG7cmL3c7IAFrQI932XtCwP39xaEBDG6k5TY8hL4iuO/Qq+n1M0RFxbI
|
||||
Qh0UqEL20kCGoE8jypZFVmAGzbdVAaYBlGX+bgUJurSkquLvWL69J1bY73NxW0Qz
|
||||
8ppy6rBePm6pUlvscG21h483XjyMnM7k8M4MZ0HMzvaAq07MTFb1wWFZk7Q+ptq4
|
||||
NxKfKjLji7gh7MMrZQzvIt6IKTtM1/r+t+FHvpw+PoP7UV31aPcuIYXcv/Fa4nzX
|
||||
xeSDwWrruoBa3lwtcHb4yOWHh8qgnaHlIhInD0Q9HWzq1MKLL295q39QpsQZp6F6
|
||||
t5b5wR9iWqJDB0BeJsas7a5wFsWqynKKTbDPAYsDP27X
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd.
|
||||
# Subject: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd.
|
||||
# Label: "SecureSign Root CA12"
|
||||
# Serial: 587887345431707215246142177076162061960426065942
|
||||
# MD5 Fingerprint: c6:89:ca:64:42:9b:62:08:49:0b:1e:7f:e9:07:3d:e8
|
||||
# SHA1 Fingerprint: 7a:22:1e:3d:de:1b:06:ac:9e:c8:47:70:16:8e:3c:e5:f7:6b:06:f4
|
||||
# SHA256 Fingerprint: 3f:03:4b:b5:70:4d:44:b2:d0:85:45:a0:20:57:de:93:eb:f3:90:5f:ce:72:1a:cb:c7:30:c0:6d:da:ee:90:4e
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDcjCCAlqgAwIBAgIUZvnHwa/swlG07VOX5uaCwysckBYwDQYJKoZIhvcNAQEL
|
||||
BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u
|
||||
LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExMjAeFw0yMDA0MDgw
|
||||
NTM2NDZaFw00MDA0MDgwNTM2NDZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD
|
||||
eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS
|
||||
b290IENBMTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6OcE3emhF
|
||||
KxS06+QT61d1I02PJC0W6K6OyX2kVzsqdiUzg2zqMoqUm048luT9Ub+ZyZN+v/mt
|
||||
p7JIKwccJ/VMvHASd6SFVLX9kHrko+RRWAPNEHl57muTH2SOa2SroxPjcf59q5zd
|
||||
J1M3s6oYwlkm7Fsf0uZlfO+TvdhYXAvA42VvPMfKWeP+bl+sg779XSVOKik71gur
|
||||
FzJ4pOE+lEa+Ym6b3kaosRbnhW70CEBFEaCeVESE99g2zvVQR9wsMJvuwPWW0v4J
|
||||
hscGWa5Pro4RmHvzC1KqYiaqId+OJTN5lxZJjfU+1UefNzFJM3IFTQy2VYzxV4+K
|
||||
h9GtxRESOaCtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
|
||||
AgEGMB0GA1UdDgQWBBRXNPN0zwRL1SXm8UC2LEzZLemgrTANBgkqhkiG9w0BAQsF
|
||||
AAOCAQEAPrvbFxbS8hQBICw4g0utvsqFepq2m2um4fylOqyttCg6r9cBg0krY6Ld
|
||||
mmQOmFxv3Y67ilQiLUoT865AQ9tPkbeGGuwAtEGBpE/6aouIs3YIcipJQMPTw4WJ
|
||||
mBClnW8Zt7vPemVV2zfrPIpyMpcemik+rY3moxtt9XUa5rBouVui7mlHJzWhhpmA
|
||||
8zNL4WukJsPvdFlseqJkth5Ew1DgDzk9qTPxpfPSvWKErI4cqc1avTc7bgoitPQV
|
||||
55FYxTpE05Uo2cBl6XLK0A+9H7MV2anjpEcJnuDLN/v9vZfVvhgaaaI5gdka9at/
|
||||
yOPiZwud9AzqVN/Ssq+xIvEg37xEHA==
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd.
|
||||
# Subject: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd.
|
||||
# Label: "SecureSign Root CA14"
|
||||
# Serial: 575790784512929437950770173562378038616896959179
|
||||
# MD5 Fingerprint: 71:0d:72:fa:92:19:65:5e:89:04:ac:16:33:f0:bc:d5
|
||||
# SHA1 Fingerprint: dd:50:c0:f7:79:b3:64:2e:74:a2:b8:9d:9f:d3:40:dd:bb:f0:f2:4f
|
||||
# SHA256 Fingerprint: 4b:00:9c:10:34:49:4f:9a:b5:6b:ba:3b:a1:d6:27:31:fc:4d:20:d8:95:5a:dc:ec:10:a9:25:60:72:61:e3:38
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFcjCCA1qgAwIBAgIUZNtaDCBO6Ncpd8hQJ6JaJ90t8sswDQYJKoZIhvcNAQEM
|
||||
BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u
|
||||
LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNDAeFw0yMDA0MDgw
|
||||
NzA2MTlaFw00NTA0MDgwNzA2MTlaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD
|
||||
eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS
|
||||
b290IENBMTQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDF0nqh1oq/
|
||||
FjHQmNE6lPxauG4iwWL3pwon71D2LrGeaBLwbCRjOfHw3xDG3rdSINVSW0KZnvOg
|
||||
vlIfX8xnbacuUKLBl422+JX1sLrcneC+y9/3OPJH9aaakpUqYllQC6KxNedlsmGy
|
||||
6pJxaeQp8E+BgQQ8sqVb1MWoWWd7VRxJq3qdwudzTe/NCcLEVxLbAQ4jeQkHO6Lo
|
||||
/IrPj8BGJJw4J+CDnRugv3gVEOuGTgpa/d/aLIJ+7sr2KeH6caH3iGicnPCNvg9J
|
||||
kdjqOvn90Ghx2+m1K06Ckm9mH+Dw3EzsytHqunQG+bOEkJTRX45zGRBdAuVwpcAQ
|
||||
0BB8b8VYSbSwbprafZX1zNoCr7gsfXmPvkPx+SgojQlD+Ajda8iLLCSxjVIHvXib
|
||||
y8posqTdDEx5YMaZ0ZPxMBoH064iwurO8YQJzOAUbn8/ftKChazcqRZOhaBgy/ac
|
||||
18izju3Gm5h1DVXoX+WViwKkrkMpKBGk5hIwAUt1ax5mnXkvpXYvHUC0bcl9eQjs
|
||||
0Wq2XSqypWa9a4X0dFbD9ed1Uigspf9mR6XU/v6eVL9lfgHWMI+lNpyiUBzuOIAB
|
||||
SMbHdPTGrMNASRZhdCyvjG817XsYAFs2PJxQDcqSMxDxJklt33UkN4Ii1+iW/RVL
|
||||
ApY+B3KVfqs9TC7XyvDf4Fg/LS8EmjijAQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
|
||||
AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUBpOjCl4oaTeqYR3r6/wtbyPk
|
||||
86AwDQYJKoZIhvcNAQEMBQADggIBAJaAcgkGfpzMkwQWu6A6jZJOtxEaCnFxEM0E
|
||||
rX+lRVAQZk5KQaID2RFPeje5S+LGjzJmdSX7684/AykmjbgWHfYfM25I5uj4V7Ib
|
||||
ed87hwriZLoAymzvftAj63iP/2SbNDefNWWipAA9EiOWWF3KY4fGoweITedpdopT
|
||||
zfFP7ELyk+OZpDc8h7hi2/DsHzc/N19DzFGdtfCXwreFamgLRB7lUe6TzktuhsHS
|
||||
DCRZNhqfLJGP4xjblJUK7ZGqDpncllPjYYPGFrojutzdfhrGe0K22VoF3Jpf1d+4
|
||||
2kd92jjbrDnVHmtsKheMYc2xbXIBw8MgAGJoFjHVdqqGuw6qnsb58Nn4DSEC5MUo
|
||||
FlkRudlpcyqSeLiSV5sI8jrlL5WwWLdrIBRtFO8KvH7YVdiI2i/6GaX7i+B/OfVy
|
||||
K4XELKzvGUWSTLNhB9xNH27SgRNcmvMSZ4PPmz+Ln52kuaiWA3rF7iDeM9ovnhp6
|
||||
dB7h7sxaOgTdsxoEqBRjrLdHEoOabPXm6RUVkRqEGQ6UROcSjiVbgGcZ3GOTEAtl
|
||||
Lor6CZpO2oYofaphNdgOpygau1LgePhsumywbrmHXumZNTfxPWQrqaA0k89jL9WB
|
||||
365jJ6UeTo3cKXhZ+PmhIIynJkBugnLNeLLIjzwec+fBH7/PzqUqm9tEZDKgu39c
|
||||
JRNItX+S
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd.
|
||||
# Subject: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd.
|
||||
# Label: "SecureSign Root CA15"
|
||||
# Serial: 126083514594751269499665114766174399806381178503
|
||||
# MD5 Fingerprint: 13:30:fc:c4:62:a6:a9:de:b5:c1:68:af:b5:d2:31:47
|
||||
# SHA1 Fingerprint: cb:ba:83:c8:c1:5a:5d:f1:f9:73:6f:ca:d7:ef:28:13:06:4a:07:7d
|
||||
# SHA256 Fingerprint: e7:78:f0:f0:95:fe:84:37:29:cd:1a:00:82:17:9e:53:14:a9:c2:91:44:28:05:e1:fb:1d:8f:b6:b8:88:6c:3a
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIICIzCCAamgAwIBAgIUFhXHw9hJp75pDIqI7fBw+d23PocwCgYIKoZIzj0EAwMw
|
||||
UTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28uLCBM
|
||||
dGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNTAeFw0yMDA0MDgwODMy
|
||||
NTZaFw00NTA0MDgwODMyNTZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpDeWJl
|
||||
cnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBSb290
|
||||
IENBMTUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQLUHSNZDKZmbPSYAi4Io5GdCx4
|
||||
wCtELW1fHcmuS1Iggz24FG1Th2CeX2yF2wYUleDHKP+dX+Sq8bOLbe1PL0vJSpSR
|
||||
ZHX+AezB2Ot6lHhWGENfa4HL9rzatAy2KZMIaY+jQjBAMA8GA1UdEwEB/wQFMAMB
|
||||
Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT
|
||||
9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp
|
||||
4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6
|
||||
bkU6iYAZezKYVWOr62Nuk22rGwlgMU4=
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH
|
||||
# Subject: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH
|
||||
# Label: "D-TRUST BR Root CA 2 2023"
|
||||
# Serial: 153168538924886464690566649552453098598
|
||||
# MD5 Fingerprint: e1:09:ed:d3:60:d4:56:1b:47:1f:b7:0c:5f:1b:5f:85
|
||||
# SHA1 Fingerprint: 2d:b0:70:ee:71:94:af:69:68:17:db:79:ce:58:9f:a0:6b:96:f7:87
|
||||
# SHA256 Fingerprint: 05:52:e6:f8:3f:df:65:e8:fa:96:70:e6:66:df:28:a4:e2:13:40:b5:10:cb:e5:25:66:f9:7c:4f:b9:4b:2b:d1
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFqTCCA5GgAwIBAgIQczswBEhb2U14LnNLyaHcZjANBgkqhkiG9w0BAQ0FADBI
|
||||
MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE
|
||||
LVRSVVNUIEJSIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA4NTYzMVoXDTM4MDUw
|
||||
OTA4NTYzMFowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi
|
||||
MCAGA1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN
|
||||
AQEBBQADggIPADCCAgoCggIBAK7/CVmRgApKaOYkP7in5Mg6CjoWzckjYaCTcfKr
|
||||
i3OPoGdlYNJUa2NRb0kz4HIHE304zQaSBylSa053bATTlfrdTIzZXcFhfUvnKLNE
|
||||
gXtRr90zsWh81k5M/itoucpmacTsXld/9w3HnDY25QdgrMBM6ghs7wZ8T1soegj8
|
||||
k12b9py0i4a6Ibn08OhZWiihNIQaJZG2tY/vsvmA+vk9PBFy2OMvhnbFeSzBqZCT
|
||||
Rphny4NqoFAjpzv2gTng7fC5v2Xx2Mt6++9zA84A9H3X4F07ZrjcjrqDy4d2A/wl
|
||||
2ecjbwb9Z/Pg/4S8R7+1FhhGaRTMBffb00msa8yr5LULQyReS2tNZ9/WtT5PeB+U
|
||||
cSTq3nD88ZP+npNa5JRal1QMNXtfbO4AHyTsA7oC9Xb0n9Sa7YUsOCIvx9gvdhFP
|
||||
/Wxc6PWOJ4d/GUohR5AdeY0cW/jPSoXk7bNbjb7EZChdQcRurDhaTyN0dKkSw/bS
|
||||
uREVMweR2Ds3OmMwBtHFIjYoYiMQ4EbMl6zWK11kJNXuHA7e+whadSr2Y23OC0K+
|
||||
0bpwHJwh5Q8xaRfX/Aq03u2AnMuStIv13lmiWAmlY0cL4UEyNEHZmrHZqLAbWt4N
|
||||
DfTisl01gLmB1IRpkQLLddCNxbU9CZEJjxShFHR5PtbJFR2kWVki3PaKRT08EtY+
|
||||
XTIvAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUZ5Dw1t61
|
||||
GNVGKX5cq/ieCLxklRAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG
|
||||
OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfYnJfcm9vdF9jYV8y
|
||||
XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQA097N3U9swFrktpSHxQCF16+tI
|
||||
FoE9c+CeJyrrd6kTpGoKWloUMz1oH4Guaf2Mn2VsNELZLdB/eBaxOqwjMa1ef67n
|
||||
riv6uvw8l5VAk1/DLQOj7aRvU9f6QA4w9QAgLABMjDu0ox+2v5Eyq6+SmNMW5tTR
|
||||
VFxDWy6u71cqqLRvpO8NVhTaIasgdp4D/Ca4nj8+AybmTNudX0KEPUUDAxxZiMrc
|
||||
LmEkWqTqJwtzEr5SswrPMhfiHocaFpVIbVrg0M8JkiZmkdijYQ6qgYF/6FKC0ULn
|
||||
4B0Y+qSFNueG4A3rvNTJ1jxD8V1Jbn6Bm2m1iWKPiFLY1/4nwSPFyysCu7Ff/vtD
|
||||
hQNGvl3GyiEm/9cCnnRK3PgTFbGBVzbLZVzRHTF36SXDw7IyN9XxmAnkbWOACKsG
|
||||
koHU6XCPpz+y7YaMgmo1yEJagtFSGkUPFaUA8JR7ZSdXOUPPfH/mvTWze/EZTN46
|
||||
ls/pdu4D58JDUjxqgejBWoC9EV2Ta/vH5mQ/u2kc6d0li690yVRAysuTEwrt+2aS
|
||||
Ecr1wPrYg1UDfNPFIkZ1cGt5SAYqgpq/5usWDiJFAbzdNpQ0qTUmiteXue4Icr80
|
||||
knCDgKs4qllo3UCkGJCy89UDyibK79XH4I9TjvAA46jtn/mtd+ArY0+ew+43u3gJ
|
||||
hJ65bvspmZDogNOfJA==
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH
|
||||
# Subject: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH
|
||||
# Label: "D-TRUST EV Root CA 2 2023"
|
||||
# Serial: 139766439402180512324132425437959641711
|
||||
# MD5 Fingerprint: 96:b4:78:09:f0:09:cb:77:eb:bb:1b:4d:6f:36:bc:b6
|
||||
# SHA1 Fingerprint: a5:5b:d8:47:6c:8f:19:f7:4c:f4:6d:6b:b6:c2:79:82:22:df:54:8b
|
||||
# SHA256 Fingerprint: 8e:82:21:b2:e7:d4:00:78:36:a1:67:2f:0d:cc:29:9c:33:bc:07:d3:16:f1:32:fa:1a:20:6d:58:71:50:f1:ce
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFqTCCA5GgAwIBAgIQaSYJfoBLTKCnjHhiU19abzANBgkqhkiG9w0BAQ0FADBI
|
||||
MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE
|
||||
LVRSVVNUIEVWIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA5MTAzM1oXDTM4MDUw
|
||||
OTA5MTAzMlowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi
|
||||
MCAGA1UEAxMZRC1UUlVTVCBFViBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN
|
||||
AQEBBQADggIPADCCAgoCggIBANiOo4mAC7JXUtypU0w3uX9jFxPvp1sjW2l1sJkK
|
||||
F8GLxNuo4MwxusLyzV3pt/gdr2rElYfXR8mV2IIEUD2BCP/kPbOx1sWy/YgJ25yE
|
||||
7CUXFId/MHibaljJtnMoPDT3mfd/06b4HEV8rSyMlD/YZxBTfiLNTiVR8CUkNRFe
|
||||
EMbsh2aJgWi6zCudR3Mfvc2RpHJqnKIbGKBv7FD0fUDCqDDPvXPIEysQEx6Lmqg6
|
||||
lHPTGGkKSv/BAQP/eX+1SH977ugpbzZMlWGG2Pmic4ruri+W7mjNPU0oQvlFKzIb
|
||||
RlUWaqZLKfm7lVa/Rh3sHZMdwGWyH6FDrlaeoLGPaxK3YG14C8qKXO0elg6DpkiV
|
||||
jTujIcSuWMYAsoS0I6SWhjW42J7YrDRJmGOVxcttSEfi8i4YHtAxq9107PncjLgc
|
||||
jmgjutDzUNzPZY9zOjLHfP7KgiJPvo5iR2blzYfi6NUPGJ/lBHJLRjwQ8kTCZFZx
|
||||
TnXonMkmdMV9WdEKWw9t/p51HBjGGjp82A0EzM23RWV6sY+4roRIPrN6TagD4uJ+
|
||||
ARZZaBhDM7DS3LAaQzXupdqpRlyuhoFBAUp0JuyfBr/CBTdkdXgpaP3F9ev+R/nk
|
||||
hbDhezGdpn9yo7nELC7MmVcOIQxFAZRl62UJxmMiCzNJkkg8/M3OsD6Onov4/knF
|
||||
NXJHAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUqvyREBuH
|
||||
kV8Wub9PS5FeAByxMoAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG
|
||||
OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfZXZfcm9vdF9jYV8y
|
||||
XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQCTy6UfmRHsmg1fLBWTxj++EI14
|
||||
QvBukEdHjqOSMo1wj/Zbjb6JzkcBahsgIIlbyIIQbODnmaprxiqgYzWRaoUlrRc4
|
||||
pZt+UPJ26oUFKidBK7GB0aL2QHWpDsvxVUjY7NHss+jOFKE17MJeNRqrphYBBo7q
|
||||
3C+jisosketSjl8MmxfPy3MHGcRqwnNU73xDUmPBEcrCRbH0O1P1aa4846XerOhU
|
||||
t7KR/aypH/KH5BfGSah82ApB9PI+53c0BFLd6IHyTS9URZ0V4U/M5d40VxDJI3IX
|
||||
cI1QcB9WbMy5/zpaT2N6w25lBx2Eof+pDGOJbbJAiDnXH3dotfyc1dZnaVuodNv8
|
||||
ifYbMvekJKZ2t0dT741Jj6m2g1qllpBFYfXeA08mD6iL8AOWsKwV0HFaanuU5nCT
|
||||
2vFp4LJiTZ6P/4mdm13NRemUAiKN4DV/6PEEeXFsVIP4M7kFMhtYVRFP0OUnR3Hs
|
||||
7dpn1mKmS00PaaLJvOwiS5THaJQXfuKOKD62xur1NGyfN4gHONuGcfrNlUhDbqNP
|
||||
gofXNJhuS5N5YHVpD/Aa1VP6IQzCP+k/HxiMkl14p3ZnGbuy6n/pcAlWVqOwDAst
|
||||
Nl7F6cTVg8uGF5csbBNvh1qvSaYd2804BC5f4ko1Di1L+KIkBI3Y4WNeApI02phh
|
||||
XBxvWHZks/wCuPWdCg==
|
||||
-----END CERTIFICATE-----
|
||||
|
|
|
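
Each bundle entry above pairs a PEM block with the certificate's MD5, SHA-1, and SHA-256 fingerprints, which are computed over the DER encoding of the certificate, not the base64 text. A minimal sketch for cross-checking a listed SHA-256 fingerprint against its PEM block, using only the Python standard library (the colon-separated hex formatting is an assumption matching the comment convention above):

import hashlib
import ssl

def sha256_fingerprint(pem_cert: str) -> str:
    # Decode the PEM text (including BEGIN/END lines) back to DER bytes.
    der = ssl.PEM_cert_to_DER_cert(pem_cert)
    digest = hashlib.sha256(der).hexdigest()
    # Format as colon-separated hex pairs, like the "# SHA256 Fingerprint:" comments.
    return ":".join(digest[i:i + 2] for i in range(0, len(digest), 2))
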
@@ -5,6 +5,10 @@ certifi.py
This module returns the installation location of cacert.pem or its contents.
"""
import sys
import atexit

def exit_cacert_ctx() -> None:
    _CACERT_CTX.__exit__(None, None, None)  # type: ignore[union-attr]


if sys.version_info >= (3, 11):
@@ -35,6 +39,7 @@ if sys.version_info >= (3, 11):
        # we will also store that at the global level as well.
        _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem"))
        _CACERT_PATH = str(_CACERT_CTX.__enter__())
        atexit.register(exit_cacert_ctx)

    return _CACERT_PATH
@@ -70,6 +75,7 @@ elif sys.version_info >= (3, 7):
        # we will also store that at the global level as well.
        _CACERT_CTX = get_path("certifi", "cacert.pem")
        _CACERT_PATH = str(_CACERT_CTX.__enter__())
        atexit.register(exit_cacert_ctx)

    return _CACERT_PATH
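
The hunks above register an atexit hook so that the importlib.resources context manager opened by where() is always exited at interpreter shutdown, cleaning up any temporary cacert.pem extracted from a zipped install. A minimal sketch of the same pattern; the names mypackage and data.bin are placeholders, not part of this diff:

import atexit
from importlib.resources import as_file, files

_ctx = None
_path = None

def resource_path() -> str:
    global _ctx, _path
    if _path is None:
        # as_file() returns a context manager; entering it may extract the
        # resource to a temporary file (e.g. when the package ships in a zip).
        _ctx = as_file(files("mypackage").joinpath("data.bin"))
        _path = str(_ctx.__enter__())
        # Exit the context at interpreter shutdown so any temp copy is removed.
        atexit.register(lambda: _ctx.__exit__(None, None, None))
    return _path
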
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
"""
Charset-Normalizer
~~~~~~~~~~~~~~
@@ -19,6 +18,9 @@ at <https://github.com/Ousret/charset_normalizer>.
:copyright: (c) 2021 by Ahmed TAHRI
:license: MIT, see LICENSE for more details.
"""

from __future__ import annotations

import logging

from .api import from_bytes, from_fp, from_path, is_binary
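
The hunk above re-exports the package's primary entry points (from_bytes, from_fp, from_path, is_binary). A short usage sketch with an illustrative byte string; the exact encoding label printed is an assumption about typical output:

from charset_normalizer import from_bytes

results = from_bytes("Bonjour, señor".encode("utf-8"))
best = results.best()  # may be None if no plausible encoding was found
if best is not None:
    print(best.encoding, str(best))  # e.g. utf_8 plus the decoded text
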
lib/charset_normalizer/__main__.py (new file, 6 lines)
@@ -0,0 +1,6 @@
from __future__ import annotations

from .cli import cli_detect

if __name__ == "__main__":
    cli_detect()
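
With this __main__ module in place, the detector can also be invoked as python -m charset_normalizer <path>, which should dispatch to the same cli_detect() entry point as the installed command-line script (assuming the package's CLI dependencies are available).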
Some files were not shown because too many files have changed in this diff.