Mirror of https://github.com/Tautulli/Tautulli.git (synced 2025-07-06 05:01:14 -07:00)

Compare commits: v2.14.0-beta...master (141 commits)
Commits:

```
76f6a2da6b d2a14ea6c0 e6c0a12dd5 24dd403a72 a876e006d6 74786f0ed1
99e575383c 3e784c7495 68dc095c83 ad2ec0e2bf 09c28e434d cfc7b817b3
b3aa29c677 e4d181ba5b 53e5f89725 0879b848b9 c70381c3ff f23d3eb81c
2ed603f288 a96fd23d72 65dc466c07 0a4730625c 67fa4ca645 078c293bd7
85e9237608 f9b3631745 8f03e27617 63fe386057 b7c4f2eefe 37ef098718
78864d7a97 62a05712f8 ca0e1c321d b9cb7102c4 6e6fe1fb65 9c473c6528
5c38de0dfb ea66f6713b dd9a35df51 feca713b76 0836fb902c eb2c372d82
be2e63e7e0 2fe3f039cc baf926e5db 85b63fb61a afc29604cc 5b47cebdc7
d9f38f9390 86d775a586 ddb4f6131b 599e52de6a 84be60cb36 d9a87f9726
9289ead996 af752e0acc 86abd130b0 fc2c7cc871 025e8bcf58 bf07912711
48b1c7b522 e69852fa0e 01589cb8b0 f3a2c02e96 d3f7eef84f 2f3d24a0e7
2d3271376b 940c2ae6cd 1cdfd5f30a e3f4851883 1353247b55 3cf6560de3
9ca8d59372 921a3a0af9 3bb53f480e 6979a4025f cc1a325eac de697cb2ca
596cf57d61 ac32297160 330b8a3a82 5cf39cb097 14c9c7a393 cf8fb2e65d
623a9f2919 3fb46a9ab7 cfd81684b7 fb4f0046f3 7d4efac75d 509d18801b
da501df846 43cb027592 2e6f541ec2 822d5a452c 7696d031d3 50ced86ba5
e934d09eff 96c5cb216c 2ee2ab652c 193b82c54a 7d00383d1c 6f84ce8048
709db66b10 2f1607b96b 28ad2716ba f1a8164b94 3bc94cad6c a528f052b9
5e977c044a afa25d45f6 43e71d836a 55573d26ea f1d44c051d a3af8ed362
912fd75a2f 5778672dab dcdf5a2992 73cfa8e0c0 795d568df2 8396a04ce8
8419eee4b2 c505e26656 37ffe68ce2 dc9e778111 68bf1c70f7 ee0b4c0602
5c115dec68 1d77f32665 af01b8c6cc dd9d3b97a2 96c20ad893 5e90f3bb31
dab46249f2 5d0ba8b222 3e8a5663a3 6414a0ba12 bcac5b7897 de3393d62b
dcec1f6f5f 65905a6647 5de2cf85c3
```
551 changed files with 31619 additions and 23053 deletions
.github/workflows/publish-docker.yml (13 changed lines, vendored)

```diff
@@ -33,7 +33,6 @@ jobs:
           echo "branch=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT
           fi
           echo "commit=${GITHUB_SHA}" >> $GITHUB_OUTPUT
-          echo "build_date=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_OUTPUT
           echo "docker_platforms=linux/amd64,linux/arm64/v8,linux/arm/v7,linux/arm/v6" >> $GITHUB_OUTPUT
           echo "docker_image=${{ secrets.DOCKER_REPO }}/tautulli" >> $GITHUB_OUTPUT
 
@@ -59,7 +58,7 @@ jobs:
        if: success()
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
+          password: ${{ secrets.DOCKER_TOKEN }}
 
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
@@ -69,8 +68,14 @@ jobs:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.GHCR_TOKEN }}
 
+      - name: Extract Docker Metadata
+        id: metadata
+        uses: docker/metadata-action@v5
+        with:
+          images: ${{ steps.prepare.outputs.docker_image }}
+
      - name: Docker Build and Push
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v6
        if: success()
        with:
          context: .
@@ -81,10 +86,10 @@ jobs:
            TAG=${{ steps.prepare.outputs.tag }}
            BRANCH=${{ steps.prepare.outputs.branch }}
            COMMIT=${{ steps.prepare.outputs.commit }}
-            BUILD_DATE=${{ steps.prepare.outputs.build_date }}
          tags: |
            ${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.tag }}
            ghcr.io/${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.tag }}
+          labels: ${{ steps.metadata.outputs.labels }}
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache
 
```
.github/workflows/publish-installers.yml (22 changed lines, vendored)

```diff
@@ -75,7 +75,7 @@ jobs:
          pyinstaller -y ./package/Tautulli-${{ matrix.os }}.spec
 
      - name: Create Windows Installer
-        uses: joncloud/makensis-action@v4
+        uses: joncloud/makensis-action@v4.1
        if: matrix.os == 'windows'
        with:
          script-file: ./package/Tautulli.nsi
@@ -100,6 +100,24 @@ jobs:
          name: Tautulli-${{ matrix.os }}-installer
          path: Tautulli-${{ matrix.os }}-${{ steps.get_version.outputs.RELEASE_VERSION }}-${{ matrix.arch }}.${{ matrix.ext }}
 
+  virus-total:
+    name: VirusTotal Scan
+    needs: build-installer
+    if: needs.build-installer.result == 'success' && !contains(github.event.head_commit.message, '[skip ci]')
+    runs-on: ubuntu-latest
+    steps:
+      - name: Download Installers
+        if: needs.build-installer.result == 'success'
+        uses: actions/download-artifact@v4
+
+      - name: Upload to VirusTotal
+        uses: crazy-max/ghaction-virustotal@v4
+        with:
+          vt_api_key: ${{ secrets.VT_API_KEY }}
+          files: |
+            Tautulli-windows-installer/Tautulli-windows-*-x64.exe
+            Tautulli-macos-installer/Tautulli-macos-*-universal.pkg
+
   release:
     name: Release Installers
     needs: build-installer
@@ -143,7 +161,7 @@ jobs:
          prerelease: ${{ endsWith(steps.get_version.outputs.RELEASE_VERSION, '-beta') }}
          files: |
            Tautulli-windows-installer/Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.exe
-            Tautulli-macos-installer/Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.pkg
+            Tautulli-macos-installer/Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-universal.pkg
 
   discord:
     name: Discord Notification
```
.github/workflows/publish-snap.yml (4 changed lines, vendored)

```diff
@@ -38,7 +38,7 @@ jobs:
        uses: docker/setup-qemu-action@v3
 
      - name: Build Snap Package
-        uses: diddlesnaps/snapcraft-multiarch-action@v1
+        uses: diddlesnaps/snapcraft-multiarch-action@master
        id: build
        with:
          architecture: ${{ matrix.architecture }}
@@ -50,7 +50,7 @@ jobs:
          path: ${{ steps.build.outputs.snap }}
 
      - name: Review Snap Package
-        uses: diddlesnaps/snapcraft-review-tools-action@v1
+        uses: diddlesnaps/snapcraft-review-tools-action@master
        with:
          snap: ${{ steps.build.outputs.snap }}
 
```
.github/workflows/submit-winget.yml (14 changed lines, vendored)

```diff
@@ -23,3 +23,17 @@ jobs:
          # getting latest wingetcreate file
          iwr https://aka.ms/wingetcreate/latest -OutFile wingetcreate.exe
          .\wingetcreate.exe update $wingetPackage -s -v $version -u $installerUrl -t $gitToken
+
+  virus-total:
+    name: VirusTotal Scan
+    runs-on: ubuntu-latest
+    steps:
+      - name: Upload to VirusTotal
+        uses: crazy-max/ghaction-virustotal@v4
+        with:
+          vt_api_key: ${{ secrets.VT_API_KEY }}
+          github_token: ${{ secrets.GHACTIONS_TOKEN }}
+          update_release_body: true
+          files: |
+            .exe$
+            .pkg$
```
CHANGELOG.md (117 changed lines)

```diff
@@ -1,6 +1,110 @@
 # Changelog
 
-## v2.14.0-beta (2024-04-19)
+## v2.15.2 (2025-04-12)
 
+* Activity:
+  * New: Added link to library by clicking media type icon.
+  * New: Added stream count to tab title on homepage. (#2517)
+* History:
+  * Fix: Check stream watched status before stream stopped status. (#2506)
+* Notifications:
+  * Fix: ntfy notifications failing to send if provider link is blank.
+  * Fix: Check Pushover notification attachment is under 5MB limit. (#2396)
+  * Fix: Track URLs redirecting to the correct media page. (#2513)
+  * New: Added audio profile notification parameters.
+  * New: Added PATCH method for Webhook notifications.
+* Graphs:
+  * New: Added Total line to daily streams graph. (Thanks @zdimension) (#2497)
+* UI:
+  * Fix: Do not redirect API requests to the login page. (#2490)
+  * Change: Swap source and stream columns in stream info modal.
+* Other:
+  * Fix: Various typos. (Thanks @luzpaz) (#2520)
+  * Fix: CherryPy CORS response header not being set correctly. (#2279)
+
+
+## v2.15.1 (2025-01-11)
+
+* Activity:
+  * Fix: Detection of HDR transcodes. (Thanks @cdecker08) (#2412, #2466)
+* Newsletters:
+  * Fix: Disable basic authentication for /newsletter and /image endpoints. (#2472)
+* Exporter:
+  * New: Added logos to season and episode exports.
+* Other:
+  * Fix: Docker container https health check.
+
+
+## v2.15.0 (2024-11-24)
+
+* Notes:
+  * Support for Python 3.8 has been dropped. The minimum Python version is now 3.9.
+* Notifications:
+  * New: Allow Telegram blockquote and tg-emoji HTML tags. (Thanks @MythodeaLoL) (#2427)
+  * New: Added Plex slug and Plex Watch URL notification parameters. (#2420)
+  * Change: Update OneSignal API calls to use the new API endpoint for Tautulli Remote App notifications.
+* Newsletters:
+  * Fix: Dumping custom dates in raw newsletter json.
+* History:
+  * Fix: Unable to fix match for artists. (#2429)
+* Exporter:
+  * New: Added movie and episode hasVoiceActivity attribute to exporter fields.
+  * New: Added subtitle canAutoSync attribute to exporter fields.
+  * New: Added logos to the exporter fields.
+* UI:
+  * New: Add friendly name to the top bar of config modals. (Thanks @peagravel) (#2432)
+* API:
+  * New: Added plex slugs to metadata in the get_metadata API command.
+* Other:
+  * Fix: Tautulli failing to start with Python 3.13. (#2426)
+
+
+## v2.14.6 (2024-10-12)
+
+* Newsletters:
+  * Fix: Allow formatting newsletter date parameters.
+  * Change: Support apscheduler compatible cron expressions.
+* UI:
+  * Fix: Round runtime before converting to human duration.
+  * Fix: Make recently added/watched rows touch scrollable.
+* Other:
+  * Fix: Auto-updater not running.
+
+
+## v2.14.5 (2024-09-20)
+
+* Activity:
+  * Fix: Display of 2k resolution on activity card.
+* Notifications:
+  * Fix: ntfy notifications with special characters failing to send.
+* Other:
+  * Fix: Memory leak with database closing. (#2404)
+
+
+## v2.14.4 (2024-08-10)
+
+* Notifications:
+  * Fix: Update Slack notification info card.
+  * New: Added ntfy notification agent. (Thanks @nwithan8) (#2356, #2000)
+* UI:
+  * Fix: macOS platform capitalization.
+* Other:
+  * Fix: Remove deprecated getdefaultlocale. (Thanks @teodorstelian) (#2364, #2345)
+
+
+## v2.14.3 (2024-06-19)
+
+* Graphs:
+  * Fix: History table not loading when clicking on the graphs in some instances.
+* UI:
+  * Fix: Scheduled tasks table not loading when certain tasks are disabled.
+  * Removed: Unnecessary Remote Server checkbox from the settings page.
+* Other:
+  * Fix: Webserver not restarting after the setup wizard.
+  * Fix: Workaround webserver crashing in some instances.
+
+
+## v2.14.2 (2024-05-18)
+
 * History:
   * Fix: Live TV activity not logging to history.
@@ -9,9 +113,9 @@
   * Fix: Pushover configuration settings refreshing after entering a token.
   * Fix: Plex remote access down notifications not triggering.
   * Fix: Deleting all images from Cloudinary only deleting 1000 images.
-  * New: Added platform version and product version notification parameters.
-  * New: Added LAN streams and WAN streams notification parameters.
-  * New: Added Dolby Vision notification parameters.
+  * New: Added platform version and product version notification parameters. (#2244)
+  * New: Added LAN streams and WAN streams notification parameters. (#2276)
+  * New: Added Dolby Vision notification parameters. (#2240)
   * New: Added live TV channel notification parameters.
   * Change: Improved Tautulli Remote App notification encryption method.
     * Note: Requires Tautulli Remote App version 3.2.4.
@@ -19,20 +123,23 @@
   * New: Added slug attribute to exporter fields.
   * New: Added track genres to exporter fields.
   * New: Added playlist source URI to exporter fields.
+  * New: Added artProvider and thumbProvider to exporter fields.
 * UI:
   * Fix: Mask deleted usernames in the logs.
   * Fix: Live TV watch stats not showing on the media info page.
+  * Fix: Users without access to Plex server not showing as inactive.
   * Removed: Deprecated synced item pages.
   * Removed: Anonymous redirect settings. Links now use browser no-referrer policy instead.
 * API:
   * New: Added Dolby Vision info to the get_metadata API command.
-  * New: Added before and after parameters to the get_home_stats API command.
+  * New: Added before and after parameters to the get_home_stats API command. (#2231)
 * Packages:
   * New: Universal binary for macOS for Apple silicon.
   * New: Bump Snap package to core22.
 * Other:
   * Change: Login cookie expires changed to max-age.
   * Change: Improved key generation for login password. It is recommended to reenter your HTTP Password in the settings after upgrading.
+  * Removed: Python 2 compatibility code. (#2098, #2226) (Thanks @zdimension)
 
 
 ## v2.13.4 (2023-12-07)
```
@ -25,4 +25,4 @@ CMD [ "python", "Tautulli.py", "--datadir", "/config" ]
|
||||||
ENTRYPOINT [ "./start.sh" ]
|
ENTRYPOINT [ "./start.sh" ]
|
||||||
|
|
||||||
EXPOSE 8181
|
EXPOSE 8181
|
||||||
HEALTHCHECK --start-period=90s CMD curl -ILfSs http://localhost:8181/status > /dev/null || curl -ILfkSs https://localhost:8181/status > /dev/null || exit 1
|
HEALTHCHECK --start-period=90s CMD curl -ILfks https://localhost:8181/status > /dev/null || curl -ILfs http://localhost:8181/status > /dev/null || exit 1
|
||||||
|
|
|
```diff
@@ -36,7 +36,7 @@ and [PlexWatchWeb](https://github.com/ecleese/plexWatchWeb).
 [![Docker Stars][badge-docker-stars]][DockerHub]
 [![Downloads][badge-downloads]][Releases Latest]
 
-[badge-python]: https://img.shields.io/badge/python->=3.8-blue?style=flat-square
+[badge-python]: https://img.shields.io/badge/python->=3.9-blue?style=flat-square
 [badge-docker-pulls]: https://img.shields.io/docker/pulls/tautulli/tautulli?style=flat-square
 [badge-docker-stars]: https://img.shields.io/docker/stars/tautulli/tautulli?style=flat-square
 [badge-downloads]: https://img.shields.io/github/downloads/Tautulli/Tautulli/total?style=flat-square
@@ -129,7 +129,7 @@ This is free software under the GPL v3 open source license. Feel free to do with
 but any modification must be open sourced. A copy of the license is included.
 
 This software includes Highsoft software libraries which you may freely distribute for
-non-commercial use. Commerical users must licence this software, for more information visit
+non-commercial use. Commercial users must licence this software, for more information visit
 https://shop.highsoft.com/faq/non-commercial#non-commercial-redistribution.
 
 
```
Tautulli.py (26 changed lines)

```diff
@@ -23,7 +23,6 @@ import sys
 # Ensure lib added to path, before any other imports
 sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'lib'))
 
-from future.builtins import str
 
 import argparse
 import datetime
@@ -35,6 +34,7 @@ import shutil
 import time
 import threading
 import tzlocal
+import ctypes
 
 import plexpy
 from plexpy import common, config, database, helpers, logger, webstart
@@ -70,8 +70,26 @@ def main():
     plexpy.SYS_ENCODING = None
 
     try:
-        locale.setlocale(locale.LC_ALL, "")
-        plexpy.SYS_LANGUAGE, plexpy.SYS_ENCODING = locale.getdefaultlocale()
+        # Attempt to get the system's locale settings
+        language_code, encoding = locale.getlocale()
+
+        # Special handling for Windows platform
+        if sys.platform == 'win32':
+            # Get the user's current language settings on Windows
+            windll = ctypes.windll.kernel32
+            lang_id = windll.GetUserDefaultLCID()
+
+            # Map Windows language ID to locale identifier
+            language_code = locale.windows_locale.get(lang_id, '')
+
+            # Get the preferred encoding
+            encoding = locale.getpreferredencoding()
+
+        # Assign values to application-specific variable
+        plexpy.SYS_LANGUAGE = language_code
+        plexpy.SYS_ENCODING = encoding
+
     except (locale.Error, IOError):
         pass
 
@@ -111,7 +129,7 @@ def main():
     if args.quiet:
         plexpy.QUIET = True
 
-    # Do an intial setup of the logger.
+    # Do an initial setup of the logger.
     # Require verbose for pre-initilization to see critical errors
     logger.initLogger(console=not plexpy.QUIET, log_dir=False, verbose=True)
 
```
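The Tautulli.py hunk above swaps the deprecated `locale.getdefaultlocale()` for `locale.getlocale()` plus a Win32 fallback, matching the v2.14.4 changelog entry "Remove deprecated getdefaultlocale". A minimal standalone sketch of the same detection logic (runnable on any platform; the Windows branch only executes under `sys.platform == 'win32'`):

```python
import ctypes
import locale
import sys


def detect_system_locale():
    """Best-effort system language/encoding lookup, mirroring the hunk above."""
    # locale.getlocale() replaces the deprecated locale.getdefaultlocale().
    language_code, encoding = locale.getlocale()

    if sys.platform == 'win32':
        # getlocale() is unreliable on Windows: ask the Win32 API for the
        # user's default LCID and map it to a POSIX-style locale name.
        lang_id = ctypes.windll.kernel32.GetUserDefaultLCID()
        language_code = locale.windows_locale.get(lang_id, '')
        encoding = locale.getpreferredencoding()

    return language_code, encoding


print(detect_system_locale())  # e.g. ('en_US', 'UTF-8')
```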
```diff
@@ -1478,7 +1478,8 @@ a:hover .dashboard-stats-square {
     text-align: center;
     position: relative;
     z-index: 0;
-    overflow: hidden;
+    overflow: auto;
+    scrollbar-width: none;
 }
 .dashboard-recent-media {
     width: 100%;
@@ -4324,6 +4325,10 @@ a:hover .overlay-refresh-image:hover {
 .stream-info tr:nth-child(even) td {
     background-color: rgba(255,255,255,0.010);
 }
+.stream-info td:nth-child(3),
+.stream-info th:nth-child(3) {
+    width: 25px;
+}
 .number-input {
     margin: 0 !important;
     width: 55px !important;
```
```diff
@@ -74,6 +74,7 @@ DOCUMENTATION :: END
   parent_href = page('info', data['parent_rating_key'])
   grandparent_href = page('info', data['grandparent_rating_key'])
   user_href = page('user', data['user_id']) if data['user_id'] else '#'
+  library_href = page('library', data['section_id']) if data['section_id'] else '#'
   season = short_season(data['parent_title'])
 %>
 <div class="dashboard-activity-instance" id="activity-instance-${sk}" data-key="${sk}" data-id="${data['session_id']}"
@@ -463,21 +464,27 @@ DOCUMENTATION :: END
       <div class="dashboard-activity-metadata-subtitle-container">
         % if data['live']:
         <div id="media-type-${sk}" class="dashboard-activity-metadata-media_type-icon" title="Live TV">
-          <i class="fa fa-fw fa-broadcast-tower"></i>
+          <a href="${library_href}">
+            <i class="fa fa-fw fa-broadcast-tower"></i>
+          </a>
         </div>
         % elif data['channel_stream'] == 0:
         <div id="media-type-${sk}" class="dashboard-activity-metadata-media_type-icon" title="${data['media_type'].capitalize()}">
-          % if data['media_type'] == 'movie':
-          <i class="fa fa-fw fa-film"></i>
-          % elif data['media_type'] == 'episode':
-          <i class="fa fa-fw fa-television"></i>
-          % elif data['media_type'] == 'track':
-          <i class="fa fa-fw fa-music"></i>
-          % elif data['media_type'] == 'photo':
-          <i class="fa fa-fw fa-picture-o"></i>
-          % elif data['media_type'] == 'clip':
-          <i class="fa fa-fw fa-video-camera"></i>
-          % endif
+          <a href="${library_href}">
+            % if data['media_type'] == 'movie':
+            <i class="fa fa-fw fa-film"></i>
+            % elif data['media_type'] == 'episode':
+            <i class="fa fa-fw fa-television"></i>
+            % elif data['media_type'] == 'track':
+            <i class="fa fa-fw fa-music"></i>
+            % elif data['media_type'] == 'photo':
+            <i class="fa fa-fw fa-picture-o"></i>
+            % elif data['media_type'] == 'clip':
+            <i class="fa fa-fw fa-video-camera"></i>
+            % else:
+            <i class="fa fa-fw fa-question-circle"></i>
+            % endif
+          </a>
         </div>
         % else:
         <div id="media-type-${sk}" class="dashboard-activity-metadata-media_type-icon" title="Channel">
```
```diff
@@ -20,6 +20,7 @@ DOCUMENTATION :: END
   export = exporter.Export()
   thumb_media_types = ', '.join([export.PLURAL_MEDIA_TYPES[k] for k, v in export.MEDIA_TYPES.items() if v[0]])
   art_media_types = ', '.join([export.PLURAL_MEDIA_TYPES[k] for k, v in export.MEDIA_TYPES.items() if v[1]])
+  logo_media_types = ', '.join([export.PLURAL_MEDIA_TYPES[k] for k, v in export.MEDIA_TYPES.items() if v[2]])
 %>
 <div class="modal-dialog" role="document">
   <div class="modal-content">
@@ -144,6 +145,22 @@ DOCUMENTATION :: END
           Select the level to export background artwork image files.<br>Note: Only applies to ${art_media_types}.
         </p>
       </div>
+      <div class="form-group">
+        <label for="export_logo_level">Logo Image Export Level</label>
+        <div class="row">
+          <div class="col-md-12">
+            <select class="form-control" id="export_logo_level" name="export_logo_level">
+              <option value="0" selected>Level 0 - None / Custom</option>
+              <option value="1">Level 1 - Uploaded and Selected Logos Only</option>
+              <option value="2">Level 2 - Selected and Locked Logos Only</option>
+              <option value="9">Level 9 - All Selected Logos</option>
+            </select>
+          </div>
+        </div>
+        <p class="help-block">
+          Select the level to export logo image files.<br>Note: Only applies to ${logo_media_types}.
+        </p>
+      </div>
       <p class="help-block">
         Warning: Exporting images may take a long time! Images will be saved to a folder alongside the data file.
       </p>
@@ -231,6 +248,7 @@ DOCUMENTATION :: END
       $('#export_media_info_level').prop('disabled', true);
       $("#export_thumb_level").prop('disabled', true);
       $("#export_art_level").prop('disabled', true);
+      $("#export_logo_level").prop('disabled', true);
       export_custom_metadata_fields.disable();
       export_custom_media_info_fields.disable();
     } else {
@@ -238,6 +256,7 @@ DOCUMENTATION :: END
       $('#export_media_info_level').prop('disabled', false);
       $("#export_thumb_level").prop('disabled', false);
       $("#export_art_level").prop('disabled', false);
+      $("#export_logo_level").prop('disabled', false);
       export_custom_metadata_fields.enable();
       export_custom_media_info_fields.enable();
     }
@@ -252,6 +271,7 @@ DOCUMENTATION :: END
     var file_format = $('#export_file_format option:selected').val();
     var thumb_level = $("#export_thumb_level option:selected").val();
     var art_level = $("#export_art_level option:selected").val();
+    var logo_level = $("#export_logo_level option:selected").val();
     var custom_fields = [
       $('#export_custom_metadata_fields').val(),
       $('#export_custom_media_info_fields').val()
@@ -270,6 +290,7 @@ DOCUMENTATION :: END
       file_format: file_format,
       thumb_level: thumb_level,
       art_level: art_level,
+      logo_level: logo_level,
       custom_fields: custom_fields,
       export_type: export_type,
       individual_files: individual_files
```
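In the Mako preamble above, each value in `Export.MEDIA_TYPES` is indexed positionally (`v[0]` for thumbs, `v[1]` for art, and now `v[2]` for logos), so adding the logo export level only required a third flag per media type. A small sketch of the pattern with hypothetical flag data (the real mapping lives in Tautulli's exporter module):

```python
# Hypothetical stand-in for Export.MEDIA_TYPES: each value holds
# (supports_thumb, supports_art, supports_logo) flags per media type.
MEDIA_TYPES = {
    'movie': (True, True, True),
    'show':  (True, True, True),
    'track': (True, False, False),
    'photo': (False, False, False),
}
PLURAL_MEDIA_TYPES = {'movie': 'movies', 'show': 'shows',
                      'track': 'tracks', 'photo': 'photos'}

# Same comprehension shape as the template, one tuple index per image kind.
thumb_media_types = ', '.join(PLURAL_MEDIA_TYPES[k] for k, v in MEDIA_TYPES.items() if v[0])
art_media_types = ', '.join(PLURAL_MEDIA_TYPES[k] for k, v in MEDIA_TYPES.items() if v[1])
logo_media_types = ', '.join(PLURAL_MEDIA_TYPES[k] for k, v in MEDIA_TYPES.items() if v[2])

print(logo_media_types)  # movies, shows
```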
```diff
@@ -301,6 +301,10 @@
         return obj;
     }, {});
 
+    if (!("Total" in chart_visibility)) {
+        chart_visibility["Total"] = false;
+    }
+
     return data_series.map(function(s) {
         var obj = Object.assign({}, s);
         obj.visible = (chart_visibility[s.name] !== false);
@@ -327,7 +331,8 @@
         'Direct Play': '#E5A00D',
         'Direct Stream': '#FFFFFF',
         'Transcode': '#F06464',
-        'Max. Concurrent Streams': '#96C83C'
+        'Max. Concurrent Streams': '#96C83C',
+        'Total': '#96C83C'
     };
     var series_colors = [];
     $.each(data_series, function(index, series) {
```
```diff
@@ -92,10 +92,10 @@
 <h3 class="pull-left"><span id="recently-added-xml">Recently Added</span></h3>
 <ul class="nav nav-header nav-dashboard pull-right" style="margin-top: -3px;">
   <li>
-    <a href="#" id="recently-added-page-left" class="paginate btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a>
+    <a href="#" id="recently-added-page-left" class="paginate-added btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
   </li>
   <li>
-    <a href="#" id="recently-added-page-right" class="paginate btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a>
+    <a href="#" id="recently-added-page-right" class="paginate-added btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
   </li>
 </ul>
 <div class="button-bar">
@@ -212,28 +212,6 @@
     </div>
   </div>
 </div>
-<% from plexpy.helpers import anon_url %>
-<div id="python2-modal" class="modal fade wide" tabindex="-1" role="dialog" aria-labelledby="python2-modal">
-  <div class="modal-dialog" role="document">
-    <div class="modal-content">
-      <div class="modal-header">
-        <button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
-        <h4 class="modal-title">Unable to Update</h4>
-      </div>
-      <div class="modal-body" style="text-align: center;">
-        <p>Tautulli is still running using Python 2 and cannot be updated past v2.6.3.</p>
-        <p>Python 3 is required to continue receiving updates.</p>
-        <p>
-          <strong>Please see the <a href="${anon_url('https://github.com/Tautulli/Tautulli/wiki/Upgrading-to-Python-3-%28Tautulli-v2.5%29')}" target="_blank" rel="noreferrer">wiki</a>
-          for instructions on how to upgrade to Python 3.</strong>
-        </p>
-      </div>
-      <div class="modal-footer">
-        <input type="button" class="btn btn-bright" data-dismiss="modal" value="Close">
-      </div>
-    </div>
-  </div>
-</div>
 % endif
 
 <div class="modal fade" id="ip-info-modal" tabindex="-1" role="dialog" aria-labelledby="ip-info-modal">
@@ -320,6 +298,8 @@
 
   $('#currentActivityHeader-bandwidth-tooltip').tooltip({ container: 'body', placement: 'right', delay: 50 });
 
+  var title = document.title;
+
   function getCurrentActivity() {
     activity_ready = false;
 
@@ -390,6 +370,8 @@
 
         $('#currentActivityHeader').show();
 
+        document.title = stream_count + ' stream' + (stream_count > 1 ? 's' : '') + ' | ' + title;
+
         sessions.forEach(function (session) {
           var s = (typeof Proxy === "function") ? new Proxy(session, defaultHandler) : session;
           var key = s.session_key;
@@ -622,6 +604,8 @@
       } else {
         $('#currentActivityHeader').hide();
         $('#currentActivity').html('<div id="dashboard-no-activity" class="text-muted">Nothing is currently being played.</div>');
+
+        document.title = title;
       }
 
       activity_ready = true;
@@ -958,10 +942,14 @@
         count: recently_added_count,
         media_type: recently_added_type
       },
+      beforeSend: function () {
+        $(".dashboard-recent-media-row").animate({ scrollLeft: 0 }, 1000);
+      },
       complete: function (xhr, status) {
         $("#recentlyAdded").html(xhr.responseText);
         $('#ajaxMsg').fadeOut();
-        highlightAddedScrollerButton();
+        highlightScrollerButton("#recently-added");
+        paginateScroller("#recently-added", ".paginate-added");
       }
     });
   }
@@ -977,57 +965,11 @@
     recentlyAdded(recently_added_count, recently_added_type);
   }
 
-  function highlightAddedScrollerButton() {
-    var scroller = $("#recently-added-row-scroller");
-    var numElems = scroller.find("li:visible").length;
-    scroller.width(numElems * 175);
-    if (scroller.width() > $("body").find(".container-fluid").width()) {
-      $("#recently-added-page-right").removeClass("disabled");
-    } else {
-      $("#recently-added-page-right").addClass("disabled");
-    }
-  }
-
-  $(window).resize(function () {
-    highlightAddedScrollerButton();
-  });
-
-  function resetScroller() {
-    leftTotal = 0;
-    $("#recently-added-row-scroller").animate({ left: leftTotal }, 1000);
-    $("#recently-added-page-left").addClass("disabled").blur();
-  }
-
-  var leftTotal = 0;
-  $(".paginate").click(function (e) {
-    e.preventDefault();
-    var scroller = $("#recently-added-row-scroller");
-    var containerWidth = $("body").find(".container-fluid").width();
-    var scrollAmount = $(this).data("id") * parseInt((containerWidth - 15) / 175) * 175;
-    var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
-
-    leftTotal = Math.max(Math.min(leftTotal + scrollAmount, 0), leftMax);
-    scroller.animate({ left: leftTotal }, 250);
-
-    if (leftTotal === 0) {
-      $("#recently-added-page-left").addClass("disabled").blur();
-    } else {
-      $("#recently-added-page-left").removeClass("disabled");
-    }
-
-    if (leftTotal === leftMax) {
-      $("#recently-added-page-right").addClass("disabled").blur();
-    } else {
-      $("#recently-added-page-right").removeClass("disabled");
-    }
-  });
-
   $('#recently-added-toggles').on('change', function () {
     $('#recently-added-toggles > label').removeClass('active');
     selected_filter = $('input[name=recently-added-toggle]:checked', '#recently-added-toggles');
     $(selected_filter).closest('label').addClass('active');
     recently_added_type = $(selected_filter).val();
-    resetScroller();
     setLocalStorage('home_stats_recently_added_type', recently_added_type);
     recentlyAdded(recently_added_count, recently_added_type);
   });
@@ -1035,7 +977,6 @@
   $('#recently-added-count').change(function () {
     forceMinMax($(this));
     recently_added_count = $(this).val();
-    resetScroller();
     setLocalStorage('home_stats_recently_added_count', recently_added_count);
     recentlyAdded(recently_added_count, recently_added_type);
   });
@@ -1067,16 +1008,4 @@
   });
 </script>
 % endif
-% if _session['user_group'] == 'admin':
-<script>
-  const queryString = window.location.search;
-  const urlParams = new URLSearchParams(queryString);
-  if (urlParams.get('update') === 'python2') {
-    $("#python2-modal").modal({
-      backdrop: 'static',
-      keyboard: false
-    });
-  }
-</script>
-% endif
 </%def>
```
```diff
@@ -360,7 +360,8 @@ function humanDuration(ms, sig='dhm', units='ms', return_seconds=300000) {
         sig = 'dhms'
     }
 
-    ms = ms * factors[units];
+    r = factors[sig.slice(-1)];
+    ms = Math.round(ms * factors[units] / r) * r;
 
     h = ms % factors['d'];
     d = Math.trunc(ms / factors['d']);
```
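The `humanDuration` change above rounds the incoming duration to the smallest unit that will actually be displayed (`sig.slice(-1)`) before decomposing it, which is the "Round runtime before converting to human duration" fix from v2.14.6. The same arithmetic re-expressed as a Python sketch, with a factor table assumed to match the JS `factors` object:

```python
# Assumed factor table in milliseconds, matching the shape of the JS `factors`.
FACTORS = {'ms': 1, 's': 1000, 'm': 60_000, 'h': 3_600_000, 'd': 86_400_000}


def round_to_smallest_unit(value, units='ms', sig='dhm'):
    """Mirror of: r = factors[sig.slice(-1)]; ms = Math.round(ms * factors[units] / r) * r"""
    r = FACTORS[sig[-1]]          # smallest significant unit, e.g. 'm'
    ms = value * FACTORS[units]   # normalize the input to milliseconds
    # Note: Math.round() rounds halves up, while Python round() rounds halves to even.
    return round(ms / r) * r


# 95 seconds shown with 'dhm' significance rounds to the nearest minute:
print(round_to_smallest_unit(95, units='s'))  # 120000 (ms), i.e. 2 mins
```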
```diff
@@ -929,3 +930,50 @@ $('.modal').on('hide.bs.modal', function (e) {
 $.fn.hasScrollBar = function() {
     return this.get(0).scrollHeight > this.get(0).clientHeight;
 }
+
+function paginateScroller(scrollerId, buttonClass) {
+    $(buttonClass).click(function (e) {
+        e.preventDefault();
+        var scroller = $(scrollerId + "-row-scroller");
+        var scrollerParent = scroller.parent();
+        var containerWidth = scrollerParent.width();
+        var scrollCurrent = scrollerParent.scrollLeft();
+        var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
+        var scrollMax = scroller.width() - Math.abs(scrollAmount);
+        var scrollTotal = Math.min(parseInt(scrollCurrent / 175) * 175 + scrollAmount, scrollMax);
+        scrollerParent.animate({ scrollLeft: scrollTotal }, 250);
+    });
+}
+
+function highlightScrollerButton(scrollerId) {
+    var scroller = $(scrollerId + "-row-scroller");
+    var scrollerParent = scroller.parent();
+    var buttonLeft = $(scrollerId + "-page-left");
+    var buttonRight = $(scrollerId + "-page-right");
+
+    var numElems = scroller.find("li").length;
+    scroller.width(numElems * 175);
+    $(buttonLeft).addClass("disabled").blur();
+    if (scroller.width() > scrollerParent.width()) {
+        $(buttonRight).removeClass("disabled");
+    } else {
+        $(buttonRight).addClass("disabled");
+    }
+
+    scrollerParent.scroll(function () {
+        var scrollCurrent = $(this).scrollLeft();
+        var scrollMax = scroller.width() - $(this).width();
+
+        if (scrollCurrent == 0) {
+            $(buttonLeft).addClass("disabled").blur();
+        } else {
+            $(buttonLeft).removeClass("disabled");
+        }
+
+        if (scrollCurrent >= scrollMax) {
+            $(buttonRight).addClass("disabled").blur();
+        } else {
+            $(buttonRight).removeClass("disabled");
+        }
+    });
+}
```
```diff
@@ -100,7 +100,7 @@ export_table_options = {
     "createdCell": function (td, cellData, rowData, row, col) {
         if (cellData !== '') {
             var images = '';
-            if (rowData['thumb_level'] || rowData['art_level']) {
+            if (rowData['thumb_level'] || rowData['art_level'] || rowData['logo_level']) {
                 images = ' + images';
             }
             $(td).html(cellData + images);
@@ -161,14 +161,14 @@ export_table_options = {
         if (cellData === 1 && rowData['exists']) {
             var tooltip_title = '';
             var icon = '';
-            if (rowData['thumb_level'] || rowData['art_level'] || rowData['individual_files']) {
+            if (rowData['thumb_level'] || rowData['art_level'] || rowData['logo_level'] || rowData['individual_files']) {
                 tooltip_title = 'Zip Archive';
                 icon = 'fa-file-archive';
             } else {
                 tooltip_title = rowData['file_format'].toUpperCase() + ' File';
                 icon = 'fa-file-download';
             }
-            var icon = (rowData['thumb_level'] || rowData['art_level'] || rowData['individual_files']) ? 'fa-file-archive' : 'fa-file-download';
+            var icon = (rowData['thumb_level'] || rowData['art_level'] || rowData['logo_level'] || rowData['individual_files']) ? 'fa-file-archive' : 'fa-file-download';
             $(td).html('<button class="btn btn-xs btn-success pull-left" data-id="' + rowData['export_id'] + '"><span data-toggle="tooltip" data-placement="left" title="' + tooltip_title + '"><i class="fa ' + icon + ' fa-fw"></i> Download</span></button>');
         } else if (cellData === 0) {
             var percent = Math.min(getPercent(rowData['exported_items'], rowData['total_items']), 99)
```
```diff
@@ -149,10 +149,10 @@ DOCUMENTATION :: END
 <div class="table-card-header">
   <ul class="nav nav-header nav-dashboard pull-right">
     <li>
-      <a href="#" id="recently-watched-page-left" class="paginate-watched btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a>
+      <a href="#" id="recently-watched-page-left" class="paginate-watched btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
     </li>
     <li>
-      <a href="#" id="recently-watched-page-right" class="paginate-watched btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a>
+      <a href="#" id="recently-watched-page-right" class="paginate-watched btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
     </li>
   </ul>
   <div class="header-bar">
@@ -175,10 +175,10 @@ DOCUMENTATION :: END
 <div class="table-card-header">
   <ul class="nav nav-header nav-dashboard pull-right">
     <li>
-      <a href="#" id="recently-added-page-left" class="paginate-added btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a>
+      <a href="#" id="recently-added-page-left" class="paginate-added btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
     </li>
     <li>
-      <a href="#" id="recently-added-page-right" class="paginate-added btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a>
+      <a href="#" id="recently-added-page-right" class="paginate-added btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
     </li>
   </ul>
   <div class="header-bar">
@@ -690,7 +690,8 @@ DOCUMENTATION :: END
       },
       complete: function(xhr, status) {
         $("#library-recently-watched").html(xhr.responseText);
-        highlightWatchedScrollerButton();
+        highlightScrollerButton("#recently-watched");
+        paginateScroller("#recently-watched", ".paginate-watched");
       }
     });
   }
@@ -706,7 +707,8 @@ DOCUMENTATION :: END
       },
      complete: function(xhr, status) {
         $("#library-recently-added").html(xhr.responseText);
-        highlightAddedScrollerButton();
+        highlightScrollerButton("#recently-added");
+        paginateScroller("#recently-added", ".paginate-added");
       }
     });
   }
@@ -716,83 +718,8 @@ DOCUMENTATION :: END
     recentlyAdded();
   % endif
 
-  function highlightWatchedScrollerButton() {
-    var scroller = $("#recently-watched-row-scroller");
-    var numElems = scroller.find("li").length;
-    scroller.width(numElems * 175);
-    if (scroller.width() > $("#library-recently-watched").width()) {
-      $("#recently-watched-page-right").removeClass("disabled");
-    } else {
-      $("#recently-watched-page-right").addClass("disabled");
-    }
-  }
-
-  function highlightAddedScrollerButton() {
-    var scroller = $("#recently-added-row-scroller");
-    var numElems = scroller.find("li").length;
-    scroller.width(numElems * 175);
-    if (scroller.width() > $("#library-recently-added").width()) {
-      $("#recently-added-page-right").removeClass("disabled");
-    } else {
-      $("#recently-added-page-right").addClass("disabled");
-    }
-  }
-
-  $(window).resize(function() {
-    highlightWatchedScrollerButton();
-    highlightAddedScrollerButton();
-  });
-
   $('div.art-face').animate({ opacity: 0.2 }, { duration: 1000 });
 
-  var leftTotalWatched = 0;
-  $(".paginate-watched").click(function (e) {
-    e.preventDefault();
-    var scroller = $("#recently-watched-row-scroller");
-    var containerWidth = $("#library-recently-watched").width();
-    var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
-    var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
-
-    leftTotalWatched = Math.max(Math.min(leftTotalWatched + scrollAmount, 0), leftMax);
-    scroller.animate({ left: leftTotalWatched }, 250);
-
-    if (leftTotalWatched == 0) {
-      $("#recently-watched-page-left").addClass("disabled").blur();
-    } else {
-      $("#recently-watched-page-left").removeClass("disabled");
-    }
-
-    if (leftTotalWatched == leftMax) {
-      $("#recently-watched-page-right").addClass("disabled").blur();
-    } else {
-      $("#recently-watched-page-right").removeClass("disabled");
-    }
-  });
-
-  var leftTotalAdded = 0;
-  $(".paginate-added").click(function (e) {
-    e.preventDefault();
-    var scroller = $("#recently-added-row-scroller");
-    var containerWidth = $("#library-recently-added").width();
-    var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
-    var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
-
-    leftTotalAdded = Math.max(Math.min(leftTotalAdded + scrollAmount, 0), leftMax);
-    scroller.animate({ left: leftTotalAdded }, 250);
-
-    if (leftTotalAdded == 0) {
-      $("#recently-added-page-left").addClass("disabled").blur();
-    } else {
-      $("#recently-added-page-left").removeClass("disabled");
-    }
-
-    if (leftTotalAdded == leftMax) {
-      $("#recently-added-page-right").addClass("disabled").blur();
-    } else {
-      $("#recently-added-page-right").removeClass("disabled");
-    }
-  });
-
   $(document).ready(function () {
 
     // Javascript to enable link to tab
```
```diff
@@ -36,7 +36,7 @@ DOCUMENTATION :: END
 
 %>
 <div class="dashboard-recent-media-row">
-  <div id="recently-added-row-scroller" style="left: 0;">
+  <div id="recently-added-row-scroller">
     <ul class="dashboard-recent-media list-unstyled">
       % for item in data:
       <li>
```
```diff
@@ -3,7 +3,7 @@
   <div class="modal-content">
     <div class="modal-header">
       <button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
-      <h4 class="modal-title" id="mobile-device-config-modal-header">${device['device_name']} Settings <small><span class="device_id">(Device ID: ${device['id']})</span></small></h4>
+      <h4 class="modal-title" id="mobile-device-config-modal-header">${device['device_name']} Settings <small><span class="device_id">(Device ID: ${device['id']}${' - ' + device['friendly_name'] if device['friendly_name'] else ''})</span></small></h4>
     </div>
     <div class="modal-body">
       <div class="container-fluid">
```
@@ -13,7 +13,7 @@
 <div class="modal-content">
     <div class="modal-header">
         <button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
-        <h4 class="modal-title" id="newsletter-config-modal-header">${newsletter['agent_label']} Newsletter Settings <small><span class="newsletter_id">(Newsletter ID: ${newsletter['id']})</span></small></h4>
+        <h4 class="modal-title" id="newsletter-config-modal-header">${newsletter['agent_label']} Newsletter Settings <small><span class="newsletter_id">(Newsletter ID: ${newsletter['id']}${' - ' + newsletter['friendly_name'] if newsletter['friendly_name'] else ''})</span></small></h4>
     </div>
     <div class="modal-body">
         <div class="container-fluid">
@@ -50,7 +50,10 @@
 </div>
 <p class="help-block">
     <span id="simple_cron_message">Set the schedule for the newsletter.</span>
-    <span id="custom_cron_message">Set the schedule for the newsletter using a <a href="${anon_url('https://crontab.guru')}" target="_blank" rel="noreferrer">custom crontab</a>. Only standard cron values are valid.</span>
+    <span id="custom_cron_message">
+        Set the schedule for the newsletter using a <a href="${anon_url('https://crontab.guru')}" target="_blank" rel="noreferrer">custom crontab</a>.
+        <a href="${anon_url('https://apscheduler.readthedocs.io/en/3.x/modules/triggers/cron.html#expression-types')}" target="_blank" rel="noreferrer">Click here</a> for a list of supported expressions.
+    </span>
 </p>
 </div>
 <div class="form-group">
@@ -481,7 +484,7 @@
 });

 if (${newsletter['config']['custom_cron']}) {
-    $('#cron_value').val('${newsletter['cron']}');
+    $('#cron_value').val('${newsletter['cron'] | n}');
 } else {
     try {
         cron_widget.cron('value', '${newsletter['cron']}');
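
The new help text points at APScheduler's cron trigger documentation. As a hedged sketch of what a custom crontab value maps to, APScheduler 3.x can build a trigger directly from a standard five-field expression:

```python
# Sketch assuming APScheduler 3.x, which the new help text links to;
# from_crontab parses a standard five-field crontab expression.
from apscheduler.triggers.cron import CronTrigger

trigger = CronTrigger.from_crontab('0 7 * * mon')  # every Monday at 07:00
print(trigger)
```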
@@ -1,5 +1,5 @@
 <%
-from six.moves.urllib.parse import urlencode
+from urllib.parse import urlencode
 %>
 <!doctype html>
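
The template drops the six compatibility shim for the Python 3 stdlib import; on Python 3 the two names refer to the same function, so behavior is unchanged:

```python
# The stdlib call the template now imports directly;
# six.moves.urllib.parse.urlencode was just an alias for this on Python 3.
from urllib.parse import urlencode

print(urlencode({'cmd': 'get_newsletter', 'newsletter_id': 1}))
# cmd=get_newsletter&newsletter_id=1
```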
@@ -12,7 +12,7 @@
 <div class="modal-content">
     <div class="modal-header">
         <button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
-        <h4 class="modal-title" id="notifier-config-modal-header">${notifier['agent_label']} Settings <small><span class="notifier_id">(Notifier ID: ${notifier['id']})</span></small></h4>
+        <h4 class="modal-title" id="notifier-config-modal-header">${notifier['agent_label']} Settings <small><span class="notifier_id">(Notifier ID: ${notifier['id']}${' - ' + notifier['friendly_name'] if notifier['friendly_name'] else ''})</span></small></h4>
     </div>
     <div class="modal-body">
         <div class="container-fluid">
@@ -36,7 +36,7 @@ DOCUMENTATION :: END
 %>
 % if data:
 <div class="dashboard-recent-media-row">
-    <div id="recently-added-row-scroller" style="left: 0;">
+    <div id="recently-added-row-scroller">
         <ul class="dashboard-recent-media list-unstyled">
             % for item in data:
             <div class="dashboard-recent-media-instance">
@@ -13,8 +13,6 @@ DOCUMENTATION :: END
 import datetime
 import plexpy
 from plexpy import common, helpers

-scheduled_jobs = [j.id for j in plexpy.SCHED.get_jobs()]
 %>

 <table class="config-scheduler-table small-muted">
@@ -29,16 +27,15 @@ DOCUMENTATION :: END
     </thead>
     <tbody>
         % for job, job_type in common.SCHEDULER_LIST.items():
-        % if job in scheduled_jobs:
         <%
             sched_job = plexpy.SCHED.get_job(job)
-            now = datetime.datetime.now(sched_job.next_run_time.tzinfo)
         %>
+        % if sched_job:
         <tr>
             <td>${sched_job.id}</td>
             <td><i class="fa fa-sm fa-fw fa-check"></i> Active</td>
             <td>${helpers.format_timedelta_Hms(sched_job.trigger.interval)}</td>
-            <td>${helpers.format_timedelta_Hms(sched_job.next_run_time - now)}</td>
+            <td>${helpers.format_timedelta_Hms(sched_job.next_run_time - datetime.datetime.now(sched_job.next_run_time.tzinfo))}</td>
             <td>${sched_job.next_run_time.astimezone(plexpy.SYS_TIMEZONE).strftime('%Y-%m-%d %H:%M:%S')}</td>
         </tr>
         % elif job_type == 'websocket' and plexpy.WS_CONNECTED:
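
Instead of precomputing a list of scheduled job ids, the template now asks the scheduler for each job and renders only when it exists. A minimal sketch of that guard, assuming an APScheduler BackgroundScheduler like the `plexpy.SCHED` object the template reads; the job id here is hypothetical:

```python
# Sketch of the new guard: get_job() returns None for unknown ids,
# so a simple truthiness check replaces the precomputed id list.
import datetime
from apscheduler.schedulers.background import BackgroundScheduler

sched = BackgroundScheduler()
sched.add_job(lambda: None, 'interval', hours=12, id='example_job')  # hypothetical id
sched.start()

job = sched.get_job('example_job')
if job:
    # Compute time-until-next-run inline, as the new template does,
    # instead of caching datetime.now() in a separate variable.
    remaining = job.next_run_time - datetime.datetime.now(job.next_run_time.tzinfo)
    print(job.id, remaining)

sched.shutdown()
```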
@@ -767,7 +767,6 @@
 data-identifier="${config['pms_identifier']}"
 data-ip="${config['pms_ip']}"
 data-port="${config['pms_port']}"
-data-local="${int(not int(config['pms_is_remote']))}"
 data-ssl="${config['pms_ssl']}"
 data-is_cloud="${config['pms_is_cloud']}"
 data-label="${config['pms_name'] or 'Local'}"
@@ -800,13 +799,6 @@
     </label>
     <p class="help-block">Connect to your Plex server using HTTPS if you have <a href="${anon_url('https://support.plex.tv/articles/206225077-how-to-use-secure-server-connections')}" target="_blank" rel="noreferrer">secure connections</a> enabled.</p>
 </div>
-<div class="checkbox">
-    <label>
-        <input type="checkbox" id="pms_is_remote_checkbox" class="checkbox-toggle pms-settings" data-id="pms_is_remote" value="1" ${checked(config['pms_is_remote'])}> Remote Server
-        <input type="hidden" id="pms_is_remote" name="pms_is_remote" value="${config['pms_is_remote']}">
-    </label>
-    <p class="help-block">Check this if your Plex Server is not on the same local network as Tautulli.</p>
-</div>
 <div class="form-group">
     <label for="pms_url">Plex Server URL</label>
     <div class="row">
@@ -2597,7 +2589,6 @@ $(document).ready(function() {
 return '<div data-identifier="' + item.clientIdentifier +
     '" data-ip="' + item.ip +
     '" data-port="' + item.port +
-    '" data-local="' + item.local +
     '" data-ssl="' + item.httpsRequired +
     '" data-is_cloud="' + item.is_cloud +
     '" data-label="' + item.label + '">' +
@@ -2611,7 +2602,6 @@ $(document).ready(function() {
 return '<div data-identifier="' + item.clientIdentifier +
     '" data-ip="' + item.ip +
     '" data-port="' + item.port +
-    '" data-local="' + item.local +
     '" data-ssl="' + item.httpsRequired +
     '" data-is_cloud="' + item.is_cloud +
     '" data-label="' + item.label + '">' +
@@ -2634,7 +2624,6 @@ $(document).ready(function() {
 var identifier = $(pms_ip_selected).data('identifier');
 var ip = $(pms_ip_selected).data('ip');
 var port = $(pms_ip_selected).data('port');
-var local = $(pms_ip_selected).data('local');
 var ssl = $(pms_ip_selected).data('ssl');
 var is_cloud = $(pms_ip_selected).data('is_cloud');
 var value = $(pms_ip_selected).data('value');
@@ -2642,8 +2631,6 @@ $(document).ready(function() {
 $("#pms_identifier").val(identifier !== 'undefined' ? identifier : '');
 $('#pms_ip').val(ip !== 'undefined' ? ip : value);
 $('#pms_port').val(port !== 'undefined' ? port : 32400);
-$('#pms_is_remote_checkbox').prop('checked', (local !== 'undefined' && local === 0));
-$('#pms_is_remote').val(local !== 'undefined' && local === 0 ? 1 : 0);
 $('#pms_ssl_checkbox').prop('checked', (ssl !== 'undefined' && ssl === 1));
 $('#pms_ssl').val(ssl !== 'undefined' && ssl === 1 ? 1 : 0);
 $('#pms_is_cloud').val(is_cloud !== 'undefined' && is_cloud === true ? 1 : 0);
@@ -2681,7 +2668,6 @@ $(document).ready(function() {
 var pms_port = $("#pms_port").val();
 var pms_identifier = $("#pms_identifier").val();
 var pms_ssl = $("#pms_ssl").val();
-var pms_is_remote = $("#pms_is_remote").val();
 var pms_url_manual = $("#pms_url_manual").is(':checked') ? 1 : 0;

 if (($("#pms_ip").val() !== '') || ($("#pms_port").val() !== '')) {
@@ -2693,7 +2679,6 @@ $(document).ready(function() {
 hostname: pms_ip,
 port: pms_port,
 ssl: pms_ssl,
-remote: pms_is_remote,
 manual: pms_url_manual,
 get_url: true,
 test_websocket: true
@@ -68,14 +68,14 @@ DOCUMENTATION :: END
 <table class="stream-info" style="margin-top: 0;">
     <thead>
         <tr>
-            <th>
-            </th>
-            <th class="heading">
-                Stream Details
-            </th>
+            <th></th>
             <th class="heading">
                 Source Details
             </th>
+            <th><i class="fa fa-long-arrow-right"></i></th>
+            <th class="heading">
+                Stream Details
+            </th>
         </tr>
     </thead>
 </table>
@@ -85,38 +85,46 @@ DOCUMENTATION :: END
             <th>
                 Media
             </th>
+            <th></th>
+            <th></th>
+            <th></th>
         </tr>
     </thead>
     <tbody>
         <tr>
             <td>Bitrate</td>
-            <td>${data['stream_bitrate']} ${'kbps' if data['stream_bitrate'] else ''}</td>
             <td>${data['bitrate']} ${'kbps' if data['bitrate'] else ''}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_bitrate']} ${'kbps' if data['stream_bitrate'] else ''}</td>
         </tr>
         % if data['media_type'] != 'track':
         <tr>
             <td>Resolution</td>
-            <td>${data['stream_video_full_resolution']}</td>
             <td>${data['video_full_resolution']}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_video_full_resolution']}</td>
         </tr>
         % endif
         <tr>
             <td>Quality</td>
-            <td>${data['quality_profile']}</td>
             <td>-</td>
+            <td></td>
+            <td>${data['quality_profile']}</td>
         </tr>
         % if data['optimized_version'] == 1:
         <tr>
             <td>Optimized Version</td>
-            <td>-</td>
             <td>${data['optimized_version_profile']}<br>(${data['optimized_version_title']})</td>
+            <td></td>
+            <td>-</td>
         </tr>
         % endif
         % if data['synced_version'] == 1:
         <tr>
             <td>Synced Version</td>
-            <td>-</td>
             <td>${data['synced_version_profile']}</td>
+            <td></td>
+            <td>-</td>
         </tr>
         % endif
     </tbody>
@@ -127,6 +135,8 @@ DOCUMENTATION :: END
 <th>
     Container
 </th>
+<th></th>
+<th></th>
 <th>
     ${data['stream_container_decision']}
 </th>
@@ -135,8 +145,9 @@ DOCUMENTATION :: END
     <tbody>
         <tr>
             <td>Container</td>
-            <td>${data['stream_container'].upper()}</td>
             <td>${data['container'].upper()}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_container'].upper()}</td>
         </tr>
     </tbody>
 </table>
@@ -147,6 +158,8 @@ DOCUMENTATION :: END
 <th>
     Video
 </th>
+<th></th>
+<th></th>
 <th>
     ${data['stream_video_decision']}
 </th>
@@ -155,38 +168,45 @@ DOCUMENTATION :: END
     <tbody>
         <tr>
             <td>Codec</td>
-            <td>${data['stream_video_codec'].upper()} ${'(HW)' if data['transcode_hw_encoding'] else ''}</td>
             <td>${data['video_codec'].upper()} ${'(HW)' if data['transcode_hw_decoding'] else ''}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_video_codec'].upper()} ${'(HW)' if data['transcode_hw_encoding'] else ''}</td>
         </tr>
         <tr>
             <td>Bitrate</td>
-            <td>${data['stream_video_bitrate']} ${'kbps' if data['stream_video_bitrate'] else ''}</td>
             <td>${data['video_bitrate']} ${'kbps' if data['video_bitrate'] else ''}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_video_bitrate']} ${'kbps' if data['stream_video_bitrate'] else ''}</td>
         </tr>
         <tr>
             <td>Width</td>
-            <td>${data['stream_video_width']}</td>
             <td>${data['video_width']}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_video_width']}</td>
         </tr>
         <tr>
             <td>Height</td>
-            <td>${data['stream_video_height']}</td>
             <td>${data['video_height']}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_video_height']}</td>
         </tr>
         <tr>
             <td>Framerate</td>
-            <td>${data['stream_video_framerate']}</td>
             <td>${data['video_framerate']}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_video_framerate']}</td>
         </tr>
         <tr>
             <td>Dynamic Range</td>
-            <td>${data['stream_video_dynamic_range']}</td>
             <td>${data['video_dynamic_range']}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_video_dynamic_range']}</td>
         </tr>
         <tr>
             <td>Aspect Ratio</td>
-            <td>-</td>
             <td>${data['aspect_ratio']}</td>
+            <td></td>
+            <td>-</td>
         </tr>
     </tbody>
 </table>
@@ -197,6 +217,8 @@ DOCUMENTATION :: END
 <th>
     Audio
 </th>
+<th></th>
+<th></th>
 <th>
     ${data['stream_audio_decision']}
 </th>
@@ -205,23 +227,27 @@ DOCUMENTATION :: END
     <tbody>
         <tr>
             <td>Codec</td>
-            <td>${AUDIO_CODEC_OVERRIDES.get(data['stream_audio_codec'], data['stream_audio_codec'].upper())}</td>
             <td>${AUDIO_CODEC_OVERRIDES.get(data['audio_codec'], data['audio_codec'].upper())}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${AUDIO_CODEC_OVERRIDES.get(data['stream_audio_codec'], data['stream_audio_codec'].upper())}</td>
         </tr>
         <tr>
             <td>Bitrate</td>
-            <td>${data['stream_audio_bitrate']} ${'kbps' if data['stream_audio_bitrate'] else ''}</td>
             <td>${data['audio_bitrate']} ${'kbps' if data['audio_bitrate'] else ''}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_audio_bitrate']} ${'kbps' if data['stream_audio_bitrate'] else ''}</td>
         </tr>
         <tr>
             <td>Channels</td>
-            <td>${data['stream_audio_channels']}</td>
             <td>${data['audio_channels']}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_audio_channels']}</td>
         </tr>
         <tr>
             <td>Language</td>
-            <td>-</td>
             <td>${data['audio_language'] or 'Unknown'}</td>
+            <td></td>
+            <td>-</td>
         </tr>

     </tbody>
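
The codec cells fall back to an upper-cased raw codec name when no friendly override exists. A small sketch of that lookup; the mapping values here are hypothetical stand-ins for Tautulli's AUDIO_CODEC_OVERRIDES:

```python
# Sketch of the override lookup used in the codec cells; dict.get's
# second argument supplies the fallback when no override is defined.
AUDIO_CODEC_OVERRIDES = {'truehd': 'TrueHD', 'eac3': 'EAC3'}  # hypothetical values

def codec_label(codec):
    return AUDIO_CODEC_OVERRIDES.get(codec, codec.upper())

print(codec_label('truehd'))  # TrueHD
print(codec_label('aac'))     # AAC
```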
@@ -233,6 +259,8 @@ DOCUMENTATION :: END
 <th>
     Subtitles
 </th>
+<th></th>
+<th></th>
 <th>
     ${'direct play' if data['stream_subtitle_decision'] not in ('transcode', 'copy', 'burn') else data['stream_subtitle_decision']}
 </th>
@@ -241,19 +269,22 @@ DOCUMENTATION :: END
     <tbody>
         <tr>
             <td>Codec</td>
-            <td>${data['stream_subtitle_codec'].upper() or '-'}</td>
             <td>${data['subtitle_codec'].upper()}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_subtitle_codec'].upper() or '-'}</td>
         </tr>
         <tr>
             <td>Language</td>
-            <td>-</td>
             <td>${data['subtitle_language'] or 'Unknown'}</td>
+            <td></td>
+            <td>-</td>
         </tr>
         % if data['subtitle_forced']:
         <tr>
             <td>Forced</td>
-            <td>-</td>
             <td>${bool(data['subtitle_forced'])}</td>
+            <td></td>
+            <td>-</td>
         </tr>
         % endif
     </tbody>
@@ -125,10 +125,10 @@ DOCUMENTATION :: END
 <div class="table-card-header">
     <ul class="nav nav-header nav-dashboard pull-right">
         <li>
-            <a href="#" id="recently-watched-page-left" class="paginate btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a>
+            <a href="#" id="recently-watched-page-left" class="paginate-watched btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
         </li>
         <li>
-            <a href="#" id="recently-watched-page-right" class="paginate btn-gray" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a>
+            <a href="#" id="recently-watched-page-right" class="paginate-watched btn-gray" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
         </li>
     </ul>
     <div class="header-bar">
@@ -666,52 +666,14 @@ DOCUMENTATION :: END
         },
         complete: function(xhr, status) {
             $("#user-recently-watched").html(xhr.responseText);
-            highlightWatchedScrollerButton();
+            highlightScrollerButton("#recently-watched");
+            paginateScroller("#recently-watched", ".paginate-watched");
         }
     });
 }

 recentlyWatched();

-function highlightWatchedScrollerButton() {
-    var scroller = $("#recently-watched-row-scroller");
-    var numElems = scroller.find("li").length;
-    scroller.width(numElems * 175);
-    if (scroller.width() > $("#user-recently-watched").width()) {
-        $("#recently-watched-page-right").removeClass("disabled");
-    } else {
-        $("#recently-watched-page-right").addClass("disabled");
-    }
-}
-
-$(window).resize(function() {
-    highlightWatchedScrollerButton();
-});
-
-var leftTotal = 0;
-$(".paginate").click(function (e) {
-    e.preventDefault();
-    var scroller = $("#recently-watched-row-scroller");
-    var containerWidth = $("#user-recently-watched").width();
-    var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
-    var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
-
-    leftTotal = Math.max(Math.min(leftTotal + scrollAmount, 0), leftMax);
-    scroller.animate({ left: leftTotal }, 250);
-
-    if (leftTotal == 0) {
-        $("#recently-watched-page-left").addClass("disabled").blur();
-    } else {
-        $("#recently-watched-page-left").removeClass("disabled");
-    }
-
-    if (leftTotal == leftMax) {
-        $("#recently-watched-page-right").addClass("disabled").blur();
-    } else {
-        $("#recently-watched-page-right").removeClass("disabled");
-    }
-});
-
 $(document).ready(function () {
     // Javascript to enable link to tab
     var hash = document.location.hash;
@@ -31,7 +31,7 @@ DOCUMENTATION :: END
 from plexpy.helpers import page, short_season
 %>
 <div class="dashboard-recent-media-row">
-    <div id="recently-watched-row-scroller" style="left: 0;">
+    <div id="recently-watched-row-scroller">
         <ul class="dashboard-recent-media list-unstyled">
             % for item in data:
             <li>
@@ -135,7 +135,6 @@
 data-identifier="${config['pms_identifier']}"
 data-ip="${config['pms_ip']}"
 data-port="${config['pms_port']}"
-data-local="${int(not int(config['pms_is_remote']))}"
 data-ssl="${config['pms_ssl']}"
 data-is_cloud="${config['pms_is_cloud']}"
 data-label="${config['pms_name'] or 'Local'}"
@@ -151,7 +150,7 @@
 <div class="col-xs-3">
     <input type="text" class="form-control pms-settings" name="pms_port" id="pms_port" placeholder="32400" value="${config['pms_port']}" required>
 </div>
-<div class="col-xs-4">
+<div class="col-xs-9">
     <div class="checkbox">
         <label>
             <input type="checkbox" id="pms_ssl_checkbox" class="checkbox-toggle pms-settings" data-id="pms_ssl" value="1" ${helpers.checked(config['pms_ssl'])}> Use Secure Connection
@@ -159,14 +158,6 @@
             </label>
         </div>
     </div>
-    <div class="col-xs-4">
-        <div class="checkbox">
-            <label>
-                <input type="checkbox" id="pms_is_remote_checkbox" class="checkbox-toggle pms-settings" data-id="pms_is_remote" value="1" ${helpers.checked(config['pms_is_remote'])}> Remote Server
-                <input type="hidden" id="pms_is_remote" name="pms_is_remote" value="${config['pms_is_remote']}">
-            </label>
-        </div>
-    </div>
 </div>
 </div>
 <input type="hidden" id="pms_valid" data-validate="validatePMSip" value="">
@@ -391,7 +382,6 @@ $(document).ready(function() {
 return '<div data-identifier="' + item.clientIdentifier +
     '" data-ip="' + item.ip +
     '" data-port="' + item.port +
-    '" data-local="' + item.local +
     '" data-ssl="' + item.httpsRequired +
     '" data-is_cloud="' + item.is_cloud +
     '" data-label="' + item.label + '">' +
@@ -405,7 +395,6 @@ $(document).ready(function() {
 return '<div data-identifier="' + item.clientIdentifier +
     '" data-ip="' + item.ip +
     '" data-port="' + item.port +
-    '" data-local="' + item.local +
     '" data-ssl="' + item.httpsRequired +
     '" data-is_cloud="' + item.is_cloud +
     '" data-label="' + item.label + '">' +
@@ -428,7 +417,6 @@ $(document).ready(function() {
 var identifier = $(pms_ip_selected).data('identifier');
 var ip = $(pms_ip_selected).data('ip');
 var port = $(pms_ip_selected).data('port');
-var local = $(pms_ip_selected).data('local');
 var ssl = $(pms_ip_selected).data('ssl');
 var is_cloud = $(pms_ip_selected).data('is_cloud');
 var value = $(pms_ip_selected).data('value');
@@ -439,19 +427,15 @@ $(document).ready(function() {
 $("#pms_identifier").val(identifier !== 'undefined' ? identifier : '');
 $('#pms_ip').val(ip !== 'undefined' ? ip : value);
 $('#pms_port').val(port !== 'undefined' ? port : 32400);
-$('#pms_is_remote_checkbox').prop('checked', (local !== 'undefined' && local === 0));
-$('#pms_is_remote').val(local !== 'undefined' && local === 0 ? 1 : 0);
 $('#pms_ssl_checkbox').prop('checked', (ssl !== 'undefined' && ssl === 1));
 $('#pms_ssl').val(ssl !== 'undefined' && ssl === 1 ? 1 : 0);
 $('#pms_is_cloud').val(is_cloud !== 'undefined' && is_cloud === true ? 1 : 0);

 if (is_cloud === true) {
     $('#pms_port').prop('readonly', true);
-    $('#pms_is_remote_checkbox').prop('disabled', true);
     $('#pms_ssl_checkbox').prop('disabled', true);
 } else {
     $('#pms_port').prop('readonly', false);
-    $('#pms_is_remote_checkbox').prop('disabled', false);
     $('#pms_ssl_checkbox').prop('disabled', false);
 }
 },
@@ -488,7 +472,6 @@ $(document).ready(function() {
 var pms_port = $("#pms_port").val().trim();
 var pms_identifier = $("#pms_identifier").val();
 var pms_ssl = $("#pms_ssl").val();
-var pms_is_remote = $("#pms_is_remote").val();
 if ((pms_ip !== '') || (pms_port !== '')) {
     $("#pms-verify-status").html('<i class="fa fa-refresh fa-spin"></i> Verifying server...');
     $('#pms-verify-status').fadeIn('fast');

@@ -498,8 +481,7 @@ $(document).ready(function() {
     hostname: pms_ip,
     port: pms_port,
     identifier: pms_identifier,
-    ssl: pms_ssl,
-    remote: pms_is_remote
+    ssl: pms_ssl
 },
 cache: true,
 async: true,
@@ -1,979 +0,0 @@
-# -*- coding: utf-8 -*-
-"""A port of Python 3's csv module to Python 2.
-
-The API of the csv module in Python 2 is drastically different from
-the csv module in Python 3. This is due, for the most part, to the
-difference between str in Python 2 and Python 3.
-
-The semantics of Python 3's version are more useful because they support
-unicode natively, while Python 2's csv does not.
-"""
-from __future__ import unicode_literals, absolute_import
-
-__all__ = [ "QUOTE_MINIMAL", "QUOTE_ALL", "QUOTE_NONNUMERIC", "QUOTE_NONE",
-            "Error", "Dialect", "__doc__", "excel", "excel_tab",
-            "field_size_limit", "reader", "writer",
-            "register_dialect", "get_dialect", "list_dialects", "Sniffer",
-            "unregister_dialect", "__version__", "DictReader", "DictWriter" ]
-
-import re
-import numbers
-from io import StringIO
-from csv import (
-    QUOTE_MINIMAL, QUOTE_ALL, QUOTE_NONNUMERIC, QUOTE_NONE,
-    __version__, __doc__, Error, field_size_limit,
-)
-
-# Stuff needed from six
-import sys
-PY3 = sys.version_info[0] == 3
-if PY3:
-    string_types = str
-    text_type = str
-    binary_type = bytes
-    unichr = chr
-else:
-    string_types = basestring
-    text_type = unicode
-    binary_type = str
-
-
-class QuoteStrategy(object):
-    quoting = None
-
-    def __init__(self, dialect):
-        if self.quoting is not None:
-            assert dialect.quoting == self.quoting
-        self.dialect = dialect
-        self.setup()
-
-        escape_pattern_quoted = r'({quotechar})'.format(
-            quotechar=re.escape(self.dialect.quotechar or '"'))
-        escape_pattern_unquoted = r'([{specialchars}])'.format(
-            specialchars=re.escape(self.specialchars))
-
-        self.escape_re_quoted = re.compile(escape_pattern_quoted)
-        self.escape_re_unquoted = re.compile(escape_pattern_unquoted)
-
-    def setup(self):
-        """Optional method for strategy-wide optimizations."""
-
-    def quoted(self, field=None, raw_field=None, only=None):
-        """Determine whether this field should be quoted."""
-        raise NotImplementedError(
-            'quoted must be implemented by a subclass')
-
-    @property
-    def specialchars(self):
-        """The special characters that need to be escaped."""
-        raise NotImplementedError(
-            'specialchars must be implemented by a subclass')
-
-    def escape_re(self, quoted=None):
-        if quoted:
-            return self.escape_re_quoted
-        return self.escape_re_unquoted
-
-    def escapechar(self, quoted=None):
-        if quoted and self.dialect.doublequote:
-            return self.dialect.quotechar
-        return self.dialect.escapechar
-
-    def prepare(self, raw_field, only=None):
-        field = text_type(raw_field if raw_field is not None else '')
-        quoted = self.quoted(field=field, raw_field=raw_field, only=only)
-
-        escape_re = self.escape_re(quoted=quoted)
-        escapechar = self.escapechar(quoted=quoted)
-
-        if escape_re.search(field):
-            escapechar = '\\\\' if escapechar == '\\' else escapechar
-            if not escapechar:
-                raise Error('No escapechar is set')
-            escape_replace = r'{escapechar}\1'.format(escapechar=escapechar)
-            field = escape_re.sub(escape_replace, field)
-
-        if quoted:
-            field = '{quotechar}{field}{quotechar}'.format(
-                quotechar=self.dialect.quotechar, field=field)
-
-        return field
-
-
-class QuoteMinimalStrategy(QuoteStrategy):
-    quoting = QUOTE_MINIMAL
-
-    def setup(self):
-        self.quoted_re = re.compile(r'[{specialchars}]'.format(
-            specialchars=re.escape(self.specialchars)))
-
-    @property
-    def specialchars(self):
-        return (
-            self.dialect.lineterminator +
-            self.dialect.quotechar +
-            self.dialect.delimiter +
-            (self.dialect.escapechar or '')
-        )
-
-    def quoted(self, field, only, **kwargs):
-        if field == self.dialect.quotechar and not self.dialect.doublequote:
-            # If the only character in the field is the quotechar, and
-            # doublequote is false, then just escape without outer quotes.
-            return False
-        return field == '' and only or bool(self.quoted_re.search(field))
-
-
-class QuoteAllStrategy(QuoteStrategy):
-    quoting = QUOTE_ALL
-
-    @property
-    def specialchars(self):
-        return self.dialect.quotechar
-
-    def quoted(self, **kwargs):
-        return True
-
-
-class QuoteNonnumericStrategy(QuoteStrategy):
-    quoting = QUOTE_NONNUMERIC
-
-    @property
-    def specialchars(self):
-        return (
-            self.dialect.lineterminator +
-            self.dialect.quotechar +
-            self.dialect.delimiter +
-            (self.dialect.escapechar or '')
-        )
-
-    def quoted(self, raw_field, **kwargs):
-        return not isinstance(raw_field, numbers.Number)
-
-
-class QuoteNoneStrategy(QuoteStrategy):
-    quoting = QUOTE_NONE
-
-    @property
-    def specialchars(self):
-        return (
-            self.dialect.lineterminator +
-            (self.dialect.quotechar or '') +
-            self.dialect.delimiter +
-            (self.dialect.escapechar or '')
-        )
-
-    def quoted(self, field, only, **kwargs):
-        if field == '' and only:
-            raise Error('single empty field record must be quoted')
-        return False
-
-
-class writer(object):
-    def __init__(self, fileobj, dialect='excel', **fmtparams):
-        if fileobj is None:
-            raise TypeError('fileobj must be file-like, not None')
-
-        self.fileobj = fileobj
-
-        if isinstance(dialect, text_type):
-            dialect = get_dialect(dialect)
-
-        try:
-            self.dialect = Dialect.combine(dialect, fmtparams)
-        except Error as e:
-            raise TypeError(*e.args)
-
-        strategies = {
-            QUOTE_MINIMAL: QuoteMinimalStrategy,
-            QUOTE_ALL: QuoteAllStrategy,
-            QUOTE_NONNUMERIC: QuoteNonnumericStrategy,
-            QUOTE_NONE: QuoteNoneStrategy,
-        }
-        self.strategy = strategies[self.dialect.quoting](self.dialect)
-
-    def writerow(self, row):
-        if row is None:
-            raise Error('row must be an iterable')
-
-        row = list(row)
-        only = len(row) == 1
-        row = [self.strategy.prepare(field, only=only) for field in row]
-
-        line = self.dialect.delimiter.join(row) + self.dialect.lineterminator
-        return self.fileobj.write(line)
-
-    def writerows(self, rows):
-        for row in rows:
-            self.writerow(row)
-
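This deleted backport mirrors Python 3's stdlib csv.writer, which the codebase can now use directly. A quick demonstration of the QUOTE_MINIMAL behavior the strategy classes above implement:

```python
# QUOTE_MINIMAL quotes only fields containing the delimiter, quotechar,
# or a line terminator; doubled quotes escape embedded quotechars.
import csv
import io

buf = io.StringIO()
w = csv.writer(buf, quoting=csv.QUOTE_MINIMAL)
w.writerow(['plain', 'has,comma', 'has "quote"'])
print(buf.getvalue())  # plain,"has,comma","has ""quote"""
```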
-START_RECORD = 0
-START_FIELD = 1
-ESCAPED_CHAR = 2
-IN_FIELD = 3
-IN_QUOTED_FIELD = 4
-ESCAPE_IN_QUOTED_FIELD = 5
-QUOTE_IN_QUOTED_FIELD = 6
-EAT_CRNL = 7
-AFTER_ESCAPED_CRNL = 8
-
-
-class reader(object):
-    def __init__(self, fileobj, dialect='excel', **fmtparams):
-        self.input_iter = iter(fileobj)
-
-        if isinstance(dialect, text_type):
-            dialect = get_dialect(dialect)
-
-        try:
-            self.dialect = Dialect.combine(dialect, fmtparams)
-        except Error as e:
-            raise TypeError(*e.args)
-
-        self.fields = None
-        self.field = None
-        self.line_num = 0
-
-    def parse_reset(self):
-        self.fields = []
-        self.field = []
-        self.state = START_RECORD
-        self.numeric_field = False
-
-    def parse_save_field(self):
-        field = ''.join(self.field)
-        self.field = []
-        if self.numeric_field:
-            field = float(field)
-            self.numeric_field = False
-        self.fields.append(field)
-
-    def parse_add_char(self, c):
-        if len(self.field) >= field_size_limit():
-            raise Error('field size limit exceeded')
-        self.field.append(c)
-
-    def parse_process_char(self, c):
-        switch = {
-            START_RECORD: self._parse_start_record,
-            START_FIELD: self._parse_start_field,
-            ESCAPED_CHAR: self._parse_escaped_char,
-            AFTER_ESCAPED_CRNL: self._parse_after_escaped_crnl,
-            IN_FIELD: self._parse_in_field,
-            IN_QUOTED_FIELD: self._parse_in_quoted_field,
-            ESCAPE_IN_QUOTED_FIELD: self._parse_escape_in_quoted_field,
-            QUOTE_IN_QUOTED_FIELD: self._parse_quote_in_quoted_field,
-            EAT_CRNL: self._parse_eat_crnl,
-        }
-        return switch[self.state](c)
-
-    def _parse_start_record(self, c):
-        if c == '\0':
-            return
-        elif c == '\n' or c == '\r':
-            self.state = EAT_CRNL
-            return
-
-        self.state = START_FIELD
-        return self._parse_start_field(c)
-
-    def _parse_start_field(self, c):
-        if c == '\n' or c == '\r' or c == '\0':
-            self.parse_save_field()
-            self.state = START_RECORD if c == '\0' else EAT_CRNL
-        elif (c == self.dialect.quotechar and
-                self.dialect.quoting != QUOTE_NONE):
-            self.state = IN_QUOTED_FIELD
-        elif c == self.dialect.escapechar:
-            self.state = ESCAPED_CHAR
-        elif c == ' ' and self.dialect.skipinitialspace:
-            pass  # Ignore space at start of field
-        elif c == self.dialect.delimiter:
-            # Save empty field
-            self.parse_save_field()
-        else:
-            # Begin new unquoted field
-            if self.dialect.quoting == QUOTE_NONNUMERIC:
-                self.numeric_field = True
-            self.parse_add_char(c)
-            self.state = IN_FIELD
-
-    def _parse_escaped_char(self, c):
-        if c == '\n' or c == '\r':
-            self.parse_add_char(c)
-            self.state = AFTER_ESCAPED_CRNL
-            return
-        if c == '\0':
-            c = '\n'
-        self.parse_add_char(c)
-        self.state = IN_FIELD
-
-    def _parse_after_escaped_crnl(self, c):
-        if c == '\0':
-            return
-        return self._parse_in_field(c)
-
-    def _parse_in_field(self, c):
-        # In unquoted field
-        if c == '\n' or c == '\r' or c == '\0':
-            # End of line - return [fields]
-            self.parse_save_field()
-            self.state = START_RECORD if c == '\0' else EAT_CRNL
-        elif c == self.dialect.escapechar:
-            self.state = ESCAPED_CHAR
-        elif c == self.dialect.delimiter:
-            self.parse_save_field()
-            self.state = START_FIELD
-        else:
-            # Normal character - save in field
-            self.parse_add_char(c)
-
-    def _parse_in_quoted_field(self, c):
-        if c == '\0':
-            pass
-        elif c == self.dialect.escapechar:
-            self.state = ESCAPE_IN_QUOTED_FIELD
-        elif (c == self.dialect.quotechar and
-                self.dialect.quoting != QUOTE_NONE):
-            if self.dialect.doublequote:
-                self.state = QUOTE_IN_QUOTED_FIELD
-            else:
-                self.state = IN_FIELD
-        else:
-            self.parse_add_char(c)
-
-    def _parse_escape_in_quoted_field(self, c):
-        if c == '\0':
-            c = '\n'
-
-        self.parse_add_char(c)
-        self.state = IN_QUOTED_FIELD
-
-    def _parse_quote_in_quoted_field(self, c):
-        if (self.dialect.quoting != QUOTE_NONE and
-                c == self.dialect.quotechar):
-            # save "" as "
-            self.parse_add_char(c)
-            self.state = IN_QUOTED_FIELD
-        elif c == self.dialect.delimiter:
-            self.parse_save_field()
-            self.state = START_FIELD
-        elif c == '\n' or c == '\r' or c == '\0':
-            # End of line = return [fields]
-            self.parse_save_field()
-            self.state = START_RECORD if c == '\0' else EAT_CRNL
-        elif not self.dialect.strict:
-            self.parse_add_char(c)
-            self.state = IN_FIELD
-        else:
-            # illegal
-            raise Error("{delimiter}' expected after '{quotechar}".format(
-                delimiter=self.dialect.delimiter,
-                quotechar=self.dialect.quotechar,
-            ))
-
-    def _parse_eat_crnl(self, c):
-        if c == '\n' or c == '\r':
-            pass
-        elif c == '\0':
-            self.state = START_RECORD
-        else:
-            raise Error('new-line character seen in unquoted field - do you '
-                        'need to open the file in universal-newline mode?')
-
-
-    def __iter__(self):
-        return self
-
-    def __next__(self):
-        self.parse_reset()
-
-        while True:
-            try:
-                lineobj = next(self.input_iter)
-            except StopIteration:
-                if len(self.field) != 0 or self.state == IN_QUOTED_FIELD:
-                    if self.dialect.strict:
-                        raise Error('unexpected end of data')
-                    self.parse_save_field()
-                    if self.fields:
-                        break
-                raise
-
-            if not isinstance(lineobj, text_type):
-                typ = type(lineobj)
-                typ_name = 'bytes' if typ == bytes else typ.__name__
-                err_str = ('iterator should return strings, not {0}'
-                           ' (did you open the file in text mode?)')
-                raise Error(err_str.format(typ_name))
-
-            self.line_num += 1
-            for c in lineobj:
-                if c == '\0':
-                    raise Error('line contains NULL byte')
-                self.parse_process_char(c)
-
-            self.parse_process_char('\0')
-
-            if self.state == START_RECORD:
-                break
-
-        fields = self.fields
-        self.fields = None
-        return fields
-
-    next = __next__
-
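The per-character state machine above is what Python 3's built-in csv.reader implements in C. A short check of the trickiest transition, QUOTE_IN_QUOTED_FIELD, where a doubled quotechar inside a quoted field collapses to a single quote:

```python
# Stdlib csv.reader handling a quoted delimiter and a doubled quote.
import csv
import io

rows = list(csv.reader(io.StringIO('a,"b,c","d""e"\r\n')))
print(rows)  # [['a', 'b,c', 'd"e']]
```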
-_dialect_registry = {}
-def register_dialect(name, dialect='excel', **fmtparams):
-    if not isinstance(name, text_type):
-        raise TypeError('"name" must be a string')
-
-    dialect = Dialect.extend(dialect, fmtparams)
-
-    try:
-        Dialect.validate(dialect)
-    except:
-        raise TypeError('dialect is invalid')
-
-    assert name not in _dialect_registry
-    _dialect_registry[name] = dialect
-
-def unregister_dialect(name):
-    try:
-        _dialect_registry.pop(name)
-    except KeyError:
-        raise Error('"{name}" not a registered dialect'.format(name=name))
-
-def get_dialect(name):
-    try:
-        return _dialect_registry[name]
-    except KeyError:
-        raise Error('Could not find dialect {0}'.format(name))
-
-def list_dialects():
-    return list(_dialect_registry)
-
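These registry functions mirror the stdlib API of the same names; a registered dialect becomes usable by name in reader() and writer():

```python
# Registering and using a custom dialect with the stdlib csv module.
import csv
import io

csv.register_dialect('pipes', delimiter='|')
buf = io.StringIO()
csv.writer(buf, dialect='pipes').writerow(['a', 'b', 'c'])
print(buf.getvalue())                   # a|b|c
print('pipes' in csv.list_dialects())   # True
```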
-class Dialect(object):
-    """Describe a CSV dialect.
-    This must be subclassed (see csv.excel). Valid attributes are:
-    delimiter, quotechar, escapechar, doublequote, skipinitialspace,
-    lineterminator, quoting, strict.
-    """
-    _name = ""
-    _valid = False
-    # placeholders
-    delimiter = None
-    quotechar = None
-    escapechar = None
-    doublequote = None
-    skipinitialspace = None
-    lineterminator = None
-    quoting = None
-    strict = None
-
-    def __init__(self):
-        self.validate(self)
-        if self.__class__ != Dialect:
-            self._valid = True
-
-    @classmethod
-    def validate(cls, dialect):
-        dialect = cls.extend(dialect)
-
-        if not isinstance(dialect.quoting, int):
-            raise Error('"quoting" must be an integer')
-
-        if dialect.delimiter is None:
-            raise Error('delimiter must be set')
-        cls.validate_text(dialect, 'delimiter')
-
-        if dialect.lineterminator is None:
-            raise Error('lineterminator must be set')
-        if not isinstance(dialect.lineterminator, text_type):
-            raise Error('"lineterminator" must be a string')
-
-        if dialect.quoting not in [
-                QUOTE_NONE, QUOTE_MINIMAL, QUOTE_NONNUMERIC, QUOTE_ALL]:
-            raise Error('Invalid quoting specified')
-
-        if dialect.quoting != QUOTE_NONE:
-            if dialect.quotechar is None and dialect.escapechar is None:
-                raise Error('quotechar must be set if quoting enabled')
-            if dialect.quotechar is not None:
-                cls.validate_text(dialect, 'quotechar')
-
-    @staticmethod
-    def validate_text(dialect, attr):
-        val = getattr(dialect, attr)
-        if not isinstance(val, text_type):
-            if type(val) == bytes:
-                raise Error('"{0}" must be string, not bytes'.format(attr))
-            raise Error('"{0}" must be string, not {1}'.format(
-                attr, type(val).__name__))
-
-        if len(val) != 1:
-            raise Error('"{0}" must be a 1-character string'.format(attr))
-
-    @staticmethod
-    def defaults():
-        return {
-            'delimiter': ',',
-            'doublequote': True,
-            'escapechar': None,
-            'lineterminator': '\r\n',
-            'quotechar': '"',
-            'quoting': QUOTE_MINIMAL,
-            'skipinitialspace': False,
-            'strict': False,
-        }
-
-    @classmethod
-    def extend(cls, dialect, fmtparams=None):
-        if isinstance(dialect, string_types):
-            dialect = get_dialect(dialect)
-
-        if fmtparams is None:
-            return dialect
-
-        defaults = cls.defaults()
-
-        if any(param not in defaults for param in fmtparams):
-            raise TypeError('Invalid fmtparam')
-
-        specified = dict(
-            (attr, getattr(dialect, attr, None))
-            for attr in cls.defaults()
-        )
-
-        specified.update(fmtparams)
-        return type(str('ExtendedDialect'), (cls,), specified)
-
-    @classmethod
-    def combine(cls, dialect, fmtparams):
-        """Create a new dialect with defaults and added parameters."""
-        dialect = cls.extend(dialect, fmtparams)
-        defaults = cls.defaults()
-        specified = dict(
-            (attr, getattr(dialect, attr, None))
-            for attr in defaults
-            if getattr(dialect, attr, None) is not None or
-            attr in ['quotechar', 'delimiter', 'lineterminator', 'quoting']
-        )
-
-        defaults.update(specified)
-        dialect = type(str('CombinedDialect'), (cls,), defaults)
-        cls.validate(dialect)
-        return dialect()
-
-    def __delattr__(self, attr):
-        if self._valid:
-            raise AttributeError('dialect is immutable.')
-        super(Dialect, self).__delattr__(attr)
-
-    def __setattr__(self, attr, value):
-        if self._valid:
-            raise AttributeError('dialect is immutable.')
-        super(Dialect, self).__setattr__(attr, value)
-
-class excel(Dialect):
-    """Describe the usual properties of Excel-generated CSV files."""
-    delimiter = ','
-    quotechar = '"'
-    doublequote = True
-    skipinitialspace = False
-    lineterminator = '\r\n'
-    quoting = QUOTE_MINIMAL
-register_dialect("excel", excel)
-
-class excel_tab(excel):
-    """Describe the usual properties of Excel-generated TAB-delimited files."""
-    delimiter = '\t'
-register_dialect("excel-tab", excel_tab)
-
-class unix_dialect(Dialect):
-    """Describe the usual properties of Unix-generated CSV files."""
-    delimiter = ','
-    quotechar = '"'
-    doublequote = True
-    skipinitialspace = False
-    lineterminator = '\n'
-    quoting = QUOTE_ALL
-register_dialect("unix", unix_dialect)
-
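The three built-in dialects above ship with the stdlib under the same names; "unix" differs from the default "excel" by quoting every field and using a bare newline terminator:

```python
# Built-in "unix" dialect: QUOTE_ALL plus '\n' line endings.
import csv
import io

buf = io.StringIO()
csv.writer(buf, dialect='unix').writerow(['a', 'b'])
print(repr(buf.getvalue()))  # '"a","b"\n'
```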
-class DictReader(object):
-    def __init__(self, f, fieldnames=None, restkey=None, restval=None,
-                 dialect="excel", *args, **kwds):
-        self._fieldnames = fieldnames   # list of keys for the dict
-        self.restkey = restkey          # key to catch long rows
-        self.restval = restval          # default value for short rows
-        self.reader = reader(f, dialect, *args, **kwds)
-        self.dialect = dialect
-        self.line_num = 0
-
-    def __iter__(self):
-        return self
-
-    @property
-    def fieldnames(self):
-        if self._fieldnames is None:
-            try:
-                self._fieldnames = next(self.reader)
-            except StopIteration:
-                pass
-        self.line_num = self.reader.line_num
-        return self._fieldnames
-
-    @fieldnames.setter
-    def fieldnames(self, value):
-        self._fieldnames = value
-
-    def __next__(self):
-        if self.line_num == 0:
-            # Used only for its side effect.
-            self.fieldnames
-        row = next(self.reader)
-        self.line_num = self.reader.line_num
-
-        # unlike the basic reader, we prefer not to return blanks,
-        # because we will typically wind up with a dict full of None
-        # values
-        while row == []:
-            row = next(self.reader)
-        d = dict(zip(self.fieldnames, row))
-        lf = len(self.fieldnames)
-        lr = len(row)
-        if lf < lr:
-            d[self.restkey] = row[lf:]
-        elif lf > lr:
-            for key in self.fieldnames[lr:]:
-                d[key] = self.restval
-        return d
-
-    next = __next__
-
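Stdlib equivalent of the DictReader above: the first row becomes the fieldnames, and short rows are padded with restval:

```python
# DictReader pads missing trailing fields with restval.
import csv
import io

data = io.StringIO('name,plays\r\nMovie A,3\r\nMovie B\r\n')
for row in csv.DictReader(data, restval=0):
    print(row['name'], row['plays'])
# Movie A 3
# Movie B 0
```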
class DictWriter(object):
    def __init__(self, f, fieldnames, restval="", extrasaction="raise",
                 dialect="excel", *args, **kwds):
        self.fieldnames = fieldnames    # list of keys for the dict
        self.restval = restval          # for writing short dicts
        if extrasaction.lower() not in ("raise", "ignore"):
            raise ValueError("extrasaction (%s) must be 'raise' or 'ignore'"
                             % extrasaction)
        self.extrasaction = extrasaction
        self.writer = writer(f, dialect, *args, **kwds)

    def writeheader(self):
        header = dict(zip(self.fieldnames, self.fieldnames))
        self.writerow(header)

    def _dict_to_list(self, rowdict):
        if self.extrasaction == "raise":
            wrong_fields = [k for k in rowdict if k not in self.fieldnames]
            if wrong_fields:
                raise ValueError("dict contains fields not in fieldnames: "
                                 + ", ".join([repr(x) for x in wrong_fields]))
        return (rowdict.get(key, self.restval) for key in self.fieldnames)

    def writerow(self, rowdict):
        return self.writer.writerow(self._dict_to_list(rowdict))

    def writerows(self, rowdicts):
        return self.writer.writerows(map(self._dict_to_list, rowdicts))

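A matching sketch for the writing side, not part of the diff; the fieldnames and rows are invented:

    out = StringIO()
    w = DictWriter(out, fieldnames=["name", "age"])
    w.writeheader()
    w.writerows([{"name": "alice", "age": 30}, {"name": "bob", "age": 25}])
    # out.getvalue() -> 'name,age\r\nalice,30\r\nbob,25\r\n'
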
# Guard Sniffer's type checking against builds that exclude complex()
try:
    complex
except NameError:
    complex = float


class Sniffer(object):
    '''
    "Sniffs" the format of a CSV file (i.e. delimiter, quotechar)
    Returns a Dialect object.
    '''
    def __init__(self):
        # in case there is more than one possible delimiter
        self.preferred = [',', '\t', ';', ' ', ':']

    def sniff(self, sample, delimiters=None):
        """
        Returns a dialect (or None) corresponding to the sample
        """

        quotechar, doublequote, delimiter, skipinitialspace = \
                   self._guess_quote_and_delimiter(sample, delimiters)
        if not delimiter:
            delimiter, skipinitialspace = self._guess_delimiter(sample,
                                                                delimiters)

        if not delimiter:
            raise Error("Could not determine delimiter")

        class dialect(Dialect):
            _name = "sniffed"
            lineterminator = '\r\n'
            quoting = QUOTE_MINIMAL
            # escapechar = ''

        dialect.doublequote = doublequote
        dialect.delimiter = delimiter
        # _csv.reader won't accept a quotechar of ''
        dialect.quotechar = quotechar or '"'
        dialect.skipinitialspace = skipinitialspace

        return dialect

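A short sketch of sniff() in action, not part of the diff; the sample text is invented and the printed values are the expected outcome:

    sniffer = Sniffer()
    dialect = sniffer.sniff("a;'b;c';d\n1;'2;3';4\n")
    print(dialect.delimiter, dialect.quotechar)  # expected: ; and '
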
    def _guess_quote_and_delimiter(self, data, delimiters):
        """
        Looks for text enclosed between two identical quotes
        (the probable quotechar) which are preceded and followed
        by the same character (the probable delimiter).
        For example:
                         ,'some text',
        The quote with the most wins, same with the delimiter.
        If there is no quotechar the delimiter can't be determined
        this way.
        """

        matches = []
        for restr in ('(?P<delim>[^\w\n"\'])(?P<space> ?)(?P<quote>["\']).*?(?P=quote)(?P=delim)', # ,".*?",
                      '(?:^|\n)(?P<quote>["\']).*?(?P=quote)(?P<delim>[^\w\n"\'])(?P<space> ?)',   #  ".*?",
                      '(?P<delim>[^\w\n"\'])(?P<space> ?)(?P<quote>["\']).*?(?P=quote)(?:$|\n)',   # ,".*?"
                      '(?:^|\n)(?P<quote>["\']).*?(?P=quote)(?:$|\n)'):                            #  ".*?" (no delim, no space)
            regexp = re.compile(restr, re.DOTALL | re.MULTILINE)
            matches = regexp.findall(data)
            if matches:
                break

        if not matches:
            # (quotechar, doublequote, delimiter, skipinitialspace)
            return ('', False, None, 0)
        quotes = {}
        delims = {}
        spaces = 0
        groupindex = regexp.groupindex
        for m in matches:
            n = groupindex['quote'] - 1
            key = m[n]
            if key:
                quotes[key] = quotes.get(key, 0) + 1
            try:
                n = groupindex['delim'] - 1
                key = m[n]
            except KeyError:
                continue
            if key and (delimiters is None or key in delimiters):
                delims[key] = delims.get(key, 0) + 1
            try:
                n = groupindex['space'] - 1
            except KeyError:
                continue
            if m[n]:
                spaces += 1

        quotechar = max(quotes, key=quotes.get)

        if delims:
            delim = max(delims, key=delims.get)
            skipinitialspace = delims[delim] == spaces
            if delim == '\n':  # most likely a file with a single column
                delim = ''
        else:
            # there is *no* delimiter, it's a single column of quoted data
            delim = ''
            skipinitialspace = 0

        # if we see an extra quote between delimiters, we've got a
        # double quoted format
        dq_regexp = re.compile(
            r"((%(delim)s)|^)\W*%(quote)s[^%(delim)s\n]*%(quote)s[^%(delim)s\n]*%(quote)s\W*((%(delim)s)|$)" % \
            {'delim': re.escape(delim), 'quote': quotechar}, re.MULTILINE)

        if dq_regexp.search(data):
            doublequote = True
        else:
            doublequote = False

        return (quotechar, doublequote, delim, skipinitialspace)

    def _guess_delimiter(self, data, delimiters):
        """
        The delimiter /should/ occur the same number of times on
        each row. However, due to malformed data, it may not. We don't want
        an all or nothing approach, so we allow for small variations in this
        number.
          1) build a table of the frequency of each character on every line.
          2) build a table of frequencies of this frequency (meta-frequency?),
             e.g.  'x occurred 5 times in 10 rows, 6 times in 1000 rows,
             7 times in 2 rows'
          3) use the mode of the meta-frequency to determine the /expected/
             frequency for that character
          4) find out how often the character actually meets that goal
          5) the character that best meets its goal is the delimiter
        For performance reasons, the data is evaluated in chunks, so it can
        try and evaluate the smallest portion of the data possible, evaluating
        additional chunks as necessary.
        """

        data = list(filter(None, data.split('\n')))

        ascii = [unichr(c) for c in range(127)]  # 7-bit ASCII

        # build frequency tables
        chunkLength = min(10, len(data))
        iteration = 0
        charFrequency = {}
        modes = {}
        delims = {}
        start, end = 0, min(chunkLength, len(data))
        while start < len(data):
            iteration += 1
            for line in data[start:end]:
                for char in ascii:
                    metaFrequency = charFrequency.get(char, {})
                    # must count even if frequency is 0
                    freq = line.count(char)
                    # value is the mode
                    metaFrequency[freq] = metaFrequency.get(freq, 0) + 1
                    charFrequency[char] = metaFrequency

            for char in charFrequency.keys():
                items = list(charFrequency[char].items())
                if len(items) == 1 and items[0][0] == 0:
                    continue
                # get the mode of the frequencies
                if len(items) > 1:
                    modes[char] = max(items, key=lambda x: x[1])
                    # adjust the mode - subtract the sum of all
                    # other frequencies
                    items.remove(modes[char])
                    modes[char] = (modes[char][0], modes[char][1]
                                   - sum(item[1] for item in items))
                else:
                    modes[char] = items[0]

            # build a list of possible delimiters
            modeList = modes.items()
            total = float(chunkLength * iteration)
            # (rows of consistent data) / (number of rows) = 100%
            consistency = 1.0
            # minimum consistency threshold
            threshold = 0.9
            while len(delims) == 0 and consistency >= threshold:
                for k, v in modeList:
                    if v[0] > 0 and v[1] > 0:
                        if ((v[1]/total) >= consistency and
                            (delimiters is None or k in delimiters)):
                            delims[k] = v
                consistency -= 0.01

            if len(delims) == 1:
                delim = list(delims.keys())[0]
                skipinitialspace = (data[0].count(delim) ==
                                    data[0].count("%c " % delim))
                return (delim, skipinitialspace)

            # analyze another chunkLength lines
            start = end
            end += chunkLength

        if not delims:
            return ('', 0)

        # if there's more than one, fall back to a 'preferred' list
        if len(delims) > 1:
            for d in self.preferred:
                if d in delims.keys():
                    skipinitialspace = (data[0].count(d) ==
                                        data[0].count("%c " % d))
                    return (d, skipinitialspace)

        # nothing else indicates a preference, pick the character that
        # dominates(?)
        items = [(v, k) for (k, v) in delims.items()]
        items.sort()
        delim = items[-1][1]

        skipinitialspace = (data[0].count(delim) ==
                            data[0].count("%c " % delim))
        return (delim, skipinitialspace)

    def has_header(self, sample):
        # Creates a dictionary of types of data in each column. If any
        # column is of a single type (say, integers), *except* for the first
        # row, then the first row is presumed to be labels. If the type
        # can't be determined, it is assumed to be a string in which case
        # the length of the string is the determining factor: if all of the
        # rows except for the first are the same length, it's a header.
        # Finally, a 'vote' is taken at the end for each column, adding or
        # subtracting from the likelihood of the first row being a header.

        rdr = reader(StringIO(sample), self.sniff(sample))

        header = next(rdr)  # assume first row is header

        columns = len(header)
        columnTypes = {}
        for i in range(columns): columnTypes[i] = None

        checked = 0
        for row in rdr:
            # arbitrary number of rows to check, to keep it sane
            if checked > 20:
                break
            checked += 1

            if len(row) != columns:
                continue  # skip rows that have irregular number of columns

            for col in list(columnTypes.keys()):

                for thisType in [int, float, complex]:
                    try:
                        thisType(row[col])
                        break
                    except (ValueError, OverflowError):
                        pass
                else:
                    # fallback to length of string
                    thisType = len(row[col])

                if thisType != columnTypes[col]:
                    if columnTypes[col] is None:  # add new column type
                        columnTypes[col] = thisType
                    else:
                        # type is inconsistent, remove column from
                        # consideration
                        del columnTypes[col]

        # finally, compare results against first row and "vote"
        # on whether it's a header
        hasHeader = 0
        for col, colType in columnTypes.items():
            if type(colType) == type(0):  # it's a length
                if len(header[col]) != colType:
                    hasHeader += 1
                else:
                    hasHeader -= 1
            else:  # attempt typecast
                try:
                    colType(header[col])
                except (ValueError, TypeError):
                    hasHeader += 1
                else:
                    hasHeader -= 1

        return hasHeader > 0

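A quick sketch of has_header(), not part of the diff; both samples are invented, with the expected result noted:

    s = Sniffer()
    s.has_header("name,age\r\nalice,30\r\nbob,25\r\n")  # True: below row one, 'age' is always int
    s.has_header("1,2\r\n3,4\r\n5,6\r\n")               # False: the first row casts like data
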
@@ -1,243 +0,0 @@
from __future__ import absolute_import

import functools
from collections import namedtuple
from threading import RLock

_CacheInfo = namedtuple("_CacheInfo", ["hits", "misses", "maxsize", "currsize"])


@functools.wraps(functools.update_wrapper)
def update_wrapper(
    wrapper,
    wrapped,
    assigned=functools.WRAPPER_ASSIGNMENTS,
    updated=functools.WRAPPER_UPDATES,
):
    """
    Patch two bugs in functools.update_wrapper.
    """
    # workaround for http://bugs.python.org/issue3445
    assigned = tuple(attr for attr in assigned if hasattr(wrapped, attr))
    wrapper = functools.update_wrapper(wrapper, wrapped, assigned, updated)
    # workaround for https://bugs.python.org/issue17482
    wrapper.__wrapped__ = wrapped
    return wrapper


class _HashedSeq(list):
    """This class guarantees that hash() will be called no more than once
    per element. This is important because the lru_cache() will hash
    the key multiple times on a cache miss.

    """

    __slots__ = 'hashvalue'

    def __init__(self, tup, hash=hash):
        self[:] = tup
        self.hashvalue = hash(tup)

    def __hash__(self):
        return self.hashvalue

def _make_key(
    args,
    kwds,
    typed,
    kwd_mark=(object(),),
    fasttypes={int, str},
    tuple=tuple,
    type=type,
    len=len,
):
    """Make a cache key from optionally typed positional and keyword arguments

    The key is constructed in a way that is flat as possible rather than
    as a nested structure that would take more memory.

    If there is only a single argument and its data type is known to cache
    its hash value, then that argument is returned without a wrapper. This
    saves space and improves lookup speed.

    """
    # All of code below relies on kwds preserving the order input by the user.
    # Formerly, we sorted() the kwds before looping. The new way is *much*
    # faster; however, it means that f(x=1, y=2) will now be treated as a
    # distinct call from f(y=2, x=1) which will be cached separately.
    key = args
    if kwds:
        key += kwd_mark
        for item in kwds.items():
            key += item
    if typed:
        key += tuple(type(v) for v in args)
        if kwds:
            key += tuple(type(v) for v in kwds.values())
    elif len(key) == 1 and type(key[0]) in fasttypes:
        return key[0]
    return _HashedSeq(key)

def lru_cache(maxsize=128, typed=False):
    """Least-recently-used cache decorator.

    If *maxsize* is set to None, the LRU features are disabled and the cache
    can grow without bound.

    If *typed* is True, arguments of different types will be cached separately.
    For example, f(decimal.Decimal("3.0")) and f(3.0) will be treated as
    distinct calls with distinct results. Some types such as str and int may
    be cached separately even when typed is false.

    Arguments to the cached function must be hashable.

    View the cache statistics named tuple (hits, misses, maxsize, currsize)
    with f.cache_info().  Clear the cache and statistics with f.cache_clear().
    Access the underlying function with f.__wrapped__.

    See: https://en.wikipedia.org/wiki/Cache_replacement_policies#Least_recently_used_(LRU)

    """

    # Users should only access the lru_cache through its public API:
    #   cache_info, cache_clear, and f.__wrapped__
    # The internals of the lru_cache are encapsulated for thread safety and
    # to allow the implementation to change (including a possible C version).

    if isinstance(maxsize, int):
        # Negative maxsize is treated as 0
        if maxsize < 0:
            maxsize = 0
    elif callable(maxsize) and isinstance(typed, bool):
        # The user_function was passed in directly via the maxsize argument
        user_function, maxsize = maxsize, 128
        wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
        wrapper.cache_parameters = lambda: {'maxsize': maxsize, 'typed': typed}
        return update_wrapper(wrapper, user_function)
    elif maxsize is not None:
        raise TypeError('Expected first argument to be an integer, a callable, or None')

    def decorating_function(user_function):
        wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
        wrapper.cache_parameters = lambda: {'maxsize': maxsize, 'typed': typed}
        return update_wrapper(wrapper, user_function)

    return decorating_function

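A usage sketch exercising the decorator and its statistics API, not part of the diff; fib is an invented example function and the printed counts are the expected memoization result:

    @lru_cache(maxsize=None)
    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    fib(16)
    print(fib.cache_info())  # expected: _CacheInfo(hits=14, misses=17, maxsize=None, currsize=17)
    fib.cache_clear()
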
def _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo):
    # Constants shared by all lru cache instances:
    sentinel = object()      # unique object used to signal cache misses
    make_key = _make_key     # build a key from the function arguments
    PREV, NEXT, KEY, RESULT = 0, 1, 2, 3  # names for the link fields

    cache = {}
    hits = misses = 0
    full = False
    cache_get = cache.get    # bound method to lookup a key or return None
    cache_len = cache.__len__  # get cache size without calling len()
    lock = RLock()           # because linkedlist updates aren't threadsafe
    root = []                # root of the circular doubly linked list
    root[:] = [root, root, None, None]  # initialize by pointing to self

    if maxsize == 0:

        def wrapper(*args, **kwds):
            # No caching -- just a statistics update
            nonlocal misses
            misses += 1
            result = user_function(*args, **kwds)
            return result

    elif maxsize is None:

        def wrapper(*args, **kwds):
            # Simple caching without ordering or size limit
            nonlocal hits, misses
            key = make_key(args, kwds, typed)
            result = cache_get(key, sentinel)
            if result is not sentinel:
                hits += 1
                return result
            misses += 1
            result = user_function(*args, **kwds)
            cache[key] = result
            return result

    else:

        def wrapper(*args, **kwds):
            # Size limited caching that tracks accesses by recency
            nonlocal root, hits, misses, full
            key = make_key(args, kwds, typed)
            with lock:
                link = cache_get(key)
                if link is not None:
                    # Move the link to the front of the circular queue
                    link_prev, link_next, _key, result = link
                    link_prev[NEXT] = link_next
                    link_next[PREV] = link_prev
                    last = root[PREV]
                    last[NEXT] = root[PREV] = link
                    link[PREV] = last
                    link[NEXT] = root
                    hits += 1
                    return result
                misses += 1
            result = user_function(*args, **kwds)
            with lock:
                if key in cache:
                    # Getting here means that this same key was added to the
                    # cache while the lock was released.  Since the link
                    # update is already done, we need only return the
                    # computed result and update the count of misses.
                    pass
                elif full:
                    # Use the old root to store the new key and result.
                    oldroot = root
                    oldroot[KEY] = key
                    oldroot[RESULT] = result
                    # Empty the oldest link and make it the new root.
                    # Keep a reference to the old key and old result to
                    # prevent their ref counts from going to zero during the
                    # update. That will prevent potentially arbitrary object
                    # clean-up code (i.e. __del__) from running while we're
                    # still adjusting the links.
                    root = oldroot[NEXT]
                    oldkey = root[KEY]
                    root[KEY] = root[RESULT] = None
                    # Now update the cache dictionary.
                    del cache[oldkey]
                    # Save the potentially reentrant cache[key] assignment
                    # for last, after the root and links have been put in
                    # a consistent state.
                    cache[key] = oldroot
                else:
                    # Put result in a new link at the front of the queue.
                    last = root[PREV]
                    link = [last, root, key, result]
                    last[NEXT] = root[PREV] = cache[key] = link
                    # Use the cache_len bound method instead of the len() function
                    # which could potentially be wrapped in an lru_cache itself.
                    full = cache_len() >= maxsize
            return result

    def cache_info():
        """Report cache statistics"""
        with lock:
            return _CacheInfo(hits, misses, maxsize, cache_len())

    def cache_clear():
        """Clear the cache and cache statistics"""
        nonlocal hits, misses, full
        with lock:
            cache.clear()
            root[:] = [root, root, None, None]
            hits = misses = 0
            full = False

    wrapper.cache_info = cache_info
    wrapper.cache_clear = cache_clear
    return wrapper

lib/backports/tarfile/__init__.py (new file, 2937 lines)
File diff suppressed because it is too large

lib/backports/tarfile/__main__.py (new file, 5 lines)
@@ -0,0 +1,5 @@
from . import main


if __name__ == '__main__':
    main()
lib/backports/tarfile/compat/py38.py (new file, 24 lines)
@@ -0,0 +1,24 @@
import sys

if sys.version_info < (3, 9):

    def removesuffix(self, suffix):
        # suffix='' should not call self[:-0].
        if suffix and self.endswith(suffix):
            return self[: -len(suffix)]
        else:
            return self[:]

    def removeprefix(self, prefix):
        if self.startswith(prefix):
            return self[len(prefix) :]
        else:
            return self[:]

else:

    def removesuffix(self, suffix):
        return self.removesuffix(suffix)

    def removeprefix(self, prefix):
        return self.removeprefix(prefix)

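A quick sketch of how these shims are called, not part of the diff, with the string passed explicitly as `self`; the file names are invented:

    removesuffix("archive.tar", ".tar")  # -> 'archive'
    removeprefix("./member", "./")       # -> 'member'
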
@@ -1,49 +0,0 @@
__all__ = [
    "ZoneInfo",
    "reset_tzpath",
    "available_timezones",
    "TZPATH",
    "ZoneInfoNotFoundError",
    "InvalidTZPathWarning",
]

import sys

from . import _tzpath
from ._common import ZoneInfoNotFoundError
from ._version import __version__

try:
    from ._czoneinfo import ZoneInfo
except ImportError:  # pragma: nocover
    from ._zoneinfo import ZoneInfo

reset_tzpath = _tzpath.reset_tzpath
available_timezones = _tzpath.available_timezones
InvalidTZPathWarning = _tzpath.InvalidTZPathWarning

if sys.version_info < (3, 7):
    # Module-level __getattr__ was added in Python 3.7, so instead of lazily
    # populating TZPATH on every access, we will register a callback with
    # reset_tzpath to update the top-level tuple.
    TZPATH = _tzpath.TZPATH

    def _tzpath_callback(new_tzpath):
        global TZPATH
        TZPATH = new_tzpath

    _tzpath.TZPATH_CALLBACKS.append(_tzpath_callback)
    del _tzpath_callback

else:

    def __getattr__(name):
        if name == "TZPATH":
            return _tzpath.TZPATH
        else:
            raise AttributeError(
                f"module {__name__!r} has no attribute {name!r}"
            )

    def __dir__():
        return sorted(list(globals()) + ["TZPATH"])

@@ -1,45 +0,0 @@
import os
import typing
from datetime import datetime, tzinfo
from typing import (
    Any,
    Iterable,
    Optional,
    Protocol,
    Sequence,
    Set,
    Type,
    Union,
)

_T = typing.TypeVar("_T", bound="ZoneInfo")

class _IOBytes(Protocol):
    def read(self, __size: int) -> bytes: ...
    def seek(self, __size: int, __whence: int = ...) -> Any: ...

class ZoneInfo(tzinfo):
    @property
    def key(self) -> str: ...
    def __init__(self, key: str) -> None: ...
    @classmethod
    def no_cache(cls: Type[_T], key: str) -> _T: ...
    @classmethod
    def from_file(
        cls: Type[_T], __fobj: _IOBytes, key: Optional[str] = ...
    ) -> _T: ...
    @classmethod
    def clear_cache(cls, *, only_keys: Iterable[str] = ...) -> None: ...

# Note: Both here and in clear_cache, the types allow the use of `str` where
# a sequence of strings is required. This should be remedied if a solution
# to this typing bug is found: https://github.com/python/typing/issues/256
def reset_tzpath(
    to: Optional[Sequence[Union[os.PathLike, str]]] = ...
) -> None: ...
def available_timezones() -> Set[str]: ...

TZPATH: Sequence[str]

class ZoneInfoNotFoundError(KeyError): ...
class InvalidTZPathWarning(RuntimeWarning): ...

@@ -1,171 +0,0 @@
import struct


def load_tzdata(key):
    try:
        import importlib.resources as importlib_resources
    except ImportError:
        import importlib_resources

    components = key.split("/")
    package_name = ".".join(["tzdata.zoneinfo"] + components[:-1])
    resource_name = components[-1]

    try:
        return importlib_resources.open_binary(package_name, resource_name)
    except (ImportError, FileNotFoundError, UnicodeEncodeError):
        # There are three types of exception that can be raised that all amount
        # to "we cannot find this key":
        #
        # ImportError: If package_name doesn't exist (e.g. if tzdata is not
        #   installed, or if there's an error in the folder name like
        #   Amrica/New_York)
        # FileNotFoundError: If resource_name doesn't exist in the package
        #   (e.g. Europe/Krasnoy)
        # UnicodeEncodeError: If package_name or resource_name are not UTF-8,
        #   such as keys containing a surrogate character.
        raise ZoneInfoNotFoundError(f"No time zone found with key {key}")

def load_data(fobj):
    header = _TZifHeader.from_file(fobj)

    if header.version == 1:
        time_size = 4
        time_type = "l"
    else:
        # Version 2+ has 64-bit integer transition times
        time_size = 8
        time_type = "q"

        # Version 2+ also starts with a Version 1 header and data, which
        # we need to skip now
        skip_bytes = (
            header.timecnt * 5  # Transition times and types
            + header.typecnt * 6  # Local time type records
            + header.charcnt  # Time zone designations
            + header.leapcnt * 8  # Leap second records
            + header.isstdcnt  # Standard/wall indicators
            + header.isutcnt  # UT/local indicators
        )

        fobj.seek(skip_bytes, 1)

        # Now we need to read the second header, which is not the same
        # as the first
        header = _TZifHeader.from_file(fobj)

    typecnt = header.typecnt
    timecnt = header.timecnt
    charcnt = header.charcnt

    # The data portion starts with timecnt transitions and indices
    if timecnt:
        trans_list_utc = struct.unpack(
            f">{timecnt}{time_type}", fobj.read(timecnt * time_size)
        )
        trans_idx = struct.unpack(f">{timecnt}B", fobj.read(timecnt))
    else:
        trans_list_utc = ()
        trans_idx = ()

    # Read the ttinfo struct, (utoff, isdst, abbrind)
    if typecnt:
        utcoff, isdst, abbrind = zip(
            *(struct.unpack(">lbb", fobj.read(6)) for i in range(typecnt))
        )
    else:
        utcoff = ()
        isdst = ()
        abbrind = ()

    # Now read the abbreviations. They are null-terminated strings, indexed
    # not by position in the array but by position in the unsplit
    # abbreviation string. I suppose this makes more sense in C, which uses
    # null to terminate the strings, but it's inconvenient here...
    abbr_vals = {}
    abbr_chars = fobj.read(charcnt)

    def get_abbr(idx):
        # Gets a string starting at idx and running until the next \x00
        #
        # We cannot pre-populate abbr_vals by splitting on \x00 because there
        # are some zones that use subsets of longer abbreviations, like so:
        #
        #  LMT\x00AHST\x00HDT\x00
        #
        # Where the idx to abbr mapping should be:
        #
        #  {0: "LMT", 4: "AHST", 5: "HST", 9: "HDT"}
        if idx not in abbr_vals:
            span_end = abbr_chars.find(b"\x00", idx)
            abbr_vals[idx] = abbr_chars[idx:span_end].decode()

        return abbr_vals[idx]

    abbr = tuple(get_abbr(idx) for idx in abbrind)

    # The remainder of the file consists of leap seconds (currently unused) and
    # the standard/wall and ut/local indicators, which are metadata we don't need.
    # In version 2 files, we need to skip the unnecessary data to get at the TZ string:
    if header.version >= 2:
        # Each leap second record has size (time_size + 4)
        skip_bytes = header.isutcnt + header.isstdcnt + header.leapcnt * 12
        fobj.seek(skip_bytes, 1)

        c = fobj.read(1)  # Should be \n
        assert c == b"\n", c

        tz_bytes = b""
        while True:
            c = fobj.read(1)
            if c == b"\n":
                break
            tz_bytes += c

        tz_str = tz_bytes
    else:
        tz_str = None

    return trans_idx, trans_list_utc, utcoff, isdst, abbr, tz_str

class _TZifHeader:
    __slots__ = [
        "version",
        "isutcnt",
        "isstdcnt",
        "leapcnt",
        "timecnt",
        "typecnt",
        "charcnt",
    ]

    def __init__(self, *args):
        assert len(self.__slots__) == len(args)
        for attr, val in zip(self.__slots__, args):
            setattr(self, attr, val)

    @classmethod
    def from_file(cls, stream):
        # The header starts with a 4-byte "magic" value
        if stream.read(4) != b"TZif":
            raise ValueError("Invalid TZif file: magic not found")

        _version = stream.read(1)
        if _version == b"\x00":
            version = 1
        else:
            version = int(_version)
        stream.read(15)

        args = (version,)

        # Slots are defined in the order that the bytes are arranged
        args = args + struct.unpack(">6l", stream.read(24))

        return cls(*args)


class ZoneInfoNotFoundError(KeyError):
    """Exception raised when a ZoneInfo key is not found."""

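A small sketch of _TZifHeader.from_file() on a binary TZif stream, not part of the diff; the path below assumes a typical Unix zoneinfo layout on the host:

    with open("/usr/share/zoneinfo/UTC", "rb") as f:
        hdr = _TZifHeader.from_file(f)
    print(hdr.version, hdr.timecnt, hdr.typecnt)
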
@@ -1,207 +0,0 @@
import os
import sys

PY36 = sys.version_info < (3, 7)


def reset_tzpath(to=None):
    global TZPATH

    tzpaths = to
    if tzpaths is not None:
        if isinstance(tzpaths, (str, bytes)):
            raise TypeError(
                f"tzpaths must be a list or tuple, "
                + f"not {type(tzpaths)}: {tzpaths!r}"
            )

        if not all(map(os.path.isabs, tzpaths)):
            raise ValueError(_get_invalid_paths_message(tzpaths))
        base_tzpath = tzpaths
    else:
        env_var = os.environ.get("PYTHONTZPATH", None)
        if env_var is not None:
            base_tzpath = _parse_python_tzpath(env_var)
        elif sys.platform != "win32":
            base_tzpath = [
                "/usr/share/zoneinfo",
                "/usr/lib/zoneinfo",
                "/usr/share/lib/zoneinfo",
                "/etc/zoneinfo",
            ]

            base_tzpath.sort(key=lambda x: not os.path.exists(x))
        else:
            base_tzpath = ()

    TZPATH = tuple(base_tzpath)

    if TZPATH_CALLBACKS:
        for callback in TZPATH_CALLBACKS:
            callback(TZPATH)

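A brief sketch of re-reading the search path from the environment, not part of the diff; /custom/zoneinfo is a hypothetical directory:

    os.environ["PYTHONTZPATH"] = "/custom/zoneinfo"
    reset_tzpath()   # re-parses PYTHONTZPATH via _parse_python_tzpath below
    print(TZPATH)    # -> ('/custom/zoneinfo',)
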
def _parse_python_tzpath(env_var):
    if not env_var:
        return ()

    raw_tzpath = env_var.split(os.pathsep)
    new_tzpath = tuple(filter(os.path.isabs, raw_tzpath))

    # If anything has been filtered out, we will warn about it
    if len(new_tzpath) != len(raw_tzpath):
        import warnings

        msg = _get_invalid_paths_message(raw_tzpath)

        warnings.warn(
            "Invalid paths specified in PYTHONTZPATH environment variable."
            + msg,
            InvalidTZPathWarning,
        )

    return new_tzpath


def _get_invalid_paths_message(tzpaths):
    invalid_paths = (path for path in tzpaths if not os.path.isabs(path))

    prefix = "\n    "
    indented_str = prefix + prefix.join(invalid_paths)

    return (
        "Paths should be absolute but found the following relative paths:"
        + indented_str
    )

if sys.version_info < (3, 8):

    def _isfile(path):
        # bpo-33721: In Python 3.8 non-UTF8 paths return False rather than
        # raising an error. See https://bugs.python.org/issue33721
        try:
            return os.path.isfile(path)
        except ValueError:
            return False


else:
    _isfile = os.path.isfile


def find_tzfile(key):
    """Retrieve the path to a TZif file from a key."""
    _validate_tzfile_path(key)
    for search_path in TZPATH:
        filepath = os.path.join(search_path, key)
        if _isfile(filepath):
            return filepath

    return None

_TEST_PATH = os.path.normpath(os.path.join("_", "_"))[:-1]


def _validate_tzfile_path(path, _base=_TEST_PATH):
    if os.path.isabs(path):
        raise ValueError(
            f"ZoneInfo keys may not be absolute paths, got: {path}"
        )

    # We only care about the kinds of path normalizations that would change the
    # length of the key - e.g. a/../b -> a/b, or a/b/ -> a/b. On Windows,
    # normpath will also change from a/b to a\b, but that would still preserve
    # the length.
    new_path = os.path.normpath(path)
    if len(new_path) != len(path):
        raise ValueError(
            f"ZoneInfo keys must be normalized relative paths, got: {path}"
        )

    resolved = os.path.normpath(os.path.join(_base, new_path))
    if not resolved.startswith(_base):
        raise ValueError(
            f"ZoneInfo keys must refer to subdirectories of TZPATH, got: {path}"
        )


del _TEST_PATH

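A short sketch of what _validate_tzfile_path() accepts and rejects, not part of the diff; the keys are invented examples:

    _validate_tzfile_path("America/New_York")         # accepted: normalized relative path
    _validate_tzfile_path("../etc/passwd")            # ValueError: resolves outside TZPATH
    _validate_tzfile_path("/usr/share/zoneinfo/UTC")  # ValueError: absolute path
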
def available_timezones():
    """Returns a set containing all available time zones.

    .. caution::

        This may attempt to open a large number of files, since the best way
        to determine if a given file on the time zone search path is a valid
        time zone is to open it and check for the "magic string" at the
        beginning.
    """
    try:
        from importlib import resources
    except ImportError:
        import importlib_resources as resources

    valid_zones = set()

    # Start with loading from the tzdata package if it exists: this has a
    # pre-assembled list of zones that only requires opening one file.
    try:
        with resources.open_text("tzdata", "zones") as f:
            for zone in f:
                zone = zone.strip()
                if zone:
                    valid_zones.add(zone)
    except (ImportError, FileNotFoundError):
        pass

    def valid_key(fpath):
        try:
            with open(fpath, "rb") as f:
                return f.read(4) == b"TZif"
        except Exception:  # pragma: nocover
            return False

    for tz_root in TZPATH:
        if not os.path.exists(tz_root):
            continue

        for root, dirnames, files in os.walk(tz_root):
            if root == tz_root:
                # right/ and posix/ are special directories and shouldn't be
                # included in the output of available zones
                if "right" in dirnames:
                    dirnames.remove("right")
                if "posix" in dirnames:
                    dirnames.remove("posix")

            for file in files:
                fpath = os.path.join(root, file)

                key = os.path.relpath(fpath, start=tz_root)
                if os.sep != "/":  # pragma: nocover
                    key = key.replace(os.sep, "/")

                if not key or key in valid_zones:
                    continue

                if valid_key(fpath):
                    valid_zones.add(key)

    if "posixrules" in valid_zones:
        # posixrules is a special symlink-only time zone; where it exists, it
        # should not be included in the output
        valid_zones.remove("posixrules")

    return valid_zones


class InvalidTZPathWarning(RuntimeWarning):
    """Warning raised if an invalid path is specified in PYTHONTZPATH."""


TZPATH = ()
TZPATH_CALLBACKS = []
reset_tzpath()

@@ -1 +0,0 @@
__version__ = "0.2.1"

@@ -1,754 +0,0 @@
import bisect
import calendar
import collections
import functools
import re
import weakref
from datetime import datetime, timedelta, tzinfo

from . import _common, _tzpath

EPOCH = datetime(1970, 1, 1)
EPOCHORDINAL = datetime(1970, 1, 1).toordinal()


# It is relatively expensive to construct new timedelta objects, and in most
# cases we're looking at the same deltas, like integer numbers of hours, etc.
# To improve speed and memory use, we'll keep a dictionary with references
# to the ones we've already used so far.
#
# Loading every time zone in the 2020a version of the time zone database
# requires 447 timedeltas, which requires approximately the amount of space
# that ZoneInfo("America/New_York") with 236 transitions takes up, so we will
# set the cache size to 512 so that in the common case we always get cache
# hits, but specifically crafted ZoneInfo objects don't leak arbitrary amounts
# of memory.
@functools.lru_cache(maxsize=512)
def _load_timedelta(seconds):
    return timedelta(seconds=seconds)

class ZoneInfo(tzinfo):
    _strong_cache_size = 8
    _strong_cache = collections.OrderedDict()
    _weak_cache = weakref.WeakValueDictionary()
    __module__ = "backports.zoneinfo"

    def __init_subclass__(cls):
        cls._strong_cache = collections.OrderedDict()
        cls._weak_cache = weakref.WeakValueDictionary()

    def __new__(cls, key):
        instance = cls._weak_cache.get(key, None)
        if instance is None:
            instance = cls._weak_cache.setdefault(key, cls._new_instance(key))
            instance._from_cache = True

        # Update the "strong" cache
        cls._strong_cache[key] = cls._strong_cache.pop(key, instance)

        if len(cls._strong_cache) > cls._strong_cache_size:
            cls._strong_cache.popitem(last=False)

        return instance

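A sketch of the caching contract implemented by __new__ above, not part of the diff; the key is a real IANA name but otherwise an arbitrary example:

    a = ZoneInfo("America/New_York")
    b = ZoneInfo("America/New_York")
    c = ZoneInfo.no_cache("America/New_York")
    assert a is b       # same instance, served from the weak/strong caches
    assert a is not c   # no_cache() always constructs a fresh instance
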
    @classmethod
    def no_cache(cls, key):
        obj = cls._new_instance(key)
        obj._from_cache = False

        return obj

    @classmethod
    def _new_instance(cls, key):
        obj = super().__new__(cls)
        obj._key = key
        obj._file_path = obj._find_tzfile(key)

        if obj._file_path is not None:
            file_obj = open(obj._file_path, "rb")
        else:
            file_obj = _common.load_tzdata(key)

        with file_obj as f:
            obj._load_file(f)

        return obj

    @classmethod
    def from_file(cls, fobj, key=None):
        obj = super().__new__(cls)
        obj._key = key
        obj._file_path = None
        obj._load_file(fobj)
        obj._file_repr = repr(fobj)

        # Disable pickling for objects created from files
        obj.__reduce__ = obj._file_reduce

        return obj

    @classmethod
    def clear_cache(cls, *, only_keys=None):
        if only_keys is not None:
            for key in only_keys:
                cls._weak_cache.pop(key, None)
                cls._strong_cache.pop(key, None)

        else:
            cls._weak_cache.clear()
            cls._strong_cache.clear()

    @property
    def key(self):
        return self._key

    def utcoffset(self, dt):
        return self._find_trans(dt).utcoff

    def dst(self, dt):
        return self._find_trans(dt).dstoff

    def tzname(self, dt):
        return self._find_trans(dt).tzname

    def fromutc(self, dt):
        """Convert from datetime in UTC to datetime in local time"""

        if not isinstance(dt, datetime):
            raise TypeError("fromutc() requires a datetime argument")
        if dt.tzinfo is not self:
            raise ValueError("dt.tzinfo is not self")

        timestamp = self._get_local_timestamp(dt)
        num_trans = len(self._trans_utc)

        if num_trans >= 1 and timestamp < self._trans_utc[0]:
            tti = self._tti_before
            fold = 0
        elif (
            num_trans == 0 or timestamp > self._trans_utc[-1]
        ) and not isinstance(self._tz_after, _ttinfo):
            tti, fold = self._tz_after.get_trans_info_fromutc(
                timestamp, dt.year
            )
        elif num_trans == 0:
            tti = self._tz_after
            fold = 0
        else:
            idx = bisect.bisect_right(self._trans_utc, timestamp)

            if num_trans > 1 and timestamp >= self._trans_utc[1]:
                tti_prev, tti = self._ttinfos[idx - 2 : idx]
            elif timestamp > self._trans_utc[-1]:
                tti_prev = self._ttinfos[-1]
                tti = self._tz_after
            else:
                tti_prev = self._tti_before
                tti = self._ttinfos[0]

            # Detect fold
            shift = tti_prev.utcoff - tti.utcoff
            fold = shift.total_seconds() > timestamp - self._trans_utc[idx - 1]
        dt += tti.utcoff
        if fold:
            return dt.replace(fold=1)
        else:
            return dt

    def _find_trans(self, dt):
        if dt is None:
            if self._fixed_offset:
                return self._tz_after
            else:
                return _NO_TTINFO

        ts = self._get_local_timestamp(dt)

        lt = self._trans_local[dt.fold]

        num_trans = len(lt)

        if num_trans and ts < lt[0]:
            return self._tti_before
        elif not num_trans or ts > lt[-1]:
            if isinstance(self._tz_after, _TZStr):
                return self._tz_after.get_trans_info(ts, dt.year, dt.fold)
            else:
                return self._tz_after
        else:
            # idx is the transition that occurs after this timestamp, so we
            # subtract off 1 to get the current ttinfo
            idx = bisect.bisect_right(lt, ts) - 1
            assert idx >= 0
            return self._ttinfos[idx]

    def _get_local_timestamp(self, dt):
        return (
            (dt.toordinal() - EPOCHORDINAL) * 86400
            + dt.hour * 3600
            + dt.minute * 60
            + dt.second
        )

    def __str__(self):
        if self._key is not None:
            return f"{self._key}"
        else:
            return repr(self)

    def __repr__(self):
        if self._key is not None:
            return f"{self.__class__.__name__}(key={self._key!r})"
        else:
            return f"{self.__class__.__name__}.from_file({self._file_repr})"

    def __reduce__(self):
        return (self.__class__._unpickle, (self._key, self._from_cache))

    def _file_reduce(self):
        import pickle

        raise pickle.PicklingError(
            "Cannot pickle a ZoneInfo file created from a file stream."
        )

    @classmethod
    def _unpickle(cls, key, from_cache):
        if from_cache:
            return cls(key)
        else:
            return cls.no_cache(key)

    def _find_tzfile(self, key):
        return _tzpath.find_tzfile(key)

    def _load_file(self, fobj):
        # Retrieve all the data as it exists in the zoneinfo file
        trans_idx, trans_utc, utcoff, isdst, abbr, tz_str = _common.load_data(
            fobj
        )

        # Infer the DST offsets (needed for .dst()) from the data
        dstoff = self._utcoff_to_dstoff(trans_idx, utcoff, isdst)

        # Convert all the transition times (UTC) into "seconds since 1970-01-01 local time"
        trans_local = self._ts_to_local(trans_idx, trans_utc, utcoff)

        # Construct `_ttinfo` objects for each transition in the file
        _ttinfo_list = [
            _ttinfo(
                _load_timedelta(utcoffset), _load_timedelta(dstoffset), tzname
            )
            for utcoffset, dstoffset, tzname in zip(utcoff, dstoff, abbr)
        ]

        self._trans_utc = trans_utc
        self._trans_local = trans_local
        self._ttinfos = [_ttinfo_list[idx] for idx in trans_idx]

        # Find the first non-DST transition
        for i in range(len(isdst)):
            if not isdst[i]:
                self._tti_before = _ttinfo_list[i]
                break
        else:
            if self._ttinfos:
                self._tti_before = self._ttinfos[0]
            else:
                self._tti_before = None

        # Set the "fallback" time zone
        if tz_str is not None and tz_str != b"":
            self._tz_after = _parse_tz_str(tz_str.decode())
        else:
            if not self._ttinfos and not _ttinfo_list:
                raise ValueError("No time zone information found.")

            if self._ttinfos:
                self._tz_after = self._ttinfos[-1]
            else:
                self._tz_after = _ttinfo_list[-1]

        # Determine if this is a "fixed offset" zone, meaning that the output
        # of the utcoffset, dst and tzname functions does not depend on the
        # specific datetime passed.
        #
        # We make three simplifying assumptions here:
        #
        # 1. If _tz_after is not a _ttinfo, it has transitions that might
        #    actually occur (it is possible to construct TZ strings that
        #    specify STD and DST but no transitions ever occur, such as
        #    AAA0BBB,0/0,J365/25).
        # 2. If _ttinfo_list contains more than one _ttinfo object, the objects
        #    represent different offsets.
        # 3. _ttinfo_list contains no unused _ttinfos (in which case an
        #    otherwise fixed-offset zone with extra _ttinfos defined may
        #    appear to *not* be a fixed offset zone).
        #
        # Violations to these assumptions would be fairly exotic, and exotic
        # zones should almost certainly not be used with datetime.time (the
        # only thing that would be affected by this).
        if len(_ttinfo_list) > 1 or not isinstance(self._tz_after, _ttinfo):
            self._fixed_offset = False
        elif not _ttinfo_list:
            self._fixed_offset = True
        else:
            self._fixed_offset = _ttinfo_list[0] == self._tz_after

    @staticmethod
    def _utcoff_to_dstoff(trans_idx, utcoffsets, isdsts):
        # Now we must transform our ttis and abbrs into `_ttinfo` objects,
        # but there is an issue: .dst() must return a timedelta with the
        # difference between utcoffset() and the "standard" offset, but
        # the "base offset" and "DST offset" are not encoded in the file;
        # we can infer what they are from the isdst flag, but it is not
        # sufficient to just look at the last standard offset, because
        # occasionally countries will shift both DST offset and base offset.

        typecnt = len(isdsts)
        dstoffs = [0] * typecnt  # Provisionally assign all to 0.
        dst_cnt = sum(isdsts)
        dst_found = 0

        for i in range(1, len(trans_idx)):
            if dst_cnt == dst_found:
                break

            idx = trans_idx[i]

            dst = isdsts[idx]

            # We're only going to look at daylight saving time
            if not dst:
                continue

            # Skip any offsets that have already been assigned
            if dstoffs[idx] != 0:
                continue

            dstoff = 0
            utcoff = utcoffsets[idx]

            comp_idx = trans_idx[i - 1]

            if not isdsts[comp_idx]:
                dstoff = utcoff - utcoffsets[comp_idx]

            if not dstoff and idx < (typecnt - 1):
                comp_idx = trans_idx[i + 1]

                # If the following transition is also DST and we couldn't
                # find the DST offset by this point, we're going to have to
                # skip it and hope this transition gets assigned later
                if isdsts[comp_idx]:
                    continue

                dstoff = utcoff - utcoffsets[comp_idx]

            if dstoff:
                dst_found += 1
                dstoffs[idx] = dstoff
        else:
            # If we didn't find a valid value for a given index, we'll end up
            # with dstoff = 0 for something where `isdst=1`. This is obviously
            # wrong - one hour will be a much better guess than 0
            for idx in range(typecnt):
                if not dstoffs[idx] and isdsts[idx]:
                    dstoffs[idx] = 3600

        return dstoffs

    @staticmethod
    def _ts_to_local(trans_idx, trans_list_utc, utcoffsets):
        """Generate number of seconds since 1970 *in the local time*.

        This is necessary to easily find the transition times in local time"""
        if not trans_list_utc:
            return [[], []]

        # Start with the timestamps and modify in-place
        trans_list_wall = [list(trans_list_utc), list(trans_list_utc)]

        if len(utcoffsets) > 1:
            offset_0 = utcoffsets[0]
            offset_1 = utcoffsets[trans_idx[0]]
            if offset_1 > offset_0:
                offset_1, offset_0 = offset_0, offset_1
        else:
            offset_0 = offset_1 = utcoffsets[0]

        trans_list_wall[0][0] += offset_0
        trans_list_wall[1][0] += offset_1

        for i in range(1, len(trans_idx)):
            offset_0 = utcoffsets[trans_idx[i - 1]]
            offset_1 = utcoffsets[trans_idx[i]]

            if offset_1 > offset_0:
                offset_1, offset_0 = offset_0, offset_1

            trans_list_wall[0][i] += offset_0
            trans_list_wall[1][i] += offset_1

        return trans_list_wall

class _ttinfo:
    __slots__ = ["utcoff", "dstoff", "tzname"]

    def __init__(self, utcoff, dstoff, tzname):
        self.utcoff = utcoff
        self.dstoff = dstoff
        self.tzname = tzname

    def __eq__(self, other):
        return (
            self.utcoff == other.utcoff
            and self.dstoff == other.dstoff
            and self.tzname == other.tzname
        )

    def __repr__(self):  # pragma: nocover
        return (
            f"{self.__class__.__name__}"
            + f"({self.utcoff}, {self.dstoff}, {self.tzname})"
        )


_NO_TTINFO = _ttinfo(None, None, None)

class _TZStr:
|
|
||||||
__slots__ = (
|
|
||||||
"std",
|
|
||||||
"dst",
|
|
||||||
"start",
|
|
||||||
"end",
|
|
||||||
"get_trans_info",
|
|
||||||
"get_trans_info_fromutc",
|
|
||||||
"dst_diff",
|
|
||||||
)
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self, std_abbr, std_offset, dst_abbr, dst_offset, start=None, end=None
|
|
||||||
):
|
|
||||||
self.dst_diff = dst_offset - std_offset
|
|
||||||
std_offset = _load_timedelta(std_offset)
|
|
||||||
self.std = _ttinfo(
|
|
||||||
utcoff=std_offset, dstoff=_load_timedelta(0), tzname=std_abbr
|
|
||||||
)
|
|
||||||
|
|
||||||
self.start = start
|
|
||||||
self.end = end
|
|
||||||
|
|
||||||
dst_offset = _load_timedelta(dst_offset)
|
|
||||||
delta = _load_timedelta(self.dst_diff)
|
|
||||||
self.dst = _ttinfo(utcoff=dst_offset, dstoff=delta, tzname=dst_abbr)
|
|
||||||
|
|
||||||
# These are assertions because the constructor should only be called
|
|
||||||
# by functions that would fail before passing start or end
|
|
||||||
assert start is not None, "No transition start specified"
|
|
||||||
assert end is not None, "No transition end specified"
|
|
||||||
|
|
||||||
self.get_trans_info = self._get_trans_info
|
|
||||||
self.get_trans_info_fromutc = self._get_trans_info_fromutc
|
|
||||||
|
|
||||||
def transitions(self, year):
|
|
||||||
start = self.start.year_to_epoch(year)
|
|
||||||
end = self.end.year_to_epoch(year)
|
|
||||||
return start, end
|
|
||||||
|
|
||||||
def _get_trans_info(self, ts, year, fold):
|
|
||||||
"""Get the information about the current transition - tti"""
|
|
||||||
start, end = self.transitions(year)
|
|
||||||
|
|
||||||
# With fold = 0, the period (denominated in local time) with the
|
|
||||||
# smaller offset starts at the end of the gap and ends at the end of
|
|
||||||
# the fold; with fold = 1, it runs from the start of the gap to the
|
|
||||||
# beginning of the fold.
|
|
||||||
#
|
|
||||||
# So in order to determine the DST boundaries we need to know both
|
|
||||||
# the fold and whether DST is positive or negative (rare), and it
|
|
||||||
# turns out that this boils down to fold XOR is_positive.
|
|
||||||
if fold == (self.dst_diff >= 0):
|
|
||||||
end -= self.dst_diff
|
|
||||||
else:
|
|
||||||
start += self.dst_diff
|
|
||||||
|
|
||||||
if start < end:
|
|
||||||
isdst = start <= ts < end
|
|
||||||
else:
|
|
||||||
isdst = not (end <= ts < start)
|
|
||||||
|
|
||||||
return self.dst if isdst else self.std
|
|
||||||
|
|
||||||
def _get_trans_info_fromutc(self, ts, year):
|
|
||||||
start, end = self.transitions(year)
|
|
||||||
start -= self.std.utcoff.total_seconds()
|
|
||||||
end -= self.dst.utcoff.total_seconds()
|
|
||||||
|
|
||||||
if start < end:
|
|
||||||
isdst = start <= ts < end
|
|
||||||
else:
|
|
||||||
isdst = not (end <= ts < start)
|
|
||||||
|
|
||||||
# For positive DST, the ambiguous period is one dst_diff after the end
|
|
||||||
# of DST; for negative DST, the ambiguous period is one dst_diff before
|
|
||||||
# the start of DST.
|
|
||||||
if self.dst_diff > 0:
|
|
||||||
ambig_start = end
|
|
||||||
ambig_end = end + self.dst_diff
|
|
||||||
else:
|
|
||||||
ambig_start = start
|
|
||||||
ambig_end = start - self.dst_diff
|
|
||||||
|
|
||||||
fold = ambig_start <= ts < ambig_end
|
|
||||||
|
|
||||||
return (self.dst if isdst else self.std, fold)
|
|
||||||
|
|
||||||
|
|
||||||
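The fold handling above is the machinery behind PEP 495 ambiguous-time resolution. A minimal sketch of the observable behaviour through the standard library's `zoneinfo` module (Python 3.9+), assuming the IANA database is available (e.g. via the `tzdata` package):

from datetime import datetime
from zoneinfo import ZoneInfo

tz = ZoneInfo("America/New_York")
# 2020-11-01 01:30 local time occurred twice; `fold` selects which pass is meant.
first = datetime(2020, 11, 1, 1, 30, tzinfo=tz)           # fold=0: earlier pass (EDT)
second = datetime(2020, 11, 1, 1, 30, fold=1, tzinfo=tz)  # fold=1: later pass (EST)
print(first.utcoffset(), first.tzname())    # -1 day, 20:00:00 EDT (i.e. UTC-4)
print(second.utcoffset(), second.tzname())  # -1 day, 19:00:00 EST (i.e. UTC-5)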
def _post_epoch_days_before_year(year):
    """Get the number of days between 1970-01-01 and YEAR-01-01"""
    y = year - 1
    return y * 365 + y // 4 - y // 100 + y // 400 - EPOCHORDINAL


class _DayOffset:
    __slots__ = ["d", "julian", "hour", "minute", "second"]

    def __init__(self, d, julian, hour=2, minute=0, second=0):
        if not (0 + julian) <= d <= 365:
            min_day = 0 + julian
            raise ValueError(f"d must be in [{min_day}, 365], not: {d}")

        self.d = d
        self.julian = julian
        self.hour = hour
        self.minute = minute
        self.second = second

    def year_to_epoch(self, year):
        days_before_year = _post_epoch_days_before_year(year)

        d = self.d
        if self.julian and d >= 59 and calendar.isleap(year):
            d += 1

        epoch = (days_before_year + d) * 86400
        epoch += self.hour * 3600 + self.minute * 60 + self.second

        return epoch


class _CalendarOffset:
    __slots__ = ["m", "w", "d", "hour", "minute", "second"]

    _DAYS_BEFORE_MONTH = (
        -1,
        0,
        31,
        59,
        90,
        120,
        151,
        181,
        212,
        243,
        273,
        304,
        334,
    )

    def __init__(self, m, w, d, hour=2, minute=0, second=0):
        if not 0 < m <= 12:
            raise ValueError("m must be in (0, 12]")

        if not 0 < w <= 5:
            raise ValueError("w must be in (0, 5]")

        if not 0 <= d <= 6:
            raise ValueError("d must be in [0, 6]")

        self.m = m
        self.w = w
        self.d = d
        self.hour = hour
        self.minute = minute
        self.second = second

    @classmethod
    def _ymd2ord(cls, year, month, day):
        return (
            _post_epoch_days_before_year(year)
            + cls._DAYS_BEFORE_MONTH[month]
            + (month > 2 and calendar.isleap(year))
            + day
        )

    # TODO: These are not actually epoch dates as they are expressed in local time
    def year_to_epoch(self, year):
        """Calculates the datetime of the occurrence from the year"""
        # We know year and month, we need to convert w, d into day of month
        #
        # Week 1 is the first week in which day `d` (where 0 = Sunday) appears.
        # Week 5 represents the last occurrence of day `d`, so we need to know
        # the range of the month.
        first_day, days_in_month = calendar.monthrange(year, self.m)

        # This equation seems magical, so I'll break it down:
        # 1. calendar says 0 = Monday, POSIX says 0 = Sunday
        #    so we need first_day + 1 to get 1 = Monday -> 7 = Sunday,
        #    which is still equivalent because this math is mod 7
        # 2. Get first day - desired day mod 7: -1 % 7 = 6, so we don't need
        #    to do anything to adjust negative numbers.
        # 3. Add 1 because month days are a 1-based index.
        month_day = (self.d - (first_day + 1)) % 7 + 1

        # Now use a 0-based index version of `w` to calculate the w-th
        # occurrence of `d`
        month_day += (self.w - 1) * 7

        # month_day will only be > days_in_month if w was 5, and `w` means
        # "last occurrence of `d`", so now we just check if we over-shot the
        # end of the month and if so knock off 1 week.
        if month_day > days_in_month:
            month_day -= 7

        ordinal = self._ymd2ord(year, self.m, month_day)
        epoch = ordinal * 86400
        epoch += self.hour * 3600 + self.minute * 60 + self.second
        return epoch
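To make the `month_day` arithmetic concrete, here is a standalone re-derivation of the same calculation; the helper name is ours, not part of the module, and the expected values can be cross-checked against any calendar:

import calendar

def nth_weekday_of_month(year, m, w, d):
    # d uses the POSIX convention (0 = Sunday); w = 5 means "last occurrence".
    first_day, days_in_month = calendar.monthrange(year, m)  # first_day: 0 = Monday
    month_day = (d - (first_day + 1)) % 7 + 1   # day of month of the first `d`
    month_day += (w - 1) * 7                    # advance to the w-th occurrence
    if month_day > days_in_month:               # w = 5 overshot: take the last one
        month_day -= 7
    return month_day

print(nth_weekday_of_month(2021, 3, 2, 0))   # 14 -> second Sunday of March 2021
print(nth_weekday_of_month(2021, 11, 1, 0))  # 7  -> first Sunday of November 2021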
def _parse_tz_str(tz_str):
    # The tz string has the format:
    #
    # std[offset[dst[offset],start[/time],end[/time]]]
    #
    # std and dst must be 3 or more characters long and must not contain
    # a leading colon, embedded digits, commas, nor a plus or minus signs;
    # The spaces between "std" and "offset" are only for display and are
    # not actually present in the string.
    #
    # The format of the offset is ``[+|-]hh[:mm[:ss]]``

    offset_str, *start_end_str = tz_str.split(",", 1)

    # fmt: off
    parser_re = re.compile(
        r"(?P<std>[^<0-9:.+-]+|<[a-zA-Z0-9+\-]+>)" +
        r"((?P<stdoff>[+-]?\d{1,2}(:\d{2}(:\d{2})?)?)" +
            r"((?P<dst>[^0-9:.+-]+|<[a-zA-Z0-9+\-]+>)" +
                r"((?P<dstoff>[+-]?\d{1,2}(:\d{2}(:\d{2})?)?))?" +
            r")?" +  # dst
        r")?$"  # stdoff
    )
    # fmt: on

    m = parser_re.match(offset_str)

    if m is None:
        raise ValueError(f"{tz_str} is not a valid TZ string")

    std_abbr = m.group("std")
    dst_abbr = m.group("dst")
    dst_offset = None

    std_abbr = std_abbr.strip("<>")

    if dst_abbr:
        dst_abbr = dst_abbr.strip("<>")

    std_offset = m.group("stdoff")
    if std_offset:
        try:
            std_offset = _parse_tz_delta(std_offset)
        except ValueError as e:
            raise ValueError(f"Invalid STD offset in {tz_str}") from e
    else:
        std_offset = 0

    if dst_abbr is not None:
        dst_offset = m.group("dstoff")
        if dst_offset:
            try:
                dst_offset = _parse_tz_delta(dst_offset)
            except ValueError as e:
                raise ValueError(f"Invalid DST offset in {tz_str}") from e
        else:
            dst_offset = std_offset + 3600

        if not start_end_str:
            raise ValueError(f"Missing transition rules: {tz_str}")

        start_end_strs = start_end_str[0].split(",", 1)
        try:
            start, end = (_parse_dst_start_end(x) for x in start_end_strs)
        except ValueError as e:
            raise ValueError(f"Invalid TZ string: {tz_str}") from e

        return _TZStr(std_abbr, std_offset, dst_abbr, dst_offset, start, end)
    elif start_end_str:
        raise ValueError(f"Transition rule present without DST: {tz_str}")
    else:
        # This is a static ttinfo, don't return _TZStr
        return _ttinfo(
            _load_timedelta(std_offset), _load_timedelta(0), std_abbr
        )


def _parse_dst_start_end(dststr):
    date, *time = dststr.split("/")
    if date[0] == "M":
        n_is_julian = False
        m = re.match(r"M(\d{1,2})\.(\d).(\d)$", date)
        if m is None:
            raise ValueError(f"Invalid dst start/end date: {dststr}")
        date_offset = tuple(map(int, m.groups()))
        offset = _CalendarOffset(*date_offset)
    else:
        if date[0] == "J":
            n_is_julian = True
            date = date[1:]
        else:
            n_is_julian = False

        doy = int(date)
        offset = _DayOffset(doy, n_is_julian)

    if time:
        time_components = list(map(int, time[0].split(":")))
        n_components = len(time_components)
        if n_components < 3:
            time_components.extend([0] * (3 - n_components))
        offset.hour, offset.minute, offset.second = time_components

    return offset


def _parse_tz_delta(tz_delta):
    match = re.match(
        r"(?P<sign>[+-])?(?P<h>\d{1,2})(:(?P<m>\d{2})(:(?P<s>\d{2}))?)?",
        tz_delta,
    )
    # Anything passed to this function should already have hit an equivalent
    # regular expression to find the section to parse.
    assert match is not None, tz_delta

    h, m, s = (
        int(v) if v is not None else 0
        for v in map(match.group, ("h", "m", "s"))
    )

    total = h * 3600 + m * 60 + s

    if not -86400 < total < 86400:
        raise ValueError(
            "Offset must be strictly between -24h and +24h:" + tz_delta
        )

    # Yes, +5 maps to an offset of -5h
    if match.group("sign") != "-":
        total *= -1

    return total
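A short demonstration of how the grammar above decomposes a POSIX TZ string; the regular expression is copied from `parser_re`, and the sign convention is the POSIX one noted in `_parse_tz_delta` above (a positive offset means west of UTC, so `8` is UTC-8):

import re

TZ_RE = re.compile(
    r"(?P<std>[^<0-9:.+-]+|<[a-zA-Z0-9+\-]+>)"
    r"((?P<stdoff>[+-]?\d{1,2}(:\d{2}(:\d{2})?)?)"
    r"((?P<dst>[^0-9:.+-]+|<[a-zA-Z0-9+\-]+>)"
    r"((?P<dstoff>[+-]?\d{1,2}(:\d{2}(:\d{2})?)?))?"
    r")?"
    r")?$"
)

tz_str = "PST8PDT,M3.2.0,M11.1.0"
offset_str, rules = tz_str.split(",", 1)
m = TZ_RE.match(offset_str)
print(m.group("std"), m.group("stdoff"), m.group("dst"), m.group("dstoff"))
# PST 8 PDT None -- no explicit DST offset, so it defaults to stdoff + 1 hour
print(rules)  # M3.2.0,M11.1.0 -- second Sunday of March through first Sunday of November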
@@ -11,9 +11,9 @@ from bleach.sanitizer import (


 # yyyymmdd
-__releasedate__ = "20231006"
+__releasedate__ = "20241029"
 # x.y.z or x.y.z.dev0 -- semver
-__version__ = "6.1.0"
+__version__ = "6.2.0"


 __all__ = ["clean", "linkify"]
@@ -1,7 +1,7 @@
 from __future__ import absolute_import, division, unicode_literals

-from six import text_type
-from six.moves import http_client, urllib
+from bleach.six_shim import text_type
+from bleach.six_shim import http_client, urllib

 import codecs
 import re

@@ -1,6 +1,6 @@
 from __future__ import absolute_import, division, unicode_literals

-from six import unichr as chr
+from bleach.six_shim import unichr as chr

 from collections import deque, OrderedDict
 from sys import version_info

@@ -1,5 +1,5 @@
 from __future__ import absolute_import, division, unicode_literals
-from six import text_type
+from bleach.six_shim import text_type

 from bisect import bisect_left

@@ -7,7 +7,7 @@ try:
 except ImportError:
     from collections import Mapping

-from six import text_type, PY3
+from bleach.six_shim import text_type, PY3

 if PY3:
     import xml.etree.ElementTree as default_etree

@@ -1,6 +1,6 @@
 from __future__ import absolute_import, division, unicode_literals

-from six import text_type
+from bleach.six_shim import text_type

 from . import base
 from ..constants import namespaces, voidElements

@@ -12,7 +12,7 @@ import re
 import warnings
 from xml.sax.saxutils import escape, unescape

-from six.moves import urllib_parse as urlparse
+from bleach.six_shim import urllib_parse as urlparse

 from . import base
 from ..constants import namespaces, prefixes

@@ -1,5 +1,5 @@
 from __future__ import absolute_import, division, unicode_literals
-from six import with_metaclass, viewkeys
+from bleach.six_shim import viewkeys

 import types

@@ -423,7 +423,7 @@ def getPhases(debug):
         return type

     # pylint:disable=unused-argument
-    class Phase(with_metaclass(getMetaclass(debug, log))):
+    class Phase(metaclass=getMetaclass(debug, log)):
         """Base class for helper object that implements each phase of processing
         """
         __slots__ = ("parser", "tree", "__startTagCache", "__endTagCache")

@@ -1,5 +1,5 @@
 from __future__ import absolute_import, division, unicode_literals
-from six import text_type
+from bleach.six_shim import text_type

 import re

@@ -1,5 +1,5 @@
 from __future__ import absolute_import, division, unicode_literals
-from six import text_type
+from bleach.six_shim import text_type

 from ..constants import scopingElements, tableInsertModeElements, namespaces

@@ -1,7 +1,7 @@
 from __future__ import absolute_import, division, unicode_literals
 # pylint:disable=protected-access

-from six import text_type
+from bleach.six_shim import text_type

 import re

@@ -28,7 +28,7 @@ from . import etree as etree_builders
 from .. import _ihatexml

 import lxml.etree as etree
-from six import PY3, binary_type
+from bleach.six_shim import PY3, binary_type


 fullTree = True

@@ -3,7 +3,7 @@ from __future__ import absolute_import, division, unicode_literals
 from collections import OrderedDict
 import re

-from six import string_types
+from bleach.six_shim import string_types

 from . import base
 from .._utils import moduleFactoryFactory

@@ -1,5 +1,5 @@
 from __future__ import absolute_import, division, unicode_literals
-from six import text_type
+from bleach.six_shim import text_type

 from collections import OrderedDict
@@ -7,8 +7,12 @@ set -o pipefail
 BLEACH_VENDOR_DIR=${BLEACH_VENDOR_DIR:-"."}
 DEST=${DEST:-"."}

+# Install with no dependencies
 pip install --no-binary all --no-compile --no-deps -r "${BLEACH_VENDOR_DIR}/vendor.txt" --target "${DEST}"

+# Apply patches
+(cd "${DEST}" && patch -p2 < 01_html5lib_six.patch)
+
 # install Python 3.6.14 urllib.urlparse for #536
 curl --proto '=https' --tlsv1.2 -o "${DEST}/parse.py" https://raw.githubusercontent.com/python/cpython/v3.6.14/Lib/urllib/parse.py
 (cd "${DEST}" && sha256sum parse.py > parse.py.SHA256SUM)
@@ -396,16 +396,25 @@ class BleachHTMLTokenizer(HTMLTokenizer):
                 # name that abruptly ends, but we should treat that like
                 # character data
                 yield {"type": TAG_TOKEN_TYPE_CHARACTERS, "data": self.stream.get_tag()}

             elif last_error_token["data"] in (
+                "duplicate-attribute",
                 "eof-in-attribute-name",
                 "eof-in-attribute-value-no-quotes",
+                "expected-end-of-tag-but-got-eof",
             ):
                 # Handle the case where the text being parsed ends with <
-                # followed by a series of characters and then space and then
-                # more characters. It's treated as a tag name followed by an
+                # followed by characters and then space and then:
+                #
+                # * more characters
+                # * more characters repeated with a space between (e.g. "abc abc")
+                # * more characters and then a space and then an EOF (e.g. "abc def ")
+                #
+                # These cases are treated as a tag name followed by an
                 # attribute that abruptly ends, but we should treat that like
-                # character data.
+                # character data instead.
                 yield {"type": TAG_TOKEN_TYPE_CHARACTERS, "data": self.stream.get_tag()}

             else:
                 yield last_error_token
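With the extra error tokens handled, input that ends mid-tag now round-trips as escaped character data. A hedged sketch of what this looks like from the public API (the exact output is what we would expect from `bleach.clean` with default settings, worth re-checking against the installed version):

import bleach

# Ends with "<" + tag-ish text + attribute-ish text + trailing space (EOF in attribute).
print(bleach.clean("some text <abc def "))
# Expected along the lines of: some text &lt;abc def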
lib/bleach/six_shim.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+"""
+Replacement module for what html5lib uses six for.
+"""
+
+import http.client
+import operator
+import urllib
+
+
+PY3 = True
+binary_type = bytes
+string_types = (str,)
+text_type = str
+unichr = chr
+viewkeys = operator.methodcaller("keys")
+
+http_client = http.client
+urllib = urllib
+urllib_parse = urllib.parse
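The only non-obvious line in the shim is `viewkeys`; a quick sanity check of what `operator.methodcaller("keys")` produces:

import operator

viewkeys = operator.methodcaller("keys")
print(list(viewkeys({"a": 1, "b": 2})))  # ['a', 'b'] -- same as calling .keys() directly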
@@ -1,4 +1,4 @@
 from .core import contents, where

 __all__ = ["contents", "where"]
-__version__ = "2024.02.02"
+__version__ = "2024.08.30"
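For orientation, the bundle edited below is the file `certifi.where()` points at; a minimal check of the bump (both attributes are part of the real certifi API):

import certifi

print(certifi.__version__)  # "2024.08.30" after this change
print(certifi.where())      # filesystem path of the bundled cacert.pem edited below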
@@ -3485,46 +3485,6 @@ DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ
 +RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A=
 -----END CERTIFICATE-----

-# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH
-# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH
-# Label: "GLOBALTRUST 2020"
-# Serial: 109160994242082918454945253
-# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8
-# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2
-# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a
------BEGIN CERTIFICATE-----
-MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG
-A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw
-FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx
-MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u
-aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq
-hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b
-RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z
-YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3
-QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw
-yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+
-BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ
-SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH
-r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0
-4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me
-dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw
-q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2
-nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
-AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu
-H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA
-VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC
-XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd
-6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf
-+I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi
-kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7
-wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB
-TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C
-MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn
-4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I
-aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy
-qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg==
------END CERTIFICATE-----
-
 # Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz
 # Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz
 # Label: "ANF Secure Server Root CA"

@@ -4812,3 +4772,158 @@ X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q
 ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm
 dTdmQRCsu/WU48IxK63nI1bMNSWSs1A=
 -----END CERTIFICATE-----
+
+# Issuer: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA
+# Subject: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA
+# Label: "FIRMAPROFESIONAL CA ROOT-A WEB"
+# Serial: 65916896770016886708751106294915943533
+# MD5 Fingerprint: 82:b2:ad:45:00:82:b0:66:63:f8:5f:c3:67:4e:ce:a3
+# SHA1 Fingerprint: a8:31:11:74:a6:14:15:0d:ca:77:dd:0e:e4:0c:5d:58:fc:a0:72:a5
+# SHA256 Fingerprint: be:f2:56:da:f2:6e:9c:69:bd:ec:16:02:35:97:98:f3:ca:f7:18:21:a0:3e:01:82:57:c5:3c:65:61:7f:3d:4a
+-----BEGIN CERTIFICATE-----
+MIICejCCAgCgAwIBAgIQMZch7a+JQn81QYehZ1ZMbTAKBggqhkjOPQQDAzBuMQsw
+CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE
+YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB
+IFJPT1QtQSBXRUIwHhcNMjIwNDA2MDkwMTM2WhcNNDcwMzMxMDkwMTM2WjBuMQsw
+CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE
+YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB
+IFJPT1QtQSBXRUIwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARHU+osEaR3xyrq89Zf
+e9MEkVz6iMYiuYMQYneEMy3pA4jU4DP37XcsSmDq5G+tbbT4TIqk5B/K6k84Si6C
+cyvHZpsKjECcfIr28jlgst7L7Ljkb+qbXbdTkBgyVcUgt5SjYzBhMA8GA1UdEwEB
+/wQFMAMBAf8wHwYDVR0jBBgwFoAUk+FDY1w8ndYn81LsF7Kpryz3dvgwHQYDVR0O
+BBYEFJPhQ2NcPJ3WJ/NS7Beyqa8s93b4MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjO
+PQQDAwNoADBlAjAdfKR7w4l1M+E7qUW/Runpod3JIha3RxEL2Jq68cgLcFBTApFw
+hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG
+XSaQpYXFuXqUPoeovQA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA CYBER Root CA"
+# Serial: 85076849864375384482682434040119489222
+# MD5 Fingerprint: 0b:33:a0:97:52:95:d4:a9:fd:bb:db:6e:a3:55:5b:51
+# SHA1 Fingerprint: f6:b1:1c:1a:83:38:e9:7b:db:b3:a8:c8:33:24:e0:2d:9c:7f:26:66
+# SHA256 Fingerprint: 3f:63:bb:28:14:be:17:4e:c8:b6:43:9c:f0:8d:6d:56:f0:b7:c4:05:88:3a:56:48:a3:34:42:4d:6b:3e:c5:58
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIQQAE0jMIAAAAAAAAAATzyxjANBgkqhkiG9w0BAQwFADBQ
+MQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FOLUNBMRAwDgYDVQQLEwdSb290
+IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3QgQ0EwHhcNMjIxMTIyMDY1NDI5
+WhcNNDcxMTIyMTU1OTU5WjBQMQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FO
+LUNBMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3Qg
+Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDG+Moe2Qkgfh1sTs6P
+40czRJzHyWmqOlt47nDSkvgEs1JSHWdyKKHfi12VCv7qze33Kc7wb3+szT3vsxxF
+avcokPFhV8UMxKNQXd7UtcsZyoC5dc4pztKFIuwCY8xEMCDa6pFbVuYdHNWdZsc/
+34bKS1PE2Y2yHer43CdTo0fhYcx9tbD47nORxc5zb87uEB8aBs/pJ2DFTxnk684i
+JkXXYJndzk834H/nY62wuFm40AZoNWDTNq5xQwTxaWV4fPMf88oon1oglWa0zbfu
+j3ikRRjpJi+NmykosaS3Om251Bw4ckVYsV7r8Cibt4LK/c/WMw+f+5eesRycnupf
+Xtuq3VTpMCEobY5583WSjCb+3MX2w7DfRFlDo7YDKPYIMKoNM+HvnKkHIuNZW0CP
+2oi3aQiotyMuRAlZN1vH4xfyIutuOVLF3lSnmMlLIJXcRolftBL5hSmO68gnFSDA
+S9TMfAxsNAwmmyYxpjyn9tnQS6Jk/zuZQXLB4HCX8SS7K8R0IrGsayIyJNN4KsDA
+oS/xUgXJP+92ZuJF2A09rZXIx4kmyA+upwMu+8Ff+iDhcK2wZSA3M2Cw1a/XDBzC
+kHDXShi8fgGwsOsVHkQGzaRP6AzRwyAQ4VRlnrZR0Bp2a0JaWHY06rc3Ga4udfmW
+5cFZ95RXKSWNOkyrTZpB0F8mAwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSdhWEUfMFib5do5E83QOGt4A1WNzAd
+BgNVHQ4EFgQUnYVhFHzBYm+XaORPN0DhreANVjcwDQYJKoZIhvcNAQEMBQADggIB
+AGSPesRiDrWIzLjHhg6hShbNcAu3p4ULs3a2D6f/CIsLJc+o1IN1KriWiLb73y0t
+tGlTITVX1olNc79pj3CjYcya2x6a4CD4bLubIp1dhDGaLIrdaqHXKGnK/nZVekZn
+68xDiBaiA9a5F/gZbG0jAn/xX9AKKSM70aoK7akXJlQKTcKlTfjF/biBzysseKNn
+TKkHmvPfXvt89YnNdJdhEGoHK4Fa0o635yDRIG4kqIQnoVesqlVYL9zZyvpoBJ7t
+RCT5dEA7IzOrg1oYJkK2bVS1FmAwbLGg+LhBoF1JSdJlBTrq/p1hvIbZv97Tujqx
+f36SNI7JAG7cmL3c7IAFrQI932XtCwP39xaEBDG6k5TY8hL4iuO/Qq+n1M0RFxbI
+Qh0UqEL20kCGoE8jypZFVmAGzbdVAaYBlGX+bgUJurSkquLvWL69J1bY73NxW0Qz
+8ppy6rBePm6pUlvscG21h483XjyMnM7k8M4MZ0HMzvaAq07MTFb1wWFZk7Q+ptq4
+NxKfKjLji7gh7MMrZQzvIt6IKTtM1/r+t+FHvpw+PoP7UV31aPcuIYXcv/Fa4nzX
+xeSDwWrruoBa3lwtcHb4yOWHh8qgnaHlIhInD0Q9HWzq1MKLL295q39QpsQZp6F6
+t5b5wR9iWqJDB0BeJsas7a5wFsWqynKKTbDPAYsDP27X
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd.
+# Subject: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd.
+# Label: "SecureSign Root CA12"
+# Serial: 587887345431707215246142177076162061960426065942
+# MD5 Fingerprint: c6:89:ca:64:42:9b:62:08:49:0b:1e:7f:e9:07:3d:e8
+# SHA1 Fingerprint: 7a:22:1e:3d:de:1b:06:ac:9e:c8:47:70:16:8e:3c:e5:f7:6b:06:f4
+# SHA256 Fingerprint: 3f:03:4b:b5:70:4d:44:b2:d0:85:45:a0:20:57:de:93:eb:f3:90:5f:ce:72:1a:cb:c7:30:c0:6d:da:ee:90:4e
+-----BEGIN CERTIFICATE-----
+MIIDcjCCAlqgAwIBAgIUZvnHwa/swlG07VOX5uaCwysckBYwDQYJKoZIhvcNAQEL
+BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u
+LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExMjAeFw0yMDA0MDgw
+NTM2NDZaFw00MDA0MDgwNTM2NDZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD
+eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS
+b290IENBMTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6OcE3emhF
+KxS06+QT61d1I02PJC0W6K6OyX2kVzsqdiUzg2zqMoqUm048luT9Ub+ZyZN+v/mt
+p7JIKwccJ/VMvHASd6SFVLX9kHrko+RRWAPNEHl57muTH2SOa2SroxPjcf59q5zd
+J1M3s6oYwlkm7Fsf0uZlfO+TvdhYXAvA42VvPMfKWeP+bl+sg779XSVOKik71gur
+FzJ4pOE+lEa+Ym6b3kaosRbnhW70CEBFEaCeVESE99g2zvVQR9wsMJvuwPWW0v4J
+hscGWa5Pro4RmHvzC1KqYiaqId+OJTN5lxZJjfU+1UefNzFJM3IFTQy2VYzxV4+K
+h9GtxRESOaCtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBRXNPN0zwRL1SXm8UC2LEzZLemgrTANBgkqhkiG9w0BAQsF
+AAOCAQEAPrvbFxbS8hQBICw4g0utvsqFepq2m2um4fylOqyttCg6r9cBg0krY6Ld
+mmQOmFxv3Y67ilQiLUoT865AQ9tPkbeGGuwAtEGBpE/6aouIs3YIcipJQMPTw4WJ
+mBClnW8Zt7vPemVV2zfrPIpyMpcemik+rY3moxtt9XUa5rBouVui7mlHJzWhhpmA
+8zNL4WukJsPvdFlseqJkth5Ew1DgDzk9qTPxpfPSvWKErI4cqc1avTc7bgoitPQV
+55FYxTpE05Uo2cBl6XLK0A+9H7MV2anjpEcJnuDLN/v9vZfVvhgaaaI5gdka9at/
+yOPiZwud9AzqVN/Ssq+xIvEg37xEHA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd.
+# Subject: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd.
+# Label: "SecureSign Root CA14"
+# Serial: 575790784512929437950770173562378038616896959179
+# MD5 Fingerprint: 71:0d:72:fa:92:19:65:5e:89:04:ac:16:33:f0:bc:d5
+# SHA1 Fingerprint: dd:50:c0:f7:79:b3:64:2e:74:a2:b8:9d:9f:d3:40:dd:bb:f0:f2:4f
+# SHA256 Fingerprint: 4b:00:9c:10:34:49:4f:9a:b5:6b:ba:3b:a1:d6:27:31:fc:4d:20:d8:95:5a:dc:ec:10:a9:25:60:72:61:e3:38
+-----BEGIN CERTIFICATE-----
+MIIFcjCCA1qgAwIBAgIUZNtaDCBO6Ncpd8hQJ6JaJ90t8sswDQYJKoZIhvcNAQEM
+BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u
+LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNDAeFw0yMDA0MDgw
+NzA2MTlaFw00NTA0MDgwNzA2MTlaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD
+eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS
+b290IENBMTQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDF0nqh1oq/
+FjHQmNE6lPxauG4iwWL3pwon71D2LrGeaBLwbCRjOfHw3xDG3rdSINVSW0KZnvOg
+vlIfX8xnbacuUKLBl422+JX1sLrcneC+y9/3OPJH9aaakpUqYllQC6KxNedlsmGy
+6pJxaeQp8E+BgQQ8sqVb1MWoWWd7VRxJq3qdwudzTe/NCcLEVxLbAQ4jeQkHO6Lo
+/IrPj8BGJJw4J+CDnRugv3gVEOuGTgpa/d/aLIJ+7sr2KeH6caH3iGicnPCNvg9J
+kdjqOvn90Ghx2+m1K06Ckm9mH+Dw3EzsytHqunQG+bOEkJTRX45zGRBdAuVwpcAQ
+0BB8b8VYSbSwbprafZX1zNoCr7gsfXmPvkPx+SgojQlD+Ajda8iLLCSxjVIHvXib
+y8posqTdDEx5YMaZ0ZPxMBoH064iwurO8YQJzOAUbn8/ftKChazcqRZOhaBgy/ac
+18izju3Gm5h1DVXoX+WViwKkrkMpKBGk5hIwAUt1ax5mnXkvpXYvHUC0bcl9eQjs
+0Wq2XSqypWa9a4X0dFbD9ed1Uigspf9mR6XU/v6eVL9lfgHWMI+lNpyiUBzuOIAB
+SMbHdPTGrMNASRZhdCyvjG817XsYAFs2PJxQDcqSMxDxJklt33UkN4Ii1+iW/RVL
+ApY+B3KVfqs9TC7XyvDf4Fg/LS8EmjijAQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
+AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUBpOjCl4oaTeqYR3r6/wtbyPk
+86AwDQYJKoZIhvcNAQEMBQADggIBAJaAcgkGfpzMkwQWu6A6jZJOtxEaCnFxEM0E
+rX+lRVAQZk5KQaID2RFPeje5S+LGjzJmdSX7684/AykmjbgWHfYfM25I5uj4V7Ib
+ed87hwriZLoAymzvftAj63iP/2SbNDefNWWipAA9EiOWWF3KY4fGoweITedpdopT
+zfFP7ELyk+OZpDc8h7hi2/DsHzc/N19DzFGdtfCXwreFamgLRB7lUe6TzktuhsHS
+DCRZNhqfLJGP4xjblJUK7ZGqDpncllPjYYPGFrojutzdfhrGe0K22VoF3Jpf1d+4
+2kd92jjbrDnVHmtsKheMYc2xbXIBw8MgAGJoFjHVdqqGuw6qnsb58Nn4DSEC5MUo
+FlkRudlpcyqSeLiSV5sI8jrlL5WwWLdrIBRtFO8KvH7YVdiI2i/6GaX7i+B/OfVy
+K4XELKzvGUWSTLNhB9xNH27SgRNcmvMSZ4PPmz+Ln52kuaiWA3rF7iDeM9ovnhp6
+dB7h7sxaOgTdsxoEqBRjrLdHEoOabPXm6RUVkRqEGQ6UROcSjiVbgGcZ3GOTEAtl
+Lor6CZpO2oYofaphNdgOpygau1LgePhsumywbrmHXumZNTfxPWQrqaA0k89jL9WB
+365jJ6UeTo3cKXhZ+PmhIIynJkBugnLNeLLIjzwec+fBH7/PzqUqm9tEZDKgu39c
+JRNItX+S
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd.
+# Subject: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd.
+# Label: "SecureSign Root CA15"
+# Serial: 126083514594751269499665114766174399806381178503
+# MD5 Fingerprint: 13:30:fc:c4:62:a6:a9:de:b5:c1:68:af:b5:d2:31:47
+# SHA1 Fingerprint: cb:ba:83:c8:c1:5a:5d:f1:f9:73:6f:ca:d7:ef:28:13:06:4a:07:7d
+# SHA256 Fingerprint: e7:78:f0:f0:95:fe:84:37:29:cd:1a:00:82:17:9e:53:14:a9:c2:91:44:28:05:e1:fb:1d:8f:b6:b8:88:6c:3a
+-----BEGIN CERTIFICATE-----
+MIICIzCCAamgAwIBAgIUFhXHw9hJp75pDIqI7fBw+d23PocwCgYIKoZIzj0EAwMw
+UTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28uLCBM
+dGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNTAeFw0yMDA0MDgwODMy
+NTZaFw00NTA0MDgwODMyNTZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpDeWJl
+cnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBSb290
+IENBMTUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQLUHSNZDKZmbPSYAi4Io5GdCx4
+wCtELW1fHcmuS1Iggz24FG1Th2CeX2yF2wYUleDHKP+dX+Sq8bOLbe1PL0vJSpSR
+ZHX+AezB2Ot6lHhWGENfa4HL9rzatAy2KZMIaY+jQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT
+9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp
+4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6
+bkU6iYAZezKYVWOr62Nuk22rGwlgMU4=
+-----END CERTIFICATE-----
@@ -159,6 +159,8 @@ def from_bytes(

     results: CharsetMatches = CharsetMatches()

+    early_stop_results: CharsetMatches = CharsetMatches()
+
     sig_encoding, sig_payload = identify_sig_or_bom(sequences)

     if sig_encoding is not None:

@@ -221,16 +223,20 @@ def from_bytes(
         try:
             if is_too_large_sequence and is_multi_byte_decoder is False:
                 str(
-                    sequences[: int(50e4)]
-                    if strip_sig_or_bom is False
-                    else sequences[len(sig_payload) : int(50e4)],
+                    (
+                        sequences[: int(50e4)]
+                        if strip_sig_or_bom is False
+                        else sequences[len(sig_payload) : int(50e4)]
+                    ),
                     encoding=encoding_iana,
                 )
             else:
                 decoded_payload = str(
-                    sequences
-                    if strip_sig_or_bom is False
-                    else sequences[len(sig_payload) :],
+                    (
+                        sequences
+                        if strip_sig_or_bom is False
+                        else sequences[len(sig_payload) :]
+                    ),
                     encoding=encoding_iana,
                 )
         except (UnicodeDecodeError, LookupError) as e:

@@ -367,7 +373,13 @@ def from_bytes(
             and not lazy_str_hard_failure
         ):
             fallback_entry = CharsetMatch(
-                sequences, encoding_iana, threshold, False, [], decoded_payload
+                sequences,
+                encoding_iana,
+                threshold,
+                False,
+                [],
+                decoded_payload,
+                preemptive_declaration=specified_encoding,
             )
             if encoding_iana == specified_encoding:
                 fallback_specified = fallback_entry

@@ -421,28 +433,58 @@ def from_bytes(
             ),
         )

-        results.append(
-            CharsetMatch(
-                sequences,
-                encoding_iana,
-                mean_mess_ratio,
-                bom_or_sig_available,
-                cd_ratios_merged,
-                decoded_payload,
-            )
+        current_match = CharsetMatch(
+            sequences,
+            encoding_iana,
+            mean_mess_ratio,
+            bom_or_sig_available,
+            cd_ratios_merged,
+            (
+                decoded_payload
+                if (
+                    is_too_large_sequence is False
+                    or encoding_iana in [specified_encoding, "ascii", "utf_8"]
+                )
+                else None
+            ),
+            preemptive_declaration=specified_encoding,
         )

+        results.append(current_match)
+
         if (
             encoding_iana in [specified_encoding, "ascii", "utf_8"]
             and mean_mess_ratio < 0.1
         ):
+            # If md says nothing to worry about, then... stop immediately!
+            if mean_mess_ratio == 0.0:
+                logger.debug(
+                    "Encoding detection: %s is most likely the one.",
+                    current_match.encoding,
+                )
+                if explain:
+                    logger.removeHandler(explain_handler)
+                    logger.setLevel(previous_logger_level)
+                return CharsetMatches([current_match])
+
+            early_stop_results.append(current_match)
+
+        if (
+            len(early_stop_results)
+            and (specified_encoding is None or specified_encoding in tested)
+            and "ascii" in tested
+            and "utf_8" in tested
+        ):
+            probable_result: CharsetMatch = early_stop_results.best()  # type: ignore[assignment]
             logger.debug(
-                "Encoding detection: %s is most likely the one.", encoding_iana
+                "Encoding detection: %s is most likely the one.",
+                probable_result.encoding,
             )
             if explain:
                 logger.removeHandler(explain_handler)
                 logger.setLevel(previous_logger_level)
-            return CharsetMatches([results[encoding_iana]])
+
+            return CharsetMatches([probable_result])

         if encoding_iana == sig_encoding:
             logger.debug(
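From the caller's side, the early-stop path is invisible except for speed; a sketch, with the printed encoding being the typical result for a clean UTF-8 payload rather than a guarantee:

from charset_normalizer import from_bytes

payload = "Bonjour, ceci est un café bien serré.".encode("utf_8")
best = from_bytes(payload).best()
print(best.encoding)  # typically 'utf_8'; such a match can now short-circuit the scan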
@@ -109,6 +109,14 @@ def cli_detect(argv: Optional[List[str]] = None) -> int:
         dest="force",
         help="Replace file without asking if you are sure, use this flag with caution.",
     )
+    parser.add_argument(
+        "-i",
+        "--no-preemptive",
+        action="store_true",
+        default=False,
+        dest="no_preemptive",
+        help="Disable looking at a charset declaration to hint the detector.",
+    )
     parser.add_argument(
         "-t",
         "--threshold",
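A hedged sketch of driving the new flag programmatically; `sample.txt` is a hypothetical input file, and the argument list mirrors `normalizer -i sample.txt` on the command line:

from charset_normalizer.cli import cli_detect

exit_code = cli_detect(["--no-preemptive", "sample.txt"])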
@@ -133,21 +141,35 @@ def cli_detect(argv: Optional[List[str]] = None) -> int:
     args = parser.parse_args(argv)

     if args.replace is True and args.normalize is False:
+        if args.files:
+            for my_file in args.files:
+                my_file.close()
         print("Use --replace in addition of --normalize only.", file=sys.stderr)
         return 1

     if args.force is True and args.replace is False:
+        if args.files:
+            for my_file in args.files:
+                my_file.close()
         print("Use --force in addition of --replace only.", file=sys.stderr)
         return 1

     if args.threshold < 0.0 or args.threshold > 1.0:
+        if args.files:
+            for my_file in args.files:
+                my_file.close()
         print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr)
         return 1

     x_ = []

     for my_file in args.files:
-        matches = from_fp(my_file, threshold=args.threshold, explain=args.verbose)
+        matches = from_fp(
+            my_file,
+            threshold=args.threshold,
+            explain=args.verbose,
+            preemptive_behaviour=args.no_preemptive is False,
+        )

         best_guess = matches.best()

@@ -155,9 +177,11 @@ def cli_detect(argv: Optional[List[str]] = None) -> int:
             print(
                 'Unable to identify originating encoding for "{}". {}'.format(
                     my_file.name,
-                    "Maybe try increasing maximum amount of chaos."
-                    if args.threshold < 1.0
-                    else "",
+                    (
+                        "Maybe try increasing maximum amount of chaos."
+                        if args.threshold < 1.0
+                        else ""
+                    ),
                 ),
                 file=sys.stderr,
             )

@@ -258,8 +282,8 @@ def cli_detect(argv: Optional[List[str]] = None) -> int:
             try:
                 x_[0].unicode_path = join(dir_path, ".".join(o_))

-                with open(x_[0].unicode_path, "w", encoding="utf-8") as fp:
-                    fp.write(str(best_guess))
+                with open(x_[0].unicode_path, "wb") as fp:
+                    fp.write(best_guess.output())
             except IOError as e:
                 print(str(e), file=sys.stderr)
                 if my_file.closed is False:
@@ -544,6 +544,8 @@ COMMON_SAFE_ASCII_CHARACTERS: Set[str] = {
     "|",
     '"',
     "-",
+    "(",
+    ")",
 }
@@ -1,13 +1,24 @@
-from typing import Any, Dict, Optional, Union
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Optional
 from warnings import warn

 from .api import from_bytes
 from .constant import CHARDET_CORRESPONDENCE

+# TODO: remove this check when dropping Python 3.7 support
+if TYPE_CHECKING:
+    from typing_extensions import TypedDict
+
+    class ResultDict(TypedDict):
+        encoding: Optional[str]
+        language: str
+        confidence: Optional[float]
+

 def detect(
     byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any
-) -> Dict[str, Optional[Union[str, float]]]:
+) -> ResultDict:
     """
     chardet legacy method
     Detect the encoding of the given byte string. It should be mostly backward-compatible.
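The chardet-style entry point keeps its shape, only the annotation changed; a sketch (the exact language and confidence values depend on the payload and installed version):

from charset_normalizer import detect

print(detect("été indien, café crème".encode("utf-8")))
# e.g. {'encoding': 'utf-8', 'language': 'French', 'confidence': 1.0}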
@@ -236,7 +236,7 @@ class SuspiciousRange(MessDetectorPlugin):

     @property
     def ratio(self) -> float:
-        if self._character_count <= 24:
+        if self._character_count <= 13:
             return 0.0

         ratio_of_suspicious_range_usage: float = (

@@ -260,6 +260,7 @@ class SuperWeirdWordPlugin(MessDetectorPlugin):

         self._buffer: str = ""
         self._buffer_accent_count: int = 0
+        self._buffer_glyph_count: int = 0

     def eligible(self, character: str) -> bool:
         return True

@@ -279,6 +280,14 @@ class SuperWeirdWordPlugin(MessDetectorPlugin):
                 and is_thai(character) is False
             ):
                 self._foreign_long_watch = True
+            if (
+                is_cjk(character)
+                or is_hangul(character)
+                or is_katakana(character)
+                or is_hiragana(character)
+                or is_thai(character)
+            ):
+                self._buffer_glyph_count += 1
             return
         if not self._buffer:
             return

@@ -291,17 +300,20 @@ class SuperWeirdWordPlugin(MessDetectorPlugin):
             self._character_count += buffer_length

             if buffer_length >= 4:
-                if self._buffer_accent_count / buffer_length > 0.34:
+                if self._buffer_accent_count / buffer_length >= 0.5:
                     self._is_current_word_bad = True
                 # Word/Buffer ending with an upper case accentuated letter are so rare,
                 # that we will consider them all as suspicious. Same weight as foreign_long suspicious.
-                if (
+                elif (
                     is_accentuated(self._buffer[-1])
                     and self._buffer[-1].isupper()
                     and all(_.isupper() for _ in self._buffer) is False
                 ):
                     self._foreign_long_count += 1
                     self._is_current_word_bad = True
+                elif self._buffer_glyph_count == 1:
+                    self._is_current_word_bad = True
+                    self._foreign_long_count += 1
             if buffer_length >= 24 and self._foreign_long_watch:
                 camel_case_dst = [
                     i

@@ -325,6 +337,7 @@ class SuperWeirdWordPlugin(MessDetectorPlugin):
             self._foreign_long_watch = False
             self._buffer = ""
             self._buffer_accent_count = 0
+            self._buffer_glyph_count = 0
         elif (
             character not in {"<", ">", "-", "=", "~", "|", "_"}
             and character.isdigit() is False
@@ -1,9 +1,10 @@
 from encodings.aliases import aliases
 from hashlib import sha256
 from json import dumps
+from re import sub
 from typing import Any, Dict, Iterator, List, Optional, Tuple, Union

-from .constant import TOO_BIG_SEQUENCE
+from .constant import RE_POSSIBLE_ENCODING_INDICATION, TOO_BIG_SEQUENCE
 from .utils import iana_name, is_multi_byte_encoding, unicode_range

@@ -16,6 +17,7 @@ class CharsetMatch:
         has_sig_or_bom: bool,
         languages: "CoherenceMatches",
         decoded_payload: Optional[str] = None,
+        preemptive_declaration: Optional[str] = None,
     ):
         self._payload: bytes = payload

@@ -33,13 +35,13 @@ class CharsetMatch:

         self._string: Optional[str] = decoded_payload

+        self._preemptive_declaration: Optional[str] = preemptive_declaration
+
     def __eq__(self, other: object) -> bool:
         if not isinstance(other, CharsetMatch):
-            raise TypeError(
-                "__eq__ cannot be invoked on {} and {}.".format(
-                    str(other.__class__), str(self.__class__)
-                )
-            )
+            if isinstance(other, str):
+                return iana_name(other) == self.encoding
+            return False
         return self.encoding == other.encoding and self.fingerprint == other.fingerprint

     def __lt__(self, other: object) -> bool:
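A sketch of what the relaxed `__eq__` permits; previously both comparisons raised `TypeError` (the encoding assumed here is the usual guess for this payload, not a guarantee):

from charset_normalizer import from_bytes

match = from_bytes("café".encode("utf-8")).best()
print(match == "utf-8")  # True when the best guess is utf_8; the string goes through iana_name()
print(match == 12345)    # False instead of raising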
@@ -210,7 +212,24 @@ class CharsetMatch:
         """
         if self._output_encoding is None or self._output_encoding != encoding:
             self._output_encoding = encoding
-            self._output_payload = str(self).encode(encoding, "replace")
+            decoded_string = str(self)
+            if (
+                self._preemptive_declaration is not None
+                and self._preemptive_declaration.lower()
+                not in ["utf-8", "utf8", "utf_8"]
+            ):
+                patched_header = sub(
+                    RE_POSSIBLE_ENCODING_INDICATION,
+                    lambda m: m.string[m.span()[0] : m.span()[1]].replace(
+                        m.groups()[0], iana_name(self._output_encoding)  # type: ignore[arg-type]
+                    ),
+                    decoded_string[:8192],
+                    1,
+                )
+
+                decoded_string = patched_header + decoded_string[8192:]
+
+            self._output_payload = decoded_string.encode(encoding, "replace")

         return self._output_payload  # type: ignore

@@ -266,7 +285,7 @@ class CharsetMatches:
                 )
             )
             # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage)
-            if len(item.raw) <= TOO_BIG_SEQUENCE:
+            if len(item.raw) < TOO_BIG_SEQUENCE:
                 for match in self._results:
                     if match.fingerprint == item.fingerprint and match.chaos == item.chaos:
                         match.add_submatch(item)
@@ -2,5 +2,5 @@
 Expose version
 """

-__version__ = "3.3.2"
+__version__ = "3.4.0"
 VERSION = __version__.split(".")
@@ -292,7 +292,20 @@ class ConnectionManager:
         if self.server.ssl_adapter is not None:
             try:
                 s, ssl_env = self.server.ssl_adapter.wrap(s)
-            except errors.NoSSLError:
+            except errors.FatalSSLAlert as tls_connection_drop_error:
+                self.server.error_log(
+                    f'Client {addr !s} lost — peer dropped the TLS '
+                    'connection suddenly, during handshake: '
+                    f'{tls_connection_drop_error !s}',
+                )
+                return
+            except errors.NoSSLError as http_over_https_err:
+                self.server.error_log(
+                    f'Client {addr !s} attempted to speak plain HTTP into '
+                    'a TCP connection configured for TLS-only traffic — '
+                    'trying to send back a plain HTTP error response: '
+                    f'{http_over_https_err !s}',
+                )
                 msg = (
                     'The client sent a plain HTTP request, but '
                     'this server only speaks HTTPS on this port.'

@@ -311,8 +324,6 @@ class ConnectionManager:
                 if ex.args[0] not in errors.socket_errors_to_ignore:
                     raise
                 return
-            if not s:
-                return
             mf = self.server.ssl_adapter.makefile
             # Re-apply our timeout since we may have a new socket object
             if hasattr(s, 'settimeout'):
@@ -157,7 +157,7 @@ QUOTED_SLASH = b'%2F'
 QUOTED_SLASH_REGEX = re.compile(b''.join((b'(?i)', QUOTED_SLASH)))


-_STOPPING_FOR_INTERRUPT = object()  # sentinel used during shutdown
+_STOPPING_FOR_INTERRUPT = Exception()  # sentinel used during shutdown


 comma_separated_headers = [
@@ -209,7 +209,11 @@ class HeaderReader:
             if not line.endswith(CRLF):
                 raise ValueError('HTTP requires CRLF terminators')

-            if line[0] in (SPACE, TAB):
+            if line[:1] in (SPACE, TAB):
+                # NOTE: `type(line[0]) is int` and `type(line[:1]) is bytes`.
+                # NOTE: The former causes the following warning:
+                # NOTE: `BytesWarning('Comparison between bytes and int')`
+                # NOTE: The latter is equivalent and does not.
                 # It's a continuation line.
                 v = line.strip()
             else:
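A quick, runnable illustration of the pitfall the `line[:1]` change avoids:

SPACE, TAB = b' ', b'\t'
line = b' continuation'
print(line[0])                   # 32 -- indexing bytes yields an int, not bytes
print(line[:1])                  # b' ' -- slicing keeps the bytes type
print(line[0] in (SPACE, TAB))   # False, and `python -bb` escalates the warning to an error
print(line[:1] in (SPACE, TAB))  # True -- the comparison the header parser actually wants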
@ -1725,16 +1729,16 @@ class HTTPServer:
|
||||||
"""Run the server forever, and stop it cleanly on exit."""
|
"""Run the server forever, and stop it cleanly on exit."""
|
||||||
try:
|
try:
|
||||||
self.start()
|
self.start()
|
||||||
except (KeyboardInterrupt, IOError):
|
except KeyboardInterrupt as kb_intr_exc:
|
||||||
# The time.sleep call might raise
|
underlying_interrupt = self.interrupt
|
||||||
# "IOError: [Errno 4] Interrupted function call" on KBInt.
|
if not underlying_interrupt:
|
||||||
self.error_log('Keyboard Interrupt: shutting down')
|
self.interrupt = kb_intr_exc
|
||||||
self.stop()
|
raise kb_intr_exc from underlying_interrupt
|
||||||
raise
|
except SystemExit as sys_exit_exc:
|
||||||
except SystemExit:
|
underlying_interrupt = self.interrupt
|
||||||
self.error_log('SystemExit raised: shutting down')
|
if not underlying_interrupt:
|
||||||
self.stop()
|
self.interrupt = sys_exit_exc
|
||||||
raise
|
raise sys_exit_exc from underlying_interrupt
|
||||||
|
|
||||||
def prepare(self): # noqa: C901 # FIXME
|
def prepare(self): # noqa: C901 # FIXME
|
||||||
"""Prepare server to serving requests.
|
"""Prepare server to serving requests.
|
||||||
|
@@ -2111,6 +2115,13 @@ class HTTPServer:
         has completed.
         """
         self._interrupt = _STOPPING_FOR_INTERRUPT
+
+        if isinstance(interrupt, KeyboardInterrupt):
+            self.error_log('Keyboard Interrupt: shutting down')
+
+        if isinstance(interrupt, SystemExit):
+            self.error_log('SystemExit raised: shutting down')
+
         self.stop()
         self._interrupt = interrupt
 
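The hunk above is the body of the server's interrupt setter; the surrounding property definition is not shown in this excerpt. A minimal sketch of the pattern, under the assumption that `interrupt` is a property backed by a private `_interrupt` attribute (consistent with the lines shown, logging omitted):

    _STOPPING_FOR_INTERRUPT = Exception()  # sentinel, as in the diff


    class ShutdownFlagHolder:
        """Illustrative stand-in for the server's interrupt handling."""

        def __init__(self):
            self._interrupt = None

        @property
        def interrupt(self):
            return self._interrupt

        @interrupt.setter
        def interrupt(self, interrupt):
            # Flag a transitional state first, then stop, then record the
            # real cause, mirroring the ordering in the hunk above.
            self._interrupt = _STOPPING_FOR_INTERRUPT
            self.stop()
            self._interrupt = interrupt

        def stop(self):
            pass  # placeholder for the real shutdown logic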
@@ -27,12 +27,9 @@ except ImportError:
 
 from . import Adapter
 from .. import errors
-from .._compat import IS_ABOVE_OPENSSL10
 from ..makefile import StreamReader, StreamWriter
 from ..server import HTTPServer
 
-generic_socket_error = OSError
-
 
 def _assert_ssl_exc_contains(exc, *msgs):
     """Check whether SSL exception contains either of messages provided."""
@@ -265,62 +262,35 @@ class BuiltinSSLAdapter(Adapter):
 
     def wrap(self, sock):
         """Wrap and return the given socket, plus WSGI environ entries."""
-        EMPTY_RESULT = None, {}
         try:
             s = self.context.wrap_socket(
                 sock, do_handshake_on_connect=True, server_side=True,
             )
-        except ssl.SSLError as ex:
-            if ex.errno == ssl.SSL_ERROR_EOF:
-                # This is almost certainly due to the cherrypy engine
-                # 'pinging' the socket to assert it's connectable;
-                # the 'ping' isn't SSL.
-                return EMPTY_RESULT
-            elif ex.errno == ssl.SSL_ERROR_SSL:
-                if _assert_ssl_exc_contains(ex, 'http request'):
-                    # The client is speaking HTTP to an HTTPS server.
-                    raise errors.NoSSLError
-
-                # Check if it's one of the known errors
-                # Errors that are caught by PyOpenSSL, but thrown by
-                # built-in ssl
-                _block_errors = (
-                    'unknown protocol', 'unknown ca', 'unknown_ca',
-                    'unknown error',
-                    'https proxy request', 'inappropriate fallback',
-                    'wrong version number',
-                    'no shared cipher', 'certificate unknown',
-                    'ccs received early',
-                    'certificate verify failed',  # client cert w/o trusted CA
-                    'version too low',  # caused by SSL3 connections
-                    'unsupported protocol',  # caused by TLS1 connections
-                )
-                if _assert_ssl_exc_contains(ex, *_block_errors):
-                    # Accepted error, let's pass
-                    return EMPTY_RESULT
-            elif _assert_ssl_exc_contains(ex, 'handshake operation timed out'):
-                # This error is thrown by builtin SSL after a timeout
-                # when client is speaking HTTP to an HTTPS server.
-                # The connection can safely be dropped.
-                return EMPTY_RESULT
-            raise
-        except generic_socket_error as exc:
-            """It is unclear why exactly this happens.
-
-            It's reproducible only with openssl>1.0 and stdlib
-            :py:mod:`ssl` wrapper.
-            In CherryPy it's triggered by Checker plugin, which connects
-            to the app listening to the socket port in TLS mode via plain
-            HTTP during startup (from the same process).
-
-            Ref: https://github.com/cherrypy/cherrypy/issues/1618
-            """
-            is_error0 = exc.args == (0, 'Error')
-
-            if is_error0 and IS_ABOVE_OPENSSL10:
-                return EMPTY_RESULT
-            raise
+        except (
+            ssl.SSLEOFError,
+            ssl.SSLZeroReturnError,
+        ) as tls_connection_drop_error:
+            raise errors.FatalSSLAlert(
+                *tls_connection_drop_error.args,
+            ) from tls_connection_drop_error
+        except ssl.SSLError as generic_tls_error:
+            peer_speaks_plain_http_over_https = (
+                generic_tls_error.errno == ssl.SSL_ERROR_SSL and
+                _assert_ssl_exc_contains(generic_tls_error, 'http request')
+            )
+            if peer_speaks_plain_http_over_https:
+                reraised_connection_drop_exc_cls = errors.NoSSLError
+            else:
+                reraised_connection_drop_exc_cls = errors.FatalSSLAlert
+
+            raise reraised_connection_drop_exc_cls(
+                *generic_tls_error.args,
+            ) from generic_tls_error
+        except OSError as tcp_connection_drop_error:
+            raise errors.FatalSSLAlert(
+                *tcp_connection_drop_error.args,
+            ) from tcp_connection_drop_error
         return s, self.get_environ(s)
 
     def get_environ(self, sock):
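After this rewrite, `wrap()` no longer returns an empty placeholder for dropped TLS handshakes; it always raises, and callers switch from result checks to exception handling. A hedged sketch of the resulting calling pattern (the `FatalSSLAlert` name comes from the diff; the helper shown is illustrative):

    import ssl


    class FatalSSLAlert(Exception):
        """Stand-in for cheroot's errors.FatalSSLAlert."""


    def wrap_or_raise(context, sock):
        try:
            return context.wrap_socket(
                sock, do_handshake_on_connect=True, server_side=True,
            )
        except (ssl.SSLEOFError, ssl.SSLZeroReturnError) as tls_drop:
            # Connection dropped mid-handshake: translate it into a domain
            # error instead of returning a (None, {}) placeholder.
            raise FatalSSLAlert(*tls_drop.args) from tls_drop
        except OSError as tcp_drop:
            raise FatalSSLAlert(*tcp_drop.args) from tcp_drop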
@@ -150,7 +150,7 @@ class SSLFileobjectMixin:
         return self._safe_call(
             False,
             super(SSLFileobjectMixin, self).sendall,
-            *args, **kwargs
+            *args, **kwargs,
         )
 
     def send(self, *args, **kwargs):
@@ -158,7 +158,7 @@ class SSLFileobjectMixin:
         return self._safe_call(
             False,
             super(SSLFileobjectMixin, self).send,
-            *args, **kwargs
+            *args, **kwargs,
         )
 
 
@@ -196,6 +196,7 @@ class SSLConnectionProxyMeta:
 
     def lock_decorator(method):
         """Create a proxy method for a new class."""
+
         def proxy_wrapper(self, *args):
             self._lock.acquire()
             try:
@@ -212,6 +213,7 @@ class SSLConnectionProxyMeta:
 
     def make_property(property_):
         """Create a proxy method for a new class."""
+
         def proxy_prop_wrapper(self):
             return getattr(self._ssl_conn, property_)
         proxy_prop_wrapper.__name__ = property_
@@ -12,7 +12,10 @@ import pytest
 from .._compat import IS_MACOS, IS_WINDOWS  # noqa: WPS436
 from ..server import Gateway, HTTPServer
 from ..testing import (  # noqa: F401  # pylint: disable=unused-import
-    native_server, wsgi_server,
+    native_server,
+    thread_and_wsgi_server,
+    thread_and_native_server,
+    wsgi_server,
 )
 from ..testing import get_server_client
 
@@ -31,6 +34,28 @@ def http_request_timeout():
     return computed_timeout
 
 
+@pytest.fixture
+# pylint: disable=redefined-outer-name
+def wsgi_server_thread(thread_and_wsgi_server):  # noqa: F811
+    """Set up and tear down a Cheroot WSGI server instance.
+
+    This exposes the server thread.
+    """
+    server_thread, _srv = thread_and_wsgi_server
+    return server_thread
+
+
+@pytest.fixture
+# pylint: disable=redefined-outer-name
+def native_server_thread(thread_and_native_server):  # noqa: F811
+    """Set up and tear down a Cheroot HTTP server instance.
+
+    This exposes the server thread.
+    """
+    server_thread, _srv = thread_and_native_server
+    return server_thread
+
+
 @pytest.fixture
 # pylint: disable=redefined-outer-name
 def wsgi_server_client(wsgi_server):  # noqa: F811
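These fixtures all derive from a single `(thread, server)` pair, so tests can await the thread without spinning up a second server. A compressed, runnable sketch of that derivation pattern with hypothetical names:

    import pytest


    @pytest.fixture
    def thread_and_resource():
        """Hypothetical base fixture yielding a (worker, resource) pair."""
        worker, resource = object(), object()  # stand-ins for thread/server
        yield worker, resource


    @pytest.fixture
    def resource(thread_and_resource):
        """Derived fixture exposing only the resource."""
        _worker, resource = thread_and_resource
        return resource


    @pytest.fixture
    def worker(thread_and_resource):
        """Derived fixture exposing only the worker."""
        worker, _resource = thread_and_resource
        return worker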
@@ -1,7 +1,9 @@
 """Tests for TCP connection handling, including proper and timely close."""
 
 import errno
+from re import match as _matches_pattern
 import socket
+import sys
 import time
 import logging
 import traceback as traceback_
@@ -17,6 +19,7 @@ from cheroot._compat import IS_CI, IS_MACOS, IS_PYPY, IS_WINDOWS
 import cheroot.server
 
 
+IS_PY36 = sys.version_info[:2] == (3, 6)
 IS_SLOW_ENV = IS_MACOS or IS_WINDOWS
 
 
@@ -53,7 +56,8 @@ class Controller(helper.Controller):
                 "'POST' != request.method %r" %
                 req.environ['REQUEST_METHOD'],
             )
-        return "thanks for '%s'" % req.environ['wsgi.input'].read()
+        input_contents = req.environ['wsgi.input'].read().decode('utf-8')
+        return f"thanks for '{input_contents !s}'"
 
     def custom_204(req, resp):
         """Render response with status 204."""
@@ -605,18 +609,18 @@ def test_keepalive_conn_management(test_client):
         pytest.param(RuntimeError, 666, True, id='RuntimeError(666)'),
         pytest.param(socket.error, -1, True, id='socket.error(-1)'),
     ) + (
         pytest.param(
             ConnectionResetError, errno.ECONNRESET, False,
             id='ConnectionResetError(ECONNRESET)',
         ),
         pytest.param(
             BrokenPipeError, errno.EPIPE, False,
             id='BrokenPipeError(EPIPE)',
         ),
         pytest.param(
             BrokenPipeError, errno.ESHUTDOWN, False,
             id='BrokenPipeError(ESHUTDOWN)',
         ),
     ),
 )
 def test_broken_connection_during_tcp_fin(
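The parametrization above attaches explicit `id=` labels to each case, and those labels become the test node names in pytest's output. A tiny standalone illustration:

    import errno

    import pytest


    @pytest.mark.parametrize(
        ('exc_cls', 'error_no'),
        (
            pytest.param(ConnectionResetError, errno.ECONNRESET,
                         id='ConnectionResetError(ECONNRESET)'),
            pytest.param(BrokenPipeError, errno.EPIPE,
                         id='BrokenPipeError(EPIPE)'),
        ),
    )
    def test_exc_is_os_error(exc_cls, error_no):
        # Runs as e.g. test_exc_is_os_error[ConnectionResetError(ECONNRESET)]
        assert issubclass(exc_cls, OSError)
        assert isinstance(error_no, int)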
@@ -699,6 +703,275 @@ def test_broken_connection_during_tcp_fin(
     assert _close_kernel_socket.exception_leaked is exception_leaks
 
 
+def test_broken_connection_during_http_communication_fallback(  # noqa: WPS118
+        monkeypatch,
+        test_client,
+        testing_server,
+        wsgi_server_thread,
+):
+    """Test that unhandled internal error cascades into shutdown."""
+    def _raise_connection_reset(*_args, **_kwargs):
+        raise ConnectionResetError(666)
+
+    def _read_request_line(self):
+        monkeypatch.setattr(self.conn.rfile, 'close', _raise_connection_reset)
+        monkeypatch.setattr(self.conn.wfile, 'write', _raise_connection_reset)
+        _raise_connection_reset()
+
+    monkeypatch.setattr(
+        test_client.server_instance.ConnectionClass.RequestHandlerClass,
+        'read_request_line',
+        _read_request_line,
+    )
+
+    test_client.get_connection().send(b'GET / HTTP/1.1')
+    wsgi_server_thread.join()  # no extra logs upon server termination
+
+    actual_log_entries = testing_server.error_log.calls[:]
+    testing_server.error_log.calls.clear()  # prevent post-test assertions
+
+    expected_log_entries = (
+        (logging.WARNING, r'^socket\.error 666$'),
+        (
+            logging.INFO,
+            '^Got a connection error while handling a connection '
+            r'from .*:\d{1,5} \(666\)',
+        ),
+        (
+            logging.CRITICAL,
+            r'A fatal exception happened\. Setting the server interrupt flag '
+            r'to ConnectionResetError\(666,?\) and giving up\.\n\nPlease, '
+            'report this on the Cheroot tracker at '
+            r'<https://github\.com/cherrypy/cheroot/issues/new/choose>, '
+            'providing a full reproducer with as much context and details '
+            r'as possible\.$',
+        ),
+    )
+
+    assert len(actual_log_entries) == len(expected_log_entries)
+
+    for (  # noqa: WPS352
+            (expected_log_level, expected_msg_regex),
+            (actual_msg, actual_log_level, _tb),
+    ) in zip(expected_log_entries, actual_log_entries):
+        assert expected_log_level == actual_log_level
+        assert _matches_pattern(expected_msg_regex, actual_msg) is not None, (
+            f'{actual_msg !r} does not match {expected_msg_regex !r}'
+        )
+
+
+def test_kb_int_from_http_handler(
+        test_client,
+        testing_server,
+        wsgi_server_thread,
+):
+    """Test that a keyboard interrupt from HTTP handler causes shutdown."""
+    def _trigger_kb_intr(_req, _resp):
+        raise KeyboardInterrupt('simulated test handler keyboard interrupt')
+    testing_server.wsgi_app.handlers['/kb_intr'] = _trigger_kb_intr
+
+    http_conn = test_client.get_connection()
+    http_conn.putrequest('GET', '/kb_intr', skip_host=True)
+    http_conn.putheader('Host', http_conn.host)
+    http_conn.endheaders()
+    wsgi_server_thread.join()  # no extra logs upon server termination
+
+    actual_log_entries = testing_server.error_log.calls[:]
+    testing_server.error_log.calls.clear()  # prevent post-test assertions
+
+    expected_log_entries = (
+        (
+            logging.DEBUG,
+            '^Got a server shutdown request while handling a connection '
+            r'from .*:\d{1,5} \(simulated test handler keyboard interrupt\)$',
+        ),
+        (
+            logging.DEBUG,
+            '^Setting the server interrupt flag to KeyboardInterrupt'
+            r"\('simulated test handler keyboard interrupt',?\)$",
+        ),
+        (
+            logging.INFO,
+            '^Keyboard Interrupt: shutting down$',
+        ),
+    )
+
+    assert len(actual_log_entries) == len(expected_log_entries)
+
+    for (  # noqa: WPS352
+            (expected_log_level, expected_msg_regex),
+            (actual_msg, actual_log_level, _tb),
+    ) in zip(expected_log_entries, actual_log_entries):
+        assert expected_log_level == actual_log_level
+        assert _matches_pattern(expected_msg_regex, actual_msg) is not None, (
+            f'{actual_msg !r} does not match {expected_msg_regex !r}'
+        )
+
+
+@pytest.mark.xfail(
+    IS_CI and IS_PYPY and IS_PY36 and not IS_SLOW_ENV,
+    reason='Fails under PyPy 3.6 under Ubuntu 20.04 in CI for unknown reason',
+    # NOTE: Actually covers any Linux
+    strict=False,
+)
+def test_unhandled_exception_in_request_handler(
+        mocker,
+        monkeypatch,
+        test_client,
+        testing_server,
+        wsgi_server_thread,
+):
+    """Ensure worker threads are resilient to in-handler exceptions."""
+    class SillyMistake(BaseException):  # noqa: WPS418, WPS431
+        """A simulated crash within an HTTP handler."""
+
+    def _trigger_scary_exc(_req, _resp):
+        raise SillyMistake('simulated unhandled exception 💣 in test handler')
+
+    testing_server.wsgi_app.handlers['/scary_exc'] = _trigger_scary_exc
+
+    server_connection_close_spy = mocker.spy(
+        test_client.server_instance.ConnectionClass,
+        'close',
+    )
+
+    http_conn = test_client.get_connection()
+    http_conn.putrequest('GET', '/scary_exc', skip_host=True)
+    http_conn.putheader('Host', http_conn.host)
+    http_conn.endheaders()
+
+    # NOTE: This spy ensures the log entries get recorded before we're
+    # NOTE: testing them and before server shutdown, preserving their order
+    # NOTE: and making the log entry presence non-flaky.
+    while not server_connection_close_spy.called:  # noqa: WPS328
+        pass
+
+    assert len(testing_server.requests._threads) == 10
+    while testing_server.requests.idle < 10:  # noqa: WPS328
+        pass
+    assert len(testing_server.requests._threads) == 10
+    testing_server.interrupt = SystemExit('test requesting shutdown')
+    assert not testing_server.requests._threads
+    wsgi_server_thread.join()  # no extra logs upon server termination
+
+    actual_log_entries = testing_server.error_log.calls[:]
+    testing_server.error_log.calls.clear()  # prevent post-test assertions
+
+    expected_log_entries = (
+        (
+            logging.ERROR,
+            '^Unhandled error while processing an incoming connection '
+            'SillyMistake'
+            r"\('simulated unhandled exception 💣 in test handler',?\)$",
+        ),
+        (
+            logging.INFO,
+            '^SystemExit raised: shutting down$',
+        ),
+    )
+
+    assert len(actual_log_entries) == len(expected_log_entries)
+
+    for (  # noqa: WPS352
+            (expected_log_level, expected_msg_regex),
+            (actual_msg, actual_log_level, _tb),
+    ) in zip(expected_log_entries, actual_log_entries):
+        assert expected_log_level == actual_log_level
+        assert _matches_pattern(expected_msg_regex, actual_msg) is not None, (
+            f'{actual_msg !r} does not match {expected_msg_regex !r}'
+        )
+
+
+@pytest.mark.xfail(
+    IS_CI and IS_PYPY and IS_PY36 and not IS_SLOW_ENV,
+    reason='Fails under PyPy 3.6 under Ubuntu 20.04 in CI for unknown reason',
+    # NOTE: Actually covers any Linux
+    strict=False,
+)
+def test_remains_alive_post_unhandled_exception(
+        mocker,
+        monkeypatch,
+        test_client,
+        testing_server,
+        wsgi_server_thread,
+):
+    """Ensure worker threads are resilient to unhandled exceptions."""
+    class ScaryCrash(BaseException):  # noqa: WPS418, WPS431
+        """A simulated crash during HTTP parsing."""
+
+    _orig_read_request_line = (
+        test_client.server_instance.
+        ConnectionClass.RequestHandlerClass.
+        read_request_line
+    )
+
+    def _read_request_line(self):
+        _orig_read_request_line(self)
+        raise ScaryCrash(666)
+
+    monkeypatch.setattr(
+        test_client.server_instance.ConnectionClass.RequestHandlerClass,
+        'read_request_line',
+        _read_request_line,
+    )
+
+    server_connection_close_spy = mocker.spy(
+        test_client.server_instance.ConnectionClass,
+        'close',
+    )
+
+    # NOTE: The initial worker thread count is 10.
+    assert len(testing_server.requests._threads) == 10
+
+    test_client.get_connection().send(b'GET / HTTP/1.1')
+
+    # NOTE: This spy ensures the log entries get recorded before we're
+    # NOTE: testing them and before server shutdown, preserving their order
+    # NOTE: and making the log entry presence non-flaky.
+    while not server_connection_close_spy.called:  # noqa: WPS328
+        pass
+
+    # NOTE: This checks for whether there's any crashed threads
+    while testing_server.requests.idle < 10:  # noqa: WPS328
+        pass
+    assert len(testing_server.requests._threads) == 10
+    assert all(
+        worker_thread.is_alive()
+        for worker_thread in testing_server.requests._threads
+    )
+    testing_server.interrupt = SystemExit('test requesting shutdown')
+    assert not testing_server.requests._threads
+    wsgi_server_thread.join()  # no extra logs upon server termination
+
+    actual_log_entries = testing_server.error_log.calls[:]
+    testing_server.error_log.calls.clear()  # prevent post-test assertions
+
+    expected_log_entries = (
+        (
+            logging.ERROR,
+            '^Unhandled error while processing an incoming connection '
+            r'ScaryCrash\(666,?\)$',
+        ),
+        (
+            logging.INFO,
+            '^SystemExit raised: shutting down$',
+        ),
+    )
+
+    assert len(actual_log_entries) == len(expected_log_entries)
+
+    for (  # noqa: WPS352
+            (expected_log_level, expected_msg_regex),
+            (actual_msg, actual_log_level, _tb),
+    ) in zip(expected_log_entries, actual_log_entries):
+        assert expected_log_level == actual_log_level
+        assert _matches_pattern(expected_msg_regex, actual_msg) is not None, (
+            f'{actual_msg !r} does not match {expected_msg_regex !r}'
+        )
+
+
 @pytest.mark.parametrize(
     'timeout_before_headers',
     (
@@ -917,7 +1190,7 @@ def test_100_Continue(test_client):
     status_line, _actual_headers, actual_resp_body = webtest.shb(response)
     actual_status = int(status_line[:3])
     assert actual_status == 200
-    expected_resp_body = ("thanks for '%s'" % body).encode()
+    expected_resp_body = f"thanks for '{body.decode() !s}'".encode()
     assert actual_resp_body == expected_resp_body
     conn.close()
 
@@ -987,7 +1260,7 @@ def test_readall_or_close(test_client, max_request_body_size):
     status_line, actual_headers, actual_resp_body = webtest.shb(response)
     actual_status = int(status_line[:3])
     assert actual_status == 200
-    expected_resp_body = ("thanks for '%s'" % body).encode()
+    expected_resp_body = f"thanks for '{body.decode() !s}'".encode()
     assert actual_resp_body == expected_resp_body
     conn.close()
 
@@ -134,7 +134,7 @@ def test_query_string_request(test_client):
         '/hello',  # plain
         '/query_string?test=True',  # query
         '/{0}?{1}={2}'.format(  # quoted unicode
-            *map(urllib.parse.quote, ('Юххууу', 'ї', 'йо'))
+            *map(urllib.parse.quote, ('Юххууу', 'ї', 'йо')),
         ),
     ),
 )
@@ -31,7 +31,7 @@ config = {
 
 
 @contextmanager
-def cheroot_server(server_factory):
+def cheroot_server(server_factory):  # noqa: WPS210
     """Set up and tear down a Cheroot server instance."""
     conf = config[server_factory].copy()
     bind_port = conf.pop('bind_addr')[-1]
@@ -41,7 +41,7 @@ def cheroot_server(server_factory):
         actual_bind_addr = (interface, bind_port)
         httpserver = server_factory(  # create it
             bind_addr=actual_bind_addr,
-            **conf
+            **conf,
         )
     except OSError:
         pass
@@ -50,27 +50,52 @@ def cheroot_server(server_factory):
 
     httpserver.shutdown_timeout = 0  # Speed-up tests teardown
 
-    threading.Thread(target=httpserver.safe_start).start()  # spawn it
+    # FIXME: Expose this thread through a fixture so that it
+    # FIXME: could be awaited in tests.
+    server_thread = threading.Thread(target=httpserver.safe_start)
+    server_thread.start()  # spawn it
     while not httpserver.ready:  # wait until fully initialized and bound
         time.sleep(0.1)
 
-    yield httpserver
-    httpserver.stop()  # destroy it
+    try:
+        yield server_thread, httpserver
+    finally:
+        httpserver.stop()  # destroy it
+        server_thread.join()  # wait for the thread to be torn down
 
 
 @pytest.fixture
-def wsgi_server():
+def thread_and_wsgi_server():
+    """Set up and tear down a Cheroot WSGI server instance.
+
+    This emits a tuple of a thread and a server instance.
+    """
+    with cheroot_server(cheroot.wsgi.Server) as (server_thread, srv):
+        yield server_thread, srv
+
+
+@pytest.fixture
+def thread_and_native_server():
+    """Set up and tear down a Cheroot HTTP server instance.
+
+    This emits a tuple of a thread and a server instance.
+    """
+    with cheroot_server(cheroot.server.HTTPServer) as (server_thread, srv):
+        yield server_thread, srv
+
+
+@pytest.fixture
+def wsgi_server(thread_and_wsgi_server):  # noqa: WPS442
     """Set up and tear down a Cheroot WSGI server instance."""
-    with cheroot_server(cheroot.wsgi.Server) as srv:
-        yield srv
+    _server_thread, srv = thread_and_wsgi_server
+    return srv
 
 
 @pytest.fixture
-def native_server():
+def native_server(thread_and_native_server):  # noqa: WPS442
     """Set up and tear down a Cheroot HTTP server instance."""
-    with cheroot_server(cheroot.server.HTTPServer) as srv:
-        yield srv
+    _server_thread, srv = thread_and_native_server
+    return srv
 
 
 class _TestClient:
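The teardown above moves into a `finally:` block so that `stop()` and `join()` run even when the test body raises. That guarantee comes from how generator-based context managers propagate exceptions at the `yield` point; a minimal illustration:

    from contextlib import contextmanager

    cleaned_up = []


    @contextmanager
    def managed():
        try:
            yield 'resource'
        finally:
            cleaned_up.append(True)  # runs on success *and* on error


    try:
        with managed():
            raise RuntimeError('test body failed')
    except RuntimeError:
        pass

    assert cleaned_up == [True]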
@@ -6,6 +6,7 @@
 """
 
 import collections
+import logging
 import threading
 import time
 import socket
@@ -30,7 +31,7 @@ class TrueyZero:
 
 trueyzero = TrueyZero()
 
-_SHUTDOWNREQUEST = None
+_SHUTDOWNREQUEST = object()
 
 
 class WorkerThread(threading.Thread):
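Switching the shutdown marker from `None` to a fresh `object()` makes the queue check unambiguous: an identity comparison against a private sentinel can never collide with a legitimate payload, whereas `None` conceivably could. A standalone illustration of the idiom:

    import queue

    _SHUTDOWN = object()  # unique sentinel: `is` comparison cannot collide

    q = queue.Queue()
    q.put('work item')
    q.put(_SHUTDOWN)

    while True:
        item = q.get()
        if item is _SHUTDOWN:  # identity check, not equality
            break
        assert item == 'work item'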
@@ -99,39 +100,127 @@ class WorkerThread(threading.Thread):
         threading.Thread.__init__(self)
 
     def run(self):
-        """Process incoming HTTP connections.
+        """Set up incoming HTTP connection processing loop.
 
-        Retrieves incoming connections from thread pool.
+        This is the thread's entry-point. It performs top-layer
+        exception handling and interrupt processing.
+
+        :exc:`KeyboardInterrupt` and :exc:`SystemExit` bubbling up
+        from the inner-layer code constitute a global server interrupt
+        request. When they happen, the worker thread exits.
+
+        :raises BaseException: when an unexpected non-interrupt
+            exception leaks from the inner layers
+
+        # noqa: DAR401 KeyboardInterrupt SystemExit
         """
         self.server.stats['Worker Threads'][self.name] = self.stats
+        self.ready = True
         try:
-            self.ready = True
-            while True:
-                conn = self.server.requests.get()
-                if conn is _SHUTDOWNREQUEST:
-                    return
-
-                self.conn = conn
-                is_stats_enabled = self.server.stats['Enabled']
-                if is_stats_enabled:
-                    self.start_time = time.time()
-                keep_conn_open = False
-                try:
-                    keep_conn_open = conn.communicate()
-                finally:
-                    if keep_conn_open:
-                        self.server.put_conn(conn)
-                    else:
-                        conn.close()
-                    if is_stats_enabled:
-                        self.requests_seen += self.conn.requests_seen
-                        self.bytes_read += self.conn.rfile.bytes_read
-                        self.bytes_written += self.conn.wfile.bytes_written
-                        self.work_time += time.time() - self.start_time
-                        self.start_time = None
-                    self.conn = None
-        except (KeyboardInterrupt, SystemExit) as ex:
-            self.server.interrupt = ex
+            self._process_connections_until_interrupted()
+        except (KeyboardInterrupt, SystemExit) as interrupt_exc:
+            interrupt_cause = interrupt_exc.__cause__ or interrupt_exc
+            self.server.error_log(
+                f'Setting the server interrupt flag to {interrupt_cause !r}',
+                level=logging.DEBUG,
+            )
+            self.server.interrupt = interrupt_cause
+        except BaseException as underlying_exc:  # noqa: WPS424
+            # NOTE: This is the last resort logging with the last dying breath
+            # NOTE: of the worker. It is only reachable when exceptions happen
+            # NOTE: in the `finally` branch of the internal try/except block.
+            self.server.error_log(
+                'A fatal exception happened. Setting the server interrupt flag'
+                f' to {underlying_exc !r} and giving up.'
+                '\N{NEW LINE}\N{NEW LINE}'
+                'Please, report this on the Cheroot tracker at '
+                '<https://github.com/cherrypy/cheroot/issues/new/choose>, '
+                'providing a full reproducer with as much context and details '
+                'as possible.',
+                level=logging.CRITICAL,
+                traceback=True,
+            )
+            self.server.interrupt = underlying_exc
+            raise
+        finally:
+            self.ready = False
+
+    def _process_connections_until_interrupted(self):
+        """Process incoming HTTP connections in an infinite loop.
+
+        Retrieves incoming connections from thread pool, processing
+        them one by one.
+
+        :raises SystemExit: on the internal requests to stop the
+            server instance
+        """
+        while True:
+            conn = self.server.requests.get()
+            if conn is _SHUTDOWNREQUEST:
+                return
+
+            self.conn = conn
+            is_stats_enabled = self.server.stats['Enabled']
+            if is_stats_enabled:
+                self.start_time = time.time()
+            keep_conn_open = False
+            try:
+                keep_conn_open = conn.communicate()
+            except ConnectionError as connection_error:
+                keep_conn_open = False  # Drop the connection cleanly
+                self.server.error_log(
+                    'Got a connection error while handling a '
+                    f'connection from {conn.remote_addr !s}:'
+                    f'{conn.remote_port !s} ({connection_error !s})',
+                    level=logging.INFO,
+                )
+                continue
+            except (KeyboardInterrupt, SystemExit) as shutdown_request:
+                # Shutdown request
+                keep_conn_open = False  # Drop the connection cleanly
+                self.server.error_log(
+                    'Got a server shutdown request while handling a '
+                    f'connection from {conn.remote_addr !s}:'
+                    f'{conn.remote_port !s} ({shutdown_request !s})',
+                    level=logging.DEBUG,
+                )
+                raise SystemExit(
+                    str(shutdown_request),
+                ) from shutdown_request
+            except BaseException as unhandled_error:  # noqa: WPS424
+                # NOTE: Only a shutdown request should bubble up to the
+                # NOTE: external cleanup code. Otherwise, this thread dies.
+                # NOTE: If this were to happen, the threadpool would still
+                # NOTE: list a dead thread without knowing its state. And
+                # NOTE: the calling code would fail to schedule processing
+                # NOTE: of new requests.
+                self.server.error_log(
+                    'Unhandled error while processing an incoming '
+                    f'connection {unhandled_error !r}',
+                    level=logging.ERROR,
+                    traceback=True,
+                )
+                continue  # Prevent the thread from dying
+            finally:
+                # NOTE: Any exceptions coming from within `finally` may
+                # NOTE: kill the thread, causing the threadpool to only
+                # NOTE: contain references to dead threads rendering the
+                # NOTE: server defunct, effectively meaning a DoS.
+                # NOTE: Ideally, things called here should process
+                # NOTE: everything recoverable internally. Any unhandled
+                # NOTE: errors will bubble up into the outer try/except
+                # NOTE: block. They will be treated as fatal and turned
+                # NOTE: into server shutdown requests and then reraised
+                # NOTE: unconditionally.
+                if keep_conn_open:
+                    self.server.put_conn(conn)
+                else:
+                    conn.close()
+                if is_stats_enabled:
+                    self.requests_seen += conn.requests_seen
+                    self.bytes_read += conn.rfile.bytes_read
+                    self.bytes_written += conn.wfile.bytes_written
+                    self.work_time += time.time() - self.start_time
+                    self.start_time = None
+                self.conn = None
 
 
 class ThreadPool:
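One subtlety in the loop above: `continue` inside the `except` blocks still executes the `finally:` suite first, so the connection is always closed or re-queued before the next iteration begins. A compact demonstration of that control flow:

    visited = []

    for attempt in range(2):
        try:
            if attempt == 0:
                raise ValueError('simulated per-connection failure')
        except ValueError:
            visited.append(f'handled {attempt}')
            continue  # jumps to the next iteration...
        finally:
            visited.append(f'finally {attempt}')  # ...but finally runs first

    assert visited == ['handled 0', 'finally 0', 'finally 1']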
@@ -57,9 +57,11 @@ These API's are described in the `CherryPy specification
 """
 
 try:
-    import pkg_resources
+    import importlib.metadata as importlib_metadata
 except ImportError:
-    pass
+    # fall back for python <= 3.7
+    # This try/except can be removed once py <= 3.7 support is dropped
+    import importlib_metadata
 
 from threading import local as _local
 
@@ -109,7 +111,7 @@ tree = _cptree.Tree()
 
 
 try:
-    __version__ = pkg_resources.require('cherrypy')[0].version
+    __version__ = importlib_metadata.version('cherrypy')
 except Exception:
     __version__ = 'unknown'
 
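`importlib.metadata.version()` (stdlib since Python 3.8, with the `importlib_metadata` backport on PyPI for older interpreters) replaces the heavier `pkg_resources` lookup. A minimal standalone equivalent of the pattern:

    try:
        import importlib.metadata as importlib_metadata
    except ImportError:  # Python <= 3.7: use the PyPI backport
        import importlib_metadata

    try:
        version = importlib_metadata.version('pip')  # any installed dist name
    except importlib_metadata.PackageNotFoundError:
        version = 'unknown'

    print(version)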
@@ -181,24 +183,28 @@ def quickstart(root=None, script_name='', config=None):
 class _Serving(_local):
     """An interface for registering request and response objects.
 
-    Rather than have a separate "thread local" object for the request and
-    the response, this class works as a single threadlocal container for
-    both objects (and any others which developers wish to define). In this
-    way, we can easily dump those objects when we stop/start a new HTTP
-    conversation, yet still refer to them as module-level globals in a
-    thread-safe way.
+    Rather than have a separate "thread local" object for the request
+    and the response, this class works as a single threadlocal container
+    for both objects (and any others which developers wish to define).
+    In this way, we can easily dump those objects when we stop/start a
+    new HTTP conversation, yet still refer to them as module-level
+    globals in a thread-safe way.
     """
 
     request = _cprequest.Request(_httputil.Host('127.0.0.1', 80),
                                  _httputil.Host('127.0.0.1', 1111))
+    """The request object for the current thread.
+
+    In the main thread, and any threads which are not receiving HTTP
+    requests, this is None.
     """
-    The request object for the current thread. In the main thread,
-    and any threads which are not receiving HTTP requests, this is None."""
 
     response = _cprequest.Response()
+    """The response object for the current thread.
+
+    In the main thread, and any threads which are not receiving HTTP
+    requests, this is None.
     """
-    The response object for the current thread. In the main thread,
-    and any threads which are not receiving HTTP requests, this is None."""
 
     def load(self, request, response):
         self.request = request
@@ -316,8 +322,8 @@ class _GlobalLogManager(_cplogging.LogManager):
     def __call__(self, *args, **kwargs):
         """Log the given message to the app.log or global log.
 
-        Log the given message to the app.log or global
-        log as appropriate.
+        Log the given message to the app.log or global log as
+        appropriate.
         """
         # Do NOT use try/except here. See
         # https://github.com/cherrypy/cherrypy/issues/945
@@ -330,8 +336,8 @@ class _GlobalLogManager(_cplogging.LogManager):
     def access(self):
         """Log an access message to the app.log or global log.
 
-        Log the given message to the app.log or global
-        log as appropriate.
+        Log the given message to the app.log or global log as
+        appropriate.
         """
         try:
             return request.app.log.access()
@@ -313,7 +313,10 @@ class Checker(object):
 
     # -------------------- Specific config warnings -------------------- #
     def check_localhost(self):
-        """Warn if any socket_host is 'localhost'. See #711."""
+        """Warn if any socket_host is 'localhost'.
+
+        See #711.
+        """
         for k, v in cherrypy.config.items():
             if k == 'server.socket_host' and v == 'localhost':
                 warnings.warn("The use of 'localhost' as a socket host can "
@@ -1,5 +1,4 @@
-"""
-Configuration system for CherryPy.
+"""Configuration system for CherryPy.
 
 Configuration in CherryPy is implemented via dictionaries. Keys are strings
 which name the mapped value, which may be of any type.
@@ -132,8 +131,8 @@ def _if_filename_register_autoreload(ob):
 def merge(base, other):
     """Merge one app config (from a dict, file, or filename) into another.
 
-    If the given config is a filename, it will be appended to
-    the list of files to monitor for "autoreload" changes.
+    If the given config is a filename, it will be appended to the list
+    of files to monitor for "autoreload" changes.
     """
     _if_filename_register_autoreload(other)
 
@@ -1,9 +1,10 @@
 """CherryPy dispatchers.
 
 A 'dispatcher' is the object which looks up the 'page handler' callable
-and collects config for the current request based on the path_info, other
-request attributes, and the application architecture. The core calls the
-dispatcher as early as possible, passing it a 'path_info' argument.
+and collects config for the current request based on the path_info,
+other request attributes, and the application architecture. The core
+calls the dispatcher as early as possible, passing it a 'path_info'
+argument.
 
 The default dispatcher discovers the page handler by matching path_info
 to a hierarchical arrangement of objects, starting at request.app.root.
@@ -21,7 +22,6 @@ import cherrypy
 
 
 class PageHandler(object):
-
     """Callable which sets response.body."""
 
     def __init__(self, callable, *args, **kwargs):
@@ -64,8 +64,7 @@ class PageHandler(object):
 
 
 def test_callable_spec(callable, callable_args, callable_kwargs):
-    """
-    Inspect callable and test to see if the given args are suitable for it.
+    """Inspect callable and test to see if the given args are suitable for it.
 
     When an error occurs during the handler's invoking stage there are 2
     erroneous cases:
@@ -252,16 +251,16 @@ else:
 
 
 class Dispatcher(object):
-
     """CherryPy Dispatcher which walks a tree of objects to find a handler.
 
-    The tree is rooted at cherrypy.request.app.root, and each hierarchical
-    component in the path_info argument is matched to a corresponding nested
-    attribute of the root object. Matching handlers must have an 'exposed'
-    attribute which evaluates to True. The special method name "index"
-    matches a URI which ends in a slash ("/"). The special method name
-    "default" may match a portion of the path_info (but only when no longer
-    substring of the path_info matches some other object).
+    The tree is rooted at cherrypy.request.app.root, and each
+    hierarchical component in the path_info argument is matched to a
+    corresponding nested attribute of the root object. Matching handlers
+    must have an 'exposed' attribute which evaluates to True. The
+    special method name "index" matches a URI which ends in a slash
+    ("/"). The special method name "default" may match a portion of the
+    path_info (but only when no longer substring of the path_info
+    matches some other object).
 
     This is the default, built-in dispatcher for CherryPy.
     """
@@ -306,9 +305,9 @@ class Dispatcher(object):
 
         The second object returned will be a list of names which are
         'virtual path' components: parts of the URL which are dynamic,
-        and were not used when looking up the handler.
-        These virtual path components are passed to the handler as
-        positional arguments.
+        and were not used when looking up the handler. These virtual
+        path components are passed to the handler as positional
+        arguments.
         """
         request = cherrypy.serving.request
         app = request.app
@@ -448,13 +447,11 @@ class Dispatcher(object):
 
 
 class MethodDispatcher(Dispatcher):
-
     """Additional dispatch based on cherrypy.request.method.upper().
 
-    Methods named GET, POST, etc will be called on an exposed class.
-    The method names must be all caps; the appropriate Allow header
-    will be output showing all capitalized method names as allowable
-    HTTP verbs.
+    Methods named GET, POST, etc will be called on an exposed class. The
+    method names must be all caps; the appropriate Allow header will be
+    output showing all capitalized method names as allowable HTTP verbs.
 
     Note that the containing class must be exposed, not the methods.
     """
@@ -492,16 +489,14 @@ class MethodDispatcher(Dispatcher):
 
 
 class RoutesDispatcher(object):
-
     """A Routes based dispatcher for CherryPy."""
 
     def __init__(self, full_result=False, **mapper_options):
-        """
-        Routes dispatcher
+        """Routes dispatcher.
 
-        Set full_result to True if you wish the controller
-        and the action to be passed on to the page handler
-        parameters. By default they won't be.
+        Set full_result to True if you wish the controller and the
+        action to be passed on to the page handler parameters. By
+        default they won't be.
         """
         import routes
         self.full_result = full_result
@@ -617,8 +612,7 @@ def XMLRPCDispatcher(next_dispatcher=Dispatcher()):
 
 def VirtualHost(next_dispatcher=Dispatcher(), use_x_forwarded_host=True,
                 **domains):
-    """
-    Select a different handler based on the Host header.
+    """Select a different handler based on the Host header.
 
     This can be useful when running multiple sites within one CP server.
     It allows several domains to point to different parts of a single
@@ -136,19 +136,17 @@ from cherrypy.lib import httputil as _httputil
 
 
 class CherryPyException(Exception):
-
     """A base class for CherryPy exceptions."""
     pass
 
 
 class InternalRedirect(CherryPyException):
-
     """Exception raised to switch to the handler for a different URL.
 
-    This exception will redirect processing to another path within the site
-    (without informing the client). Provide the new path as an argument when
-    raising the exception. Provide any params in the querystring for the new
-    URL.
+    This exception will redirect processing to another path within the
+    site (without informing the client). Provide the new path as an
+    argument when raising the exception. Provide any params in the
+    querystring for the new URL.
     """
 
     def __init__(self, path, query_string=''):
@@ -173,7 +171,6 @@ class InternalRedirect(CherryPyException):
 
 
 class HTTPRedirect(CherryPyException):
-
     """Exception raised when the request should be redirected.
 
     This exception will force a HTTP redirect to the URL or URL's you give it.
@@ -202,7 +199,7 @@ class HTTPRedirect(CherryPyException):
     """The list of URL's to emit."""
 
     encoding = 'utf-8'
-    """The encoding when passed urls are not native strings"""
+    """The encoding when passed urls are not native strings."""
 
     def __init__(self, urls, status=None, encoding=None):
         self.urls = abs_urls = [
@@ -230,8 +227,7 @@ class HTTPRedirect(CherryPyException):
 
     @classproperty
     def default_status(cls):
-        """
-        The default redirect status for the request.
+        """The default redirect status for the request.
 
         RFC 2616 indicates a 301 response code fits our goal; however,
         browser support for 301 is quite messy. Use 302/303 instead. See
@@ -249,8 +245,9 @@ class HTTPRedirect(CherryPyException):
         """Modify cherrypy.response status, headers, and body to represent
         self.
 
-        CherryPy uses this internally, but you can also use it to create an
-        HTTPRedirect object and set its output without *raising* the exception.
+        CherryPy uses this internally, but you can also use it to create
+        an HTTPRedirect object and set its output without *raising* the
+        exception.
         """
         response = cherrypy.serving.response
         response.status = status = self.status
@@ -339,7 +336,6 @@ def clean_headers(status):
 
 
 class HTTPError(CherryPyException):
-
     """Exception used to return an HTTP error code (4xx-5xx) to the client.
 
     This exception can be used to automatically send a response using a
@@ -358,7 +354,9 @@ class HTTPError(CherryPyException):
     """
 
     status = None
-    """The HTTP status code. May be of type int or str (with a Reason-Phrase).
+    """The HTTP status code.
+
+    May be of type int or str (with a Reason-Phrase).
     """
 
     code = None
@@ -386,8 +384,9 @@ class HTTPError(CherryPyException):
         """Modify cherrypy.response status, headers, and body to represent
         self.
 
-        CherryPy uses this internally, but you can also use it to create an
-        HTTPError object and set its output without *raising* the exception.
+        CherryPy uses this internally, but you can also use it to create
+        an HTTPError object and set its output without *raising* the
+        exception.
         """
         response = cherrypy.serving.response
 
@@ -426,11 +425,10 @@ class HTTPError(CherryPyException):
 
 
 class NotFound(HTTPError):
-
     """Exception raised when a URL could not be mapped to any handler (404).
 
-    This is equivalent to raising
-    :class:`HTTPError("404 Not Found") <cherrypy._cperror.HTTPError>`.
+    This is equivalent to raising :class:`HTTPError("404 Not Found")
+    <cherrypy._cperror.HTTPError>`.
     """
 
     def __init__(self, path=None):
@@ -477,8 +475,8 @@ _HTTPErrorTemplate = '''<!DOCTYPE html PUBLIC
 def get_error_page(status, **kwargs):
     """Return an HTML page, containing a pretty error response.
 
-    status should be an int or a str.
-    kwargs will be interpolated into the page template.
+    status should be an int or a str. kwargs will be interpolated into
+    the page template.
     """
     try:
         code, reason, message = _httputil.valid_status(status)
@@ -595,8 +593,8 @@ def bare_error(extrabody=None):
     """Produce status, headers, body for a critical error.
 
     Returns a triple without calling any other questionable functions,
-    so it should be as error-free as possible. Call it from an HTTP server
-    if you get errors outside of the request.
+    so it should be as error-free as possible. Call it from an HTTP
+    server if you get errors outside of the request.
 
     If extrabody is None, a friendly but rather unhelpful error message
     is set in the body. If extrabody is a string, it will be appended
@@ -123,7 +123,6 @@ logfmt = logging.Formatter('%(message)s')
 
 
 class NullHandler(logging.Handler):
-
     """A no-op logging handler to silence the logging.lastResort handler."""
 
     def handle(self, record):
@ -137,15 +136,16 @@ class NullHandler(logging.Handler):
|
||||||
|
|
||||||
|
|
||||||
class LogManager(object):
|
class LogManager(object):
|
||||||
|
|
||||||
"""An object to assist both simple and advanced logging.
|
"""An object to assist both simple and advanced logging.
|
||||||
|
|
||||||
``cherrypy.log`` is an instance of this class.
|
``cherrypy.log`` is an instance of this class.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
appid = None
|
appid = None
|
||||||
"""The id() of the Application object which owns this log manager. If this
|
"""The id() of the Application object which owns this log manager.
|
||||||
is a global log manager, appid is None."""
|
|
||||||
|
If this is a global log manager, appid is None.
|
||||||
|
"""
|
||||||
|
|
||||||
error_log = None
|
error_log = None
|
||||||
"""The actual :class:`logging.Logger` instance for error messages."""
|
"""The actual :class:`logging.Logger` instance for error messages."""
|
||||||
|
@@ -317,8 +317,8 @@ class LogManager(object):
     def screen(self):
         """Turn stderr/stdout logging on or off.

-        If you set this to True, it'll add the appropriate StreamHandler for
-        you. If you set it to False, it will remove the handler.
+        If you set this to True, it'll add the appropriate StreamHandler
+        for you. If you set it to False, it will remove the handler.
         """
         h = self._get_builtin_handler
         has_h = h(self.error_log, 'screen') or h(self.access_log, 'screen')
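For readers of this hunk, a short sketch of driving ``LogManager.screen`` from code; the log file path is illustrative:

```python
import cherrypy

cherrypy.log.screen = False                  # remove the stderr/stdout handler
cherrypy.log.error_file = '/tmp/error.log'   # send error messages to a file
cherrypy.log('server configured')            # written to the error log
```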
@@ -414,7 +414,6 @@ class LogManager(object):


 class WSGIErrorHandler(logging.Handler):
-
     "A handler class which writes logging records to environ['wsgi.errors']."

     def flush(self):
@@ -452,6 +451,8 @@ class WSGIErrorHandler(logging.Handler):

 class LazyRfc3339UtcTime(object):
     def __str__(self):
-        """Return utcnow() in RFC3339 UTC Format."""
-        iso_formatted_now = datetime.datetime.utcnow().isoformat('T')
+        """Return datetime in RFC3339 UTC Format."""
+        iso_formatted_now = datetime.datetime.now(
+            datetime.timezone.utc,
+        ).isoformat('T')
         return f'{iso_formatted_now!s}Z'
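The hunk above swaps the deprecated ``datetime.datetime.utcnow()`` for a timezone-aware call. As a standalone sketch of the same idea (not from the diff): an aware datetime's ``isoformat()`` includes a ``+00:00`` offset, so a caller that wants the trailing ``Z`` convention can strip the tzinfo first:

```python
import datetime

now = datetime.datetime.now(datetime.timezone.utc)
# Drop the tzinfo so isoformat() omits '+00:00', then append 'Z'
# to keep the RFC 3339 UTC suffix.
stamp = now.replace(tzinfo=None).isoformat('T') + 'Z'
print(stamp)  # e.g. 2025-01-01T12:00:00.000000Z
```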
@@ -1,4 +1,4 @@
-"""Native adapter for serving CherryPy via mod_python
+"""Native adapter for serving CherryPy via mod_python.

 Basic usage:

@@ -120,10 +120,10 @@ class NativeGateway(cheroot.server.Gateway):
 class CPHTTPServer(cheroot.server.HTTPServer):
     """Wrapper for cheroot.server.HTTPServer.

-    cheroot has been designed to not reference CherryPy in any way,
-    so that it can be used in other frameworks and applications.
-    Therefore, we wrap it here, so we can apply some attributes
-    from config -> cherrypy.server -> HTTPServer.
+    cheroot has been designed to not reference CherryPy in any way, so
+    that it can be used in other frameworks and applications. Therefore,
+    we wrap it here, so we can apply some attributes from config ->
+    cherrypy.server -> HTTPServer.
     """

     def __init__(self, server_adapter=cherrypy.server):
@@ -248,7 +248,10 @@ def process_multipart_form_data(entity):


 def _old_process_multipart(entity):
-    """The behavior of 3.2 and lower. Deprecated and will be changed in 3.3."""
+    """The behavior of 3.2 and lower.
+
+    Deprecated and will be changed in 3.3.
+    """
     process_multipart(entity)

     params = entity.params
@@ -277,7 +280,6 @@ def _old_process_multipart(entity):


 # -------------------------------- Entities --------------------------------- #
 class Entity(object):
-
     """An HTTP request body, or MIME multipart body.

     This class collects information about the HTTP request entity. When a
@@ -346,13 +348,15 @@ class Entity(object):
     content_type = None
     """The value of the Content-Type request header.

-    If the Entity is part of a multipart payload, this will be the Content-Type
-    given in the MIME headers for this part.
+    If the Entity is part of a multipart payload, this will be the
+    Content-Type given in the MIME headers for this part.
     """

     default_content_type = 'application/x-www-form-urlencoded'
     """This defines a default ``Content-Type`` to use if no Content-Type header
-    is given. The empty string is used for RequestBody, which results in the
+    is given.
+
+    The empty string is used for RequestBody, which results in the
     request body not being read or parsed at all. This is by design; a missing
     ``Content-Type`` header in the HTTP request entity is an error at best,
     and a security hole at worst. For multipart parts, however, the MIME spec
@@ -402,8 +406,8 @@ class Entity(object):
     part_class = None
     """The class used for multipart parts.

-    You can replace this with custom subclasses to alter the processing of
-    multipart parts.
+    You can replace this with custom subclasses to alter the processing
+    of multipart parts.
     """

     def __init__(self, fp, headers, params=None, parts=None):
@@ -509,7 +513,8 @@ class Entity(object):
         """Return a file-like object into which the request body will be read.

         By default, this will return a TemporaryFile. Override as needed.
-        See also :attr:`cherrypy._cpreqbody.Part.maxrambytes`."""
+        See also :attr:`cherrypy._cpreqbody.Part.maxrambytes`.
+        """
         return tempfile.TemporaryFile()

     def fullvalue(self):
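``make_file`` is the documented override point when uploads should land somewhere other than an anonymous TemporaryFile. A minimal sketch (the upload directory is hypothetical):

```python
import tempfile

import cherrypy


class UploadPart(cherrypy._cpreqbody.Part):
    """A multipart Part that spools large bodies into a chosen directory."""

    def make_file(self):
        # A named file in an upload directory instead of an anonymous
        # temp file; the directory must already exist.
        return tempfile.NamedTemporaryFile(
            dir='/var/tmp/uploads', delete=False)
```

A subclass like this is typically wired in by assigning it to ``Entity.part_class`` (see the ``part_class`` hunk above).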
@@ -525,7 +530,7 @@ class Entity(object):
         return value

     def decode_entity(self, value):
-        """Return a given byte encoded value as a string"""
+        """Return a given byte encoded value as a string."""
         for charset in self.attempt_charsets:
             try:
                 value = value.decode(charset)
@@ -569,7 +574,6 @@ class Entity(object):


 class Part(Entity):
-
     """A MIME part entity, part of a multipart entity."""

     # "The default character set, which must be assumed in the absence of a
@@ -653,8 +657,8 @@ class Part(Entity):
     def read_lines_to_boundary(self, fp_out=None):
         """Read bytes from self.fp and return or write them to a file.

-        If the 'fp_out' argument is None (the default), all bytes read are
-        returned in a single byte string.
+        If the 'fp_out' argument is None (the default), all bytes read
+        are returned in a single byte string.

         If the 'fp_out' argument is not None, it must be a file-like
         object that supports the 'write' method; all bytes read will be
@@ -755,15 +759,15 @@ class SizedReader:
     def read(self, size=None, fp_out=None):
         """Read bytes from the request body and return or write them to a file.

-        A number of bytes less than or equal to the 'size' argument are read
-        off the socket. The actual number of bytes read are tracked in
-        self.bytes_read. The number may be smaller than 'size' when 1) the
-        client sends fewer bytes, 2) the 'Content-Length' request header
-        specifies fewer bytes than requested, or 3) the number of bytes read
-        exceeds self.maxbytes (in which case, 413 is raised).
+        A number of bytes less than or equal to the 'size' argument are
+        read off the socket. The actual number of bytes read are tracked
+        in self.bytes_read. The number may be smaller than 'size' when
+        1) the client sends fewer bytes, 2) the 'Content-Length' request
+        header specifies fewer bytes than requested, or 3) the number of
+        bytes read exceeds self.maxbytes (in which case, 413 is raised).

-        If the 'fp_out' argument is None (the default), all bytes read are
-        returned in a single byte string.
+        If the 'fp_out' argument is None (the default), all bytes read
+        are returned in a single byte string.

         If the 'fp_out' argument is not None, it must be a file-like
         object that supports the 'write' method; all bytes read will be
@@ -918,7 +922,6 @@ class SizedReader:


 class RequestBody(Entity):
-
     """The entity of the HTTP request."""

     bufsize = 8 * 1024
@@ -16,7 +16,6 @@ from cherrypy.lib import httputil, reprconf, encoding


 class Hook(object):
-
     """A callback and its metadata: failsafe, priority, and kwargs."""

     callback = None
@@ -30,10 +29,12 @@ class Hook(object):
     from the same call point raise exceptions."""

     priority = 50
-    """
-    Defines the order of execution for a list of Hooks. Priority numbers
-    should be limited to the closed interval [0, 100], but values outside
-    this range are acceptable, as are fractional values."""
+    """Defines the order of execution for a list of Hooks.
+
+    Priority numbers should be limited to the closed interval [0, 100],
+    but values outside this range are acceptable, as are fractional
+    values.
+    """

     kwargs = {}
     """
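A brief sketch of the priority semantics documented above, attaching a per-request hook from a handler (handler and header names are illustrative):

```python
import cherrypy


class Root:
    @cherrypy.expose
    def index(self):
        # Attach a one-off hook for this request only; priority orders
        # hooks at the same call point (lower runs earlier, 50 default).
        cherrypy.request.hooks.attach(
            'before_finalize', self._stamp, priority=40)
        return 'hello'

    @staticmethod
    def _stamp():
        cherrypy.response.headers['X-Stamp'] = 'demo'
```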
@@ -74,7 +75,6 @@ class Hook(object):


 class HookMap(dict):
-
     """A map of call points to lists of callbacks (Hook objects)."""

     def __new__(cls, points=None):
@@ -190,23 +190,23 @@ hookpoints = ['on_start_resource', 'before_request_body',


 class Request(object):
-
     """An HTTP request.

-    This object represents the metadata of an HTTP request message;
-    that is, it contains attributes which describe the environment
-    in which the request URL, headers, and body were sent (if you
-    want tools to interpret the headers and body, those are elsewhere,
-    mostly in Tools). This 'metadata' consists of socket data,
-    transport characteristics, and the Request-Line. This object
-    also contains data regarding the configuration in effect for
-    the given URL, and the execution plan for generating a response.
+    This object represents the metadata of an HTTP request message; that
+    is, it contains attributes which describe the environment in which
+    the request URL, headers, and body were sent (if you want tools to
+    interpret the headers and body, those are elsewhere, mostly in
+    Tools). This 'metadata' consists of socket data, transport
+    characteristics, and the Request-Line. This object also contains
+    data regarding the configuration in effect for the given URL, and
+    the execution plan for generating a response.
     """

     prev = None
-    """
-    The previous Request object (if any). This should be None
-    unless we are processing an InternalRedirect."""
+    """The previous Request object (if any).
+
+    This should be None unless we are processing an InternalRedirect.
+    """

     # Conversation/connection attributes
     local = httputil.Host('127.0.0.1', 80)
@@ -216,9 +216,10 @@ class Request(object):
     'An httputil.Host(ip, port, hostname) object for the client socket.'

     scheme = 'http'
-    """
-    The protocol used between client and server. In most cases,
-    this will be either 'http' or 'https'."""
+    """The protocol used between client and server.
+
+    In most cases, this will be either 'http' or 'https'.
+    """

     server_protocol = 'HTTP/1.1'
     """
@@ -227,25 +228,30 @@ class Request(object):

     base = ''
     """The (scheme://host) portion of the requested URL.

     In some cases (e.g. when proxying via mod_rewrite), this may contain
     path segments which cherrypy.url uses when constructing url's, but
-    which otherwise are ignored by CherryPy. Regardless, this value
-    MUST NOT end in a slash."""
+    which otherwise are ignored by CherryPy. Regardless, this value MUST
+    NOT end in a slash.
+    """

     # Request-Line attributes
     request_line = ''
-    """
-    The complete Request-Line received from the client. This is a
-    single string consisting of the request method, URI, and protocol
-    version (joined by spaces). Any final CRLF is removed."""
+    """The complete Request-Line received from the client.
+
+    This is a single string consisting of the request method, URI, and
+    protocol version (joined by spaces). Any final CRLF is removed.
+    """

     method = 'GET'
-    """
-    Indicates the HTTP method to be performed on the resource identified
-    by the Request-URI. Common methods include GET, HEAD, POST, PUT, and
-    DELETE. CherryPy allows any extension method; however, various HTTP
-    servers and gateways may restrict the set of allowable methods.
-    CherryPy applications SHOULD restrict the set (on a per-URI basis)."""
+    """Indicates the HTTP method to be performed on the resource identified by
+    the Request-URI.
+
+    Common methods include GET, HEAD, POST, PUT, and DELETE. CherryPy
+    allows any extension method; however, various HTTP servers and
+    gateways may restrict the set of allowable methods. CherryPy
+    applications SHOULD restrict the set (on a per-URI basis).
+    """

     query_string = ''
     """
@@ -277,22 +283,26 @@ class Request(object):
     A dict which combines query string (GET) and request entity (POST)
     variables. This is populated in two stages: GET params are added
     before the 'on_start_resource' hook, and POST params are added
-    between the 'before_request_body' and 'before_handler' hooks."""
+    between the 'before_request_body' and 'before_handler' hooks.
+    """

     # Message attributes
     header_list = []
-    """
-    A list of the HTTP request headers as (name, value) tuples.
-    In general, you should use request.headers (a dict) instead."""
+    """A list of the HTTP request headers as (name, value) tuples.
+
+    In general, you should use request.headers (a dict) instead.
+    """

     headers = httputil.HeaderMap()
-    """
-    A dict-like object containing the request headers. Keys are header
+    """A dict-like object containing the request headers.
+
+    Keys are header
     names (in Title-Case format); however, you may get and set them in
     a case-insensitive manner. That is, headers['Content-Type'] and
     headers['content-type'] refer to the same value. Values are header
     values (decoded according to :rfc:`2047` if necessary). See also:
-    httputil.HeaderMap, httputil.HeaderElement."""
+    httputil.HeaderMap, httputil.HeaderElement.
+    """

     cookie = SimpleCookie()
     """See help(Cookie)."""
@@ -336,7 +346,8 @@ class Request(object):
     or multipart, this will be None. Otherwise, this will be an instance
     of :class:`RequestBody<cherrypy._cpreqbody.RequestBody>` (which you
     can .read()); this value is set between the 'before_request_body' and
-    'before_handler' hooks (assuming that process_request_body is True)."""
+    'before_handler' hooks (assuming that process_request_body is True).
+    """

     # Dispatch attributes
     dispatch = cherrypy.dispatch.Dispatcher()
@@ -347,23 +358,24 @@ class Request(object):
     calls the dispatcher as early as possible, passing it a 'path_info'
     argument.

-    The default dispatcher discovers the page handler by matching path_info
-    to a hierarchical arrangement of objects, starting at request.app.root.
-    See help(cherrypy.dispatch) for more information."""
+    The default dispatcher discovers the page handler by matching
+    path_info to a hierarchical arrangement of objects, starting at
+    request.app.root. See help(cherrypy.dispatch) for more information.
+    """

     script_name = ''
-    """
-    The 'mount point' of the application which is handling this request.
+    """The 'mount point' of the application which is handling this request.

     This attribute MUST NOT end in a slash. If the script_name refers to
     the root of the URI, it MUST be an empty string (not "/").
     """

     path_info = '/'
-    """
-    The 'relative path' portion of the Request-URI. This is relative
-    to the script_name ('mount point') of the application which is
-    handling this request."""
+    """The 'relative path' portion of the Request-URI.
+
+    This is relative to the script_name ('mount point') of the
+    application which is handling this request.
+    """

     login = None
     """
@@ -391,14 +403,16 @@ class Request(object):
     of the form: {Toolbox.namespace: {Tool.name: config dict}}."""

     config = None
-    """
-    A flat dict of all configuration entries which apply to the
-    current request. These entries are collected from global config,
-    application config (based on request.path_info), and from handler
-    config (exactly how is governed by the request.dispatch object in
-    effect for this request; by default, handler config can be attached
-    anywhere in the tree between request.app.root and the final handler,
-    and inherits downward)."""
+    """A flat dict of all configuration entries which apply to the current
+    request.
+
+    These entries are collected from global config, application config
+    (based on request.path_info), and from handler config (exactly how
+    is governed by the request.dispatch object in effect for this
+    request; by default, handler config can be attached anywhere in the
+    tree between request.app.root and the final handler, and inherits
+    downward).
+    """

     is_index = None
     """
@@ -409,13 +423,14 @@ class Request(object):
     the trailing slash. See cherrypy.tools.trailing_slash."""

     hooks = HookMap(hookpoints)
-    """
-    A HookMap (dict-like object) of the form: {hookpoint: [hook, ...]}.
+    """A HookMap (dict-like object) of the form: {hookpoint: [hook, ...]}.
+
     Each key is a str naming the hook point, and each value is a list
     of hooks which will be called at that hook point during this request.
     The list of hooks is generally populated as early as possible (mostly
     from Tools specified in config), but may be extended at any time.
-    See also: _cprequest.Hook, _cprequest.HookMap, and cherrypy.tools."""
+    See also: _cprequest.Hook, _cprequest.HookMap, and cherrypy.tools.
+    """

     error_response = cherrypy.HTTPError(500).set_response
     """
@@ -428,12 +443,11 @@ class Request(object):
     error response to the user-agent."""

     error_page = {}
-    """
-    A dict of {error code: response filename or callable} pairs.
+    """A dict of {error code: response filename or callable} pairs.

     The error code must be an int representing a given HTTP error code,
-    or the string 'default', which will be used if no matching entry
-    is found for a given numeric code.
+    or the string 'default', which will be used if no matching entry is
+    found for a given numeric code.

     If a filename is provided, the file should contain a Python string-
     formatting template, and can expect by default to receive format
@@ -447,8 +461,8 @@ class Request(object):
     iterable of strings which will be set to response.body. It may also
     override headers or perform any other processing.

-    If no entry is given for an error code, and no 'default' entry exists,
-    a default template will be used.
+    If no entry is given for an error code, and no 'default' entry
+    exists, a default template will be used.
     """

     show_tracebacks = True
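A sketch of how the ``error_page`` mapping described above is typically fed from config; the template path is hypothetical:

```python
import cherrypy


def error_404(status, message, traceback, version):
    # Callables receive the standard template kwargs and return the body.
    return f'Sorry, {status}: {message}'


config = {
    '/': {
        'error_page.404': error_404,
        # A filename also works; the file is a Python string-formatting
        # template receiving the same keys.
        'error_page.default': '/path/to/error.html',
    },
}
```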
@@ -473,9 +487,10 @@ class Request(object):
     """True once the close method has been called, False otherwise."""

     stage = None
-    """
-    A string containing the stage reached in the request-handling process.
-    This is useful when debugging a live server with hung requests."""
+    """A string containing the stage reached in the request-handling process.
+
+    This is useful when debugging a live server with hung requests.
+    """

     unique_id = None
     """A lazy object generating and memorizing UUID4 on ``str()`` render."""
@@ -492,9 +507,10 @@ class Request(object):
                  server_protocol='HTTP/1.1'):
         """Populate a new Request object.

-        local_host should be an httputil.Host object with the server info.
-        remote_host should be an httputil.Host object with the client info.
-        scheme should be a string, either "http" or "https".
+        local_host should be an httputil.Host object with the server
+        info. remote_host should be an httputil.Host object with the
+        client info. scheme should be a string, either "http" or
+        "https".
         """
         self.local = local_host
         self.remote = remote_host
@@ -514,7 +530,10 @@ class Request(object):
         self.unique_id = LazyUUID4()

     def close(self):
-        """Run cleanup code. (Core)"""
+        """Run cleanup code.
+
+        (Core)
+        """
         if not self.closed:
             self.closed = True
             self.stage = 'on_end_request'
@@ -551,7 +570,6 @@ class Request(object):

         Consumer code (HTTP servers) should then access these response
         attributes to build the outbound stream.
-
         """
         response = cherrypy.serving.response
         self.stage = 'run'
@@ -631,7 +649,10 @@ class Request(object):
         return response

     def respond(self, path_info):
-        """Generate a response for the resource at self.path_info. (Core)"""
+        """Generate a response for the resource at self.path_info.
+
+        (Core)
+        """
         try:
             try:
                 try:
@@ -702,7 +723,10 @@ class Request(object):
             response.finalize()

     def process_query_string(self):
-        """Parse the query string into Python structures. (Core)"""
+        """Parse the query string into Python structures.
+
+        (Core)
+        """
         try:
             p = httputil.parse_query_string(
                 self.query_string, encoding=self.query_string_encoding)
@@ -715,7 +739,10 @@ class Request(object):
             self.params.update(p)

     def process_headers(self):
-        """Parse HTTP header data into Python structures. (Core)"""
+        """Parse HTTP header data into Python structures.
+
+        (Core)
+        """
         # Process the headers into self.headers
         headers = self.headers
         for name, value in self.header_list:
@@ -751,7 +778,10 @@ class Request(object):
         self.base = '%s://%s' % (self.scheme, host)

     def get_resource(self, path):
-        """Call a dispatcher (which sets self.handler and .config). (Core)"""
+        """Call a dispatcher (which sets self.handler and .config).
+
+        (Core)
+        """
         # First, see if there is a custom dispatch at this URI. Custom
         # dispatchers can only be specified in app.config, not in _cp_config
         # (since custom dispatchers may not even have an app.root).
@@ -762,7 +792,10 @@ class Request(object):
             dispatch(path)

     def handle_error(self):
-        """Handle the last unanticipated exception. (Core)"""
+        """Handle the last unanticipated exception.
+
+        (Core)
+        """
         try:
             self.hooks.run('before_error_response')
             if self.error_response:
@@ -776,7 +809,6 @@ class Request(object):


 class ResponseBody(object):
-
     """The body of the HTTP response (the response entity)."""

     unicode_err = ('Page handlers MUST return bytes. Use tools.encode '
@@ -802,18 +834,18 @@ class ResponseBody(object):


 class Response(object):
-
     """An HTTP Response, including status, headers, and body."""

     status = ''
     """The HTTP Status-Code and Reason-Phrase."""

     header_list = []
-    """
-    A list of the HTTP response headers as (name, value) tuples.
+    """A list of the HTTP response headers as (name, value) tuples.
+
     In general, you should use response.headers (a dict) instead. This
     attribute is generated from response.headers and is not valid until
-    after the finalize phase."""
+    after the finalize phase.
+    """

     headers = httputil.HeaderMap()
     """
@@ -833,7 +865,10 @@ class Response(object):
     """The body (entity) of the HTTP response."""

     time = None
-    """The value of time.time() when created. Use in HTTP dates."""
+    """The value of time.time() when created.
+
+    Use in HTTP dates.
+    """

     stream = False
     """If False, buffer the response body."""
@@ -861,15 +896,15 @@ class Response(object):
         return new_body

     def _flush_body(self):
-        """
-        Discard self.body but consume any generator such that
-        any finalization can occur, such as is required by
-        caching.tee_output().
-        """
+        """Discard self.body but consume any generator such that any
+        finalization can occur, such as is required by caching.tee_output()."""
         consume(iter(self.body))

     def finalize(self):
-        """Transform headers (and cookies) into self.header_list. (Core)"""
+        """Transform headers (and cookies) into self.header_list.
+
+        (Core)
+        """
         try:
             code, reason, _ = httputil.valid_status(self.status)
         except ValueError:
@@ -50,7 +50,8 @@ class Server(ServerAdapter):
     """If given, the name of the UNIX socket to use instead of TCP/IP.

     When this option is not None, the `socket_host` and `socket_port` options
-    are ignored."""
+    are ignored.
+    """

     socket_queue_size = 5
     """The 'backlog' argument to socket.listen(); specifies the maximum number
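A minimal sketch of selecting the UNIX-socket mode documented above; the socket path is hypothetical:

```python
import cherrypy

# When socket_file is set, socket_host/socket_port are ignored and the
# server listens on a UNIX domain socket instead of TCP/IP.
cherrypy.config.update({
    'server.socket_file': '/run/app.sock',
})
```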
@@ -79,17 +80,24 @@ class Server(ServerAdapter):
     """The number of worker threads to start up in the pool."""

     thread_pool_max = -1
-    """The maximum size of the worker-thread pool. Use -1 to indicate no limit.
+    """The maximum size of the worker-thread pool.
+
+    Use -1 to indicate no limit.
     """

     max_request_header_size = 500 * 1024
     """The maximum number of bytes allowable in the request headers.
-    If exceeded, the HTTP server should return "413 Request Entity Too Large".
+
+    If exceeded, the HTTP server should return "413 Request Entity Too
+    Large".
     """

     max_request_body_size = 100 * 1024 * 1024
-    """The maximum number of bytes allowable in the request body. If exceeded,
-    the HTTP server should return "413 Request Entity Too Large"."""
+    """The maximum number of bytes allowable in the request body.
+
+    If exceeded, the HTTP server should return "413 Request Entity Too
+    Large".
+    """

     instance = None
     """If not None, this should be an HTTP server instance (such as
@@ -119,7 +127,8 @@ class Server(ServerAdapter):
     the builtin WSGI server. Builtin options are: 'builtin' (to
     use the SSL library built into recent versions of Python).
     You may also register your own classes in the
-    cheroot.server.ssl_adapters dict."""
+    cheroot.server.ssl_adapters dict.
+    """

     statistics = False
     """Turns statistics-gathering on or off for aware HTTP servers."""
@@ -129,11 +138,13 @@ class Server(ServerAdapter):

     wsgi_version = (1, 0)
     """The WSGI version tuple to use with the builtin WSGI server.
-    The provided options are (1, 0) [which includes support for PEP 3333,
-    which declares it covers WSGI version 1.0.1 but still mandates the
-    wsgi.version (1, 0)] and ('u', 0), an experimental unicode version.
-    You may create and register your own experimental versions of the WSGI
-    protocol by adding custom classes to the cheroot.server.wsgi_gateways dict.
+
+    The provided options are (1, 0) [which includes support for PEP
+    3333, which declares it covers WSGI version 1.0.1 but still mandates
+    the wsgi.version (1, 0)] and ('u', 0), an experimental unicode
+    version. You may create and register your own experimental versions
+    of the WSGI protocol by adding custom classes to the
+    cheroot.server.wsgi_gateways dict.
     """

     peercreds = False
@@ -184,7 +195,8 @@ class Server(ServerAdapter):
     def bind_addr(self):
         """Return bind address.

-        A (host, port) tuple for TCP sockets or a str for Unix domain sockts.
+        A (host, port) tuple for TCP sockets or a str for Unix domain
+        sockets.
         """
         if self.socket_file:
             return self.socket_file
@@ -1,7 +1,7 @@
 """CherryPy tools. A "tool" is any helper, adapted to CP.

-Tools are usually designed to be used in a variety of ways (although some
-may only offer one if they choose):
+Tools are usually designed to be used in a variety of ways (although
+some may only offer one if they choose):

 Library calls
     All tools are callables that can be used wherever needed.
@@ -48,10 +48,10 @@ _attr_error = (


 class Tool(object):
-
     """A registered function for use with CherryPy request-processing hooks.

-    help(tool.callable) should give you more information about this Tool.
+    help(tool.callable) should give you more information about this
+    Tool.
     """

     namespace = 'tools'
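A minimal sketch of registering a custom Tool as described above; the tool name and callback are illustrative:

```python
import cherrypy


def log_user_agent():
    ua = cherrypy.request.headers.get('User-Agent', '-')
    cherrypy.log(f'UA: {ua}')


# Register under the 'tools' namespace; enable per-app with
# {'tools.log_ua.on': True} in config.
cherrypy.tools.log_ua = cherrypy.Tool('before_handler', log_user_agent)
```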
@@ -135,8 +135,8 @@ class Tool(object):
     def _setup(self):
         """Hook this tool into cherrypy.request.

-        The standard CherryPy request object will automatically call this
-        method when the tool is "turned on" in config.
+        The standard CherryPy request object will automatically call
+        this method when the tool is "turned on" in config.
         """
         conf = self._merged_args()
         p = conf.pop('priority', None)
@@ -147,15 +147,15 @@ class Tool(object):


 class HandlerTool(Tool):
-
     """Tool which is called 'before main', that may skip normal handlers.

-    If the tool successfully handles the request (by setting response.body),
-    if should return True. This will cause CherryPy to skip any 'normal' page
-    handler. If the tool did not handle the request, it should return False
-    to tell CherryPy to continue on and call the normal page handler. If the
-    tool is declared AS a page handler (see the 'handler' method), returning
-    False will raise NotFound.
+    If the tool successfully handles the request (by setting
+    response.body), if should return True. This will cause CherryPy to
+    skip any 'normal' page handler. If the tool did not handle the
+    request, it should return False to tell CherryPy to continue on and
+    call the normal page handler. If the tool is declared AS a page
+    handler (see the 'handler' method), returning False will raise
+    NotFound.
     """

     def __init__(self, callable, name=None):
@@ -185,8 +185,8 @@ class HandlerTool(Tool):
     def _setup(self):
         """Hook this tool into cherrypy.request.

-        The standard CherryPy request object will automatically call this
-        method when the tool is "turned on" in config.
+        The standard CherryPy request object will automatically call
+        this method when the tool is "turned on" in config.
         """
         conf = self._merged_args()
         p = conf.pop('priority', None)
@@ -197,7 +197,6 @@ class HandlerTool(Tool):


 class HandlerWrapperTool(Tool):
-
     """Tool which wraps request.handler in a provided wrapper function.

     The 'newhandler' arg must be a handler wrapper function that takes a
@@ -232,7 +231,6 @@ class HandlerWrapperTool(Tool):


 class ErrorTool(Tool):
-
     """Tool which is used to replace the default request.error_response."""

     def __init__(self, callable, name=None):
@@ -244,8 +242,8 @@ class ErrorTool(Tool):
     def _setup(self):
         """Hook this tool into cherrypy.request.

-        The standard CherryPy request object will automatically call this
-        method when the tool is "turned on" in config.
+        The standard CherryPy request object will automatically call
+        this method when the tool is "turned on" in config.
         """
         cherrypy.serving.request.error_response = self._wrapper
@@ -254,7 +252,6 @@ class ErrorTool(Tool):


 class SessionTool(Tool):
-
     """Session Tool for CherryPy.

     sessions.locking
@@ -282,8 +279,8 @@ class SessionTool(Tool):
     def _setup(self):
         """Hook this tool into cherrypy.request.

-        The standard CherryPy request object will automatically call this
-        method when the tool is "turned on" in config.
+        The standard CherryPy request object will automatically call
+        this method when the tool is "turned on" in config.
         """
         hooks = cherrypy.serving.request.hooks
@@ -325,7 +322,6 @@ class SessionTool(Tool):


 class XMLRPCController(object):
-
     """A Controller (page handler collection) for XML-RPC.

     To use it, have your controllers subclass this base class (it will
@@ -392,7 +388,6 @@ class SessionAuthTool(HandlerTool):


 class CachingTool(Tool):
-
     """Caching Tool for CherryPy."""

     def _wrapper(self, **kwargs):
@@ -416,11 +411,11 @@ class CachingTool(Tool):


 class Toolbox(object):
-
     """A collection of Tools.

     This object also functions as a config namespace handler for itself.
-    Custom toolboxes should be added to each Application's toolboxes dict.
+    Custom toolboxes should be added to each Application's toolboxes
+    dict.
     """

     def __init__(self, namespace):
@@ -10,19 +10,22 @@ from cherrypy.lib import httputil, reprconf
 class Application(object):
     """A CherryPy Application.

-    Servers and gateways should not instantiate Request objects directly.
-    Instead, they should ask an Application object for a request object.
+    Servers and gateways should not instantiate Request objects
+    directly. Instead, they should ask an Application object for a
+    request object.

-    An instance of this class may also be used as a WSGI callable
-    (WSGI application object) for itself.
+    An instance of this class may also be used as a WSGI callable (WSGI
+    application object) for itself.
     """

     root = None
-    """The top-most container of page handlers for this app. Handlers should
-    be arranged in a hierarchy of attributes, matching the expected URI
-    hierarchy; the default dispatcher then searches this hierarchy for a
-    matching handler. When using a dispatcher other than the default,
-    this value may be None."""
+    """The top-most container of page handlers for this app.
+
+    Handlers should be arranged in a hierarchy of attributes, matching
+    the expected URI hierarchy; the default dispatcher then searches
+    this hierarchy for a matching handler. When using a dispatcher other
+    than the default, this value may be None.
+    """

     config = {}
     """A dict of {path: pathconf} pairs, where 'pathconf' is itself a dict
@@ -32,10 +35,16 @@ class Application(object):
     toolboxes = {'tools': cherrypy.tools}

     log = None
-    """A LogManager instance. See _cplogging."""
+    """A LogManager instance.
+
+    See _cplogging.
+    """

     wsgiapp = None
-    """A CPWSGIApp instance. See _cpwsgi."""
+    """A CPWSGIApp instance.
+
+    See _cpwsgi.
+    """

     request_class = _cprequest.Request
     response_class = _cprequest.Response
@@ -82,12 +91,15 @@ class Application(object):
     def script_name(self):  # noqa: D401; irrelevant for properties
         """The URI "mount point" for this app.

-        A mount point is that portion of the URI which is constant for all URIs
-        that are serviced by this application; it does not include scheme,
-        host, or proxy ("virtual host") portions of the URI.
+        A mount point is that portion of the URI which is constant for
+        all URIs that are serviced by this application; it does not
+        include scheme, host, or proxy ("virtual host") portions of the
+        URI.

-        For example, if script_name is "/my/cool/app", then the URL
-        "http://www.example.com/my/cool/app/page1" might be handled by a
+        For example, if script_name is "/my/cool/app", then the URL "
+        http://www.example.com/my/cool/app/page1"
+        might be handled by a
         "page1" method on the root object.

         The value of script_name MUST NOT end in a slash. If the script_name
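A sketch of the mount-point rules above; the application and path are illustrative:

```python
import cherrypy


class App:
    @cherrypy.expose
    def page1(self):
        return 'page1'


# The mount point must not end in a slash; '' (not '/') means the root.
cherrypy.tree.mount(App(), script_name='/my/cool/app')
```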
@@ -171,9 +183,9 @@ class Application(object):
 class Tree(object):
     """A registry of CherryPy applications, mounted at diverse points.

-    An instance of this class may also be used as a WSGI callable
-    (WSGI application object), in which case it dispatches to all
-    mounted apps.
+    An instance of this class may also be used as a WSGI callable (WSGI
+    application object), in which case it dispatches to all mounted
+    apps.
     """

     apps = {}
@@ -1,10 +1,10 @@
 """WSGI interface (see PEP 333 and 3333).

 Note that WSGI environ keys and values are 'native strings'; that is,
-whatever the type of "" is. For Python 2, that's a byte string; for Python 3,
-it's a unicode string. But PEP 3333 says: "even if Python's str type is
-actually Unicode "under the hood", the content of native strings must
-still be translatable to bytes via the Latin-1 encoding!"
+whatever the type of "" is. For Python 2, that's a byte string; for
+Python 3, it's a unicode string. But PEP 3333 says: "even if Python's
+str type is actually Unicode "under the hood", the content of native
+strings must still be translatable to bytes via the Latin-1 encoding!"
 """

 import sys as _sys
@@ -34,7 +34,6 @@ def downgrade_wsgi_ux_to_1x(environ):


 class VirtualHost(object):
-
     """Select a different WSGI application based on the Host header.

     This can be useful when running multiple sites within one CP server.
@@ -56,7 +55,10 @@ class VirtualHost(object):
         cherrypy.tree.graft(vhost)
     """
     default = None
-    """Required. The default WSGI application."""
+    """Required.
+
+    The default WSGI application.
+    """

     use_x_forwarded_host = True
     """If True (the default), any "X-Forwarded-Host"
@@ -65,11 +67,12 @@ class VirtualHost(object):

     domains = {}
     """A dict of {host header value: application} pairs.
-    The incoming "Host" request header is looked up in this dict,
-    and, if a match is found, the corresponding WSGI application
-    will be called instead of the default. Note that you often need
-    separate entries for "example.com" and "www.example.com".
-    In addition, "Host" headers may contain the port number.
+
+    The incoming "Host" request header is looked up in this dict, and,
+    if a match is found, the corresponding WSGI application will be
+    called instead of the default. Note that you often need separate
+    entries for "example.com" and "www.example.com". In addition, "Host"
+    headers may contain the port number.
     """

     def __init__(self, default, domains=None, use_x_forwarded_host=True):
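A runnable sketch of the VirtualHost usage documented above; the host names and apps are illustrative:

```python
import cherrypy
from cherrypy._cpwsgi import VirtualHost


def make_app(label):
    """Build a trivial WSGI app for demonstration."""
    def app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [label.encode()]
    return app


# The incoming Host header (which may include a port) selects the app;
# 'example.com' and 'www.example.com' usually both need entries.
vhost = VirtualHost(
    default=make_app('default'),
    domains={
        'example.com': make_app('example'),
        'www.example.com': make_app('example'),
    },
)
cherrypy.tree.graft(vhost)
```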
@@ -89,7 +92,6 @@ class VirtualHost(object):


 class InternalRedirector(object):
-
     """WSGI middleware that handles raised cherrypy.InternalRedirect."""

     def __init__(self, nextapp, recursive=False):
@@ -137,7 +139,6 @@ class InternalRedirector(object):


 class ExceptionTrapper(object):
-
     """WSGI middleware that traps exceptions."""

     def __init__(self, nextapp, throws=(KeyboardInterrupt, SystemExit)):
@@ -226,7 +227,6 @@ class _TrappedResponse(object):


 class AppResponse(object):
-
     """WSGI response iterable for CherryPy applications."""

     def __init__(self, environ, start_response, cpapp):
@@ -277,7 +277,10 @@ class AppResponse(object):
         return next(self.iter_response)

     def close(self):
-        """Close and de-reference the current request and response. (Core)"""
+        """Close and de-reference the current request and response.
+
+        (Core)
+        """
         streaming = _cherrypy.serving.response.stream
         self.cpapp.release_serving()

@@ -380,18 +383,20 @@ class AppResponse(object):


 class CPWSGIApp(object):
-
     """A WSGI application object for a CherryPy Application."""

     pipeline = [
         ('ExceptionTrapper', ExceptionTrapper),
         ('InternalRedirector', InternalRedirector),
     ]
-    """A list of (name, wsgiapp) pairs. Each 'wsgiapp' MUST be a
-    constructor that takes an initial, positional 'nextapp' argument,
-    plus optional keyword arguments, and returns a WSGI application
-    (that takes environ and start_response arguments). The 'name' can
-    be any you choose, and will correspond to keys in self.config."""
+    """A list of (name, wsgiapp) pairs.
+
+    Each 'wsgiapp' MUST be a constructor that takes an initial,
+    positional 'nextapp' argument, plus optional keyword arguments, and
+    returns a WSGI application (that takes environ and start_response
+    arguments). The 'name' can be any you choose, and will correspond to
+    keys in self.config.
+    """

     head = None
     """Rather than nest all apps in the pipeline on each call, it's only
@@ -399,9 +404,12 @@ class CPWSGIApp(object):
     this to None again if you change self.pipeline after calling self."""

     config = {}
-    """A dict whose keys match names listed in the pipeline. Each
-    value is a further dict which will be passed to the corresponding
-    named WSGI callable (from the pipeline) as keyword arguments."""
+    """A dict whose keys match names listed in the pipeline.
+
+    Each value is a further dict which will be passed to the
+    corresponding named WSGI callable (from the pipeline) as keyword
+    arguments.
+    """

     response_class = AppResponse
     """The class to instantiate and return as the next app in the WSGI chain.
@ -417,8 +425,8 @@ class CPWSGIApp(object):
|
||||||
def tail(self, environ, start_response):
|
def tail(self, environ, start_response):
|
||||||
"""WSGI application callable for the actual CherryPy application.
|
"""WSGI application callable for the actual CherryPy application.
|
||||||
|
|
||||||
You probably shouldn't call this; call self.__call__ instead,
|
You probably shouldn't call this; call self.__call__ instead, so
|
||||||
so that any WSGI middleware in self.pipeline can run first.
|
that any WSGI middleware in self.pipeline can run first.
|
||||||
"""
|
"""
|
||||||
return self.response_class(environ, start_response, self.cpapp)
|
return self.response_class(environ, start_response, self.cpapp)
|
||||||
|
|
||||||
|
|
|
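For context: the hunks above are docstring reflows in CherryPy's _cpwsgi.py. The pipeline contract they describe can be exercised as in this minimal sketch — not code from the diff; the HeaderStamp middleware and its keyword argument are hypothetical, while cherrypy.Application and its wsgiapp attribute are standard CherryPy API:

    import cherrypy


    class HeaderStamp(object):
        # Hypothetical middleware following the (name, wsgiapp) contract:
        # a constructor taking the next WSGI app plus keyword arguments
        # supplied from CPWSGIApp.config under the pipeline entry's name.
        def __init__(self, nextapp, header='X-Stamp'):
            self.nextapp = nextapp
            self.header = header

        def __call__(self, environ, start_response):
            def _start(status, headers, exc_info=None):
                headers.append((self.header, '1'))  # stamp every response
                return start_response(status, headers, exc_info)
            return self.nextapp(environ, _start)


    app = cherrypy.Application(root=None, script_name='')
    app.wsgiapp.pipeline.append(('stamp', HeaderStamp))
    app.wsgiapp.config['stamp'] = {'header': 'X-Powered-By-Sketch'}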
@@ -1,7 +1,7 @@
-"""
-WSGI server interface (see PEP 333).
+"""WSGI server interface (see PEP 333).
 
-This adds some CP-specific bits to the framework-agnostic cheroot package.
+This adds some CP-specific bits to the framework-agnostic cheroot
+package.
 """
 import sys
 
@@ -35,10 +35,11 @@ class CPWSGIHTTPRequest(cheroot.server.HTTPRequest):
 class CPWSGIServer(cheroot.wsgi.Server):
     """Wrapper for cheroot.wsgi.Server.
 
-    cheroot has been designed to not reference CherryPy in any way,
-    so that it can be used in other frameworks and applications. Therefore,
-    we wrap it here, so we can set our own mount points from cherrypy.tree
-    and apply some attributes from config -> cherrypy.server -> wsgi.Server.
+    cheroot has been designed to not reference CherryPy in any way, so
+    that it can be used in other frameworks and applications. Therefore,
+    we wrap it here, so we can set our own mount points from
+    cherrypy.tree and apply some attributes from config ->
+    cherrypy.server -> wsgi.Server.
     """
 
     fmt = 'CherryPy/{cherrypy.__version__} {cheroot.wsgi.Server.version}'
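The 'config -> cherrypy.server -> wsgi.Server' flow the CPWSGIServer docstring mentions is driven by ordinary server settings; a minimal sketch with standard CherryPy config keys (values illustrative):

    import cherrypy

    cherrypy.config.update({
        'server.socket_host': '127.0.0.1',  # applied to the wrapped cheroot server
        'server.socket_port': 8181,
        'server.thread_pool': 10,
    })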
@@ -137,7 +137,6 @@ def popargs(*args, **kwargs):
         class Root:
             def index(self):
                 #...
-
     """
     # Since keyword arg comes after *args, we have to process it ourselves
     # for lower versions of python.
@@ -201,16 +200,17 @@ def url(path='', qs='', script_name=None, base=None, relative=None):
     If it does not start with a slash, this returns
     (base + script_name [+ request.path_info] + path + qs).
 
-    If script_name is None, cherrypy.request will be used
-    to find a script_name, if available.
+    If script_name is None, cherrypy.request will be used to find a
+    script_name, if available.
 
     If base is None, cherrypy.request.base will be used (if available).
     Note that you can use cherrypy.tools.proxy to change this.
 
-    Finally, note that this function can be used to obtain an absolute URL
-    for the current request path (minus the querystring) by passing no args.
-    If you call url(qs=cherrypy.request.query_string), you should get the
-    original browser URL (assuming no internal redirections).
+    Finally, note that this function can be used to obtain an absolute
+    URL for the current request path (minus the querystring) by passing
+    no args. If you call url(qs=cherrypy.request.query_string), you
+    should get the original browser URL (assuming no internal
+    redirections).
 
     If relative is None or not provided, request.app.relative_urls will
     be used (if available, else False). If False, the output will be an
@@ -320,8 +320,8 @@ def normalize_path(path):
 class _ClassPropertyDescriptor(object):
     """Descript for read-only class-based property.
 
-    Turns a classmethod-decorated func into a read-only property of that class
-    type (means the value cannot be set).
+    Turns a classmethod-decorated func into a read-only property of that
+    class type (means the value cannot be set).
     """
 
     def __init__(self, fget, fset=None):
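The rewrapped url() docstring above is from CherryPy's _helper.py; its no-args and qs= behaviours look like this in practice (handler name illustrative):

    import cherrypy


    class Root(object):
        @cherrypy.expose
        def where(self):
            # No args: absolute URL of the current request path, minus
            # the querystring.
            here = cherrypy.url()
            # Passing the querystring back should reproduce the original
            # browser URL (assuming no internal redirections).
            full = cherrypy.url(qs=cherrypy.request.query_string)
            return '%s\n%s' % (here, full)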
@@ -1,5 +1,4 @@
-"""
-JSON support.
+"""JSON support.
 
 Expose preferred json module as json and provide encode/decode
 convenience functions.
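This hunk reflows the module docstring of CherryPy's _json.py. Its encode/decode helpers back the JSON tools, which are the usual entry point — a minimal sketch, not code from the diff:

    import cherrypy


    class Api(object):
        @cherrypy.expose
        @cherrypy.tools.json_out()  # body serialized via the preferred json module
        def status(self):
            return {'ok': True}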
@@ -6,8 +6,8 @@ def is_iterator(obj):
 
     (i.e. like a generator).
 
-    This will return False for objects which are iterable,
-    but not iterators themselves.
+    This will return False for objects which are iterable, but not
+    iterators themselves.
     """
     from types import GeneratorType
     if isinstance(obj, GeneratorType):
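A quick illustration of the iterable-versus-iterator distinction the is_iterator docstring draws (import path per cherrypy/lib/__init__.py):

    from cherrypy.lib import is_iterator

    assert is_iterator(iter([1, 2]))      # an iterator
    assert is_iterator(x for x in 'ab')   # generators are iterators
    assert not is_iterator([1, 2])        # iterable, but not an iterator
    assert not is_iterator('abc')         # likewise for strings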
@@ -18,7 +18,6 @@ as the credentials store::
         'tools.auth_basic.accept_charset': 'UTF-8',
     }
     app_config = { '/' : basic_auth }
-
 """
 
 import binascii
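The hunk above trims the tail of auth_basic's module docstring example. Restated in full for context, using the module's documented checkpassword_dict helper (credentials illustrative):

    import cherrypy
    from cherrypy.lib import auth_basic

    userpassdict = {'bird': 'bebop', 'ornette': 'wayout'}
    checkpassword = auth_basic.checkpassword_dict(userpassdict)
    basic_auth = {
        'tools.auth_basic.on': True,
        'tools.auth_basic.realm': 'earth',
        'tools.auth_basic.checkpassword': checkpassword,
        'tools.auth_basic.accept_charset': 'UTF-8',
    }
    app_config = {'/': basic_auth}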
@@ -55,7 +55,7 @@ def TRACE(msg):
 
 
 def get_ha1_dict_plain(user_password_dict):
-    """Returns a get_ha1 function which obtains a plaintext password from a
+    """Return a get_ha1 function which obtains a plaintext password from a
     dictionary of the form: {username : password}.
 
     If you want a simple dictionary-based authentication scheme, with plaintext
@@ -72,7 +72,7 @@ def get_ha1_dict_plain(user_password_dict):
 
 
 def get_ha1_dict(user_ha1_dict):
-    """Returns a get_ha1 function which obtains a HA1 password hash from a
+    """Return a get_ha1 function which obtains a HA1 password hash from a
     dictionary of the form: {username : HA1}.
 
     If you want a dictionary-based authentication scheme, but with
@@ -87,7 +87,7 @@ def get_ha1_dict(user_ha1_dict):
 
 
 def get_ha1_file_htdigest(filename):
-    """Returns a get_ha1 function which obtains a HA1 password hash from a
+    """Return a get_ha1 function which obtains a HA1 password hash from a
     flat file with lines of the same format as that produced by the Apache
     htdigest utility. For example, for realm 'wonderland', username 'alice',
     and password '4x5istwelve', the htdigest line would be::
@@ -135,7 +135,7 @@ def synthesize_nonce(s, key, timestamp=None):
 
 
 def H(s):
-    """The hash function H"""
+    """The hash function H."""
     return md5_hex(s)
 
 
@@ -259,10 +259,11 @@ class HttpDigestAuthorization(object):
             return False
 
     def is_nonce_stale(self, max_age_seconds=600):
-        """Returns True if a validated nonce is stale. The nonce contains a
-        timestamp in plaintext and also a secure hash of the timestamp.
-        You should first validate the nonce to ensure the plaintext
-        timestamp is not spoofed.
+        """Return True if a validated nonce is stale.
+
+        The nonce contains a timestamp in plaintext and also a secure
+        hash of the timestamp. You should first validate the nonce to
+        ensure the plaintext timestamp is not spoofed.
         """
         try:
             timestamp, hashpart = self.nonce.split(':', 1)
@@ -275,7 +276,10 @@ class HttpDigestAuthorization(object):
         return True
 
     def HA2(self, entity_body=''):
-        """Returns the H(A2) string. See :rfc:`2617` section 3.2.2.3."""
+        """Return the H(A2) string.
+
+        See :rfc:`2617` section 3.2.2.3.
+        """
         # RFC 2617 3.2.2.3
         # If the "qop" directive's value is "auth" or is unspecified,
         # then A2 is:
@@ -306,7 +310,6 @@ class HttpDigestAuthorization(object):
         4.3. This refers to the entity the user agent sent in the
         request which has the Authorization header. Typically GET
         requests don't have an entity, and POST requests do.
-
         """
         ha2 = self.HA2(entity_body)
         # Request-Digest -- RFC 2617 3.2.2.1
@@ -395,7 +398,6 @@ def digest_auth(realm, get_ha1, key, debug=False, accept_charset='utf-8'):
     key
         A secret string known only to the server, used in the synthesis
         of nonces.
-
     """
     request = cherrypy.serving.request
 
@@ -447,9 +449,7 @@ def digest_auth(realm, get_ha1, key, debug=False, accept_charset='utf-8'):
 
 
 def _respond_401(realm, key, accept_charset, debug, **kwargs):
-    """
-    Respond with 401 status and a WWW-Authenticate header
-    """
+    """Respond with 401 status and a WWW-Authenticate header."""
     header = www_authenticate(
         realm, key,
         accept_charset=accept_charset,
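Wiring the get_ha1 helpers renamed above into a digest-auth app config — a minimal sketch; the realm, users, and key are illustrative:

    import cherrypy
    from cherrypy.lib import auth_digest

    users = {'alice': '4x5istwelve'}
    app_config = {'/': {
        'tools.auth_digest.on': True,
        'tools.auth_digest.realm': 'wonderland',
        'tools.auth_digest.get_ha1': auth_digest.get_ha1_dict_plain(users),
        'tools.auth_digest.key': 'a565c27146791cfb',  # server-side nonce secret
        'tools.auth_digest.accept_charset': 'UTF-8',
    }}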
@@ -42,7 +42,6 @@ from cherrypy.lib import cptools, httputil
 
 
 class Cache(object):
-
     """Base class for Cache implementations."""
 
     def get(self):
@@ -64,17 +63,16 @@ class Cache(object):
 
 # ------------------------------ Memory Cache ------------------------------- #
 class AntiStampedeCache(dict):
-
     """A storage system for cached items which reduces stampede collisions."""
 
     def wait(self, key, timeout=5, debug=False):
         """Return the cached value for the given key, or None.
 
-        If timeout is not None, and the value is already
-        being calculated by another thread, wait until the given timeout has
-        elapsed. If the value is available before the timeout expires, it is
-        returned. If not, None is returned, and a sentinel placed in the cache
-        to signal other threads to wait.
+        If timeout is not None, and the value is already being
+        calculated by another thread, wait until the given timeout has
+        elapsed. If the value is available before the timeout expires,
+        it is returned. If not, None is returned, and a sentinel placed
+        in the cache to signal other threads to wait.
 
         If timeout is None, no waiting is performed nor sentinels used.
         """
@@ -127,7 +125,6 @@ class AntiStampedeCache(dict):
 
 
 class MemoryCache(Cache):
-
     """An in-memory cache for varying response content.
 
     Each key in self.store is a URI, and each value is an AntiStampedeCache.
@@ -381,7 +378,10 @@ def get(invalid_methods=('POST', 'PUT', 'DELETE'), debug=False, **kwargs):
 
 
 def tee_output():
-    """Tee response output to cache storage. Internal."""
+    """Tee response output to cache storage.
+
+    Internal.
+    """
     # Used by CachingTool by attaching to request.hooks
 
     request = cherrypy.serving.request
@@ -441,7 +441,6 @@ def expires(secs=0, force=False, debug=False):
     * Expires
 
     If any are already present, none of the above response headers are set.
-
     """
 
     response = cherrypy.serving.response
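The caching and expires tools touched by these hunks are typically enabled per handler; a minimal sketch (delay/secs values illustrative):

    import cherrypy


    class Root(object):
        @cherrypy.expose
        @cherrypy.tools.caching(delay=300)             # serve from MemoryCache for 5 min
        @cherrypy.tools.expires(secs=300, force=True)  # matching client-side freshness headers
        def index(self):
            return 'cached response'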
@@ -22,7 +22,7 @@ it will call ``serve()`` for you.
 
 import re
 import sys
-import cgi
+import html
 import os
 import os.path
 import urllib.parse
@@ -352,9 +352,9 @@ class CoverStats(object):
                 buffer.append((lineno, line))
             if empty_the_buffer:
                 for lno, pastline in buffer:
-                    yield template % (lno, cgi.escape(pastline))
+                    yield template % (lno, html.escape(pastline))
                 buffer = []
-            yield template % (lineno, cgi.escape(line))
+            yield template % (lineno, html.escape(line))
 
     @cherrypy.expose
     def report(self, name):
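The covercp hunks swap cgi.escape for html.escape: cgi.escape was deprecated since Python 3.2 and removed in 3.8, and html.escape is the stdlib replacement (note it also escapes quotes by default):

    import html

    print(html.escape('<b>&"quoted"</b>'))
    # &lt;b&gt;&amp;&quot;quoted&quot;&lt;/b&gt;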
@@ -184,7 +184,6 @@ To report statistics::
 To format statistics reports::
 
     See 'Reporting', above.
-
 """
 
 import logging
@@ -254,7 +253,6 @@ def proc_time(s):
 
 
 class ByteCountWrapper(object):
-
     """Wraps a file-like object, counting the number of bytes read."""
 
     def __init__(self, rfile):
@@ -307,7 +305,6 @@ def _get_threading_ident():
 
 
 class StatsTool(cherrypy.Tool):
-
     """Record various information about the current request."""
 
     def __init__(self):
@@ -316,8 +313,8 @@ class StatsTool(cherrypy.Tool):
     def _setup(self):
         """Hook this tool into cherrypy.request.
 
-        The standard CherryPy request object will automatically call this
-        method when the tool is "turned on" in config.
+        The standard CherryPy request object will automatically call
+        this method when the tool is "turned on" in config.
         """
         if appstats.get('Enabled', False):
             cherrypy.Tool._setup(self)
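Turning on the StatsTool shown above — a sketch under the assumption that the module's appstats 'Enabled' flag is set (per _setup, the tool is a no-op otherwise); the /stats mount point is illustrative:

    import cherrypy
    from cherrypy.lib import cpstats

    cherrypy.config.update({'tools.cpstats.on': True})
    cherrypy.tree.mount(cpstats.StatsPage(), '/stats')  # built-in report viewer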
@@ -94,8 +94,8 @@ def validate_etags(autotags=False, debug=False):
 def validate_since():
     """Validate the current Last-Modified against If-Modified-Since headers.
 
-    If no code has set the Last-Modified response header, then no validation
-    will be performed.
+    If no code has set the Last-Modified response header, then no
+    validation will be performed.
     """
     response = cherrypy.serving.response
     lastmod = response.headers.get('Last-Modified')
@@ -123,9 +123,9 @@ def validate_since():
 def allow(methods=None, debug=False):
     """Raise 405 if request.method not in methods (default ['GET', 'HEAD']).
 
-    The given methods are case-insensitive, and may be in any order.
-    If only one method is allowed, you may supply a single string;
-    if more than one, supply a list of strings.
+    The given methods are case-insensitive, and may be in any order. If
+    only one method is allowed, you may supply a single string; if more
+    than one, supply a list of strings.
 
     Regardless of whether the current method is allowed or not, this
     also emits an 'Allow' response header, containing the given methods.
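A minimal sketch of the allow tool whose docstring was rewrapped above (handler illustrative):

    import cherrypy


    class Resource(object):
        @cherrypy.expose
        @cherrypy.tools.allow(methods=['GET', 'POST'])  # anything else gets 405 + Allow
        def index(self):
            return 'ok'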
@@ -154,22 +154,23 @@ def proxy(base=None, local='X-Forwarded-Host', remote='X-Forwarded-For',
           scheme='X-Forwarded-Proto', debug=False):
     """Change the base URL (scheme://host[:port][/path]).
 
-    For running a CP server behind Apache, lighttpd, or other HTTP server.
+    For running a CP server behind Apache, lighttpd, or other HTTP
+    server.
 
-    For Apache and lighttpd, you should leave the 'local' argument at the
-    default value of 'X-Forwarded-Host'. For Squid, you probably want to set
-    tools.proxy.local = 'Origin'.
+    For Apache and lighttpd, you should leave the 'local' argument at
+    the default value of 'X-Forwarded-Host'. For Squid, you probably
+    want to set tools.proxy.local = 'Origin'.
 
-    If you want the new request.base to include path info (not just the host),
-    you must explicitly set base to the full base path, and ALSO set 'local'
-    to '', so that the X-Forwarded-Host request header (which never includes
-    path info) does not override it. Regardless, the value for 'base' MUST
-    NOT end in a slash.
+    If you want the new request.base to include path info (not just the
+    host), you must explicitly set base to the full base path, and ALSO
+    set 'local' to '', so that the X-Forwarded-Host request header
+    (which never includes path info) does not override it. Regardless,
+    the value for 'base' MUST NOT end in a slash.
 
     cherrypy.request.remote.ip (the IP address of the client) will be
-    rewritten if the header specified by the 'remote' arg is valid.
-    By default, 'remote' is set to 'X-Forwarded-For'. If you do not
-    want to rewrite remote.ip, set the 'remote' arg to an empty string.
+    rewritten if the header specified by the 'remote' arg is valid. By
+    default, 'remote' is set to 'X-Forwarded-For'. If you do not want to
+    rewrite remote.ip, set the 'remote' arg to an empty string.
     """
 
     request = cherrypy.serving.request
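In practice, the proxy settings described above are applied like this for a CP app behind a reverse proxy that sets X-Forwarded-* headers (values mirror the documented defaults):

    import cherrypy

    cherrypy.config.update({
        'tools.proxy.on': True,
        'tools.proxy.local': 'X-Forwarded-Host',   # default; use 'Origin' for Squid
        'tools.proxy.remote': 'X-Forwarded-For',   # set to '' to keep remote.ip as-is
        'tools.proxy.scheme': 'X-Forwarded-Proto',
    })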
@@ -217,8 +218,8 @@ def proxy(base=None, local='X-Forwarded-Host', remote='X-Forwarded-For',
 def ignore_headers(headers=('Range',), debug=False):
     """Delete request headers whose field names are included in 'headers'.
 
-    This is a useful tool for working behind certain HTTP servers;
-    for example, Apache duplicates the work that CP does for 'Range'
+    This is a useful tool for working behind certain HTTP servers; for
+    example, Apache duplicates the work that CP does for 'Range'
     headers, and will doubly-truncate the response.
     """
     request = cherrypy.serving.request
@@ -281,7 +282,6 @@ def referer(pattern, accept=True, accept_missing=False, error=403,
 
 
 class SessionAuth(object):
-
     """Assert that the user is logged in."""
 
     session_key = 'username'
@@ -319,7 +319,10 @@ Message: %(error_msg)s
 </body></html>""") % vars()).encode('utf-8')
 
     def do_login(self, username, password, from_page='..', **kwargs):
-        """Login. May raise redirect, or return True if request handled."""
+        """Login.
+
+        May raise redirect, or return True if request handled.
+        """
         response = cherrypy.serving.response
         error_msg = self.check_username_and_password(username, password)
         if error_msg:
@@ -336,7 +339,10 @@ Message: %(error_msg)s
             raise cherrypy.HTTPRedirect(from_page or '/')
 
     def do_logout(self, from_page='..', **kwargs):
-        """Logout. May raise redirect, or return True if request handled."""
+        """Logout.
+
+        May raise redirect, or return True if request handled.
+        """
         sess = cherrypy.session
         username = sess.get(self.session_key)
         sess[self.session_key] = None
@@ -346,7 +352,9 @@ Message: %(error_msg)s
             raise cherrypy.HTTPRedirect(from_page)
 
     def do_check(self):
-        """Assert username. Raise redirect, or return True if request handled.
+        """Assert username.
+
+        Raise redirect, or return True if request handled.
         """
         sess = cherrypy.session
         request = cherrypy.serving.request
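A sketch of overriding SessionAuth attributes through the session_auth tool, as its generated docstring in the next hunk describes (the credential check is illustrative):

    import cherrypy


    def check(username, password):
        # Return None on success, or an error message string on failure.
        if (username, password) == ('jon', 'secret'):
            return None
        return 'Wrong login.'


    app_config = {'/': {
        'tools.sessions.on': True,   # SessionAuth keeps the username in the session
        'tools.session_auth.on': True,
        'tools.session_auth.check_username_and_password': check,
    }}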
@@ -408,8 +416,7 @@ def session_auth(**kwargs):
 
     Any attribute of the SessionAuth class may be overridden
     via a keyword arg to this function:
-
-    """ + '\n '.join(
+    """ + '\n' + '\n '.join(
         '{!s}: {!s}'.format(k, type(getattr(SessionAuth, k)).__name__)
         for k in dir(SessionAuth)
         if not k.startswith('__')
@@ -490,8 +497,8 @@ def trailing_slash(missing=True, extra=False, status=None, debug=False):
 def flatten(debug=False):
     """Wrap response.body in a generator that recursively iterates over body.
 
-    This allows cherrypy.response.body to consist of 'nested generators';
-    that is, a set of generators that yield generators.
+    This allows cherrypy.response.body to consist of 'nested
+    generators'; that is, a set of generators that yield generators.
     """
     def flattener(input):
         numchunks = 0
|
||||||
|
|
||||||
|
|
||||||
def prepare_iter(value):
|
def prepare_iter(value):
|
||||||
"""
|
"""Ensure response body is iterable and resolves to False when empty."""
|
||||||
Ensure response body is iterable and resolves to False when empty.
|
|
||||||
"""
|
|
||||||
if isinstance(value, text_or_bytes):
|
if isinstance(value, text_or_bytes):
|
||||||
# strings get wrapped in a list because iterating over a single
|
# strings get wrapped in a list because iterating over a single
|
||||||
# item list is much faster than iterating over every character
|
# item list is much faster than iterating over every character
|
||||||
|
@ -360,7 +358,6 @@ def gzip(compress_level=5, mime_types=['text/html', 'text/plain'],
|
||||||
* No 'gzip' or 'x-gzip' is present in the Accept-Encoding header
|
* No 'gzip' or 'x-gzip' is present in the Accept-Encoding header
|
||||||
* No 'gzip' or 'x-gzip' with a qvalue > 0 is present
|
* No 'gzip' or 'x-gzip' with a qvalue > 0 is present
|
||||||
* The 'identity' value is given with a qvalue > 0.
|
* The 'identity' value is given with a qvalue > 0.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
request = cherrypy.serving.request
|
request = cherrypy.serving.request
|
||||||
response = cherrypy.serving.response
|
response = cherrypy.serving.response
|
||||||
|
|
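Finally, the gzip tool whose bail-out conditions are listed in the last hunk is enabled through standard config (mime_types extended here for illustration):

    import cherrypy

    cherrypy.config.update({
        'tools.gzip.on': True,
        'tools.gzip.compress_level': 5,
        'tools.gzip.mime_types': ['text/html', 'text/plain', 'application/json'],
    })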
Some files were not shown because too many files have changed in this diff.