Mirror of https://github.com/Tautulli/Tautulli.git
Synced 2025-07-06 05:01:14 -07:00

Compare commits: v2.14.1-be ... master (127 commits)
76f6a2da6b
d2a14ea6c0
e6c0a12dd5
24dd403a72
a876e006d6
74786f0ed1
99e575383c
3e784c7495
68dc095c83
ad2ec0e2bf
09c28e434d
cfc7b817b3
b3aa29c677
e4d181ba5b
53e5f89725
0879b848b9
c70381c3ff
f23d3eb81c
2ed603f288
a96fd23d72
65dc466c07
0a4730625c
67fa4ca645
078c293bd7
85e9237608
f9b3631745
8f03e27617
63fe386057
b7c4f2eefe
37ef098718
78864d7a97
62a05712f8
ca0e1c321d
b9cb7102c4
6e6fe1fb65
9c473c6528
5c38de0dfb
ea66f6713b
dd9a35df51
feca713b76
0836fb902c
eb2c372d82
be2e63e7e0
2fe3f039cc
baf926e5db
85b63fb61a
afc29604cc
5b47cebdc7
d9f38f9390
86d775a586
ddb4f6131b
599e52de6a
84be60cb36
d9a87f9726
9289ead996
af752e0acc
86abd130b0
fc2c7cc871
025e8bcf58
bf07912711
48b1c7b522
e69852fa0e
01589cb8b0
f3a2c02e96
d3f7eef84f
2f3d24a0e7
2d3271376b
940c2ae6cd
1cdfd5f30a
e3f4851883
1353247b55
3cf6560de3
9ca8d59372
921a3a0af9
3bb53f480e
6979a4025f
cc1a325eac
de697cb2ca
596cf57d61
ac32297160
330b8a3a82
5cf39cb097
14c9c7a393
cf8fb2e65d
623a9f2919
3fb46a9ab7
cfd81684b7
fb4f0046f3
7d4efac75d
509d18801b
da501df846
43cb027592
2e6f541ec2
822d5a452c
7696d031d3
50ced86ba5
e934d09eff
96c5cb216c
2ee2ab652c
193b82c54a
7d00383d1c
6f84ce8048
709db66b10
2f1607b96b
28ad2716ba
f1a8164b94
3bc94cad6c
a528f052b9
5e977c044a
afa25d45f6
43e71d836a
55573d26ea
f1d44c051d
a3af8ed362
912fd75a2f
5778672dab
dcdf5a2992
73cfa8e0c0
795d568df2
8396a04ce8
8419eee4b2
c505e26656
37ffe68ce2
dc9e778111
68bf1c70f7
ee0b4c0602
5c115dec68
453 changed files with 23301 additions and 19776 deletions
13  .github/workflows/publish-docker.yml (vendored)

@@ -33,7 +33,6 @@ jobs:
             echo "branch=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT
           fi
           echo "commit=${GITHUB_SHA}" >> $GITHUB_OUTPUT
           echo "build_date=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_OUTPUT
           echo "docker_platforms=linux/amd64,linux/arm64/v8,linux/arm/v7,linux/arm/v6" >> $GITHUB_OUTPUT
           echo "docker_image=${{ secrets.DOCKER_REPO }}/tautulli" >> $GITHUB_OUTPUT

@@ -59,7 +58,7 @@ jobs:
        if: success()
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
-         password: ${{ secrets.DOCKER_PASSWORD }}
+         password: ${{ secrets.DOCKER_TOKEN }}

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3

@@ -69,8 +68,14 @@ jobs:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.GHCR_TOKEN }}

+     - name: Extract Docker Metadata
+       id: metadata
+       uses: docker/metadata-action@v5
+       with:
+         images: ${{ steps.prepare.outputs.docker_image }}
+
      - name: Docker Build and Push
-       uses: docker/build-push-action@v5
+       uses: docker/build-push-action@v6
        if: success()
        with:
          context: .

@@ -81,10 +86,10 @@ jobs:
            TAG=${{ steps.prepare.outputs.tag }}
            BRANCH=${{ steps.prepare.outputs.branch }}
            COMMIT=${{ steps.prepare.outputs.commit }}
            BUILD_DATE=${{ steps.prepare.outputs.build_date }}
          tags: |
            ${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.tag }}
            ghcr.io/${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.tag }}
+         labels: ${{ steps.metadata.outputs.labels }}
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache
20  .github/workflows/publish-installers.yml (vendored)

@@ -100,6 +100,24 @@ jobs:
          name: Tautulli-${{ matrix.os }}-installer
          path: Tautulli-${{ matrix.os }}-${{ steps.get_version.outputs.RELEASE_VERSION }}-${{ matrix.arch }}.${{ matrix.ext }}

+  virus-total:
+    name: VirusTotal Scan
+    needs: build-installer
+    if: needs.build-installer.result == 'success' && !contains(github.event.head_commit.message, '[skip ci]')
+    runs-on: ubuntu-latest
+    steps:
+      - name: Download Installers
+        if: needs.build-installer.result == 'success'
+        uses: actions/download-artifact@v4
+
+      - name: Upload to VirusTotal
+        uses: crazy-max/ghaction-virustotal@v4
+        with:
+          vt_api_key: ${{ secrets.VT_API_KEY }}
+          files: |
+            Tautulli-windows-installer/Tautulli-windows-*-x64.exe
+            Tautulli-macos-installer/Tautulli-macos-*-universal.pkg
+
   release:
     name: Release Installers
     needs: build-installer

@@ -143,7 +161,7 @@ jobs:
          prerelease: ${{ endsWith(steps.get_version.outputs.RELEASE_VERSION, '-beta') }}
          files: |
            Tautulli-windows-installer/Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.exe
-           Tautulli-macos-installer/Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.pkg
+           Tautulli-macos-installer/Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-universal.pkg

   discord:
     name: Discord Notification
4  .github/workflows/publish-snap.yml (vendored)

@@ -38,7 +38,7 @@ jobs:
        uses: docker/setup-qemu-action@v3

      - name: Build Snap Package
-       uses: diddlesnaps/snapcraft-multiarch-action@v1
+       uses: diddlesnaps/snapcraft-multiarch-action@master
        id: build
        with:
          architecture: ${{ matrix.architecture }}

@@ -50,7 +50,7 @@ jobs:
          path: ${{ steps.build.outputs.snap }}

      - name: Review Snap Package
-       uses: diddlesnaps/snapcraft-review-tools-action@v1
+       uses: diddlesnaps/snapcraft-review-tools-action@master
        with:
          snap: ${{ steps.build.outputs.snap }}
14  .github/workflows/submit-winget.yml (vendored)

@@ -23,3 +23,17 @@ jobs:
          # getting latest wingetcreate file
          iwr https://aka.ms/wingetcreate/latest -OutFile wingetcreate.exe
          .\wingetcreate.exe update $wingetPackage -s -v $version -u $installerUrl -t $gitToken
+
+  virus-total:
+    name: VirusTotal Scan
+    runs-on: ubuntu-latest
+    steps:
+      - name: Upload to VirusTotal
+        uses: crazy-max/ghaction-virustotal@v4
+        with:
+          vt_api_key: ${{ secrets.VT_API_KEY }}
+          github_token: ${{ secrets.GHACTIONS_TOKEN }}
+          update_release_body: true
+          files: |
+            .exe$
+            .pkg$
107  CHANGELOG.md

@@ -1,14 +1,110 @@
 # Changelog

-## v2.14.1-beta (2024-05-11)
+## v2.15.2 (2025-04-12)
+
+* Activity:
+  * New: Added link to library by clicking media type icon.
+  * New: Added stream count to tab title on homepage. (#2517)
+* History:
+  * Fix: Check stream watched status before stream stopped status. (#2506)
+* Notifications:
+  * Fix: ntfy notifications failing to send if provider link is blank.
+  * Fix: Check Pushover notification attachment is under 5MB limit. (#2396)
+  * Fix: Track URLs redirecting to the correct media page. (#2513)
+  * New: Added audio profile notification parameters.
+  * New: Added PATCH method for Webhook notifications.
+* Graphs:
+  * New: Added Total line to daily streams graph. (Thanks @zdimension) (#2497)
+* UI:
+  * Fix: Users without access to Plex server not showing as inactive.
+  * Fix: Do not redirect API requests to the login page. (#2490)
+  * Change: Swap source and stream columns in stream info modal.
+* Other:
+  * Removed: Python 2 compatibility code. (#2098, #2226) (Thanks @zdimension)
+  * Fix: Various typos. (Thanks @luzpaz) (#2520)
+  * Fix: CherryPy CORS response header not being set correctly. (#2279)
+
+
-## v2.14.0-beta (2024-04-19)
+## v2.15.1 (2025-01-11)
+
+* Activity:
+  * Fix: Detection of HDR transcodes. (Thanks @cdecker08) (#2412, #2466)
+* Newsletters:
+  * Fix: Disable basic authentication for /newsletter and /image endpoints. (#2472)
+* Exporter:
+  * New: Added logos to season and episode exports.
+* Other:
+  * Fix: Docker container https health check.
+
+
+## v2.15.0 (2024-11-24)
+
+* Notes:
+  * Support for Python 3.8 has been dropped. The minimum Python version is now 3.9.
+* Notifications:
+  * New: Allow Telegram blockquote and tg-emoji HTML tags. (Thanks @MythodeaLoL) (#2427)
+  * New: Added Plex slug and Plex Watch URL notification parameters. (#2420)
+  * Change: Update OneSignal API calls to use the new API endpoint for Tautulli Remote App notifications.
+* Newsletters:
+  * Fix: Dumping custom dates in raw newsletter json.
+* History:
+  * Fix: Unable to fix match for artists. (#2429)
+* Exporter:
+  * New: Added movie and episode hasVoiceActivity attribute to exporter fields.
+  * New: Added subtitle canAutoSync attribute to exporter fields.
+  * New: Added logos to the exporter fields.
+* UI:
+  * New: Add friendly name to the top bar of config modals. (Thanks @peagravel) (#2432)
+* API:
+  * New: Added plex slugs to metadata in the get_metadata API command.
+* Other:
+  * Fix: Tautulli failing to start with Python 3.13. (#2426)
+
+
+## v2.14.6 (2024-10-12)
+
+* Newsletters:
+  * Fix: Allow formatting newsletter date parameters.
+  * Change: Support apscheduler compatible cron expressions.
+* UI:
+  * Fix: Round runtime before converting to human duration.
+  * Fix: Make recently added/watched rows touch scrollable.
+* Other:
+  * Fix: Auto-updater not running.
+
+
+## v2.14.5 (2024-09-20)
+
+* Activity:
+  * Fix: Display of 2k resolution on activity card.
+* Notifications:
+  * Fix: ntfy notifications with special characters failing to send.
+* Other:
+  * Fix: Memory leak with database closing. (#2404)
+
+
+## v2.14.4 (2024-08-10)
+
+* Notifications:
+  * Fix: Update Slack notification info card.
+  * New: Added ntfy notification agent. (Thanks @nwithan8) (#2356, #2000)
+* UI:
+  * Fix: macOS platform capitalization.
+* Other:
+  * Fix: Remove deprecated getdefaultlocale. (Thanks @teodorstelian) (#2364, #2345)
+
+
+## v2.14.3 (2024-06-19)
+
+* Graphs:
+  * Fix: History table not loading when clicking on the graphs in some instances.
+* UI:
+  * Fix: Scheduled tasks table not loading when certain tasks are disabled.
+  * Removed: Unnecessary Remote Server checkbox from the settings page.
+* Other:
+  * Fix: Webserver not restarting after the setup wizard.
+  * Fix: Workaround webserver crashing in some instances.
+
+
+## v2.14.2 (2024-05-18)
+
 * History:
   * Fix: Live TV activity not logging to history.

@@ -27,9 +123,11 @@
   * New: Added slug attribute to exporter fields.
   * New: Added track genres to exporter fields.
   * New: Added playlist source URI to exporter fields.
+  * New: Added artProvider and thumbProvider to exporter fields.
 * UI:
   * Fix: Mask deleted usernames in the logs.
   * Fix: Live TV watch stats not showing on the media info page.
+  * Fix: Users without access to Plex server not showing as inactive.
   * Removed: Deprecated synced item pages.
   * Removed: Anonymous redirect settings. Links now use browser no-referrer policy instead.
 * API:

@@ -41,6 +139,7 @@
 * Other:
   * Change: Login cookie expires changed to max-age.
   * Change: Improved key generation for login password. It is recommended to reenter your HTTP Password in the settings after upgrading.
+  * Removed: Python 2 compatibility code. (#2098, #2226) (Thanks @zdimension)


 ## v2.13.4 (2023-12-07)
Dockerfile

@@ -25,4 +25,4 @@ CMD [ "python", "Tautulli.py", "--datadir", "/config" ]
 ENTRYPOINT [ "./start.sh" ]

 EXPOSE 8181
-HEALTHCHECK --start-period=90s CMD curl -ILfSs http://localhost:8181/status > /dev/null || curl -ILfkSs https://localhost:8181/status > /dev/null || exit 1
+HEALTHCHECK --start-period=90s CMD curl -ILfks https://localhost:8181/status > /dev/null || curl -ILfs http://localhost:8181/status > /dev/null || exit 1
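The fix swaps the probe order: HTTPS is tried first (with certificate verification disabled, matching curl -k) and plain HTTP is the fallback, which is the "Docker container https health check" fix listed under v2.15.1 above. A minimal Python sketch of the same probe logic, assuming the port 8181 and /status endpoint from this Dockerfile:

import ssl
import sys
import urllib.request

def healthy(base="localhost:8181", timeout=10):
    # Probe HTTPS first (ignoring self-signed certs, like curl -k), then fall back to HTTP.
    insecure = ssl.create_default_context()
    insecure.check_hostname = False
    insecure.verify_mode = ssl.CERT_NONE
    for url, ctx in ((f"https://{base}/status", insecure), (f"http://{base}/status", None)):
        try:
            with urllib.request.urlopen(url, timeout=timeout, context=ctx) as resp:
                if resp.status < 400:
                    return True
        except OSError:
            continue  # scheme not served; try the next one
    return False

if __name__ == "__main__":
    sys.exit(0 if healthy() else 1)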
README.md

@@ -36,7 +36,7 @@ and [PlexWatchWeb](https://github.com/ecleese/plexWatchWeb).
 [![Docker Stars][badge-docker-stars]][DockerHub]
 [![Downloads][badge-downloads]][Releases Latest]

-[badge-python]: https://img.shields.io/badge/python->=3.8-blue?style=flat-square
+[badge-python]: https://img.shields.io/badge/python->=3.9-blue?style=flat-square
 [badge-docker-pulls]: https://img.shields.io/docker/pulls/tautulli/tautulli?style=flat-square
 [badge-docker-stars]: https://img.shields.io/docker/stars/tautulli/tautulli?style=flat-square
 [badge-downloads]: https://img.shields.io/github/downloads/Tautulli/Tautulli/total?style=flat-square

@@ -129,7 +129,7 @@ This is free software under the GPL v3 open source license. Feel free to do with
 but any modification must be open sourced. A copy of the license is included.

 This software includes Highsoft software libraries which you may freely distribute for
-non-commercial use. Commerical users must licence this software, for more information visit
+non-commercial use. Commercial users must licence this software, for more information visit
 https://shop.highsoft.com/faq/non-commercial#non-commercial-redistribution.
25  Tautulli.py

@@ -34,6 +34,7 @@ import shutil
 import time
 import threading
 import tzlocal
+import ctypes

 import plexpy
 from plexpy import common, config, database, helpers, logger, webstart

@@ -69,8 +70,26 @@ def main():
     plexpy.SYS_ENCODING = None

     try:
         locale.setlocale(locale.LC_ALL, "")
-        plexpy.SYS_LANGUAGE, plexpy.SYS_ENCODING = locale.getdefaultlocale()
+
+        # Attempt to get the system's locale settings
+        language_code, encoding = locale.getlocale()
+
+        # Special handling for Windows platform
+        if sys.platform == 'win32':
+            # Get the user's current language settings on Windows
+            windll = ctypes.windll.kernel32
+            lang_id = windll.GetUserDefaultLCID()
+
+            # Map Windows language ID to locale identifier
+            language_code = locale.windows_locale.get(lang_id, '')
+
+            # Get the preferred encoding
+            encoding = locale.getpreferredencoding()
+
+        # Assign values to application-specific variable
+        plexpy.SYS_LANGUAGE = language_code
+        plexpy.SYS_ENCODING = encoding
+
     except (locale.Error, IOError):
         pass

@@ -110,7 +129,7 @@ def main():
     if args.quiet:
         plexpy.QUIET = True

-    # Do an intial setup of the logger.
+    # Do an initial setup of the logger.
     # Require verbose for pre-initilization to see critical errors
     logger.initLogger(console=not plexpy.QUIET, log_dir=False, verbose=True)
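For context, locale.getdefaultlocale() has been deprecated since Python 3.11 (the "Remove deprecated getdefaultlocale" entry under v2.14.4 above), so the patch switches to the supported calls plus a Windows LCID lookup. A minimal sketch of the replacement pattern:

import locale

locale.setlocale(locale.LC_ALL, "")

# The old one-liner, locale.getdefaultlocale(), now emits a DeprecationWarning;
# the supported replacements used in the patch above are:
language_code, _ = locale.getlocale()     # e.g. ('en_US', 'UTF-8')
encoding = locale.getpreferredencoding()  # e.g. 'UTF-8'
print(language_code, encoding)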
@@ -1478,7 +1478,8 @@ a:hover .dashboard-stats-square {
     text-align: center;
     position: relative;
     z-index: 0;
-    overflow: hidden;
+    overflow: auto;
+    scrollbar-width: none;
 }
 .dashboard-recent-media {
     width: 100%;

@@ -4324,6 +4325,10 @@ a:hover .overlay-refresh-image:hover {
 .stream-info tr:nth-child(even) td {
     background-color: rgba(255,255,255,0.010);
 }
+.stream-info td:nth-child(3),
+.stream-info th:nth-child(3) {
+    width: 25px;
+}
 .number-input {
     margin: 0 !important;
     width: 55px !important;
@@ -74,6 +74,7 @@ DOCUMENTATION :: END
     parent_href = page('info', data['parent_rating_key'])
     grandparent_href = page('info', data['grandparent_rating_key'])
     user_href = page('user', data['user_id']) if data['user_id'] else '#'
+    library_href = page('library', data['section_id']) if data['section_id'] else '#'
     season = short_season(data['parent_title'])
 %>
 <div class="dashboard-activity-instance" id="activity-instance-${sk}" data-key="${sk}" data-id="${data['session_id']}"

@@ -463,21 +464,27 @@ DOCUMENTATION :: END
 <div class="dashboard-activity-metadata-subtitle-container">
 % if data['live']:
     <div id="media-type-${sk}" class="dashboard-activity-metadata-media_type-icon" title="Live TV">
-        <i class="fa fa-fw fa-broadcast-tower"></i>
+        <a href="${library_href}">
+            <i class="fa fa-fw fa-broadcast-tower"></i>
+        </a>
     </div>
 % elif data['channel_stream'] == 0:
     <div id="media-type-${sk}" class="dashboard-activity-metadata-media_type-icon" title="${data['media_type'].capitalize()}">
+        <a href="${library_href}">
 % if data['media_type'] == 'movie':
-        <i class="fa fa-fw fa-film"></i>
+            <i class="fa fa-fw fa-film"></i>
 % elif data['media_type'] == 'episode':
-        <i class="fa fa-fw fa-television"></i>
+            <i class="fa fa-fw fa-television"></i>
 % elif data['media_type'] == 'track':
-        <i class="fa fa-fw fa-music"></i>
+            <i class="fa fa-fw fa-music"></i>
 % elif data['media_type'] == 'photo':
-        <i class="fa fa-fw fa-picture-o"></i>
+            <i class="fa fa-fw fa-picture-o"></i>
 % elif data['media_type'] == 'clip':
-        <i class="fa fa-fw fa-video-camera"></i>
+            <i class="fa fa-fw fa-video-camera"></i>
 % else:
             <i class="fa fa-fw fa-question-circle"></i>
 % endif
+        </a>
     </div>
 % else:
     <div id="media-type-${sk}" class="dashboard-activity-metadata-media_type-icon" title="Channel">
@@ -20,6 +20,7 @@ DOCUMENTATION :: END
     export = exporter.Export()
     thumb_media_types = ', '.join([export.PLURAL_MEDIA_TYPES[k] for k, v in export.MEDIA_TYPES.items() if v[0]])
     art_media_types = ', '.join([export.PLURAL_MEDIA_TYPES[k] for k, v in export.MEDIA_TYPES.items() if v[1]])
+    logo_media_types = ', '.join([export.PLURAL_MEDIA_TYPES[k] for k, v in export.MEDIA_TYPES.items() if v[2]])
 %>
 <div class="modal-dialog" role="document">
     <div class="modal-content">

@@ -144,6 +145,22 @@ DOCUMENTATION :: END
         Select the level to export background artwork image files.<br>Note: Only applies to ${art_media_types}.
     </p>
 </div>
+<div class="form-group">
+    <label for="export_logo_level">Logo Image Export Level</label>
+    <div class="row">
+        <div class="col-md-12">
+            <select class="form-control" id="export_logo_level" name="export_logo_level">
+                <option value="0" selected>Level 0 - None / Custom</option>
+                <option value="1">Level 1 - Uploaded and Selected Logos Only</option>
+                <option value="2">Level 2 - Selected and Locked Logos Only</option>
+                <option value="9">Level 9 - All Selected Logos</option>
+            </select>
+        </div>
+    </div>
+    <p class="help-block">
+        Select the level to export logo image files.<br>Note: Only applies to ${logo_media_types}.
+    </p>
+</div>
 <p class="help-block">
     Warning: Exporting images may take a long time! Images will be saved to a folder alongside the data file.
 </p>

@@ -231,6 +248,7 @@ DOCUMENTATION :: END
     $('#export_media_info_level').prop('disabled', true);
     $("#export_thumb_level").prop('disabled', true);
     $("#export_art_level").prop('disabled', true);
+    $("#export_logo_level").prop('disabled', true);
     export_custom_metadata_fields.disable();
     export_custom_media_info_fields.disable();
 } else {

@@ -238,6 +256,7 @@ DOCUMENTATION :: END
     $('#export_media_info_level').prop('disabled', false);
     $("#export_thumb_level").prop('disabled', false);
     $("#export_art_level").prop('disabled', false);
+    $("#export_logo_level").prop('disabled', false);
     export_custom_metadata_fields.enable();
     export_custom_media_info_fields.enable();
 }

@@ -252,6 +271,7 @@ DOCUMENTATION :: END
 var file_format = $('#export_file_format option:selected').val();
 var thumb_level = $("#export_thumb_level option:selected").val();
 var art_level = $("#export_art_level option:selected").val();
+var logo_level = $("#export_logo_level option:selected").val();
 var custom_fields = [
     $('#export_custom_metadata_fields').val(),
     $('#export_custom_media_info_fields').val()

@@ -270,6 +290,7 @@ DOCUMENTATION :: END
     file_format: file_format,
     thumb_level: thumb_level,
     art_level: art_level,
+    logo_level: logo_level,
     custom_fields: custom_fields,
     export_type: export_type,
     individual_files: individual_files
@@ -301,6 +301,10 @@
         return obj;
     }, {});

+    if (!("Total" in chart_visibility)) {
+        chart_visibility["Total"] = false;
+    }
+
     return data_series.map(function(s) {
         var obj = Object.assign({}, s);
         obj.visible = (chart_visibility[s.name] !== false);

@@ -327,7 +331,8 @@
         'Direct Play': '#E5A00D',
         'Direct Stream': '#FFFFFF',
         'Transcode': '#F06464',
-        'Max. Concurrent Streams': '#96C83C'
+        'Max. Concurrent Streams': '#96C83C',
+        'Total': '#96C83C'
     };
     var series_colors = [];
     $.each(data_series, function(index, series) {
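The added guard keeps the new Total series (v2.15.2: "Added Total line to daily streams graph") hidden by default while still honoring any toggle the user has saved. The same defaulting pattern in Python, with a plain dict standing in for the saved localStorage visibility map:

def apply_visibility(series, saved):
    # Default the Total series to hidden, then apply saved per-series toggles.
    saved.setdefault("Total", False)
    return [dict(s, visible=saved.get(s["name"]) is not False) for s in series]

data = [{"name": "Direct Play"}, {"name": "Transcode"}, {"name": "Total"}]
print(apply_visibility(data, {}))
# Direct Play and Transcode stay visible; Total starts hidden.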
@@ -92,10 +92,10 @@
 <h3 class="pull-left"><span id="recently-added-xml">Recently Added</span></h3>
 <ul class="nav nav-header nav-dashboard pull-right" style="margin-top: -3px;">
     <li>
-        <a href="#" id="recently-added-page-left" class="paginate btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a>
+        <a href="#" id="recently-added-page-left" class="paginate-added btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
     </li>
     <li>
-        <a href="#" id="recently-added-page-right" class="paginate btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a>
+        <a href="#" id="recently-added-page-right" class="paginate-added btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
     </li>
 </ul>
 <div class="button-bar">

@@ -298,6 +298,8 @@
 $('#currentActivityHeader-bandwidth-tooltip').tooltip({ container: 'body', placement: 'right', delay: 50 });

+var title = document.title;
+
 function getCurrentActivity() {
     activity_ready = false;

@@ -368,6 +370,8 @@
     $('#currentActivityHeader').show();

+    document.title = stream_count + ' stream' + (stream_count > 1 ? 's' : '') + ' | ' + title;
+
     sessions.forEach(function (session) {
         var s = (typeof Proxy === "function") ? new Proxy(session, defaultHandler) : session;
         var key = s.session_key;

@@ -600,6 +604,8 @@
 } else {
     $('#currentActivityHeader').hide();
     $('#currentActivity').html('<div id="dashboard-no-activity" class="text-muted">Nothing is currently being played.</div>');
+
+    document.title = title;
 }

 activity_ready = true;

@@ -936,10 +942,14 @@
         count: recently_added_count,
         media_type: recently_added_type
     },
+    beforeSend: function () {
+        $(".dashboard-recent-media-row").animate({ scrollLeft: 0 }, 1000);
+    },
     complete: function (xhr, status) {
         $("#recentlyAdded").html(xhr.responseText);
         $('#ajaxMsg').fadeOut();
-        highlightAddedScrollerButton();
+        highlightScrollerButton("#recently-added");
+        paginateScroller("#recently-added", ".paginate-added");
     }
 });

@@ -955,57 +965,11 @@
     recentlyAdded(recently_added_count, recently_added_type);
 }

-function highlightAddedScrollerButton() {
-    var scroller = $("#recently-added-row-scroller");
-    var numElems = scroller.find("li:visible").length;
-    scroller.width(numElems * 175);
-    if (scroller.width() > $("body").find(".container-fluid").width()) {
-        $("#recently-added-page-right").removeClass("disabled");
-    } else {
-        $("#recently-added-page-right").addClass("disabled");
-    }
-}
-
-$(window).resize(function () {
-    highlightAddedScrollerButton();
-});
-
-function resetScroller() {
-    leftTotal = 0;
-    $("#recently-added-row-scroller").animate({ left: leftTotal }, 1000);
-    $("#recently-added-page-left").addClass("disabled").blur();
-}
-
-var leftTotal = 0;
-$(".paginate").click(function (e) {
-    e.preventDefault();
-    var scroller = $("#recently-added-row-scroller");
-    var containerWidth = $("body").find(".container-fluid").width();
-    var scrollAmount = $(this).data("id") * parseInt((containerWidth - 15) / 175) * 175;
-    var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
-
-    leftTotal = Math.max(Math.min(leftTotal + scrollAmount, 0), leftMax);
-    scroller.animate({ left: leftTotal }, 250);
-
-    if (leftTotal === 0) {
-        $("#recently-added-page-left").addClass("disabled").blur();
-    } else {
-        $("#recently-added-page-left").removeClass("disabled");
-    }
-
-    if (leftTotal === leftMax) {
-        $("#recently-added-page-right").addClass("disabled").blur();
-    } else {
-        $("#recently-added-page-right").removeClass("disabled");
-    }
-});
-
 $('#recently-added-toggles').on('change', function () {
     $('#recently-added-toggles > label').removeClass('active');
     selected_filter = $('input[name=recently-added-toggle]:checked', '#recently-added-toggles');
     $(selected_filter).closest('label').addClass('active');
     recently_added_type = $(selected_filter).val();
-    resetScroller();
     setLocalStorage('home_stats_recently_added_type', recently_added_type);
     recentlyAdded(recently_added_count, recently_added_type);
 });

@@ -1013,7 +977,6 @@
 $('#recently-added-count').change(function () {
     forceMinMax($(this));
     recently_added_count = $(this).val();
-    resetScroller();
     setLocalStorage('home_stats_recently_added_count', recently_added_count);
     recentlyAdded(recently_added_count, recently_added_type);
 });
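The page-local scroller code is replaced by the shared paginateScroller and highlightScrollerButton helpers (defined in the next diff), which drive the browser's native scrollLeft instead of animating a CSS left offset; together with the overflow: auto change above, that is what makes the rows touch-scrollable (v2.14.6). The stepping math pages by whole 175px-wide items; a Python sketch of the calculation:

ITEM_WIDTH = 175  # px; each poster in the row is 175px wide

def next_scroll(container_width, scroll_current, direction, scroller_width):
    # Scroll by as many whole items as fit in the container, snap to an item
    # boundary, and clamp at the end of the row. direction is +1 or -1; the
    # browser clamps negative scrollLeft values to 0.
    scroll_amount = direction * (container_width // ITEM_WIDTH) * ITEM_WIDTH
    scroll_max = scroller_width - abs(scroll_amount)
    return min((scroll_current // ITEM_WIDTH) * ITEM_WIDTH + scroll_amount, scroll_max)

# A 1000px container pages five items (875px) at a time:
print(next_scroll(1000, 0, +1, 3500))  # 875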
@@ -360,7 +360,8 @@ function humanDuration(ms, sig='dhm', units='ms', return_seconds=300000) {
         sig = 'dhms'
     }

-    ms = ms * factors[units];
+    r = factors[sig.slice(-1)];
+    ms = Math.round(ms * factors[units] / r) * r;

     h = ms % factors['d'];
     d = Math.trunc(ms / factors['d']);

@@ -929,3 +930,50 @@ $('.modal').on('hide.bs.modal', function (e) {
 $.fn.hasScrollBar = function() {
     return this.get(0).scrollHeight > this.get(0).clientHeight;
 }
+
+function paginateScroller(scrollerId, buttonClass) {
+    $(buttonClass).click(function (e) {
+        e.preventDefault();
+        var scroller = $(scrollerId + "-row-scroller");
+        var scrollerParent = scroller.parent();
+        var containerWidth = scrollerParent.width();
+        var scrollCurrent = scrollerParent.scrollLeft();
+        var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
+        var scrollMax = scroller.width() - Math.abs(scrollAmount);
+        var scrollTotal = Math.min(parseInt(scrollCurrent / 175) * 175 + scrollAmount, scrollMax);
+        scrollerParent.animate({ scrollLeft: scrollTotal }, 250);
+    });
+}
+
+function highlightScrollerButton(scrollerId) {
+    var scroller = $(scrollerId + "-row-scroller");
+    var scrollerParent = scroller.parent();
+    var buttonLeft = $(scrollerId + "-page-left");
+    var buttonRight = $(scrollerId + "-page-right");
+
+    var numElems = scroller.find("li").length;
+    scroller.width(numElems * 175);
+    $(buttonLeft).addClass("disabled").blur();
+    if (scroller.width() > scrollerParent.width()) {
+        $(buttonRight).removeClass("disabled");
+    } else {
+        $(buttonRight).addClass("disabled");
+    }
+
+    scrollerParent.scroll(function () {
+        var scrollCurrent = $(this).scrollLeft();
+        var scrollMax = scroller.width() - $(this).width();
+
+        if (scrollCurrent == 0) {
+            $(buttonLeft).addClass("disabled").blur();
+        } else {
+            $(buttonLeft).removeClass("disabled");
+        }
+
+        if (scrollCurrent >= scrollMax) {
+            $(buttonRight).addClass("disabled").blur();
+        } else {
+            $(buttonRight).removeClass("disabled");
+        }
+    });
+}
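humanDuration now rounds its input to the smallest displayed unit before decomposing it (v2.14.6: "Round runtime before converting to human duration"), so a runtime of 89.6 minutes renders as 1 hr 30 mins instead of truncating to 1 hr 29 mins. A Python sketch of the rounding step, assuming a milliseconds factors table like the one used by the script:

FACTORS = {"d": 86400000, "h": 3600000, "m": 60000, "s": 1000, "ms": 1}

def round_to_sig(value, units="ms", sig="dhm"):
    # Convert to ms, then round to a multiple of the least significant
    # displayed unit so the decomposition below never truncates.
    r = FACTORS[sig[-1]]
    return round(value * FACTORS[units] / r) * r

print(round_to_sig(89.6, units="m"))  # 5400000 ms == 1 hr 30 mins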
@@ -100,7 +100,7 @@ export_table_options = {
 "createdCell": function (td, cellData, rowData, row, col) {
     if (cellData !== '') {
         var images = '';
-        if (rowData['thumb_level'] || rowData['art_level']) {
+        if (rowData['thumb_level'] || rowData['art_level'] || rowData['logo_level']) {
             images = ' + images';
         }
         $(td).html(cellData + images);

@@ -161,14 +161,14 @@ export_table_options = {
 if (cellData === 1 && rowData['exists']) {
     var tooltip_title = '';
     var icon = '';
-    if (rowData['thumb_level'] || rowData['art_level'] || rowData['individual_files']) {
+    if (rowData['thumb_level'] || rowData['art_level'] || rowData['logo_level'] || rowData['individual_files']) {
         tooltip_title = 'Zip Archive';
         icon = 'fa-file-archive';
     } else {
         tooltip_title = rowData['file_format'].toUpperCase() + ' File';
         icon = 'fa-file-download';
     }
-    var icon = (rowData['thumb_level'] || rowData['art_level'] || rowData['individual_files']) ? 'fa-file-archive' : 'fa-file-download';
+    var icon = (rowData['thumb_level'] || rowData['art_level'] || rowData['logo_level'] || rowData['individual_files']) ? 'fa-file-archive' : 'fa-file-download';
     $(td).html('<button class="btn btn-xs btn-success pull-left" data-id="' + rowData['export_id'] + '"><span data-toggle="tooltip" data-placement="left" title="' + tooltip_title + '"><i class="fa ' + icon + ' fa-fw"></i> Download</span></button>');
 } else if (cellData === 0) {
     var percent = Math.min(getPercent(rowData['exported_items'], rowData['total_items']), 99)
@@ -149,10 +149,10 @@ DOCUMENTATION :: END
 <div class="table-card-header">
     <ul class="nav nav-header nav-dashboard pull-right">
         <li>
-            <a href="#" id="recently-watched-page-left" class="paginate-watched btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a>
+            <a href="#" id="recently-watched-page-left" class="paginate-watched btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
         </li>
         <li>
-            <a href="#" id="recently-watched-page-right" class="paginate-watched btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a>
+            <a href="#" id="recently-watched-page-right" class="paginate-watched btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
         </li>
     </ul>
     <div class="header-bar">

@@ -175,10 +175,10 @@ DOCUMENTATION :: END
 <div class="table-card-header">
     <ul class="nav nav-header nav-dashboard pull-right">
         <li>
-            <a href="#" id="recently-added-page-left" class="paginate-added btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a>
+            <a href="#" id="recently-added-page-left" class="paginate-added btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
         </li>
         <li>
-            <a href="#" id="recently-added-page-right" class="paginate-added btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a>
+            <a href="#" id="recently-added-page-right" class="paginate-added btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
         </li>
     </ul>
     <div class="header-bar">

@@ -690,7 +690,8 @@ DOCUMENTATION :: END
     },
     complete: function(xhr, status) {
         $("#library-recently-watched").html(xhr.responseText);
-        highlightWatchedScrollerButton();
+        highlightScrollerButton("#recently-watched");
+        paginateScroller("#recently-watched", ".paginate-watched");
     }
 });
 }

@@ -706,7 +707,8 @@ DOCUMENTATION :: END
     },
     complete: function(xhr, status) {
         $("#library-recently-added").html(xhr.responseText);
-        highlightAddedScrollerButton();
+        highlightScrollerButton("#recently-added");
+        paginateScroller("#recently-added", ".paginate-added");
     }
 });
 }

@@ -716,83 +718,8 @@ DOCUMENTATION :: END
     recentlyAdded();
 % endif

-function highlightWatchedScrollerButton() {
-    var scroller = $("#recently-watched-row-scroller");
-    var numElems = scroller.find("li").length;
-    scroller.width(numElems * 175);
-    if (scroller.width() > $("#library-recently-watched").width()) {
-        $("#recently-watched-page-right").removeClass("disabled");
-    } else {
-        $("#recently-watched-page-right").addClass("disabled");
-    }
-}
-
-function highlightAddedScrollerButton() {
-    var scroller = $("#recently-added-row-scroller");
-    var numElems = scroller.find("li").length;
-    scroller.width(numElems * 175);
-    if (scroller.width() > $("#library-recently-added").width()) {
-        $("#recently-added-page-right").removeClass("disabled");
-    } else {
-        $("#recently-added-page-right").addClass("disabled");
-    }
-}
-
-$(window).resize(function() {
-    highlightWatchedScrollerButton();
-    highlightAddedScrollerButton();
-});
-
 $('div.art-face').animate({ opacity: 0.2 }, { duration: 1000 });

-var leftTotalWatched = 0;
-$(".paginate-watched").click(function (e) {
-    e.preventDefault();
-    var scroller = $("#recently-watched-row-scroller");
-    var containerWidth = $("#library-recently-watched").width();
-    var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
-    var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
-
-    leftTotalWatched = Math.max(Math.min(leftTotalWatched + scrollAmount, 0), leftMax);
-    scroller.animate({ left: leftTotalWatched }, 250);
-
-    if (leftTotalWatched == 0) {
-        $("#recently-watched-page-left").addClass("disabled").blur();
-    } else {
-        $("#recently-watched-page-left").removeClass("disabled");
-    }
-
-    if (leftTotalWatched == leftMax) {
-        $("#recently-watched-page-right").addClass("disabled").blur();
-    } else {
-        $("#recently-watched-page-right").removeClass("disabled");
-    }
-});
-
-var leftTotalAdded = 0;
-$(".paginate-added").click(function (e) {
-    e.preventDefault();
-    var scroller = $("#recently-added-row-scroller");
-    var containerWidth = $("#library-recently-added").width();
-    var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
-    var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
-
-    leftTotalAdded = Math.max(Math.min(leftTotalAdded + scrollAmount, 0), leftMax);
-    scroller.animate({ left: leftTotalAdded }, 250);
-
-    if (leftTotalAdded == 0) {
-        $("#recently-added-page-left").addClass("disabled").blur();
-    } else {
-        $("#recently-added-page-left").removeClass("disabled");
-    }
-
-    if (leftTotalAdded == leftMax) {
-        $("#recently-added-page-right").addClass("disabled").blur();
-    } else {
-        $("#recently-added-page-right").removeClass("disabled");
-    }
-});
-
 $(document).ready(function () {

     // Javascript to enable link to tab
@@ -36,7 +36,7 @@ DOCUMENTATION :: END

 %>
 <div class="dashboard-recent-media-row">
-    <div id="recently-added-row-scroller" style="left: 0;">
+    <div id="recently-added-row-scroller">
         <ul class="dashboard-recent-media list-unstyled">
         % for item in data:
             <li>
@@ -3,7 +3,7 @@
 <div class="modal-content">
     <div class="modal-header">
         <button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
-        <h4 class="modal-title" id="mobile-device-config-modal-header">${device['device_name']} Settings <small><span class="device_id">(Device ID: ${device['id']})</span></small></h4>
+        <h4 class="modal-title" id="mobile-device-config-modal-header">${device['device_name']} Settings <small><span class="device_id">(Device ID: ${device['id']}${' - ' + device['friendly_name'] if device['friendly_name'] else ''})</span></small></h4>
     </div>
     <div class="modal-body">
         <div class="container-fluid">
@@ -13,7 +13,7 @@
 <div class="modal-content">
     <div class="modal-header">
         <button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
-        <h4 class="modal-title" id="newsletter-config-modal-header">${newsletter['agent_label']} Newsletter Settings <small><span class="newsletter_id">(Newsletter ID: ${newsletter['id']})</span></small></h4>
+        <h4 class="modal-title" id="newsletter-config-modal-header">${newsletter['agent_label']} Newsletter Settings <small><span class="newsletter_id">(Newsletter ID: ${newsletter['id']}${' - ' + newsletter['friendly_name'] if newsletter['friendly_name'] else ''})</span></small></h4>
     </div>
     <div class="modal-body">
         <div class="container-fluid">

@@ -50,7 +50,10 @@
 </div>
 <p class="help-block">
     <span id="simple_cron_message">Set the schedule for the newsletter.</span>
-    <span id="custom_cron_message">Set the schedule for the newsletter using a <a href="${anon_url('https://crontab.guru')}" target="_blank" rel="noreferrer">custom crontab</a>. Only standard cron values are valid.</span>
+    <span id="custom_cron_message">
+        Set the schedule for the newsletter using a <a href="${anon_url('https://crontab.guru')}" target="_blank" rel="noreferrer">custom crontab</a>.
+        <a href="${anon_url('https://apscheduler.readthedocs.io/en/3.x/modules/triggers/cron.html#expression-types')}" target="_blank" rel="noreferrer">Click here</a> for a list of supported expressions.
+    </span>
 </p>
 </div>
 <div class="form-group">

@@ -481,7 +484,7 @@
 });

 if (${newsletter['config']['custom_cron']}) {
-    $('#cron_value').val('${newsletter['cron']}');
+    $('#cron_value').val('${newsletter['cron'] | n}');
 } else {
     try {
         cron_widget.cron('value', '${newsletter['cron']}');
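The | n expression filter makes Mako skip its default output escaping for the cron value, so apscheduler-style expressions containing quotes or other HTML-significant characters reach the inline script verbatim (v2.14.6: "Support apscheduler compatible cron expressions"). A minimal sketch of the difference; the default_filters=["h"] setting here stands in for however Tautulli's template lookup enables escaping:

from mako.template import Template

escaped = Template("${cron}", default_filters=["h"])
raw = Template("${cron | n}", default_filters=["h"])

expr = 'day="2nd fri"'  # hypothetical expression containing quotes
print(escaped.render(cron=expr))  # day=&#34;2nd fri&#34;
print(raw.render(cron=expr))      # day="2nd fri"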
@@ -12,7 +12,7 @@
 <div class="modal-content">
     <div class="modal-header">
         <button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
-        <h4 class="modal-title" id="notifier-config-modal-header">${notifier['agent_label']} Settings <small><span class="notifier_id">(Notifier ID: ${notifier['id']})</span></small></h4>
+        <h4 class="modal-title" id="notifier-config-modal-header">${notifier['agent_label']} Settings <small><span class="notifier_id">(Notifier ID: ${notifier['id']}${' - ' + notifier['friendly_name'] if notifier['friendly_name'] else ''})</span></small></h4>
     </div>
     <div class="modal-body">
         <div class="container-fluid">
@@ -36,7 +36,7 @@ DOCUMENTATION :: END
 %>
 % if data:
 <div class="dashboard-recent-media-row">
-    <div id="recently-added-row-scroller" style="left: 0;">
+    <div id="recently-added-row-scroller">
         <ul class="dashboard-recent-media list-unstyled">
         % for item in data:
             <div class="dashboard-recent-media-instance">
@@ -13,8 +13,6 @@ DOCUMENTATION :: END
 import datetime
 import plexpy
 from plexpy import common, helpers
-
-scheduled_jobs = [j.id for j in plexpy.SCHED.get_jobs()]
 %>

 <table class="config-scheduler-table small-muted">

@@ -29,16 +27,15 @@ DOCUMENTATION :: END
 </thead>
 <tbody>
 % for job, job_type in common.SCHEDULER_LIST.items():
-    % if job in scheduled_jobs:
     <%
         sched_job = plexpy.SCHED.get_job(job)
-        now = datetime.datetime.now(sched_job.next_run_time.tzinfo)
     %>
+    % if sched_job:
     <tr>
         <td>${sched_job.id}</td>
         <td><i class="fa fa-sm fa-fw fa-check"></i> Active</td>
         <td>${helpers.format_timedelta_Hms(sched_job.trigger.interval)}</td>
-        <td>${helpers.format_timedelta_Hms(sched_job.next_run_time - now)}</td>
+        <td>${helpers.format_timedelta_Hms(sched_job.next_run_time - datetime.datetime.now(sched_job.next_run_time.tzinfo))}</td>
        <td>${sched_job.next_run_time.astimezone(plexpy.SYS_TIMEZONE).strftime('%Y-%m-%d %H:%M:%S')}</td>
     </tr>
     % elif job_type == 'websocket' and plexpy.WS_CONNECTED:
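Guarding on the job object returned by get_job(), instead of a pre-built list of scheduled IDs, lets a disabled task fall through to its fallback row rather than breaking the whole table (v2.14.3: "Scheduled tasks table not loading when certain tasks are disabled"). A hedged sketch of the tz-aware countdown with apscheduler; the job id is hypothetical:

import datetime

from apscheduler.schedulers.background import BackgroundScheduler

sched = BackgroundScheduler()
sched.add_job(lambda: None, "interval", minutes=15, id="check_server")
sched.start()

job = sched.get_job("check_server")  # returns None if the job is not scheduled
if job:
    # next_run_time is timezone-aware, so "now" must carry the same tzinfo
    remaining = job.next_run_time - datetime.datetime.now(job.next_run_time.tzinfo)
    print(f"{job.id} runs in {remaining}")
sched.shutdown()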
@@ -767,7 +767,6 @@
            data-identifier="${config['pms_identifier']}"
            data-ip="${config['pms_ip']}"
            data-port="${config['pms_port']}"
-           data-local="${int(not int(config['pms_is_remote']))}"
            data-ssl="${config['pms_ssl']}"
            data-is_cloud="${config['pms_is_cloud']}"
            data-label="${config['pms_name'] or 'Local'}"

@@ -800,13 +799,6 @@
     </label>
     <p class="help-block">Connect to your Plex server using HTTPS if you have <a href="${anon_url('https://support.plex.tv/articles/206225077-how-to-use-secure-server-connections')}" target="_blank" rel="noreferrer">secure connections</a> enabled.</p>
 </div>
-<div class="checkbox">
-    <label>
-        <input type="checkbox" id="pms_is_remote_checkbox" class="checkbox-toggle pms-settings" data-id="pms_is_remote" value="1" ${checked(config['pms_is_remote'])}> Remote Server
-        <input type="hidden" id="pms_is_remote" name="pms_is_remote" value="${config['pms_is_remote']}">
-    </label>
-    <p class="help-block">Check this if your Plex Server is not on the same local network as Tautulli.</p>
-</div>
 <div class="form-group">
     <label for="pms_url">Plex Server URL</label>
     <div class="row">

@@ -2597,7 +2589,6 @@ $(document).ready(function() {
             return '<div data-identifier="' + item.clientIdentifier +
                 '" data-ip="' + item.ip +
                 '" data-port="' + item.port +
-                '" data-local="' + item.local +
                 '" data-ssl="' + item.httpsRequired +
                 '" data-is_cloud="' + item.is_cloud +
                 '" data-label="' + item.label + '">' +

@@ -2611,7 +2602,6 @@ $(document).ready(function() {
             return '<div data-identifier="' + item.clientIdentifier +
                 '" data-ip="' + item.ip +
                 '" data-port="' + item.port +
-                '" data-local="' + item.local +
                 '" data-ssl="' + item.httpsRequired +
                 '" data-is_cloud="' + item.is_cloud +
                 '" data-label="' + item.label + '">' +

@@ -2634,7 +2624,6 @@ $(document).ready(function() {
         var identifier = $(pms_ip_selected).data('identifier');
         var ip = $(pms_ip_selected).data('ip');
         var port = $(pms_ip_selected).data('port');
-        var local = $(pms_ip_selected).data('local');
         var ssl = $(pms_ip_selected).data('ssl');
         var is_cloud = $(pms_ip_selected).data('is_cloud');
         var value = $(pms_ip_selected).data('value');

@@ -2642,8 +2631,6 @@ $(document).ready(function() {
         $("#pms_identifier").val(identifier !== 'undefined' ? identifier : '');
         $('#pms_ip').val(ip !== 'undefined' ? ip : value);
         $('#pms_port').val(port !== 'undefined' ? port : 32400);
-        $('#pms_is_remote_checkbox').prop('checked', (local !== 'undefined' && local === 0));
-        $('#pms_is_remote').val(local !== 'undefined' && local === 0 ? 1 : 0);
         $('#pms_ssl_checkbox').prop('checked', (ssl !== 'undefined' && ssl === 1));
         $('#pms_ssl').val(ssl !== 'undefined' && ssl === 1 ? 1 : 0);
         $('#pms_is_cloud').val(is_cloud !== 'undefined' && is_cloud === true ? 1 : 0);

@@ -2681,7 +2668,6 @@ $(document).ready(function() {
         var pms_port = $("#pms_port").val();
         var pms_identifier = $("#pms_identifier").val();
         var pms_ssl = $("#pms_ssl").val();
-        var pms_is_remote = $("#pms_is_remote").val();
         var pms_url_manual = $("#pms_url_manual").is(':checked') ? 1 : 0;

         if (($("#pms_ip").val() !== '') || ($("#pms_port").val() !== '')) {

@@ -2693,7 +2679,6 @@ $(document).ready(function() {
             hostname: pms_ip,
             port: pms_port,
             ssl: pms_ssl,
-            remote: pms_is_remote,
             manual: pms_url_manual,
             get_url: true,
             test_websocket: true
@@ -68,14 +68,14 @@ DOCUMENTATION :: END
 <table class="stream-info" style="margin-top: 0;">
     <thead>
         <tr>
-            <th>
-            </th>
-            <th class="heading">
-                Stream Details
-            </th>
+            <th></th>
+            <th class="heading">
+                Source Details
+            </th>
+            <th><i class="fa fa-long-arrow-right"></i></th>
+            <th class="heading">
+                Stream Details
+            </th>
         </tr>
     </thead>
 </table>

@@ -85,38 +85,46 @@ DOCUMENTATION :: END
         <tr>
             <th>
                 Media
             </th>
             <th></th>
+            <th></th>
+            <th></th>
         </tr>
     </thead>
     <tbody>
         <tr>
             <td>Bitrate</td>
-            <td>${data['stream_bitrate']} ${'kbps' if data['stream_bitrate'] else ''}</td>
+            <td>${data['bitrate']} ${'kbps' if data['bitrate'] else ''}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_bitrate']} ${'kbps' if data['stream_bitrate'] else ''}</td>
         </tr>
         % if data['media_type'] != 'track':
         <tr>
             <td>Resolution</td>
-            <td>${data['stream_video_full_resolution']}</td>
+            <td>${data['video_full_resolution']}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_video_full_resolution']}</td>
         </tr>
         % endif
         <tr>
             <td>Quality</td>
-            <td>${data['quality_profile']}</td>
+            <td>-</td>
+            <td></td>
+            <td>${data['quality_profile']}</td>
         </tr>
         % if data['optimized_version'] == 1:
         <tr>
             <td>Optimized Version</td>
-            <td>-</td>
+            <td>${data['optimized_version_profile']}<br>(${data['optimized_version_title']})</td>
+            <td></td>
+            <td>-</td>
         </tr>
         % endif
         % if data['synced_version'] == 1:
         <tr>
             <td>Synced Version</td>
-            <td>-</td>
+            <td>${data['synced_version_profile']}</td>
+            <td></td>
+            <td>-</td>
         </tr>
         % endif
     </tbody>

@@ -127,6 +135,8 @@ DOCUMENTATION :: END
         <tr>
             <th>
                 Container
             </th>
+            <th></th>
+            <th></th>
             <th>
                 ${data['stream_container_decision']}
             </th>

@@ -135,8 +145,9 @@ DOCUMENTATION :: END
     <tbody>
         <tr>
             <td>Container</td>
-            <td>${data['stream_container'].upper()}</td>
+            <td>${data['container'].upper()}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_container'].upper()}</td>
         </tr>
     </tbody>
 </table>

@@ -147,6 +158,8 @@ DOCUMENTATION :: END
         <tr>
             <th>
                 Video
             </th>
+            <th></th>
+            <th></th>
             <th>
                 ${data['stream_video_decision']}
             </th>

@@ -155,38 +168,45 @@ DOCUMENTATION :: END
     <tbody>
         <tr>
             <td>Codec</td>
-            <td>${data['stream_video_codec'].upper()} ${'(HW)' if data['transcode_hw_encoding'] else ''}</td>
+            <td>${data['video_codec'].upper()} ${'(HW)' if data['transcode_hw_decoding'] else ''}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_video_codec'].upper()} ${'(HW)' if data['transcode_hw_encoding'] else ''}</td>
         </tr>
         <tr>
             <td>Bitrate</td>
-            <td>${data['stream_video_bitrate']} ${'kbps' if data['stream_video_bitrate'] else ''}</td>
+            <td>${data['video_bitrate']} ${'kbps' if data['video_bitrate'] else ''}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_video_bitrate']} ${'kbps' if data['stream_video_bitrate'] else ''}</td>
         </tr>
         <tr>
             <td>Width</td>
-            <td>${data['stream_video_width']}</td>
+            <td>${data['video_width']}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_video_width']}</td>
         </tr>
         <tr>
             <td>Height</td>
-            <td>${data['stream_video_height']}</td>
+            <td>${data['video_height']}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_video_height']}</td>
        </tr>
         <tr>
             <td>Framerate</td>
-            <td>${data['stream_video_framerate']}</td>
+            <td>${data['video_framerate']}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_video_framerate']}</td>
         </tr>
         <tr>
             <td>Dynamic Range</td>
-            <td>${data['stream_video_dynamic_range']}</td>
+            <td>${data['video_dynamic_range']}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_video_dynamic_range']}</td>
         </tr>
         <tr>
             <td>Aspect Ratio</td>
-            <td>-</td>
+            <td>${data['aspect_ratio']}</td>
+            <td></td>
+            <td>-</td>
         </tr>
     </tbody>
 </table>

@@ -197,6 +217,8 @@ DOCUMENTATION :: END
         <tr>
             <th>
                 Audio
             </th>
+            <th></th>
+            <th></th>
             <th>
                 ${data['stream_audio_decision']}
             </th>

@@ -205,23 +227,27 @@ DOCUMENTATION :: END
     <tbody>
         <tr>
             <td>Codec</td>
-            <td>${AUDIO_CODEC_OVERRIDES.get(data['stream_audio_codec'], data['stream_audio_codec'].upper())}</td>
+            <td>${AUDIO_CODEC_OVERRIDES.get(data['audio_codec'], data['audio_codec'].upper())}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${AUDIO_CODEC_OVERRIDES.get(data['stream_audio_codec'], data['stream_audio_codec'].upper())}</td>
         </tr>
         <tr>
             <td>Bitrate</td>
-            <td>${data['stream_audio_bitrate']} ${'kbps' if data['stream_audio_bitrate'] else ''}</td>
+            <td>${data['audio_bitrate']} ${'kbps' if data['audio_bitrate'] else ''}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_audio_bitrate']} ${'kbps' if data['stream_audio_bitrate'] else ''}</td>
         </tr>
         <tr>
             <td>Channels</td>
-            <td>${data['stream_audio_channels']}</td>
+            <td>${data['audio_channels']}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_audio_channels']}</td>
         </tr>
         <tr>
             <td>Language</td>
-            <td>-</td>
+            <td>${data['audio_language'] or 'Unknown'}</td>
+            <td></td>
+            <td>-</td>
         </tr>
     </tbody>

@@ -233,6 +259,8 @@ DOCUMENTATION :: END
         <tr>
             <th>
                 Subtitles
             </th>
+            <th></th>
+            <th></th>
             <th>
                 ${'direct play' if data['stream_subtitle_decision'] not in ('transcode', 'copy', 'burn') else data['stream_subtitle_decision']}
             </th>

@@ -241,19 +269,22 @@ DOCUMENTATION :: END
     <tbody>
         <tr>
             <td>Codec</td>
-            <td>${data['stream_subtitle_codec'].upper() or '-'}</td>
+            <td>${data['subtitle_codec'].upper()}</td>
+            <td><i class="fa fa-long-arrow-right"></i></td>
+            <td>${data['stream_subtitle_codec'].upper() or '-'}</td>
         </tr>
         <tr>
             <td>Language</td>
-            <td>-</td>
+            <td>${data['subtitle_language'] or 'Unknown'}</td>
+            <td></td>
+            <td>-</td>
         </tr>
         % if data['subtitle_forced']:
         <tr>
             <td>Forced</td>
-            <td>-</td>
+            <td>${bool(data['subtitle_forced'])}</td>
+            <td></td>
+            <td>-</td>
         </tr>
         % endif
     </tbody>
@@ -125,10 +125,10 @@ DOCUMENTATION :: END
 <div class="table-card-header">
     <ul class="nav nav-header nav-dashboard pull-right">
         <li>
-            <a href="#" id="recently-watched-page-left" class="paginate btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a>
+            <a href="#" id="recently-watched-page-left" class="paginate-watched btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
         </li>
         <li>
-            <a href="#" id="recently-watched-page-right" class="paginate btn-gray" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a>
+            <a href="#" id="recently-watched-page-right" class="paginate-watched btn-gray" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
         </li>
     </ul>
     <div class="header-bar">

@@ -666,52 +666,14 @@ DOCUMENTATION :: END
     },
     complete: function(xhr, status) {
         $("#user-recently-watched").html(xhr.responseText);
-        highlightWatchedScrollerButton();
+        highlightScrollerButton("#recently-watched");
+        paginateScroller("#recently-watched", ".paginate-watched");
     }
 });
 }

 recentlyWatched();

-function highlightWatchedScrollerButton() {
-    var scroller = $("#recently-watched-row-scroller");
-    var numElems = scroller.find("li").length;
-    scroller.width(numElems * 175);
-    if (scroller.width() > $("#user-recently-watched").width()) {
-        $("#recently-watched-page-right").removeClass("disabled");
-    } else {
-        $("#recently-watched-page-right").addClass("disabled");
-    }
-}
-
-$(window).resize(function() {
-    highlightWatchedScrollerButton();
-});
-
-var leftTotal = 0;
-$(".paginate").click(function (e) {
-    e.preventDefault();
-    var scroller = $("#recently-watched-row-scroller");
-    var containerWidth = $("#user-recently-watched").width();
-    var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
-    var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
-
-    leftTotal = Math.max(Math.min(leftTotal + scrollAmount, 0), leftMax);
-    scroller.animate({ left: leftTotal }, 250);
-
-    if (leftTotal == 0) {
-        $("#recently-watched-page-left").addClass("disabled").blur();
-    } else {
-        $("#recently-watched-page-left").removeClass("disabled");
-    }
-
-    if (leftTotal == leftMax) {
-        $("#recently-watched-page-right").addClass("disabled").blur();
-    } else {
-        $("#recently-watched-page-right").removeClass("disabled");
-    }
-});
-
 $(document).ready(function () {
     // Javascript to enable link to tab
     var hash = document.location.hash;
@@ -31,7 +31,7 @@ DOCUMENTATION :: END
 from plexpy.helpers import page, short_season
 %>
 <div class="dashboard-recent-media-row">
-    <div id="recently-watched-row-scroller" style="left: 0;">
+    <div id="recently-watched-row-scroller">
         <ul class="dashboard-recent-media list-unstyled">
         % for item in data:
             <li>
|
@@ -135,7 +135,6 @@
data-identifier="${config['pms_identifier']}"
data-ip="${config['pms_ip']}"
data-port="${config['pms_port']}"
data-local="${int(not int(config['pms_is_remote']))}"
data-ssl="${config['pms_ssl']}"
data-is_cloud="${config['pms_is_cloud']}"
data-label="${config['pms_name'] or 'Local'}"
@@ -151,7 +150,7 @@
<div class="col-xs-3">
    <input type="text" class="form-control pms-settings" name="pms_port" id="pms_port" placeholder="32400" value="${config['pms_port']}" required>
</div>
<div class="col-xs-4">
<div class="col-xs-9">
    <div class="checkbox">
        <label>
            <input type="checkbox" id="pms_ssl_checkbox" class="checkbox-toggle pms-settings" data-id="pms_ssl" value="1" ${helpers.checked(config['pms_ssl'])}> Use Secure Connection
@@ -159,14 +158,6 @@
        </label>
    </div>
</div>
<div class="col-xs-4">
    <div class="checkbox">
        <label>
            <input type="checkbox" id="pms_is_remote_checkbox" class="checkbox-toggle pms-settings" data-id="pms_is_remote" value="1" ${helpers.checked(config['pms_is_remote'])}> Remote Server
            <input type="hidden" id="pms_is_remote" name="pms_is_remote" value="${config['pms_is_remote']}">
        </label>
    </div>
</div>
</div>
</div>
<input type="hidden" id="pms_valid" data-validate="validatePMSip" value="">
@@ -391,7 +382,6 @@ $(document).ready(function() {
return '<div data-identifier="' + item.clientIdentifier +
    '" data-ip="' + item.ip +
    '" data-port="' + item.port +
    '" data-local="' + item.local +
    '" data-ssl="' + item.httpsRequired +
    '" data-is_cloud="' + item.is_cloud +
    '" data-label="' + item.label + '">' +
@@ -405,7 +395,6 @@ $(document).ready(function() {
return '<div data-identifier="' + item.clientIdentifier +
    '" data-ip="' + item.ip +
    '" data-port="' + item.port +
    '" data-local="' + item.local +
    '" data-ssl="' + item.httpsRequired +
    '" data-is_cloud="' + item.is_cloud +
    '" data-label="' + item.label + '">' +
@@ -428,7 +417,6 @@ $(document).ready(function() {
var identifier = $(pms_ip_selected).data('identifier');
var ip = $(pms_ip_selected).data('ip');
var port = $(pms_ip_selected).data('port');
var local = $(pms_ip_selected).data('local');
var ssl = $(pms_ip_selected).data('ssl');
var is_cloud = $(pms_ip_selected).data('is_cloud');
var value = $(pms_ip_selected).data('value');
@@ -439,19 +427,15 @@ $(document).ready(function() {
$("#pms_identifier").val(identifier !== 'undefined' ? identifier : '');
$('#pms_ip').val(ip !== 'undefined' ? ip : value);
$('#pms_port').val(port !== 'undefined' ? port : 32400);
$('#pms_is_remote_checkbox').prop('checked', (local !== 'undefined' && local === 0));
$('#pms_is_remote').val(local !== 'undefined' && local === 0 ? 1 : 0);
$('#pms_ssl_checkbox').prop('checked', (ssl !== 'undefined' && ssl === 1));
$('#pms_ssl').val(ssl !== 'undefined' && ssl === 1 ? 1 : 0);
$('#pms_is_cloud').val(is_cloud !== 'undefined' && is_cloud === true ? 1 : 0);

if (is_cloud === true) {
    $('#pms_port').prop('readonly', true);
    $('#pms_is_remote_checkbox').prop('disabled', true);
    $('#pms_ssl_checkbox').prop('disabled', true);
} else {
    $('#pms_port').prop('readonly', false);
    $('#pms_is_remote_checkbox').prop('disabled', false);
    $('#pms_ssl_checkbox').prop('disabled', false);
}
},
@@ -488,7 +472,6 @@ $(document).ready(function() {
var pms_port = $("#pms_port").val().trim();
var pms_identifier = $("#pms_identifier").val();
var pms_ssl = $("#pms_ssl").val();
var pms_is_remote = $("#pms_is_remote").val();
if ((pms_ip !== '') || (pms_port !== '')) {
    $("#pms-verify-status").html('<i class="fa fa-refresh fa-spin"></i> Verifying server...');
    $('#pms-verify-status').fadeIn('fast');
@@ -498,8 +481,7 @@ $(document).ready(function() {
hostname: pms_ip,
port: pms_port,
identifier: pms_identifier,
ssl: pms_ssl,
remote: pms_is_remote
ssl: pms_ssl
},
cache: true,
async: true,
@@ -1 +1 @@
__path__ = __import__("pkgutil").extend_path(__path__, __name__)
__path__ = __import__('pkgutil').extend_path(__path__, __name__)  # type: ignore
@@ -1,4 +1,3 @@
#!/usr/bin/env python3
#-------------------------------------------------------------------
# tarfile.py
#-------------------------------------------------------------------
@@ -46,7 +45,6 @@ import time
import struct
import copy
import re
import warnings

from .compat.py38 import removesuffix

@@ -639,6 +637,10 @@ class _FileInFile(object):
    def flush(self):
        pass

    @property
    def mode(self):
        return 'rb'

    def readable(self):
        return True

@@ -875,7 +877,7 @@ class TarInfo(object):
        pax_headers = ('A dictionary containing key-value pairs of an '
                       'associated pax extended header.'),
        sparse = 'Sparse member information.',
        tarfile = None,
        _tarfile = None,
        _sparse_structs = None,
        _link_target = None,
        )
@@ -904,6 +906,24 @@ class TarInfo(object):
        self.sparse = None       # sparse member information
        self.pax_headers = {}    # pax header information

    @property
    def tarfile(self):
        import warnings
        warnings.warn(
            'The undocumented "tarfile" attribute of TarInfo objects '
            + 'is deprecated and will be removed in Python 3.16',
            DeprecationWarning, stacklevel=2)
        return self._tarfile

    @tarfile.setter
    def tarfile(self, tarfile):
        import warnings
        warnings.warn(
            'The undocumented "tarfile" attribute of TarInfo objects '
            + 'is deprecated and will be removed in Python 3.16',
            DeprecationWarning, stacklevel=2)
        self._tarfile = tarfile

    @property
    def path(self):
        'In pax headers, "name" is called "path".'
@@ -1198,7 +1218,7 @@
        for keyword, value in pax_headers.items():
            keyword = keyword.encode("utf-8")
            if binary:
                # Try to restore the original byte representation of `value'.
                # Try to restore the original byte representation of 'value'.
                # Needless to say, that the encoding must match the string.
                value = value.encode(encoding, "surrogateescape")
            else:
@@ -1643,14 +1663,14 @@ class TarFile(object):
    def __init__(self, name=None, mode="r", fileobj=None, format=None,
            tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
            errors="surrogateescape", pax_headers=None, debug=None,
            errorlevel=None, copybufsize=None):
        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
            errorlevel=None, copybufsize=None, stream=False):
        """Open an (uncompressed) tar archive 'name'. 'mode' is either 'r' to
        read from an existing archive, 'a' to append data to an existing
        file or 'w' to create a new file overwriting an existing one. `mode'
        file or 'w' to create a new file overwriting an existing one. 'mode'
        defaults to 'r'.
        If `fileobj' is given, it is used for reading or writing data. If it
        can be determined, `mode' is overridden by `fileobj's mode.
        `fileobj' is not closed, when TarFile is closed.
        If 'fileobj' is given, it is used for reading or writing data. If it
        can be determined, 'mode' is overridden by 'fileobj's mode.
        'fileobj' is not closed, when TarFile is closed.
        """
        modes = {"r": "rb", "a": "r+b", "w": "wb", "x": "xb"}
        if mode not in modes:
@@ -1675,6 +1695,8 @@ class TarFile(object):
        self.name = os.path.abspath(name) if name else None
        self.fileobj = fileobj

        self.stream = stream

        # Init attributes.
        if format is not None:
            self.format = format
@@ -1977,7 +1999,7 @@ class TarFile(object):
            self.fileobj.close()

    def getmember(self, name):
        """Return a TarInfo object for member ``name``. If ``name`` can not be
        """Return a TarInfo object for member 'name'. If 'name' can not be
        found in the archive, KeyError is raised. If a member occurs more
        than once in the archive, its last occurrence is assumed to be the
        most up-to-date version.
@@ -2005,9 +2027,9 @@ class TarFile(object):

    def gettarinfo(self, name=None, arcname=None, fileobj=None):
        """Create a TarInfo object from the result of os.stat or equivalent
        on an existing file. The file is either named by ``name``, or
        specified as a file object ``fileobj`` with a file descriptor. If
        given, ``arcname`` specifies an alternative name for the file in the
        on an existing file. The file is either named by 'name', or
        specified as a file object 'fileobj' with a file descriptor. If
        given, 'arcname' specifies an alternative name for the file in the
        archive, otherwise, the name is taken from the 'name' attribute of
        'fileobj', or the 'name' argument. The name should be a text
        string.
@@ -2031,7 +2053,7 @@ class TarFile(object):
        # Now, fill the TarInfo object with
        # information specific for the file.
        tarinfo = self.tarinfo()
        tarinfo.tarfile = self  # Not needed
        tarinfo._tarfile = self  # To be removed in 3.16.

        # Use os.stat or os.lstat, depending on if symlinks shall be resolved.
        if fileobj is None:
@@ -2103,11 +2125,15 @@ class TarFile(object):
        return tarinfo

    def list(self, verbose=True, *, members=None):
        """Print a table of contents to sys.stdout. If ``verbose`` is False, only
        the names of the members are printed. If it is True, an `ls -l'-like
        output is produced. ``members`` is optional and must be a subset of the
        """Print a table of contents to sys.stdout. If 'verbose' is False, only
        the names of the members are printed. If it is True, an 'ls -l'-like
        output is produced. 'members' is optional and must be a subset of the
        list returned by getmembers().
        """
        # Convert tarinfo type to stat type.
        type2mode = {REGTYPE: stat.S_IFREG, SYMTYPE: stat.S_IFLNK,
                     FIFOTYPE: stat.S_IFIFO, CHRTYPE: stat.S_IFCHR,
                     DIRTYPE: stat.S_IFDIR, BLKTYPE: stat.S_IFBLK}
        self._check()

        if members is None:
@@ -2117,7 +2143,8 @@ class TarFile(object):
            if tarinfo.mode is None:
                _safe_print("??????????")
            else:
                _safe_print(stat.filemode(tarinfo.mode))
                modetype = type2mode.get(tarinfo.type, 0)
                _safe_print(stat.filemode(modetype | tarinfo.mode))
            _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid,
                                   tarinfo.gname or tarinfo.gid))
            if tarinfo.ischr() or tarinfo.isblk():
@@ -2141,11 +2168,11 @@ class TarFile(object):
            print()

    def add(self, name, arcname=None, recursive=True, *, filter=None):
        """Add the file ``name`` to the archive. ``name`` may be any type of file
        (directory, fifo, symbolic link, etc.). If given, ``arcname``
        """Add the file 'name' to the archive. 'name' may be any type of file
        (directory, fifo, symbolic link, etc.). If given, 'arcname'
        specifies an alternative name for the file in the archive.
        Directories are added recursively by default. This can be avoided by
        setting ``recursive`` to False. ``filter`` is a function
        setting 'recursive' to False. 'filter' is a function
        that expects a TarInfo object argument and returns the changed
        TarInfo object, if it returns None the TarInfo object will be
        excluded from the archive.
@@ -2192,13 +2219,16 @@ class TarFile(object):
            self.addfile(tarinfo)

    def addfile(self, tarinfo, fileobj=None):
        """Add the TarInfo object ``tarinfo`` to the archive. If ``fileobj`` is
        given, it should be a binary file, and tarinfo.size bytes are read
        from it and added to the archive. You can create TarInfo objects
        directly, or by using gettarinfo().
        """Add the TarInfo object 'tarinfo' to the archive. If 'tarinfo' represents
        a non zero-size regular file, the 'fileobj' argument should be a binary file,
        and tarinfo.size bytes are read from it and added to the archive.
        You can create TarInfo objects directly, or by using gettarinfo().
        """
        self._check("awx")

        if fileobj is None and tarinfo.isreg() and tarinfo.size != 0:
            raise ValueError("fileobj not provided for non zero-size regular file")

        tarinfo = copy.copy(tarinfo)

        buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
@@ -2220,11 +2250,12 @@ class TarFile(object):
        if filter is None:
            filter = self.extraction_filter
            if filter is None:
                import warnings
                warnings.warn(
                    'Python 3.14 will, by default, filter extracted tar '
                    + 'archives and reject files or modify their metadata. '
                    + 'Use the filter argument to control this behavior.',
                    DeprecationWarning)
                    DeprecationWarning, stacklevel=3)
                return fully_trusted_filter
            if isinstance(filter, str):
                raise TypeError(
@@ -2243,12 +2274,12 @@ class TarFile(object):
                   filter=None):
        """Extract all members from the archive to the current working
        directory and set owner, modification time and permissions on
        directories afterwards. `path' specifies a different directory
        to extract to. `members' is optional and must be a subset of the
        list returned by getmembers(). If `numeric_owner` is True, only
        directories afterwards. 'path' specifies a different directory
        to extract to. 'members' is optional and must be a subset of the
        list returned by getmembers(). If 'numeric_owner' is True, only
        the numbers for user/group names are used and not the names.

        The `filter` function will be called on each member just
        The 'filter' function will be called on each member just
        before extraction.
        It can return a changed TarInfo or None to skip the member.
        String names of common filters are accepted.
@@ -2288,13 +2319,13 @@ class TarFile(object):
                filter=None):
        """Extract a member from the archive to the current working directory,
        using its full name. Its file information is extracted as accurately
        as possible. `member' may be a filename or a TarInfo object. You can
        specify a different directory using `path'. File attributes (owner,
        mtime, mode) are set unless `set_attrs' is False. If `numeric_owner`
        as possible. 'member' may be a filename or a TarInfo object. You can
        specify a different directory using 'path'. File attributes (owner,
        mtime, mode) are set unless 'set_attrs' is False. If 'numeric_owner'
        is True, only the numbers for user/group names are used and not
        the names.

        The `filter` function will be called before extraction.
        The 'filter' function will be called before extraction.
        It can return a changed TarInfo or None to skip the member.
        String names of common filters are accepted.
        """
@@ -2359,10 +2390,10 @@ class TarFile(object):
            self._dbg(1, "tarfile: %s %s" % (type(e).__name__, e))

    def extractfile(self, member):
        """Extract a member from the archive as a file object. ``member`` may be
        a filename or a TarInfo object. If ``member`` is a regular file or
        """Extract a member from the archive as a file object. 'member' may be
        a filename or a TarInfo object. If 'member' is a regular file or
        a link, an io.BufferedReader object is returned. For all other
        existing members, None is returned. If ``member`` does not appear
        existing members, None is returned. If 'member' does not appear
        in the archive, KeyError is raised.
        """
        self._check("r")
@@ -2406,7 +2437,7 @@ class TarFile(object):
        if upperdirs and not os.path.exists(upperdirs):
            # Create directories that are not part of the archive with
            # default permissions.
            os.makedirs(upperdirs)
            os.makedirs(upperdirs, exist_ok=True)

        if tarinfo.islnk() or tarinfo.issym():
            self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
@@ -2559,7 +2590,8 @@ class TarFile(object):
                os.lchown(targetpath, u, g)
            else:
                os.chown(targetpath, u, g)
        except OSError as e:
        except (OSError, OverflowError) as e:
            # OverflowError can be raised if an ID doesn't fit in 'id_t'
            raise ExtractError("could not change owner") from e

    def chmod(self, tarinfo, targetpath):
@@ -2642,6 +2674,8 @@ class TarFile(object):
                break

        if tarinfo is not None:
            # if streaming the file we do not want to cache the tarinfo
            if not self.stream:
                self.members.append(tarinfo)
        else:
            self._loaded = True
@@ -2693,8 +2727,9 @@ class TarFile(object):

    def _load(self):
        """Read through the entire archive file and look for readable
        members.
        members. This should not run if the file is set to stream.
        """
        if not self.stream:
            while self.next() is not None:
                pass
        self._loaded = True
@@ -1,49 +0,0 @@
__all__ = [
    "ZoneInfo",
    "reset_tzpath",
    "available_timezones",
    "TZPATH",
    "ZoneInfoNotFoundError",
    "InvalidTZPathWarning",
]
import sys

from . import _tzpath
from ._common import ZoneInfoNotFoundError
from ._version import __version__

try:
    from ._czoneinfo import ZoneInfo
except ImportError:  # pragma: nocover
    from ._zoneinfo import ZoneInfo

reset_tzpath = _tzpath.reset_tzpath
available_timezones = _tzpath.available_timezones
InvalidTZPathWarning = _tzpath.InvalidTZPathWarning

if sys.version_info < (3, 7):
    # Module-level __getattr__ was added in Python 3.7, so instead of lazily
    # populating TZPATH on every access, we will register a callback with
    # reset_tzpath to update the top-level tuple.
    TZPATH = _tzpath.TZPATH

    def _tzpath_callback(new_tzpath):
        global TZPATH
        TZPATH = new_tzpath

    _tzpath.TZPATH_CALLBACKS.append(_tzpath_callback)
    del _tzpath_callback

else:

    def __getattr__(name):
        if name == "TZPATH":
            return _tzpath.TZPATH
        else:
            raise AttributeError(
                f"module {__name__!r} has no attribute {name!r}"
            )

    def __dir__():
        return sorted(list(globals()) + ["TZPATH"])
@@ -1,45 +0,0 @@
import os
import typing
from datetime import datetime, tzinfo
from typing import (
    Any,
    Iterable,
    Optional,
    Protocol,
    Sequence,
    Set,
    Type,
    Union,
)

_T = typing.TypeVar("_T", bound="ZoneInfo")

class _IOBytes(Protocol):
    def read(self, __size: int) -> bytes: ...
    def seek(self, __size: int, __whence: int = ...) -> Any: ...

class ZoneInfo(tzinfo):
    @property
    def key(self) -> str: ...
    def __init__(self, key: str) -> None: ...
    @classmethod
    def no_cache(cls: Type[_T], key: str) -> _T: ...
    @classmethod
    def from_file(
        cls: Type[_T], __fobj: _IOBytes, key: Optional[str] = ...
    ) -> _T: ...
    @classmethod
    def clear_cache(cls, *, only_keys: Iterable[str] = ...) -> None: ...

# Note: Both here and in clear_cache, the types allow the use of `str` where
# a sequence of strings is required. This should be remedied if a solution
# to this typing bug is found: https://github.com/python/typing/issues/256
def reset_tzpath(
    to: Optional[Sequence[Union[os.PathLike, str]]] = ...
) -> None: ...
def available_timezones() -> Set[str]: ...

TZPATH: Sequence[str]

class ZoneInfoNotFoundError(KeyError): ...
class InvalidTZPathWarning(RuntimeWarning): ...
@@ -1,171 +0,0 @@
import struct


def load_tzdata(key):
    try:
        import importlib.resources as importlib_resources
    except ImportError:
        import importlib_resources

    components = key.split("/")
    package_name = ".".join(["tzdata.zoneinfo"] + components[:-1])
    resource_name = components[-1]

    try:
        return importlib_resources.open_binary(package_name, resource_name)
    except (ImportError, FileNotFoundError, UnicodeEncodeError):
        # There are three types of exception that can be raised that all amount
        # to "we cannot find this key":
        #
        # ImportError: If package_name doesn't exist (e.g. if tzdata is not
        #   installed, or if there's an error in the folder name like
        #   Amrica/New_York)
        # FileNotFoundError: If resource_name doesn't exist in the package
        #   (e.g. Europe/Krasnoy)
        # UnicodeEncodeError: If package_name or resource_name are not UTF-8,
        #   such as keys containing a surrogate character.
        raise ZoneInfoNotFoundError(f"No time zone found with key {key}")


def load_data(fobj):
    header = _TZifHeader.from_file(fobj)

    if header.version == 1:
        time_size = 4
        time_type = "l"
    else:
        # Version 2+ has 64-bit integer transition times
        time_size = 8
        time_type = "q"

        # Version 2+ also starts with a Version 1 header and data, which
        # we need to skip now
        skip_bytes = (
            header.timecnt * 5  # Transition times and types
            + header.typecnt * 6  # Local time type records
            + header.charcnt  # Time zone designations
            + header.leapcnt * 8  # Leap second records
            + header.isstdcnt  # Standard/wall indicators
            + header.isutcnt  # UT/local indicators
        )

        fobj.seek(skip_bytes, 1)

        # Now we need to read the second header, which is not the same
        # as the first
        header = _TZifHeader.from_file(fobj)

    typecnt = header.typecnt
    timecnt = header.timecnt
    charcnt = header.charcnt

    # The data portion starts with timecnt transitions and indices
    if timecnt:
        trans_list_utc = struct.unpack(
            f">{timecnt}{time_type}", fobj.read(timecnt * time_size)
        )
        trans_idx = struct.unpack(f">{timecnt}B", fobj.read(timecnt))
    else:
        trans_list_utc = ()
        trans_idx = ()

    # Read the ttinfo struct, (utoff, isdst, abbrind)
    if typecnt:
        utcoff, isdst, abbrind = zip(
            *(struct.unpack(">lbb", fobj.read(6)) for i in range(typecnt))
        )
    else:
        utcoff = ()
        isdst = ()
        abbrind = ()

    # Now read the abbreviations. They are null-terminated strings, indexed
    # not by position in the array but by position in the unsplit
    # abbreviation string. I suppose this makes more sense in C, which uses
    # null to terminate the strings, but it's inconvenient here...
    abbr_vals = {}
    abbr_chars = fobj.read(charcnt)

    def get_abbr(idx):
        # Gets a string starting at idx and running until the next \x00
        #
        # We cannot pre-populate abbr_vals by splitting on \x00 because there
        # are some zones that use subsets of longer abbreviations, like so:
        #
        #  LMT\x00AHST\x00HDT\x00
        #
        # Where the idx to abbr mapping should be:
        #
        # {0: "LMT", 4: "AHST", 5: "HST", 9: "HDT"}
        if idx not in abbr_vals:
            span_end = abbr_chars.find(b"\x00", idx)
            abbr_vals[idx] = abbr_chars[idx:span_end].decode()

        return abbr_vals[idx]

    abbr = tuple(get_abbr(idx) for idx in abbrind)

    # The remainder of the file consists of leap seconds (currently unused) and
    # the standard/wall and ut/local indicators, which are metadata we don't need.
    # In version 2 files, we need to skip the unnecessary data to get at the TZ string:
    if header.version >= 2:
        # Each leap second record has size (time_size + 4)
        skip_bytes = header.isutcnt + header.isstdcnt + header.leapcnt * 12
        fobj.seek(skip_bytes, 1)

        c = fobj.read(1)  # Should be \n
        assert c == b"\n", c

        tz_bytes = b""
        while True:
            c = fobj.read(1)
            if c == b"\n":
                break
            tz_bytes += c

        tz_str = tz_bytes
    else:
        tz_str = None

    return trans_idx, trans_list_utc, utcoff, isdst, abbr, tz_str


class _TZifHeader:
    __slots__ = [
        "version",
        "isutcnt",
        "isstdcnt",
        "leapcnt",
        "timecnt",
        "typecnt",
        "charcnt",
    ]

    def __init__(self, *args):
        assert len(self.__slots__) == len(args)
        for attr, val in zip(self.__slots__, args):
            setattr(self, attr, val)

    @classmethod
    def from_file(cls, stream):
        # The header starts with a 4-byte "magic" value
        if stream.read(4) != b"TZif":
            raise ValueError("Invalid TZif file: magic not found")

        _version = stream.read(1)
        if _version == b"\x00":
            version = 1
        else:
            version = int(_version)
        stream.read(15)

        args = (version,)

        # Slots are defined in the order that the bytes are arranged
        args = args + struct.unpack(">6l", stream.read(24))

        return cls(*args)


class ZoneInfoNotFoundError(KeyError):
    """Exception raised when a ZoneInfo key is not found."""
@@ -1,207 +0,0 @@
import os
import sys

PY36 = sys.version_info < (3, 7)


def reset_tzpath(to=None):
    global TZPATH

    tzpaths = to
    if tzpaths is not None:
        if isinstance(tzpaths, (str, bytes)):
            raise TypeError(
                f"tzpaths must be a list or tuple, "
                + f"not {type(tzpaths)}: {tzpaths!r}"
            )

        if not all(map(os.path.isabs, tzpaths)):
            raise ValueError(_get_invalid_paths_message(tzpaths))
        base_tzpath = tzpaths
    else:
        env_var = os.environ.get("PYTHONTZPATH", None)
        if env_var is not None:
            base_tzpath = _parse_python_tzpath(env_var)
        elif sys.platform != "win32":
            base_tzpath = [
                "/usr/share/zoneinfo",
                "/usr/lib/zoneinfo",
                "/usr/share/lib/zoneinfo",
                "/etc/zoneinfo",
            ]

            base_tzpath.sort(key=lambda x: not os.path.exists(x))
        else:
            base_tzpath = ()

    TZPATH = tuple(base_tzpath)

    if TZPATH_CALLBACKS:
        for callback in TZPATH_CALLBACKS:
            callback(TZPATH)


def _parse_python_tzpath(env_var):
    if not env_var:
        return ()

    raw_tzpath = env_var.split(os.pathsep)
    new_tzpath = tuple(filter(os.path.isabs, raw_tzpath))

    # If anything has been filtered out, we will warn about it
    if len(new_tzpath) != len(raw_tzpath):
        import warnings

        msg = _get_invalid_paths_message(raw_tzpath)

        warnings.warn(
            "Invalid paths specified in PYTHONTZPATH environment variable."
            + msg,
            InvalidTZPathWarning,
        )

    return new_tzpath


def _get_invalid_paths_message(tzpaths):
    invalid_paths = (path for path in tzpaths if not os.path.isabs(path))

    prefix = "\n    "
    indented_str = prefix + prefix.join(invalid_paths)

    return (
        "Paths should be absolute but found the following relative paths:"
        + indented_str
    )


if sys.version_info < (3, 8):

    def _isfile(path):
        # bpo-33721: In Python 3.8 non-UTF8 paths return False rather than
        # raising an error. See https://bugs.python.org/issue33721
        try:
            return os.path.isfile(path)
        except ValueError:
            return False


else:
    _isfile = os.path.isfile


def find_tzfile(key):
    """Retrieve the path to a TZif file from a key."""
    _validate_tzfile_path(key)
    for search_path in TZPATH:
        filepath = os.path.join(search_path, key)
        if _isfile(filepath):
            return filepath

    return None


_TEST_PATH = os.path.normpath(os.path.join("_", "_"))[:-1]


def _validate_tzfile_path(path, _base=_TEST_PATH):
    if os.path.isabs(path):
        raise ValueError(
            f"ZoneInfo keys may not be absolute paths, got: {path}"
        )

    # We only care about the kinds of path normalizations that would change the
    # length of the key - e.g. a/../b -> a/b, or a/b/ -> a/b. On Windows,
    # normpath will also change from a/b to a\b, but that would still preserve
    # the length.
    new_path = os.path.normpath(path)
    if len(new_path) != len(path):
        raise ValueError(
            f"ZoneInfo keys must be normalized relative paths, got: {path}"
        )

    resolved = os.path.normpath(os.path.join(_base, new_path))
    if not resolved.startswith(_base):
        raise ValueError(
            f"ZoneInfo keys must refer to subdirectories of TZPATH, got: {path}"
        )


del _TEST_PATH


def available_timezones():
    """Returns a set containing all available time zones.

    .. caution::

        This may attempt to open a large number of files, since the best way to
        determine if a given file on the time zone search path is to open it
        and check for the "magic string" at the beginning.
    """
    try:
        from importlib import resources
    except ImportError:
        import importlib_resources as resources

    valid_zones = set()

    # Start with loading from the tzdata package if it exists: this has a
    # pre-assembled list of zones that only requires opening one file.
    try:
        with resources.open_text("tzdata", "zones") as f:
            for zone in f:
                zone = zone.strip()
                if zone:
                    valid_zones.add(zone)
    except (ImportError, FileNotFoundError):
        pass

    def valid_key(fpath):
        try:
            with open(fpath, "rb") as f:
                return f.read(4) == b"TZif"
        except Exception:  # pragma: nocover
            return False

    for tz_root in TZPATH:
        if not os.path.exists(tz_root):
            continue

        for root, dirnames, files in os.walk(tz_root):
            if root == tz_root:
                # right/ and posix/ are special directories and shouldn't be
                # included in the output of available zones
                if "right" in dirnames:
                    dirnames.remove("right")
                if "posix" in dirnames:
                    dirnames.remove("posix")

            for file in files:
                fpath = os.path.join(root, file)

                key = os.path.relpath(fpath, start=tz_root)
                if os.sep != "/":  # pragma: nocover
                    key = key.replace(os.sep, "/")

                if not key or key in valid_zones:
                    continue

                if valid_key(fpath):
                    valid_zones.add(key)

    if "posixrules" in valid_zones:
        # posixrules is a special symlink-only time zone where it exists, it
        # should not be included in the output
        valid_zones.remove("posixrules")

    return valid_zones


class InvalidTZPathWarning(RuntimeWarning):
    """Warning raised if an invalid path is specified in PYTHONTZPATH."""


TZPATH = ()
TZPATH_CALLBACKS = []
reset_tzpath()
@@ -1 +0,0 @@
__version__ = "0.2.1"
@ -1,754 +0,0 @@
|
|||
import bisect
|
||||
import calendar
|
||||
import collections
|
||||
import functools
|
||||
import re
|
||||
import weakref
|
||||
from datetime import datetime, timedelta, tzinfo
|
||||
|
||||
from . import _common, _tzpath
|
||||
|
||||
EPOCH = datetime(1970, 1, 1)
|
||||
EPOCHORDINAL = datetime(1970, 1, 1).toordinal()
|
||||
|
||||
# It is relatively expensive to construct new timedelta objects, and in most
|
||||
# cases we're looking at the same deltas, like integer numbers of hours, etc.
|
||||
# To improve speed and memory use, we'll keep a dictionary with references
|
||||
# to the ones we've already used so far.
|
||||
#
|
||||
# Loading every time zone in the 2020a version of the time zone database
|
||||
# requires 447 timedeltas, which requires approximately the amount of space
|
||||
# that ZoneInfo("America/New_York") with 236 transitions takes up, so we will
|
||||
# set the cache size to 512 so that in the common case we always get cache
|
||||
# hits, but specifically crafted ZoneInfo objects don't leak arbitrary amounts
|
||||
# of memory.
|
||||
@functools.lru_cache(maxsize=512)
|
||||
def _load_timedelta(seconds):
|
||||
return timedelta(seconds=seconds)
|
||||
|
||||
|
||||
class ZoneInfo(tzinfo):
|
||||
_strong_cache_size = 8
|
||||
_strong_cache = collections.OrderedDict()
|
||||
_weak_cache = weakref.WeakValueDictionary()
|
||||
__module__ = "backports.zoneinfo"
|
||||
|
||||
def __init_subclass__(cls):
|
||||
cls._strong_cache = collections.OrderedDict()
|
||||
cls._weak_cache = weakref.WeakValueDictionary()
|
||||
|
||||
def __new__(cls, key):
|
||||
instance = cls._weak_cache.get(key, None)
|
||||
if instance is None:
|
||||
instance = cls._weak_cache.setdefault(key, cls._new_instance(key))
|
||||
instance._from_cache = True
|
||||
|
||||
# Update the "strong" cache
|
||||
cls._strong_cache[key] = cls._strong_cache.pop(key, instance)
|
||||
|
||||
if len(cls._strong_cache) > cls._strong_cache_size:
|
||||
cls._strong_cache.popitem(last=False)
|
||||
|
||||
return instance
|
||||
|
||||
@classmethod
|
||||
def no_cache(cls, key):
|
||||
obj = cls._new_instance(key)
|
||||
obj._from_cache = False
|
||||
|
||||
return obj
|
||||
|
||||
@classmethod
|
||||
def _new_instance(cls, key):
|
||||
obj = super().__new__(cls)
|
||||
obj._key = key
|
||||
obj._file_path = obj._find_tzfile(key)
|
||||
|
||||
if obj._file_path is not None:
|
||||
file_obj = open(obj._file_path, "rb")
|
||||
else:
|
||||
file_obj = _common.load_tzdata(key)
|
||||
|
||||
with file_obj as f:
|
||||
obj._load_file(f)
|
||||
|
||||
return obj
|
||||
|
||||
@classmethod
|
||||
def from_file(cls, fobj, key=None):
|
||||
obj = super().__new__(cls)
|
||||
obj._key = key
|
||||
obj._file_path = None
|
||||
obj._load_file(fobj)
|
||||
obj._file_repr = repr(fobj)
|
||||
|
||||
# Disable pickling for objects created from files
|
||||
obj.__reduce__ = obj._file_reduce
|
||||
|
||||
return obj
|
||||
|
||||
@classmethod
|
||||
def clear_cache(cls, *, only_keys=None):
|
||||
if only_keys is not None:
|
||||
for key in only_keys:
|
||||
cls._weak_cache.pop(key, None)
|
||||
cls._strong_cache.pop(key, None)
|
||||
|
||||
else:
|
||||
cls._weak_cache.clear()
|
||||
cls._strong_cache.clear()
|
||||
|
||||
@property
|
||||
def key(self):
|
||||
return self._key
|
||||
|
||||
def utcoffset(self, dt):
|
||||
return self._find_trans(dt).utcoff
|
||||
|
||||
def dst(self, dt):
|
||||
return self._find_trans(dt).dstoff
|
||||
|
||||
def tzname(self, dt):
|
||||
return self._find_trans(dt).tzname
|
||||
|
||||
def fromutc(self, dt):
|
||||
"""Convert from datetime in UTC to datetime in local time"""
|
||||
|
||||
if not isinstance(dt, datetime):
|
||||
raise TypeError("fromutc() requires a datetime argument")
|
||||
if dt.tzinfo is not self:
|
||||
raise ValueError("dt.tzinfo is not self")
|
||||
|
||||
timestamp = self._get_local_timestamp(dt)
|
||||
num_trans = len(self._trans_utc)
|
||||
|
||||
if num_trans >= 1 and timestamp < self._trans_utc[0]:
|
||||
tti = self._tti_before
|
||||
fold = 0
|
||||
elif (
|
||||
num_trans == 0 or timestamp > self._trans_utc[-1]
|
||||
) and not isinstance(self._tz_after, _ttinfo):
|
||||
tti, fold = self._tz_after.get_trans_info_fromutc(
|
||||
timestamp, dt.year
|
||||
)
|
||||
elif num_trans == 0:
|
||||
tti = self._tz_after
|
||||
fold = 0
|
||||
else:
|
||||
idx = bisect.bisect_right(self._trans_utc, timestamp)
|
||||
|
||||
if num_trans > 1 and timestamp >= self._trans_utc[1]:
|
||||
tti_prev, tti = self._ttinfos[idx - 2 : idx]
|
||||
elif timestamp > self._trans_utc[-1]:
|
||||
tti_prev = self._ttinfos[-1]
|
||||
tti = self._tz_after
|
||||
else:
|
||||
tti_prev = self._tti_before
|
||||
tti = self._ttinfos[0]
|
||||
|
||||
# Detect fold
|
||||
shift = tti_prev.utcoff - tti.utcoff
|
||||
fold = shift.total_seconds() > timestamp - self._trans_utc[idx - 1]
|
||||
dt += tti.utcoff
|
||||
if fold:
|
||||
return dt.replace(fold=1)
|
||||
else:
|
||||
return dt
|
||||
|
||||
def _find_trans(self, dt):
|
||||
if dt is None:
|
||||
if self._fixed_offset:
|
||||
return self._tz_after
|
||||
else:
|
||||
return _NO_TTINFO
|
||||
|
||||
ts = self._get_local_timestamp(dt)
|
||||
|
||||
lt = self._trans_local[dt.fold]
|
||||
|
||||
num_trans = len(lt)
|
||||
|
||||
if num_trans and ts < lt[0]:
|
||||
return self._tti_before
|
||||
elif not num_trans or ts > lt[-1]:
|
||||
if isinstance(self._tz_after, _TZStr):
|
||||
return self._tz_after.get_trans_info(ts, dt.year, dt.fold)
|
||||
else:
|
||||
return self._tz_after
|
||||
else:
|
||||
# idx is the transition that occurs after this timestamp, so we
|
||||
# subtract off 1 to get the current ttinfo
|
||||
idx = bisect.bisect_right(lt, ts) - 1
|
||||
assert idx >= 0
|
||||
return self._ttinfos[idx]
|
||||
|
||||
def _get_local_timestamp(self, dt):
|
||||
return (
|
||||
(dt.toordinal() - EPOCHORDINAL) * 86400
|
||||
+ dt.hour * 3600
|
||||
+ dt.minute * 60
|
||||
+ dt.second
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
if self._key is not None:
|
||||
return f"{self._key}"
|
||||
else:
|
||||
return repr(self)
|
||||
|
||||
def __repr__(self):
|
||||
if self._key is not None:
|
||||
return f"{self.__class__.__name__}(key={self._key!r})"
|
||||
else:
|
||||
return f"{self.__class__.__name__}.from_file({self._file_repr})"
|
||||
|
||||
def __reduce__(self):
|
||||
return (self.__class__._unpickle, (self._key, self._from_cache))
|
||||
|
||||
def _file_reduce(self):
|
||||
import pickle
|
||||
|
||||
raise pickle.PicklingError(
|
||||
"Cannot pickle a ZoneInfo file created from a file stream."
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def _unpickle(cls, key, from_cache):
|
||||
if from_cache:
|
||||
return cls(key)
|
||||
else:
|
||||
return cls.no_cache(key)
|
||||
|
||||
def _find_tzfile(self, key):
|
||||
return _tzpath.find_tzfile(key)
|
||||
|
||||
def _load_file(self, fobj):
|
||||
# Retrieve all the data as it exists in the zoneinfo file
|
||||
trans_idx, trans_utc, utcoff, isdst, abbr, tz_str = _common.load_data(
|
||||
fobj
|
||||
)
|
||||
|
||||
# Infer the DST offsets (needed for .dst()) from the data
|
||||
dstoff = self._utcoff_to_dstoff(trans_idx, utcoff, isdst)
|
||||
|
||||
# Convert all the transition times (UTC) into "seconds since 1970-01-01 local time"
|
||||
trans_local = self._ts_to_local(trans_idx, trans_utc, utcoff)
|
||||
|
||||
# Construct `_ttinfo` objects for each transition in the file
|
||||
_ttinfo_list = [
|
||||
_ttinfo(
|
||||
_load_timedelta(utcoffset), _load_timedelta(dstoffset), tzname
|
||||
)
|
||||
for utcoffset, dstoffset, tzname in zip(utcoff, dstoff, abbr)
|
||||
]
|
||||
|
||||
self._trans_utc = trans_utc
|
||||
self._trans_local = trans_local
|
||||
self._ttinfos = [_ttinfo_list[idx] for idx in trans_idx]
|
||||
|
||||
# Find the first non-DST transition
|
||||
for i in range(len(isdst)):
|
||||
if not isdst[i]:
|
||||
self._tti_before = _ttinfo_list[i]
|
||||
break
|
||||
else:
|
||||
if self._ttinfos:
|
||||
self._tti_before = self._ttinfos[0]
|
||||
else:
|
||||
self._tti_before = None
|
||||
|
||||
# Set the "fallback" time zone
|
||||
if tz_str is not None and tz_str != b"":
|
||||
self._tz_after = _parse_tz_str(tz_str.decode())
|
||||
else:
|
||||
if not self._ttinfos and not _ttinfo_list:
|
||||
raise ValueError("No time zone information found.")
|
||||
|
||||
if self._ttinfos:
|
||||
self._tz_after = self._ttinfos[-1]
|
||||
else:
|
||||
self._tz_after = _ttinfo_list[-1]
|
||||
|
||||
# Determine if this is a "fixed offset" zone, meaning that the output
|
||||
# of the utcoffset, dst and tzname functions does not depend on the
|
||||
# specific datetime passed.
|
||||
#
|
||||
# We make three simplifying assumptions here:
|
||||
#
|
||||
# 1. If _tz_after is not a _ttinfo, it has transitions that might
|
||||
# actually occur (it is possible to construct TZ strings that
|
||||
# specify STD and DST but no transitions ever occur, such as
|
||||
# AAA0BBB,0/0,J365/25).
|
||||
# 2. If _ttinfo_list contains more than one _ttinfo object, the objects
|
||||
# represent different offsets.
|
||||
# 3. _ttinfo_list contains no unused _ttinfos (in which case an
|
||||
# otherwise fixed-offset zone with extra _ttinfos defined may
|
||||
# appear to *not* be a fixed offset zone).
|
||||
#
|
||||
# Violations to these assumptions would be fairly exotic, and exotic
|
||||
# zones should almost certainly not be used with datetime.time (the
|
||||
# only thing that would be affected by this).
|
||||
if len(_ttinfo_list) > 1 or not isinstance(self._tz_after, _ttinfo):
|
||||
self._fixed_offset = False
|
||||
elif not _ttinfo_list:
|
||||
self._fixed_offset = True
|
||||
else:
|
||||
self._fixed_offset = _ttinfo_list[0] == self._tz_after
|
||||
|
||||
@staticmethod
|
||||
def _utcoff_to_dstoff(trans_idx, utcoffsets, isdsts):
|
||||
# Now we must transform our ttis and abbrs into `_ttinfo` objects,
|
||||
# but there is an issue: .dst() must return a timedelta with the
|
||||
# difference between utcoffset() and the "standard" offset, but
|
||||
# the "base offset" and "DST offset" are not encoded in the file;
|
||||
# we can infer what they are from the isdst flag, but it is not
|
||||
# sufficient to to just look at the last standard offset, because
|
||||
# occasionally countries will shift both DST offset and base offset.
|
||||
|
||||
typecnt = len(isdsts)
|
||||
dstoffs = [0] * typecnt # Provisionally assign all to 0.
|
||||
dst_cnt = sum(isdsts)
|
||||
dst_found = 0
|
||||
|
||||
for i in range(1, len(trans_idx)):
|
||||
if dst_cnt == dst_found:
|
||||
break
|
||||
|
||||
idx = trans_idx[i]
|
||||
|
||||
dst = isdsts[idx]
|
||||
|
||||
# We're only going to look at daylight saving time
|
||||
if not dst:
|
||||
continue
|
||||
|
||||
# Skip any offsets that have already been assigned
|
||||
if dstoffs[idx] != 0:
|
||||
continue
|
||||
|
||||
dstoff = 0
|
||||
utcoff = utcoffsets[idx]
|
||||
|
||||
comp_idx = trans_idx[i - 1]
|
||||
|
||||
if not isdsts[comp_idx]:
|
||||
dstoff = utcoff - utcoffsets[comp_idx]
|
||||
|
||||
if not dstoff and idx < (typecnt - 1):
|
||||
comp_idx = trans_idx[i + 1]
|
||||
|
||||
# If the following transition is also DST and we couldn't
|
||||
# find the DST offset by this point, we're going ot have to
|
||||
# skip it and hope this transition gets assigned later
|
||||
if isdsts[comp_idx]:
|
||||
continue
|
||||
|
||||
dstoff = utcoff - utcoffsets[comp_idx]
|
||||
|
||||
if dstoff:
|
||||
dst_found += 1
|
||||
dstoffs[idx] = dstoff
|
||||
else:
|
||||
# If we didn't find a valid value for a given index, we'll end up
|
||||
# with dstoff = 0 for something where `isdst=1`. This is obviously
|
||||
# wrong - one hour will be a much better guess than 0
|
||||
for idx in range(typecnt):
|
||||
if not dstoffs[idx] and isdsts[idx]:
|
||||
dstoffs[idx] = 3600
|
||||
|
||||
return dstoffs
|
||||
|
||||
@staticmethod
|
||||
def _ts_to_local(trans_idx, trans_list_utc, utcoffsets):
|
||||
"""Generate number of seconds since 1970 *in the local time*.
|
||||
|
||||
This is necessary to easily find the transition times in local time"""
|
||||
if not trans_list_utc:
|
||||
return [[], []]
|
||||
|
||||
# Start with the timestamps and modify in-place
|
||||
trans_list_wall = [list(trans_list_utc), list(trans_list_utc)]
|
||||
|
||||
if len(utcoffsets) > 1:
|
||||
offset_0 = utcoffsets[0]
|
||||
offset_1 = utcoffsets[trans_idx[0]]
|
||||
if offset_1 > offset_0:
|
||||
offset_1, offset_0 = offset_0, offset_1
|
||||
else:
|
||||
offset_0 = offset_1 = utcoffsets[0]
|
||||
|
||||
trans_list_wall[0][0] += offset_0
|
||||
trans_list_wall[1][0] += offset_1
|
||||
|
||||
for i in range(1, len(trans_idx)):
|
||||
offset_0 = utcoffsets[trans_idx[i - 1]]
|
||||
offset_1 = utcoffsets[trans_idx[i]]
|
||||
|
||||
if offset_1 > offset_0:
|
||||
offset_1, offset_0 = offset_0, offset_1
|
||||
|
||||
trans_list_wall[0][i] += offset_0
|
||||
trans_list_wall[1][i] += offset_1
|
||||
|
||||
return trans_list_wall
|
||||
|
||||
|
||||
class _ttinfo:
|
||||
__slots__ = ["utcoff", "dstoff", "tzname"]
|
||||
|
||||
def __init__(self, utcoff, dstoff, tzname):
|
||||
self.utcoff = utcoff
|
||||
self.dstoff = dstoff
|
||||
self.tzname = tzname
|
||||
|
||||
def __eq__(self, other):
|
||||
return (
|
||||
self.utcoff == other.utcoff
|
||||
and self.dstoff == other.dstoff
|
||||
and self.tzname == other.tzname
|
||||
)
|
||||
|
||||
def __repr__(self): # pragma: nocover
|
||||
return (
|
||||
f"{self.__class__.__name__}"
|
||||
+ f"({self.utcoff}, {self.dstoff}, {self.tzname})"
|
||||
)
|
||||
|
||||
|
||||
_NO_TTINFO = _ttinfo(None, None, None)
|
||||
|
||||
|
||||
class _TZStr:
|
||||
__slots__ = (
|
||||
"std",
|
||||
"dst",
|
||||
"start",
|
||||
"end",
|
||||
"get_trans_info",
|
||||
"get_trans_info_fromutc",
|
||||
"dst_diff",
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self, std_abbr, std_offset, dst_abbr, dst_offset, start=None, end=None
|
||||
):
|
||||
self.dst_diff = dst_offset - std_offset
|
||||
std_offset = _load_timedelta(std_offset)
|
||||
self.std = _ttinfo(
|
||||
utcoff=std_offset, dstoff=_load_timedelta(0), tzname=std_abbr
|
||||
)
|
||||
|
||||
self.start = start
|
||||
self.end = end
|
||||
|
||||
dst_offset = _load_timedelta(dst_offset)
|
||||
delta = _load_timedelta(self.dst_diff)
|
||||
self.dst = _ttinfo(utcoff=dst_offset, dstoff=delta, tzname=dst_abbr)
|
||||
|
||||
# These are assertions because the constructor should only be called
|
||||
# by functions that would fail before passing start or end
|
||||
assert start is not None, "No transition start specified"
|
||||
assert end is not None, "No transition end specified"
|
||||
|
||||
self.get_trans_info = self._get_trans_info
|
||||
self.get_trans_info_fromutc = self._get_trans_info_fromutc
|
||||
|
||||
def transitions(self, year):
|
||||
start = self.start.year_to_epoch(year)
|
||||
end = self.end.year_to_epoch(year)
|
||||
return start, end
|
||||
|
||||
def _get_trans_info(self, ts, year, fold):
|
||||
"""Get the information about the current transition - tti"""
|
||||
start, end = self.transitions(year)
|
||||
|
||||
# With fold = 0, the period (denominated in local time) with the
|
||||
# smaller offset starts at the end of the gap and ends at the end of
|
||||
# the fold; with fold = 1, it runs from the start of the gap to the
|
||||
# beginning of the fold.
|
||||
#
|
||||
# So in order to determine the DST boundaries we need to know both
|
||||
# the fold and whether DST is positive or negative (rare), and it
|
||||
# turns out that this boils down to fold XOR is_positive.
|
||||
if fold == (self.dst_diff >= 0):
|
||||
end -= self.dst_diff
|
||||
else:
|
||||
start += self.dst_diff
|
||||
|
||||
if start < end:
|
||||
isdst = start <= ts < end
|
||||
else:
|
||||
isdst = not (end <= ts < start)
|
||||
|
||||
return self.dst if isdst else self.std
|
||||
|
||||
def _get_trans_info_fromutc(self, ts, year):
|
||||
start, end = self.transitions(year)
|
||||
start -= self.std.utcoff.total_seconds()
|
||||
end -= self.dst.utcoff.total_seconds()
|
||||
|
||||
if start < end:
|
||||
isdst = start <= ts < end
|
||||
else:
|
||||
isdst = not (end <= ts < start)
|
||||
|
||||
# For positive DST, the ambiguous period is one dst_diff after the end
|
||||
# of DST; for negative DST, the ambiguous period is one dst_diff before
|
||||
# the start of DST.
|
||||
if self.dst_diff > 0:
|
||||
ambig_start = end
|
||||
ambig_end = end + self.dst_diff
|
||||
else:
|
||||
ambig_start = start
|
||||
ambig_end = start - self.dst_diff
|
||||
|
||||
fold = ambig_start <= ts < ambig_end
|
||||
|
||||
return (self.dst if isdst else self.std, fold)
|
||||
|
||||
|
||||
def _post_epoch_days_before_year(year):
|
||||
"""Get the number of days between 1970-01-01 and YEAR-01-01"""
|
||||
y = year - 1
|
||||
return y * 365 + y // 4 - y // 100 + y // 400 - EPOCHORDINAL
|
||||
|
||||
|
||||
class _DayOffset:
|
||||
__slots__ = ["d", "julian", "hour", "minute", "second"]
|
||||
|
||||
def __init__(self, d, julian, hour=2, minute=0, second=0):
|
||||
if not (0 + julian) <= d <= 365:
|
||||
min_day = 0 + julian
|
||||
raise ValueError(f"d must be in [{min_day}, 365], not: {d}")
|
||||
|
||||
self.d = d
|
||||
self.julian = julian
|
||||
self.hour = hour
|
||||
self.minute = minute
|
||||
self.second = second
|
||||
|
||||
def year_to_epoch(self, year):
|
||||
days_before_year = _post_epoch_days_before_year(year)
|
||||
|
||||
d = self.d
|
||||
if self.julian and d >= 59 and calendar.isleap(year):
|
||||
d += 1
|
||||
|
||||
epoch = (days_before_year + d) * 86400
|
||||
epoch += self.hour * 3600 + self.minute * 60 + self.second
|
||||
|
||||
return epoch
|
||||
|
||||
|
||||
class _CalendarOffset:
|
||||
__slots__ = ["m", "w", "d", "hour", "minute", "second"]
|
||||
|
||||
_DAYS_BEFORE_MONTH = (
|
||||
-1,
|
||||
0,
|
||||
31,
|
||||
59,
|
||||
90,
|
||||
120,
|
||||
151,
|
||||
181,
|
||||
212,
|
||||
243,
|
||||
273,
|
||||
304,
|
||||
334,
|
||||
)
|
||||
|
||||
def __init__(self, m, w, d, hour=2, minute=0, second=0):
|
||||
if not 0 < m <= 12:
|
||||
raise ValueError("m must be in (0, 12]")
|
||||
|
||||
if not 0 < w <= 5:
|
||||
raise ValueError("w must be in (0, 5]")
|
||||
|
||||
if not 0 <= d <= 6:
|
||||
raise ValueError("d must be in [0, 6]")
|
||||
|
||||
self.m = m
|
||||
self.w = w
|
||||
self.d = d
|
||||
self.hour = hour
|
||||
self.minute = minute
|
||||
self.second = second
|
||||
|
||||
@classmethod
|
||||
def _ymd2ord(cls, year, month, day):
|
||||
return (
|
||||
_post_epoch_days_before_year(year)
|
||||
+ cls._DAYS_BEFORE_MONTH[month]
|
||||
+ (month > 2 and calendar.isleap(year))
|
||||
+ day
|
||||
)
|
||||
|
||||
# TODO: These are not actually epoch dates as they are expressed in local time
|
||||
def year_to_epoch(self, year):
|
||||
"""Calculates the datetime of the occurrence from the year"""
|
||||
# We know year and month, we need to convert w, d into day of month
|
||||
#
|
||||
# Week 1 is the first week in which day `d` (where 0 = Sunday) appears.
|
||||
# Week 5 represents the last occurrence of day `d`, so we need to know
|
||||
# the range of the month.
|
||||
first_day, days_in_month = calendar.monthrange(year, self.m)
|
||||
|
||||
# This equation seems magical, so I'll break it down:
|
||||
# 1. calendar says 0 = Monday, POSIX says 0 = Sunday
|
||||
# so we need first_day + 1 to get 1 = Monday -> 7 = Sunday,
|
||||
# which is still equivalent because this math is mod 7
|
||||
# 2. Get first day - desired day mod 7: -1 % 7 = 6, so we don't need
|
||||
# to do anything to adjust negative numbers.
|
||||
# 3. Add 1 because month days are a 1-based index.
|
||||
month_day = (self.d - (first_day + 1)) % 7 + 1
|
||||
|
||||
# Now use a 0-based index version of `w` to calculate the w-th
|
||||
# occurrence of `d`
|
||||
month_day += (self.w - 1) * 7
|
||||
|
||||
# month_day will only be > days_in_month if w was 5, and `w` means
|
||||
# "last occurrence of `d`", so now we just check if we over-shot the
|
||||
# end of the month and if so knock off 1 week.
|
||||
if month_day > days_in_month:
|
||||
month_day -= 7
|
||||
|
||||
ordinal = self._ymd2ord(year, self.m, month_day)
|
||||
epoch = ordinal * 86400
|
||||
epoch += self.hour * 3600 + self.minute * 60 + self.second
|
||||
return epoch
|
||||
|
||||
|
||||
def _parse_tz_str(tz_str):
|
||||
# The tz string has the format:
|
||||
#
|
||||
# std[offset[dst[offset],start[/time],end[/time]]]
|
||||
#
|
||||
# std and dst must be 3 or more characters long and must not contain
|
||||
# a leading colon, embedded digits, commas, nor a plus or minus signs;
|
||||
# The spaces between "std" and "offset" are only for display and are
|
||||
# not actually present in the string.
|
||||
#
|
||||
# The format of the offset is ``[+|-]hh[:mm[:ss]]``
|
    offset_str, *start_end_str = tz_str.split(",", 1)

    # fmt: off
    parser_re = re.compile(
        r"(?P<std>[^<0-9:.+-]+|<[a-zA-Z0-9+\-]+>)" +
        r"((?P<stdoff>[+-]?\d{1,2}(:\d{2}(:\d{2})?)?)" +
        r"((?P<dst>[^0-9:.+-]+|<[a-zA-Z0-9+\-]+>)" +
        r"((?P<dstoff>[+-]?\d{1,2}(:\d{2}(:\d{2})?)?))?" +
        r")?" +  # dst
        r")?$"  # stdoff
    )
    # fmt: on

    m = parser_re.match(offset_str)

    if m is None:
        raise ValueError(f"{tz_str} is not a valid TZ string")

    std_abbr = m.group("std")
    dst_abbr = m.group("dst")
    dst_offset = None

    std_abbr = std_abbr.strip("<>")

    if dst_abbr:
        dst_abbr = dst_abbr.strip("<>")

    std_offset = m.group("stdoff")
    if std_offset:
        try:
            std_offset = _parse_tz_delta(std_offset)
        except ValueError as e:
            raise ValueError(f"Invalid STD offset in {tz_str}") from e
    else:
        std_offset = 0

    if dst_abbr is not None:
        dst_offset = m.group("dstoff")
        if dst_offset:
            try:
                dst_offset = _parse_tz_delta(dst_offset)
            except ValueError as e:
                raise ValueError(f"Invalid DST offset in {tz_str}") from e
        else:
            dst_offset = std_offset + 3600

        if not start_end_str:
            raise ValueError(f"Missing transition rules: {tz_str}")

        start_end_strs = start_end_str[0].split(",", 1)
        try:
            start, end = (_parse_dst_start_end(x) for x in start_end_strs)
        except ValueError as e:
            raise ValueError(f"Invalid TZ string: {tz_str}") from e

        return _TZStr(std_abbr, std_offset, dst_abbr, dst_offset, start, end)
    elif start_end_str:
        raise ValueError(f"Transition rule present without DST: {tz_str}")
    else:
        # This is a static ttinfo, don't return _TZStr
        return _ttinfo(
            _load_timedelta(std_offset), _load_timedelta(0), std_abbr
        )


def _parse_dst_start_end(dststr):
    date, *time = dststr.split("/")
    if date[0] == "M":
        n_is_julian = False
        m = re.match(r"M(\d{1,2})\.(\d).(\d)$", date)
        if m is None:
            raise ValueError(f"Invalid dst start/end date: {dststr}")
        date_offset = tuple(map(int, m.groups()))
        offset = _CalendarOffset(*date_offset)
    else:
        if date[0] == "J":
            n_is_julian = True
            date = date[1:]
        else:
            n_is_julian = False

        doy = int(date)
        offset = _DayOffset(doy, n_is_julian)

    if time:
        time_components = list(map(int, time[0].split(":")))
        n_components = len(time_components)
        if n_components < 3:
            time_components.extend([0] * (3 - n_components))
        offset.hour, offset.minute, offset.second = time_components

    return offset


def _parse_tz_delta(tz_delta):
    match = re.match(
        r"(?P<sign>[+-])?(?P<h>\d{1,2})(:(?P<m>\d{2})(:(?P<s>\d{2}))?)?",
        tz_delta,
    )
    # Anything passed to this function should already have hit an equivalent
    # regular expression to find the section to parse.
    assert match is not None, tz_delta

    h, m, s = (
        int(v) if v is not None else 0
        for v in map(match.group, ("h", "m", "s"))
    )

    total = h * 3600 + m * 60 + s

    if not -86400 < total < 86400:
        raise ValueError(
            "Offset must be strictly between -24h and +24h:" + tz_delta
        )

    # Yes, +5 maps to an offset of -5h
    if match.group("sign") != "-":
        total *= -1

    return total
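For reference, a standalone sketch of the sign convention `_parse_tz_delta` implements (the helper below is a simplified stand-in, not the vendored function): in a POSIX TZ string the offset is the value added to local time to reach UTC, so an unsigned offset is negated.

    import re

    def parse_tz_delta(tz_delta):
        # Simplified: hours and optional minutes only.
        m = re.match(r"(?P<sign>[+-])?(?P<h>\d{1,2})(:(?P<m>\d{2}))?", tz_delta)
        total = int(m.group("h")) * 3600 + int(m.group("m") or 0) * 60
        # POSIX: "EST5" denotes UTC-05:00, so a bare "5" maps to -5h.
        return total if m.group("sign") == "-" else -total

    assert parse_tz_delta("5") == -5 * 3600   # EST5  -> UTC-05:00
    assert parse_tz_delta("-1") == 3600       # CET-1 -> UTC+01:00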
@@ -11,9 +11,9 @@ from bleach.sanitizer import (

 # yyyymmdd
-__releasedate__ = "20231006"
+__releasedate__ = "20241029"
 # x.y.z or x.y.z.dev0 -- semver
-__version__ = "6.1.0"
+__version__ = "6.2.0"


 __all__ = ["clean", "linkify"]
@@ -1,7 +1,7 @@
 from __future__ import absolute_import, division, unicode_literals

-from six import text_type
-from six.moves import http_client, urllib
+from bleach.six_shim import text_type
+from bleach.six_shim import http_client, urllib

 import codecs
 import re

@@ -1,6 +1,6 @@
 from __future__ import absolute_import, division, unicode_literals

-from six import unichr as chr
+from bleach.six_shim import unichr as chr

 from collections import deque, OrderedDict
 from sys import version_info

@@ -1,5 +1,5 @@
 from __future__ import absolute_import, division, unicode_literals
-from six import text_type
+from bleach.six_shim import text_type

 from bisect import bisect_left

@@ -7,7 +7,7 @@ try:
 except ImportError:
     from collections import Mapping

-from six import text_type, PY3
+from bleach.six_shim import text_type, PY3

 if PY3:
     import xml.etree.ElementTree as default_etree

@@ -1,6 +1,6 @@
 from __future__ import absolute_import, division, unicode_literals

-from six import text_type
+from bleach.six_shim import text_type

 from . import base
 from ..constants import namespaces, voidElements

@@ -12,7 +12,7 @@ import re
 import warnings
 from xml.sax.saxutils import escape, unescape

-from six.moves import urllib_parse as urlparse
+from bleach.six_shim import urllib_parse as urlparse

 from . import base
 from ..constants import namespaces, prefixes

@@ -1,5 +1,5 @@
 from __future__ import absolute_import, division, unicode_literals
-from six import with_metaclass, viewkeys
+from bleach.six_shim import viewkeys

 import types

@@ -423,7 +423,7 @@ def getPhases(debug):
             return type

     # pylint:disable=unused-argument
-    class Phase(with_metaclass(getMetaclass(debug, log))):
+    class Phase(metaclass=getMetaclass(debug, log)):
         """Base class for helper object that implements each phase of processing
         """
         __slots__ = ("parser", "tree", "__startTagCache", "__endTagCache")

@@ -1,5 +1,5 @@
 from __future__ import absolute_import, division, unicode_literals
-from six import text_type
+from bleach.six_shim import text_type

 import re

@@ -1,5 +1,5 @@
 from __future__ import absolute_import, division, unicode_literals
-from six import text_type
+from bleach.six_shim import text_type

 from ..constants import scopingElements, tableInsertModeElements, namespaces

@@ -1,7 +1,7 @@
 from __future__ import absolute_import, division, unicode_literals
 # pylint:disable=protected-access

-from six import text_type
+from bleach.six_shim import text_type

 import re

@@ -28,7 +28,7 @@ from . import etree as etree_builders
 from .. import _ihatexml

 import lxml.etree as etree
-from six import PY3, binary_type
+from bleach.six_shim import PY3, binary_type


 fullTree = True

@@ -3,7 +3,7 @@ from __future__ import absolute_import, division, unicode_literals
 from collections import OrderedDict
 import re

-from six import string_types
+from bleach.six_shim import string_types

 from . import base
 from .._utils import moduleFactoryFactory

@@ -1,5 +1,5 @@
 from __future__ import absolute_import, division, unicode_literals
-from six import text_type
+from bleach.six_shim import text_type

 from collections import OrderedDict

@@ -7,8 +7,12 @@ set -o pipefail
 BLEACH_VENDOR_DIR=${BLEACH_VENDOR_DIR:-"."}
 DEST=${DEST:-"."}

+# Install with no dependencies
 pip install --no-binary all --no-compile --no-deps -r "${BLEACH_VENDOR_DIR}/vendor.txt" --target "${DEST}"

+# Apply patches
+(cd "${DEST}" && patch -p2 < 01_html5lib_six.patch)
+
 # install Python 3.6.14 urllib.urlparse for #536
 curl --proto '=https' --tlsv1.2 -o "${DEST}/parse.py" https://raw.githubusercontent.com/python/cpython/v3.6.14/Lib/urllib/parse.py
 (cd "${DEST}" && sha256sum parse.py > parse.py.SHA256SUM)
@@ -396,16 +396,25 @@ class BleachHTMLTokenizer(HTMLTokenizer):
             # name that abruptly ends, but we should treat that like
             # character data
             yield {"type": TAG_TOKEN_TYPE_CHARACTERS, "data": self.stream.get_tag()}

         elif last_error_token["data"] in (
+            "duplicate-attribute",
             "eof-in-attribute-name",
             "eof-in-attribute-value-no-quotes",
+            "expected-end-of-tag-but-got-eof",
         ):
             # Handle the case where the text being parsed ends with <
-            # followed by a series of characters and then space and then
-            # more characters. It's treated as a tag name followed by an
+            # followed by characters and then space and then:
+            #
+            # * more characters
+            # * more characters repeated with a space between (e.g. "abc abc")
+            # * more characters and then a space and then an EOF (e.g. "abc def ")
+            #
+            # These cases are treated as a tag name followed by an
             # attribute that abruptly ends, but we should treat that like
-            # character data.
+            # character data instead.
             yield {"type": TAG_TOKEN_TYPE_CHARACTERS, "data": self.stream.get_tag()}

         else:
             yield last_error_token
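To illustrate the behavior this hunk extends, a quick check against bleach's public clean() API (the output shown is what the comments above imply, not verified against this exact vendored copy):

    import bleach

    # An unterminated tag with attribute-like text is emitted as escaped
    # character data instead of being dropped.
    print(bleach.clean("hello <abc def "))  # expected: 'hello &lt;abc def '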
lib/bleach/six_shim.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+"""
+Replacement module for what html5lib uses six for.
+"""
+
+import http.client
+import operator
+import urllib
+
+
+PY3 = True
+binary_type = bytes
+string_types = (str,)
+text_type = str
+unichr = chr
+viewkeys = operator.methodcaller("keys")
+
+http_client = http.client
+urllib = urllib
+urllib_parse = urllib.parse
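A minimal check of what the new shim provides (hypothetical snippet; the assertions follow directly from the module body above):

    from bleach.six_shim import text_type, unichr, viewkeys

    assert text_type is str                     # six.text_type on Python 3
    assert unichr(0x1F40D) == "\U0001f40d"      # six.unichr is just chr
    assert list(viewkeys({"a": 1})) == ["a"]    # six.viewkeys -> dict.keys()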
@@ -1,4 +1,4 @@
 from .core import contents, where

 __all__ = ["contents", "where"]
-__version__ = "2024.02.02"
+__version__ = "2024.08.30"
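Usage sketch for the vendored certifi updated here (standard certifi API, shown for context only):

    import ssl
    import certifi

    # Build a TLS context backed by the bundled cacert.pem.
    ctx = ssl.create_default_context(cafile=certifi.where())
    print(certifi.__version__)  # '2024.08.30' after this bump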
@@ -3485,46 +3485,6 @@ DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ
 +RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A=
 -----END CERTIFICATE-----

-# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH
-# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH
-# Label: "GLOBALTRUST 2020"
-# Serial: 109160994242082918454945253
-# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8
-# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2
-# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a
------BEGIN CERTIFICATE-----
-MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG
-A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw
-FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx
-MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u
-aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq
-hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b
-RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z
-YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3
-QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw
-yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+
-BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ
-SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH
-r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0
-4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me
-dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw
-q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2
-nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
-AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu
-H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA
-VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC
-XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd
-6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf
-+I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi
-kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7
-wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB
-TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C
-MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn
-4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I
-aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy
-qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg==
------END CERTIFICATE-----
-
 # Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz
 # Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz
 # Label: "ANF Secure Server Root CA"

@@ -4812,3 +4772,158 @@ X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q
 ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm
 dTdmQRCsu/WU48IxK63nI1bMNSWSs1A=
 -----END CERTIFICATE-----
+
+# Issuer: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA
+# Subject: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA
+# Label: "FIRMAPROFESIONAL CA ROOT-A WEB"
+# Serial: 65916896770016886708751106294915943533
+# MD5 Fingerprint: 82:b2:ad:45:00:82:b0:66:63:f8:5f:c3:67:4e:ce:a3
+# SHA1 Fingerprint: a8:31:11:74:a6:14:15:0d:ca:77:dd:0e:e4:0c:5d:58:fc:a0:72:a5
+# SHA256 Fingerprint: be:f2:56:da:f2:6e:9c:69:bd:ec:16:02:35:97:98:f3:ca:f7:18:21:a0:3e:01:82:57:c5:3c:65:61:7f:3d:4a
+-----BEGIN CERTIFICATE-----
+MIICejCCAgCgAwIBAgIQMZch7a+JQn81QYehZ1ZMbTAKBggqhkjOPQQDAzBuMQsw
+CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE
+YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB
+IFJPT1QtQSBXRUIwHhcNMjIwNDA2MDkwMTM2WhcNNDcwMzMxMDkwMTM2WjBuMQsw
+CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE
+YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB
+IFJPT1QtQSBXRUIwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARHU+osEaR3xyrq89Zf
+e9MEkVz6iMYiuYMQYneEMy3pA4jU4DP37XcsSmDq5G+tbbT4TIqk5B/K6k84Si6C
+cyvHZpsKjECcfIr28jlgst7L7Ljkb+qbXbdTkBgyVcUgt5SjYzBhMA8GA1UdEwEB
+/wQFMAMBAf8wHwYDVR0jBBgwFoAUk+FDY1w8ndYn81LsF7Kpryz3dvgwHQYDVR0O
+BBYEFJPhQ2NcPJ3WJ/NS7Beyqa8s93b4MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjO
+PQQDAwNoADBlAjAdfKR7w4l1M+E7qUW/Runpod3JIha3RxEL2Jq68cgLcFBTApFw
+hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG
+XSaQpYXFuXqUPoeovQA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA CYBER Root CA"
+# Serial: 85076849864375384482682434040119489222
+# MD5 Fingerprint: 0b:33:a0:97:52:95:d4:a9:fd:bb:db:6e:a3:55:5b:51
+# SHA1 Fingerprint: f6:b1:1c:1a:83:38:e9:7b:db:b3:a8:c8:33:24:e0:2d:9c:7f:26:66
+# SHA256 Fingerprint: 3f:63:bb:28:14:be:17:4e:c8:b6:43:9c:f0:8d:6d:56:f0:b7:c4:05:88:3a:56:48:a3:34:42:4d:6b:3e:c5:58
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIQQAE0jMIAAAAAAAAAATzyxjANBgkqhkiG9w0BAQwFADBQ
+MQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FOLUNBMRAwDgYDVQQLEwdSb290
+IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3QgQ0EwHhcNMjIxMTIyMDY1NDI5
+WhcNNDcxMTIyMTU1OTU5WjBQMQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FO
+LUNBMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3Qg
+Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDG+Moe2Qkgfh1sTs6P
+40czRJzHyWmqOlt47nDSkvgEs1JSHWdyKKHfi12VCv7qze33Kc7wb3+szT3vsxxF
+avcokPFhV8UMxKNQXd7UtcsZyoC5dc4pztKFIuwCY8xEMCDa6pFbVuYdHNWdZsc/
+34bKS1PE2Y2yHer43CdTo0fhYcx9tbD47nORxc5zb87uEB8aBs/pJ2DFTxnk684i
+JkXXYJndzk834H/nY62wuFm40AZoNWDTNq5xQwTxaWV4fPMf88oon1oglWa0zbfu
+j3ikRRjpJi+NmykosaS3Om251Bw4ckVYsV7r8Cibt4LK/c/WMw+f+5eesRycnupf
+Xtuq3VTpMCEobY5583WSjCb+3MX2w7DfRFlDo7YDKPYIMKoNM+HvnKkHIuNZW0CP
+2oi3aQiotyMuRAlZN1vH4xfyIutuOVLF3lSnmMlLIJXcRolftBL5hSmO68gnFSDA
+S9TMfAxsNAwmmyYxpjyn9tnQS6Jk/zuZQXLB4HCX8SS7K8R0IrGsayIyJNN4KsDA
+oS/xUgXJP+92ZuJF2A09rZXIx4kmyA+upwMu+8Ff+iDhcK2wZSA3M2Cw1a/XDBzC
+kHDXShi8fgGwsOsVHkQGzaRP6AzRwyAQ4VRlnrZR0Bp2a0JaWHY06rc3Ga4udfmW
+5cFZ95RXKSWNOkyrTZpB0F8mAwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSdhWEUfMFib5do5E83QOGt4A1WNzAd
+BgNVHQ4EFgQUnYVhFHzBYm+XaORPN0DhreANVjcwDQYJKoZIhvcNAQEMBQADggIB
+AGSPesRiDrWIzLjHhg6hShbNcAu3p4ULs3a2D6f/CIsLJc+o1IN1KriWiLb73y0t
+tGlTITVX1olNc79pj3CjYcya2x6a4CD4bLubIp1dhDGaLIrdaqHXKGnK/nZVekZn
+68xDiBaiA9a5F/gZbG0jAn/xX9AKKSM70aoK7akXJlQKTcKlTfjF/biBzysseKNn
+TKkHmvPfXvt89YnNdJdhEGoHK4Fa0o635yDRIG4kqIQnoVesqlVYL9zZyvpoBJ7t
+RCT5dEA7IzOrg1oYJkK2bVS1FmAwbLGg+LhBoF1JSdJlBTrq/p1hvIbZv97Tujqx
+f36SNI7JAG7cmL3c7IAFrQI932XtCwP39xaEBDG6k5TY8hL4iuO/Qq+n1M0RFxbI
+Qh0UqEL20kCGoE8jypZFVmAGzbdVAaYBlGX+bgUJurSkquLvWL69J1bY73NxW0Qz
+8ppy6rBePm6pUlvscG21h483XjyMnM7k8M4MZ0HMzvaAq07MTFb1wWFZk7Q+ptq4
+NxKfKjLji7gh7MMrZQzvIt6IKTtM1/r+t+FHvpw+PoP7UV31aPcuIYXcv/Fa4nzX
+xeSDwWrruoBa3lwtcHb4yOWHh8qgnaHlIhInD0Q9HWzq1MKLL295q39QpsQZp6F6
+t5b5wR9iWqJDB0BeJsas7a5wFsWqynKKTbDPAYsDP27X
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd.
+# Subject: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd.
+# Label: "SecureSign Root CA12"
+# Serial: 587887345431707215246142177076162061960426065942
+# MD5 Fingerprint: c6:89:ca:64:42:9b:62:08:49:0b:1e:7f:e9:07:3d:e8
+# SHA1 Fingerprint: 7a:22:1e:3d:de:1b:06:ac:9e:c8:47:70:16:8e:3c:e5:f7:6b:06:f4
+# SHA256 Fingerprint: 3f:03:4b:b5:70:4d:44:b2:d0:85:45:a0:20:57:de:93:eb:f3:90:5f:ce:72:1a:cb:c7:30:c0:6d:da:ee:90:4e
+-----BEGIN CERTIFICATE-----
+MIIDcjCCAlqgAwIBAgIUZvnHwa/swlG07VOX5uaCwysckBYwDQYJKoZIhvcNAQEL
+BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u
+LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExMjAeFw0yMDA0MDgw
+NTM2NDZaFw00MDA0MDgwNTM2NDZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD
+eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS
+b290IENBMTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6OcE3emhF
+KxS06+QT61d1I02PJC0W6K6OyX2kVzsqdiUzg2zqMoqUm048luT9Ub+ZyZN+v/mt
+p7JIKwccJ/VMvHASd6SFVLX9kHrko+RRWAPNEHl57muTH2SOa2SroxPjcf59q5zd
+J1M3s6oYwlkm7Fsf0uZlfO+TvdhYXAvA42VvPMfKWeP+bl+sg779XSVOKik71gur
+FzJ4pOE+lEa+Ym6b3kaosRbnhW70CEBFEaCeVESE99g2zvVQR9wsMJvuwPWW0v4J
+hscGWa5Pro4RmHvzC1KqYiaqId+OJTN5lxZJjfU+1UefNzFJM3IFTQy2VYzxV4+K
+h9GtxRESOaCtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBRXNPN0zwRL1SXm8UC2LEzZLemgrTANBgkqhkiG9w0BAQsF
+AAOCAQEAPrvbFxbS8hQBICw4g0utvsqFepq2m2um4fylOqyttCg6r9cBg0krY6Ld
+mmQOmFxv3Y67ilQiLUoT865AQ9tPkbeGGuwAtEGBpE/6aouIs3YIcipJQMPTw4WJ
+mBClnW8Zt7vPemVV2zfrPIpyMpcemik+rY3moxtt9XUa5rBouVui7mlHJzWhhpmA
+8zNL4WukJsPvdFlseqJkth5Ew1DgDzk9qTPxpfPSvWKErI4cqc1avTc7bgoitPQV
+55FYxTpE05Uo2cBl6XLK0A+9H7MV2anjpEcJnuDLN/v9vZfVvhgaaaI5gdka9at/
+yOPiZwud9AzqVN/Ssq+xIvEg37xEHA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd.
+# Subject: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd.
+# Label: "SecureSign Root CA14"
+# Serial: 575790784512929437950770173562378038616896959179
+# MD5 Fingerprint: 71:0d:72:fa:92:19:65:5e:89:04:ac:16:33:f0:bc:d5
+# SHA1 Fingerprint: dd:50:c0:f7:79:b3:64:2e:74:a2:b8:9d:9f:d3:40:dd:bb:f0:f2:4f
+# SHA256 Fingerprint: 4b:00:9c:10:34:49:4f:9a:b5:6b:ba:3b:a1:d6:27:31:fc:4d:20:d8:95:5a:dc:ec:10:a9:25:60:72:61:e3:38
+-----BEGIN CERTIFICATE-----
+MIIFcjCCA1qgAwIBAgIUZNtaDCBO6Ncpd8hQJ6JaJ90t8sswDQYJKoZIhvcNAQEM
+BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u
+LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNDAeFw0yMDA0MDgw
+NzA2MTlaFw00NTA0MDgwNzA2MTlaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD
+eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS
+b290IENBMTQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDF0nqh1oq/
+FjHQmNE6lPxauG4iwWL3pwon71D2LrGeaBLwbCRjOfHw3xDG3rdSINVSW0KZnvOg
+vlIfX8xnbacuUKLBl422+JX1sLrcneC+y9/3OPJH9aaakpUqYllQC6KxNedlsmGy
+6pJxaeQp8E+BgQQ8sqVb1MWoWWd7VRxJq3qdwudzTe/NCcLEVxLbAQ4jeQkHO6Lo
+/IrPj8BGJJw4J+CDnRugv3gVEOuGTgpa/d/aLIJ+7sr2KeH6caH3iGicnPCNvg9J
+kdjqOvn90Ghx2+m1K06Ckm9mH+Dw3EzsytHqunQG+bOEkJTRX45zGRBdAuVwpcAQ
+0BB8b8VYSbSwbprafZX1zNoCr7gsfXmPvkPx+SgojQlD+Ajda8iLLCSxjVIHvXib
+y8posqTdDEx5YMaZ0ZPxMBoH064iwurO8YQJzOAUbn8/ftKChazcqRZOhaBgy/ac
+18izju3Gm5h1DVXoX+WViwKkrkMpKBGk5hIwAUt1ax5mnXkvpXYvHUC0bcl9eQjs
+0Wq2XSqypWa9a4X0dFbD9ed1Uigspf9mR6XU/v6eVL9lfgHWMI+lNpyiUBzuOIAB
+SMbHdPTGrMNASRZhdCyvjG817XsYAFs2PJxQDcqSMxDxJklt33UkN4Ii1+iW/RVL
+ApY+B3KVfqs9TC7XyvDf4Fg/LS8EmjijAQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
+AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUBpOjCl4oaTeqYR3r6/wtbyPk
+86AwDQYJKoZIhvcNAQEMBQADggIBAJaAcgkGfpzMkwQWu6A6jZJOtxEaCnFxEM0E
+rX+lRVAQZk5KQaID2RFPeje5S+LGjzJmdSX7684/AykmjbgWHfYfM25I5uj4V7Ib
+ed87hwriZLoAymzvftAj63iP/2SbNDefNWWipAA9EiOWWF3KY4fGoweITedpdopT
+zfFP7ELyk+OZpDc8h7hi2/DsHzc/N19DzFGdtfCXwreFamgLRB7lUe6TzktuhsHS
+DCRZNhqfLJGP4xjblJUK7ZGqDpncllPjYYPGFrojutzdfhrGe0K22VoF3Jpf1d+4
+2kd92jjbrDnVHmtsKheMYc2xbXIBw8MgAGJoFjHVdqqGuw6qnsb58Nn4DSEC5MUo
+FlkRudlpcyqSeLiSV5sI8jrlL5WwWLdrIBRtFO8KvH7YVdiI2i/6GaX7i+B/OfVy
+K4XELKzvGUWSTLNhB9xNH27SgRNcmvMSZ4PPmz+Ln52kuaiWA3rF7iDeM9ovnhp6
+dB7h7sxaOgTdsxoEqBRjrLdHEoOabPXm6RUVkRqEGQ6UROcSjiVbgGcZ3GOTEAtl
+Lor6CZpO2oYofaphNdgOpygau1LgePhsumywbrmHXumZNTfxPWQrqaA0k89jL9WB
+365jJ6UeTo3cKXhZ+PmhIIynJkBugnLNeLLIjzwec+fBH7/PzqUqm9tEZDKgu39c
+JRNItX+S
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd.
+# Subject: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd.
+# Label: "SecureSign Root CA15"
+# Serial: 126083514594751269499665114766174399806381178503
+# MD5 Fingerprint: 13:30:fc:c4:62:a6:a9:de:b5:c1:68:af:b5:d2:31:47
+# SHA1 Fingerprint: cb:ba:83:c8:c1:5a:5d:f1:f9:73:6f:ca:d7:ef:28:13:06:4a:07:7d
+# SHA256 Fingerprint: e7:78:f0:f0:95:fe:84:37:29:cd:1a:00:82:17:9e:53:14:a9:c2:91:44:28:05:e1:fb:1d:8f:b6:b8:88:6c:3a
+-----BEGIN CERTIFICATE-----
+MIICIzCCAamgAwIBAgIUFhXHw9hJp75pDIqI7fBw+d23PocwCgYIKoZIzj0EAwMw
+UTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28uLCBM
+dGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNTAeFw0yMDA0MDgwODMy
+NTZaFw00NTA0MDgwODMyNTZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpDeWJl
+cnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBSb290
+IENBMTUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQLUHSNZDKZmbPSYAi4Io5GdCx4
+wCtELW1fHcmuS1Iggz24FG1Th2CeX2yF2wYUleDHKP+dX+Sq8bOLbe1PL0vJSpSR
+ZHX+AezB2Ot6lHhWGENfa4HL9rzatAy2KZMIaY+jQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT
+9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp
+4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6
+bkU6iYAZezKYVWOr62Nuk22rGwlgMU4=
+-----END CERTIFICATE-----
@@ -159,6 +159,8 @@ def from_bytes(

     results: CharsetMatches = CharsetMatches()

+    early_stop_results: CharsetMatches = CharsetMatches()
+
     sig_encoding, sig_payload = identify_sig_or_bom(sequences)

     if sig_encoding is not None:

@@ -221,16 +223,20 @@ def from_bytes(
         try:
             if is_too_large_sequence and is_multi_byte_decoder is False:
                 str(
+                    (
                         sequences[: int(50e4)]
                         if strip_sig_or_bom is False
-                        else sequences[len(sig_payload) : int(50e4)],
+                        else sequences[len(sig_payload) : int(50e4)]
+                    ),
                     encoding=encoding_iana,
                 )
             else:
                 decoded_payload = str(
+                    (
                         sequences
                         if strip_sig_or_bom is False
-                        else sequences[len(sig_payload) :],
+                        else sequences[len(sig_payload) :]
+                    ),
                     encoding=encoding_iana,
                 )
         except (UnicodeDecodeError, LookupError) as e:

@@ -367,7 +373,13 @@ def from_bytes(
             and not lazy_str_hard_failure
         ):
             fallback_entry = CharsetMatch(
-                sequences, encoding_iana, threshold, False, [], decoded_payload
+                sequences,
+                encoding_iana,
+                threshold,
+                False,
+                [],
+                decoded_payload,
+                preemptive_declaration=specified_encoding,
             )
             if encoding_iana == specified_encoding:
                 fallback_specified = fallback_entry

@@ -421,28 +433,58 @@ def from_bytes(
             ),
         )

-        results.append(
-            CharsetMatch(
+        current_match = CharsetMatch(
             sequences,
             encoding_iana,
             mean_mess_ratio,
             bom_or_sig_available,
             cd_ratios_merged,
-            decoded_payload,
+            (
+                decoded_payload
+                if (
+                    is_too_large_sequence is False
+                    or encoding_iana in [specified_encoding, "ascii", "utf_8"]
+                )
+                else None
+            ),
+            preemptive_declaration=specified_encoding,
         )

+        results.append(current_match)
+
         if (
             encoding_iana in [specified_encoding, "ascii", "utf_8"]
             and mean_mess_ratio < 0.1
         ):
+            # If md says nothing to worry about, then... stop immediately!
+            if mean_mess_ratio == 0.0:
                 logger.debug(
-                    "Encoding detection: %s is most likely the one.", encoding_iana
+                    "Encoding detection: %s is most likely the one.",
+                    current_match.encoding,
                 )
                 if explain:
                     logger.removeHandler(explain_handler)
                     logger.setLevel(previous_logger_level)
-            return CharsetMatches([results[encoding_iana]])
+                return CharsetMatches([current_match])
+
+            early_stop_results.append(current_match)
+
+            if (
+                len(early_stop_results)
+                and (specified_encoding is None or specified_encoding in tested)
+                and "ascii" in tested
+                and "utf_8" in tested
+            ):
+                probable_result: CharsetMatch = early_stop_results.best()  # type: ignore[assignment]
+                logger.debug(
+                    "Encoding detection: %s is most likely the one.",
+                    probable_result.encoding,
+                )
+                if explain:
+                    logger.removeHandler(explain_handler)
+                    logger.setLevel(previous_logger_level)
+
+                return CharsetMatches([probable_result])

         if encoding_iana == sig_encoding:
             logger.debug(
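A usage sketch for the detector whose early-stop path changes above (standard charset_normalizer API; the early_stop_results machinery itself is internal):

    from charset_normalizer import from_bytes

    best = from_bytes("Déjà vu, café crème.".encode("utf_8")).best()
    print(best.encoding)  # e.g. 'utf_8'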
@@ -109,6 +109,14 @@ def cli_detect(argv: Optional[List[str]] = None) -> int:
         dest="force",
         help="Replace file without asking if you are sure, use this flag with caution.",
     )
+    parser.add_argument(
+        "-i",
+        "--no-preemptive",
+        action="store_true",
+        default=False,
+        dest="no_preemptive",
+        help="Disable looking at a charset declaration to hint the detector.",
+    )
     parser.add_argument(
         "-t",
         "--threshold",

@@ -133,21 +141,35 @@ def cli_detect(argv: Optional[List[str]] = None) -> int:
     args = parser.parse_args(argv)

     if args.replace is True and args.normalize is False:
+        if args.files:
+            for my_file in args.files:
+                my_file.close()
         print("Use --replace in addition of --normalize only.", file=sys.stderr)
         return 1

     if args.force is True and args.replace is False:
+        if args.files:
+            for my_file in args.files:
+                my_file.close()
         print("Use --force in addition of --replace only.", file=sys.stderr)
         return 1

     if args.threshold < 0.0 or args.threshold > 1.0:
+        if args.files:
+            for my_file in args.files:
+                my_file.close()
         print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr)
         return 1

     x_ = []

     for my_file in args.files:
-        matches = from_fp(my_file, threshold=args.threshold, explain=args.verbose)
+        matches = from_fp(
+            my_file,
+            threshold=args.threshold,
+            explain=args.verbose,
+            preemptive_behaviour=args.no_preemptive is False,
+        )

         best_guess = matches.best()

@@ -155,9 +177,11 @@ def cli_detect(argv: Optional[List[str]] = None) -> int:
             print(
                 'Unable to identify originating encoding for "{}". {}'.format(
                     my_file.name,
+                    (
                         "Maybe try increasing maximum amount of chaos."
                         if args.threshold < 1.0
-                        else "",
+                        else ""
+                    ),
                 ),
                 file=sys.stderr,
             )

@@ -258,8 +282,8 @@ def cli_detect(argv: Optional[List[str]] = None) -> int:
                 try:
                     x_[0].unicode_path = join(dir_path, ".".join(o_))

-                    with open(x_[0].unicode_path, "w", encoding="utf-8") as fp:
-                        fp.write(str(best_guess))
+                    with open(x_[0].unicode_path, "wb") as fp:
+                        fp.write(best_guess.output())
                 except IOError as e:
                     print(str(e), file=sys.stderr)
                     if my_file.closed is False:
@@ -544,6 +544,8 @@ COMMON_SAFE_ASCII_CHARACTERS: Set[str] = {
     "|",
     '"',
     "-",
+    "(",
+    ")",
 }
@@ -1,13 +1,24 @@
-from typing import Any, Dict, Optional, Union
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Optional
 from warnings import warn

 from .api import from_bytes
 from .constant import CHARDET_CORRESPONDENCE

+# TODO: remove this check when dropping Python 3.7 support
+if TYPE_CHECKING:
+    from typing_extensions import TypedDict
+
+    class ResultDict(TypedDict):
+        encoding: Optional[str]
+        language: str
+        confidence: Optional[float]
+

 def detect(
     byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any
-) -> Dict[str, Optional[Union[str, float]]]:
+) -> ResultDict:
     """
     chardet legacy method
     Detect the encoding of the given byte string. It should be mostly backward-compatible.
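Usage sketch for the chardet-compatible entry point annotated above (values shown are illustrative):

    from charset_normalizer import detect

    result = detect("Comment ça va ?".encode("utf_8"))
    # e.g. {'encoding': 'utf-8', 'language': 'French', 'confidence': 1.0}
    print(result["encoding"], result["confidence"])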
@@ -236,7 +236,7 @@ class SuspiciousRange(MessDetectorPlugin):

     @property
     def ratio(self) -> float:
-        if self._character_count <= 24:
+        if self._character_count <= 13:
             return 0.0

         ratio_of_suspicious_range_usage: float = (

@@ -260,6 +260,7 @@ class SuperWeirdWordPlugin(MessDetectorPlugin):

         self._buffer: str = ""
         self._buffer_accent_count: int = 0
+        self._buffer_glyph_count: int = 0

     def eligible(self, character: str) -> bool:
         return True

@@ -279,6 +280,14 @@ class SuperWeirdWordPlugin(MessDetectorPlugin):
                 and is_thai(character) is False
             ):
                 self._foreign_long_watch = True
+            if (
+                is_cjk(character)
+                or is_hangul(character)
+                or is_katakana(character)
+                or is_hiragana(character)
+                or is_thai(character)
+            ):
+                self._buffer_glyph_count += 1
             return
         if not self._buffer:
             return

@@ -291,17 +300,20 @@ class SuperWeirdWordPlugin(MessDetectorPlugin):
             self._character_count += buffer_length

             if buffer_length >= 4:
-                if self._buffer_accent_count / buffer_length > 0.34:
+                if self._buffer_accent_count / buffer_length >= 0.5:
                     self._is_current_word_bad = True
                 # Word/Buffer ending with an upper case accentuated letter are so rare,
                 # that we will consider them all as suspicious. Same weight as foreign_long suspicious.
-                if (
+                elif (
                     is_accentuated(self._buffer[-1])
                     and self._buffer[-1].isupper()
                     and all(_.isupper() for _ in self._buffer) is False
                 ):
                     self._foreign_long_count += 1
                     self._is_current_word_bad = True
+                elif self._buffer_glyph_count == 1:
+                    self._is_current_word_bad = True
+                    self._foreign_long_count += 1
             if buffer_length >= 24 and self._foreign_long_watch:
                 camel_case_dst = [
                     i

@@ -325,6 +337,7 @@ class SuperWeirdWordPlugin(MessDetectorPlugin):
             self._foreign_long_watch = False
             self._buffer = ""
             self._buffer_accent_count = 0
+            self._buffer_glyph_count = 0
         elif (
             character not in {"<", ">", "-", "=", "~", "|", "_"}
             and character.isdigit() is False
@@ -1,9 +1,10 @@
 from encodings.aliases import aliases
 from hashlib import sha256
 from json import dumps
+from re import sub
 from typing import Any, Dict, Iterator, List, Optional, Tuple, Union

-from .constant import TOO_BIG_SEQUENCE
+from .constant import RE_POSSIBLE_ENCODING_INDICATION, TOO_BIG_SEQUENCE
 from .utils import iana_name, is_multi_byte_encoding, unicode_range

@@ -16,6 +17,7 @@ class CharsetMatch:
         has_sig_or_bom: bool,
         languages: "CoherenceMatches",
         decoded_payload: Optional[str] = None,
+        preemptive_declaration: Optional[str] = None,
     ):
         self._payload: bytes = payload

@@ -33,13 +35,13 @@ class CharsetMatch:

         self._string: Optional[str] = decoded_payload

+        self._preemptive_declaration: Optional[str] = preemptive_declaration
+
     def __eq__(self, other: object) -> bool:
         if not isinstance(other, CharsetMatch):
-            raise TypeError(
-                "__eq__ cannot be invoked on {} and {}.".format(
-                    str(other.__class__), str(self.__class__)
-                )
-            )
+            if isinstance(other, str):
+                return iana_name(other) == self.encoding
+            return False
         return self.encoding == other.encoding and self.fingerprint == other.fingerprint

     def __lt__(self, other: object) -> bool:

@@ -210,7 +212,24 @@ class CharsetMatch:
         """
         if self._output_encoding is None or self._output_encoding != encoding:
             self._output_encoding = encoding
-            self._output_payload = str(self).encode(encoding, "replace")
+            decoded_string = str(self)
+            if (
+                self._preemptive_declaration is not None
+                and self._preemptive_declaration.lower()
+                not in ["utf-8", "utf8", "utf_8"]
+            ):
+                patched_header = sub(
+                    RE_POSSIBLE_ENCODING_INDICATION,
+                    lambda m: m.string[m.span()[0] : m.span()[1]].replace(
+                        m.groups()[0], iana_name(self._output_encoding)  # type: ignore[arg-type]
+                    ),
+                    decoded_string[:8192],
+                    1,
+                )
+
+                decoded_string = patched_header + decoded_string[8192:]
+
+            self._output_payload = decoded_string.encode(encoding, "replace")

         return self._output_payload  # type: ignore

@@ -266,7 +285,7 @@ class CharsetMatches:
                 )
             )
         # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage)
-        if len(item.raw) <= TOO_BIG_SEQUENCE:
+        if len(item.raw) < TOO_BIG_SEQUENCE:
             for match in self._results:
                 if match.fingerprint == item.fingerprint and match.chaos == item.chaos:
                     match.add_submatch(item)
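A sketch of what the patched output() above enables (hypothetical input; the exact rewrite is driven by RE_POSSIBLE_ENCODING_INDICATION): a charset declaration embedded in the payload is updated when the match is re-encoded.

    from charset_normalizer import from_bytes

    payload = b'<?xml version="1.0" encoding="ISO-8859-1"?><doc>caf\xe9</doc>'
    match = from_bytes(payload).best()
    # output() re-encodes to UTF-8 by default and, with this change,
    # rewrites the declared encoding so the emitted document stays consistent.
    print(match.output())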
@@ -2,5 +2,5 @@
 Expose version
 """

-__version__ = "3.3.2"
+__version__ = "3.4.0"
 VERSION = __version__.split(".")
@@ -57,9 +57,11 @@ These API's are described in the `CherryPy specification
 """

 try:
-    import pkg_resources
+    import importlib.metadata as importlib_metadata
 except ImportError:
-    pass
+    # fall back for python <= 3.7
+    # This try/except can be removed with py <= 3.7 support
+    import importlib_metadata

 from threading import local as _local

@@ -109,7 +111,7 @@ tree = _cptree.Tree()


 try:
-    __version__ = pkg_resources.require('cherrypy')[0].version
+    __version__ = importlib_metadata.version('cherrypy')
 except Exception:
     __version__ = 'unknown'

@@ -181,24 +183,28 @@ def quickstart(root=None, script_name='', config=None):
 class _Serving(_local):
     """An interface for registering request and response objects.

-    Rather than have a separate "thread local" object for the request and
-    the response, this class works as a single threadlocal container for
-    both objects (and any others which developers wish to define). In this
-    way, we can easily dump those objects when we stop/start a new HTTP
-    conversation, yet still refer to them as module-level globals in a
-    thread-safe way.
+    Rather than have a separate "thread local" object for the request
+    and the response, this class works as a single threadlocal container
+    for both objects (and any others which developers wish to define).
+    In this way, we can easily dump those objects when we stop/start a
+    new HTTP conversation, yet still refer to them as module-level
+    globals in a thread-safe way.
     """

     request = _cprequest.Request(_httputil.Host('127.0.0.1', 80),
                                  _httputil.Host('127.0.0.1', 1111))
+    """The request object for the current thread.
+
+    In the main thread, and any threads which are not receiving HTTP
+    requests, this is None.
     """
-    The request object for the current thread. In the main thread,
-    and any threads which are not receiving HTTP requests, this is None."""

     response = _cprequest.Response()
+    """The response object for the current thread.
+
+    In the main thread, and any threads which are not receiving HTTP
+    requests, this is None.
     """
-    The response object for the current thread. In the main thread,
-    and any threads which are not receiving HTTP requests, this is None."""

     def load(self, request, response):
         self.request = request

@@ -316,8 +322,8 @@ class _GlobalLogManager(_cplogging.LogManager):
     def __call__(self, *args, **kwargs):
         """Log the given message to the app.log or global log.

-        Log the given message to the app.log or global
-        log as appropriate.
+        Log the given message to the app.log or global log as
+        appropriate.
         """
         # Do NOT use try/except here. See
         # https://github.com/cherrypy/cherrypy/issues/945

@@ -330,8 +336,8 @@ class _GlobalLogManager(_cplogging.LogManager):
     def access(self):
         """Log an access message to the app.log or global log.

-        Log the given message to the app.log or global
-        log as appropriate.
+        Log the given message to the app.log or global log as
+        appropriate.
         """
         try:
             return request.app.log.access()
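A standalone sketch of the version-lookup pattern adopted above (importlib.metadata is stdlib from Python 3.8; the importlib_metadata backport covers 3.7 and older):

    try:
        import importlib.metadata as importlib_metadata
    except ImportError:
        import importlib_metadata  # backport, assumed installed on py<=3.7

    try:
        version = importlib_metadata.version('cherrypy')
    except Exception:
        version = 'unknown'
    print(version)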
@@ -313,7 +313,10 @@ class Checker(object):

     # -------------------- Specific config warnings -------------------- #
     def check_localhost(self):
-        """Warn if any socket_host is 'localhost'. See #711."""
+        """Warn if any socket_host is 'localhost'.
+
+        See #711.
+        """
         for k, v in cherrypy.config.items():
             if k == 'server.socket_host' and v == 'localhost':
                 warnings.warn("The use of 'localhost' as a socket host can "

@@ -1,5 +1,4 @@
-"""
-Configuration system for CherryPy.
-"""
+"""Configuration system for CherryPy.

 Configuration in CherryPy is implemented via dictionaries. Keys are strings
 which name the mapped value, which may be of any type.

@@ -132,8 +131,8 @@ def _if_filename_register_autoreload(ob):
 def merge(base, other):
     """Merge one app config (from a dict, file, or filename) into another.

-    If the given config is a filename, it will be appended to
-    the list of files to monitor for "autoreload" changes.
+    If the given config is a filename, it will be appended to the list
+    of files to monitor for "autoreload" changes.
     """
     _if_filename_register_autoreload(other)
@@ -1,9 +1,10 @@
 """CherryPy dispatchers.

 A 'dispatcher' is the object which looks up the 'page handler' callable
-and collects config for the current request based on the path_info, other
-request attributes, and the application architecture. The core calls the
-dispatcher as early as possible, passing it a 'path_info' argument.
+and collects config for the current request based on the path_info,
+other request attributes, and the application architecture. The core
+calls the dispatcher as early as possible, passing it a 'path_info'
+argument.

 The default dispatcher discovers the page handler by matching path_info
 to a hierarchical arrangement of objects, starting at request.app.root.

@@ -21,7 +22,6 @@ import cherrypy


 class PageHandler(object):
-
     """Callable which sets response.body."""

     def __init__(self, callable, *args, **kwargs):

@@ -64,8 +64,7 @@ class PageHandler(object):


 def test_callable_spec(callable, callable_args, callable_kwargs):
-    """
-    Inspect callable and test to see if the given args are suitable for it.
+    """Inspect callable and test to see if the given args are suitable for it.

     When an error occurs during the handler's invoking stage there are 2
     erroneous cases:

@@ -252,16 +251,16 @@ else:


 class Dispatcher(object):
-
     """CherryPy Dispatcher which walks a tree of objects to find a handler.

-    The tree is rooted at cherrypy.request.app.root, and each hierarchical
-    component in the path_info argument is matched to a corresponding nested
-    attribute of the root object. Matching handlers must have an 'exposed'
-    attribute which evaluates to True. The special method name "index"
-    matches a URI which ends in a slash ("/"). The special method name
-    "default" may match a portion of the path_info (but only when no longer
-    substring of the path_info matches some other object).
+    The tree is rooted at cherrypy.request.app.root, and each
+    hierarchical component in the path_info argument is matched to a
+    corresponding nested attribute of the root object. Matching handlers
+    must have an 'exposed' attribute which evaluates to True. The
+    special method name "index" matches a URI which ends in a slash
+    ("/"). The special method name "default" may match a portion of the
+    path_info (but only when no longer substring of the path_info
+    matches some other object).

     This is the default, built-in dispatcher for CherryPy.
     """

@@ -306,9 +305,9 @@ class Dispatcher(object):

         The second object returned will be a list of names which are
         'virtual path' components: parts of the URL which are dynamic,
-        and were not used when looking up the handler.
-        These virtual path components are passed to the handler as
-        positional arguments.
+        and were not used when looking up the handler. These virtual
+        path components are passed to the handler as positional
+        arguments.
         """
         request = cherrypy.serving.request
         app = request.app

@@ -448,13 +447,11 @@ class Dispatcher(object):


 class MethodDispatcher(Dispatcher):
-
     """Additional dispatch based on cherrypy.request.method.upper().

-    Methods named GET, POST, etc will be called on an exposed class.
-    The method names must be all caps; the appropriate Allow header
-    will be output showing all capitalized method names as allowable
-    HTTP verbs.
+    Methods named GET, POST, etc will be called on an exposed class. The
+    method names must be all caps; the appropriate Allow header will be
+    output showing all capitalized method names as allowable HTTP verbs.

     Note that the containing class must be exposed, not the methods.
     """

@@ -492,16 +489,14 @@ class MethodDispatcher(Dispatcher):


 class RoutesDispatcher(object):
-
     """A Routes based dispatcher for CherryPy."""

     def __init__(self, full_result=False, **mapper_options):
-        """
-        Routes dispatcher
+        """Routes dispatcher.

-        Set full_result to True if you wish the controller
-        and the action to be passed on to the page handler
-        parameters. By default they won't be.
+        Set full_result to True if you wish the controller and the
+        action to be passed on to the page handler parameters. By
+        default they won't be.
         """
         import routes
         self.full_result = full_result

@@ -617,8 +612,7 @@ def XMLRPCDispatcher(next_dispatcher=Dispatcher()):

 def VirtualHost(next_dispatcher=Dispatcher(), use_x_forwarded_host=True,
                 **domains):
-    """
-    Select a different handler based on the Host header.
+    """Select a different handler based on the Host header.

     This can be useful when running multiple sites within one CP server.
     It allows several domains to point to different parts of a single
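For context, a minimal MethodDispatcher setup matching the docstring above (standard CherryPy usage, not part of this diff):

    import cherrypy

    class Resource:
        exposed = True  # the class, not the methods, must be exposed

        def GET(self):
            return 'read'

        def PUT(self):
            return 'update'

    conf = {'/': {'request.dispatch': cherrypy.dispatch.MethodDispatcher()}}
    # cherrypy.quickstart(Resource(), '/', conf)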
@@ -136,19 +136,17 @@ from cherrypy.lib import httputil as _httputil


 class CherryPyException(Exception):
-
     """A base class for CherryPy exceptions."""
     pass


 class InternalRedirect(CherryPyException):
-
     """Exception raised to switch to the handler for a different URL.

-    This exception will redirect processing to another path within the site
-    (without informing the client). Provide the new path as an argument when
-    raising the exception. Provide any params in the querystring for the new
-    URL.
+    This exception will redirect processing to another path within the
+    site (without informing the client). Provide the new path as an
+    argument when raising the exception. Provide any params in the
+    querystring for the new URL.
     """

     def __init__(self, path, query_string=''):

@@ -173,7 +171,6 @@ class InternalRedirect(CherryPyException):


 class HTTPRedirect(CherryPyException):
-
     """Exception raised when the request should be redirected.

     This exception will force a HTTP redirect to the URL or URL's you give it.

@@ -202,7 +199,7 @@ class HTTPRedirect(CherryPyException):
     """The list of URL's to emit."""

     encoding = 'utf-8'
-    """The encoding when passed urls are not native strings"""
+    """The encoding when passed urls are not native strings."""

     def __init__(self, urls, status=None, encoding=None):
         self.urls = abs_urls = [

@@ -230,8 +227,7 @@ class HTTPRedirect(CherryPyException):

     @classproperty
     def default_status(cls):
-        """
-        The default redirect status for the request.
+        """The default redirect status for the request.

         RFC 2616 indicates a 301 response code fits our goal; however,
         browser support for 301 is quite messy. Use 302/303 instead. See

@@ -249,8 +245,9 @@ class HTTPRedirect(CherryPyException):
         """Modify cherrypy.response status, headers, and body to represent
         self.

-        CherryPy uses this internally, but you can also use it to create an
-        HTTPRedirect object and set its output without *raising* the exception.
+        CherryPy uses this internally, but you can also use it to create
+        an HTTPRedirect object and set its output without *raising* the
+        exception.
         """
         response = cherrypy.serving.response
         response.status = status = self.status

@@ -339,7 +336,6 @@ def clean_headers(status):


 class HTTPError(CherryPyException):
-
     """Exception used to return an HTTP error code (4xx-5xx) to the client.

     This exception can be used to automatically send a response using a

@@ -358,7 +354,9 @@ class HTTPError(CherryPyException):
     """

     status = None
-    """The HTTP status code. May be of type int or str (with a Reason-Phrase).
+    """The HTTP status code.
+
+    May be of type int or str (with a Reason-Phrase).
     """

     code = None

@@ -386,8 +384,9 @@ class HTTPError(CherryPyException):
         """Modify cherrypy.response status, headers, and body to represent
         self.

-        CherryPy uses this internally, but you can also use it to create an
-        HTTPError object and set its output without *raising* the exception.
+        CherryPy uses this internally, but you can also use it to create
+        an HTTPError object and set its output without *raising* the
+        exception.
         """
         response = cherrypy.serving.response

@@ -426,11 +425,10 @@ class HTTPError(CherryPyException):


 class NotFound(HTTPError):
-
     """Exception raised when a URL could not be mapped to any handler (404).

-    This is equivalent to raising
-    :class:`HTTPError("404 Not Found") <cherrypy._cperror.HTTPError>`.
+    This is equivalent to raising :class:`HTTPError("404 Not Found")
+    <cherrypy._cperror.HTTPError>`.
     """

     def __init__(self, path=None):

@@ -477,8 +475,8 @@ _HTTPErrorTemplate = '''<!DOCTYPE html PUBLIC
 def get_error_page(status, **kwargs):
     """Return an HTML page, containing a pretty error response.

-    status should be an int or a str.
-    kwargs will be interpolated into the page template.
+    status should be an int or a str. kwargs will be interpolated into
+    the page template.
     """
     try:
         code, reason, message = _httputil.valid_status(status)

@@ -595,8 +593,8 @@ def bare_error(extrabody=None):
     """Produce status, headers, body for a critical error.

     Returns a triple without calling any other questionable functions,
-    so it should be as error-free as possible. Call it from an HTTP server
-    if you get errors outside of the request.
+    so it should be as error-free as possible. Call it from an HTTP
+    server if you get errors outside of the request.

     If extrabody is None, a friendly but rather unhelpful error message
     is set in the body. If extrabody is a string, it will be appended
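Typical use of the exceptions documented above inside a page handler (standard CherryPy API, shown for context):

    import cherrypy

    class App:
        @cherrypy.expose
        def old(self):
            raise cherrypy.HTTPRedirect('/new')   # 302/303 by default

        @cherrypy.expose
        def secret(self):
            raise cherrypy.HTTPError(403, 'Forbidden')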
@@ -123,7 +123,6 @@ logfmt = logging.Formatter('%(message)s')


 class NullHandler(logging.Handler):
-
     """A no-op logging handler to silence the logging.lastResort handler."""

     def handle(self, record):

@@ -137,15 +136,16 @@ class NullHandler(logging.Handler):


 class LogManager(object):
-
     """An object to assist both simple and advanced logging.

     ``cherrypy.log`` is an instance of this class.
     """

     appid = None
-    """The id() of the Application object which owns this log manager. If this
-    is a global log manager, appid is None."""
+    """The id() of the Application object which owns this log manager.
+
+    If this is a global log manager, appid is None.
+    """

     error_log = None
     """The actual :class:`logging.Logger` instance for error messages."""

@@ -317,8 +317,8 @@ class LogManager(object):
     def screen(self):
         """Turn stderr/stdout logging on or off.

-        If you set this to True, it'll add the appropriate StreamHandler for
-        you. If you set it to False, it will remove the handler.
+        If you set this to True, it'll add the appropriate StreamHandler
+        for you. If you set it to False, it will remove the handler.
         """
         h = self._get_builtin_handler
         has_h = h(self.error_log, 'screen') or h(self.access_log, 'screen')

@@ -414,7 +414,6 @@ class LogManager(object):


 class WSGIErrorHandler(logging.Handler):
-
     "A handler class which writes logging records to environ['wsgi.errors']."

     def flush(self):

@@ -452,6 +451,8 @@ class WSGIErrorHandler(logging.Handler):

 class LazyRfc3339UtcTime(object):
     def __str__(self):
-        """Return utcnow() in RFC3339 UTC Format."""
-        iso_formatted_now = datetime.datetime.utcnow().isoformat('T')
+        """Return datetime in RFC3339 UTC Format."""
+        iso_formatted_now = datetime.datetime.now(
+            datetime.timezone.utc,
+        ).isoformat('T')
         return f'{iso_formatted_now!s}Z'
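The LazyRfc3339UtcTime change swaps the deprecated naive utcnow() for a timezone-aware datetime; a standalone equivalent of the new formatting (standard library only):

    import datetime

    # Aware UTC "now", formatted the same way as the log manager above.
    iso_formatted_now = datetime.datetime.now(datetime.timezone.utc).isoformat('T')
    print(f'{iso_formatted_now!s}Z')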
@@ -1,4 +1,4 @@
-"""Native adapter for serving CherryPy via mod_python
+"""Native adapter for serving CherryPy via mod_python.

 Basic usage:


@@ -120,10 +120,10 @@ class NativeGateway(cheroot.server.Gateway):
 class CPHTTPServer(cheroot.server.HTTPServer):
     """Wrapper for cheroot.server.HTTPServer.

-    cheroot has been designed to not reference CherryPy in any way,
-    so that it can be used in other frameworks and applications.
-    Therefore, we wrap it here, so we can apply some attributes
-    from config -> cherrypy.server -> HTTPServer.
+    cheroot has been designed to not reference CherryPy in any way, so
+    that it can be used in other frameworks and applications. Therefore,
+    we wrap it here, so we can apply some attributes from config ->
+    cherrypy.server -> HTTPServer.
     """

     def __init__(self, server_adapter=cherrypy.server):
@@ -248,7 +248,10 @@ def process_multipart_form_data(entity):


 def _old_process_multipart(entity):
-    """The behavior of 3.2 and lower. Deprecated and will be changed in 3.3."""
+    """The behavior of 3.2 and lower.
+
+    Deprecated and will be changed in 3.3.
+    """
     process_multipart(entity)

     params = entity.params

@@ -277,7 +280,6 @@ def _old_process_multipart(entity):

 # -------------------------------- Entities --------------------------------- #
 class Entity(object):
-
     """An HTTP request body, or MIME multipart body.

     This class collects information about the HTTP request entity. When a

@@ -346,13 +348,15 @@ class Entity(object):
     content_type = None
     """The value of the Content-Type request header.

-    If the Entity is part of a multipart payload, this will be the Content-Type
-    given in the MIME headers for this part.
+    If the Entity is part of a multipart payload, this will be the
+    Content-Type given in the MIME headers for this part.
     """

     default_content_type = 'application/x-www-form-urlencoded'
     """This defines a default ``Content-Type`` to use if no Content-Type header
-    is given. The empty string is used for RequestBody, which results in the
+    is given.
+
+    The empty string is used for RequestBody, which results in the
     request body not being read or parsed at all. This is by design; a missing
     ``Content-Type`` header in the HTTP request entity is an error at best,
     and a security hole at worst. For multipart parts, however, the MIME spec

@@ -402,8 +406,8 @@ class Entity(object):
     part_class = None
     """The class used for multipart parts.

-    You can replace this with custom subclasses to alter the processing of
-    multipart parts.
+    You can replace this with custom subclasses to alter the processing
+    of multipart parts.
     """

     def __init__(self, fp, headers, params=None, parts=None):

@@ -509,7 +513,8 @@ class Entity(object):
         """Return a file-like object into which the request body will be read.

         By default, this will return a TemporaryFile. Override as needed.
-        See also :attr:`cherrypy._cpreqbody.Part.maxrambytes`."""
+        See also :attr:`cherrypy._cpreqbody.Part.maxrambytes`.
+        """
         return tempfile.TemporaryFile()

     def fullvalue(self):

@@ -525,7 +530,7 @@ class Entity(object):
         return value

     def decode_entity(self, value):
-        """Return a given byte encoded value as a string"""
+        """Return a given byte encoded value as a string."""
         for charset in self.attempt_charsets:
             try:
                 value = value.decode(charset)

@@ -569,7 +574,6 @@ class Entity(object):


 class Part(Entity):
-
     """A MIME part entity, part of a multipart entity."""

     # "The default character set, which must be assumed in the absence of a

@@ -653,8 +657,8 @@ class Part(Entity):
     def read_lines_to_boundary(self, fp_out=None):
         """Read bytes from self.fp and return or write them to a file.

-        If the 'fp_out' argument is None (the default), all bytes read are
-        returned in a single byte string.
+        If the 'fp_out' argument is None (the default), all bytes read
+        are returned in a single byte string.

         If the 'fp_out' argument is not None, it must be a file-like
         object that supports the 'write' method; all bytes read will be

@@ -755,15 +759,15 @@ class SizedReader:
     def read(self, size=None, fp_out=None):
         """Read bytes from the request body and return or write them to a file.

-        A number of bytes less than or equal to the 'size' argument are read
-        off the socket. The actual number of bytes read are tracked in
-        self.bytes_read. The number may be smaller than 'size' when 1) the
-        client sends fewer bytes, 2) the 'Content-Length' request header
-        specifies fewer bytes than requested, or 3) the number of bytes read
-        exceeds self.maxbytes (in which case, 413 is raised).
+        A number of bytes less than or equal to the 'size' argument are
+        read off the socket. The actual number of bytes read are tracked
+        in self.bytes_read. The number may be smaller than 'size' when
+        1) the client sends fewer bytes, 2) the 'Content-Length' request
+        header specifies fewer bytes than requested, or 3) the number of
+        bytes read exceeds self.maxbytes (in which case, 413 is raised).

-        If the 'fp_out' argument is None (the default), all bytes read are
-        returned in a single byte string.
+        If the 'fp_out' argument is None (the default), all bytes read
+        are returned in a single byte string.

         If the 'fp_out' argument is not None, it must be a file-like
         object that supports the 'write' method; all bytes read will be

@@ -918,7 +922,6 @@ class SizedReader:


 class RequestBody(Entity):
-
     """The entity of the HTTP request."""

     bufsize = 8 * 1024
@ -16,7 +16,6 @@ from cherrypy.lib import httputil, reprconf, encoding
|
|||
|
||||
|
||||
class Hook(object):
|
||||
|
||||
"""A callback and its metadata: failsafe, priority, and kwargs."""
|
||||
|
||||
callback = None
|
||||
|
@ -30,10 +29,12 @@ class Hook(object):
|
|||
from the same call point raise exceptions."""
|
||||
|
||||
priority = 50
|
||||
"""Defines the order of execution for a list of Hooks.
|
||||
|
||||
Priority numbers should be limited to the closed interval [0, 100],
|
||||
but values outside this range are acceptable, as are fractional
|
||||
values.
|
||||
"""
|
||||
Defines the order of execution for a list of Hooks. Priority numbers
|
||||
should be limited to the closed interval [0, 100], but values outside
|
||||
this range are acceptable, as are fractional values."""
|
||||
|
||||
kwargs = {}
|
||||
"""
|
||||
|
@ -74,7 +75,6 @@ class Hook(object):
|
|||
|
||||
|
||||
class HookMap(dict):
|
||||
|
||||
"""A map of call points to lists of callbacks (Hook objects)."""
|
||||
|
||||
def __new__(cls, points=None):
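Note: the Hook.priority docstring above is the contract used when several callbacks share a hook point. A short sketch using the standard HookMap.attach API; the callback itself is invented::

    import cherrypy

    def log_handler_entry():
        cherrypy.log('about to run the page handler')

    # Lower numbers run earlier; the default priority is 50.
    cherrypy.request.hooks.attach(
        'before_handler', log_handler_entry, priority=40)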
@@ -190,23 +190,23 @@ hookpoints = ['on_start_resource', 'before_request_body',
 
 
 class Request(object):
-
     """An HTTP request.
 
-    This object represents the metadata of an HTTP request message;
-    that is, it contains attributes which describe the environment
-    in which the request URL, headers, and body were sent (if you
-    want tools to interpret the headers and body, those are elsewhere,
-    mostly in Tools). This 'metadata' consists of socket data,
-    transport characteristics, and the Request-Line. This object
-    also contains data regarding the configuration in effect for
-    the given URL, and the execution plan for generating a response.
+    This object represents the metadata of an HTTP request message; that
+    is, it contains attributes which describe the environment in which
+    the request URL, headers, and body were sent (if you want tools to
+    interpret the headers and body, those are elsewhere, mostly in
+    Tools). This 'metadata' consists of socket data, transport
+    characteristics, and the Request-Line. This object also contains
+    data regarding the configuration in effect for the given URL, and
+    the execution plan for generating a response.
     """
 
     prev = None
-    """
-    The previous Request object (if any). This should be None
-    unless we are processing an InternalRedirect."""
+    """The previous Request object (if any).
+
+    This should be None unless we are processing an InternalRedirect.
+    """
 
     # Conversation/connection attributes
     local = httputil.Host('127.0.0.1', 80)
@@ -216,9 +216,10 @@ class Request(object):
     'An httputil.Host(ip, port, hostname) object for the client socket.'
 
     scheme = 'http'
-    """
-    The protocol used between client and server. In most cases,
-    this will be either 'http' or 'https'."""
+    """The protocol used between client and server.
+
+    In most cases, this will be either 'http' or 'https'.
+    """
 
     server_protocol = 'HTTP/1.1'
     """
@@ -227,25 +228,30 @@ class Request(object):
 
     base = ''
     """The (scheme://host) portion of the requested URL.
 
     In some cases (e.g. when proxying via mod_rewrite), this may contain
     path segments which cherrypy.url uses when constructing url's, but
-    which otherwise are ignored by CherryPy. Regardless, this value
-    MUST NOT end in a slash."""
+    which otherwise are ignored by CherryPy. Regardless, this value MUST
+    NOT end in a slash.
+    """
 
     # Request-Line attributes
     request_line = ''
-    """
-    The complete Request-Line received from the client. This is a
-    single string consisting of the request method, URI, and protocol
-    version (joined by spaces). Any final CRLF is removed."""
+    """The complete Request-Line received from the client.
+
+    This is a single string consisting of the request method, URI, and
+    protocol version (joined by spaces). Any final CRLF is removed.
+    """
 
     method = 'GET'
-    """
-    Indicates the HTTP method to be performed on the resource identified
-    by the Request-URI. Common methods include GET, HEAD, POST, PUT, and
-    DELETE. CherryPy allows any extension method; however, various HTTP
-    servers and gateways may restrict the set of allowable methods.
-    CherryPy applications SHOULD restrict the set (on a per-URI basis)."""
+    """Indicates the HTTP method to be performed on the resource identified by
+    the Request-URI.
+
+    Common methods include GET, HEAD, POST, PUT, and DELETE. CherryPy
+    allows any extension method; however, various HTTP servers and
+    gateways may restrict the set of allowable methods. CherryPy
+    applications SHOULD restrict the set (on a per-URI basis).
+    """
 
     query_string = ''
     """
@@ -277,22 +283,26 @@ class Request(object):
     A dict which combines query string (GET) and request entity (POST)
     variables. This is populated in two stages: GET params are added
     before the 'on_start_resource' hook, and POST params are added
-    between the 'before_request_body' and 'before_handler' hooks."""
+    between the 'before_request_body' and 'before_handler' hooks.
+    """
 
     # Message attributes
     header_list = []
-    """
-    A list of the HTTP request headers as (name, value) tuples.
-    In general, you should use request.headers (a dict) instead."""
+    """A list of the HTTP request headers as (name, value) tuples.
+
+    In general, you should use request.headers (a dict) instead.
+    """
 
     headers = httputil.HeaderMap()
-    """
-    A dict-like object containing the request headers. Keys are header
-    names (in Title-Case format); however, you may get and set them in
-    a case-insensitive manner. That is, headers['Content-Type'] and
-    headers['content-type'] refer to the same value. Values are header
-    values (decoded according to :rfc:`2047` if necessary). See also:
-    httputil.HeaderMap, httputil.HeaderElement."""
+    """A dict-like object containing the request headers.
+
+    Keys are header
+    names (in Title-Case format); however, you may get and set them in
+    a case-insensitive manner. That is, headers['Content-Type'] and
+    headers['content-type'] refer to the same value. Values are header
+    values (decoded according to :rfc:`2047` if necessary). See also:
+    httputil.HeaderMap, httputil.HeaderElement.
+    """
 
     cookie = SimpleCookie()
     """See help(Cookie)."""
@@ -336,7 +346,8 @@ class Request(object):
     or multipart, this will be None. Otherwise, this will be an instance
     of :class:`RequestBody<cherrypy._cpreqbody.RequestBody>` (which you
     can .read()); this value is set between the 'before_request_body' and
-    'before_handler' hooks (assuming that process_request_body is True)."""
+    'before_handler' hooks (assuming that process_request_body is True).
+    """
 
     # Dispatch attributes
     dispatch = cherrypy.dispatch.Dispatcher()
@@ -347,23 +358,24 @@ class Request(object):
     calls the dispatcher as early as possible, passing it a 'path_info'
     argument.
 
-    The default dispatcher discovers the page handler by matching path_info
-    to a hierarchical arrangement of objects, starting at request.app.root.
-    See help(cherrypy.dispatch) for more information."""
+    The default dispatcher discovers the page handler by matching
+    path_info to a hierarchical arrangement of objects, starting at
+    request.app.root. See help(cherrypy.dispatch) for more information.
+    """
 
     script_name = ''
-    """
-    The 'mount point' of the application which is handling this request.
+    """The 'mount point' of the application which is handling this request.
+
     This attribute MUST NOT end in a slash. If the script_name refers to
     the root of the URI, it MUST be an empty string (not "/").
     """
 
     path_info = '/'
-    """
-    The 'relative path' portion of the Request-URI. This is relative
-    to the script_name ('mount point') of the application which is
-    handling this request."""
+    """The 'relative path' portion of the Request-URI.
+
+    This is relative to the script_name ('mount point') of the
+    application which is handling this request.
+    """
 
     login = None
     """
@@ -391,14 +403,16 @@ class Request(object):
     of the form: {Toolbox.namespace: {Tool.name: config dict}}."""
 
     config = None
-    """
-    A flat dict of all configuration entries which apply to the
-    current request. These entries are collected from global config,
-    application config (based on request.path_info), and from handler
-    config (exactly how is governed by the request.dispatch object in
-    effect for this request; by default, handler config can be attached
-    anywhere in the tree between request.app.root and the final handler,
-    and inherits downward)."""
+    """A flat dict of all configuration entries which apply to the current
+    request.
+
+    These entries are collected from global config, application config
+    (based on request.path_info), and from handler config (exactly how
+    is governed by the request.dispatch object in effect for this
+    request; by default, handler config can be attached anywhere in the
+    tree between request.app.root and the final handler, and inherits
+    downward).
+    """
 
     is_index = None
     """
@@ -409,13 +423,14 @@ class Request(object):
     the trailing slash. See cherrypy.tools.trailing_slash."""
 
     hooks = HookMap(hookpoints)
-    """
-    A HookMap (dict-like object) of the form: {hookpoint: [hook, ...]}.
+    """A HookMap (dict-like object) of the form: {hookpoint: [hook, ...]}.
+
     Each key is a str naming the hook point, and each value is a list
     of hooks which will be called at that hook point during this request.
     The list of hooks is generally populated as early as possible (mostly
     from Tools specified in config), but may be extended at any time.
-    See also: _cprequest.Hook, _cprequest.HookMap, and cherrypy.tools."""
+    See also: _cprequest.Hook, _cprequest.HookMap, and cherrypy.tools.
+    """
 
     error_response = cherrypy.HTTPError(500).set_response
     """
@@ -428,12 +443,11 @@ class Request(object):
     error response to the user-agent."""
 
     error_page = {}
-    """
-    A dict of {error code: response filename or callable} pairs.
+    """A dict of {error code: response filename or callable} pairs.
+
     The error code must be an int representing a given HTTP error code,
-    or the string 'default', which will be used if no matching entry
-    is found for a given numeric code.
+    or the string 'default', which will be used if no matching entry is
+    found for a given numeric code.
 
     If a filename is provided, the file should contain a Python string-
     formatting template, and can expect by default to receive format
@@ -447,8 +461,8 @@ class Request(object):
     iterable of strings which will be set to response.body. It may also
     override headers or perform any other processing.
 
-    If no entry is given for an error code, and no 'default' entry exists,
-    a default template will be used.
+    If no entry is given for an error code, and no 'default' entry
+    exists, a default template will be used.
     """
 
     show_tracebacks = True
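Note: per the error_page docstring above, entries may be template filenames or callables. A minimal sketch; the handler body and file path are placeholders::

    import cherrypy

    def error_404(status, message, traceback, version):
        # error_page callables receive these four keyword arguments
        # and return the response body.
        return 'Nothing here: %s' % message

    cherrypy.config.update({
        'error_page.404': error_404,
        'error_page.default': '/path/to/default_error.html',
    })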
@@ -473,9 +487,10 @@ class Request(object):
     """True once the close method has been called, False otherwise."""
 
     stage = None
-    """
-    A string containing the stage reached in the request-handling process.
-    This is useful when debugging a live server with hung requests."""
+    """A string containing the stage reached in the request-handling process.
+
+    This is useful when debugging a live server with hung requests.
+    """
 
     unique_id = None
     """A lazy object generating and memorizing UUID4 on ``str()`` render."""
@@ -492,9 +507,10 @@ class Request(object):
                  server_protocol='HTTP/1.1'):
         """Populate a new Request object.
 
-        local_host should be an httputil.Host object with the server info.
-        remote_host should be an httputil.Host object with the client info.
-        scheme should be a string, either "http" or "https".
+        local_host should be an httputil.Host object with the server
+        info. remote_host should be an httputil.Host object with the
+        client info. scheme should be a string, either "http" or
+        "https".
         """
         self.local = local_host
         self.remote = remote_host
@@ -514,7 +530,10 @@ class Request(object):
         self.unique_id = LazyUUID4()
 
     def close(self):
-        """Run cleanup code. (Core)"""
+        """Run cleanup code.
+
+        (Core)
+        """
         if not self.closed:
             self.closed = True
             self.stage = 'on_end_request'
@@ -551,7 +570,6 @@ class Request(object):
 
         Consumer code (HTTP servers) should then access these response
         attributes to build the outbound stream.
-
         """
         response = cherrypy.serving.response
         self.stage = 'run'
@@ -631,7 +649,10 @@ class Request(object):
         return response
 
     def respond(self, path_info):
-        """Generate a response for the resource at self.path_info. (Core)"""
+        """Generate a response for the resource at self.path_info.
+
+        (Core)
+        """
         try:
             try:
                 try:
@@ -702,7 +723,10 @@ class Request(object):
         response.finalize()
 
     def process_query_string(self):
-        """Parse the query string into Python structures. (Core)"""
+        """Parse the query string into Python structures.
+
+        (Core)
+        """
         try:
             p = httputil.parse_query_string(
                 self.query_string, encoding=self.query_string_encoding)
@@ -715,7 +739,10 @@ class Request(object):
         self.params.update(p)
 
     def process_headers(self):
-        """Parse HTTP header data into Python structures. (Core)"""
+        """Parse HTTP header data into Python structures.
+
+        (Core)
+        """
         # Process the headers into self.headers
         headers = self.headers
         for name, value in self.header_list:
@@ -751,7 +778,10 @@ class Request(object):
         self.base = '%s://%s' % (self.scheme, host)
 
     def get_resource(self, path):
-        """Call a dispatcher (which sets self.handler and .config). (Core)"""
+        """Call a dispatcher (which sets self.handler and .config).
+
+        (Core)
+        """
         # First, see if there is a custom dispatch at this URI. Custom
         # dispatchers can only be specified in app.config, not in _cp_config
         # (since custom dispatchers may not even have an app.root).
@@ -762,7 +792,10 @@ class Request(object):
         dispatch(path)
 
     def handle_error(self):
-        """Handle the last unanticipated exception. (Core)"""
+        """Handle the last unanticipated exception.
+
+        (Core)
+        """
         try:
             self.hooks.run('before_error_response')
             if self.error_response:
@@ -776,7 +809,6 @@ class Request(object):
 
 
 class ResponseBody(object):
-
     """The body of the HTTP response (the response entity)."""
 
     unicode_err = ('Page handlers MUST return bytes. Use tools.encode '
@@ -802,18 +834,18 @@ class ResponseBody(object):
 
 
 class Response(object):
-
     """An HTTP Response, including status, headers, and body."""
 
     status = ''
     """The HTTP Status-Code and Reason-Phrase."""
 
     header_list = []
-    """
-    A list of the HTTP response headers as (name, value) tuples.
+    """A list of the HTTP response headers as (name, value) tuples.
+
     In general, you should use response.headers (a dict) instead. This
     attribute is generated from response.headers and is not valid until
-    after the finalize phase."""
+    after the finalize phase.
+    """
 
     headers = httputil.HeaderMap()
     """
@@ -833,7 +865,10 @@ class Response(object):
     """The body (entity) of the HTTP response."""
 
     time = None
-    """The value of time.time() when created. Use in HTTP dates."""
+    """The value of time.time() when created.
+
+    Use in HTTP dates.
+    """
 
     stream = False
     """If False, buffer the response body."""
@@ -861,15 +896,15 @@ class Response(object):
         return new_body
 
     def _flush_body(self):
-        """
-        Discard self.body but consume any generator such that
-        any finalization can occur, such as is required by
-        caching.tee_output().
-        """
+        """Discard self.body but consume any generator such that any
+        finalization can occur, such as is required by caching.tee_output()."""
         consume(iter(self.body))
 
     def finalize(self):
-        """Transform headers (and cookies) into self.header_list. (Core)"""
+        """Transform headers (and cookies) into self.header_list.
+
+        (Core)
+        """
         try:
             code, reason, _ = httputil.valid_status(self.status)
         except ValueError:
lib/cherrypy/_cpserver.py
@@ -50,7 +50,8 @@ class Server(ServerAdapter):
     """If given, the name of the UNIX socket to use instead of TCP/IP.
 
     When this option is not None, the `socket_host` and `socket_port` options
-    are ignored."""
+    are ignored.
+    """
 
     socket_queue_size = 5
     """The 'backlog' argument to socket.listen(); specifies the maximum number
@@ -79,17 +80,24 @@ class Server(ServerAdapter):
     """The number of worker threads to start up in the pool."""
 
     thread_pool_max = -1
-    """The maximum size of the worker-thread pool. Use -1 to indicate no limit.
-    """
+    """The maximum size of the worker-thread pool.
+
+    Use -1 to indicate no limit.
+    """
 
     max_request_header_size = 500 * 1024
     """The maximum number of bytes allowable in the request headers.
-    If exceeded, the HTTP server should return "413 Request Entity Too Large".
+
+    If exceeded, the HTTP server should return "413 Request Entity Too
+    Large".
     """
 
     max_request_body_size = 100 * 1024 * 1024
-    """The maximum number of bytes allowable in the request body. If exceeded,
-    the HTTP server should return "413 Request Entity Too Large"."""
+    """The maximum number of bytes allowable in the request body.
+
+    If exceeded, the HTTP server should return "413 Request Entity Too
+    Large".
+    """
 
     instance = None
     """If not None, this should be an HTTP server instance (such as
@@ -119,7 +127,8 @@ class Server(ServerAdapter):
     the builtin WSGI server. Builtin options are: 'builtin' (to
     use the SSL library built into recent versions of Python).
     You may also register your own classes in the
-    cheroot.server.ssl_adapters dict."""
+    cheroot.server.ssl_adapters dict.
+    """
 
     statistics = False
     """Turns statistics-gathering on or off for aware HTTP servers."""
@@ -129,11 +138,13 @@ class Server(ServerAdapter):
 
     wsgi_version = (1, 0)
     """The WSGI version tuple to use with the builtin WSGI server.
-    The provided options are (1, 0) [which includes support for PEP 3333,
-    which declares it covers WSGI version 1.0.1 but still mandates the
-    wsgi.version (1, 0)] and ('u', 0), an experimental unicode version.
-    You may create and register your own experimental versions of the WSGI
-    protocol by adding custom classes to the cheroot.server.wsgi_gateways dict.
+
+    The provided options are (1, 0) [which includes support for PEP
+    3333, which declares it covers WSGI version 1.0.1 but still mandates
+    the wsgi.version (1, 0)] and ('u', 0), an experimental unicode
+    version. You may create and register your own experimental versions
+    of the WSGI protocol by adding custom classes to the
+    cheroot.server.wsgi_gateways dict.
     """
 
     peercreds = False
@@ -184,7 +195,8 @@ class Server(ServerAdapter):
     def bind_addr(self):
         """Return bind address.
 
-        A (host, port) tuple for TCP sockets or a str for Unix domain sockts.
+        A (host, port) tuple for TCP sockets or a str for Unix domain
+        sockets.
         """
         if self.socket_file:
             return self.socket_file
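Note: each Server attribute in this section is also reachable through a server.* config key. A sketch setting the ones touched by this hunk; the values shown are the documented defaults, repeated for illustration::

    import cherrypy

    cherrypy.config.update({
        'server.thread_pool': 10,
        'server.thread_pool_max': -1,  # -1 means no limit
        'server.max_request_header_size': 500 * 1024,
        'server.max_request_body_size': 100 * 1024 * 1024,
    })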
lib/cherrypy/_cptools.py
@@ -1,7 +1,7 @@
 """CherryPy tools. A "tool" is any helper, adapted to CP.
 
-Tools are usually designed to be used in a variety of ways (although some
-may only offer one if they choose):
+Tools are usually designed to be used in a variety of ways (although
+some may only offer one if they choose):
 
 Library calls
     All tools are callables that can be used wherever needed.
@@ -48,10 +48,10 @@ _attr_error = (
 
 
 class Tool(object):
-
     """A registered function for use with CherryPy request-processing hooks.
 
-    help(tool.callable) should give you more information about this Tool.
+    help(tool.callable) should give you more information about this
+    Tool.
     """
 
     namespace = 'tools'
@@ -135,8 +135,8 @@ class Tool(object):
     def _setup(self):
         """Hook this tool into cherrypy.request.
 
-        The standard CherryPy request object will automatically call this
-        method when the tool is "turned on" in config.
+        The standard CherryPy request object will automatically call
+        this method when the tool is "turned on" in config.
         """
         conf = self._merged_args()
         p = conf.pop('priority', None)
@@ -147,15 +147,15 @@ class Tool(object):
 
 
 class HandlerTool(Tool):
-
     """Tool which is called 'before main', that may skip normal handlers.
 
-    If the tool successfully handles the request (by setting response.body),
-    if should return True. This will cause CherryPy to skip any 'normal' page
-    handler. If the tool did not handle the request, it should return False
-    to tell CherryPy to continue on and call the normal page handler. If the
-    tool is declared AS a page handler (see the 'handler' method), returning
-    False will raise NotFound.
+    If the tool successfully handles the request (by setting
+    response.body), if should return True. This will cause CherryPy to
+    skip any 'normal' page handler. If the tool did not handle the
+    request, it should return False to tell CherryPy to continue on and
+    call the normal page handler. If the tool is declared AS a page
+    handler (see the 'handler' method), returning False will raise
+    NotFound.
     """
 
     def __init__(self, callable, name=None):
@@ -185,8 +185,8 @@ class HandlerTool(Tool):
     def _setup(self):
         """Hook this tool into cherrypy.request.
 
-        The standard CherryPy request object will automatically call this
-        method when the tool is "turned on" in config.
+        The standard CherryPy request object will automatically call
+        this method when the tool is "turned on" in config.
         """
         conf = self._merged_args()
         p = conf.pop('priority', None)
@@ -197,7 +197,6 @@ class HandlerTool(Tool):
 
 
 class HandlerWrapperTool(Tool):
-
     """Tool which wraps request.handler in a provided wrapper function.
 
     The 'newhandler' arg must be a handler wrapper function that takes a
@@ -232,7 +231,6 @@ class HandlerWrapperTool(Tool):
 
 
 class ErrorTool(Tool):
-
     """Tool which is used to replace the default request.error_response."""
 
     def __init__(self, callable, name=None):
@@ -244,8 +242,8 @@ class ErrorTool(Tool):
     def _setup(self):
         """Hook this tool into cherrypy.request.
 
-        The standard CherryPy request object will automatically call this
-        method when the tool is "turned on" in config.
+        The standard CherryPy request object will automatically call
+        this method when the tool is "turned on" in config.
         """
         cherrypy.serving.request.error_response = self._wrapper
 
@@ -254,7 +252,6 @@ class ErrorTool(Tool):
 
 
 class SessionTool(Tool):
-
     """Session Tool for CherryPy.
 
     sessions.locking
@@ -282,8 +279,8 @@ class SessionTool(Tool):
     def _setup(self):
         """Hook this tool into cherrypy.request.
 
-        The standard CherryPy request object will automatically call this
-        method when the tool is "turned on" in config.
+        The standard CherryPy request object will automatically call
+        this method when the tool is "turned on" in config.
         """
         hooks = cherrypy.serving.request.hooks
 
@@ -325,7 +322,6 @@ class SessionTool(Tool):
 
 
 class XMLRPCController(object):
-
     """A Controller (page handler collection) for XML-RPC.
 
     To use it, have your controllers subclass this base class (it will
@@ -392,7 +388,6 @@ class SessionAuthTool(HandlerTool):
 
 
 class CachingTool(Tool):
-
     """Caching Tool for CherryPy."""
 
     def _wrapper(self, **kwargs):
@@ -416,11 +411,11 @@ class CachingTool(Tool):
 
 
 class Toolbox(object):
-
     """A collection of Tools.
 
     This object also functions as a config namespace handler for itself.
-    Custom toolboxes should be added to each Application's toolboxes dict.
+    Custom toolboxes should be added to each Application's toolboxes
+    dict.
     """
 
     def __init__(self, namespace):
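Note: the _setup docstrings repeated through this section describe how a Tool is wired in when "turned on" in config. A minimal sketch of defining and registering a custom tool; the function and tool name are invented::

    import cherrypy

    def strip_server_header():
        cherrypy.serving.response.headers.pop('Server', None)

    # With 'tools.strip_server.on': True in config, Tool._setup will
    # attach the callable at the 'before_finalize' hook point.
    cherrypy.tools.strip_server = cherrypy.Tool(
        'before_finalize', strip_server_header)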
lib/cherrypy/_cptree.py
@@ -10,19 +10,22 @@ from cherrypy.lib import httputil, reprconf
 class Application(object):
     """A CherryPy Application.
 
-    Servers and gateways should not instantiate Request objects directly.
-    Instead, they should ask an Application object for a request object.
+    Servers and gateways should not instantiate Request objects
+    directly. Instead, they should ask an Application object for a
+    request object.
 
-    An instance of this class may also be used as a WSGI callable
-    (WSGI application object) for itself.
+    An instance of this class may also be used as a WSGI callable (WSGI
+    application object) for itself.
     """
 
     root = None
-    """The top-most container of page handlers for this app. Handlers should
-    be arranged in a hierarchy of attributes, matching the expected URI
-    hierarchy; the default dispatcher then searches this hierarchy for a
-    matching handler. When using a dispatcher other than the default,
-    this value may be None."""
+    """The top-most container of page handlers for this app.
+
+    Handlers should be arranged in a hierarchy of attributes, matching
+    the expected URI hierarchy; the default dispatcher then searches
+    this hierarchy for a matching handler. When using a dispatcher other
+    than the default, this value may be None.
+    """
 
     config = {}
     """A dict of {path: pathconf} pairs, where 'pathconf' is itself a dict
@@ -32,10 +35,16 @@ class Application(object):
     toolboxes = {'tools': cherrypy.tools}
 
     log = None
-    """A LogManager instance. See _cplogging."""
+    """A LogManager instance.
+
+    See _cplogging.
+    """
 
     wsgiapp = None
-    """A CPWSGIApp instance. See _cpwsgi."""
+    """A CPWSGIApp instance.
+
+    See _cpwsgi.
+    """
 
     request_class = _cprequest.Request
     response_class = _cprequest.Response
@@ -82,12 +91,15 @@ class Application(object):
     def script_name(self):  # noqa: D401; irrelevant for properties
         """The URI "mount point" for this app.
 
-        A mount point is that portion of the URI which is constant for all URIs
-        that are serviced by this application; it does not include scheme,
-        host, or proxy ("virtual host") portions of the URI.
+        A mount point is that portion of the URI which is constant for
+        all URIs that are serviced by this application; it does not
+        include scheme, host, or proxy ("virtual host") portions of the
+        URI.
 
-        For example, if script_name is "/my/cool/app", then the URL
-        "http://www.example.com/my/cool/app/page1" might be handled by a
-        "page1" method on the root object.
+        For example, if script_name is "/my/cool/app", then the URL "
+        http://www.example.com/my/cool/app/page1" might be handled by a
+        "page1" method on the root object.
 
         The value of script_name MUST NOT end in a slash. If the script_name
@@ -171,9 +183,9 @@ class Application(object):
 class Tree(object):
     """A registry of CherryPy applications, mounted at diverse points.
 
-    An instance of this class may also be used as a WSGI callable
-    (WSGI application object), in which case it dispatches to all
-    mounted apps.
+    An instance of this class may also be used as a WSGI callable (WSGI
+    application object), in which case it dispatches to all mounted
+    apps.
     """
 
     apps = {}
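Note: Application and Tree meet in cherrypy.tree.mount, which builds an Application for a handler tree at a mount point. A sketch; the handler class is invented::

    import cherrypy

    class Root:
        @cherrypy.expose
        def index(self):
            return 'hello'

    # script_name is the mount point; per the docstrings above it must
    # not end in a slash ('' means the URI root).
    cherrypy.tree.mount(Root(), script_name='/app')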
lib/cherrypy/_cpwsgi.py
@@ -1,10 +1,10 @@
 """WSGI interface (see PEP 333 and 3333).
 
 Note that WSGI environ keys and values are 'native strings'; that is,
-whatever the type of "" is. For Python 2, that's a byte string; for Python 3,
-it's a unicode string. But PEP 3333 says: "even if Python's str type is
-actually Unicode "under the hood", the content of native strings must
-still be translatable to bytes via the Latin-1 encoding!"
+whatever the type of "" is. For Python 2, that's a byte string; for
+Python 3, it's a unicode string. But PEP 3333 says: "even if Python's
+str type is actually Unicode "under the hood", the content of native
+strings must still be translatable to bytes via the Latin-1 encoding!"
 """
 
 import sys as _sys
@@ -34,7 +34,6 @@ def downgrade_wsgi_ux_to_1x(environ):
 
 
 class VirtualHost(object):
-
     """Select a different WSGI application based on the Host header.
 
     This can be useful when running multiple sites within one CP server.
@@ -56,7 +55,10 @@ class VirtualHost(object):
             cherrypy.tree.graft(vhost)
     """
     default = None
-    """Required. The default WSGI application."""
+    """Required.
+
+    The default WSGI application.
+    """
 
     use_x_forwarded_host = True
     """If True (the default), any "X-Forwarded-Host"
@@ -65,11 +67,12 @@ class VirtualHost(object):
 
     domains = {}
     """A dict of {host header value: application} pairs.
-    The incoming "Host" request header is looked up in this dict,
-    and, if a match is found, the corresponding WSGI application
-    will be called instead of the default. Note that you often need
-    separate entries for "example.com" and "www.example.com".
-    In addition, "Host" headers may contain the port number.
+
+    The incoming "Host" request header is looked up in this dict, and,
+    if a match is found, the corresponding WSGI application will be
+    called instead of the default. Note that you often need separate
+    entries for "example.com" and "www.example.com". In addition, "Host"
+    headers may contain the port number.
     """
 
     def __init__(self, default, domains=None, use_x_forwarded_host=True):
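Note: a sketch matching the domains docstring above; the two WSGI apps are trivial placeholders::

    import cherrypy
    from cherrypy._cpwsgi import VirtualHost

    def main_app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [b'default site']

    def www_app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [b'www site']

    vhost = VirtualHost(main_app, domains={'www.example.com': www_app})
    cherrypy.tree.graft(vhost)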
@@ -89,7 +92,6 @@ class VirtualHost(object):
 
 
 class InternalRedirector(object):
-
     """WSGI middleware that handles raised cherrypy.InternalRedirect."""
 
     def __init__(self, nextapp, recursive=False):
@@ -137,7 +139,6 @@ class InternalRedirector(object):
 
 
 class ExceptionTrapper(object):
-
     """WSGI middleware that traps exceptions."""
 
     def __init__(self, nextapp, throws=(KeyboardInterrupt, SystemExit)):
@@ -226,7 +227,6 @@ class _TrappedResponse(object):
 
 
 class AppResponse(object):
-
     """WSGI response iterable for CherryPy applications."""
 
     def __init__(self, environ, start_response, cpapp):
@@ -277,7 +277,10 @@ class AppResponse(object):
         return next(self.iter_response)
 
     def close(self):
-        """Close and de-reference the current request and response. (Core)"""
+        """Close and de-reference the current request and response.
+
+        (Core)
+        """
         streaming = _cherrypy.serving.response.stream
         self.cpapp.release_serving()
@@ -380,18 +383,20 @@ class AppResponse(object):
 
 
 class CPWSGIApp(object):
-
     """A WSGI application object for a CherryPy Application."""
 
     pipeline = [
         ('ExceptionTrapper', ExceptionTrapper),
         ('InternalRedirector', InternalRedirector),
     ]
-    """A list of (name, wsgiapp) pairs. Each 'wsgiapp' MUST be a
-    constructor that takes an initial, positional 'nextapp' argument,
-    plus optional keyword arguments, and returns a WSGI application
-    (that takes environ and start_response arguments). The 'name' can
-    be any you choose, and will correspond to keys in self.config."""
+    """A list of (name, wsgiapp) pairs.
+
+    Each 'wsgiapp' MUST be a constructor that takes an initial,
+    positional 'nextapp' argument, plus optional keyword arguments, and
+    returns a WSGI application (that takes environ and start_response
+    arguments). The 'name' can be any you choose, and will correspond to
+    keys in self.config.
+    """
 
     head = None
     """Rather than nest all apps in the pipeline on each call, it's only
@@ -399,9 +404,12 @@ class CPWSGIApp(object):
     this to None again if you change self.pipeline after calling self."""
 
     config = {}
-    """A dict whose keys match names listed in the pipeline. Each
-    value is a further dict which will be passed to the corresponding
-    named WSGI callable (from the pipeline) as keyword arguments."""
+    """A dict whose keys match names listed in the pipeline.
+
+    Each value is a further dict which will be passed to the
+    corresponding named WSGI callable (from the pipeline) as keyword
+    arguments.
+    """
 
     response_class = AppResponse
     """The class to instantiate and return as the next app in the WSGI chain.
@@ -417,8 +425,8 @@ class CPWSGIApp(object):
     def tail(self, environ, start_response):
         """WSGI application callable for the actual CherryPy application.
 
-        You probably shouldn't call this; call self.__call__ instead,
-        so that any WSGI middleware in self.pipeline can run first.
+        You probably shouldn't call this; call self.__call__ instead, so
+        that any WSGI middleware in self.pipeline can run first.
         """
         return self.response_class(environ, start_response, self.cpapp)
lib/cherrypy/_cpwsgi_server.py
@@ -1,7 +1,7 @@
-"""
-WSGI server interface (see PEP 333).
+"""WSGI server interface (see PEP 333).
 
-This adds some CP-specific bits to the framework-agnostic cheroot package.
+This adds some CP-specific bits to the framework-agnostic cheroot
+package.
 """
 import sys
 
@@ -35,10 +35,11 @@ class CPWSGIHTTPRequest(cheroot.server.HTTPRequest):
 class CPWSGIServer(cheroot.wsgi.Server):
     """Wrapper for cheroot.wsgi.Server.
 
-    cheroot has been designed to not reference CherryPy in any way,
-    so that it can be used in other frameworks and applications. Therefore,
-    we wrap it here, so we can set our own mount points from cherrypy.tree
-    and apply some attributes from config -> cherrypy.server -> wsgi.Server.
+    cheroot has been designed to not reference CherryPy in any way, so
+    that it can be used in other frameworks and applications. Therefore,
+    we wrap it here, so we can set our own mount points from
+    cherrypy.tree and apply some attributes from config ->
+    cherrypy.server -> wsgi.Server.
     """
 
     fmt = 'CherryPy/{cherrypy.__version__} {cheroot.wsgi.Server.version}'
lib/cherrypy/_helper.py
@@ -137,7 +137,6 @@ def popargs(*args, **kwargs):
     class Root:
         def index(self):
             #...
-
     """
     # Since keyword arg comes after *args, we have to process it ourselves
     # for lower versions of python.
@@ -201,16 +200,17 @@ def url(path='', qs='', script_name=None, base=None, relative=None):
     If it does not start with a slash, this returns
     (base + script_name [+ request.path_info] + path + qs).
 
-    If script_name is None, cherrypy.request will be used
-    to find a script_name, if available.
+    If script_name is None, cherrypy.request will be used to find a
+    script_name, if available.
 
     If base is None, cherrypy.request.base will be used (if available).
     Note that you can use cherrypy.tools.proxy to change this.
 
-    Finally, note that this function can be used to obtain an absolute URL
-    for the current request path (minus the querystring) by passing no args.
-    If you call url(qs=cherrypy.request.query_string), you should get the
-    original browser URL (assuming no internal redirections).
+    Finally, note that this function can be used to obtain an absolute
+    URL for the current request path (minus the querystring) by passing
+    no args. If you call url(qs=cherrypy.request.query_string), you
+    should get the original browser URL (assuming no internal
+    redirections).
 
     If relative is None or not provided, request.app.relative_urls will
     be used (if available, else False). If False, the output will be an
@@ -320,8 +320,8 @@ def normalize_path(path):
 class _ClassPropertyDescriptor(object):
     """Descript for read-only class-based property.
 
-    Turns a classmethod-decorated func into a read-only property of that class
-    type (means the value cannot be set).
+    Turns a classmethod-decorated func into a read-only property of that
+    class type (means the value cannot be set).
     """
 
     def __init__(self, fget, fset=None):
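Note: the url() behavior described above is easiest to see with concrete values. A sketch; the host and mount point are invented, and outside a request cherrypy.url falls back to server configuration::

    import cherrypy

    # Inside a request for an app mounted at '/app' on http://example.com:
    #   cherrypy.url('/page', qs='a=1') -> 'http://example.com/app/page?a=1'
    # With no arguments it returns the absolute URL of the current
    # request path, minus the querystring:
    absolute = cherrypy.url(qs=cherrypy.request.query_string)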
lib/cherrypy/_json.py
@@ -1,5 +1,4 @@
-"""
-JSON support.
+"""JSON support.
 
 Expose preferred json module as json and provide encode/decode
 convenience functions.

lib/cherrypy/lib/__init__.py
@@ -6,8 +6,8 @@ def is_iterator(obj):
 
     (i.e. like a generator).
 
-    This will return False for objects which are iterable,
-    but not iterators themselves.
+    This will return False for objects which are iterable, but not
+    iterators themselves.
     """
     from types import GeneratorType
     if isinstance(obj, GeneratorType):

lib/cherrypy/lib/auth_basic.py
@@ -18,7 +18,6 @@ as the credentials store::
         'tools.auth_basic.accept_charset': 'UTF-8',
     }
     app_config = { '/' : basic_auth }
-
 """
 
 import binascii
lib/cherrypy/lib/auth_digest.py
@@ -55,7 +55,7 @@ def TRACE(msg):
 
 
 def get_ha1_dict_plain(user_password_dict):
-    """Returns a get_ha1 function which obtains a plaintext password from a
+    """Return a get_ha1 function which obtains a plaintext password from a
     dictionary of the form: {username : password}.
 
     If you want a simple dictionary-based authentication scheme, with plaintext
@@ -72,7 +72,7 @@ def get_ha1_dict_plain(user_password_dict):
 
 
 def get_ha1_dict(user_ha1_dict):
-    """Returns a get_ha1 function which obtains a HA1 password hash from a
+    """Return a get_ha1 function which obtains a HA1 password hash from a
     dictionary of the form: {username : HA1}.
 
     If you want a dictionary-based authentication scheme, but with
@@ -87,7 +87,7 @@ def get_ha1_dict(user_ha1_dict):
 
 
 def get_ha1_file_htdigest(filename):
-    """Returns a get_ha1 function which obtains a HA1 password hash from a
+    """Return a get_ha1 function which obtains a HA1 password hash from a
     flat file with lines of the same format as that produced by the Apache
     htdigest utility. For example, for realm 'wonderland', username 'alice',
     and password '4x5istwelve', the htdigest line would be::
@@ -135,7 +135,7 @@ def synthesize_nonce(s, key, timestamp=None):
 
 
 def H(s):
-    """The hash function H"""
+    """The hash function H."""
     return md5_hex(s)
 
 
@@ -259,10 +259,11 @@ class HttpDigestAuthorization(object):
         return False
 
     def is_nonce_stale(self, max_age_seconds=600):
-        """Returns True if a validated nonce is stale. The nonce contains a
-        timestamp in plaintext and also a secure hash of the timestamp.
-        You should first validate the nonce to ensure the plaintext
-        timestamp is not spoofed.
+        """Return True if a validated nonce is stale.
+
+        The nonce contains a timestamp in plaintext and also a secure
+        hash of the timestamp. You should first validate the nonce to
+        ensure the plaintext timestamp is not spoofed.
         """
         try:
             timestamp, hashpart = self.nonce.split(':', 1)
@@ -275,7 +276,10 @@ class HttpDigestAuthorization(object):
         return True
 
     def HA2(self, entity_body=''):
-        """Returns the H(A2) string. See :rfc:`2617` section 3.2.2.3."""
+        """Return the H(A2) string.
+
+        See :rfc:`2617` section 3.2.2.3.
+        """
         # RFC 2617 3.2.2.3
         # If the "qop" directive's value is "auth" or is unspecified,
         # then A2 is:
@@ -306,7 +310,6 @@ class HttpDigestAuthorization(object):
         4.3. This refers to the entity the user agent sent in the
         request which has the Authorization header. Typically GET
         requests don't have an entity, and POST requests do.
-
         """
         ha2 = self.HA2(entity_body)
         # Request-Digest -- RFC 2617 3.2.2.1
@@ -395,7 +398,6 @@ def digest_auth(realm, get_ha1, key, debug=False, accept_charset='utf-8'):
     key
         A secret string known only to the server, used in the synthesis
         of nonces.
-
     """
     request = cherrypy.serving.request
 
@@ -447,9 +449,7 @@ def digest_auth(realm, get_ha1, key, debug=False, accept_charset='utf-8'):
 
 
 def _respond_401(realm, key, accept_charset, debug, **kwargs):
-    """
-    Respond with 401 status and a WWW-Authenticate header
-    """
+    """Respond with 401 status and a WWW-Authenticate header."""
     header = www_authenticate(
         realm, key,
         accept_charset=accept_charset,
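Note: the digest-auth pieces above are normally wired together through tool config. A sketch following the pattern in the module's own examples; the realm, user, password, and key are made up::

    from cherrypy.lib import auth_digest

    users = {'alice': '4x5istwelve'}

    digest_conf = {
        'tools.auth_digest.on': True,
        'tools.auth_digest.realm': 'wonderland',
        'tools.auth_digest.get_ha1': auth_digest.get_ha1_dict_plain(users),
        'tools.auth_digest.key': 'a565c27146791cfb',  # server-side secret
        'tools.auth_digest.accept_charset': 'UTF-8',
    }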
lib/cherrypy/lib/caching.py
@@ -42,7 +42,6 @@ from cherrypy.lib import cptools, httputil
 
 
 class Cache(object):
-
     """Base class for Cache implementations."""
 
     def get(self):
@@ -64,17 +63,16 @@ class Cache(object):
 
 # ------------------------------ Memory Cache ------------------------------- #
 class AntiStampedeCache(dict):
-
     """A storage system for cached items which reduces stampede collisions."""
 
     def wait(self, key, timeout=5, debug=False):
         """Return the cached value for the given key, or None.
 
-        If timeout is not None, and the value is already
-        being calculated by another thread, wait until the given timeout has
-        elapsed. If the value is available before the timeout expires, it is
-        returned. If not, None is returned, and a sentinel placed in the cache
-        to signal other threads to wait.
+        If timeout is not None, and the value is already being
+        calculated by another thread, wait until the given timeout has
+        elapsed. If the value is available before the timeout expires,
+        it is returned. If not, None is returned, and a sentinel placed
+        in the cache to signal other threads to wait.
 
         If timeout is None, no waiting is performed nor sentinels used.
         """
@@ -127,7 +125,6 @@ class AntiStampedeCache(dict):
 
 
 class MemoryCache(Cache):
-
     """An in-memory cache for varying response content.
 
     Each key in self.store is a URI, and each value is an AntiStampedeCache.
@@ -381,7 +378,10 @@ def get(invalid_methods=('POST', 'PUT', 'DELETE'), debug=False, **kwargs):
 
 
 def tee_output():
-    """Tee response output to cache storage. Internal."""
+    """Tee response output to cache storage.
+
+    Internal.
+    """
     # Used by CachingTool by attaching to request.hooks
 
     request = cherrypy.serving.request
@@ -441,7 +441,6 @@ def expires(secs=0, force=False, debug=False):
     * Expires
 
     If any are already present, none of the above response headers are set.
-
     """
 
     response = cherrypy.serving.response

lib/cherrypy/lib/covercp.py
@@ -22,7 +22,7 @@ it will call ``serve()`` for you.
 
 import re
 import sys
-import cgi
+import html
 import os
 import os.path
 import urllib.parse
@@ -352,9 +352,9 @@ class CoverStats(object):
                 buffer.append((lineno, line))
                 if empty_the_buffer:
                     for lno, pastline in buffer:
-                        yield template % (lno, cgi.escape(pastline))
+                        yield template % (lno, html.escape(pastline))
                     buffer = []
-            yield template % (lineno, cgi.escape(line))
+            yield template % (lineno, html.escape(line))
 
     @cherrypy.expose
     def report(self, name):

lib/cherrypy/lib/cpstats.py
@@ -184,7 +184,6 @@ To report statistics::
 To format statistics reports::
 
     See 'Reporting', above.
-
 """
 
 import logging
@@ -254,7 +253,6 @@ def proc_time(s):
 
 
 class ByteCountWrapper(object):
-
     """Wraps a file-like object, counting the number of bytes read."""
 
     def __init__(self, rfile):
@@ -307,7 +305,6 @@ def _get_threading_ident():
 
 
 class StatsTool(cherrypy.Tool):
-
     """Record various information about the current request."""
 
     def __init__(self):
@@ -316,8 +313,8 @@ class StatsTool(cherrypy.Tool):
     def _setup(self):
         """Hook this tool into cherrypy.request.
 
-        The standard CherryPy request object will automatically call this
-        method when the tool is "turned on" in config.
+        The standard CherryPy request object will automatically call
+        this method when the tool is "turned on" in config.
         """
         if appstats.get('Enabled', False):
             cherrypy.Tool._setup(self)
lib/cherrypy/lib/cptools.py
@@ -94,8 +94,8 @@ def validate_etags(autotags=False, debug=False):
 def validate_since():
     """Validate the current Last-Modified against If-Modified-Since headers.
 
-    If no code has set the Last-Modified response header, then no validation
-    will be performed.
+    If no code has set the Last-Modified response header, then no
+    validation will be performed.
     """
     response = cherrypy.serving.response
     lastmod = response.headers.get('Last-Modified')
@@ -123,9 +123,9 @@ def validate_since():
 def allow(methods=None, debug=False):
     """Raise 405 if request.method not in methods (default ['GET', 'HEAD']).
 
-    The given methods are case-insensitive, and may be in any order.
-    If only one method is allowed, you may supply a single string;
-    if more than one, supply a list of strings.
+    The given methods are case-insensitive, and may be in any order. If
+    only one method is allowed, you may supply a single string; if more
+    than one, supply a list of strings.
 
     Regardless of whether the current method is allowed or not, this
     also emits an 'Allow' response header, containing the given methods.
@@ -154,22 +154,23 @@ def proxy(base=None, local='X-Forwarded-Host', remote='X-Forwarded-For',
           scheme='X-Forwarded-Proto', debug=False):
     """Change the base URL (scheme://host[:port][/path]).
 
-    For running a CP server behind Apache, lighttpd, or other HTTP server.
+    For running a CP server behind Apache, lighttpd, or other HTTP
+    server.
 
-    For Apache and lighttpd, you should leave the 'local' argument at the
-    default value of 'X-Forwarded-Host'. For Squid, you probably want to set
-    tools.proxy.local = 'Origin'.
+    For Apache and lighttpd, you should leave the 'local' argument at
+    the default value of 'X-Forwarded-Host'. For Squid, you probably
+    want to set tools.proxy.local = 'Origin'.
 
-    If you want the new request.base to include path info (not just the host),
-    you must explicitly set base to the full base path, and ALSO set 'local'
-    to '', so that the X-Forwarded-Host request header (which never includes
-    path info) does not override it. Regardless, the value for 'base' MUST
-    NOT end in a slash.
+    If you want the new request.base to include path info (not just the
+    host), you must explicitly set base to the full base path, and ALSO
+    set 'local' to '', so that the X-Forwarded-Host request header
+    (which never includes path info) does not override it. Regardless,
+    the value for 'base' MUST NOT end in a slash.
 
     cherrypy.request.remote.ip (the IP address of the client) will be
-    rewritten if the header specified by the 'remote' arg is valid.
-    By default, 'remote' is set to 'X-Forwarded-For'. If you do not
-    want to rewrite remote.ip, set the 'remote' arg to an empty string.
+    rewritten if the header specified by the 'remote' arg is valid. By
+    default, 'remote' is set to 'X-Forwarded-For'. If you do not want to
+    rewrite remote.ip, set the 'remote' arg to an empty string.
     """
 
     request = cherrypy.serving.request
@@ -217,8 +218,8 @@ def proxy(base=None, local='X-Forwarded-Host', remote='X-Forwarded-For',
 def ignore_headers(headers=('Range',), debug=False):
     """Delete request headers whose field names are included in 'headers'.
 
-    This is a useful tool for working behind certain HTTP servers;
-    for example, Apache duplicates the work that CP does for 'Range'
+    This is a useful tool for working behind certain HTTP servers; for
+    example, Apache duplicates the work that CP does for 'Range'
     headers, and will doubly-truncate the response.
     """
     request = cherrypy.serving.request
@@ -281,7 +282,6 @@ def referer(pattern, accept=True, accept_missing=False, error=403,
 
 
 class SessionAuth(object):
-
     """Assert that the user is logged in."""
 
     session_key = 'username'
@@ -319,7 +319,10 @@ Message: %(error_msg)s
 </body></html>""") % vars()).encode('utf-8')
 
     def do_login(self, username, password, from_page='..', **kwargs):
-        """Login. May raise redirect, or return True if request handled."""
+        """Login.
+
+        May raise redirect, or return True if request handled.
+        """
         response = cherrypy.serving.response
         error_msg = self.check_username_and_password(username, password)
         if error_msg:
@@ -336,7 +339,10 @@ Message: %(error_msg)s
             raise cherrypy.HTTPRedirect(from_page or '/')
 
     def do_logout(self, from_page='..', **kwargs):
-        """Logout. May raise redirect, or return True if request handled."""
+        """Logout.
+
+        May raise redirect, or return True if request handled.
+        """
         sess = cherrypy.session
         username = sess.get(self.session_key)
         sess[self.session_key] = None
@@ -346,7 +352,9 @@ Message: %(error_msg)s
         raise cherrypy.HTTPRedirect(from_page)
 
     def do_check(self):
-        """Assert username. Raise redirect, or return True if request handled.
+        """Assert username.
+
+        Raise redirect, or return True if request handled.
         """
         sess = cherrypy.session
         request = cherrypy.serving.request
@@ -408,8 +416,7 @@ def session_auth(**kwargs):
 
     Any attribute of the SessionAuth class may be overridden
     via a keyword arg to this function:
-
-    """ + '\n    '.join(
+    """ + '\n' + '\n    '.join(
         '{!s}: {!s}'.format(k, type(getattr(SessionAuth, k)).__name__)
         for k in dir(SessionAuth)
         if not k.startswith('__')
@@ -490,8 +497,8 @@ def trailing_slash(missing=True, extra=False, status=None, debug=False):
 def flatten(debug=False):
     """Wrap response.body in a generator that recursively iterates over body.
 
-    This allows cherrypy.response.body to consist of 'nested generators';
-    that is, a set of generators that yield generators.
+    This allows cherrypy.response.body to consist of 'nested
+    generators'; that is, a set of generators that yield generators.
     """
     def flattener(input):
         numchunks = 0
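Note: the proxy() docstring earlier in this section maps directly onto tool config when CherryPy runs behind another HTTP server. A sketch using the default header names discussed there::

    proxy_conf = {
        '/': {
            'tools.proxy.on': True,
            'tools.proxy.local': 'X-Forwarded-Host',
            'tools.proxy.remote': 'X-Forwarded-For',
            'tools.proxy.scheme': 'X-Forwarded-Proto',
        }
    }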
lib/cherrypy/lib/encoding.py
@@ -261,9 +261,7 @@ class ResponseEncoder:
 
 
 def prepare_iter(value):
-    """
-    Ensure response body is iterable and resolves to False when empty.
-    """
+    """Ensure response body is iterable and resolves to False when empty."""
     if isinstance(value, text_or_bytes):
         # strings get wrapped in a list because iterating over a single
         # item list is much faster than iterating over every character
@@ -360,7 +358,6 @@ def gzip(compress_level=5, mime_types=['text/html', 'text/plain'],
     * No 'gzip' or 'x-gzip' is present in the Accept-Encoding header
     * No 'gzip' or 'x-gzip' with a qvalue > 0 is present
     * The 'identity' value is given with a qvalue > 0.
-
     """
     request = cherrypy.serving.request
     response = cherrypy.serving.response
lib/cherrypy/lib/gctools.py
@@ -14,7 +14,6 @@ from cherrypy.process.plugins import SimplePlugin
 
 
 class ReferrerTree(object):
-
     """An object which gathers all referrers of an object to a given depth."""
 
     peek_length = 40
@@ -132,7 +131,6 @@ def get_context(obj):
 
 
 class GCRoot(object):
-
     """A CherryPy page handler for testing reference leaks."""
 
     classes = [
39 lib/cherrypy/lib/headers.py Normal file
@@ -0,0 +1,39 @@
+"""headers."""
+
+
+def _parse_param(s):
+    while s[:1] == ';':
+        s = s[1:]
+        end = s.find(';')
+        while end > 0 and (s.count('"', 0, end) - s.count('\\"', 0, end)) % 2:
+            end = s.find(';', end + 1)
+        if end < 0:
+            end = len(s)
+        f = s[:end]
+        yield f.strip()
+        s = s[end:]
+
+
+def parse_header(line):
+    """Parse a Content-type like header.
+
+    Return the main content-type and a dictionary of options.
+
+    Copied from removed stdlib cgi module. See
+    `cherrypy/cherrypy#2014 (comment)
+    <https://github.com/cherrypy/cherrypy/issues/2014#issuecomment-1883774891>`_
+    for background.
+    """
+    parts = _parse_param(';' + line)
+    key = parts.__next__()
+    pdict = {}
+    for p in parts:
+        i = p.find('=')
+        if i >= 0:
+            name = p[:i].strip().lower()
+            value = p[i + 1:].strip()
+            if len(value) >= 2 and value[0] == value[-1] == '"':
+                value = value[1:-1]
+                value = value.replace('\\\\', '\\').replace('\\"', '"')
+            pdict[name] = value
+    return key, pdict
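Note: a quick check of the new parse_header helper added above; the expected values follow directly from the code::

    from cherrypy.lib.headers import parse_header

    key, params = parse_header('text/html; charset="utf-8"')
    assert key == 'text/html'
    assert params == {'charset': 'utf-8'}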
|
|
@ -12,7 +12,6 @@ import email.utils
|
|||
import re
|
||||
import builtins
|
||||
from binascii import b2a_base64
|
||||
from cgi import parse_header
|
||||
from email.header import decode_header
|
||||
from http.server import BaseHTTPRequestHandler
|
||||
from urllib.parse import unquote_plus
|
||||
|
@ -21,6 +20,7 @@ import jaraco.collections
|
|||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import ntob, ntou
|
||||
from .headers import parse_header
|
||||
|
||||
response_codes = BaseHTTPRequestHandler.responses.copy()
|
||||
|
||||
|
@@ -71,10 +71,10 @@ def protocol_from_http(protocol_str):
 def get_ranges(headervalue, content_length):
     """Return a list of (start, stop) indices from a Range header, or None.

-    Each (start, stop) tuple will be composed of two ints, which are suitable
-    for use in a slicing operation. That is, the header "Range: bytes=3-6",
-    if applied against a Python string, is requesting resource[3:7]. This
-    function will return the list [(3, 7)].
+    Each (start, stop) tuple will be composed of two ints, which are
+    suitable for use in a slicing operation. That is, the header "Range:
+    bytes=3-6", if applied against a Python string, is requesting
+    resource[3:7]. This function will return the list [(3, 7)].

     If this function returns an empty list, you should return HTTP 416.
     """
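A short illustration of the slicing behavior the docstring describes (outputs assume the function matches its documentation):

```python
from cherrypy.lib.httputil import get_ranges

# "bytes=3-6" against a 10-byte resource means indices 3..6 inclusive,
# i.e. the Python slice resource[3:7].
print(get_ranges('bytes=3-6', 10))    # [(3, 7)]

# A suffix range requests the final N bytes.
print(get_ranges('bytes=-4', 10))     # [(6, 10)]

# An unsatisfiable range yields an empty list -> respond with HTTP 416.
print(get_ranges('bytes=50-60', 10))  # []
```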
@@ -127,7 +126,6 @@ def get_ranges(headervalue, content_length):


 class HeaderElement(object):
-
     """An element (with parameters) from an HTTP header's element list."""

     def __init__(self, value, params=None):
@@ -169,14 +168,14 @@ q_separator = re.compile(r'; *q *=')


 class AcceptElement(HeaderElement):
-
     """An element (with parameters) from an Accept* header's element list.

-    AcceptElement objects are comparable; the more-preferred object will be
-    "less than" the less-preferred object. They are also therefore sortable;
-    if you sort a list of AcceptElement objects, they will be listed in
-    priority order; the most preferred value will be first. Yes, it should
-    have been the other way around, but it's too late to fix now.
+    AcceptElement objects are comparable; the more-preferred object will
+    be "less than" the less-preferred object. They are also therefore
+    sortable; if you sort a list of AcceptElement objects, they will be
+    listed in priority order; the most preferred value will be first.
+    Yes, it should have been the other way around, but it's too late to
+    fix now.
     """

     @classmethod
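Since the inverted ordering trips people up, a small sketch of the sort behavior described above:

```python
from cherrypy.lib.httputil import AcceptElement

elems = [
    AcceptElement.from_str('text/plain;q=0.5'),
    AcceptElement.from_str('text/html'),  # no q parameter defaults to 1
]

# The more-preferred element compares "less than", so a plain ascending
# sort yields the most-preferred value first.
for el in sorted(elems):
    print(el.value, el.qvalue)
# text/html 1.0
# text/plain 0.5
```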
@@ -249,8 +248,7 @@ def header_elements(fieldname, fieldvalue):


 def decode_TEXT(value):
-    r"""
-    Decode :rfc:`2047` TEXT
+    r"""Decode :rfc:`2047` TEXT.

     >>> decode_TEXT("=?utf-8?q?f=C3=BCr?=") == b'f\xfcr'.decode('latin-1')
     True

@@ -265,9 +263,7 @@ def decode_TEXT(value):


 def decode_TEXT_maybe(value):
-    """
-    Decode the text but only if '=?' appears in it.
-    """
+    """Decode the text but only if '=?' appears in it."""
     return decode_TEXT(value) if '=?' in value else value

@@ -388,7 +384,6 @@ def parse_query_string(query_string, keep_blank_values=True, encoding='utf-8'):


 class CaseInsensitiveDict(jaraco.collections.KeyTransformingDict):
-
     """A case-insensitive dict subclass.

     Each key is changed on entry to title case.

@@ -417,7 +412,6 @@ else:


 class HeaderMap(CaseInsensitiveDict):
-
     """A dict subclass for HTTP request and response headers.

     Each key is changed on entry to str(key).title(). This allows headers

@@ -494,7 +488,6 @@ class HeaderMap(CaseInsensitiveDict):


 class Host(object):
-
     """An internet address.

     name
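A quick sketch of the title-casing behavior these dict subclasses provide:

```python
from cherrypy.lib.httputil import HeaderMap

h = HeaderMap()
h['content-TYPE'] = 'text/html'

# Keys are normalized with str(key).title() on entry, so lookups work
# regardless of the casing the client (or your code) used.
print(h['Content-Type'])  # text/html
print(list(h))            # ['Content-Type']
```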
@@ -7,22 +7,22 @@ class NeverExpires(object):


 class Timer(object):
-    """
-    A simple timer that will indicate when an expiration time has passed.
-    """
+    """A simple timer that will indicate when an expiration time has passed."""
     def __init__(self, expiration):
         'Create a timer that expires at `expiration` (UTC datetime)'
         self.expiration = expiration

     @classmethod
     def after(cls, elapsed):
-        """
-        Return a timer that will expire after `elapsed` passes.
-        """
-        return cls(datetime.datetime.utcnow() + elapsed)
+        """Return a timer that will expire after `elapsed` passes."""
+        return cls(
+            datetime.datetime.now(datetime.timezone.utc) + elapsed,
+        )

     def expired(self):
-        return datetime.datetime.utcnow() >= self.expiration
+        return datetime.datetime.now(
+            datetime.timezone.utc,
+        ) >= self.expiration


 class LockTimeout(Exception):
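The utcnow() → now(timezone.utc) changes swap naive timestamps (utcnow() is deprecated as of Python 3.12) for timezone-aware ones; a minimal sketch of why the two must not be mixed:

```python
import datetime

naive = datetime.datetime.utcnow()                    # tzinfo is None
aware = datetime.datetime.now(datetime.timezone.utc)  # tzinfo is UTC

# Comparing naive and aware datetimes raises, which is why the stored
# expiration value and the "now" check must both be aware.
try:
    print(naive >= aware)
except TypeError as exc:
    print(exc)  # can't compare offset-naive and offset-aware datetimes
```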
@@ -30,9 +30,7 @@ class LockTimeout(Exception):


 class LockChecker(object):
-    """
-    Keep track of the time and detect if a timeout has expired
-    """
+    """Keep track of the time and detect if a timeout has expired."""
     def __init__(self, session_id, timeout):
         self.session_id = session_id
         if timeout:
@@ -30,7 +30,6 @@ to get a quick sanity-check on overall CP performance. Use the
 ``--profile`` flag when running the test suite. Then, use the ``serve()``
 function to browse the results in a web browser. If you run this
 module from the command line, it will call ``serve()`` for you.
-
 """

 import io
@@ -27,18 +27,17 @@ from cherrypy._cpcompat import text_or_bytes


 class NamespaceSet(dict):
-
     """A dict of config namespace names and handlers.

-    Each config entry should begin with a namespace name; the corresponding
-    namespace handler will be called once for each config entry in that
-    namespace, and will be passed two arguments: the config key (with the
-    namespace removed) and the config value.
+    Each config entry should begin with a namespace name; the
+    corresponding namespace handler will be called once for each config
+    entry in that namespace, and will be passed two arguments: the
+    config key (with the namespace removed) and the config value.

     Namespace handlers may be any Python callable; they may also be
-    context managers, in which case their __enter__
-    method should return a callable to be used as the handler.
-    See cherrypy.tools (the Toolbox class) for an example.
+    context managers, in which case their __enter__ method should return
+    a callable to be used as the handler. See cherrypy.tools (the
+    Toolbox class) for an example.
     """

     def __call__(self, config):

@@ -48,9 +47,10 @@ class NamespaceSet(dict):
         A flat dict, where keys use dots to separate
         namespaces, and values are arbitrary.

-        The first name in each config key is used to look up the corresponding
-        namespace handler. For example, a config entry of {'tools.gzip.on': v}
-        will call the 'tools' namespace handler with the args: ('gzip.on', v)
+        The first name in each config key is used to look up the
+        corresponding namespace handler. For example, a config entry of
+        {'tools.gzip.on': v} will call the 'tools' namespace handler
+        with the args: ('gzip.on', v)
         """
         # Separate the given config into namespaces
         ns_confs = {}
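A toy handler, following the calling convention the docstring describes (the handler itself is hypothetical):

```python
from cherrypy._cpconfig import NamespaceSet

ns = NamespaceSet()
# Hypothetical handler: receives the key with its namespace stripped.
ns['tools'] = lambda k, v: print('tools handler got:', k, v)

ns({'tools.gzip.on': True, 'unknown.key': 1})  # unknown namespaces are skipped
# tools handler got: gzip.on True
```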
@@ -103,7 +103,6 @@ class NamespaceSet(dict):


 class Config(dict):
-
     """A dict-like set of configuration data, with defaults and namespaces.

     May take a file, filename, or dict.

@@ -167,7 +166,7 @@ class Parser(configparser.ConfigParser):
         self._read(fp, filename)

     def as_dict(self, raw=False, vars=None):
-        """Convert an INI file to a dictionary"""
+        """Convert an INI file to a dictionary."""
         # Load INI file into a dict
         result = {}
         for section in self.sections():
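A sketch of as_dict() on an in-memory INI config, assuming the usual CherryPy config syntax where values are Python literals:

```python
import io
from cherrypy._cpconfig import Parser

p = Parser()
p.read_file(io.StringIO(
    '[global]\n'
    'server.socket_port: 8080\n'
))
print(p.as_dict())  # {'global': {'server.socket_port': 8080}}
```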
@@ -120,7 +120,6 @@ missing = object()


 class Session(object):
-
     """A CherryPy dict-like Session object (one per request)."""

     _id = None

@@ -148,9 +147,11 @@ class Session(object):
         to session data."""

     loaded = False
-    """
-    If True, data has been retrieved from storage. This should happen
-    automatically on the first attempt to access session data."""
+    """If True, data has been retrieved from storage.
+
+    This should happen automatically on the first attempt to access
+    session data.
+    """

     clean_thread = None
     'Class-level Monitor which calls self.clean_up.'

@@ -165,9 +166,10 @@ class Session(object):
     'True if the session requested by the client did not exist.'

     regenerated = False
-    """
-    True if the application called session.regenerate(). This is not set by
-    internal calls to regenerate the session id."""
+    """True if the application called session.regenerate().
+
+    This is not set by internal calls to regenerate the session id.
+    """

     debug = False
     'If True, log debug information.'
@@ -335,8 +337,9 @@ class Session(object):

     def pop(self, key, default=missing):
         """Remove the specified key and return the corresponding value.
-        If key is not found, default is returned if given,
-        otherwise KeyError is raised.
+
+        If key is not found, default is returned if given, otherwise
+        KeyError is raised.
         """
         if not self.loaded:
             self.load()

@@ -351,13 +354,19 @@ class Session(object):
         return key in self._data

     def get(self, key, default=None):
-        """D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None."""
+        """D.get(k[,d]) -> D[k] if k in D, else d.
+
+        d defaults to None.
+        """
         if not self.loaded:
             self.load()
         return self._data.get(key, default)

     def update(self, d):
-        """D.update(E) -> None. Update D from E: for k in E: D[k] = E[k]."""
+        """D.update(E) -> None.
+
+        Update D from E: for k in E: D[k] = E[k].
+        """
         if not self.loaded:
             self.load()
         self._data.update(d)

@@ -369,7 +378,10 @@ class Session(object):
         return self._data.setdefault(key, default)

     def clear(self):
-        """D.clear() -> None. Remove all items from D."""
+        """D.clear() -> None.
+
+        Remove all items from D.
+        """
         if not self.loaded:
             self.load()
         self._data.clear()
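The dict-style methods above are what make the session usable like a plain mapping inside a handler; a minimal sketch:

```python
import cherrypy

class Root:
    @cherrypy.expose
    def index(self):
        # cherrypy.session supports get/update/pop/clear like a dict.
        count = cherrypy.session.get('count', 0) + 1
        cherrypy.session['count'] = count
        return 'visit #%d' % count

cherrypy.quickstart(Root(), '/', {'/': {'tools.sessions.on': True}})
```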
@@ -492,7 +504,8 @@ class FileSession(Session):
         """Set up the storage system for file-based sessions.

         This should only be called once per process; this will be done
-        automatically when using sessions.init (as the built-in Tool does).
+        automatically when using sessions.init (as the built-in Tool
+        does).
         """
         # The 'storage_path' arg is required for file-based sessions.
         kwargs['storage_path'] = os.path.abspath(kwargs['storage_path'])

@@ -616,7 +629,8 @@ class MemcachedSession(Session):
         """Set up the storage system for memcached-based sessions.

         This should only be called once per process; this will be done
-        automatically when using sessions.init (as the built-in Tool does).
+        automatically when using sessions.init (as the built-in Tool
+        does).
         """
         for k, v in kwargs.items():
             setattr(cls, k, v)
@@ -56,15 +56,15 @@ def serve_file(path, content_type=None, disposition=None, name=None,
                debug=False):
     """Set status, headers, and body in order to serve the given path.

-    The Content-Type header will be set to the content_type arg, if provided.
-    If not provided, the Content-Type will be guessed by the file extension
-    of the 'path' argument.
+    The Content-Type header will be set to the content_type arg, if
+    provided. If not provided, the Content-Type will be guessed by the
+    file extension of the 'path' argument.

-    If disposition is not None, the Content-Disposition header will be set
-    to "<disposition>; filename=<name>; filename*=utf-8''<name>"
-    as described in :rfc:`6266#appendix-D`.
-    If name is None, it will be set to the basename of path.
-    If disposition is None, no Content-Disposition header will be written.
+    If disposition is not None, the Content-Disposition header will be
+    set to "<disposition>; filename=<name>; filename*=utf-8''<name>" as
+    described in :rfc:`6266#appendix-D`. If name is None, it will be set
+    to the basename of path. If disposition is None, no Content-
+    Disposition header will be written.
     """
     response = cherrypy.serving.response

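A sketch of the documented Content-Disposition behavior ('report.pdf' is a hypothetical file):

```python
import os
import cherrypy
from cherrypy.lib.static import serve_file

class Root:
    @cherrypy.expose
    def download(self):
        path = os.path.join(os.path.dirname(__file__), 'report.pdf')
        # Content-Type is guessed from the extension; disposition adds
        # Content-Disposition: attachment; filename=report.pdf
        return serve_file(path, disposition='attachment')
```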
@@ -31,7 +31,6 @@ _module__file__base = os.getcwd()


 class SimplePlugin(object):
-
     """Plugin base class which auto-subscribes methods for known channels."""

     bus = None

@@ -59,7 +58,6 @@ class SimplePlugin(object):


 class SignalHandler(object):
-
     """Register bus channels (and listeners) for system signals.

     You can modify what signals your application listens for, and what it does

@@ -171,8 +169,8 @@ class SignalHandler(object):
         If the optional 'listener' argument is provided, it will be
         subscribed as a listener for the given signal's channel.

-        If the given signal name or number is not available on the current
-        platform, ValueError is raised.
+        If the given signal name or number is not available on the
+        current platform, ValueError is raised.
         """
         if isinstance(signal, text_or_bytes):
             signum = getattr(_signal, signal, None)
@@ -218,11 +216,10 @@ except ImportError:


 class DropPrivileges(SimplePlugin):
-
     """Drop privileges. uid/gid arguments not available on Windows.

     Special thanks to `Gavin Baker
-    <http://antonym.org/2005/12/dropping-privileges-in-python.html>`_
+    <http://antonym.org/2005/12/dropping-privileges-in-python.html>`_.
     """

     def __init__(self, bus, umask=None, uid=None, gid=None):

@@ -234,7 +231,10 @@ class DropPrivileges(SimplePlugin):

     @property
     def uid(self):
-        """The uid under which to run. Availability: Unix."""
+        """The uid under which to run.
+
+        Availability: Unix.
+        """
         return self._uid

     @uid.setter

@@ -250,7 +250,10 @@ class DropPrivileges(SimplePlugin):

     @property
     def gid(self):
-        """The gid under which to run. Availability: Unix."""
+        """The gid under which to run.
+
+        Availability: Unix.
+        """
         return self._gid

     @gid.setter
@@ -332,7 +335,6 @@ class DropPrivileges(SimplePlugin):


 class Daemonizer(SimplePlugin):
-
     """Daemonize the running script.

     Use this with a Web Site Process Bus via::

@@ -423,7 +425,6 @@ class Daemonizer(SimplePlugin):


 class PIDFile(SimplePlugin):
-
     """Maintain a PID file via a WSPBus."""

     def __init__(self, bus, pidfile):
@@ -453,12 +454,11 @@ class PIDFile(SimplePlugin):


 class PerpetualTimer(threading.Timer):
-
     """A responsive subclass of threading.Timer whose run() method repeats.

     Use this timer only when you really need a very interruptible timer;
-    this checks its 'finished' condition up to 20 times a second, which can
-    results in pretty high CPU usage
+    this checks its 'finished' condition up to 20 times a second, which
+    can results in pretty high CPU usage
     """

     def __init__(self, *args, **kwargs):

@@ -483,14 +483,14 @@ class PerpetualTimer(threading.Timer):


 class BackgroundTask(threading.Thread):
-
     """A subclass of threading.Thread whose run() method repeats.

-    Use this class for most repeating tasks. It uses time.sleep() to wait
-    for each interval, which isn't very responsive; that is, even if you call
-    self.cancel(), you'll have to wait until the sleep() call finishes before
-    the thread stops. To compensate, it defaults to being daemonic, which means
-    it won't delay stopping the whole process.
+    Use this class for most repeating tasks. It uses time.sleep() to
+    wait for each interval, which isn't very responsive; that is, even
+    if you call self.cancel(), you'll have to wait until the sleep()
+    call finishes before the thread stops. To compensate, it defaults to
+    being daemonic, which means it won't delay stopping the whole
+    process.
     """

     def __init__(self, interval, function, args=[], kwargs={}, bus=None):
@@ -525,7 +525,6 @@ class BackgroundTask(threading.Thread):


 class Monitor(SimplePlugin):
-
     """WSPBus listener to periodically run a callback in its own thread."""

     callback = None

@@ -582,7 +581,6 @@ class Monitor(SimplePlugin):


 class Autoreloader(Monitor):
-
     """Monitor which re-executes the process when files change.

     This :ref:`plugin<plugins>` restarts the process (via :func:`os.execv`)
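Monitor is the usual entry point to the background-task machinery above; a minimal subscription sketch (the callback is hypothetical):

```python
import cherrypy
from cherrypy.process.plugins import Monitor

def poll_health():
    cherrypy.log('health check ran')  # hypothetical periodic job

# Runs poll_health on its own BackgroundTask every 60 seconds.
Monitor(cherrypy.engine, poll_health, frequency=60).subscribe()
```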
@@ -699,20 +697,20 @@ class Autoreloader(Monitor):


 class ThreadManager(SimplePlugin):
-
     """Manager for HTTP request threads.

     If you have control over thread creation and destruction, publish to
-    the 'acquire_thread' and 'release_thread' channels (for each thread).
-    This will register/unregister the current thread and publish to
-    'start_thread' and 'stop_thread' listeners in the bus as needed.
+    the 'acquire_thread' and 'release_thread' channels (for each
+    thread). This will register/unregister the current thread and
+    publish to 'start_thread' and 'stop_thread' listeners in the bus as
+    needed.

     If threads are created and destroyed by code you do not control
     (e.g., Apache), then, at the beginning of every HTTP request,
     publish to 'acquire_thread' only. You should not publish to
-    'release_thread' in this case, since you do not know whether
-    the thread will be re-used or not. The bus will call
-    'stop_thread' listeners for you when it stops.
+    'release_thread' in this case, since you do not know whether the
+    thread will be re-used or not. The bus will call 'stop_thread'
+    listeners for you when it stops.
     """

     threads = None
@@ -132,7 +132,6 @@ class Timeouts:


 class ServerAdapter(object):
-
     """Adapter for an HTTP server.

     If you need to start more than one HTTP server (to serve on multiple

@@ -188,9 +187,7 @@ class ServerAdapter(object):

     @property
     def description(self):
-        """
-        A description about where this server is bound.
-        """
+        """A description about where this server is bound."""
         if self.bind_addr is None:
             on_what = 'unknown interface (dynamic?)'
         elif isinstance(self.bind_addr, tuple):

@@ -292,7 +289,6 @@ class ServerAdapter(object):


 class FlupCGIServer(object):
-
     """Adapter for a flup.server.cgi.WSGIServer."""

     def __init__(self, *args, **kwargs):

@@ -316,7 +312,6 @@ class FlupCGIServer(object):


 class FlupFCGIServer(object):
-
     """Adapter for a flup.server.fcgi.WSGIServer."""

     def __init__(self, *args, **kwargs):

@@ -362,7 +357,6 @@ class FlupFCGIServer(object):


 class FlupSCGIServer(object):
-
     """Adapter for a flup.server.scgi.WSGIServer."""

     def __init__(self, *args, **kwargs):
@@ -1,4 +1,7 @@
-"""Windows service. Requires pywin32."""
+"""Windows service.
+
+Requires pywin32.
+"""

 import os
 import win32api

@@ -11,7 +14,6 @@ from cherrypy.process import wspbus, plugins


 class ConsoleCtrlHandler(plugins.SimplePlugin):
-
     """A WSPBus plugin for handling Win32 console events (like Ctrl-C)."""

     def __init__(self, bus):

@@ -69,10 +71,10 @@ class ConsoleCtrlHandler(plugins.SimplePlugin):


 class Win32Bus(wspbus.Bus):
-
     """A Web Site Process Bus implementation for Win32.

-    Instead of time.sleep, this bus blocks using native win32event objects.
+    Instead of time.sleep, this bus blocks using native win32event
+    objects.
     """

     def __init__(self):

@@ -120,7 +122,6 @@ class Win32Bus(wspbus.Bus):


 class _ControlCodes(dict):
-
     """Control codes used to "signal" a service via ControlService.

     User-defined control codes are in the range 128-255. We generally use

@@ -152,7 +153,6 @@ def signal_child(service, command):


 class PyWebService(win32serviceutil.ServiceFramework):
-
     """Python Web Service."""

     _svc_name_ = 'Python Web Service'
@@ -57,7 +57,6 @@ the new state.::
                         |  \      |
                         |   V     V
                        STARTED <-- STARTING
-
 """

 import atexit

@@ -65,7 +64,7 @@ import atexit
 try:
     import ctypes
 except ImportError:
-    """Google AppEngine is shipped without ctypes
+    """Google AppEngine is shipped without ctypes.

     :seealso: http://stackoverflow.com/a/6523777/70170
     """
@@ -165,8 +164,8 @@ class Bus(object):
     All listeners for a given channel are guaranteed to be called even
     if others at the same channel fail. Each failure is logged, but
     execution proceeds on to the next listener. The only way to stop all
-    processing from inside a listener is to raise SystemExit and stop the
-    whole server.
+    processing from inside a listener is to raise SystemExit and stop
+    the whole server.
     """

     states = states

@@ -312,8 +311,9 @@ class Bus(object):
     def restart(self):
         """Restart the process (may close connections).

-        This method does not restart the process from the calling thread;
-        instead, it stops the bus and asks the main thread to call execv.
+        This method does not restart the process from the calling
+        thread; instead, it stops the bus and asks the main thread to
+        call execv.
         """
         self.execv = True
         self.exit()

@@ -327,10 +327,11 @@ class Bus(object):
         """Wait for the EXITING state, KeyboardInterrupt or SystemExit.

         This function is intended to be called only by the main thread.
-        After waiting for the EXITING state, it also waits for all threads
-        to terminate, and then calls os.execv if self.execv is True. This
-        design allows another thread to call bus.restart, yet have the main
-        thread perform the actual execv call (required on some platforms).
+        After waiting for the EXITING state, it also waits for all
+        threads to terminate, and then calls os.execv if self.execv is
+        True. This design allows another thread to call bus.restart, yet
+        have the main thread perform the actual execv call (required on
+        some platforms).
         """
         try:
             self.wait(states.EXITING, interval=interval, channel='main')
@@ -379,13 +380,14 @@ class Bus(object):
     def _do_execv(self):
         """Re-execute the current process.

-        This must be called from the main thread, because certain platforms
-        (OS X) don't allow execv to be called in a child thread very well.
+        This must be called from the main thread, because certain
+        platforms (OS X) don't allow execv to be called in a child
+        thread very well.
         """
         try:
             args = self._get_true_argv()
         except NotImplementedError:
-            """It's probably win32 or GAE"""
+            """It's probably win32 or GAE."""
             args = [sys.executable] + self._get_interpreter_argv() + sys.argv

         self.log('Re-spawning %s' % ' '.join(args))

@@ -472,7 +474,7 @@ class Bus(object):
         c_ind = None

         if is_module:
-            """It's containing `-m -m` sequence of arguments"""
+            """It's containing `-m -m` sequence of arguments."""
             if is_command and c_ind < m_ind:
                 """There's `-c -c` before `-m`"""
                 raise RuntimeError(

@@ -481,7 +483,7 @@ class Bus(object):
             # Survive module argument here
             original_module = sys.argv[0]
             if not os.access(original_module, os.R_OK):
-                """There's no such module exist"""
+                """There's no such module exist."""
                 raise AttributeError(
                     "{} doesn't seem to be a module "
                     'accessible by current user'.format(original_module))

@@ -489,7 +491,7 @@ class Bus(object):
             # ... and substitute it with the original module path:
             _argv.insert(m_ind, original_module)
         elif is_command:
-            """It's containing just `-c -c` sequence of arguments"""
+            """It's containing just `-c -c` sequence of arguments."""
             raise RuntimeError(
                 "Cannot reconstruct command from '-c'. "
                 'Ref: https://github.com/cherrypy/cherrypy/issues/1545')
@@ -512,13 +514,13 @@ class Bus(object):
         """Prepend current working dir to PATH environment variable if needed.

         If sys.path[0] is an empty string, the interpreter was likely
-        invoked with -m and the effective path is about to change on
-        re-exec. Add the current directory to $PYTHONPATH to ensure
-        that the new process sees the same path.
+        invoked with -m and the effective path is about to change on re-
+        exec. Add the current directory to $PYTHONPATH to ensure that
+        the new process sees the same path.

         This issue cannot be addressed in the general case because
-        Python cannot reliably reconstruct the
-        original command line (http://bugs.python.org/issue14208).
+        Python cannot reliably reconstruct the original command line (
+        http://bugs.python.org/issue14208).

         (This idea filched from tornado.autoreload)
         """

@@ -536,10 +538,10 @@ class Bus(object):
         """Set the CLOEXEC flag on all open files (except stdin/out/err).

         If self.max_cloexec_files is an integer (the default), then on
-        platforms which support it, it represents the max open files setting
-        for the operating system. This function will be called just before
-        the process is restarted via os.execv() to prevent open files
-        from persisting into the new process.
+        platforms which support it, it represents the max open files
+        setting for the operating system. This function will be called
+        just before the process is restarted via os.execv() to prevent
+        open files from persisting into the new process.

         Set self.max_cloexec_files to 0 to disable this behavior.
         """

@@ -578,7 +580,10 @@ class Bus(object):
         return t

     def log(self, msg='', level=20, traceback=False):
-        """Log the given message. Append the last traceback if requested."""
+        """Log the given message.
+
+        Append the last traceback if requested.
+        """
         if traceback:
             msg += '\n' + ''.join(_traceback.format_exception(*sys.exc_info()))
         self.publish('log', msg, level)
@@ -9,7 +9,6 @@ Even before any tweaking, this should serve a few demonstration pages.
 Change to this directory and run:

     cherryd -c site.conf
-
 """

 import cherrypy
Some files were not shown because too many files have changed in this diff.