mirror of https://github.com/Tautulli/Tautulli.git
synced 2025-07-06 13:11:15 -07:00

Compare commits

No commits in common. "master" and "v2.14.0-beta" have entirely different histories.

master ... v2.14.0-beta

551 changed files with 23036 additions and 31602 deletions
.github/workflows/publish-docker.yml (13 changed lines, vendored)

@@ -33,6 +33,7 @@ jobs:
            echo "branch=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT
          fi
          echo "commit=${GITHUB_SHA}" >> $GITHUB_OUTPUT
          echo "build_date=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_OUTPUT
          echo "docker_platforms=linux/amd64,linux/arm64/v8,linux/arm/v7,linux/arm/v6" >> $GITHUB_OUTPUT
          echo "docker_image=${{ secrets.DOCKER_REPO }}/tautulli" >> $GITHUB_OUTPUT

@@ -58,7 +59,7 @@ jobs:
        if: success()
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
-         password: ${{ secrets.DOCKER_TOKEN }}
+         password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3

@@ -68,14 +69,8 @@ jobs:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.GHCR_TOKEN }}

-     - name: Extract Docker Metadata
-       id: metadata
-       uses: docker/metadata-action@v5
-       with:
-         images: ${{ steps.prepare.outputs.docker_image }}
-
      - name: Docker Build and Push
-       uses: docker/build-push-action@v6
+       uses: docker/build-push-action@v5
        if: success()
        with:
          context: .

@@ -86,10 +81,10 @@ jobs:
            TAG=${{ steps.prepare.outputs.tag }}
            BRANCH=${{ steps.prepare.outputs.branch }}
            COMMIT=${{ steps.prepare.outputs.commit }}
            BUILD_DATE=${{ steps.prepare.outputs.build_date }}
          tags: |
            ${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.tag }}
            ghcr.io/${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.tag }}
-         labels: ${{ steps.metadata.outputs.labels }}
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache
.github/workflows/publish-installers.yml (22 changed lines, vendored)

@@ -75,7 +75,7 @@ jobs:
          pyinstaller -y ./package/Tautulli-${{ matrix.os }}.spec

      - name: Create Windows Installer
-       uses: joncloud/makensis-action@v4.1
+       uses: joncloud/makensis-action@v4
        if: matrix.os == 'windows'
        with:
          script-file: ./package/Tautulli.nsi

@@ -100,24 +100,6 @@ jobs:
          name: Tautulli-${{ matrix.os }}-installer
          path: Tautulli-${{ matrix.os }}-${{ steps.get_version.outputs.RELEASE_VERSION }}-${{ matrix.arch }}.${{ matrix.ext }}

-   virus-total:
-     name: VirusTotal Scan
-     needs: build-installer
-     if: needs.build-installer.result == 'success' && !contains(github.event.head_commit.message, '[skip ci]')
-     runs-on: ubuntu-latest
-     steps:
-       - name: Download Installers
-         if: needs.build-installer.result == 'success'
-         uses: actions/download-artifact@v4
-
-       - name: Upload to VirusTotal
-         uses: crazy-max/ghaction-virustotal@v4
-         with:
-           vt_api_key: ${{ secrets.VT_API_KEY }}
-           files: |
-             Tautulli-windows-installer/Tautulli-windows-*-x64.exe
-             Tautulli-macos-installer/Tautulli-macos-*-universal.pkg
-
    release:
      name: Release Installers
      needs: build-installer

@@ -161,7 +143,7 @@ jobs:
          prerelease: ${{ endsWith(steps.get_version.outputs.RELEASE_VERSION, '-beta') }}
          files: |
            Tautulli-windows-installer/Tautulli-windows-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.exe
            Tautulli-macos-installer/Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-universal.pkg
            Tautulli-macos-installer/Tautulli-macos-${{ steps.get_version.outputs.RELEASE_VERSION }}-x64.pkg

    discord:
      name: Discord Notification
.github/workflows/publish-snap.yml (4 changed lines, vendored)

@@ -38,7 +38,7 @@ jobs:
        uses: docker/setup-qemu-action@v3

      - name: Build Snap Package
-       uses: diddlesnaps/snapcraft-multiarch-action@master
+       uses: diddlesnaps/snapcraft-multiarch-action@v1
        id: build
        with:
          architecture: ${{ matrix.architecture }}

@@ -50,7 +50,7 @@ jobs:
          path: ${{ steps.build.outputs.snap }}

      - name: Review Snap Package
-       uses: diddlesnaps/snapcraft-review-tools-action@master
+       uses: diddlesnaps/snapcraft-review-tools-action@v1
        with:
          snap: ${{ steps.build.outputs.snap }}
.github/workflows/submit-winget.yml (14 changed lines, vendored)

@@ -23,17 +23,3 @@ jobs:
          # getting latest wingetcreate file
          iwr https://aka.ms/wingetcreate/latest -OutFile wingetcreate.exe
          .\wingetcreate.exe update $wingetPackage -s -v $version -u $installerUrl -t $gitToken
-
-   virus-total:
-     name: VirusTotal Scan
-     runs-on: ubuntu-latest
-     steps:
-       - name: Upload to VirusTotal
-         uses: crazy-max/ghaction-virustotal@v4
-         with:
-           vt_api_key: ${{ secrets.VT_API_KEY }}
-           github_token: ${{ secrets.GHACTIONS_TOKEN }}
-           update_release_body: true
-           files: |
-             .exe$
-             .pkg$
CHANGELOG.md (117 changed lines)

@@ -1,110 +1,6 @@
  # Changelog

- ## v2.15.2 (2025-04-12)
-
- * Activity:
-   * New: Added link to library by clicking media type icon.
-   * New: Added stream count to tab title on homepage. (#2517)
- * History:
-   * Fix: Check stream watched status before stream stopped status. (#2506)
- * Notifications:
-   * Fix: ntfy notifications failing to send if provider link is blank.
-   * Fix: Check Pushover notification attachment is under 5MB limit. (#2396)
-   * Fix: Track URLs redirecting to the correct media page. (#2513)
-   * New: Added audio profile notification parameters.
-   * New: Added PATCH method for Webhook notifications.
- * Graphs:
-   * New: Added Total line to daily streams graph. (Thanks @zdimension) (#2497)
- * UI:
-   * Fix: Do not redirect API requests to the login page. (#2490)
-   * Change: Swap source and stream columns in stream info modal.
- * Other:
-   * Fix: Various typos. (Thanks @luzpaz) (#2520)
-   * Fix: CherryPy CORS response header not being set correctly. (#2279)
-
- ## v2.15.1 (2025-01-11)
-
- * Activity:
-   * Fix: Detection of HDR transcodes. (Thanks @cdecker08) (#2412, #2466)
- * Newsletters:
-   * Fix: Disable basic authentication for /newsletter and /image endpoints. (#2472)
- * Exporter:
-   * New: Added logos to season and episode exports.
- * Other:
-   * Fix: Docker container https health check.
-
- ## v2.15.0 (2024-11-24)
-
- * Notes:
-   * Support for Python 3.8 has been dropped. The minimum Python version is now 3.9.
- * Notifications:
-   * New: Allow Telegram blockquote and tg-emoji HTML tags. (Thanks @MythodeaLoL) (#2427)
-   * New: Added Plex slug and Plex Watch URL notification parameters. (#2420)
-   * Change: Update OneSignal API calls to use the new API endpoint for Tautulli Remote App notifications.
- * Newsletters:
-   * Fix: Dumping custom dates in raw newsletter json.
- * History:
-   * Fix: Unable to fix match for artists. (#2429)
- * Exporter:
-   * New: Added movie and episode hasVoiceActivity attribute to exporter fields.
-   * New: Added subtitle canAutoSync attribute to exporter fields.
-   * New: Added logos to the exporter fields.
- * UI:
-   * New: Add friendly name to the top bar of config modals. (Thanks @peagravel) (#2432)
- * API:
-   * New: Added plex slugs to metadata in the get_metadata API command.
- * Other:
-   * Fix: Tautulli failing to start with Python 3.13. (#2426)
-
- ## v2.14.6 (2024-10-12)
-
- * Newsletters:
-   * Fix: Allow formatting newsletter date parameters.
-   * Change: Support apscheduler compatible cron expressions.
- * UI:
-   * Fix: Round runtime before converting to human duration.
-   * Fix: Make recently added/watched rows touch scrollable.
- * Other:
-   * Fix: Auto-updater not running.
-
- ## v2.14.5 (2024-09-20)
-
- * Activity:
-   * Fix: Display of 2k resolution on activity card.
- * Notifications:
-   * Fix: ntfy notifications with special characters failing to send.
- * Other:
-   * Fix: Memory leak with database closing. (#2404)
-
- ## v2.14.4 (2024-08-10)
-
- * Notifications:
-   * Fix: Update Slack notification info card.
-   * New: Added ntfy notification agent. (Thanks @nwithan8) (#2356, #2000)
- * UI:
-   * Fix: macOS platform capitalization.
- * Other:
-   * Fix: Remove deprecated getdefaultlocale. (Thanks @teodorstelian) (#2364, #2345)
-
- ## v2.14.3 (2024-06-19)
-
- * Graphs:
-   * Fix: History table not loading when clicking on the graphs in some instances.
- * UI:
-   * Fix: Scheduled tasks table not loading when certain tasks are disabled.
-   * Removed: Unnecessary Remote Server checkbox from the settings page.
- * Other:
-   * Fix: Webserver not restarting after the setup wizard.
-   * Fix: Workaround webserver crashing in some instances.
-
- ## v2.14.2 (2024-05-18)
+ ## v2.14.0-beta (2024-04-19)

  * History:
    * Fix: Live TV activity not logging to history.

@@ -113,9 +9,9 @@
    * Fix: Pushover configuration settings refreshing after entering a token.
    * Fix: Plex remote access down notifications not triggering.
    * Fix: Deleting all images from Cloudinary only deleting 1000 images.
-   * New: Added platform version and product version notification parameters. (#2244)
-   * New: Added LAN streams and WAN streams notification parameters. (#2276)
-   * New: Added Dolby Vision notification parameters. (#2240)
+   * New: Added platform version and product version notification parameters.
+   * New: Added LAN streams and WAN streams notification parameters.
+   * New: Added Dolby Vision notification parameters.
    * New: Added live TV channel notification parameters.
    * Change: Improved Tautulli Remote App notification encryption method.
      * Note: Requires Tautulli Remote App version 3.2.4.

@@ -123,23 +19,20 @@
    * New: Added slug attribute to exporter fields.
    * New: Added track genres to exporter fields.
    * New: Added playlist source URI to exporter fields.
-   * New: Added artProvider and thumbProvider to exporter fields.
  * UI:
    * Fix: Mask deleted usernames in the logs.
    * Fix: Live TV watch stats not showing on the media info page.
    * Fix: Users without access to Plex server not showing as inactive.
    * Removed: Deprecated synced item pages.
-   * Removed: Anonymous redirect settings. Links now use browser no-referrer policy instead.
  * API:
    * New: Added Dolby Vision info to the get_metadata API command.
-   * New: Added before and after parameters to the get_home_stats API command. (#2231)
+   * New: Added before and after parameters to the get_home_stats API command.
  * Packages:
    * New: Universal binary for macOS for Apple silicon.
    * New: Bump Snap package to core22.
  * Other:
    * Change: Login cookie expires changed to max-age.
    * Change: Improved key generation for login password. It is recommended to reenter your HTTP Password in the settings after upgrading.
    * Removed: Python 2 compatibility code. (#2098, #2226) (Thanks @zdimension)

  ## v2.13.4 (2023-12-07)
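The v2.14.6 entry above, "Support apscheduler compatible cron expressions", refers to APScheduler's CronTrigger, which accepts standard five-field crontab strings as well as its own extended field values. A minimal sketch of the distinction, assuming APScheduler 3.x is installed:

    # Standard five-field crontab strings parse via CronTrigger.from_crontab;
    # APScheduler-specific extensions such as 'last' for the day field use the
    # keyword form instead.
    from apscheduler.triggers.cron import CronTrigger

    standard = CronTrigger.from_crontab('0 8 * * mon')  # 08:00 every Monday
    extended = CronTrigger(day='last', hour=8)          # 08:00 on the last day of each month

    print(standard)
    print(extended)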
----------------------------------------
Dockerfile

@@ -25,4 +25,4 @@ CMD [ "python", "Tautulli.py", "--datadir", "/config" ]
  ENTRYPOINT [ "./start.sh" ]

  EXPOSE 8181
- HEALTHCHECK --start-period=90s CMD curl -ILfks https://localhost:8181/status > /dev/null || curl -ILfs http://localhost:8181/status > /dev/null || exit 1
+ HEALTHCHECK --start-period=90s CMD curl -ILfSs http://localhost:8181/status > /dev/null || curl -ILfkSs https://localhost:8181/status > /dev/null || exit 1
----------------------------------------
README.md

@@ -36,7 +36,7 @@ and [PlexWatchWeb](https://github.com/ecleese/plexWatchWeb).
  [![Docker Stars][badge-docker-stars]][DockerHub]
  [![Downloads][badge-downloads]][Releases Latest]

- [badge-python]: https://img.shields.io/badge/python->=3.9-blue?style=flat-square
+ [badge-python]: https://img.shields.io/badge/python->=3.8-blue?style=flat-square
  [badge-docker-pulls]: https://img.shields.io/docker/pulls/tautulli/tautulli?style=flat-square
  [badge-docker-stars]: https://img.shields.io/docker/stars/tautulli/tautulli?style=flat-square
  [badge-downloads]: https://img.shields.io/github/downloads/Tautulli/Tautulli/total?style=flat-square

@@ -129,7 +129,7 @@ This is free software under the GPL v3 open source license. Feel free to do with
  but any modification must be open sourced. A copy of the license is included.

  This software includes Highsoft software libraries which you may freely distribute for
- non-commercial use. Commercial users must licence this software, for more information visit
+ non-commercial use. Commerical users must licence this software, for more information visit
  https://shop.highsoft.com/faq/non-commercial#non-commercial-redistribution.
Tautulli.py (26 changed lines)

@@ -23,6 +23,7 @@ import sys
  # Ensure lib added to path, before any other imports
  sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'lib'))

+ from future.builtins import str

  import argparse
  import datetime

@@ -34,7 +35,6 @@ import shutil
  import time
  import threading
  import tzlocal
  import ctypes

  import plexpy
  from plexpy import common, config, database, helpers, logger, webstart

@@ -70,26 +70,8 @@ def main():
      plexpy.SYS_ENCODING = None

      try:
-         # Attempt to get the system's locale settings
-         language_code, encoding = locale.getlocale()
-
-         # Special handling for Windows platform
-         if sys.platform == 'win32':
-             # Get the user's current language settings on Windows
-             windll = ctypes.windll.kernel32
-             lang_id = windll.GetUserDefaultLCID()
-
-             # Map Windows language ID to locale identifier
-             language_code = locale.windows_locale.get(lang_id, '')
-
-             # Get the preferred encoding
-             encoding = locale.getpreferredencoding()
-
-         # Assign values to application-specific variable
-         plexpy.SYS_LANGUAGE = language_code
-         plexpy.SYS_ENCODING = encoding
+         locale.setlocale(locale.LC_ALL, "")
+         plexpy.SYS_LANGUAGE, plexpy.SYS_ENCODING = locale.getdefaultlocale()
      except (locale.Error, IOError):
          pass

@@ -129,7 +111,7 @@ def main():
      if args.quiet:
          plexpy.QUIET = True

-     # Do an initial setup of the logger.
+     # Do an intial setup of the logger.
      # Require verbose for pre-initilization to see critical errors
      logger.initLogger(console=not plexpy.QUIET, log_dir=False, verbose=True)
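The locale hunk above pairs with the v2.14.4 changelog entry "Remove deprecated getdefaultlocale": locale.getdefaultlocale() has been deprecated since Python 3.11, and the master side replaces it with locale.getlocale() plus a Windows-specific LCID lookup. A standalone sketch of that replacement pattern, with a plain function standing in for the plexpy globals:

    import ctypes
    import locale
    import sys

    def detect_system_locale():
        """Return (language_code, encoding) without locale.getdefaultlocale()."""
        language_code, encoding = locale.getlocale()
        if sys.platform == 'win32':
            # Map the Windows user-default LCID to a locale name such as 'en_US'.
            lang_id = ctypes.windll.kernel32.GetUserDefaultLCID()
            language_code = locale.windows_locale.get(lang_id, '')
            encoding = locale.getpreferredencoding()
        return language_code, encoding

    print(detect_system_locale())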
----------------------------------------

@@ -1478,8 +1478,7 @@ a:hover .dashboard-stats-square {
      text-align: center;
      position: relative;
      z-index: 0;
-     overflow: auto;
-     scrollbar-width: none;
+     overflow: hidden;
  }
  .dashboard-recent-media {
      width: 100%;

@@ -4325,10 +4324,6 @@ a:hover .overlay-refresh-image:hover {
  .stream-info tr:nth-child(even) td {
      background-color: rgba(255,255,255,0.010);
  }
- .stream-info td:nth-child(3),
- .stream-info th:nth-child(3) {
-     width: 25px;
- }
  .number-input {
      margin: 0 !important;
      width: 55px !important;
----------------------------------------

@@ -74,7 +74,6 @@ DOCUMENTATION :: END
      parent_href = page('info', data['parent_rating_key'])
      grandparent_href = page('info', data['grandparent_rating_key'])
      user_href = page('user', data['user_id']) if data['user_id'] else '#'
-     library_href = page('library', data['section_id']) if data['section_id'] else '#'
      season = short_season(data['parent_title'])
  %>
  <div class="dashboard-activity-instance" id="activity-instance-${sk}" data-key="${sk}" data-id="${data['session_id']}"

@@ -464,27 +463,21 @@ DOCUMENTATION :: END
  <div class="dashboard-activity-metadata-subtitle-container">
  % if data['live']:
      <div id="media-type-${sk}" class="dashboard-activity-metadata-media_type-icon" title="Live TV">
-         <a href="${library_href}">
-             <i class="fa fa-fw fa-broadcast-tower"></i>
-         </a>
+         <i class="fa fa-fw fa-broadcast-tower"></i>
      </div>
  % elif data['channel_stream'] == 0:
      <div id="media-type-${sk}" class="dashboard-activity-metadata-media_type-icon" title="${data['media_type'].capitalize()}">
-         <a href="${library_href}">
          % if data['media_type'] == 'movie':
-             <i class="fa fa-fw fa-film"></i>
+         <i class="fa fa-fw fa-film"></i>
          % elif data['media_type'] == 'episode':
-             <i class="fa fa-fw fa-television"></i>
+         <i class="fa fa-fw fa-television"></i>
          % elif data['media_type'] == 'track':
-             <i class="fa fa-fw fa-music"></i>
+         <i class="fa fa-fw fa-music"></i>
          % elif data['media_type'] == 'photo':
-             <i class="fa fa-fw fa-picture-o"></i>
+         <i class="fa fa-fw fa-picture-o"></i>
          % elif data['media_type'] == 'clip':
-             <i class="fa fa-fw fa-video-camera"></i>
-         % else:
-             <i class="fa fa-fw fa-question-circle"></i>
+         <i class="fa fa-fw fa-video-camera"></i>
          % endif
-         </a>
      </div>
  % else:
      <div id="media-type-${sk}" class="dashboard-activity-metadata-media_type-icon" title="Channel">
----------------------------------------

@@ -20,7 +20,6 @@ DOCUMENTATION :: END
      export = exporter.Export()
      thumb_media_types = ', '.join([export.PLURAL_MEDIA_TYPES[k] for k, v in export.MEDIA_TYPES.items() if v[0]])
      art_media_types = ', '.join([export.PLURAL_MEDIA_TYPES[k] for k, v in export.MEDIA_TYPES.items() if v[1]])
-     logo_media_types = ', '.join([export.PLURAL_MEDIA_TYPES[k] for k, v in export.MEDIA_TYPES.items() if v[2]])
  %>
  <div class="modal-dialog" role="document">
      <div class="modal-content">

@@ -145,22 +144,6 @@ DOCUMENTATION :: END
              Select the level to export background artwork image files.<br>Note: Only applies to ${art_media_types}.
          </p>
      </div>
-     <div class="form-group">
-         <label for="export_logo_level">Logo Image Export Level</label>
-         <div class="row">
-             <div class="col-md-12">
-                 <select class="form-control" id="export_logo_level" name="export_logo_level">
-                     <option value="0" selected>Level 0 - None / Custom</option>
-                     <option value="1">Level 1 - Uploaded and Selected Logos Only</option>
-                     <option value="2">Level 2 - Selected and Locked Logos Only</option>
-                     <option value="9">Level 9 - All Selected Logos</option>
-                 </select>
-             </div>
-         </div>
-         <p class="help-block">
-             Select the level to export logo image files.<br>Note: Only applies to ${logo_media_types}.
-         </p>
-     </div>
      <p class="help-block">
          Warning: Exporting images may take a long time! Images will be saved to a folder alongside the data file.
      </p>

@@ -248,7 +231,6 @@ DOCUMENTATION :: END
      $('#export_media_info_level').prop('disabled', true);
      $("#export_thumb_level").prop('disabled', true);
      $("#export_art_level").prop('disabled', true);
-     $("#export_logo_level").prop('disabled', true);
      export_custom_metadata_fields.disable();
      export_custom_media_info_fields.disable();
  } else {

@@ -256,7 +238,6 @@ DOCUMENTATION :: END
      $('#export_media_info_level').prop('disabled', false);
      $("#export_thumb_level").prop('disabled', false);
      $("#export_art_level").prop('disabled', false);
-     $("#export_logo_level").prop('disabled', false);
      export_custom_metadata_fields.enable();
      export_custom_media_info_fields.enable();
  }

@@ -271,7 +252,6 @@ DOCUMENTATION :: END
      var file_format = $('#export_file_format option:selected').val();
      var thumb_level = $("#export_thumb_level option:selected").val();
      var art_level = $("#export_art_level option:selected").val();
-     var logo_level = $("#export_logo_level option:selected").val();
      var custom_fields = [
          $('#export_custom_metadata_fields').val(),
          $('#export_custom_media_info_fields').val()

@@ -290,7 +270,6 @@ DOCUMENTATION :: END
      file_format: file_format,
      thumb_level: thumb_level,
      art_level: art_level,
-     logo_level: logo_level,
      custom_fields: custom_fields,
      export_type: export_type,
      individual_files: individual_files
----------------------------------------

@@ -301,10 +301,6 @@
      return obj;
  }, {});

- if (!("Total" in chart_visibility)) {
-     chart_visibility["Total"] = false;
- }
-
  return data_series.map(function(s) {
      var obj = Object.assign({}, s);
      obj.visible = (chart_visibility[s.name] !== false);

@@ -331,8 +327,7 @@
      'Direct Play': '#E5A00D',
      'Direct Stream': '#FFFFFF',
      'Transcode': '#F06464',
-     'Max. Concurrent Streams': '#96C83C',
-     'Total': '#96C83C'
+     'Max. Concurrent Streams': '#96C83C'
  };
  var series_colors = [];
  $.each(data_series, function(index, series) {
----------------------------------------

@@ -92,10 +92,10 @@
  <h3 class="pull-left"><span id="recently-added-xml">Recently Added</span></h3>
  <ul class="nav nav-header nav-dashboard pull-right" style="margin-top: -3px;">
      <li>
-         <a href="#" id="recently-added-page-left" class="paginate-added btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
+         <a href="#" id="recently-added-page-left" class="paginate btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a>
      </li>
      <li>
-         <a href="#" id="recently-added-page-right" class="paginate-added btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
+         <a href="#" id="recently-added-page-right" class="paginate btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a>
      </li>
  </ul>
  <div class="button-bar">

@@ -212,6 +212,28 @@
              </div>
          </div>
      </div>
+     <% from plexpy.helpers import anon_url %>
+     <div id="python2-modal" class="modal fade wide" tabindex="-1" role="dialog" aria-labelledby="python2-modal">
+         <div class="modal-dialog" role="document">
+             <div class="modal-content">
+                 <div class="modal-header">
+                     <button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
+                     <h4 class="modal-title">Unable to Update</h4>
+                 </div>
+                 <div class="modal-body" style="text-align: center;">
+                     <p>Tautulli is still running using Python 2 and cannot be updated past v2.6.3.</p>
+                     <p>Python 3 is required to continue receiving updates.</p>
+                     <p>
+                         <strong>Please see the <a href="${anon_url('https://github.com/Tautulli/Tautulli/wiki/Upgrading-to-Python-3-%28Tautulli-v2.5%29')}" target="_blank" rel="noreferrer">wiki</a>
+                         for instructions on how to upgrade to Python 3.</strong>
+                     </p>
+                 </div>
+                 <div class="modal-footer">
+                     <input type="button" class="btn btn-bright" data-dismiss="modal" value="Close">
+                 </div>
+             </div>
+         </div>
+     </div>
  % endif

  <div class="modal fade" id="ip-info-modal" tabindex="-1" role="dialog" aria-labelledby="ip-info-modal">

@@ -298,8 +320,6 @@
      $('#currentActivityHeader-bandwidth-tooltip').tooltip({ container: 'body', placement: 'right', delay: 50 });

-     var title = document.title;
-
      function getCurrentActivity() {
          activity_ready = false;

@@ -370,8 +390,6 @@
      $('#currentActivityHeader').show();

-     document.title = stream_count + ' stream' + (stream_count > 1 ? 's' : '') + ' | ' + title;
-
      sessions.forEach(function (session) {
          var s = (typeof Proxy === "function") ? new Proxy(session, defaultHandler) : session;
          var key = s.session_key;

@@ -604,8 +622,6 @@
      } else {
          $('#currentActivityHeader').hide();
          $('#currentActivity').html('<div id="dashboard-no-activity" class="text-muted">Nothing is currently being played.</div>');
-
-         document.title = title;
      }

      activity_ready = true;

@@ -942,14 +958,10 @@
          count: recently_added_count,
          media_type: recently_added_type
      },
-     beforeSend: function () {
-         $(".dashboard-recent-media-row").animate({ scrollLeft: 0 }, 1000);
-     },
      complete: function (xhr, status) {
          $("#recentlyAdded").html(xhr.responseText);
          $('#ajaxMsg').fadeOut();
-         highlightScrollerButton("#recently-added");
-         paginateScroller("#recently-added", ".paginate-added");
+         highlightAddedScrollerButton();
      }
  });
  }

@@ -965,11 +977,57 @@
      recentlyAdded(recently_added_count, recently_added_type);
  }

+ function highlightAddedScrollerButton() {
+     var scroller = $("#recently-added-row-scroller");
+     var numElems = scroller.find("li:visible").length;
+     scroller.width(numElems * 175);
+     if (scroller.width() > $("body").find(".container-fluid").width()) {
+         $("#recently-added-page-right").removeClass("disabled");
+     } else {
+         $("#recently-added-page-right").addClass("disabled");
+     }
+ }
+
+ $(window).resize(function () {
+     highlightAddedScrollerButton();
+ });
+
+ function resetScroller() {
+     leftTotal = 0;
+     $("#recently-added-row-scroller").animate({ left: leftTotal }, 1000);
+     $("#recently-added-page-left").addClass("disabled").blur();
+ }
+
+ var leftTotal = 0;
+ $(".paginate").click(function (e) {
+     e.preventDefault();
+     var scroller = $("#recently-added-row-scroller");
+     var containerWidth = $("body").find(".container-fluid").width();
+     var scrollAmount = $(this).data("id") * parseInt((containerWidth - 15) / 175) * 175;
+     var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
+
+     leftTotal = Math.max(Math.min(leftTotal + scrollAmount, 0), leftMax);
+     scroller.animate({ left: leftTotal }, 250);
+
+     if (leftTotal === 0) {
+         $("#recently-added-page-left").addClass("disabled").blur();
+     } else {
+         $("#recently-added-page-left").removeClass("disabled");
+     }
+
+     if (leftTotal === leftMax) {
+         $("#recently-added-page-right").addClass("disabled").blur();
+     } else {
+         $("#recently-added-page-right").removeClass("disabled");
+     }
+ });
+
  $('#recently-added-toggles').on('change', function () {
      $('#recently-added-toggles > label').removeClass('active');
      selected_filter = $('input[name=recently-added-toggle]:checked', '#recently-added-toggles');
      $(selected_filter).closest('label').addClass('active');
      recently_added_type = $(selected_filter).val();
+     resetScroller();
      setLocalStorage('home_stats_recently_added_type', recently_added_type);
      recentlyAdded(recently_added_count, recently_added_type);
  });

@@ -977,6 +1035,7 @@
  $('#recently-added-count').change(function () {
      forceMinMax($(this));
      recently_added_count = $(this).val();
+     resetScroller();
      setLocalStorage('home_stats_recently_added_count', recently_added_count);
      recentlyAdded(recently_added_count, recently_added_type);
  });

@@ -1008,4 +1067,16 @@
  });
  </script>
  % endif
+ % if _session['user_group'] == 'admin':
+ <script>
+     const queryString = window.location.search;
+     const urlParams = new URLSearchParams(queryString);
+     if (urlParams.get('update') === 'python2') {
+         $("#python2-modal").modal({
+             backdrop: 'static',
+             keyboard: false
+         });
+     }
+ </script>
+ % endif
  </%def>
----------------------------------------

@@ -360,8 +360,7 @@ function humanDuration(ms, sig='dhm', units='ms', return_seconds=300000) {
          sig = 'dhms'
      }

-     r = factors[sig.slice(-1)];
-     ms = Math.round(ms * factors[units] / r) * r;
+     ms = ms * factors[units];

      h = ms % factors['d'];
      d = Math.trunc(ms / factors['d']);

@@ -930,50 +929,3 @@ $('.modal').on('hide.bs.modal', function (e) {
  $.fn.hasScrollBar = function() {
      return this.get(0).scrollHeight > this.get(0).clientHeight;
  }
-
- function paginateScroller(scrollerId, buttonClass) {
-     $(buttonClass).click(function (e) {
-         e.preventDefault();
-         var scroller = $(scrollerId + "-row-scroller");
-         var scrollerParent = scroller.parent();
-         var containerWidth = scrollerParent.width();
-         var scrollCurrent = scrollerParent.scrollLeft();
-         var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
-         var scrollMax = scroller.width() - Math.abs(scrollAmount);
-         var scrollTotal = Math.min(parseInt(scrollCurrent / 175) * 175 + scrollAmount, scrollMax);
-         scrollerParent.animate({ scrollLeft: scrollTotal }, 250);
-     });
- }
-
- function highlightScrollerButton(scrollerId) {
-     var scroller = $(scrollerId + "-row-scroller");
-     var scrollerParent = scroller.parent();
-     var buttonLeft = $(scrollerId + "-page-left");
-     var buttonRight = $(scrollerId + "-page-right");
-
-     var numElems = scroller.find("li").length;
-     scroller.width(numElems * 175);
-     $(buttonLeft).addClass("disabled").blur();
-     if (scroller.width() > scrollerParent.width()) {
-         $(buttonRight).removeClass("disabled");
-     } else {
-         $(buttonRight).addClass("disabled");
-     }
-
-     scrollerParent.scroll(function () {
-         var scrollCurrent = $(this).scrollLeft();
-         var scrollMax = scroller.width() - $(this).width();
-
-         if (scrollCurrent == 0) {
-             $(buttonLeft).addClass("disabled").blur();
-         } else {
-             $(buttonLeft).removeClass("disabled");
-         }
-
-         if (scrollCurrent >= scrollMax) {
-             $(buttonRight).addClass("disabled").blur();
-         } else {
-             $(buttonRight).removeClass("disabled");
-         }
-     });
- }
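The humanDuration change above rounds the raw duration to the smallest displayed unit before decomposing it (the v2.14.6 entry "Round runtime before converting to human duration"). A hedged Python rendering of the same idea, with a factor table mirroring the JavaScript factors object:

    # Round milliseconds to the smallest significant unit first, so that e.g.
    # 89.6 minutes renders as '1 hr 30 mins' instead of '1 hr 29 mins'.
    FACTORS = {'d': 86400000, 'h': 3600000, 'm': 60000, 's': 1000, 'ms': 1}

    def round_to_unit(ms, sig='dhm'):
        r = FACTORS[sig[-1]]        # smallest unit kept, e.g. minutes
        return round(ms / r) * r

    print(round_to_unit(89.6 * 60000))  # 5400000 ms, i.e. exactly 90 minutes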
----------------------------------------

@@ -100,7 +100,7 @@ export_table_options = {
  "createdCell": function (td, cellData, rowData, row, col) {
      if (cellData !== '') {
          var images = '';
-         if (rowData['thumb_level'] || rowData['art_level'] || rowData['logo_level']) {
+         if (rowData['thumb_level'] || rowData['art_level']) {
              images = ' + images';
          }
          $(td).html(cellData + images);

@@ -161,14 +161,14 @@ export_table_options = {
  if (cellData === 1 && rowData['exists']) {
      var tooltip_title = '';
      var icon = '';
-     if (rowData['thumb_level'] || rowData['art_level'] || rowData['logo_level'] || rowData['individual_files']) {
+     if (rowData['thumb_level'] || rowData['art_level'] || rowData['individual_files']) {
          tooltip_title = 'Zip Archive';
          icon = 'fa-file-archive';
      } else {
          tooltip_title = rowData['file_format'].toUpperCase() + ' File';
          icon = 'fa-file-download';
      }
-     var icon = (rowData['thumb_level'] || rowData['art_level'] || rowData['logo_level'] || rowData['individual_files']) ? 'fa-file-archive' : 'fa-file-download';
+     var icon = (rowData['thumb_level'] || rowData['art_level'] || rowData['individual_files']) ? 'fa-file-archive' : 'fa-file-download';
      $(td).html('<button class="btn btn-xs btn-success pull-left" data-id="' + rowData['export_id'] + '"><span data-toggle="tooltip" data-placement="left" title="' + tooltip_title + '"><i class="fa ' + icon + ' fa-fw"></i> Download</span></button>');
  } else if (cellData === 0) {
      var percent = Math.min(getPercent(rowData['exported_items'], rowData['total_items']), 99)
----------------------------------------

@@ -149,10 +149,10 @@ DOCUMENTATION :: END
  <div class="table-card-header">
      <ul class="nav nav-header nav-dashboard pull-right">
          <li>
-             <a href="#" id="recently-watched-page-left" class="paginate-watched btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
+             <a href="#" id="recently-watched-page-left" class="paginate-watched btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a>
          </li>
          <li>
-             <a href="#" id="recently-watched-page-right" class="paginate-watched btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
+             <a href="#" id="recently-watched-page-right" class="paginate-watched btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a>
          </li>
      </ul>
      <div class="header-bar">

@@ -175,10 +175,10 @@ DOCUMENTATION :: END
  <div class="table-card-header">
      <ul class="nav nav-header nav-dashboard pull-right">
          <li>
-             <a href="#" id="recently-added-page-left" class="paginate-added btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
+             <a href="#" id="recently-added-page-left" class="paginate-added btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a>
          </li>
          <li>
-             <a href="#" id="recently-added-page-right" class="paginate-added btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
+             <a href="#" id="recently-added-page-right" class="paginate-added btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a>
          </li>
      </ul>
      <div class="header-bar">

@@ -690,8 +690,7 @@ DOCUMENTATION :: END
      },
      complete: function(xhr, status) {
          $("#library-recently-watched").html(xhr.responseText);
-         highlightScrollerButton("#recently-watched");
-         paginateScroller("#recently-watched", ".paginate-watched");
+         highlightWatchedScrollerButton();
      }
  });
  }

@@ -707,8 +706,7 @@ DOCUMENTATION :: END
      },
      complete: function(xhr, status) {
          $("#library-recently-added").html(xhr.responseText);
-         highlightScrollerButton("#recently-added");
-         paginateScroller("#recently-added", ".paginate-added");
+         highlightAddedScrollerButton();
      }
  });
  }

@@ -718,8 +716,83 @@ DOCUMENTATION :: END
      recentlyAdded();
  % endif

+ function highlightWatchedScrollerButton() {
+     var scroller = $("#recently-watched-row-scroller");
+     var numElems = scroller.find("li").length;
+     scroller.width(numElems * 175);
+     if (scroller.width() > $("#library-recently-watched").width()) {
+         $("#recently-watched-page-right").removeClass("disabled");
+     } else {
+         $("#recently-watched-page-right").addClass("disabled");
+     }
+ }
+
+ function highlightAddedScrollerButton() {
+     var scroller = $("#recently-added-row-scroller");
+     var numElems = scroller.find("li").length;
+     scroller.width(numElems * 175);
+     if (scroller.width() > $("#library-recently-added").width()) {
+         $("#recently-added-page-right").removeClass("disabled");
+     } else {
+         $("#recently-added-page-right").addClass("disabled");
+     }
+ }
+
+ $(window).resize(function() {
+     highlightWatchedScrollerButton();
+     highlightAddedScrollerButton();
+ });
+
+ $('div.art-face').animate({ opacity: 0.2 }, { duration: 1000 });
+
+ var leftTotalWatched = 0;
+ $(".paginate-watched").click(function (e) {
+     e.preventDefault();
+     var scroller = $("#recently-watched-row-scroller");
+     var containerWidth = $("#library-recently-watched").width();
+     var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
+     var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
+
+     leftTotalWatched = Math.max(Math.min(leftTotalWatched + scrollAmount, 0), leftMax);
+     scroller.animate({ left: leftTotalWatched }, 250);
+
+     if (leftTotalWatched == 0) {
+         $("#recently-watched-page-left").addClass("disabled").blur();
+     } else {
+         $("#recently-watched-page-left").removeClass("disabled");
+     }
+
+     if (leftTotalWatched == leftMax) {
+         $("#recently-watched-page-right").addClass("disabled").blur();
+     } else {
+         $("#recently-watched-page-right").removeClass("disabled");
+     }
+ });
+
+ var leftTotalAdded = 0;
+ $(".paginate-added").click(function (e) {
+     e.preventDefault();
+     var scroller = $("#recently-added-row-scroller");
+     var containerWidth = $("#library-recently-added").width();
+     var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
+     var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
+
+     leftTotalAdded = Math.max(Math.min(leftTotalAdded + scrollAmount, 0), leftMax);
+     scroller.animate({ left: leftTotalAdded }, 250);
+
+     if (leftTotalAdded == 0) {
+         $("#recently-added-page-left").addClass("disabled").blur();
+     } else {
+         $("#recently-added-page-left").removeClass("disabled");
+     }
+
+     if (leftTotalAdded == leftMax) {
+         $("#recently-added-page-right").addClass("disabled").blur();
+     } else {
+         $("#recently-added-page-right").removeClass("disabled");
+     }
+ });
+
  $(document).ready(function () {

      // Javascript to enable link to tab
----------------------------------------

@@ -36,7 +36,7 @@ DOCUMENTATION :: END
  %>
  <div class="dashboard-recent-media-row">
-     <div id="recently-added-row-scroller">
+     <div id="recently-added-row-scroller" style="left: 0;">
          <ul class="dashboard-recent-media list-unstyled">
          % for item in data:
              <li>
----------------------------------------

@@ -3,7 +3,7 @@
  <div class="modal-content">
      <div class="modal-header">
          <button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
-         <h4 class="modal-title" id="mobile-device-config-modal-header">${device['device_name']} Settings <small><span class="device_id">(Device ID: ${device['id']}${' - ' + device['friendly_name'] if device['friendly_name'] else ''})</span></small></h4>
+         <h4 class="modal-title" id="mobile-device-config-modal-header">${device['device_name']} Settings <small><span class="device_id">(Device ID: ${device['id']})</span></small></h4>
      </div>
      <div class="modal-body">
          <div class="container-fluid">
----------------------------------------

@@ -13,7 +13,7 @@
  <div class="modal-content">
      <div class="modal-header">
          <button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
-         <h4 class="modal-title" id="newsletter-config-modal-header">${newsletter['agent_label']} Newsletter Settings <small><span class="newsletter_id">(Newsletter ID: ${newsletter['id']}${' - ' + newsletter['friendly_name'] if newsletter['friendly_name'] else ''})</span></small></h4>
+         <h4 class="modal-title" id="newsletter-config-modal-header">${newsletter['agent_label']} Newsletter Settings <small><span class="newsletter_id">(Newsletter ID: ${newsletter['id']})</span></small></h4>
      </div>
      <div class="modal-body">
          <div class="container-fluid">

@@ -50,10 +50,7 @@
  </div>
  <p class="help-block">
      <span id="simple_cron_message">Set the schedule for the newsletter.</span>
-     <span id="custom_cron_message">
-         Set the schedule for the newsletter using a <a href="${anon_url('https://crontab.guru')}" target="_blank" rel="noreferrer">custom crontab</a>.
-         <a href="${anon_url('https://apscheduler.readthedocs.io/en/3.x/modules/triggers/cron.html#expression-types')}" target="_blank" rel="noreferrer">Click here</a> for a list of supported expressions.
-     </span>
+     <span id="custom_cron_message">Set the schedule for the newsletter using a <a href="${anon_url('https://crontab.guru')}" target="_blank" rel="noreferrer">custom crontab</a>. Only standard cron values are valid.</span>
  </p>
  </div>
  <div class="form-group">

@@ -484,7 +481,7 @@
  });

  if (${newsletter['config']['custom_cron']}) {
-     $('#cron_value').val('${newsletter['cron'] | n}');
+     $('#cron_value').val('${newsletter['cron']}');
  } else {
      try {
          cron_widget.cron('value', '${newsletter['cron']}');
----------------------------------------

@@ -1,5 +1,5 @@
  <%
-     from urllib.parse import urlencode
+     from six.moves.urllib.parse import urlencode
  %>
  <!doctype html>
----------------------------------------

@@ -12,7 +12,7 @@
  <div class="modal-content">
      <div class="modal-header">
          <button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
-         <h4 class="modal-title" id="notifier-config-modal-header">${notifier['agent_label']} Settings <small><span class="notifier_id">(Notifier ID: ${notifier['id']}${' - ' + notifier['friendly_name'] if notifier['friendly_name'] else ''})</span></small></h4>
+         <h4 class="modal-title" id="notifier-config-modal-header">${notifier['agent_label']} Settings <small><span class="notifier_id">(Notifier ID: ${notifier['id']})</span></small></h4>
      </div>
      <div class="modal-body">
          <div class="container-fluid">
----------------------------------------

@@ -36,7 +36,7 @@ DOCUMENTATION :: END
  %>
  % if data:
  <div class="dashboard-recent-media-row">
-     <div id="recently-added-row-scroller">
+     <div id="recently-added-row-scroller" style="left: 0;">
          <ul class="dashboard-recent-media list-unstyled">
          % for item in data:
              <div class="dashboard-recent-media-instance">
----------------------------------------

@@ -13,6 +13,8 @@ DOCUMENTATION :: END
  import datetime
  import plexpy
  from plexpy import common, helpers
+
+ scheduled_jobs = [j.id for j in plexpy.SCHED.get_jobs()]
  %>

  <table class="config-scheduler-table small-muted">

@@ -27,15 +29,16 @@ DOCUMENTATION :: END
  </thead>
  <tbody>
  % for job, job_type in common.SCHEDULER_LIST.items():
+ % if job in scheduled_jobs:
  <%
      sched_job = plexpy.SCHED.get_job(job)
+     now = datetime.datetime.now(sched_job.next_run_time.tzinfo)
  %>
- % if sched_job:
  <tr>
      <td>${sched_job.id}</td>
      <td><i class="fa fa-sm fa-fw fa-check"></i> Active</td>
      <td>${helpers.format_timedelta_Hms(sched_job.trigger.interval)}</td>
-     <td>${helpers.format_timedelta_Hms(sched_job.next_run_time - datetime.datetime.now(sched_job.next_run_time.tzinfo))}</td>
+     <td>${helpers.format_timedelta_Hms(sched_job.next_run_time - now)}</td>
      <td>${sched_job.next_run_time.astimezone(plexpy.SYS_TIMEZONE).strftime('%Y-%m-%d %H:%M:%S')}</td>
  </tr>
  % elif job_type == 'websocket' and plexpy.WS_CONNECTED:
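Both sides of the scheduler hunk above compute "time until next run" by subtracting a timezone-aware now from the job's next_run_time; the difference is only whether the call is inlined. A minimal sketch of the tz-aware subtraction, with a hypothetical next_run_time standing in for the APScheduler job attribute and a plain print standing in for Tautulli's format_timedelta_Hms helper:

    import datetime
    from zoneinfo import ZoneInfo

    # Stand-in for an APScheduler job's tz-aware next_run_time.
    next_run_time = datetime.datetime(2024, 4, 20, 3, 0, tzinfo=ZoneInfo('UTC'))

    # Subtracting a naive datetime.now() from an aware datetime raises TypeError,
    # so 'now' borrows the tzinfo of the timestamp it is compared against.
    now = datetime.datetime.now(next_run_time.tzinfo)
    print('next run in:', next_run_time - now)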
----------------------------------------

@@ -767,6 +767,7 @@
  data-identifier="${config['pms_identifier']}"
  data-ip="${config['pms_ip']}"
  data-port="${config['pms_port']}"
+ data-local="${int(not int(config['pms_is_remote']))}"
  data-ssl="${config['pms_ssl']}"
  data-is_cloud="${config['pms_is_cloud']}"
  data-label="${config['pms_name'] or 'Local'}"

@@ -799,6 +800,13 @@
      </label>
      <p class="help-block">Connect to your Plex server using HTTPS if you have <a href="${anon_url('https://support.plex.tv/articles/206225077-how-to-use-secure-server-connections')}" target="_blank" rel="noreferrer">secure connections</a> enabled.</p>
  </div>
+ <div class="checkbox">
+     <label>
+         <input type="checkbox" id="pms_is_remote_checkbox" class="checkbox-toggle pms-settings" data-id="pms_is_remote" value="1" ${checked(config['pms_is_remote'])}> Remote Server
+         <input type="hidden" id="pms_is_remote" name="pms_is_remote" value="${config['pms_is_remote']}">
+     </label>
+     <p class="help-block">Check this if your Plex Server is not on the same local network as Tautulli.</p>
+ </div>
  <div class="form-group">
      <label for="pms_url">Plex Server URL</label>
      <div class="row">

@@ -2589,6 +2597,7 @@ $(document).ready(function() {
  return '<div data-identifier="' + item.clientIdentifier +
      '" data-ip="' + item.ip +
      '" data-port="' + item.port +
+     '" data-local="' + item.local +
      '" data-ssl="' + item.httpsRequired +
      '" data-is_cloud="' + item.is_cloud +
      '" data-label="' + item.label + '">' +

@@ -2602,6 +2611,7 @@ $(document).ready(function() {
  return '<div data-identifier="' + item.clientIdentifier +
      '" data-ip="' + item.ip +
      '" data-port="' + item.port +
+     '" data-local="' + item.local +
      '" data-ssl="' + item.httpsRequired +
      '" data-is_cloud="' + item.is_cloud +
      '" data-label="' + item.label + '">' +

@@ -2624,6 +2634,7 @@ $(document).ready(function() {
  var identifier = $(pms_ip_selected).data('identifier');
  var ip = $(pms_ip_selected).data('ip');
  var port = $(pms_ip_selected).data('port');
+ var local = $(pms_ip_selected).data('local');
  var ssl = $(pms_ip_selected).data('ssl');
  var is_cloud = $(pms_ip_selected).data('is_cloud');
  var value = $(pms_ip_selected).data('value');

@@ -2631,6 +2642,8 @@ $(document).ready(function() {
  $("#pms_identifier").val(identifier !== 'undefined' ? identifier : '');
  $('#pms_ip').val(ip !== 'undefined' ? ip : value);
  $('#pms_port').val(port !== 'undefined' ? port : 32400);
+ $('#pms_is_remote_checkbox').prop('checked', (local !== 'undefined' && local === 0));
+ $('#pms_is_remote').val(local !== 'undefined' && local === 0 ? 1 : 0);
  $('#pms_ssl_checkbox').prop('checked', (ssl !== 'undefined' && ssl === 1));
  $('#pms_ssl').val(ssl !== 'undefined' && ssl === 1 ? 1 : 0);
  $('#pms_is_cloud').val(is_cloud !== 'undefined' && is_cloud === true ? 1 : 0);

@@ -2668,6 +2681,7 @@ $(document).ready(function() {
  var pms_port = $("#pms_port").val();
  var pms_identifier = $("#pms_identifier").val();
  var pms_ssl = $("#pms_ssl").val();
+ var pms_is_remote = $("#pms_is_remote").val();
  var pms_url_manual = $("#pms_url_manual").is(':checked') ? 1 : 0;

  if (($("#pms_ip").val() !== '') || ($("#pms_port").val() !== '')) {

@@ -2679,6 +2693,7 @@ $(document).ready(function() {
  hostname: pms_ip,
  port: pms_port,
  ssl: pms_ssl,
+ remote: pms_is_remote,
  manual: pms_url_manual,
  get_url: true,
  test_websocket: true
----------------------------------------

@@ -68,14 +68,14 @@ DOCUMENTATION :: END
  <table class="stream-info" style="margin-top: 0;">
      <thead>
          <tr>
              <th></th>
-             <th class="heading">
-                 Source Details
-             <th>
-             </th>
-             <th><i class="fa fa-long-arrow-right"></i></th>
              <th class="heading">
                  Stream Details
              </th>
+             <th class="heading">
+                 Source Details
+             </th>
          </tr>
      </thead>
  </table>

@@ -85,46 +85,38 @@ DOCUMENTATION :: END
          <th>
              Media
          </th>
          <th></th>
          <th></th>
-         <th></th>
      </tr>
  </thead>
  <tbody>
      <tr>
          <td>Bitrate</td>
-         <td>${data['bitrate']} ${'kbps' if data['bitrate'] else ''}</td>
-         <td><i class="fa fa-long-arrow-right"></i></td>
          <td>${data['stream_bitrate']} ${'kbps' if data['stream_bitrate'] else ''}</td>
+         <td>${data['bitrate']} ${'kbps' if data['bitrate'] else ''}</td>
      </tr>
      % if data['media_type'] != 'track':
      <tr>
          <td>Resolution</td>
-         <td>${data['video_full_resolution']}</td>
-         <td><i class="fa fa-long-arrow-right"></i></td>
          <td>${data['stream_video_full_resolution']}</td>
+         <td>${data['video_full_resolution']}</td>
      </tr>
      % endif
      <tr>
          <td>Quality</td>
-         <td>-</td>
-         <td></td>
          <td>${data['quality_profile']}</td>
+         <td>-</td>
      </tr>
      % if data['optimized_version'] == 1:
      <tr>
          <td>Optimized Version</td>
-         <td>${data['optimized_version_profile']}<br>(${data['optimized_version_title']})</td>
-         <td></td>
          <td>-</td>
+         <td>${data['optimized_version_profile']}<br>(${data['optimized_version_title']})</td>
      </tr>
      % endif
      % if data['synced_version'] == 1:
      <tr>
          <td>Synced Version</td>
-         <td>${data['synced_version_profile']}</td>
-         <td></td>
          <td>-</td>
+         <td>${data['synced_version_profile']}</td>
      </tr>
      % endif
  </tbody>

@@ -135,8 +127,6 @@ DOCUMENTATION :: END
          <th>
              Container
          </th>
-         <th></th>
-         <th></th>
          <th>
              ${data['stream_container_decision']}
          </th>

@@ -145,9 +135,8 @@ DOCUMENTATION :: END
  <tbody>
      <tr>
          <td>Container</td>
-         <td>${data['container'].upper()}</td>
-         <td><i class="fa fa-long-arrow-right"></i></td>
          <td>${data['stream_container'].upper()}</td>
+         <td>${data['container'].upper()}</td>
      </tr>
  </tbody>
  </table>

@@ -158,8 +147,6 @@ DOCUMENTATION :: END
          <th>
              Video
          </th>
-         <th></th>
-         <th></th>
          <th>
              ${data['stream_video_decision']}
          </th>

@@ -168,45 +155,38 @@ DOCUMENTATION :: END
  <tbody>
      <tr>
          <td>Codec</td>
-         <td>${data['video_codec'].upper()} ${'(HW)' if data['transcode_hw_decoding'] else ''}</td>
-         <td><i class="fa fa-long-arrow-right"></i></td>
          <td>${data['stream_video_codec'].upper()} ${'(HW)' if data['transcode_hw_encoding'] else ''}</td>
+         <td>${data['video_codec'].upper()} ${'(HW)' if data['transcode_hw_decoding'] else ''}</td>
      </tr>
      <tr>
          <td>Bitrate</td>
-         <td>${data['video_bitrate']} ${'kbps' if data['video_bitrate'] else ''}</td>
-         <td><i class="fa fa-long-arrow-right"></i></td>
          <td>${data['stream_video_bitrate']} ${'kbps' if data['stream_video_bitrate'] else ''}</td>
+         <td>${data['video_bitrate']} ${'kbps' if data['video_bitrate'] else ''}</td>
      </tr>
      <tr>
          <td>Width</td>
-         <td>${data['video_width']}</td>
-         <td><i class="fa fa-long-arrow-right"></i></td>
          <td>${data['stream_video_width']}</td>
+         <td>${data['video_width']}</td>
      </tr>
      <tr>
          <td>Height</td>
-         <td>${data['video_height']}</td>
-         <td><i class="fa fa-long-arrow-right"></i></td>
          <td>${data['stream_video_height']}</td>
+         <td>${data['video_height']}</td>
      </tr>
      <tr>
          <td>Framerate</td>
-         <td>${data['video_framerate']}</td>
-         <td><i class="fa fa-long-arrow-right"></i></td>
          <td>${data['stream_video_framerate']}</td>
+         <td>${data['video_framerate']}</td>
      </tr>
      <tr>
          <td>Dynamic Range</td>
-         <td>${data['video_dynamic_range']}</td>
-         <td><i class="fa fa-long-arrow-right"></i></td>
          <td>${data['stream_video_dynamic_range']}</td>
+         <td>${data['video_dynamic_range']}</td>
      </tr>
      <tr>
          <td>Aspect Ratio</td>
-         <td>${data['aspect_ratio']}</td>
-         <td></td>
          <td>-</td>
+         <td>${data['aspect_ratio']}</td>
      </tr>
  </tbody>
  </table>

@@ -217,8 +197,6 @@ DOCUMENTATION :: END
          <th>
              Audio
          </th>
-         <th></th>
-         <th></th>
          <th>
              ${data['stream_audio_decision']}
          </th>

@@ -227,27 +205,23 @@ DOCUMENTATION :: END
  <tbody>
      <tr>
          <td>Codec</td>
-         <td>${AUDIO_CODEC_OVERRIDES.get(data['audio_codec'], data['audio_codec'].upper())}</td>
-         <td><i class="fa fa-long-arrow-right"></i></td>
          <td>${AUDIO_CODEC_OVERRIDES.get(data['stream_audio_codec'], data['stream_audio_codec'].upper())}</td>
+         <td>${AUDIO_CODEC_OVERRIDES.get(data['audio_codec'], data['audio_codec'].upper())}</td>
      </tr>
      <tr>
          <td>Bitrate</td>
-         <td>${data['audio_bitrate']} ${'kbps' if data['audio_bitrate'] else ''}</td>
-         <td><i class="fa fa-long-arrow-right"></i></td>
          <td>${data['stream_audio_bitrate']} ${'kbps' if data['stream_audio_bitrate'] else ''}</td>
+         <td>${data['audio_bitrate']} ${'kbps' if data['audio_bitrate'] else ''}</td>
      </tr>
      <tr>
          <td>Channels</td>
-         <td>${data['audio_channels']}</td>
-         <td><i class="fa fa-long-arrow-right"></i></td>
          <td>${data['stream_audio_channels']}</td>
+         <td>${data['audio_channels']}</td>
      </tr>
      <tr>
          <td>Language</td>
-         <td>${data['audio_language'] or 'Unknown'}</td>
-         <td></td>
          <td>-</td>
+         <td>${data['audio_language'] or 'Unknown'}</td>
      </tr>
  </tbody>

@@ -259,8 +233,6 @@ DOCUMENTATION :: END
          <th>
              Subtitles
          </th>
-         <th></th>
-         <th></th>
          <th>
              ${'direct play' if data['stream_subtitle_decision'] not in ('transcode', 'copy', 'burn') else data['stream_subtitle_decision']}
          </th>

@@ -269,22 +241,19 @@ DOCUMENTATION :: END
  <tbody>
      <tr>
          <td>Codec</td>
-         <td>${data['subtitle_codec'].upper()}</td>
-         <td><i class="fa fa-long-arrow-right"></i></td>
          <td>${data['stream_subtitle_codec'].upper() or '-'}</td>
+         <td>${data['subtitle_codec'].upper()}</td>
      </tr>
      <tr>
          <td>Language</td>
-         <td>${data['subtitle_language'] or 'Unknown'}</td>
-         <td></td>
          <td>-</td>
+         <td>${data['subtitle_language'] or 'Unknown'}</td>
      </tr>
      % if data['subtitle_forced']:
      <tr>
          <td>Forced</td>
-         <td>${bool(data['subtitle_forced'])}</td>
-         <td></td>
          <td>-</td>
+         <td>${bool(data['subtitle_forced'])}</td>
      </tr>
      % endif
  </tbody>
----------------------------------------

@@ -125,10 +125,10 @@ DOCUMENTATION :: END
  <div class="table-card-header">
      <ul class="nav nav-header nav-dashboard pull-right">
          <li>
-             <a href="#" id="recently-watched-page-left" class="paginate-watched btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
+             <a href="#" id="recently-watched-page-left" class="paginate btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a>
          </li>
          <li>
-             <a href="#" id="recently-watched-page-right" class="paginate-watched btn-gray" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
+             <a href="#" id="recently-watched-page-right" class="paginate btn-gray" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a>
          </li>
      </ul>
      <div class="header-bar">

@@ -666,14 +666,52 @@ DOCUMENTATION :: END
      },
      complete: function(xhr, status) {
          $("#user-recently-watched").html(xhr.responseText);
-         highlightScrollerButton("#recently-watched");
-         paginateScroller("#recently-watched", ".paginate-watched");
+         highlightWatchedScrollerButton();
      }
  });
  }

  recentlyWatched();

+ function highlightWatchedScrollerButton() {
+     var scroller = $("#recently-watched-row-scroller");
+     var numElems = scroller.find("li").length;
+     scroller.width(numElems * 175);
+     if (scroller.width() > $("#user-recently-watched").width()) {
+         $("#recently-watched-page-right").removeClass("disabled");
+     } else {
+         $("#recently-watched-page-right").addClass("disabled");
+     }
+ }
+
+ $(window).resize(function() {
+     highlightWatchedScrollerButton();
+ });
+
+ var leftTotal = 0;
+ $(".paginate").click(function (e) {
+     e.preventDefault();
+     var scroller = $("#recently-watched-row-scroller");
+     var containerWidth = $("#user-recently-watched").width();
+     var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
+     var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
+
+     leftTotal = Math.max(Math.min(leftTotal + scrollAmount, 0), leftMax);
+     scroller.animate({ left: leftTotal }, 250);
+
+     if (leftTotal == 0) {
+         $("#recently-watched-page-left").addClass("disabled").blur();
+     } else {
+         $("#recently-watched-page-left").removeClass("disabled");
+     }
+
+     if (leftTotal == leftMax) {
+         $("#recently-watched-page-right").addClass("disabled").blur();
+     } else {
+         $("#recently-watched-page-right").removeClass("disabled");
+     }
+ });
+
  $(document).ready(function () {
      // Javascript to enable link to tab
      var hash = document.location.hash;
|
@ -31,7 +31,7 @@ DOCUMENTATION :: END
from plexpy.helpers import page, short_season
%>
<div class="dashboard-recent-media-row">
<div id="recently-watched-row-scroller">
<div id="recently-watched-row-scroller" style="left: 0;">
<ul class="dashboard-recent-media list-unstyled">
% for item in data:
<li>
@ -135,6 +135,7 @@
data-identifier="${config['pms_identifier']}"
data-ip="${config['pms_ip']}"
data-port="${config['pms_port']}"
data-local="${int(not int(config['pms_is_remote']))}"
data-ssl="${config['pms_ssl']}"
data-is_cloud="${config['pms_is_cloud']}"
data-label="${config['pms_name'] or 'Local'}"
@ -150,7 +151,7 @@
<div class="col-xs-3">
<input type="text" class="form-control pms-settings" name="pms_port" id="pms_port" placeholder="32400" value="${config['pms_port']}" required>
</div>
<div class="col-xs-9">
<div class="col-xs-4">
<div class="checkbox">
<label>
<input type="checkbox" id="pms_ssl_checkbox" class="checkbox-toggle pms-settings" data-id="pms_ssl" value="1" ${helpers.checked(config['pms_ssl'])}> Use Secure Connection
@ -158,6 +159,14 @@
</label>
</div>
</div>
<div class="col-xs-4">
<div class="checkbox">
<label>
<input type="checkbox" id="pms_is_remote_checkbox" class="checkbox-toggle pms-settings" data-id="pms_is_remote" value="1" ${helpers.checked(config['pms_is_remote'])}> Remote Server
<input type="hidden" id="pms_is_remote" name="pms_is_remote" value="${config['pms_is_remote']}">
</label>
</div>
</div>
</div>
</div>
<input type="hidden" id="pms_valid" data-validate="validatePMSip" value="">
@ -382,6 +391,7 @@ $(document).ready(function() {
return '<div data-identifier="' + item.clientIdentifier +
    '" data-ip="' + item.ip +
    '" data-port="' + item.port +
    '" data-local="' + item.local +
    '" data-ssl="' + item.httpsRequired +
    '" data-is_cloud="' + item.is_cloud +
    '" data-label="' + item.label + '">' +
@ -395,6 +405,7 @@ $(document).ready(function() {
return '<div data-identifier="' + item.clientIdentifier +
    '" data-ip="' + item.ip +
    '" data-port="' + item.port +
    '" data-local="' + item.local +
    '" data-ssl="' + item.httpsRequired +
    '" data-is_cloud="' + item.is_cloud +
    '" data-label="' + item.label + '">' +
@ -417,6 +428,7 @@ $(document).ready(function() {
var identifier = $(pms_ip_selected).data('identifier');
var ip = $(pms_ip_selected).data('ip');
var port = $(pms_ip_selected).data('port');
var local = $(pms_ip_selected).data('local');
var ssl = $(pms_ip_selected).data('ssl');
var is_cloud = $(pms_ip_selected).data('is_cloud');
var value = $(pms_ip_selected).data('value');
@ -427,15 +439,19 @@ $(document).ready(function() {
$("#pms_identifier").val(identifier !== 'undefined' ? identifier : '');
$('#pms_ip').val(ip !== 'undefined' ? ip : value);
$('#pms_port').val(port !== 'undefined' ? port : 32400);
$('#pms_is_remote_checkbox').prop('checked', (local !== 'undefined' && local === 0));
$('#pms_is_remote').val(local !== 'undefined' && local === 0 ? 1 : 0);
$('#pms_ssl_checkbox').prop('checked', (ssl !== 'undefined' && ssl === 1));
$('#pms_ssl').val(ssl !== 'undefined' && ssl === 1 ? 1 : 0);
$('#pms_is_cloud').val(is_cloud !== 'undefined' && is_cloud === true ? 1 : 0);

if (is_cloud === true) {
    $('#pms_port').prop('readonly', true);
    $('#pms_is_remote_checkbox').prop('disabled', true);
    $('#pms_ssl_checkbox').prop('disabled', true);
} else {
    $('#pms_port').prop('readonly', false);
    $('#pms_is_remote_checkbox').prop('disabled', false);
    $('#pms_ssl_checkbox').prop('disabled', false);
}
},
@ -472,6 +488,7 @@ $(document).ready(function() {
var pms_port = $("#pms_port").val().trim();
var pms_identifier = $("#pms_identifier").val();
var pms_ssl = $("#pms_ssl").val();
var pms_is_remote = $("#pms_is_remote").val();
if ((pms_ip !== '') || (pms_port !== '')) {
    $("#pms-verify-status").html('<i class="fa fa-refresh fa-spin"></i> Verifying server...');
    $('#pms-verify-status').fadeIn('fast');
@ -481,7 +498,8 @@ $(document).ready(function() {
hostname: pms_ip,
port: pms_port,
identifier: pms_identifier,
ssl: pms_ssl
ssl: pms_ssl,
remote: pms_is_remote
},
cache: true,
async: true,
979
lib/backports/csv.py
Normal file
@ -0,0 +1,979 @@
# -*- coding: utf-8 -*-
"""A port of Python 3's csv module to Python 2.

The API of the csv module in Python 2 is drastically different from
the csv module in Python 3. This is due, for the most part, to the
difference between str in Python 2 and Python 3.

The semantics of Python 3's version are more useful because they support
unicode natively, while Python 2's csv does not.
"""
from __future__ import unicode_literals, absolute_import

__all__ = [ "QUOTE_MINIMAL", "QUOTE_ALL", "QUOTE_NONNUMERIC", "QUOTE_NONE",
            "Error", "Dialect", "__doc__", "excel", "excel_tab",
            "field_size_limit", "reader", "writer",
            "register_dialect", "get_dialect", "list_dialects", "Sniffer",
            "unregister_dialect", "__version__", "DictReader", "DictWriter" ]

import re
import numbers
from io import StringIO
from csv import (
    QUOTE_MINIMAL, QUOTE_ALL, QUOTE_NONNUMERIC, QUOTE_NONE,
    __version__, __doc__, Error, field_size_limit,
)

# Stuff needed from six
import sys
PY3 = sys.version_info[0] == 3
if PY3:
    string_types = str
    text_type = str
    binary_type = bytes
    unichr = chr
else:
    string_types = basestring
    text_type = unicode
    binary_type = str


class QuoteStrategy(object):
    quoting = None

    def __init__(self, dialect):
        if self.quoting is not None:
            assert dialect.quoting == self.quoting
        self.dialect = dialect
        self.setup()

        escape_pattern_quoted = r'({quotechar})'.format(
            quotechar=re.escape(self.dialect.quotechar or '"'))
        escape_pattern_unquoted = r'([{specialchars}])'.format(
            specialchars=re.escape(self.specialchars))

        self.escape_re_quoted = re.compile(escape_pattern_quoted)
        self.escape_re_unquoted = re.compile(escape_pattern_unquoted)

    def setup(self):
        """Optional method for strategy-wide optimizations."""

    def quoted(self, field=None, raw_field=None, only=None):
        """Determine whether this field should be quoted."""
        raise NotImplementedError(
            'quoted must be implemented by a subclass')

    @property
    def specialchars(self):
        """The special characters that need to be escaped."""
        raise NotImplementedError(
            'specialchars must be implemented by a subclass')

    def escape_re(self, quoted=None):
        if quoted:
            return self.escape_re_quoted
        return self.escape_re_unquoted

    def escapechar(self, quoted=None):
        if quoted and self.dialect.doublequote:
            return self.dialect.quotechar
        return self.dialect.escapechar

    def prepare(self, raw_field, only=None):
        field = text_type(raw_field if raw_field is not None else '')
        quoted = self.quoted(field=field, raw_field=raw_field, only=only)

        escape_re = self.escape_re(quoted=quoted)
        escapechar = self.escapechar(quoted=quoted)

        if escape_re.search(field):
            escapechar = '\\\\' if escapechar == '\\' else escapechar
            if not escapechar:
                raise Error('No escapechar is set')
            escape_replace = r'{escapechar}\1'.format(escapechar=escapechar)
            field = escape_re.sub(escape_replace, field)

        if quoted:
            field = '{quotechar}{field}{quotechar}'.format(
                quotechar=self.dialect.quotechar, field=field)

        return field


class QuoteMinimalStrategy(QuoteStrategy):
    quoting = QUOTE_MINIMAL

    def setup(self):
        self.quoted_re = re.compile(r'[{specialchars}]'.format(
            specialchars=re.escape(self.specialchars)))

    @property
    def specialchars(self):
        return (
            self.dialect.lineterminator +
            self.dialect.quotechar +
            self.dialect.delimiter +
            (self.dialect.escapechar or '')
        )

    def quoted(self, field, only, **kwargs):
        if field == self.dialect.quotechar and not self.dialect.doublequote:
            # If the only character in the field is the quotechar, and
            # doublequote is false, then just escape without outer quotes.
            return False
        return field == '' and only or bool(self.quoted_re.search(field))


class QuoteAllStrategy(QuoteStrategy):
    quoting = QUOTE_ALL

    @property
    def specialchars(self):
        return self.dialect.quotechar

    def quoted(self, **kwargs):
        return True


class QuoteNonnumericStrategy(QuoteStrategy):
    quoting = QUOTE_NONNUMERIC

    @property
    def specialchars(self):
        return (
            self.dialect.lineterminator +
            self.dialect.quotechar +
            self.dialect.delimiter +
            (self.dialect.escapechar or '')
        )

    def quoted(self, raw_field, **kwargs):
        return not isinstance(raw_field, numbers.Number)


class QuoteNoneStrategy(QuoteStrategy):
    quoting = QUOTE_NONE

    @property
    def specialchars(self):
        return (
            self.dialect.lineterminator +
            (self.dialect.quotechar or '') +
            self.dialect.delimiter +
            (self.dialect.escapechar or '')
        )

    def quoted(self, field, only, **kwargs):
        if field == '' and only:
            raise Error('single empty field record must be quoted')
        return False


class writer(object):
    def __init__(self, fileobj, dialect='excel', **fmtparams):
        if fileobj is None:
            raise TypeError('fileobj must be file-like, not None')

        self.fileobj = fileobj

        if isinstance(dialect, text_type):
            dialect = get_dialect(dialect)

        try:
            self.dialect = Dialect.combine(dialect, fmtparams)
        except Error as e:
            raise TypeError(*e.args)

        strategies = {
            QUOTE_MINIMAL: QuoteMinimalStrategy,
            QUOTE_ALL: QuoteAllStrategy,
            QUOTE_NONNUMERIC: QuoteNonnumericStrategy,
            QUOTE_NONE: QuoteNoneStrategy,
        }
        self.strategy = strategies[self.dialect.quoting](self.dialect)

    def writerow(self, row):
        if row is None:
            raise Error('row must be an iterable')

        row = list(row)
        only = len(row) == 1
        row = [self.strategy.prepare(field, only=only) for field in row]

        line = self.dialect.delimiter.join(row) + self.dialect.lineterminator
        return self.fileobj.write(line)

    def writerows(self, rows):
        for row in rows:
            self.writerow(row)


START_RECORD = 0
START_FIELD = 1
ESCAPED_CHAR = 2
IN_FIELD = 3
IN_QUOTED_FIELD = 4
ESCAPE_IN_QUOTED_FIELD = 5
QUOTE_IN_QUOTED_FIELD = 6
EAT_CRNL = 7
AFTER_ESCAPED_CRNL = 8


class reader(object):
    def __init__(self, fileobj, dialect='excel', **fmtparams):
        self.input_iter = iter(fileobj)

        if isinstance(dialect, text_type):
            dialect = get_dialect(dialect)

        try:
            self.dialect = Dialect.combine(dialect, fmtparams)
        except Error as e:
            raise TypeError(*e.args)

        self.fields = None
        self.field = None
        self.line_num = 0

    def parse_reset(self):
        self.fields = []
        self.field = []
        self.state = START_RECORD
        self.numeric_field = False

    def parse_save_field(self):
        field = ''.join(self.field)
        self.field = []
        if self.numeric_field:
            field = float(field)
            self.numeric_field = False
        self.fields.append(field)

    def parse_add_char(self, c):
        if len(self.field) >= field_size_limit():
            raise Error('field size limit exceeded')
        self.field.append(c)

    def parse_process_char(self, c):
        switch = {
            START_RECORD: self._parse_start_record,
            START_FIELD: self._parse_start_field,
            ESCAPED_CHAR: self._parse_escaped_char,
            AFTER_ESCAPED_CRNL: self._parse_after_escaped_crnl,
            IN_FIELD: self._parse_in_field,
            IN_QUOTED_FIELD: self._parse_in_quoted_field,
            ESCAPE_IN_QUOTED_FIELD: self._parse_escape_in_quoted_field,
            QUOTE_IN_QUOTED_FIELD: self._parse_quote_in_quoted_field,
            EAT_CRNL: self._parse_eat_crnl,
        }
        return switch[self.state](c)

    def _parse_start_record(self, c):
        if c == '\0':
            return
        elif c == '\n' or c == '\r':
            self.state = EAT_CRNL
            return

        self.state = START_FIELD
        return self._parse_start_field(c)

    def _parse_start_field(self, c):
        if c == '\n' or c == '\r' or c == '\0':
            self.parse_save_field()
            self.state = START_RECORD if c == '\0' else EAT_CRNL
        elif (c == self.dialect.quotechar and
                self.dialect.quoting != QUOTE_NONE):
            self.state = IN_QUOTED_FIELD
        elif c == self.dialect.escapechar:
            self.state = ESCAPED_CHAR
        elif c == ' ' and self.dialect.skipinitialspace:
            pass  # Ignore space at start of field
        elif c == self.dialect.delimiter:
            # Save empty field
            self.parse_save_field()
        else:
            # Begin new unquoted field
            if self.dialect.quoting == QUOTE_NONNUMERIC:
                self.numeric_field = True
            self.parse_add_char(c)
            self.state = IN_FIELD

    def _parse_escaped_char(self, c):
        if c == '\n' or c == '\r':
            self.parse_add_char(c)
            self.state = AFTER_ESCAPED_CRNL
            return
        if c == '\0':
            c = '\n'
        self.parse_add_char(c)
        self.state = IN_FIELD

    def _parse_after_escaped_crnl(self, c):
        if c == '\0':
            return
        return self._parse_in_field(c)

    def _parse_in_field(self, c):
        # In unquoted field
        if c == '\n' or c == '\r' or c == '\0':
            # End of line - return [fields]
            self.parse_save_field()
            self.state = START_RECORD if c == '\0' else EAT_CRNL
        elif c == self.dialect.escapechar:
            self.state = ESCAPED_CHAR
        elif c == self.dialect.delimiter:
            self.parse_save_field()
            self.state = START_FIELD
        else:
            # Normal character - save in field
            self.parse_add_char(c)

    def _parse_in_quoted_field(self, c):
        if c == '\0':
            pass
        elif c == self.dialect.escapechar:
            self.state = ESCAPE_IN_QUOTED_FIELD
        elif (c == self.dialect.quotechar and
                self.dialect.quoting != QUOTE_NONE):
            if self.dialect.doublequote:
                self.state = QUOTE_IN_QUOTED_FIELD
            else:
                self.state = IN_FIELD
        else:
            self.parse_add_char(c)

    def _parse_escape_in_quoted_field(self, c):
        if c == '\0':
            c = '\n'

        self.parse_add_char(c)
        self.state = IN_QUOTED_FIELD

    def _parse_quote_in_quoted_field(self, c):
        if (self.dialect.quoting != QUOTE_NONE and
                c == self.dialect.quotechar):
            # save "" as "
            self.parse_add_char(c)
            self.state = IN_QUOTED_FIELD
        elif c == self.dialect.delimiter:
            self.parse_save_field()
            self.state = START_FIELD
        elif c == '\n' or c == '\r' or c == '\0':
            # End of line = return [fields]
            self.parse_save_field()
            self.state = START_RECORD if c == '\0' else EAT_CRNL
        elif not self.dialect.strict:
            self.parse_add_char(c)
            self.state = IN_FIELD
        else:
            # illegal
            raise Error("{delimiter}' expected after '{quotechar}".format(
                delimiter=self.dialect.delimiter,
                quotechar=self.dialect.quotechar,
            ))

    def _parse_eat_crnl(self, c):
        if c == '\n' or c == '\r':
            pass
        elif c == '\0':
            self.state = START_RECORD
        else:
            raise Error('new-line character seen in unquoted field - do you '
                        'need to open the file in universal-newline mode?')

    def __iter__(self):
        return self

    def __next__(self):
        self.parse_reset()

        while True:
            try:
                lineobj = next(self.input_iter)
            except StopIteration:
                if len(self.field) != 0 or self.state == IN_QUOTED_FIELD:
                    if self.dialect.strict:
                        raise Error('unexpected end of data')
                    self.parse_save_field()
                    if self.fields:
                        break
                raise

            if not isinstance(lineobj, text_type):
                typ = type(lineobj)
                typ_name = 'bytes' if typ == bytes else typ.__name__
                err_str = ('iterator should return strings, not {0}'
                           ' (did you open the file in text mode?)')
                raise Error(err_str.format(typ_name))

            self.line_num += 1
            for c in lineobj:
                if c == '\0':
                    raise Error('line contains NULL byte')
                self.parse_process_char(c)

            self.parse_process_char('\0')

            if self.state == START_RECORD:
                break

        fields = self.fields
        self.fields = None
        return fields

    next = __next__


_dialect_registry = {}
def register_dialect(name, dialect='excel', **fmtparams):
    if not isinstance(name, text_type):
        raise TypeError('"name" must be a string')

    dialect = Dialect.extend(dialect, fmtparams)

    try:
        Dialect.validate(dialect)
    except:
        raise TypeError('dialect is invalid')

    assert name not in _dialect_registry
    _dialect_registry[name] = dialect

def unregister_dialect(name):
    try:
        _dialect_registry.pop(name)
    except KeyError:
        raise Error('"{name}" not a registered dialect'.format(name=name))

def get_dialect(name):
    try:
        return _dialect_registry[name]
    except KeyError:
        raise Error('Could not find dialect {0}'.format(name))

def list_dialects():
    return list(_dialect_registry)


class Dialect(object):
    """Describe a CSV dialect.
    This must be subclassed (see csv.excel). Valid attributes are:
    delimiter, quotechar, escapechar, doublequote, skipinitialspace,
    lineterminator, quoting, strict.
    """
    _name = ""
    _valid = False
    # placeholders
    delimiter = None
    quotechar = None
    escapechar = None
    doublequote = None
    skipinitialspace = None
    lineterminator = None
    quoting = None
    strict = None

    def __init__(self):
        self.validate(self)
        if self.__class__ != Dialect:
            self._valid = True

    @classmethod
    def validate(cls, dialect):
        dialect = cls.extend(dialect)

        if not isinstance(dialect.quoting, int):
            raise Error('"quoting" must be an integer')

        if dialect.delimiter is None:
            raise Error('delimiter must be set')
        cls.validate_text(dialect, 'delimiter')

        if dialect.lineterminator is None:
            raise Error('lineterminator must be set')
        if not isinstance(dialect.lineterminator, text_type):
            raise Error('"lineterminator" must be a string')

        if dialect.quoting not in [
                QUOTE_NONE, QUOTE_MINIMAL, QUOTE_NONNUMERIC, QUOTE_ALL]:
            raise Error('Invalid quoting specified')

        if dialect.quoting != QUOTE_NONE:
            if dialect.quotechar is None and dialect.escapechar is None:
                raise Error('quotechar must be set if quoting enabled')
            if dialect.quotechar is not None:
                cls.validate_text(dialect, 'quotechar')

    @staticmethod
    def validate_text(dialect, attr):
        val = getattr(dialect, attr)
        if not isinstance(val, text_type):
            if type(val) == bytes:
                raise Error('"{0}" must be string, not bytes'.format(attr))
            raise Error('"{0}" must be string, not {1}'.format(
                attr, type(val).__name__))

        if len(val) != 1:
            raise Error('"{0}" must be a 1-character string'.format(attr))

    @staticmethod
    def defaults():
        return {
            'delimiter': ',',
            'doublequote': True,
            'escapechar': None,
            'lineterminator': '\r\n',
            'quotechar': '"',
            'quoting': QUOTE_MINIMAL,
            'skipinitialspace': False,
            'strict': False,
        }

    @classmethod
    def extend(cls, dialect, fmtparams=None):
        if isinstance(dialect, string_types):
            dialect = get_dialect(dialect)

        if fmtparams is None:
            return dialect

        defaults = cls.defaults()

        if any(param not in defaults for param in fmtparams):
            raise TypeError('Invalid fmtparam')

        specified = dict(
            (attr, getattr(dialect, attr, None))
            for attr in cls.defaults()
        )

        specified.update(fmtparams)
        return type(str('ExtendedDialect'), (cls,), specified)

    @classmethod
    def combine(cls, dialect, fmtparams):
        """Create a new dialect with defaults and added parameters."""
        dialect = cls.extend(dialect, fmtparams)
        defaults = cls.defaults()
        specified = dict(
            (attr, getattr(dialect, attr, None))
            for attr in defaults
            if getattr(dialect, attr, None) is not None or
            attr in ['quotechar', 'delimiter', 'lineterminator', 'quoting']
        )

        defaults.update(specified)
        dialect = type(str('CombinedDialect'), (cls,), defaults)
        cls.validate(dialect)
        return dialect()

    def __delattr__(self, attr):
        if self._valid:
            raise AttributeError('dialect is immutable.')
        super(Dialect, self).__delattr__(attr)

    def __setattr__(self, attr, value):
        if self._valid:
            raise AttributeError('dialect is immutable.')
        super(Dialect, self).__setattr__(attr, value)


class excel(Dialect):
    """Describe the usual properties of Excel-generated CSV files."""
    delimiter = ','
    quotechar = '"'
    doublequote = True
    skipinitialspace = False
    lineterminator = '\r\n'
    quoting = QUOTE_MINIMAL
register_dialect("excel", excel)

class excel_tab(excel):
    """Describe the usual properties of Excel-generated TAB-delimited files."""
    delimiter = '\t'
register_dialect("excel-tab", excel_tab)

class unix_dialect(Dialect):
    """Describe the usual properties of Unix-generated CSV files."""
    delimiter = ','
    quotechar = '"'
    doublequote = True
    skipinitialspace = False
    lineterminator = '\n'
    quoting = QUOTE_ALL
register_dialect("unix", unix_dialect)


class DictReader(object):
    def __init__(self, f, fieldnames=None, restkey=None, restval=None,
                 dialect="excel", *args, **kwds):
        self._fieldnames = fieldnames   # list of keys for the dict
        self.restkey = restkey          # key to catch long rows
        self.restval = restval          # default value for short rows
        self.reader = reader(f, dialect, *args, **kwds)
        self.dialect = dialect
        self.line_num = 0

    def __iter__(self):
        return self

    @property
    def fieldnames(self):
        if self._fieldnames is None:
            try:
                self._fieldnames = next(self.reader)
            except StopIteration:
                pass
        self.line_num = self.reader.line_num
        return self._fieldnames

    @fieldnames.setter
    def fieldnames(self, value):
        self._fieldnames = value

    def __next__(self):
        if self.line_num == 0:
            # Used only for its side effect.
            self.fieldnames
        row = next(self.reader)
        self.line_num = self.reader.line_num

        # unlike the basic reader, we prefer not to return blanks,
        # because we will typically wind up with a dict full of None
        # values
        while row == []:
            row = next(self.reader)
        d = dict(zip(self.fieldnames, row))
        lf = len(self.fieldnames)
        lr = len(row)
        if lf < lr:
            d[self.restkey] = row[lf:]
        elif lf > lr:
            for key in self.fieldnames[lr:]:
                d[key] = self.restval
        return d

    next = __next__


class DictWriter(object):
    def __init__(self, f, fieldnames, restval="", extrasaction="raise",
                 dialect="excel", *args, **kwds):
        self.fieldnames = fieldnames    # list of keys for the dict
        self.restval = restval          # for writing short dicts
        if extrasaction.lower() not in ("raise", "ignore"):
            raise ValueError("extrasaction (%s) must be 'raise' or 'ignore'"
                             % extrasaction)
        self.extrasaction = extrasaction
        self.writer = writer(f, dialect, *args, **kwds)

    def writeheader(self):
        header = dict(zip(self.fieldnames, self.fieldnames))
        self.writerow(header)

    def _dict_to_list(self, rowdict):
        if self.extrasaction == "raise":
            wrong_fields = [k for k in rowdict if k not in self.fieldnames]
            if wrong_fields:
                raise ValueError("dict contains fields not in fieldnames: "
                                 + ", ".join([repr(x) for x in wrong_fields]))
        return (rowdict.get(key, self.restval) for key in self.fieldnames)

    def writerow(self, rowdict):
        return self.writer.writerow(self._dict_to_list(rowdict))

    def writerows(self, rowdicts):
        return self.writer.writerows(map(self._dict_to_list, rowdicts))

# Guard Sniffer's type checking against builds that exclude complex()
try:
    complex
except NameError:
    complex = float

class Sniffer(object):
    '''
    "Sniffs" the format of a CSV file (i.e. delimiter, quotechar)
    Returns a Dialect object.
    '''
    def __init__(self):
        # in case there is more than one possible delimiter
        self.preferred = [',', '\t', ';', ' ', ':']


    def sniff(self, sample, delimiters=None):
        """
        Returns a dialect (or None) corresponding to the sample
        """

        quotechar, doublequote, delimiter, skipinitialspace = \
            self._guess_quote_and_delimiter(sample, delimiters)
        if not delimiter:
            delimiter, skipinitialspace = self._guess_delimiter(sample,
                                                                delimiters)

        if not delimiter:
            raise Error("Could not determine delimiter")

        class dialect(Dialect):
            _name = "sniffed"
            lineterminator = '\r\n'
            quoting = QUOTE_MINIMAL
            # escapechar = ''

        dialect.doublequote = doublequote
        dialect.delimiter = delimiter
        # _csv.reader won't accept a quotechar of ''
        dialect.quotechar = quotechar or '"'
        dialect.skipinitialspace = skipinitialspace

        return dialect


    def _guess_quote_and_delimiter(self, data, delimiters):
        """
        Looks for text enclosed between two identical quotes
        (the probable quotechar) which are preceded and followed
        by the same character (the probable delimiter).
        For example:
                         ,'some text',
        The quote with the most wins, same with the delimiter.
        If there is no quotechar the delimiter can't be determined
        this way.
        """

        matches = []
        for restr in ('(?P<delim>[^\w\n"\'])(?P<space> ?)(?P<quote>["\']).*?(?P=quote)(?P=delim)',  # ,".*?",
                      '(?:^|\n)(?P<quote>["\']).*?(?P=quote)(?P<delim>[^\w\n"\'])(?P<space> ?)',    #  ".*?",
                      '(?P<delim>[^\w\n"\'])(?P<space> ?)(?P<quote>["\']).*?(?P=quote)(?:$|\n)',    # ,".*?"
                      '(?:^|\n)(?P<quote>["\']).*?(?P=quote)(?:$|\n)'):                             #  ".*?" (no delim, no space)
            regexp = re.compile(restr, re.DOTALL | re.MULTILINE)
            matches = regexp.findall(data)
            if matches:
                break

        if not matches:
            # (quotechar, doublequote, delimiter, skipinitialspace)
            return ('', False, None, 0)
        quotes = {}
        delims = {}
        spaces = 0
        groupindex = regexp.groupindex
        for m in matches:
            n = groupindex['quote'] - 1
            key = m[n]
            if key:
                quotes[key] = quotes.get(key, 0) + 1
            try:
                n = groupindex['delim'] - 1
                key = m[n]
            except KeyError:
                continue
            if key and (delimiters is None or key in delimiters):
                delims[key] = delims.get(key, 0) + 1
            try:
                n = groupindex['space'] - 1
            except KeyError:
                continue
            if m[n]:
                spaces += 1

        quotechar = max(quotes, key=quotes.get)

        if delims:
            delim = max(delims, key=delims.get)
            skipinitialspace = delims[delim] == spaces
            if delim == '\n':  # most likely a file with a single column
                delim = ''
        else:
            # there is *no* delimiter, it's a single column of quoted data
            delim = ''
            skipinitialspace = 0

        # if we see an extra quote between delimiters, we've got a
        # double quoted format
        dq_regexp = re.compile(
            r"((%(delim)s)|^)\W*%(quote)s[^%(delim)s\n]*%(quote)s[^%(delim)s\n]*%(quote)s\W*((%(delim)s)|$)" % \
            {'delim':re.escape(delim), 'quote':quotechar}, re.MULTILINE)

        if dq_regexp.search(data):
            doublequote = True
        else:
            doublequote = False

        return (quotechar, doublequote, delim, skipinitialspace)


    def _guess_delimiter(self, data, delimiters):
        """
        The delimiter /should/ occur the same number of times on
        each row. However, due to malformed data, it may not. We don't want
        an all or nothing approach, so we allow for small variations in this
        number.
          1) build a table of the frequency of each character on every line.
          2) build a table of frequencies of this frequency (meta-frequency?),
             e.g. 'x occurred 5 times in 10 rows, 6 times in 1000 rows,
             7 times in 2 rows'
          3) use the mode of the meta-frequency to determine the /expected/
             frequency for that character
          4) find out how often the character actually meets that goal
          5) the character that best meets its goal is the delimiter
        For performance reasons, the data is evaluated in chunks, so it can
        try and evaluate the smallest portion of the data possible, evaluating
        additional chunks as necessary.
        """

        data = list(filter(None, data.split('\n')))

        ascii = [unichr(c) for c in range(127)]  # 7-bit ASCII

        # build frequency tables
        chunkLength = min(10, len(data))
        iteration = 0
        charFrequency = {}
        modes = {}
        delims = {}
        start, end = 0, min(chunkLength, len(data))
        while start < len(data):
            iteration += 1
            for line in data[start:end]:
                for char in ascii:
                    metaFrequency = charFrequency.get(char, {})
                    # must count even if frequency is 0
                    freq = line.count(char)
                    # value is the mode
                    metaFrequency[freq] = metaFrequency.get(freq, 0) + 1
                    charFrequency[char] = metaFrequency

            for char in charFrequency.keys():
                items = list(charFrequency[char].items())
                if len(items) == 1 and items[0][0] == 0:
                    continue
                # get the mode of the frequencies
                if len(items) > 1:
                    modes[char] = max(items, key=lambda x: x[1])
                    # adjust the mode - subtract the sum of all
                    # other frequencies
                    items.remove(modes[char])
                    modes[char] = (modes[char][0], modes[char][1]
                                   - sum(item[1] for item in items))
                else:
                    modes[char] = items[0]

            # build a list of possible delimiters
            modeList = modes.items()
            total = float(chunkLength * iteration)
            # (rows of consistent data) / (number of rows) = 100%
            consistency = 1.0
            # minimum consistency threshold
            threshold = 0.9
            while len(delims) == 0 and consistency >= threshold:
                for k, v in modeList:
                    if v[0] > 0 and v[1] > 0:
                        if ((v[1]/total) >= consistency and
                                (delimiters is None or k in delimiters)):
                            delims[k] = v
                consistency -= 0.01

            if len(delims) == 1:
                delim = list(delims.keys())[0]
                skipinitialspace = (data[0].count(delim) ==
                                    data[0].count("%c " % delim))
                return (delim, skipinitialspace)

            # analyze another chunkLength lines
            start = end
            end += chunkLength

        if not delims:
            return ('', 0)

        # if there's more than one, fall back to a 'preferred' list
        if len(delims) > 1:
            for d in self.preferred:
                if d in delims.keys():
                    skipinitialspace = (data[0].count(d) ==
                                        data[0].count("%c " % d))
                    return (d, skipinitialspace)

        # nothing else indicates a preference, pick the character that
        # dominates(?)
        items = [(v,k) for (k,v) in delims.items()]
        items.sort()
        delim = items[-1][1]

        skipinitialspace = (data[0].count(delim) ==
                            data[0].count("%c " % delim))
        return (delim, skipinitialspace)


    def has_header(self, sample):
        # Creates a dictionary of types of data in each column. If any
        # column is of a single type (say, integers), *except* for the first
        # row, then the first row is presumed to be labels. If the type
        # can't be determined, it is assumed to be a string in which case
        # the length of the string is the determining factor: if all of the
        # rows except for the first are the same length, it's a header.
        # Finally, a 'vote' is taken at the end for each column, adding or
        # subtracting from the likelihood of the first row being a header.

        rdr = reader(StringIO(sample), self.sniff(sample))

        header = next(rdr)  # assume first row is header

        columns = len(header)
        columnTypes = {}
        for i in range(columns): columnTypes[i] = None

        checked = 0
        for row in rdr:
            # arbitrary number of rows to check, to keep it sane
            if checked > 20:
                break
            checked += 1

            if len(row) != columns:
                continue  # skip rows that have irregular number of columns

            for col in list(columnTypes.keys()):

                for thisType in [int, float, complex]:
                    try:
                        thisType(row[col])
                        break
                    except (ValueError, OverflowError):
                        pass
                else:
                    # fallback to length of string
                    thisType = len(row[col])

                if thisType != columnTypes[col]:
                    if columnTypes[col] is None:  # add new column type
                        columnTypes[col] = thisType
                    else:
                        # type is inconsistent, remove column from
                        # consideration
                        del columnTypes[col]

        # finally, compare results against first row and "vote"
        # on whether it's a header
        hasHeader = 0
        for col, colType in columnTypes.items():
            if type(colType) == type(0):  # it's a length
                if len(header[col]) != colType:
                    hasHeader += 1
                else:
                    hasHeader -= 1
            else:  # attempt typecast
                try:
                    colType(header[col])
                except (ValueError, TypeError):
                    hasHeader += 1
                else:
                    hasHeader -= 1

        return hasHeader > 0
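The vendored module above mirrors the Python 3 csv API, so call sites can switch imports without other changes. A minimal usage sketch (the data.csv filename and the sample rows are made up for illustration; this is not code from the diff):

# Minimal sketch of using the backported csv module with unicode data.
from __future__ import unicode_literals
import io

from backports import csv

# Write unicode rows; the writer expects a text-mode file object.
with io.open('data.csv', 'w', encoding='utf-8', newline='') as f:
    w = csv.writer(f)
    w.writerow(['name', 'city'])
    w.writerow(['José', 'São Paulo'])

# Read the rows back as unicode strings, not bytes.
with io.open('data.csv', 'r', encoding='utf-8', newline='') as f:
    for row in csv.reader(f):
        print(row)

# The Sniffer guesses the dialect from a sample, as described in its
# docstrings above.
sample = 'a;b;c\r\n1;2;3\r\n'
dialect = csv.Sniffer().sniff(sample)
print(dialect.delimiter)  # ';'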
243
lib/backports/functools_lru_cache.py
Normal file
@ -0,0 +1,243 @@
from __future__ import absolute_import

import functools
from collections import namedtuple
from threading import RLock

_CacheInfo = namedtuple("_CacheInfo", ["hits", "misses", "maxsize", "currsize"])


@functools.wraps(functools.update_wrapper)
def update_wrapper(
    wrapper,
    wrapped,
    assigned=functools.WRAPPER_ASSIGNMENTS,
    updated=functools.WRAPPER_UPDATES,
):
    """
    Patch two bugs in functools.update_wrapper.
    """
    # workaround for http://bugs.python.org/issue3445
    assigned = tuple(attr for attr in assigned if hasattr(wrapped, attr))
    wrapper = functools.update_wrapper(wrapper, wrapped, assigned, updated)
    # workaround for https://bugs.python.org/issue17482
    wrapper.__wrapped__ = wrapped
    return wrapper


class _HashedSeq(list):
    """This class guarantees that hash() will be called no more than once
    per element. This is important because the lru_cache() will hash
    the key multiple times on a cache miss.

    """

    __slots__ = 'hashvalue'

    def __init__(self, tup, hash=hash):
        self[:] = tup
        self.hashvalue = hash(tup)

    def __hash__(self):
        return self.hashvalue


def _make_key(
    args,
    kwds,
    typed,
    kwd_mark=(object(),),
    fasttypes={int, str},
    tuple=tuple,
    type=type,
    len=len,
):
    """Make a cache key from optionally typed positional and keyword arguments

    The key is constructed in a way that is flat as possible rather than
    as a nested structure that would take more memory.

    If there is only a single argument and its data type is known to cache
    its hash value, then that argument is returned without a wrapper. This
    saves space and improves lookup speed.

    """
    # All of code below relies on kwds preserving the order input by the user.
    # Formerly, we sorted() the kwds before looping. The new way is *much*
    # faster; however, it means that f(x=1, y=2) will now be treated as a
    # distinct call from f(y=2, x=1) which will be cached separately.
    key = args
    if kwds:
        key += kwd_mark
        for item in kwds.items():
            key += item
    if typed:
        key += tuple(type(v) for v in args)
        if kwds:
            key += tuple(type(v) for v in kwds.values())
    elif len(key) == 1 and type(key[0]) in fasttypes:
        return key[0]
    return _HashedSeq(key)


def lru_cache(maxsize=128, typed=False):
    """Least-recently-used cache decorator.

    If *maxsize* is set to None, the LRU features are disabled and the cache
    can grow without bound.

    If *typed* is True, arguments of different types will be cached separately.
    For example, f(decimal.Decimal("3.0")) and f(3.0) will be treated as
    distinct calls with distinct results. Some types such as str and int may
    be cached separately even when typed is false.

    Arguments to the cached function must be hashable.

    View the cache statistics named tuple (hits, misses, maxsize, currsize)
    with f.cache_info(). Clear the cache and statistics with f.cache_clear().
    Access the underlying function with f.__wrapped__.

    See: https://en.wikipedia.org/wiki/Cache_replacement_policies#Least_recently_used_(LRU)

    """

    # Users should only access the lru_cache through its public API:
    #   cache_info, cache_clear, and f.__wrapped__
    # The internals of the lru_cache are encapsulated for thread safety and
    # to allow the implementation to change (including a possible C version).

    if isinstance(maxsize, int):
        # Negative maxsize is treated as 0
        if maxsize < 0:
            maxsize = 0
    elif callable(maxsize) and isinstance(typed, bool):
        # The user_function was passed in directly via the maxsize argument
        user_function, maxsize = maxsize, 128
        wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
        wrapper.cache_parameters = lambda: {'maxsize': maxsize, 'typed': typed}
        return update_wrapper(wrapper, user_function)
    elif maxsize is not None:
        raise TypeError('Expected first argument to be an integer, a callable, or None')

    def decorating_function(user_function):
        wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
        wrapper.cache_parameters = lambda: {'maxsize': maxsize, 'typed': typed}
        return update_wrapper(wrapper, user_function)

    return decorating_function


def _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo):
    # Constants shared by all lru cache instances:
    sentinel = object()  # unique object used to signal cache misses
    make_key = _make_key  # build a key from the function arguments
    PREV, NEXT, KEY, RESULT = 0, 1, 2, 3  # names for the link fields

    cache = {}
    hits = misses = 0
    full = False
    cache_get = cache.get  # bound method to lookup a key or return None
    cache_len = cache.__len__  # get cache size without calling len()
    lock = RLock()  # because linkedlist updates aren't threadsafe
    root = []  # root of the circular doubly linked list
    root[:] = [root, root, None, None]  # initialize by pointing to self

    if maxsize == 0:

        def wrapper(*args, **kwds):
            # No caching -- just a statistics update
            nonlocal misses
            misses += 1
            result = user_function(*args, **kwds)
            return result

    elif maxsize is None:

        def wrapper(*args, **kwds):
            # Simple caching without ordering or size limit
            nonlocal hits, misses
            key = make_key(args, kwds, typed)
            result = cache_get(key, sentinel)
            if result is not sentinel:
                hits += 1
                return result
            misses += 1
            result = user_function(*args, **kwds)
            cache[key] = result
            return result

    else:

        def wrapper(*args, **kwds):
            # Size limited caching that tracks accesses by recency
            nonlocal root, hits, misses, full
            key = make_key(args, kwds, typed)
            with lock:
                link = cache_get(key)
                if link is not None:
                    # Move the link to the front of the circular queue
                    link_prev, link_next, _key, result = link
                    link_prev[NEXT] = link_next
                    link_next[PREV] = link_prev
                    last = root[PREV]
                    last[NEXT] = root[PREV] = link
                    link[PREV] = last
                    link[NEXT] = root
                    hits += 1
                    return result
                misses += 1
            result = user_function(*args, **kwds)
            with lock:
                if key in cache:
                    # Getting here means that this same key was added to the
                    # cache while the lock was released. Since the link
                    # update is already done, we need only return the
                    # computed result and update the count of misses.
                    pass
                elif full:
                    # Use the old root to store the new key and result.
                    oldroot = root
                    oldroot[KEY] = key
                    oldroot[RESULT] = result
                    # Empty the oldest link and make it the new root.
                    # Keep a reference to the old key and old result to
                    # prevent their ref counts from going to zero during the
                    # update. That will prevent potentially arbitrary object
                    # clean-up code (i.e. __del__) from running while we're
                    # still adjusting the links.
                    root = oldroot[NEXT]
                    oldkey = root[KEY]
                    root[KEY] = root[RESULT] = None
                    # Now update the cache dictionary.
                    del cache[oldkey]
                    # Save the potentially reentrant cache[key] assignment
                    # for last, after the root and links have been put in
                    # a consistent state.
                    cache[key] = oldroot
                else:
                    # Put result in a new link at the front of the queue.
                    last = root[PREV]
                    link = [last, root, key, result]
                    last[NEXT] = root[PREV] = cache[key] = link
                    # Use the cache_len bound method instead of the len() function
                    # which could potentially be wrapped in an lru_cache itself.
                    full = cache_len() >= maxsize
            return result

    def cache_info():
        """Report cache statistics"""
        with lock:
            return _CacheInfo(hits, misses, maxsize, cache_len())

    def cache_clear():
        """Clear the cache and cache statistics"""
        nonlocal hits, misses, full
        with lock:
            cache.clear()
            root[:] = [root, root, None, None]
            hits = misses = 0
            full = False

    wrapper.cache_info = cache_info
    wrapper.cache_clear = cache_clear
    return wrapper
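As the lru_cache docstring above notes, the decorator exposes cache_info() and cache_clear() on the wrapped function. A small sketch (the fib function is illustrative, not part of the module):

from backports.functools_lru_cache import lru_cache

@lru_cache(maxsize=128)
def fib(n):
    # Memoized naive recursion: each distinct n is computed only once.
    return n if n < 2 else fib(n - 1) + fib(n - 2)

print(fib(30))           # 832040
print(fib.cache_info())  # CacheInfo(hits=28, misses=31, maxsize=128, currsize=31)
fib.cache_clear()        # reset the cache and the statistics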
File diff suppressed because it is too large
@ -1,5 +0,0 @@
from . import main


if __name__ == '__main__':
    main()
@ -1,24 +0,0 @@
import sys


if sys.version_info < (3, 9):

    def removesuffix(self, suffix):
        # suffix='' should not call self[:-0].
        if suffix and self.endswith(suffix):
            return self[: -len(suffix)]
        else:
            return self[:]

    def removeprefix(self, prefix):
        if self.startswith(prefix):
            return self[len(prefix) :]
        else:
            return self[:]

else:

    def removesuffix(self, suffix):
        return self.removesuffix(suffix)

    def removeprefix(self, prefix):
        return self.removeprefix(prefix)
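The comment about suffix='' in the removed shim matters because -len('') is -0, and s[:-0] is the empty string rather than a copy of s. A quick illustration of the edge case the guard avoids (values are illustrative):

s = "report.txt"
print(s[:-len("")])             # '' -- slicing with -0 keeps nothing
print(removesuffix(s, ""))      # 'report.txt' -- the guarded branch returns a copy
print(removesuffix(s, ".txt"))  # 'report'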
49
lib/backports/zoneinfo/__init__.py
Normal file
@ -0,0 +1,49 @@
__all__ = [
    "ZoneInfo",
    "reset_tzpath",
    "available_timezones",
    "TZPATH",
    "ZoneInfoNotFoundError",
    "InvalidTZPathWarning",
]
import sys

from . import _tzpath
from ._common import ZoneInfoNotFoundError
from ._version import __version__

try:
    from ._czoneinfo import ZoneInfo
except ImportError:  # pragma: nocover
    from ._zoneinfo import ZoneInfo

reset_tzpath = _tzpath.reset_tzpath
available_timezones = _tzpath.available_timezones
InvalidTZPathWarning = _tzpath.InvalidTZPathWarning

if sys.version_info < (3, 7):
    # Module-level __getattr__ was added in Python 3.7, so instead of lazily
    # populating TZPATH on every access, we will register a callback with
    # reset_tzpath to update the top-level tuple.
    TZPATH = _tzpath.TZPATH

    def _tzpath_callback(new_tzpath):
        global TZPATH
        TZPATH = new_tzpath

    _tzpath.TZPATH_CALLBACKS.append(_tzpath_callback)
    del _tzpath_callback

else:

    def __getattr__(name):
        if name == "TZPATH":
            return _tzpath.TZPATH
        else:
            raise AttributeError(
                f"module {__name__!r} has no attribute {name!r}"
            )


def __dir__():
    return sorted(list(globals()) + ["TZPATH"])
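The package exposes the same API as Python 3.9's zoneinfo, so keys resolve to tzinfo objects that can be attached to datetimes. A short sketch (the America/New_York key is a standard IANA zone, used here for illustration):

from datetime import datetime
from backports.zoneinfo import ZoneInfo

# DST-aware wall-clock time for a fixed date.
dt = datetime(2020, 7, 4, 12, 0, tzinfo=ZoneInfo("America/New_York"))
print(dt.utcoffset())  # -1 day, 20:00:00 (EDT, i.e. UTC-4)

# Constructing the same key again returns the cached instance.
print(ZoneInfo("America/New_York") is dt.tzinfo)  # True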
45
lib/backports/zoneinfo/__init__.pyi
Normal file
@ -0,0 +1,45 @@
import os
import typing
from datetime import datetime, tzinfo
from typing import (
    Any,
    Iterable,
    Optional,
    Protocol,
    Sequence,
    Set,
    Type,
    Union,
)

_T = typing.TypeVar("_T", bound="ZoneInfo")

class _IOBytes(Protocol):
    def read(self, __size: int) -> bytes: ...
    def seek(self, __size: int, __whence: int = ...) -> Any: ...

class ZoneInfo(tzinfo):
    @property
    def key(self) -> str: ...
    def __init__(self, key: str) -> None: ...
    @classmethod
    def no_cache(cls: Type[_T], key: str) -> _T: ...
    @classmethod
    def from_file(
        cls: Type[_T], __fobj: _IOBytes, key: Optional[str] = ...
    ) -> _T: ...
    @classmethod
    def clear_cache(cls, *, only_keys: Iterable[str] = ...) -> None: ...

# Note: Both here and in clear_cache, the types allow the use of `str` where
# a sequence of strings is required. This should be remedied if a solution
# to this typing bug is found: https://github.com/python/typing/issues/256
def reset_tzpath(
    to: Optional[Sequence[Union[os.PathLike, str]]] = ...
) -> None: ...
def available_timezones() -> Set[str]: ...

TZPATH: Sequence[str]

class ZoneInfoNotFoundError(KeyError): ...
class InvalidTZPathWarning(RuntimeWarning): ...
171
lib/backports/zoneinfo/_common.py
Normal file
@ -0,0 +1,171 @@
import struct


def load_tzdata(key):
    try:
        import importlib.resources as importlib_resources
    except ImportError:
        import importlib_resources

    components = key.split("/")
    package_name = ".".join(["tzdata.zoneinfo"] + components[:-1])
    resource_name = components[-1]

    try:
        return importlib_resources.open_binary(package_name, resource_name)
    except (ImportError, FileNotFoundError, UnicodeEncodeError):
        # There are three types of exception that can be raised that all amount
        # to "we cannot find this key":
        #
        # ImportError: If package_name doesn't exist (e.g. if tzdata is not
        #   installed, or if there's an error in the folder name like
        #   Amrica/New_York)
        # FileNotFoundError: If resource_name doesn't exist in the package
        #   (e.g. Europe/Krasnoy)
        # UnicodeEncodeError: If package_name or resource_name are not UTF-8,
        #   such as keys containing a surrogate character.
        raise ZoneInfoNotFoundError(f"No time zone found with key {key}")


def load_data(fobj):
    header = _TZifHeader.from_file(fobj)

    if header.version == 1:
        time_size = 4
        time_type = "l"
    else:
        # Version 2+ has 64-bit integer transition times
        time_size = 8
        time_type = "q"

        # Version 2+ also starts with a Version 1 header and data, which
        # we need to skip now
        skip_bytes = (
            header.timecnt * 5  # Transition times and types
            + header.typecnt * 6  # Local time type records
            + header.charcnt  # Time zone designations
            + header.leapcnt * 8  # Leap second records
            + header.isstdcnt  # Standard/wall indicators
            + header.isutcnt  # UT/local indicators
        )

        fobj.seek(skip_bytes, 1)

        # Now we need to read the second header, which is not the same
        # as the first
        header = _TZifHeader.from_file(fobj)

    typecnt = header.typecnt
    timecnt = header.timecnt
    charcnt = header.charcnt

    # The data portion starts with timecnt transitions and indices
    if timecnt:
        trans_list_utc = struct.unpack(
            f">{timecnt}{time_type}", fobj.read(timecnt * time_size)
        )
        trans_idx = struct.unpack(f">{timecnt}B", fobj.read(timecnt))
    else:
        trans_list_utc = ()
        trans_idx = ()

    # Read the ttinfo struct, (utoff, isdst, abbrind)
    if typecnt:
        utcoff, isdst, abbrind = zip(
            *(struct.unpack(">lbb", fobj.read(6)) for i in range(typecnt))
        )
    else:
        utcoff = ()
        isdst = ()
        abbrind = ()

    # Now read the abbreviations. They are null-terminated strings, indexed
    # not by position in the array but by position in the unsplit
    # abbreviation string. I suppose this makes more sense in C, which uses
    # null to terminate the strings, but it's inconvenient here...
    abbr_vals = {}
    abbr_chars = fobj.read(charcnt)

    def get_abbr(idx):
        # Gets a string starting at idx and running until the next \x00
        #
        # We cannot pre-populate abbr_vals by splitting on \x00 because there
        # are some zones that use subsets of longer abbreviations, like so:
        #
        #  LMT\x00AHST\x00HDT\x00
        #
        # Where the idx to abbr mapping should be:
        #
        # {0: "LMT", 4: "AHST", 5: "HST", 9: "HDT"}
        if idx not in abbr_vals:
            span_end = abbr_chars.find(b"\x00", idx)
            abbr_vals[idx] = abbr_chars[idx:span_end].decode()

        return abbr_vals[idx]

    abbr = tuple(get_abbr(idx) for idx in abbrind)

    # The remainder of the file consists of leap seconds (currently unused) and
    # the standard/wall and ut/local indicators, which are metadata we don't need.
    # In version 2 files, we need to skip the unnecessary data to get at the TZ string:
    if header.version >= 2:
        # Each leap second record has size (time_size + 4)
        skip_bytes = header.isutcnt + header.isstdcnt + header.leapcnt * 12
        fobj.seek(skip_bytes, 1)

        c = fobj.read(1)  # Should be \n
        assert c == b"\n", c

        tz_bytes = b""
        while True:
            c = fobj.read(1)
            if c == b"\n":
                break
            tz_bytes += c

        tz_str = tz_bytes
    else:
        tz_str = None

    return trans_idx, trans_list_utc, utcoff, isdst, abbr, tz_str


class _TZifHeader:
    __slots__ = [
        "version",
        "isutcnt",
        "isstdcnt",
        "leapcnt",
        "timecnt",
        "typecnt",
        "charcnt",
    ]

    def __init__(self, *args):
        assert len(self.__slots__) == len(args)
        for attr, val in zip(self.__slots__, args):
            setattr(self, attr, val)

    @classmethod
    def from_file(cls, stream):
        # The header starts with a 4-byte "magic" value
        if stream.read(4) != b"TZif":
            raise ValueError("Invalid TZif file: magic not found")

        _version = stream.read(1)
        if _version == b"\x00":
            version = 1
        else:
            version = int(_version)
        stream.read(15)

        args = (version,)

        # Slots are defined in the order that the bytes are arranged
        args = args + struct.unpack(">6l", stream.read(24))

        return cls(*args)


class ZoneInfoNotFoundError(KeyError):
    """Exception raised when a ZoneInfo key is not found."""
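The header layout that _TZifHeader.from_file decodes is fixed: a 4-byte magic, a 1-byte version, 15 reserved bytes, then six big-endian 32-bit counts. A standalone sketch of the same read (the /usr/share/zoneinfo/UTC path is an assumption and varies by system):

import struct

# Read just the fixed 44-byte TZif header, mirroring _TZifHeader.from_file.
with open("/usr/share/zoneinfo/UTC", "rb") as f:  # path is an assumption
    assert f.read(4) == b"TZif"  # magic
    version = f.read(1)          # b"\x00", b"2", or b"3"
    f.read(15)                   # reserved/unused
    isutcnt, isstdcnt, leapcnt, timecnt, typecnt, charcnt = struct.unpack(
        ">6l", f.read(24)
    )
    print(version, timecnt, typecnt, charcnt)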
207
lib/backports/zoneinfo/_tzpath.py
Normal file
@ -0,0 +1,207 @@
import os
import sys

PY36 = sys.version_info < (3, 7)


def reset_tzpath(to=None):
    global TZPATH

    tzpaths = to
    if tzpaths is not None:
        if isinstance(tzpaths, (str, bytes)):
            raise TypeError(
                f"tzpaths must be a list or tuple, "
                + f"not {type(tzpaths)}: {tzpaths!r}"
            )

        if not all(map(os.path.isabs, tzpaths)):
            raise ValueError(_get_invalid_paths_message(tzpaths))
        base_tzpath = tzpaths
    else:
        env_var = os.environ.get("PYTHONTZPATH", None)
        if env_var is not None:
            base_tzpath = _parse_python_tzpath(env_var)
        elif sys.platform != "win32":
            base_tzpath = [
                "/usr/share/zoneinfo",
                "/usr/lib/zoneinfo",
                "/usr/share/lib/zoneinfo",
                "/etc/zoneinfo",
            ]

            base_tzpath.sort(key=lambda x: not os.path.exists(x))
        else:
            base_tzpath = ()

    TZPATH = tuple(base_tzpath)

    if TZPATH_CALLBACKS:
        for callback in TZPATH_CALLBACKS:
            callback(TZPATH)


def _parse_python_tzpath(env_var):
    if not env_var:
        return ()

    raw_tzpath = env_var.split(os.pathsep)
    new_tzpath = tuple(filter(os.path.isabs, raw_tzpath))

    # If anything has been filtered out, we will warn about it
    if len(new_tzpath) != len(raw_tzpath):
        import warnings

        msg = _get_invalid_paths_message(raw_tzpath)

        warnings.warn(
            "Invalid paths specified in PYTHONTZPATH environment variable."
            + msg,
            InvalidTZPathWarning,
        )

    return new_tzpath


def _get_invalid_paths_message(tzpaths):
    invalid_paths = (path for path in tzpaths if not os.path.isabs(path))

    prefix = "\n    "
    indented_str = prefix + prefix.join(invalid_paths)

    return (
        "Paths should be absolute but found the following relative paths:"
        + indented_str
    )


if sys.version_info < (3, 8):
|
||||
|
||||
def _isfile(path):
|
||||
# bpo-33721: In Python 3.8 non-UTF8 paths return False rather than
|
||||
# raising an error. See https://bugs.python.org/issue33721
|
||||
try:
|
||||
return os.path.isfile(path)
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
|
||||
else:
|
||||
_isfile = os.path.isfile
|
||||
|
||||
|
||||
def find_tzfile(key):
|
||||
"""Retrieve the path to a TZif file from a key."""
|
||||
_validate_tzfile_path(key)
|
||||
for search_path in TZPATH:
|
||||
filepath = os.path.join(search_path, key)
|
||||
if _isfile(filepath):
|
||||
return filepath
|
||||
|
||||
return None
|
||||
|
||||
|
||||
_TEST_PATH = os.path.normpath(os.path.join("_", "_"))[:-1]
|
||||
|
||||
|
||||
def _validate_tzfile_path(path, _base=_TEST_PATH):
|
||||
if os.path.isabs(path):
|
||||
raise ValueError(
|
||||
f"ZoneInfo keys may not be absolute paths, got: {path}"
|
||||
)
|
||||
|
||||
# We only care about the kinds of path normalizations that would change the
|
||||
# length of the key - e.g. a/../b -> a/b, or a/b/ -> a/b. On Windows,
|
||||
# normpath will also change from a/b to a\b, but that would still preserve
|
||||
# the length.
|
||||
new_path = os.path.normpath(path)
|
||||
if len(new_path) != len(path):
|
||||
raise ValueError(
|
||||
f"ZoneInfo keys must be normalized relative paths, got: {path}"
|
||||
)
|
||||
|
||||
resolved = os.path.normpath(os.path.join(_base, new_path))
|
||||
if not resolved.startswith(_base):
|
||||
raise ValueError(
|
||||
f"ZoneInfo keys must refer to subdirectories of TZPATH, got: {path}"
|
||||
)
|
||||
|
||||
|
||||
del _TEST_PATH
|
||||
|
||||
|
||||
def available_timezones():
|
||||
"""Returns a set containing all available time zones.
|
||||
|
||||
.. caution::
|
||||
|
||||
This may attempt to open a large number of files, since the best way to
|
||||
determine if a given file on the time zone search path is to open it
|
||||
and check for the "magic string" at the beginning.
|
||||
"""
|
||||
try:
|
||||
from importlib import resources
|
||||
except ImportError:
|
||||
import importlib_resources as resources
|
||||
|
||||
valid_zones = set()
|
||||
|
||||
# Start with loading from the tzdata package if it exists: this has a
|
||||
# pre-assembled list of zones that only requires opening one file.
|
||||
try:
|
||||
with resources.open_text("tzdata", "zones") as f:
|
||||
for zone in f:
|
||||
zone = zone.strip()
|
||||
if zone:
|
||||
valid_zones.add(zone)
|
||||
except (ImportError, FileNotFoundError):
|
||||
pass
|
||||
|
||||
def valid_key(fpath):
|
||||
try:
|
||||
with open(fpath, "rb") as f:
|
||||
return f.read(4) == b"TZif"
|
||||
except Exception: # pragma: nocover
|
||||
return False
|
||||
|
||||
for tz_root in TZPATH:
|
||||
if not os.path.exists(tz_root):
|
||||
continue
|
||||
|
||||
for root, dirnames, files in os.walk(tz_root):
|
||||
if root == tz_root:
|
||||
# right/ and posix/ are special directories and shouldn't be
|
||||
# included in the output of available zones
|
||||
if "right" in dirnames:
|
||||
dirnames.remove("right")
|
||||
if "posix" in dirnames:
|
||||
dirnames.remove("posix")
|
||||
|
||||
for file in files:
|
||||
fpath = os.path.join(root, file)
|
||||
|
||||
key = os.path.relpath(fpath, start=tz_root)
|
||||
if os.sep != "/": # pragma: nocover
|
||||
key = key.replace(os.sep, "/")
|
||||
|
||||
if not key or key in valid_zones:
|
||||
continue
|
||||
|
||||
if valid_key(fpath):
|
||||
valid_zones.add(key)
|
||||
|
||||
if "posixrules" in valid_zones:
|
||||
# posixrules is a special symlink-only time zone where it exists, it
|
||||
# should not be included in the output
|
||||
valid_zones.remove("posixrules")
|
||||
|
||||
return valid_zones
|
||||
|
||||
|
||||
class InvalidTZPathWarning(RuntimeWarning):
|
||||
"""Warning raised if an invalid path is specified in PYTHONTZPATH."""
|
||||
|
||||
|
||||
TZPATH = ()
|
||||
TZPATH_CALLBACKS = []
|
||||
reset_tzpath()
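
A short sketch of the key validation implemented above (an editorial
illustration; these helpers are private to the backports.zoneinfo package
and may change):

    from backports.zoneinfo import _tzpath

    _tzpath._validate_tzfile_path("America/New_York")  # accepted
    try:
        _tzpath._validate_tzfile_path("../etc/passwd")
    except ValueError as exc:
        print(exc)  # keys must refer to subdirectories of TZPATH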

1
lib/backports/zoneinfo/_version.py
Normal file
@ -0,0 +1 @@
__version__ = "0.2.1"

754
lib/backports/zoneinfo/_zoneinfo.py
Normal file
@ -0,0 +1,754 @@
import bisect
import calendar
import collections
import functools
import re
import weakref
from datetime import datetime, timedelta, tzinfo

from . import _common, _tzpath

EPOCH = datetime(1970, 1, 1)
EPOCHORDINAL = datetime(1970, 1, 1).toordinal()

# It is relatively expensive to construct new timedelta objects, and in most
# cases we're looking at the same deltas, like integer numbers of hours, etc.
# To improve speed and memory use, we'll keep a dictionary with references
# to the ones we've already used so far.
#
# Loading every time zone in the 2020a version of the time zone database
# requires 447 timedeltas, which requires approximately the amount of space
# that ZoneInfo("America/New_York") with 236 transitions takes up, so we will
# set the cache size to 512 so that in the common case we always get cache
# hits, but specifically crafted ZoneInfo objects don't leak arbitrary amounts
# of memory.
@functools.lru_cache(maxsize=512)
def _load_timedelta(seconds):
    return timedelta(seconds=seconds)
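
# A small illustration of the cache above (an editorial sketch, not part of
# the vendored module): equal second counts come back as the identical
# object, so repeated offsets cost a single allocation:
#
#     >>> _load_timedelta(3600) is _load_timedelta(3600)
#     True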


class ZoneInfo(tzinfo):
    _strong_cache_size = 8
    _strong_cache = collections.OrderedDict()
    _weak_cache = weakref.WeakValueDictionary()
    __module__ = "backports.zoneinfo"

    def __init_subclass__(cls):
        cls._strong_cache = collections.OrderedDict()
        cls._weak_cache = weakref.WeakValueDictionary()

    def __new__(cls, key):
        instance = cls._weak_cache.get(key, None)
        if instance is None:
            instance = cls._weak_cache.setdefault(key, cls._new_instance(key))
            instance._from_cache = True

        # Update the "strong" cache
        cls._strong_cache[key] = cls._strong_cache.pop(key, instance)

        if len(cls._strong_cache) > cls._strong_cache_size:
            cls._strong_cache.popitem(last=False)

        return instance
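
    # Illustration (an editorial note assuming a usable time zone source such
    # as the tzdata package): because of the weak/strong caches above,
    # constructing the same key twice returns the identical instance, e.g.
    # ZoneInfo("America/New_York") is ZoneInfo("America/New_York") -> True,
    # while ZoneInfo.no_cache(...) always builds a fresh object.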

    @classmethod
    def no_cache(cls, key):
        obj = cls._new_instance(key)
        obj._from_cache = False

        return obj

    @classmethod
    def _new_instance(cls, key):
        obj = super().__new__(cls)
        obj._key = key
        obj._file_path = obj._find_tzfile(key)

        if obj._file_path is not None:
            file_obj = open(obj._file_path, "rb")
        else:
            file_obj = _common.load_tzdata(key)

        with file_obj as f:
            obj._load_file(f)

        return obj

    @classmethod
    def from_file(cls, fobj, key=None):
        obj = super().__new__(cls)
        obj._key = key
        obj._file_path = None
        obj._load_file(fobj)
        obj._file_repr = repr(fobj)

        # Disable pickling for objects created from files
        obj.__reduce__ = obj._file_reduce

        return obj

    @classmethod
    def clear_cache(cls, *, only_keys=None):
        if only_keys is not None:
            for key in only_keys:
                cls._weak_cache.pop(key, None)
                cls._strong_cache.pop(key, None)

        else:
            cls._weak_cache.clear()
            cls._strong_cache.clear()

    @property
    def key(self):
        return self._key

    def utcoffset(self, dt):
        return self._find_trans(dt).utcoff

    def dst(self, dt):
        return self._find_trans(dt).dstoff

    def tzname(self, dt):
        return self._find_trans(dt).tzname

    def fromutc(self, dt):
        """Convert from datetime in UTC to datetime in local time"""

        if not isinstance(dt, datetime):
            raise TypeError("fromutc() requires a datetime argument")
        if dt.tzinfo is not self:
            raise ValueError("dt.tzinfo is not self")

        timestamp = self._get_local_timestamp(dt)
        num_trans = len(self._trans_utc)

        if num_trans >= 1 and timestamp < self._trans_utc[0]:
            tti = self._tti_before
            fold = 0
        elif (
            num_trans == 0 or timestamp > self._trans_utc[-1]
        ) and not isinstance(self._tz_after, _ttinfo):
            tti, fold = self._tz_after.get_trans_info_fromutc(
                timestamp, dt.year
            )
        elif num_trans == 0:
            tti = self._tz_after
            fold = 0
        else:
            idx = bisect.bisect_right(self._trans_utc, timestamp)

            if num_trans > 1 and timestamp >= self._trans_utc[1]:
                tti_prev, tti = self._ttinfos[idx - 2 : idx]
            elif timestamp > self._trans_utc[-1]:
                tti_prev = self._ttinfos[-1]
                tti = self._tz_after
            else:
                tti_prev = self._tti_before
                tti = self._ttinfos[0]

            # Detect fold
            shift = tti_prev.utcoff - tti.utcoff
            fold = shift.total_seconds() > timestamp - self._trans_utc[idx - 1]
        dt += tti.utcoff
        if fold:
            return dt.replace(fold=1)
        else:
            return dt

    def _find_trans(self, dt):
        if dt is None:
            if self._fixed_offset:
                return self._tz_after
            else:
                return _NO_TTINFO

        ts = self._get_local_timestamp(dt)

        lt = self._trans_local[dt.fold]

        num_trans = len(lt)

        if num_trans and ts < lt[0]:
            return self._tti_before
        elif not num_trans or ts > lt[-1]:
            if isinstance(self._tz_after, _TZStr):
                return self._tz_after.get_trans_info(ts, dt.year, dt.fold)
            else:
                return self._tz_after
        else:
            # idx is the transition that occurs after this timestamp, so we
            # subtract off 1 to get the current ttinfo
            idx = bisect.bisect_right(lt, ts) - 1
            assert idx >= 0
            return self._ttinfos[idx]

    def _get_local_timestamp(self, dt):
        return (
            (dt.toordinal() - EPOCHORDINAL) * 86400
            + dt.hour * 3600
            + dt.minute * 60
            + dt.second
        )

    def __str__(self):
        if self._key is not None:
            return f"{self._key}"
        else:
            return repr(self)

    def __repr__(self):
        if self._key is not None:
            return f"{self.__class__.__name__}(key={self._key!r})"
        else:
            return f"{self.__class__.__name__}.from_file({self._file_repr})"

    def __reduce__(self):
        return (self.__class__._unpickle, (self._key, self._from_cache))

    def _file_reduce(self):
        import pickle

        raise pickle.PicklingError(
            "Cannot pickle a ZoneInfo file created from a file stream."
        )

    @classmethod
    def _unpickle(cls, key, from_cache):
        if from_cache:
            return cls(key)
        else:
            return cls.no_cache(key)

    def _find_tzfile(self, key):
        return _tzpath.find_tzfile(key)

    def _load_file(self, fobj):
        # Retrieve all the data as it exists in the zoneinfo file
        trans_idx, trans_utc, utcoff, isdst, abbr, tz_str = _common.load_data(
            fobj
        )

        # Infer the DST offsets (needed for .dst()) from the data
        dstoff = self._utcoff_to_dstoff(trans_idx, utcoff, isdst)

        # Convert all the transition times (UTC) into "seconds since 1970-01-01 local time"
        trans_local = self._ts_to_local(trans_idx, trans_utc, utcoff)

        # Construct `_ttinfo` objects for each transition in the file
        _ttinfo_list = [
            _ttinfo(
                _load_timedelta(utcoffset), _load_timedelta(dstoffset), tzname
            )
            for utcoffset, dstoffset, tzname in zip(utcoff, dstoff, abbr)
        ]

        self._trans_utc = trans_utc
        self._trans_local = trans_local
        self._ttinfos = [_ttinfo_list[idx] for idx in trans_idx]

        # Find the first non-DST transition
        for i in range(len(isdst)):
            if not isdst[i]:
                self._tti_before = _ttinfo_list[i]
                break
        else:
            if self._ttinfos:
                self._tti_before = self._ttinfos[0]
            else:
                self._tti_before = None

        # Set the "fallback" time zone
        if tz_str is not None and tz_str != b"":
            self._tz_after = _parse_tz_str(tz_str.decode())
        else:
            if not self._ttinfos and not _ttinfo_list:
                raise ValueError("No time zone information found.")

            if self._ttinfos:
                self._tz_after = self._ttinfos[-1]
            else:
                self._tz_after = _ttinfo_list[-1]

        # Determine if this is a "fixed offset" zone, meaning that the output
        # of the utcoffset, dst and tzname functions does not depend on the
        # specific datetime passed.
        #
        # We make three simplifying assumptions here:
        #
        # 1. If _tz_after is not a _ttinfo, it has transitions that might
        #    actually occur (it is possible to construct TZ strings that
        #    specify STD and DST but no transitions ever occur, such as
        #    AAA0BBB,0/0,J365/25).
        # 2. If _ttinfo_list contains more than one _ttinfo object, the objects
        #    represent different offsets.
        # 3. _ttinfo_list contains no unused _ttinfos (in which case an
        #    otherwise fixed-offset zone with extra _ttinfos defined may
        #    appear to *not* be a fixed offset zone).
        #
        # Violations to these assumptions would be fairly exotic, and exotic
        # zones should almost certainly not be used with datetime.time (the
        # only thing that would be affected by this).
        if len(_ttinfo_list) > 1 or not isinstance(self._tz_after, _ttinfo):
            self._fixed_offset = False
        elif not _ttinfo_list:
            self._fixed_offset = True
        else:
            self._fixed_offset = _ttinfo_list[0] == self._tz_after

    @staticmethod
    def _utcoff_to_dstoff(trans_idx, utcoffsets, isdsts):
        # Now we must transform our ttis and abbrs into `_ttinfo` objects,
        # but there is an issue: .dst() must return a timedelta with the
        # difference between utcoffset() and the "standard" offset, but
        # the "base offset" and "DST offset" are not encoded in the file;
        # we can infer what they are from the isdst flag, but it is not
        # sufficient to just look at the last standard offset, because
        # occasionally countries will shift both DST offset and base offset.

        typecnt = len(isdsts)
        dstoffs = [0] * typecnt  # Provisionally assign all to 0.
        dst_cnt = sum(isdsts)
        dst_found = 0

        for i in range(1, len(trans_idx)):
            if dst_cnt == dst_found:
                break

            idx = trans_idx[i]

            dst = isdsts[idx]

            # We're only going to look at daylight saving time
            if not dst:
                continue

            # Skip any offsets that have already been assigned
            if dstoffs[idx] != 0:
                continue

            dstoff = 0
            utcoff = utcoffsets[idx]

            comp_idx = trans_idx[i - 1]

            if not isdsts[comp_idx]:
                dstoff = utcoff - utcoffsets[comp_idx]

            if not dstoff and idx < (typecnt - 1):
                comp_idx = trans_idx[i + 1]

                # If the following transition is also DST and we couldn't
                # find the DST offset by this point, we're going to have to
                # skip it and hope this transition gets assigned later
                if isdsts[comp_idx]:
                    continue

                dstoff = utcoff - utcoffsets[comp_idx]

            if dstoff:
                dst_found += 1
                dstoffs[idx] = dstoff
        else:
            # If we didn't find a valid value for a given index, we'll end up
            # with dstoff = 0 for something where `isdst=1`. This is obviously
            # wrong - one hour will be a much better guess than 0
            for idx in range(typecnt):
                if not dstoffs[idx] and isdsts[idx]:
                    dstoffs[idx] = 3600

        return dstoffs

    @staticmethod
    def _ts_to_local(trans_idx, trans_list_utc, utcoffsets):
        """Generate number of seconds since 1970 *in the local time*.

        This is necessary to easily find the transition times in local time"""
        if not trans_list_utc:
            return [[], []]

        # Start with the timestamps and modify in-place
        trans_list_wall = [list(trans_list_utc), list(trans_list_utc)]

        if len(utcoffsets) > 1:
            offset_0 = utcoffsets[0]
            offset_1 = utcoffsets[trans_idx[0]]
            if offset_1 > offset_0:
                offset_1, offset_0 = offset_0, offset_1
        else:
            offset_0 = offset_1 = utcoffsets[0]

        trans_list_wall[0][0] += offset_0
        trans_list_wall[1][0] += offset_1

        for i in range(1, len(trans_idx)):
            offset_0 = utcoffsets[trans_idx[i - 1]]
            offset_1 = utcoffsets[trans_idx[i]]

            if offset_1 > offset_0:
                offset_1, offset_0 = offset_0, offset_1

            trans_list_wall[0][i] += offset_0
            trans_list_wall[1][i] += offset_1

        return trans_list_wall


class _ttinfo:
    __slots__ = ["utcoff", "dstoff", "tzname"]

    def __init__(self, utcoff, dstoff, tzname):
        self.utcoff = utcoff
        self.dstoff = dstoff
        self.tzname = tzname

    def __eq__(self, other):
        return (
            self.utcoff == other.utcoff
            and self.dstoff == other.dstoff
            and self.tzname == other.tzname
        )

    def __repr__(self):  # pragma: nocover
        return (
            f"{self.__class__.__name__}"
            + f"({self.utcoff}, {self.dstoff}, {self.tzname})"
        )


_NO_TTINFO = _ttinfo(None, None, None)


class _TZStr:
    __slots__ = (
        "std",
        "dst",
        "start",
        "end",
        "get_trans_info",
        "get_trans_info_fromutc",
        "dst_diff",
    )

    def __init__(
        self, std_abbr, std_offset, dst_abbr, dst_offset, start=None, end=None
    ):
        self.dst_diff = dst_offset - std_offset
        std_offset = _load_timedelta(std_offset)
        self.std = _ttinfo(
            utcoff=std_offset, dstoff=_load_timedelta(0), tzname=std_abbr
        )

        self.start = start
        self.end = end

        dst_offset = _load_timedelta(dst_offset)
        delta = _load_timedelta(self.dst_diff)
        self.dst = _ttinfo(utcoff=dst_offset, dstoff=delta, tzname=dst_abbr)

        # These are assertions because the constructor should only be called
        # by functions that would fail before passing start or end
        assert start is not None, "No transition start specified"
        assert end is not None, "No transition end specified"

        self.get_trans_info = self._get_trans_info
        self.get_trans_info_fromutc = self._get_trans_info_fromutc

    def transitions(self, year):
        start = self.start.year_to_epoch(year)
        end = self.end.year_to_epoch(year)
        return start, end

    def _get_trans_info(self, ts, year, fold):
        """Get the information about the current transition - tti"""
        start, end = self.transitions(year)

        # With fold = 0, the period (denominated in local time) with the
        # smaller offset starts at the end of the gap and ends at the end of
        # the fold; with fold = 1, it runs from the start of the gap to the
        # beginning of the fold.
        #
        # So in order to determine the DST boundaries we need to know both
        # the fold and whether DST is positive or negative (rare), and it
        # turns out that this boils down to fold XOR is_positive.
        if fold == (self.dst_diff >= 0):
            end -= self.dst_diff
        else:
            start += self.dst_diff

        if start < end:
            isdst = start <= ts < end
        else:
            isdst = not (end <= ts < start)

        return self.dst if isdst else self.std

    def _get_trans_info_fromutc(self, ts, year):
        start, end = self.transitions(year)
        start -= self.std.utcoff.total_seconds()
        end -= self.dst.utcoff.total_seconds()

        if start < end:
            isdst = start <= ts < end
        else:
            isdst = not (end <= ts < start)

        # For positive DST, the ambiguous period is one dst_diff after the end
        # of DST; for negative DST, the ambiguous period is one dst_diff before
        # the start of DST.
        if self.dst_diff > 0:
            ambig_start = end
            ambig_end = end + self.dst_diff
        else:
            ambig_start = start
            ambig_end = start - self.dst_diff

        fold = ambig_start <= ts < ambig_end

        return (self.dst if isdst else self.std, fold)


def _post_epoch_days_before_year(year):
    """Get the number of days between 1970-01-01 and YEAR-01-01"""
    y = year - 1
    return y * 365 + y // 4 - y // 100 + y // 400 - EPOCHORDINAL
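
# A worked check of the formula above (an editorial note, not from the
# vendored source): for year 1971, y = 1970, so y*365 + y//4 - y//100 + y//400
# = 719050 + 492 - 19 + 4 = 719527; subtracting EPOCHORDINAL (719163, the
# proleptic ordinal of 1970-01-01) gives 364, which paired with the one-based
# Julian day J1 places 1971-01-01 exactly 365 days after the epoch.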


class _DayOffset:
    __slots__ = ["d", "julian", "hour", "minute", "second"]

    def __init__(self, d, julian, hour=2, minute=0, second=0):
        if not (0 + julian) <= d <= 365:
            min_day = 0 + julian
            raise ValueError(f"d must be in [{min_day}, 365], not: {d}")

        self.d = d
        self.julian = julian
        self.hour = hour
        self.minute = minute
        self.second = second

    def year_to_epoch(self, year):
        days_before_year = _post_epoch_days_before_year(year)

        d = self.d
        if self.julian and d >= 59 and calendar.isleap(year):
            d += 1

        epoch = (days_before_year + d) * 86400
        epoch += self.hour * 3600 + self.minute * 60 + self.second

        return epoch


class _CalendarOffset:
    __slots__ = ["m", "w", "d", "hour", "minute", "second"]

    _DAYS_BEFORE_MONTH = (
        -1,
        0,
        31,
        59,
        90,
        120,
        151,
        181,
        212,
        243,
        273,
        304,
        334,
    )

    def __init__(self, m, w, d, hour=2, minute=0, second=0):
        if not 0 < m <= 12:
            raise ValueError("m must be in (0, 12]")

        if not 0 < w <= 5:
            raise ValueError("w must be in (0, 5]")

        if not 0 <= d <= 6:
            raise ValueError("d must be in [0, 6]")

        self.m = m
        self.w = w
        self.d = d
        self.hour = hour
        self.minute = minute
        self.second = second

    @classmethod
    def _ymd2ord(cls, year, month, day):
        return (
            _post_epoch_days_before_year(year)
            + cls._DAYS_BEFORE_MONTH[month]
            + (month > 2 and calendar.isleap(year))
            + day
        )

    # TODO: These are not actually epoch dates as they are expressed in local time
    def year_to_epoch(self, year):
        """Calculates the datetime of the occurrence from the year"""
        # We know year and month, we need to convert w, d into day of month
        #
        # Week 1 is the first week in which day `d` (where 0 = Sunday) appears.
        # Week 5 represents the last occurrence of day `d`, so we need to know
        # the range of the month.
        first_day, days_in_month = calendar.monthrange(year, self.m)

        # This equation seems magical, so I'll break it down:
        # 1. calendar says 0 = Monday, POSIX says 0 = Sunday
        #    so we need first_day + 1 to get 1 = Monday -> 7 = Sunday,
        #    which is still equivalent because this math is mod 7
        # 2. Get first day - desired day mod 7: -1 % 7 = 6, so we don't need
        #    to do anything to adjust negative numbers.
        # 3. Add 1 because month days are a 1-based index.
        month_day = (self.d - (first_day + 1)) % 7 + 1

        # Now use a 0-based index version of `w` to calculate the w-th
        # occurrence of `d`
        month_day += (self.w - 1) * 7

        # month_day will only be > days_in_month if w was 5, and `w` means
        # "last occurrence of `d`", so now we just check if we over-shot the
        # end of the month and if so knock off 1 week.
        if month_day > days_in_month:
            month_day -= 7

        ordinal = self._ymd2ord(year, self.m, month_day)
        epoch = ordinal * 86400
        epoch += self.hour * 3600 + self.minute * 60 + self.second
        return epoch
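
    # Worked example for the equation above (an editorial note): for the
    # POSIX rule M3.2.0 (second Sunday in March) in 2021, March 1 is a
    # Monday, so first_day = 0 and month_day = (0 - 1) % 7 + 1 = 7, the first
    # Sunday; adding (2 - 1) * 7 gives the 14th -- the 2021 US DST start.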


def _parse_tz_str(tz_str):
    # The tz string has the format:
    #
    # std[offset[dst[offset],start[/time],end[/time]]]
    #
    # std and dst must be 3 or more characters long and must not contain
    # a leading colon, embedded digits, commas, nor a plus or minus sign;
    # The spaces between "std" and "offset" are only for display and are
    # not actually present in the string.
    #
    # The format of the offset is ``[+|-]hh[:mm[:ss]]``

    offset_str, *start_end_str = tz_str.split(",", 1)

    # fmt: off
    parser_re = re.compile(
        r"(?P<std>[^<0-9:.+-]+|<[a-zA-Z0-9+\-]+>)" +
        r"((?P<stdoff>[+-]?\d{1,2}(:\d{2}(:\d{2})?)?)" +
            r"((?P<dst>[^0-9:.+-]+|<[a-zA-Z0-9+\-]+>)" +
                r"((?P<dstoff>[+-]?\d{1,2}(:\d{2}(:\d{2})?)?))?" +
            r")?" +  # dst
        r")?$"  # stdoff
    )
    # fmt: on

    m = parser_re.match(offset_str)

    if m is None:
        raise ValueError(f"{tz_str} is not a valid TZ string")

    std_abbr = m.group("std")
    dst_abbr = m.group("dst")
    dst_offset = None

    std_abbr = std_abbr.strip("<>")

    if dst_abbr:
        dst_abbr = dst_abbr.strip("<>")

    std_offset = m.group("stdoff")
    if std_offset:
        try:
            std_offset = _parse_tz_delta(std_offset)
        except ValueError as e:
            raise ValueError(f"Invalid STD offset in {tz_str}") from e
    else:
        std_offset = 0

    if dst_abbr is not None:
        dst_offset = m.group("dstoff")
        if dst_offset:
            try:
                dst_offset = _parse_tz_delta(dst_offset)
            except ValueError as e:
                raise ValueError(f"Invalid DST offset in {tz_str}") from e
        else:
            dst_offset = std_offset + 3600

        if not start_end_str:
            raise ValueError(f"Missing transition rules: {tz_str}")

        start_end_strs = start_end_str[0].split(",", 1)
        try:
            start, end = (_parse_dst_start_end(x) for x in start_end_strs)
        except ValueError as e:
            raise ValueError(f"Invalid TZ string: {tz_str}") from e

        return _TZStr(std_abbr, std_offset, dst_abbr, dst_offset, start, end)
    elif start_end_str:
        raise ValueError(f"Transition rule present without DST: {tz_str}")
    else:
        # This is a static ttinfo, don't return _TZStr
        return _ttinfo(
            _load_timedelta(std_offset), _load_timedelta(0), std_abbr
        )
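
# Illustration of the parse above (an editorial sketch using these private
# helpers): "PST8PDT,M3.2.0,M11.1.0" produces a _TZStr whose standard offset
# is -8 hours (POSIX inverts the sign; see _parse_tz_delta below) and whose
# DST offset defaults to std_offset + 3600 because no explicit dstoff is
# given, while a bare "UTC0" returns a fixed _ttinfo instead.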


def _parse_dst_start_end(dststr):
    date, *time = dststr.split("/")
    if date[0] == "M":
        n_is_julian = False
        m = re.match(r"M(\d{1,2})\.(\d).(\d)$", date)
        if m is None:
            raise ValueError(f"Invalid dst start/end date: {dststr}")
        date_offset = tuple(map(int, m.groups()))
        offset = _CalendarOffset(*date_offset)
    else:
        if date[0] == "J":
            n_is_julian = True
            date = date[1:]
        else:
            n_is_julian = False

        doy = int(date)
        offset = _DayOffset(doy, n_is_julian)

    if time:
        time_components = list(map(int, time[0].split(":")))
        n_components = len(time_components)
        if n_components < 3:
            time_components.extend([0] * (3 - n_components))
        offset.hour, offset.minute, offset.second = time_components

    return offset


def _parse_tz_delta(tz_delta):
    match = re.match(
        r"(?P<sign>[+-])?(?P<h>\d{1,2})(:(?P<m>\d{2})(:(?P<s>\d{2}))?)?",
        tz_delta,
    )
    # Anything passed to this function should already have hit an equivalent
    # regular expression to find the section to parse.
    assert match is not None, tz_delta

    h, m, s = (
        int(v) if v is not None else 0
        for v in map(match.group, ("h", "m", "s"))
    )

    total = h * 3600 + m * 60 + s

    if not -86400 < total < 86400:
        raise ValueError(
            "Offset must be strictly between -24h and +24h:" + tz_delta
        )

    # Yes, +5 maps to an offset of -5h
    if match.group("sign") != "-":
        total *= -1

    return total
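
As an end-to-end sketch of the TZ-string machinery in this file (an
editorial illustration; it drives the private helpers directly, which are
internal and may change), parsing a common POSIX rule shows the inverted
sign convention and the implicit one-hour DST offset:

    from backports.zoneinfo import _zoneinfo

    tz = _zoneinfo._parse_tz_str("PST8PDT,M3.2.0,M11.1.0")
    print(tz.std.utcoff)  # -1 day, 16:00:00 -- i.e. UTC-8 ("8" means -8h)
    print(tz.dst.utcoff)  # -1 day, 17:00:00 -- i.e. UTC-7
    print(tz.dst_diff)    # 3600: DST shifts the clock one hour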

@ -11,9 +11,9 @@ from bleach.sanitizer import (

# yyyymmdd
__releasedate__ = "20241029"
__releasedate__ = "20231006"
# x.y.z or x.y.z.dev0 -- semver
__version__ = "6.2.0"
__version__ = "6.1.0"


__all__ = ["clean", "linkify"]

@ -1,7 +1,7 @@
from __future__ import absolute_import, division, unicode_literals

from bleach.six_shim import text_type
from bleach.six_shim import http_client, urllib
from six import text_type
from six.moves import http_client, urllib

import codecs
import re

@ -1,6 +1,6 @@
from __future__ import absolute_import, division, unicode_literals

from bleach.six_shim import unichr as chr
from six import unichr as chr

from collections import deque, OrderedDict
from sys import version_info

@ -1,5 +1,5 @@
from __future__ import absolute_import, division, unicode_literals
from bleach.six_shim import text_type
from six import text_type

from bisect import bisect_left

@ -7,7 +7,7 @@ try:
except ImportError:
    from collections import Mapping

from bleach.six_shim import text_type, PY3
from six import text_type, PY3

if PY3:
    import xml.etree.ElementTree as default_etree

@ -1,6 +1,6 @@
from __future__ import absolute_import, division, unicode_literals

from bleach.six_shim import text_type
from six import text_type

from . import base
from ..constants import namespaces, voidElements

@ -12,7 +12,7 @@ import re
import warnings
from xml.sax.saxutils import escape, unescape

from bleach.six_shim import urllib_parse as urlparse
from six.moves import urllib_parse as urlparse

from . import base
from ..constants import namespaces, prefixes

@ -1,5 +1,5 @@
from __future__ import absolute_import, division, unicode_literals
from bleach.six_shim import viewkeys
from six import with_metaclass, viewkeys

import types

@ -423,7 +423,7 @@ def getPhases(debug):
        return type

    # pylint:disable=unused-argument
    class Phase(metaclass=getMetaclass(debug, log)):
    class Phase(with_metaclass(getMetaclass(debug, log))):
        """Base class for helper object that implements each phase of processing
        """
        __slots__ = ("parser", "tree", "__startTagCache", "__endTagCache")

@ -1,5 +1,5 @@
from __future__ import absolute_import, division, unicode_literals
from bleach.six_shim import text_type
from six import text_type

import re

@ -1,5 +1,5 @@
from __future__ import absolute_import, division, unicode_literals
from bleach.six_shim import text_type
from six import text_type

from ..constants import scopingElements, tableInsertModeElements, namespaces

@ -1,7 +1,7 @@
from __future__ import absolute_import, division, unicode_literals
# pylint:disable=protected-access

from bleach.six_shim import text_type
from six import text_type

import re

@ -28,7 +28,7 @@ from . import etree as etree_builders
from .. import _ihatexml

import lxml.etree as etree
from bleach.six_shim import PY3, binary_type
from six import PY3, binary_type


fullTree = True

@ -3,7 +3,7 @@ from __future__ import absolute_import, division, unicode_literals
from collections import OrderedDict
import re

from bleach.six_shim import string_types
from six import string_types

from . import base
from .._utils import moduleFactoryFactory

@ -1,5 +1,5 @@
from __future__ import absolute_import, division, unicode_literals
from bleach.six_shim import text_type
from six import text_type

from collections import OrderedDict

@ -7,12 +7,8 @@ set -o pipefail
BLEACH_VENDOR_DIR=${BLEACH_VENDOR_DIR:-"."}
DEST=${DEST:-"."}

# Install with no dependencies
pip install --no-binary all --no-compile --no-deps -r "${BLEACH_VENDOR_DIR}/vendor.txt" --target "${DEST}"

# Apply patches
(cd "${DEST}" && patch -p2 < 01_html5lib_six.patch)

# install Python 3.6.14 urllib.urlparse for #536
curl --proto '=https' --tlsv1.2 -o "${DEST}/parse.py" https://raw.githubusercontent.com/python/cpython/v3.6.14/Lib/urllib/parse.py
(cd "${DEST}" && sha256sum parse.py > parse.py.SHA256SUM)

@ -396,25 +396,16 @@ class BleachHTMLTokenizer(HTMLTokenizer):
            # name that abruptly ends, but we should treat that like
            # character data
            yield {"type": TAG_TOKEN_TYPE_CHARACTERS, "data": self.stream.get_tag()}

        elif last_error_token["data"] in (
            "duplicate-attribute",
            "eof-in-attribute-name",
            "eof-in-attribute-value-no-quotes",
            "expected-end-of-tag-but-got-eof",
        ):
            # Handle the case where the text being parsed ends with <
            # followed by characters and then space and then:
            #
            # * more characters
            # * more characters repeated with a space between (e.g. "abc abc")
            # * more characters and then a space and then an EOF (e.g. "abc def ")
            #
            # These cases are treated as a tag name followed by an
            # followed by a series of characters and then space and then
            # more characters. It's treated as a tag name followed by an
            # attribute that abruptly ends, but we should treat that like
            # character data instead.
            # character data.
            yield {"type": TAG_TOKEN_TYPE_CHARACTERS, "data": self.stream.get_tag()}

        else:
            yield last_error_token

@ -1,19 +0,0 @@
"""
Replacement module for what html5lib uses six for.
"""

import http.client
import operator
import urllib


PY3 = True
binary_type = bytes
string_types = (str,)
text_type = str
unichr = chr
viewkeys = operator.methodcaller("keys")

http_client = http.client
urllib = urllib
urllib_parse = urllib.parse

@ -1,4 +1,4 @@
from .core import contents, where

__all__ = ["contents", "where"]
__version__ = "2024.08.30"
__version__ = "2024.02.02"

@ -3485,6 +3485,46 @@ DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ
+RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A=
-----END CERTIFICATE-----

# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH
# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH
# Label: "GLOBALTRUST 2020"
# Serial: 109160994242082918454945253
# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8
# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2
# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a
-----BEGIN CERTIFICATE-----
MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG
A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw
FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx
MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u
aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq
hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b
RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z
YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3
QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw
yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+
BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ
SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH
r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0
4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me
dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw
q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2
nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu
H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA
VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC
XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd
6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf
+I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi
kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7
wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB
TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C
MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn
4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I
aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy
qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg==
-----END CERTIFICATE-----

# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz
# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz
# Label: "ANF Secure Server Root CA"

@ -4772,158 +4812,3 @@ X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q
ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm
dTdmQRCsu/WU48IxK63nI1bMNSWSs1A=
-----END CERTIFICATE-----

# Issuer: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA
# Subject: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA
# Label: "FIRMAPROFESIONAL CA ROOT-A WEB"
# Serial: 65916896770016886708751106294915943533
# MD5 Fingerprint: 82:b2:ad:45:00:82:b0:66:63:f8:5f:c3:67:4e:ce:a3
# SHA1 Fingerprint: a8:31:11:74:a6:14:15:0d:ca:77:dd:0e:e4:0c:5d:58:fc:a0:72:a5
# SHA256 Fingerprint: be:f2:56:da:f2:6e:9c:69:bd:ec:16:02:35:97:98:f3:ca:f7:18:21:a0:3e:01:82:57:c5:3c:65:61:7f:3d:4a
-----BEGIN CERTIFICATE-----
MIICejCCAgCgAwIBAgIQMZch7a+JQn81QYehZ1ZMbTAKBggqhkjOPQQDAzBuMQsw
CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE
YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB
IFJPT1QtQSBXRUIwHhcNMjIwNDA2MDkwMTM2WhcNNDcwMzMxMDkwMTM2WjBuMQsw
CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE
YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB
IFJPT1QtQSBXRUIwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARHU+osEaR3xyrq89Zf
e9MEkVz6iMYiuYMQYneEMy3pA4jU4DP37XcsSmDq5G+tbbT4TIqk5B/K6k84Si6C
cyvHZpsKjECcfIr28jlgst7L7Ljkb+qbXbdTkBgyVcUgt5SjYzBhMA8GA1UdEwEB
/wQFMAMBAf8wHwYDVR0jBBgwFoAUk+FDY1w8ndYn81LsF7Kpryz3dvgwHQYDVR0O
BBYEFJPhQ2NcPJ3WJ/NS7Beyqa8s93b4MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjO
PQQDAwNoADBlAjAdfKR7w4l1M+E7qUW/Runpod3JIha3RxEL2Jq68cgLcFBTApFw
hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG
XSaQpYXFuXqUPoeovQA=
-----END CERTIFICATE-----

# Issuer: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA
# Subject: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA
# Label: "TWCA CYBER Root CA"
# Serial: 85076849864375384482682434040119489222
# MD5 Fingerprint: 0b:33:a0:97:52:95:d4:a9:fd:bb:db:6e:a3:55:5b:51
# SHA1 Fingerprint: f6:b1:1c:1a:83:38:e9:7b:db:b3:a8:c8:33:24:e0:2d:9c:7f:26:66
# SHA256 Fingerprint: 3f:63:bb:28:14:be:17:4e:c8:b6:43:9c:f0:8d:6d:56:f0:b7:c4:05:88:3a:56:48:a3:34:42:4d:6b:3e:c5:58
-----BEGIN CERTIFICATE-----
MIIFjTCCA3WgAwIBAgIQQAE0jMIAAAAAAAAAATzyxjANBgkqhkiG9w0BAQwFADBQ
MQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FOLUNBMRAwDgYDVQQLEwdSb290
IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3QgQ0EwHhcNMjIxMTIyMDY1NDI5
WhcNNDcxMTIyMTU1OTU5WjBQMQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FO
LUNBMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3Qg
Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDG+Moe2Qkgfh1sTs6P
40czRJzHyWmqOlt47nDSkvgEs1JSHWdyKKHfi12VCv7qze33Kc7wb3+szT3vsxxF
avcokPFhV8UMxKNQXd7UtcsZyoC5dc4pztKFIuwCY8xEMCDa6pFbVuYdHNWdZsc/
34bKS1PE2Y2yHer43CdTo0fhYcx9tbD47nORxc5zb87uEB8aBs/pJ2DFTxnk684i
JkXXYJndzk834H/nY62wuFm40AZoNWDTNq5xQwTxaWV4fPMf88oon1oglWa0zbfu
j3ikRRjpJi+NmykosaS3Om251Bw4ckVYsV7r8Cibt4LK/c/WMw+f+5eesRycnupf
Xtuq3VTpMCEobY5583WSjCb+3MX2w7DfRFlDo7YDKPYIMKoNM+HvnKkHIuNZW0CP
2oi3aQiotyMuRAlZN1vH4xfyIutuOVLF3lSnmMlLIJXcRolftBL5hSmO68gnFSDA
S9TMfAxsNAwmmyYxpjyn9tnQS6Jk/zuZQXLB4HCX8SS7K8R0IrGsayIyJNN4KsDA
oS/xUgXJP+92ZuJF2A09rZXIx4kmyA+upwMu+8Ff+iDhcK2wZSA3M2Cw1a/XDBzC
kHDXShi8fgGwsOsVHkQGzaRP6AzRwyAQ4VRlnrZR0Bp2a0JaWHY06rc3Ga4udfmW
5cFZ95RXKSWNOkyrTZpB0F8mAwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYD
VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSdhWEUfMFib5do5E83QOGt4A1WNzAd
BgNVHQ4EFgQUnYVhFHzBYm+XaORPN0DhreANVjcwDQYJKoZIhvcNAQEMBQADggIB
AGSPesRiDrWIzLjHhg6hShbNcAu3p4ULs3a2D6f/CIsLJc+o1IN1KriWiLb73y0t
tGlTITVX1olNc79pj3CjYcya2x6a4CD4bLubIp1dhDGaLIrdaqHXKGnK/nZVekZn
68xDiBaiA9a5F/gZbG0jAn/xX9AKKSM70aoK7akXJlQKTcKlTfjF/biBzysseKNn
TKkHmvPfXvt89YnNdJdhEGoHK4Fa0o635yDRIG4kqIQnoVesqlVYL9zZyvpoBJ7t
RCT5dEA7IzOrg1oYJkK2bVS1FmAwbLGg+LhBoF1JSdJlBTrq/p1hvIbZv97Tujqx
f36SNI7JAG7cmL3c7IAFrQI932XtCwP39xaEBDG6k5TY8hL4iuO/Qq+n1M0RFxbI
Qh0UqEL20kCGoE8jypZFVmAGzbdVAaYBlGX+bgUJurSkquLvWL69J1bY73NxW0Qz
8ppy6rBePm6pUlvscG21h483XjyMnM7k8M4MZ0HMzvaAq07MTFb1wWFZk7Q+ptq4
NxKfKjLji7gh7MMrZQzvIt6IKTtM1/r+t+FHvpw+PoP7UV31aPcuIYXcv/Fa4nzX
xeSDwWrruoBa3lwtcHb4yOWHh8qgnaHlIhInD0Q9HWzq1MKLL295q39QpsQZp6F6
t5b5wR9iWqJDB0BeJsas7a5wFsWqynKKTbDPAYsDP27X
-----END CERTIFICATE-----

# Issuer: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd.
# Subject: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd.
# Label: "SecureSign Root CA12"
# Serial: 587887345431707215246142177076162061960426065942
# MD5 Fingerprint: c6:89:ca:64:42:9b:62:08:49:0b:1e:7f:e9:07:3d:e8
# SHA1 Fingerprint: 7a:22:1e:3d:de:1b:06:ac:9e:c8:47:70:16:8e:3c:e5:f7:6b:06:f4
# SHA256 Fingerprint: 3f:03:4b:b5:70:4d:44:b2:d0:85:45:a0:20:57:de:93:eb:f3:90:5f:ce:72:1a:cb:c7:30:c0:6d:da:ee:90:4e
-----BEGIN CERTIFICATE-----
MIIDcjCCAlqgAwIBAgIUZvnHwa/swlG07VOX5uaCwysckBYwDQYJKoZIhvcNAQEL
BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u
LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExMjAeFw0yMDA0MDgw
NTM2NDZaFw00MDA0MDgwNTM2NDZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD
eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS
b290IENBMTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6OcE3emhF
KxS06+QT61d1I02PJC0W6K6OyX2kVzsqdiUzg2zqMoqUm048luT9Ub+ZyZN+v/mt
p7JIKwccJ/VMvHASd6SFVLX9kHrko+RRWAPNEHl57muTH2SOa2SroxPjcf59q5zd
J1M3s6oYwlkm7Fsf0uZlfO+TvdhYXAvA42VvPMfKWeP+bl+sg779XSVOKik71gur
FzJ4pOE+lEa+Ym6b3kaosRbnhW70CEBFEaCeVESE99g2zvVQR9wsMJvuwPWW0v4J
hscGWa5Pro4RmHvzC1KqYiaqId+OJTN5lxZJjfU+1UefNzFJM3IFTQy2VYzxV4+K
h9GtxRESOaCtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
AgEGMB0GA1UdDgQWBBRXNPN0zwRL1SXm8UC2LEzZLemgrTANBgkqhkiG9w0BAQsF
AAOCAQEAPrvbFxbS8hQBICw4g0utvsqFepq2m2um4fylOqyttCg6r9cBg0krY6Ld
mmQOmFxv3Y67ilQiLUoT865AQ9tPkbeGGuwAtEGBpE/6aouIs3YIcipJQMPTw4WJ
mBClnW8Zt7vPemVV2zfrPIpyMpcemik+rY3moxtt9XUa5rBouVui7mlHJzWhhpmA
8zNL4WukJsPvdFlseqJkth5Ew1DgDzk9qTPxpfPSvWKErI4cqc1avTc7bgoitPQV
55FYxTpE05Uo2cBl6XLK0A+9H7MV2anjpEcJnuDLN/v9vZfVvhgaaaI5gdka9at/
yOPiZwud9AzqVN/Ssq+xIvEg37xEHA==
-----END CERTIFICATE-----

# Issuer: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd.
# Subject: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd.
# Label: "SecureSign Root CA14"
# Serial: 575790784512929437950770173562378038616896959179
# MD5 Fingerprint: 71:0d:72:fa:92:19:65:5e:89:04:ac:16:33:f0:bc:d5
# SHA1 Fingerprint: dd:50:c0:f7:79:b3:64:2e:74:a2:b8:9d:9f:d3:40:dd:bb:f0:f2:4f
# SHA256 Fingerprint: 4b:00:9c:10:34:49:4f:9a:b5:6b:ba:3b:a1:d6:27:31:fc:4d:20:d8:95:5a:dc:ec:10:a9:25:60:72:61:e3:38
-----BEGIN CERTIFICATE-----
MIIFcjCCA1qgAwIBAgIUZNtaDCBO6Ncpd8hQJ6JaJ90t8sswDQYJKoZIhvcNAQEM
BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u
LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNDAeFw0yMDA0MDgw
NzA2MTlaFw00NTA0MDgwNzA2MTlaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD
eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS
b290IENBMTQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDF0nqh1oq/
FjHQmNE6lPxauG4iwWL3pwon71D2LrGeaBLwbCRjOfHw3xDG3rdSINVSW0KZnvOg
vlIfX8xnbacuUKLBl422+JX1sLrcneC+y9/3OPJH9aaakpUqYllQC6KxNedlsmGy
6pJxaeQp8E+BgQQ8sqVb1MWoWWd7VRxJq3qdwudzTe/NCcLEVxLbAQ4jeQkHO6Lo
/IrPj8BGJJw4J+CDnRugv3gVEOuGTgpa/d/aLIJ+7sr2KeH6caH3iGicnPCNvg9J
kdjqOvn90Ghx2+m1K06Ckm9mH+Dw3EzsytHqunQG+bOEkJTRX45zGRBdAuVwpcAQ
0BB8b8VYSbSwbprafZX1zNoCr7gsfXmPvkPx+SgojQlD+Ajda8iLLCSxjVIHvXib
y8posqTdDEx5YMaZ0ZPxMBoH064iwurO8YQJzOAUbn8/ftKChazcqRZOhaBgy/ac
18izju3Gm5h1DVXoX+WViwKkrkMpKBGk5hIwAUt1ax5mnXkvpXYvHUC0bcl9eQjs
0Wq2XSqypWa9a4X0dFbD9ed1Uigspf9mR6XU/v6eVL9lfgHWMI+lNpyiUBzuOIAB
SMbHdPTGrMNASRZhdCyvjG817XsYAFs2PJxQDcqSMxDxJklt33UkN4Ii1+iW/RVL
ApY+B3KVfqs9TC7XyvDf4Fg/LS8EmjijAQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUBpOjCl4oaTeqYR3r6/wtbyPk
86AwDQYJKoZIhvcNAQEMBQADggIBAJaAcgkGfpzMkwQWu6A6jZJOtxEaCnFxEM0E
rX+lRVAQZk5KQaID2RFPeje5S+LGjzJmdSX7684/AykmjbgWHfYfM25I5uj4V7Ib
ed87hwriZLoAymzvftAj63iP/2SbNDefNWWipAA9EiOWWF3KY4fGoweITedpdopT
zfFP7ELyk+OZpDc8h7hi2/DsHzc/N19DzFGdtfCXwreFamgLRB7lUe6TzktuhsHS
DCRZNhqfLJGP4xjblJUK7ZGqDpncllPjYYPGFrojutzdfhrGe0K22VoF3Jpf1d+4
2kd92jjbrDnVHmtsKheMYc2xbXIBw8MgAGJoFjHVdqqGuw6qnsb58Nn4DSEC5MUo
FlkRudlpcyqSeLiSV5sI8jrlL5WwWLdrIBRtFO8KvH7YVdiI2i/6GaX7i+B/OfVy
K4XELKzvGUWSTLNhB9xNH27SgRNcmvMSZ4PPmz+Ln52kuaiWA3rF7iDeM9ovnhp6
dB7h7sxaOgTdsxoEqBRjrLdHEoOabPXm6RUVkRqEGQ6UROcSjiVbgGcZ3GOTEAtl
Lor6CZpO2oYofaphNdgOpygau1LgePhsumywbrmHXumZNTfxPWQrqaA0k89jL9WB
365jJ6UeTo3cKXhZ+PmhIIynJkBugnLNeLLIjzwec+fBH7/PzqUqm9tEZDKgu39c
JRNItX+S
-----END CERTIFICATE-----

# Issuer: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd.
# Subject: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd.
# Label: "SecureSign Root CA15"
# Serial: 126083514594751269499665114766174399806381178503
# MD5 Fingerprint: 13:30:fc:c4:62:a6:a9:de:b5:c1:68:af:b5:d2:31:47
# SHA1 Fingerprint: cb:ba:83:c8:c1:5a:5d:f1:f9:73:6f:ca:d7:ef:28:13:06:4a:07:7d
# SHA256 Fingerprint: e7:78:f0:f0:95:fe:84:37:29:cd:1a:00:82:17:9e:53:14:a9:c2:91:44:28:05:e1:fb:1d:8f:b6:b8:88:6c:3a
-----BEGIN CERTIFICATE-----
MIICIzCCAamgAwIBAgIUFhXHw9hJp75pDIqI7fBw+d23PocwCgYIKoZIzj0EAwMw
UTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28uLCBM
dGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNTAeFw0yMDA0MDgwODMy
NTZaFw00NTA0MDgwODMyNTZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpDeWJl
cnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBSb290
IENBMTUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQLUHSNZDKZmbPSYAi4Io5GdCx4
wCtELW1fHcmuS1Iggz24FG1Th2CeX2yF2wYUleDHKP+dX+Sq8bOLbe1PL0vJSpSR
ZHX+AezB2Ot6lHhWGENfa4HL9rzatAy2KZMIaY+jQjBAMA8GA1UdEwEB/wQFMAMB
Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT
9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp
4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6
bkU6iYAZezKYVWOr62Nuk22rGwlgMU4=
-----END CERTIFICATE-----
@ -159,8 +159,6 @@ def from_bytes(
|
|||
|
||||
results: CharsetMatches = CharsetMatches()
|
||||
|
||||
early_stop_results: CharsetMatches = CharsetMatches()
|
||||
|
||||
sig_encoding, sig_payload = identify_sig_or_bom(sequences)
|
||||
|
||||
if sig_encoding is not None:
|
||||
|
@ -223,20 +221,16 @@ def from_bytes(
|
|||
try:
|
||||
if is_too_large_sequence and is_multi_byte_decoder is False:
|
||||
str(
|
||||
(
|
||||
sequences[: int(50e4)]
|
||||
if strip_sig_or_bom is False
|
||||
else sequences[len(sig_payload) : int(50e4)]
|
||||
),
|
||||
else sequences[len(sig_payload) : int(50e4)],
|
||||
encoding=encoding_iana,
|
||||
)
|
||||
else:
|
||||
decoded_payload = str(
|
||||
(
|
||||
sequences
|
||||
if strip_sig_or_bom is False
|
||||
else sequences[len(sig_payload) :]
|
||||
),
|
||||
else sequences[len(sig_payload) :],
|
||||
encoding=encoding_iana,
|
||||
)
|
||||
except (UnicodeDecodeError, LookupError) as e:
|
||||
|
@ -373,13 +367,7 @@ def from_bytes(
|
|||
and not lazy_str_hard_failure
|
||||
):
|
||||
fallback_entry = CharsetMatch(
|
||||
sequences,
|
||||
encoding_iana,
|
||||
threshold,
|
||||
False,
|
||||
[],
|
||||
decoded_payload,
|
||||
preemptive_declaration=specified_encoding,
|
||||
sequences, encoding_iana, threshold, False, [], decoded_payload
|
||||
)
|
||||
if encoding_iana == specified_encoding:
|
||||
fallback_specified = fallback_entry
|
||||
|
@ -433,58 +421,28 @@ def from_bytes(
|
|||
),
|
||||
)
|
||||
|
||||
current_match = CharsetMatch(
|
||||
results.append(
|
||||
CharsetMatch(
|
||||
sequences,
|
||||
encoding_iana,
|
||||
mean_mess_ratio,
|
||||
bom_or_sig_available,
|
||||
cd_ratios_merged,
|
||||
(
|
||||
decoded_payload
|
||||
if (
|
||||
is_too_large_sequence is False
|
||||
or encoding_iana in [specified_encoding, "ascii", "utf_8"]
|
||||
decoded_payload,
|
||||
)
|
||||
else None
|
||||
),
|
||||
preemptive_declaration=specified_encoding,
|
||||
)
|
||||
|
||||
results.append(current_match)
|
||||
|
||||
if (
|
||||
encoding_iana in [specified_encoding, "ascii", "utf_8"]
|
||||
and mean_mess_ratio < 0.1
|
||||
):
|
||||
# If md says nothing to worry about, then... stop immediately!
|
||||
if mean_mess_ratio == 0.0:
|
||||
logger.debug(
|
||||
"Encoding detection: %s is most likely the one.",
|
||||
current_match.encoding,
|
||||
"Encoding detection: %s is most likely the one.", encoding_iana
|
||||
)
|
||||
if explain:
|
||||
logger.removeHandler(explain_handler)
|
||||
logger.setLevel(previous_logger_level)
|
||||
return CharsetMatches([current_match])
|
||||
|
||||
early_stop_results.append(current_match)
|
||||
|
||||
if (
|
||||
len(early_stop_results)
|
||||
and (specified_encoding is None or specified_encoding in tested)
|
||||
and "ascii" in tested
|
||||
and "utf_8" in tested
|
||||
):
|
||||
probable_result: CharsetMatch = early_stop_results.best() # type: ignore[assignment]
|
||||
logger.debug(
|
||||
"Encoding detection: %s is most likely the one.",
|
||||
probable_result.encoding,
|
||||
)
|
||||
if explain:
|
||||
logger.removeHandler(explain_handler)
|
||||
logger.setLevel(previous_logger_level)
|
||||
|
||||
return CharsetMatches([probable_result])
|
||||
return CharsetMatches([results[encoding_iana]])
|
||||
|
||||
if encoding_iana == sig_encoding:
|
||||
logger.debug(
|
||||
|
|
|
@ -109,14 +109,6 @@ def cli_detect(argv: Optional[List[str]] = None) -> int:
|
|||
dest="force",
|
||||
help="Replace file without asking if you are sure, use this flag with caution.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-i",
|
||||
"--no-preemptive",
|
||||
action="store_true",
|
||||
default=False,
|
||||
dest="no_preemptive",
|
||||
help="Disable looking at a charset declaration to hint the detector.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-t",
|
||||
"--threshold",
|
||||
|
@ -141,35 +133,21 @@ def cli_detect(argv: Optional[List[str]] = None) -> int:
|
|||
args = parser.parse_args(argv)
|
||||
|
||||
if args.replace is True and args.normalize is False:
|
||||
if args.files:
|
||||
for my_file in args.files:
|
||||
my_file.close()
|
||||
print("Use --replace in addition of --normalize only.", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
if args.force is True and args.replace is False:
|
||||
if args.files:
|
||||
for my_file in args.files:
|
||||
my_file.close()
|
||||
print("Use --force in addition of --replace only.", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
if args.threshold < 0.0 or args.threshold > 1.0:
|
||||
if args.files:
|
||||
for my_file in args.files:
|
||||
my_file.close()
|
||||
print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
x_ = []
|
||||
|
||||
for my_file in args.files:
|
||||
matches = from_fp(
|
||||
my_file,
|
||||
threshold=args.threshold,
|
||||
explain=args.verbose,
|
||||
preemptive_behaviour=args.no_preemptive is False,
|
||||
)
|
||||
matches = from_fp(my_file, threshold=args.threshold, explain=args.verbose)
|
||||
|
||||
best_guess = matches.best()
|
||||
|
||||
|
@ -177,11 +155,9 @@ def cli_detect(argv: Optional[List[str]] = None) -> int:
|
|||
print(
|
||||
'Unable to identify originating encoding for "{}". {}'.format(
|
||||
my_file.name,
|
||||
(
|
||||
"Maybe try increasing maximum amount of chaos."
|
||||
if args.threshold < 1.0
|
||||
else ""
|
||||
),
|
||||
else "",
|
||||
),
|
||||
file=sys.stderr,
|
||||
)
|
||||
|
@ -282,8 +258,8 @@ def cli_detect(argv: Optional[List[str]] = None) -> int:
|
|||
try:
|
||||
x_[0].unicode_path = join(dir_path, ".".join(o_))
|
||||
|
||||
with open(x_[0].unicode_path, "wb") as fp:
|
||||
fp.write(best_guess.output())
|
||||
with open(x_[0].unicode_path, "w", encoding="utf-8") as fp:
|
||||
fp.write(str(best_guess))
|
||||
except IOError as e:
|
||||
print(str(e), file=sys.stderr)
|
||||
if my_file.closed is False:
|
||||
|
|
|
@ -544,8 +544,6 @@ COMMON_SAFE_ASCII_CHARACTERS: Set[str] = {
|
|||
"|",
|
||||
'"',
|
||||
"-",
|
||||
"(",
|
||||
")",
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@@ -1,24 +1,13 @@
from __future__ import annotations

from typing import TYPE_CHECKING, Any, Optional
from typing import Any, Dict, Optional, Union
from warnings import warn

from .api import from_bytes
from .constant import CHARDET_CORRESPONDENCE

# TODO: remove this check when dropping Python 3.7 support
if TYPE_CHECKING:
    from typing_extensions import TypedDict

    class ResultDict(TypedDict):
        encoding: Optional[str]
        language: str
        confidence: Optional[float]


def detect(
    byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any
) -> ResultDict:
) -> Dict[str, Optional[Union[str, float]]]:
    """
    chardet legacy method
    Detect the encoding of the given byte string. It should be mostly backward-compatible.

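For orientation, the `detect()` shim above mirrors chardet's interface; a hedged usage sketch (the sample text is illustrative):

# Hedged sketch of the chardet-compatible legacy API whose return annotation
# the hunk above changes; the dict carries the ResultDict keys.
from charset_normalizer import detect

result = detect('Déjà vu, война и мир'.encode('utf_8'))
print(result['encoding'], result['language'], result['confidence'])
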
@@ -236,7 +236,7 @@ class SuspiciousRange(MessDetectorPlugin):

    @property
    def ratio(self) -> float:
        if self._character_count <= 13:
        if self._character_count <= 24:
            return 0.0

        ratio_of_suspicious_range_usage: float = (
@@ -260,7 +260,6 @@ class SuperWeirdWordPlugin(MessDetectorPlugin):

        self._buffer: str = ""
        self._buffer_accent_count: int = 0
        self._buffer_glyph_count: int = 0

    def eligible(self, character: str) -> bool:
        return True
@@ -280,14 +279,6 @@ class SuperWeirdWordPlugin(MessDetectorPlugin):
            and is_thai(character) is False
        ):
            self._foreign_long_watch = True
            if (
                is_cjk(character)
                or is_hangul(character)
                or is_katakana(character)
                or is_hiragana(character)
                or is_thai(character)
            ):
                self._buffer_glyph_count += 1
            return
        if not self._buffer:
            return
@@ -300,20 +291,17 @@ class SuperWeirdWordPlugin(MessDetectorPlugin):
            self._character_count += buffer_length

            if buffer_length >= 4:
                if self._buffer_accent_count / buffer_length >= 0.5:
                if self._buffer_accent_count / buffer_length > 0.34:
                    self._is_current_word_bad = True
                # Word/Buffer ending with an upper case accentuated letter are so rare,
                # that we will consider them all as suspicious. Same weight as foreign_long suspicious.
                elif (
                if (
                    is_accentuated(self._buffer[-1])
                    and self._buffer[-1].isupper()
                    and all(_.isupper() for _ in self._buffer) is False
                ):
                    self._foreign_long_count += 1
                    self._is_current_word_bad = True
                elif self._buffer_glyph_count == 1:
                    self._is_current_word_bad = True
                    self._foreign_long_count += 1
            if buffer_length >= 24 and self._foreign_long_watch:
                camel_case_dst = [
                    i
@@ -337,7 +325,6 @@ class SuperWeirdWordPlugin(MessDetectorPlugin):
            self._foreign_long_watch = False
            self._buffer = ""
            self._buffer_accent_count = 0
            self._buffer_glyph_count = 0
        elif (
            character not in {"<", ">", "-", "=", "~", "|", "_"}
            and character.isdigit() is False

@@ -1,10 +1,9 @@
from encodings.aliases import aliases
from hashlib import sha256
from json import dumps
from re import sub
from typing import Any, Dict, Iterator, List, Optional, Tuple, Union

from .constant import RE_POSSIBLE_ENCODING_INDICATION, TOO_BIG_SEQUENCE
from .constant import TOO_BIG_SEQUENCE
from .utils import iana_name, is_multi_byte_encoding, unicode_range


@@ -17,7 +16,6 @@ class CharsetMatch:
        has_sig_or_bom: bool,
        languages: "CoherenceMatches",
        decoded_payload: Optional[str] = None,
        preemptive_declaration: Optional[str] = None,
    ):
        self._payload: bytes = payload

@@ -35,13 +33,13 @@ class CharsetMatch:

        self._string: Optional[str] = decoded_payload

        self._preemptive_declaration: Optional[str] = preemptive_declaration

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, CharsetMatch):
            if isinstance(other, str):
                return iana_name(other) == self.encoding
            return False
            raise TypeError(
                "__eq__ cannot be invoked on {} and {}.".format(
                    str(other.__class__), str(self.__class__)
                )
            )
        return self.encoding == other.encoding and self.fingerprint == other.fingerprint

    def __lt__(self, other: object) -> bool:
@@ -212,24 +210,7 @@ class CharsetMatch:
        """
        if self._output_encoding is None or self._output_encoding != encoding:
            self._output_encoding = encoding
            decoded_string = str(self)
            if (
                self._preemptive_declaration is not None
                and self._preemptive_declaration.lower()
                not in ["utf-8", "utf8", "utf_8"]
            ):
                patched_header = sub(
                    RE_POSSIBLE_ENCODING_INDICATION,
                    lambda m: m.string[m.span()[0] : m.span()[1]].replace(
                        m.groups()[0], iana_name(self._output_encoding)  # type: ignore[arg-type]
                    ),
                    decoded_string[:8192],
                    1,
                )

                decoded_string = patched_header + decoded_string[8192:]

            self._output_payload = decoded_string.encode(encoding, "replace")
            self._output_payload = str(self).encode(encoding, "replace")

        return self._output_payload  # type: ignore

@@ -285,7 +266,7 @@ class CharsetMatches:
                )
            )
        # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage)
        if len(item.raw) < TOO_BIG_SEQUENCE:
        if len(item.raw) <= TOO_BIG_SEQUENCE:
            for match in self._results:
                if match.fingerprint == item.fingerprint and match.chaos == item.chaos:
                    match.add_submatch(item)

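As context for the CharsetMatch/CharsetMatches hunks above, a hedged sketch of the public entry point that produces these objects (the payload is illustrative):

# Hedged sketch: from_bytes() yields a CharsetMatches container; .best()
# picks a single CharsetMatch (or None), as used throughout this diff.
from charset_normalizer import from_bytes

best_guess = from_bytes('Plex статистика'.encode('cp1251')).best()
if best_guess is not None:
    print(best_guess.encoding, best_guess.fingerprint, str(best_guess))
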
@@ -2,5 +2,5 @@
Expose version
"""

__version__ = "3.4.0"
__version__ = "3.3.2"
VERSION = __version__.split(".")

@@ -292,20 +292,7 @@ class ConnectionManager:
        if self.server.ssl_adapter is not None:
            try:
                s, ssl_env = self.server.ssl_adapter.wrap(s)
            except errors.FatalSSLAlert as tls_connection_drop_error:
                self.server.error_log(
                    f'Client {addr !s} lost — peer dropped the TLS '
                    'connection suddenly, during handshake: '
                    f'{tls_connection_drop_error !s}',
                )
                return
            except errors.NoSSLError as http_over_https_err:
                self.server.error_log(
                    f'Client {addr !s} attempted to speak plain HTTP into '
                    'a TCP connection configured for TLS-only traffic — '
                    'trying to send back a plain HTTP error response: '
                    f'{http_over_https_err !s}',
                )
            except errors.NoSSLError:
                msg = (
                    'The client sent a plain HTTP request, but '
                    'this server only speaks HTTPS on this port.'
@@ -324,6 +311,8 @@ class ConnectionManager:
            if ex.args[0] not in errors.socket_errors_to_ignore:
                raise
            return
        if not s:
            return
        mf = self.server.ssl_adapter.makefile
        # Re-apply our timeout since we may have a new socket object
        if hasattr(s, 'settimeout'):

@@ -157,7 +157,7 @@ QUOTED_SLASH = b'%2F'
QUOTED_SLASH_REGEX = re.compile(b''.join((b'(?i)', QUOTED_SLASH)))


_STOPPING_FOR_INTERRUPT = Exception()  # sentinel used during shutdown
_STOPPING_FOR_INTERRUPT = object()  # sentinel used during shutdown


comma_separated_headers = [
@@ -209,11 +209,7 @@ class HeaderReader:
            if not line.endswith(CRLF):
                raise ValueError('HTTP requires CRLF terminators')

            if line[:1] in (SPACE, TAB):
                # NOTE: `type(line[0]) is int` and `type(line[:1]) is bytes`.
                # NOTE: The former causes the following warning:
                # NOTE: `BytesWarning('Comparison between bytes and int')`
                # NOTE: The latter is equivalent and does not.
            if line[0] in (SPACE, TAB):
                # It's a continuation line.
                v = line.strip()
            else:

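The `line[:1]` versus `line[0]` change above rests on a bytes-indexing subtlety; a small self-contained illustration:

# Indexing bytes yields an int while slicing yields bytes, so only the
# slice compares cleanly against byte constants without provoking
# BytesWarning('Comparison between bytes and int') under `python -bb`.
line = b' continued-header-value\r\n'
SPACE, TAB = b' ', b'\t'

assert isinstance(line[0], int)     # 32, the code point of the space
assert isinstance(line[:1], bytes)  # b' '
assert line[:1] in (SPACE, TAB)     # the comparison the parser performs
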
@@ -1729,16 +1725,16 @@ class HTTPServer:
        """Run the server forever, and stop it cleanly on exit."""
        try:
            self.start()
        except KeyboardInterrupt as kb_intr_exc:
            underlying_interrupt = self.interrupt
            if not underlying_interrupt:
                self.interrupt = kb_intr_exc
            raise kb_intr_exc from underlying_interrupt
        except SystemExit as sys_exit_exc:
            underlying_interrupt = self.interrupt
            if not underlying_interrupt:
                self.interrupt = sys_exit_exc
            raise sys_exit_exc from underlying_interrupt
        except (KeyboardInterrupt, IOError):
            # The time.sleep call might raise
            # "IOError: [Errno 4] Interrupted function call" on KBInt.
            self.error_log('Keyboard Interrupt: shutting down')
            self.stop()
            raise
        except SystemExit:
            self.error_log('SystemExit raised: shutting down')
            self.stop()
            raise

    def prepare(self):  # noqa: C901  # FIXME
        """Prepare server to serving requests.
@@ -2115,13 +2111,6 @@ class HTTPServer:
        has completed.
        """
        self._interrupt = _STOPPING_FOR_INTERRUPT

        if isinstance(interrupt, KeyboardInterrupt):
            self.error_log('Keyboard Interrupt: shutting down')

        if isinstance(interrupt, SystemExit):
            self.error_log('SystemExit raised: shutting down')

        self.stop()
        self._interrupt = interrupt


@@ -27,9 +27,12 @@ except ImportError:

from . import Adapter
from .. import errors
from .._compat import IS_ABOVE_OPENSSL10
from ..makefile import StreamReader, StreamWriter
from ..server import HTTPServer

generic_socket_error = OSError


def _assert_ssl_exc_contains(exc, *msgs):
    """Check whether SSL exception contains either of messages provided."""
@@ -262,35 +265,62 @@ class BuiltinSSLAdapter(Adapter):

    def wrap(self, sock):
        """Wrap and return the given socket, plus WSGI environ entries."""
        EMPTY_RESULT = None, {}
        try:
            s = self.context.wrap_socket(
                sock, do_handshake_on_connect=True, server_side=True,
            )
        except (
            ssl.SSLEOFError,
            ssl.SSLZeroReturnError,
        ) as tls_connection_drop_error:
            raise errors.FatalSSLAlert(
                *tls_connection_drop_error.args,
            ) from tls_connection_drop_error
        except ssl.SSLError as generic_tls_error:
            peer_speaks_plain_http_over_https = (
                generic_tls_error.errno == ssl.SSL_ERROR_SSL and
                _assert_ssl_exc_contains(generic_tls_error, 'http request')
        except ssl.SSLError as ex:
            if ex.errno == ssl.SSL_ERROR_EOF:
                # This is almost certainly due to the cherrypy engine
                # 'pinging' the socket to assert it's connectable;
                # the 'ping' isn't SSL.
                return EMPTY_RESULT
            elif ex.errno == ssl.SSL_ERROR_SSL:
                if _assert_ssl_exc_contains(ex, 'http request'):
                    # The client is speaking HTTP to an HTTPS server.
                    raise errors.NoSSLError

                # Check if it's one of the known errors
                # Errors that are caught by PyOpenSSL, but thrown by
                # built-in ssl
                _block_errors = (
                    'unknown protocol', 'unknown ca', 'unknown_ca',
                    'unknown error',
                    'https proxy request', 'inappropriate fallback',
                    'wrong version number',
                    'no shared cipher', 'certificate unknown',
                    'ccs received early',
                    'certificate verify failed',  # client cert w/o trusted CA
                    'version too low',  # caused by SSL3 connections
                    'unsupported protocol',  # caused by TLS1 connections
                )
            if peer_speaks_plain_http_over_https:
                reraised_connection_drop_exc_cls = errors.NoSSLError
            else:
                reraised_connection_drop_exc_cls = errors.FatalSSLAlert
                if _assert_ssl_exc_contains(ex, *_block_errors):
                    # Accepted error, let's pass
                    return EMPTY_RESULT
                elif _assert_ssl_exc_contains(ex, 'handshake operation timed out'):
                    # This error is thrown by builtin SSL after a timeout
                    # when client is speaking HTTP to an HTTPS server.
                    # The connection can safely be dropped.
                    return EMPTY_RESULT
            raise
        except generic_socket_error as exc:
            """It is unclear why exactly this happens.

            raise reraised_connection_drop_exc_cls(
                *generic_tls_error.args,
            ) from generic_tls_error
        except OSError as tcp_connection_drop_error:
            raise errors.FatalSSLAlert(
                *tcp_connection_drop_error.args,
            ) from tcp_connection_drop_error
            It's reproducible only with openssl>1.0 and stdlib
            :py:mod:`ssl` wrapper.
            In CherryPy it's triggered by Checker plugin, which connects
            to the app listening to the socket port in TLS mode via plain
            HTTP during startup (from the same process).


            Ref: https://github.com/cherrypy/cherrypy/issues/1618
            """
            is_error0 = exc.args == (0, 'Error')

            if is_error0 and IS_ABOVE_OPENSSL10:
                return EMPTY_RESULT
            raise
        return s, self.get_environ(s)

    def get_environ(self, sock):

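The master-side handlers above translate low-level failures into cheroot's own exception types while chaining the original cause; a minimal, hedged sketch of that pattern (the class here is a local stand-in for `cheroot.errors.FatalSSLAlert`, and `guarded_handshake` is an illustrative name):

# Sketch of "translate and chain": re-raise a low-level error as a
# domain-specific one, keeping the original reachable as __cause__.
class FatalSSLAlert(Exception):
    """Local stand-in for cheroot.errors.FatalSSLAlert (illustrative)."""

def guarded_handshake(do_handshake):
    try:
        do_handshake()
    except OSError as tcp_connection_drop_error:
        raise FatalSSLAlert(
            *tcp_connection_drop_error.args,
        ) from tcp_connection_drop_error
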
@@ -150,7 +150,7 @@ class SSLFileobjectMixin:
        return self._safe_call(
            False,
            super(SSLFileobjectMixin, self).sendall,
            *args, **kwargs,
            *args, **kwargs
        )

    def send(self, *args, **kwargs):
@@ -158,7 +158,7 @@ class SSLFileobjectMixin:
        return self._safe_call(
            False,
            super(SSLFileobjectMixin, self).send,
            *args, **kwargs,
            *args, **kwargs
        )


@@ -196,7 +196,6 @@ class SSLConnectionProxyMeta:

        def lock_decorator(method):
            """Create a proxy method for a new class."""

            def proxy_wrapper(self, *args):
                self._lock.acquire()
                try:
@@ -213,7 +212,6 @@ class SSLConnectionProxyMeta:

        def make_property(property_):
            """Create a proxy method for a new class."""

            def proxy_prop_wrapper(self):
                return getattr(self._ssl_conn, property_)
            proxy_prop_wrapper.__name__ = property_

@@ -12,10 +12,7 @@ import pytest
from .._compat import IS_MACOS, IS_WINDOWS  # noqa: WPS436
from ..server import Gateway, HTTPServer
from ..testing import (  # noqa: F401  # pylint: disable=unused-import
    native_server,
    thread_and_wsgi_server,
    thread_and_native_server,
    wsgi_server,
    native_server, wsgi_server,
)
from ..testing import get_server_client

@@ -34,28 +31,6 @@ def http_request_timeout():
    return computed_timeout


@pytest.fixture
# pylint: disable=redefined-outer-name
def wsgi_server_thread(thread_and_wsgi_server):  # noqa: F811
    """Set up and tear down a Cheroot WSGI server instance.

    This exposes the server thread.
    """
    server_thread, _srv = thread_and_wsgi_server
    return server_thread


@pytest.fixture
# pylint: disable=redefined-outer-name
def native_server_thread(thread_and_native_server):  # noqa: F811
    """Set up and tear down a Cheroot HTTP server instance.

    This exposes the server thread.
    """
    server_thread, _srv = thread_and_native_server
    return server_thread


@pytest.fixture
# pylint: disable=redefined-outer-name
def wsgi_server_client(wsgi_server):  # noqa: F811

@@ -1,9 +1,7 @@
"""Tests for TCP connection handling, including proper and timely close."""

import errno
from re import match as _matches_pattern
import socket
import sys
import time
import logging
import traceback as traceback_
@@ -19,7 +17,6 @@ from cheroot._compat import IS_CI, IS_MACOS, IS_PYPY, IS_WINDOWS
import cheroot.server


IS_PY36 = sys.version_info[:2] == (3, 6)
IS_SLOW_ENV = IS_MACOS or IS_WINDOWS


@@ -56,8 +53,7 @@ class Controller(helper.Controller):
                "'POST' != request.method %r" %
                req.environ['REQUEST_METHOD'],
            )
        input_contents = req.environ['wsgi.input'].read().decode('utf-8')
        return f"thanks for '{input_contents !s}'"
        return "thanks for '%s'" % req.environ['wsgi.input'].read()

    def custom_204(req, resp):
        """Render response with status 204."""
@@ -703,275 +699,6 @@ def test_broken_connection_during_tcp_fin(
    assert _close_kernel_socket.exception_leaked is exception_leaks


def test_broken_connection_during_http_communication_fallback(  # noqa: WPS118
        monkeypatch,
        test_client,
        testing_server,
        wsgi_server_thread,
):
    """Test that unhandled internal error cascades into shutdown."""
    def _raise_connection_reset(*_args, **_kwargs):
        raise ConnectionResetError(666)

    def _read_request_line(self):
        monkeypatch.setattr(self.conn.rfile, 'close', _raise_connection_reset)
        monkeypatch.setattr(self.conn.wfile, 'write', _raise_connection_reset)
        _raise_connection_reset()

    monkeypatch.setattr(
        test_client.server_instance.ConnectionClass.RequestHandlerClass,
        'read_request_line',
        _read_request_line,
    )

    test_client.get_connection().send(b'GET / HTTP/1.1')
    wsgi_server_thread.join()  # no extra logs upon server termination

    actual_log_entries = testing_server.error_log.calls[:]
    testing_server.error_log.calls.clear()  # prevent post-test assertions

    expected_log_entries = (
        (logging.WARNING, r'^socket\.error 666$'),
        (
            logging.INFO,
            '^Got a connection error while handling a connection '
            r'from .*:\d{1,5} \(666\)',
        ),
        (
            logging.CRITICAL,
            r'A fatal exception happened\. Setting the server interrupt flag '
            r'to ConnectionResetError\(666,?\) and giving up\.\n\nPlease, '
            'report this on the Cheroot tracker at '
            r'<https://github\.com/cherrypy/cheroot/issues/new/choose>, '
            'providing a full reproducer with as much context and details '
            r'as possible\.$',
        ),
    )

    assert len(actual_log_entries) == len(expected_log_entries)

    for (  # noqa: WPS352
            (expected_log_level, expected_msg_regex),
            (actual_msg, actual_log_level, _tb),
    ) in zip(expected_log_entries, actual_log_entries):
        assert expected_log_level == actual_log_level
        assert _matches_pattern(expected_msg_regex, actual_msg) is not None, (
            f'{actual_msg !r} does not match {expected_msg_regex !r}'
        )


def test_kb_int_from_http_handler(
        test_client,
        testing_server,
        wsgi_server_thread,
):
    """Test that a keyboard interrupt from HTTP handler causes shutdown."""
    def _trigger_kb_intr(_req, _resp):
        raise KeyboardInterrupt('simulated test handler keyboard interrupt')
    testing_server.wsgi_app.handlers['/kb_intr'] = _trigger_kb_intr

    http_conn = test_client.get_connection()
    http_conn.putrequest('GET', '/kb_intr', skip_host=True)
    http_conn.putheader('Host', http_conn.host)
    http_conn.endheaders()
    wsgi_server_thread.join()  # no extra logs upon server termination

    actual_log_entries = testing_server.error_log.calls[:]
    testing_server.error_log.calls.clear()  # prevent post-test assertions

    expected_log_entries = (
        (
            logging.DEBUG,
            '^Got a server shutdown request while handling a connection '
            r'from .*:\d{1,5} \(simulated test handler keyboard interrupt\)$',
        ),
        (
            logging.DEBUG,
            '^Setting the server interrupt flag to KeyboardInterrupt'
            r"\('simulated test handler keyboard interrupt',?\)$",
        ),
        (
            logging.INFO,
            '^Keyboard Interrupt: shutting down$',
        ),
    )

    assert len(actual_log_entries) == len(expected_log_entries)

    for (  # noqa: WPS352
            (expected_log_level, expected_msg_regex),
            (actual_msg, actual_log_level, _tb),
    ) in zip(expected_log_entries, actual_log_entries):
        assert expected_log_level == actual_log_level
        assert _matches_pattern(expected_msg_regex, actual_msg) is not None, (
            f'{actual_msg !r} does not match {expected_msg_regex !r}'
        )


@pytest.mark.xfail(
    IS_CI and IS_PYPY and IS_PY36 and not IS_SLOW_ENV,
    reason='Fails under PyPy 3.6 under Ubuntu 20.04 in CI for unknown reason',
    # NOTE: Actually covers any Linux
    strict=False,
)
def test_unhandled_exception_in_request_handler(
        mocker,
        monkeypatch,
        test_client,
        testing_server,
        wsgi_server_thread,
):
    """Ensure worker threads are resilient to in-handler exceptions."""

    class SillyMistake(BaseException):  # noqa: WPS418, WPS431
        """A simulated crash within an HTTP handler."""

    def _trigger_scary_exc(_req, _resp):
        raise SillyMistake('simulated unhandled exception 💣 in test handler')

    testing_server.wsgi_app.handlers['/scary_exc'] = _trigger_scary_exc

    server_connection_close_spy = mocker.spy(
        test_client.server_instance.ConnectionClass,
        'close',
    )

    http_conn = test_client.get_connection()
    http_conn.putrequest('GET', '/scary_exc', skip_host=True)
    http_conn.putheader('Host', http_conn.host)
    http_conn.endheaders()

    # NOTE: This spy ensures the log entries get recorded before we test
    # NOTE: them and before server shutdown, preserving their order and making
    # NOTE: the log entry presence non-flaky.
    while not server_connection_close_spy.called:  # noqa: WPS328
        pass

    assert len(testing_server.requests._threads) == 10
    while testing_server.requests.idle < 10:  # noqa: WPS328
        pass
    assert len(testing_server.requests._threads) == 10
    testing_server.interrupt = SystemExit('test requesting shutdown')
    assert not testing_server.requests._threads
    wsgi_server_thread.join()  # no extra logs upon server termination

    actual_log_entries = testing_server.error_log.calls[:]
    testing_server.error_log.calls.clear()  # prevent post-test assertions

    expected_log_entries = (
        (
            logging.ERROR,
            '^Unhandled error while processing an incoming connection '
            'SillyMistake'
            r"\('simulated unhandled exception 💣 in test handler',?\)$",
        ),
        (
            logging.INFO,
            '^SystemExit raised: shutting down$',
        ),
    )

    assert len(actual_log_entries) == len(expected_log_entries)

    for (  # noqa: WPS352
            (expected_log_level, expected_msg_regex),
            (actual_msg, actual_log_level, _tb),
    ) in zip(expected_log_entries, actual_log_entries):
        assert expected_log_level == actual_log_level
        assert _matches_pattern(expected_msg_regex, actual_msg) is not None, (
            f'{actual_msg !r} does not match {expected_msg_regex !r}'
        )


@pytest.mark.xfail(
    IS_CI and IS_PYPY and IS_PY36 and not IS_SLOW_ENV,
    reason='Fails under PyPy 3.6 under Ubuntu 20.04 in CI for unknown reason',
    # NOTE: Actually covers any Linux
    strict=False,
)
def test_remains_alive_post_unhandled_exception(
        mocker,
        monkeypatch,
        test_client,
        testing_server,
        wsgi_server_thread,
):
    """Ensure worker threads are resilient to unhandled exceptions."""

    class ScaryCrash(BaseException):  # noqa: WPS418, WPS431
        """A simulated crash during HTTP parsing."""

    _orig_read_request_line = (
        test_client.server_instance.
        ConnectionClass.RequestHandlerClass.
        read_request_line
    )

    def _read_request_line(self):
        _orig_read_request_line(self)
        raise ScaryCrash(666)

    monkeypatch.setattr(
        test_client.server_instance.ConnectionClass.RequestHandlerClass,
        'read_request_line',
        _read_request_line,
    )

    server_connection_close_spy = mocker.spy(
        test_client.server_instance.ConnectionClass,
        'close',
    )

    # NOTE: The initial worker thread count is 10.
    assert len(testing_server.requests._threads) == 10

    test_client.get_connection().send(b'GET / HTTP/1.1')

    # NOTE: This spy ensures the log entries get recorded before we test
    # NOTE: them and before server shutdown, preserving their order and making
    # NOTE: the log entry presence non-flaky.
    while not server_connection_close_spy.called:  # noqa: WPS328
        pass

    # NOTE: This checks for whether there's any crashed threads
    while testing_server.requests.idle < 10:  # noqa: WPS328
        pass
    assert len(testing_server.requests._threads) == 10
    assert all(
        worker_thread.is_alive()
        for worker_thread in testing_server.requests._threads
    )
    testing_server.interrupt = SystemExit('test requesting shutdown')
    assert not testing_server.requests._threads
    wsgi_server_thread.join()  # no extra logs upon server termination

    actual_log_entries = testing_server.error_log.calls[:]
    testing_server.error_log.calls.clear()  # prevent post-test assertions

    expected_log_entries = (
        (
            logging.ERROR,
            '^Unhandled error while processing an incoming connection '
            r'ScaryCrash\(666,?\)$',
        ),
        (
            logging.INFO,
            '^SystemExit raised: shutting down$',
        ),
    )

    assert len(actual_log_entries) == len(expected_log_entries)

    for (  # noqa: WPS352
            (expected_log_level, expected_msg_regex),
            (actual_msg, actual_log_level, _tb),
    ) in zip(expected_log_entries, actual_log_entries):
        assert expected_log_level == actual_log_level
        assert _matches_pattern(expected_msg_regex, actual_msg) is not None, (
            f'{actual_msg !r} does not match {expected_msg_regex !r}'
        )


@pytest.mark.parametrize(
    'timeout_before_headers',
    (
@@ -1190,7 +917,7 @@ def test_100_Continue(test_client):
    status_line, _actual_headers, actual_resp_body = webtest.shb(response)
    actual_status = int(status_line[:3])
    assert actual_status == 200
    expected_resp_body = f"thanks for '{body.decode() !s}'".encode()
    expected_resp_body = ("thanks for '%s'" % body).encode()
    assert actual_resp_body == expected_resp_body
    conn.close()

@@ -1260,7 +987,7 @@ def test_readall_or_close(test_client, max_request_body_size):
    status_line, actual_headers, actual_resp_body = webtest.shb(response)
    actual_status = int(status_line[:3])
    assert actual_status == 200
    expected_resp_body = f"thanks for '{body.decode() !s}'".encode()
    expected_resp_body = ("thanks for '%s'" % body).encode()
    assert actual_resp_body == expected_resp_body
    conn.close()

@@ -134,7 +134,7 @@ def test_query_string_request(test_client):
        '/hello',  # plain
        '/query_string?test=True',  # query
        '/{0}?{1}={2}'.format(  # quoted unicode
            *map(urllib.parse.quote, ('Юххууу', 'ї', 'йо')),
            *map(urllib.parse.quote, ('Юххууу', 'ї', 'йо'))
        ),
    ),
)

@@ -31,7 +31,7 @@ config = {


@contextmanager
def cheroot_server(server_factory):  # noqa: WPS210
def cheroot_server(server_factory):
    """Set up and tear down a Cheroot server instance."""
    conf = config[server_factory].copy()
    bind_port = conf.pop('bind_addr')[-1]
@@ -41,7 +41,7 @@ def cheroot_server(server_factory):  # noqa: WPS210
            actual_bind_addr = (interface, bind_port)
            httpserver = server_factory(  # create it
                bind_addr=actual_bind_addr,
                **conf,
                **conf
            )
        except OSError:
            pass
@@ -50,52 +50,27 @@ def cheroot_server(server_factory):  # noqa: WPS210

    httpserver.shutdown_timeout = 0  # Speed-up tests teardown

    # FIXME: Expose this thread through a fixture so that it
    # FIXME: could be awaited in tests.
    server_thread = threading.Thread(target=httpserver.safe_start)
    server_thread.start()  # spawn it
    threading.Thread(target=httpserver.safe_start).start()  # spawn it
    while not httpserver.ready:  # wait until fully initialized and bound
        time.sleep(0.1)

    try:
        yield server_thread, httpserver
    finally:
    yield httpserver

        httpserver.stop()  # destroy it
        server_thread.join()  # wait for the thread to be torn down


@pytest.fixture
def thread_and_wsgi_server():
    """Set up and tear down a Cheroot WSGI server instance.

    This emits a tuple of a thread and a server instance.
    """
    with cheroot_server(cheroot.wsgi.Server) as (server_thread, srv):
        yield server_thread, srv


@pytest.fixture
def thread_and_native_server():
    """Set up and tear down a Cheroot HTTP server instance.

    This emits a tuple of a thread and a server instance.
    """
    with cheroot_server(cheroot.server.HTTPServer) as (server_thread, srv):
        yield server_thread, srv


@pytest.fixture
def wsgi_server(thread_and_wsgi_server):  # noqa: WPS442
def wsgi_server():
    """Set up and tear down a Cheroot WSGI server instance."""
    _server_thread, srv = thread_and_wsgi_server
    return srv
    with cheroot_server(cheroot.wsgi.Server) as srv:
        yield srv


@pytest.fixture
def native_server(thread_and_native_server):  # noqa: WPS442
def native_server():
    """Set up and tear down a Cheroot HTTP server instance."""
    _server_thread, srv = thread_and_native_server
    return srv
    with cheroot_server(cheroot.server.HTTPServer) as srv:
        yield srv


class _TestClient:

@@ -6,7 +6,6 @@
"""

import collections
import logging
import threading
import time
import socket
@@ -31,7 +30,7 @@ class TrueyZero:

trueyzero = TrueyZero()

_SHUTDOWNREQUEST = object()
_SHUTDOWNREQUEST = None


class WorkerThread(threading.Thread):
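The `_SHUTDOWNREQUEST` change above swaps `None` for a unique `object()` sentinel, so no legitimate queue item (including `None`) can be mistaken for a stop request. A hedged, self-contained sketch of the pattern; the names are illustrative, not cheroot internals:

# A shutdown sentinel is compared by identity, exactly like the
# `conn is _SHUTDOWNREQUEST` check in the worker loop shown below.
import queue
import threading

_SHUTDOWN = object()
jobs = queue.Queue()

def worker():
    while True:
        item = jobs.get()
        if item is _SHUTDOWN:
            break
        print('processing', item)

t = threading.Thread(target=worker)
t.start()
jobs.put('request-1')
jobs.put(_SHUTDOWN)  # ask the worker to stop
t.join()
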
@@ -100,58 +99,13 @@ class WorkerThread(threading.Thread):
        threading.Thread.__init__(self)

    def run(self):
        """Set up incoming HTTP connection processing loop.
        """Process incoming HTTP connections.

        This is the thread's entry-point. It performs top-layer
        exception handling and interrupt processing.
        :exc:`KeyboardInterrupt` and :exc:`SystemExit` bubbling up
        from the inner-layer code constitute a global server interrupt
        request. When they happen, the worker thread exits.

        :raises BaseException: when an unexpected non-interrupt
           exception leaks from the inner layers

        # noqa: DAR401 KeyboardInterrupt SystemExit
        Retrieves incoming connections from thread pool.
        """
        self.server.stats['Worker Threads'][self.name] = self.stats
        self.ready = True
        try:
            self._process_connections_until_interrupted()
        except (KeyboardInterrupt, SystemExit) as interrupt_exc:
            interrupt_cause = interrupt_exc.__cause__ or interrupt_exc
            self.server.error_log(
                f'Setting the server interrupt flag to {interrupt_cause !r}',
                level=logging.DEBUG,
            )
            self.server.interrupt = interrupt_cause
        except BaseException as underlying_exc:  # noqa: WPS424
            # NOTE: This is the last resort logging with the last dying breath
            # NOTE: of the worker. It is only reachable when exceptions happen
            # NOTE: in the `finally` branch of the internal try/except block.
            self.server.error_log(
                'A fatal exception happened. Setting the server interrupt flag'
                f' to {underlying_exc !r} and giving up.'
                '\N{NEW LINE}\N{NEW LINE}'
                'Please, report this on the Cheroot tracker at '
                '<https://github.com/cherrypy/cheroot/issues/new/choose>, '
                'providing a full reproducer with as much context and details as possible.',
                level=logging.CRITICAL,
                traceback=True,
            )
            self.server.interrupt = underlying_exc
            raise
        finally:
            self.ready = False

    def _process_connections_until_interrupted(self):
        """Process incoming HTTP connections in an infinite loop.

        Retrieves incoming connections from thread pool, processing
        them one by one.

        :raises SystemExit: on the internal requests to stop the
           server instance
        """
        self.ready = True
        while True:
            conn = self.server.requests.get()
            if conn is _SHUTDOWNREQUEST:
@@ -164,63 +118,20 @@ class WorkerThread(threading.Thread):
                keep_conn_open = False
                try:
                    keep_conn_open = conn.communicate()
                except ConnectionError as connection_error:
                    keep_conn_open = False  # Drop the connection cleanly
                    self.server.error_log(
                        'Got a connection error while handling a '
                        f'connection from {conn.remote_addr !s}:'
                        f'{conn.remote_port !s} ({connection_error !s})',
                        level=logging.INFO,
                    )
                    continue
                except (KeyboardInterrupt, SystemExit) as shutdown_request:
                    # Shutdown request
                    keep_conn_open = False  # Drop the connection cleanly
                    self.server.error_log(
                        'Got a server shutdown request while handling a '
                        f'connection from {conn.remote_addr !s}:'
                        f'{conn.remote_port !s} ({shutdown_request !s})',
                        level=logging.DEBUG,
                    )
                    raise SystemExit(
                        str(shutdown_request),
                    ) from shutdown_request
                except BaseException as unhandled_error:  # noqa: WPS424
                    # NOTE: Only a shutdown request should bubble up to the
                    # NOTE: external cleanup code. Otherwise, this thread dies.
                    # NOTE: If this were to happen, the threadpool would still
                    # NOTE: list a dead thread without knowing its state. And
                    # NOTE: the calling code would fail to schedule processing
                    # NOTE: of new requests.
                    self.server.error_log(
                        'Unhandled error while processing an incoming '
                        f'connection {unhandled_error !r}',
                        level=logging.ERROR,
                        traceback=True,
                    )
                    continue  # Prevent the thread from dying
                finally:
                    # NOTE: Any exceptions coming from within `finally` may
                    # NOTE: kill the thread, causing the threadpool to only
                    # NOTE: contain references to dead threads rendering the
                    # NOTE: server defunct, effectively meaning a DoS.
                    # NOTE: Ideally, things called here should process
                    # NOTE: everything recoverable internally. Any unhandled
                    # NOTE: errors will bubble up into the outer try/except
                    # NOTE: block. They will be treated as fatal and turned
                    # NOTE: into server shutdown requests and then reraised
                    # NOTE: unconditionally.
                    if keep_conn_open:
                        self.server.put_conn(conn)
                    else:
                        conn.close()
                    if is_stats_enabled:
                        self.requests_seen += conn.requests_seen
                        self.bytes_read += conn.rfile.bytes_read
                        self.bytes_written += conn.wfile.bytes_written
                        self.requests_seen += self.conn.requests_seen
                        self.bytes_read += self.conn.rfile.bytes_read
                        self.bytes_written += self.conn.wfile.bytes_written
                        self.work_time += time.time() - self.start_time
                        self.start_time = None
                self.conn = None
        except (KeyboardInterrupt, SystemExit) as ex:
            self.server.interrupt = ex


class ThreadPool:

@@ -57,11 +57,9 @@ These API's are described in the `CherryPy specification
"""

try:
    import importlib.metadata as importlib_metadata
    import pkg_resources
except ImportError:
    # fall back for python <= 3.7
    # This try/except can be removed with py <= 3.7 support
    import importlib_metadata
    pass

from threading import local as _local

@@ -111,7 +109,7 @@ tree = _cptree.Tree()


try:
    __version__ = importlib_metadata.version('cherrypy')
    __version__ = pkg_resources.require('cherrypy')[0].version
except Exception:
    __version__ = 'unknown'

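The master side resolves the installed version through the stdlib `importlib.metadata` instead of the deprecated `pkg_resources`; a hedged sketch of that lookup in isolation:

# Distribution-version lookup via importlib.metadata, with a safe default
# when the distribution is not installed.
import importlib.metadata as importlib_metadata

try:
    version = importlib_metadata.version('cherrypy')
except importlib_metadata.PackageNotFoundError:
    version = 'unknown'
print(version)
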
@@ -183,28 +181,24 @@ def quickstart(root=None, script_name='', config=None):
class _Serving(_local):
    """An interface for registering request and response objects.

    Rather than have a separate "thread local" object for the request
    and the response, this class works as a single threadlocal container
    for both objects (and any others which developers wish to define).
    In this way, we can easily dump those objects when we stop/start a
    new HTTP conversation, yet still refer to them as module-level
    globals in a thread-safe way.
    Rather than have a separate "thread local" object for the request and
    the response, this class works as a single threadlocal container for
    both objects (and any others which developers wish to define). In this
    way, we can easily dump those objects when we stop/start a new HTTP
    conversation, yet still refer to them as module-level globals in a
    thread-safe way.
    """

    request = _cprequest.Request(_httputil.Host('127.0.0.1', 80),
                                 _httputil.Host('127.0.0.1', 1111))
    """The request object for the current thread.

    In the main thread, and any threads which are not receiving HTTP
    requests, this is None.
    """
    The request object for the current thread. In the main thread,
    and any threads which are not receiving HTTP requests, this is None."""

    response = _cprequest.Response()
    """The response object for the current thread.

    In the main thread, and any threads which are not receiving HTTP
    requests, this is None.
    """
    The response object for the current thread. In the main thread,
    and any threads which are not receiving HTTP requests, this is None."""

    def load(self, request, response):
        self.request = request
@@ -322,8 +316,8 @@ class _GlobalLogManager(_cplogging.LogManager):
    def __call__(self, *args, **kwargs):
        """Log the given message to the app.log or global log.

        Log the given message to the app.log or global log as
        appropriate.
        Log the given message to the app.log or global
        log as appropriate.
        """
        # Do NOT use try/except here. See
        # https://github.com/cherrypy/cherrypy/issues/945
@@ -336,8 +330,8 @@ class _GlobalLogManager(_cplogging.LogManager):
    def access(self):
        """Log an access message to the app.log or global log.

        Log the given message to the app.log or global log as
        appropriate.
        Log the given message to the app.log or global
        log as appropriate.
        """
        try:
            return request.app.log.access()

@@ -313,10 +313,7 @@ class Checker(object):

    # -------------------- Specific config warnings -------------------- #
    def check_localhost(self):
        """Warn if any socket_host is 'localhost'.

        See #711.
        """
        """Warn if any socket_host is 'localhost'. See #711."""
        for k, v in cherrypy.config.items():
            if k == 'server.socket_host' and v == 'localhost':
                warnings.warn("The use of 'localhost' as a socket host can "

@@ -1,4 +1,5 @@
"""Configuration system for CherryPy.
"""
Configuration system for CherryPy.

Configuration in CherryPy is implemented via dictionaries. Keys are strings
which name the mapped value, which may be of any type.
@@ -131,8 +132,8 @@ def _if_filename_register_autoreload(ob):
def merge(base, other):
    """Merge one app config (from a dict, file, or filename) into another.

    If the given config is a filename, it will be appended to the list
    of files to monitor for "autoreload" changes.
    If the given config is a filename, it will be appended to
    the list of files to monitor for "autoreload" changes.
    """
    _if_filename_register_autoreload(other)


@@ -1,10 +1,9 @@
"""CherryPy dispatchers.

A 'dispatcher' is the object which looks up the 'page handler' callable
and collects config for the current request based on the path_info,
other request attributes, and the application architecture. The core
calls the dispatcher as early as possible, passing it a 'path_info'
argument.
and collects config for the current request based on the path_info, other
request attributes, and the application architecture. The core calls the
dispatcher as early as possible, passing it a 'path_info' argument.

The default dispatcher discovers the page handler by matching path_info
to a hierarchical arrangement of objects, starting at request.app.root.
@@ -22,6 +21,7 @@ import cherrypy


class PageHandler(object):

    """Callable which sets response.body."""

    def __init__(self, callable, *args, **kwargs):
@@ -64,7 +64,8 @@ class PageHandler(object):


def test_callable_spec(callable, callable_args, callable_kwargs):
    """Inspect callable and test to see if the given args are suitable for it.
    """
    Inspect callable and test to see if the given args are suitable for it.

    When an error occurs during the handler's invoking stage there are 2
    erroneous cases:
@@ -251,16 +252,16 @@ else:


class Dispatcher(object):

    """CherryPy Dispatcher which walks a tree of objects to find a handler.

    The tree is rooted at cherrypy.request.app.root, and each
    hierarchical component in the path_info argument is matched to a
    corresponding nested attribute of the root object. Matching handlers
    must have an 'exposed' attribute which evaluates to True. The
    special method name "index" matches a URI which ends in a slash
    ("/"). The special method name "default" may match a portion of the
    path_info (but only when no longer substring of the path_info
    matches some other object).
    The tree is rooted at cherrypy.request.app.root, and each hierarchical
    component in the path_info argument is matched to a corresponding nested
    attribute of the root object. Matching handlers must have an 'exposed'
    attribute which evaluates to True. The special method name "index"
    matches a URI which ends in a slash ("/"). The special method name
    "default" may match a portion of the path_info (but only when no longer
    substring of the path_info matches some other object).

    This is the default, built-in dispatcher for CherryPy.
    """
@@ -305,9 +306,9 @@ class Dispatcher(object):

        The second object returned will be a list of names which are
        'virtual path' components: parts of the URL which are dynamic,
        and were not used when looking up the handler. These virtual
        path components are passed to the handler as positional
        arguments.
        and were not used when looking up the handler.
        These virtual path components are passed to the handler as
        positional arguments.
        """
        request = cherrypy.serving.request
        app = request.app
@@ -447,11 +448,13 @@ class Dispatcher(object):


class MethodDispatcher(Dispatcher):

    """Additional dispatch based on cherrypy.request.method.upper().

    Methods named GET, POST, etc will be called on an exposed class. The
    method names must be all caps; the appropriate Allow header will be
    output showing all capitalized method names as allowable HTTP verbs.
    Methods named GET, POST, etc will be called on an exposed class.
    The method names must be all caps; the appropriate Allow header
    will be output showing all capitalized method names as allowable
    HTTP verbs.

    Note that the containing class must be exposed, not the methods.
    """
@@ -489,14 +492,16 @@ class MethodDispatcher(Dispatcher):


class RoutesDispatcher(object):

    """A Routes based dispatcher for CherryPy."""

    def __init__(self, full_result=False, **mapper_options):
        """Routes dispatcher.
        """
        Routes dispatcher

        Set full_result to True if you wish the controller and the
        action to be passed on to the page handler parameters. By
        default they won't be.
        Set full_result to True if you wish the controller
        and the action to be passed on to the page handler
        parameters. By default they won't be.
        """
        import routes
        self.full_result = full_result
@@ -612,7 +617,8 @@ def XMLRPCDispatcher(next_dispatcher=Dispatcher()):

def VirtualHost(next_dispatcher=Dispatcher(), use_x_forwarded_host=True,
                **domains):
    """Select a different handler based on the Host header.
    """
    Select a different handler based on the Host header.

    This can be useful when running multiple sites within one CP server.
    It allows several domains to point to different parts of a single

@@ -136,17 +136,19 @@ from cherrypy.lib import httputil as _httputil


class CherryPyException(Exception):

    """A base class for CherryPy exceptions."""
    pass


class InternalRedirect(CherryPyException):

    """Exception raised to switch to the handler for a different URL.

    This exception will redirect processing to another path within the
    site (without informing the client). Provide the new path as an
    argument when raising the exception. Provide any params in the
    querystring for the new URL.
    This exception will redirect processing to another path within the site
    (without informing the client). Provide the new path as an argument when
    raising the exception. Provide any params in the querystring for the new
    URL.
    """

    def __init__(self, path, query_string=''):
@@ -171,6 +173,7 @@ class InternalRedirect(CherryPyException):


class HTTPRedirect(CherryPyException):

    """Exception raised when the request should be redirected.

    This exception will force a HTTP redirect to the URL or URL's you give it.
@@ -199,7 +202,7 @@ class HTTPRedirect(CherryPyException):
    """The list of URL's to emit."""

    encoding = 'utf-8'
    """The encoding when passed urls are not native strings."""
    """The encoding when passed urls are not native strings"""

    def __init__(self, urls, status=None, encoding=None):
        self.urls = abs_urls = [
@@ -227,7 +230,8 @@ class HTTPRedirect(CherryPyException):

    @classproperty
    def default_status(cls):
        """The default redirect status for the request.
        """
        The default redirect status for the request.

        RFC 2616 indicates a 301 response code fits our goal; however,
        browser support for 301 is quite messy. Use 302/303 instead. See
@@ -245,9 +249,8 @@ class HTTPRedirect(CherryPyException):
        """Modify cherrypy.response status, headers, and body to represent
        self.

        CherryPy uses this internally, but you can also use it to create
        an HTTPRedirect object and set its output without *raising* the
        exception.
        CherryPy uses this internally, but you can also use it to create an
        HTTPRedirect object and set its output without *raising* the exception.
        """
        response = cherrypy.serving.response
        response.status = status = self.status
@@ -336,6 +339,7 @@ def clean_headers(status):


class HTTPError(CherryPyException):

    """Exception used to return an HTTP error code (4xx-5xx) to the client.

    This exception can be used to automatically send a response using a
@@ -354,9 +358,7 @@ class HTTPError(CherryPyException):
    """

    status = None
    """The HTTP status code.

    May be of type int or str (with a Reason-Phrase).
    """The HTTP status code. May be of type int or str (with a Reason-Phrase).
    """

    code = None
@@ -384,9 +386,8 @@ class HTTPError(CherryPyException):
        """Modify cherrypy.response status, headers, and body to represent
        self.

        CherryPy uses this internally, but you can also use it to create
        an HTTPError object and set its output without *raising* the
        exception.
        CherryPy uses this internally, but you can also use it to create an
        HTTPError object and set its output without *raising* the exception.
        """
        response = cherrypy.serving.response

@@ -425,10 +426,11 @@ class HTTPError(CherryPyException):


class NotFound(HTTPError):

    """Exception raised when a URL could not be mapped to any handler (404).

    This is equivalent to raising :class:`HTTPError("404 Not Found")
    <cherrypy._cperror.HTTPError>`.
    This is equivalent to raising
    :class:`HTTPError("404 Not Found") <cherrypy._cperror.HTTPError>`.
    """

    def __init__(self, path=None):
@@ -475,8 +477,8 @@ _HTTPErrorTemplate = '''<!DOCTYPE html PUBLIC
def get_error_page(status, **kwargs):
    """Return an HTML page, containing a pretty error response.

    status should be an int or a str. kwargs will be interpolated into
    the page template.
    status should be an int or a str.
    kwargs will be interpolated into the page template.
    """
    try:
        code, reason, message = _httputil.valid_status(status)
@@ -593,8 +595,8 @@ def bare_error(extrabody=None):
    """Produce status, headers, body for a critical error.

    Returns a triple without calling any other questionable functions,
    so it should be as error-free as possible. Call it from an HTTP
    server if you get errors outside of the request.
    so it should be as error-free as possible. Call it from an HTTP server
    if you get errors outside of the request.

    If extrabody is None, a friendly but rather unhelpful error message
    is set in the body. If extrabody is a string, it will be appended

@@ -123,6 +123,7 @@ logfmt = logging.Formatter('%(message)s')


class NullHandler(logging.Handler):

    """A no-op logging handler to silence the logging.lastResort handler."""

    def handle(self, record):
@@ -136,16 +137,15 @@ class NullHandler(logging.Handler):


class LogManager(object):

    """An object to assist both simple and advanced logging.

    ``cherrypy.log`` is an instance of this class.
    """

    appid = None
    """The id() of the Application object which owns this log manager.

    If this is a global log manager, appid is None.
    """
    """The id() of the Application object which owns this log manager. If this
    is a global log manager, appid is None."""

    error_log = None
    """The actual :class:`logging.Logger` instance for error messages."""
@@ -317,8 +317,8 @@ class LogManager(object):
    def screen(self):
        """Turn stderr/stdout logging on or off.

        If you set this to True, it'll add the appropriate StreamHandler
        for you. If you set it to False, it will remove the handler.
        If you set this to True, it'll add the appropriate StreamHandler for
        you. If you set it to False, it will remove the handler.
        """
        h = self._get_builtin_handler
        has_h = h(self.error_log, 'screen') or h(self.access_log, 'screen')
@@ -414,6 +414,7 @@ class LogManager(object):


class WSGIErrorHandler(logging.Handler):

    "A handler class which writes logging records to environ['wsgi.errors']."

    def flush(self):
@@ -451,8 +452,6 @@ class WSGIErrorHandler(logging.Handler):

class LazyRfc3339UtcTime(object):
    def __str__(self):
        """Return datetime in RFC3339 UTC Format."""
        iso_formatted_now = datetime.datetime.now(
            datetime.timezone.utc,
        ).isoformat('T')
        """Return utcnow() in RFC3339 UTC Format."""
        iso_formatted_now = datetime.datetime.utcnow().isoformat('T')
        return f'{iso_formatted_now!s}Z'

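The master side replaces the deprecated `datetime.utcnow()` with an explicitly timezone-aware call; a hedged, standalone comparison of the two constructions from the hunk above:

# utcnow() (v2.14.0-beta side) returns a naive datetime and is deprecated
# as of Python 3.12; now(timezone.utc) (master side) is timezone-aware.
import datetime

naive = datetime.datetime.utcnow().isoformat('T')
aware = datetime.datetime.now(datetime.timezone.utc).isoformat('T')
print(f'{naive!s}Z')  # e.g. 2025-01-01T12:00:00.000000Z
print(f'{aware!s}Z')  # note: the aware form also carries a +00:00 offset
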
@@ -1,4 +1,4 @@
"""Native adapter for serving CherryPy via mod_python.
"""Native adapter for serving CherryPy via mod_python

Basic usage:


@@ -120,10 +120,10 @@ class NativeGateway(cheroot.server.Gateway):
class CPHTTPServer(cheroot.server.HTTPServer):
    """Wrapper for cheroot.server.HTTPServer.

    cheroot has been designed to not reference CherryPy in any way, so
    that it can be used in other frameworks and applications. Therefore,
    we wrap it here, so we can apply some attributes from config ->
    cherrypy.server -> HTTPServer.
    cheroot has been designed to not reference CherryPy in any way,
    so that it can be used in other frameworks and applications.
    Therefore, we wrap it here, so we can apply some attributes
    from config -> cherrypy.server -> HTTPServer.
    """

    def __init__(self, server_adapter=cherrypy.server):

@@ -248,10 +248,7 @@ def process_multipart_form_data(entity):


def _old_process_multipart(entity):
    """The behavior of 3.2 and lower.

    Deprecated and will be changed in 3.3.
    """
    """The behavior of 3.2 and lower. Deprecated and will be changed in 3.3."""
    process_multipart(entity)

    params = entity.params
@@ -280,6 +277,7 @@ def _old_process_multipart(entity):

# -------------------------------- Entities --------------------------------- #
class Entity(object):

    """An HTTP request body, or MIME multipart body.

    This class collects information about the HTTP request entity. When a
@@ -348,15 +346,13 @@ class Entity(object):
    content_type = None
    """The value of the Content-Type request header.

    If the Entity is part of a multipart payload, this will be the
    Content-Type given in the MIME headers for this part.
    If the Entity is part of a multipart payload, this will be the Content-Type
    given in the MIME headers for this part.
    """

    default_content_type = 'application/x-www-form-urlencoded'
    """This defines a default ``Content-Type`` to use if no Content-Type header
    is given.

    The empty string is used for RequestBody, which results in the
    is given. The empty string is used for RequestBody, which results in the
    request body not being read or parsed at all. This is by design; a missing
    ``Content-Type`` header in the HTTP request entity is an error at best,
    and a security hole at worst. For multipart parts, however, the MIME spec
@@ -406,8 +402,8 @@ class Entity(object):
    part_class = None
    """The class used for multipart parts.

    You can replace this with custom subclasses to alter the processing
    of multipart parts.
    You can replace this with custom subclasses to alter the processing of
    multipart parts.
    """

    def __init__(self, fp, headers, params=None, parts=None):
@@ -513,8 +509,7 @@ class Entity(object):
        """Return a file-like object into which the request body will be read.

        By default, this will return a TemporaryFile. Override as needed.
        See also :attr:`cherrypy._cpreqbody.Part.maxrambytes`.
        """
        See also :attr:`cherrypy._cpreqbody.Part.maxrambytes`."""
        return tempfile.TemporaryFile()

    def fullvalue(self):
@@ -530,7 +525,7 @@ class Entity(object):
        return value

    def decode_entity(self, value):
        """Return a given byte encoded value as a string."""
        """Return a given byte encoded value as a string"""
        for charset in self.attempt_charsets:
            try:
                value = value.decode(charset)

@@ -574,6 +569,7 @@ class Entity(object):


class Part(Entity):

    """A MIME part entity, part of a multipart entity."""

    # "The default character set, which must be assumed in the absence of a

@@ -657,8 +653,8 @@ class Part(Entity):
    def read_lines_to_boundary(self, fp_out=None):
        """Read bytes from self.fp and return or write them to a file.

        If the 'fp_out' argument is None (the default), all bytes read
        are returned in a single byte string.
        If the 'fp_out' argument is None (the default), all bytes read are
        returned in a single byte string.

        If the 'fp_out' argument is not None, it must be a file-like
        object that supports the 'write' method; all bytes read will be

@@ -759,15 +755,15 @@ class SizedReader:
    def read(self, size=None, fp_out=None):
        """Read bytes from the request body and return or write them to a file.

        A number of bytes less than or equal to the 'size' argument are
        read off the socket. The actual number of bytes read are tracked
        in self.bytes_read. The number may be smaller than 'size' when
        1) the client sends fewer bytes, 2) the 'Content-Length' request
        header specifies fewer bytes than requested, or 3) the number of
        bytes read exceeds self.maxbytes (in which case, 413 is raised).
        A number of bytes less than or equal to the 'size' argument are read
        off the socket. The actual number of bytes read are tracked in
        self.bytes_read. The number may be smaller than 'size' when 1) the
        client sends fewer bytes, 2) the 'Content-Length' request header
        specifies fewer bytes than requested, or 3) the number of bytes read
        exceeds self.maxbytes (in which case, 413 is raised).

        If the 'fp_out' argument is None (the default), all bytes read
        are returned in a single byte string.
        If the 'fp_out' argument is None (the default), all bytes read are
        returned in a single byte string.

        If the 'fp_out' argument is not None, it must be a file-like
        object that supports the 'write' method; all bytes read will be

@@ -922,6 +918,7 @@ class SizedReader:


class RequestBody(Entity):

    """The entity of the HTTP request."""

    bufsize = 8 * 1024

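For orientation, `RequestBody` is what handlers see as `cherrypy.request.body`. A minimal handler sketch (class and method names are hypothetical), assuming a non-form content type so the body is not pre-parsed into params::

    import cherrypy

    class Uploads:
        @cherrypy.expose
        def store(self):
            # cherrypy.request.body is a RequestBody instance; read()
            # honors Content-Length and max_request_body_size.
            raw = cherrypy.request.body.read()
            return b'received %d bytes' % len(raw)
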
@@ -16,6 +16,7 @@ from cherrypy.lib import httputil, reprconf, encoding


class Hook(object):

    """A callback and its metadata: failsafe, priority, and kwargs."""

    callback = None

@@ -29,12 +30,10 @@ class Hook(object):
    from the same call point raise exceptions."""

    priority = 50
    """Defines the order of execution for a list of Hooks.

    Priority numbers should be limited to the closed interval [0, 100],
    but values outside this range are acceptable, as are fractional
    values.
    """
    Defines the order of execution for a list of Hooks. Priority numbers
    should be limited to the closed interval [0, 100], but values outside
    this range are acceptable, as are fractional values."""

    kwargs = {}
    """

@@ -75,6 +74,7 @@ class Hook(object):


class HookMap(dict):

    """A map of call points to lists of callbacks (Hook objects)."""

    def __new__(cls, points=None):

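`Hook.priority` is most often supplied when registering a custom Tool; lower numbers run earlier within the same hook point. A sketch under that assumption (tool and function names hypothetical)::

    import cherrypy

    def stamp_request():
        # Runs at 'before_handler' for requests where the tool is on.
        cherrypy.request.stamped = True

    # Tool(point, callable, name=None, priority=50)
    cherrypy.tools.stamp = cherrypy.Tool(
        'before_handler', stamp_request, priority=30)
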
@@ -190,23 +190,23 @@ hookpoints = ['on_start_resource', 'before_request_body',


class Request(object):

    """An HTTP request.

    This object represents the metadata of an HTTP request message; that
    is, it contains attributes which describe the environment in which
    the request URL, headers, and body were sent (if you want tools to
    interpret the headers and body, those are elsewhere, mostly in
    Tools). This 'metadata' consists of socket data, transport
    characteristics, and the Request-Line. This object also contains
    data regarding the configuration in effect for the given URL, and
    the execution plan for generating a response.
    This object represents the metadata of an HTTP request message;
    that is, it contains attributes which describe the environment
    in which the request URL, headers, and body were sent (if you
    want tools to interpret the headers and body, those are elsewhere,
    mostly in Tools). This 'metadata' consists of socket data,
    transport characteristics, and the Request-Line. This object
    also contains data regarding the configuration in effect for
    the given URL, and the execution plan for generating a response.
    """

    prev = None
    """The previous Request object (if any).

    This should be None unless we are processing an InternalRedirect.
    """
    The previous Request object (if any). This should be None
    unless we are processing an InternalRedirect."""

    # Conversation/connection attributes
    local = httputil.Host('127.0.0.1', 80)

@@ -216,10 +216,9 @@ class Request(object):
    'An httputil.Host(ip, port, hostname) object for the client socket.'

    scheme = 'http'
    """The protocol used between client and server.

    In most cases, this will be either 'http' or 'https'.
    """
    The protocol used between client and server. In most cases,
    this will be either 'http' or 'https'."""

    server_protocol = 'HTTP/1.1'
    """

@@ -228,30 +227,25 @@ class Request(object):

    base = ''
    """The (scheme://host) portion of the requested URL.

    In some cases (e.g. when proxying via mod_rewrite), this may contain
    path segments which cherrypy.url uses when constructing url's, but
    which otherwise are ignored by CherryPy. Regardless, this value MUST
    NOT end in a slash.
    """
    which otherwise are ignored by CherryPy. Regardless, this value
    MUST NOT end in a slash."""

    # Request-Line attributes
    request_line = ''
    """The complete Request-Line received from the client.

    This is a single string consisting of the request method, URI, and
    protocol version (joined by spaces). Any final CRLF is removed.
    """
    The complete Request-Line received from the client. This is a
    single string consisting of the request method, URI, and protocol
    version (joined by spaces). Any final CRLF is removed."""

    method = 'GET'
    """Indicates the HTTP method to be performed on the resource identified by
    the Request-URI.

    Common methods include GET, HEAD, POST, PUT, and DELETE. CherryPy
    allows any extension method; however, various HTTP servers and
    gateways may restrict the set of allowable methods. CherryPy
    applications SHOULD restrict the set (on a per-URI basis).
    """
    Indicates the HTTP method to be performed on the resource identified
    by the Request-URI. Common methods include GET, HEAD, POST, PUT, and
    DELETE. CherryPy allows any extension method; however, various HTTP
    servers and gateways may restrict the set of allowable methods.
    CherryPy applications SHOULD restrict the set (on a per-URI basis)."""

    query_string = ''
    """

@@ -283,26 +277,22 @@ class Request(object):
    A dict which combines query string (GET) and request entity (POST)
    variables. This is populated in two stages: GET params are added
    before the 'on_start_resource' hook, and POST params are added
    between the 'before_request_body' and 'before_handler' hooks.
    """
    between the 'before_request_body' and 'before_handler' hooks."""

    # Message attributes
    header_list = []
    """A list of the HTTP request headers as (name, value) tuples.

    In general, you should use request.headers (a dict) instead.
    """
    A list of the HTTP request headers as (name, value) tuples.
    In general, you should use request.headers (a dict) instead."""

    headers = httputil.HeaderMap()
    """A dict-like object containing the request headers.

    Keys are header
    """
    A dict-like object containing the request headers. Keys are header
    names (in Title-Case format); however, you may get and set them in
    a case-insensitive manner. That is, headers['Content-Type'] and
    headers['content-type'] refer to the same value. Values are header
    values (decoded according to :rfc:`2047` if necessary). See also:
    httputil.HeaderMap, httputil.HeaderElement.
    """
    httputil.HeaderMap, httputil.HeaderElement."""

    cookie = SimpleCookie()
    """See help(Cookie)."""

@@ -346,8 +336,7 @@ class Request(object):
    or multipart, this will be None. Otherwise, this will be an instance
    of :class:`RequestBody<cherrypy._cpreqbody.RequestBody>` (which you
    can .read()); this value is set between the 'before_request_body' and
    'before_handler' hooks (assuming that process_request_body is True).
    """
    'before_handler' hooks (assuming that process_request_body is True)."""

    # Dispatch attributes
    dispatch = cherrypy.dispatch.Dispatcher()

@@ -358,24 +347,23 @@ class Request(object):
    calls the dispatcher as early as possible, passing it a 'path_info'
    argument.

    The default dispatcher discovers the page handler by matching
    path_info to a hierarchical arrangement of objects, starting at
    request.app.root. See help(cherrypy.dispatch) for more information.
    """
    The default dispatcher discovers the page handler by matching path_info
    to a hierarchical arrangement of objects, starting at request.app.root.
    See help(cherrypy.dispatch) for more information."""

    script_name = ''
    """The 'mount point' of the application which is handling this request.
    """
    The 'mount point' of the application which is handling this request.

    This attribute MUST NOT end in a slash. If the script_name refers to
    the root of the URI, it MUST be an empty string (not "/").
    """

    path_info = '/'
    """The 'relative path' portion of the Request-URI.

    This is relative to the script_name ('mount point') of the
    application which is handling this request.
    """
    The 'relative path' portion of the Request-URI. This is relative
    to the script_name ('mount point') of the application which is
    handling this request."""

    login = None
    """

@@ -403,16 +391,14 @@ class Request(object):
    of the form: {Toolbox.namespace: {Tool.name: config dict}}."""

    config = None
    """A flat dict of all configuration entries which apply to the current
    request.

    These entries are collected from global config, application config
    (based on request.path_info), and from handler config (exactly how
    is governed by the request.dispatch object in effect for this
    request; by default, handler config can be attached anywhere in the
    tree between request.app.root and the final handler, and inherits
    downward).
    """
    A flat dict of all configuration entries which apply to the
    current request. These entries are collected from global config,
    application config (based on request.path_info), and from handler
    config (exactly how is governed by the request.dispatch object in
    effect for this request; by default, handler config can be attached
    anywhere in the tree between request.app.root and the final handler,
    and inherits downward)."""

    is_index = None
    """

@@ -423,14 +409,13 @@ class Request(object):
    the trailing slash. See cherrypy.tools.trailing_slash."""

    hooks = HookMap(hookpoints)
    """A HookMap (dict-like object) of the form: {hookpoint: [hook, ...]}.

    """
    A HookMap (dict-like object) of the form: {hookpoint: [hook, ...]}.
    Each key is a str naming the hook point, and each value is a list
    of hooks which will be called at that hook point during this request.
    The list of hooks is generally populated as early as possible (mostly
    from Tools specified in config), but may be extended at any time.
    See also: _cprequest.Hook, _cprequest.HookMap, and cherrypy.tools.
    """
    See also: _cprequest.Hook, _cprequest.HookMap, and cherrypy.tools."""

    error_response = cherrypy.HTTPError(500).set_response
    """

@@ -443,11 +428,12 @@ class Request(object):
    error response to the user-agent."""

    error_page = {}
    """A dict of {error code: response filename or callable} pairs.
    """
    A dict of {error code: response filename or callable} pairs.

    The error code must be an int representing a given HTTP error code,
    or the string 'default', which will be used if no matching entry is
    found for a given numeric code.
    or the string 'default', which will be used if no matching entry
    is found for a given numeric code.

    If a filename is provided, the file should contain a Python string-
    formatting template, and can expect by default to receive format

@@ -461,8 +447,8 @@ class Request(object):
    iterable of strings which will be set to response.body. It may also
    override headers or perform any other processing.

    If no entry is given for an error code, and no 'default' entry
    exists, a default template will be used.
    If no entry is given for an error code, and no 'default' entry exists,
    a default template will be used.
    """

    show_tracebacks = True

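The `error_page` mapping above is normally filled from config; a sketch with a callable entry (handler name hypothetical)::

    import cherrypy

    def err_404(status, message, traceback, version):
        # A callable entry must return the response body as a string.
        return 'Nothing here: %s' % message

    cherrypy.config.update({'error_page.404': err_404})
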
@@ -487,10 +473,9 @@ class Request(object):
    """True once the close method has been called, False otherwise."""

    stage = None
    """A string containing the stage reached in the request-handling process.

    This is useful when debugging a live server with hung requests.
    """
    A string containing the stage reached in the request-handling process.
    This is useful when debugging a live server with hung requests."""

    unique_id = None
    """A lazy object generating and memorizing UUID4 on ``str()`` render."""

@@ -507,10 +492,9 @@ class Request(object):
                 server_protocol='HTTP/1.1'):
        """Populate a new Request object.

        local_host should be an httputil.Host object with the server
        info. remote_host should be an httputil.Host object with the
        client info. scheme should be a string, either "http" or
        "https".
        local_host should be an httputil.Host object with the server info.
        remote_host should be an httputil.Host object with the client info.
        scheme should be a string, either "http" or "https".
        """
        self.local = local_host
        self.remote = remote_host

@@ -530,10 +514,7 @@ class Request(object):
        self.unique_id = LazyUUID4()

    def close(self):
        """Run cleanup code.

        (Core)
        """
        """Run cleanup code. (Core)"""
        if not self.closed:
            self.closed = True
            self.stage = 'on_end_request'

@@ -570,6 +551,7 @@ class Request(object):

        Consumer code (HTTP servers) should then access these response
        attributes to build the outbound stream.

        """
        response = cherrypy.serving.response
        self.stage = 'run'

@@ -649,10 +631,7 @@ class Request(object):
        return response

    def respond(self, path_info):
        """Generate a response for the resource at self.path_info.

        (Core)
        """
        """Generate a response for the resource at self.path_info. (Core)"""
        try:
            try:
                try:

@@ -723,10 +702,7 @@ class Request(object):
        response.finalize()

    def process_query_string(self):
        """Parse the query string into Python structures.

        (Core)
        """
        """Parse the query string into Python structures. (Core)"""
        try:
            p = httputil.parse_query_string(
                self.query_string, encoding=self.query_string_encoding)

@@ -739,10 +715,7 @@ class Request(object):
        self.params.update(p)

    def process_headers(self):
        """Parse HTTP header data into Python structures.

        (Core)
        """
        """Parse HTTP header data into Python structures. (Core)"""
        # Process the headers into self.headers
        headers = self.headers
        for name, value in self.header_list:

@@ -778,10 +751,7 @@ class Request(object):
        self.base = '%s://%s' % (self.scheme, host)

    def get_resource(self, path):
        """Call a dispatcher (which sets self.handler and .config).

        (Core)
        """
        """Call a dispatcher (which sets self.handler and .config). (Core)"""
        # First, see if there is a custom dispatch at this URI. Custom
        # dispatchers can only be specified in app.config, not in _cp_config
        # (since custom dispatchers may not even have an app.root).

@@ -792,10 +762,7 @@ class Request(object):
            dispatch(path)

    def handle_error(self):
        """Handle the last unanticipated exception.

        (Core)
        """
        """Handle the last unanticipated exception. (Core)"""
        try:
            self.hooks.run('before_error_response')
            if self.error_response:

@@ -809,6 +776,7 @@ class Request(object):


class ResponseBody(object):

    """The body of the HTTP response (the response entity)."""

    unicode_err = ('Page handlers MUST return bytes. Use tools.encode '

@@ -834,18 +802,18 @@ class ResponseBody(object):


class Response(object):

    """An HTTP Response, including status, headers, and body."""

    status = ''
    """The HTTP Status-Code and Reason-Phrase."""

    header_list = []
    """A list of the HTTP response headers as (name, value) tuples.

    """
    A list of the HTTP response headers as (name, value) tuples.
    In general, you should use response.headers (a dict) instead. This
    attribute is generated from response.headers and is not valid until
    after the finalize phase.
    """
    after the finalize phase."""

    headers = httputil.HeaderMap()
    """

@@ -865,10 +833,7 @@ class Response(object):
    """The body (entity) of the HTTP response."""

    time = None
    """The value of time.time() when created.

    Use in HTTP dates.
    """
    """The value of time.time() when created. Use in HTTP dates."""

    stream = False
    """If False, buffer the response body."""

@@ -896,15 +861,15 @@ class Response(object):
        return new_body

    def _flush_body(self):
        """Discard self.body but consume any generator such that any
        finalization can occur, such as is required by caching.tee_output()."""
        """
        Discard self.body but consume any generator such that
        any finalization can occur, such as is required by
        caching.tee_output().
        """
        consume(iter(self.body))

    def finalize(self):
        """Transform headers (and cookies) into self.header_list.

        (Core)
        """
        """Transform headers (and cookies) into self.header_list. (Core)"""
        try:
            code, reason, _ = httputil.valid_status(self.status)
        except ValueError:

@@ -50,8 +50,7 @@ class Server(ServerAdapter):
    """If given, the name of the UNIX socket to use instead of TCP/IP.

    When this option is not None, the `socket_host` and `socket_port` options
    are ignored.
    """
    are ignored."""

    socket_queue_size = 5
    """The 'backlog' argument to socket.listen(); specifies the maximum number

@@ -80,24 +79,17 @@ class Server(ServerAdapter):
    """The number of worker threads to start up in the pool."""

    thread_pool_max = -1
    """The maximum size of the worker-thread pool.

    Use -1 to indicate no limit.
    """The maximum size of the worker-thread pool. Use -1 to indicate no limit.
    """

    max_request_header_size = 500 * 1024
    """The maximum number of bytes allowable in the request headers.

    If exceeded, the HTTP server should return "413 Request Entity Too
    Large".
    If exceeded, the HTTP server should return "413 Request Entity Too Large".
    """

    max_request_body_size = 100 * 1024 * 1024
    """The maximum number of bytes allowable in the request body.

    If exceeded, the HTTP server should return "413 Request Entity Too
    Large".
    """
    """The maximum number of bytes allowable in the request body. If exceeded,
    the HTTP server should return "413 Request Entity Too Large"."""

    instance = None
    """If not None, this should be an HTTP server instance (such as

@@ -127,8 +119,7 @@ class Server(ServerAdapter):
    the builtin WSGI server. Builtin options are: 'builtin' (to
    use the SSL library built into recent versions of Python).
    You may also register your own classes in the
    cheroot.server.ssl_adapters dict.
    """
    cheroot.server.ssl_adapters dict."""

    statistics = False
    """Turns statistics-gathering on or off for aware HTTP servers."""

@@ -138,13 +129,11 @@ class Server(ServerAdapter):

    wsgi_version = (1, 0)
    """The WSGI version tuple to use with the builtin WSGI server.

    The provided options are (1, 0) [which includes support for PEP
    3333, which declares it covers WSGI version 1.0.1 but still mandates
    the wsgi.version (1, 0)] and ('u', 0), an experimental unicode
    version. You may create and register your own experimental versions
    of the WSGI protocol by adding custom classes to the
    cheroot.server.wsgi_gateways dict.
    The provided options are (1, 0) [which includes support for PEP 3333,
    which declares it covers WSGI version 1.0.1 but still mandates the
    wsgi.version (1, 0)] and ('u', 0), an experimental unicode version.
    You may create and register your own experimental versions of the WSGI
    protocol by adding custom classes to the cheroot.server.wsgi_gateways dict.
    """

    peercreds = False

@@ -195,8 +184,7 @@ class Server(ServerAdapter):
    def bind_addr(self):
        """Return bind address.

        A (host, port) tuple for TCP sockets or a str for Unix domain
        sockets.
        A (host, port) tuple for TCP sockets or a str for Unix domain sockts.
        """
        if self.socket_file:
            return self.socket_file

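Per the `bind_addr` property above, setting `socket_file` switches the builtin server onto a Unix domain socket. A configuration sketch (the path is hypothetical)::

    import cherrypy

    cherrypy.config.update({
        'server.socket_file': '/tmp/tautulli.sock',
    })
    # With socket_file set, socket_host/socket_port are ignored and
    # cherrypy.server.bind_addr returns the path, not a (host, port) tuple.
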
@@ -1,7 +1,7 @@
"""CherryPy tools. A "tool" is any helper, adapted to CP.

Tools are usually designed to be used in a variety of ways (although
some may only offer one if they choose):
Tools are usually designed to be used in a variety of ways (although some
may only offer one if they choose):

Library calls
    All tools are callables that can be used wherever needed.

@@ -48,10 +48,10 @@ _attr_error = (


class Tool(object):

    """A registered function for use with CherryPy request-processing hooks.

    help(tool.callable) should give you more information about this
    Tool.
    help(tool.callable) should give you more information about this Tool.
    """

    namespace = 'tools'

@@ -135,8 +135,8 @@ class Tool(object):
    def _setup(self):
        """Hook this tool into cherrypy.request.

        The standard CherryPy request object will automatically call
        this method when the tool is "turned on" in config.
        The standard CherryPy request object will automatically call this
        method when the tool is "turned on" in config.
        """
        conf = self._merged_args()
        p = conf.pop('priority', None)

@@ -147,15 +147,15 @@ class Tool(object):


class HandlerTool(Tool):

    """Tool which is called 'before main', that may skip normal handlers.

    If the tool successfully handles the request (by setting
    response.body), if should return True. This will cause CherryPy to
    skip any 'normal' page handler. If the tool did not handle the
    request, it should return False to tell CherryPy to continue on and
    call the normal page handler. If the tool is declared AS a page
    handler (see the 'handler' method), returning False will raise
    NotFound.
    If the tool successfully handles the request (by setting response.body),
    if should return True. This will cause CherryPy to skip any 'normal' page
    handler. If the tool did not handle the request, it should return False
    to tell CherryPy to continue on and call the normal page handler. If the
    tool is declared AS a page handler (see the 'handler' method), returning
    False will raise NotFound.
    """

    def __init__(self, callable, name=None):

@@ -185,8 +185,8 @@ class HandlerTool(Tool):
    def _setup(self):
        """Hook this tool into cherrypy.request.

        The standard CherryPy request object will automatically call
        this method when the tool is "turned on" in config.
        The standard CherryPy request object will automatically call this
        method when the tool is "turned on" in config.
        """
        conf = self._merged_args()
        p = conf.pop('priority', None)

@@ -197,6 +197,7 @@ class HandlerTool(Tool):


class HandlerWrapperTool(Tool):

    """Tool which wraps request.handler in a provided wrapper function.

    The 'newhandler' arg must be a handler wrapper function that takes a

@@ -231,6 +232,7 @@ class HandlerWrapperTool(Tool):


class ErrorTool(Tool):

    """Tool which is used to replace the default request.error_response."""

    def __init__(self, callable, name=None):

@@ -242,8 +244,8 @@ class ErrorTool(Tool):
    def _setup(self):
        """Hook this tool into cherrypy.request.

        The standard CherryPy request object will automatically call
        this method when the tool is "turned on" in config.
        The standard CherryPy request object will automatically call this
        method when the tool is "turned on" in config.
        """
        cherrypy.serving.request.error_response = self._wrapper


@@ -252,6 +254,7 @@ class ErrorTool(Tool):


class SessionTool(Tool):

    """Session Tool for CherryPy.

    sessions.locking

@@ -279,8 +282,8 @@ class SessionTool(Tool):
    def _setup(self):
        """Hook this tool into cherrypy.request.

        The standard CherryPy request object will automatically call
        this method when the tool is "turned on" in config.
        The standard CherryPy request object will automatically call this
        method when the tool is "turned on" in config.
        """
        hooks = cherrypy.serving.request.hooks


@@ -322,6 +325,7 @@ class SessionTool(Tool):


class XMLRPCController(object):

    """A Controller (page handler collection) for XML-RPC.

    To use it, have your controllers subclass this base class (it will

@@ -388,6 +392,7 @@ class SessionAuthTool(HandlerTool):


class CachingTool(Tool):

    """Caching Tool for CherryPy."""

    def _wrapper(self, **kwargs):

@@ -411,11 +416,11 @@ class CachingTool(Tool):


class Toolbox(object):

    """A collection of Tools.

    This object also functions as a config namespace handler for itself.
    Custom toolboxes should be added to each Application's toolboxes
    dict.
    Custom toolboxes should be added to each Application's toolboxes dict.
    """

    def __init__(self, namespace):

@@ -10,22 +10,19 @@ from cherrypy.lib import httputil, reprconf
class Application(object):
    """A CherryPy Application.

    Servers and gateways should not instantiate Request objects
    directly. Instead, they should ask an Application object for a
    request object.
    Servers and gateways should not instantiate Request objects directly.
    Instead, they should ask an Application object for a request object.

    An instance of this class may also be used as a WSGI callable (WSGI
    application object) for itself.
    An instance of this class may also be used as a WSGI callable
    (WSGI application object) for itself.
    """

    root = None
    """The top-most container of page handlers for this app.

    Handlers should be arranged in a hierarchy of attributes, matching
    the expected URI hierarchy; the default dispatcher then searches
    this hierarchy for a matching handler. When using a dispatcher other
    than the default, this value may be None.
    """
    """The top-most container of page handlers for this app. Handlers should
    be arranged in a hierarchy of attributes, matching the expected URI
    hierarchy; the default dispatcher then searches this hierarchy for a
    matching handler. When using a dispatcher other than the default,
    this value may be None."""

    config = {}
    """A dict of {path: pathconf} pairs, where 'pathconf' is itself a dict

@@ -35,16 +32,10 @@ class Application(object):
    toolboxes = {'tools': cherrypy.tools}

    log = None
    """A LogManager instance.

    See _cplogging.
    """
    """A LogManager instance. See _cplogging."""

    wsgiapp = None
    """A CPWSGIApp instance.

    See _cpwsgi.
    """
    """A CPWSGIApp instance. See _cpwsgi."""

    request_class = _cprequest.Request
    response_class = _cprequest.Response

@@ -91,15 +82,12 @@ class Application(object):
    def script_name(self):  # noqa: D401; irrelevant for properties
        """The URI "mount point" for this app.

        A mount point is that portion of the URI which is constant for
        all URIs that are serviced by this application; it does not
        include scheme, host, or proxy ("virtual host") portions of the
        URI.
        A mount point is that portion of the URI which is constant for all URIs
        that are serviced by this application; it does not include scheme,
        host, or proxy ("virtual host") portions of the URI.

        For example, if script_name is "/my/cool/app", then the URL "

        http://www.example.com/my/cool/app/page1"
        might be handled by a
        For example, if script_name is "/my/cool/app", then the URL
        "http://www.example.com/my/cool/app/page1" might be handled by a
        "page1" method on the root object.

        The value of script_name MUST NOT end in a slash. If the script_name

@@ -183,9 +171,9 @@ class Application(object):
class Tree(object):
    """A registry of CherryPy applications, mounted at diverse points.

    An instance of this class may also be used as a WSGI callable (WSGI
    application object), in which case it dispatches to all mounted
    apps.
    An instance of this class may also be used as a WSGI callable
    (WSGI application object), in which case it dispatches to all
    mounted apps.
    """

    apps = {}

@@ -1,10 +1,10 @@
"""WSGI interface (see PEP 333 and 3333).

Note that WSGI environ keys and values are 'native strings'; that is,
whatever the type of "" is. For Python 2, that's a byte string; for
Python 3, it's a unicode string. But PEP 3333 says: "even if Python's
str type is actually Unicode "under the hood", the content of native
strings must still be translatable to bytes via the Latin-1 encoding!"
whatever the type of "" is. For Python 2, that's a byte string; for Python 3,
it's a unicode string. But PEP 3333 says: "even if Python's str type is
actually Unicode "under the hood", the content of native strings must
still be translatable to bytes via the Latin-1 encoding!"
"""

import sys as _sys

@@ -34,6 +34,7 @@ def downgrade_wsgi_ux_to_1x(environ):


class VirtualHost(object):

    """Select a different WSGI application based on the Host header.

    This can be useful when running multiple sites within one CP server.

@@ -55,10 +56,7 @@ class VirtualHost(object):
        cherrypy.tree.graft(vhost)
    """
    default = None
    """Required.

    The default WSGI application.
    """
    """Required. The default WSGI application."""

    use_x_forwarded_host = True
    """If True (the default), any "X-Forwarded-Host"

@@ -67,12 +65,11 @@ class VirtualHost(object):

    domains = {}
    """A dict of {host header value: application} pairs.

    The incoming "Host" request header is looked up in this dict, and,
    if a match is found, the corresponding WSGI application will be
    called instead of the default. Note that you often need separate
    entries for "example.com" and "www.example.com". In addition, "Host"
    headers may contain the port number.
    The incoming "Host" request header is looked up in this dict,
    and, if a match is found, the corresponding WSGI application
    will be called instead of the default. Note that you often need
    separate entries for "example.com" and "www.example.com".
    In addition, "Host" headers may contain the port number.
    """

    def __init__(self, default, domains=None, use_x_forwarded_host=True):

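A sketch of the `domains` mapping in use (root classes and host names hypothetical)::

    import cherrypy

    class MainRoot:
        @cherrypy.expose
        def index(self):
            return 'default site'

    class WwwRoot:
        @cherrypy.expose
        def index(self):
            return 'www site'

    vhost = cherrypy._cpwsgi.VirtualHost(
        cherrypy.Application(MainRoot()),
        domains={'www.example.com': cherrypy.Application(WwwRoot())},
    )
    cherrypy.tree.graft(vhost)
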
@@ -92,6 +89,7 @@ class VirtualHost(object):


class InternalRedirector(object):

    """WSGI middleware that handles raised cherrypy.InternalRedirect."""

    def __init__(self, nextapp, recursive=False):

@@ -139,6 +137,7 @@ class InternalRedirector(object):


class ExceptionTrapper(object):

    """WSGI middleware that traps exceptions."""

    def __init__(self, nextapp, throws=(KeyboardInterrupt, SystemExit)):

@@ -227,6 +226,7 @@ class _TrappedResponse(object):


class AppResponse(object):

    """WSGI response iterable for CherryPy applications."""

    def __init__(self, environ, start_response, cpapp):

@@ -277,10 +277,7 @@ class AppResponse(object):
        return next(self.iter_response)

    def close(self):
        """Close and de-reference the current request and response.

        (Core)
        """
        """Close and de-reference the current request and response. (Core)"""
        streaming = _cherrypy.serving.response.stream
        self.cpapp.release_serving()


@@ -383,20 +380,18 @@ class AppResponse(object):


class CPWSGIApp(object):

    """A WSGI application object for a CherryPy Application."""

    pipeline = [
        ('ExceptionTrapper', ExceptionTrapper),
        ('InternalRedirector', InternalRedirector),
    ]
    """A list of (name, wsgiapp) pairs.

    Each 'wsgiapp' MUST be a constructor that takes an initial,
    positional 'nextapp' argument, plus optional keyword arguments, and
    returns a WSGI application (that takes environ and start_response
    arguments). The 'name' can be any you choose, and will correspond to
    keys in self.config.
    """
    """A list of (name, wsgiapp) pairs. Each 'wsgiapp' MUST be a
    constructor that takes an initial, positional 'nextapp' argument,
    plus optional keyword arguments, and returns a WSGI application
    (that takes environ and start_response arguments). The 'name' can
    be any you choose, and will correspond to keys in self.config."""

    head = None
    """Rather than nest all apps in the pipeline on each call, it's only

@@ -404,12 +399,9 @@ class CPWSGIApp(object):
    this to None again if you change self.pipeline after calling self."""

    config = {}
    """A dict whose keys match names listed in the pipeline.

    Each value is a further dict which will be passed to the
    corresponding named WSGI callable (from the pipeline) as keyword
    arguments.
    """
    """A dict whose keys match names listed in the pipeline. Each
    value is a further dict which will be passed to the corresponding
    named WSGI callable (from the pipeline) as keyword arguments."""

    response_class = AppResponse
    """The class to instantiate and return as the next app in the WSGI chain.

@@ -425,8 +417,8 @@ class CPWSGIApp(object):
    def tail(self, environ, start_response):
        """WSGI application callable for the actual CherryPy application.

        You probably shouldn't call this; call self.__call__ instead, so
        that any WSGI middleware in self.pipeline can run first.
        You probably shouldn't call this; call self.__call__ instead,
        so that any WSGI middleware in self.pipeline can run first.
        """
        return self.response_class(environ, start_response, self.cpapp)


@@ -1,7 +1,7 @@
"""WSGI server interface (see PEP 333).
"""
WSGI server interface (see PEP 333).

This adds some CP-specific bits to the framework-agnostic cheroot
package.
This adds some CP-specific bits to the framework-agnostic cheroot package.
"""
import sys

@@ -35,11 +35,10 @@ class CPWSGIHTTPRequest(cheroot.server.HTTPRequest):
class CPWSGIServer(cheroot.wsgi.Server):
    """Wrapper for cheroot.wsgi.Server.

    cheroot has been designed to not reference CherryPy in any way, so
    that it can be used in other frameworks and applications. Therefore,
    we wrap it here, so we can set our own mount points from
    cherrypy.tree and apply some attributes from config ->
    cherrypy.server -> wsgi.Server.
    cheroot has been designed to not reference CherryPy in any way,
    so that it can be used in other frameworks and applications. Therefore,
    we wrap it here, so we can set our own mount points from cherrypy.tree
    and apply some attributes from config -> cherrypy.server -> wsgi.Server.
    """

    fmt = 'CherryPy/{cherrypy.__version__} {cheroot.wsgi.Server.version}'

@@ -137,6 +137,7 @@ def popargs(*args, **kwargs):
    class Root:
        def index(self):
            #...

    """
    # Since keyword arg comes after *args, we have to process it ourselves
    # for lower versions of python.

|
|||
If it does not start with a slash, this returns
|
||||
(base + script_name [+ request.path_info] + path + qs).
|
||||
|
||||
If script_name is None, cherrypy.request will be used to find a
|
||||
script_name, if available.
|
||||
If script_name is None, cherrypy.request will be used
|
||||
to find a script_name, if available.
|
||||
|
||||
If base is None, cherrypy.request.base will be used (if available).
|
||||
Note that you can use cherrypy.tools.proxy to change this.
|
||||
|
||||
Finally, note that this function can be used to obtain an absolute
|
||||
URL for the current request path (minus the querystring) by passing
|
||||
no args. If you call url(qs=cherrypy.request.query_string), you
|
||||
should get the original browser URL (assuming no internal
|
||||
redirections).
|
||||
Finally, note that this function can be used to obtain an absolute URL
|
||||
for the current request path (minus the querystring) by passing no args.
|
||||
If you call url(qs=cherrypy.request.query_string), you should get the
|
||||
original browser URL (assuming no internal redirections).
|
||||
|
||||
If relative is None or not provided, request.app.relative_urls will
|
||||
be used (if available, else False). If False, the output will be an
|
||||
|
@ -320,8 +320,8 @@ def normalize_path(path):
|
|||
class _ClassPropertyDescriptor(object):
|
||||
"""Descript for read-only class-based property.
|
||||
|
||||
Turns a classmethod-decorated func into a read-only property of that
|
||||
class type (means the value cannot be set).
|
||||
Turns a classmethod-decorated func into a read-only property of that class
|
||||
type (means the value cannot be set).
|
||||
"""
|
||||
|
||||
def __init__(self, fget, fset=None):
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
"""JSON support.
|
||||
"""
|
||||
JSON support.
|
||||
|
||||
Expose preferred json module as json and provide encode/decode
|
||||
convenience functions.
|
||||
|
|
|
@ -6,8 +6,8 @@ def is_iterator(obj):
|
|||
|
||||
(i.e. like a generator).
|
||||
|
||||
This will return False for objects which are iterable, but not
|
||||
iterators themselves.
|
||||
This will return False for objects which are iterable,
|
||||
but not iterators themselves.
|
||||
"""
|
||||
from types import GeneratorType
|
||||
if isinstance(obj, GeneratorType):
|
||||
|
|
|
@@ -18,6 +18,7 @@ as the credentials store::
    'tools.auth_basic.accept_charset': 'UTF-8',
    }
    app_config = { '/' : basic_auth }

"""

import binascii

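Completing the config fragment above, a sketch that wires `checkpassword_dict` into an application (credentials hypothetical)::

    import cherrypy
    from cherrypy.lib import auth_basic

    checkpassword = auth_basic.checkpassword_dict({'alice': 'secret'})
    basic_auth = {
        'tools.auth_basic.on': True,
        'tools.auth_basic.realm': 'earth',
        'tools.auth_basic.checkpassword': checkpassword,
        'tools.auth_basic.accept_charset': 'UTF-8',
    }
    app_config = {'/': basic_auth}
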
@@ -55,7 +55,7 @@ def TRACE(msg):


def get_ha1_dict_plain(user_password_dict):
    """Return a get_ha1 function which obtains a plaintext password from a
    """Returns a get_ha1 function which obtains a plaintext password from a
    dictionary of the form: {username : password}.

    If you want a simple dictionary-based authentication scheme, with plaintext

@@ -72,7 +72,7 @@ def get_ha1_dict_plain(user_password_dict):


def get_ha1_dict(user_ha1_dict):
    """Return a get_ha1 function which obtains a HA1 password hash from a
    """Returns a get_ha1 function which obtains a HA1 password hash from a
    dictionary of the form: {username : HA1}.

    If you want a dictionary-based authentication scheme, but with

@@ -87,7 +87,7 @@ def get_ha1_dict(user_ha1_dict):


def get_ha1_file_htdigest(filename):
    """Return a get_ha1 function which obtains a HA1 password hash from a
    """Returns a get_ha1 function which obtains a HA1 password hash from a
    flat file with lines of the same format as that produced by the Apache
    htdigest utility. For example, for realm 'wonderland', username 'alice',
    and password '4x5istwelve', the htdigest line would be::

@@ -135,7 +135,7 @@ def synthesize_nonce(s, key, timestamp=None):


def H(s):
    """The hash function H."""
    """The hash function H"""
    return md5_hex(s)


@@ -259,11 +259,10 @@ class HttpDigestAuthorization(object):
            return False

    def is_nonce_stale(self, max_age_seconds=600):
        """Return True if a validated nonce is stale.

        The nonce contains a timestamp in plaintext and also a secure
        hash of the timestamp. You should first validate the nonce to
        ensure the plaintext timestamp is not spoofed.
        """Returns True if a validated nonce is stale. The nonce contains a
        timestamp in plaintext and also a secure hash of the timestamp.
        You should first validate the nonce to ensure the plaintext
        timestamp is not spoofed.
        """
        try:
            timestamp, hashpart = self.nonce.split(':', 1)

@@ -276,10 +275,7 @@ class HttpDigestAuthorization(object):
        return True

    def HA2(self, entity_body=''):
        """Return the H(A2) string.

        See :rfc:`2617` section 3.2.2.3.
        """
        """Returns the H(A2) string. See :rfc:`2617` section 3.2.2.3."""
        # RFC 2617 3.2.2.3
        # If the "qop" directive's value is "auth" or is unspecified,
        # then A2 is:

@@ -310,6 +306,7 @@ class HttpDigestAuthorization(object):
        4.3. This refers to the entity the user agent sent in the
        request which has the Authorization header. Typically GET
        requests don't have an entity, and POST requests do.

        """
        ha2 = self.HA2(entity_body)
        # Request-Digest -- RFC 2617 3.2.2.1

@@ -398,6 +395,7 @@ def digest_auth(realm, get_ha1, key, debug=False, accept_charset='utf-8'):
    key
        A secret string known only to the server, used in the synthesis
        of nonces.

    """
    request = cherrypy.serving.request


@@ -449,7 +447,9 @@ def digest_auth(realm, get_ha1, key, debug=False, accept_charset='utf-8'):


def _respond_401(realm, key, accept_charset, debug, **kwargs):
    """Respond with 401 status and a WWW-Authenticate header."""
    """
    Respond with 401 status and a WWW-Authenticate header
    """
    header = www_authenticate(
        realm, key,
        accept_charset=accept_charset,

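Digest auth is wired analogously to basic auth, with a `get_ha1_*` helper supplying the hash and `key` feeding `synthesize_nonce`; a sketch reusing the realm and user from the htdigest example above::

    import cherrypy
    from cherrypy.lib import auth_digest

    digest_auth = {
        'tools.auth_digest.on': True,
        'tools.auth_digest.realm': 'wonderland',
        'tools.auth_digest.get_ha1': auth_digest.get_ha1_dict_plain(
            {'alice': '4x5istwelve'}),
        'tools.auth_digest.key': 'a565c27146791cfb',  # server-private secret
        'tools.auth_digest.accept_charset': 'UTF-8',
    }
    app_config = {'/': digest_auth}
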
@@ -42,6 +42,7 @@ from cherrypy.lib import cptools, httputil


class Cache(object):

    """Base class for Cache implementations."""

    def get(self):

@@ -63,16 +64,17 @@ class Cache(object):

# ------------------------------ Memory Cache ------------------------------- #
class AntiStampedeCache(dict):

    """A storage system for cached items which reduces stampede collisions."""

    def wait(self, key, timeout=5, debug=False):
        """Return the cached value for the given key, or None.

        If timeout is not None, and the value is already being
        calculated by another thread, wait until the given timeout has
        elapsed. If the value is available before the timeout expires,
        it is returned. If not, None is returned, and a sentinel placed
        in the cache to signal other threads to wait.
        If timeout is not None, and the value is already
        being calculated by another thread, wait until the given timeout has
        elapsed. If the value is available before the timeout expires, it is
        returned. If not, None is returned, and a sentinel placed in the cache
        to signal other threads to wait.

        If timeout is None, no waiting is performed nor sentinels used.
        """

@@ -125,6 +127,7 @@ class AntiStampedeCache(dict):


class MemoryCache(Cache):

    """An in-memory cache for varying response content.

    Each key in self.store is a URI, and each value is an AntiStampedeCache.

@@ -378,10 +381,7 @@ def get(invalid_methods=('POST', 'PUT', 'DELETE'), debug=False, **kwargs):


def tee_output():
    """Tee response output to cache storage.

    Internal.
    """
    """Tee response output to cache storage. Internal."""
    # Used by CachingTool by attaching to request.hooks

    request = cherrypy.serving.request

@@ -441,6 +441,7 @@ def expires(secs=0, force=False, debug=False):
    * Expires

    If any are already present, none of the above response headers are set.

    """

    response = cherrypy.serving.response

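The `expires` tool documented above is switched on per path like any other tool; a config sketch (path and lifetime hypothetical)::

    import cherrypy

    config = {
        '/static': {
            'tools.expires.on': True,
            'tools.expires.secs': 3600,  # emit caching headers, one hour
        },
    }
    # cherrypy.quickstart(Root(), '/', config)  # Root is hypothetical
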
@@ -22,7 +22,7 @@ it will call ``serve()`` for you.

import re
import sys
import html
import cgi
import os
import os.path
import urllib.parse

@@ -352,9 +352,9 @@ class CoverStats(object):
            buffer.append((lineno, line))
            if empty_the_buffer:
                for lno, pastline in buffer:
                    yield template % (lno, html.escape(pastline))
                    yield template % (lno, cgi.escape(pastline))
                buffer = []
            yield template % (lineno, html.escape(line))
            yield template % (lineno, cgi.escape(line))

    @cherrypy.expose
    def report(self, name):

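Context for the hunk above: `cgi.escape()` was removed in Python 3.8, and `html.escape()` is the stdlib replacement; note that it escapes quote characters by default where `cgi.escape()` did not::

    import html

    html.escape('<b>&</b>')           # '&lt;b&gt;&amp;&lt;/b&gt;'
    html.escape('"hi"', quote=False)  # leaves quotes alone, matching
                                      # cgi.escape's old default
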
@@ -184,6 +184,7 @@ To report statistics::
To format statistics reports::

    See 'Reporting', above.

"""

import logging

@@ -253,6 +254,7 @@ def proc_time(s):


class ByteCountWrapper(object):

    """Wraps a file-like object, counting the number of bytes read."""

    def __init__(self, rfile):

@@ -305,6 +307,7 @@ def _get_threading_ident():


class StatsTool(cherrypy.Tool):

    """Record various information about the current request."""

    def __init__(self):

@@ -313,8 +316,8 @@ class StatsTool(cherrypy.Tool):
    def _setup(self):
        """Hook this tool into cherrypy.request.

        The standard CherryPy request object will automatically call
        this method when the tool is "turned on" in config.
        The standard CherryPy request object will automatically call this
        method when the tool is "turned on" in config.
        """
        if appstats.get('Enabled', False):
            cherrypy.Tool._setup(self)

@@ -94,8 +94,8 @@ def validate_etags(autotags=False, debug=False):
def validate_since():
    """Validate the current Last-Modified against If-Modified-Since headers.

    If no code has set the Last-Modified response header, then no
    validation will be performed.
    If no code has set the Last-Modified response header, then no validation
    will be performed.
    """
    response = cherrypy.serving.response
    lastmod = response.headers.get('Last-Modified')

@@ -123,9 +123,9 @@ def validate_since():
def allow(methods=None, debug=False):
    """Raise 405 if request.method not in methods (default ['GET', 'HEAD']).

    The given methods are case-insensitive, and may be in any order. If
    only one method is allowed, you may supply a single string; if more
    than one, supply a list of strings.
    The given methods are case-insensitive, and may be in any order.
    If only one method is allowed, you may supply a single string;
    if more than one, supply a list of strings.

    Regardless of whether the current method is allowed or not, this
    also emits an 'Allow' response header, containing the given methods.

@@ -154,23 +154,22 @@ def proxy(base=None, local='X-Forwarded-Host', remote='X-Forwarded-For',
          scheme='X-Forwarded-Proto', debug=False):
    """Change the base URL (scheme://host[:port][/path]).

    For running a CP server behind Apache, lighttpd, or other HTTP
    server.
    For running a CP server behind Apache, lighttpd, or other HTTP server.

    For Apache and lighttpd, you should leave the 'local' argument at
    the default value of 'X-Forwarded-Host'. For Squid, you probably
    want to set tools.proxy.local = 'Origin'.
    For Apache and lighttpd, you should leave the 'local' argument at the
    default value of 'X-Forwarded-Host'. For Squid, you probably want to set
    tools.proxy.local = 'Origin'.

    If you want the new request.base to include path info (not just the
    host), you must explicitly set base to the full base path, and ALSO
    set 'local' to '', so that the X-Forwarded-Host request header
    (which never includes path info) does not override it. Regardless,
    the value for 'base' MUST NOT end in a slash.
    If you want the new request.base to include path info (not just the host),
    you must explicitly set base to the full base path, and ALSO set 'local'
    to '', so that the X-Forwarded-Host request header (which never includes
    path info) does not override it. Regardless, the value for 'base' MUST
    NOT end in a slash.

    cherrypy.request.remote.ip (the IP address of the client) will be
    rewritten if the header specified by the 'remote' arg is valid. By
    default, 'remote' is set to 'X-Forwarded-For'. If you do not want to
    rewrite remote.ip, set the 'remote' arg to an empty string.
    rewritten if the header specified by the 'remote' arg is valid.
    By default, 'remote' is set to 'X-Forwarded-For'. If you do not
    want to rewrite remote.ip, set the 'remote' arg to an empty string.
    """

    request = cherrypy.serving.request

@@ -218,8 +217,8 @@ def proxy(base=None, local='X-Forwarded-Host', remote='X-Forwarded-For',
def ignore_headers(headers=('Range',), debug=False):
    """Delete request headers whose field names are included in 'headers'.

    This is a useful tool for working behind certain HTTP servers; for
    example, Apache duplicates the work that CP does for 'Range'
    This is a useful tool for working behind certain HTTP servers;
    for example, Apache duplicates the work that CP does for 'Range'
    headers, and will doubly-truncate the response.
    """
    request = cherrypy.serving.request

@@ -282,6 +281,7 @@ def referer(pattern, accept=True, accept_missing=False, error=403,


class SessionAuth(object):

    """Assert that the user is logged in."""

    session_key = 'username'

@@ -319,10 +319,7 @@ Message: %(error_msg)s
</body></html>""") % vars()).encode('utf-8')

    def do_login(self, username, password, from_page='..', **kwargs):
        """Login.

        May raise redirect, or return True if request handled.
        """
        """Login. May raise redirect, or return True if request handled."""
        response = cherrypy.serving.response
        error_msg = self.check_username_and_password(username, password)
        if error_msg:

@@ -339,10 +336,7 @@ Message: %(error_msg)s
            raise cherrypy.HTTPRedirect(from_page or '/')

    def do_logout(self, from_page='..', **kwargs):
        """Logout.

        May raise redirect, or return True if request handled.
        """
        """Logout. May raise redirect, or return True if request handled."""
        sess = cherrypy.session
        username = sess.get(self.session_key)
        sess[self.session_key] = None

@@ -352,9 +346,7 @@ Message: %(error_msg)s
            raise cherrypy.HTTPRedirect(from_page)

    def do_check(self):
        """Assert username.

        Raise redirect, or return True if request handled.
        """Assert username. Raise redirect, or return True if request handled.
        """
        sess = cherrypy.session
        request = cherrypy.serving.request

@@ -416,7 +408,8 @@ def session_auth(**kwargs):

    Any attribute of the SessionAuth class may be overridden
    via a keyword arg to this function:
    """ + '\n' + '\n '.join(

    """ + '\n '.join(
        '{!s}: {!s}'.format(k, type(getattr(SessionAuth, k)).__name__)
        for k in dir(SessionAuth)
        if not k.startswith('__')

@@ -497,8 +490,8 @@ def trailing_slash(missing=True, extra=False, status=None, debug=False):
def flatten(debug=False):
    """Wrap response.body in a generator that recursively iterates over body.

    This allows cherrypy.response.body to consist of 'nested
    generators'; that is, a set of generators that yield generators.
    This allows cherrypy.response.body to consist of 'nested generators';
    that is, a set of generators that yield generators.
    """
    def flattener(input):
        numchunks = 0

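The `proxy` tool above is what a deployment behind nginx or Apache would enable; a global-config sketch (header names are the documented defaults)::

    import cherrypy

    cherrypy.config.update({
        'tools.proxy.on': True,
        'tools.proxy.local': 'X-Forwarded-Host',
        'tools.proxy.scheme': 'X-Forwarded-Proto',
        # 'tools.proxy.base': 'https://example.com',  # optional fixed base
    })
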
@@ -261,7 +261,9 @@ class ResponseEncoder:


def prepare_iter(value):
    """Ensure response body is iterable and resolves to False when empty."""
    """
    Ensure response body is iterable and resolves to False when empty.
    """
    if isinstance(value, text_or_bytes):
        # strings get wrapped in a list because iterating over a single
        # item list is much faster than iterating over every character

@@ -358,6 +360,7 @@ def gzip(compress_level=5, mime_types=['text/html', 'text/plain'],
    * No 'gzip' or 'x-gzip' is present in the Accept-Encoding header
    * No 'gzip' or 'x-gzip' with a qvalue > 0 is present
    * The 'identity' value is given with a qvalue > 0.

    """
    request = cherrypy.serving.request
    response = cherrypy.serving.response