Mirror of https://github.com/Tautulli/Tautulli.git (synced 2025-08-22 22:23:36 -07:00)

Merge branch 'nightly' into dependabot/pip/nightly/importlib-resources-5.10.0

Commit d94ee41d82: 27 changed files with 236 additions and 66 deletions
.github/workflows/issues-stale.yml (4 changes)

@@ -10,7 +10,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Stale
-        uses: actions/stale@v5
+        uses: actions/stale@v6
         with:
           stale-issue-message: >
             This issue is stale because it has been open for 30 days with no activity.

@@ -30,7 +30,7 @@ jobs:
           days-before-close: 5

       - name: Invalid Template
-        uses: actions/stale@v5
+        uses: actions/stale@v6
         with:
           stale-issue-message: >
             Invalid issues template.
.github/workflows/publish-docker.yml (28 changes)

@@ -13,29 +13,29 @@ jobs:
     if: ${{ !contains(github.event.head_commit.message, '[skip ci]') }}
     steps:
       - name: Checkout Code
-        uses: actions/checkout@v3.0.2
+        uses: actions/checkout@v3.1.0

       - name: Prepare
        id: prepare
        run: |
          if [[ $GITHUB_REF == refs/tags/* ]]; then
-           echo ::set-output name=tag::${GITHUB_REF#refs/tags/}
+           echo "tag=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
          elif [[ $GITHUB_REF == refs/heads/master ]]; then
-           echo ::set-output name=tag::latest
+           echo "tag=latest" >> $GITHUB_OUTPUT
          else
-           echo ::set-output name=tag::${GITHUB_REF#refs/heads/}
+           echo "tag=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT
          fi
          if [[ $GITHUB_REF == refs/tags/*-beta ]]; then
-           echo ::set-output name=branch::beta
+           echo "branch=beta" >> $GITHUB_OUTPUT
          elif [[ $GITHUB_REF == refs/tags/* ]]; then
-           echo ::set-output name=branch::master
+           echo "branch=master" >> $GITHUB_OUTPUT
          else
-           echo ::set-output name=branch::${GITHUB_REF#refs/heads/}
+           echo "branch=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT
          fi
-         echo ::set-output name=commit::${GITHUB_SHA}
-         echo ::set-output name=build_date::$(date -u +'%Y-%m-%dT%H:%M:%SZ')
-         echo ::set-output name=docker_platforms::linux/amd64,linux/arm64/v8,linux/arm/v7,linux/arm/v6
-         echo ::set-output name=docker_image::${{ secrets.DOCKER_REPO }}/tautulli
+         echo "commit=${GITHUB_SHA}" >> $GITHUB_OUTPUT
+         echo "build_date=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_OUTPUT
+         echo "docker_platforms=linux/amd64,linux/arm64/v8,linux/arm/v7,linux/arm/v6" >> $GITHUB_OUTPUT
+         echo "docker_image=${{ secrets.DOCKER_REPO }}/tautulli" >> $GITHUB_OUTPUT

      - name: Set Up QEMU
        uses: docker/setup-qemu-action@v2

@@ -47,7 +47,7 @@ jobs:
          version: latest

      - name: Cache Docker Layers
-       uses: actions/cache@v3.0.8
+       uses: actions/cache@v3.0.11
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}

@@ -102,9 +102,9 @@ jobs:
        run: |
          failures=(neutral, skipped, timed_out, action_required)
          if [[ ${array[@]} =~ $WORKFLOW_CONCLUSION ]]; then
-           echo ::set-output name=status::failure
+           echo "status=failure" >> $GITHUB_OUTPUT
          else
-           echo ::set-output name=status::$WORKFLOW_CONCLUSION
+           echo "status=$WORKFLOW_CONCLUSION" >> $GITHUB_OUTPUT
          fi

      - name: Post Status to Discord
.github/workflows/publish-installers.yml (27 changes)

@@ -24,7 +24,7 @@ jobs:

    steps:
      - name: Checkout Code
-       uses: actions/checkout@v3.0.2
+       uses: actions/checkout@v3.1.0

      - name: Set Release Version
        id: get_version

@@ -33,14 +33,14 @@ jobs:
          if [[ $GITHUB_REF == refs/tags/* ]]; then
            echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV
            VERSION_NSIS=${GITHUB_REF#refs/tags/v}.1
-           echo ::set-output name=VERSION_NSIS::${VERSION_NSIS/%-beta.1/.0}
-           echo ::set-output name=VERSION::${GITHUB_REF#refs/tags/v}
-           echo ::set-output name=RELEASE_VERSION::${GITHUB_REF#refs/tags/}
+           echo "VERSION_NSIS=${VERSION_NSIS/%-beta.1/.0}" >> $GITHUB_OUTPUT
+           echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT
+           echo "RELEASE_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
          else
            echo "VERSION=0.0.0" >> $GITHUB_ENV
-           echo ::set-output name=VERSION_NSIS::0.0.0.0
-           echo ::set-output name=VERSION::0.0.0
-           echo ::set-output name=RELEASE_VERSION::${GITHUB_SHA::7}
+           echo "VERSION_NSIS=0.0.0.0" >> $GITHUB_OUTPUT
+           echo "VERSION=0.0.0" >> $GITHUB_OUTPUT
+           echo "RELEASE_VERSION=${GITHUB_SHA::7}" >> $GITHUB_OUTPUT
          fi
          if [[ $GITHUB_REF == refs/tags/*-beta ]]; then
            echo "beta" > branch.txt

@@ -52,7 +52,7 @@ jobs:
          echo $GITHUB_SHA > version.txt

      - name: Set Up Python
-       uses: actions/setup-python@v4.2.0
+       uses: actions/setup-python@v4.3.0
        with:
          python-version: '3.9'
          cache: pip

@@ -103,12 +103,12 @@ jobs:
        uses: technote-space/workflow-conclusion-action@v3.0

      - name: Checkout Code
-       uses: actions/checkout@v3.0.2
+       uses: actions/checkout@v3.1.0

      - name: Set Release Version
        id: get_version
        run: |
-         echo ::set-output name=RELEASE_VERSION::${GITHUB_REF#refs/tags/}
+         echo "RELEASE_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT

      - name: Download Installers
        if: env.WORKFLOW_CONCLUSION == 'success'

@@ -117,8 +117,9 @@ jobs:
      - name: Get Changelog
        id: get_changelog
        run: |
-         echo ::set-output name=CHANGELOG::"$( sed -n '/^## /{p; :loop n; p; /^## /q; b loop}' CHANGELOG.md \
+         CHANGELOG="$( sed -n '/^## /{p; :loop n; p; /^## /q; b loop}' CHANGELOG.md \
          | sed '$d' | sed '$d' | sed '$d' | sed ':a;N;$!ba;s/\n/%0A/g' )"
+         echo "CHANGELOG=${CHANGELOG}" >> $GITHUB_OUTPUT

      - name: Create Release
        uses: actions/create-release@v1

@@ -171,9 +172,9 @@ jobs:
        run: |
          failures=(neutral, skipped, timed_out, action_required)
          if [[ ${array[@]} =~ $WORKFLOW_CONCLUSION ]]; then
-           echo ::set-output name=status::failure
+           echo "status=failure" >> $GITHUB_OUTPUT
          else
-           echo ::set-output name=status::$WORKFLOW_CONCLUSION
+           echo "status=$WORKFLOW_CONCLUSION" >> $GITHUB_OUTPUT
          fi

      - name: Post Status to Discord
.github/workflows/publish-snap.yml (12 changes)

@@ -20,18 +20,18 @@ jobs:
          - armhf
    steps:
      - name: Checkout Code
-       uses: actions/checkout@v3.0.2
+       uses: actions/checkout@v3.1.0

      - name: Prepare
        id: prepare
        run: |
          git fetch --prune --unshallow --tags
          if [[ $GITHUB_REF == refs/tags/*-beta || $GITHUB_REF == refs/heads/beta ]]; then
-           echo ::set-output name=RELEASE::beta
+           echo "RELEASE=beta" >> $GITHUB_OUTPUT
          elif [[ $GITHUB_REF == refs/tags/* || $GITHUB_REF == refs/heads/master ]]; then
-           echo ::set-output name=RELEASE::stable
+           echo "RELEASE=stable" >> $GITHUB_OUTPUT
          else
-           echo ::set-output name=RELEASE::edge
+           echo "RELEASE=edge" >> $GITHUB_OUTPUT
          fi

      - name: Set Up QEMU

@@ -77,9 +77,9 @@ jobs:
        run: |
          failures=(neutral, skipped, timed_out, action_required)
          if [[ ${array[@]} =~ $WORKFLOW_CONCLUSION ]]; then
-           echo ::set-output name=status::failure
+           echo "status=failure" >> $GITHUB_OUTPUT
          else
-           echo ::set-output name=status::$WORKFLOW_CONCLUSION
+           echo "status=$WORKFLOW_CONCLUSION" >> $GITHUB_OUTPUT
          fi

      - name: Post Status to Discord
.github/workflows/pull-requests.yml (2 changes)

@@ -10,7 +10,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Code
-       uses: actions/checkout@v3.0.2
+       uses: actions/checkout@v3.1.0

      - name: Comment on Pull Request
        uses: mshick/add-pr-comment@v1
.github/workflows/submit-winget.yml (2 changes)

@@ -13,7 +13,7 @@ jobs:
      - name: Submit package to Windows Package Manager Community Repository
        run: |
          $wingetPackage = "Tautulli.Tautulli"
-         $gitToken = "${{ secrets.GITHUB_TOKEN }}"
+         $gitToken = "${{ secrets.WINGET_TOKEN }}"

          $github = Invoke-RestMethod -uri "https://api.github.com/repos/Tautulli/Tautulli/releases/latest"
          $installerUrl = $github | Select -ExpandProperty assets -First 1 | Where-Object -Property name -match "Tautulli-windows-.*-x64.exe" | Select -ExpandProperty browser_download_url
CHANGELOG.md (24 changes)

@@ -1,5 +1,29 @@
 # Changelog

+## v2.10.5 (2022-11-07)
+
+* Notifications:
+  * New: Added edition_title notification parameter. (#1838)
+  * Change: Track notifications link to MusicBrainz track instead of album.
+* Newsletters:
+  * New: Added months time frame for newsletters. (#1876)
+* UI:
+  * Fix: Broken link on library statistic cards. (#1852)
+  * Fix: Check for IPv6 host when generating QR code for app registration.
+  * Fix: Missing padding on condition operator dropdown on small screens.
+* Other:
+  * Fix: Launching browser when webserver is bound to IPv6.
+  * New: Tautulli can be installed via the Windows Package Manager (winget).
+  * Change: Separate stdout and stderr console logging. (#1874)
+* API:
+  * Fix: API not returning 400 response code.
+  * New: Added edition_title to get_metadata API response.
+  * New: Added collections to get_children_metadata API response.
+  * New: Added user_thumb to get_history API response.
+  * New: Validate custom notification conditions before saving notification agents. (#1846)
+  * Change: Fallback to parent_thumb for seasons in get_metadata API response.
+
+
 ## v2.10.4 (2022-09-05)

 * Activity:
@@ -9,12 +9,12 @@ All pull requests should be based on the `nightly` branch, to minimize cross merges.

 ### Python Code

 #### Compatibility
-The code should work with Python 3.6+. Note that Tautulli runs on many different platforms.
+The code should work with Python 3.7+. Note that Tautulli runs on many different platforms.

 Re-use existing code. Do not hesitate to add logging in your code. You can use the logger module `plexpy.logger.*` for this. Web requests are invoked via `plexpy.request.*` and derived ones. Use these methods to automatically add proper and meaningful error handling.

 #### Code conventions
-Although Tautulli did not adapt a code convention in the past, we try to follow the [PEP8](http://legacy.python.org/dev/peps/pep-0008/) conventions for future code. A short summary to remind you (copied from http://wiki.ros.org/PyStyleGuide):
+Although Tautulli did not adopt a code convention in the past, we try to follow [PEP8](http://legacy.python.org/dev/peps/pep-0008/) conventions for future code. A short summary to remind you (copied from http://wiki.ros.org/PyStyleGuide):

 * 4 space indentation
 * 80 characters per line
@@ -246,7 +246,7 @@ def main():
     # Start the background threads
     plexpy.start()

-    # Force the http port if neccessary
+    # Force the http port if necessary
     if args.port:
         plexpy.HTTP_PORT = args.port
         logger.info('Using forced web server port: %i', plexpy.HTTP_PORT)
@@ -122,6 +122,16 @@ select.form-control {
 #condition-widget .fa-minus {
     cursor: pointer;
 }
+#condition-widget .condition-operator-col {
+    padding-left: 0;
+    padding-right: 0;
+}
+@media (max-width: 767px) {
+    #condition-widget .condition-operator-col {
+        padding-left: 15px;
+        padding-right: 15px;
+    }
+}
 .react-selectize.root-node .react-selectize-control .react-selectize-placeholder {
     color: #eee !important;
 }
File diff suppressed because one or more lines are too long
@@ -58,7 +58,7 @@ DOCUMENTATION :: END

     getPlexPyURL = function () {
         var deferred = $.Deferred();
-        if (location.hostname !== "localhost" && location.hostname !== "127.0.0.1") {
+        if (location.hostname !== "localhost" && location.hostname !== "127.0.0.1" && location.hostname !== "[::1]") {
             deferred.resolve(location.href.split('/settings')[0]);
         } else {
             $.get('get_plexpy_url').then(function (url) {

@@ -74,7 +74,7 @@ DOCUMENTATION :: END
         var hostname = parser.hostname;
         var protocol = parser.protocol;

-        if (hostname === '127.0.0.1' || hostname === 'localhost') {
+        if (hostname === 'localhost' || hostname === '127.0.0.1' || hostname === '[::1]') {
            $('#api_qr_localhost').toggle(true);
            $('#api_qr_private').toggle(false);
        } else {
@@ -56,11 +56,12 @@
         <div class="form-group">
             <label for="time_frame">Time Frame</label>
             <div class="row">
-                <div class="col-md-4">
+                <div class="col-md-5">
                     <div class="input-group newsletter-time_frame">
                         <span class="input-group-addon form-control btn-dark inactive">Last</span>
                         <input type="number" class="form-control" id="newsletter_config_time_frame" name="newsletter_config_time_frame" value="${newsletter['config']['time_frame']}">
                         <select class="form-control" id="newsletter_config_time_frame_units" name="newsletter_config_time_frame_units">
+                            <option value="months" ${'selected' if newsletter['config']['time_frame_units'] == 'months' else ''}>months</option>
                             <option value="days" ${'selected' if newsletter['config']['time_frame_units'] == 'days' else ''}>days</option>
                             <option value="hours" ${'selected' if newsletter['config']['time_frame_units'] == 'hours' else ''}>hours</option>
                         </select>
@@ -1,11 +1,11 @@
 apscheduler==3.9.1
-importlib-metadata==4.11.4
+importlib-metadata==5.0.0
 importlib-resources==5.10.0
 pyinstaller==5.1
 pyopenssl==22.0.0
-pycryptodomex==3.14.1
+pycryptodomex==3.15.0

 pyobjc-framework-Cocoa==8.5; platform_system == "Darwin"
-pyobjc-core==8.5; platform_system == "Darwin"
+pyobjc-core==9.0; platform_system == "Darwin"

 pywin32==304; platform_system == "Windows"
@@ -429,7 +429,7 @@ def daemonize():

 def launch_browser(host, port, root):
     if not no_browser:
-        if host == '0.0.0.0':
+        if host in ('0.0.0.0', '::'):
             host = 'localhost'

         if CONFIG.ENABLE_HTTPS:
@@ -824,7 +824,7 @@ General optional parameters:

         if self._api_result_type == 'success' and not self._api_response_code:
             self._api_response_code = 200
-        elif self._api_result_type == 'error' and not self._api_response_code:
+        elif self._api_result_type == 'error' and self._api_response_code != 500:
             self._api_response_code = 400

         if not self._api_response_code:
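For context, a runnable sketch of the response-code fallback this hunk changes: an 'error' result now falls back to 400 unless a 500 was already set explicitly. The function below is a hypothetical standalone distillation, not the actual API class method.

    def resolve_response_code(result_type, response_code=None):
        # Sketch of the fallback above: success defaults to 200,
        # errors keep an explicit 500 and otherwise become 400.
        if result_type == 'success' and not response_code:
            response_code = 200
        elif result_type == 'error' and response_code != 500:
            response_code = 400
        return response_code or 200

    assert resolve_response_code('success') == 200
    assert resolve_response_code('error') == 400
    assert resolve_response_code('error', 500) == 500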
@@ -679,3 +679,8 @@ NEWSLETTER_PARAMETERS = [
         ]
     }
 ]
+
+
+NOTIFICATION_PARAMETERS_TYPES = {
+    parameter['value']: parameter['type'] for category in NOTIFICATION_PARAMETERS for parameter in category['parameters']
+}
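The new NOTIFICATION_PARAMETERS_TYPES table flattens the nested parameter list into a value-to-type lookup. A self-contained sketch of the same comprehension with toy stand-in data (the real NOTIFICATION_PARAMETERS entries carry more keys and values than shown here):

    # Toy stand-in for NOTIFICATION_PARAMETERS: categories holding parameter dicts.
    NOTIFICATION_PARAMETERS = [
        {'category': 'Stream Details',
         'parameters': [{'value': 'stream_duration', 'type': 'int'},
                        {'value': 'transcode_decision', 'type': 'str'}]},
        {'category': 'Source Metadata Details',
         'parameters': [{'value': 'edition_title', 'type': 'str'}]},
    ]

    # Same comprehension as the hunk above: {parameter value: parameter type}.
    NOTIFICATION_PARAMETERS_TYPES = {
        parameter['value']: parameter['type']
        for category in NOTIFICATION_PARAMETERS
        for parameter in category['parameters']
    }

    print(NOTIFICATION_PARAMETERS_TYPES)
    # {'stream_duration': 'int', 'transcode_decision': 'str', 'edition_title': 'str'}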
@@ -34,6 +34,7 @@ if plexpy.PYTHON2:
     import logger
     import pmsconnect
     import session
+    import users
 else:
     from plexpy import libraries
     from plexpy import common

@@ -43,6 +44,7 @@ else:
     from plexpy import logger
     from plexpy import pmsconnect
     from plexpy import session
+    from plexpy import users

 # Temporarily store update_metadata row ids in memory to prevent rating_key collisions
 _UPDATE_METADATA_IDS = {

@@ -103,6 +105,8 @@ class DataFactory(object):
                    'session_history.user',
                    '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
                     THEN users.username ELSE users.friendly_name END) AS friendly_name',
+                   'users.thumb AS user_thumb',
+                   'users.custom_avatar_url AS custom_thumb',
                    'platform',
                    'product',
                    'player',

@@ -161,6 +165,8 @@ class DataFactory(object):
                    'user',
                    '(CASE WHEN friendly_name IS NULL OR TRIM(friendly_name) = "" \
                     THEN user ELSE friendly_name END) AS friendly_name',
+                   'NULL AS user_thumb',
+                   'NULL AS custom_thumb',
                    'platform',
                    'product',
                    'player',

@@ -244,7 +250,18 @@ class DataFactory(object):
             }

         rows = []
+
+        users_lookup = {}
+
         for item in history:
+            if item['state']:
+                # Get user thumb from database for current activity
+                if not users_lookup:
+                    # Cache user lookup
+                    users_lookup = {u['user_id']: u['thumb'] for u in users.Users().get_users()}
+
+                item['user_thumb'] = users_lookup.get(item['user_id'])
+
             filter_duration += int(item['duration'])

             if item['media_type'] == 'episode' and item['parent_thumb']:

@@ -267,6 +284,13 @@ class DataFactory(object):
             # Rename Mystery platform names
             platform = common.PLATFORM_NAME_OVERRIDES.get(item['platform'], item['platform'])

+            if item['custom_thumb'] and item['custom_thumb'] != item['user_thumb']:
+                user_thumb = item['custom_thumb']
+            elif item['user_thumb']:
+                user_thumb = item['user_thumb']
+            else:
+                user_thumb = common.DEFAULT_USER_THUMB
+
             row = {'reference_id': item['reference_id'],
                    'row_id': item['row_id'],
                    'id': item['row_id'],

@@ -278,6 +302,7 @@ class DataFactory(object):
                    'user_id': item['user_id'],
                    'user': item['user'],
                    'friendly_name': item['friendly_name'],
+                   'user_thumb': user_thumb,
                    'platform': platform,
                    'product': item['product'],
                    'player': item['player'],
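The user_thumb handling added above prefers a custom avatar URL, then the stored Plex thumb, then a default image. A minimal sketch of that precedence; DEFAULT_USER_THUMB here is a placeholder value, not necessarily the project's actual default:

    DEFAULT_USER_THUMB = 'images/gravatar-default-80x80.png'  # placeholder default

    def pick_user_thumb(custom_thumb, user_thumb):
        # Same precedence as the hunk above: a custom avatar wins when it
        # differs from the Plex thumb, then the Plex thumb, then the default.
        if custom_thumb and custom_thumb != user_thumb:
            return custom_thumb
        elif user_thumb:
            return user_thumb
        return DEFAULT_USER_THUMB

    print(pick_user_thumb('', ''))                    # placeholder default
    print(pick_user_thumb('', 'plex-thumb.png'))      # plex-thumb.png
    print(pick_user_thumb('custom.png', 'plex.png'))  # custom.png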
@@ -1191,9 +1191,10 @@ def get_plexpy_url(hostname=None):
     else:
         scheme = 'http'

-    if hostname is None and plexpy.CONFIG.HTTP_HOST == '0.0.0.0':
+    if hostname is None and plexpy.CONFIG.HTTP_HOST in ('0.0.0.0', '::'):
         import socket
         try:
+            # Only returns IPv4 address
             s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
             s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
             s.connect(('<broadcast>', 0))

@@ -1206,7 +1207,7 @@ def get_plexpy_url(hostname=None):

     if not hostname:
         hostname = 'localhost'
-    elif hostname == 'localhost' and plexpy.CONFIG.HTTP_HOST != '0.0.0.0':
+    elif hostname == 'localhost' and plexpy.CONFIG.HTTP_HOST not in ('0.0.0.0', '::'):
         hostname = plexpy.CONFIG.HTTP_HOST
     else:
         hostname = hostname or plexpy.CONFIG.HTTP_HOST
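Both hunks treat the IPv6 wildcard bind '::' the same as '0.0.0.0'. For context, a self-contained sketch of the local-address discovery used in the first hunk; as the new comment notes, the UDP broadcast trick only yields an IPv4 address:

    import socket

    def guess_local_ipv4():
        # Same technique as above: connect a UDP socket toward the broadcast
        # address and read back the interface address the OS selected (IPv4 only).
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
        try:
            s.connect(('<broadcast>', 0))
            return s.getsockname()[0]
        except OSError:
            return 'localhost'
        finally:
            s.close()

    print(guess_local_ipv4())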
@@ -85,6 +85,16 @@ def filter_usernames(new_users=None):
     _FILTER_USERNAMES = sorted(_FILTER_USERNAMES, key=len, reverse=True)


+class LogLevelFilter(logging.Filter):
+    def __init__(self, max_level):
+        super(LogLevelFilter, self).__init__()
+
+        self.max_level = max_level
+
+    def filter(self, record):
+        return record.levelno <= self.max_level
+
+
 class NoThreadFilter(logging.Filter):
     """
     Log filter for the current thread

@@ -330,12 +340,20 @@ def initLogger(console=False, log_dir=False, verbose=False):
     # Setup console logger
     if console:
         console_formatter = logging.Formatter('%(asctime)s - %(levelname)s :: %(threadName)s : %(message)s', '%Y-%m-%d %H:%M:%S')
-        console_handler = logging.StreamHandler()
-        console_handler.setFormatter(console_formatter)
-        console_handler.setLevel(logging.DEBUG)
-
-        logger.addHandler(console_handler)
-        cherrypy.log.error_log.addHandler(console_handler)
+        stdout_handler = logging.StreamHandler(sys.stdout)
+        stdout_handler.setFormatter(console_formatter)
+        stdout_handler.setLevel(logging.DEBUG)
+        stdout_handler.addFilter(LogLevelFilter(logging.INFO))
+
+        stderr_handler = logging.StreamHandler(sys.stderr)
+        stderr_handler.setFormatter(console_formatter)
+        stderr_handler.setLevel(logging.WARNING)
+
+        logger.addHandler(stdout_handler)
+        logger.addHandler(stderr_handler)
+        cherrypy.log.error_log.addHandler(stdout_handler)
+        cherrypy.log.error_log.addHandler(stderr_handler)

     # Add filters to log handlers
     # Only add filters after the config file has been initialized
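The new LogLevelFilter is what keeps INFO-and-below on stdout while WARNING-and-above goes to stderr (the stderr handler's own level handles the upper half). A self-contained sketch of the same split outside Tautulli's logger setup:

    import logging
    import sys

    class LogLevelFilter(logging.Filter):
        # Pass records at or below max_level (same idea as the class added above).
        def __init__(self, max_level):
            super(LogLevelFilter, self).__init__()
            self.max_level = max_level

        def filter(self, record):
            return record.levelno <= self.max_level

    logger = logging.getLogger('demo')
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s - %(levelname)s :: %(threadName)s : %(message)s',
                                  '%Y-%m-%d %H:%M:%S')

    stdout_handler = logging.StreamHandler(sys.stdout)  # DEBUG and INFO only
    stdout_handler.setFormatter(formatter)
    stdout_handler.setLevel(logging.DEBUG)
    stdout_handler.addFilter(LogLevelFilter(logging.INFO))

    stderr_handler = logging.StreamHandler(sys.stderr)  # WARNING and above
    stderr_handler.setFormatter(formatter)
    stderr_handler.setLevel(logging.WARNING)

    logger.addHandler(stdout_handler)
    logger.addHandler(stderr_handler)

    logger.info('goes to stdout')
    logger.error('goes to stderr')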
@@ -402,7 +402,9 @@ class Newsletter(object):
             pass

         if self.start_date is None:
-            if self.config['time_frame_units'] == 'days':
+            if self.config['time_frame_units'] == 'months':
+                self.start_date = self.end_date.shift(months=-self.config['time_frame'])
+            elif self.config['time_frame_units'] == 'days':
                 self.start_date = self.end_date.shift(days=-self.config['time_frame'])
             else:
                 self.start_date = self.end_date.shift(hours=-self.config['time_frame'])
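The time frame shift uses Arrow's shift(); the new branch only adds a months unit ahead of the existing days and hours branches. A standalone sketch, assuming the arrow package and hypothetical argument names:

    import arrow

    def newsletter_start_date(end_date, time_frame, time_frame_units):
        # Same branching as the hunk above, using Arrow's relative shift().
        if time_frame_units == 'months':
            return end_date.shift(months=-time_frame)
        elif time_frame_units == 'days':
            return end_date.shift(days=-time_frame)
        else:
            return end_date.shift(hours=-time_frame)

    end = arrow.now()
    print(newsletter_start_date(end, 3, 'months'))  # three calendar months before now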
@@ -288,7 +288,7 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
             continue

         # Make sure the condition values is in a list
-        if isinstance(values, str):
+        if not isinstance(values, list):
             values = [values]

         # Cast the condition values to the correct type

@@ -302,6 +302,9 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
             elif parameter_type == 'float':
                 values = [helpers.cast_to_float(v) for v in values]

+            else:
+                raise ValueError
+
         except ValueError as e:
             logger.error("Tautulli NotificationHandler :: {%s} Unable to cast condition '%s', values '%s', to type '%s'."
                          % (i+1, parameter, values, parameter_type))

@@ -318,6 +321,9 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
             elif parameter_type == 'float':
                 parameter_value = helpers.cast_to_float(parameter_value)

+            else:
+                raise ValueError
+
         except ValueError as e:
             logger.error("Tautulli NotificationHandler :: {%s} Unable to cast parameter '%s', value '%s', to type '%s'."
                          % (i+1, parameter, parameter_value, parameter_type))
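The two new else branches raise ValueError so that an unknown parameter type lands in the existing error handler instead of passing through silently. A small sketch of the cast-or-fail pattern, using plain Python casts as hypothetical stand-ins for the helpers.cast_to_* functions:

    def cast_values(values, parameter_type):
        # Unknown types raise ValueError, which the caller logs, as in the hunks above.
        try:
            if parameter_type == 'int':
                values = [int(v) for v in values]
            elif parameter_type == 'float':
                values = [float(v) for v in values]
            elif parameter_type == 'str':
                values = [str(v) for v in values]
            else:
                raise ValueError
        except ValueError:
            print("Unable to cast values %s to type '%s'." % (values, parameter_type))
            return None
        return values

    print(cast_values(['1', '2'], 'int'))    # [1, 2]
    print(cast_values(['1'], 'timestamp'))   # prints the error, returns None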
@@ -112,7 +112,12 @@ AGENT_IDS = {'growl': 0,
              'gotify': 29
              }

-DEFAULT_CUSTOM_CONDITIONS = [{'parameter': '', 'operator': '', 'value': ''}]
+DEFAULT_CUSTOM_CONDITIONS = [{'parameter': '', 'operator': '', 'value': [], 'type': None}]
+CUSTOM_CONDITION_TYPE_OPERATORS = {
+    'float': ['is', 'is not', 'is greater than', 'is less than'],
+    'int': ['is', 'is not', 'is greater than', 'is less than'],
+    'str': ['contains', 'does not contain', 'is', 'is not', 'begins with', 'does not begin with', 'ends with', 'does not end with'],
+}


 def available_notification_agents():

@@ -642,13 +647,18 @@ def set_notifier_config(notifier_id=None, agent_id=None, **kwargs):

     agent_class = get_agent_class(agent_id=agent['id'], config=notifier_config)

+    custom_conditions = validate_conditions(kwargs.get('custom_conditions'))
+    if custom_conditions is False:
+        logger.error("Tautulli Notifiers :: Unable to update notification agent: Invalid custom conditions.")
+        return False
+
     keys = {'id': notifier_id}
     values = {'agent_id': agent['id'],
               'agent_name': agent['name'],
               'agent_label': agent['label'],
               'friendly_name': kwargs.get('friendly_name', ''),
               'notifier_config': json.dumps(agent_class.config),
-              'custom_conditions': kwargs.get('custom_conditions', json.dumps(DEFAULT_CUSTOM_CONDITIONS)),
+              'custom_conditions': json.dumps(custom_conditions or DEFAULT_CUSTOM_CONDITIONS),
               'custom_conditions_logic': kwargs.get('custom_conditions_logic', ''),
               }
     values.update(actions)

@@ -685,6 +695,66 @@ def send_notification(notifier_id=None, subject='', body='', notify_action='', notification_id=None, **kwargs):
         logger.debug("Tautulli Notifiers :: Notification requested but no notifier_id received.")


+def validate_conditions(custom_conditions):
+    if custom_conditions is None:
+        return DEFAULT_CUSTOM_CONDITIONS
+
+    try:
+        conditions = json.loads(custom_conditions)
+    except ValueError:
+        logger.error("Tautulli Notifiers :: Unable to parse custom conditions json: %s" % custom_conditions)
+        return False
+
+    if not isinstance(conditions, list):
+        logger.error("Tautulli Notifiers :: Invalid custom conditions: %s. Conditions must be a list." % conditions)
+        return False
+
+    validated_conditions = []
+
+    for condition in conditions:
+        validated_condition = DEFAULT_CUSTOM_CONDITIONS[0].copy()
+
+        if not isinstance(condition, dict):
+            logger.error("Tautulli Notifiers :: Invalid custom condition: %s. Condition must be a dict." % condition)
+            return False
+
+        parameter = str(condition.get('parameter', '')).lower()
+        operator = str(condition.get('operator', '')).lower()
+        values = condition.get('value', [])
+
+        if parameter:
+            parameter_type = common.NOTIFICATION_PARAMETERS_TYPES.get(parameter)
+
+            if not parameter_type:
+                logger.error("Tautulli Notifiers :: Invalid parameter '%s' in custom condition: %s" % (parameter, condition))
+                return False
+
+            validated_condition['parameter'] = parameter.lower()
+            validated_condition['type'] = parameter_type
+
+        if operator:
+            if operator not in CUSTOM_CONDITION_TYPE_OPERATORS.get(parameter_type, []):
+                logger.error("Tautulli Notifiers :: Invalid operator '%s' for parameter '%s' in custom condition: %s" % (operator, parameter, condition))
+                return False
+
+            validated_condition['operator'] = operator
+
+        if values:
+            if not isinstance(values, list):
+                values = [values]
+
+            for value in values:
+                if not isinstance(value, (str, int, float)):
+                    logger.error("Tautulli Notifiers :: Invalid value '%s' for parameter '%s' in custom condition: %s" % (value, parameter, condition))
+                    return False
+
+            validated_condition['value'] = values
+
+        validated_conditions.append(validated_condition)
+
+    return validated_conditions
+
+
 def blacklist_logger():
     db = database.MonitorDatabase()
     notifiers = db.select('SELECT notifier_config FROM notifiers')
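A usage sketch of the new validation flow: the settings form submits custom_conditions as a JSON string, and validate_conditions() either normalizes each condition (filling in its parameter type) or returns False so set_notifier_config() refuses to save. The parameter name and expected outputs below are illustrative assumptions, not captured output:

    import json

    # Toy input in the shape the settings form submits: a JSON list of condition dicts.
    good = json.dumps([{'parameter': 'edition_title', 'operator': 'is', 'value': ['Extended Edition']}])
    bad = json.dumps([{'parameter': 'edition_title', 'operator': 'is greater than', 'value': ['x']}])

    # Assuming edition_title is registered as a 'str' parameter:
    #   validate_conditions(good) -> [{'parameter': 'edition_title', 'operator': 'is',
    #                                  'value': ['Extended Edition'], 'type': 'str'}]
    #   validate_conditions(bad)  -> False  ('is greater than' is not a str operator)
    #   validate_conditions(None) -> DEFAULT_CUSTOM_CONDITIONS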
@@ -924,7 +924,7 @@ class PmsConnect(object):
                         'parent_year': show_details.get('year', ''),
                         'grandparent_year': helpers.get_xml_attr(metadata_main, 'grandparentYear'),
                         'thumb': helpers.get_xml_attr(metadata_main, 'thumb'),
-                        'parent_thumb': helpers.get_xml_attr(metadata_main, 'parentThumb'),
+                        'parent_thumb': helpers.get_xml_attr(metadata_main, 'parentThumb') or show_details.get('thumb'),
                         'grandparent_thumb': helpers.get_xml_attr(metadata_main, 'grandparentThumb'),
                         'art': helpers.get_xml_attr(metadata_main, 'art'),
                         'banner': show_details.get('banner', ''),

@@ -1003,7 +1003,7 @@ class PmsConnect(object):
                         'parent_year': season_details.get('year', ''),
                         'grandparent_year': show_details.get('year', ''),
                         'thumb': helpers.get_xml_attr(metadata_main, 'thumb'),
-                        'parent_thumb': parent_thumb,
+                        'parent_thumb': parent_thumb or show_details.get('thumb'),
                         'grandparent_thumb': helpers.get_xml_attr(metadata_main, 'grandparentThumb'),
                         'art': helpers.get_xml_attr(metadata_main, 'art'),
                         'banner': show_details.get('banner', ''),

@@ -2442,6 +2442,7 @@ class PmsConnect(object):
             actors = []
             genres = []
             labels = []
+            collections = []

             if m.getElementsByTagName('Director'):
                 for director in m.getElementsByTagName('Director'):

@@ -2463,6 +2464,10 @@ class PmsConnect(object):
                 for label in m.getElementsByTagName('Label'):
                     labels.append(helpers.get_xml_attr(label, 'tag'))

+            if m.getElementsByTagName('Collection'):
+                for collection in m.getElementsByTagName('Collection'):
+                    collections.append(helpers.get_xml_attr(collection, 'tag'))
+
             media_type = helpers.get_xml_attr(m, 'type')
             if m.nodeName == 'Directory' and media_type == 'photo':
                 media_type = 'photo_album'

@@ -2506,6 +2511,7 @@ class PmsConnect(object):
                                'actors': actors,
                                'genres': genres,
                                'labels': labels,
+                               'collections': collections,
                                'full_title': helpers.get_xml_attr(m, 'title')
                                }
             children_list.append(children_output)
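The new Collection handling mirrors the existing Director and Label loops over minidom elements. A self-contained sketch with a tiny XML snippet and a hypothetical stand-in for helpers.get_xml_attr:

    from xml.dom import minidom

    def get_xml_attr(node, attr):
        # Hypothetical stand-in for helpers.get_xml_attr: attribute value or ''.
        return node.getAttribute(attr) if node.hasAttribute(attr) else ''

    xml = '<Video type="movie" title="Example">' \
          '<Collection tag="Favorites"/><Collection tag="4K"/></Video>'
    m = minidom.parseString(xml).getElementsByTagName('Video')[0]

    collections = []
    if m.getElementsByTagName('Collection'):
        for collection in m.getElementsByTagName('Collection'):
            collections.append(get_xml_attr(collection, 'tag'))

    print(collections)  # ['Favorites', '4K']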
@@ -18,4 +18,4 @@
 from __future__ import unicode_literals

 PLEXPY_BRANCH = "master"
-PLEXPY_RELEASE_VERSION = "v2.10.4"
+PLEXPY_RELEASE_VERSION = "v2.10.5"
@@ -4591,6 +4591,7 @@ class WebInterface(object):
             "audience_rating": "",
             "audience_rating_image": "",
             "banner": "",
+            "collections": [],
             "content_rating": "",
             "directors": [],
             "duration": "",

@@ -5442,8 +5443,8 @@ class WebInterface(object):
             "tagline": "",
             "thumb": "/library/metadata/153037/thumb/1462175060",
             "title": "The Red Woman",
-            "user_rating": "9.0",
             "updated_at": "1462175060",
+            "user_rating": "9.0",
             "writers": [
                 "David Benioff",
                 "D. B. Weiss"
@@ -18,7 +18,7 @@ gntp==1.0.3
 html5lib==1.1
 httpagentparser==1.9.2
 idna==3.3
-importlib-metadata==4.11.4
+importlib-metadata==5.0.0
 importlib-resources==5.10.0
 git+https://github.com/Tautulli/ipwhois.git@master#egg=ipwhois
 IPy==1.01