Merge branch 'nightly' into dependabot/pip/nightly/idna-3.4

Commit b706980233 by JonnyWong16, 2022-11-12 17:10:59 -08:00, committed by GitHub.
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in the database).
42 changed files with 667 additions and 395 deletions

View file

@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Stale
uses: actions/stale@v5
uses: actions/stale@v6
with:
stale-issue-message: >
This issue is stale because it has been open for 30 days with no activity.
@ -30,7 +30,7 @@ jobs:
days-before-close: 5
- name: Invalid Template
uses: actions/stale@v5
uses: actions/stale@v6
with:
stale-issue-message: >
Invalid issues template.

View file

@ -13,29 +13,29 @@ jobs:
if: ${{ !contains(github.event.head_commit.message, '[skip ci]') }}
steps:
- name: Checkout Code
uses: actions/checkout@v3.0.2
uses: actions/checkout@v3.1.0
- name: Prepare
id: prepare
run: |
if [[ $GITHUB_REF == refs/tags/* ]]; then
echo ::set-output name=tag::${GITHUB_REF#refs/tags/}
echo "tag=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
elif [[ $GITHUB_REF == refs/heads/master ]]; then
echo ::set-output name=tag::latest
echo "tag=latest" >> $GITHUB_OUTPUT
else
echo ::set-output name=tag::${GITHUB_REF#refs/heads/}
echo "tag=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT
fi
if [[ $GITHUB_REF == refs/tags/*-beta ]]; then
echo ::set-output name=branch::beta
echo "branch=beta" >> $GITHUB_OUTPUT
elif [[ $GITHUB_REF == refs/tags/* ]]; then
echo ::set-output name=branch::master
echo "branch=master" >> $GITHUB_OUTPUT
else
echo ::set-output name=branch::${GITHUB_REF#refs/heads/}
echo "branch=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT
fi
echo ::set-output name=commit::${GITHUB_SHA}
echo ::set-output name=build_date::$(date -u +'%Y-%m-%dT%H:%M:%SZ')
echo ::set-output name=docker_platforms::linux/amd64,linux/arm64/v8,linux/arm/v7,linux/arm/v6
echo ::set-output name=docker_image::${{ secrets.DOCKER_REPO }}/tautulli
echo "commit=${GITHUB_SHA}" >> $GITHUB_OUTPUT
echo "build_date=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_OUTPUT
echo "docker_platforms=linux/amd64,linux/arm64/v8,linux/arm/v7,linux/arm/v6" >> $GITHUB_OUTPUT
echo "docker_image=${{ secrets.DOCKER_REPO }}/tautulli" >> $GITHUB_OUTPUT
- name: Set Up QEMU
uses: docker/setup-qemu-action@v2
@ -47,7 +47,7 @@ jobs:
version: latest
- name: Cache Docker Layers
uses: actions/cache@v3.0.8
uses: actions/cache@v3.0.11
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
@ -102,9 +102,9 @@ jobs:
run: |
failures=(neutral, skipped, timed_out, action_required)
if [[ ${array[@]} =~ $WORKFLOW_CONCLUSION ]]; then
echo ::set-output name=status::failure
echo "status=failure" >> $GITHUB_OUTPUT
else
echo ::set-output name=status::$WORKFLOW_CONCLUSION
echo "status=$WORKFLOW_CONCLUSION" >> $GITHUB_OUTPUT
fi
- name: Post Status to Discord

View file

@ -24,7 +24,7 @@ jobs:
steps:
- name: Checkout Code
uses: actions/checkout@v3.0.2
uses: actions/checkout@v3.1.0
- name: Set Release Version
id: get_version
@ -33,14 +33,14 @@ jobs:
if [[ $GITHUB_REF == refs/tags/* ]]; then
echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV
VERSION_NSIS=${GITHUB_REF#refs/tags/v}.1
echo ::set-output name=VERSION_NSIS::${VERSION_NSIS/%-beta.1/.0}
echo ::set-output name=VERSION::${GITHUB_REF#refs/tags/v}
echo ::set-output name=RELEASE_VERSION::${GITHUB_REF#refs/tags/}
echo "VERSION_NSIS=${VERSION_NSIS/%-beta.1/.0}" >> $GITHUB_OUTPUT
echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT
echo "RELEASE_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
else
echo "VERSION=0.0.0" >> $GITHUB_ENV
echo ::set-output name=VERSION_NSIS::0.0.0.0
echo ::set-output name=VERSION::0.0.0
echo ::set-output name=RELEASE_VERSION::${GITHUB_SHA::7}
echo "VERSION_NSIS=0.0.0.0" >> $GITHUB_OUTPUT
echo "VERSION=0.0.0" >> $GITHUB_OUTPUT
echo "RELEASE_VERSION=${GITHUB_SHA::7}" >> $GITHUB_OUTPUT
fi
if [[ $GITHUB_REF == refs/tags/*-beta ]]; then
echo "beta" > branch.txt
@ -52,16 +52,11 @@ jobs:
echo $GITHUB_SHA > version.txt
- name: Set Up Python
uses: actions/setup-python@v4.2.0
uses: actions/setup-python@v4.3.0
with:
python-version: 3.9
- name: Cache Dependencies
uses: actions/cache@v3.0.8
with:
path: ~\AppData\Local\pip\Cache
key: ${{ runner.os }}-pip-${{ hashFiles('package/requirements-package.txt') }}
restore-keys: ${{ runner.os }}-pip-
python-version: '3.9'
cache: pip
cache-dependency-path: '**/requirements*.txt'
- name: Install Dependencies
run: |
@ -108,12 +103,12 @@ jobs:
uses: technote-space/workflow-conclusion-action@v3.0
- name: Checkout Code
uses: actions/checkout@v3.0.2
uses: actions/checkout@v3.1.0
- name: Set Release Version
id: get_version
run: |
echo ::set-output name=RELEASE_VERSION::${GITHUB_REF#refs/tags/}
echo "RELEASE_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
- name: Download Installers
if: env.WORKFLOW_CONCLUSION == 'success'
@ -122,8 +117,9 @@ jobs:
- name: Get Changelog
id: get_changelog
run: |
echo ::set-output name=CHANGELOG::"$( sed -n '/^## /{p; :loop n; p; /^## /q; b loop}' CHANGELOG.md \
CHANGELOG="$( sed -n '/^## /{p; :loop n; p; /^## /q; b loop}' CHANGELOG.md \
| sed '$d' | sed '$d' | sed '$d' | sed ':a;N;$!ba;s/\n/%0A/g' )"
echo "CHANGELOG=${CHANGELOG}" >> $GITHUB_OUTPUT
- name: Create Release
uses: actions/create-release@v1
@ -176,9 +172,9 @@ jobs:
run: |
failures=(neutral, skipped, timed_out, action_required)
if [[ ${array[@]} =~ $WORKFLOW_CONCLUSION ]]; then
echo ::set-output name=status::failure
echo "status=failure" >> $GITHUB_OUTPUT
else
echo ::set-output name=status::$WORKFLOW_CONCLUSION
echo "status=$WORKFLOW_CONCLUSION" >> $GITHUB_OUTPUT
fi
- name: Post Status to Discord

View file

@ -20,18 +20,18 @@ jobs:
- armhf
steps:
- name: Checkout Code
uses: actions/checkout@v3.0.2
uses: actions/checkout@v3.1.0
- name: Prepare
id: prepare
run: |
git fetch --prune --unshallow --tags
if [[ $GITHUB_REF == refs/tags/*-beta || $GITHUB_REF == refs/heads/beta ]]; then
echo ::set-output name=RELEASE::beta
echo "RELEASE=beta" >> $GITHUB_OUTPUT
elif [[ $GITHUB_REF == refs/tags/* || $GITHUB_REF == refs/heads/master ]]; then
echo ::set-output name=RELEASE::stable
echo "RELEASE=stable" >> $GITHUB_OUTPUT
else
echo ::set-output name=RELEASE::edge
echo "RELEASE=edge" >> $GITHUB_OUTPUT
fi
- name: Set Up QEMU
@ -77,9 +77,9 @@ jobs:
run: |
failures=(neutral, skipped, timed_out, action_required)
if [[ ${array[@]} =~ $WORKFLOW_CONCLUSION ]]; then
echo ::set-output name=status::failure
echo "status=failure" >> $GITHUB_OUTPUT
else
echo ::set-output name=status::$WORKFLOW_CONCLUSION
echo "status=$WORKFLOW_CONCLUSION" >> $GITHUB_OUTPUT
fi
- name: Post Status to Discord

View file

@ -10,10 +10,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout Code
uses: actions/checkout@v3.0.2
uses: actions/checkout@v3.1.0
- name: Comment on Pull Request
uses: mshick/add-pr-comment@v1
uses: mshick/add-pr-comment@v2
if: github.base_ref != 'nightly'
with:
message: Pull requests must be made to the `nightly` branch. Thanks.

View file

@ -13,7 +13,7 @@ jobs:
- name: Submit package to Windows Package Manager Community Repository
run: |
$wingetPackage = "Tautulli.Tautulli"
$gitToken = "${{ secrets.GITHUB_TOKEN }}"
$gitToken = "${{ secrets.WINGET_TOKEN }}"
$github = Invoke-RestMethod -uri "https://api.github.com/repos/Tautulli/Tautulli/releases/latest"
$installerUrl = $github | Select -ExpandProperty assets -First 1 | Where-Object -Property name -match "Tautulli-windows-.*-x64.exe" | Select -ExpandProperty browser_download_url

View file

@ -1,5 +1,29 @@
# Changelog
## v2.10.5 (2022-11-07)
* Notifications:
* New: Added edition_title notification parameter. (#1838)
* Change: Track notifications link to MusicBrainz track instead of album.
* Newsletters:
* New: Added months time frame for newsletters. (#1876)
* UI:
* Fix: Broken link on library statistic cards. (#1852)
* Fix: Check for IPv6 host when generating QR code for app registration.
* Fix: Missing padding on condition operator dropdown on small screens.
* Other:
* Fix: Launching browser when webserver is bound to IPv6.
* New: Tautulli can be installed via the Windows Package Manager (winget).
* Change: Separate stdout and stderr console logging. (#1874)
* API:
* Fix: API not returning 400 response code.
* New: Added edition_title to get_metadata API response.
* New: Added collections to get_children_metadata API response.
* New: Added user_thumb to get_history API response.
* New: Validate custom notification conditions before saving notification agents. (#1846)
* Change: Fallback to parent_thumb for seasons in get_metadata API response.
## v2.10.4 (2022-09-05)
* Activity:

View file

@ -9,12 +9,12 @@ All pull requests should be based on the `nightly` branch, to minimize cross mer
### Python Code
#### Compatibility
The code should work with Python 3.6+. Note that Tautulli runs on many different platforms.
The code should work with Python 3.7+. Note that Tautulli runs on many different platforms.
Re-use existing code. Do not hesitate to add logging in your code. You can use the logger module `plexpy.logger.*` for this. Web requests are invoked via `plexpy.request.*` and derived ones. Use these methods to automatically add proper and meaningful error handling.
#### Code conventions
Although Tautulli did not adapt a code convention in the past, we try to follow the [PEP8](http://legacy.python.org/dev/peps/pep-0008/) conventions for future code. A short summary to remind you (copied from http://wiki.ros.org/PyStyleGuide):
Although Tautulli did not adopt a code convention in the past, we try to follow [PEP8](http://legacy.python.org/dev/peps/pep-0008/) conventions for future code. A short summary to remind you (copied from http://wiki.ros.org/PyStyleGuide):
* 4 space indentation
* 80 characters per line

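As a side note on the logging guidance above, here is a minimal sketch of the recommended pattern, assuming only that plexpy.logger exposes the info/debug/error helpers already used elsewhere in this diff; the function and messages are hypothetical.

from plexpy import logger

def log_forced_port(port):
    # Hypothetical helper mirroring the logging style used in Tautulli.py above.
    if port:
        logger.info('Using forced web server port: %i', port)
    else:
        logger.debug('No forced web server port supplied; using the configured port.')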
View file

@ -246,7 +246,7 @@ def main():
# Start the background threads
plexpy.start()
# Force the http port if neccessary
# Force the http port if necessary
if args.port:
plexpy.HTTP_PORT = args.port
logger.info('Using forced web server port: %i', plexpy.HTTP_PORT)

View file

@ -122,6 +122,16 @@ select.form-control {
#condition-widget .fa-minus {
cursor: pointer;
}
#condition-widget .condition-operator-col {
padding-left: 0;
padding-right: 0;
}
@media (max-width: 767px) {
#condition-widget .condition-operator-col {
padding-left: 15px;
padding-right: 15px;
}
}
.react-selectize.root-node .react-selectize-control .react-selectize-placeholder {
color: #eee !important;
}

File diff suppressed because one or more lines are too long

View file

@ -58,7 +58,7 @@ DOCUMENTATION :: END
getPlexPyURL = function () {
var deferred = $.Deferred();
if (location.hostname !== "localhost" && location.hostname !== "127.0.0.1") {
if (location.hostname !== "localhost" && location.hostname !== "127.0.0.1" && location.hostname !== "[::1]") {
deferred.resolve(location.href.split('/settings')[0]);
} else {
$.get('get_plexpy_url').then(function (url) {
@ -74,7 +74,7 @@ DOCUMENTATION :: END
var hostname = parser.hostname;
var protocol = parser.protocol;
if (hostname === '127.0.0.1' || hostname === 'localhost') {
if (hostname === 'localhost' || hostname === '127.0.0.1' || hostname === '[::1]') {
$('#api_qr_localhost').toggle(true);
$('#api_qr_private').toggle(false);
} else {

View file

@ -56,11 +56,12 @@
<div class="form-group">
<label for="time_frame">Time Frame</label>
<div class="row">
<div class="col-md-4">
<div class="col-md-5">
<div class="input-group newsletter-time_frame">
<span class="input-group-addon form-control btn-dark inactive">Last</span>
<input type="number" class="form-control" id="newsletter_config_time_frame" name="newsletter_config_time_frame" value="${newsletter['config']['time_frame']}">
<select class="form-control" id="newsletter_config_time_frame_units" name="newsletter_config_time_frame_units">
<option value="months" ${'selected' if newsletter['config']['time_frame_units'] == 'months' else ''}>months</option>
<option value="days" ${'selected' if newsletter['config']['time_frame_units'] == 'days' else ''}>days</option>
<option value="hours" ${'selected' if newsletter['config']['time_frame_units'] == 'hours' else ''}>hours</option>
</select>

View file

@ -14,7 +14,7 @@ import itertools
import posixpath
import collections
from . import _adapters, _meta
from . import _adapters, _meta, _py39compat
from ._collections import FreezableDefaultDict, Pair
from ._compat import (
NullFinder,
@ -29,7 +29,7 @@ from contextlib import suppress
from importlib import import_module
from importlib.abc import MetaPathFinder
from itertools import starmap
from typing import List, Mapping, Optional, Union
from typing import List, Mapping, Optional
__all__ = [
@ -189,6 +189,10 @@ class EntryPoint(DeprecatedTuple):
following the attr, and following any extras.
"""
name: str
value: str
group: str
dist: Optional['Distribution'] = None
def __init__(self, name, value, group):
@ -223,17 +227,6 @@ class EntryPoint(DeprecatedTuple):
vars(self).update(dist=dist)
return self
def __iter__(self):
"""
Supply iter so one may construct dicts of EntryPoints by name.
"""
msg = (
"Construction of dict of EntryPoints is deprecated in "
"favor of EntryPoints."
)
warnings.warn(msg, DeprecationWarning)
return iter((self.name, self))
def matches(self, **params):
"""
EntryPoint matches the given parameters.
@ -279,77 +272,7 @@ class EntryPoint(DeprecatedTuple):
return hash(self._key())
class DeprecatedList(list):
"""
Allow an otherwise immutable object to implement mutability
for compatibility.
>>> recwarn = getfixture('recwarn')
>>> dl = DeprecatedList(range(3))
>>> dl[0] = 1
>>> dl.append(3)
>>> del dl[3]
>>> dl.reverse()
>>> dl.sort()
>>> dl.extend([4])
>>> dl.pop(-1)
4
>>> dl.remove(1)
>>> dl += [5]
>>> dl + [6]
[1, 2, 5, 6]
>>> dl + (6,)
[1, 2, 5, 6]
>>> dl.insert(0, 0)
>>> dl
[0, 1, 2, 5]
>>> dl == [0, 1, 2, 5]
True
>>> dl == (0, 1, 2, 5)
True
>>> len(recwarn)
1
"""
__slots__ = ()
_warn = functools.partial(
warnings.warn,
"EntryPoints list interface is deprecated. Cast to list if needed.",
DeprecationWarning,
stacklevel=pypy_partial(2),
)
def _wrap_deprecated_method(method_name: str): # type: ignore
def wrapped(self, *args, **kwargs):
self._warn()
return getattr(super(), method_name)(*args, **kwargs)
return method_name, wrapped
locals().update(
map(
_wrap_deprecated_method,
'__setitem__ __delitem__ append reverse extend pop remove '
'__iadd__ insert sort'.split(),
)
)
def __add__(self, other):
if not isinstance(other, tuple):
self._warn()
other = tuple(other)
return self.__class__(tuple(self) + other)
def __eq__(self, other):
if not isinstance(other, tuple):
self._warn()
other = tuple(other)
return tuple(self).__eq__(other)
class EntryPoints(DeprecatedList):
class EntryPoints(tuple):
"""
An immutable collection of selectable EntryPoint objects.
"""
@ -360,14 +283,6 @@ class EntryPoints(DeprecatedList):
"""
Get the EntryPoint in self matching name.
"""
if isinstance(name, int):
warnings.warn(
"Accessing entry points by index is deprecated. "
"Cast to tuple if needed.",
DeprecationWarning,
stacklevel=2,
)
return super().__getitem__(name)
try:
return next(iter(self.select(name=name)))
except StopIteration:
@ -378,7 +293,8 @@ class EntryPoints(DeprecatedList):
Select entry points from self that match the
given parameters (typically group and/or name).
"""
return EntryPoints(ep for ep in self if ep.matches(**params))
candidates = (_py39compat.ep_matches(ep, **params) for ep in self)
return EntryPoints(ep for ep, predicate in candidates if predicate)
@property
def names(self):
@ -391,10 +307,6 @@ class EntryPoints(DeprecatedList):
def groups(self):
"""
Return the set of all groups of all entry points.
For coverage while SelectableGroups is present.
>>> EntryPoints().groups
set()
"""
return {ep.group for ep in self}
@ -410,101 +322,6 @@ class EntryPoints(DeprecatedList):
)
class Deprecated:
"""
Compatibility add-in for mapping to indicate that
mapping behavior is deprecated.
>>> recwarn = getfixture('recwarn')
>>> class DeprecatedDict(Deprecated, dict): pass
>>> dd = DeprecatedDict(foo='bar')
>>> dd.get('baz', None)
>>> dd['foo']
'bar'
>>> list(dd)
['foo']
>>> list(dd.keys())
['foo']
>>> 'foo' in dd
True
>>> list(dd.values())
['bar']
>>> len(recwarn)
1
"""
_warn = functools.partial(
warnings.warn,
"SelectableGroups dict interface is deprecated. Use select.",
DeprecationWarning,
stacklevel=pypy_partial(2),
)
def __getitem__(self, name):
self._warn()
return super().__getitem__(name)
def get(self, name, default=None):
self._warn()
return super().get(name, default)
def __iter__(self):
self._warn()
return super().__iter__()
def __contains__(self, *args):
self._warn()
return super().__contains__(*args)
def keys(self):
self._warn()
return super().keys()
def values(self):
self._warn()
return super().values()
class SelectableGroups(Deprecated, dict):
"""
A backward- and forward-compatible result from
entry_points that fully implements the dict interface.
"""
@classmethod
def load(cls, eps):
by_group = operator.attrgetter('group')
ordered = sorted(eps, key=by_group)
grouped = itertools.groupby(ordered, by_group)
return cls((group, EntryPoints(eps)) for group, eps in grouped)
@property
def _all(self):
"""
Reconstruct a list of all entrypoints from the groups.
"""
groups = super(Deprecated, self).values()
return EntryPoints(itertools.chain.from_iterable(groups))
@property
def groups(self):
return self._all.groups
@property
def names(self):
"""
for coverage:
>>> SelectableGroups().names
set()
"""
return self._all.names
def select(self, **params):
if not params:
return self
return self._all.select(**params)
class PackagePath(pathlib.PurePosixPath):
"""A reference to a path in a package"""
@ -548,7 +365,7 @@ class Distribution:
"""
@classmethod
def from_name(cls, name):
def from_name(cls, name: str):
"""Return the Distribution for the given package name.
:param name: The name of the distribution package to search for.
@ -556,13 +373,13 @@ class Distribution:
package, if found.
:raises PackageNotFoundError: When the named package's distribution
metadata cannot be found.
:raises ValueError: When an invalid value is supplied for name.
"""
for resolver in cls._discover_resolvers():
dists = resolver(DistributionFinder.Context(name=name))
dist = next(iter(dists), None)
if dist is not None:
return dist
else:
if not name:
raise ValueError("A distribution name is required.")
try:
return next(cls.discover(name=name))
except StopIteration:
raise PackageNotFoundError(name)
@classmethod
@ -1017,34 +834,26 @@ def version(distribution_name):
_unique = functools.partial(
unique_everseen,
key=operator.attrgetter('_normalized_name'),
key=_py39compat.normalized_name,
)
"""
Wrapper for ``distributions`` to return unique distributions by name.
"""
def entry_points(**params) -> Union[EntryPoints, SelectableGroups]:
def entry_points(**params) -> EntryPoints:
"""Return EntryPoint objects for all installed packages.
Pass selection parameters (group or name) to filter the
result to entry points matching those properties (see
EntryPoints.select()).
For compatibility, returns ``SelectableGroups`` object unless
selection parameters are supplied. In the future, this function
will return ``EntryPoints`` instead of ``SelectableGroups``
even when no selection parameters are supplied.
For maximum future compatibility, pass selection parameters
or invoke ``.select`` with parameters on the result.
:return: EntryPoints or SelectableGroups for all installed packages.
:return: EntryPoints for all installed packages.
"""
eps = itertools.chain.from_iterable(
dist.entry_points for dist in _unique(distributions())
)
return SelectableGroups.load(eps).select(**params)
return EntryPoints(eps).select(**params)
def files(distribution_name):

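To illustrate the importlib_metadata change above, where entry_points() now returns an EntryPoints tuple instead of the deprecated SelectableGroups, here is a minimal usage sketch; 'console_scripts' is just a common group name and the printed names depend on the local environment.

from importlib_metadata import entry_points

# Selection parameters filter directly; dict-style group access is gone.
console_eps = entry_points(group='console_scripts')
print(sorted(console_eps.names))

# Equivalent two-step form using .select() on the returned EntryPoints.
all_eps = entry_points()
print(sorted(all_eps.select(group='console_scripts').names))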
View file

@ -8,6 +8,7 @@ __all__ = ['install', 'NullFinder', 'Protocol']
try:
from typing import Protocol
except ImportError: # pragma: no cover
# Python 3.7 compatibility
from typing_extensions import Protocol # type: ignore

View file

@ -0,0 +1,48 @@
"""
Compatibility layer with Python 3.8/3.9
"""
from typing import TYPE_CHECKING, Any, Optional, Tuple
if TYPE_CHECKING: # pragma: no cover
# Prevent circular imports on runtime.
from . import Distribution, EntryPoint
else:
Distribution = EntryPoint = Any
def normalized_name(dist: Distribution) -> Optional[str]:
"""
Honor name normalization for distributions that don't provide ``_normalized_name``.
"""
try:
return dist._normalized_name
except AttributeError:
from . import Prepared # -> delay to prevent circular imports.
return Prepared.normalize(getattr(dist, "name", None) or dist.metadata['Name'])
def ep_matches(ep: EntryPoint, **params) -> Tuple[EntryPoint, bool]:
"""
Workaround for ``EntryPoint`` objects without the ``matches`` method.
For the sake of convenience, a tuple is returned containing not only the
boolean value corresponding to the predicate evaluation, but also a compatible
``EntryPoint`` object that can be safely used at a later stage.
For example, the following sequences of expressions should be compatible:
# Sequence 1: using the compatibility layer
candidates = (_py39compat.ep_matches(ep, **params) for ep in entry_points)
[ep for ep, predicate in candidates if predicate]
# Sequence 2: using Python 3.9+
[ep for ep in entry_points if ep.matches(**params)]
"""
try:
return ep, ep.matches(**params)
except AttributeError:
from . import EntryPoint # -> delay to prevent circular imports.
# Reconstruct the EntryPoint object to make sure it is compatible.
_ep = EntryPoint(ep.name, ep.value, ep.group)
return _ep, _ep.matches(**params)

View file

@ -17,7 +17,7 @@ from ._legacy import (
Resource,
)
from importlib_resources.abc import ResourceReader
from .abc import ResourceReader
__all__ = [

View file

@ -5,25 +5,58 @@ import functools
import contextlib
import types
import importlib
import inspect
import warnings
import itertools
from typing import Union, Optional
from typing import Union, Optional, cast
from .abc import ResourceReader, Traversable
from ._compat import wrap_spec
Package = Union[types.ModuleType, str]
Anchor = Package
def files(package):
# type: (Package) -> Traversable
def package_to_anchor(func):
"""
Get a Traversable resource from a package
Replace 'package' parameter as 'anchor' and warn about the change.
Other errors should fall through.
>>> files('a', 'b')
Traceback (most recent call last):
TypeError: files() takes from 0 to 1 positional arguments but 2 were given
"""
return from_package(get_package(package))
undefined = object()
@functools.wraps(func)
def wrapper(anchor=undefined, package=undefined):
if package is not undefined:
if anchor is not undefined:
return func(anchor, package)
warnings.warn(
"First parameter to files is renamed to 'anchor'",
DeprecationWarning,
stacklevel=2,
)
return func(package)
elif anchor is undefined:
return func()
return func(anchor)
return wrapper
def get_resource_reader(package):
# type: (types.ModuleType) -> Optional[ResourceReader]
@package_to_anchor
def files(anchor: Optional[Anchor] = None) -> Traversable:
"""
Get a Traversable resource for an anchor.
"""
return from_package(resolve(anchor))
def get_resource_reader(package: types.ModuleType) -> Optional[ResourceReader]:
"""
Return the package's loader if it's a ResourceReader.
"""
@ -39,24 +72,39 @@ def get_resource_reader(package):
return reader(spec.name) # type: ignore
def resolve(cand):
# type: (Package) -> types.ModuleType
return cand if isinstance(cand, types.ModuleType) else importlib.import_module(cand)
@functools.singledispatch
def resolve(cand: Optional[Anchor]) -> types.ModuleType:
return cast(types.ModuleType, cand)
def get_package(package):
# type: (Package) -> types.ModuleType
"""Take a package name or module object and return the module.
@resolve.register
def _(cand: str) -> types.ModuleType:
return importlib.import_module(cand)
Raise an exception if the resolved module is not a package.
@resolve.register
def _(cand: None) -> types.ModuleType:
return resolve(_infer_caller().f_globals['__name__'])
def _infer_caller():
"""
resolved = resolve(package)
if wrap_spec(resolved).submodule_search_locations is None:
raise TypeError(f'{package!r} is not a package')
return resolved
Walk the stack and find the frame of the first caller not in this module.
"""
def is_this_file(frame_info):
return frame_info.filename == __file__
def is_wrapper(frame_info):
return frame_info.function == 'wrapper'
not_this_file = itertools.filterfalse(is_this_file, inspect.stack())
# also exclude 'wrapper' due to singledispatch in the call stack
callers = itertools.filterfalse(is_wrapper, not_this_file)
return next(callers).frame
def from_package(package):
def from_package(package: types.ModuleType):
"""
Return a Traversable object for the given package.
@ -67,7 +115,14 @@ def from_package(package):
@contextlib.contextmanager
def _tempfile(reader, suffix=''):
def _tempfile(
reader,
suffix='',
# gh-93353: Keep a reference to call os.remove() in late Python
# finalization.
*,
_os_remove=os.remove,
):
# Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
# blocks due to the need to close the temporary file to work on Windows
# properly.
@ -81,18 +136,35 @@ def _tempfile(reader, suffix=''):
yield pathlib.Path(raw_path)
finally:
try:
os.remove(raw_path)
_os_remove(raw_path)
except FileNotFoundError:
pass
def _temp_file(path):
return _tempfile(path.read_bytes, suffix=path.name)
def _is_present_dir(path: Traversable) -> bool:
"""
Some Traversables implement ``is_dir()`` to raise an
exception (i.e. ``FileNotFoundError``) when the
directory doesn't exist. This function wraps that call
to always return a boolean and only return True
if there's a dir and it exists.
"""
with contextlib.suppress(FileNotFoundError):
return path.is_dir()
return False
@functools.singledispatch
def as_file(path):
"""
Given a Traversable object, return that object as a
path on the local file system in a context manager.
"""
return _tempfile(path.read_bytes, suffix=path.name)
return _temp_dir(path) if _is_present_dir(path) else _temp_file(path)
@as_file.register(pathlib.Path)
@ -102,3 +174,34 @@ def _(path):
Degenerate behavior for pathlib.Path objects.
"""
yield path
@contextlib.contextmanager
def _temp_path(dir: tempfile.TemporaryDirectory):
"""
Wrap tempfile.TemporaryDirectory to return a pathlib object.
"""
with dir as result:
yield pathlib.Path(result)
@contextlib.contextmanager
def _temp_dir(path):
"""
Given a traversable dir, recursively replicate the whole tree
to the file system in a context manager.
"""
assert path.is_dir()
with _temp_path(tempfile.TemporaryDirectory()) as temp_dir:
yield _write_contents(temp_dir, path)
def _write_contents(target, source):
child = target.joinpath(source.name)
if source.is_dir():
child.mkdir()
for item in source.iterdir():
_write_contents(child, item)
else:
child.open('wb').write(source.read_bytes())
return child

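A short sketch of the two importlib_resources behaviours changed above: files() now takes an optional anchor (inferring the calling module when omitted), and as_file() can materialise directory Traversables via the new _temp_dir path. It assumes only that importlib_resources itself is importable.

import importlib_resources as resources

# Explicit anchor (a package known to be importable here).
pkg_root = resources.files('importlib_resources')

# New in this version: resources.files() with no argument infers the calling
# module as the anchor, so code inside a package can simply call files().

# as_file() now also accepts directory Traversables; zip-backed packages are
# replicated into a temporary directory for the life of the context manager.
with resources.as_file(pkg_root) as on_disk:
    print(on_disk.is_dir())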
View file

@ -27,8 +27,7 @@ def deprecated(func):
return wrapper
def normalize_path(path):
# type: (Any) -> str
def normalize_path(path: Any) -> str:
"""Normalize a path by ensuring it is a string.
If the resulting string contains path separators, an exception is raised.

View file

@ -1,5 +1,7 @@
import abc
import io
import itertools
import pathlib
from typing import Any, BinaryIO, Iterable, Iterator, NoReturn, Text, Optional
from ._compat import runtime_checkable, Protocol, StrPath
@ -50,6 +52,10 @@ class ResourceReader(metaclass=abc.ABCMeta):
raise FileNotFoundError
class TraversalError(Exception):
pass
@runtime_checkable
class Traversable(Protocol):
"""
@ -92,7 +98,6 @@ class Traversable(Protocol):
Return True if self is a file
"""
@abc.abstractmethod
def joinpath(self, *descendants: StrPath) -> "Traversable":
"""
Return Traversable resolved with any descendants applied.
@ -101,6 +106,22 @@ class Traversable(Protocol):
and each may contain multiple levels separated by
``posixpath.sep`` (``/``).
"""
if not descendants:
return self
names = itertools.chain.from_iterable(
path.parts for path in map(pathlib.PurePosixPath, descendants)
)
target = next(names)
matches = (
traversable for traversable in self.iterdir() if traversable.name == target
)
try:
match = next(matches)
except StopIteration:
raise TraversalError(
"Target not found during traversal.", target, list(names)
)
return match.joinpath(*names)
def __truediv__(self, child: StrPath) -> "Traversable":
"""
@ -118,7 +139,8 @@ class Traversable(Protocol):
accepted by io.TextIOWrapper.
"""
@abc.abstractproperty
@property
@abc.abstractmethod
def name(self) -> str:
"""
The base name of this object without any parent references.

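The abc.py change above gives Traversable.joinpath() a concrete default implementation that walks iterdir() one segment at a time and raises the new TraversalError when a segment cannot be found. A small sketch under the same assumption as the previous example:

import importlib_resources as resources
from importlib_resources.abc import TraversalError

root = resources.files('importlib_resources')

# joinpath() now resolves multiple segments, including 'a/b' style compounds.
print(root.joinpath('abc.py').name)

try:
    root.joinpath('no/such/resource').read_bytes()
except (TraversalError, FileNotFoundError):
    print('missing targets raise TraversalError (or FileNotFoundError on disk)')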
View file

@ -82,15 +82,13 @@ class MultiplexedPath(abc.Traversable):
def is_file(self):
return False
def joinpath(self, child):
# first try to find child in current paths
for file in self.iterdir():
if file.name == child:
return file
# if it does not exist, construct it with the first path
return self._paths[0] / child
__truediv__ = joinpath
def joinpath(self, *descendants):
try:
return super().joinpath(*descendants)
except abc.TraversalError:
# One of the paths did not resolve (a directory does not exist).
# Just return something that will not exist.
return self._paths[0].joinpath(*descendants)
def open(self, *args, **kwargs):
raise FileNotFoundError(f'{self} is not a file')

View file

@ -16,31 +16,28 @@ class SimpleReader(abc.ABC):
provider.
"""
@abc.abstractproperty
def package(self):
# type: () -> str
@property
@abc.abstractmethod
def package(self) -> str:
"""
The name of the package for which this reader loads resources.
"""
@abc.abstractmethod
def children(self):
# type: () -> List['SimpleReader']
def children(self) -> List['SimpleReader']:
"""
Obtain an iterable of SimpleReader for available
child containers (e.g. directories).
"""
@abc.abstractmethod
def resources(self):
# type: () -> List[str]
def resources(self) -> List[str]:
"""
Obtain available named resources for this virtual package.
"""
@abc.abstractmethod
def open_binary(self, resource):
# type: (str) -> BinaryIO
def open_binary(self, resource: str) -> BinaryIO:
"""
Obtain a File-like for a named resource.
"""
@ -50,13 +47,35 @@ class SimpleReader(abc.ABC):
return self.package.split('.')[-1]
class ResourceContainer(Traversable):
"""
Traversable container for a package's resources via its reader.
"""
def __init__(self, reader: SimpleReader):
self.reader = reader
def is_dir(self):
return True
def is_file(self):
return False
def iterdir(self):
files = (ResourceHandle(self, name) for name in self.reader.resources)
dirs = map(ResourceContainer, self.reader.children())
return itertools.chain(files, dirs)
def open(self, *args, **kwargs):
raise IsADirectoryError()
class ResourceHandle(Traversable):
"""
Handle to a named resource in a ResourceReader.
"""
def __init__(self, parent, name):
# type: (ResourceContainer, str) -> None
def __init__(self, parent: ResourceContainer, name: str):
self.parent = parent
self.name = name # type: ignore
@ -76,44 +95,6 @@ class ResourceHandle(Traversable):
raise RuntimeError("Cannot traverse into a resource")
class ResourceContainer(Traversable):
"""
Traversable container for a package's resources via its reader.
"""
def __init__(self, reader):
# type: (SimpleReader) -> None
self.reader = reader
def is_dir(self):
return True
def is_file(self):
return False
def iterdir(self):
files = (ResourceHandle(self, name) for name in self.reader.resources)
dirs = map(ResourceContainer, self.reader.children())
return itertools.chain(files, dirs)
def open(self, *args, **kwargs):
raise IsADirectoryError()
@staticmethod
def _flatten(compound_names):
for name in compound_names:
yield from name.split('/')
def joinpath(self, *descendants):
if not descendants:
return self
names = self._flatten(descendants)
target = next(names)
return next(
traversable for traversable in self.iterdir() if traversable.name == target
).joinpath(*names)
class TraversableReader(TraversableResources, SimpleReader):
"""
A TraversableResources based on SimpleReader. Resource providers

View file

@ -6,7 +6,20 @@ try:
except ImportError:
# Python 3.9 and earlier
class import_helper: # type: ignore
from test.support import modules_setup, modules_cleanup
from test.support import (
modules_setup,
modules_cleanup,
DirsOnSysPath,
CleanImport,
)
try:
from test.support import os_helper # type: ignore
except ImportError:
# Python 3.9 compat
class os_helper: # type:ignore
from test.support import temp_dir
try:

View file

@ -0,0 +1,50 @@
import pathlib
import functools
####
# from jaraco.path 3.4
def build(spec, prefix=pathlib.Path()):
"""
Build a set of files/directories, as described by the spec.
Each key represents a pathname, and the value represents
the content. Content may be a nested directory.
>>> spec = {
... 'README.txt': "A README file",
... "foo": {
... "__init__.py": "",
... "bar": {
... "__init__.py": "",
... },
... "baz.py": "# Some code",
... }
... }
>>> tmpdir = getfixture('tmpdir')
>>> build(spec, tmpdir)
"""
for name, contents in spec.items():
create(contents, pathlib.Path(prefix) / name)
@functools.singledispatch
def create(content, path):
path.mkdir(exist_ok=True)
build(content, prefix=path) # type: ignore
@create.register
def _(content: bytes, path):
path.write_bytes(content)
@create.register
def _(content: str, path):
path.write_text(content)
# end from jaraco.path
####

View file

@ -1,10 +1,23 @@
import typing
import textwrap
import unittest
import warnings
import importlib
import contextlib
import importlib_resources as resources
from importlib_resources.abc import Traversable
from ..abc import Traversable
from . import data01
from . import util
from . import _path
from ._compat import os_helper, import_helper
@contextlib.contextmanager
def suppress_known_deprecation():
with warnings.catch_warnings(record=True) as ctx:
warnings.simplefilter('default', category=DeprecationWarning)
yield ctx
class FilesTests:
@ -25,6 +38,14 @@ class FilesTests:
def test_traversable(self):
assert isinstance(resources.files(self.data), Traversable)
def test_old_parameter(self):
"""
Files used to take a 'package' parameter. Make sure anyone
passing by name is still supported.
"""
with suppress_known_deprecation():
resources.files(package=self.data)
class OpenDiskTests(FilesTests, unittest.TestCase):
def setUp(self):
@ -42,5 +63,50 @@ class OpenNamespaceTests(FilesTests, unittest.TestCase):
self.data = namespacedata01
class SiteDir:
def setUp(self):
self.fixtures = contextlib.ExitStack()
self.addCleanup(self.fixtures.close)
self.site_dir = self.fixtures.enter_context(os_helper.temp_dir())
self.fixtures.enter_context(import_helper.DirsOnSysPath(self.site_dir))
self.fixtures.enter_context(import_helper.CleanImport())
class ModulesFilesTests(SiteDir, unittest.TestCase):
def test_module_resources(self):
"""
A module can have resources found adjacent to the module.
"""
spec = {
'mod.py': '',
'res.txt': 'resources are the best',
}
_path.build(spec, self.site_dir)
import mod
actual = resources.files(mod).joinpath('res.txt').read_text()
assert actual == spec['res.txt']
class ImplicitContextFilesTests(SiteDir, unittest.TestCase):
def test_implicit_files(self):
"""
Without any parameter, files() will infer the location as the caller.
"""
spec = {
'somepkg': {
'__init__.py': textwrap.dedent(
"""
import importlib_resources as res
val = res.files().joinpath('res.txt').read_text()
"""
),
'res.txt': 'resources are the best',
},
}
_path.build(spec, self.site_dir)
assert importlib.import_module('somepkg').val == 'resources are the best'
if __name__ == '__main__':
unittest.main()

View file

@ -75,6 +75,11 @@ class MultiplexedPathTest(unittest.TestCase):
str(path.joinpath('imaginary'))[len(prefix) + 1 :],
os.path.join('namespacedata01', 'imaginary'),
)
self.assertEqual(path.joinpath(), path)
def test_join_path_compound(self):
path = MultiplexedPath(self.folder)
assert not path.joinpath('imaginary/foo.py').exists()
def test_repr(self):
self.assertEqual(

View file

@ -111,6 +111,14 @@ class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase):
{'__init__.py', 'binary.file'},
)
def test_as_file_directory(self):
with resources.as_file(resources.files('ziptestdata')) as data:
assert data.name == 'ziptestdata'
assert data.is_dir()
assert data.joinpath('subdirectory').is_dir()
assert len(list(data.iterdir()))
assert not data.parent.exists()
class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase):
ZIP_MODULE = zipdata02 # type: ignore

View file

@ -3,7 +3,7 @@ import importlib
import io
import sys
import types
from pathlib import Path, PurePath
import pathlib
from . import data01
from . import zipdata01
@ -94,7 +94,7 @@ class CommonTests(metaclass=abc.ABCMeta):
def test_pathlib_path(self):
# Passing in a pathlib.PurePath object for the path should succeed.
path = PurePath('utf-8.file')
path = pathlib.PurePath('utf-8.file')
self.execute(data01, path)
def test_importing_module_as_side_effect(self):
@ -102,17 +102,6 @@ class CommonTests(metaclass=abc.ABCMeta):
del sys.modules[data01.__name__]
self.execute(data01.__name__, 'utf-8.file')
def test_non_package_by_name(self):
# The anchor package cannot be a module.
with self.assertRaises(TypeError):
self.execute(__name__, 'utf-8.file')
def test_non_package_by_package(self):
# The anchor package cannot be a module.
with self.assertRaises(TypeError):
module = sys.modules['importlib_resources.tests.util']
self.execute(module, 'utf-8.file')
def test_missing_path(self):
# Attempting to open or read or request the path for a
# non-existent path should succeed if open_resource
@ -144,7 +133,7 @@ class ZipSetupBase:
@classmethod
def setUpClass(cls):
data_path = Path(cls.ZIP_MODULE.__file__)
data_path = pathlib.Path(cls.ZIP_MODULE.__file__)
data_dir = data_path.parent
cls._zip_path = str(data_dir / 'ziptestdata.zip')
sys.path.append(cls._zip_path)

View file

@ -1,11 +1,11 @@
apscheduler==3.9.1
importlib-metadata==4.11.4
importlib-resources==5.7.1
pyinstaller==5.1
importlib-metadata==5.0.0
importlib-resources==5.10.0
pyinstaller==5.6.2
pyopenssl==22.0.0
pycryptodomex==3.14.1
pycryptodomex==3.15.0
pyobjc-framework-Cocoa==8.5; platform_system == "Darwin"
pyobjc-core==8.5; platform_system == "Darwin"
pyobjc-core==9.0; platform_system == "Darwin"
pywin32==304; platform_system == "Windows"

View file

@ -429,7 +429,7 @@ def daemonize():
def launch_browser(host, port, root):
if not no_browser:
if host == '0.0.0.0':
if host in ('0.0.0.0', '::'):
host = 'localhost'
if CONFIG.ENABLE_HTTPS:

View file

@ -824,7 +824,7 @@ General optional parameters:
if self._api_result_type == 'success' and not self._api_response_code:
self._api_response_code = 200
elif self._api_result_type == 'error' and not self._api_response_code:
elif self._api_result_type == 'error' and self._api_response_code != 500:
self._api_response_code = 400
if not self._api_response_code:

View file

@ -489,8 +489,9 @@ NOTIFICATION_PARAMETERS = [
'category': 'Source Metadata Details',
'parameters': [
{'name': 'Media Type', 'type': 'str', 'value': 'media_type', 'description': 'The type of media.', 'example': 'movie, show, season, episode, artist, album, track, clip'},
{'name': 'Title', 'type': 'str', 'value': 'title', 'description': 'The full title of the item.'},
{'name': 'Library Name', 'type': 'str', 'value': 'library_name', 'description': 'The library name of the item.'},
{'name': 'Title', 'type': 'str', 'value': 'title', 'description': 'The full title of the item.'},
{'name': 'Edition Title', 'type': 'str', 'value': 'edition_title', 'description': 'The edition title of the movie.'},
{'name': 'Show Name', 'type': 'str', 'value': 'show_name', 'description': 'The title of the TV show.'},
{'name': 'Season Name', 'type': 'str', 'value': 'season_name', 'description': 'The title of the TV season.'},
{'name': 'Episode Name', 'type': 'str', 'value': 'episode_name', 'description': 'The title of the TV episode.'},
@ -678,3 +679,8 @@ NEWSLETTER_PARAMETERS = [
]
}
]
NOTIFICATION_PARAMETERS_TYPES = {
parameter['value']: parameter['type'] for category in NOTIFICATION_PARAMETERS for parameter in category['parameters']
}

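For reference, the NOTIFICATION_PARAMETERS_TYPES lookup added above flattens every parameter definition into a value-to-type map; a rough illustration of the kind of mapping it produces, using only parameters visible in this hunk (the real dict covers every category).

# Approximate shape of the generated lookup for the parameters shown above.
NOTIFICATION_PARAMETERS_TYPES_SAMPLE = {
    'media_type': 'str',
    'library_name': 'str',
    'title': 'str',
    'edition_title': 'str',
    'show_name': 'str',
}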
View file

@ -34,6 +34,7 @@ if plexpy.PYTHON2:
import logger
import pmsconnect
import session
import users
else:
from plexpy import libraries
from plexpy import common
@ -43,6 +44,7 @@ else:
from plexpy import logger
from plexpy import pmsconnect
from plexpy import session
from plexpy import users
# Temporarily store update_metadata row ids in memory to prevent rating_key collisions
_UPDATE_METADATA_IDS = {
@ -103,6 +105,8 @@ class DataFactory(object):
'session_history.user',
'(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
THEN users.username ELSE users.friendly_name END) AS friendly_name',
'users.thumb AS user_thumb',
'users.custom_avatar_url AS custom_thumb',
'platform',
'product',
'player',
@ -161,6 +165,8 @@ class DataFactory(object):
'user',
'(CASE WHEN friendly_name IS NULL OR TRIM(friendly_name) = "" \
THEN user ELSE friendly_name END) AS friendly_name',
'NULL AS user_thumb',
'NULL AS custom_thumb',
'platform',
'product',
'player',
@ -244,7 +250,18 @@ class DataFactory(object):
}
rows = []
users_lookup = {}
for item in history:
if item['state']:
# Get user thumb from database for current activity
if not users_lookup:
# Cache user lookup
users_lookup = {u['user_id']: u['thumb'] for u in users.Users().get_users()}
item['user_thumb'] = users_lookup.get(item['user_id'])
filter_duration += int(item['duration'])
if item['media_type'] == 'episode' and item['parent_thumb']:
@ -267,6 +284,13 @@ class DataFactory(object):
# Rename Mystery platform names
platform = common.PLATFORM_NAME_OVERRIDES.get(item['platform'], item['platform'])
if item['custom_thumb'] and item['custom_thumb'] != item['user_thumb']:
user_thumb = item['custom_thumb']
elif item['user_thumb']:
user_thumb = item['user_thumb']
else:
user_thumb = common.DEFAULT_USER_THUMB
row = {'reference_id': item['reference_id'],
'row_id': item['row_id'],
'id': item['row_id'],
@ -278,6 +302,7 @@ class DataFactory(object):
'user_id': item['user_id'],
'user': item['user'],
'friendly_name': item['friendly_name'],
'user_thumb': user_thumb,
'platform': platform,
'product': item['product'],
'player': item['player'],
@ -1044,7 +1069,7 @@ class DataFactory(object):
'sh.id, shm.title, shm.grandparent_title, shm.full_title, shm.year, ' \
'shm.media_index, shm.parent_media_index, ' \
'sh.rating_key, shm.grandparent_rating_key, shm.thumb, shm.grandparent_thumb, ' \
'sh.user, sh.user_id, sh.player, sh.section_id, ' \
'sh.user, sh.user_id, sh.player, ' \
'shm.art, sh.media_type, shm.content_rating, shm.labels, shm.live, shm.guid, ' \
'MAX(sh.started) AS last_watch ' \
'FROM library_sections AS ls ' \

View file

@ -1191,9 +1191,10 @@ def get_plexpy_url(hostname=None):
else:
scheme = 'http'
if hostname is None and plexpy.CONFIG.HTTP_HOST == '0.0.0.0':
if hostname is None and plexpy.CONFIG.HTTP_HOST in ('0.0.0.0', '::'):
import socket
try:
# Only returns IPv4 address
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
s.connect(('<broadcast>', 0))
@ -1206,7 +1207,7 @@ def get_plexpy_url(hostname=None):
if not hostname:
hostname = 'localhost'
elif hostname == 'localhost' and plexpy.CONFIG.HTTP_HOST != '0.0.0.0':
elif hostname == 'localhost' and plexpy.CONFIG.HTTP_HOST not in ('0.0.0.0', '::'):
hostname = plexpy.CONFIG.HTTP_HOST
else:
hostname = hostname or plexpy.CONFIG.HTTP_HOST

View file

@ -85,6 +85,16 @@ def filter_usernames(new_users=None):
_FILTER_USERNAMES = sorted(_FILTER_USERNAMES, key=len, reverse=True)
class LogLevelFilter(logging.Filter):
def __init__(self, max_level):
super(LogLevelFilter, self).__init__()
self.max_level = max_level
def filter(self, record):
return record.levelno <= self.max_level
class NoThreadFilter(logging.Filter):
"""
Log filter for the current thread
@ -330,12 +340,20 @@ def initLogger(console=False, log_dir=False, verbose=False):
# Setup console logger
if console:
console_formatter = logging.Formatter('%(asctime)s - %(levelname)s :: %(threadName)s : %(message)s', '%Y-%m-%d %H:%M:%S')
console_handler = logging.StreamHandler()
console_handler.setFormatter(console_formatter)
console_handler.setLevel(logging.DEBUG)
logger.addHandler(console_handler)
cherrypy.log.error_log.addHandler(console_handler)
stdout_handler = logging.StreamHandler(sys.stdout)
stdout_handler.setFormatter(console_formatter)
stdout_handler.setLevel(logging.DEBUG)
stdout_handler.addFilter(LogLevelFilter(logging.INFO))
stderr_handler = logging.StreamHandler(sys.stderr)
stderr_handler.setFormatter(console_formatter)
stderr_handler.setLevel(logging.WARNING)
logger.addHandler(stdout_handler)
logger.addHandler(stderr_handler)
cherrypy.log.error_log.addHandler(stdout_handler)
cherrypy.log.error_log.addHandler(stderr_handler)
# Add filters to log handlers
# Only add filters after the config file has been initialized

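As a standalone illustration of the console-logging change above (DEBUG/INFO to stdout, WARNING and above to stderr), here is a minimal sketch using only the standard logging module; the logger name is arbitrary.

import sys
import logging


class LogLevelFilter(logging.Filter):
    """Pass records at or below max_level, mirroring the filter added above."""
    def __init__(self, max_level):
        super().__init__()
        self.max_level = max_level

    def filter(self, record):
        return record.levelno <= self.max_level


log = logging.getLogger('console_split_demo')
log.setLevel(logging.DEBUG)

stdout_handler = logging.StreamHandler(sys.stdout)
stdout_handler.setLevel(logging.DEBUG)
stdout_handler.addFilter(LogLevelFilter(logging.INFO))  # DEBUG and INFO only

stderr_handler = logging.StreamHandler(sys.stderr)
stderr_handler.setLevel(logging.WARNING)  # WARNING and above

log.addHandler(stdout_handler)
log.addHandler(stderr_handler)

log.info('goes to stdout')
log.warning('goes to stderr')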
View file

@ -402,7 +402,9 @@ class Newsletter(object):
pass
if self.start_date is None:
if self.config['time_frame_units'] == 'days':
if self.config['time_frame_units'] == 'months':
self.start_date = self.end_date.shift(months=-self.config['time_frame'])
elif self.config['time_frame_units'] == 'days':
self.start_date = self.end_date.shift(days=-self.config['time_frame'])
else:
self.start_date = self.end_date.shift(hours=-self.config['time_frame'])

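The newsletter change above adds a 'months' time frame unit; end_date here is an Arrow object, so the new branch is just another shift(). A tiny hedged example with the arrow library (the values are made up):

import arrow

end_date = arrow.now()
start_date = end_date.shift(months=-3)   # newsletter covering the last 3 months
print(start_date, end_date)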
View file

@ -288,7 +288,7 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
continue
# Make sure the condition values are in a list
if isinstance(values, str):
if not isinstance(values, list):
values = [values]
# Cast the condition values to the correct type
@ -302,6 +302,9 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
elif parameter_type == 'float':
values = [helpers.cast_to_float(v) for v in values]
else:
raise ValueError
except ValueError as e:
logger.error("Tautulli NotificationHandler :: {%s} Unable to cast condition '%s', values '%s', to type '%s'."
% (i+1, parameter, values, parameter_type))
@ -318,6 +321,9 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
elif parameter_type == 'float':
parameter_value = helpers.cast_to_float(parameter_value)
else:
raise ValueError
except ValueError as e:
logger.error("Tautulli NotificationHandler :: {%s} Unable to cast parameter '%s', value '%s', to type '%s'."
% (i+1, parameter, parameter_value, parameter_type))
@ -1066,8 +1072,9 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
'machine_id': notify_params['machine_id'],
# Source metadata parameters
'media_type': notify_params['media_type'],
'title': notify_params['full_title'],
'library_name': notify_params['library_name'],
'title': notify_params['full_title'],
'edition_title': notify_params['edition_title'],
'show_name': show_name,
'season_name': season_name,
'episode_name': episode_name,

View file

@ -112,7 +112,12 @@ AGENT_IDS = {'growl': 0,
'gotify': 29
}
DEFAULT_CUSTOM_CONDITIONS = [{'parameter': '', 'operator': '', 'value': ''}]
DEFAULT_CUSTOM_CONDITIONS = [{'parameter': '', 'operator': '', 'value': [], 'type': None}]
CUSTOM_CONDITION_TYPE_OPERATORS = {
'float': ['is', 'is not', 'is greater than', 'is less than'],
'int': ['is', 'is not', 'is greater than', 'is less than'],
'str': ['contains', 'does not contain', 'is', 'is not', 'begins with', 'does not begin with', 'ends with', 'does not end with'],
}
def available_notification_agents():
@ -642,13 +647,18 @@ def set_notifier_config(notifier_id=None, agent_id=None, **kwargs):
agent_class = get_agent_class(agent_id=agent['id'], config=notifier_config)
custom_conditions = validate_conditions(kwargs.get('custom_conditions'))
if custom_conditions is False:
logger.error("Tautulli Notifiers :: Unable to update notification agent: Invalid custom conditions.")
return False
keys = {'id': notifier_id}
values = {'agent_id': agent['id'],
'agent_name': agent['name'],
'agent_label': agent['label'],
'friendly_name': kwargs.get('friendly_name', ''),
'notifier_config': json.dumps(agent_class.config),
'custom_conditions': kwargs.get('custom_conditions', json.dumps(DEFAULT_CUSTOM_CONDITIONS)),
'custom_conditions': json.dumps(custom_conditions or DEFAULT_CUSTOM_CONDITIONS),
'custom_conditions_logic': kwargs.get('custom_conditions_logic', ''),
}
values.update(actions)
@ -685,6 +695,66 @@ def send_notification(notifier_id=None, subject='', body='', notify_action='', n
logger.debug("Tautulli Notifiers :: Notification requested but no notifier_id received.")
def validate_conditions(custom_conditions):
if custom_conditions is None:
return DEFAULT_CUSTOM_CONDITIONS
try:
conditions = json.loads(custom_conditions)
except ValueError:
logger.error("Tautulli Notifiers :: Unable to parse custom conditions json: %s" % custom_conditions)
return False
if not isinstance(conditions, list):
logger.error("Tautulli Notifiers :: Invalid custom conditions: %s. Conditions must be a list." % conditions)
return False
validated_conditions = []
for condition in conditions:
validated_condition = DEFAULT_CUSTOM_CONDITIONS[0].copy()
if not isinstance(condition, dict):
logger.error("Tautulli Notifiers :: Invalid custom condition: %s. Condition must be a dict." % condition)
return False
parameter = str(condition.get('parameter', '')).lower()
operator = str(condition.get('operator', '')).lower()
values = condition.get('value', [])
if parameter:
parameter_type = common.NOTIFICATION_PARAMETERS_TYPES.get(parameter)
if not parameter_type:
logger.error("Tautulli Notifiers :: Invalid parameter '%s' in custom condition: %s" % (parameter, condition))
return False
validated_condition['parameter'] = parameter.lower()
validated_condition['type'] = parameter_type
if operator:
if operator not in CUSTOM_CONDITION_TYPE_OPERATORS.get(parameter_type, []):
logger.error("Tautulli Notifiers :: Invalid operator '%s' for parameter '%s' in custom condition: %s" % (operator, parameter, condition))
return False
validated_condition['operator'] = operator
if values:
if not isinstance(values, list):
values = [values]
for value in values:
if not isinstance(value, (str, int, float)):
logger.error("Tautulli Notifiers :: Invalid value '%s' for parameter '%s' in custom condition: %s" % (value, parameter, condition))
return False
validated_condition['value'] = values
validated_conditions.append(validated_condition)
return validated_conditions
def blacklist_logger():
db = database.MonitorDatabase()
notifiers = db.select('SELECT notifier_config FROM notifiers')

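To show the sort of input the new validate_conditions() above accepts, here is a hedged example of a custom_conditions payload as it might arrive from the settings form as a JSON string; the parameter names come from NOTIFICATION_PARAMETERS earlier in this diff and the specific values are made up.

import json

# Hypothetical payload: one multi-value condition and one single string value.
custom_conditions = json.dumps([
    {'parameter': 'media_type', 'operator': 'is', 'value': ['movie', 'episode']},
    {'parameter': 'title', 'operator': 'does not contain', 'value': 'trailer'},
])

# validate_conditions() parses the JSON, resolves each parameter's type via
# NOTIFICATION_PARAMETERS_TYPES, checks the operator against
# CUSTOM_CONDITION_TYPE_OPERATORS for that type, coerces 'value' to a list,
# and returns the normalised list of conditions (or False on any invalid entry).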
View file

@ -786,6 +786,7 @@ class PmsConnect(object):
'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'),
'original_title': helpers.get_xml_attr(metadata_main, 'originalTitle'),
'sort_title': helpers.get_xml_attr(metadata_main, 'titleSort'),
'edition_title': helpers.get_xml_attr(metadata_main, 'editionTitle'),
'media_index': helpers.get_xml_attr(metadata_main, 'index'),
'parent_media_index': helpers.get_xml_attr(metadata_main, 'parentIndex'),
'studio': helpers.get_xml_attr(metadata_main, 'studio'),
@ -844,6 +845,7 @@ class PmsConnect(object):
'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'),
'original_title': helpers.get_xml_attr(metadata_main, 'originalTitle'),
'sort_title': helpers.get_xml_attr(metadata_main, 'titleSort'),
'edition_title': helpers.get_xml_attr(metadata_main, 'editionTitle'),
'media_index': helpers.get_xml_attr(metadata_main, 'index'),
'parent_media_index': helpers.get_xml_attr(metadata_main, 'parentIndex'),
'studio': helpers.get_xml_attr(metadata_main, 'studio'),
@ -905,6 +907,7 @@ class PmsConnect(object):
'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'),
'original_title': helpers.get_xml_attr(metadata_main, 'originalTitle'),
'sort_title': helpers.get_xml_attr(metadata_main, 'titleSort'),
'edition_title': helpers.get_xml_attr(metadata_main, 'editionTitle'),
'media_index': helpers.get_xml_attr(metadata_main, 'index'),
'parent_media_index': helpers.get_xml_attr(metadata_main, 'parentIndex'),
'studio': show_details.get('studio', ''),
@ -921,7 +924,7 @@ class PmsConnect(object):
'parent_year': show_details.get('year', ''),
'grandparent_year': helpers.get_xml_attr(metadata_main, 'grandparentYear'),
'thumb': helpers.get_xml_attr(metadata_main, 'thumb'),
'parent_thumb': helpers.get_xml_attr(metadata_main, 'parentThumb'),
'parent_thumb': helpers.get_xml_attr(metadata_main, 'parentThumb') or show_details.get('thumb'),
'grandparent_thumb': helpers.get_xml_attr(metadata_main, 'grandparentThumb'),
'art': helpers.get_xml_attr(metadata_main, 'art'),
'banner': show_details.get('banner', ''),
@ -983,6 +986,7 @@ class PmsConnect(object):
'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'),
'original_title': helpers.get_xml_attr(metadata_main, 'originalTitle'),
'sort_title': helpers.get_xml_attr(metadata_main, 'titleSort'),
'edition_title': helpers.get_xml_attr(metadata_main, 'editionTitle'),
'media_index': helpers.get_xml_attr(metadata_main, 'index'),
'parent_media_index': parent_media_index,
'studio': show_details.get('studio', ''),
@ -999,7 +1003,7 @@ class PmsConnect(object):
'parent_year': season_details.get('year', ''),
'grandparent_year': show_details.get('year', ''),
'thumb': helpers.get_xml_attr(metadata_main, 'thumb'),
'parent_thumb': parent_thumb,
'parent_thumb': parent_thumb or show_details.get('thumb'),
'grandparent_thumb': helpers.get_xml_attr(metadata_main, 'grandparentThumb'),
'art': helpers.get_xml_attr(metadata_main, 'art'),
'banner': show_details.get('banner', ''),
@ -1037,6 +1041,7 @@ class PmsConnect(object):
'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'),
'original_title': helpers.get_xml_attr(metadata_main, 'originalTitle'),
'sort_title': helpers.get_xml_attr(metadata_main, 'titleSort'),
'edition_title': helpers.get_xml_attr(metadata_main, 'editionTitle'),
'media_index': helpers.get_xml_attr(metadata_main, 'index'),
'parent_media_index': helpers.get_xml_attr(metadata_main, 'parentIndex'),
'studio': helpers.get_xml_attr(metadata_main, 'studio'),
@ -1092,6 +1097,7 @@ class PmsConnect(object):
'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'),
'original_title': helpers.get_xml_attr(metadata_main, 'originalTitle'),
'sort_title': helpers.get_xml_attr(metadata_main, 'titleSort'),
'edition_title': helpers.get_xml_attr(metadata_main, 'editionTitle'),
'media_index': helpers.get_xml_attr(metadata_main, 'index'),
'parent_media_index': helpers.get_xml_attr(metadata_main, 'parentIndex'),
'studio': helpers.get_xml_attr(metadata_main, 'studio'),
@ -1150,6 +1156,7 @@ class PmsConnect(object):
'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'),
'original_title': helpers.get_xml_attr(metadata_main, 'originalTitle'),
'sort_title': helpers.get_xml_attr(metadata_main, 'titleSort'),
'edition_title': helpers.get_xml_attr(metadata_main, 'editionTitle'),
'media_index': helpers.get_xml_attr(metadata_main, 'index'),
'parent_media_index': helpers.get_xml_attr(metadata_main, 'parentIndex'),
'studio': helpers.get_xml_attr(metadata_main, 'studio'),
@ -1204,6 +1211,7 @@ class PmsConnect(object):
'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'),
'original_title': helpers.get_xml_attr(metadata_main, 'originalTitle'),
'sort_title': helpers.get_xml_attr(metadata_main, 'titleSort'),
'edition_title': helpers.get_xml_attr(metadata_main, 'editionTitle'),
'media_index': helpers.get_xml_attr(metadata_main, 'index'),
'parent_media_index': helpers.get_xml_attr(metadata_main, 'parentIndex'),
'studio': helpers.get_xml_attr(metadata_main, 'studio'),
@ -1259,6 +1267,7 @@ class PmsConnect(object):
'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'),
'original_title': helpers.get_xml_attr(metadata_main, 'originalTitle'),
'sort_title': helpers.get_xml_attr(metadata_main, 'titleSort'),
'edition_title': helpers.get_xml_attr(metadata_main, 'editionTitle'),
'media_index': helpers.get_xml_attr(metadata_main, 'index'),
'parent_media_index': helpers.get_xml_attr(metadata_main, 'parentIndex'),
'studio': helpers.get_xml_attr(metadata_main, 'studio'),
@ -1314,6 +1323,7 @@ class PmsConnect(object):
'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'),
'original_title': helpers.get_xml_attr(metadata_main, 'originalTitle'),
'sort_title': helpers.get_xml_attr(metadata_main, 'titleSort'),
'edition_title': helpers.get_xml_attr(metadata_main, 'editionTitle'),
'media_index': helpers.get_xml_attr(metadata_main, 'index'),
'parent_media_index': helpers.get_xml_attr(metadata_main, 'parentIndex'),
'studio': helpers.get_xml_attr(metadata_main, 'studio'),
@ -1391,6 +1401,7 @@ class PmsConnect(object):
'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'),
'original_title': helpers.get_xml_attr(metadata_main, 'originalTitle'),
'sort_title': helpers.get_xml_attr(metadata_main, 'titleSort'),
'edition_title': helpers.get_xml_attr(metadata_main, 'editionTitle'),
'media_index': helpers.get_xml_attr(metadata_main, 'index'),
'parent_media_index': helpers.get_xml_attr(metadata_main, 'parentIndex'),
'studio': helpers.get_xml_attr(metadata_main, 'studio'),
@ -2431,6 +2442,7 @@ class PmsConnect(object):
actors = []
genres = []
labels = []
collections = []
if m.getElementsByTagName('Director'):
for director in m.getElementsByTagName('Director'):
@ -2452,6 +2464,10 @@ class PmsConnect(object):
for label in m.getElementsByTagName('Label'):
labels.append(helpers.get_xml_attr(label, 'tag'))
if m.getElementsByTagName('Collection'):
for collection in m.getElementsByTagName('Collection'):
collections.append(helpers.get_xml_attr(collection, 'tag'))
media_type = helpers.get_xml_attr(m, 'type')
if m.nodeName == 'Directory' and media_type == 'photo':
media_type = 'photo_album'
@ -2495,6 +2511,7 @@ class PmsConnect(object):
'actors': actors,
'genres': genres,
'labels': labels,
'collections': collections,
'full_title': helpers.get_xml_attr(m, 'title')
}
children_list.append(children_output)
@ -2757,6 +2774,7 @@ class PmsConnect(object):
return []
library_count = '0'
children_list = []
for a in xml_head:

View file

@ -18,4 +18,4 @@
from __future__ import unicode_literals
PLEXPY_BRANCH = "master"
PLEXPY_RELEASE_VERSION = "v2.10.4"
PLEXPY_RELEASE_VERSION = "v2.10.5"

View file

@ -4591,6 +4591,7 @@ class WebInterface(object):
"audience_rating": "",
"audience_rating_image": "",
"banner": "",
"collections": [],
"content_rating": "",
"directors": [],
"duration": "",
@ -5310,6 +5311,7 @@ class WebInterface(object):
"Jeremy Podeswa"
],
"duration": "2998290",
"edition_title": "",
"full_title": "Game of Thrones - The Red Woman",
"genres": [
"Action/Adventure",
@ -5441,8 +5443,8 @@ class WebInterface(object):
"tagline": "",
"thumb": "/library/metadata/153037/thumb/1462175060",
"title": "The Red Woman",
"user_rating": "9.0",
"updated_at": "1462175060",
"user_rating": "9.0",
"writers": [
"David Benioff",
"D. B. Weiss"

View file

@ -6,7 +6,7 @@ backports.functools-lru-cache==1.6.4
backports.zoneinfo==0.2.1
beautifulsoup4==4.11.1
bleach==5.0.0
certifi==2022.5.18.1
certifi==2022.9.24
cheroot==8.6.0
cherrypy==18.6.1
cloudinary==1.29.0
@ -18,8 +18,8 @@ gntp==1.0.3
html5lib==1.1
httpagentparser==1.9.2
idna==3.4
importlib-metadata==4.11.4
importlib-resources==5.7.1
importlib-metadata==5.0.0
importlib-resources==5.10.0
git+https://github.com/Tautulli/ipwhois.git@master#egg=ipwhois
IPy==1.01
Mako==1.2.0
@ -45,7 +45,7 @@ tempora==5.0.1
tokenize-rt==4.2.1
tzdata==2022.1
tzlocal==4.2
urllib3==1.26.9
urllib3==1.26.12
webencodings==0.5.1
websocket-client==1.3.2
xmltodict==0.13.0