Mirror of https://github.com/Tautulli/Tautulli.git
Bump plexapi from 4.15.16 to 4.16.0 (#2439)
* Bump plexapi from 4.15.16 to 4.16.0

Bumps [plexapi](https://github.com/pkkid/python-plexapi) from 4.15.16 to 4.16.0.
- [Release notes](https://github.com/pkkid/python-plexapi/releases)
- [Commits](https://github.com/pkkid/python-plexapi/compare/4.15.16...4.16.0)

---
updated-dependencies:
- dependency-name: plexapi
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

* Update plexapi==4.16.0

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>

[skip ci]
Parent: eb2c372d82
Commit: 0836fb902c
20 changed files with 287 additions and 49 deletions

@@ -159,6 +159,8 @@ def from_bytes(
     results: CharsetMatches = CharsetMatches()
 
+    early_stop_results: CharsetMatches = CharsetMatches()
+
     sig_encoding, sig_payload = identify_sig_or_bom(sequences)
 
     if sig_encoding is not None:
@@ -221,16 +223,20 @@ def from_bytes(
         try:
             if is_too_large_sequence and is_multi_byte_decoder is False:
                 str(
-                    sequences[: int(50e4)]
-                    if strip_sig_or_bom is False
-                    else sequences[len(sig_payload) : int(50e4)],
+                    (
+                        sequences[: int(50e4)]
+                        if strip_sig_or_bom is False
+                        else sequences[len(sig_payload) : int(50e4)]
+                    ),
                     encoding=encoding_iana,
                 )
             else:
                 decoded_payload = str(
-                    sequences
-                    if strip_sig_or_bom is False
-                    else sequences[len(sig_payload) :],
+                    (
+                        sequences
+                        if strip_sig_or_bom is False
+                        else sequences[len(sig_payload) :]
+                    ),
                     encoding=encoding_iana,
                 )
         except (UnicodeDecodeError, LookupError) as e:
@@ -367,7 +373,13 @@ def from_bytes(
             and not lazy_str_hard_failure
         ):
             fallback_entry = CharsetMatch(
-                sequences, encoding_iana, threshold, False, [], decoded_payload
+                sequences,
+                encoding_iana,
+                threshold,
+                False,
+                [],
+                decoded_payload,
+                preemptive_declaration=specified_encoding,
             )
             if encoding_iana == specified_encoding:
                 fallback_specified = fallback_entry
@@ -421,28 +433,58 @@ def from_bytes(
                 ),
             )
 
-            results.append(
-                CharsetMatch(
-                    sequences,
-                    encoding_iana,
-                    mean_mess_ratio,
-                    bom_or_sig_available,
-                    cd_ratios_merged,
-                    decoded_payload,
-                )
+            current_match = CharsetMatch(
+                sequences,
+                encoding_iana,
+                mean_mess_ratio,
+                bom_or_sig_available,
+                cd_ratios_merged,
+                (
+                    decoded_payload
+                    if (
+                        is_too_large_sequence is False
+                        or encoding_iana in [specified_encoding, "ascii", "utf_8"]
+                    )
+                    else None
+                ),
+                preemptive_declaration=specified_encoding,
             )
 
+            results.append(current_match)
+
             if (
                 encoding_iana in [specified_encoding, "ascii", "utf_8"]
                 and mean_mess_ratio < 0.1
             ):
-                logger.debug(
-                    "Encoding detection: %s is most likely the one.", encoding_iana
-                )
-                if explain:
-                    logger.removeHandler(explain_handler)
-                    logger.setLevel(previous_logger_level)
-                return CharsetMatches([results[encoding_iana]])
+                # If md says nothing to worry about, then... stop immediately!
+                if mean_mess_ratio == 0.0:
+                    logger.debug(
+                        "Encoding detection: %s is most likely the one.",
+                        current_match.encoding,
+                    )
+                    if explain:
+                        logger.removeHandler(explain_handler)
+                        logger.setLevel(previous_logger_level)
+                    return CharsetMatches([current_match])
+
+                early_stop_results.append(current_match)
+
+                if (
+                    len(early_stop_results)
+                    and (specified_encoding is None or specified_encoding in tested)
+                    and "ascii" in tested
+                    and "utf_8" in tested
+                ):
+                    probable_result: CharsetMatch = early_stop_results.best()  # type: ignore[assignment]
+                    logger.debug(
+                        "Encoding detection: %s is most likely the one.",
+                        probable_result.encoding,
+                    )
+                    if explain:
+                        logger.removeHandler(explain_handler)
+                        logger.setLevel(previous_logger_level)
+
+                    return CharsetMatches([probable_result])
 
             if encoding_iana == sig_encoding:
                 logger.debug(
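The four hunks above look like the vendored charset_normalizer `api.py` (the bundled copy moves from 3.3.2 to 3.4.0, per the version hunk further down): `from_bytes()` gains an `early_stop_results` pool, a `preemptive_declaration` pass-through into `CharsetMatch`, and an early return once a clean ASCII/UTF-8/declared-encoding candidate has been tested. A minimal usage sketch of the public entry point; the payload is illustrative:

```python
# Minimal sketch assuming the bundled charset_normalizer 3.4.0 is importable.
from charset_normalizer import from_bytes

payload = "Bonjour, café crème !".encode("utf_8")

# from_bytes() returns a CharsetMatches collection; best() picks the most likely match.
best_guess = from_bytes(payload).best()
if best_guess is not None:
    print(best_guess.encoding)  # e.g. "utf_8"
    print(str(best_guess))      # decoded text
```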

@@ -109,6 +109,14 @@ def cli_detect(argv: Optional[List[str]] = None) -> int:
         dest="force",
         help="Replace file without asking if you are sure, use this flag with caution.",
     )
+    parser.add_argument(
+        "-i",
+        "--no-preemptive",
+        action="store_true",
+        default=False,
+        dest="no_preemptive",
+        help="Disable looking at a charset declaration to hint the detector.",
+    )
     parser.add_argument(
         "-t",
         "--threshold",
@@ -133,21 +141,35 @@ def cli_detect(argv: Optional[List[str]] = None) -> int:
     args = parser.parse_args(argv)
 
     if args.replace is True and args.normalize is False:
+        if args.files:
+            for my_file in args.files:
+                my_file.close()
         print("Use --replace in addition of --normalize only.", file=sys.stderr)
         return 1
 
     if args.force is True and args.replace is False:
+        if args.files:
+            for my_file in args.files:
+                my_file.close()
         print("Use --force in addition of --replace only.", file=sys.stderr)
         return 1
 
     if args.threshold < 0.0 or args.threshold > 1.0:
+        if args.files:
+            for my_file in args.files:
+                my_file.close()
         print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr)
         return 1
 
     x_ = []
 
     for my_file in args.files:
-        matches = from_fp(my_file, threshold=args.threshold, explain=args.verbose)
+        matches = from_fp(
+            my_file,
+            threshold=args.threshold,
+            explain=args.verbose,
+            preemptive_behaviour=args.no_preemptive is False,
+        )
 
         best_guess = matches.best()
 
@@ -155,9 +177,11 @@ def cli_detect(argv: Optional[List[str]] = None) -> int:
             print(
                 'Unable to identify originating encoding for "{}". {}'.format(
                     my_file.name,
-                    "Maybe try increasing maximum amount of chaos."
-                    if args.threshold < 1.0
-                    else "",
+                    (
+                        "Maybe try increasing maximum amount of chaos."
+                        if args.threshold < 1.0
+                        else ""
+                    ),
                 ),
                 file=sys.stderr,
             )
@@ -258,8 +282,8 @@ def cli_detect(argv: Optional[List[str]] = None) -> int:
             try:
                 x_[0].unicode_path = join(dir_path, ".".join(o_))
 
-                with open(x_[0].unicode_path, "w", encoding="utf-8") as fp:
-                    fp.write(str(best_guess))
+                with open(x_[0].unicode_path, "wb") as fp:
+                    fp.write(best_guess.output())
             except IOError as e:
                 print(str(e), file=sys.stderr)
                 if my_file.closed is False:
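These hunks appear to be charset_normalizer's CLI (`cli/__main__.py`): a new `-i` / `--no-preemptive` flag is forwarded to `from_fp()` as `preemptive_behaviour=...`, open files are closed before early exits, and the `--normalize` path now writes bytes via `best_guess.output()`. A hedged sketch of the equivalent library call; the file names are placeholders:

```python
# Sketch of what --no-preemptive toggles, using charset_normalizer's public API.
from charset_normalizer import from_fp

with open("unknown.txt", "rb") as fp:
    # preemptive_behaviour=False ignores any charset declaration embedded in the payload.
    matches = from_fp(fp, threshold=0.2, explain=False, preemptive_behaviour=False)

best_guess = matches.best()
if best_guess is not None:
    # output() re-encodes to UTF-8, which is what the rewritten --normalize path writes.
    with open("unknown.normalized.txt", "wb") as out:
        out.write(best_guess.output())
```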

@@ -544,6 +544,8 @@ COMMON_SAFE_ASCII_CHARACTERS: Set[str] = {
     "|",
     '"',
     "-",
+    "(",
+    ")",
 }
 
 

@@ -1,13 +1,24 @@
-from typing import Any, Dict, Optional, Union
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Optional
 from warnings import warn
 
 from .api import from_bytes
 from .constant import CHARDET_CORRESPONDENCE
 
+# TODO: remove this check when dropping Python 3.7 support
+if TYPE_CHECKING:
+    from typing_extensions import TypedDict
+
+    class ResultDict(TypedDict):
+        encoding: Optional[str]
+        language: str
+        confidence: Optional[float]
+
 
 def detect(
     byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any
-) -> Dict[str, Optional[Union[str, float]]]:
+) -> ResultDict:
     """
     chardet legacy method
     Detect the encoding of the given byte string. It should be mostly backward-compatible.
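This hunk looks like charset_normalizer's `legacy.py`: the chardet-compatible `detect()` return type becomes a `ResultDict` TypedDict, declared only under `TYPE_CHECKING`, so runtime behaviour is unchanged. A short sketch:

```python
# detect() keeps the chardet-style interface; the TypedDict only affects type checkers.
from charset_normalizer import detect

result = detect("naïve café".encode("utf_8"))
print(result["encoding"], result["language"], result["confidence"])
```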

@@ -236,7 +236,7 @@ class SuspiciousRange(MessDetectorPlugin):
 
     @property
     def ratio(self) -> float:
-        if self._character_count <= 24:
+        if self._character_count <= 13:
             return 0.0
 
         ratio_of_suspicious_range_usage: float = (
@@ -260,6 +260,7 @@ class SuperWeirdWordPlugin(MessDetectorPlugin):
 
         self._buffer: str = ""
         self._buffer_accent_count: int = 0
+        self._buffer_glyph_count: int = 0
 
     def eligible(self, character: str) -> bool:
         return True
@@ -279,6 +280,14 @@ class SuperWeirdWordPlugin(MessDetectorPlugin):
                 and is_thai(character) is False
             ):
                 self._foreign_long_watch = True
+            if (
+                is_cjk(character)
+                or is_hangul(character)
+                or is_katakana(character)
+                or is_hiragana(character)
+                or is_thai(character)
+            ):
+                self._buffer_glyph_count += 1
             return
         if not self._buffer:
             return
@@ -291,17 +300,20 @@ class SuperWeirdWordPlugin(MessDetectorPlugin):
             self._character_count += buffer_length
 
             if buffer_length >= 4:
-                if self._buffer_accent_count / buffer_length > 0.34:
+                if self._buffer_accent_count / buffer_length >= 0.5:
                     self._is_current_word_bad = True
                 # Word/Buffer ending with an upper case accentuated letter are so rare,
                 # that we will consider them all as suspicious. Same weight as foreign_long suspicious.
-                if (
+                elif (
                     is_accentuated(self._buffer[-1])
                     and self._buffer[-1].isupper()
                     and all(_.isupper() for _ in self._buffer) is False
                 ):
                     self._foreign_long_count += 1
                     self._is_current_word_bad = True
+                elif self._buffer_glyph_count == 1:
+                    self._is_current_word_bad = True
+                    self._foreign_long_count += 1
             if buffer_length >= 24 and self._foreign_long_watch:
                 camel_case_dst = [
                     i
@@ -325,6 +337,7 @@ class SuperWeirdWordPlugin(MessDetectorPlugin):
             self._foreign_long_watch = False
             self._buffer = ""
             self._buffer_accent_count = 0
+            self._buffer_glyph_count = 0
         elif (
             character not in {"<", ">", "-", "=", "~", "|", "_"}
             and character.isdigit() is False

@@ -1,9 +1,10 @@
 from encodings.aliases import aliases
 from hashlib import sha256
 from json import dumps
+from re import sub
 from typing import Any, Dict, Iterator, List, Optional, Tuple, Union
 
-from .constant import TOO_BIG_SEQUENCE
+from .constant import RE_POSSIBLE_ENCODING_INDICATION, TOO_BIG_SEQUENCE
 from .utils import iana_name, is_multi_byte_encoding, unicode_range
 
 
@@ -16,6 +17,7 @@ class CharsetMatch:
         has_sig_or_bom: bool,
         languages: "CoherenceMatches",
         decoded_payload: Optional[str] = None,
+        preemptive_declaration: Optional[str] = None,
     ):
         self._payload: bytes = payload
 
@@ -33,13 +35,13 @@ class CharsetMatch:
 
         self._string: Optional[str] = decoded_payload
 
+        self._preemptive_declaration: Optional[str] = preemptive_declaration
+
     def __eq__(self, other: object) -> bool:
         if not isinstance(other, CharsetMatch):
-            raise TypeError(
-                "__eq__ cannot be invoked on {} and {}.".format(
-                    str(other.__class__), str(self.__class__)
-                )
-            )
+            if isinstance(other, str):
+                return iana_name(other) == self.encoding
+            return False
         return self.encoding == other.encoding and self.fingerprint == other.fingerprint
 
     def __lt__(self, other: object) -> bool:
@@ -210,7 +212,24 @@ class CharsetMatch:
         """
         if self._output_encoding is None or self._output_encoding != encoding:
             self._output_encoding = encoding
-            self._output_payload = str(self).encode(encoding, "replace")
+            decoded_string = str(self)
+            if (
+                self._preemptive_declaration is not None
+                and self._preemptive_declaration.lower()
+                not in ["utf-8", "utf8", "utf_8"]
+            ):
+                patched_header = sub(
+                    RE_POSSIBLE_ENCODING_INDICATION,
+                    lambda m: m.string[m.span()[0] : m.span()[1]].replace(
+                        m.groups()[0], iana_name(self._output_encoding)  # type: ignore[arg-type]
+                    ),
+                    decoded_string[:8192],
+                    1,
+                )
+
+                decoded_string = patched_header + decoded_string[8192:]
+
+            self._output_payload = decoded_string.encode(encoding, "replace")
 
         return self._output_payload  # type: ignore
 
@@ -266,7 +285,7 @@ class CharsetMatches:
                 )
             )
         # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage)
-        if len(item.raw) <= TOO_BIG_SEQUENCE:
+        if len(item.raw) < TOO_BIG_SEQUENCE:
             for match in self._results:
                 if match.fingerprint == item.fingerprint and match.chaos == item.chaos:
                     match.add_submatch(item)
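These hunks look like charset_normalizer's `models.py`: `CharsetMatch.__eq__` now accepts a plain encoding name instead of raising `TypeError`, and `output()` patches a charset declaration found in the first 8192 characters (via the new `preemptive_declaration` argument) when re-encoding. A hedged sketch of both behaviours; the XML payload is illustrative:

```python
from charset_normalizer import from_bytes

payload = b'<?xml version="1.0" encoding="ISO-8859-1"?><note>caf\xe9</note>'
match = from_bytes(payload).best()
assert match is not None

# Comparing against a str now normalises the IANA name instead of raising TypeError.
print(match == "iso-8859-1")

# output() re-encodes to UTF-8 and rewrites the declared charset in the header.
print(match.output("utf_8"))
```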

@@ -2,5 +2,5 @@
 Expose version
 """
 
-__version__ = "3.3.2"
+__version__ = "3.4.0"
 VERSION = __version__.split(".")

@@ -33,6 +33,7 @@ class Audio(PlexPartialObject, PlayedUnplayedMixin):
             distance (float): Sonic Distance of the item from the seed item.
             fields (List<:class:`~plexapi.media.Field`>): List of field objects.
             guid (str): Plex GUID for the artist, album, or track (plex://artist/5d07bcb0403c64029053ac4c).
+            images (List<:class:`~plexapi.media.Image`>): List of image objects.
             index (int): Plex index number (often the track number).
             key (str): API URL (/library/metadata/<ratingkey>).
             lastRatedAt (datetime): Datetime the item was last rated.
@@ -65,6 +66,7 @@ class Audio(PlexPartialObject, PlayedUnplayedMixin):
         self.distance = utils.cast(float, data.attrib.get('distance'))
         self.fields = self.findItems(data, media.Field)
         self.guid = data.attrib.get('guid')
+        self.images = self.findItems(data, media.Image)
         self.index = utils.cast(int, data.attrib.get('index'))
         self.key = data.attrib.get('key', '')
         self.lastRatedAt = utils.toDatetime(data.attrib.get('lastRatedAt'))

@@ -17,7 +17,7 @@ PlexObjectT = TypeVar("PlexObjectT", bound='PlexObject')
 MediaContainerT = TypeVar("MediaContainerT", bound="MediaContainer")
 
 USER_DONT_RELOAD_FOR_KEYS = set()
-_DONT_RELOAD_FOR_KEYS = {'key'}
+_DONT_RELOAD_FOR_KEYS = {'key', 'sourceURI'}
 OPERATORS = {
     'exact': lambda v, q: v == q,
     'iexact': lambda v, q: v.lower() == q.lower(),
@@ -71,7 +71,7 @@ class PlexObject:
         self._details_key = self._buildDetailsKey()
 
     def __repr__(self):
-        uid = self._clean(self.firstAttr('_baseurl', 'ratingKey', 'id', 'key', 'playQueueID', 'uri'))
+        uid = self._clean(self.firstAttr('_baseurl', 'ratingKey', 'id', 'key', 'playQueueID', 'uri', 'type'))
         name = self._clean(self.firstAttr('title', 'name', 'username', 'product', 'tag', 'value'))
         return f"<{':'.join([p for p in [self.__class__.__name__, uid, name] if p])}>"
 

@@ -39,6 +39,7 @@ class Collection(
             contentRating (str) Content rating (PG-13; NR; TV-G).
             fields (List<:class:`~plexapi.media.Field`>): List of field objects.
             guid (str): Plex GUID for the collection (collection://XXXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXX).
+            images (List<:class:`~plexapi.media.Image`>): List of image objects.
             index (int): Plex index number for the collection.
             key (str): API URL (/library/metadata/<ratingkey>).
             labels (List<:class:`~plexapi.media.Label`>): List of label objects.
@@ -82,6 +83,7 @@ class Collection(
         self.contentRating = data.attrib.get('contentRating')
         self.fields = self.findItems(data, media.Field)
         self.guid = data.attrib.get('guid')
+        self.images = self.findItems(data, media.Image)
         self.index = utils.cast(int, data.attrib.get('index'))
         self.key = data.attrib.get('key', '').replace('/children', '')  # FIX_BUG_50
         self.labels = self.findItems(data, media.Label)

@@ -3,7 +3,7 @@
 
 # Library version
 MAJOR_VERSION = 4
-MINOR_VERSION = 15
-PATCH_VERSION = 16
+MINOR_VERSION = 16
+PATCH_VERSION = 0
 __short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__ = f"{__short_version__}.{PATCH_VERSION}"

@@ -1740,7 +1740,7 @@ class LibrarySection(PlexObject):
 
     def _edit(self, items=None, **kwargs):
         """ Actually edit multiple objects. """
-        if isinstance(self._edits, dict):
+        if isinstance(self._edits, dict) and items is None:
             self._edits.update(kwargs)
             return self
 

@@ -26,6 +26,7 @@ class Media(PlexObject):
             height (int): The height of the media in pixels (ex: 256).
             id (int): The unique ID for this media on the server.
             has64bitOffsets (bool): True if video has 64 bit offsets.
+            hasVoiceActivity (bool): True if video has voice activity analyzed.
             optimizedForStreaming (bool): True if video is optimized for streaming.
             parts (List<:class:`~plexapi.media.MediaPart`>): List of media part objects.
             proxyType (int): Equals 42 for optimized versions.
@@ -61,6 +62,7 @@ class Media(PlexObject):
         self.height = utils.cast(int, data.attrib.get('height'))
         self.id = utils.cast(int, data.attrib.get('id'))
         self.has64bitOffsets = utils.cast(bool, data.attrib.get('has64bitOffsets'))
+        self.hasVoiceActivity = utils.cast(bool, data.attrib.get('hasVoiceActivity', '0'))
         self.optimizedForStreaming = utils.cast(bool, data.attrib.get('optimizedForStreaming'))
         self.parts = self.findItems(data, MediaPart)
         self.proxyType = utils.cast(int, data.attrib.get('proxyType'))
@@ -441,6 +443,7 @@ class SubtitleStream(MediaPartStream):
         Attributes:
             TAG (str): 'Stream'
             STREAMTYPE (int): 3
+            canAutoSync (bool): True if the subtitle stream can be auto synced.
             container (str): The container of the subtitle stream.
             forced (bool): True if this is a forced subtitle.
             format (str): The format of the subtitle stream (ex: srt).
@@ -459,6 +462,7 @@ class SubtitleStream(MediaPartStream):
     def _loadData(self, data):
         """ Load attribute values from Plex XML response. """
         super(SubtitleStream, self)._loadData(data)
+        self.canAutoSync = utils.cast(bool, data.attrib.get('canAutoSync'))
         self.container = data.attrib.get('container')
         self.forced = utils.cast(bool, data.attrib.get('forced', '0'))
         self.format = data.attrib.get('format')
@@ -954,6 +958,26 @@ class Guid(PlexObject):
         self.id = data.attrib.get('id')
 
 
+@utils.registerPlexObject
+class Image(PlexObject):
+    """ Represents a single Image media tag.
+
+        Attributes:
+            TAG (str): 'Image'
+            alt (str): The alt text for the image.
+            type (str): The type of image (e.g. coverPoster, background, snapshot).
+            url (str): The API URL (/library/metadata/<ratingKey>/thumb/<thumbid>).
+    """
+    TAG = 'Image'
+
+    def _loadData(self, data):
+        """ Load attribute values from Plex XML response. """
+        self._data = data
+        self.alt = data.attrib.get('alt')
+        self.type = data.attrib.get('type')
+        self.url = data.attrib.get('url')
+
+
 @utils.registerPlexObject
 class Rating(PlexObject):
     """ Represents a single Rating media tag.
@@ -1074,6 +1098,11 @@ class Art(BaseResource):
     TAG = 'Photo'
 
 
+class Logo(BaseResource):
+    """ Represents a single Logo object. """
+    TAG = 'Photo'
+
+
 class Poster(BaseResource):
     """ Represents a single Poster object. """
     TAG = 'Photo'
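Switching to the plexapi side, this section appears to be `media.py`: `Media` gains `hasVoiceActivity`, `SubtitleStream` gains `canAutoSync`, and new `Image` and `Logo` tag classes are registered. A hedged sketch of reading the new attributes; the server URL, token, and section name are placeholders:

```python
from plexapi.server import PlexServer

plex = PlexServer("http://localhost:32400", "<PLEX_TOKEN>")
movie = plex.library.section("Movies").all()[0]

for m in movie.media:
    print(m.hasVoiceActivity)             # new bool on Media
    for part in m.parts:
        for stream in part.subtitleStreams():
            print(stream.canAutoSync)     # new bool on SubtitleStream

for image in movie.images:                # new Image tags (alt, type, url)
    print(image.type, image.url)
```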

@@ -403,6 +403,63 @@ class ArtMixin(ArtUrlMixin, ArtLockMixin):
         return self
 
 
+class LogoUrlMixin:
+    """ Mixin for Plex objects that can have a logo url. """
+
+    @property
+    def logoUrl(self):
+        """ Return the logo url for the Plex object. """
+        image = next((i for i in self.images if i.type == 'clearLogo'), None)
+        return self._server.url(image.url, includeToken=True) if image else None
+
+
+class LogoLockMixin:
+    """ Mixin for Plex objects that can have a locked logo. """
+
+    def lockLogo(self):
+        """ Lock the logo for a Plex object. """
+        raise NotImplementedError('Logo cannot be locked through the API.')
+
+    def unlockLogo(self):
+        """ Unlock the logo for a Plex object. """
+        raise NotImplementedError('Logo cannot be unlocked through the API.')
+
+
+class LogoMixin(LogoUrlMixin, LogoLockMixin):
+    """ Mixin for Plex objects that can have logos. """
+
+    def logos(self):
+        """ Returns list of available :class:`~plexapi.media.Logo` objects. """
+        return self.fetchItems(f'/library/metadata/{self.ratingKey}/clearLogos', cls=media.Logo)
+
+    def uploadLogo(self, url=None, filepath=None):
+        """ Upload a logo from a url or filepath.
+
+            Parameters:
+                url (str): The full URL to the image to upload.
+                filepath (str): The full file path the the image to upload or file-like object.
+        """
+        if url:
+            key = f'/library/metadata/{self.ratingKey}/clearLogos?url={quote_plus(url)}'
+            self._server.query(key, method=self._server._session.post)
+        elif filepath:
+            key = f'/library/metadata/{self.ratingKey}/clearLogos'
+            data = openOrRead(filepath)
+            self._server.query(key, method=self._server._session.post, data=data)
+        return self
+
+    def setLogo(self, logo):
+        """ Set the logo for a Plex object.
+
+            Raises:
+                :exc:`~plexapi.exceptions.NotImplementedError`: Logo cannot be set through the API.
+        """
+        raise NotImplementedError(
+            'Logo cannot be set through the API. '
+            'Re-upload the logo using "uploadLogo" to set it.'
+        )
+
+
 class PosterUrlMixin:
     """ Mixin for Plex objects that can have a poster url. """
 
@@ -513,6 +570,11 @@ class ThemeMixin(ThemeUrlMixin, ThemeLockMixin):
         return self
 
     def setTheme(self, theme):
+        """ Set the theme for a Plex object.
+
+            Raises:
+                :exc:`~plexapi.exceptions.NotImplementedError`: Themes cannot be set through the API.
+        """
         raise NotImplementedError(
             'Themes cannot be set through the API. '
             'Re-upload the theme using "uploadTheme" to set it.'
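This section looks like `mixins.py`, which adds `LogoUrlMixin`, `LogoLockMixin`, and `LogoMixin` (clear-logo support) next to the existing poster/art/theme mixins. A hedged usage sketch; the connection details and logo URL are placeholders:

```python
from plexapi.server import PlexServer

plex = PlexServer("http://localhost:32400", "<PLEX_TOKEN>")
movie = plex.library.section("Movies").all()[0]   # Movie/Show now inherit LogoMixin

print(movie.logoUrl)              # URL of the 'clearLogo' image, or None
for logo in movie.logos():        # available Logo objects
    print(logo)

# Upload a new clear logo from a URL or a local file path.
movie.uploadLogo(url="https://example.com/logo.png")
```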

@@ -283,10 +283,10 @@ class MyPlexAccount(PlexObject):
         """ Returns the :class:`~plexapi.myplex.MyPlexResource` that matches the name specified.
 
             Parameters:
-                name (str): Name to match against.
+                name (str): Name or machine identifier to match against.
         """
         for resource in self.resources():
-            if resource.name.lower() == name.lower():
+            if resource.name.lower() == name.lower() or resource.clientIdentifier == name:
                 return resource
         raise NotFound(f'Unable to find resource {name}')
 
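This hunk appears to be `myplex.py`: `MyPlexAccount.resource()` now also matches on the machine identifier (`clientIdentifier`), which the playlist change below relies on. A hedged sketch; the token and identifier are placeholders:

```python
from plexapi.myplex import MyPlexAccount

account = MyPlexAccount(token="<PLEX_TOKEN>")
# resource() accepts either the server name or its clientIdentifier.
server = account.resource("abc123machineidentifier").connect()
print(server.friendlyName)
```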

@@ -30,6 +30,7 @@ class Photoalbum(
             composite (str): URL to composite image (/library/metadata/<ratingKey>/composite/<compositeid>)
             fields (List<:class:`~plexapi.media.Field`>): List of field objects.
             guid (str): Plex GUID for the photo album (local://229674).
+            images (List<:class:`~plexapi.media.Image`>): List of image objects.
             index (sting): Plex index number for the photo album.
             key (str): API URL (/library/metadata/<ratingkey>).
             lastRatedAt (datetime): Datetime the photo album was last rated.
@@ -57,6 +58,7 @@ class Photoalbum(
         self.composite = data.attrib.get('composite')
         self.fields = self.findItems(data, media.Field)
         self.guid = data.attrib.get('guid')
+        self.images = self.findItems(data, media.Image)
         self.index = utils.cast(int, data.attrib.get('index'))
         self.key = data.attrib.get('key', '').replace('/children', '')  # FIX_BUG_50
         self.lastRatedAt = utils.toDatetime(data.attrib.get('lastRatedAt'))
@@ -164,6 +166,7 @@ class Photo(
             createdAtTZOffset (int): Unknown (-25200).
             fields (List<:class:`~plexapi.media.Field`>): List of field objects.
             guid (str): Plex GUID for the photo (com.plexapp.agents.none://231714?lang=xn).
+            images (List<:class:`~plexapi.media.Image`>): List of image objects.
             index (sting): Plex index number for the photo.
             key (str): API URL (/library/metadata/<ratingkey>).
             lastRatedAt (datetime): Datetime the photo was last rated.
@@ -204,6 +207,7 @@ class Photo(
         self.createdAtTZOffset = utils.cast(int, data.attrib.get('createdAtTZOffset'))
         self.fields = self.findItems(data, media.Field)
         self.guid = data.attrib.get('guid')
+        self.images = self.findItems(data, media.Image)
         self.index = utils.cast(int, data.attrib.get('index'))
         self.key = data.attrib.get('key', '')
         self.lastRatedAt = utils.toDatetime(data.attrib.get('lastRatedAt'))

@@ -190,6 +190,20 @@ class Playlist(
         if self._items is None:
             key = f'{self.key}/items'
             items = self.fetchItems(key)
+
+            # Cache server connections to avoid reconnecting for each item
+            _servers = {}
+            for item in items:
+                if item.sourceURI:
+                    serverID = item.sourceURI.split('/')[2]
+                    if serverID not in _servers:
+                        try:
+                            _servers[serverID] = self._server.myPlexAccount().resource(serverID).connect()
+                        except NotFound:
+                            # Override the server connection with None if the server is not found
+                            _servers[serverID] = None
+                    item._server = _servers[serverID]
+
             self._items = items
         return self._items
 
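This hunk looks like `playlist.py`: when listing items, one server connection per `sourceURI` is resolved through `MyPlexAccount.resource()` and cached, so playlists containing items from other servers no longer reconnect per item (unreachable servers are cached as `None`). Usage itself is unchanged; a short hedged sketch with placeholder names:

```python
from plexapi.server import PlexServer

plex = PlexServer("http://localhost:32400", "<PLEX_TOKEN>")
playlist = plex.playlist("My Mixed Playlist")
for item in playlist.items():     # cross-server items share one cached connection each
    print(item.title)
```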

@@ -90,6 +90,8 @@ TAGTYPES = {
     'theme': 317,
     'studio': 318,
     'network': 319,
+    'showOrdering': 322,
+    'clearLogo': 323,
     'place': 400,
 }
 REVERSETAGTYPES = {v: k for k, v in TAGTYPES.items()}

@@ -9,7 +9,7 @@ from plexapi.base import Playable, PlexPartialObject, PlexHistory, PlexSession
 from plexapi.exceptions import BadRequest
 from plexapi.mixins import (
     AdvancedSettingsMixin, SplitMergeMixin, UnmatchMatchMixin, ExtrasMixin, HubsMixin, PlayedUnplayedMixin, RatingMixin,
-    ArtUrlMixin, ArtMixin, PosterUrlMixin, PosterMixin, ThemeUrlMixin, ThemeMixin,
+    ArtUrlMixin, ArtMixin, LogoMixin, PosterUrlMixin, PosterMixin, ThemeUrlMixin, ThemeMixin,
     MovieEditMixins, ShowEditMixins, SeasonEditMixins, EpisodeEditMixins,
     WatchlistMixin
 )
@@ -26,6 +26,7 @@ class Video(PlexPartialObject, PlayedUnplayedMixin):
             artBlurHash (str): BlurHash string for artwork image.
             fields (List<:class:`~plexapi.media.Field`>): List of field objects.
             guid (str): Plex GUID for the movie, show, season, episode, or clip (plex://movie/5d776b59ad5437001f79c6f8).
+            images (List<:class:`~plexapi.media.Image`>): List of image objects.
             key (str): API URL (/library/metadata/<ratingkey>).
             lastRatedAt (datetime): Datetime the item was last rated.
             lastViewedAt (datetime): Datetime the item was last played.
@@ -53,6 +54,7 @@ class Video(PlexPartialObject, PlayedUnplayedMixin):
         self.artBlurHash = data.attrib.get('artBlurHash')
         self.fields = self.findItems(data, media.Field)
         self.guid = data.attrib.get('guid')
+        self.images = self.findItems(data, media.Image)
         self.key = data.attrib.get('key', '')
         self.lastRatedAt = utils.toDatetime(data.attrib.get('lastRatedAt'))
         self.lastViewedAt = utils.toDatetime(data.attrib.get('lastViewedAt'))
@@ -332,7 +334,7 @@ class Video(PlexPartialObject, PlayedUnplayedMixin):
 class Movie(
     Video, Playable,
     AdvancedSettingsMixin, SplitMergeMixin, UnmatchMatchMixin, ExtrasMixin, HubsMixin, RatingMixin,
-    ArtMixin, PosterMixin, ThemeMixin,
+    ArtMixin, LogoMixin, PosterMixin, ThemeMixin,
     MovieEditMixins,
     WatchlistMixin
 ):
@@ -447,6 +449,11 @@ class Movie(
         """ Returns True if the movie has a credits marker. """
         return any(marker.type == 'credits' for marker in self.markers)
 
+    @property
+    def hasVoiceActivity(self):
+        """ Returns True if any of the media has voice activity analyzed. """
+        return any(media.hasVoiceActivity for media in self.media)
+
     @property
     def hasPreviewThumbnails(self):
         """ Returns True if any of the media parts has generated preview (BIF) thumbnails. """
@@ -489,7 +496,7 @@ class Movie(
 class Show(
     Video,
     AdvancedSettingsMixin, SplitMergeMixin, UnmatchMatchMixin, ExtrasMixin, HubsMixin, RatingMixin,
-    ArtMixin, PosterMixin, ThemeMixin,
+    ArtMixin, LogoMixin, PosterMixin, ThemeMixin,
     ShowEditMixins,
     WatchlistMixin
 ):
@@ -1077,6 +1084,11 @@ class Episode(
         """ Returns True if the episode has a credits marker. """
         return any(marker.type == 'credits' for marker in self.markers)
 
+    @property
+    def hasVoiceActivity(self):
+        """ Returns True if any of the media has voice activity analyzed. """
+        return any(media.hasVoiceActivity for media in self.media)
+
     @property
     def hasPreviewThumbnails(self):
         """ Returns True if any of the media parts has generated preview (BIF) thumbnails. """
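This section appears to be `video.py`, wiring the new pieces into the `Video` subclasses: `images` on every `Video`, `LogoMixin` on `Movie` and `Show`, and a `hasVoiceActivity` property on `Movie` and `Episode`. A hedged sketch with placeholder connection details and section names:

```python
from plexapi.server import PlexServer

plex = PlexServer("http://localhost:32400", "<PLEX_TOKEN>")

movie = plex.library.section("Movies").all()[0]
print(movie.images)               # Image tags are now parsed on every Video
print(movie.hasVoiceActivity)     # True if any Media has voice activity analyzed

show = plex.library.section("TV Shows").all()[0]
print(show.logoUrl)               # LogoMixin is now mixed into Movie and Show
```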

@@ -25,7 +25,7 @@ musicbrainzngs==0.7.1
 packaging==24.2
 paho-mqtt==2.1.0
 platformdirs==4.3.6
-plexapi==4.15.16
+plexapi==4.16.0
 portend==3.2.0
 profilehooks==1.13.0
 PyJWT==2.9.0