Mirror of https://github.com/Tautulli/Tautulli.git, synced 2025-07-06 05:01:14 -07:00
Update PlexAPI to 3.3.0
This commit is contained in:
parent 2fa62f71e1
commit c55c00a19e
17 changed files with 1559 additions and 135 deletions
@@ -7,15 +7,18 @@ import time
 import zipfile
 from datetime import datetime
 from getpass import getpass
-from threading import Thread
+from threading import Thread, Event
 from tqdm import tqdm
 from plexapi import compat
 from plexapi.exceptions import NotFound
 
+log = logging.getLogger('plexapi')
+
 # Search Types - Plex uses these to filter specific media types when searching.
 # Library Types - Populated at runtime
-SEARCHTYPES = {'movie': 1, 'show': 2, 'season': 3, 'episode': 4,
-               'artist': 8, 'album': 9, 'track': 10, 'photo': 14}
+SEARCHTYPES = {'movie': 1, 'show': 2, 'season': 3, 'episode': 4, 'trailer': 5, 'comic': 6, 'person': 7,
+               'artist': 8, 'album': 9, 'track': 10, 'picture': 11, 'clip': 12, 'photo': 13, 'photoalbum': 14,
+               'playlist': 15, 'playlistFolder': 16, 'collection': 18, 'userPlaylistItem': 1001}
 PLEXOBJECTS = {}
 
 
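Hunk note: with log = logging.getLogger('plexapi') now created at module level, functions in this module (such as toDatetime below) log through the shared 'plexapi' logger instead of importing one locally. A minimal, hypothetical consumer-side sketch of wiring a handler to that logger so those messages become visible; the handler, format string, and level are illustrative choices, not part of the commit:

    import logging

    # Attach a handler to the module-level 'plexapi' logger added above so that
    # log.info(...) calls from the library show up on stderr.
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s'))

    log = logging.getLogger('plexapi')
    log.addHandler(handler)
    log.setLevel(logging.INFO)

    log.info('plexapi logger configured')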
@@ -129,10 +132,10 @@ def searchType(libtype):
     """ Returns the integer value of the library string type.
 
         Parameters:
-            libtype (str): LibType to lookup (movie, show, season, episode, artist, album, track)
-
+            libtype (str): LibType to lookup (movie, show, season, episode, artist, album, track,
+                collection)
         Raises:
-            NotFound: Unknown libtype
+            :class:`plexapi.exceptions.NotFound`: Unknown libtype
     """
     libtype = compat.ustr(libtype)
     if libtype in [compat.ustr(v) for v in SEARCHTYPES.values()]:
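A quick usage sketch of searchType() against the expanded SEARCHTYPES table, assuming plexapi 3.3.0 is installed and these helpers live in plexapi.utils as in upstream python-plexapi; per the mapping above, 'collection' resolves to 18, and an unknown libtype raises the NotFound documented in the docstring:

    from plexapi.exceptions import NotFound
    from plexapi.utils import searchType

    print(searchType('collection'))  # 18, per the SEARCHTYPES mapping above

    try:
        searchType('bogus')
    except NotFound as err:
        print('unknown libtype rejected: %s' % err)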
@@ -144,22 +147,26 @@ def searchType(libtype):
 
 def threaded(callback, listargs):
     """ Returns the result of <callback> for each set of \*args in listargs. Each call
-        to <callback. is called concurrently in their own separate threads.
+        to <callback> is called concurrently in their own separate threads.
 
         Parameters:
             callback (func): Callback function to apply to each set of \*args.
             listargs (list): List of lists; \*args to pass each thread.
     """
     threads, results = [], []
+    job_is_done_event = Event()
     for args in listargs:
         args += [results, len(results)]
         results.append(None)
-        threads.append(Thread(target=callback, args=args))
+        threads.append(Thread(target=callback, args=args, kwargs=dict(job_is_done_event=job_is_done_event)))
         threads[-1].setDaemon(True)
         threads[-1].start()
-    for thread in threads:
-        thread.join()
-    return results
+
+    while not job_is_done_event.is_set():
+        if all([not t.is_alive() for t in threads]):
+            break
+        time.sleep(0.05)
+
+    return [r for r in results if r is not None]
 
 
 def toDatetime(value, format=None):
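The rewritten threaded() above polls a shared Event instead of join()ing each thread, and hands job_is_done_event to every callback so a worker can end the wait early. A self-contained sketch of that contract, with a hypothetical fetch_square callback standing in for real work; the helper body mirrors the diff, only the callback and sample data are invented:

    import time
    from threading import Thread, Event

    def fetch_square(value, results, i, job_is_done_event=None):
        # Hypothetical worker: threaded() appends the shared results list and this
        # thread's slot index to the caller-supplied args, and passes the shared
        # Event as a keyword so a callback may set() it to stop the polling loop.
        time.sleep(0.1)              # stand-in for real work (e.g. an HTTP request)
        results[i] = value * value

    def threaded(callback, listargs):
        # Mirrors the updated helper shown in the hunk above.
        threads, results = [], []
        job_is_done_event = Event()
        for args in listargs:
            args += [results, len(results)]
            results.append(None)
            threads.append(Thread(target=callback, args=args, kwargs=dict(job_is_done_event=job_is_done_event)))
            threads[-1].setDaemon(True)
            threads[-1].start()

        while not job_is_done_event.is_set():
            if all([not t.is_alive() for t in threads]):
                break
            time.sleep(0.05)

        return [r for r in results if r is not None]

    print(threaded(fetch_square, [[2], [3], [4]]))  # [4, 9, 16]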
@@ -171,8 +178,17 @@ def toDatetime(value, format=None):
     """
     if value and value is not None:
         if format:
-            value = datetime.strptime(value, format)
+            try:
+                value = datetime.strptime(value, format)
+            except ValueError:
+                log.info('Failed to parse %s to datetime, defaulting to None', value)
+                return None
         else:
+            # https://bugs.python.org/issue30684
+            # And platform support for before epoch seems to be flaky.
+            # TODO check for others errors too.
+            if int(value) <= 0:
+                value = 86400
             value = datetime.fromtimestamp(int(value))
     return value
 
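A short usage sketch of the hardened toDatetime() above, assuming plexapi 3.3.0 is installed: an unparseable value with an explicit format now logs and returns None instead of raising, and a timestamp at or below zero is bumped to 86400 before datetime.fromtimestamp() to sidestep the pre-epoch issues referenced in the comments:

    from plexapi.utils import toDatetime

    print(toDatetime('2018-01-01', format='%Y-%m-%d'))  # datetime(2018, 1, 1, 0, 0)
    print(toDatetime('not-a-date', format='%Y-%m-%d'))  # None, logged at INFO instead of raising
    print(toDatetime('0'))                              # clamped to epoch + 86400s before fromtimestamp()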
@@ -242,8 +258,6 @@ def download(url, token, filename=None, savepath=None, session=None, chunksize=4
             >>> download(a_episode.getStreamURL(), a_episode.location)
             /path/to/file
     """
-
-    from plexapi import log
     # fetch the data to be saved
     session = session or requests.Session()
     headers = {'X-Plex-Token': token}