mirror of
https://github.com/Tautulli/Tautulli.git
synced 2025-08-22 14:13:40 -07:00
what was i smoking..
This commit is contained in:
parent
339ddec484
commit
726327847d
3 changed files with 105 additions and 14 deletions
28
lib/requests_futures/__init__.py
Normal file
28
lib/requests_futures/__init__.py
Normal file
|
@ -0,0 +1,28 @@
|
||||||
|
# -*- coding: utf-8 -*-

# Requests Futures

"""
async requests HTTP library
~~~~~~~~~~~~~~~~~~~~~
"""

# Package metadata.
__title__ = 'requests-futures'
__version__ = '0.9.5'
__build__ = 0x000000
__author__ = 'Ross McFarland'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2013 Ross McFarland'

import logging

# Attach a no-op handler so applications that import this package without
# configuring logging do not see "No handler found" warnings.
try:
    # logging.NullHandler is available on Python 2.7+ / 3.x.
    from logging import NullHandler
except ImportError:
    class NullHandler(logging.Handler):
        """Minimal stand-in for logging.NullHandler on old interpreters."""

        def emit(self, record):
            # Deliberately discard every record.
            pass

logging.getLogger(__name__).addHandler(NullHandler())
|
73
lib/requests_futures/sessions.py
Normal file
73
lib/requests_futures/sessions.py
Normal file
|
@ -0,0 +1,73 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
"""
|
||||||
|
requests_futures
|
||||||
|
~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
This module provides a small add-on for the requests http library. It makes use
|
||||||
|
of python 3.3's concurrent.futures or the futures backport for previous
|
||||||
|
releases of python.
|
||||||
|
|
||||||
|
from requests_futures import FuturesSession
|
||||||
|
|
||||||
|
session = FuturesSession()
|
||||||
|
# request is run in the background
|
||||||
|
future = session.get('http://httpbin.org/get')
|
||||||
|
# ... do other stuff ...
|
||||||
|
# wait for the request to complete, if it hasn't already
|
||||||
|
response = future.result()
|
||||||
|
print('response status: {0}'.format(response.status_code))
|
||||||
|
print(response.content)
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from concurrent.futures import ThreadPoolExecutor
|
||||||
|
from requests import Session
|
||||||
|
from requests.adapters import DEFAULT_POOLSIZE, HTTPAdapter
|
||||||
|
|
||||||
|
class FuturesSession(Session):
    """requests.Session subclass that performs requests asynchronously.

    HTTP calls are handed off to a concurrent.futures executor; every
    request method therefore returns a Future whose result() is the
    finished Response.
    """

    def __init__(self, executor=None, max_workers=2, *args, **kwargs):
        """Create a FuturesSession.

        Notes
        ~~~~~

        * ProcessPoolExecutor is not supported b/c Response objects are
          not picklable.

        * If you provide both `executor` and `max_workers`, the latter is
          ignored and provided executor is used as is.
        """
        super(FuturesSession, self).__init__(*args, **kwargs)
        if executor is None:
            executor = ThreadPoolExecutor(max_workers=max_workers)
            # Grow the HTTP connection pools to match the worker count so
            # concurrent requests are not serialized on pool checkout.
            if max_workers > DEFAULT_POOLSIZE:
                pool_opts = dict(pool_connections=max_workers,
                                 pool_maxsize=max_workers)
                for scheme in ('https://', 'http://'):
                    self.mount(scheme, HTTPAdapter(**pool_opts))

        self.executor = executor

    def request(self, *args, **kwargs):
        """Maintains the existing api for Session.request.

        Used by all of the higher level methods, e.g. Session.get.

        The background_callback param allows you to do some processing on the
        response in the background, e.g. call resp.json() so that json parsing
        happens in the background thread.
        """
        parent_request = super(FuturesSession, self).request
        callback = kwargs.pop('background_callback', None)

        if not callback:
            job = parent_request
        else:
            def job(*a, **kw):
                # Run the real request, then the caller's hook, inside the
                # worker thread so e.g. resp.json() parses off the main thread.
                response = parent_request(*a, **kw)
                callback(self, response)
                return response

        return self.executor.submit(job, *args, **kwargs)
|
|
@ -706,12 +706,11 @@ class PmsConnect(object):
|
||||||
Returns a list of all unwatched shows
|
Returns a list of all unwatched shows
|
||||||
|
|
||||||
named args: Used for enabled and disabling sorting/filtering
|
named args: Used for enabling and disabling sorting/filtering
|
||||||
kwargs: Used for filtering inside the dicts. Adding type="movie" will only list movies
|
kwargs: Used for filtering inside the dicts. Adding type="movie" will only return movies
|
||||||
|
|
||||||
|
|
||||||
Output: List for dicts
|
Output: List of dicts
|
||||||
|
|
||||||
# Adding all_params=1 Makes the call insane slow.
|
|
||||||
"""
|
"""
|
||||||
# Add a cache?
|
# Add a cache?
|
||||||
|
|
||||||
|
@ -896,20 +895,11 @@ class PmsConnect(object):
|
||||||
if kwargs:
|
if kwargs:
|
||||||
logger.debug('kwargs was given %s filtering the dicts based on them' % kwargs)
|
logger.debug('kwargs was given %s filtering the dicts based on them' % kwargs)
|
||||||
if not all_params:
|
if not all_params:
|
||||||
t_result = [d for d in t_result for k,v in kwargs.iteritems() if d.get(k) == maybe_number(kwargs.get(k))]
|
t_result = [d for d in t_result if any(d.get(k) == maybe_number(kwargs[k]) for k in kwargs)]
|
||||||
else:
|
else:
|
||||||
logger.debug('All kwargs is required to be in the list')
|
logger.debug('All kwargs is required to be in the list')
|
||||||
all_params_result = []
|
t_result = [d for d in t_result if all(d.get(k, None) == maybe_number(kwargs[k]) for k in kwargs)]
|
||||||
|
|
||||||
# Please fix, i would like to do this
|
|
||||||
# faster but i don't know how to..
|
|
||||||
for item in t_result:
|
|
||||||
if all([item.get(k) == maybe_number(kwargs.get(k)) for k,v in kwargs.iteritems() for i in t_result]):
|
|
||||||
all_params_result.append(item)
|
|
||||||
|
|
||||||
if all_params_result:
|
|
||||||
t_result = all_params_result
|
|
||||||
|
|
||||||
if use_watched_older_then_sort:
|
if use_watched_older_then_sort:
|
||||||
t_result = [i for i in t_result if not i['viewCount'] or i['lastViewedAt'] <= watched_older_then]
|
t_result = [i for i in t_result if not i['viewCount'] or i['lastViewedAt'] <= watched_older_then]
|
||||||
|
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue