mirror of https://github.com/clinton-hall/nzbToMedia.git
synced 2025-07-14 01:02:55 -07:00

commit 086eaaa516 (parent 504ea8ac45)

    Updated requests lib. More code cleanup done.

19 changed files with 347 additions and 123 deletions
lib/requests/adapters.py:

@@ -9,7 +9,6 @@ and maintain connections.
 """

 import socket
-import copy

 from .models import Response
 from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
@@ -63,7 +62,7 @@ class HTTPAdapter(BaseAdapter):

     Usage::

-      >>> import lib.requests
+      >>> import requests
       >>> s = requests.Session()
       >>> a = requests.adapters.HTTPAdapter(max_retries=3)
       >>> s.mount('http://', a)
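Outside the vendored tree the corrected doctest works as regular code; a minimal sketch, assuming a normally installed requests package:

import requests
from requests.adapters import HTTPAdapter

s = requests.Session()
# Every request to an http:// URL made through this session now retries
# failed connections up to 3 times before raising.
s.mount('http://', HTTPAdapter(max_retries=3))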
lib/requests/models.py:

@@ -408,9 +408,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):

         is_stream = all([
             hasattr(data, '__iter__'),
-            not isinstance(data, basestring),
-            not isinstance(data, list),
-            not isinstance(data, dict)
+            not isinstance(data, (basestring, list, tuple, dict))
         ])

         try:
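The collapsed isinstance() check also starts rejecting tuples, so tuple bodies are now treated as form data rather than streamed. A standalone sketch of the predicate (hypothetical helper name; requests' compat shim maps basestring to (str, bytes) on Python 3):

basestring = (str, bytes)  # Python 3 spelling of Python 2's basestring

def looks_like_stream(data):
    # Streams are iterable but not strings or form-style containers.
    return all([
        hasattr(data, '__iter__'),
        not isinstance(data, (basestring, list, tuple, dict)),
    ])

print(looks_like_stream(chunk for chunk in [b'a', b'b']))  # True: generator
print(looks_like_stream({'key': 'value'}))                 # False: form dict
print(looks_like_stream('plain body'))                     # False: string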
lib/requests/packages/urllib3/__init__.py:

@@ -10,7 +10,7 @@ urllib3 - Thread-safe connection pooling and re-using.

 __author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
 __license__ = 'MIT'
-__version__ = 'dev'
+__version__ = '1.8'


 from .connectionpool import (
lib/requests/packages/urllib3/_collections.py:

@@ -4,7 +4,7 @@
 # This module is part of urllib3 and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

-from collections import MutableMapping
+from collections import Mapping, MutableMapping
 try:
     from threading import RLock
 except ImportError:  # Platform-specific: No threads available
@@ -20,9 +20,10 @@ try:  # Python 2.7+
     from collections import OrderedDict
 except ImportError:
     from .packages.ordered_dict import OrderedDict
+from .packages.six import itervalues


-__all__ = ['RecentlyUsedContainer']
+__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict']


 _Null = object()
@@ -101,3 +102,104 @@ class RecentlyUsedContainer(MutableMapping):
     def keys(self):
         with self.lock:
             return self._container.keys()
+
+
+class HTTPHeaderDict(MutableMapping):
+    """
+    :param headers:
+        An iterable of field-value pairs. Must not contain multiple field names
+        when compared case-insensitively.
+
+    :param kwargs:
+        Additional field-value pairs to pass in to ``dict.update``.
+
+    A ``dict`` like container for storing HTTP Headers.
+
+    Field names are stored and compared case-insensitively in compliance with
+    RFC 2616. Iteration provides the first case-sensitive key seen for each
+    case-insensitive pair.
+
+    Using ``__setitem__`` syntax overwrites fields that compare equal
+    case-insensitively in order to maintain ``dict``'s api. For fields that
+    compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
+    in a loop.
+
+    If multiple fields that are equal case-insensitively are passed to the
+    constructor or ``.update``, the behavior is undefined and some will be
+    lost.
+
+    >>> headers = HTTPHeaderDict()
+    >>> headers.add('Set-Cookie', 'foo=bar')
+    >>> headers.add('set-cookie', 'baz=quxx')
+    >>> headers['content-length'] = '7'
+    >>> headers['SET-cookie']
+    'foo=bar, baz=quxx'
+    >>> headers['Content-Length']
+    '7'
+
+    If you want to access the raw headers with their original casing
+    for debugging purposes you can access the private ``._data`` attribute
+    which is a normal python ``dict`` that maps the case-insensitive key to a
+    list of tuples stored as (case-sensitive-original-name, value). Using the
+    structure from above as our example:
+
+    >>> headers._data
+    {'set-cookie': [('Set-Cookie', 'foo=bar'), ('set-cookie', 'baz=quxx')],
+    'content-length': [('content-length', '7')]}
+    """
+
+    def __init__(self, headers=None, **kwargs):
+        self._data = {}
+        if headers is None:
+            headers = {}
+        self.update(headers, **kwargs)
+
+    def add(self, key, value):
+        """Adds a (name, value) pair, doesn't overwrite the value if it already
+        exists.
+
+        >>> headers = HTTPHeaderDict(foo='bar')
+        >>> headers.add('Foo', 'baz')
+        >>> headers['foo']
+        'bar, baz'
+        """
+        self._data.setdefault(key.lower(), []).append((key, value))
+
+    def getlist(self, key):
+        """Returns a list of all the values for the named field. Returns an
+        empty list if the key doesn't exist."""
+        return self[key].split(', ') if key in self else []
+
+    def copy(self):
+        h = HTTPHeaderDict()
+        for key in self._data:
+            for rawkey, value in self._data[key]:
+                h.add(rawkey, value)
+        return h
+
+    def __eq__(self, other):
+        if not isinstance(other, Mapping):
+            return False
+        other = HTTPHeaderDict(other)
+        return dict((k1, self[k1]) for k1 in self._data) == \
+                dict((k2, other[k2]) for k2 in other._data)
+
+    def __getitem__(self, key):
+        values = self._data[key.lower()]
+        return ', '.join(value[1] for value in values)
+
+    def __setitem__(self, key, value):
+        self._data[key.lower()] = [(key, value)]
+
+    def __delitem__(self, key):
+        del self._data[key.lower()]
+
+    def __len__(self):
+        return len(self._data)
+
+    def __iter__(self):
+        for headers in itervalues(self._data):
+            yield headers[0][0]
+
+    def __repr__(self):
+        return '%s(%r)' % (self.__class__.__name__, dict(self.items()))
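A brief usage sketch of the class added above, assuming HTTPHeaderDict is in scope: values stored under field names that match case-insensitively are joined with ', ' on lookup and can be split back apart with getlist():

headers = HTTPHeaderDict()
headers.add('Set-Cookie', 'foo=bar')
headers.add('set-cookie', 'baz=quxx')

assert headers['SET-cookie'] == 'foo=bar, baz=quxx'
assert headers.getlist('set-cookie') == ['foo=bar', 'baz=quxx']
assert len(headers) == 1                # one case-insensitive field name
assert list(headers) == ['Set-Cookie']  # iteration yields the first raw casing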
lib/requests/packages/urllib3/connection.py:

@@ -4,6 +4,7 @@
 # This module is part of urllib3 and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

+import sys
 import socket
 from socket import timeout as SocketTimeout

@@ -38,6 +39,7 @@ from .exceptions import (
     ConnectTimeoutError,
 )
 from .packages.ssl_match_hostname import match_hostname
+from .packages import six
 from .util import (
     assert_fingerprint,
     resolve_cert_reqs,
@@ -53,26 +55,39 @@ port_by_scheme = {


 class HTTPConnection(_HTTPConnection, object):
+    """
+    Based on httplib.HTTPConnection but provides an extra constructor
+    backwards-compatibility layer between older and newer Pythons.
+    """
+
     default_port = port_by_scheme['http']

     # By default, disable Nagle's Algorithm.
     tcp_nodelay = 1

+    def __init__(self, *args, **kw):
+        if six.PY3:  # Python 3
+            kw.pop('strict', None)
+
+        if sys.version_info < (2, 7):  # Python 2.6 and earlier
+            kw.pop('source_address', None)
+            self.source_address = None
+
+        _HTTPConnection.__init__(self, *args, **kw)
+
     def _new_conn(self):
         """ Establish a socket connection and set nodelay settings on it

         :return: a new socket connection
         """
-        try:
-            conn = socket.create_connection(
-                (self.host, self.port),
-                self.timeout,
-                self.source_address,
-            )
-        except AttributeError:  # Python 2.6
+        extra_args = []
+        if self.source_address:  # Python 2.7+
+            extra_args.append(self.source_address)
+
         conn = socket.create_connection(
             (self.host, self.port),
             self.timeout,
+            *extra_args
         )
         conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY,
                         self.tcp_nodelay)
@@ -95,10 +110,12 @@ class HTTPSConnection(HTTPConnection):
     def __init__(self, host, port=None, key_file=None, cert_file=None,
                  strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                  source_address=None):
-        try:
-            HTTPConnection.__init__(self, host, port, strict, timeout, source_address)
-        except TypeError:  # Python 2.6
-            HTTPConnection.__init__(self, host, port, strict, timeout)
+        HTTPConnection.__init__(self, host, port,
+                                strict=strict,
+                                timeout=timeout,
+                                source_address=source_address)

         self.key_file = key_file
         self.cert_file = cert_file
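The extra_args pattern above exists because Python 2.6's socket.create_connection() has no source_address parameter; the optional argument is appended only when set. The same idea in isolation (hypothetical function name):

import socket

def open_tcp(host, port, timeout, source_address=None):
    extra_args = []
    if source_address:  # socket.create_connection() grew this in Python 2.7
        extra_args.append(source_address)
    return socket.create_connection((host, port), timeout, *extra_args)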
lib/requests/packages/urllib3/connectionpool.py:

@@ -19,6 +19,7 @@ except ImportError:

 from .exceptions import (
     ClosedPoolError,
+    ConnectionError,
     ConnectTimeoutError,
     EmptyPoolError,
     HostChangedError,
@@ -170,13 +171,9 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
         log.info("Starting new HTTP connection (%d): %s" %
                  (self.num_connections, self.host))

-        extra_params = {}
-        if not six.PY3:  # Python 2
-            extra_params['strict'] = self.strict
-
         conn = self.ConnectionCls(host=self.host, port=self.port,
                                   timeout=self.timeout.connect_timeout,
-                                  **extra_params)
+                                  strict=self.strict)
         if self.proxy is not None:
             # Enable Nagle's algorithm for proxies, to avoid packet
             # fragmentation.
@@ -238,8 +235,9 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
             pass
         except Full:
             # This should never happen if self.block == True
-            log.warning("HttpConnectionPool is full, discarding connection: %s"
-                        % self.host)
+            log.warning(
+                "Connection pool is full, discarding connection: %s" %
+                self.host)

         # Connection never got put back into the pool, close it.
         if conn:
@@ -414,10 +412,13 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):

         :param retries:
             Number of retries to allow before raising a MaxRetryError exception.
+            If `False`, then retries are disabled and any exception is raised
+            immediately.

         :param redirect:
             If True, automatically handle redirects (status codes 301, 302,
-            303, 307, 308). Each redirect counts as a retry.
+            303, 307, 308). Each redirect counts as a retry. Disabling retries
+            will disable redirect, too.

         :param assert_same_host:
             If ``True``, will make sure that the host of the pool requests is
@@ -451,7 +452,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
         if headers is None:
             headers = self.headers

-        if retries < 0:
+        if retries < 0 and retries is not False:
             raise MaxRetryError(self, url)

         if release_conn is None:
@@ -470,6 +471,10 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
             headers = headers.copy()
             headers.update(self.proxy_headers)

+        # Must keep the exception bound to a separate variable or else Python 3
+        # complains about UnboundLocalError.
+        err = None
+
         try:
             # Request a connection from the queue
             conn = self._get_conn(timeout=pool_timeout)
@@ -497,38 +502,41 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
             #  ``response.read()``)

         except Empty:
-            # Timed out by queue
+            # Timed out by queue.
             raise EmptyPoolError(self, "No pool connections are available.")

-        except BaseSSLError as e:
+        except (BaseSSLError, CertificateError) as e:
+            # Release connection unconditionally because there is no way to
+            # close it externally in case of exception.
+            release_conn = True
             raise SSLError(e)

-        except CertificateError as e:
-            # Name mismatch
-            raise SSLError(e)
-
-        except TimeoutError as e:
-            # Connection broken, discard.
-            conn = None
-            # Save the error off for retry logic.
-            err = e
-
-            if retries == 0:
+        except (TimeoutError, HTTPException, SocketError) as e:
+            if conn:
+                # Discard the connection for these exceptions. It will be
+                # replaced during the next _get_conn() call.
+                conn.close()
+                conn = None
+
+            if not retries:
+                if isinstance(e, TimeoutError):
+                    # TimeoutError is exempt from MaxRetryError-wrapping.
+                    # FIXME: ... Not sure why. Add a reason here.
                     raise

-        except (HTTPException, SocketError) as e:
-            # Connection broken, discard. It will be replaced next _get_conn().
-            conn = None
-            # This is necessary so we can access e below
-            err = e
-
-            if retries == 0:
-                if isinstance(e, SocketError) and self.proxy is not None:
-                    raise ProxyError('Cannot connect to proxy. '
-                                     'Socket error: %s.' % e)
-                else:
-                    raise MaxRetryError(self, url, e)
+                # Wrap unexpected exceptions with the most appropriate
+                # module-level exception and re-raise.
+                if isinstance(e, SocketError) and self.proxy:
+                    raise ProxyError('Cannot connect to proxy.', e)
+
+                if retries is False:
+                    raise ConnectionError('Connection failed.', e)
+
+                raise MaxRetryError(self, url, e)
+
+            # Keep track of the error for the retry warning.
+            err = e

         finally:
             if release_conn:
                 # Put the connection back to be reused. If the connection is
@@ -538,7 +546,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):

         if not conn:
             # Try again
-            log.warn("Retrying (%d attempts remain) after connection "
+            log.warning("Retrying (%d attempts remain) after connection "
                         "broken by '%r': %s" % (retries, err, url))
             return self.urlopen(method, url, body, headers, retries - 1,
                                 redirect, assert_same_host,
@@ -547,7 +555,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):

         # Handle redirect?
         redirect_location = redirect and response.get_redirect_location()
-        if redirect_location:
+        if redirect_location and retries is not False:
             if response.status == 303:
                 method = 'GET'
             log.info("Redirecting %s -> %s" % (url, redirect_location))
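Taken together, these hunks give urlopen() a retries=False mode: no retry loop, no redirect following, and socket failures surface as the new ConnectionError rather than MaxRetryError. A hedged sketch against the vendored package path (run from the repository root; host and port are placeholders):

from lib.requests.packages import urllib3
from lib.requests.packages.urllib3.exceptions import ConnectionError

pool = urllib3.PoolManager()
try:
    # retries=False: fail fast, and skip redirect handling entirely.
    r = pool.urlopen('GET', 'http://localhost:9/', retries=False)
except ConnectionError as e:
    print('no retries attempted: %s' % e)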
lib/requests/packages/urllib3/contrib/pyopenssl.py:

@@ -29,9 +29,8 @@ Module Variables
 ----------------

 :var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites.
-    Default: ``EECDH+ECDSA+AESGCM EECDH+aRSA+AESGCM EECDH+ECDSA+SHA256
-    EECDH+aRSA+SHA256 EECDH+aRSA+RC4 EDH+aRSA EECDH RC4 !aNULL !eNULL !LOW !3DES
-    !MD5 !EXP !PSK !SRP !DSS'``
+    Default: ``ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:
+    ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!MD5:!DSS``

 .. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
 .. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
@@ -43,7 +42,7 @@ from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName
 import OpenSSL.SSL
 from pyasn1.codec.der import decoder as der_decoder
 from pyasn1.type import univ, constraint
-from socket import _fileobject
+from socket import _fileobject, timeout
 import ssl
 import select
 from cStringIO import StringIO
@@ -69,12 +68,22 @@ _openssl_verify = {
         + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
 }

-# Default SSL/TLS cipher list.
-# Recommendation by https://community.qualys.com/blogs/securitylabs/2013/08/05/
-# configuring-apache-nginx-and-openssl-for-forward-secrecy
-DEFAULT_SSL_CIPHER_LIST = 'EECDH+ECDSA+AESGCM EECDH+aRSA+AESGCM ' + \
-        'EECDH+ECDSA+SHA256 EECDH+aRSA+SHA256 EECDH+aRSA+RC4 EDH+aRSA ' + \
-        'EECDH RC4 !aNULL !eNULL !LOW !3DES !MD5 !EXP !PSK !SRP !DSS'
+# A secure default.
+# Sources for more information on TLS ciphers:
+#
+# - https://wiki.mozilla.org/Security/Server_Side_TLS
+# - https://www.ssllabs.com/projects/best-practices/index.html
+# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
+#
+# The general intent is:
+# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
+# - prefer ECDHE over DHE for better performance,
+# - prefer any AES-GCM over any AES-CBC for better performance and security,
+# - use 3DES as fallback which is secure but slow,
+# - disable NULL authentication, MD5 MACs and DSS for security reasons.
+DEFAULT_SSL_CIPHER_LIST = "ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:" + \
+        "ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:" + \
+        "!aNULL:!MD5:!DSS"


 orig_util_HAS_SNI = util.HAS_SNI
@@ -139,6 +148,13 @@ def get_subj_alt_name(peer_cert):

 class fileobject(_fileobject):

+    def _wait_for_sock(self):
+        rd, wd, ed = select.select([self._sock], [], [],
+                                   self._sock.gettimeout())
+        if not rd:
+            raise timeout()
+
+
     def read(self, size=-1):
         # Use max, disallow tiny reads in a loop as they are very inefficient.
         # We never leave read() with any leftover data from a new recv() call
@@ -156,6 +172,7 @@ class fileobject(_fileobject):
             try:
                 data = self._sock.recv(rbufsize)
             except OpenSSL.SSL.WantReadError:
+                self._wait_for_sock()
                 continue
             if not data:
                 break
@@ -183,6 +200,7 @@ class fileobject(_fileobject):
                 try:
                     data = self._sock.recv(left)
                 except OpenSSL.SSL.WantReadError:
+                    self._wait_for_sock()
                     continue
                 if not data:
                     break
@@ -234,6 +252,7 @@ class fileobject(_fileobject):
                             break
                         buffers.append(data)
                 except OpenSSL.SSL.WantReadError:
+                    self._wait_for_sock()
                     continue
                 break
             return "".join(buffers)
@@ -244,6 +263,7 @@ class fileobject(_fileobject):
                 try:
                     data = self._sock.recv(self._rbufsize)
                 except OpenSSL.SSL.WantReadError:
+                    self._wait_for_sock()
                     continue
                 if not data:
                     break
@@ -271,6 +291,7 @@ class fileobject(_fileobject):
                 try:
                     data = self._sock.recv(self._rbufsize)
                 except OpenSSL.SSL.WantReadError:
+                    self._wait_for_sock()
                     continue
                 if not data:
                     break
@@ -366,6 +387,8 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
             ctx.load_verify_locations(ca_certs, None)
         except OpenSSL.SSL.Error as e:
             raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e)
+    else:
+        ctx.set_default_verify_paths()

     # Disable TLS compression to mitigate CRIME attack (issue #309)
     OP_NO_COMPRESSION = 0x20000
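Each +self._wait_for_sock() insertion patches the same flaw: pyOpenSSL raises WantReadError when no decrypted bytes are ready, and retrying with a bare continue busy-loops the CPU. The helper blocks on select() until the raw socket is readable or the timeout lapses; the same idea in isolation:

import select
from socket import timeout as SocketTimeout

def wait_readable(sock):
    # Block until the underlying socket has data, honoring its timeout.
    rd, _, _ = select.select([sock], [], [], sock.gettimeout())
    if not rd:
        raise SocketTimeout()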
lib/requests/packages/urllib3/exceptions.py:

@@ -44,6 +44,11 @@ class ProxyError(HTTPError):
     pass


+class ConnectionError(HTTPError):
+    "Raised when a normal connection fails."
+    pass
+
+
 class DecodeError(HTTPError):
     "Raised when automatic decoding based on Content-Type fails."
     pass
lib/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py:

@@ -7,7 +7,7 @@ except ImportError:
         from backports.ssl_match_hostname import CertificateError, match_hostname
     except ImportError:
         # Our vendored copy
-        from _implementation import CertificateError, match_hostname
+        from ._implementation import CertificateError, match_hostname

 # Not needed, but documenting what we provide.
 __all__ = ('CertificateError', 'match_hostname')
lib/requests/packages/urllib3/response.py:

@@ -9,6 +9,7 @@ import logging
 import zlib
 import io

+from ._collections import HTTPHeaderDict
 from .exceptions import DecodeError
 from .packages.six import string_types as basestring, binary_type
 from .util import is_fp_closed
@@ -79,7 +80,10 @@ class HTTPResponse(io.IOBase):
     def __init__(self, body='', headers=None, status=0, version=0, reason=None,
                  strict=0, preload_content=True, decode_content=True,
                  original_response=None, pool=None, connection=None):
-        self.headers = headers or {}
+
+        self.headers = HTTPHeaderDict()
+        if headers:
+            self.headers.update(headers)
         self.status = status
         self.version = version
         self.reason = reason
@@ -249,17 +253,9 @@ class HTTPResponse(io.IOBase):
             with ``original_response=r``.
         """

-        # Normalize headers between different versions of Python
-        headers = {}
+        headers = HTTPHeaderDict()
         for k, v in r.getheaders():
-            # Python 3: Header keys are returned capitalised
-            k = k.lower()
-
-            has_value = headers.get(k)
-            if has_value:  # Python 3: Repeating header keys are unmerged.
-                v = ', '.join([has_value, v])
-
-            headers[k] = v
+            headers.add(k, v)

         # HTTPResponse objects in Python 3 don't have a .strict attribute
         strict = getattr(r, 'strict', 0)
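With HTTPHeaderDict in place, the hand-rolled lower-casing and comma-joining above disappears: from_httplib() just add()s every raw pair and merging happens lazily on lookup. A sketch with hypothetical header values:

headers = HTTPHeaderDict()
for k, v in [('Set-Cookie', 'a=1'), ('Set-Cookie', 'b=2'), ('Content-Length', '7')]:
    headers.add(k, v)                 # what r.getheaders() would feed in

print(headers['set-cookie'])          # 'a=1, b=2' (merged on access)
print(headers.getlist('Set-Cookie'))  # ['a=1', 'b=2'] (split back apart)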
lib/requests/sessions.py:

@@ -12,6 +12,7 @@ import os
 from collections import Mapping
 from datetime import datetime

+from .auth import _basic_auth_str
 from .compat import cookielib, OrderedDict, urljoin, urlparse, builtin_str
 from .cookies import (
     cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
@@ -23,7 +24,10 @@ from .structures import CaseInsensitiveDict

 from .adapters import HTTPAdapter

-from .utils import requote_uri, get_environ_proxies, get_netrc_auth
+from .utils import (
+    requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
+    get_auth_from_url
+)

 from .status_codes import codes

@@ -154,22 +158,15 @@ class SessionRedirectMixin(object):
             prepared_request._cookies.update(self.cookies)
             prepared_request.prepare_cookies(prepared_request._cookies)

-            if 'Authorization' in headers:
-                # If we get redirected to a new host, we should strip out any
-                # authentication headers.
-                original_parsed = urlparse(resp.request.url)
-                redirect_parsed = urlparse(url)
-
-                if (original_parsed.hostname != redirect_parsed.hostname):
-                    del headers['Authorization']
-
-            # .netrc might have more auth for us.
-            new_auth = get_netrc_auth(url) if self.trust_env else None
-            if new_auth is not None:
-                prepared_request.prepare_auth(new_auth)
+            # Rebuild auth and proxy information.
+            proxies = self.rebuild_proxies(prepared_request, proxies)
+            self.rebuild_auth(prepared_request, resp)
+
+            # Override the original request.
+            req = prepared_request

             resp = self.send(
-                prepared_request,
+                req,
                 stream=stream,
                 timeout=timeout,
                 verify=verify,
@@ -183,6 +180,68 @@ class SessionRedirectMixin(object):
             i += 1
             yield resp

+    def rebuild_auth(self, prepared_request, response):
+        """
+        When being redirected we may want to strip authentication from the
+        request to avoid leaking credentials. This method intelligently removes
+        and reapplies authentication where possible to avoid credential loss.
+        """
+        headers = prepared_request.headers
+        url = prepared_request.url
+
+        if 'Authorization' in headers:
+            # If we get redirected to a new host, we should strip out any
+            # authentication headers.
+            original_parsed = urlparse(response.request.url)
+            redirect_parsed = urlparse(url)
+
+            if (original_parsed.hostname != redirect_parsed.hostname):
+                del headers['Authorization']
+
+        # .netrc might have more auth for us on our new host.
+        new_auth = get_netrc_auth(url) if self.trust_env else None
+        if new_auth is not None:
+            prepared_request.prepare_auth(new_auth)
+
+        return
+
+    def rebuild_proxies(self, prepared_request, proxies):
+        """
+        This method re-evaluates the proxy configuration by considering the
+        environment variables. If we are redirected to a URL covered by
+        NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
+        proxy keys for this URL (in case they were stripped by a previous
+        redirect).
+
+        This method also replaces the Proxy-Authorization header where
+        necessary.
+        """
+        headers = prepared_request.headers
+        url = prepared_request.url
+        new_proxies = {}
+
+        if not should_bypass_proxies(url):
+            environ_proxies = get_environ_proxies(url)
+            scheme = urlparse(url).scheme
+
+            proxy = environ_proxies.get(scheme)
+
+            if proxy:
+                new_proxies.setdefault(scheme, environ_proxies[scheme])
+
+        if 'Proxy-Authorization' in headers:
+            del headers['Proxy-Authorization']
+
+        try:
+            username, password = get_auth_from_url(new_proxies[scheme])
+        except KeyError:
+            username, password = None, None
+
+        if username and password:
+            headers['Proxy-Authorization'] = _basic_auth_str(username, password)
+
+        return new_proxies
+

 class Session(SessionRedirectMixin):
     """A Requests session.
@@ -527,7 +586,7 @@ class Session(SessionRedirectMixin):
         history.insert(0, r)
         # Get the last request made
         r = history.pop()
-        r.history = tuple(history)
+        r.history = history

         return r
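The redirect loop now delegates to two focused helpers: rebuild_auth() drops the Authorization header whenever the redirect target's host differs from the original request's, and rebuild_proxies() re-resolves the proxy map on every hop. A sketch of the host-comparison rule with hypothetical URLs (Python 2 imports, matching this codebase):

from urlparse import urlparse  # Python 2; urllib.parse on Python 3

def should_strip_auth(old_url, new_url):
    # Mirrors rebuild_auth(): credentials survive same-host redirects only.
    return urlparse(old_url).hostname != urlparse(new_url).hostname

print(should_strip_auth('http://api.example.com/a', 'http://other.example.net/b'))  # True
print(should_strip_auth('http://api.example.com/a', 'http://api.example.com/b'))    # False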
lib/requests/utils.py:

@@ -61,7 +61,7 @@ def super_len(o):
         return os.fstat(fileno).st_size

     if hasattr(o, 'getvalue'):
-        # e.g. BytesIO, cStringIO.StringI
+        # e.g. BytesIO, cStringIO.StringIO
         return len(o.getvalue())

@@ -466,9 +466,10 @@ def is_valid_cidr(string_network):
     return True


-def get_environ_proxies(url):
-    """Return a dict of environment proxies."""
+def should_bypass_proxies(url):
+    """
+    Returns whether we should bypass proxies or not.
+    """
     get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())

     # First check whether no_proxy is defined. If it is, check that the URL
@@ -486,13 +487,13 @@ def get_environ_proxies(url):
         for proxy_ip in no_proxy:
             if is_valid_cidr(proxy_ip):
                 if address_in_network(ip, proxy_ip):
-                    return {}
+                    return True
         else:
             for host in no_proxy:
                 if netloc.endswith(host) or netloc.split(':')[0].endswith(host):
                     # The URL does match something in no_proxy, so we don't want
                     # to apply the proxies on this URL.
-                    return {}
+                    return True

     # If the system proxy settings indicate that this URL should be bypassed,
     # don't proxy.
@@ -506,11 +507,15 @@ def get_environ_proxies(url):
             bypass = False

         if bypass:
-            return {}
+            return True

-    # If we get here, we either didn't have no_proxy set or we're not going
-    # anywhere that no_proxy applies to, and the system settings don't require
-    # bypassing the proxy for the current URL.
+    return False
+
+
+def get_environ_proxies(url):
+    """Return a dict of environment proxies."""
+    if should_bypass_proxies(url):
+        return {}
+    else:
         return getproxies()
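After this refactor the bypass decision lives in one function, and get_environ_proxies() is a thin wrapper over it, which is what lets sessions.py re-check proxies per redirect hop. A quick sketch (run from the repository root; the no_proxy value is a placeholder):

import os
os.environ['no_proxy'] = 'localhost,127.0.0.1'

from lib.requests.utils import should_bypass_proxies, get_environ_proxies

print(should_bypass_proxies('http://localhost:8080/'))  # True: matched no_proxy
print(get_environ_proxies('http://localhost:8080/'))    # {}: proxies stripped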
nzbtomedia/autoProcess/autoProcessComics.py:

@@ -58,8 +58,7 @@ class autoProcessComics:
         else:
             protocol = "http://"

-        url = protocol + host + ":" + port + web_root + "/post_process"
-
+        url = "%s%s:%s%s/post_process" % (protocol, host, port, web_root)

         logger.debug("Opening URL: %s", url)

         try:

nzbtomedia/autoProcess/autoProcessGames.py:

@@ -43,7 +43,7 @@ class autoProcessGames:

         nzbName, dirName = convert_to_ascii(nzbName, dirName)

-        baseURL = protocol + host + ":" + port + web_root + "/api"
+        url = "%s%s:%s%s/api" % (protocol, host, port, web_root)

         fields = nzbName.split("-")

@@ -59,8 +59,6 @@ class autoProcessGames:
         params['db_id'] = gamezID
         params['status'] = downloadStatus

-        url = baseURL
-
         logger.debug("Opening URL: %s", url)

         try:

nzbtomedia/autoProcess/autoProcessMovie.py:

@@ -147,7 +147,7 @@ class autoProcessMovie:
         else:
             protocol = "http://"

-        baseURL = protocol + host + ":" + port + web_root + "/api/" + apikey
+        baseURL = "%s%s:%s%s/api/%s" % (protocol, host, port, web_root, apikey)

         releases = self.get_releases(baseURL, download_id, dirName, nzbName)

nzbtomedia/autoProcess/autoProcessMusic.py:

@@ -74,7 +74,7 @@ class autoProcessMusic:

         nzbName, dirName = convert_to_ascii(nzbName, dirName)

-        baseURL = protocol + host + ":" + port + web_root + "/api"
+        url = "%s%s:%s%s/api" % (protocol,host,port,web_root)

         if status == 0:

@@ -87,7 +87,6 @@ class autoProcessMusic:
             dirName_new = os.path.join(remote_path, os.path.basename(os.path.dirname(dirName))).replace("\\", "/")
             params['dir'] = dirName_new

-        url = baseURL

         release_status = self.get_status(url, apikey, dirName)

nzbtomedia/autoProcess/autoProcessTV.py:

@@ -166,9 +166,9 @@ class autoProcessTV:

         url = None
         if section == "SickBeard":
-            url = protocol + host + ":" + port + web_root + "/home/postprocess/processEpisode"
+            url = "%s%s:%s%s/home/postprocess/processEpisode" % (protocol,host,port,web_root)
         elif section == "NzbDrone":
-            url = protocol + host + ":" + port + web_root + "/api/command"
+            url = "%s%s:%s%s/api/command" % (protocol, host, port, web_root)

         logger.debug("Opening URL: %s", url)
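All of these hunks swap string concatenation for a single %-format, which reads as one template and avoids the TypeError that concatenation raises when, say, the port arrives as an int:

protocol, host, port, web_root = "http://", "localhost", 8081, ""

# protocol + host + ":" + port + web_root would raise TypeError for an int port
url = "%s%s:%s%s/api" % (protocol, host, port, web_root)
print(url)  # http://localhost:8081/api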
nzbtomedia/nzbToMediaUtil.py:

@@ -6,6 +6,7 @@ import struct
 import shutil
 import sys
 import time
+from lib import requests
 import nzbtomedia

 from nzbtomedia.linktastic import linktastic
@@ -495,4 +496,18 @@ def find_download(clientAgent, nzbName, download_id):
     if clientAgent == 'deluge':
         pass
     if clientAgent == 'sabnzbd':
+        baseURL = "http://%s:%s/api" % (nzbtomedia.SABNZBDHOST, nzbtomedia.SABNZBDPORT)
+        url = baseURL
+        params = {}
+        params['apikey'] = nzbtomedia.SABNZBDAPIKEY
+        params['mode'] = "history"
+        params['output'] = 'json'
+
+        try:
+            r = requests.get(url, params=params)
+        except requests.ConnectionError:
+            logger.error("Unable to open URL")
+            return 1  # failure
+
+        result = r.json()
         pass
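The new branch asks SABnzbd's history endpoint whether it knows the download, with failures mapped to the typed requests.ConnectionError and the body parsed via .json() (a method in requests 2.x, not a property). A usage sketch with placeholder host, port, and API key:

from lib import requests

params = {'apikey': 'PLACEHOLDER_KEY', 'mode': 'history', 'output': 'json'}
try:
    r = requests.get('http://localhost:8080/api', params=params)
    history = r.json()   # parsed SABnzbd history, e.g. history['history']['slots']
except requests.ConnectionError:
    history = None       # the daemon is unreachable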
(test script; filename not shown in this view)

@@ -13,10 +13,11 @@ os.environ['TR_TORRENT_HASH']="154206e6390a03bbf01e61f013e1a52494a52dfa"
 # Initialize the config
 nzbtomedia.initialize()

-clientAgent = nzbtomedia.TORRENT_CLIENTAGENT
-nzbName = 'Ron.White.A.Little.Unprofessional.2012.DVDRip.XViD-IGUANA[rarbg]'
-download_id = '51C9B415382894727C5C7D8442554D3AC08B390F'
+clientAgent = nzbtomedia.NZB_CLIENTAGENT
+nzbName = 'Anger.Management.S02E57.HDTV.x264-KILLERS'
+#download_id = '51C9B415382894727C5C7D8442554D3AC08B390F'

+download_id = 'SABnzbd_nzo_uBYaGb'
 if find_download(clientAgent, nzbName, download_id):
     print 'found'
 else: