Mirror of https://github.com/Tautulli/Tautulli.git (synced 2025-07-06 21:21:15 -07:00)
Update requests-2.26.0
commit f3f4f9edf6 (parent b6595232d2)
10 changed files with 109 additions and 30 deletions
@@ -41,12 +41,20 @@ is at <https://requests.readthedocs.io>.
 """
 
 import urllib3
-import chardet
 import warnings
 
 from .exceptions import RequestsDependencyWarning
 
+try:
+    from charset_normalizer import __version__ as charset_normalizer_version
+except ImportError:
+    charset_normalizer_version = None
 
-def check_compatibility(urllib3_version, chardet_version):
+try:
+    from chardet import __version__ as chardet_version
+except ImportError:
+    chardet_version = None
+
+def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version):
     urllib3_version = urllib3_version.split('.')
     assert urllib3_version != ['dev']  # Verify urllib3 isn't installed from git.
@@ -62,12 +70,19 @@ def check_compatibility(urllib3_version, chardet_version):
     assert minor >= 21
     assert minor <= 26
 
-    # Check chardet for compatibility.
-    major, minor, patch = chardet_version.split('.')[:3]
-    major, minor, patch = int(major), int(minor), int(patch)
-    # chardet >= 3.0.2, < 5.0.0
-    assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0)
+    # Check charset_normalizer for compatibility.
+    if chardet_version:
+        major, minor, patch = chardet_version.split('.')[:3]
+        major, minor, patch = int(major), int(minor), int(patch)
+        # chardet_version >= 3.0.2, < 5.0.0
+        assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0)
+    elif charset_normalizer_version:
+        major, minor, patch = charset_normalizer_version.split('.')[:3]
+        major, minor, patch = int(major), int(minor), int(patch)
+        # charset_normalizer >= 2.0.0 < 3.0.0
+        assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0)
+    else:
+        raise Exception("You need either charset_normalizer or chardet installed")
 
 
 def _check_cryptography(cryptography_version):
     # cryptography < 1.3.4
@@ -82,10 +97,10 @@ def _check_cryptography(cryptography_version):
 
 # Check imported dependencies for compatibility.
 try:
-    check_compatibility(urllib3.__version__, chardet.__version__)
+    check_compatibility(urllib3.__version__, chardet_version, charset_normalizer_version)
 except (AssertionError, ValueError):
-    warnings.warn("urllib3 ({}) or chardet ({}) doesn't match a supported "
-                  "version!".format(urllib3.__version__, chardet.__version__),
+    warnings.warn("urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported "
+                  "version!".format(urllib3.__version__, chardet_version, charset_normalizer_version),
                   RequestsDependencyWarning)
 
 # Attempt to enable urllib3's fallback for SNI support
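Note (illustrative, not part of the diff): the hunks above are requests' __init__.py, where the import-time compatibility check now accepts either detector and warns rather than failing hard. A minimal sketch of surfacing that warning, assuming requests 2.26.0 and at least one of chardet/charset_normalizer are importable:

# Illustrative sketch only. check_compatibility() runs once, at the first
# import of requests in a process, and emits RequestsDependencyWarning when
# urllib3/chardet/charset_normalizer fall outside the asserted version ranges.
import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    import requests
    from requests.exceptions import RequestsDependencyWarning

for w in caught:
    if issubclass(w.category, RequestsDependencyWarning):
        print('dependency version mismatch:', w.message)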
@@ -5,8 +5,8 @@
 __title__ = 'requests'
 __description__ = 'Python HTTP for Humans.'
 __url__ = 'https://requests.readthedocs.io'
-__version__ = '2.25.1'
-__build__ = 0x022501
+__version__ = '2.26.0'
+__build__ = 0x022600
 __author__ = 'Kenneth Reitz'
 __author_email__ = 'me@kennethreitz.org'
 __license__ = 'Apache 2.0'
@@ -72,7 +72,6 @@ def get(url, params=None, **kwargs):
     :rtype: requests.Response
     """
 
-    kwargs.setdefault('allow_redirects', True)
     return request('get', url, params=params, **kwargs)
 
 
@@ -85,7 +84,6 @@ def options(url, **kwargs):
     :rtype: requests.Response
     """
 
-    kwargs.setdefault('allow_redirects', True)
     return request('options', url, **kwargs)
 
 
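Note (illustrative): the two api.py hunks drop a redundant `kwargs.setdefault('allow_redirects', True)`; `Session.request()` already defaults `allow_redirects` to True, so `get()` and `options()` behave the same as before. For example (httpbin.org is only a placeholder endpoint):

# Illustrative: redirect handling is unchanged by the cleanup above.
import requests

r = requests.get('https://httpbin.org/redirect/1')
print(r.status_code, len(r.history))     # redirect followed by default

r = requests.get('https://httpbin.org/redirect/1', allow_redirects=False)
print(r.status_code)                     # redirect not followed when disabled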
@@ -8,7 +8,10 @@ This module handles import compatibility issues between Python 2 and
 Python 3.
 """
 
-import chardet
+try:
+    import chardet
+except ImportError:
+    import charset_normalizer as chardet
 
 import sys
 
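Note (illustrative): compat.py now aliases `chardet` to charset_normalizer when chardet is absent; charset_normalizer ships a chardet-style `detect()` helper, so callers of the alias keep the same API. A small sketch, assuming one of the two detectors is installed:

# Illustrative: the alias exposes detect() regardless of which backend is present.
from requests.compat import chardet

sample = 'Liberté, égalité, fraternité'.encode('utf-8')
print(chardet.detect(sample))   # e.g. {'encoding': 'utf-8', 'confidence': ..., 'language': ...}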
@@ -25,6 +25,10 @@ class RequestException(IOError):
         super(RequestException, self).__init__(*args, **kwargs)
 
 
+class InvalidJSONError(RequestException):
+    """A JSON error occurred."""
+
+
 class HTTPError(RequestException):
     """An HTTP error occurred."""
 
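Note (illustrative): the new `InvalidJSONError` slots into the existing exception hierarchy, so broad `RequestException` handlers keep catching it:

# Illustrative: InvalidJSONError is a RequestException subclass.
from requests.exceptions import InvalidJSONError, RequestException

assert issubclass(InvalidJSONError, RequestException)

try:
    raise InvalidJSONError('body is not JSON-serializable')
except RequestException as exc:
    print('caught via the base class:', exc)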
@@ -8,10 +8,19 @@ import ssl
 
 import idna
 import urllib3
-import chardet
 
 from . import __version__ as requests_version
 
+try:
+    import charset_normalizer
+except ImportError:
+    charset_normalizer = None
+
+try:
+    import chardet
+except ImportError:
+    chardet = None
+
 try:
     from urllib3.contrib import pyopenssl
 except ImportError:
@@ -71,6 +80,11 @@ def info():
 
     implementation_info = _implementation()
     urllib3_info = {'version': urllib3.__version__}
-    chardet_info = {'version': chardet.__version__}
+    charset_normalizer_info = {'version': None}
+    chardet_info = {'version': None}
+    if charset_normalizer:
+        charset_normalizer_info = {'version': charset_normalizer.__version__}
+    if chardet:
+        chardet_info = {'version': chardet.__version__}
 
     pyopenssl_info = {
@@ -99,9 +113,11 @@ def info():
         'implementation': implementation_info,
         'system_ssl': system_ssl_info,
         'using_pyopenssl': pyopenssl is not None,
+        'using_charset_normalizer': chardet is None,
         'pyOpenSSL': pyopenssl_info,
         'urllib3': urllib3_info,
         'chardet': chardet_info,
+        'charset_normalizer': charset_normalizer_info,
         'cryptography': cryptography_info,
         'idna': idna_info,
         'requests': {
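Note (illustrative): with the help.py changes, `requests.help.info()` reports both detectors, which is handy when debugging an environment after the upgrade:

# Illustrative: inspect the detector fields added above.
import requests.help

info = requests.help.info()
print(info['requests']['version'])        # e.g. '2.26.0'
print(info['chardet'])                    # {'version': ...} or {'version': None}
print(info['charset_normalizer'])         # {'version': ...} or {'version': None}
print(info['using_charset_normalizer'])   # True when chardet is not importable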
@@ -29,7 +29,7 @@ from .auth import HTTPBasicAuth
 from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
 from .exceptions import (
     HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
-    ContentDecodingError, ConnectionError, StreamConsumedError)
+    ContentDecodingError, ConnectionError, StreamConsumedError, InvalidJSONError)
 from ._internal_utils import to_native_string, unicode_is_ascii
 from .utils import (
     guess_filename, get_auth_from_url, requote_uri,
@@ -466,7 +466,12 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
             # urllib3 requires a bytes-like body. Python 2's json.dumps
             # provides this natively, but Python 3 gives a Unicode string.
             content_type = 'application/json'
-            body = complexjson.dumps(json)
+
+            try:
+                body = complexjson.dumps(json, allow_nan=False)
+            except ValueError as ve:
+                raise InvalidJSONError(ve, request=self)
+
             if not isinstance(body, bytes):
                 body = body.encode('utf-8')
 
@@ -726,7 +731,7 @@ class Response(object):
 
     @property
     def apparent_encoding(self):
-        """The apparent encoding, provided by the chardet library."""
+        """The apparent encoding, provided by the charset_normalizer or chardet libraries."""
         return chardet.detect(self.content)['encoding']
 
     def iter_content(self, chunk_size=1, decode_unicode=False):
@@ -840,7 +845,7 @@ class Response(object):
         """Content of the response, in unicode.
 
         If Response.encoding is None, encoding will be guessed using
-        ``chardet``.
+        ``charset_normalizer`` or ``chardet``.
 
         The encoding of the response content is determined based solely on HTTP
         headers, following RFC 2616 to the letter. If you can take advantage of
@@ -877,13 +882,18 @@ class Response(object):
         r"""Returns the json-encoded content of a response, if any.
 
         :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
-        :raises ValueError: If the response body does not contain valid json.
+        :raises simplejson.JSONDecodeError: If the response body does not
+            contain valid json and simplejson is installed.
+        :raises json.JSONDecodeError: If the response body does not contain
+            valid json and simplejson is not installed on Python 3.
+        :raises ValueError: If the response body does not contain valid
+            json and simplejson is not installed on Python 2.
         """
 
         if not self.encoding and self.content and len(self.content) > 3:
             # No encoding set. JSON RFC 4627 section 3 states we should expect
             # UTF-8, -16 or -32. Detect which one to use; If the detection or
-            # decoding fails, fall back to `self.text` (using chardet to make
+            # decoding fails, fall back to `self.text` (using charset_normalizer to make
             # a best guess).
             encoding = guess_json_utf(self.content)
             if encoding is not None:
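Note (illustrative): because models.py now serializes JSON bodies with `allow_nan=False`, non-finite floats are rejected during request preparation with the new `InvalidJSONError`, before any connection is opened (httpbin.org is only a placeholder URL):

# Illustrative: NaN/Infinity in a json= body now fails fast.
import requests
from requests.exceptions import InvalidJSONError

try:
    requests.post('https://httpbin.org/post', json={'value': float('nan')})
except InvalidJSONError as exc:
    print('rejected while preparing the request:', exc)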
@@ -1,9 +1,17 @@
 import sys
 
+try:
+    import chardet
+except ImportError:
+    import charset_normalizer as chardet
+    import warnings
+
+    warnings.filterwarnings('ignore', 'Trying to detect', module='charset_normalizer')
+
 # This code exists for backwards compatibility reasons.
 # I don't like it either. Just look the other way. :)
 
-for package in ('urllib3', 'idna', 'chardet'):
+for package in ('urllib3', 'idna'):
     locals()[package] = __import__(package)
     # This traversal is apparently necessary such that the identities are
     # preserved (requests.packages.urllib3.* is urllib3.*)
@@ -11,4 +19,8 @@ for package in ('urllib3', 'idna', 'chardet'):
         if mod == package or mod.startswith(package + '.'):
             sys.modules['requests.packages.' + mod] = sys.modules[mod]
 
+target = chardet.__name__
+for mod in list(sys.modules):
+    if mod == target or mod.startswith(target + '.'):
+        sys.modules['requests.packages.' + target.replace(target, 'chardet')] = sys.modules[mod]
 # Kinda cool, though, right?
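Note (illustrative): the packages.py shim keeps the legacy `requests.packages.*` aliases working whichever detector ends up backing the `chardet` name:

# Illustrative: the compatibility aliases still point at the real modules.
import urllib3
import requests.packages

assert requests.packages.urllib3 is urllib3
# Resolves to chardet itself, or to charset_normalizer when chardet is absent.
print(requests.packages.chardet.__name__)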
@@ -633,7 +633,7 @@ class Session(SessionRedirectMixin):
         kwargs.setdefault('stream', self.stream)
         kwargs.setdefault('verify', self.verify)
         kwargs.setdefault('cert', self.cert)
-        kwargs.setdefault('proxies', self.proxies)
+        kwargs.setdefault('proxies', self.rebuild_proxies(request, self.proxies))
 
         # It's possible that users might accidentally send a Request object.
         # Guard against that specific failure case.
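Note (illustrative, a behavioural nuance rather than a documented guarantee): with the sessions.py change, the default proxies for a `PreparedRequest` sent via `Session.send()` are resolved through `rebuild_proxies()`, which also considers environment settings (e.g. NO_PROXY when `trust_env` is enabled) instead of passing `session.proxies` through unchanged:

# Illustrative: sending a prepared request directly now gets the same proxy
# resolution that Session.request() applies (example.org is a placeholder).
import requests

with requests.Session() as s:
    prepped = s.prepare_request(requests.Request('GET', 'https://example.org'))
    resp = s.send(prepped)   # proxies default to s.rebuild_proxies(prepped, s.proxies)
    print(resp.status_code)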
@@ -20,6 +20,7 @@ import tempfile
 import warnings
 import zipfile
 from collections import OrderedDict
+from urllib3.util import make_headers
 
 from .__version__ import __version__
 from . import certs
@@ -41,6 +42,11 @@ DEFAULT_CA_BUNDLE_PATH = certs.where()
 
 DEFAULT_PORTS = {'http': 80, 'https': 443}
 
+# Ensure that ', ' is used to preserve previous delimiter behavior.
+DEFAULT_ACCEPT_ENCODING = ", ".join(
+    re.split(r",\s*", make_headers(accept_encoding=True)["accept-encoding"])
+)
+
 
 if sys.platform == 'win32':
     # provide a proxy_bypass version on Windows without DNS lookups
@@ -256,13 +262,28 @@ def extract_zipped_paths(path):
 
     # we have a valid zip archive and a valid member of that archive
     tmp = tempfile.gettempdir()
-    extracted_path = os.path.join(tmp, *member.split('/'))
+    extracted_path = os.path.join(tmp, member.split('/')[-1])
     if not os.path.exists(extracted_path):
-        extracted_path = zip_file.extract(member, path=tmp)
+        # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition
+        with atomic_open(extracted_path) as file_handler:
+            file_handler.write(zip_file.read(member))
     return extracted_path
 
 
+@contextlib.contextmanager
+def atomic_open(filename):
+    """Write a file to the disk in an atomic fashion"""
+    replacer = os.rename if sys.version_info[0] == 2 else os.replace
+    tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename))
+    try:
+        with os.fdopen(tmp_descriptor, 'wb') as tmp_handler:
+            yield tmp_handler
+        replacer(tmp_name, filename)
+    except BaseException:
+        os.remove(tmp_name)
+        raise
+
+
 def from_key_val_list(value):
     """Take an object and test to see if it can be represented as a
     dictionary. Unless it can not be represented as such, return an
@@ -820,7 +841,7 @@ def default_headers():
     """
     return CaseInsensitiveDict({
         'User-Agent': default_user_agent(),
-        'Accept-Encoding': ', '.join(('gzip', 'deflate')),
+        'Accept-Encoding': DEFAULT_ACCEPT_ENCODING,
         'Accept': '*/*',
         'Connection': 'keep-alive',
     })
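Note (illustrative): two utils.py changes are worth calling out. `default_headers()` now derives `Accept-Encoding` from urllib3's `make_headers()`, so it can advertise `br` when a Brotli decoder is importable, and zip extraction writes through the new `atomic_open()` helper to avoid leaving partially written files behind. A quick check of the header (the exact value depends on the installed urllib3/brotli):

# Illustrative: the default Accept-Encoding mirrors what urllib3 reports.
from requests.utils import default_headers

print(default_headers()['Accept-Encoding'])
# typically 'gzip, deflate', or 'gzip, deflate, br' when brotli/brotlicffi is available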