Mirror of https://github.com/Tautulli/Tautulli.git (synced 2025-07-16 02:02:58 -07:00)
Bump requests from 2.27.1 to 2.28.1 (#1781)
* Bump requests from 2.27.1 to 2.28.1

Bumps [requests](https://github.com/psf/requests) from 2.27.1 to 2.28.1.
- [Release notes](https://github.com/psf/requests/releases)
- [Changelog](https://github.com/psf/requests/blob/main/HISTORY.md)
- [Commits](https://github.com/psf/requests/compare/v2.27.1...v2.28.1)

---
updated-dependencies:
- dependency-name: requests
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

* Update requests==2.28.1
* Update urllib3==1.26.12
* Update certifi==2022.9.24
* Update idna==3.4
* Update charset-normalizer==2.1.1

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>

[skip ci]
parent baa0e08c2a
commit af1aed0b6b
46 changed files with 3295 additions and 2709 deletions
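The message above pins five vendored packages. As a quick sanity check (not part of this commit; a sketch that assumes the packages are importable from the running environment), each of these projects exposes a __version__ attribute that can be compared against the pins:

import importlib

expected = {
    "requests": "2.28.1",
    "urllib3": "1.26.12",
    "certifi": "2022.9.24",
    "idna": "3.4",
    "charset_normalizer": "2.1.1",
}
for name, want in expected.items():
    mod = importlib.import_module(name)
    have = getattr(mod, "__version__", "unknown")  # all five expose __version__
    status = "ok" if have == want else "MISMATCH"
    print(f"{name:20s} want {want:12s} have {have:12s} {status}")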
requests/__init__.py
@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-

#   __
#  /__) _  _  _   _ _/   _
# / (   (- (/ (/ (- _)  /  _)
@@ -40,8 +38,10 @@ is at <https://requests.readthedocs.io>.
:license: Apache 2.0, see LICENSE for more details.
"""

import urllib3
import warnings

import urllib3

from .exceptions import RequestsDependencyWarning

try:
@@ -54,13 +54,14 @@ try:
except ImportError:
    chardet_version = None


def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version):
    urllib3_version = urllib3_version.split('.')
    assert urllib3_version != ['dev']  # Verify urllib3 isn't installed from git.
    urllib3_version = urllib3_version.split(".")
    assert urllib3_version != ["dev"]  # Verify urllib3 isn't installed from git.

    # Sometimes, urllib3 only reports its version as 16.1.
    if len(urllib3_version) == 2:
        urllib3_version.append('0')
        urllib3_version.append("0")

    # Check urllib3 for compatibility.
    major, minor, patch = urllib3_version  # noqa: F811
@@ -72,36 +73,46 @@ def check_compatibility(urllib3_version, chardet_version, charset_normalizer_ver

    # Check charset_normalizer for compatibility.
    if chardet_version:
        major, minor, patch = chardet_version.split('.')[:3]
        major, minor, patch = chardet_version.split(".")[:3]
        major, minor, patch = int(major), int(minor), int(patch)
        # chardet_version >= 3.0.2, < 5.0.0
        assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0)
        # chardet_version >= 3.0.2, < 6.0.0
        assert (3, 0, 2) <= (major, minor, patch) < (6, 0, 0)
    elif charset_normalizer_version:
        major, minor, patch = charset_normalizer_version.split('.')[:3]
        major, minor, patch = charset_normalizer_version.split(".")[:3]
        major, minor, patch = int(major), int(minor), int(patch)
        # charset_normalizer >= 2.0.0 < 3.0.0
        assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0)
    else:
        raise Exception("You need either charset_normalizer or chardet installed")


def _check_cryptography(cryptography_version):
    # cryptography < 1.3.4
    try:
        cryptography_version = list(map(int, cryptography_version.split('.')))
        cryptography_version = list(map(int, cryptography_version.split(".")))
    except ValueError:
        return

    if cryptography_version < [1, 3, 4]:
        warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version)
        warning = "Old version of cryptography ({}) may cause slowdown.".format(
            cryptography_version
        )
        warnings.warn(warning, RequestsDependencyWarning)


# Check imported dependencies for compatibility.
try:
    check_compatibility(urllib3.__version__, chardet_version, charset_normalizer_version)
    check_compatibility(
        urllib3.__version__, chardet_version, charset_normalizer_version
    )
except (AssertionError, ValueError):
    warnings.warn("urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported "
                  "version!".format(urllib3.__version__, chardet_version, charset_normalizer_version),
                  RequestsDependencyWarning)
    warnings.warn(
        "urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported "
        "version!".format(
            urllib3.__version__, chardet_version, charset_normalizer_version
        ),
        RequestsDependencyWarning,
    )

# Attempt to enable urllib3's fallback for SNI support
# if the standard library doesn't support SNI or the
@@ -114,39 +125,56 @@ try:

    if not getattr(ssl, "HAS_SNI", False):
        from urllib3.contrib import pyopenssl

        pyopenssl.inject_into_urllib3()

        # Check cryptography version
        from cryptography import __version__ as cryptography_version

        _check_cryptography(cryptography_version)
except ImportError:
    pass

# urllib3's DependencyWarnings should be silenced.
from urllib3.exceptions import DependencyWarning
warnings.simplefilter('ignore', DependencyWarning)

from .__version__ import __title__, __description__, __url__, __version__
from .__version__ import __build__, __author__, __author_email__, __license__
from .__version__ import __copyright__, __cake__

from . import utils
from . import packages
from .models import Request, Response, PreparedRequest
from .api import request, get, head, post, patch, put, delete, options
from .sessions import session, Session
from .status_codes import codes
from .exceptions import (
    RequestException, Timeout, URLRequired,
    TooManyRedirects, HTTPError, ConnectionError,
    FileModeWarning, ConnectTimeout, ReadTimeout, JSONDecodeError
)
warnings.simplefilter("ignore", DependencyWarning)

# Set default logging handler to avoid "No handler found" warnings.
import logging
from logging import NullHandler

from . import packages, utils
from .__version__ import (
    __author__,
    __author_email__,
    __build__,
    __cake__,
    __copyright__,
    __description__,
    __license__,
    __title__,
    __url__,
    __version__,
)
from .api import delete, get, head, options, patch, post, put, request
from .exceptions import (
    ConnectionError,
    ConnectTimeout,
    FileModeWarning,
    HTTPError,
    JSONDecodeError,
    ReadTimeout,
    RequestException,
    Timeout,
    TooManyRedirects,
    URLRequired,
)
from .models import PreparedRequest, Request, Response
from .sessions import Session, session
from .status_codes import codes

logging.getLogger(__name__).addHandler(NullHandler())

# FileModeWarnings go off per the default.
warnings.simplefilter('default', FileModeWarning, append=True)
warnings.simplefilter("default", FileModeWarning, append=True)
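An aside on the version gates above (illustrative only, not part of the diff): check_compatibility leans on Python's lexicographic tuple ordering, which is why the asserts read naturally as version ranges. For example, with the chardet bound widened by this bump:

major, minor, patch = (int(p) for p in "4.0.0".split("."))
assert (3, 0, 2) <= (major, minor, patch) < (6, 0, 0)        # allowed chardet range
assert not ((2, 0, 0) <= (major, minor, patch) < (3, 0, 0))  # charset_normalizer range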
requests/__version__.py
@@ -2,13 +2,13 @@
# |(  |-  |.| | | |-  `-. |  `-.
# ' ' `-' `-`.`-' `-' `-' '  `-'

__title__ = 'requests'
__description__ = 'Python HTTP for Humans.'
__url__ = 'https://requests.readthedocs.io'
__version__ = '2.27.1'
__build__ = 0x022701
__author__ = 'Kenneth Reitz'
__author_email__ = 'me@kennethreitz.org'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2022 Kenneth Reitz'
__cake__ = u'\u2728 \U0001f370 \u2728'
__title__ = "requests"
__description__ = "Python HTTP for Humans."
__url__ = "https://requests.readthedocs.io"
__version__ = "2.28.1"
__build__ = 0x022801
__author__ = "Kenneth Reitz"
__author_email__ = "me@kennethreitz.org"
__license__ = "Apache 2.0"
__copyright__ = "Copyright 2022 Kenneth Reitz"
__cake__ = "\u2728 \U0001f370 \u2728"
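The __build__ constant appears to pack the version as pairs of hex digits (0x022701 for 2.27.1, 0x022801 for 2.28.1). A small decode sketch based on that observed pattern:

build = 0x022801
digits = f"{build:06x}"                                    # "022801"
version = ".".join(str(int(digits[i:i + 2])) for i in (0, 2, 4))
assert version == "2.28.1"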
requests/_internal_utils.py
@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-

"""
requests._internal_utils
~~~~~~~~~~~~~~
@@ -7,11 +5,22 @@ requests._internal_utils
Provides utility functions that are consumed internally by Requests
which depend on extremely few external helpers (such as compat)
"""
import re

from .compat import is_py2, builtin_str, str
from .compat import builtin_str

_VALID_HEADER_NAME_RE_BYTE = re.compile(rb"^[^:\s][^:\r\n]*$")
_VALID_HEADER_NAME_RE_STR = re.compile(r"^[^:\s][^:\r\n]*$")
_VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$")
_VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$")

HEADER_VALIDATORS = {
    bytes: (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE),
    str: (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR),
}


def to_native_string(string, encoding='ascii'):
def to_native_string(string, encoding="ascii"):
    """Given a string object, regardless of type, returns a representation of
    that string in the native string type, encoding and decoding where
    necessary. This assumes ASCII unless told otherwise.
@@ -19,10 +28,7 @@ def to_native_string(string, encoding='ascii'):
    if isinstance(string, builtin_str):
        out = string
    else:
        if is_py2:
            out = string.encode(encoding)
        else:
            out = string.decode(encoding)
        out = string.decode(encoding)

    return out
@@ -36,7 +42,7 @@ def unicode_is_ascii(u_string):
    """
    assert isinstance(u_string, str)
    try:
        u_string.encode('ascii')
        u_string.encode("ascii")
        return True
    except UnicodeEncodeError:
        return False
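The new HEADER_VALIDATORS table above is what later lets requests reject malformed header names and values. A quick standalone illustration of the string-side patterns (the header names here are made up):

import re

name_re = re.compile(r"^[^:\s][^:\r\n]*$")
value_re = re.compile(r"^\S[^\r\n]*$|^$")

assert name_re.match("X-Plex-Token")
assert not name_re.match("Bad:Name")              # ":" cannot appear in a name
assert value_re.match("some value")
assert not value_re.match("evil\r\nInjected: 1")  # CRLF injection is rejected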
requests/adapters.py
@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-

"""
requests.adapters
~~~~~~~~~~~~~~~~~
@@ -9,58 +7,76 @@ and maintain connections.
"""

import os.path
import socket
import socket  # noqa: F401

from urllib3.poolmanager import PoolManager, proxy_from_url
from urllib3.response import HTTPResponse
from urllib3.util import parse_url
from urllib3.util import Timeout as TimeoutSauce
from urllib3.util.retry import Retry
from urllib3.exceptions import ClosedPoolError
from urllib3.exceptions import ConnectTimeoutError
from urllib3.exceptions import ClosedPoolError, ConnectTimeoutError
from urllib3.exceptions import HTTPError as _HTTPError
from urllib3.exceptions import InvalidHeader as _InvalidHeader
from urllib3.exceptions import MaxRetryError
from urllib3.exceptions import NewConnectionError
from urllib3.exceptions import (
    LocationValueError,
    MaxRetryError,
    NewConnectionError,
    ProtocolError,
)
from urllib3.exceptions import ProxyError as _ProxyError
from urllib3.exceptions import ProtocolError
from urllib3.exceptions import ReadTimeoutError
from urllib3.exceptions import ReadTimeoutError, ResponseError
from urllib3.exceptions import SSLError as _SSLError
from urllib3.exceptions import ResponseError
from urllib3.exceptions import LocationValueError
from urllib3.poolmanager import PoolManager, proxy_from_url
from urllib3.response import HTTPResponse
from urllib3.util import Timeout as TimeoutSauce
from urllib3.util import parse_url
from urllib3.util.retry import Retry

from .models import Response
from .compat import urlparse, basestring
from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths,
                    get_encoding_from_headers, prepend_scheme_if_needed,
                    get_auth_from_url, urldefragauth, select_proxy)
from .structures import CaseInsensitiveDict
from .cookies import extract_cookies_to_jar
from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
                         ProxyError, RetryError, InvalidSchema, InvalidProxyURL,
                         InvalidURL, InvalidHeader)
from .auth import _basic_auth_str
from .compat import basestring, urlparse
from .cookies import extract_cookies_to_jar
from .exceptions import (
    ConnectionError,
    ConnectTimeout,
    InvalidHeader,
    InvalidProxyURL,
    InvalidSchema,
    InvalidURL,
    ProxyError,
    ReadTimeout,
    RetryError,
    SSLError,
)
from .models import Response
from .structures import CaseInsensitiveDict
from .utils import (
    DEFAULT_CA_BUNDLE_PATH,
    extract_zipped_paths,
    get_auth_from_url,
    get_encoding_from_headers,
    prepend_scheme_if_needed,
    select_proxy,
    urldefragauth,
)

try:
    from urllib3.contrib.socks import SOCKSProxyManager
except ImportError:

    def SOCKSProxyManager(*args, **kwargs):
        raise InvalidSchema("Missing dependencies for SOCKS support.")


DEFAULT_POOLBLOCK = False
DEFAULT_POOLSIZE = 10
DEFAULT_RETRIES = 0
DEFAULT_POOL_TIMEOUT = None


class BaseAdapter(object):
class BaseAdapter:
    """The Base Transport Adapter"""

    def __init__(self):
        super(BaseAdapter, self).__init__()
        super().__init__()

    def send(self, request, stream=False, timeout=None, verify=True,
             cert=None, proxies=None):
    def send(
        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
    ):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
@@ -108,12 +124,22 @@ class HTTPAdapter(BaseAdapter):
      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
      >>> s.mount('http://', a)
    """
    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
                 '_pool_block']

    def __init__(self, pool_connections=DEFAULT_POOLSIZE,
                 pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
                 pool_block=DEFAULT_POOLBLOCK):
    __attrs__ = [
        "max_retries",
        "config",
        "_pool_connections",
        "_pool_maxsize",
        "_pool_block",
    ]

    def __init__(
        self,
        pool_connections=DEFAULT_POOLSIZE,
        pool_maxsize=DEFAULT_POOLSIZE,
        max_retries=DEFAULT_RETRIES,
        pool_block=DEFAULT_POOLBLOCK,
    ):
        if max_retries == DEFAULT_RETRIES:
            self.max_retries = Retry(0, read=False)
        else:
@@ -121,7 +147,7 @@ class HTTPAdapter(BaseAdapter):
        self.config = {}
        self.proxy_manager = {}

        super(HTTPAdapter, self).__init__()
        super().__init__()

        self._pool_connections = pool_connections
        self._pool_maxsize = pool_maxsize
@@ -141,10 +167,13 @@ class HTTPAdapter(BaseAdapter):
        for attr, value in state.items():
            setattr(self, attr, value)

        self.init_poolmanager(self._pool_connections, self._pool_maxsize,
                              block=self._pool_block)
        self.init_poolmanager(
            self._pool_connections, self._pool_maxsize, block=self._pool_block
        )

    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
    def init_poolmanager(
        self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs
    ):
        """Initializes a urllib3 PoolManager.

        This method should not be called from user code, and is only
@@ -161,8 +190,13 @@ class HTTPAdapter(BaseAdapter):
        self._pool_maxsize = maxsize
        self._pool_block = block

        self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
                                       block=block, strict=True, **pool_kwargs)
        self.poolmanager = PoolManager(
            num_pools=connections,
            maxsize=maxsize,
            block=block,
            strict=True,
            **pool_kwargs,
        )

    def proxy_manager_for(self, proxy, **proxy_kwargs):
        """Return urllib3 ProxyManager for the given proxy.
@@ -178,7 +212,7 @@ class HTTPAdapter(BaseAdapter):
        """
        if proxy in self.proxy_manager:
            manager = self.proxy_manager[proxy]
        elif proxy.lower().startswith('socks'):
        elif proxy.lower().startswith("socks"):
            username, password = get_auth_from_url(proxy)
            manager = self.proxy_manager[proxy] = SOCKSProxyManager(
                proxy,
@@ -187,7 +221,7 @@ class HTTPAdapter(BaseAdapter):
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs
                **proxy_kwargs,
            )
        else:
            proxy_headers = self.proxy_headers(proxy)
@@ -197,7 +231,8 @@ class HTTPAdapter(BaseAdapter):
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs)
                **proxy_kwargs,
            )

        return manager

@@ -213,7 +248,7 @@ class HTTPAdapter(BaseAdapter):
            to a CA bundle to use
        :param cert: The SSL certificate to verify.
        """
        if url.lower().startswith('https') and verify:
        if url.lower().startswith("https") and verify:

            cert_loc = None

@@ -225,17 +260,19 @@ class HTTPAdapter(BaseAdapter):
                cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)

            if not cert_loc or not os.path.exists(cert_loc):
                raise IOError("Could not find a suitable TLS CA certificate bundle, "
                              "invalid path: {}".format(cert_loc))
                raise OSError(
                    f"Could not find a suitable TLS CA certificate bundle, "
                    f"invalid path: {cert_loc}"
                )

            conn.cert_reqs = 'CERT_REQUIRED'
            conn.cert_reqs = "CERT_REQUIRED"

            if not os.path.isdir(cert_loc):
                conn.ca_certs = cert_loc
            else:
                conn.ca_cert_dir = cert_loc
        else:
            conn.cert_reqs = 'CERT_NONE'
            conn.cert_reqs = "CERT_NONE"
            conn.ca_certs = None
            conn.ca_cert_dir = None

@@ -247,11 +284,14 @@ class HTTPAdapter(BaseAdapter):
                conn.cert_file = cert
                conn.key_file = None
            if conn.cert_file and not os.path.exists(conn.cert_file):
                raise IOError("Could not find the TLS certificate file, "
                              "invalid path: {}".format(conn.cert_file))
                raise OSError(
                    f"Could not find the TLS certificate file, "
                    f"invalid path: {conn.cert_file}"
                )
            if conn.key_file and not os.path.exists(conn.key_file):
                raise IOError("Could not find the TLS key file, "
                              "invalid path: {}".format(conn.key_file))
                raise OSError(
                    f"Could not find the TLS key file, invalid path: {conn.key_file}"
                )

    def build_response(self, req, resp):
        """Builds a :class:`Response <requests.Response>` object from a urllib3
@@ -266,10 +306,10 @@ class HTTPAdapter(BaseAdapter):
        response = Response()

        # Fallback to None if there's no status_code, for whatever reason.
        response.status_code = getattr(resp, 'status', None)
        response.status_code = getattr(resp, "status", None)

        # Make headers case-insensitive.
        response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
        response.headers = CaseInsensitiveDict(getattr(resp, "headers", {}))

        # Set encoding.
        response.encoding = get_encoding_from_headers(response.headers)
@@ -277,7 +317,7 @@ class HTTPAdapter(BaseAdapter):
        response.reason = response.raw.reason

        if isinstance(req.url, bytes):
            response.url = req.url.decode('utf-8')
            response.url = req.url.decode("utf-8")
        else:
            response.url = req.url

@@ -302,11 +342,13 @@ class HTTPAdapter(BaseAdapter):
        proxy = select_proxy(url, proxies)

        if proxy:
            proxy = prepend_scheme_if_needed(proxy, 'http')
            proxy = prepend_scheme_if_needed(proxy, "http")
            proxy_url = parse_url(proxy)
            if not proxy_url.host:
                raise InvalidProxyURL("Please check proxy URL. It is malformed"
                                      " and could be missing the host.")
                raise InvalidProxyURL(
                    "Please check proxy URL. It is malformed "
                    "and could be missing the host."
                )
            proxy_manager = self.proxy_manager_for(proxy)
            conn = proxy_manager.connection_from_url(url)
        else:
@@ -344,11 +386,11 @@ class HTTPAdapter(BaseAdapter):
        proxy = select_proxy(request.url, proxies)
        scheme = urlparse(request.url).scheme

        is_proxied_http_request = (proxy and scheme != 'https')
        is_proxied_http_request = proxy and scheme != "https"
        using_socks_proxy = False
        if proxy:
            proxy_scheme = urlparse(proxy).scheme.lower()
            using_socks_proxy = proxy_scheme.startswith('socks')
            using_socks_proxy = proxy_scheme.startswith("socks")

        url = request.path_url
        if is_proxied_http_request and not using_socks_proxy:
@@ -387,12 +429,13 @@ class HTTPAdapter(BaseAdapter):
        username, password = get_auth_from_url(proxy)

        if username:
            headers['Proxy-Authorization'] = _basic_auth_str(username,
                                                             password)
            headers["Proxy-Authorization"] = _basic_auth_str(username, password)

        return headers

    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
    def send(
        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
    ):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
@@ -416,20 +459,26 @@ class HTTPAdapter(BaseAdapter):

        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
        self.add_headers(
            request,
            stream=stream,
            timeout=timeout,
            verify=verify,
            cert=cert,
            proxies=proxies,
        )

        chunked = not (request.body is None or 'Content-Length' in request.headers)
        chunked = not (request.body is None or "Content-Length" in request.headers)

        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError as e:
                # this may raise a string formatting error.
                err = ("Invalid timeout {}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
            except ValueError:
                raise ValueError(
                    f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
                    f"or a single float to set both timeouts to the same value."
                )
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
@@ -447,22 +496,24 @@ class HTTPAdapter(BaseAdapter):
                preload_content=False,
                decode_content=False,
                retries=self.max_retries,
                timeout=timeout
                timeout=timeout,
            )

        # Send the request.
        else:
            if hasattr(conn, 'proxy_pool'):
            if hasattr(conn, "proxy_pool"):
                conn = conn.proxy_pool

            low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)

            try:
                skip_host = 'Host' in request.headers
                low_conn.putrequest(request.method,
                                    url,
                                    skip_accept_encoding=True,
                                    skip_host=skip_host)
                skip_host = "Host" in request.headers
                low_conn.putrequest(
                    request.method,
                    url,
                    skip_accept_encoding=True,
                    skip_host=skip_host,
                )

                for header, value in request.headers.items():
                    low_conn.putheader(header, value)
@@ -470,34 +521,29 @@ class HTTPAdapter(BaseAdapter):
                low_conn.endheaders()

                for i in request.body:
                    low_conn.send(hex(len(i))[2:].encode('utf-8'))
                    low_conn.send(b'\r\n')
                    low_conn.send(hex(len(i))[2:].encode("utf-8"))
                    low_conn.send(b"\r\n")
                    low_conn.send(i)
                    low_conn.send(b'\r\n')
                low_conn.send(b'0\r\n\r\n')
                    low_conn.send(b"\r\n")
                low_conn.send(b"0\r\n\r\n")

                # Receive the response from the server
                try:
                    # For Python 2.7, use buffering of HTTP responses
                    r = low_conn.getresponse(buffering=True)
                except TypeError:
                    # For compatibility with Python 3.3+
                    r = low_conn.getresponse()
                r = low_conn.getresponse()

                resp = HTTPResponse.from_httplib(
                    r,
                    pool=conn,
                    connection=low_conn,
                    preload_content=False,
                    decode_content=False
                    decode_content=False,
                )
            except:
            except Exception:
                # If we hit any problems here, clean up the connection.
                # Then, reraise so that we can handle the actual exception.
                # Then, raise so that we can handle the actual exception.
                low_conn.close()
                raise

        except (ProtocolError, socket.error) as err:
        except (ProtocolError, OSError) as err:
            raise ConnectionError(err, request=request)

        except MaxRetryError as e:
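For orientation, a minimal usage sketch of the adapter surface touched above (the URL is a placeholder): retries and pool sizing come from the HTTPAdapter constructor, and send() accepts either a single float or a (connect, read) tuple, which it wraps in urllib3's Timeout ("TimeoutSauce") internally:

import requests
from requests.adapters import HTTPAdapter

s = requests.Session()
# Per the HTTPAdapter docstring above: per-scheme mount with retries.
s.mount("https://", HTTPAdapter(pool_connections=10, pool_maxsize=10, max_retries=3))

resp = s.get("https://example.org", timeout=(3.05, 27))  # (connect, read)
print(resp.status_code)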
requests/api.py
@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-

"""
requests.api
~~~~~~~~~~~~
@@ -72,7 +70,7 @@ def get(url, params=None, **kwargs):
    :rtype: requests.Response
    """

    return request('get', url, params=params, **kwargs)
    return request("get", url, params=params, **kwargs)


def options(url, **kwargs):
@@ -84,7 +82,7 @@ def options(url, **kwargs):
    :rtype: requests.Response
    """

    return request('options', url, **kwargs)
    return request("options", url, **kwargs)


def head(url, **kwargs):
@@ -98,8 +96,8 @@ def head(url, **kwargs):
    :rtype: requests.Response
    """

    kwargs.setdefault('allow_redirects', False)
    return request('head', url, **kwargs)
    kwargs.setdefault("allow_redirects", False)
    return request("head", url, **kwargs)


def post(url, data=None, json=None, **kwargs):
@@ -114,7 +112,7 @@ def post(url, data=None, json=None, **kwargs):
    :rtype: requests.Response
    """

    return request('post', url, data=data, json=json, **kwargs)
    return request("post", url, data=data, json=json, **kwargs)


def put(url, data=None, **kwargs):
@@ -129,7 +127,7 @@ def put(url, data=None, **kwargs):
    :rtype: requests.Response
    """

    return request('put', url, data=data, **kwargs)
    return request("put", url, data=data, **kwargs)


def patch(url, data=None, **kwargs):
@@ -144,7 +142,7 @@ def patch(url, data=None, **kwargs):
    :rtype: requests.Response
    """

    return request('patch', url, data=data, **kwargs)
    return request("patch", url, data=data, **kwargs)


def delete(url, **kwargs):
@@ -156,4 +154,4 @@ def delete(url, **kwargs):
    :rtype: requests.Response
    """

    return request('delete', url, **kwargs)
    return request("delete", url, **kwargs)
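These helpers are thin wrappers around request(); head() is the only one that changes a default (allow_redirects=False). A short sketch with a placeholder URL:

import requests

resp = requests.get("https://example.org", params={"q": "tautulli"}, timeout=10)
head = requests.head("https://example.org", timeout=10)  # no redirects by default
print(resp.status_code, head.headers.get("Content-Type"))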
requests/auth.py
@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-

"""
requests.auth
~~~~~~~~~~~~~
@@ -7,22 +5,21 @@ requests.auth
This module contains the authentication handlers for Requests.
"""

import hashlib
import os
import re
import time
import hashlib
import threading
import time
import warnings

from base64 import b64encode

from .compat import urlparse, str, basestring
from .cookies import extract_cookies_to_jar
from ._internal_utils import to_native_string
from .compat import basestring, str, urlparse
from .cookies import extract_cookies_to_jar
from .utils import parse_dict_header

CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
CONTENT_TYPE_FORM_URLENCODED = "application/x-www-form-urlencoded"
CONTENT_TYPE_MULTI_PART = "multipart/form-data"


def _basic_auth_str(username, password):
@@ -57,23 +54,23 @@ def _basic_auth_str(username, password):
    # -- End Removal --

    if isinstance(username, str):
        username = username.encode('latin1')
        username = username.encode("latin1")

    if isinstance(password, str):
        password = password.encode('latin1')
        password = password.encode("latin1")

    authstr = 'Basic ' + to_native_string(
        b64encode(b':'.join((username, password))).strip()
    authstr = "Basic " + to_native_string(
        b64encode(b":".join((username, password))).strip()
    )

    return authstr


class AuthBase(object):
class AuthBase:
    """Base class that all auth implementations derive from"""

    def __call__(self, r):
        raise NotImplementedError('Auth hooks must be callable.')
        raise NotImplementedError("Auth hooks must be callable.")


class HTTPBasicAuth(AuthBase):
@@ -84,16 +81,18 @@ class HTTPBasicAuth(AuthBase):
        self.password = password

    def __eq__(self, other):
        return all([
            self.username == getattr(other, 'username', None),
            self.password == getattr(other, 'password', None)
        ])
        return all(
            [
                self.username == getattr(other, "username", None),
                self.password == getattr(other, "password", None),
            ]
        )

    def __ne__(self, other):
        return not self == other

    def __call__(self, r):
        r.headers['Authorization'] = _basic_auth_str(self.username, self.password)
        r.headers["Authorization"] = _basic_auth_str(self.username, self.password)
        return r

@@ -101,7 +100,7 @@ class HTTPProxyAuth(HTTPBasicAuth):
    """Attaches HTTP Proxy Authentication to a given Request object."""

    def __call__(self, r):
        r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)
        r.headers["Proxy-Authorization"] = _basic_auth_str(self.username, self.password)
        return r

@@ -116,9 +115,9 @@ class HTTPDigestAuth(AuthBase):

    def init_per_thread_state(self):
        # Ensure state is initialized just once per-thread
        if not hasattr(self._thread_local, 'init'):
        if not hasattr(self._thread_local, "init"):
            self._thread_local.init = True
            self._thread_local.last_nonce = ''
            self._thread_local.last_nonce = ""
            self._thread_local.nonce_count = 0
            self._thread_local.chal = {}
            self._thread_local.pos = None
@@ -129,44 +128,52 @@ class HTTPDigestAuth(AuthBase):
        :rtype: str
        """

        realm = self._thread_local.chal['realm']
        nonce = self._thread_local.chal['nonce']
        qop = self._thread_local.chal.get('qop')
        algorithm = self._thread_local.chal.get('algorithm')
        opaque = self._thread_local.chal.get('opaque')
        realm = self._thread_local.chal["realm"]
        nonce = self._thread_local.chal["nonce"]
        qop = self._thread_local.chal.get("qop")
        algorithm = self._thread_local.chal.get("algorithm")
        opaque = self._thread_local.chal.get("opaque")
        hash_utf8 = None

        if algorithm is None:
            _algorithm = 'MD5'
            _algorithm = "MD5"
        else:
            _algorithm = algorithm.upper()
        # lambdas assume digest modules are imported at the top level
        if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':
        if _algorithm == "MD5" or _algorithm == "MD5-SESS":

            def md5_utf8(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                    x = x.encode("utf-8")
                return hashlib.md5(x).hexdigest()

            hash_utf8 = md5_utf8
        elif _algorithm == 'SHA':
        elif _algorithm == "SHA":

            def sha_utf8(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                    x = x.encode("utf-8")
                return hashlib.sha1(x).hexdigest()

            hash_utf8 = sha_utf8
        elif _algorithm == 'SHA-256':
        elif _algorithm == "SHA-256":

            def sha256_utf8(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                    x = x.encode("utf-8")
                return hashlib.sha256(x).hexdigest()

            hash_utf8 = sha256_utf8
        elif _algorithm == 'SHA-512':
        elif _algorithm == "SHA-512":

            def sha512_utf8(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                    x = x.encode("utf-8")
                return hashlib.sha512(x).hexdigest()

            hash_utf8 = sha512_utf8

        KD = lambda s, d: hash_utf8("%s:%s" % (s, d))
        KD = lambda s, d: hash_utf8(f"{s}:{d}")  # noqa:E731

        if hash_utf8 is None:
            return None
@@ -177,10 +184,10 @@ class HTTPDigestAuth(AuthBase):
        #: path is request-uri defined in RFC 2616 which should not be empty
        path = p_parsed.path or "/"
        if p_parsed.query:
            path += '?' + p_parsed.query
            path += f"?{p_parsed.query}"

        A1 = '%s:%s:%s' % (self.username, realm, self.password)
        A2 = '%s:%s' % (method, path)
        A1 = f"{self.username}:{realm}:{self.password}"
        A2 = f"{method}:{path}"

        HA1 = hash_utf8(A1)
        HA2 = hash_utf8(A2)
@@ -189,22 +196,20 @@ class HTTPDigestAuth(AuthBase):
            self._thread_local.nonce_count += 1
        else:
            self._thread_local.nonce_count = 1
        ncvalue = '%08x' % self._thread_local.nonce_count
        s = str(self._thread_local.nonce_count).encode('utf-8')
        s += nonce.encode('utf-8')
        s += time.ctime().encode('utf-8')
        ncvalue = f"{self._thread_local.nonce_count:08x}"
        s = str(self._thread_local.nonce_count).encode("utf-8")
        s += nonce.encode("utf-8")
        s += time.ctime().encode("utf-8")
        s += os.urandom(8)

        cnonce = (hashlib.sha1(s).hexdigest()[:16])
        if _algorithm == 'MD5-SESS':
            HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))
        cnonce = hashlib.sha1(s).hexdigest()[:16]
        if _algorithm == "MD5-SESS":
            HA1 = hash_utf8(f"{HA1}:{nonce}:{cnonce}")

        if not qop:
            respdig = KD(HA1, "%s:%s" % (nonce, HA2))
        elif qop == 'auth' or 'auth' in qop.split(','):
            noncebit = "%s:%s:%s:%s:%s" % (
                nonce, ncvalue, cnonce, 'auth', HA2
            )
            respdig = KD(HA1, f"{nonce}:{HA2}")
        elif qop == "auth" or "auth" in qop.split(","):
            noncebit = f"{nonce}:{ncvalue}:{cnonce}:auth:{HA2}"
            respdig = KD(HA1, noncebit)
        else:
            # XXX handle auth-int.
@@ -213,18 +218,20 @@ class HTTPDigestAuth(AuthBase):
        self._thread_local.last_nonce = nonce

        # XXX should the partial digests be encoded too?
        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (self.username, realm, nonce, path, respdig)
        base = (
            f'username="{self.username}", realm="{realm}", nonce="{nonce}", '
            f'uri="{path}", response="{respdig}"'
        )
        if opaque:
            base += ', opaque="%s"' % opaque
            base += f', opaque="{opaque}"'
        if algorithm:
            base += ', algorithm="%s"' % algorithm
            base += f', algorithm="{algorithm}"'
        if entdig:
            base += ', digest="%s"' % entdig
            base += f', digest="{entdig}"'
        if qop:
            base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)
            base += f', qop="auth", nc={ncvalue}, cnonce="{cnonce}"'

        return 'Digest %s' % (base)
        return f"Digest {base}"

    def handle_redirect(self, r, **kwargs):
        """Reset num_401_calls counter on redirects."""
@@ -248,13 +255,13 @@ class HTTPDigestAuth(AuthBase):
            # Rewind the file position indicator of the body to where
            # it was to resend the request.
            r.request.body.seek(self._thread_local.pos)
        s_auth = r.headers.get('www-authenticate', '')
        s_auth = r.headers.get("www-authenticate", "")

        if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2:
        if "digest" in s_auth.lower() and self._thread_local.num_401_calls < 2:

            self._thread_local.num_401_calls += 1
            pat = re.compile(r'digest ', flags=re.IGNORECASE)
            self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))
            pat = re.compile(r"digest ", flags=re.IGNORECASE)
            self._thread_local.chal = parse_dict_header(pat.sub("", s_auth, count=1))

            # Consume content and release the original connection
            # to allow our new request to reuse the same one.
@@ -264,8 +271,9 @@ class HTTPDigestAuth(AuthBase):
            extract_cookies_to_jar(prep._cookies, r.request, r.raw)
            prep.prepare_cookies(prep._cookies)

            prep.headers['Authorization'] = self.build_digest_header(
                prep.method, prep.url)
            prep.headers["Authorization"] = self.build_digest_header(
                prep.method, prep.url
            )
            _r = r.connection.send(prep, **kwargs)
            _r.history.append(r)
            _r.request = prep
@@ -280,7 +288,7 @@ class HTTPDigestAuth(AuthBase):
        self.init_per_thread_state()
        # If we have a saved nonce, skip the 401
        if self._thread_local.last_nonce:
            r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
            r.headers["Authorization"] = self.build_digest_header(r.method, r.url)
        try:
            self._thread_local.pos = r.body.tell()
        except AttributeError:
@@ -289,17 +297,19 @@ class HTTPDigestAuth(AuthBase):
            # file position of the previous body. Ensure it's set to
            # None.
            self._thread_local.pos = None
        r.register_hook('response', self.handle_401)
        r.register_hook('response', self.handle_redirect)
        r.register_hook("response", self.handle_401)
        r.register_hook("response", self.handle_redirect)
        self._thread_local.num_401_calls = 1

        return r

    def __eq__(self, other):
        return all([
            self.username == getattr(other, 'username', None),
            self.password == getattr(other, 'password', None)
        ])
        return all(
            [
                self.username == getattr(other, "username", None),
                self.password == getattr(other, "password", None),
            ]
        )

    def __ne__(self, other):
        return not self == other
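Usage sketch for the two handler families above (the endpoints are placeholders): basic auth sends the header produced by _basic_auth_str() up front, while digest auth registers the 401/redirect hooks shown above and replays the request with a computed Digest header:

import requests
from requests.auth import HTTPBasicAuth, HTTPDigestAuth

r1 = requests.get("https://example.org/protected", auth=HTTPBasicAuth("user", "pass"))
r2 = requests.get("https://example.org/digest", auth=HTTPDigestAuth("user", "pass"))
print(r1.status_code, r2.status_code)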
requests/certs.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
requests.certs
@@ -14,5 +13,5 @@ packaged CA bundle.
"""
from certifi import where

if __name__ == '__main__':
if __name__ == "__main__":
    print(where())
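Since certs.py just re-exports certifi's where(), both calls below return the same bundle path:

import certifi
import requests.certs

assert requests.certs.where() == certifi.where()
print(requests.certs.where())  # path to certifi's bundled cacert.pem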
requests/compat.py
@@ -1,11 +1,10 @@
# -*- coding: utf-8 -*-

"""
requests.compat
~~~~~~~~~~~~~~~

This module handles import compatibility issues between Python 2 and
Python 3.
This module previously handled import compatibility issues
between Python 2 and Python 3. It remains for backwards
compatibility until the next major version.
"""

try:
@@ -23,59 +22,58 @@ import sys
_ver = sys.version_info

#: Python 2.x?
is_py2 = (_ver[0] == 2)
is_py2 = _ver[0] == 2

#: Python 3.x?
is_py3 = (_ver[0] == 3)
is_py3 = _ver[0] == 3

# json/simplejson module import resolution
has_simplejson = False
try:
    import simplejson as json

    has_simplejson = True
except ImportError:
    import json

# ---------
# Specifics
# ---------
if has_simplejson:
    from simplejson import JSONDecodeError
else:
    from json import JSONDecodeError

if is_py2:
    from urllib import (
        quote, unquote, quote_plus, unquote_plus, urlencode, getproxies,
        proxy_bypass, proxy_bypass_environment, getproxies_environment)
    from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
    from urllib2 import parse_http_list
    import cookielib
    from Cookie import Morsel
    from StringIO import StringIO
    # Keep OrderedDict for backwards compatibility.
    from collections import Callable, Mapping, MutableMapping, OrderedDict
# Keep OrderedDict for backwards compatibility.
from collections import OrderedDict
from collections.abc import Callable, Mapping, MutableMapping
from http import cookiejar as cookielib
from http.cookies import Morsel
from io import StringIO

    builtin_str = str
    bytes = str
    str = unicode
    basestring = basestring
    numeric_types = (int, long, float)
    integer_types = (int, long)
    JSONDecodeError = ValueError
# --------------
# Legacy Imports
# --------------
from urllib.parse import (
    quote,
    quote_plus,
    unquote,
    unquote_plus,
    urldefrag,
    urlencode,
    urljoin,
    urlparse,
    urlsplit,
    urlunparse,
)
from urllib.request import (
    getproxies,
    getproxies_environment,
    parse_http_list,
    proxy_bypass,
    proxy_bypass_environment,
)

elif is_py3:
    from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
    from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment
    from http import cookiejar as cookielib
    from http.cookies import Morsel
    from io import StringIO
    # Keep OrderedDict for backwards compatibility.
    from collections import OrderedDict
    from collections.abc import Callable, Mapping, MutableMapping
    if has_simplejson:
        from simplejson import JSONDecodeError
    else:
        from json import JSONDecodeError

    builtin_str = str
    str = str
    bytes = bytes
    basestring = (str, bytes)
    numeric_types = (int, float)
    integer_types = (int,)
builtin_str = str
str = str
bytes = bytes
basestring = (str, bytes)
numeric_types = (int, float)
integer_types = (int,)
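What the retained compat layer buys callers, in brief: one place to import the URL helpers and a single JSONDecodeError that matches whichever json module (stdlib or simplejson) was resolved above. A small sketch:

from requests.compat import JSONDecodeError, json, urlparse

parts = urlparse("https://example.org/api?x=1")
print(parts.scheme, parts.netloc, parts.path)

try:
    json.loads("not json")
except JSONDecodeError:
    print("bad JSON")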
requests/cookies.py
@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-

"""
requests.cookies
~~~~~~~~~~~~~~~~
@@ -9,12 +7,12 @@ Compatibility code to be able to use `cookielib.CookieJar` with requests.
requests.utils imports from here, so be careful with imports.
"""

import calendar
import copy
import time
import calendar

from ._internal_utils import to_native_string
from .compat import cookielib, urlparse, urlunparse, Morsel, MutableMapping
from .compat import Morsel, MutableMapping, cookielib, urlparse, urlunparse

try:
    import threading
@@ -22,7 +20,7 @@ except ImportError:
    import dummy_threading as threading


class MockRequest(object):
class MockRequest:
    """Wraps a `requests.Request` to mimic a `urllib2.Request`.

    The code in `cookielib.CookieJar` expects this interface in order to correctly
@@ -51,16 +49,22 @@ class MockRequest(object):
    def get_full_url(self):
        # Only return the response's URL if the user hadn't set the Host
        # header
        if not self._r.headers.get('Host'):
        if not self._r.headers.get("Host"):
            return self._r.url
        # If they did set it, retrieve it and reconstruct the expected domain
        host = to_native_string(self._r.headers['Host'], encoding='utf-8')
        host = to_native_string(self._r.headers["Host"], encoding="utf-8")
        parsed = urlparse(self._r.url)
        # Reconstruct the URL as we expect it
        return urlunparse([
            parsed.scheme, host, parsed.path, parsed.params, parsed.query,
            parsed.fragment
        ])
        return urlunparse(
            [
                parsed.scheme,
                host,
                parsed.path,
                parsed.params,
                parsed.query,
                parsed.fragment,
            ]
        )

    def is_unverifiable(self):
        return True
@@ -73,7 +77,9 @@ class MockRequest(object):

    def add_header(self, key, val):
        """cookielib has no legitimate use for this method; add it back if you find one."""
        raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")
        raise NotImplementedError(
            "Cookie headers should be added with add_unredirected_header()"
        )

    def add_unredirected_header(self, name, value):
        self._new_headers[name] = value
@@ -94,7 +100,7 @@ class MockRequest(object):
        return self.get_host()


class MockResponse(object):
class MockResponse:
    """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.

    ...what? Basically, expose the parsed HTTP headers from the server response
@@ -122,8 +128,7 @@ def extract_cookies_to_jar(jar, request, response):
    :param request: our own requests.Request object
    :param response: urllib3.HTTPResponse object
    """
    if not (hasattr(response, '_original_response') and
            response._original_response):
    if not (hasattr(response, "_original_response") and response._original_response):
        return
    # the _original_response field is the wrapped httplib.HTTPResponse object,
    req = MockRequest(request)
@@ -140,7 +145,7 @@ def get_cookie_header(jar, request):
    """
    r = MockRequest(request)
    jar.add_cookie_header(r)
    return r.get_new_headers().get('Cookie')
    return r.get_new_headers().get("Cookie")


def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
@@ -205,7 +210,9 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
        """
        # support client code that unsets cookies by assignment of a None value:
        if value is None:
            remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
            remove_cookie_by_name(
                self, name, domain=kwargs.get("domain"), path=kwargs.get("path")
            )
            return

        if isinstance(value, Morsel):
@@ -305,16 +312,15 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
        """
        dictionary = {}
        for cookie in iter(self):
            if (
                (domain is None or cookie.domain == domain) and
                (path is None or cookie.path == path)
            if (domain is None or cookie.domain == domain) and (
                path is None or cookie.path == path
            ):
                dictionary[cookie.name] = cookie.value
        return dictionary

    def __contains__(self, name):
        try:
            return super(RequestsCookieJar, self).__contains__(name)
            return super().__contains__(name)
        except CookieConflictError:
            return True

@@ -341,9 +347,13 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
        remove_cookie_by_name(self, name)

    def set_cookie(self, cookie, *args, **kwargs):
        if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
            cookie.value = cookie.value.replace('\\"', '')
        return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)
        if (
            hasattr(cookie.value, "startswith")
            and cookie.value.startswith('"')
            and cookie.value.endswith('"')
        ):
            cookie.value = cookie.value.replace('\\"', "")
        return super().set_cookie(cookie, *args, **kwargs)

    def update(self, other):
        """Updates this jar with cookies from another CookieJar or dict-like"""
@@ -351,7 +361,7 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
            for cookie in other:
                self.set_cookie(copy.copy(cookie))
        else:
            super(RequestsCookieJar, self).update(other)
            super().update(other)

    def _find(self, name, domain=None, path=None):
        """Requests uses this method internally to get cookie values.
@@ -371,7 +381,7 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
                    if path is None or cookie.path == path:
                        return cookie.value

        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
        raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}")

    def _find_no_duplicates(self, name, domain=None, path=None):
        """Both ``__get_item__`` and ``get`` call this function: it's never
@@ -390,25 +400,29 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        if toReturn is not None:  # if there are multiple cookies that meet passed in criteria
                            raise CookieConflictError('There are multiple cookies with name, %r' % (name))
                        toReturn = cookie.value  # we will eventually return this as long as no cookie conflict
                        if toReturn is not None:
                            # if there are multiple cookies that meet passed in criteria
                            raise CookieConflictError(
                                f"There are multiple cookies with name, {name!r}"
                            )
                        # we will eventually return this as long as no cookie conflict
                        toReturn = cookie.value

        if toReturn:
            return toReturn
        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
        raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}")

    def __getstate__(self):
        """Unlike a normal CookieJar, this class is pickleable."""
        state = self.__dict__.copy()
        # remove the unpickleable RLock object
        state.pop('_cookies_lock')
        state.pop("_cookies_lock")
        return state

    def __setstate__(self, state):
        """Unlike a normal CookieJar, this class is pickleable."""
        self.__dict__.update(state)
        if '_cookies_lock' not in self.__dict__:
        if "_cookies_lock" not in self.__dict__:
            self._cookies_lock = threading.RLock()

    def copy(self):
@@ -427,7 +441,7 @@ def _copy_cookie_jar(jar):
    if jar is None:
        return None

    if hasattr(jar, 'copy'):
    if hasattr(jar, "copy"):
        # We're dealing with an instance of RequestsCookieJar
        return jar.copy()
    # We're dealing with a generic CookieJar instance
@@ -445,31 +459,32 @@ def create_cookie(name, value, **kwargs):
    and sent on every request (this is sometimes called a "supercookie").
    """
    result = {
        'version': 0,
        'name': name,
        'value': value,
        'port': None,
        'domain': '',
        'path': '/',
        'secure': False,
        'expires': None,
        'discard': True,
        'comment': None,
        'comment_url': None,
        'rest': {'HttpOnly': None},
        'rfc2109': False,
        "version": 0,
        "name": name,
        "value": value,
        "port": None,
        "domain": "",
        "path": "/",
        "secure": False,
        "expires": None,
        "discard": True,
        "comment": None,
        "comment_url": None,
        "rest": {"HttpOnly": None},
        "rfc2109": False,
    }

    badargs = set(kwargs) - set(result)
    if badargs:
        err = 'create_cookie() got unexpected keyword arguments: %s'
        raise TypeError(err % list(badargs))
        raise TypeError(
            f"create_cookie() got unexpected keyword arguments: {list(badargs)}"
        )

    result.update(kwargs)
    result['port_specified'] = bool(result['port'])
    result['domain_specified'] = bool(result['domain'])
    result['domain_initial_dot'] = result['domain'].startswith('.')
    result['path_specified'] = bool(result['path'])
    result["port_specified"] = bool(result["port"])
    result["domain_specified"] = bool(result["domain"])
    result["domain_initial_dot"] = result["domain"].startswith(".")
    result["path_specified"] = bool(result["path"])

    return cookielib.Cookie(**result)

@@ -478,30 +493,28 @@ def morsel_to_cookie(morsel):
    """Convert a Morsel object into a Cookie containing the one k/v pair."""

    expires = None
    if morsel['max-age']:
    if morsel["max-age"]:
        try:
            expires = int(time.time() + int(morsel['max-age']))
            expires = int(time.time() + int(morsel["max-age"]))
        except ValueError:
            raise TypeError('max-age: %s must be integer' % morsel['max-age'])
    elif morsel['expires']:
        time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
        expires = calendar.timegm(
            time.strptime(morsel['expires'], time_template)
        )
            raise TypeError(f"max-age: {morsel['max-age']} must be integer")
    elif morsel["expires"]:
        time_template = "%a, %d-%b-%Y %H:%M:%S GMT"
        expires = calendar.timegm(time.strptime(morsel["expires"], time_template))
    return create_cookie(
        comment=morsel['comment'],
        comment_url=bool(morsel['comment']),
        comment=morsel["comment"],
        comment_url=bool(morsel["comment"]),
        discard=False,
        domain=morsel['domain'],
        domain=morsel["domain"],
        expires=expires,
        name=morsel.key,
        path=morsel['path'],
        path=morsel["path"],
        port=None,
        rest={'HttpOnly': morsel['httponly']},
        rest={"HttpOnly": morsel["httponly"]},
        rfc2109=False,
        secure=bool(morsel['secure']),
        secure=bool(morsel["secure"]),
        value=morsel.value,
        version=morsel['version'] or 0,
        version=morsel["version"] or 0,
    )

@@ -534,11 +547,10 @@ def merge_cookies(cookiejar, cookies):
    :rtype: CookieJar
    """
    if not isinstance(cookiejar, cookielib.CookieJar):
        raise ValueError('You can only merge into CookieJar')
        raise ValueError("You can only merge into CookieJar")

    if isinstance(cookies, dict):
        cookiejar = cookiejar_from_dict(
            cookies, cookiejar=cookiejar, overwrite=False)
        cookiejar = cookiejar_from_dict(cookies, cookiejar=cookiejar, overwrite=False)
    elif isinstance(cookies, cookielib.CookieJar):
        try:
            cookiejar.update(cookies)
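A short sketch of the jar API shown above (the domain is a placeholder): set() goes through create_cookie(), dict-style reads go through _find_no_duplicates(), and get_dict() applies the domain/path filter:

from requests.cookies import RequestsCookieJar, create_cookie

jar = RequestsCookieJar()
jar.set("session", "abc123", domain="example.org", path="/")
jar.set_cookie(create_cookie("theme", "dark", domain="example.org"))

print(jar.get("session"))                  # "abc123"
print(jar.get_dict(domain="example.org"))  # {"session": "abc123", "theme": "dark"}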
@ -1,5 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
requests.exceptions
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
@ -18,13 +16,12 @@ class RequestException(IOError):
|
|||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""Initialize RequestException with `request` and `response` objects."""
|
||||
response = kwargs.pop('response', None)
|
||||
response = kwargs.pop("response", None)
|
||||
self.response = response
|
||||
self.request = kwargs.pop('request', None)
|
||||
if (response is not None and not self.request and
|
||||
hasattr(response, 'request')):
|
||||
self.request = kwargs.pop("request", None)
|
||||
if response is not None and not self.request and hasattr(response, "request"):
|
||||
self.request = self.response.request
|
||||
super(RequestException, self).__init__(*args, **kwargs)
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
|
||||
class InvalidJSONError(RequestException):
|
||||
|
@ -34,6 +31,16 @@ class InvalidJSONError(RequestException):
|
|||
class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
|
||||
"""Couldn't decode the text into json"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""
|
||||
Construct the JSONDecodeError instance first with all
|
||||
args. Then use it's args to construct the IOError so that
|
||||
the json specific args aren't used as IOError specific args
|
||||
and the error message from JSONDecodeError is preserved.
|
||||
"""
|
||||
CompatJSONDecodeError.__init__(self, *args)
|
||||
InvalidJSONError.__init__(self, *self.args, **kwargs)
|
||||
|
||||
|
||||
class HTTPError(RequestException):
|
||||
"""An HTTP error occurred."""
|
||||
|
@ -118,6 +125,7 @@ class RetryError(RequestException):
|
|||
class UnrewindableBodyError(RequestException):
|
||||
"""Requests encountered an error when trying to rewind a body."""
|
||||
|
||||
|
||||
# Warnings
|
||||
|
||||
|
||||
|
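
With the new dual-construction `__init__`, one except clause catches the error under either identity (CompatJSONDecodeError resolves to simplejson's JSONDecodeError when simplejson is installed, else the stdlib's). A sketch against an illustrative URL whose body is assumed not to be JSON:

import requests

try:
    requests.get("https://example.com").json()
except requests.exceptions.JSONDecodeError as exc:
    # .msg/.doc/.pos survive thanks to the construction order above
    print(exc.msg, exc.pos)
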
@@ -1,10 +1,9 @@
 """Module containing bug report helper(s)."""
-from __future__ import print_function

 import json
 import platform
-import sys
 import ssl
+import sys

 import idna
 import urllib3
@@ -28,16 +27,16 @@ except ImportError:
     OpenSSL = None
     cryptography = None
 else:
-    import OpenSSL
     import cryptography
+    import OpenSSL


 def _implementation():
     """Return a dict with the Python implementation and version.

     Provide both the name and the version of the Python implementation
-    currently running. For example, on CPython 2.7.5 it will return
-    {'name': 'CPython', 'version': '2.7.5'}.
+    currently running. For example, on CPython 3.10.3 it will return
+    {'name': 'CPython', 'version': '3.10.3'}.

     This function works best on CPython and PyPy: in particular, it probably
     doesn't work for Jython or IronPython. Future investigation should be done
@@ -45,83 +44,83 @@ def _implementation():
     """
     implementation = platform.python_implementation()

-    if implementation == 'CPython':
+    if implementation == "CPython":
         implementation_version = platform.python_version()
-    elif implementation == 'PyPy':
-        implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major,
-                                               sys.pypy_version_info.minor,
-                                               sys.pypy_version_info.micro)
-        if sys.pypy_version_info.releaselevel != 'final':
-            implementation_version = ''.join([
-                implementation_version, sys.pypy_version_info.releaselevel
-            ])
-    elif implementation == 'Jython':
+    elif implementation == "PyPy":
+        implementation_version = "{}.{}.{}".format(
+            sys.pypy_version_info.major,
+            sys.pypy_version_info.minor,
+            sys.pypy_version_info.micro,
+        )
+        if sys.pypy_version_info.releaselevel != "final":
+            implementation_version = "".join(
+                [implementation_version, sys.pypy_version_info.releaselevel]
+            )
+    elif implementation == "Jython":
         implementation_version = platform.python_version()  # Complete Guess
-    elif implementation == 'IronPython':
+    elif implementation == "IronPython":
         implementation_version = platform.python_version()  # Complete Guess
     else:
-        implementation_version = 'Unknown'
+        implementation_version = "Unknown"

-    return {'name': implementation, 'version': implementation_version}
+    return {"name": implementation, "version": implementation_version}


 def info():
     """Generate information for a bug report."""
     try:
         platform_info = {
-            'system': platform.system(),
-            'release': platform.release(),
+            "system": platform.system(),
+            "release": platform.release(),
         }
-    except IOError:
+    except OSError:
         platform_info = {
-            'system': 'Unknown',
-            'release': 'Unknown',
+            "system": "Unknown",
+            "release": "Unknown",
         }

     implementation_info = _implementation()
-    urllib3_info = {'version': urllib3.__version__}
-    charset_normalizer_info = {'version': None}
-    chardet_info = {'version': None}
+    urllib3_info = {"version": urllib3.__version__}
+    charset_normalizer_info = {"version": None}
+    chardet_info = {"version": None}
     if charset_normalizer:
-        charset_normalizer_info = {'version': charset_normalizer.__version__}
+        charset_normalizer_info = {"version": charset_normalizer.__version__}
     if chardet:
-        chardet_info = {'version': chardet.__version__}
+        chardet_info = {"version": chardet.__version__}

     pyopenssl_info = {
-        'version': None,
-        'openssl_version': '',
+        "version": None,
+        "openssl_version": "",
     }
     if OpenSSL:
         pyopenssl_info = {
-            'version': OpenSSL.__version__,
-            'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER,
+            "version": OpenSSL.__version__,
+            "openssl_version": f"{OpenSSL.SSL.OPENSSL_VERSION_NUMBER:x}",
         }
     cryptography_info = {
-        'version': getattr(cryptography, '__version__', ''),
+        "version": getattr(cryptography, "__version__", ""),
     }
     idna_info = {
-        'version': getattr(idna, '__version__', ''),
+        "version": getattr(idna, "__version__", ""),
     }

     system_ssl = ssl.OPENSSL_VERSION_NUMBER
-    system_ssl_info = {
-        'version': '%x' % system_ssl if system_ssl is not None else ''
-    }
+    system_ssl_info = {"version": f"{system_ssl:x}" if system_ssl is not None else ""}

     return {
-        'platform': platform_info,
-        'implementation': implementation_info,
-        'system_ssl': system_ssl_info,
-        'using_pyopenssl': pyopenssl is not None,
-        'using_charset_normalizer': chardet is None,
-        'pyOpenSSL': pyopenssl_info,
-        'urllib3': urllib3_info,
-        'chardet': chardet_info,
-        'charset_normalizer': charset_normalizer_info,
-        'cryptography': cryptography_info,
-        'idna': idna_info,
-        'requests': {
-            'version': requests_version,
+        "platform": platform_info,
+        "implementation": implementation_info,
+        "system_ssl": system_ssl_info,
+        "using_pyopenssl": pyopenssl is not None,
+        "using_charset_normalizer": chardet is None,
+        "pyOpenSSL": pyopenssl_info,
+        "urllib3": urllib3_info,
+        "chardet": chardet_info,
+        "charset_normalizer": charset_normalizer_info,
+        "cryptography": cryptography_info,
+        "idna": idna_info,
+        "requests": {
+            "version": requests_version,
         },
     }

@@ -131,5 +130,5 @@ def main():
     print(json.dumps(info(), sort_keys=True, indent=2))


-if __name__ == '__main__':
+if __name__ == "__main__":
     main()
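
Because the `__main__` guard is kept, the bug-report helper stays directly runnable; both forms below print the same JSON report:

# From a shell:
#   python -m requests.help
# Or programmatically:
import requests.help

report = requests.help.info()
print(report["implementation"], report["urllib3"])
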
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 """
 requests.hooks
 ~~~~~~~~~~~~~~
@@ -11,12 +9,13 @@ Available hooks:
 ``response``:
     The response generated from a Request.
 """
-HOOKS = ['response']
+HOOKS = ["response"]


 def default_hooks():
     return {event: [] for event in HOOKS}


 # TODO: response is the only one


@@ -25,7 +24,7 @@ def dispatch_hook(key, hooks, hook_data, **kwargs):
     hooks = hooks or {}
     hooks = hooks.get(key)
     if hooks:
-        if hasattr(hooks, '__call__'):
+        if hasattr(hooks, "__call__"):
             hooks = [hooks]
         for hook in hooks:
            _hook_data = hook(hook_data, **kwargs)
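
`response` is the only hook event, and `dispatch_hook` wraps a bare callable into a list, so these two registrations are equivalent (the URL is illustrative):

import requests

def log_status(response, **kwargs):
    print(response.status_code, response.url)

requests.get("https://example.com", hooks={"response": log_status})
requests.get("https://example.com", hooks={"response": [log_status]})
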
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 """
 requests.models
 ~~~~~~~~~~~~~~~
@@ -8,48 +6,72 @@ This module contains the primary objects that power Requests.
 """

 import datetime
-import sys

 # Import encoding now, to avoid implicit import later.
 # Implicit import within threads may cause LookupError when standard library is in a ZIP,
 # such as in Embedded Python. See https://github.com/psf/requests/issues/3578.
-import encodings.idna
+import encodings.idna  # noqa: F401
+from io import UnsupportedOperation

+from urllib3.exceptions import (
+    DecodeError,
+    LocationParseError,
+    ProtocolError,
+    ReadTimeoutError,
+    SSLError,
+)
 from urllib3.fields import RequestField
 from urllib3.filepost import encode_multipart_formdata
 from urllib3.util import parse_url
-from urllib3.exceptions import (
-    DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)

-from io import UnsupportedOperation
-from .hooks import default_hooks
-from .structures import CaseInsensitiveDict
-
-from .auth import HTTPBasicAuth
-from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
-from .exceptions import (
-    HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
-    ContentDecodingError, ConnectionError, StreamConsumedError,
-    InvalidJSONError)
-from .exceptions import JSONDecodeError as RequestsJSONDecodeError
 from ._internal_utils import to_native_string, unicode_is_ascii
-from .utils import (
-    guess_filename, get_auth_from_url, requote_uri,
-    stream_decode_response_unicode, to_key_val_list, parse_header_links,
-    iter_slices, guess_json_utf, super_len, check_header_validity)
+from .auth import HTTPBasicAuth
 from .compat import (
-    Callable, Mapping,
-    cookielib, urlunparse, urlsplit, urlencode, str, bytes,
-    is_py2, chardet, builtin_str, basestring, JSONDecodeError)
+    Callable,
+    JSONDecodeError,
+    Mapping,
+    basestring,
+    builtin_str,
+    chardet,
+    cookielib,
+)
 from .compat import json as complexjson
+from .compat import urlencode, urlsplit, urlunparse
+from .cookies import _copy_cookie_jar, cookiejar_from_dict, get_cookie_header
+from .exceptions import (
+    ChunkedEncodingError,
+    ConnectionError,
+    ContentDecodingError,
+    HTTPError,
+    InvalidJSONError,
+    InvalidURL,
+)
+from .exceptions import JSONDecodeError as RequestsJSONDecodeError
+from .exceptions import MissingSchema
+from .exceptions import SSLError as RequestsSSLError
+from .exceptions import StreamConsumedError
+from .hooks import default_hooks
 from .status_codes import codes
+from .structures import CaseInsensitiveDict
+from .utils import (
+    check_header_validity,
+    get_auth_from_url,
+    guess_filename,
+    guess_json_utf,
+    iter_slices,
+    parse_header_links,
+    requote_uri,
+    stream_decode_response_unicode,
+    super_len,
+    to_key_val_list,
+)

 #: The set of HTTP status codes that indicate an automatically
 #: processable redirect.
 REDIRECT_STATI = (
-    codes.moved,               # 301
-    codes.found,               # 302
-    codes.other,               # 303
+    codes.moved,  # 301
+    codes.found,  # 302
+    codes.other,  # 303
     codes.temporary_redirect,  # 307
     codes.permanent_redirect,  # 308
 )
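
For reference, the tuple above resolves to the five classic redirect codes:

from requests.models import REDIRECT_STATI

assert set(REDIRECT_STATI) == {301, 302, 303, 307, 308}
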
@@ -59,7 +81,7 @@ CONTENT_CHUNK_SIZE = 10 * 1024
 ITER_CHUNK_SIZE = 512


-class RequestEncodingMixin(object):
+class RequestEncodingMixin:
     @property
     def path_url(self):
         """Build the path URL to use."""
@@ -70,16 +92,16 @@ class RequestEncodingMixin(object):

         path = p.path
         if not path:
-            path = '/'
+            path = "/"

         url.append(path)

         query = p.query
         if query:
-            url.append('?')
+            url.append("?")
             url.append(query)

-        return ''.join(url)
+        return "".join(url)

     @staticmethod
     def _encode_params(data):
@@ -92,18 +114,21 @@ class RequestEncodingMixin(object):

         if isinstance(data, (str, bytes)):
             return data
-        elif hasattr(data, 'read'):
+        elif hasattr(data, "read"):
             return data
-        elif hasattr(data, '__iter__'):
+        elif hasattr(data, "__iter__"):
             result = []
             for k, vs in to_key_val_list(data):
-                if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
+                if isinstance(vs, basestring) or not hasattr(vs, "__iter__"):
                     vs = [vs]
                 for v in vs:
                     if v is not None:
                         result.append(
-                            (k.encode('utf-8') if isinstance(k, str) else k,
-                             v.encode('utf-8') if isinstance(v, str) else v))
+                            (
+                                k.encode("utf-8") if isinstance(k, str) else k,
+                                v.encode("utf-8") if isinstance(v, str) else v,
+                            )
+                        )
             return urlencode(result, doseq=True)
         else:
             return data
@@ -118,7 +143,7 @@ class RequestEncodingMixin(object):
         The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)
         or 4-tuples (filename, fileobj, contentype, custom_headers).
         """
-        if (not files):
+        if not files:
             raise ValueError("Files must be provided.")
         elif isinstance(data, basestring):
             raise ValueError("Data must not be a string.")
@@ -128,7 +153,7 @@ class RequestEncodingMixin(object):
         files = to_key_val_list(files or {})

         for field, val in fields:
-            if isinstance(val, basestring) or not hasattr(val, '__iter__'):
+            if isinstance(val, basestring) or not hasattr(val, "__iter__"):
                 val = [val]
             for v in val:
                 if v is not None:
@@ -137,8 +162,13 @@ class RequestEncodingMixin(object):
                         v = str(v)

                     new_fields.append(
-                        (field.decode('utf-8') if isinstance(field, bytes) else field,
-                         v.encode('utf-8') if isinstance(v, str) else v))
+                        (
+                            field.decode("utf-8")
+                            if isinstance(field, bytes)
+                            else field,
+                            v.encode("utf-8") if isinstance(v, str) else v,
+                        )
+                    )

         for (k, v) in files:
             # support for explicit filename
@@ -157,7 +187,7 @@ class RequestEncodingMixin(object):

             if isinstance(fp, (str, bytes, bytearray)):
                 fdata = fp
-            elif hasattr(fp, 'read'):
+            elif hasattr(fp, "read"):
                 fdata = fp.read()
             elif fp is None:
                 continue
@@ -173,16 +203,16 @@ class RequestEncodingMixin(object):
         return body, content_type


-class RequestHooksMixin(object):
+class RequestHooksMixin:
     def register_hook(self, event, hook):
         """Properly register a hook."""

         if event not in self.hooks:
-            raise ValueError('Unsupported event specified, with event name "%s"' % (event))
+            raise ValueError(f'Unsupported event specified, with event name "{event}"')

         if isinstance(hook, Callable):
             self.hooks[event].append(hook)
-        elif hasattr(hook, '__iter__'):
+        elif hasattr(hook, "__iter__"):
             self.hooks[event].extend(h for h in hook if isinstance(h, Callable))

     def deregister_hook(self, event, hook):
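
A sketch of what the RequestEncodingMixin helpers above produce (URL and params are invented; `_encode_params` is a staticmethod, so it can be called on the class):

from requests.models import PreparedRequest

p = PreparedRequest()
p.prepare(method="GET", url="https://example.com/search?q=py")
print(p.path_url)  # /search?q=py

print(PreparedRequest._encode_params({"a": ["1", "2"], "b": "3"}))  # a=1&a=2&b=3
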
@@ -225,9 +255,19 @@ class Request(RequestHooksMixin):
       <PreparedRequest [GET]>
     """

-    def __init__(self,
-                 method=None, url=None, headers=None, files=None, data=None,
-                 params=None, auth=None, cookies=None, hooks=None, json=None):
+    def __init__(
+        self,
+        method=None,
+        url=None,
+        headers=None,
+        files=None,
+        data=None,
+        params=None,
+        auth=None,
+        cookies=None,
+        hooks=None,
+        json=None,
+    ):

         # Default empty dicts for dict params.
         data = [] if data is None else data
@@ -251,7 +291,7 @@ class Request(RequestHooksMixin):
         self.cookies = cookies

     def __repr__(self):
-        return '<Request [%s]>' % (self.method)
+        return f"<Request [{self.method}]>"

     def prepare(self):
         """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
@@ -309,9 +349,19 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
         #: integer denoting starting position of a readable file-like body.
         self._body_position = None

-    def prepare(self,
-                method=None, url=None, headers=None, files=None, data=None,
-                params=None, auth=None, cookies=None, hooks=None, json=None):
+    def prepare(
+        self,
+        method=None,
+        url=None,
+        headers=None,
+        files=None,
+        data=None,
+        params=None,
+        auth=None,
+        cookies=None,
+        hooks=None,
+        json=None,
+    ):
         """Prepares the entire request with the given parameters."""

         self.prepare_method(method)
@@ -328,7 +378,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
         self.prepare_hooks(hooks)

     def __repr__(self):
-        return '<PreparedRequest [%s]>' % (self.method)
+        return f"<PreparedRequest [{self.method}]>"

     def copy(self):
         p = PreparedRequest()
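
`Request.prepare` mirrors what `Session.prepare_request` does, minus session state; a sketch with an illustrative URL:

from requests import Request

req = Request(method="GET", url="https://example.com", params={"q": "py"})
prep = req.prepare()
print(repr(prep), prep.url)  # <PreparedRequest [GET]> https://example.com/?q=py
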
@@ -352,7 +402,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
         import idna

         try:
-            host = idna.encode(host, uts46=True).decode('utf-8')
+            host = idna.encode(host, uts46=True).decode("utf-8")
         except idna.IDNAError:
             raise UnicodeError
         return host
@@ -365,9 +415,9 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
         #: on python 3.x.
         #: https://github.com/psf/requests/pull/2238
         if isinstance(url, bytes):
-            url = url.decode('utf8')
+            url = url.decode("utf8")
         else:
-            url = unicode(url) if is_py2 else str(url)
+            url = str(url)

         # Remove leading whitespaces from url
         url = url.lstrip()
@@ -375,7 +425,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
         # Don't do any URL preparation for non-HTTP schemes like `mailto`,
         # `data` etc to work around exceptions from `url_parse`, which
         # handles RFC 3986 only.
-        if ':' in url and not url.lower().startswith('http'):
+        if ":" in url and not url.lower().startswith("http"):
             self.url = url
             return

@@ -386,13 +436,13 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
             raise InvalidURL(*e.args)

         if not scheme:
-            error = ("Invalid URL {0!r}: No scheme supplied. Perhaps you meant http://{0}?")
-            error = error.format(to_native_string(url, 'utf8'))
-
-            raise MissingSchema(error)
+            raise MissingSchema(
+                f"Invalid URL {url!r}: No scheme supplied. "
+                f"Perhaps you meant http://{url}?"
+            )

         if not host:
-            raise InvalidURL("Invalid URL %r: No host supplied" % url)
+            raise InvalidURL(f"Invalid URL {url!r}: No host supplied")

         # In general, we want to try IDNA encoding the hostname if the string contains
         # non-ASCII characters. This allows users to automatically get the correct IDNA
@@ -402,33 +452,21 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
             try:
                 host = self._get_idna_encoded_host(host)
             except UnicodeError:
-                raise InvalidURL('URL has an invalid label.')
-        elif host.startswith((u'*', u'.')):
-            raise InvalidURL('URL has an invalid label.')
+                raise InvalidURL("URL has an invalid label.")
+        elif host.startswith(("*", ".")):
+            raise InvalidURL("URL has an invalid label.")

         # Carefully reconstruct the network location
-        netloc = auth or ''
+        netloc = auth or ""
         if netloc:
-            netloc += '@'
+            netloc += "@"
         netloc += host
         if port:
-            netloc += ':' + str(port)
+            netloc += f":{port}"

         # Bare domains aren't valid URLs.
         if not path:
-            path = '/'
-
-        if is_py2:
-            if isinstance(scheme, str):
-                scheme = scheme.encode('utf-8')
-            if isinstance(netloc, str):
-                netloc = netloc.encode('utf-8')
-            if isinstance(path, str):
-                path = path.encode('utf-8')
-            if isinstance(query, str):
-                query = query.encode('utf-8')
-            if isinstance(fragment, str):
-                fragment = fragment.encode('utf-8')
+            path = "/"

         if isinstance(params, (str, bytes)):
             params = to_native_string(params)
@@ -436,7 +474,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
         enc_params = self._encode_params(params)
         if enc_params:
             if query:
-                query = '%s&%s' % (query, enc_params)
+                query = f"{query}&{enc_params}"
             else:
                 query = enc_params
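
The validation paths above, exercised directly (both URLs are deliberately malformed examples):

import requests

try:
    requests.get("example.com/path")  # no scheme
except requests.exceptions.MissingSchema as exc:
    print(exc)  # Invalid URL 'example.com/path': No scheme supplied. ...

try:
    requests.get("http://.example.com")  # host with a leading dot
except requests.exceptions.InvalidURL as exc:
    print(exc)  # URL has an invalid label.
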
@@ -467,7 +505,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
         if not data and json is not None:
             # urllib3 requires a bytes-like body. Python 2's json.dumps
             # provides this natively, but Python 3 gives a Unicode string.
-            content_type = 'application/json'
+            content_type = "application/json"

             try:
                 body = complexjson.dumps(json, allow_nan=False)
@@ -475,12 +513,14 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
                 raise InvalidJSONError(ve, request=self)

             if not isinstance(body, bytes):
-                body = body.encode('utf-8')
+                body = body.encode("utf-8")

-        is_stream = all([
-            hasattr(data, '__iter__'),
-            not isinstance(data, (basestring, list, tuple, Mapping))
-        ])
+        is_stream = all(
+            [
+                hasattr(data, "__iter__"),
+                not isinstance(data, (basestring, list, tuple, Mapping)),
+            ]
+        )

         if is_stream:
             try:
@@ -490,24 +530,26 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):

             body = data

-            if getattr(body, 'tell', None) is not None:
+            if getattr(body, "tell", None) is not None:
                 # Record the current file position before reading.
                 # This will allow us to rewind a file in the event
                 # of a redirect.
                 try:
                     self._body_position = body.tell()
-                except (IOError, OSError):
+                except OSError:
                     # This differentiates from None, allowing us to catch
                     # a failed `tell()` later when trying to rewind the body
                     self._body_position = object()

             if files:
-                raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
+                raise NotImplementedError(
+                    "Streamed bodies and files are mutually exclusive."
+                )

             if length:
-                self.headers['Content-Length'] = builtin_str(length)
+                self.headers["Content-Length"] = builtin_str(length)
             else:
-                self.headers['Transfer-Encoding'] = 'chunked'
+                self.headers["Transfer-Encoding"] = "chunked"
         else:
             # Multi-part file uploads.
             if files:
@@ -515,16 +557,16 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
             else:
                 if data:
                     body = self._encode_params(data)
-                    if isinstance(data, basestring) or hasattr(data, 'read'):
+                    if isinstance(data, basestring) or hasattr(data, "read"):
                         content_type = None
                     else:
-                        content_type = 'application/x-www-form-urlencoded'
+                        content_type = "application/x-www-form-urlencoded"

             self.prepare_content_length(body)

             # Add content-type if it wasn't explicitly provided.
-            if content_type and ('content-type' not in self.headers):
-                self.headers['Content-Type'] = content_type
+            if content_type and ("content-type" not in self.headers):
+                self.headers["Content-Type"] = content_type

         self.body = body

@@ -535,13 +577,16 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
         if length:
             # If length exists, set it. Otherwise, we fallback
             # to Transfer-Encoding: chunked.
-            self.headers['Content-Length'] = builtin_str(length)
-        elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None:
+            self.headers["Content-Length"] = builtin_str(length)
+        elif (
+            self.method not in ("GET", "HEAD")
+            and self.headers.get("Content-Length") is None
+        ):
             # Set Content-Length to 0 for methods that can have a body
             # but don't provide one. (i.e. not GET or HEAD)
-            self.headers['Content-Length'] = '0'
+            self.headers["Content-Length"] = "0"

-    def prepare_auth(self, auth, url=''):
+    def prepare_auth(self, auth, url=""):
         """Prepares the given HTTP auth data."""

         # If no Auth is explicitly provided, extract it from the URL first.
@@ -581,7 +626,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):

         cookie_header = get_cookie_header(self._cookies, self)
         if cookie_header is not None:
-            self.headers['Cookie'] = cookie_header
+            self.headers["Cookie"] = cookie_header

     def prepare_hooks(self, hooks):
         """Prepares the given hooks."""
@@ -593,14 +638,22 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
             self.register_hook(event, hooks[event])


-class Response(object):
+class Response:
     """The :class:`Response <Response>` object, which contains a
     server's response to an HTTP request.
     """

     __attrs__ = [
-        '_content', 'status_code', 'headers', 'url', 'history',
-        'encoding', 'reason', 'cookies', 'elapsed', 'request'
+        "_content",
+        "status_code",
+        "headers",
+        "url",
+        "history",
+        "encoding",
+        "reason",
+        "cookies",
+        "elapsed",
+        "request",
     ]

     def __init__(self):
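
As the hunks above show, `prepare_body` serializes `json=` with `allow_nan=False`, so a NaN surfaces as `InvalidJSONError` before anything is sent (URL illustrative):

import requests

try:
    requests.post("https://example.com", json={"x": float("nan")})
except requests.exceptions.InvalidJSONError as exc:
    print(type(exc).__name__, exc.request)
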
@@ -669,11 +722,11 @@ class Response(object):
             setattr(self, name, value)

         # pickled objects do not have .raw
-        setattr(self, '_content_consumed', True)
-        setattr(self, 'raw', None)
+        setattr(self, "_content_consumed", True)
+        setattr(self, "raw", None)

     def __repr__(self):
-        return '<Response [%s]>' % (self.status_code)
+        return f"<Response [{self.status_code}]>"

     def __bool__(self):
         """Returns True if :attr:`status_code` is less than 400.
@@ -719,12 +772,15 @@ class Response(object):
         """True if this Response is a well-formed HTTP redirect that could have
         been processed automatically (by :meth:`Session.resolve_redirects`).
         """
-        return ('location' in self.headers and self.status_code in REDIRECT_STATI)
+        return "location" in self.headers and self.status_code in REDIRECT_STATI

     @property
     def is_permanent_redirect(self):
         """True if this Response one of the permanent versions of redirect."""
-        return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
+        return "location" in self.headers and self.status_code in (
+            codes.moved_permanently,
+            codes.permanent_redirect,
+        )

     @property
     def next(self):
@@ -734,7 +790,7 @@ class Response(object):
     @property
     def apparent_encoding(self):
         """The apparent encoding, provided by the charset_normalizer or chardet libraries."""
-        return chardet.detect(self.content)['encoding']
+        return chardet.detect(self.content)["encoding"]

     def iter_content(self, chunk_size=1, decode_unicode=False):
         """Iterates over the response data. When stream=True is set on the
@@ -755,16 +811,17 @@ class Response(object):

         def generate():
             # Special case for urllib3.
-            if hasattr(self.raw, 'stream'):
+            if hasattr(self.raw, "stream"):
                 try:
-                    for chunk in self.raw.stream(chunk_size, decode_content=True):
-                        yield chunk
+                    yield from self.raw.stream(chunk_size, decode_content=True)
                 except ProtocolError as e:
                     raise ChunkedEncodingError(e)
                 except DecodeError as e:
                     raise ContentDecodingError(e)
                 except ReadTimeoutError as e:
                     raise ConnectionError(e)
+                except SSLError as e:
+                    raise RequestsSSLError(e)
             else:
                 # Standard file-like object.
                 while True:
@@ -778,7 +835,9 @@ class Response(object):
         if self._content_consumed and isinstance(self._content, bool):
             raise StreamConsumedError()
         elif chunk_size is not None and not isinstance(chunk_size, int):
-            raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size))
+            raise TypeError(
+                f"chunk_size must be an int, it is instead a {type(chunk_size)}."
+            )
         # simulate reading small chunks of the content
         reused_chunks = iter_slices(self._content, chunk_size)

@@ -791,7 +850,9 @@ class Response(object):

         return chunks

-    def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None):
+    def iter_lines(
+        self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None
+    ):
         """Iterates over the response data, one line at a time. When
         stream=True is set on the request, this avoids reading the
         content at once into memory for large responses.
@@ -801,7 +862,9 @@ class Response(object):

         pending = None

-        for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
+        for chunk in self.iter_content(
+            chunk_size=chunk_size, decode_unicode=decode_unicode
+        ):

             if pending is not None:
                 chunk = pending + chunk
@@ -816,8 +879,7 @@ class Response(object):
             else:
                 pending = None

-            for line in lines:
-                yield line
+            yield from lines

         if pending is not None:
             yield pending
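
Typical streaming use of the iterators reformatted above (URL illustrative; decode_unicode assumes the server declares an encoding):

import requests

with requests.get("https://example.com", stream=True) as r:
    for line in r.iter_lines(chunk_size=512, decode_unicode=True):
        if line:
            print(line)
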
@@ -829,13 +891,12 @@ class Response(object):
         if self._content is False:
             # Read the contents.
             if self._content_consumed:
-                raise RuntimeError(
-                    'The content for this response was already consumed')
+                raise RuntimeError("The content for this response was already consumed")

             if self.status_code == 0 or self.raw is None:
                 self._content = None
             else:
-                self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b''
+                self._content = b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b""

         self._content_consumed = True
         # don't need to release the connection; that's been handled by urllib3
@@ -860,7 +921,7 @@ class Response(object):
         encoding = self.encoding

         if not self.content:
-            return str('')
+            return ""

         # Fallback to auto-detected encoding.
         if self.encoding is None:
@@ -868,7 +929,7 @@ class Response(object):

         # Decode unicode from given encoding.
         try:
-            content = str(self.content, encoding, errors='replace')
+            content = str(self.content, encoding, errors="replace")
         except (LookupError, TypeError):
             # A LookupError is raised if the encoding was not found which could
             # indicate a misspelling or similar mistake.
@@ -876,7 +937,7 @@ class Response(object):
             # A TypeError can be raised if encoding is None
             #
             # So we try blindly encoding.
-            content = str(self.content, errors='replace')
+            content = str(self.content, errors="replace")

         return content

@@ -896,65 +957,65 @@ class Response(object):
         encoding = guess_json_utf(self.content)
         if encoding is not None:
             try:
-                return complexjson.loads(
-                    self.content.decode(encoding), **kwargs
-                )
+                return complexjson.loads(self.content.decode(encoding), **kwargs)
             except UnicodeDecodeError:
                 # Wrong UTF codec detected; usually because it's not UTF-8
                 # but some other 8-bit codec. This is an RFC violation,
                 # and the server didn't bother to tell us what codec *was*
                 # used.
                 pass
+            except JSONDecodeError as e:
+                raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)

         try:
             return complexjson.loads(self.text, **kwargs)
         except JSONDecodeError as e:
             # Catch JSON-related errors and raise as requests.JSONDecodeError
             # This aliases json.JSONDecodeError and simplejson.JSONDecodeError
-            if is_py2:  # e is a ValueError
-                raise RequestsJSONDecodeError(e.message)
-            else:
-                raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)
+            raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)

     @property
     def links(self):
         """Returns the parsed header links of the response, if any."""

-        header = self.headers.get('link')
+        header = self.headers.get("link")

-        # l = MultiDict()
-        l = {}
+        resolved_links = {}

         if header:
             links = parse_header_links(header)

             for link in links:
-                key = link.get('rel') or link.get('url')
-                l[key] = link
+                key = link.get("rel") or link.get("url")
+                resolved_links[key] = link

-        return l
+        return resolved_links

     def raise_for_status(self):
         """Raises :class:`HTTPError`, if one occurred."""

-        http_error_msg = ''
+        http_error_msg = ""
         if isinstance(self.reason, bytes):
             # We attempt to decode utf-8 first because some servers
             # choose to localize their reason strings. If the string
             # isn't utf-8, we fall back to iso-8859-1 for all other
             # encodings. (See PR #3538)
             try:
-                reason = self.reason.decode('utf-8')
+                reason = self.reason.decode("utf-8")
             except UnicodeDecodeError:
-                reason = self.reason.decode('iso-8859-1')
+                reason = self.reason.decode("iso-8859-1")
         else:
             reason = self.reason

         if 400 <= self.status_code < 500:
-            http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url)
+            http_error_msg = (
+                f"{self.status_code} Client Error: {reason} for url: {self.url}"
+            )

         elif 500 <= self.status_code < 600:
-            http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url)
+            http_error_msg = (
+                f"{self.status_code} Server Error: {reason} for url: {self.url}"
+            )

         if http_error_msg:
             raise HTTPError(http_error_msg, response=self)
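
The `links` property (local `l` renamed to `resolved_links` above) keys parsed Link headers by `rel`; a sketch against a paginated API that sends Link headers (the GitHub URL is only an example of such a service):

import requests

r = requests.get("https://api.github.com/repos/psf/requests/issues")
if "next" in r.links:
    print(r.links["next"]["url"])
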
@@ -968,6 +1029,6 @@ class Response(object):
         if not self._content_consumed:
             self.raw.close()

-        release_conn = getattr(self.raw, 'release_conn', None)
+        release_conn = getattr(self.raw, "release_conn", None)
         if release_conn is not None:
             release_conn()
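
And `raise_for_status` with the new f-string messages; a sketch, assuming some endpoint that returns a 4xx (substitute any URL that errors):

import requests

r = requests.get("https://example.com/missing")
try:
    r.raise_for_status()
except requests.exceptions.HTTPError as exc:
    print(exc)                 # e.g. "404 Client Error: Not Found for url: ..."
    print(exc.response is r)   # True
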
@@ -3,24 +3,26 @@ import sys
 try:
     import chardet
 except ImportError:
-    import charset_normalizer as chardet
     import warnings

-    warnings.filterwarnings('ignore', 'Trying to detect', module='charset_normalizer')
+    import charset_normalizer as chardet
+
+    warnings.filterwarnings("ignore", "Trying to detect", module="charset_normalizer")

 # This code exists for backwards compatibility reasons.
 # I don't like it either. Just look the other way. :)

-for package in ('urllib3', 'idna'):
+for package in ("urllib3", "idna"):
     locals()[package] = __import__(package)
     # This traversal is apparently necessary such that the identities are
     # preserved (requests.packages.urllib3.* is urllib3.*)
     for mod in list(sys.modules):
-        if mod == package or mod.startswith(package + '.'):
-            sys.modules['requests.packages.' + mod] = sys.modules[mod]
+        if mod == package or mod.startswith(f"{package}."):
+            sys.modules[f"requests.packages.{mod}"] = sys.modules[mod]

 target = chardet.__name__
 for mod in list(sys.modules):
-    if mod == target or mod.startswith(target + '.'):
-        sys.modules['requests.packages.' + target.replace(target, 'chardet')] = sys.modules[mod]
+    if mod == target or mod.startswith(f"{target}."):
+        target = target.replace(target, "chardet")
+        sys.modules[f"requests.packages.{target}"] = sys.modules[mod]
 # Kinda cool, though, right?
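
The shim's observable effect, exactly as the comment above says (requests.packages.urllib3.* is urllib3.*):

import urllib3

import requests.packages

assert requests.packages.urllib3 is urllib3
assert requests.packages.urllib3.exceptions is urllib3.exceptions
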
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 """
 requests.sessions
 ~~~~~~~~~~~~~~~~~
@@ -10,39 +8,52 @@ requests (cookies, auth, proxies).
 import os
 import sys
 import time
-from datetime import timedelta
 from collections import OrderedDict
+from datetime import timedelta

-from .auth import _basic_auth_str
-from .compat import cookielib, is_py3, urljoin, urlparse, Mapping
-from .cookies import (
-    cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
-from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
-from .hooks import default_hooks, dispatch_hook
 from ._internal_utils import to_native_string
-from .utils import to_key_val_list, default_headers, DEFAULT_PORTS
-from .exceptions import (
-    TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)
-
-from .structures import CaseInsensitiveDict
 from .adapters import HTTPAdapter
-
-from .utils import (
-    requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
-    get_auth_from_url, rewind_body, resolve_proxies
-)
-
-from .status_codes import codes
+from .auth import _basic_auth_str
+from .compat import Mapping, cookielib, urljoin, urlparse
+from .cookies import (
+    RequestsCookieJar,
+    cookiejar_from_dict,
+    extract_cookies_to_jar,
+    merge_cookies,
+)
+from .exceptions import (
+    ChunkedEncodingError,
+    ContentDecodingError,
+    InvalidSchema,
+    TooManyRedirects,
+)
+from .hooks import default_hooks, dispatch_hook

 # formerly defined here, reexposed here for backward compatibility
-from .models import REDIRECT_STATI
+from .models import (  # noqa: F401
+    DEFAULT_REDIRECT_LIMIT,
+    REDIRECT_STATI,
+    PreparedRequest,
+    Request,
+)
+from .status_codes import codes
+from .structures import CaseInsensitiveDict
+from .utils import (  # noqa: F401
+    DEFAULT_PORTS,
+    default_headers,
+    get_auth_from_url,
+    get_environ_proxies,
+    get_netrc_auth,
+    requote_uri,
+    resolve_proxies,
+    rewind_body,
+    should_bypass_proxies,
+    to_key_val_list,
+)

 # Preferred clock, based on which one is more accurate on a given system.
-if sys.platform == 'win32':
-    try:  # Python 3.4+
-        preferred_clock = time.perf_counter
-    except AttributeError:  # Earlier than Python 3.
-        preferred_clock = time.clock
+if sys.platform == "win32":
+    preferred_clock = time.perf_counter
 else:
     preferred_clock = time.time
@@ -61,8 +72,7 @@ def merge_setting(request_setting, session_setting, dict_class=OrderedDict):

     # Bypass if not a dictionary (e.g. verify)
     if not (
-        isinstance(session_setting, Mapping) and
-        isinstance(request_setting, Mapping)
+        isinstance(session_setting, Mapping) and isinstance(request_setting, Mapping)
     ):
         return request_setting

@@ -84,17 +94,16 @@ def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
     This is necessary because when request_hooks == {'response': []}, the
     merge breaks Session hooks entirely.
     """
-    if session_hooks is None or session_hooks.get('response') == []:
+    if session_hooks is None or session_hooks.get("response") == []:
         return request_hooks

-    if request_hooks is None or request_hooks.get('response') == []:
+    if request_hooks is None or request_hooks.get("response") == []:
         return session_hooks

     return merge_setting(request_hooks, session_hooks, dict_class)


-class SessionRedirectMixin(object):
-
+class SessionRedirectMixin:
     def get_redirect_target(self, resp):
         """Receives a Response. Returns a redirect URI or ``None``"""
         # Due to the nature of how requests processes redirects this method will
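
How the merge helpers above behave: request-level values win, and a `None` removes a session key (the header values are invented):

from requests.sessions import merge_setting

merged = merge_setting(
    {"Accept": "application/json", "X-Drop": None},    # request-level
    {"Accept": "*/*", "User-Agent": "example-agent"},  # session-level
)
assert merged["Accept"] == "application/json"
assert "X-Drop" not in merged
assert merged["User-Agent"] == "example-agent"
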
@@ -104,16 +113,15 @@ class SessionRedirectMixin(object):
         # to cache the redirect location onto the response object as a private
         # attribute.
         if resp.is_redirect:
-            location = resp.headers['location']
+            location = resp.headers["location"]
             # Currently the underlying http module on py3 decode headers
             # in latin1, but empirical evidence suggests that latin1 is very
             # rarely used with non-ASCII characters in HTTP headers.
             # It is more likely to get UTF8 header rather than latin1.
             # This causes incorrect handling of UTF8 encoded location headers.
             # To solve this, we re-encode the location in latin1.
-            if is_py3:
-                location = location.encode('latin1')
-            return to_native_string(location, 'utf8')
+            location = location.encode("latin1")
+            return to_native_string(location, "utf8")
         return None

     def should_strip_auth(self, old_url, new_url):
@@ -126,23 +134,40 @@ class SessionRedirectMixin(object):
         # ports. This isn't specified by RFC 7235, but is kept to avoid
         # breaking backwards compatibility with older versions of requests
         # that allowed any redirects on the same host.
-        if (old_parsed.scheme == 'http' and old_parsed.port in (80, None)
-                and new_parsed.scheme == 'https' and new_parsed.port in (443, None)):
+        if (
+            old_parsed.scheme == "http"
+            and old_parsed.port in (80, None)
+            and new_parsed.scheme == "https"
+            and new_parsed.port in (443, None)
+        ):
             return False

         # Handle default port usage corresponding to scheme.
         changed_port = old_parsed.port != new_parsed.port
         changed_scheme = old_parsed.scheme != new_parsed.scheme
         default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None)
-        if (not changed_scheme and old_parsed.port in default_port
-                and new_parsed.port in default_port):
+        if (
+            not changed_scheme
+            and old_parsed.port in default_port
+            and new_parsed.port in default_port
+        ):
             return False

         # Standard case: root URI must match
         return changed_port or changed_scheme

-    def resolve_redirects(self, resp, req, stream=False, timeout=None,
-                          verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs):
+    def resolve_redirects(
+        self,
+        resp,
+        req,
+        stream=False,
+        timeout=None,
+        verify=True,
+        cert=None,
+        proxies=None,
+        yield_requests=False,
+        **adapter_kwargs,
+    ):
         """Receives a Response. Returns a generator of Responses or Requests."""

         hist = []  # keep track of history
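
Spot-checking the should_strip_auth rules above (URLs invented):

from requests import Session

s = Session()
# Upgrading http -> https on the same host keeps the Authorization header:
assert not s.should_strip_auth("http://example.com/a", "https://example.com/b")
# Any hostname change strips it:
assert s.should_strip_auth("https://example.com/a", "https://other.example/b")
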
@@ -163,19 +188,21 @@ class SessionRedirectMixin(object):
                 resp.raw.read(decode_content=False)

             if len(resp.history) >= self.max_redirects:
-                raise TooManyRedirects('Exceeded {} redirects.'.format(self.max_redirects), response=resp)
+                raise TooManyRedirects(
+                    f"Exceeded {self.max_redirects} redirects.", response=resp
+                )

             # Release the connection back into the pool.
             resp.close()

             # Handle redirection without scheme (see: RFC 1808 Section 4)
-            if url.startswith('//'):
+            if url.startswith("//"):
                 parsed_rurl = urlparse(resp.url)
-                url = ':'.join([to_native_string(parsed_rurl.scheme), url])
+                url = ":".join([to_native_string(parsed_rurl.scheme), url])

             # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2)
             parsed = urlparse(url)
-            if parsed.fragment == '' and previous_fragment:
+            if parsed.fragment == "" and previous_fragment:
                 parsed = parsed._replace(fragment=previous_fragment)
             elif parsed.fragment:
                 previous_fragment = parsed.fragment
@@ -194,15 +221,18 @@ class SessionRedirectMixin(object):
             self.rebuild_method(prepared_request, resp)

             # https://github.com/psf/requests/issues/1084
-            if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):
+            if resp.status_code not in (
+                codes.temporary_redirect,
+                codes.permanent_redirect,
+            ):
                 # https://github.com/psf/requests/issues/3490
-                purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding')
+                purged_headers = ("Content-Length", "Content-Type", "Transfer-Encoding")
                 for header in purged_headers:
                     prepared_request.headers.pop(header, None)
                 prepared_request.body = None

             headers = prepared_request.headers
-            headers.pop('Cookie', None)
+            headers.pop("Cookie", None)

             # Extract any cookies sent on the response to the cookiejar
             # in the new request. Because we've mutated our copied prepared
@@ -218,9 +248,8 @@ class SessionRedirectMixin(object):
             # A failed tell() sets `_body_position` to `object()`. This non-None
             # value ensures `rewindable` will be True, allowing us to raise an
             # UnrewindableBodyError, instead of hanging the connection.
-            rewindable = (
-                prepared_request._body_position is not None and
-                ('Content-Length' in headers or 'Transfer-Encoding' in headers)
+            rewindable = prepared_request._body_position is not None and (
+                "Content-Length" in headers or "Transfer-Encoding" in headers
             )

             # Attempt to rewind consumed file-like object.
@@ -242,7 +271,7 @@ class SessionRedirectMixin(object):
                     cert=cert,
                     proxies=proxies,
                     allow_redirects=False,
-                    **adapter_kwargs
+                    **adapter_kwargs,
                 )

                 extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)
@@ -259,10 +288,12 @@ class SessionRedirectMixin(object):
         headers = prepared_request.headers
         url = prepared_request.url

-        if 'Authorization' in headers and self.should_strip_auth(response.request.url, url):
+        if "Authorization" in headers and self.should_strip_auth(
+            response.request.url, url
+        ):
             # If we get redirected to a new host, we should strip out any
             # authentication headers.
-            del headers['Authorization']
+            del headers["Authorization"]

         # .netrc might have more auth for us on our new host.
         new_auth = get_netrc_auth(url) if self.trust_env else None
@@ -285,8 +316,8 @@ class SessionRedirectMixin(object):
         scheme = urlparse(prepared_request.url).scheme
         new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env)

-        if 'Proxy-Authorization' in headers:
-            del headers['Proxy-Authorization']
+        if "Proxy-Authorization" in headers:
+            del headers["Proxy-Authorization"]

         try:
             username, password = get_auth_from_url(new_proxies[scheme])
@@ -294,7 +325,7 @@ class SessionRedirectMixin(object):
             username, password = None, None

         if username and password:
-            headers['Proxy-Authorization'] = _basic_auth_str(username, password)
+            headers["Proxy-Authorization"] = _basic_auth_str(username, password)

         return new_proxies

@@ -305,18 +336,18 @@ class SessionRedirectMixin(object):
         method = prepared_request.method

         # https://tools.ietf.org/html/rfc7231#section-6.4.4
-        if response.status_code == codes.see_other and method != 'HEAD':
-            method = 'GET'
+        if response.status_code == codes.see_other and method != "HEAD":
+            method = "GET"

         # Do what the browsers do, despite standards...
         # First, turn 302s into GETs.
-        if response.status_code == codes.found and method != 'HEAD':
-            method = 'GET'
+        if response.status_code == codes.found and method != "HEAD":
+            method = "GET"

         # Second, if a POST is responded to with a 301, turn it into a GET.
         # This bizarre behaviour is explained in Issue 1704.
-        if response.status_code == codes.moved and method == 'POST':
-            method = 'GET'
+        if response.status_code == codes.moved and method == "POST":
+            method = "GET"

         prepared_request.method = method
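
`rebuild_method` can be exercised directly; only `status_code` is read from the response here, so a bare Response works as a stand-in:

from requests import PreparedRequest, Response
from requests.sessions import SessionRedirectMixin

prep = PreparedRequest()
prep.method = "POST"
resp = Response()
resp.status_code = 303  # See Other

SessionRedirectMixin().rebuild_method(prep, resp)
print(prep.method)  # GET
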
@@ -341,9 +372,18 @@ class Session(SessionRedirectMixin):
     """

     __attrs__ = [
-        'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',
-        'cert', 'adapters', 'stream', 'trust_env',
-        'max_redirects',
+        "headers",
+        "cookies",
+        "auth",
+        "proxies",
+        "hooks",
+        "params",
+        "verify",
+        "cert",
+        "adapters",
+        "stream",
+        "trust_env",
+        "max_redirects",
     ]

     def __init__(self):
@@ -405,8 +445,8 @@ class Session(SessionRedirectMixin):

         # Default connection adapters.
         self.adapters = OrderedDict()
-        self.mount('https://', HTTPAdapter())
-        self.mount('http://', HTTPAdapter())
+        self.mount("https://", HTTPAdapter())
+        self.mount("http://", HTTPAdapter())

     def __enter__(self):
         return self
@@ -432,7 +472,8 @@ class Session(SessionRedirectMixin):

         # Merge with session cookies
         merged_cookies = merge_cookies(
-            merge_cookies(RequestsCookieJar(), self.cookies), cookies)
+            merge_cookies(RequestsCookieJar(), self.cookies), cookies
+        )

         # Set environment's basic authentication if not explicitly set.
         auth = request.auth
@@ -446,7 +487,9 @@ class Session(SessionRedirectMixin):
             files=request.files,
             data=request.data,
             json=request.json,
-            headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
+            headers=merge_setting(
+                request.headers, self.headers, dict_class=CaseInsensitiveDict
+            ),
             params=merge_setting(request.params, self.params),
             auth=merge_setting(auth, self.auth),
             cookies=merged_cookies,
@@ -454,10 +497,25 @@ class Session(SessionRedirectMixin):
         )
         return p

-    def request(self, method, url,
-            params=None, data=None, headers=None, cookies=None, files=None,
-            auth=None, timeout=None, allow_redirects=True, proxies=None,
-            hooks=None, stream=None, verify=None, cert=None, json=None):
+    def request(
+        self,
+        method,
+        url,
+        params=None,
+        data=None,
+        headers=None,
+        cookies=None,
+        files=None,
+        auth=None,
+        timeout=None,
+        allow_redirects=True,
+        proxies=None,
+        hooks=None,
+        stream=None,
+        verify=None,
+        cert=None,
+        json=None,
+    ):
         """Constructs a :class:`Request <Request>`, prepares it and sends it.
         Returns :class:`Response <Response>` object.
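
Session behavior is unchanged by the reflow; a sketch of the defaults worth remembering (header value invented, no network needed):

import requests

with requests.Session() as s:                  # __enter__/__exit__ close adapters
    print(sorted(s.adapters))                  # ['http://', 'https://']
    s.headers["User-Agent"] = "example-agent"
    req = requests.Request("GET", "https://example.com")
    prep = s.prepare_request(req)              # merges session headers/cookies
    print(prep.headers["User-Agent"])          # example-agent
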
@@ -493,7 +551,7 @@ class Session(SessionRedirectMixin):
             ``False``, requests will accept any TLS certificate presented by
             the server, and will ignore hostname mismatches and/or expired
             certificates, which will make your application vulnerable to
-            man-in-the-middle (MitM) attacks. Setting verify to ``False`` 
+            man-in-the-middle (MitM) attacks. Setting verify to ``False``
             may be useful during local development or testing.
         :param cert: (optional) if String, path to ssl client cert file (.pem).
             If Tuple, ('cert', 'key') pair.
@@ -522,8 +580,8 @@ class Session(SessionRedirectMixin):

         # Send the request.
         send_kwargs = {
-            'timeout': timeout,
-            'allow_redirects': allow_redirects,
+            "timeout": timeout,
+            "allow_redirects": allow_redirects,
         }
         send_kwargs.update(settings)
         resp = self.send(prep, **send_kwargs)
@@ -538,8 +596,8 @@ class Session(SessionRedirectMixin):
         :rtype: requests.Response
         """

-        kwargs.setdefault('allow_redirects', True)
-        return self.request('GET', url, **kwargs)
+        kwargs.setdefault("allow_redirects", True)
+        return self.request("GET", url, **kwargs)

     def options(self, url, **kwargs):
         r"""Sends a OPTIONS request. Returns :class:`Response` object.
@@ -549,8 +607,8 @@ class Session(SessionRedirectMixin):
         :rtype: requests.Response
         """

-        kwargs.setdefault('allow_redirects', True)
-        return self.request('OPTIONS', url, **kwargs)
+        kwargs.setdefault("allow_redirects", True)
+        return self.request("OPTIONS", url, **kwargs)

     def head(self, url, **kwargs):
         r"""Sends a HEAD request. Returns :class:`Response` object.
@@ -560,8 +618,8 @@ class Session(SessionRedirectMixin):
         :rtype: requests.Response
         """

-        kwargs.setdefault('allow_redirects', False)
-        return self.request('HEAD', url, **kwargs)
+        kwargs.setdefault("allow_redirects", False)
+        return self.request("HEAD", url, **kwargs)

     def post(self, url, data=None, json=None, **kwargs):
         r"""Sends a POST request. Returns :class:`Response` object.
@@ -574,7 +632,7 @@ class Session(SessionRedirectMixin):
         :rtype: requests.Response
         """

-        return self.request('POST', url, data=data, json=json, **kwargs)
+        return self.request("POST", url, data=data, json=json, **kwargs)

     def put(self, url, data=None, **kwargs):
         r"""Sends a PUT request. Returns :class:`Response` object.
@@ -586,7 +644,7 @@ class Session(SessionRedirectMixin):
         :rtype: requests.Response
         """

-        return self.request('PUT', url, data=data, **kwargs)
+        return self.request("PUT", url, data=data, **kwargs)

     def patch(self, url, data=None, **kwargs):
         r"""Sends a PATCH request. Returns :class:`Response` object.
@@ -598,7 +656,7 @@ class Session(SessionRedirectMixin):
         :rtype: requests.Response
         """

-        return self.request('PATCH', url, data=data, **kwargs)
+        return self.request("PATCH", url, data=data, **kwargs)

     def delete(self, url, **kwargs):
         r"""Sends a DELETE request. Returns :class:`Response` object.
@@ -608,7 +666,7 @@ class Session(SessionRedirectMixin):
         :rtype: requests.Response
         """

-        return self.request('DELETE', url, **kwargs)
+        return self.request("DELETE", url, **kwargs)

     def send(self, request, **kwargs):
         """Send a given PreparedRequest.
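
The verb helpers stay thin wrappers over `Session.request`; note the differing `allow_redirects` defaults (URL illustrative):

import requests

s = requests.Session()
r = s.get("https://example.com", timeout=10)   # follows redirects by default
h = s.head("https://example.com", timeout=10)  # HEAD defaults allow_redirects=False
print(r.status_code, h.status_code)
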
@ -617,22 +675,20 @@ class Session(SessionRedirectMixin):
|
|||
"""
|
||||
# Set defaults that the hooks can utilize to ensure they always have
|
||||
# the correct parameters to reproduce the previous request.
|
||||
kwargs.setdefault('stream', self.stream)
|
||||
kwargs.setdefault('verify', self.verify)
|
||||
kwargs.setdefault('cert', self.cert)
|
||||
if 'proxies' not in kwargs:
|
||||
kwargs['proxies'] = resolve_proxies(
|
||||
request, self.proxies, self.trust_env
|
||||
)
|
||||
kwargs.setdefault("stream", self.stream)
|
||||
kwargs.setdefault("verify", self.verify)
|
||||
kwargs.setdefault("cert", self.cert)
|
||||
if "proxies" not in kwargs:
|
||||
kwargs["proxies"] = resolve_proxies(request, self.proxies, self.trust_env)
|
||||
|
||||
# It's possible that users might accidentally send a Request object.
|
||||
# Guard against that specific failure case.
|
||||
if isinstance(request, Request):
|
||||
raise ValueError('You can only send PreparedRequests.')
|
||||
raise ValueError("You can only send PreparedRequests.")
|
||||
|
||||
# Set up variables needed for resolve_redirects and dispatching of hooks
|
||||
allow_redirects = kwargs.pop('allow_redirects', True)
|
||||
stream = kwargs.get('stream')
|
||||
allow_redirects = kwargs.pop("allow_redirects", True)
|
||||
stream = kwargs.get("stream")
|
||||
hooks = request.hooks
|
||||
|
||||
# Get the appropriate adapter to use
|
||||
|
@ -649,7 +705,7 @@ class Session(SessionRedirectMixin):
|
|||
r.elapsed = timedelta(seconds=elapsed)
|
||||
|
||||
# Response manipulation hooks
|
||||
r = dispatch_hook('response', hooks, r, **kwargs)
|
||||
r = dispatch_hook("response", hooks, r, **kwargs)
|
||||
|
||||
# Persist cookies
|
||||
if r.history:
|
||||
|
@ -679,7 +735,9 @@ class Session(SessionRedirectMixin):
|
|||
# If redirects aren't being followed, store the response on the Request for Response.next().
|
||||
if not allow_redirects:
|
||||
try:
|
||||
r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs))
|
||||
r._next = next(
|
||||
self.resolve_redirects(r, request, yield_requests=True, **kwargs)
|
||||
)
|
||||
except StopIteration:
|
||||
pass
|
||||
|
||||
|
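When allow_redirects is False, the hunk above parks the would-be follow-up request on the response instead of sending it. A small sketch (httpbin.org again as a stand-in redirecting endpoint) of walking that chain by hand through the public Response.next property:

    import requests

    r = requests.get("https://httpbin.org/redirect/1", allow_redirects=False)
    assert r.is_redirect
    nxt = r.next  # the PreparedRequest produced by resolve_redirects, never sent
    print(nxt.method, nxt.url)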
@@ -697,16 +755,19 @@ class Session(SessionRedirectMixin):
         # Gather clues from the surrounding environment.
         if self.trust_env:
             # Set environment's proxies.
-            no_proxy = proxies.get('no_proxy') if proxies is not None else None
+            no_proxy = proxies.get("no_proxy") if proxies is not None else None
             env_proxies = get_environ_proxies(url, no_proxy=no_proxy)
             for (k, v) in env_proxies.items():
                 proxies.setdefault(k, v)
 
-            # Look for requests environment configuration and be compatible
-            # with cURL.
+            # Look for requests environment configuration
+            # and be compatible with cURL.
             if verify is True or verify is None:
-                verify = (os.environ.get('REQUESTS_CA_BUNDLE') or
-                          os.environ.get('CURL_CA_BUNDLE'))
+                verify = (
+                    os.environ.get("REQUESTS_CA_BUNDLE")
+                    or os.environ.get("CURL_CA_BUNDLE")
+                    or verify
+                )
 
         # Merge all the kwargs.
         proxies = merge_setting(proxies, self.proxies)
@@ -714,8 +775,7 @@ class Session(SessionRedirectMixin):
         verify = merge_setting(verify, self.verify)
         cert = merge_setting(cert, self.cert)
 
-        return {'verify': verify, 'proxies': proxies, 'stream': stream,
-                'cert': cert}
+        return {"proxies": proxies, "stream": stream, "verify": verify, "cert": cert}
 
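The new `or verify` tail means an explicit CA-bundle setting no longer gets clobbered to None when neither environment variable is set. A rough illustration of the precedence (environment first, then the passed-in value), using an example bundle path:

    import os
    import requests

    os.environ["REQUESTS_CA_BUNDLE"] = "/etc/ssl/certs/ca-certificates.crt"  # example path
    s = requests.Session()  # trust_env=True by default
    settings = s.merge_environment_settings(
        "https://example.com", proxies={}, stream=None, verify=True, cert=None
    )
    assert settings["verify"] == "/etc/ssl/certs/ca-certificates.crt"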
     def get_adapter(self, url):
         """
@@ -729,7 +789,7 @@ class Session(SessionRedirectMixin):
                 return adapter
 
         # Nothing matches :-/
-        raise InvalidSchema("No connection adapters were found for {!r}".format(url))
+        raise InvalidSchema(f"No connection adapters were found for {url!r}")
 
     def close(self):
         """Closes all adapters and as such the session"""
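get_adapter() scans the mounted adapters for the longest matching URL prefix. A quick sketch of mounting a retry-enabled adapter and looking it up (api.example.com is a hypothetical host):

    import requests
    from requests.adapters import HTTPAdapter

    s = requests.Session()
    adapter = HTTPAdapter(max_retries=3)
    s.mount("https://api.example.com", adapter)
    assert s.get_adapter("https://api.example.com/v1/users") is adapter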
lib/requests/status_codes.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 r"""
 The ``codes`` object defines a mapping from common names for HTTP statuses
 to their numerical codes, accessible either as attributes or as dictionary
@@ -23,101 +21,108 @@ the names are allowed. For example, ``codes.ok``, ``codes.OK``, and
 from .structures import LookupDict
 
 _codes = {
-
     # Informational.
-    100: ('continue',),
-    101: ('switching_protocols',),
-    102: ('processing',),
-    103: ('checkpoint',),
-    122: ('uri_too_long', 'request_uri_too_long'),
-    200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'),
-    201: ('created',),
-    202: ('accepted',),
-    203: ('non_authoritative_info', 'non_authoritative_information'),
-    204: ('no_content',),
-    205: ('reset_content', 'reset'),
-    206: ('partial_content', 'partial'),
-    207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
-    208: ('already_reported',),
-    226: ('im_used',),
-
+    100: ("continue",),
+    101: ("switching_protocols",),
+    102: ("processing",),
+    103: ("checkpoint",),
+    122: ("uri_too_long", "request_uri_too_long"),
+    200: ("ok", "okay", "all_ok", "all_okay", "all_good", "\\o/", "✓"),
+    201: ("created",),
+    202: ("accepted",),
+    203: ("non_authoritative_info", "non_authoritative_information"),
+    204: ("no_content",),
+    205: ("reset_content", "reset"),
+    206: ("partial_content", "partial"),
+    207: ("multi_status", "multiple_status", "multi_stati", "multiple_stati"),
+    208: ("already_reported",),
+    226: ("im_used",),
     # Redirection.
-    300: ('multiple_choices',),
-    301: ('moved_permanently', 'moved', '\\o-'),
-    302: ('found',),
-    303: ('see_other', 'other'),
-    304: ('not_modified',),
-    305: ('use_proxy',),
-    306: ('switch_proxy',),
-    307: ('temporary_redirect', 'temporary_moved', 'temporary'),
-    308: ('permanent_redirect',
-          'resume_incomplete', 'resume',),  # These 2 to be removed in 3.0
-
+    300: ("multiple_choices",),
+    301: ("moved_permanently", "moved", "\\o-"),
+    302: ("found",),
+    303: ("see_other", "other"),
+    304: ("not_modified",),
+    305: ("use_proxy",),
+    306: ("switch_proxy",),
+    307: ("temporary_redirect", "temporary_moved", "temporary"),
+    308: (
+        "permanent_redirect",
+        "resume_incomplete",
+        "resume",
+    ),  # "resume" and "resume_incomplete" to be removed in 3.0
     # Client Error.
-    400: ('bad_request', 'bad'),
-    401: ('unauthorized',),
-    402: ('payment_required', 'payment'),
-    403: ('forbidden',),
-    404: ('not_found', '-o-'),
-    405: ('method_not_allowed', 'not_allowed'),
-    406: ('not_acceptable',),
-    407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
-    408: ('request_timeout', 'timeout'),
-    409: ('conflict',),
-    410: ('gone',),
-    411: ('length_required',),
-    412: ('precondition_failed', 'precondition'),
-    413: ('request_entity_too_large',),
-    414: ('request_uri_too_large',),
-    415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
-    416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
-    417: ('expectation_failed',),
-    418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
-    421: ('misdirected_request',),
-    422: ('unprocessable_entity', 'unprocessable'),
-    423: ('locked',),
-    424: ('failed_dependency', 'dependency'),
-    425: ('unordered_collection', 'unordered'),
-    426: ('upgrade_required', 'upgrade'),
-    428: ('precondition_required', 'precondition'),
-    429: ('too_many_requests', 'too_many'),
-    431: ('header_fields_too_large', 'fields_too_large'),
-    444: ('no_response', 'none'),
-    449: ('retry_with', 'retry'),
-    450: ('blocked_by_windows_parental_controls', 'parental_controls'),
-    451: ('unavailable_for_legal_reasons', 'legal_reasons'),
-    499: ('client_closed_request',),
-
+    400: ("bad_request", "bad"),
+    401: ("unauthorized",),
+    402: ("payment_required", "payment"),
+    403: ("forbidden",),
+    404: ("not_found", "-o-"),
+    405: ("method_not_allowed", "not_allowed"),
+    406: ("not_acceptable",),
+    407: ("proxy_authentication_required", "proxy_auth", "proxy_authentication"),
+    408: ("request_timeout", "timeout"),
+    409: ("conflict",),
+    410: ("gone",),
+    411: ("length_required",),
+    412: ("precondition_failed", "precondition"),
+    413: ("request_entity_too_large",),
+    414: ("request_uri_too_large",),
+    415: ("unsupported_media_type", "unsupported_media", "media_type"),
+    416: (
+        "requested_range_not_satisfiable",
+        "requested_range",
+        "range_not_satisfiable",
+    ),
+    417: ("expectation_failed",),
+    418: ("im_a_teapot", "teapot", "i_am_a_teapot"),
+    421: ("misdirected_request",),
+    422: ("unprocessable_entity", "unprocessable"),
+    423: ("locked",),
+    424: ("failed_dependency", "dependency"),
+    425: ("unordered_collection", "unordered"),
+    426: ("upgrade_required", "upgrade"),
+    428: ("precondition_required", "precondition"),
+    429: ("too_many_requests", "too_many"),
+    431: ("header_fields_too_large", "fields_too_large"),
+    444: ("no_response", "none"),
+    449: ("retry_with", "retry"),
+    450: ("blocked_by_windows_parental_controls", "parental_controls"),
+    451: ("unavailable_for_legal_reasons", "legal_reasons"),
+    499: ("client_closed_request",),
    # Server Error.
-    500: ('internal_server_error', 'server_error', '/o\\', '✗'),
-    501: ('not_implemented',),
-    502: ('bad_gateway',),
-    503: ('service_unavailable', 'unavailable'),
-    504: ('gateway_timeout',),
-    505: ('http_version_not_supported', 'http_version'),
-    506: ('variant_also_negotiates',),
-    507: ('insufficient_storage',),
-    509: ('bandwidth_limit_exceeded', 'bandwidth'),
-    510: ('not_extended',),
-    511: ('network_authentication_required', 'network_auth', 'network_authentication'),
+    500: ("internal_server_error", "server_error", "/o\\", "✗"),
+    501: ("not_implemented",),
+    502: ("bad_gateway",),
+    503: ("service_unavailable", "unavailable"),
+    504: ("gateway_timeout",),
+    505: ("http_version_not_supported", "http_version"),
+    506: ("variant_also_negotiates",),
+    507: ("insufficient_storage",),
+    509: ("bandwidth_limit_exceeded", "bandwidth"),
+    510: ("not_extended",),
+    511: ("network_authentication_required", "network_auth", "network_authentication"),
 }
 
-codes = LookupDict(name='status_codes')
+codes = LookupDict(name="status_codes")
 
 
 def _init():
     for code, titles in _codes.items():
         for title in titles:
             setattr(codes, title, code)
-            if not title.startswith(('\\', '/')):
+            if not title.startswith(("\\", "/")):
                 setattr(codes, title.upper(), code)
 
     def doc(code):
-        names = ', '.join('``%s``' % n for n in _codes[code])
-        return '* %d: %s' % (code, names)
+        names = ", ".join(f"``{n}``" for n in _codes[code])
+        return "* %d: %s" % (code, names)
 
     global __doc__
-    __doc__ = (__doc__ + '\n' +
-               '\n'.join(doc(code) for code in sorted(_codes))
-               if __doc__ is not None else None)
+    __doc__ = (
+        __doc__ + "\n" + "\n".join(doc(code) for code in sorted(_codes))
+        if __doc__ is not None
+        else None
+    )
 
 
 _init()
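After _init() runs, every alias in _codes is reachable on the codes object as an attribute, a dictionary key, or an uppercase variant (except the punctuation-only names, which get no uppercase form):

    from requests import codes

    assert codes.ok == codes.OK == codes["okay"] == 200
    assert codes.im_a_teapot == 418
    assert codes["\\o/"] == 200  # starts with '\\', so no upper alias is created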
lib/requests/structures.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 """
 requests.structures
 ~~~~~~~~~~~~~~~~~~~
@@ -64,11 +62,7 @@ class CaseInsensitiveDict(MutableMapping):
 
     def lower_items(self):
         """Like iteritems(), but with all lowercase keys."""
-        return (
-            (lowerkey, keyval[1])
-            for (lowerkey, keyval)
-            in self._store.items()
-        )
+        return ((lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items())
 
     def __eq__(self, other):
         if isinstance(other, Mapping):
@@ -91,10 +85,10 @@ class LookupDict(dict):
 
     def __init__(self, name=None):
         self.name = name
-        super(LookupDict, self).__init__()
+        super().__init__()
 
     def __repr__(self):
-        return '<lookup \'%s\'>' % (self.name)
+        return f"<lookup '{self.name}'>"
 
     def __getitem__(self, key):
         # We allow fall-through here, so values default to None
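lower_items() is what makes the mapping case-insensitive while still remembering the casing of the last key set; a short demonstration:

    from requests.structures import CaseInsensitiveDict

    cid = CaseInsensitiveDict()
    cid["Accept"] = "application/json"
    assert cid["aCCePT"] == "application/json"          # lookups ignore case
    assert list(cid.lower_items()) == [("accept", "application/json")]
    assert cid == {"ACCEPT": "application/json"}        # __eq__ compares lower_items()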
lib/requests/utils.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 """
 requests.utils
 ~~~~~~~~~~~~~~
@@ -20,28 +18,46 @@ import tempfile
 import warnings
 import zipfile
 from collections import OrderedDict
-from urllib3.util import make_headers
-from urllib3.util import parse_url
 
-from .__version__ import __version__
+from urllib3.util import make_headers, parse_url
+
 from . import certs
+from .__version__ import __version__
+
 # to_native_string is unused here, but imported here for backwards compatibility
-from ._internal_utils import to_native_string
+from ._internal_utils import HEADER_VALIDATORS, to_native_string  # noqa: F401
+from .compat import (
+    Mapping,
+    basestring,
+    bytes,
+    getproxies,
+    getproxies_environment,
+    integer_types,
+)
 from .compat import parse_http_list as _parse_list_header
 from .compat import (
-    quote, urlparse, bytes, str, unquote, getproxies,
-    proxy_bypass, urlunparse, basestring, integer_types, is_py3,
-    proxy_bypass_environment, getproxies_environment, Mapping)
+    proxy_bypass,
+    proxy_bypass_environment,
+    quote,
+    str,
+    unquote,
+    urlparse,
+    urlunparse,
+)
 from .cookies import cookiejar_from_dict
-from .structures import CaseInsensitiveDict
 from .exceptions import (
-    InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError)
+    FileModeWarning,
+    InvalidHeader,
+    InvalidURL,
+    UnrewindableBodyError,
+)
+from .structures import CaseInsensitiveDict
 
-NETRC_FILES = ('.netrc', '_netrc')
+NETRC_FILES = (".netrc", "_netrc")
 
 DEFAULT_CA_BUNDLE_PATH = certs.where()
 
-DEFAULT_PORTS = {'http': 80, 'https': 443}
+DEFAULT_PORTS = {"http": 80, "https": 443}
 
 # Ensure that ', ' is used to preserve previous delimiter behavior.
 DEFAULT_ACCEPT_ENCODING = ", ".join(
@@ -49,28 +65,25 @@ DEFAULT_ACCEPT_ENCODING = ", ".join(
 )
 
 
-if sys.platform == 'win32':
+if sys.platform == "win32":
     # provide a proxy_bypass version on Windows without DNS lookups
 
     def proxy_bypass_registry(host):
         try:
-            if is_py3:
-                import winreg
-            else:
-                import _winreg as winreg
+            import winreg
         except ImportError:
             return False
 
         try:
-            internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
-                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
+            internetSettings = winreg.OpenKey(
+                winreg.HKEY_CURRENT_USER,
+                r"Software\Microsoft\Windows\CurrentVersion\Internet Settings",
+            )
             # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it
-            proxyEnable = int(winreg.QueryValueEx(internetSettings,
-                                                  'ProxyEnable')[0])
+            proxyEnable = int(winreg.QueryValueEx(internetSettings, "ProxyEnable")[0])
             # ProxyOverride is almost always a string
-            proxyOverride = winreg.QueryValueEx(internetSettings,
-                                                'ProxyOverride')[0]
-        except OSError:
+            proxyOverride = winreg.QueryValueEx(internetSettings, "ProxyOverride")[0]
+        except (OSError, ValueError):
             return False
         if not proxyEnable or not proxyOverride:
             return False
@@ -78,15 +91,15 @@ if sys.platform == 'win32':
         # make a check value list from the registry entry: replace the
         # '<local>' string by the localhost entry and the corresponding
         # canonical entry.
-        proxyOverride = proxyOverride.split(';')
+        proxyOverride = proxyOverride.split(";")
         # now check if we match one of the registry values.
         for test in proxyOverride:
-            if test == '<local>':
-                if '.' not in host:
+            if test == "<local>":
+                if "." not in host:
                     return True
-            test = test.replace(".", r"\.")     # mask dots
-            test = test.replace("*", r".*")     # change glob sequence
-            test = test.replace("?", r".")      # change glob char
+            test = test.replace(".", r"\.")  # mask dots
+            test = test.replace("*", r".*")  # change glob sequence
+            test = test.replace("?", r".")  # change glob char
             if re.match(test, host, re.I):
                 return True
         return False
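The three replace() calls above turn the registry's glob-style override entries into regular expressions. The same transformation in isolation, pure stdlib (glob_to_regex is just a name for this sketch):

    import re

    def glob_to_regex(test):
        test = test.replace(".", r"\.")  # mask dots
        test = test.replace("*", r".*")  # change glob sequence
        test = test.replace("?", r".")   # change glob char
        return test

    assert re.match(glob_to_regex("*.corp.example"), "db01.corp.example", re.I)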
@@ -106,7 +119,7 @@ if sys.platform == 'win32':
 def dict_to_sequence(d):
     """Returns an internal sequence dictionary update."""
 
-    if hasattr(d, 'items'):
+    if hasattr(d, "items"):
         d = d.items()
 
     return d
@@ -116,13 +129,13 @@ def super_len(o):
     total_length = None
     current_position = 0
 
-    if hasattr(o, '__len__'):
+    if hasattr(o, "__len__"):
         total_length = len(o)
 
-    elif hasattr(o, 'len'):
+    elif hasattr(o, "len"):
         total_length = o.len
 
-    elif hasattr(o, 'fileno'):
+    elif hasattr(o, "fileno"):
         try:
             fileno = o.fileno()
         except (io.UnsupportedOperation, AttributeError):
@@ -135,21 +148,23 @@ def super_len(o):
 
             # Having used fstat to determine the file length, we need to
             # confirm that this file was opened up in binary mode.
-            if 'b' not in o.mode:
-                warnings.warn((
-                    "Requests has determined the content-length for this "
-                    "request using the binary size of the file: however, the "
-                    "file has been opened in text mode (i.e. without the 'b' "
-                    "flag in the mode). This may lead to an incorrect "
-                    "content-length. In Requests 3.0, support will be removed "
-                    "for files in text mode."),
-                    FileModeWarning
+            if "b" not in o.mode:
+                warnings.warn(
+                    (
+                        "Requests has determined the content-length for this "
+                        "request using the binary size of the file: however, the "
+                        "file has been opened in text mode (i.e. without the 'b' "
+                        "flag in the mode). This may lead to an incorrect "
+                        "content-length. In Requests 3.0, support will be removed "
+                        "for files in text mode."
+                    ),
+                    FileModeWarning,
                 )
 
-    if hasattr(o, 'tell'):
+    if hasattr(o, "tell"):
         try:
             current_position = o.tell()
-        except (OSError, IOError):
+        except OSError:
             # This can happen in some weird situations, such as when the file
             # is actually a special file descriptor like stdin. In this
             # instance, we don't know what the length is, so set it to zero and
@@ -157,7 +172,7 @@ def super_len(o):
             if total_length is not None:
                 current_position = total_length
         else:
-            if hasattr(o, 'seek') and total_length is None:
+            if hasattr(o, "seek") and total_length is None:
                 # StringIO and BytesIO have seek but no usable fileno
                 try:
                     # seek to end of file
@@ -167,7 +182,7 @@ def super_len(o):
                     # seek back to current position to support
                     # partially read file-like objects
                     o.seek(current_position or 0)
-                except (OSError, IOError):
+                except OSError:
                     total_length = 0
 
     if total_length is None:
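super_len() combines the probes above with the tell() position, so a partially consumed stream only reports the bytes still left to send; a sketch with an in-memory file:

    import io
    from requests.utils import super_len

    buf = io.BytesIO(b"abcdef")
    assert super_len(buf) == 6
    buf.read(4)
    assert super_len(buf) == 2  # length to EOF minus the current tell() position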
@@ -179,14 +194,14 @@ def super_len(o):
 def get_netrc_auth(url, raise_errors=False):
     """Returns the Requests tuple auth for a given url from netrc."""
 
-    netrc_file = os.environ.get('NETRC')
+    netrc_file = os.environ.get("NETRC")
     if netrc_file is not None:
         netrc_locations = (netrc_file,)
     else:
-        netrc_locations = ('~/{}'.format(f) for f in NETRC_FILES)
+        netrc_locations = (f"~/{f}" for f in NETRC_FILES)
 
     try:
-        from netrc import netrc, NetrcParseError
+        from netrc import NetrcParseError, netrc
 
         netrc_path = None
 
@@ -211,18 +226,18 @@ def get_netrc_auth(url, raise_errors=False):
 
         # Strip port numbers from netloc. This weird `if...encode`` dance is
         # used for Python 3.2, which doesn't support unicode literals.
-        splitstr = b':'
+        splitstr = b":"
         if isinstance(url, str):
-            splitstr = splitstr.decode('ascii')
+            splitstr = splitstr.decode("ascii")
         host = ri.netloc.split(splitstr)[0]
 
         try:
             _netrc = netrc(netrc_path).authenticators(host)
             if _netrc:
                 # Return with login / password
-                login_i = (0 if _netrc[0] else 1)
+                login_i = 0 if _netrc[0] else 1
                 return (_netrc[login_i], _netrc[2])
-        except (NetrcParseError, IOError):
+        except (NetrcParseError, OSError):
             # If there was a parsing error or a permissions issue reading the file,
             # we'll just skip netrc auth unless explicitly asked to raise errors.
             if raise_errors:
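get_netrc_auth() returns the (login, password) pair for the machine entry matching the URL's host, consulting $NETRC first and then ~/.netrc or ~/_netrc. Assuming a netrc file with the hypothetical entry shown in the comment:

    # ~/.netrc:  machine api.example.com login alice password s3cret
    from requests.utils import get_netrc_auth

    auth = get_netrc_auth("https://api.example.com/v1/")
    # ("alice", "s3cret") if the entry exists; None when nothing matches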
@@ -235,9 +250,8 @@ def get_netrc_auth(url, raise_errors=False):
 
 def guess_filename(obj):
     """Tries to guess the filename of the given object."""
-    name = getattr(obj, 'name', None)
-    if (name and isinstance(name, basestring) and name[0] != '<' and
-            name[-1] != '>'):
+    name = getattr(obj, "name", None)
+    if name and isinstance(name, basestring) and name[0] != "<" and name[-1] != ">":
         return os.path.basename(name)
 
 
@@ -259,7 +273,7 @@ def extract_zipped_paths(path):
             # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split),
             # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users
             break
-        member = '/'.join([prefix, member])
+        member = "/".join([prefix, member])
 
     if not zipfile.is_zipfile(archive):
         return path
@@ -270,7 +284,7 @@ def extract_zipped_paths(path):
 
     # we have a valid zip archive and a valid member of that archive
     tmp = tempfile.gettempdir()
-    extracted_path = os.path.join(tmp, member.split('/')[-1])
+    extracted_path = os.path.join(tmp, member.split("/")[-1])
     if not os.path.exists(extracted_path):
         # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition
         with atomic_open(extracted_path) as file_handler:
@@ -281,12 +295,11 @@ def extract_zipped_paths(path):
 @contextlib.contextmanager
 def atomic_open(filename):
     """Write a file to the disk in an atomic fashion"""
-    replacer = os.rename if sys.version_info[0] == 2 else os.replace
     tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename))
     try:
-        with os.fdopen(tmp_descriptor, 'wb') as tmp_handler:
+        with os.fdopen(tmp_descriptor, "wb") as tmp_handler:
             yield tmp_handler
-        replacer(tmp_name, filename)
+        os.replace(tmp_name, filename)
     except BaseException:
         os.remove(tmp_name)
         raise
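atomic_open() is the classic write-to-temp-then-replace pattern: the data lands in a mkstemp() file in the same directory, and os.replace() swaps it in atomically, so readers never observe a half-written file. The same idea with only the stdlib (atomic_write is a name invented for this sketch):

    import os
    import tempfile

    def atomic_write(filename, data):
        fd, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename) or ".")
        try:
            with os.fdopen(fd, "wb") as tmp:
                tmp.write(data)
            os.replace(tmp_name, filename)  # atomic rename on POSIX and Windows
        except BaseException:
            os.remove(tmp_name)
            raise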
@@ -314,7 +327,7 @@ def from_key_val_list(value):
         return None
 
     if isinstance(value, (str, bytes, bool, int)):
-        raise ValueError('cannot encode objects that are not 2-tuples')
+        raise ValueError("cannot encode objects that are not 2-tuples")
 
     return OrderedDict(value)
 
@@ -340,7 +353,7 @@ def to_key_val_list(value):
         return None
 
     if isinstance(value, (str, bytes, bool, int)):
-        raise ValueError('cannot encode objects that are not 2-tuples')
+        raise ValueError("cannot encode objects that are not 2-tuples")
 
     if isinstance(value, Mapping):
         value = value.items()
@@ -405,10 +418,10 @@ def parse_dict_header(value):
     """
     result = {}
     for item in _parse_list_header(value):
-        if '=' not in item:
+        if "=" not in item:
             result[item] = None
             continue
-        name, value = item.split('=', 1)
+        name, value = item.split("=", 1)
         if value[:1] == value[-1:] == '"':
             value = unquote_header_value(value[1:-1])
         result[name] = value
@@ -436,8 +449,8 @@ def unquote_header_value(value, is_filename=False):
         # replace sequence below on a UNC path has the effect of turning
         # the leading double slash into a single slash and then
         # _fix_ie_filename() doesn't work correctly. See #458.
-        if not is_filename or value[:2] != '\\\\':
-            return value.replace('\\\\', '\\').replace('\\"', '"')
+        if not is_filename or value[:2] != "\\\\":
+            return value.replace("\\\\", "\\").replace('\\"', '"')
     return value
 
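parse_dict_header() splits a comma-separated key=value header, unquoting quoted values and mapping bare tokens to None:

    from requests.utils import parse_dict_header

    d = parse_dict_header('foo="is a fish", bar="as well", token')
    assert d == {"foo": "is a fish", "bar": "as well", "token": None}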
@@ -472,19 +485,24 @@ def get_encodings_from_content(content):
 
     :param content: bytestring to extract encodings from.
     """
-    warnings.warn((
-        'In requests 3.0, get_encodings_from_content will be removed. For '
-        'more information, please see the discussion on issue #2266. (This'
-        ' warning should only appear once.)'),
-        DeprecationWarning)
+    warnings.warn(
+        (
+            "In requests 3.0, get_encodings_from_content will be removed. For "
+            "more information, please see the discussion on issue #2266. (This"
+            " warning should only appear once.)"
+        ),
+        DeprecationWarning,
+    )
 
     charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
     pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
     xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]')
 
-    return (charset_re.findall(content) +
-            pragma_re.findall(content) +
-            xml_re.findall(content))
+    return (
+        charset_re.findall(content)
+        + pragma_re.findall(content)
+        + xml_re.findall(content)
+    )
 
 
 def _parse_content_type_header(header):
@@ -495,7 +513,7 @@ def _parse_content_type_header(header):
     parameters
     """
 
-    tokens = header.split(';')
+    tokens = header.split(";")
     content_type, params = tokens[0].strip(), tokens[1:]
     params_dict = {}
     items_to_strip = "\"' "
@@ -507,7 +525,7 @@ def _parse_content_type_header(header):
         index_of_equals = param.find("=")
         if index_of_equals != -1:
             key = param[:index_of_equals].strip(items_to_strip)
-            value = param[index_of_equals + 1:].strip(items_to_strip)
+            value = param[index_of_equals + 1 :].strip(items_to_strip)
             params_dict[key.lower()] = value
     return content_type, params_dict
 
@@ -519,38 +537,37 @@ def get_encoding_from_headers(headers):
     :rtype: str
     """
 
-    content_type = headers.get('content-type')
+    content_type = headers.get("content-type")
 
     if not content_type:
         return None
 
     content_type, params = _parse_content_type_header(content_type)
 
-    if 'charset' in params:
-        return params['charset'].strip("'\"")
+    if "charset" in params:
+        return params["charset"].strip("'\"")
 
-    if 'text' in content_type:
-        return 'ISO-8859-1'
+    if "text" in content_type:
+        return "ISO-8859-1"
 
-    if 'application/json' in content_type:
+    if "application/json" in content_type:
         # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset
-        return 'utf-8'
+        return "utf-8"
 
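get_encoding_from_headers() follows exactly the branches above: an explicit charset parameter wins, text/* falls back to RFC 2616's ISO-8859-1 default, and JSON is assumed UTF-8 (a plain dict works here because only .get() is used; real Response headers are case-insensitive):

    from requests.utils import get_encoding_from_headers

    assert get_encoding_from_headers({"content-type": "text/html; charset=UTF-8"}) == "UTF-8"
    assert get_encoding_from_headers({"content-type": "text/plain"}) == "ISO-8859-1"
    assert get_encoding_from_headers({"content-type": "application/json"}) == "utf-8"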
 def stream_decode_response_unicode(iterator, r):
-    """Stream decodes a iterator."""
+    """Stream decodes an iterator."""
 
     if r.encoding is None:
-        for item in iterator:
-            yield item
+        yield from iterator
         return
 
-    decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')
+    decoder = codecs.getincrementaldecoder(r.encoding)(errors="replace")
     for chunk in iterator:
         rv = decoder.decode(chunk)
         if rv:
             yield rv
-    rv = decoder.decode(b'', final=True)
+    rv = decoder.decode(b"", final=True)
     if rv:
         yield rv
 
@@ -561,7 +578,7 @@ def iter_slices(string, slice_length):
     if slice_length is None or slice_length <= 0:
         slice_length = len(string)
     while pos < len(string):
-        yield string[pos:pos + slice_length]
+        yield string[pos : pos + slice_length]
         pos += slice_length
 
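The trailing decoder.decode(b"", final=True) matters because a multi-byte character can straddle two chunks: the incremental decoder buffers the partial bytes and only emits (or errors) once told the stream is finished. In isolation:

    import codecs

    decoder = codecs.getincrementaldecoder("utf-8")(errors="replace")
    # "é" is b"\xc3\xa9" in UTF-8, fed here split across two chunks
    assert decoder.decode(b"\xc3") == ""             # incomplete, buffered
    assert decoder.decode(b"\xa9") == "é"            # completed by the next chunk
    assert decoder.decode(b"", final=True) == ""     # flush; nothing left over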
@@ -577,11 +594,14 @@ def get_unicode_from_response(r):
 
     :rtype: str
     """
-    warnings.warn((
-        'In requests 3.0, get_unicode_from_response will be removed. For '
-        'more information, please see the discussion on issue #2266. (This'
-        ' warning should only appear once.)'),
-        DeprecationWarning)
+    warnings.warn(
+        (
+            "In requests 3.0, get_unicode_from_response will be removed. For "
+            "more information, please see the discussion on issue #2266. (This"
+            " warning should only appear once.)"
+        ),
+        DeprecationWarning,
+    )
 
     tried_encodings = []
 
@@ -596,14 +616,15 @@ def get_unicode_from_response(r):
 
     # Fall back:
     try:
-        return str(r.content, encoding, errors='replace')
+        return str(r.content, encoding, errors="replace")
     except TypeError:
         return r.content
 
 
 # The unreserved URI characters (RFC 3986)
 UNRESERVED_SET = frozenset(
-    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~")
+    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~"
+)
 
 
 def unquote_unreserved(uri):
@@ -612,22 +633,22 @@ def unquote_unreserved(uri):
 
     :rtype: str
     """
-    parts = uri.split('%')
+    parts = uri.split("%")
     for i in range(1, len(parts)):
         h = parts[i][0:2]
         if len(h) == 2 and h.isalnum():
             try:
                 c = chr(int(h, 16))
             except ValueError:
-                raise InvalidURL("Invalid percent-escape sequence: '%s'" % h)
+                raise InvalidURL(f"Invalid percent-escape sequence: '{h}'")
 
             if c in UNRESERVED_SET:
                 parts[i] = c + parts[i][2:]
             else:
-                parts[i] = '%' + parts[i]
+                parts[i] = f"%{parts[i]}"
         else:
-            parts[i] = '%' + parts[i]
-    return ''.join(parts)
+            parts[i] = f"%{parts[i]}"
+    return "".join(parts)
 
 
 def requote_uri(uri):
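unquote_unreserved() only decodes escapes that land in the RFC 3986 unreserved set, which is what lets requote_uri() normalize a URI without double-encoding it:

    from requests.utils import requote_uri, unquote_unreserved

    assert unquote_unreserved("http://example.com/%7Euser") == "http://example.com/~user"
    assert unquote_unreserved("http://example.com/a%2Fb") == "http://example.com/a%2Fb"  # '/' stays escaped
    assert requote_uri("http://example.com/a b") == "http://example.com/a%20b"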
@@ -660,10 +681,10 @@ def address_in_network(ip, net):
 
     :rtype: bool
     """
-    ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0]
-    netaddr, bits = net.split('/')
-    netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0]
-    network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask
+    ipaddr = struct.unpack("=L", socket.inet_aton(ip))[0]
+    netaddr, bits = net.split("/")
+    netmask = struct.unpack("=L", socket.inet_aton(dotted_netmask(int(bits))))[0]
+    network = struct.unpack("=L", socket.inet_aton(netaddr))[0] & netmask
     return (ipaddr & netmask) == (network & netmask)
 
 
@@ -674,8 +695,8 @@ def dotted_netmask(mask):
 
     :rtype: str
     """
-    bits = 0xffffffff ^ (1 << 32 - mask) - 1
-    return socket.inet_ntoa(struct.pack('>I', bits))
+    bits = 0xFFFFFFFF ^ (1 << 32 - mask) - 1
+    return socket.inet_ntoa(struct.pack(">I", bits))
 
 
 def is_ipv4_address(string_ip):
@@ -684,7 +705,7 @@ def is_ipv4_address(string_ip):
     """
     try:
         socket.inet_aton(string_ip)
-    except socket.error:
+    except OSError:
        return False
     return True
 
 
@@ -695,9 +716,9 @@ def is_valid_cidr(string_network):
 
     :rtype: bool
     """
-    if string_network.count('/') == 1:
+    if string_network.count("/") == 1:
         try:
-            mask = int(string_network.split('/')[1])
+            mask = int(string_network.split("/")[1])
         except ValueError:
             return False
 
@@ -705,8 +726,8 @@ def is_valid_cidr(string_network):
             return False
 
         try:
-            socket.inet_aton(string_network.split('/')[0])
-        except socket.error:
+            socket.inet_aton(string_network.split("/")[0])
+        except OSError:
             return False
     else:
         return False
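address_in_network() masks both addresses with the netmask derived from the CIDR suffix and compares the network parts; dotted_netmask() builds that mask:

    from requests.utils import address_in_network, dotted_netmask, is_valid_cidr

    assert dotted_netmask(24) == "255.255.255.0"
    assert address_in_network("192.168.1.1", "192.168.1.0/24")
    assert not address_in_network("172.16.0.1", "192.168.1.0/24")
    assert is_valid_cidr("192.168.1.0/24")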
@@ -743,13 +764,14 @@ def should_bypass_proxies(url, no_proxy):
     """
     # Prioritize lowercase environment variables over uppercase
     # to keep a consistent behaviour with other http projects (curl, wget).
-    get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())
+    def get_proxy(key):
+        return os.environ.get(key) or os.environ.get(key.upper())
 
     # First check whether no_proxy is defined. If it is, check that the URL
     # we're getting isn't in the no_proxy list.
     no_proxy_arg = no_proxy
     if no_proxy is None:
-        no_proxy = get_proxy('no_proxy')
+        no_proxy = get_proxy("no_proxy")
     parsed = urlparse(url)
 
     if parsed.hostname is None:
@@ -759,9 +781,7 @@ def should_bypass_proxies(url, no_proxy):
     if no_proxy:
         # We need to check whether we match here. We need to see if we match
         # the end of the hostname, both with and without the port.
-        no_proxy = (
-            host for host in no_proxy.replace(' ', '').split(',') if host
-        )
+        no_proxy = (host for host in no_proxy.replace(" ", "").split(",") if host)
 
         if is_ipv4_address(parsed.hostname):
             for proxy_ip in no_proxy:
@@ -775,7 +795,7 @@ def should_bypass_proxies(url, no_proxy):
         else:
             host_with_port = parsed.hostname
             if parsed.port:
-                host_with_port += ':{}'.format(parsed.port)
+                host_with_port += f":{parsed.port}"
 
             for host in no_proxy:
                 if parsed.hostname.endswith(host) or host_with_port.endswith(host):
@@ -783,7 +803,7 @@ def should_bypass_proxies(url, no_proxy):
                     # to apply the proxies on this URL.
                     return True
 
-    with set_environ('no_proxy', no_proxy_arg):
+    with set_environ("no_proxy", no_proxy_arg):
         # parsed.hostname can be `None` in cases such as a file URI.
         try:
             bypass = proxy_bypass(parsed.hostname)
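should_bypass_proxies() accepts CIDR blocks, bare hostname suffixes, and host:port entries in no_proxy. A sketch using the helper directly (the last call additionally consults the platform's proxy_bypass, so it assumes a typical environment with no system-wide overrides):

    from requests.utils import should_bypass_proxies

    assert should_bypass_proxies("http://192.168.0.1:5000/", no_proxy="192.168.0.0/24")
    assert should_bypass_proxies("http://intra.example.com/", no_proxy="example.com")
    assert not should_bypass_proxies("http://requests.readthedocs.io/", no_proxy="example.com")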
@@ -817,13 +837,13 @@ def select_proxy(url, proxies):
     proxies = proxies or {}
     urlparts = urlparse(url)
     if urlparts.hostname is None:
-        return proxies.get(urlparts.scheme, proxies.get('all'))
+        return proxies.get(urlparts.scheme, proxies.get("all"))
 
     proxy_keys = [
-        urlparts.scheme + '://' + urlparts.hostname,
+        urlparts.scheme + "://" + urlparts.hostname,
         urlparts.scheme,
-        'all://' + urlparts.hostname,
-        'all',
+        "all://" + urlparts.hostname,
+        "all",
     ]
     proxy = None
     for proxy_key in proxy_keys:
@@ -848,13 +868,13 @@ def resolve_proxies(request, proxies, trust_env=True):
     proxies = proxies if proxies is not None else {}
     url = request.url
     scheme = urlparse(url).scheme
-    no_proxy = proxies.get('no_proxy')
+    no_proxy = proxies.get("no_proxy")
     new_proxies = proxies.copy()
 
     if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy):
         environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)
 
-        proxy = environ_proxies.get(scheme, environ_proxies.get('all'))
+        proxy = environ_proxies.get(scheme, environ_proxies.get("all"))
 
         if proxy:
             new_proxies.setdefault(scheme, proxy)
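select_proxy() walks the key list above from most to least specific, so a scheme://host entry beats a bare scheme, which beats the "all" fallbacks (the proxy URLs below are hypothetical):

    from requests.utils import select_proxy

    proxies = {
        "http://api.example.com": "http://proxy-a:3128",
        "http": "http://proxy-b:3128",
        "all": "http://proxy-c:3128",
    }
    assert select_proxy("http://api.example.com/v1", proxies) == "http://proxy-a:3128"
    assert select_proxy("http://other.example.com/", proxies) == "http://proxy-b:3128"
    assert select_proxy("ftp://files.example.com/", proxies) == "http://proxy-c:3128"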
@@ -867,19 +887,21 @@ def default_user_agent(name="python-requests"):
 
     :rtype: str
     """
-    return '%s/%s' % (name, __version__)
+    return f"{name}/{__version__}"
 
 
 def default_headers():
     """
     :rtype: requests.structures.CaseInsensitiveDict
     """
-    return CaseInsensitiveDict({
-        'User-Agent': default_user_agent(),
-        'Accept-Encoding': DEFAULT_ACCEPT_ENCODING,
-        'Accept': '*/*',
-        'Connection': 'keep-alive',
-    })
+    return CaseInsensitiveDict(
+        {
+            "User-Agent": default_user_agent(),
+            "Accept-Encoding": DEFAULT_ACCEPT_ENCODING,
+            "Accept": "*/*",
+            "Connection": "keep-alive",
+        }
+    )
 
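With the version pinned by this commit, the f-string above evaluates to the User-Agent shown below, and default_headers() wraps it with the standard Accept/Connection defaults:

    from requests.utils import default_headers, default_user_agent

    assert default_user_agent() == "python-requests/2.28.1"
    headers = default_headers()
    assert headers["Accept"] == "*/*" and "User-Agent" in headers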
 def parse_header_links(value):
@@ -892,23 +914,23 @@ def parse_header_links(value):
 
     links = []
 
-    replace_chars = ' \'"'
+    replace_chars = " '\""
 
     value = value.strip(replace_chars)
     if not value:
         return links
 
-    for val in re.split(', *<', value):
+    for val in re.split(", *<", value):
         try:
-            url, params = val.split(';', 1)
+            url, params = val.split(";", 1)
         except ValueError:
-            url, params = val, ''
+            url, params = val, ""
 
-        link = {'url': url.strip('<> \'"')}
+        link = {"url": url.strip("<> '\"")}
 
-        for param in params.split(';'):
+        for param in params.split(";"):
             try:
-                key, value = param.split('=')
+                key, value = param.split("=")
             except ValueError:
                 break
 
@@ -920,7 +942,7 @@ def parse_header_links(value):
 
 
 # Null bytes; no need to recreate these on each call to guess_json_utf
-_null = '\x00'.encode('ascii')  # encoding to ASCII for Python 3
+_null = "\x00".encode("ascii")  # encoding to ASCII for Python 3
 _null2 = _null * 2
 _null3 = _null * 3
 
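parse_header_links() turns an RFC 8288 Link header into one dict per <url>; params block, which is what backs Response.links pagination (api.example.com is a placeholder):

    from requests.utils import parse_header_links

    links = parse_header_links(
        '<https://api.example.com/items?page=2>; rel="next", '
        '<https://api.example.com/items?page=9>; rel="last"'
    )
    assert links == [
        {"url": "https://api.example.com/items?page=2", "rel": "next"},
        {"url": "https://api.example.com/items?page=9", "rel": "last"},
    ]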
@@ -934,25 +956,25 @@ def guess_json_utf(data):
     # determine the encoding. Also detect a BOM, if present.
     sample = data[:4]
     if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
-        return 'utf-32'  # BOM included
+        return "utf-32"  # BOM included
     if sample[:3] == codecs.BOM_UTF8:
-        return 'utf-8-sig'  # BOM included, MS style (discouraged)
+        return "utf-8-sig"  # BOM included, MS style (discouraged)
     if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
-        return 'utf-16'  # BOM included
+        return "utf-16"  # BOM included
     nullcount = sample.count(_null)
     if nullcount == 0:
-        return 'utf-8'
+        return "utf-8"
     if nullcount == 2:
-        if sample[::2] == _null2:   # 1st and 3rd are null
-            return 'utf-16-be'
+        if sample[::2] == _null2:  # 1st and 3rd are null
+            return "utf-16-be"
         if sample[1::2] == _null2:  # 2nd and 4th are null
-            return 'utf-16-le'
+            return "utf-16-le"
     # Did not detect 2 valid UTF-16 ascii-range characters
     if nullcount == 3:
         if sample[:3] == _null3:
-            return 'utf-32-be'
+            return "utf-32-be"
         if sample[1:] == _null3:
-            return 'utf-32-le'
+            return "utf-32-le"
     # Did not detect a valid UTF-32 ascii-range character
     return None
 
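guess_json_utf() exploits the fact that a JSON document must open with ASCII characters, so the null-byte layout of the first four bytes identifies the UTF flavor even without a BOM:

    from requests.utils import guess_json_utf

    assert guess_json_utf(b'{"k": 1}') == "utf-8"
    assert guess_json_utf('{"k": 1}'.encode("utf-16-le")) == "utf-16-le"
    assert guess_json_utf('{"k": 1}'.encode("utf-16")) == "utf-16"  # BOM present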
@@ -977,13 +999,13 @@ def prepend_scheme_if_needed(url, new_scheme):
     if auth:
         # parse_url doesn't provide the netloc with auth
         # so we'll add it ourselves.
-        netloc = '@'.join([auth, netloc])
+        netloc = "@".join([auth, netloc])
     if scheme is None:
         scheme = new_scheme
     if path is None:
-        path = ''
+        path = ""
 
-    return urlunparse((scheme, netloc, path, '', query, fragment))
+    return urlunparse((scheme, netloc, path, "", query, fragment))
 
 
 def get_auth_from_url(url):
@@ -997,35 +1019,36 @@ def get_auth_from_url(url):
     try:
         auth = (unquote(parsed.username), unquote(parsed.password))
     except (AttributeError, TypeError):
-        auth = ('', '')
+        auth = ("", "")
 
     return auth
 
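get_auth_from_url() percent-decodes the userinfo component and falls back to empty strings when the URL carries no credentials:

    from requests.utils import get_auth_from_url

    assert get_auth_from_url("https://alice:s3cret@example.com/") == ("alice", "s3cret")
    assert get_auth_from_url("https://user%40corp:pw@example.com/") == ("user@corp", "pw")
    assert get_auth_from_url("https://example.com/") == ("", "")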
-# Moved outside of function to avoid recompile every call
-_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$')
-_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$')
 
 
 def check_header_validity(header):
-    """Verifies that header value is a string which doesn't contain
-    leading whitespace or return characters. This prevents unintended
-    header injection.
+    """Verifies that header parts don't contain leading whitespace
+    reserved characters, or return characters.
 
     :param header: tuple, in the format (name, value).
     """
     name, value = header
 
-    if isinstance(value, bytes):
-        pat = _CLEAN_HEADER_REGEX_BYTE
-    else:
-        pat = _CLEAN_HEADER_REGEX_STR
-    try:
-        if not pat.match(value):
-            raise InvalidHeader("Invalid return character or leading space in header: %s" % name)
-    except TypeError:
-        raise InvalidHeader("Value for header {%s: %s} must be of type str or "
-                            "bytes, not %s" % (name, value, type(value)))
+    for part in header:
+        if type(part) not in HEADER_VALIDATORS:
+            raise InvalidHeader(
+                f"Header part ({part!r}) from {{{name!r}: {value!r}}} must be "
+                f"of type str or bytes, not {type(part)}"
+            )
+
+    _validate_header_part(name, "name", HEADER_VALIDATORS[type(name)][0])
+    _validate_header_part(value, "value", HEADER_VALIDATORS[type(value)][1])
+
+
+def _validate_header_part(header_part, header_kind, validator):
+    if not validator.match(header_part):
+        raise InvalidHeader(
+            f"Invalid leading whitespace, reserved character(s), or return"
+            f"character(s) in header {header_kind}: {header_part!r}"
+        )
 
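The rewritten check dispatches on each part's type and applies the matching precompiled HEADER_VALIDATORS pattern, so CR/LF smuggling in either the header name or the value is rejected before a request is built:

    from requests.exceptions import InvalidHeader
    from requests.utils import check_header_validity

    check_header_validity(("X-Token", "abc123"))  # passes silently
    try:
        check_header_validity(("X-Token", "abc\r\nInjected: 1"))
    except InvalidHeader as exc:
        print("rejected:", exc)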
 def urldefragauth(url):
@@ -1040,21 +1063,24 @@ def urldefragauth(url):
     if not netloc:
         netloc, path = path, netloc
 
-    netloc = netloc.rsplit('@', 1)[-1]
+    netloc = netloc.rsplit("@", 1)[-1]
 
-    return urlunparse((scheme, netloc, path, params, query, ''))
+    return urlunparse((scheme, netloc, path, params, query, ""))
 
 
 def rewind_body(prepared_request):
     """Move file pointer back to its recorded starting position
     so it can be read again on redirect.
     """
-    body_seek = getattr(prepared_request.body, 'seek', None)
-    if body_seek is not None and isinstance(prepared_request._body_position, integer_types):
+    body_seek = getattr(prepared_request.body, "seek", None)
+    if body_seek is not None and isinstance(
+        prepared_request._body_position, integer_types
+    ):
         try:
             body_seek(prepared_request._body_position)
-        except (IOError, OSError):
-            raise UnrewindableBodyError("An error occurred when rewinding request "
-                                        "body for redirect.")
+        except OSError:
+            raise UnrewindableBodyError(
+                "An error occurred when rewinding request body for redirect."
+            )
     else:
         raise UnrewindableBodyError("Unable to rewind request body for redirect.")