Update requests==2.28.1

JonnyWong16 2022-11-12 17:08:52 -08:00
commit 36ef41083b
No known key found for this signature in database
GPG key ID: B1F1F9807184697A
18 changed files with 1190 additions and 939 deletions

requests/__init__.py

@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 #   __
 #  /__)  _  _     _   _ _/   _
 # / (   (- (/ (/ (- _)  /  _)
@@ -40,8 +38,10 @@ is at <https://requests.readthedocs.io>.
 :license: Apache 2.0, see LICENSE for more details.
 """
-import urllib3
 import warnings
+
+import urllib3
+
 from .exceptions import RequestsDependencyWarning
 
 try:
@@ -54,13 +54,14 @@ try:
 except ImportError:
     chardet_version = None
 
+
 def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version):
-    urllib3_version = urllib3_version.split('.')
-    assert urllib3_version != ['dev']  # Verify urllib3 isn't installed from git.
+    urllib3_version = urllib3_version.split(".")
+    assert urllib3_version != ["dev"]  # Verify urllib3 isn't installed from git.
 
     # Sometimes, urllib3 only reports its version as 16.1.
     if len(urllib3_version) == 2:
-        urllib3_version.append('0')
+        urllib3_version.append("0")
 
     # Check urllib3 for compatibility.
     major, minor, patch = urllib3_version  # noqa: F811
@@ -72,36 +73,46 @@ def check_compatibility(urllib3_version, chardet_version, charset_normalizer_ver
     # Check charset_normalizer for compatibility.
     if chardet_version:
-        major, minor, patch = chardet_version.split('.')[:3]
+        major, minor, patch = chardet_version.split(".")[:3]
         major, minor, patch = int(major), int(minor), int(patch)
-        # chardet_version >= 3.0.2, < 5.0.0
-        assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0)
+        # chardet_version >= 3.0.2, < 6.0.0
+        assert (3, 0, 2) <= (major, minor, patch) < (6, 0, 0)
     elif charset_normalizer_version:
-        major, minor, patch = charset_normalizer_version.split('.')[:3]
+        major, minor, patch = charset_normalizer_version.split(".")[:3]
         major, minor, patch = int(major), int(minor), int(patch)
         # charset_normalizer >= 2.0.0 < 3.0.0
         assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0)
     else:
         raise Exception("You need either charset_normalizer or chardet installed")
 
+
 def _check_cryptography(cryptography_version):
     # cryptography < 1.3.4
     try:
-        cryptography_version = list(map(int, cryptography_version.split('.')))
+        cryptography_version = list(map(int, cryptography_version.split(".")))
     except ValueError:
         return
 
     if cryptography_version < [1, 3, 4]:
-        warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version)
+        warning = "Old version of cryptography ({}) may cause slowdown.".format(
+            cryptography_version
+        )
         warnings.warn(warning, RequestsDependencyWarning)
 
+
 # Check imported dependencies for compatibility.
 try:
-    check_compatibility(urllib3.__version__, chardet_version, charset_normalizer_version)
+    check_compatibility(
+        urllib3.__version__, chardet_version, charset_normalizer_version
+    )
 except (AssertionError, ValueError):
-    warnings.warn("urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported "
-                  "version!".format(urllib3.__version__, chardet_version, charset_normalizer_version),
-                  RequestsDependencyWarning)
+    warnings.warn(
+        "urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported "
+        "version!".format(
+            urllib3.__version__, chardet_version, charset_normalizer_version
+        ),
+        RequestsDependencyWarning,
+    )
 
 # Attempt to enable urllib3's fallback for SNI support
 # if the standard library doesn't support SNI or the
@@ -114,39 +125,56 @@ try:
     if not getattr(ssl, "HAS_SNI", False):
         from urllib3.contrib import pyopenssl
+
         pyopenssl.inject_into_urllib3()
 
         # Check cryptography version
         from cryptography import __version__ as cryptography_version
+
         _check_cryptography(cryptography_version)
 except ImportError:
     pass
 
 # urllib3's DependencyWarnings should be silenced.
 from urllib3.exceptions import DependencyWarning
-warnings.simplefilter('ignore', DependencyWarning)
 
-from .__version__ import __title__, __description__, __url__, __version__
-from .__version__ import __build__, __author__, __author_email__, __license__
-from .__version__ import __copyright__, __cake__
-from . import utils
-from . import packages
-from .models import Request, Response, PreparedRequest
-from .api import request, get, head, post, patch, put, delete, options
-from .sessions import session, Session
-from .status_codes import codes
-from .exceptions import (
-    RequestException, Timeout, URLRequired,
-    TooManyRedirects, HTTPError, ConnectionError,
-    FileModeWarning, ConnectTimeout, ReadTimeout, JSONDecodeError
-)
+warnings.simplefilter("ignore", DependencyWarning)
 
 # Set default logging handler to avoid "No handler found" warnings.
 import logging
 from logging import NullHandler
 
+from . import packages, utils
+from .__version__ import (
+    __author__,
+    __author_email__,
+    __build__,
+    __cake__,
+    __copyright__,
+    __description__,
+    __license__,
+    __title__,
+    __url__,
+    __version__,
+)
+from .api import delete, get, head, options, patch, post, put, request
+from .exceptions import (
+    ConnectionError,
+    ConnectTimeout,
+    FileModeWarning,
+    HTTPError,
+    JSONDecodeError,
+    ReadTimeout,
+    RequestException,
+    Timeout,
+    TooManyRedirects,
+    URLRequired,
+)
+from .models import PreparedRequest, Request, Response
+from .sessions import Session, session
+from .status_codes import codes
+
 logging.getLogger(__name__).addHandler(NullHandler())
 
 # FileModeWarnings go off per the default.
-warnings.simplefilter('default', FileModeWarning, append=True)
+warnings.simplefilter("default", FileModeWarning, append=True)
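
The compatibility gate above is plain lexicographic comparison of parsed version tuples. A standalone sketch of the same idea (parse_version is an illustrative helper, not part of requests):

    def parse_version(v):
        # "2.28.1" -> (2, 28, 1); Python compares tuples element-wise
        return tuple(int(part) for part in v.split(".")[:3])

    assert (3, 0, 2) <= parse_version("5.1.0") < (6, 0, 0)  # chardet window after this commit
    assert (2, 0, 0) <= parse_version("2.1.0") < (3, 0, 0)  # charset_normalizer window

Widening the upper bound from (5, 0, 0) to (6, 0, 0) is all it takes for the assert to start accepting chardet 5.x.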

requests/__version__.py

@@ -2,13 +2,13 @@
 # |(  |-  |.| | | |-  `-.  |  `-.
 # ' ' `-' `-`.`-' `-' `-' ' `-'
 
-__title__ = 'requests'
-__description__ = 'Python HTTP for Humans.'
-__url__ = 'https://requests.readthedocs.io'
-__version__ = '2.27.1'
-__build__ = 0x022701
-__author__ = 'Kenneth Reitz'
-__author_email__ = 'me@kennethreitz.org'
-__license__ = 'Apache 2.0'
-__copyright__ = 'Copyright 2022 Kenneth Reitz'
-__cake__ = u'\u2728 \U0001f370 \u2728'
+__title__ = "requests"
+__description__ = "Python HTTP for Humans."
+__url__ = "https://requests.readthedocs.io"
+__version__ = "2.28.1"
+__build__ = 0x022801
+__author__ = "Kenneth Reitz"
+__author_email__ = "me@kennethreitz.org"
+__license__ = "Apache 2.0"
+__copyright__ = "Copyright 2022 Kenneth Reitz"
+__cake__ = "\u2728 \U0001f370 \u2728"
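
Note how __build__ tracks __version__: the hex literal's digits mirror the zero-padded decimal version components (2.28.1 -> 0x022801, just as 2.27.1 was 0x022701). A quick sanity check, assuming that convention:

    major, minor, patch = 2, 28, 1
    assert int(f"{major:02d}{minor:02d}{patch:02d}", 16) == 0x022801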

requests/_internal_utils.py

@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 """
 requests._internal_utils
 ~~~~~~~~~~~~~~
@@ -7,11 +5,22 @@ requests._internal_utils
 Provides utility functions that are consumed internally by Requests
 which depend on extremely few external helpers (such as compat)
 """
+import re
 
-from .compat import is_py2, builtin_str, str
+from .compat import builtin_str
+
+_VALID_HEADER_NAME_RE_BYTE = re.compile(rb"^[^:\s][^:\r\n]*$")
+_VALID_HEADER_NAME_RE_STR = re.compile(r"^[^:\s][^:\r\n]*$")
+_VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$")
+_VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$")
+
+HEADER_VALIDATORS = {
+    bytes: (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE),
+    str: (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR),
+}
 
-def to_native_string(string, encoding='ascii'):
+
+def to_native_string(string, encoding="ascii"):
     """Given a string object, regardless of type, returns a representation of
     that string in the native string type, encoding and decoding where
     necessary. This assumes ASCII unless told otherwise.
@@ -19,10 +28,7 @@ def to_native_string(string, encoding='ascii'):
     if isinstance(string, builtin_str):
         out = string
     else:
-        if is_py2:
-            out = string.encode(encoding)
-        else:
-            out = string.decode(encoding)
+        out = string.decode(encoding)
 
     return out
 
@@ -36,7 +42,7 @@ def unicode_is_ascii(u_string):
     """
     assert isinstance(u_string, str)
     try:
-        u_string.encode('ascii')
+        u_string.encode("ascii")
         return True
     except UnicodeEncodeError:
         return False
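
The new HEADER_VALIDATORS table keys the name/value regex pair by the header's type, so bytes and str headers can be checked uniformly. A minimal sketch of how such a table might be consumed (validate_header is a hypothetical helper, not part of requests):

    import re

    HEADER_VALIDATORS = {
        bytes: (re.compile(rb"^[^:\s][^:\r\n]*$"), re.compile(rb"^\S[^\r\n]*$|^$")),
        str: (re.compile(r"^[^:\s][^:\r\n]*$"), re.compile(r"^\S[^\r\n]*$|^$")),
    }

    def validate_header(name, value):
        # Pick the regex pair matching the header's type, then match both parts
        name_re, value_re = HEADER_VALIDATORS[type(name)]
        return bool(name_re.match(name)) and bool(value_re.match(value))

    assert validate_header("X-Token", "abc123")
    assert not validate_header("Bad\nName", "x")  # newline in a header name is rejected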

requests/adapters.py

@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 """
 requests.adapters
 ~~~~~~~~~~~~~~~~~
@@ -9,58 +7,76 @@ and maintain connections.
 """
 
 import os.path
-import socket
+import socket  # noqa: F401
 
-from urllib3.poolmanager import PoolManager, proxy_from_url
-from urllib3.response import HTTPResponse
-from urllib3.util import parse_url
-from urllib3.util import Timeout as TimeoutSauce
-from urllib3.util.retry import Retry
-from urllib3.exceptions import ClosedPoolError
-from urllib3.exceptions import ConnectTimeoutError
+from urllib3.exceptions import ClosedPoolError, ConnectTimeoutError
 from urllib3.exceptions import HTTPError as _HTTPError
 from urllib3.exceptions import InvalidHeader as _InvalidHeader
-from urllib3.exceptions import MaxRetryError
-from urllib3.exceptions import NewConnectionError
+from urllib3.exceptions import (
+    LocationValueError,
+    MaxRetryError,
+    NewConnectionError,
+    ProtocolError,
+)
 from urllib3.exceptions import ProxyError as _ProxyError
-from urllib3.exceptions import ProtocolError
-from urllib3.exceptions import ReadTimeoutError
+from urllib3.exceptions import ReadTimeoutError, ResponseError
 from urllib3.exceptions import SSLError as _SSLError
-from urllib3.exceptions import ResponseError
-from urllib3.exceptions import LocationValueError
+from urllib3.poolmanager import PoolManager, proxy_from_url
+from urllib3.response import HTTPResponse
+from urllib3.util import Timeout as TimeoutSauce
+from urllib3.util import parse_url
+from urllib3.util.retry import Retry
 
-from .models import Response
-from .compat import urlparse, basestring
-from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths,
-                    get_encoding_from_headers, prepend_scheme_if_needed,
-                    get_auth_from_url, urldefragauth, select_proxy)
-from .structures import CaseInsensitiveDict
-from .cookies import extract_cookies_to_jar
-from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
-                         ProxyError, RetryError, InvalidSchema, InvalidProxyURL,
-                         InvalidURL, InvalidHeader)
 from .auth import _basic_auth_str
+from .compat import basestring, urlparse
+from .cookies import extract_cookies_to_jar
+from .exceptions import (
+    ConnectionError,
+    ConnectTimeout,
+    InvalidHeader,
+    InvalidProxyURL,
+    InvalidSchema,
+    InvalidURL,
+    ProxyError,
+    ReadTimeout,
+    RetryError,
+    SSLError,
+)
+from .models import Response
+from .structures import CaseInsensitiveDict
+from .utils import (
+    DEFAULT_CA_BUNDLE_PATH,
+    extract_zipped_paths,
+    get_auth_from_url,
+    get_encoding_from_headers,
+    prepend_scheme_if_needed,
+    select_proxy,
+    urldefragauth,
+)
 
 try:
     from urllib3.contrib.socks import SOCKSProxyManager
 except ImportError:
+
     def SOCKSProxyManager(*args, **kwargs):
         raise InvalidSchema("Missing dependencies for SOCKS support.")
 
+
 DEFAULT_POOLBLOCK = False
 DEFAULT_POOLSIZE = 10
 DEFAULT_RETRIES = 0
 DEFAULT_POOL_TIMEOUT = None
 
 
-class BaseAdapter(object):
+class BaseAdapter:
     """The Base Transport Adapter"""
 
     def __init__(self):
-        super(BaseAdapter, self).__init__()
+        super().__init__()
 
-    def send(self, request, stream=False, timeout=None, verify=True,
-             cert=None, proxies=None):
+    def send(
+        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
+    ):
         """Sends PreparedRequest object. Returns Response object.
 
         :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
@@ -108,12 +124,22 @@ class HTTPAdapter(BaseAdapter):
       >>> a = requests.adapters.HTTPAdapter(max_retries=3)
       >>> s.mount('http://', a)
     """
 
-    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
-                 '_pool_block']
+    __attrs__ = [
+        "max_retries",
+        "config",
+        "_pool_connections",
+        "_pool_maxsize",
+        "_pool_block",
+    ]
 
-    def __init__(self, pool_connections=DEFAULT_POOLSIZE,
-                 pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
-                 pool_block=DEFAULT_POOLBLOCK):
+    def __init__(
+        self,
+        pool_connections=DEFAULT_POOLSIZE,
+        pool_maxsize=DEFAULT_POOLSIZE,
+        max_retries=DEFAULT_RETRIES,
+        pool_block=DEFAULT_POOLBLOCK,
+    ):
         if max_retries == DEFAULT_RETRIES:
             self.max_retries = Retry(0, read=False)
         else:
@@ -121,7 +147,7 @@ class HTTPAdapter(BaseAdapter):
         self.config = {}
         self.proxy_manager = {}
 
-        super(HTTPAdapter, self).__init__()
+        super().__init__()
 
         self._pool_connections = pool_connections
         self._pool_maxsize = pool_maxsize
@@ -141,10 +167,13 @@ class HTTPAdapter(BaseAdapter):
         for attr, value in state.items():
             setattr(self, attr, value)
 
-        self.init_poolmanager(self._pool_connections, self._pool_maxsize,
-                              block=self._pool_block)
+        self.init_poolmanager(
+            self._pool_connections, self._pool_maxsize, block=self._pool_block
+        )
 
-    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
+    def init_poolmanager(
+        self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs
+    ):
         """Initializes a urllib3 PoolManager.
 
         This method should not be called from user code, and is only
@@ -161,8 +190,13 @@ class HTTPAdapter(BaseAdapter):
         self._pool_maxsize = maxsize
         self._pool_block = block
 
-        self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
-                                       block=block, strict=True, **pool_kwargs)
+        self.poolmanager = PoolManager(
+            num_pools=connections,
+            maxsize=maxsize,
+            block=block,
+            strict=True,
+            **pool_kwargs,
+        )
 
     def proxy_manager_for(self, proxy, **proxy_kwargs):
         """Return urllib3 ProxyManager for the given proxy.
@@ -178,7 +212,7 @@ class HTTPAdapter(BaseAdapter):
         """
         if proxy in self.proxy_manager:
             manager = self.proxy_manager[proxy]
-        elif proxy.lower().startswith('socks'):
+        elif proxy.lower().startswith("socks"):
             username, password = get_auth_from_url(proxy)
             manager = self.proxy_manager[proxy] = SOCKSProxyManager(
                 proxy,
@@ -187,7 +221,7 @@ class HTTPAdapter(BaseAdapter):
                 num_pools=self._pool_connections,
                 maxsize=self._pool_maxsize,
                 block=self._pool_block,
-                **proxy_kwargs
+                **proxy_kwargs,
             )
         else:
             proxy_headers = self.proxy_headers(proxy)
@@ -197,7 +231,8 @@ class HTTPAdapter(BaseAdapter):
                 num_pools=self._pool_connections,
                 maxsize=self._pool_maxsize,
                 block=self._pool_block,
-                **proxy_kwargs)
+                **proxy_kwargs,
+            )
 
         return manager
 
@@ -213,7 +248,7 @@ class HTTPAdapter(BaseAdapter):
            to a CA bundle to use
         :param cert: The SSL certificate to verify.
         """
-        if url.lower().startswith('https') and verify:
+        if url.lower().startswith("https") and verify:
 
             cert_loc = None
 
@@ -225,17 +260,19 @@ class HTTPAdapter(BaseAdapter):
                 cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
 
             if not cert_loc or not os.path.exists(cert_loc):
-                raise IOError("Could not find a suitable TLS CA certificate bundle, "
-                              "invalid path: {}".format(cert_loc))
+                raise OSError(
+                    f"Could not find a suitable TLS CA certificate bundle, "
+                    f"invalid path: {cert_loc}"
+                )
 
-            conn.cert_reqs = 'CERT_REQUIRED'
+            conn.cert_reqs = "CERT_REQUIRED"
 
             if not os.path.isdir(cert_loc):
                 conn.ca_certs = cert_loc
             else:
                 conn.ca_cert_dir = cert_loc
         else:
-            conn.cert_reqs = 'CERT_NONE'
+            conn.cert_reqs = "CERT_NONE"
             conn.ca_certs = None
             conn.ca_cert_dir = None
 
@@ -247,11 +284,14 @@ class HTTPAdapter(BaseAdapter):
                 conn.cert_file = cert
                 conn.key_file = None
             if conn.cert_file and not os.path.exists(conn.cert_file):
-                raise IOError("Could not find the TLS certificate file, "
-                              "invalid path: {}".format(conn.cert_file))
+                raise OSError(
+                    f"Could not find the TLS certificate file, "
+                    f"invalid path: {conn.cert_file}"
+                )
             if conn.key_file and not os.path.exists(conn.key_file):
-                raise IOError("Could not find the TLS key file, "
-                              "invalid path: {}".format(conn.key_file))
+                raise OSError(
+                    f"Could not find the TLS key file, invalid path: {conn.key_file}"
+                )
 
     def build_response(self, req, resp):
         """Builds a :class:`Response <requests.Response>` object from a urllib3
@@ -266,10 +306,10 @@ class HTTPAdapter(BaseAdapter):
         response = Response()
 
         # Fallback to None if there's no status_code, for whatever reason.
-        response.status_code = getattr(resp, 'status', None)
+        response.status_code = getattr(resp, "status", None)
 
         # Make headers case-insensitive.
-        response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
+        response.headers = CaseInsensitiveDict(getattr(resp, "headers", {}))
 
         # Set encoding.
         response.encoding = get_encoding_from_headers(response.headers)
@@ -277,7 +317,7 @@ class HTTPAdapter(BaseAdapter):
         response.reason = response.raw.reason
 
         if isinstance(req.url, bytes):
-            response.url = req.url.decode('utf-8')
+            response.url = req.url.decode("utf-8")
         else:
             response.url = req.url
 
@@ -302,11 +342,13 @@ class HTTPAdapter(BaseAdapter):
         proxy = select_proxy(url, proxies)
 
         if proxy:
-            proxy = prepend_scheme_if_needed(proxy, 'http')
+            proxy = prepend_scheme_if_needed(proxy, "http")
             proxy_url = parse_url(proxy)
             if not proxy_url.host:
-                raise InvalidProxyURL("Please check proxy URL. It is malformed"
-                                      " and could be missing the host.")
+                raise InvalidProxyURL(
+                    "Please check proxy URL. It is malformed "
+                    "and could be missing the host."
+                )
             proxy_manager = self.proxy_manager_for(proxy)
             conn = proxy_manager.connection_from_url(url)
         else:
@@ -344,11 +386,11 @@ class HTTPAdapter(BaseAdapter):
         proxy = select_proxy(request.url, proxies)
         scheme = urlparse(request.url).scheme
 
-        is_proxied_http_request = (proxy and scheme != 'https')
+        is_proxied_http_request = proxy and scheme != "https"
         using_socks_proxy = False
         if proxy:
             proxy_scheme = urlparse(proxy).scheme.lower()
-            using_socks_proxy = proxy_scheme.startswith('socks')
+            using_socks_proxy = proxy_scheme.startswith("socks")
 
         url = request.path_url
         if is_proxied_http_request and not using_socks_proxy:
@@ -387,12 +429,13 @@ class HTTPAdapter(BaseAdapter):
         username, password = get_auth_from_url(proxy)
 
         if username:
-            headers['Proxy-Authorization'] = _basic_auth_str(username,
-                                                             password)
+            headers["Proxy-Authorization"] = _basic_auth_str(username, password)
 
         return headers
 
-    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
+    def send(
+        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
+    ):
         """Sends PreparedRequest object. Returns Response object.
 
         :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
@@ -416,20 +459,26 @@ class HTTPAdapter(BaseAdapter):
 
         self.cert_verify(conn, request.url, verify, cert)
         url = self.request_url(request, proxies)
-        self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
+        self.add_headers(
+            request,
+            stream=stream,
+            timeout=timeout,
+            verify=verify,
+            cert=cert,
+            proxies=proxies,
+        )
 
-        chunked = not (request.body is None or 'Content-Length' in request.headers)
+        chunked = not (request.body is None or "Content-Length" in request.headers)
 
         if isinstance(timeout, tuple):
             try:
                 connect, read = timeout
                 timeout = TimeoutSauce(connect=connect, read=read)
-            except ValueError as e:
-                # this may raise a string formatting error.
-                err = ("Invalid timeout {}. Pass a (connect, read) "
-                       "timeout tuple, or a single float to set "
-                       "both timeouts to the same value".format(timeout))
-                raise ValueError(err)
+            except ValueError:
+                raise ValueError(
+                    f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
+                    f"or a single float to set both timeouts to the same value."
+                )
         elif isinstance(timeout, TimeoutSauce):
             pass
         else:
@@ -447,22 +496,24 @@ class HTTPAdapter(BaseAdapter):
                     preload_content=False,
                     decode_content=False,
                     retries=self.max_retries,
-                    timeout=timeout
+                    timeout=timeout,
                 )
 
             # Send the request.
             else:
-                if hasattr(conn, 'proxy_pool'):
+                if hasattr(conn, "proxy_pool"):
                     conn = conn.proxy_pool
 
                 low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
 
                 try:
-                    skip_host = 'Host' in request.headers
-                    low_conn.putrequest(request.method,
-                                        url,
-                                        skip_accept_encoding=True,
-                                        skip_host=skip_host)
+                    skip_host = "Host" in request.headers
+                    low_conn.putrequest(
+                        request.method,
+                        url,
+                        skip_accept_encoding=True,
+                        skip_host=skip_host,
+                    )
 
                     for header, value in request.headers.items():
                         low_conn.putheader(header, value)
@@ -470,34 +521,29 @@ class HTTPAdapter(BaseAdapter):
 
                     low_conn.endheaders()
 
                     for i in request.body:
-                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
-                        low_conn.send(b'\r\n')
+                        low_conn.send(hex(len(i))[2:].encode("utf-8"))
+                        low_conn.send(b"\r\n")
                         low_conn.send(i)
-                        low_conn.send(b'\r\n')
-                    low_conn.send(b'0\r\n\r\n')
+                        low_conn.send(b"\r\n")
+                    low_conn.send(b"0\r\n\r\n")
 
                     # Receive the response from the server
-                    try:
-                        # For Python 2.7, use buffering of HTTP responses
-                        r = low_conn.getresponse(buffering=True)
-                    except TypeError:
-                        # For compatibility with Python 3.3+
-                        r = low_conn.getresponse()
+                    r = low_conn.getresponse()
 
                     resp = HTTPResponse.from_httplib(
                         r,
                         pool=conn,
                         connection=low_conn,
                         preload_content=False,
-                        decode_content=False
+                        decode_content=False,
                     )
-                except:
+                except Exception:
                     # If we hit any problems here, clean up the connection.
-                    # Then, reraise so that we can handle the actual exception.
+                    # Then, raise so that we can handle the actual exception.
                     low_conn.close()
                     raise
 
-        except (ProtocolError, socket.error) as err:
+        except (ProtocolError, OSError) as err:
             raise ConnectionError(err, request=request)
 
         except MaxRetryError as e:
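
None of the reformatting above changes the adapter's public surface. A short sketch of typical use, in the spirit of the docstring example earlier in the file (example.org is a placeholder):

    import requests
    from requests.adapters import HTTPAdapter

    s = requests.Session()
    # Retry failed connections up to 3 times; pool sizes fall back to the defaults above.
    s.mount("https://", HTTPAdapter(max_retries=3))

    # timeout: a single float, or the (connect, read) tuple unpacked in send() above;
    # anything else now raises the clearer ValueError from this diff.
    r = s.get("https://example.org", timeout=(3.05, 27))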

requests/api.py

@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 """
 requests.api
 ~~~~~~~~~~~~
@@ -72,7 +70,7 @@ def get(url, params=None, **kwargs):
     :rtype: requests.Response
     """
 
-    return request('get', url, params=params, **kwargs)
+    return request("get", url, params=params, **kwargs)
 
 
 def options(url, **kwargs):
@@ -84,7 +82,7 @@ def options(url, **kwargs):
     :rtype: requests.Response
     """
 
-    return request('options', url, **kwargs)
+    return request("options", url, **kwargs)
 
 
 def head(url, **kwargs):
@@ -98,8 +96,8 @@ def head(url, **kwargs):
     :rtype: requests.Response
     """
 
-    kwargs.setdefault('allow_redirects', False)
-    return request('head', url, **kwargs)
+    kwargs.setdefault("allow_redirects", False)
+    return request("head", url, **kwargs)
 
 
 def post(url, data=None, json=None, **kwargs):
@@ -114,7 +112,7 @@ def post(url, data=None, json=None, **kwargs):
     :rtype: requests.Response
     """
 
-    return request('post', url, data=data, json=json, **kwargs)
+    return request("post", url, data=data, json=json, **kwargs)
 
 
 def put(url, data=None, **kwargs):
@@ -129,7 +127,7 @@ def put(url, data=None, **kwargs):
     :rtype: requests.Response
     """
 
-    return request('put', url, data=data, **kwargs)
+    return request("put", url, data=data, **kwargs)
 
 
 def patch(url, data=None, **kwargs):
@@ -144,7 +142,7 @@ def patch(url, data=None, **kwargs):
     :rtype: requests.Response
     """
 
-    return request('patch', url, data=data, **kwargs)
+    return request("patch", url, data=data, **kwargs)
 
 
 def delete(url, **kwargs):
@@ -156,4 +154,4 @@ def delete(url, **kwargs):
     :rtype: requests.Response
     """
 
-    return request('delete', url, **kwargs)
+    return request("delete", url, **kwargs)
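
All seven helpers are thin wrappers over request(); the one behavioral detail visible in these hunks is that head() defaults allow_redirects to False. For instance:

    import requests

    r = requests.head("https://example.org")                         # redirects off by default
    r = requests.head("https://example.org", allow_redirects=True)   # opt back in
    r = requests.post("https://example.org", json={"key": "value"})  # json= sets body and Content-Type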

requests/auth.py

@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 """
 requests.auth
 ~~~~~~~~~~~~~
@@ -7,22 +5,21 @@ requests.auth
 This module contains the authentication handlers for Requests.
 """
 
+import hashlib
 import os
 import re
-import time
-import hashlib
 import threading
+import time
 import warnings
-
 from base64 import b64encode
 
-from .compat import urlparse, str, basestring
-from .cookies import extract_cookies_to_jar
 from ._internal_utils import to_native_string
+from .compat import basestring, str, urlparse
+from .cookies import extract_cookies_to_jar
 from .utils import parse_dict_header
 
-CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
-CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
+CONTENT_TYPE_FORM_URLENCODED = "application/x-www-form-urlencoded"
+CONTENT_TYPE_MULTI_PART = "multipart/form-data"
 
 
 def _basic_auth_str(username, password):
@@ -57,23 +54,23 @@ def _basic_auth_str(username, password):
     # -- End Removal --
 
     if isinstance(username, str):
-        username = username.encode('latin1')
+        username = username.encode("latin1")
 
     if isinstance(password, str):
-        password = password.encode('latin1')
+        password = password.encode("latin1")
 
-    authstr = 'Basic ' + to_native_string(
-        b64encode(b':'.join((username, password))).strip()
+    authstr = "Basic " + to_native_string(
+        b64encode(b":".join((username, password))).strip()
     )
 
     return authstr
 
 
-class AuthBase(object):
+class AuthBase:
     """Base class that all auth implementations derive from"""
 
     def __call__(self, r):
-        raise NotImplementedError('Auth hooks must be callable.')
+        raise NotImplementedError("Auth hooks must be callable.")
 
 
 class HTTPBasicAuth(AuthBase):
@@ -84,16 +81,18 @@ class HTTPBasicAuth(AuthBase):
         self.password = password
 
     def __eq__(self, other):
-        return all([
-            self.username == getattr(other, 'username', None),
-            self.password == getattr(other, 'password', None)
-        ])
+        return all(
+            [
+                self.username == getattr(other, "username", None),
+                self.password == getattr(other, "password", None),
+            ]
+        )
 
     def __ne__(self, other):
         return not self == other
 
     def __call__(self, r):
-        r.headers['Authorization'] = _basic_auth_str(self.username, self.password)
+        r.headers["Authorization"] = _basic_auth_str(self.username, self.password)
         return r
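
For reference, _basic_auth_str boils down to latin1-encoding the credentials and base64-encoding "user:pass"; the same header built by hand:

    from base64 import b64encode

    username, password = "user", "pass"
    token = b64encode(b":".join((username.encode("latin1"), password.encode("latin1"))))
    assert "Basic " + token.decode("ascii") == "Basic dXNlcjpwYXNz"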
@@ -101,7 +100,7 @@ class HTTPProxyAuth(HTTPBasicAuth):
     """Attaches HTTP Proxy Authentication to a given Request object."""
 
     def __call__(self, r):
-        r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)
+        r.headers["Proxy-Authorization"] = _basic_auth_str(self.username, self.password)
         return r
 
 
@@ -116,9 +115,9 @@ class HTTPDigestAuth(AuthBase):
 
     def init_per_thread_state(self):
         # Ensure state is initialized just once per-thread
-        if not hasattr(self._thread_local, 'init'):
+        if not hasattr(self._thread_local, "init"):
             self._thread_local.init = True
-            self._thread_local.last_nonce = ''
+            self._thread_local.last_nonce = ""
             self._thread_local.nonce_count = 0
             self._thread_local.chal = {}
             self._thread_local.pos = None
@@ -129,44 +128,52 @@ class HTTPDigestAuth(AuthBase):
         :rtype: str
         """
 
-        realm = self._thread_local.chal['realm']
-        nonce = self._thread_local.chal['nonce']
-        qop = self._thread_local.chal.get('qop')
-        algorithm = self._thread_local.chal.get('algorithm')
-        opaque = self._thread_local.chal.get('opaque')
+        realm = self._thread_local.chal["realm"]
+        nonce = self._thread_local.chal["nonce"]
+        qop = self._thread_local.chal.get("qop")
+        algorithm = self._thread_local.chal.get("algorithm")
+        opaque = self._thread_local.chal.get("opaque")
         hash_utf8 = None
 
         if algorithm is None:
-            _algorithm = 'MD5'
+            _algorithm = "MD5"
         else:
             _algorithm = algorithm.upper()
         # lambdas assume digest modules are imported at the top level
-        if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':
+        if _algorithm == "MD5" or _algorithm == "MD5-SESS":
+
             def md5_utf8(x):
                 if isinstance(x, str):
-                    x = x.encode('utf-8')
+                    x = x.encode("utf-8")
                 return hashlib.md5(x).hexdigest()
+
             hash_utf8 = md5_utf8
-        elif _algorithm == 'SHA':
+        elif _algorithm == "SHA":
+
             def sha_utf8(x):
                 if isinstance(x, str):
-                    x = x.encode('utf-8')
+                    x = x.encode("utf-8")
                 return hashlib.sha1(x).hexdigest()
+
             hash_utf8 = sha_utf8
-        elif _algorithm == 'SHA-256':
+        elif _algorithm == "SHA-256":
+
             def sha256_utf8(x):
                 if isinstance(x, str):
-                    x = x.encode('utf-8')
+                    x = x.encode("utf-8")
                 return hashlib.sha256(x).hexdigest()
+
             hash_utf8 = sha256_utf8
-        elif _algorithm == 'SHA-512':
+        elif _algorithm == "SHA-512":
+
             def sha512_utf8(x):
                 if isinstance(x, str):
-                    x = x.encode('utf-8')
+                    x = x.encode("utf-8")
                 return hashlib.sha512(x).hexdigest()
+
             hash_utf8 = sha512_utf8
 
-        KD = lambda s, d: hash_utf8("%s:%s" % (s, d))
+        KD = lambda s, d: hash_utf8(f"{s}:{d}")  # noqa:E731
 
         if hash_utf8 is None:
             return None
@@ -177,10 +184,10 @@ class HTTPDigestAuth(AuthBase):
         #: path is request-uri defined in RFC 2616 which should not be empty
         path = p_parsed.path or "/"
         if p_parsed.query:
-            path += '?' + p_parsed.query
+            path += f"?{p_parsed.query}"
 
-        A1 = '%s:%s:%s' % (self.username, realm, self.password)
-        A2 = '%s:%s' % (method, path)
+        A1 = f"{self.username}:{realm}:{self.password}"
+        A2 = f"{method}:{path}"
 
         HA1 = hash_utf8(A1)
         HA2 = hash_utf8(A2)
@@ -189,22 +196,20 @@ class HTTPDigestAuth(AuthBase):
             self._thread_local.nonce_count += 1
         else:
             self._thread_local.nonce_count = 1
-        ncvalue = '%08x' % self._thread_local.nonce_count
-        s = str(self._thread_local.nonce_count).encode('utf-8')
-        s += nonce.encode('utf-8')
-        s += time.ctime().encode('utf-8')
+        ncvalue = f"{self._thread_local.nonce_count:08x}"
+        s = str(self._thread_local.nonce_count).encode("utf-8")
+        s += nonce.encode("utf-8")
+        s += time.ctime().encode("utf-8")
         s += os.urandom(8)
 
-        cnonce = (hashlib.sha1(s).hexdigest()[:16])
-        if _algorithm == 'MD5-SESS':
-            HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))
+        cnonce = hashlib.sha1(s).hexdigest()[:16]
+        if _algorithm == "MD5-SESS":
+            HA1 = hash_utf8(f"{HA1}:{nonce}:{cnonce}")
 
         if not qop:
-            respdig = KD(HA1, "%s:%s" % (nonce, HA2))
-        elif qop == 'auth' or 'auth' in qop.split(','):
-            noncebit = "%s:%s:%s:%s:%s" % (
-                nonce, ncvalue, cnonce, 'auth', HA2
-            )
+            respdig = KD(HA1, f"{nonce}:{HA2}")
+        elif qop == "auth" or "auth" in qop.split(","):
+            noncebit = f"{nonce}:{ncvalue}:{cnonce}:auth:{HA2}"
             respdig = KD(HA1, noncebit)
         else:
             # XXX handle auth-int.
@@ -213,18 +218,20 @@ class HTTPDigestAuth(AuthBase):
         self._thread_local.last_nonce = nonce
 
         # XXX should the partial digests be encoded too?
-        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
-               'response="%s"' % (self.username, realm, nonce, path, respdig)
+        base = (
+            f'username="{self.username}", realm="{realm}", nonce="{nonce}", '
+            f'uri="{path}", response="{respdig}"'
+        )
         if opaque:
-            base += ', opaque="%s"' % opaque
+            base += f', opaque="{opaque}"'
         if algorithm:
-            base += ', algorithm="%s"' % algorithm
+            base += f', algorithm="{algorithm}"'
         if entdig:
-            base += ', digest="%s"' % entdig
+            base += f', digest="{entdig}"'
         if qop:
-            base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)
+            base += f', qop="auth", nc={ncvalue}, cnonce="{cnonce}"'
 
-        return 'Digest %s' % (base)
+        return f"Digest {base}"
 
     def handle_redirect(self, r, **kwargs):
         """Reset num_401_calls counter on redirects."""
@@ -248,13 +255,13 @@ class HTTPDigestAuth(AuthBase):
             # Rewind the file position indicator of the body to where
             # it was to resend the request.
             r.request.body.seek(self._thread_local.pos)
-        s_auth = r.headers.get('www-authenticate', '')
+        s_auth = r.headers.get("www-authenticate", "")
 
-        if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2:
+        if "digest" in s_auth.lower() and self._thread_local.num_401_calls < 2:
 
             self._thread_local.num_401_calls += 1
-            pat = re.compile(r'digest ', flags=re.IGNORECASE)
-            self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))
+            pat = re.compile(r"digest ", flags=re.IGNORECASE)
+            self._thread_local.chal = parse_dict_header(pat.sub("", s_auth, count=1))
 
             # Consume content and release the original connection
             # to allow our new request to reuse the same one.
@@ -264,8 +271,9 @@ class HTTPDigestAuth(AuthBase):
             extract_cookies_to_jar(prep._cookies, r.request, r.raw)
             prep.prepare_cookies(prep._cookies)
 
-            prep.headers['Authorization'] = self.build_digest_header(
-                prep.method, prep.url)
+            prep.headers["Authorization"] = self.build_digest_header(
+                prep.method, prep.url
+            )
             _r = r.connection.send(prep, **kwargs)
             _r.history.append(r)
             _r.request = prep
@@ -280,7 +288,7 @@ class HTTPDigestAuth(AuthBase):
         self.init_per_thread_state()
         # If we have a saved nonce, skip the 401
        if self._thread_local.last_nonce:
-            r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
+            r.headers["Authorization"] = self.build_digest_header(r.method, r.url)
         try:
             self._thread_local.pos = r.body.tell()
         except AttributeError:
@@ -289,17 +297,19 @@ class HTTPDigestAuth(AuthBase):
             # file position of the previous body. Ensure it's set to
             # None.
             self._thread_local.pos = None
-        r.register_hook('response', self.handle_401)
-        r.register_hook('response', self.handle_redirect)
+        r.register_hook("response", self.handle_401)
+        r.register_hook("response", self.handle_redirect)
         self._thread_local.num_401_calls = 1
 
         return r
 
     def __eq__(self, other):
-        return all([
-            self.username == getattr(other, 'username', None),
-            self.password == getattr(other, 'password', None)
-        ])
+        return all(
+            [
+                self.username == getattr(other, "username", None),
+                self.password == getattr(other, "password", None),
+            ]
+        )
 
     def __ne__(self, other):
         return not self == other
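
The digest handler's interface is untouched by the reformat; usage stays the same, e.g. (placeholder endpoint and credentials):

    import requests
    from requests.auth import HTTPDigestAuth

    r = requests.get(
        "https://httpbin.org/digest-auth/auth/user/pass",
        auth=HTTPDigestAuth("user", "pass"),
    )
    print(r.status_code)  # 200 once the 401 challenge is answered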

requests/certs.py

@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
 
 """
 requests.certs
@@ -14,5 +13,5 @@ packaged CA bundle.
 """
 from certifi import where
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     print(where())
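
Thanks to that __main__ guard the module doubles as a tiny CLI (python -m requests.certs); the same lookup from code:

    from requests.certs import where

    print(where())  # e.g. .../site-packages/certifi/cacert.pem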

requests/compat.py

@@ -1,11 +1,10 @@
-# -*- coding: utf-8 -*-
-
 """
 requests.compat
 ~~~~~~~~~~~~~~~
 
-This module handles import compatibility issues between Python 2 and
-Python 3.
+This module previously handled import compatibility issues
+between Python 2 and Python 3. It remains for backwards
+compatibility until the next major version.
 """
 
 try:
@@ -23,59 +22,58 @@ import sys
 _ver = sys.version_info
 
 #: Python 2.x?
-is_py2 = (_ver[0] == 2)
+is_py2 = _ver[0] == 2
 
 #: Python 3.x?
-is_py3 = (_ver[0] == 3)
+is_py3 = _ver[0] == 3
 
+# json/simplejson module import resolution
 has_simplejson = False
 try:
     import simplejson as json
+
     has_simplejson = True
 except ImportError:
     import json
 
-# ---------
-# Specifics
-# ---------
+if has_simplejson:
+    from simplejson import JSONDecodeError
+else:
+    from json import JSONDecodeError
 
-if is_py2:
-    from urllib import (
-        quote, unquote, quote_plus, unquote_plus, urlencode, getproxies,
-        proxy_bypass, proxy_bypass_environment, getproxies_environment)
-    from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
-    from urllib2 import parse_http_list
-    import cookielib
-    from Cookie import Morsel
-    from StringIO import StringIO
-    # Keep OrderedDict for backwards compatibility.
-    from collections import Callable, Mapping, MutableMapping, OrderedDict
-
-    builtin_str = str
-    bytes = str
-    str = unicode
-    basestring = basestring
-    numeric_types = (int, long, float)
-    integer_types = (int, long)
-    JSONDecodeError = ValueError
-
-elif is_py3:
-    from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
-    from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment
-    from http import cookiejar as cookielib
-    from http.cookies import Morsel
-    from io import StringIO
-    # Keep OrderedDict for backwards compatibility.
-    from collections import OrderedDict
-    from collections.abc import Callable, Mapping, MutableMapping
-    if has_simplejson:
-        from simplejson import JSONDecodeError
-    else:
-        from json import JSONDecodeError
-
-    builtin_str = str
-    str = str
-    bytes = bytes
-    basestring = (str, bytes)
-    numeric_types = (int, float)
-    integer_types = (int,)
+# Keep OrderedDict for backwards compatibility.
+from collections import OrderedDict
+from collections.abc import Callable, Mapping, MutableMapping
+from http import cookiejar as cookielib
+from http.cookies import Morsel
+from io import StringIO
+
+# --------------
+# Legacy Imports
+# --------------
+from urllib.parse import (
+    quote,
+    quote_plus,
+    unquote,
+    unquote_plus,
+    urldefrag,
+    urlencode,
+    urljoin,
+    urlparse,
+    urlsplit,
+    urlunparse,
+)
+from urllib.request import (
+    getproxies,
+    getproxies_environment,
+    parse_http_list,
+    proxy_bypass,
+    proxy_bypass_environment,
+)
+
+builtin_str = str
+str = str
+bytes = bytes
+basestring = (str, bytes)
+numeric_types = (int, float)
+integer_types = (int,)
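
With the Python 2 branch gone, these names are plain re-exports, so downstream imports keep working unchanged; for example:

    from requests.compat import JSONDecodeError, basestring, urlparse

    print(urlparse("https://example.org/path").netloc)  # "example.org"
    assert isinstance("text", basestring)  # basestring is now the tuple (str, bytes)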

requests/cookies.py

@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 """
 requests.cookies
 ~~~~~~~~~~~~~~~~
@@ -9,12 +7,12 @@ Compatibility code to be able to use `cookielib.CookieJar` with requests.
 requests.utils imports from here, so be careful with imports.
 """
 
+import calendar
 import copy
 import time
-import calendar
 
 from ._internal_utils import to_native_string
-from .compat import cookielib, urlparse, urlunparse, Morsel, MutableMapping
+from .compat import Morsel, MutableMapping, cookielib, urlparse, urlunparse
 
 try:
     import threading
@@ -22,7 +20,7 @@ except ImportError:
     import dummy_threading as threading
 
 
-class MockRequest(object):
+class MockRequest:
     """Wraps a `requests.Request` to mimic a `urllib2.Request`.
 
     The code in `cookielib.CookieJar` expects this interface in order to correctly
@@ -51,16 +49,22 @@ class MockRequest(object):
     def get_full_url(self):
         # Only return the response's URL if the user hadn't set the Host
         # header
-        if not self._r.headers.get('Host'):
+        if not self._r.headers.get("Host"):
             return self._r.url
         # If they did set it, retrieve it and reconstruct the expected domain
-        host = to_native_string(self._r.headers['Host'], encoding='utf-8')
+        host = to_native_string(self._r.headers["Host"], encoding="utf-8")
         parsed = urlparse(self._r.url)
         # Reconstruct the URL as we expect it
-        return urlunparse([
-            parsed.scheme, host, parsed.path, parsed.params, parsed.query,
-            parsed.fragment
-        ])
+        return urlunparse(
+            [
+                parsed.scheme,
+                host,
+                parsed.path,
+                parsed.params,
+                parsed.query,
+                parsed.fragment,
+            ]
+        )
 
     def is_unverifiable(self):
         return True
@@ -73,7 +77,9 @@ class MockRequest(object):
 
     def add_header(self, key, val):
         """cookielib has no legitimate use for this method; add it back if you find one."""
-        raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")
+        raise NotImplementedError(
+            "Cookie headers should be added with add_unredirected_header()"
+        )
 
     def add_unredirected_header(self, name, value):
         self._new_headers[name] = value
@@ -94,7 +100,7 @@ class MockRequest(object):
         return self.get_host()
 
 
-class MockResponse(object):
+class MockResponse:
     """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.
 
     ...what? Basically, expose the parsed HTTP headers from the server response
@@ -122,8 +128,7 @@ def extract_cookies_to_jar(jar, request, response):
     :param request: our own requests.Request object
     :param response: urllib3.HTTPResponse object
     """
-    if not (hasattr(response, '_original_response') and
-            response._original_response):
+    if not (hasattr(response, "_original_response") and response._original_response):
         return
     # the _original_response field is the wrapped httplib.HTTPResponse object,
     req = MockRequest(request)
@@ -140,7 +145,7 @@ def get_cookie_header(jar, request):
     """
     r = MockRequest(request)
     jar.add_cookie_header(r)
-    return r.get_new_headers().get('Cookie')
+    return r.get_new_headers().get("Cookie")
 
 
 def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
@@ -205,7 +210,9 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
         """
         # support client code that unsets cookies by assignment of a None value:
         if value is None:
-            remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
+            remove_cookie_by_name(
+                self, name, domain=kwargs.get("domain"), path=kwargs.get("path")
+            )
             return
 
         if isinstance(value, Morsel):
@@ -305,16 +312,15 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
         """
         dictionary = {}
         for cookie in iter(self):
-            if (
-                (domain is None or cookie.domain == domain) and
-                (path is None or cookie.path == path)
+            if (domain is None or cookie.domain == domain) and (
+                path is None or cookie.path == path
             ):
                 dictionary[cookie.name] = cookie.value
         return dictionary
 
     def __contains__(self, name):
         try:
-            return super(RequestsCookieJar, self).__contains__(name)
+            return super().__contains__(name)
         except CookieConflictError:
             return True
@@ -341,9 +347,13 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
         remove_cookie_by_name(self, name)
 
     def set_cookie(self, cookie, *args, **kwargs):
-        if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
-            cookie.value = cookie.value.replace('\\"', '')
-        return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)
+        if (
+            hasattr(cookie.value, "startswith")
+            and cookie.value.startswith('"')
+            and cookie.value.endswith('"')
+        ):
+            cookie.value = cookie.value.replace('\\"', "")
+        return super().set_cookie(cookie, *args, **kwargs)
 
     def update(self, other):
         """Updates this jar with cookies from another CookieJar or dict-like"""
@@ -351,7 +361,7 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
             for cookie in other:
                 self.set_cookie(copy.copy(cookie))
         else:
-            super(RequestsCookieJar, self).update(other)
+            super().update(other)
 
     def _find(self, name, domain=None, path=None):
         """Requests uses this method internally to get cookie values.
@@ -371,7 +381,7 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
                 if path is None or cookie.path == path:
                     return cookie.value
 
-        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
+        raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}")
 
     def _find_no_duplicates(self, name, domain=None, path=None):
         """Both ``__get_item__`` and ``get`` call this function: it's never
@@ -390,25 +400,29 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
             if cookie.name == name:
                 if domain is None or cookie.domain == domain:
                     if path is None or cookie.path == path:
-                        if toReturn is not None:  # if there are multiple cookies that meet passed in criteria
-                            raise CookieConflictError('There are multiple cookies with name, %r' % (name))
-                        toReturn = cookie.value  # we will eventually return this as long as no cookie conflict
+                        if toReturn is not None:
+                            # if there are multiple cookies that meet passed in criteria
+                            raise CookieConflictError(
+                                f"There are multiple cookies with name, {name!r}"
+                            )
+                        # we will eventually return this as long as no cookie conflict
+                        toReturn = cookie.value
 
         if toReturn:
             return toReturn
-        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
+        raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}")
 
     def __getstate__(self):
         """Unlike a normal CookieJar, this class is pickleable."""
         state = self.__dict__.copy()
         # remove the unpickleable RLock object
-        state.pop('_cookies_lock')
+        state.pop("_cookies_lock")
         return state
 
     def __setstate__(self, state):
         """Unlike a normal CookieJar, this class is pickleable."""
         self.__dict__.update(state)
-        if '_cookies_lock' not in self.__dict__:
+        if "_cookies_lock" not in self.__dict__:
             self._cookies_lock = threading.RLock()
 
     def copy(self):
@ -427,7 +441,7 @@ def _copy_cookie_jar(jar):
if jar is None: if jar is None:
return None return None
if hasattr(jar, 'copy'): if hasattr(jar, "copy"):
# We're dealing with an instance of RequestsCookieJar # We're dealing with an instance of RequestsCookieJar
return jar.copy() return jar.copy()
# We're dealing with a generic CookieJar instance # We're dealing with a generic CookieJar instance
@ -445,31 +459,32 @@ def create_cookie(name, value, **kwargs):
and sent on every request (this is sometimes called a "supercookie"). and sent on every request (this is sometimes called a "supercookie").
""" """
result = { result = {
'version': 0, "version": 0,
'name': name, "name": name,
'value': value, "value": value,
'port': None, "port": None,
'domain': '', "domain": "",
'path': '/', "path": "/",
'secure': False, "secure": False,
'expires': None, "expires": None,
'discard': True, "discard": True,
'comment': None, "comment": None,
'comment_url': None, "comment_url": None,
'rest': {'HttpOnly': None}, "rest": {"HttpOnly": None},
'rfc2109': False, "rfc2109": False,
} }
badargs = set(kwargs) - set(result) badargs = set(kwargs) - set(result)
if badargs: if badargs:
err = 'create_cookie() got unexpected keyword arguments: %s' raise TypeError(
raise TypeError(err % list(badargs)) f"create_cookie() got unexpected keyword arguments: {list(badargs)}"
)
result.update(kwargs) result.update(kwargs)
result['port_specified'] = bool(result['port']) result["port_specified"] = bool(result["port"])
result['domain_specified'] = bool(result['domain']) result["domain_specified"] = bool(result["domain"])
result['domain_initial_dot'] = result['domain'].startswith('.') result["domain_initial_dot"] = result["domain"].startswith(".")
result['path_specified'] = bool(result['path']) result["path_specified"] = bool(result["path"])
return cookielib.Cookie(**result) return cookielib.Cookie(**result)
@ -478,30 +493,28 @@ def morsel_to_cookie(morsel):
"""Convert a Morsel object into a Cookie containing the one k/v pair.""" """Convert a Morsel object into a Cookie containing the one k/v pair."""
expires = None expires = None
if morsel['max-age']: if morsel["max-age"]:
try: try:
expires = int(time.time() + int(morsel['max-age'])) expires = int(time.time() + int(morsel["max-age"]))
except ValueError: except ValueError:
raise TypeError('max-age: %s must be integer' % morsel['max-age']) raise TypeError(f"max-age: {morsel['max-age']} must be integer")
elif morsel['expires']: elif morsel["expires"]:
time_template = '%a, %d-%b-%Y %H:%M:%S GMT' time_template = "%a, %d-%b-%Y %H:%M:%S GMT"
expires = calendar.timegm( expires = calendar.timegm(time.strptime(morsel["expires"], time_template))
time.strptime(morsel['expires'], time_template)
)
return create_cookie( return create_cookie(
comment=morsel['comment'], comment=morsel["comment"],
comment_url=bool(morsel['comment']), comment_url=bool(morsel["comment"]),
discard=False, discard=False,
domain=morsel['domain'], domain=morsel["domain"],
expires=expires, expires=expires,
name=morsel.key, name=morsel.key,
path=morsel['path'], path=morsel["path"],
port=None, port=None,
rest={'HttpOnly': morsel['httponly']}, rest={"HttpOnly": morsel["httponly"]},
rfc2109=False, rfc2109=False,
secure=bool(morsel['secure']), secure=bool(morsel["secure"]),
value=morsel.value, value=morsel.value,
version=morsel['version'] or 0, version=morsel["version"] or 0,
) )
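As a quick illustration, converting a stdlib Morsel (values invented):

    from http.cookies import SimpleCookie
    from requests.cookies import morsel_to_cookie

    sc = SimpleCookie()
    sc["sid"] = "abc"
    sc["sid"]["max-age"] = "3600"

    cookie = morsel_to_cookie(sc["sid"])
    print(cookie.name, cookie.expires)  # sid <current unix time + 3600>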
@ -534,11 +547,10 @@ def merge_cookies(cookiejar, cookies):
:rtype: CookieJar :rtype: CookieJar
""" """
if not isinstance(cookiejar, cookielib.CookieJar): if not isinstance(cookiejar, cookielib.CookieJar):
raise ValueError('You can only merge into CookieJar') raise ValueError("You can only merge into CookieJar")
if isinstance(cookies, dict): if isinstance(cookies, dict):
cookiejar = cookiejar_from_dict( cookiejar = cookiejar_from_dict(cookies, cookiejar=cookiejar, overwrite=False)
cookies, cookiejar=cookiejar, overwrite=False)
elif isinstance(cookies, cookielib.CookieJar): elif isinstance(cookies, cookielib.CookieJar):
try: try:
cookiejar.update(cookies) cookiejar.update(cookies)
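Because the dict branch passes overwrite=False, cookies already present in the jar win over incoming dict entries. A small sketch with invented names:

    from requests.cookies import cookiejar_from_dict, merge_cookies

    jar = cookiejar_from_dict({"a": "1"})
    merged = merge_cookies(jar, {"a": "ignored", "b": "2"})
    print(sorted((c.name, c.value) for c in merged))  # [('a', '1'), ('b', '2')]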


@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
""" """
requests.exceptions requests.exceptions
~~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~
@ -18,13 +16,12 @@ class RequestException(IOError):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
"""Initialize RequestException with `request` and `response` objects.""" """Initialize RequestException with `request` and `response` objects."""
response = kwargs.pop('response', None) response = kwargs.pop("response", None)
self.response = response self.response = response
self.request = kwargs.pop('request', None) self.request = kwargs.pop("request", None)
if (response is not None and not self.request and if response is not None and not self.request and hasattr(response, "request"):
hasattr(response, 'request')):
self.request = self.response.request self.request = self.response.request
super(RequestException, self).__init__(*args, **kwargs) super().__init__(*args, **kwargs)
class InvalidJSONError(RequestException): class InvalidJSONError(RequestException):
@ -34,6 +31,16 @@ class InvalidJSONError(RequestException):
class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError): class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
"""Couldn't decode the text into json""" """Couldn't decode the text into json"""
def __init__(self, *args, **kwargs):
"""
Construct the JSONDecodeError instance first with all
args. Then use its args to construct the IOError so that
the JSON-specific args aren't used as IOError-specific args
and the error message from JSONDecodeError is preserved.
"""
CompatJSONDecodeError.__init__(self, *args)
InvalidJSONError.__init__(self, *self.args, **kwargs)
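The double __init__ keeps the error usable from both hierarchies. A sketch, assuming the stdlib json module is the compat backend (simplejson takes its place when installed):

    import json
    from requests.exceptions import InvalidJSONError, JSONDecodeError

    err = JSONDecodeError("Expecting value", "not json", 0)
    assert isinstance(err, json.JSONDecodeError)  # keeps msg, doc and pos
    assert isinstance(err, InvalidJSONError)      # requests hierarchy, an IOError
    print(err.msg, err.pos)                       # Expecting value 0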
class HTTPError(RequestException): class HTTPError(RequestException):
"""An HTTP error occurred.""" """An HTTP error occurred."""
@ -118,6 +125,7 @@ class RetryError(RequestException):
class UnrewindableBodyError(RequestException): class UnrewindableBodyError(RequestException):
"""Requests encountered an error when trying to rewind a body.""" """Requests encountered an error when trying to rewind a body."""
# Warnings # Warnings


@ -1,10 +1,9 @@
"""Module containing bug report helper(s).""" """Module containing bug report helper(s)."""
from __future__ import print_function
import json import json
import platform import platform
import sys
import ssl import ssl
import sys
import idna import idna
import urllib3 import urllib3
@ -28,16 +27,16 @@ except ImportError:
OpenSSL = None OpenSSL = None
cryptography = None cryptography = None
else: else:
import OpenSSL
import cryptography import cryptography
import OpenSSL
def _implementation(): def _implementation():
"""Return a dict with the Python implementation and version. """Return a dict with the Python implementation and version.
Provide both the name and the version of the Python implementation Provide both the name and the version of the Python implementation
currently running. For example, on CPython 2.7.5 it will return currently running. For example, on CPython 3.10.3 it will return
{'name': 'CPython', 'version': '2.7.5'}. {'name': 'CPython', 'version': '3.10.3'}.
This function works best on CPython and PyPy: in particular, it probably This function works best on CPython and PyPy: in particular, it probably
doesn't work for Jython or IronPython. Future investigation should be done doesn't work for Jython or IronPython. Future investigation should be done
@ -45,83 +44,83 @@ def _implementation():
""" """
implementation = platform.python_implementation() implementation = platform.python_implementation()
if implementation == 'CPython': if implementation == "CPython":
implementation_version = platform.python_version() implementation_version = platform.python_version()
elif implementation == 'PyPy': elif implementation == "PyPy":
implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major, implementation_version = "{}.{}.{}".format(
sys.pypy_version_info.minor, sys.pypy_version_info.major,
sys.pypy_version_info.micro) sys.pypy_version_info.minor,
if sys.pypy_version_info.releaselevel != 'final': sys.pypy_version_info.micro,
implementation_version = ''.join([ )
implementation_version, sys.pypy_version_info.releaselevel if sys.pypy_version_info.releaselevel != "final":
]) implementation_version = "".join(
elif implementation == 'Jython': [implementation_version, sys.pypy_version_info.releaselevel]
)
elif implementation == "Jython":
implementation_version = platform.python_version() # Complete Guess implementation_version = platform.python_version() # Complete Guess
elif implementation == 'IronPython': elif implementation == "IronPython":
implementation_version = platform.python_version() # Complete Guess implementation_version = platform.python_version() # Complete Guess
else: else:
implementation_version = 'Unknown' implementation_version = "Unknown"
return {'name': implementation, 'version': implementation_version} return {"name": implementation, "version": implementation_version}
def info(): def info():
"""Generate information for a bug report.""" """Generate information for a bug report."""
try: try:
platform_info = { platform_info = {
'system': platform.system(), "system": platform.system(),
'release': platform.release(), "release": platform.release(),
} }
except IOError: except OSError:
platform_info = { platform_info = {
'system': 'Unknown', "system": "Unknown",
'release': 'Unknown', "release": "Unknown",
} }
implementation_info = _implementation() implementation_info = _implementation()
urllib3_info = {'version': urllib3.__version__} urllib3_info = {"version": urllib3.__version__}
charset_normalizer_info = {'version': None} charset_normalizer_info = {"version": None}
chardet_info = {'version': None} chardet_info = {"version": None}
if charset_normalizer: if charset_normalizer:
charset_normalizer_info = {'version': charset_normalizer.__version__} charset_normalizer_info = {"version": charset_normalizer.__version__}
if chardet: if chardet:
chardet_info = {'version': chardet.__version__} chardet_info = {"version": chardet.__version__}
pyopenssl_info = { pyopenssl_info = {
'version': None, "version": None,
'openssl_version': '', "openssl_version": "",
} }
if OpenSSL: if OpenSSL:
pyopenssl_info = { pyopenssl_info = {
'version': OpenSSL.__version__, "version": OpenSSL.__version__,
'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER, "openssl_version": f"{OpenSSL.SSL.OPENSSL_VERSION_NUMBER:x}",
} }
cryptography_info = { cryptography_info = {
'version': getattr(cryptography, '__version__', ''), "version": getattr(cryptography, "__version__", ""),
} }
idna_info = { idna_info = {
'version': getattr(idna, '__version__', ''), "version": getattr(idna, "__version__", ""),
} }
system_ssl = ssl.OPENSSL_VERSION_NUMBER system_ssl = ssl.OPENSSL_VERSION_NUMBER
system_ssl_info = { system_ssl_info = {"version": f"{system_ssl:x}" if system_ssl is not None else ""}
'version': '%x' % system_ssl if system_ssl is not None else ''
}
return { return {
'platform': platform_info, "platform": platform_info,
'implementation': implementation_info, "implementation": implementation_info,
'system_ssl': system_ssl_info, "system_ssl": system_ssl_info,
'using_pyopenssl': pyopenssl is not None, "using_pyopenssl": pyopenssl is not None,
'using_charset_normalizer': chardet is None, "using_charset_normalizer": chardet is None,
'pyOpenSSL': pyopenssl_info, "pyOpenSSL": pyopenssl_info,
'urllib3': urllib3_info, "urllib3": urllib3_info,
'chardet': chardet_info, "chardet": chardet_info,
'charset_normalizer': charset_normalizer_info, "charset_normalizer": charset_normalizer_info,
'cryptography': cryptography_info, "cryptography": cryptography_info,
'idna': idna_info, "idna": idna_info,
'requests': { "requests": {
'version': requests_version, "version": requests_version,
}, },
} }
@ -131,5 +130,5 @@ def main():
print(json.dumps(info(), sort_keys=True, indent=2)) print(json.dumps(info(), sort_keys=True, indent=2))
if __name__ == '__main__': if __name__ == "__main__":
main() main()
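The module doubles as a CLI: running python -m requests.help prints the same report. Programmatic use looks like:

    from requests.help import info

    report = info()
    print(report["implementation"]["name"], report["urllib3"]["version"])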


@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
""" """
requests.hooks requests.hooks
~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~
@ -11,12 +9,13 @@ Available hooks:
``response``: ``response``:
The response generated from a Request. The response generated from a Request.
""" """
HOOKS = ['response'] HOOKS = ["response"]
def default_hooks(): def default_hooks():
return {event: [] for event in HOOKS} return {event: [] for event in HOOKS}
# TODO: response is the only one # TODO: response is the only one
@ -25,7 +24,7 @@ def dispatch_hook(key, hooks, hook_data, **kwargs):
hooks = hooks or {} hooks = hooks or {}
hooks = hooks.get(key) hooks = hooks.get(key)
if hooks: if hooks:
if hasattr(hooks, '__call__'): if hasattr(hooks, "__call__"):
hooks = [hooks] hooks = [hooks]
for hook in hooks: for hook in hooks:
_hook_data = hook(hook_data, **kwargs) _hook_data = hook(hook_data, **kwargs)
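dispatch_hook also accepts a bare callable, which the hasattr check above wraps in a list. A sketch of a session-wide response hook (hook name and usage invented):

    import requests

    def note_status(response, **kwargs):
        # Returning None keeps the original response object.
        print("got", response.status_code, "from", response.url)

    s = requests.Session()
    s.hooks["response"].append(note_status)
    # per-request form: requests.get(url, hooks={"response": note_status})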


@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
""" """
requests.models requests.models
~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~
@ -8,48 +6,72 @@ This module contains the primary objects that power Requests.
""" """
import datetime import datetime
import sys
# Import encoding now, to avoid implicit import later. # Import encoding now, to avoid implicit import later.
# Implicit import within threads may cause LookupError when standard library is in a ZIP, # Implicit import within threads may cause LookupError when standard library is in a ZIP,
# such as in Embedded Python. See https://github.com/psf/requests/issues/3578. # such as in Embedded Python. See https://github.com/psf/requests/issues/3578.
import encodings.idna import encodings.idna # noqa: F401
from io import UnsupportedOperation
from urllib3.exceptions import (
DecodeError,
LocationParseError,
ProtocolError,
ReadTimeoutError,
SSLError,
)
from urllib3.fields import RequestField from urllib3.fields import RequestField
from urllib3.filepost import encode_multipart_formdata from urllib3.filepost import encode_multipart_formdata
from urllib3.util import parse_url from urllib3.util import parse_url
from urllib3.exceptions import (
DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)
from io import UnsupportedOperation
from .hooks import default_hooks
from .structures import CaseInsensitiveDict
from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .exceptions import (
HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
ContentDecodingError, ConnectionError, StreamConsumedError,
InvalidJSONError)
from .exceptions import JSONDecodeError as RequestsJSONDecodeError
from ._internal_utils import to_native_string, unicode_is_ascii from ._internal_utils import to_native_string, unicode_is_ascii
from .utils import ( from .auth import HTTPBasicAuth
guess_filename, get_auth_from_url, requote_uri,
stream_decode_response_unicode, to_key_val_list, parse_header_links,
iter_slices, guess_json_utf, super_len, check_header_validity)
from .compat import ( from .compat import (
Callable, Mapping, Callable,
cookielib, urlunparse, urlsplit, urlencode, str, bytes, JSONDecodeError,
is_py2, chardet, builtin_str, basestring, JSONDecodeError) Mapping,
basestring,
builtin_str,
chardet,
cookielib,
)
from .compat import json as complexjson from .compat import json as complexjson
from .compat import urlencode, urlsplit, urlunparse
from .cookies import _copy_cookie_jar, cookiejar_from_dict, get_cookie_header
from .exceptions import (
ChunkedEncodingError,
ConnectionError,
ContentDecodingError,
HTTPError,
InvalidJSONError,
InvalidURL,
)
from .exceptions import JSONDecodeError as RequestsJSONDecodeError
from .exceptions import MissingSchema
from .exceptions import SSLError as RequestsSSLError
from .exceptions import StreamConsumedError
from .hooks import default_hooks
from .status_codes import codes from .status_codes import codes
from .structures import CaseInsensitiveDict
from .utils import (
check_header_validity,
get_auth_from_url,
guess_filename,
guess_json_utf,
iter_slices,
parse_header_links,
requote_uri,
stream_decode_response_unicode,
super_len,
to_key_val_list,
)
#: The set of HTTP status codes that indicate an automatically #: The set of HTTP status codes that indicate an automatically
#: processable redirect. #: processable redirect.
REDIRECT_STATI = ( REDIRECT_STATI = (
codes.moved, # 301 codes.moved, # 301
codes.found, # 302 codes.found, # 302
codes.other, # 303 codes.other, # 303
codes.temporary_redirect, # 307 codes.temporary_redirect, # 307
codes.permanent_redirect, # 308 codes.permanent_redirect, # 308
) )
@ -59,7 +81,7 @@ CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512 ITER_CHUNK_SIZE = 512
class RequestEncodingMixin(object): class RequestEncodingMixin:
@property @property
def path_url(self): def path_url(self):
"""Build the path URL to use.""" """Build the path URL to use."""
@ -70,16 +92,16 @@ class RequestEncodingMixin(object):
path = p.path path = p.path
if not path: if not path:
path = '/' path = "/"
url.append(path) url.append(path)
query = p.query query = p.query
if query: if query:
url.append('?') url.append("?")
url.append(query) url.append(query)
return ''.join(url) return "".join(url)
@staticmethod @staticmethod
def _encode_params(data): def _encode_params(data):
@ -92,18 +114,21 @@ class RequestEncodingMixin(object):
if isinstance(data, (str, bytes)): if isinstance(data, (str, bytes)):
return data return data
elif hasattr(data, 'read'): elif hasattr(data, "read"):
return data return data
elif hasattr(data, '__iter__'): elif hasattr(data, "__iter__"):
result = [] result = []
for k, vs in to_key_val_list(data): for k, vs in to_key_val_list(data):
if isinstance(vs, basestring) or not hasattr(vs, '__iter__'): if isinstance(vs, basestring) or not hasattr(vs, "__iter__"):
vs = [vs] vs = [vs]
for v in vs: for v in vs:
if v is not None: if v is not None:
result.append( result.append(
(k.encode('utf-8') if isinstance(k, str) else k, (
v.encode('utf-8') if isinstance(v, str) else v)) k.encode("utf-8") if isinstance(k, str) else k,
v.encode("utf-8") if isinstance(v, str) else v,
)
)
return urlencode(result, doseq=True) return urlencode(result, doseq=True)
else: else:
return data return data
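prepare_url funnels its params argument through _encode_params, so the encoding is easiest to observe there. A sketch with an invented URL:

    from requests.models import PreparedRequest

    p = PreparedRequest()
    p.prepare_url("https://example.com/search", {"q": "python requests", "page": 2})
    print(p.url)  # https://example.com/search?q=python+requests&page=2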
@ -118,7 +143,7 @@ class RequestEncodingMixin(object):
The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, content_type) The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, content_type)
or 4-tuples (filename, fileobj, content_type, custom_headers). or 4-tuples (filename, fileobj, content_type, custom_headers).
""" """
if (not files): if not files:
raise ValueError("Files must be provided.") raise ValueError("Files must be provided.")
elif isinstance(data, basestring): elif isinstance(data, basestring):
raise ValueError("Data must not be a string.") raise ValueError("Data must not be a string.")
@ -128,7 +153,7 @@ class RequestEncodingMixin(object):
files = to_key_val_list(files or {}) files = to_key_val_list(files or {})
for field, val in fields: for field, val in fields:
if isinstance(val, basestring) or not hasattr(val, '__iter__'): if isinstance(val, basestring) or not hasattr(val, "__iter__"):
val = [val] val = [val]
for v in val: for v in val:
if v is not None: if v is not None:
@ -137,8 +162,13 @@ class RequestEncodingMixin(object):
v = str(v) v = str(v)
new_fields.append( new_fields.append(
(field.decode('utf-8') if isinstance(field, bytes) else field, (
v.encode('utf-8') if isinstance(v, str) else v)) field.decode("utf-8")
if isinstance(field, bytes)
else field,
v.encode("utf-8") if isinstance(v, str) else v,
)
)
for (k, v) in files: for (k, v) in files:
# support for explicit filename # support for explicit filename
@ -157,7 +187,7 @@ class RequestEncodingMixin(object):
if isinstance(fp, (str, bytes, bytearray)): if isinstance(fp, (str, bytes, bytearray)):
fdata = fp fdata = fp
elif hasattr(fp, 'read'): elif hasattr(fp, "read"):
fdata = fp.read() fdata = fp.read()
elif fp is None: elif fp is None:
continue continue
@ -173,16 +203,16 @@ class RequestEncodingMixin(object):
return body, content_type return body, content_type
class RequestHooksMixin(object): class RequestHooksMixin:
def register_hook(self, event, hook): def register_hook(self, event, hook):
"""Properly register a hook.""" """Properly register a hook."""
if event not in self.hooks: if event not in self.hooks:
raise ValueError('Unsupported event specified, with event name "%s"' % (event)) raise ValueError(f'Unsupported event specified, with event name "{event}"')
if isinstance(hook, Callable): if isinstance(hook, Callable):
self.hooks[event].append(hook) self.hooks[event].append(hook)
elif hasattr(hook, '__iter__'): elif hasattr(hook, "__iter__"):
self.hooks[event].extend(h for h in hook if isinstance(h, Callable)) self.hooks[event].extend(h for h in hook if isinstance(h, Callable))
def deregister_hook(self, event, hook): def deregister_hook(self, event, hook):
@ -225,9 +255,19 @@ class Request(RequestHooksMixin):
<PreparedRequest [GET]> <PreparedRequest [GET]>
""" """
def __init__(self, def __init__(
method=None, url=None, headers=None, files=None, data=None, self,
params=None, auth=None, cookies=None, hooks=None, json=None): method=None,
url=None,
headers=None,
files=None,
data=None,
params=None,
auth=None,
cookies=None,
hooks=None,
json=None,
):
# Default empty dicts for dict params. # Default empty dicts for dict params.
data = [] if data is None else data data = [] if data is None else data
@ -251,7 +291,7 @@ class Request(RequestHooksMixin):
self.cookies = cookies self.cookies = cookies
def __repr__(self): def __repr__(self):
return '<Request [%s]>' % (self.method) return f"<Request [{self.method}]>"
def prepare(self): def prepare(self):
"""Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it.""" """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
@ -309,9 +349,19 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
#: integer denoting starting position of a readable file-like body. #: integer denoting starting position of a readable file-like body.
self._body_position = None self._body_position = None
def prepare(self, def prepare(
method=None, url=None, headers=None, files=None, data=None, self,
params=None, auth=None, cookies=None, hooks=None, json=None): method=None,
url=None,
headers=None,
files=None,
data=None,
params=None,
auth=None,
cookies=None,
hooks=None,
json=None,
):
"""Prepares the entire request with the given parameters.""" """Prepares the entire request with the given parameters."""
self.prepare_method(method) self.prepare_method(method)
@ -328,7 +378,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
self.prepare_hooks(hooks) self.prepare_hooks(hooks)
def __repr__(self): def __repr__(self):
return '<PreparedRequest [%s]>' % (self.method) return f"<PreparedRequest [{self.method}]>"
def copy(self): def copy(self):
p = PreparedRequest() p = PreparedRequest()
@ -352,7 +402,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
import idna import idna
try: try:
host = idna.encode(host, uts46=True).decode('utf-8') host = idna.encode(host, uts46=True).decode("utf-8")
except idna.IDNAError: except idna.IDNAError:
raise UnicodeError raise UnicodeError
return host return host
@ -365,9 +415,9 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
#: on python 3.x. #: on python 3.x.
#: https://github.com/psf/requests/pull/2238 #: https://github.com/psf/requests/pull/2238
if isinstance(url, bytes): if isinstance(url, bytes):
url = url.decode('utf8') url = url.decode("utf8")
else: else:
url = unicode(url) if is_py2 else str(url) url = str(url)
# Remove leading whitespaces from url # Remove leading whitespaces from url
url = url.lstrip() url = url.lstrip()
@ -375,7 +425,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
# Don't do any URL preparation for non-HTTP schemes like `mailto`, # Don't do any URL preparation for non-HTTP schemes like `mailto`,
# `data` etc to work around exceptions from `url_parse`, which # `data` etc to work around exceptions from `url_parse`, which
# handles RFC 3986 only. # handles RFC 3986 only.
if ':' in url and not url.lower().startswith('http'): if ":" in url and not url.lower().startswith("http"):
self.url = url self.url = url
return return
@ -386,13 +436,13 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
raise InvalidURL(*e.args) raise InvalidURL(*e.args)
if not scheme: if not scheme:
error = ("Invalid URL {0!r}: No scheme supplied. Perhaps you meant http://{0}?") raise MissingSchema(
error = error.format(to_native_string(url, 'utf8')) f"Invalid URL {url!r}: No scheme supplied. "
f"Perhaps you meant http://{url}?"
raise MissingSchema(error) )
if not host: if not host:
raise InvalidURL("Invalid URL %r: No host supplied" % url) raise InvalidURL(f"Invalid URL {url!r}: No host supplied")
# In general, we want to try IDNA encoding the hostname if the string contains # In general, we want to try IDNA encoding the hostname if the string contains
# non-ASCII characters. This allows users to automatically get the correct IDNA # non-ASCII characters. This allows users to automatically get the correct IDNA
@ -402,33 +452,21 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
try: try:
host = self._get_idna_encoded_host(host) host = self._get_idna_encoded_host(host)
except UnicodeError: except UnicodeError:
raise InvalidURL('URL has an invalid label.') raise InvalidURL("URL has an invalid label.")
elif host.startswith((u'*', u'.')): elif host.startswith(("*", ".")):
raise InvalidURL('URL has an invalid label.') raise InvalidURL("URL has an invalid label.")
# Carefully reconstruct the network location # Carefully reconstruct the network location
netloc = auth or '' netloc = auth or ""
if netloc: if netloc:
netloc += '@' netloc += "@"
netloc += host netloc += host
if port: if port:
netloc += ':' + str(port) netloc += f":{port}"
# Bare domains aren't valid URLs. # Bare domains aren't valid URLs.
if not path: if not path:
path = '/' path = "/"
if is_py2:
if isinstance(scheme, str):
scheme = scheme.encode('utf-8')
if isinstance(netloc, str):
netloc = netloc.encode('utf-8')
if isinstance(path, str):
path = path.encode('utf-8')
if isinstance(query, str):
query = query.encode('utf-8')
if isinstance(fragment, str):
fragment = fragment.encode('utf-8')
if isinstance(params, (str, bytes)): if isinstance(params, (str, bytes)):
params = to_native_string(params) params = to_native_string(params)
@ -436,7 +474,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
enc_params = self._encode_params(params) enc_params = self._encode_params(params)
if enc_params: if enc_params:
if query: if query:
query = '%s&%s' % (query, enc_params) query = f"{query}&{enc_params}"
else: else:
query = enc_params query = enc_params
@ -467,7 +505,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
if not data and json is not None: if not data and json is not None:
# urllib3 requires a bytes-like body. Python 2's json.dumps # urllib3 requires a bytes-like body. Python 2's json.dumps
# provides this natively, but Python 3 gives a Unicode string. # provides this natively, but Python 3 gives a Unicode string.
content_type = 'application/json' content_type = "application/json"
try: try:
body = complexjson.dumps(json, allow_nan=False) body = complexjson.dumps(json, allow_nan=False)
@ -475,12 +513,14 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
raise InvalidJSONError(ve, request=self) raise InvalidJSONError(ve, request=self)
if not isinstance(body, bytes): if not isinstance(body, bytes):
body = body.encode('utf-8') body = body.encode("utf-8")
is_stream = all([ is_stream = all(
hasattr(data, '__iter__'), [
not isinstance(data, (basestring, list, tuple, Mapping)) hasattr(data, "__iter__"),
]) not isinstance(data, (basestring, list, tuple, Mapping)),
]
)
if is_stream: if is_stream:
try: try:
@ -490,24 +530,26 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
body = data body = data
if getattr(body, 'tell', None) is not None: if getattr(body, "tell", None) is not None:
# Record the current file position before reading. # Record the current file position before reading.
# This will allow us to rewind a file in the event # This will allow us to rewind a file in the event
# of a redirect. # of a redirect.
try: try:
self._body_position = body.tell() self._body_position = body.tell()
except (IOError, OSError): except OSError:
# This differentiates from None, allowing us to catch # This differentiates from None, allowing us to catch
# a failed `tell()` later when trying to rewind the body # a failed `tell()` later when trying to rewind the body
self._body_position = object() self._body_position = object()
if files: if files:
raise NotImplementedError('Streamed bodies and files are mutually exclusive.') raise NotImplementedError(
"Streamed bodies and files are mutually exclusive."
)
if length: if length:
self.headers['Content-Length'] = builtin_str(length) self.headers["Content-Length"] = builtin_str(length)
else: else:
self.headers['Transfer-Encoding'] = 'chunked' self.headers["Transfer-Encoding"] = "chunked"
else: else:
# Multi-part file uploads. # Multi-part file uploads.
if files: if files:
@ -515,16 +557,16 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
else: else:
if data: if data:
body = self._encode_params(data) body = self._encode_params(data)
if isinstance(data, basestring) or hasattr(data, 'read'): if isinstance(data, basestring) or hasattr(data, "read"):
content_type = None content_type = None
else: else:
content_type = 'application/x-www-form-urlencoded' content_type = "application/x-www-form-urlencoded"
self.prepare_content_length(body) self.prepare_content_length(body)
# Add content-type if it wasn't explicitly provided. # Add content-type if it wasn't explicitly provided.
if content_type and ('content-type' not in self.headers): if content_type and ("content-type" not in self.headers):
self.headers['Content-Type'] = content_type self.headers["Content-Type"] = content_type
self.body = body self.body = body
@ -535,13 +577,16 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
if length: if length:
# If length exists, set it. Otherwise, we fall back # If length exists, set it. Otherwise, we fall back
# to Transfer-Encoding: chunked. # to Transfer-Encoding: chunked.
self.headers['Content-Length'] = builtin_str(length) self.headers["Content-Length"] = builtin_str(length)
elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None: elif (
self.method not in ("GET", "HEAD")
and self.headers.get("Content-Length") is None
):
# Set Content-Length to 0 for methods that can have a body # Set Content-Length to 0 for methods that can have a body
# but don't provide one. (i.e. not GET or HEAD) # but don't provide one. (i.e. not GET or HEAD)
self.headers['Content-Length'] = '0' self.headers["Content-Length"] = "0"
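Since super_len cannot size a plain generator, generator bodies take the chunked branch above. A sketch with an invented endpoint:

    import requests

    def body():
        yield b"chunk-1"
        yield b"chunk-2"

    req = requests.Request("POST", "https://example.com/upload", data=body()).prepare()
    print(req.headers.get("Transfer-Encoding"))  # chunked
    print(req.headers.get("Content-Length"))     # None, length is unknown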
def prepare_auth(self, auth, url=''): def prepare_auth(self, auth, url=""):
"""Prepares the given HTTP auth data.""" """Prepares the given HTTP auth data."""
# If no Auth is explicitly provided, extract it from the URL first. # If no Auth is explicitly provided, extract it from the URL first.
@ -581,7 +626,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
cookie_header = get_cookie_header(self._cookies, self) cookie_header = get_cookie_header(self._cookies, self)
if cookie_header is not None: if cookie_header is not None:
self.headers['Cookie'] = cookie_header self.headers["Cookie"] = cookie_header
def prepare_hooks(self, hooks): def prepare_hooks(self, hooks):
"""Prepares the given hooks.""" """Prepares the given hooks."""
@ -593,14 +638,22 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
self.register_hook(event, hooks[event]) self.register_hook(event, hooks[event])
class Response(object): class Response:
"""The :class:`Response <Response>` object, which contains a """The :class:`Response <Response>` object, which contains a
server's response to an HTTP request. server's response to an HTTP request.
""" """
__attrs__ = [ __attrs__ = [
'_content', 'status_code', 'headers', 'url', 'history', "_content",
'encoding', 'reason', 'cookies', 'elapsed', 'request' "status_code",
"headers",
"url",
"history",
"encoding",
"reason",
"cookies",
"elapsed",
"request",
] ]
def __init__(self): def __init__(self):
@ -669,11 +722,11 @@ class Response(object):
setattr(self, name, value) setattr(self, name, value)
# pickled objects do not have .raw # pickled objects do not have .raw
setattr(self, '_content_consumed', True) setattr(self, "_content_consumed", True)
setattr(self, 'raw', None) setattr(self, "raw", None)
def __repr__(self): def __repr__(self):
return '<Response [%s]>' % (self.status_code) return f"<Response [{self.status_code}]>"
def __bool__(self): def __bool__(self):
"""Returns True if :attr:`status_code` is less than 400. """Returns True if :attr:`status_code` is less than 400.
@ -719,12 +772,15 @@ class Response(object):
"""True if this Response is a well-formed HTTP redirect that could have """True if this Response is a well-formed HTTP redirect that could have
been processed automatically (by :meth:`Session.resolve_redirects`). been processed automatically (by :meth:`Session.resolve_redirects`).
""" """
return ('location' in self.headers and self.status_code in REDIRECT_STATI) return "location" in self.headers and self.status_code in REDIRECT_STATI
@property @property
def is_permanent_redirect(self): def is_permanent_redirect(self):
"""True if this Response one of the permanent versions of redirect.""" """True if this Response one of the permanent versions of redirect."""
return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect)) return "location" in self.headers and self.status_code in (
codes.moved_permanently,
codes.permanent_redirect,
)
@property @property
def next(self): def next(self):
@ -734,7 +790,7 @@ class Response(object):
@property @property
def apparent_encoding(self): def apparent_encoding(self):
"""The apparent encoding, provided by the charset_normalizer or chardet libraries.""" """The apparent encoding, provided by the charset_normalizer or chardet libraries."""
return chardet.detect(self.content)['encoding'] return chardet.detect(self.content)["encoding"]
def iter_content(self, chunk_size=1, decode_unicode=False): def iter_content(self, chunk_size=1, decode_unicode=False):
"""Iterates over the response data. When stream=True is set on the """Iterates over the response data. When stream=True is set on the
@ -755,16 +811,17 @@ class Response(object):
def generate(): def generate():
# Special case for urllib3. # Special case for urllib3.
if hasattr(self.raw, 'stream'): if hasattr(self.raw, "stream"):
try: try:
for chunk in self.raw.stream(chunk_size, decode_content=True): yield from self.raw.stream(chunk_size, decode_content=True)
yield chunk
except ProtocolError as e: except ProtocolError as e:
raise ChunkedEncodingError(e) raise ChunkedEncodingError(e)
except DecodeError as e: except DecodeError as e:
raise ContentDecodingError(e) raise ContentDecodingError(e)
except ReadTimeoutError as e: except ReadTimeoutError as e:
raise ConnectionError(e) raise ConnectionError(e)
except SSLError as e:
raise RequestsSSLError(e)
else: else:
# Standard file-like object. # Standard file-like object.
while True: while True:
@ -778,7 +835,9 @@ class Response(object):
if self._content_consumed and isinstance(self._content, bool): if self._content_consumed and isinstance(self._content, bool):
raise StreamConsumedError() raise StreamConsumedError()
elif chunk_size is not None and not isinstance(chunk_size, int): elif chunk_size is not None and not isinstance(chunk_size, int):
raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size)) raise TypeError(
f"chunk_size must be an int, it is instead a {type(chunk_size)}."
)
# simulate reading small chunks of the content # simulate reading small chunks of the content
reused_chunks = iter_slices(self._content, chunk_size) reused_chunks = iter_slices(self._content, chunk_size)
@ -791,7 +850,9 @@ class Response(object):
return chunks return chunks
def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None): def iter_lines(
self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None
):
"""Iterates over the response data, one line at a time. When """Iterates over the response data, one line at a time. When
stream=True is set on the request, this avoids reading the stream=True is set on the request, this avoids reading the
content at once into memory for large responses. content at once into memory for large responses.
@ -801,7 +862,9 @@ class Response(object):
pending = None pending = None
for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode): for chunk in self.iter_content(
chunk_size=chunk_size, decode_unicode=decode_unicode
):
if pending is not None: if pending is not None:
chunk = pending + chunk chunk = pending + chunk
@ -816,8 +879,7 @@ class Response(object):
else: else:
pending = None pending = None
for line in lines: yield from lines
yield line
if pending is not None: if pending is not None:
yield pending yield pending
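A streaming sketch; httpbin.org serves here only as a convenient echo service:

    import requests

    with requests.get("https://httpbin.org/stream/3", stream=True) as r:
        for line in r.iter_lines(chunk_size=512):
            if line:  # skip keep-alive empty lines
                print(line.decode("utf-8"))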
@ -829,13 +891,12 @@ class Response(object):
if self._content is False: if self._content is False:
# Read the contents. # Read the contents.
if self._content_consumed: if self._content_consumed:
raise RuntimeError( raise RuntimeError("The content for this response was already consumed")
'The content for this response was already consumed')
if self.status_code == 0 or self.raw is None: if self.status_code == 0 or self.raw is None:
self._content = None self._content = None
else: else:
self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b'' self._content = b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b""
self._content_consumed = True self._content_consumed = True
# don't need to release the connection; that's been handled by urllib3 # don't need to release the connection; that's been handled by urllib3
@ -860,7 +921,7 @@ class Response(object):
encoding = self.encoding encoding = self.encoding
if not self.content: if not self.content:
return str('') return ""
# Fallback to auto-detected encoding. # Fallback to auto-detected encoding.
if self.encoding is None: if self.encoding is None:
@ -868,7 +929,7 @@ class Response(object):
# Decode unicode from given encoding. # Decode unicode from given encoding.
try: try:
content = str(self.content, encoding, errors='replace') content = str(self.content, encoding, errors="replace")
except (LookupError, TypeError): except (LookupError, TypeError):
# A LookupError is raised if the encoding was not found which could # A LookupError is raised if the encoding was not found which could
# indicate a misspelling or similar mistake. # indicate a misspelling or similar mistake.
@ -876,7 +937,7 @@ class Response(object):
# A TypeError can be raised if encoding is None # A TypeError can be raised if encoding is None
# #
# So we try blindly encoding. # So we try blindly encoding.
content = str(self.content, errors='replace') content = str(self.content, errors="replace")
return content return content
@ -896,65 +957,65 @@ class Response(object):
encoding = guess_json_utf(self.content) encoding = guess_json_utf(self.content)
if encoding is not None: if encoding is not None:
try: try:
return complexjson.loads( return complexjson.loads(self.content.decode(encoding), **kwargs)
self.content.decode(encoding), **kwargs
)
except UnicodeDecodeError: except UnicodeDecodeError:
# Wrong UTF codec detected; usually because it's not UTF-8 # Wrong UTF codec detected; usually because it's not UTF-8
# but some other 8-bit codec. This is an RFC violation, # but some other 8-bit codec. This is an RFC violation,
# and the server didn't bother to tell us what codec *was* # and the server didn't bother to tell us what codec *was*
# used. # used.
pass pass
except JSONDecodeError as e:
raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)
try: try:
return complexjson.loads(self.text, **kwargs) return complexjson.loads(self.text, **kwargs)
except JSONDecodeError as e: except JSONDecodeError as e:
# Catch JSON-related errors and raise as requests.JSONDecodeError # Catch JSON-related errors and raise as requests.JSONDecodeError
# This aliases json.JSONDecodeError and simplejson.JSONDecodeError # This aliases json.JSONDecodeError and simplejson.JSONDecodeError
if is_py2: # e is a ValueError raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)
raise RequestsJSONDecodeError(e.message)
else:
raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)
@property @property
def links(self): def links(self):
"""Returns the parsed header links of the response, if any.""" """Returns the parsed header links of the response, if any."""
header = self.headers.get('link') header = self.headers.get("link")
# l = MultiDict() resolved_links = {}
l = {}
if header: if header:
links = parse_header_links(header) links = parse_header_links(header)
for link in links: for link in links:
key = link.get('rel') or link.get('url') key = link.get("rel") or link.get("url")
l[key] = link resolved_links[key] = link
return l return resolved_links
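APIs that paginate via the Link header (GitHub's, for example) can be walked with this property. A sketch against a live endpoint:

    import requests

    r = requests.get("https://api.github.com/repos/psf/requests/issues")
    next_url = r.links.get("next", {}).get("url")  # None when there is no next page
    print(next_url)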
def raise_for_status(self): def raise_for_status(self):
"""Raises :class:`HTTPError`, if one occurred.""" """Raises :class:`HTTPError`, if one occurred."""
http_error_msg = '' http_error_msg = ""
if isinstance(self.reason, bytes): if isinstance(self.reason, bytes):
# We attempt to decode utf-8 first because some servers # We attempt to decode utf-8 first because some servers
# choose to localize their reason strings. If the string # choose to localize their reason strings. If the string
# isn't utf-8, we fall back to iso-8859-1 for all other # isn't utf-8, we fall back to iso-8859-1 for all other
# encodings. (See PR #3538) # encodings. (See PR #3538)
try: try:
reason = self.reason.decode('utf-8') reason = self.reason.decode("utf-8")
except UnicodeDecodeError: except UnicodeDecodeError:
reason = self.reason.decode('iso-8859-1') reason = self.reason.decode("iso-8859-1")
else: else:
reason = self.reason reason = self.reason
if 400 <= self.status_code < 500: if 400 <= self.status_code < 500:
http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url) http_error_msg = (
f"{self.status_code} Client Error: {reason} for url: {self.url}"
)
elif 500 <= self.status_code < 600: elif 500 <= self.status_code < 600:
http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url) http_error_msg = (
f"{self.status_code} Server Error: {reason} for url: {self.url}"
)
if http_error_msg: if http_error_msg:
raise HTTPError(http_error_msg, response=self) raise HTTPError(http_error_msg, response=self)
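Typical use, with httpbin.org standing in for a failing server:

    import requests

    try:
        r = requests.get("https://httpbin.org/status/404")
        r.raise_for_status()
    except requests.HTTPError as exc:
        print(exc.response.status_code)  # 404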
@ -968,6 +1029,6 @@ class Response(object):
if not self._content_consumed: if not self._content_consumed:
self.raw.close() self.raw.close()
release_conn = getattr(self.raw, 'release_conn', None) release_conn = getattr(self.raw, "release_conn", None)
if release_conn is not None: if release_conn is not None:
release_conn() release_conn()


@ -3,24 +3,26 @@ import sys
try: try:
import chardet import chardet
except ImportError: except ImportError:
import charset_normalizer as chardet
import warnings import warnings
warnings.filterwarnings('ignore', 'Trying to detect', module='charset_normalizer') import charset_normalizer as chardet
warnings.filterwarnings("ignore", "Trying to detect", module="charset_normalizer")
# This code exists for backwards compatibility reasons. # This code exists for backwards compatibility reasons.
# I don't like it either. Just look the other way. :) # I don't like it either. Just look the other way. :)
for package in ('urllib3', 'idna'): for package in ("urllib3", "idna"):
locals()[package] = __import__(package) locals()[package] = __import__(package)
# This traversal is apparently necessary such that the identities are # This traversal is apparently necessary such that the identities are
# preserved (requests.packages.urllib3.* is urllib3.*) # preserved (requests.packages.urllib3.* is urllib3.*)
for mod in list(sys.modules): for mod in list(sys.modules):
if mod == package or mod.startswith(package + '.'): if mod == package or mod.startswith(f"{package}."):
sys.modules['requests.packages.' + mod] = sys.modules[mod] sys.modules[f"requests.packages.{mod}"] = sys.modules[mod]
target = chardet.__name__ target = chardet.__name__
for mod in list(sys.modules): for mod in list(sys.modules):
if mod == target or mod.startswith(target + '.'): if mod == target or mod.startswith(f"{target}."):
sys.modules['requests.packages.' + target.replace(target, 'chardet')] = sys.modules[mod] target = target.replace(target, "chardet")
sys.modules[f"requests.packages.{target}"] = sys.modules[mod]
# Kinda cool, though, right? # Kinda cool, though, right?
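The upshot of the aliasing is that the legacy import paths resolve to the real modules rather than copies:

    import urllib3
    from requests.packages import urllib3 as vendored

    assert vendored is urllib3  # same module object, identity preserved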


@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
""" """
requests.sessions requests.sessions
~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~
@ -10,39 +8,52 @@ requests (cookies, auth, proxies).
import os import os
import sys import sys
import time import time
from datetime import timedelta
from collections import OrderedDict from collections import OrderedDict
from datetime import timedelta
from .auth import _basic_auth_str
from .compat import cookielib, is_py3, urljoin, urlparse, Mapping
from .cookies import (
cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
from .hooks import default_hooks, dispatch_hook
from ._internal_utils import to_native_string from ._internal_utils import to_native_string
from .utils import to_key_val_list, default_headers, DEFAULT_PORTS
from .exceptions import (
TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)
from .structures import CaseInsensitiveDict
from .adapters import HTTPAdapter from .adapters import HTTPAdapter
from .auth import _basic_auth_str
from .utils import ( from .compat import Mapping, cookielib, urljoin, urlparse
requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies, from .cookies import (
get_auth_from_url, rewind_body, resolve_proxies RequestsCookieJar,
cookiejar_from_dict,
extract_cookies_to_jar,
merge_cookies,
) )
from .exceptions import (
from .status_codes import codes ChunkedEncodingError,
ContentDecodingError,
InvalidSchema,
TooManyRedirects,
)
from .hooks import default_hooks, dispatch_hook
# formerly defined here, reexposed here for backward compatibility # formerly defined here, reexposed here for backward compatibility
from .models import REDIRECT_STATI from .models import ( # noqa: F401
DEFAULT_REDIRECT_LIMIT,
REDIRECT_STATI,
PreparedRequest,
Request,
)
from .status_codes import codes
from .structures import CaseInsensitiveDict
from .utils import ( # noqa: F401
DEFAULT_PORTS,
default_headers,
get_auth_from_url,
get_environ_proxies,
get_netrc_auth,
requote_uri,
resolve_proxies,
rewind_body,
should_bypass_proxies,
to_key_val_list,
)
# Preferred clock, based on which one is more accurate on a given system. # Preferred clock, based on which one is more accurate on a given system.
if sys.platform == 'win32': if sys.platform == "win32":
try: # Python 3.4+ preferred_clock = time.perf_counter
preferred_clock = time.perf_counter
except AttributeError: # Earlier than Python 3.
preferred_clock = time.clock
else: else:
preferred_clock = time.time preferred_clock = time.time
@ -61,8 +72,7 @@ def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
# Bypass if not a dictionary (e.g. verify) # Bypass if not a dictionary (e.g. verify)
if not ( if not (
isinstance(session_setting, Mapping) and isinstance(session_setting, Mapping) and isinstance(request_setting, Mapping)
isinstance(request_setting, Mapping)
): ):
return request_setting return request_setting
@ -84,17 +94,16 @@ def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
This is necessary because when request_hooks == {'response': []}, the This is necessary because when request_hooks == {'response': []}, the
merge breaks Session hooks entirely. merge breaks Session hooks entirely.
""" """
if session_hooks is None or session_hooks.get('response') == []: if session_hooks is None or session_hooks.get("response") == []:
return request_hooks return request_hooks
if request_hooks is None or request_hooks.get('response') == []: if request_hooks is None or request_hooks.get("response") == []:
return session_hooks return session_hooks
return merge_setting(request_hooks, session_hooks, dict_class) return merge_setting(request_hooks, session_hooks, dict_class)
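merge_setting also treats None as a deletion marker, which is how a single request can suppress a session-level header. A sketch:

    from requests.sessions import merge_setting
    from requests.structures import CaseInsensitiveDict

    session_headers = CaseInsensitiveDict({"Accept": "*/*", "X-Debug": "1"})
    request_headers = {"X-Debug": None, "Authorization": "Bearer t0k3n"}

    merged = merge_setting(request_headers, session_headers, dict_class=CaseInsensitiveDict)
    print(dict(merged))  # Accept kept, Authorization added, X-Debug removed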
class SessionRedirectMixin(object): class SessionRedirectMixin:
def get_redirect_target(self, resp): def get_redirect_target(self, resp):
"""Receives a Response. Returns a redirect URI or ``None``""" """Receives a Response. Returns a redirect URI or ``None``"""
# Due to the nature of how requests processes redirects this method will # Due to the nature of how requests processes redirects this method will
@ -104,16 +113,15 @@ class SessionRedirectMixin(object):
# to cache the redirect location onto the response object as a private # to cache the redirect location onto the response object as a private
# attribute. # attribute.
if resp.is_redirect: if resp.is_redirect:
location = resp.headers['location'] location = resp.headers["location"]
# Currently the underlying http module on py3 decodes headers # in latin1, but empirical evidence suggests that latin1 is very
# in latin1, but empirical evidence suggests that latin1 is very # in latin1, but empirical evidence suggests that latin1 is very
# rarely used with non-ASCII characters in HTTP headers. # rarely used with non-ASCII characters in HTTP headers.
# It is more likely to get UTF8 header rather than latin1. # It is more likely to get UTF8 header rather than latin1.
# This causes incorrect handling of UTF8 encoded location headers. # This causes incorrect handling of UTF8 encoded location headers.
# To solve this, we re-encode the location in latin1. # To solve this, we re-encode the location in latin1.
if is_py3: location = location.encode("latin1")
location = location.encode('latin1') return to_native_string(location, "utf8")
return to_native_string(location, 'utf8')
return None return None
def should_strip_auth(self, old_url, new_url): def should_strip_auth(self, old_url, new_url):
@ -126,23 +134,40 @@ class SessionRedirectMixin(object):
# ports. This isn't specified by RFC 7235, but is kept to avoid # ports. This isn't specified by RFC 7235, but is kept to avoid
# breaking backwards compatibility with older versions of requests # breaking backwards compatibility with older versions of requests
# that allowed any redirects on the same host. # that allowed any redirects on the same host.
if (old_parsed.scheme == 'http' and old_parsed.port in (80, None) if (
and new_parsed.scheme == 'https' and new_parsed.port in (443, None)): old_parsed.scheme == "http"
and old_parsed.port in (80, None)
and new_parsed.scheme == "https"
and new_parsed.port in (443, None)
):
return False return False
# Handle default port usage corresponding to scheme. # Handle default port usage corresponding to scheme.
changed_port = old_parsed.port != new_parsed.port changed_port = old_parsed.port != new_parsed.port
changed_scheme = old_parsed.scheme != new_parsed.scheme changed_scheme = old_parsed.scheme != new_parsed.scheme
default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None) default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None)
if (not changed_scheme and old_parsed.port in default_port if (
and new_parsed.port in default_port): not changed_scheme
and old_parsed.port in default_port
and new_parsed.port in default_port
):
return False return False
# Standard case: root URI must match # Standard case: root URI must match
return changed_port or changed_scheme return changed_port or changed_scheme
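Two illustrative calls (hostnames invented); the part of the method above this hunk also strips auth whenever the hostname itself changes:

    import requests

    s = requests.Session()
    # http -> https upgrade on the same host and default ports keeps auth:
    print(s.should_strip_auth("http://example.com/x", "https://example.com/x"))  # False
    # a different host means the Authorization header gets stripped:
    print(s.should_strip_auth("https://example.com/x", "https://other.example/x"))  # True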
def resolve_redirects(self, resp, req, stream=False, timeout=None, def resolve_redirects(
verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs): self,
resp,
req,
stream=False,
timeout=None,
verify=True,
cert=None,
proxies=None,
yield_requests=False,
**adapter_kwargs,
):
"""Receives a Response. Returns a generator of Responses or Requests.""" """Receives a Response. Returns a generator of Responses or Requests."""
hist = [] # keep track of history hist = [] # keep track of history
@ -163,19 +188,21 @@ class SessionRedirectMixin(object):
resp.raw.read(decode_content=False) resp.raw.read(decode_content=False)
if len(resp.history) >= self.max_redirects: if len(resp.history) >= self.max_redirects:
raise TooManyRedirects('Exceeded {} redirects.'.format(self.max_redirects), response=resp) raise TooManyRedirects(
f"Exceeded {self.max_redirects} redirects.", response=resp
)
# Release the connection back into the pool. # Release the connection back into the pool.
resp.close() resp.close()
# Handle redirection without scheme (see: RFC 1808 Section 4) # Handle redirection without scheme (see: RFC 1808 Section 4)
if url.startswith('//'): if url.startswith("//"):
parsed_rurl = urlparse(resp.url) parsed_rurl = urlparse(resp.url)
url = ':'.join([to_native_string(parsed_rurl.scheme), url]) url = ":".join([to_native_string(parsed_rurl.scheme), url])
# Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2) # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2)
parsed = urlparse(url) parsed = urlparse(url)
if parsed.fragment == '' and previous_fragment: if parsed.fragment == "" and previous_fragment:
parsed = parsed._replace(fragment=previous_fragment) parsed = parsed._replace(fragment=previous_fragment)
elif parsed.fragment: elif parsed.fragment:
previous_fragment = parsed.fragment previous_fragment = parsed.fragment
@ -194,15 +221,18 @@ class SessionRedirectMixin(object):
self.rebuild_method(prepared_request, resp) self.rebuild_method(prepared_request, resp)
# https://github.com/psf/requests/issues/1084 # https://github.com/psf/requests/issues/1084
if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect): if resp.status_code not in (
codes.temporary_redirect,
codes.permanent_redirect,
):
# https://github.com/psf/requests/issues/3490 # https://github.com/psf/requests/issues/3490
purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding') purged_headers = ("Content-Length", "Content-Type", "Transfer-Encoding")
for header in purged_headers: for header in purged_headers:
prepared_request.headers.pop(header, None) prepared_request.headers.pop(header, None)
prepared_request.body = None prepared_request.body = None
headers = prepared_request.headers headers = prepared_request.headers
headers.pop('Cookie', None) headers.pop("Cookie", None)
# Extract any cookies sent on the response to the cookiejar # Extract any cookies sent on the response to the cookiejar
# in the new request. Because we've mutated our copied prepared # in the new request. Because we've mutated our copied prepared
@ -218,9 +248,8 @@ class SessionRedirectMixin(object):
# A failed tell() sets `_body_position` to `object()`. This non-None # A failed tell() sets `_body_position` to `object()`. This non-None
# value ensures `rewindable` will be True, allowing us to raise an # value ensures `rewindable` will be True, allowing us to raise an
# UnrewindableBodyError, instead of hanging the connection. # UnrewindableBodyError, instead of hanging the connection.
rewindable = ( rewindable = prepared_request._body_position is not None and (
prepared_request._body_position is not None and "Content-Length" in headers or "Transfer-Encoding" in headers
('Content-Length' in headers or 'Transfer-Encoding' in headers)
) )
# Attempt to rewind consumed file-like object. # Attempt to rewind consumed file-like object.
@ -242,7 +271,7 @@ class SessionRedirectMixin(object):
cert=cert, cert=cert,
proxies=proxies, proxies=proxies,
allow_redirects=False, allow_redirects=False,
**adapter_kwargs **adapter_kwargs,
) )
extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)
@ -259,10 +288,12 @@ class SessionRedirectMixin(object):
headers = prepared_request.headers headers = prepared_request.headers
url = prepared_request.url url = prepared_request.url
if 'Authorization' in headers and self.should_strip_auth(response.request.url, url): if "Authorization" in headers and self.should_strip_auth(
response.request.url, url
):
# If we get redirected to a new host, we should strip out any # If we get redirected to a new host, we should strip out any
# authentication headers. # authentication headers.
del headers['Authorization'] del headers["Authorization"]
# .netrc might have more auth for us on our new host. # .netrc might have more auth for us on our new host.
new_auth = get_netrc_auth(url) if self.trust_env else None new_auth = get_netrc_auth(url) if self.trust_env else None
@ -285,8 +316,8 @@ class SessionRedirectMixin(object):
scheme = urlparse(prepared_request.url).scheme scheme = urlparse(prepared_request.url).scheme
new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env) new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env)
if 'Proxy-Authorization' in headers: if "Proxy-Authorization" in headers:
del headers['Proxy-Authorization'] del headers["Proxy-Authorization"]
try: try:
username, password = get_auth_from_url(new_proxies[scheme]) username, password = get_auth_from_url(new_proxies[scheme])
@ -294,7 +325,7 @@ class SessionRedirectMixin(object):
username, password = None, None username, password = None, None
if username and password: if username and password:
headers['Proxy-Authorization'] = _basic_auth_str(username, password) headers["Proxy-Authorization"] = _basic_auth_str(username, password)
return new_proxies return new_proxies
@ -305,18 +336,18 @@ class SessionRedirectMixin(object):
method = prepared_request.method method = prepared_request.method
# https://tools.ietf.org/html/rfc7231#section-6.4.4 # https://tools.ietf.org/html/rfc7231#section-6.4.4
if response.status_code == codes.see_other and method != 'HEAD': if response.status_code == codes.see_other and method != "HEAD":
method = 'GET' method = "GET"
# Do what the browsers do, despite standards... # Do what the browsers do, despite standards...
# First, turn 302s into GETs. # First, turn 302s into GETs.
if response.status_code == codes.found and method != 'HEAD': if response.status_code == codes.found and method != "HEAD":
method = 'GET' method = "GET"
# Second, if a POST is responded to with a 301, turn it into a GET. # Second, if a POST is responded to with a 301, turn it into a GET.
# This bizarre behaviour is explained in Issue 1704. # This bizarre behaviour is explained in Issue 1704.
if response.status_code == codes.moved and method == 'POST': if response.status_code == codes.moved and method == "POST":
method = 'GET' method = "GET"
prepared_request.method = method prepared_request.method = method
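A self-contained sketch of the browser-style downgrade, driving rebuild_method with a hand-built Response:

    import requests
    from requests.models import Response
    from requests.status_codes import codes

    prepared = requests.Request("POST", "https://example.com/form").prepare()
    fake = Response()
    fake.status_code = codes.found  # 302

    requests.Session().rebuild_method(prepared, fake)
    print(prepared.method)  # GET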
@ -341,9 +372,18 @@ class Session(SessionRedirectMixin):
""" """
__attrs__ = [ __attrs__ = [
'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify', "headers",
'cert', 'adapters', 'stream', 'trust_env', "cookies",
'max_redirects', "auth",
"proxies",
"hooks",
"params",
"verify",
"cert",
"adapters",
"stream",
"trust_env",
"max_redirects",
] ]
def __init__(self): def __init__(self):
@ -405,8 +445,8 @@ class Session(SessionRedirectMixin):
# Default connection adapters. # Default connection adapters.
self.adapters = OrderedDict() self.adapters = OrderedDict()
self.mount('https://', HTTPAdapter()) self.mount("https://", HTTPAdapter())
self.mount('http://', HTTPAdapter()) self.mount("http://", HTTPAdapter())
def __enter__(self): def __enter__(self):
return self return self
@ -432,7 +472,8 @@ class Session(SessionRedirectMixin):
# Merge with session cookies # Merge with session cookies
merged_cookies = merge_cookies( merged_cookies = merge_cookies(
merge_cookies(RequestsCookieJar(), self.cookies), cookies) merge_cookies(RequestsCookieJar(), self.cookies), cookies
)
# Set environment's basic authentication if not explicitly set. # Set environment's basic authentication if not explicitly set.
auth = request.auth auth = request.auth
@ -446,7 +487,9 @@ class Session(SessionRedirectMixin):
files=request.files, files=request.files,
data=request.data, data=request.data,
json=request.json, json=request.json,
headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict), headers=merge_setting(
request.headers, self.headers, dict_class=CaseInsensitiveDict
),
params=merge_setting(request.params, self.params), params=merge_setting(request.params, self.params),
auth=merge_setting(auth, self.auth), auth=merge_setting(auth, self.auth),
cookies=merged_cookies, cookies=merged_cookies,
@ -454,10 +497,25 @@ class Session(SessionRedirectMixin):
) )
return p return p
def request(self, method, url, def request(
params=None, data=None, headers=None, cookies=None, files=None, self,
auth=None, timeout=None, allow_redirects=True, proxies=None, method,
hooks=None, stream=None, verify=None, cert=None, json=None): url,
params=None,
data=None,
headers=None,
cookies=None,
files=None,
auth=None,
timeout=None,
allow_redirects=True,
proxies=None,
hooks=None,
stream=None,
verify=None,
cert=None,
json=None,
):
"""Constructs a :class:`Request <Request>`, prepares it and sends it. """Constructs a :class:`Request <Request>`, prepares it and sends it.
Returns :class:`Response <Response>` object. Returns :class:`Response <Response>` object.
@ -522,8 +580,8 @@ class Session(SessionRedirectMixin):
# Send the request. # Send the request.
send_kwargs = { send_kwargs = {
'timeout': timeout, "timeout": timeout,
'allow_redirects': allow_redirects, "allow_redirects": allow_redirects,
} }
send_kwargs.update(settings) send_kwargs.update(settings)
resp = self.send(prep, **send_kwargs) resp = self.send(prep, **send_kwargs)
@ -538,8 +596,8 @@ class Session(SessionRedirectMixin):
:rtype: requests.Response :rtype: requests.Response
""" """
kwargs.setdefault('allow_redirects', True) kwargs.setdefault("allow_redirects", True)
return self.request('GET', url, **kwargs) return self.request("GET", url, **kwargs)
def options(self, url, **kwargs): def options(self, url, **kwargs):
r"""Sends a OPTIONS request. Returns :class:`Response` object. r"""Sends a OPTIONS request. Returns :class:`Response` object.
@ -549,8 +607,8 @@ class Session(SessionRedirectMixin):
:rtype: requests.Response :rtype: requests.Response
""" """
kwargs.setdefault('allow_redirects', True) kwargs.setdefault("allow_redirects", True)
return self.request('OPTIONS', url, **kwargs) return self.request("OPTIONS", url, **kwargs)
def head(self, url, **kwargs): def head(self, url, **kwargs):
r"""Sends a HEAD request. Returns :class:`Response` object. r"""Sends a HEAD request. Returns :class:`Response` object.
@ -560,8 +618,8 @@ class Session(SessionRedirectMixin):
:rtype: requests.Response :rtype: requests.Response
""" """
kwargs.setdefault('allow_redirects', False) kwargs.setdefault("allow_redirects", False)
return self.request('HEAD', url, **kwargs) return self.request("HEAD", url, **kwargs)
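The differing allow_redirects defaults above are easiest to see side by side. A short usage sketch, assuming network access to httpbin.org:

import requests

with requests.Session() as s:
    r = s.get("https://httpbin.org/redirect/1")   # follows the 302 by default
    h = s.head("https://httpbin.org/redirect/1")  # HEAD does not, by default

print(r.status_code, len(r.history))  # 200 1
print(h.status_code, len(h.history))  # 302 0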
def post(self, url, data=None, json=None, **kwargs): def post(self, url, data=None, json=None, **kwargs):
r"""Sends a POST request. Returns :class:`Response` object. r"""Sends a POST request. Returns :class:`Response` object.
@ -574,7 +632,7 @@ class Session(SessionRedirectMixin):
:rtype: requests.Response :rtype: requests.Response
""" """
return self.request('POST', url, data=data, json=json, **kwargs) return self.request("POST", url, data=data, json=json, **kwargs)
def put(self, url, data=None, **kwargs): def put(self, url, data=None, **kwargs):
r"""Sends a PUT request. Returns :class:`Response` object. r"""Sends a PUT request. Returns :class:`Response` object.
@ -586,7 +644,7 @@ class Session(SessionRedirectMixin):
:rtype: requests.Response :rtype: requests.Response
""" """
return self.request('PUT', url, data=data, **kwargs) return self.request("PUT", url, data=data, **kwargs)
def patch(self, url, data=None, **kwargs): def patch(self, url, data=None, **kwargs):
r"""Sends a PATCH request. Returns :class:`Response` object. r"""Sends a PATCH request. Returns :class:`Response` object.
@ -598,7 +656,7 @@ class Session(SessionRedirectMixin):
:rtype: requests.Response :rtype: requests.Response
""" """
return self.request('PATCH', url, data=data, **kwargs) return self.request("PATCH", url, data=data, **kwargs)
def delete(self, url, **kwargs): def delete(self, url, **kwargs):
r"""Sends a DELETE request. Returns :class:`Response` object. r"""Sends a DELETE request. Returns :class:`Response` object.
@ -608,7 +666,7 @@ class Session(SessionRedirectMixin):
:rtype: requests.Response :rtype: requests.Response
""" """
return self.request('DELETE', url, **kwargs) return self.request("DELETE", url, **kwargs)
def send(self, request, **kwargs): def send(self, request, **kwargs):
"""Send a given PreparedRequest. """Send a given PreparedRequest.
@ -617,22 +675,20 @@ class Session(SessionRedirectMixin):
""" """
# Set defaults that the hooks can utilize to ensure they always have # Set defaults that the hooks can utilize to ensure they always have
# the correct parameters to reproduce the previous request. # the correct parameters to reproduce the previous request.
kwargs.setdefault('stream', self.stream) kwargs.setdefault("stream", self.stream)
kwargs.setdefault('verify', self.verify) kwargs.setdefault("verify", self.verify)
kwargs.setdefault('cert', self.cert) kwargs.setdefault("cert", self.cert)
if 'proxies' not in kwargs: if "proxies" not in kwargs:
kwargs['proxies'] = resolve_proxies( kwargs["proxies"] = resolve_proxies(request, self.proxies, self.trust_env)
request, self.proxies, self.trust_env
)
# It's possible that users might accidentally send a Request object. # It's possible that users might accidentally send a Request object.
# Guard against that specific failure case. # Guard against that specific failure case.
if isinstance(request, Request): if isinstance(request, Request):
raise ValueError('You can only send PreparedRequests.') raise ValueError("You can only send PreparedRequests.")
# Set up variables needed for resolve_redirects and dispatching of hooks # Set up variables needed for resolve_redirects and dispatching of hooks
allow_redirects = kwargs.pop('allow_redirects', True) allow_redirects = kwargs.pop("allow_redirects", True)
stream = kwargs.get('stream') stream = kwargs.get("stream")
hooks = request.hooks hooks = request.hooks
# Get the appropriate adapter to use # Get the appropriate adapter to use
@ -649,7 +705,7 @@ class Session(SessionRedirectMixin):
r.elapsed = timedelta(seconds=elapsed) r.elapsed = timedelta(seconds=elapsed)
# Response manipulation hooks # Response manipulation hooks
r = dispatch_hook('response', hooks, r, **kwargs) r = dispatch_hook("response", hooks, r, **kwargs)
# Persist cookies # Persist cookies
if r.history: if r.history:
@ -679,7 +735,9 @@ class Session(SessionRedirectMixin):
# If redirects aren't being followed, store the response on the Request for Response.next(). # If redirects aren't being followed, store the response on the Request for Response.next().
if not allow_redirects: if not allow_redirects:
try: try:
r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs)) r._next = next(
self.resolve_redirects(r, request, yield_requests=True, **kwargs)
)
except StopIteration: except StopIteration:
pass pass
@ -697,16 +755,19 @@ class Session(SessionRedirectMixin):
# Gather clues from the surrounding environment. # Gather clues from the surrounding environment.
if self.trust_env: if self.trust_env:
# Set environment's proxies. # Set environment's proxies.
no_proxy = proxies.get('no_proxy') if proxies is not None else None no_proxy = proxies.get("no_proxy") if proxies is not None else None
env_proxies = get_environ_proxies(url, no_proxy=no_proxy) env_proxies = get_environ_proxies(url, no_proxy=no_proxy)
for (k, v) in env_proxies.items(): for (k, v) in env_proxies.items():
proxies.setdefault(k, v) proxies.setdefault(k, v)
# Look for requests environment configuration and be compatible # Look for requests environment configuration
# with cURL. # and be compatible with cURL.
if verify is True or verify is None: if verify is True or verify is None:
verify = (os.environ.get('REQUESTS_CA_BUNDLE') or verify = (
os.environ.get('CURL_CA_BUNDLE')) os.environ.get("REQUESTS_CA_BUNDLE")
or os.environ.get("CURL_CA_BUNDLE")
or verify
)
# Merge all the kwargs. # Merge all the kwargs.
proxies = merge_setting(proxies, self.proxies) proxies = merge_setting(proxies, self.proxies)
@ -714,8 +775,7 @@ class Session(SessionRedirectMixin):
verify = merge_setting(verify, self.verify) verify = merge_setting(verify, self.verify)
cert = merge_setting(cert, self.cert) cert = merge_setting(cert, self.cert)
return {'verify': verify, 'proxies': proxies, 'stream': stream, return {"proxies": proxies, "stream": stream, "verify": verify, "cert": cert}
'cert': cert}
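A condensed model of the verify resolution above (ignoring the later merge with self.verify): an explicit path or False passes through untouched, while True/None defers to the cURL-compatible environment variables. The bundle path used here is hypothetical:

import os

def resolved_verify(verify, trust_env=True):
    if trust_env and (verify is True or verify is None):
        # Environment bundles win only when verify was left at its default.
        return (
            os.environ.get("REQUESTS_CA_BUNDLE")
            or os.environ.get("CURL_CA_BUNDLE")
            or verify
        )
    return verify

os.environ["REQUESTS_CA_BUNDLE"] = "/tmp/corp-ca.pem"
assert resolved_verify(True) == "/tmp/corp-ca.pem"
assert resolved_verify("/etc/ssl/my.pem") == "/etc/ssl/my.pem"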
def get_adapter(self, url): def get_adapter(self, url):
""" """
@ -729,7 +789,7 @@ class Session(SessionRedirectMixin):
return adapter return adapter
# Nothing matches :-/ # Nothing matches :-/
raise InvalidSchema("No connection adapters were found for {!r}".format(url)) raise InvalidSchema(f"No connection adapters were found for {url!r}")
def close(self): def close(self):
"""Closes all adapters and as such the session""" """Closes all adapters and as such the session"""
View file
@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
r""" r"""
The ``codes`` object defines a mapping from common names for HTTP statuses The ``codes`` object defines a mapping from common names for HTTP statuses
to their numerical codes, accessible either as attributes or as dictionary to their numerical codes, accessible either as attributes or as dictionary
@ -23,101 +21,108 @@ the names are allowed. For example, ``codes.ok``, ``codes.OK``, and
from .structures import LookupDict from .structures import LookupDict
_codes = { _codes = {
# Informational. # Informational.
100: ('continue',), 100: ("continue",),
101: ('switching_protocols',), 101: ("switching_protocols",),
102: ('processing',), 102: ("processing",),
103: ('checkpoint',), 103: ("checkpoint",),
122: ('uri_too_long', 'request_uri_too_long'), 122: ("uri_too_long", "request_uri_too_long"),
200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', ''), 200: ("ok", "okay", "all_ok", "all_okay", "all_good", "\\o/", ""),
201: ('created',), 201: ("created",),
202: ('accepted',), 202: ("accepted",),
203: ('non_authoritative_info', 'non_authoritative_information'), 203: ("non_authoritative_info", "non_authoritative_information"),
204: ('no_content',), 204: ("no_content",),
205: ('reset_content', 'reset'), 205: ("reset_content", "reset"),
206: ('partial_content', 'partial'), 206: ("partial_content", "partial"),
207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'), 207: ("multi_status", "multiple_status", "multi_stati", "multiple_stati"),
208: ('already_reported',), 208: ("already_reported",),
226: ('im_used',), 226: ("im_used",),
# Redirection. # Redirection.
300: ('multiple_choices',), 300: ("multiple_choices",),
301: ('moved_permanently', 'moved', '\\o-'), 301: ("moved_permanently", "moved", "\\o-"),
302: ('found',), 302: ("found",),
303: ('see_other', 'other'), 303: ("see_other", "other"),
304: ('not_modified',), 304: ("not_modified",),
305: ('use_proxy',), 305: ("use_proxy",),
306: ('switch_proxy',), 306: ("switch_proxy",),
307: ('temporary_redirect', 'temporary_moved', 'temporary'), 307: ("temporary_redirect", "temporary_moved", "temporary"),
308: ('permanent_redirect', 308: (
'resume_incomplete', 'resume',), # These 2 to be removed in 3.0 "permanent_redirect",
"resume_incomplete",
"resume",
), # "resume" and "resume_incomplete" to be removed in 3.0
# Client Error. # Client Error.
400: ('bad_request', 'bad'), 400: ("bad_request", "bad"),
401: ('unauthorized',), 401: ("unauthorized",),
402: ('payment_required', 'payment'), 402: ("payment_required", "payment"),
403: ('forbidden',), 403: ("forbidden",),
404: ('not_found', '-o-'), 404: ("not_found", "-o-"),
405: ('method_not_allowed', 'not_allowed'), 405: ("method_not_allowed", "not_allowed"),
406: ('not_acceptable',), 406: ("not_acceptable",),
407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'), 407: ("proxy_authentication_required", "proxy_auth", "proxy_authentication"),
408: ('request_timeout', 'timeout'), 408: ("request_timeout", "timeout"),
409: ('conflict',), 409: ("conflict",),
410: ('gone',), 410: ("gone",),
411: ('length_required',), 411: ("length_required",),
412: ('precondition_failed', 'precondition'), 412: ("precondition_failed", "precondition"),
413: ('request_entity_too_large',), 413: ("request_entity_too_large",),
414: ('request_uri_too_large',), 414: ("request_uri_too_large",),
415: ('unsupported_media_type', 'unsupported_media', 'media_type'), 415: ("unsupported_media_type", "unsupported_media", "media_type"),
416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'), 416: (
417: ('expectation_failed',), "requested_range_not_satisfiable",
418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'), "requested_range",
421: ('misdirected_request',), "range_not_satisfiable",
422: ('unprocessable_entity', 'unprocessable'), ),
423: ('locked',), 417: ("expectation_failed",),
424: ('failed_dependency', 'dependency'), 418: ("im_a_teapot", "teapot", "i_am_a_teapot"),
425: ('unordered_collection', 'unordered'), 421: ("misdirected_request",),
426: ('upgrade_required', 'upgrade'), 422: ("unprocessable_entity", "unprocessable"),
428: ('precondition_required', 'precondition'), 423: ("locked",),
429: ('too_many_requests', 'too_many'), 424: ("failed_dependency", "dependency"),
431: ('header_fields_too_large', 'fields_too_large'), 425: ("unordered_collection", "unordered"),
444: ('no_response', 'none'), 426: ("upgrade_required", "upgrade"),
449: ('retry_with', 'retry'), 428: ("precondition_required", "precondition"),
450: ('blocked_by_windows_parental_controls', 'parental_controls'), 429: ("too_many_requests", "too_many"),
451: ('unavailable_for_legal_reasons', 'legal_reasons'), 431: ("header_fields_too_large", "fields_too_large"),
499: ('client_closed_request',), 444: ("no_response", "none"),
449: ("retry_with", "retry"),
450: ("blocked_by_windows_parental_controls", "parental_controls"),
451: ("unavailable_for_legal_reasons", "legal_reasons"),
499: ("client_closed_request",),
# Server Error. # Server Error.
500: ('internal_server_error', 'server_error', '/o\\', ''), 500: ("internal_server_error", "server_error", "/o\\", ""),
501: ('not_implemented',), 501: ("not_implemented",),
502: ('bad_gateway',), 502: ("bad_gateway",),
503: ('service_unavailable', 'unavailable'), 503: ("service_unavailable", "unavailable"),
504: ('gateway_timeout',), 504: ("gateway_timeout",),
505: ('http_version_not_supported', 'http_version'), 505: ("http_version_not_supported", "http_version"),
506: ('variant_also_negotiates',), 506: ("variant_also_negotiates",),
507: ('insufficient_storage',), 507: ("insufficient_storage",),
509: ('bandwidth_limit_exceeded', 'bandwidth'), 509: ("bandwidth_limit_exceeded", "bandwidth"),
510: ('not_extended',), 510: ("not_extended",),
511: ('network_authentication_required', 'network_auth', 'network_authentication'), 511: ("network_authentication_required", "network_auth", "network_authentication"),
} }
codes = LookupDict(name='status_codes') codes = LookupDict(name="status_codes")
def _init(): def _init():
for code, titles in _codes.items(): for code, titles in _codes.items():
for title in titles: for title in titles:
setattr(codes, title, code) setattr(codes, title, code)
if not title.startswith(('\\', '/')): if not title.startswith(("\\", "/")):
setattr(codes, title.upper(), code) setattr(codes, title.upper(), code)
def doc(code): def doc(code):
names = ', '.join('``%s``' % n for n in _codes[code]) names = ", ".join(f"``{n}``" for n in _codes[code])
return '* %d: %s' % (code, names) return "* %d: %s" % (code, names)
global __doc__ global __doc__
__doc__ = (__doc__ + '\n' + __doc__ = (
'\n'.join(doc(code) for code in sorted(_codes)) __doc__ + "\n" + "\n".join(doc(code) for code in sorted(_codes))
if __doc__ is not None else None) if __doc__ is not None
else None
)
_init() _init()
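After _init() runs, every alias resolves both as an attribute and as a dictionary key, in lower or upper case (except the \o/-style names). A quick check against requests 2.28.x:

from requests import codes

assert codes.ok == codes.OK == 200
assert codes.teapot == 418
assert codes["not_found"] == 404
assert codes.moved == 301  # the alias used by rebuild_method() above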
View file
@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
""" """
requests.structures requests.structures
~~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~
@ -64,11 +62,7 @@ class CaseInsensitiveDict(MutableMapping):
def lower_items(self): def lower_items(self):
"""Like iteritems(), but with all lowercase keys.""" """Like iteritems(), but with all lowercase keys."""
return ( return ((lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items())
(lowerkey, keyval[1])
for (lowerkey, keyval)
in self._store.items()
)
def __eq__(self, other): def __eq__(self, other):
if isinstance(other, Mapping): if isinstance(other, Mapping):
@ -91,10 +85,10 @@ class LookupDict(dict):
def __init__(self, name=None): def __init__(self, name=None):
self.name = name self.name = name
super(LookupDict, self).__init__() super().__init__()
def __repr__(self): def __repr__(self):
return '<lookup \'%s\'>' % (self.name) return f"<lookup '{self.name}'>"
def __getitem__(self, key): def __getitem__(self, key):
# We allow fall-through here, so values default to None # We allow fall-through here, so values default to None
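How the two structures behave in practice: CaseInsensitiveDict keeps the original key casing but ignores case on lookup, and LookupDict falls through to None for unknown keys. A quick check against requests 2.28.x:

from requests.structures import CaseInsensitiveDict, LookupDict

cid = CaseInsensitiveDict({"Accept": "*/*"})
assert cid["accept"] == "*/*"        # lookup is case-insensitive
assert list(cid) == ["Accept"]       # original casing is preserved

ld = LookupDict(name="demo")
ld.foo = 1
assert ld["foo"] == 1
assert ld["missing"] is None         # fall-through instead of KeyError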
View file
@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
""" """
requests.utils requests.utils
~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~
@ -20,28 +18,46 @@ import tempfile
import warnings import warnings
import zipfile import zipfile
from collections import OrderedDict from collections import OrderedDict
from urllib3.util import make_headers
from urllib3.util import parse_url
from .__version__ import __version__ from urllib3.util import make_headers, parse_url
from . import certs from . import certs
from .__version__ import __version__
# to_native_string is unused here, but imported here for backwards compatibility # to_native_string is unused here, but imported here for backwards compatibility
from ._internal_utils import to_native_string from ._internal_utils import HEADER_VALIDATORS, to_native_string # noqa: F401
from .compat import (
Mapping,
basestring,
bytes,
getproxies,
getproxies_environment,
integer_types,
)
from .compat import parse_http_list as _parse_list_header from .compat import parse_http_list as _parse_list_header
from .compat import ( from .compat import (
quote, urlparse, bytes, str, unquote, getproxies, proxy_bypass,
proxy_bypass, urlunparse, basestring, integer_types, is_py3, proxy_bypass_environment,
proxy_bypass_environment, getproxies_environment, Mapping) quote,
str,
unquote,
urlparse,
urlunparse,
)
from .cookies import cookiejar_from_dict from .cookies import cookiejar_from_dict
from .structures import CaseInsensitiveDict
from .exceptions import ( from .exceptions import (
InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError) FileModeWarning,
InvalidHeader,
InvalidURL,
UnrewindableBodyError,
)
from .structures import CaseInsensitiveDict
NETRC_FILES = ('.netrc', '_netrc') NETRC_FILES = (".netrc", "_netrc")
DEFAULT_CA_BUNDLE_PATH = certs.where() DEFAULT_CA_BUNDLE_PATH = certs.where()
DEFAULT_PORTS = {'http': 80, 'https': 443} DEFAULT_PORTS = {"http": 80, "https": 443}
# Ensure that ', ' is used to preserve previous delimiter behavior. # Ensure that ', ' is used to preserve previous delimiter behavior.
DEFAULT_ACCEPT_ENCODING = ", ".join( DEFAULT_ACCEPT_ENCODING = ", ".join(
@ -49,28 +65,25 @@ DEFAULT_ACCEPT_ENCODING = ", ".join(
) )
if sys.platform == 'win32': if sys.platform == "win32":
# provide a proxy_bypass version on Windows without DNS lookups # provide a proxy_bypass version on Windows without DNS lookups
def proxy_bypass_registry(host): def proxy_bypass_registry(host):
try: try:
if is_py3: import winreg
import winreg
else:
import _winreg as winreg
except ImportError: except ImportError:
return False return False
try: try:
internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER, internetSettings = winreg.OpenKey(
r'Software\Microsoft\Windows\CurrentVersion\Internet Settings') winreg.HKEY_CURRENT_USER,
r"Software\Microsoft\Windows\CurrentVersion\Internet Settings",
)
# ProxyEnable could be REG_SZ or REG_DWORD, normalizing it # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it
proxyEnable = int(winreg.QueryValueEx(internetSettings, proxyEnable = int(winreg.QueryValueEx(internetSettings, "ProxyEnable")[0])
'ProxyEnable')[0])
# ProxyOverride is almost always a string # ProxyOverride is almost always a string
proxyOverride = winreg.QueryValueEx(internetSettings, proxyOverride = winreg.QueryValueEx(internetSettings, "ProxyOverride")[0]
'ProxyOverride')[0] except (OSError, ValueError):
except OSError:
return False return False
if not proxyEnable or not proxyOverride: if not proxyEnable or not proxyOverride:
return False return False
@ -78,15 +91,15 @@ if sys.platform == 'win32':
# make a check value list from the registry entry: replace the # make a check value list from the registry entry: replace the
# '<local>' string by the localhost entry and the corresponding # '<local>' string by the localhost entry and the corresponding
# canonical entry. # canonical entry.
proxyOverride = proxyOverride.split(';') proxyOverride = proxyOverride.split(";")
# now check if we match one of the registry values. # now check if we match one of the registry values.
for test in proxyOverride: for test in proxyOverride:
if test == '<local>': if test == "<local>":
if '.' not in host: if "." not in host:
return True return True
test = test.replace(".", r"\.") # mask dots test = test.replace(".", r"\.") # mask dots
test = test.replace("*", r".*") # change glob sequence test = test.replace("*", r".*") # change glob sequence
test = test.replace("?", r".") # change glob char test = test.replace("?", r".") # change glob char
if re.match(test, host, re.I): if re.match(test, host, re.I):
return True return True
return False return False
@ -106,7 +119,7 @@ if sys.platform == 'win32':
def dict_to_sequence(d): def dict_to_sequence(d):
"""Returns an internal sequence dictionary update.""" """Returns an internal sequence dictionary update."""
if hasattr(d, 'items'): if hasattr(d, "items"):
d = d.items() d = d.items()
return d return d
@ -116,13 +129,13 @@ def super_len(o):
total_length = None total_length = None
current_position = 0 current_position = 0
if hasattr(o, '__len__'): if hasattr(o, "__len__"):
total_length = len(o) total_length = len(o)
elif hasattr(o, 'len'): elif hasattr(o, "len"):
total_length = o.len total_length = o.len
elif hasattr(o, 'fileno'): elif hasattr(o, "fileno"):
try: try:
fileno = o.fileno() fileno = o.fileno()
except (io.UnsupportedOperation, AttributeError): except (io.UnsupportedOperation, AttributeError):
@ -135,21 +148,23 @@ def super_len(o):
# Having used fstat to determine the file length, we need to # Having used fstat to determine the file length, we need to
# confirm that this file was opened up in binary mode. # confirm that this file was opened up in binary mode.
if 'b' not in o.mode: if "b" not in o.mode:
warnings.warn(( warnings.warn(
"Requests has determined the content-length for this " (
"request using the binary size of the file: however, the " "Requests has determined the content-length for this "
"file has been opened in text mode (i.e. without the 'b' " "request using the binary size of the file: however, the "
"flag in the mode). This may lead to an incorrect " "file has been opened in text mode (i.e. without the 'b' "
"content-length. In Requests 3.0, support will be removed " "flag in the mode). This may lead to an incorrect "
"for files in text mode."), "content-length. In Requests 3.0, support will be removed "
FileModeWarning "for files in text mode."
),
FileModeWarning,
) )
if hasattr(o, 'tell'): if hasattr(o, "tell"):
try: try:
current_position = o.tell() current_position = o.tell()
except (OSError, IOError): except OSError:
# This can happen in some weird situations, such as when the file # This can happen in some weird situations, such as when the file
# is actually a special file descriptor like stdin. In this # is actually a special file descriptor like stdin. In this
# instance, we don't know what the length is, so set it to zero and # instance, we don't know what the length is, so set it to zero and
@ -157,7 +172,7 @@ def super_len(o):
if total_length is not None: if total_length is not None:
current_position = total_length current_position = total_length
else: else:
if hasattr(o, 'seek') and total_length is None: if hasattr(o, "seek") and total_length is None:
# StringIO and BytesIO have seek but no usable fileno # StringIO and BytesIO have seek but no usable fileno
try: try:
# seek to end of file # seek to end of file
@ -167,7 +182,7 @@ def super_len(o):
# seek back to current position to support # seek back to current position to support
# partially read file-like objects # partially read file-like objects
o.seek(current_position or 0) o.seek(current_position or 0)
except (OSError, IOError): except OSError:
total_length = 0 total_length = 0
if total_length is None: if total_length is None:
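In effect, super_len() reports the bytes remaining from the current position, not the object's total size. A quick check against requests 2.28.x:

from io import BytesIO
from requests.utils import super_len

buf = BytesIO(b"abcdef")
buf.read(2)                       # partially consume the stream
assert super_len(buf) == 4        # remaining bytes, not the total size
assert super_len("abcdef") == 6   # sized objects just use len()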
@ -179,14 +194,14 @@ def super_len(o):
def get_netrc_auth(url, raise_errors=False): def get_netrc_auth(url, raise_errors=False):
"""Returns the Requests tuple auth for a given url from netrc.""" """Returns the Requests tuple auth for a given url from netrc."""
netrc_file = os.environ.get('NETRC') netrc_file = os.environ.get("NETRC")
if netrc_file is not None: if netrc_file is not None:
netrc_locations = (netrc_file,) netrc_locations = (netrc_file,)
else: else:
netrc_locations = ('~/{}'.format(f) for f in NETRC_FILES) netrc_locations = (f"~/{f}" for f in NETRC_FILES)
try: try:
from netrc import netrc, NetrcParseError from netrc import NetrcParseError, netrc
netrc_path = None netrc_path = None
@ -211,18 +226,18 @@ def get_netrc_auth(url, raise_errors=False):
# Strip port numbers from netloc. This weird `if...encode`` dance is # Strip port numbers from netloc. This weird `if...encode`` dance is
# used for Python 3.2, which doesn't support unicode literals. # used for Python 3.2, which doesn't support unicode literals.
splitstr = b':' splitstr = b":"
if isinstance(url, str): if isinstance(url, str):
splitstr = splitstr.decode('ascii') splitstr = splitstr.decode("ascii")
host = ri.netloc.split(splitstr)[0] host = ri.netloc.split(splitstr)[0]
try: try:
_netrc = netrc(netrc_path).authenticators(host) _netrc = netrc(netrc_path).authenticators(host)
if _netrc: if _netrc:
# Return with login / password # Return with login / password
login_i = (0 if _netrc[0] else 1) login_i = 0 if _netrc[0] else 1
return (_netrc[login_i], _netrc[2]) return (_netrc[login_i], _netrc[2])
except (NetrcParseError, IOError): except (NetrcParseError, OSError):
# If there was a parsing error or a permissions issue reading the file, # If there was a parsing error or a permissions issue reading the file,
# we'll just skip netrc auth unless explicitly asked to raise errors. # we'll just skip netrc auth unless explicitly asked to raise errors.
if raise_errors: if raise_errors:
@ -235,9 +250,8 @@ def get_netrc_auth(url, raise_errors=False):
def guess_filename(obj): def guess_filename(obj):
"""Tries to guess the filename of the given object.""" """Tries to guess the filename of the given object."""
name = getattr(obj, 'name', None) name = getattr(obj, "name", None)
if (name and isinstance(name, basestring) and name[0] != '<' and if name and isinstance(name, basestring) and name[0] != "<" and name[-1] != ">":
name[-1] != '>'):
return os.path.basename(name) return os.path.basename(name)
@ -259,7 +273,7 @@ def extract_zipped_paths(path):
# If we don't check for an empty prefix after the split (in other words, the archive remains unchanged after the split), # If we don't check for an empty prefix after the split (in other words, the archive remains unchanged after the split),
# we _can_ end up in an infinite loop on a rare corner case affecting a small number of users # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users
break break
member = '/'.join([prefix, member]) member = "/".join([prefix, member])
if not zipfile.is_zipfile(archive): if not zipfile.is_zipfile(archive):
return path return path
@ -270,7 +284,7 @@ def extract_zipped_paths(path):
# we have a valid zip archive and a valid member of that archive # we have a valid zip archive and a valid member of that archive
tmp = tempfile.gettempdir() tmp = tempfile.gettempdir()
extracted_path = os.path.join(tmp, member.split('/')[-1]) extracted_path = os.path.join(tmp, member.split("/")[-1])
if not os.path.exists(extracted_path): if not os.path.exists(extracted_path):
# use read + write to avoid creating nested folders; we only want the file and this avoids a mkdir race condition # use read + write to avoid creating nested folders; we only want the file and this avoids a mkdir race condition
with atomic_open(extracted_path) as file_handler: with atomic_open(extracted_path) as file_handler:
@ -281,12 +295,11 @@ def extract_zipped_paths(path):
@contextlib.contextmanager @contextlib.contextmanager
def atomic_open(filename): def atomic_open(filename):
"""Write a file to the disk in an atomic fashion""" """Write a file to the disk in an atomic fashion"""
replacer = os.rename if sys.version_info[0] == 2 else os.replace
tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename)) tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename))
try: try:
with os.fdopen(tmp_descriptor, 'wb') as tmp_handler: with os.fdopen(tmp_descriptor, "wb") as tmp_handler:
yield tmp_handler yield tmp_handler
replacer(tmp_name, filename) os.replace(tmp_name, filename)
except BaseException: except BaseException:
os.remove(tmp_name) os.remove(tmp_name)
raise raise
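Usage is a plain context manager: the bytes land in a sibling temp file and are os.replace()'d into place only on success, so readers never observe a half-written file. A sketch with a hypothetical path:

from requests.utils import atomic_open

with atomic_open("/tmp/example-settings.json") as fh:
    fh.write(b'{"retries": 3}')   # the handle is opened in binary mode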
@ -314,7 +327,7 @@ def from_key_val_list(value):
return None return None
if isinstance(value, (str, bytes, bool, int)): if isinstance(value, (str, bytes, bool, int)):
raise ValueError('cannot encode objects that are not 2-tuples') raise ValueError("cannot encode objects that are not 2-tuples")
return OrderedDict(value) return OrderedDict(value)
@ -340,7 +353,7 @@ def to_key_val_list(value):
return None return None
if isinstance(value, (str, bytes, bool, int)): if isinstance(value, (str, bytes, bool, int)):
raise ValueError('cannot encode objects that are not 2-tuples') raise ValueError("cannot encode objects that are not 2-tuples")
if isinstance(value, Mapping): if isinstance(value, Mapping):
value = value.items() value = value.items()
@ -405,10 +418,10 @@ def parse_dict_header(value):
""" """
result = {} result = {}
for item in _parse_list_header(value): for item in _parse_list_header(value):
if '=' not in item: if "=" not in item:
result[item] = None result[item] = None
continue continue
name, value = item.split('=', 1) name, value = item.split("=", 1)
if value[:1] == value[-1:] == '"': if value[:1] == value[-1:] == '"':
value = unquote_header_value(value[1:-1]) value = unquote_header_value(value[1:-1])
result[name] = value result[name] = value
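A quick check of parse_dict_header() against its documented behavior in requests 2.28.x:

from requests.utils import parse_dict_header

assert parse_dict_header('foo="is a fish", bar="as well"') == {
    "foo": "is a fish",
    "bar": "as well",
}
assert parse_dict_header("key_without_value") == {"key_without_value": None}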
@ -436,8 +449,8 @@ def unquote_header_value(value, is_filename=False):
# replace sequence below on a UNC path has the effect of turning # replace sequence below on a UNC path has the effect of turning
# the leading double slash into a single slash and then # the leading double slash into a single slash and then
# _fix_ie_filename() doesn't work correctly. See #458. # _fix_ie_filename() doesn't work correctly. See #458.
if not is_filename or value[:2] != '\\\\': if not is_filename or value[:2] != "\\\\":
return value.replace('\\\\', '\\').replace('\\"', '"') return value.replace("\\\\", "\\").replace('\\"', '"')
return value return value
@ -472,19 +485,24 @@ def get_encodings_from_content(content):
:param content: bytestring to extract encodings from. :param content: bytestring to extract encodings from.
""" """
warnings.warn(( warnings.warn(
'In requests 3.0, get_encodings_from_content will be removed. For ' (
'more information, please see the discussion on issue #2266. (This' "In requests 3.0, get_encodings_from_content will be removed. For "
' warning should only appear once.)'), "more information, please see the discussion on issue #2266. (This"
DeprecationWarning) " warning should only appear once.)"
),
DeprecationWarning,
)
charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I) charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I) pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]') xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]')
return (charset_re.findall(content) + return (
pragma_re.findall(content) + charset_re.findall(content)
xml_re.findall(content)) + pragma_re.findall(content)
+ xml_re.findall(content)
)
def _parse_content_type_header(header): def _parse_content_type_header(header):
@ -495,7 +513,7 @@ def _parse_content_type_header(header):
parameters parameters
""" """
tokens = header.split(';') tokens = header.split(";")
content_type, params = tokens[0].strip(), tokens[1:] content_type, params = tokens[0].strip(), tokens[1:]
params_dict = {} params_dict = {}
items_to_strip = "\"' " items_to_strip = "\"' "
@ -507,7 +525,7 @@ def _parse_content_type_header(header):
index_of_equals = param.find("=") index_of_equals = param.find("=")
if index_of_equals != -1: if index_of_equals != -1:
key = param[:index_of_equals].strip(items_to_strip) key = param[:index_of_equals].strip(items_to_strip)
value = param[index_of_equals + 1:].strip(items_to_strip) value = param[index_of_equals + 1 :].strip(items_to_strip)
params_dict[key.lower()] = value params_dict[key.lower()] = value
return content_type, params_dict return content_type, params_dict
@ -519,38 +537,37 @@ def get_encoding_from_headers(headers):
:rtype: str :rtype: str
""" """
content_type = headers.get('content-type') content_type = headers.get("content-type")
if not content_type: if not content_type:
return None return None
content_type, params = _parse_content_type_header(content_type) content_type, params = _parse_content_type_header(content_type)
if 'charset' in params: if "charset" in params:
return params['charset'].strip("'\"") return params["charset"].strip("'\"")
if 'text' in content_type: if "text" in content_type:
return 'ISO-8859-1' return "ISO-8859-1"
if 'application/json' in content_type: if "application/json" in content_type:
# Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset
return 'utf-8' return "utf-8"
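The three branches above in action; a plain dict with a lowercase key stands in for the CaseInsensitiveDict the function normally receives. With requests 2.28.x:

from requests.utils import get_encoding_from_headers

assert get_encoding_from_headers({"content-type": "text/html; charset=UTF-8"}) == "UTF-8"
assert get_encoding_from_headers({"content-type": "text/plain"}) == "ISO-8859-1"
assert get_encoding_from_headers({"content-type": "application/json"}) == "utf-8"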
def stream_decode_response_unicode(iterator, r): def stream_decode_response_unicode(iterator, r):
"""Stream decodes a iterator.""" """Stream decodes an iterator."""
if r.encoding is None: if r.encoding is None:
for item in iterator: yield from iterator
yield item
return return
decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace') decoder = codecs.getincrementaldecoder(r.encoding)(errors="replace")
for chunk in iterator: for chunk in iterator:
rv = decoder.decode(chunk) rv = decoder.decode(chunk)
if rv: if rv:
yield rv yield rv
rv = decoder.decode(b'', final=True) rv = decoder.decode(b"", final=True)
if rv: if rv:
yield rv yield rv
@ -561,7 +578,7 @@ def iter_slices(string, slice_length):
if slice_length is None or slice_length <= 0: if slice_length is None or slice_length <= 0:
slice_length = len(string) slice_length = len(string)
while pos < len(string): while pos < len(string):
yield string[pos:pos + slice_length] yield string[pos : pos + slice_length]
pos += slice_length pos += slice_length
@ -577,11 +594,14 @@ def get_unicode_from_response(r):
:rtype: str :rtype: str
""" """
warnings.warn(( warnings.warn(
'In requests 3.0, get_unicode_from_response will be removed. For ' (
'more information, please see the discussion on issue #2266. (This' "In requests 3.0, get_unicode_from_response will be removed. For "
' warning should only appear once.)'), "more information, please see the discussion on issue #2266. (This"
DeprecationWarning) " warning should only appear once.)"
),
DeprecationWarning,
)
tried_encodings = [] tried_encodings = []
@ -596,14 +616,15 @@ def get_unicode_from_response(r):
# Fall back: # Fall back:
try: try:
return str(r.content, encoding, errors='replace') return str(r.content, encoding, errors="replace")
except TypeError: except TypeError:
return r.content return r.content
# The unreserved URI characters (RFC 3986) # The unreserved URI characters (RFC 3986)
UNRESERVED_SET = frozenset( UNRESERVED_SET = frozenset(
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~") "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~"
)
def unquote_unreserved(uri): def unquote_unreserved(uri):
@ -612,22 +633,22 @@ def unquote_unreserved(uri):
:rtype: str :rtype: str
""" """
parts = uri.split('%') parts = uri.split("%")
for i in range(1, len(parts)): for i in range(1, len(parts)):
h = parts[i][0:2] h = parts[i][0:2]
if len(h) == 2 and h.isalnum(): if len(h) == 2 and h.isalnum():
try: try:
c = chr(int(h, 16)) c = chr(int(h, 16))
except ValueError: except ValueError:
raise InvalidURL("Invalid percent-escape sequence: '%s'" % h) raise InvalidURL(f"Invalid percent-escape sequence: '{h}'")
if c in UNRESERVED_SET: if c in UNRESERVED_SET:
parts[i] = c + parts[i][2:] parts[i] = c + parts[i][2:]
else: else:
parts[i] = '%' + parts[i] parts[i] = f"%{parts[i]}"
else: else:
parts[i] = '%' + parts[i] parts[i] = f"%{parts[i]}"
return ''.join(parts) return "".join(parts)
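A quick check of the unreserved-set handling, together with requote_uri() defined just below: %41 decodes to the unreserved "A" and is unescaped, while %2F is a reserved "/" and must stay encoded. With requests 2.28.x:

from requests.utils import requote_uri, unquote_unreserved

assert unquote_unreserved("%41bc%2Fd") == "Abc%2Fd"
assert requote_uri("http://example.com/a b") == "http://example.com/a%20b"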
def requote_uri(uri): def requote_uri(uri):
@ -660,10 +681,10 @@ def address_in_network(ip, net):
:rtype: bool :rtype: bool
""" """
ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0] ipaddr = struct.unpack("=L", socket.inet_aton(ip))[0]
netaddr, bits = net.split('/') netaddr, bits = net.split("/")
netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0] netmask = struct.unpack("=L", socket.inet_aton(dotted_netmask(int(bits))))[0]
network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask network = struct.unpack("=L", socket.inet_aton(netaddr))[0] & netmask
return (ipaddr & netmask) == (network & netmask) return (ipaddr & netmask) == (network & netmask)
@ -674,8 +695,8 @@ def dotted_netmask(mask):
:rtype: str :rtype: str
""" """
bits = 0xffffffff ^ (1 << 32 - mask) - 1 bits = 0xFFFFFFFF ^ (1 << 32 - mask) - 1
return socket.inet_ntoa(struct.pack('>I', bits)) return socket.inet_ntoa(struct.pack(">I", bits))
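The two helpers compose into a plain CIDR membership test. A quick check against requests 2.28.x:

from requests.utils import address_in_network, dotted_netmask

assert dotted_netmask(24) == "255.255.255.0"
assert address_in_network("192.168.1.7", "192.168.1.0/24")
assert not address_in_network("10.0.0.1", "192.168.1.0/24")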
def is_ipv4_address(string_ip): def is_ipv4_address(string_ip):
@ -684,7 +705,7 @@ def is_ipv4_address(string_ip):
""" """
try: try:
socket.inet_aton(string_ip) socket.inet_aton(string_ip)
except socket.error: except OSError:
return False return False
return True return True
@ -695,9 +716,9 @@ def is_valid_cidr(string_network):
:rtype: bool :rtype: bool
""" """
if string_network.count('/') == 1: if string_network.count("/") == 1:
try: try:
mask = int(string_network.split('/')[1]) mask = int(string_network.split("/")[1])
except ValueError: except ValueError:
return False return False
@ -705,8 +726,8 @@ def is_valid_cidr(string_network):
return False return False
try: try:
socket.inet_aton(string_network.split('/')[0]) socket.inet_aton(string_network.split("/")[0])
except socket.error: except OSError:
return False return False
else: else:
return False return False
@ -743,13 +764,14 @@ def should_bypass_proxies(url, no_proxy):
""" """
# Prioritize lowercase environment variables over uppercase # Prioritize lowercase environment variables over uppercase
# to keep a consistent behaviour with other http projects (curl, wget). # to keep a consistent behaviour with other http projects (curl, wget).
get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper()) def get_proxy(key):
return os.environ.get(key) or os.environ.get(key.upper())
# First check whether no_proxy is defined. If it is, check that the URL # First check whether no_proxy is defined. If it is, check that the URL
# we're getting isn't in the no_proxy list. # we're getting isn't in the no_proxy list.
no_proxy_arg = no_proxy no_proxy_arg = no_proxy
if no_proxy is None: if no_proxy is None:
no_proxy = get_proxy('no_proxy') no_proxy = get_proxy("no_proxy")
parsed = urlparse(url) parsed = urlparse(url)
if parsed.hostname is None: if parsed.hostname is None:
@ -759,9 +781,7 @@ def should_bypass_proxies(url, no_proxy):
if no_proxy: if no_proxy:
# We need to check whether we match here. We need to see if we match # We need to check whether we match here. We need to see if we match
# the end of the hostname, both with and without the port. # the end of the hostname, both with and without the port.
no_proxy = ( no_proxy = (host for host in no_proxy.replace(" ", "").split(",") if host)
host for host in no_proxy.replace(' ', '').split(',') if host
)
if is_ipv4_address(parsed.hostname): if is_ipv4_address(parsed.hostname):
for proxy_ip in no_proxy: for proxy_ip in no_proxy:
@ -775,7 +795,7 @@ def should_bypass_proxies(url, no_proxy):
else: else:
host_with_port = parsed.hostname host_with_port = parsed.hostname
if parsed.port: if parsed.port:
host_with_port += ':{}'.format(parsed.port) host_with_port += f":{parsed.port}"
for host in no_proxy: for host in no_proxy:
if parsed.hostname.endswith(host) or host_with_port.endswith(host): if parsed.hostname.endswith(host) or host_with_port.endswith(host):
@ -783,7 +803,7 @@ def should_bypass_proxies(url, no_proxy):
# to apply the proxies on this URL. # to apply the proxies on this URL.
return True return True
with set_environ('no_proxy', no_proxy_arg): with set_environ("no_proxy", no_proxy_arg):
# parsed.hostname can be `None` in cases such as a file URI. # parsed.hostname can be `None` in cases such as a file URI.
try: try:
bypass = proxy_bypass(parsed.hostname) bypass = proxy_bypass(parsed.hostname)
@ -817,13 +837,13 @@ def select_proxy(url, proxies):
proxies = proxies or {} proxies = proxies or {}
urlparts = urlparse(url) urlparts = urlparse(url)
if urlparts.hostname is None: if urlparts.hostname is None:
return proxies.get(urlparts.scheme, proxies.get('all')) return proxies.get(urlparts.scheme, proxies.get("all"))
proxy_keys = [ proxy_keys = [
urlparts.scheme + '://' + urlparts.hostname, urlparts.scheme + "://" + urlparts.hostname,
urlparts.scheme, urlparts.scheme,
'all://' + urlparts.hostname, "all://" + urlparts.hostname,
'all', "all",
] ]
proxy = None proxy = None
for proxy_key in proxy_keys: for proxy_key in proxy_keys:
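The key order above means the most specific entry wins: scheme+host, then scheme, then all://host, then all. A quick check with hypothetical proxy URLs, against requests 2.28.x:

from requests.utils import select_proxy

proxies = {
    "all": "http://fallback:3128",
    "https": "http://secure:3128",
    "https://internal.example": "http://direct:3128",
}
assert select_proxy("https://internal.example/x", proxies) == "http://direct:3128"
assert select_proxy("https://other.example/x", proxies) == "http://secure:3128"
assert select_proxy("ftp://other.example/x", proxies) == "http://fallback:3128"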
@ -848,13 +868,13 @@ def resolve_proxies(request, proxies, trust_env=True):
proxies = proxies if proxies is not None else {} proxies = proxies if proxies is not None else {}
url = request.url url = request.url
scheme = urlparse(url).scheme scheme = urlparse(url).scheme
no_proxy = proxies.get('no_proxy') no_proxy = proxies.get("no_proxy")
new_proxies = proxies.copy() new_proxies = proxies.copy()
if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy): if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy):
environ_proxies = get_environ_proxies(url, no_proxy=no_proxy) environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)
proxy = environ_proxies.get(scheme, environ_proxies.get('all')) proxy = environ_proxies.get(scheme, environ_proxies.get("all"))
if proxy: if proxy:
new_proxies.setdefault(scheme, proxy) new_proxies.setdefault(scheme, proxy)
@ -867,19 +887,21 @@ def default_user_agent(name="python-requests"):
:rtype: str :rtype: str
""" """
return '%s/%s' % (name, __version__) return f"{name}/{__version__}"
def default_headers(): def default_headers():
""" """
:rtype: requests.structures.CaseInsensitiveDict :rtype: requests.structures.CaseInsensitiveDict
""" """
return CaseInsensitiveDict({ return CaseInsensitiveDict(
'User-Agent': default_user_agent(), {
'Accept-Encoding': DEFAULT_ACCEPT_ENCODING, "User-Agent": default_user_agent(),
'Accept': '*/*', "Accept-Encoding": DEFAULT_ACCEPT_ENCODING,
'Connection': 'keep-alive', "Accept": "*/*",
}) "Connection": "keep-alive",
}
)
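These defaults are what every Session starts with, and since the result is a CaseInsensitiveDict it can be read back in any casing. A quick check against requests 2.28.x:

from requests.utils import default_headers

h = default_headers()
assert h["accept"] == "*/*"
assert h["User-Agent"].startswith("python-requests/")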
def parse_header_links(value): def parse_header_links(value):
@ -892,23 +914,23 @@ def parse_header_links(value):
links = [] links = []
replace_chars = ' \'"' replace_chars = " '\""
value = value.strip(replace_chars) value = value.strip(replace_chars)
if not value: if not value:
return links return links
for val in re.split(', *<', value): for val in re.split(", *<", value):
try: try:
url, params = val.split(';', 1) url, params = val.split(";", 1)
except ValueError: except ValueError:
url, params = val, '' url, params = val, ""
link = {'url': url.strip('<> \'"')} link = {"url": url.strip("<> '\"")}
for param in params.split(';'): for param in params.split(";"):
try: try:
key, value = param.split('=') key, value = param.split("=")
except ValueError: except ValueError:
break break
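A quick check of parse_header_links() on a typical paginated Link header (the URLs are hypothetical), against requests 2.28.x:

from requests.utils import parse_header_links

value = '<https://api.example.com/?page=2>; rel="next", <https://api.example.com/?page=5>; rel="last"'
links = parse_header_links(value)
assert links[0] == {"url": "https://api.example.com/?page=2", "rel": "next"}
assert links[1]["rel"] == "last"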
@ -920,7 +942,7 @@ def parse_header_links(value):
# Null bytes; no need to recreate these on each call to guess_json_utf # Null bytes; no need to recreate these on each call to guess_json_utf
_null = '\x00'.encode('ascii') # encoding to ASCII for Python 3 _null = "\x00".encode("ascii") # encoding to ASCII for Python 3
_null2 = _null * 2 _null2 = _null * 2
_null3 = _null * 3 _null3 = _null * 3
@ -934,25 +956,25 @@ def guess_json_utf(data):
# determine the encoding. Also detect a BOM, if present. # determine the encoding. Also detect a BOM, if present.
sample = data[:4] sample = data[:4]
if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE): if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
return 'utf-32' # BOM included return "utf-32" # BOM included
if sample[:3] == codecs.BOM_UTF8: if sample[:3] == codecs.BOM_UTF8:
return 'utf-8-sig' # BOM included, MS style (discouraged) return "utf-8-sig" # BOM included, MS style (discouraged)
if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE): if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
return 'utf-16' # BOM included return "utf-16" # BOM included
nullcount = sample.count(_null) nullcount = sample.count(_null)
if nullcount == 0: if nullcount == 0:
return 'utf-8' return "utf-8"
if nullcount == 2: if nullcount == 2:
if sample[::2] == _null2: # 1st and 3rd are null if sample[::2] == _null2: # 1st and 3rd are null
return 'utf-16-be' return "utf-16-be"
if sample[1::2] == _null2: # 2nd and 4th are null if sample[1::2] == _null2: # 2nd and 4th are null
return 'utf-16-le' return "utf-16-le"
# Did not detect 2 valid UTF-16 ascii-range characters # Did not detect 2 valid UTF-16 ascii-range characters
if nullcount == 3: if nullcount == 3:
if sample[:3] == _null3: if sample[:3] == _null3:
return 'utf-32-be' return "utf-32-be"
if sample[1:] == _null3: if sample[1:] == _null3:
return 'utf-32-le' return "utf-32-le"
# Did not detect a valid UTF-32 ascii-range character # Did not detect a valid UTF-32 ascii-range character
return None return None
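The null-byte fingerprinting above can be exercised directly, since ASCII-range JSON encoded as UTF-16/32 produces predictable null patterns. A quick check against requests 2.28.x:

import json
from requests.utils import guess_json_utf

payload = json.dumps({"ok": True}).encode("utf-16-le")
assert guess_json_utf(payload) == "utf-16-le"
assert guess_json_utf(b'{"ok": true}') == "utf-8"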
@ -977,13 +999,13 @@ def prepend_scheme_if_needed(url, new_scheme):
if auth: if auth:
# parse_url doesn't provide the netloc with auth # parse_url doesn't provide the netloc with auth
# so we'll add it ourselves. # so we'll add it ourselves.
netloc = '@'.join([auth, netloc]) netloc = "@".join([auth, netloc])
if scheme is None: if scheme is None:
scheme = new_scheme scheme = new_scheme
if path is None: if path is None:
path = '' path = ""
return urlunparse((scheme, netloc, path, '', query, fragment)) return urlunparse((scheme, netloc, path, "", query, fragment))
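A quick check of prepend_scheme_if_needed(): a bare host gets the fallback scheme, while an existing scheme is left alone. Assuming requests 2.28.x, which parses via urllib3's parse_url:

from requests.utils import prepend_scheme_if_needed

assert prepend_scheme_if_needed("example.com/pub", "http") == "http://example.com/pub"
assert prepend_scheme_if_needed("https://example.com", "http") == "https://example.com"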
def get_auth_from_url(url): def get_auth_from_url(url):
@ -997,35 +1019,36 @@ def get_auth_from_url(url):
try: try:
auth = (unquote(parsed.username), unquote(parsed.password)) auth = (unquote(parsed.username), unquote(parsed.password))
except (AttributeError, TypeError): except (AttributeError, TypeError):
auth = ('', '') auth = ("", "")
return auth return auth
# Moved outside of function to avoid recompile every call
_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$')
_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$')
def check_header_validity(header): def check_header_validity(header):
"""Verifies that header value is a string which doesn't contain """Verifies that header parts don't contain leading whitespace
leading whitespace or return characters. This prevents unintended reserved characters, or return characters.
header injection.
:param header: tuple, in the format (name, value). :param header: tuple, in the format (name, value).
""" """
name, value = header name, value = header
if isinstance(value, bytes): for part in header:
pat = _CLEAN_HEADER_REGEX_BYTE if type(part) not in HEADER_VALIDATORS:
else: raise InvalidHeader(
pat = _CLEAN_HEADER_REGEX_STR f"Header part ({part!r}) from {{{name!r}: {value!r}}} must be "
try: f"of type str or bytes, not {type(part)}"
if not pat.match(value): )
raise InvalidHeader("Invalid return character or leading space in header: %s" % name)
except TypeError: _validate_header_part(name, "name", HEADER_VALIDATORS[type(name)][0])
raise InvalidHeader("Value for header {%s: %s} must be of type str or " _validate_header_part(value, "value", HEADER_VALIDATORS[type(value)][1])
"bytes, not %s" % (name, value, type(value)))
def _validate_header_part(header_part, header_kind, validator):
if not validator.match(header_part):
raise InvalidHeader(
f"Invalid leading whitespace, reserved character(s), or return"
f"character(s) in header {header_kind}: {header_part!r}"
)
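The validators reject return characters and leading whitespace in either the name or the value, raising InvalidHeader before the request is ever sent. A quick check against requests 2.28.x:

from requests.exceptions import InvalidHeader
from requests.utils import check_header_validity

check_header_validity(("X-Token", "abc123"))   # passes silently
try:
    check_header_validity(("X-Token", "bad\r\nvalue"))
except InvalidHeader as e:
    print("rejected:", e)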
def urldefragauth(url): def urldefragauth(url):
@ -1040,21 +1063,24 @@ def urldefragauth(url):
if not netloc: if not netloc:
netloc, path = path, netloc netloc, path = path, netloc
netloc = netloc.rsplit('@', 1)[-1] netloc = netloc.rsplit("@", 1)[-1]
return urlunparse((scheme, netloc, path, params, query, '')) return urlunparse((scheme, netloc, path, params, query, ""))
def rewind_body(prepared_request): def rewind_body(prepared_request):
"""Move file pointer back to its recorded starting position """Move file pointer back to its recorded starting position
so it can be read again on redirect. so it can be read again on redirect.
""" """
body_seek = getattr(prepared_request.body, 'seek', None) body_seek = getattr(prepared_request.body, "seek", None)
if body_seek is not None and isinstance(prepared_request._body_position, integer_types): if body_seek is not None and isinstance(
prepared_request._body_position, integer_types
):
try: try:
body_seek(prepared_request._body_position) body_seek(prepared_request._body_position)
except (IOError, OSError): except OSError:
raise UnrewindableBodyError("An error occurred when rewinding request " raise UnrewindableBodyError(
"body for redirect.") "An error occurred when rewinding request body for redirect."
)
else: else:
raise UnrewindableBodyError("Unable to rewind request body for redirect.") raise UnrewindableBodyError("Unable to rewind request body for redirect.")