Bump requests from 2.31.0 to 2.32.3 (#2338)

* Bump requests from 2.31.0 to 2.32.3

Bumps [requests](https://github.com/psf/requests) from 2.31.0 to 2.32.3.
- [Release notes](https://github.com/psf/requests/releases)
- [Changelog](https://github.com/psf/requests/blob/main/HISTORY.md)
- [Commits](https://github.com/psf/requests/compare/v2.31.0...v2.32.3)

---
updated-dependencies:
- dependency-name: requests
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

* Update requests==2.32.3

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>

[skip ci]
This commit is contained in:
dependabot[bot] 2024-06-19 00:01:34 -07:00 committed by GitHub
parent 55573d26ea
commit 43e71d836a
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
14 changed files with 287 additions and 80 deletions

View file

@@ -83,7 +83,11 @@ def check_compatibility(urllib3_version, chardet_version, charset_normalizer_ver
# charset_normalizer >= 2.0.0 < 4.0.0 # charset_normalizer >= 2.0.0 < 4.0.0
assert (2, 0, 0) <= (major, minor, patch) < (4, 0, 0) assert (2, 0, 0) <= (major, minor, patch) < (4, 0, 0)
else: else:
raise Exception("You need either charset_normalizer or chardet installed") warnings.warn(
"Unable to find acceptable character detection dependency "
"(chardet or charset_normalizer).",
RequestsDependencyWarning,
)
def _check_cryptography(cryptography_version): def _check_cryptography(cryptography_version):

View file

@@ -5,10 +5,10 @@
__title__ = "requests" __title__ = "requests"
__description__ = "Python HTTP for Humans." __description__ = "Python HTTP for Humans."
__url__ = "https://requests.readthedocs.io" __url__ = "https://requests.readthedocs.io"
__version__ = "2.31.0" __version__ = "2.32.3"
__build__ = 0x023100 __build__ = 0x023203
__author__ = "Kenneth Reitz" __author__ = "Kenneth Reitz"
__author_email__ = "me@kennethreitz.org" __author_email__ = "me@kennethreitz.org"
__license__ = "Apache 2.0" __license__ = "Apache-2.0"
__copyright__ = "Copyright Kenneth Reitz" __copyright__ = "Copyright Kenneth Reitz"
__cake__ = "\u2728 \U0001f370 \u2728" __cake__ = "\u2728 \U0001f370 \u2728"

View file

@@ -8,6 +8,8 @@ and maintain connections.
import os.path import os.path
import socket # noqa: F401 import socket # noqa: F401
import typing
import warnings
from urllib3.exceptions import ClosedPoolError, ConnectTimeoutError from urllib3.exceptions import ClosedPoolError, ConnectTimeoutError
from urllib3.exceptions import HTTPError as _HTTPError from urllib3.exceptions import HTTPError as _HTTPError
@@ -25,6 +27,7 @@ from urllib3.poolmanager import PoolManager, proxy_from_url
from urllib3.util import Timeout as TimeoutSauce from urllib3.util import Timeout as TimeoutSauce
from urllib3.util import parse_url from urllib3.util import parse_url
from urllib3.util.retry import Retry from urllib3.util.retry import Retry
from urllib3.util.ssl_ import create_urllib3_context
from .auth import _basic_auth_str from .auth import _basic_auth_str
from .compat import basestring, urlparse from .compat import basestring, urlparse
@@ -61,12 +64,76 @@ except ImportError:
raise InvalidSchema("Missing dependencies for SOCKS support.") raise InvalidSchema("Missing dependencies for SOCKS support.")
if typing.TYPE_CHECKING:
from .models import PreparedRequest
DEFAULT_POOLBLOCK = False DEFAULT_POOLBLOCK = False
DEFAULT_POOLSIZE = 10 DEFAULT_POOLSIZE = 10
DEFAULT_RETRIES = 0 DEFAULT_RETRIES = 0
DEFAULT_POOL_TIMEOUT = None DEFAULT_POOL_TIMEOUT = None
try:
import ssl # noqa: F401
_preloaded_ssl_context = create_urllib3_context()
_preloaded_ssl_context.load_verify_locations(
extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
)
except ImportError:
# Bypass default SSLContext creation when Python
# interpreter isn't built with the ssl module.
_preloaded_ssl_context = None
def _urllib3_request_context(
request: "PreparedRequest",
verify: "bool | str | None",
client_cert: "typing.Tuple[str, str] | str | None",
poolmanager: "PoolManager",
) -> "(typing.Dict[str, typing.Any], typing.Dict[str, typing.Any])":
host_params = {}
pool_kwargs = {}
parsed_request_url = urlparse(request.url)
scheme = parsed_request_url.scheme.lower()
port = parsed_request_url.port
# Determine if we have and should use our default SSLContext
# to optimize performance on standard requests.
poolmanager_kwargs = getattr(poolmanager, "connection_pool_kw", {})
has_poolmanager_ssl_context = poolmanager_kwargs.get("ssl_context")
should_use_default_ssl_context = (
_preloaded_ssl_context is not None and not has_poolmanager_ssl_context
)
cert_reqs = "CERT_REQUIRED"
if verify is False:
cert_reqs = "CERT_NONE"
elif verify is True and should_use_default_ssl_context:
pool_kwargs["ssl_context"] = _preloaded_ssl_context
elif isinstance(verify, str):
if not os.path.isdir(verify):
pool_kwargs["ca_certs"] = verify
else:
pool_kwargs["ca_cert_dir"] = verify
pool_kwargs["cert_reqs"] = cert_reqs
if client_cert is not None:
if isinstance(client_cert, tuple) and len(client_cert) == 2:
pool_kwargs["cert_file"] = client_cert[0]
pool_kwargs["key_file"] = client_cert[1]
else:
# According to our docs, we allow users to specify just the client
# cert path
pool_kwargs["cert_file"] = client_cert
host_params = {
"scheme": scheme,
"host": parsed_request_url.hostname,
"port": port,
}
return host_params, pool_kwargs
class BaseAdapter: class BaseAdapter:
"""The Base Transport Adapter""" """The Base Transport Adapter"""
@@ -247,24 +314,22 @@ class HTTPAdapter(BaseAdapter):
:param cert: The SSL certificate to verify. :param cert: The SSL certificate to verify.
""" """
if url.lower().startswith("https") and verify: if url.lower().startswith("https") and verify:
conn.cert_reqs = "CERT_REQUIRED"
cert_loc = None # Only load the CA certificates if 'verify' is a string indicating the CA bundle to use.
# Otherwise, if verify is a boolean, we don't load anything since
# Allow self-specified cert location. # the connection will be using a context with the default certificates already loaded,
# and this avoids a call to the slow load_verify_locations()
if verify is not True: if verify is not True:
# `verify` must be a str with a path then
cert_loc = verify cert_loc = verify
if not cert_loc: if not os.path.exists(cert_loc):
cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
if not cert_loc or not os.path.exists(cert_loc):
raise OSError( raise OSError(
f"Could not find a suitable TLS CA certificate bundle, " f"Could not find a suitable TLS CA certificate bundle, "
f"invalid path: {cert_loc}" f"invalid path: {cert_loc}"
) )
conn.cert_reqs = "CERT_REQUIRED"
if not os.path.isdir(cert_loc): if not os.path.isdir(cert_loc):
conn.ca_certs = cert_loc conn.ca_certs = cert_loc
else: else:
@@ -328,8 +393,110 @@ class HTTPAdapter(BaseAdapter):
return response return response
def build_connection_pool_key_attributes(self, request, verify, cert=None):
"""Build the PoolKey attributes used by urllib3 to return a connection.
This looks at the PreparedRequest, the user-specified verify value,
and the value of the cert parameter to determine what PoolKey values
to use to select a connection from a given urllib3 Connection Pool.
The SSL related pool key arguments are not consistently set. As of
this writing, use the following to determine what keys may be in that
dictionary:
* If ``verify`` is ``True``, ``"ssl_context"`` will be set and will be the
default Requests SSL Context
* If ``verify`` is ``False``, ``"ssl_context"`` will not be set but
``"cert_reqs"`` will be set
* If ``verify`` is a string, (i.e., it is a user-specified trust bundle)
``"ca_certs"`` will be set if the string is not a directory recognized
by :py:func:`os.path.isdir`, otherwise ``"ca_certs_dir"`` will be
set.
* If ``"cert"`` is specified, ``"cert_file"`` will always be set. If
``"cert"`` is a tuple with a second item, ``"key_file"`` will also
be present
To override these settings, one may subclass this class, call this
method and use the above logic to change parameters as desired. For
example, if one wishes to use a custom :py:class:`ssl.SSLContext` one
must both set ``"ssl_context"`` and based on what else they require,
alter the other keys to ensure the desired behaviour.
:param request:
The PreparedRequest being sent over the connection.
:type request:
:class:`~requests.models.PreparedRequest`
:param verify:
Either a boolean, in which case it controls whether
we verify the server's TLS certificate, or a string, in which case it
must be a path to a CA bundle to use.
:param cert:
(optional) Any user-provided SSL certificate for client
authentication (a.k.a., mTLS). This may be a string (i.e., just
the path to a file which holds both certificate and key) or a
tuple of length 2 with the certificate file path and key file
path.
:returns:
A tuple of two dictionaries. The first is the "host parameters"
portion of the Pool Key including scheme, hostname, and port. The
second is a dictionary of SSLContext related parameters.
"""
return _urllib3_request_context(request, verify, cert, self.poolmanager)
def get_connection_with_tls_context(self, request, verify, proxies=None, cert=None):
"""Returns a urllib3 connection for the given request and TLS settings.
This should not be called from user code, and is only exposed for use
when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param request:
The :class:`PreparedRequest <PreparedRequest>` object to be sent
over the connection.
:param verify:
Either a boolean, in which case it controls whether we verify the
server's TLS certificate, or a string, in which case it must be a
path to a CA bundle to use.
:param proxies:
(optional) The proxies dictionary to apply to the request.
:param cert:
(optional) Any user-provided SSL certificate to be used for client
authentication (a.k.a., mTLS).
:rtype:
urllib3.ConnectionPool
"""
proxy = select_proxy(request.url, proxies)
try:
host_params, pool_kwargs = self.build_connection_pool_key_attributes(
request,
verify,
cert,
)
except ValueError as e:
raise InvalidURL(e, request=request)
if proxy:
proxy = prepend_scheme_if_needed(proxy, "http")
proxy_url = parse_url(proxy)
if not proxy_url.host:
raise InvalidProxyURL(
"Please check proxy URL. It is malformed "
"and could be missing the host."
)
proxy_manager = self.proxy_manager_for(proxy)
conn = proxy_manager.connection_from_host(
**host_params, pool_kwargs=pool_kwargs
)
else:
# Only scheme should be lower case
conn = self.poolmanager.connection_from_host(
**host_params, pool_kwargs=pool_kwargs
)
return conn
def get_connection(self, url, proxies=None): def get_connection(self, url, proxies=None):
"""Returns a urllib3 connection for the given URL. This should not be """DEPRECATED: Users should move to `get_connection_with_tls_context`
for all subclasses of HTTPAdapter using Requests>=2.32.2.
Returns a urllib3 connection for the given URL. This should not be
called from user code, and is only exposed for use when subclassing the called from user code, and is only exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
@@ -337,6 +504,15 @@ class HTTPAdapter(BaseAdapter):
:param proxies: (optional) A Requests-style dictionary of proxies used on this request. :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
:rtype: urllib3.ConnectionPool :rtype: urllib3.ConnectionPool
""" """
warnings.warn(
(
"`get_connection` has been deprecated in favor of "
"`get_connection_with_tls_context`. Custom HTTPAdapter subclasses "
"will need to migrate for Requests>=2.32.2. Please see "
"https://github.com/psf/requests/pull/6710 for more details."
),
DeprecationWarning,
)
proxy = select_proxy(url, proxies) proxy = select_proxy(url, proxies)
if proxy: if proxy:
@@ -391,6 +567,9 @@ class HTTPAdapter(BaseAdapter):
using_socks_proxy = proxy_scheme.startswith("socks") using_socks_proxy = proxy_scheme.startswith("socks")
url = request.path_url url = request.path_url
if url.startswith("//"): # Don't confuse urllib3
url = f"/{url.lstrip('/')}"
if is_proxied_http_request and not using_socks_proxy: if is_proxied_http_request and not using_socks_proxy:
url = urldefragauth(request.url) url = urldefragauth(request.url)
@@ -451,7 +630,9 @@ class HTTPAdapter(BaseAdapter):
""" """
try: try:
conn = self.get_connection(request.url, proxies) conn = self.get_connection_with_tls_context(
request, verify, proxies=proxies, cert=cert
)
except LocationValueError as e: except LocationValueError as e:
raise InvalidURL(e, request=request) raise InvalidURL(e, request=request)

View file

@@ -25,7 +25,7 @@ def request(method, url, **kwargs):
:param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
:param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``
or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content_type'`` is a string
defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers
to add for the file. to add for the file.
:param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.

View file

@@ -258,7 +258,6 @@ class HTTPDigestAuth(AuthBase):
s_auth = r.headers.get("www-authenticate", "") s_auth = r.headers.get("www-authenticate", "")
if "digest" in s_auth.lower() and self._thread_local.num_401_calls < 2: if "digest" in s_auth.lower() and self._thread_local.num_401_calls < 2:
self._thread_local.num_401_calls += 1 self._thread_local.num_401_calls += 1
pat = re.compile(r"digest ", flags=re.IGNORECASE) pat = re.compile(r"digest ", flags=re.IGNORECASE)
self._thread_local.chal = parse_dict_header(pat.sub("", s_auth, count=1)) self._thread_local.chal = parse_dict_header(pat.sub("", s_auth, count=1))

View file

@@ -7,13 +7,28 @@ between Python 2 and Python 3. It remains for backwards
compatibility until the next major version. compatibility until the next major version.
""" """
try: import importlib
import chardet
except ImportError:
import charset_normalizer as chardet
import sys import sys
# -------------------
# Character Detection
# -------------------
def _resolve_char_detection():
"""Find supported character detection libraries."""
chardet = None
for lib in ("chardet", "charset_normalizer"):
if chardet is None:
try:
chardet = importlib.import_module(lib)
except ImportError:
pass
return chardet
chardet = _resolve_char_detection()
# ------- # -------
# Pythons # Pythons
# ------- # -------

View file

@@ -2,7 +2,7 @@
requests.cookies requests.cookies
~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~
Compatibility code to be able to use `cookielib.CookieJar` with requests. Compatibility code to be able to use `http.cookiejar.CookieJar` with requests.
requests.utils imports from here, so be careful with imports. requests.utils imports from here, so be careful with imports.
""" """
@@ -23,7 +23,7 @@ except ImportError:
class MockRequest: class MockRequest:
"""Wraps a `requests.Request` to mimic a `urllib2.Request`. """Wraps a `requests.Request` to mimic a `urllib2.Request`.
The code in `cookielib.CookieJar` expects this interface in order to correctly The code in `http.cookiejar.CookieJar` expects this interface in order to correctly
manage cookie policies, i.e., determine whether a cookie can be set, given the manage cookie policies, i.e., determine whether a cookie can be set, given the
domains of the request and the cookie. domains of the request and the cookie.
@@ -76,7 +76,7 @@ class MockRequest:
return self._r.headers.get(name, self._new_headers.get(name, default)) return self._r.headers.get(name, self._new_headers.get(name, default))
def add_header(self, key, val): def add_header(self, key, val):
"""cookielib has no legitimate use for this method; add it back if you find one.""" """cookiejar has no legitimate use for this method; add it back if you find one."""
raise NotImplementedError( raise NotImplementedError(
"Cookie headers should be added with add_unredirected_header()" "Cookie headers should be added with add_unredirected_header()"
) )
@@ -104,11 +104,11 @@ class MockResponse:
"""Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`. """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.
...what? Basically, expose the parsed HTTP headers from the server response ...what? Basically, expose the parsed HTTP headers from the server response
the way `cookielib` expects to see them. the way `http.cookiejar` expects to see them.
""" """
def __init__(self, headers): def __init__(self, headers):
"""Make a MockResponse for `cookielib` to read. """Make a MockResponse for `cookiejar` to read.
:param headers: a httplib.HTTPMessage or analogous carrying the headers :param headers: a httplib.HTTPMessage or analogous carrying the headers
""" """
@@ -124,7 +124,7 @@ class MockResponse:
def extract_cookies_to_jar(jar, request, response): def extract_cookies_to_jar(jar, request, response):
"""Extract the cookies from the response into a CookieJar. """Extract the cookies from the response into a CookieJar.
:param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar) :param jar: http.cookiejar.CookieJar (not necessarily a RequestsCookieJar)
:param request: our own requests.Request object :param request: our own requests.Request object
:param response: urllib3.HTTPResponse object :param response: urllib3.HTTPResponse object
""" """
@@ -174,7 +174,7 @@ class CookieConflictError(RuntimeError):
class RequestsCookieJar(cookielib.CookieJar, MutableMapping): class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
"""Compatibility class; is a cookielib.CookieJar, but exposes a dict """Compatibility class; is a http.cookiejar.CookieJar, but exposes a dict
interface. interface.
This is the CookieJar we create by default for requests and sessions that This is the CookieJar we create by default for requests and sessions that
@@ -341,7 +341,7 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
self.set(name, value) self.set(name, value)
def __delitem__(self, name): def __delitem__(self, name):
"""Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s """Deletes a cookie given a name. Wraps ``http.cookiejar.CookieJar``'s
``remove_cookie_by_name()``. ``remove_cookie_by_name()``.
""" """
remove_cookie_by_name(self, name) remove_cookie_by_name(self, name)

View file

@@ -41,6 +41,16 @@ class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
CompatJSONDecodeError.__init__(self, *args) CompatJSONDecodeError.__init__(self, *args)
InvalidJSONError.__init__(self, *self.args, **kwargs) InvalidJSONError.__init__(self, *self.args, **kwargs)
def __reduce__(self):
"""
The __reduce__ method called when pickling the object must
be the one from the JSONDecodeError (be it json/simplejson)
as it expects all the arguments for instantiation, not just
one like the IOError, and the MRO would by default call the
__reduce__ method from the IOError due to the inheritance order.
"""
return CompatJSONDecodeError.__reduce__(self)
class HTTPError(RequestException): class HTTPError(RequestException):
"""An HTTP error occurred.""" """An HTTP error occurred."""

View file

@@ -170,7 +170,7 @@ class RequestEncodingMixin:
) )
) )
for (k, v) in files: for k, v in files:
# support for explicit filename # support for explicit filename
ft = None ft = None
fh = None fh = None
@@ -268,7 +268,6 @@ class Request(RequestHooksMixin):
hooks=None, hooks=None,
json=None, json=None,
): ):
# Default empty dicts for dict params. # Default empty dicts for dict params.
data = [] if data is None else data data = [] if data is None else data
files = [] if files is None else files files = [] if files is None else files
@@ -277,7 +276,7 @@ class Request(RequestHooksMixin):
hooks = {} if hooks is None else hooks hooks = {} if hooks is None else hooks
self.hooks = default_hooks() self.hooks = default_hooks()
for (k, v) in list(hooks.items()): for k, v in list(hooks.items()):
self.register_hook(event=k, hook=v) self.register_hook(event=k, hook=v)
self.method = method self.method = method
@@ -790,7 +789,12 @@ class Response:
@property @property
def apparent_encoding(self): def apparent_encoding(self):
"""The apparent encoding, provided by the charset_normalizer or chardet libraries.""" """The apparent encoding, provided by the charset_normalizer or chardet libraries."""
if chardet is not None:
return chardet.detect(self.content)["encoding"] return chardet.detect(self.content)["encoding"]
else:
# If no character detection library is available, we'll fall back
# to a standard Python utf-8 str.
return "utf-8"
def iter_content(self, chunk_size=1, decode_unicode=False): def iter_content(self, chunk_size=1, decode_unicode=False):
"""Iterates over the response data. When stream=True is set on the """Iterates over the response data. When stream=True is set on the
@@ -865,7 +869,6 @@ class Response:
for chunk in self.iter_content( for chunk in self.iter_content(
chunk_size=chunk_size, decode_unicode=decode_unicode chunk_size=chunk_size, decode_unicode=decode_unicode
): ):
if pending is not None: if pending is not None:
chunk = pending + chunk chunk = pending + chunk

View file

@@ -1,13 +1,6 @@
import sys import sys
try: from .compat import chardet
import chardet
except ImportError:
import warnings
import charset_normalizer as chardet
warnings.filterwarnings("ignore", "Trying to detect", module="charset_normalizer")
# This code exists for backwards compatibility reasons. # This code exists for backwards compatibility reasons.
# I don't like it either. Just look the other way. :) # I don't like it either. Just look the other way. :)
@@ -20,9 +13,11 @@ for package in ("urllib3", "idna"):
if mod == package or mod.startswith(f"{package}."): if mod == package or mod.startswith(f"{package}."):
sys.modules[f"requests.packages.{mod}"] = sys.modules[mod] sys.modules[f"requests.packages.{mod}"] = sys.modules[mod]
target = chardet.__name__ if chardet is not None:
for mod in list(sys.modules): target = chardet.__name__
for mod in list(sys.modules):
if mod == target or mod.startswith(f"{target}."): if mod == target or mod.startswith(f"{target}."):
target = target.replace(target, "chardet") imported_mod = sys.modules[mod]
sys.modules[f"requests.packages.{target}"] = sys.modules[mod] sys.modules[f"requests.packages.{mod}"] = imported_mod
# Kinda cool, though, right? mod = mod.replace(target, "chardet")
sys.modules[f"requests.packages.{mod}"] = imported_mod

View file

@@ -262,7 +262,6 @@ class SessionRedirectMixin:
if yield_requests: if yield_requests:
yield req yield req
else: else:
resp = self.send( resp = self.send(
req, req,
stream=stream, stream=stream,
@@ -326,7 +325,7 @@ class SessionRedirectMixin:
# urllib3 handles proxy authorization for us in the standard adapter. # urllib3 handles proxy authorization for us in the standard adapter.
# Avoid appending this to TLS tunneled requests where it may be leaked. # Avoid appending this to TLS tunneled requests where it may be leaked.
if not scheme.startswith('https') and username and password: if not scheme.startswith("https") and username and password:
headers["Proxy-Authorization"] = _basic_auth_str(username, password) headers["Proxy-Authorization"] = _basic_auth_str(username, password)
return new_proxies return new_proxies
@@ -389,7 +388,6 @@ class Session(SessionRedirectMixin):
] ]
def __init__(self): def __init__(self):
#: A case-insensitive dictionary of headers to be sent on each #: A case-insensitive dictionary of headers to be sent on each
#: :class:`Request <Request>` sent from this #: :class:`Request <Request>` sent from this
#: :class:`Session <Session>`. #: :class:`Session <Session>`.
@@ -545,6 +543,8 @@ class Session(SessionRedirectMixin):
:type allow_redirects: bool :type allow_redirects: bool
:param proxies: (optional) Dictionary mapping protocol or protocol and :param proxies: (optional) Dictionary mapping protocol or protocol and
hostname to the URL of the proxy. hostname to the URL of the proxy.
:param hooks: (optional) Dictionary mapping hook name to one event or
list of events, event must be callable.
:param stream: (optional) whether to immediately download the response :param stream: (optional) whether to immediately download the response
content. Defaults to ``False``. content. Defaults to ``False``.
:param verify: (optional) Either a boolean, in which case it controls whether we verify :param verify: (optional) Either a boolean, in which case it controls whether we verify
@@ -711,7 +711,6 @@ class Session(SessionRedirectMixin):
# Persist cookies # Persist cookies
if r.history: if r.history:
# If the hooks create history then we want those cookies too # If the hooks create history then we want those cookies too
for resp in r.history: for resp in r.history:
extract_cookies_to_jar(self.cookies, resp.request, resp.raw) extract_cookies_to_jar(self.cookies, resp.request, resp.raw)
@@ -759,7 +758,7 @@ class Session(SessionRedirectMixin):
# Set environment's proxies. # Set environment's proxies.
no_proxy = proxies.get("no_proxy") if proxies is not None else None no_proxy = proxies.get("no_proxy") if proxies is not None else None
env_proxies = get_environ_proxies(url, no_proxy=no_proxy) env_proxies = get_environ_proxies(url, no_proxy=no_proxy)
for (k, v) in env_proxies.items(): for k, v in env_proxies.items():
proxies.setdefault(k, v) proxies.setdefault(k, v)
# Look for requests environment configuration # Look for requests environment configuration
@@ -785,8 +784,7 @@ class Session(SessionRedirectMixin):
:rtype: requests.adapters.BaseAdapter :rtype: requests.adapters.BaseAdapter
""" """
for (prefix, adapter) in self.adapters.items(): for prefix, adapter in self.adapters.items():
if url.lower().startswith(prefix.lower()): if url.lower().startswith(prefix.lower()):
return adapter return adapter

View file

@@ -24,7 +24,7 @@ _codes = {
# Informational. # Informational.
100: ("continue",), 100: ("continue",),
101: ("switching_protocols",), 101: ("switching_protocols",),
102: ("processing",), 102: ("processing", "early-hints"),
103: ("checkpoint",), 103: ("checkpoint",),
122: ("uri_too_long", "request_uri_too_long"), 122: ("uri_too_long", "request_uri_too_long"),
200: ("ok", "okay", "all_ok", "all_okay", "all_good", "\\o/", ""), 200: ("ok", "okay", "all_ok", "all_okay", "all_good", "\\o/", ""),
@@ -65,8 +65,8 @@ _codes = {
410: ("gone",), 410: ("gone",),
411: ("length_required",), 411: ("length_required",),
412: ("precondition_failed", "precondition"), 412: ("precondition_failed", "precondition"),
413: ("request_entity_too_large",), 413: ("request_entity_too_large", "content_too_large"),
414: ("request_uri_too_large",), 414: ("request_uri_too_large", "uri_too_long"),
415: ("unsupported_media_type", "unsupported_media", "media_type"), 415: ("unsupported_media_type", "unsupported_media", "media_type"),
416: ( 416: (
"requested_range_not_satisfiable", "requested_range_not_satisfiable",
@@ -76,10 +76,10 @@ _codes = {
417: ("expectation_failed",), 417: ("expectation_failed",),
418: ("im_a_teapot", "teapot", "i_am_a_teapot"), 418: ("im_a_teapot", "teapot", "i_am_a_teapot"),
421: ("misdirected_request",), 421: ("misdirected_request",),
422: ("unprocessable_entity", "unprocessable"), 422: ("unprocessable_entity", "unprocessable", "unprocessable_content"),
423: ("locked",), 423: ("locked",),
424: ("failed_dependency", "dependency"), 424: ("failed_dependency", "dependency"),
425: ("unordered_collection", "unordered"), 425: ("unordered_collection", "unordered", "too_early"),
426: ("upgrade_required", "upgrade"), 426: ("upgrade_required", "upgrade"),
428: ("precondition_required", "precondition"), 428: ("precondition_required", "precondition"),
429: ("too_many_requests", "too_many"), 429: ("too_many_requests", "too_many"),

View file

@@ -97,6 +97,8 @@ if sys.platform == "win32":
# '<local>' string by the localhost entry and the corresponding # '<local>' string by the localhost entry and the corresponding
# canonical entry. # canonical entry.
proxyOverride = proxyOverride.split(";") proxyOverride = proxyOverride.split(";")
# filter out empty strings to avoid re.match return true in the following code.
proxyOverride = filter(None, proxyOverride)
# now check if we match one of the registry values. # now check if we match one of the registry values.
for test in proxyOverride: for test in proxyOverride:
if test == "<local>": if test == "<local>":
@@ -134,6 +136,9 @@ def super_len(o):
total_length = None total_length = None
current_position = 0 current_position = 0
if isinstance(o, str):
o = o.encode("utf-8")
if hasattr(o, "__len__"): if hasattr(o, "__len__"):
total_length = len(o) total_length = len(o)
@@ -466,11 +471,7 @@ def dict_from_cookiejar(cj):
:rtype: dict :rtype: dict
""" """
cookie_dict = {} cookie_dict = {cookie.name: cookie.value for cookie in cj}
for cookie in cj:
cookie_dict[cookie.name] = cookie.value
return cookie_dict return cookie_dict
@@ -767,6 +768,7 @@ def should_bypass_proxies(url, no_proxy):
:rtype: bool :rtype: bool
""" """
# Prioritize lowercase environment variables over uppercase # Prioritize lowercase environment variables over uppercase
# to keep a consistent behaviour with other http projects (curl, wget). # to keep a consistent behaviour with other http projects (curl, wget).
def get_proxy(key): def get_proxy(key):
@@ -862,7 +864,7 @@ def select_proxy(url, proxies):
def resolve_proxies(request, proxies, trust_env=True): def resolve_proxies(request, proxies, trust_env=True):
"""This method takes proxy information from a request and configuration """This method takes proxy information from a request and configuration
input to resolve a mapping of target proxies. This will consider settings input to resolve a mapping of target proxies. This will consider settings
such a NO_PROXY to strip proxy configurations. such as NO_PROXY to strip proxy configurations.
:param request: Request or PreparedRequest :param request: Request or PreparedRequest
:param proxies: A dictionary of schemes or schemes and hosts to proxy URLs :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
@@ -1054,7 +1056,7 @@ def _validate_header_part(header, header_validator_index):
if not validator.match(header_part): if not validator.match(header_part):
header_kind = "name" if header_validator_index == 0 else "value" header_kind = "name" if header_validator_index == 0 else "value"
raise InvalidHeader( raise InvalidHeader(
f"Invalid leading whitespace, reserved character(s), or return" f"Invalid leading whitespace, reserved character(s), or return "
f"character(s) in header {header_kind}: {header_part!r}" f"character(s) in header {header_kind}: {header_part!r}"
) )

View file

@@ -34,7 +34,7 @@ pyparsing==3.1.2
python-dateutil==2.9.0.post0 python-dateutil==2.9.0.post0
python-twitter==3.5 python-twitter==3.5
pytz==2024.1 pytz==2024.1
requests==2.31.0 requests==2.32.3
requests-oauthlib==2.0.0 requests-oauthlib==2.0.0
rumps==0.4.0; platform_system == "Darwin" rumps==0.4.0; platform_system == "Darwin"
simplejson==3.19.2 simplejson==3.19.2