From 43e71d836a7e7b21cf96acd9d85f2bd02b1d227b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 19 Jun 2024 00:01:34 -0700 Subject: [PATCH] Bump requests from 2.31.0 to 2.32.3 (#2338) * Bump requests from 2.31.0 to 2.32.3 Bumps [requests](https://github.com/psf/requests) from 2.31.0 to 2.32.3. - [Release notes](https://github.com/psf/requests/releases) - [Changelog](https://github.com/psf/requests/blob/main/HISTORY.md) - [Commits](https://github.com/psf/requests/compare/v2.31.0...v2.32.3) --- updated-dependencies: - dependency-name: requests dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Update requests==2.32.3 --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> [skip ci] --- lib/requests/__init__.py | 6 +- lib/requests/__version__.py | 6 +- lib/requests/adapters.py | 225 +++++++++++++++++++++++++++++++---- lib/requests/api.py | 2 +- lib/requests/auth.py | 1 - lib/requests/compat.py | 25 +++- lib/requests/cookies.py | 16 +-- lib/requests/exceptions.py | 10 ++ lib/requests/models.py | 13 +- lib/requests/packages.py | 23 ++-- lib/requests/sessions.py | 12 +- lib/requests/status_codes.py | 10 +- lib/requests/utils.py | 16 +-- requirements.txt | 2 +- 14 files changed, 287 insertions(+), 80 deletions(-) diff --git a/lib/requests/__init__.py b/lib/requests/__init__.py index 300a16c5..051cda13 100644 --- a/lib/requests/__init__.py +++ b/lib/requests/__init__.py @@ -83,7 +83,11 @@ def check_compatibility(urllib3_version, chardet_version, charset_normalizer_ver # charset_normalizer >= 2.0.0 < 4.0.0 assert (2, 0, 0) <= (major, minor, patch) < (4, 0, 0) else: - raise Exception("You need either charset_normalizer or chardet installed") + warnings.warn( + "Unable to find acceptable character detection dependency " + "(chardet or charset_normalizer).", + RequestsDependencyWarning, + ) def _check_cryptography(cryptography_version): diff --git a/lib/requests/__version__.py b/lib/requests/__version__.py index 5063c3f8..2c105aca 100644 --- a/lib/requests/__version__.py +++ b/lib/requests/__version__.py @@ -5,10 +5,10 @@ __title__ = "requests" __description__ = "Python HTTP for Humans." __url__ = "https://requests.readthedocs.io" -__version__ = "2.31.0" -__build__ = 0x023100 +__version__ = "2.32.3" +__build__ = 0x023203 __author__ = "Kenneth Reitz" __author_email__ = "me@kennethreitz.org" -__license__ = "Apache 2.0" +__license__ = "Apache-2.0" __copyright__ = "Copyright Kenneth Reitz" __cake__ = "\u2728 \U0001f370 \u2728" diff --git a/lib/requests/adapters.py b/lib/requests/adapters.py index 78e3bb6e..9a58b160 100644 --- a/lib/requests/adapters.py +++ b/lib/requests/adapters.py @@ -8,6 +8,8 @@ and maintain connections. 
import os.path import socket # noqa: F401 +import typing +import warnings from urllib3.exceptions import ClosedPoolError, ConnectTimeoutError from urllib3.exceptions import HTTPError as _HTTPError @@ -25,6 +27,7 @@ from urllib3.poolmanager import PoolManager, proxy_from_url from urllib3.util import Timeout as TimeoutSauce from urllib3.util import parse_url from urllib3.util.retry import Retry +from urllib3.util.ssl_ import create_urllib3_context from .auth import _basic_auth_str from .compat import basestring, urlparse @@ -61,12 +64,76 @@ except ImportError: raise InvalidSchema("Missing dependencies for SOCKS support.") +if typing.TYPE_CHECKING: + from .models import PreparedRequest + + DEFAULT_POOLBLOCK = False DEFAULT_POOLSIZE = 10 DEFAULT_RETRIES = 0 DEFAULT_POOL_TIMEOUT = None +try: + import ssl # noqa: F401 + + _preloaded_ssl_context = create_urllib3_context() + _preloaded_ssl_context.load_verify_locations( + extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH) + ) +except ImportError: + # Bypass default SSLContext creation when Python + # interpreter isn't built with the ssl module. + _preloaded_ssl_context = None + + +def _urllib3_request_context( + request: "PreparedRequest", + verify: "bool | str | None", + client_cert: "typing.Tuple[str, str] | str | None", + poolmanager: "PoolManager", +) -> "(typing.Dict[str, typing.Any], typing.Dict[str, typing.Any])": + host_params = {} + pool_kwargs = {} + parsed_request_url = urlparse(request.url) + scheme = parsed_request_url.scheme.lower() + port = parsed_request_url.port + + # Determine if we have and should use our default SSLContext + # to optimize performance on standard requests. + poolmanager_kwargs = getattr(poolmanager, "connection_pool_kw", {}) + has_poolmanager_ssl_context = poolmanager_kwargs.get("ssl_context") + should_use_default_ssl_context = ( + _preloaded_ssl_context is not None and not has_poolmanager_ssl_context + ) + + cert_reqs = "CERT_REQUIRED" + if verify is False: + cert_reqs = "CERT_NONE" + elif verify is True and should_use_default_ssl_context: + pool_kwargs["ssl_context"] = _preloaded_ssl_context + elif isinstance(verify, str): + if not os.path.isdir(verify): + pool_kwargs["ca_certs"] = verify + else: + pool_kwargs["ca_cert_dir"] = verify + pool_kwargs["cert_reqs"] = cert_reqs + if client_cert is not None: + if isinstance(client_cert, tuple) and len(client_cert) == 2: + pool_kwargs["cert_file"] = client_cert[0] + pool_kwargs["key_file"] = client_cert[1] + else: + # According to our docs, we allow users to specify just the client + # cert path + pool_kwargs["cert_file"] = client_cert + host_params = { + "scheme": scheme, + "host": parsed_request_url.hostname, + "port": port, + } + return host_params, pool_kwargs + + class BaseAdapter: """The Base Transport Adapter""" @@ -247,28 +314,26 @@ class HTTPAdapter(BaseAdapter): :param cert: The SSL certificate to verify. """ if url.lower().startswith("https") and verify: - - cert_loc = None - - # Allow self-specified cert location. - if verify is not True: - cert_loc = verify - - if not cert_loc: - cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH) - - if not cert_loc or not os.path.exists(cert_loc): - raise OSError( - f"Could not find a suitable TLS CA certificate bundle, " - f"invalid path: {cert_loc}" - ) - conn.cert_reqs = "CERT_REQUIRED" - if not os.path.isdir(cert_loc): - conn.ca_certs = cert_loc - else: - conn.ca_cert_dir = cert_loc + # Only load the CA certificates if 'verify' is a string indicating the CA bundle to use. 
+ # Otherwise, if verify is a boolean, we don't load anything since + # the connection will be using a context with the default certificates already loaded, + # and this avoids a call to the slow load_verify_locations() + if verify is not True: + # `verify` must be a str with a path then + cert_loc = verify + + if not os.path.exists(cert_loc): + raise OSError( + f"Could not find a suitable TLS CA certificate bundle, " + f"invalid path: {cert_loc}" + ) + + if not os.path.isdir(cert_loc): + conn.ca_certs = cert_loc + else: + conn.ca_cert_dir = cert_loc else: conn.cert_reqs = "CERT_NONE" conn.ca_certs = None @@ -328,8 +393,110 @@ class HTTPAdapter(BaseAdapter): return response + def build_connection_pool_key_attributes(self, request, verify, cert=None): + """Build the PoolKey attributes used by urllib3 to return a connection. + + This looks at the PreparedRequest, the user-specified verify value, + and the value of the cert parameter to determine what PoolKey values + to use to select a connection from a given urllib3 Connection Pool. + + The SSL related pool key arguments are not consistently set. As of + this writing, use the following to determine what keys may be in that + dictionary: + + * If ``verify`` is ``True``, ``"ssl_context"`` will be set and will be the + default Requests SSL Context + * If ``verify`` is ``False``, ``"ssl_context"`` will not be set but + ``"cert_reqs"`` will be set + * If ``verify`` is a string, (i.e., it is a user-specified trust bundle) + ``"ca_certs"`` will be set if the string is not a directory recognized + by :py:func:`os.path.isdir`, otherwise ``"ca_certs_dir"`` will be + set. + * If ``"cert"`` is specified, ``"cert_file"`` will always be set. If + ``"cert"`` is a tuple with a second item, ``"key_file"`` will also + be present + + To override these settings, one may subclass this class, call this + method and use the above logic to change parameters as desired. For + example, if one wishes to use a custom :py:class:`ssl.SSLContext` one + must both set ``"ssl_context"`` and based on what else they require, + alter the other keys to ensure the desired behaviour. + + :param request: + The PreparedReqest being sent over the connection. + :type request: + :class:`~requests.models.PreparedRequest` + :param verify: + Either a boolean, in which case it controls whether + we verify the server's TLS certificate, or a string, in which case it + must be a path to a CA bundle to use. + :param cert: + (optional) Any user-provided SSL certificate for client + authentication (a.k.a., mTLS). This may be a string (i.e., just + the path to a file which holds both certificate and key) or a + tuple of length 2 with the certificate file path and key file + path. + :returns: + A tuple of two dictionaries. The first is the "host parameters" + portion of the Pool Key including scheme, hostname, and port. The + second is a dictionary of SSLContext related parameters. + """ + return _urllib3_request_context(request, verify, cert, self.poolmanager) + + def get_connection_with_tls_context(self, request, verify, proxies=None, cert=None): + """Returns a urllib3 connection for the given request and TLS settings. + This should not be called from user code, and is only exposed for use + when subclassing the :class:`HTTPAdapter `. + + :param request: + The :class:`PreparedRequest ` object to be sent + over the connection. 
+ :param verify: + Either a boolean, in which case it controls whether we verify the + server's TLS certificate, or a string, in which case it must be a + path to a CA bundle to use. + :param proxies: + (optional) The proxies dictionary to apply to the request. + :param cert: + (optional) Any user-provided SSL certificate to be used for client + authentication (a.k.a., mTLS). + :rtype: + urllib3.ConnectionPool + """ + proxy = select_proxy(request.url, proxies) + try: + host_params, pool_kwargs = self.build_connection_pool_key_attributes( + request, + verify, + cert, + ) + except ValueError as e: + raise InvalidURL(e, request=request) + if proxy: + proxy = prepend_scheme_if_needed(proxy, "http") + proxy_url = parse_url(proxy) + if not proxy_url.host: + raise InvalidProxyURL( + "Please check proxy URL. It is malformed " + "and could be missing the host." + ) + proxy_manager = self.proxy_manager_for(proxy) + conn = proxy_manager.connection_from_host( + **host_params, pool_kwargs=pool_kwargs + ) + else: + # Only scheme should be lower case + conn = self.poolmanager.connection_from_host( + **host_params, pool_kwargs=pool_kwargs + ) + + return conn + def get_connection(self, url, proxies=None): - """Returns a urllib3 connection for the given URL. This should not be + """DEPRECATED: Users should move to `get_connection_with_tls_context` + for all subclasses of HTTPAdapter using Requests>=2.32.2. + + Returns a urllib3 connection for the given URL. This should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter `. @@ -337,6 +504,15 @@ class HTTPAdapter(BaseAdapter): :param proxies: (optional) A Requests-style dictionary of proxies used on this request. :rtype: urllib3.ConnectionPool """ + warnings.warn( + ( + "`get_connection` has been deprecated in favor of " + "`get_connection_with_tls_context`. Custom HTTPAdapter subclasses " + "will need to migrate for Requests>=2.32.2. Please see " + "https://github.com/psf/requests/pull/6710 for more details." + ), + DeprecationWarning, + ) proxy = select_proxy(url, proxies) if proxy: @@ -391,6 +567,9 @@ class HTTPAdapter(BaseAdapter): using_socks_proxy = proxy_scheme.startswith("socks") url = request.path_url + if url.startswith("//"): # Don't confuse urllib3 + url = f"/{url.lstrip('/')}" + if is_proxied_http_request and not using_socks_proxy: url = urldefragauth(request.url) @@ -451,7 +630,9 @@ class HTTPAdapter(BaseAdapter): """ try: - conn = self.get_connection(request.url, proxies) + conn = self.get_connection_with_tls_context( + request, verify, proxies=proxies, cert=cert + ) except LocationValueError as e: raise InvalidURL(e, request=request) diff --git a/lib/requests/api.py b/lib/requests/api.py index cd0b3eea..59607445 100644 --- a/lib/requests/api.py +++ b/lib/requests/api.py @@ -25,7 +25,7 @@ def request(method, url, **kwargs): :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` - or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string + or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content_type'`` is a string defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers to add for the file. 
:param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. diff --git a/lib/requests/auth.py b/lib/requests/auth.py index 9733686d..4a7ce6dc 100644 --- a/lib/requests/auth.py +++ b/lib/requests/auth.py @@ -258,7 +258,6 @@ class HTTPDigestAuth(AuthBase): s_auth = r.headers.get("www-authenticate", "") if "digest" in s_auth.lower() and self._thread_local.num_401_calls < 2: - self._thread_local.num_401_calls += 1 pat = re.compile(r"digest ", flags=re.IGNORECASE) self._thread_local.chal = parse_dict_header(pat.sub("", s_auth, count=1)) diff --git a/lib/requests/compat.py b/lib/requests/compat.py index 6776163c..095de1b6 100644 --- a/lib/requests/compat.py +++ b/lib/requests/compat.py @@ -7,13 +7,28 @@ between Python 2 and Python 3. It remains for backwards compatibility until the next major version. """ -try: - import chardet -except ImportError: - import charset_normalizer as chardet - +import importlib import sys +# ------------------- +# Character Detection +# ------------------- + + +def _resolve_char_detection(): + """Find supported character detection libraries.""" + chardet = None + for lib in ("chardet", "charset_normalizer"): + if chardet is None: + try: + chardet = importlib.import_module(lib) + except ImportError: + pass + return chardet + + +chardet = _resolve_char_detection() + # ------- # Pythons # ------- diff --git a/lib/requests/cookies.py b/lib/requests/cookies.py index bf54ab23..f69d0cda 100644 --- a/lib/requests/cookies.py +++ b/lib/requests/cookies.py @@ -2,7 +2,7 @@ requests.cookies ~~~~~~~~~~~~~~~~ -Compatibility code to be able to use `cookielib.CookieJar` with requests. +Compatibility code to be able to use `http.cookiejar.CookieJar` with requests. requests.utils imports from here, so be careful with imports. """ @@ -23,7 +23,7 @@ except ImportError: class MockRequest: """Wraps a `requests.Request` to mimic a `urllib2.Request`. - The code in `cookielib.CookieJar` expects this interface in order to correctly + The code in `http.cookiejar.CookieJar` expects this interface in order to correctly manage cookie policies, i.e., determine whether a cookie can be set, given the domains of the request and the cookie. @@ -76,7 +76,7 @@ class MockRequest: return self._r.headers.get(name, self._new_headers.get(name, default)) def add_header(self, key, val): - """cookielib has no legitimate use for this method; add it back if you find one.""" + """cookiejar has no legitimate use for this method; add it back if you find one.""" raise NotImplementedError( "Cookie headers should be added with add_unredirected_header()" ) @@ -104,11 +104,11 @@ class MockResponse: """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`. ...what? Basically, expose the parsed HTTP headers from the server response - the way `cookielib` expects to see them. + the way `http.cookiejar` expects to see them. """ def __init__(self, headers): - """Make a MockResponse for `cookielib` to read. + """Make a MockResponse for `cookiejar` to read. :param headers: a httplib.HTTPMessage or analogous carrying the headers """ @@ -124,7 +124,7 @@ class MockResponse: def extract_cookies_to_jar(jar, request, response): """Extract the cookies from the response into a CookieJar. 
- :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar) + :param jar: http.cookiejar.CookieJar (not necessarily a RequestsCookieJar) :param request: our own requests.Request object :param response: urllib3.HTTPResponse object """ @@ -174,7 +174,7 @@ class CookieConflictError(RuntimeError): class RequestsCookieJar(cookielib.CookieJar, MutableMapping): - """Compatibility class; is a cookielib.CookieJar, but exposes a dict + """Compatibility class; is a http.cookiejar.CookieJar, but exposes a dict interface. This is the CookieJar we create by default for requests and sessions that @@ -341,7 +341,7 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping): self.set(name, value) def __delitem__(self, name): - """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s + """Deletes a cookie given a name. Wraps ``http.cookiejar.CookieJar``'s ``remove_cookie_by_name()``. """ remove_cookie_by_name(self, name) diff --git a/lib/requests/exceptions.py b/lib/requests/exceptions.py index e1cedf88..83986b48 100644 --- a/lib/requests/exceptions.py +++ b/lib/requests/exceptions.py @@ -41,6 +41,16 @@ class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError): CompatJSONDecodeError.__init__(self, *args) InvalidJSONError.__init__(self, *self.args, **kwargs) + def __reduce__(self): + """ + The __reduce__ method called when pickling the object must + be the one from the JSONDecodeError (be it json/simplejson) + as it expects all the arguments for instantiation, not just + one like the IOError, and the MRO would by default call the + __reduce__ method from the IOError due to the inheritance order. + """ + return CompatJSONDecodeError.__reduce__(self) + class HTTPError(RequestException): """An HTTP error occurred.""" diff --git a/lib/requests/models.py b/lib/requests/models.py index 617a4134..8f56ca7d 100644 --- a/lib/requests/models.py +++ b/lib/requests/models.py @@ -170,7 +170,7 @@ class RequestEncodingMixin: ) ) - for (k, v) in files: + for k, v in files: # support for explicit filename ft = None fh = None @@ -268,7 +268,6 @@ class Request(RequestHooksMixin): hooks=None, json=None, ): - # Default empty dicts for dict params. data = [] if data is None else data files = [] if files is None else files @@ -277,7 +276,7 @@ class Request(RequestHooksMixin): hooks = {} if hooks is None else hooks self.hooks = default_hooks() - for (k, v) in list(hooks.items()): + for k, v in list(hooks.items()): self.register_hook(event=k, hook=v) self.method = method @@ -790,7 +789,12 @@ class Response: @property def apparent_encoding(self): """The apparent encoding, provided by the charset_normalizer or chardet libraries.""" - return chardet.detect(self.content)["encoding"] + if chardet is not None: + return chardet.detect(self.content)["encoding"] + else: + # If no character detection library is available, we'll fall back + # to a standard Python utf-8 str. + return "utf-8" def iter_content(self, chunk_size=1, decode_unicode=False): """Iterates over the response data. 
When stream=True is set on the @@ -865,7 +869,6 @@ class Response: for chunk in self.iter_content( chunk_size=chunk_size, decode_unicode=decode_unicode ): - if pending is not None: chunk = pending + chunk diff --git a/lib/requests/packages.py b/lib/requests/packages.py index 77c45c9e..5ab3d8e2 100644 --- a/lib/requests/packages.py +++ b/lib/requests/packages.py @@ -1,13 +1,6 @@ import sys -try: - import chardet -except ImportError: - import warnings - - import charset_normalizer as chardet - - warnings.filterwarnings("ignore", "Trying to detect", module="charset_normalizer") +from .compat import chardet # This code exists for backwards compatibility reasons. # I don't like it either. Just look the other way. :) @@ -20,9 +13,11 @@ for package in ("urllib3", "idna"): if mod == package or mod.startswith(f"{package}."): sys.modules[f"requests.packages.{mod}"] = sys.modules[mod] -target = chardet.__name__ -for mod in list(sys.modules): - if mod == target or mod.startswith(f"{target}."): - target = target.replace(target, "chardet") - sys.modules[f"requests.packages.{target}"] = sys.modules[mod] -# Kinda cool, though, right? +if chardet is not None: + target = chardet.__name__ + for mod in list(sys.modules): + if mod == target or mod.startswith(f"{target}."): + imported_mod = sys.modules[mod] + sys.modules[f"requests.packages.{mod}"] = imported_mod + mod = mod.replace(target, "chardet") + sys.modules[f"requests.packages.{mod}"] = imported_mod diff --git a/lib/requests/sessions.py b/lib/requests/sessions.py index dbcf2a7b..b387bc36 100644 --- a/lib/requests/sessions.py +++ b/lib/requests/sessions.py @@ -262,7 +262,6 @@ class SessionRedirectMixin: if yield_requests: yield req else: - resp = self.send( req, stream=stream, @@ -326,7 +325,7 @@ class SessionRedirectMixin: # urllib3 handles proxy authorization for us in the standard adapter. # Avoid appending this to TLS tunneled requests where it may be leaked. - if not scheme.startswith('https') and username and password: + if not scheme.startswith("https") and username and password: headers["Proxy-Authorization"] = _basic_auth_str(username, password) return new_proxies @@ -389,7 +388,6 @@ class Session(SessionRedirectMixin): ] def __init__(self): - #: A case-insensitive dictionary of headers to be sent on each #: :class:`Request ` sent from this #: :class:`Session `. @@ -545,6 +543,8 @@ class Session(SessionRedirectMixin): :type allow_redirects: bool :param proxies: (optional) Dictionary mapping protocol or protocol and hostname to the URL of the proxy. + :param hooks: (optional) Dictionary mapping hook name to one event or + list of events, event must be callable. :param stream: (optional) whether to immediately download the response content. Defaults to ``False``. :param verify: (optional) Either a boolean, in which case it controls whether we verify @@ -711,7 +711,6 @@ class Session(SessionRedirectMixin): # Persist cookies if r.history: - # If the hooks create history then we want those cookies too for resp in r.history: extract_cookies_to_jar(self.cookies, resp.request, resp.raw) @@ -759,7 +758,7 @@ class Session(SessionRedirectMixin): # Set environment's proxies. 
no_proxy = proxies.get("no_proxy") if proxies is not None else None env_proxies = get_environ_proxies(url, no_proxy=no_proxy) - for (k, v) in env_proxies.items(): + for k, v in env_proxies.items(): proxies.setdefault(k, v) # Look for requests environment configuration @@ -785,8 +784,7 @@ class Session(SessionRedirectMixin): :rtype: requests.adapters.BaseAdapter """ - for (prefix, adapter) in self.adapters.items(): - + for prefix, adapter in self.adapters.items(): if url.lower().startswith(prefix.lower()): return adapter diff --git a/lib/requests/status_codes.py b/lib/requests/status_codes.py index 4bd072be..c7945a2f 100644 --- a/lib/requests/status_codes.py +++ b/lib/requests/status_codes.py @@ -24,7 +24,7 @@ _codes = { # Informational. 100: ("continue",), 101: ("switching_protocols",), - 102: ("processing",), + 102: ("processing", "early-hints"), 103: ("checkpoint",), 122: ("uri_too_long", "request_uri_too_long"), 200: ("ok", "okay", "all_ok", "all_okay", "all_good", "\\o/", "✓"), @@ -65,8 +65,8 @@ _codes = { 410: ("gone",), 411: ("length_required",), 412: ("precondition_failed", "precondition"), - 413: ("request_entity_too_large",), - 414: ("request_uri_too_large",), + 413: ("request_entity_too_large", "content_too_large"), + 414: ("request_uri_too_large", "uri_too_long"), 415: ("unsupported_media_type", "unsupported_media", "media_type"), 416: ( "requested_range_not_satisfiable", @@ -76,10 +76,10 @@ _codes = { 417: ("expectation_failed",), 418: ("im_a_teapot", "teapot", "i_am_a_teapot"), 421: ("misdirected_request",), - 422: ("unprocessable_entity", "unprocessable"), + 422: ("unprocessable_entity", "unprocessable", "unprocessable_content"), 423: ("locked",), 424: ("failed_dependency", "dependency"), - 425: ("unordered_collection", "unordered"), + 425: ("unordered_collection", "unordered", "too_early"), 426: ("upgrade_required", "upgrade"), 428: ("precondition_required", "precondition"), 429: ("too_many_requests", "too_many"), diff --git a/lib/requests/utils.py b/lib/requests/utils.py index a367417f..ae6c42f6 100644 --- a/lib/requests/utils.py +++ b/lib/requests/utils.py @@ -97,6 +97,8 @@ if sys.platform == "win32": # '' string by the localhost entry and the corresponding # canonical entry. proxyOverride = proxyOverride.split(";") + # filter out empty strings to avoid re.match return true in the following code. + proxyOverride = filter(None, proxyOverride) # now check if we match one of the registry values. for test in proxyOverride: if test == "": @@ -134,6 +136,9 @@ def super_len(o): total_length = None current_position = 0 + if isinstance(o, str): + o = o.encode("utf-8") + if hasattr(o, "__len__"): total_length = len(o) @@ -466,11 +471,7 @@ def dict_from_cookiejar(cj): :rtype: dict """ - cookie_dict = {} - - for cookie in cj: - cookie_dict[cookie.name] = cookie.value - + cookie_dict = {cookie.name: cookie.value for cookie in cj} return cookie_dict @@ -767,6 +768,7 @@ def should_bypass_proxies(url, no_proxy): :rtype: bool """ + # Prioritize lowercase environment variables over uppercase # to keep a consistent behaviour with other http projects (curl, wget). def get_proxy(key): @@ -862,7 +864,7 @@ def select_proxy(url, proxies): def resolve_proxies(request, proxies, trust_env=True): """This method takes proxy information from a request and configuration input to resolve a mapping of target proxies. This will consider settings - such a NO_PROXY to strip proxy configurations. + such as NO_PROXY to strip proxy configurations. 
:param request: Request or PreparedRequest :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs @@ -1054,7 +1056,7 @@ def _validate_header_part(header, header_part, header_validator_index): if not validator.match(header_part): header_kind = "name" if header_validator_index == 0 else "value" raise InvalidHeader( - f"Invalid leading whitespace, reserved character(s), or return" + f"Invalid leading whitespace, reserved character(s), or return " f"character(s) in header {header_kind}: {header_part!r}" ) diff --git a/requirements.txt b/requirements.txt index 6edced4e..5e3825d1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -34,7 +34,7 @@ pyparsing==3.1.2 python-dateutil==2.9.0.post0 python-twitter==3.5 pytz==2024.1 -requests==2.31.0 +requests==2.32.3 requests-oauthlib==2.0.0 rumps==0.4.0; platform_system == "Darwin" simplejson==3.19.2