Bump requests from 2.27.1 to 2.28.1 (#1781)

* Bump requests from 2.27.1 to 2.28.1

Bumps [requests](https://github.com/psf/requests) from 2.27.1 to 2.28.1.
- [Release notes](https://github.com/psf/requests/releases)
- [Changelog](https://github.com/psf/requests/blob/main/HISTORY.md)
- [Commits](https://github.com/psf/requests/compare/v2.27.1...v2.28.1)

---
updated-dependencies:
- dependency-name: requests
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

* Update requests==2.28.1

* Update urllib3==1.26.12

* Update certifi==2022.9.24

* Update idna==3.4

* Update charset-normalizer==2.1.1

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>

[skip ci]
dependabot[bot] 2022-11-12 17:12:19 -08:00 committed by GitHub
parent baa0e08c2a
commit af1aed0b6b
46 changed files with 3295 additions and 2709 deletions
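
The single file reproduced below is the vendored copy of requests' models.py (identifiable from its module docstring). Purely as a hedged aside, here is a minimal sketch for checking that an environment picked up the pins listed in the commit message above; it is not part of this commit, and since this project bundles its dependencies rather than pip-installing them, the distribution lookup is an assumption that only applies to a pip-managed environment.

from importlib.metadata import version  # Python 3.8+ standard library

# Expected pins after this bump, taken from the commit message above.
expected = {
    "requests": "2.28.1",
    "urllib3": "1.26.12",
    "certifi": "2022.9.24",
    "idna": "3.4",
    "charset-normalizer": "2.1.1",
}

for name, want in expected.items():
    have = version(name)  # raises PackageNotFoundError if the distribution is absent
    status = "OK" if have == want else "MISMATCH"
    print(f"{name}: installed {have}, expected {want} [{status}]")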


@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
"""
requests.models
~~~~~~~~~~~~~~~
@@ -8,48 +6,72 @@ This module contains the primary objects that power Requests.
"""
import datetime
import sys
# Import encoding now, to avoid implicit import later.
# Implicit import within threads may cause LookupError when standard library is in a ZIP,
# such as in Embedded Python. See https://github.com/psf/requests/issues/3578.
import encodings.idna
import encodings.idna # noqa: F401
from io import UnsupportedOperation
from urllib3.exceptions import (
DecodeError,
LocationParseError,
ProtocolError,
ReadTimeoutError,
SSLError,
)
from urllib3.fields import RequestField
from urllib3.filepost import encode_multipart_formdata
from urllib3.util import parse_url
from urllib3.exceptions import (
DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)
from io import UnsupportedOperation
from .hooks import default_hooks
from .structures import CaseInsensitiveDict
from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .exceptions import (
HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
ContentDecodingError, ConnectionError, StreamConsumedError,
InvalidJSONError)
from .exceptions import JSONDecodeError as RequestsJSONDecodeError
from ._internal_utils import to_native_string, unicode_is_ascii
from .utils import (
guess_filename, get_auth_from_url, requote_uri,
stream_decode_response_unicode, to_key_val_list, parse_header_links,
iter_slices, guess_json_utf, super_len, check_header_validity)
from .auth import HTTPBasicAuth
from .compat import (
Callable, Mapping,
cookielib, urlunparse, urlsplit, urlencode, str, bytes,
is_py2, chardet, builtin_str, basestring, JSONDecodeError)
Callable,
JSONDecodeError,
Mapping,
basestring,
builtin_str,
chardet,
cookielib,
)
from .compat import json as complexjson
from .compat import urlencode, urlsplit, urlunparse
from .cookies import _copy_cookie_jar, cookiejar_from_dict, get_cookie_header
from .exceptions import (
ChunkedEncodingError,
ConnectionError,
ContentDecodingError,
HTTPError,
InvalidJSONError,
InvalidURL,
)
from .exceptions import JSONDecodeError as RequestsJSONDecodeError
from .exceptions import MissingSchema
from .exceptions import SSLError as RequestsSSLError
from .exceptions import StreamConsumedError
from .hooks import default_hooks
from .status_codes import codes
from .structures import CaseInsensitiveDict
from .utils import (
check_header_validity,
get_auth_from_url,
guess_filename,
guess_json_utf,
iter_slices,
parse_header_links,
requote_uri,
stream_decode_response_unicode,
super_len,
to_key_val_list,
)
#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
codes.moved, # 301
codes.found, # 302
codes.other, # 303
codes.moved, # 301
codes.found, # 302
codes.other, # 303
codes.temporary_redirect, # 307
codes.permanent_redirect, # 308
)
@@ -59,7 +81,7 @@ CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512
class RequestEncodingMixin(object):
class RequestEncodingMixin:
@property
def path_url(self):
"""Build the path URL to use."""
@@ -70,16 +92,16 @@ class RequestEncodingMixin(object):
path = p.path
if not path:
path = '/'
path = "/"
url.append(path)
query = p.query
if query:
url.append('?')
url.append("?")
url.append(query)
return ''.join(url)
return "".join(url)
@staticmethod
def _encode_params(data):
@@ -92,18 +114,21 @@ class RequestEncodingMixin(object):
if isinstance(data, (str, bytes)):
return data
elif hasattr(data, 'read'):
elif hasattr(data, "read"):
return data
elif hasattr(data, '__iter__'):
elif hasattr(data, "__iter__"):
result = []
for k, vs in to_key_val_list(data):
if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
if isinstance(vs, basestring) or not hasattr(vs, "__iter__"):
vs = [vs]
for v in vs:
if v is not None:
result.append(
(k.encode('utf-8') if isinstance(k, str) else k,
v.encode('utf-8') if isinstance(v, str) else v))
(
k.encode("utf-8") if isinstance(k, str) else k,
v.encode("utf-8") if isinstance(v, str) else v,
)
)
return urlencode(result, doseq=True)
else:
return data
@@ -118,7 +143,7 @@ class RequestEncodingMixin(object):
The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)
or 4-tuples (filename, fileobj, contentype, custom_headers).
"""
if (not files):
if not files:
raise ValueError("Files must be provided.")
elif isinstance(data, basestring):
raise ValueError("Data must not be a string.")
@@ -128,7 +153,7 @@ class RequestEncodingMixin(object):
files = to_key_val_list(files or {})
for field, val in fields:
if isinstance(val, basestring) or not hasattr(val, '__iter__'):
if isinstance(val, basestring) or not hasattr(val, "__iter__"):
val = [val]
for v in val:
if v is not None:
@@ -137,8 +162,13 @@ class RequestEncodingMixin(object):
v = str(v)
new_fields.append(
(field.decode('utf-8') if isinstance(field, bytes) else field,
v.encode('utf-8') if isinstance(v, str) else v))
(
field.decode("utf-8")
if isinstance(field, bytes)
else field,
v.encode("utf-8") if isinstance(v, str) else v,
)
)
for (k, v) in files:
# support for explicit filename
@@ -157,7 +187,7 @@ class RequestEncodingMixin(object):
if isinstance(fp, (str, bytes, bytearray)):
fdata = fp
elif hasattr(fp, 'read'):
elif hasattr(fp, "read"):
fdata = fp.read()
elif fp is None:
continue
@@ -173,16 +203,16 @@ class RequestEncodingMixin(object):
return body, content_type
class RequestHooksMixin(object):
class RequestHooksMixin:
def register_hook(self, event, hook):
"""Properly register a hook."""
if event not in self.hooks:
raise ValueError('Unsupported event specified, with event name "%s"' % (event))
raise ValueError(f'Unsupported event specified, with event name "{event}"')
if isinstance(hook, Callable):
self.hooks[event].append(hook)
elif hasattr(hook, '__iter__'):
elif hasattr(hook, "__iter__"):
self.hooks[event].extend(h for h in hook if isinstance(h, Callable))
def deregister_hook(self, event, hook):
@@ -225,9 +255,19 @@ class Request(RequestHooksMixin):
<PreparedRequest [GET]>
"""
def __init__(self,
method=None, url=None, headers=None, files=None, data=None,
params=None, auth=None, cookies=None, hooks=None, json=None):
def __init__(
self,
method=None,
url=None,
headers=None,
files=None,
data=None,
params=None,
auth=None,
cookies=None,
hooks=None,
json=None,
):
# Default empty dicts for dict params.
data = [] if data is None else data
@@ -251,7 +291,7 @@ class Request(RequestHooksMixin):
self.cookies = cookies
def __repr__(self):
return '<Request [%s]>' % (self.method)
return f"<Request [{self.method}]>"
def prepare(self):
"""Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
@@ -309,9 +349,19 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
#: integer denoting starting position of a readable file-like body.
self._body_position = None
def prepare(self,
method=None, url=None, headers=None, files=None, data=None,
params=None, auth=None, cookies=None, hooks=None, json=None):
def prepare(
self,
method=None,
url=None,
headers=None,
files=None,
data=None,
params=None,
auth=None,
cookies=None,
hooks=None,
json=None,
):
"""Prepares the entire request with the given parameters."""
self.prepare_method(method)
@@ -328,7 +378,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
self.prepare_hooks(hooks)
def __repr__(self):
return '<PreparedRequest [%s]>' % (self.method)
return f"<PreparedRequest [{self.method}]>"
def copy(self):
p = PreparedRequest()
@@ -352,7 +402,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
import idna
try:
host = idna.encode(host, uts46=True).decode('utf-8')
host = idna.encode(host, uts46=True).decode("utf-8")
except idna.IDNAError:
raise UnicodeError
return host
@@ -365,9 +415,9 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
#: on python 3.x.
#: https://github.com/psf/requests/pull/2238
if isinstance(url, bytes):
url = url.decode('utf8')
url = url.decode("utf8")
else:
url = unicode(url) if is_py2 else str(url)
url = str(url)
# Remove leading whitespaces from url
url = url.lstrip()
@@ -375,7 +425,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
# Don't do any URL preparation for non-HTTP schemes like `mailto`,
# `data` etc to work around exceptions from `url_parse`, which
# handles RFC 3986 only.
if ':' in url and not url.lower().startswith('http'):
if ":" in url and not url.lower().startswith("http"):
self.url = url
return
@@ -386,13 +436,13 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
raise InvalidURL(*e.args)
if not scheme:
error = ("Invalid URL {0!r}: No scheme supplied. Perhaps you meant http://{0}?")
error = error.format(to_native_string(url, 'utf8'))
raise MissingSchema(error)
raise MissingSchema(
f"Invalid URL {url!r}: No scheme supplied. "
f"Perhaps you meant http://{url}?"
)
if not host:
raise InvalidURL("Invalid URL %r: No host supplied" % url)
raise InvalidURL(f"Invalid URL {url!r}: No host supplied")
# In general, we want to try IDNA encoding the hostname if the string contains
# non-ASCII characters. This allows users to automatically get the correct IDNA
@@ -402,33 +452,21 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
try:
host = self._get_idna_encoded_host(host)
except UnicodeError:
raise InvalidURL('URL has an invalid label.')
elif host.startswith((u'*', u'.')):
raise InvalidURL('URL has an invalid label.')
raise InvalidURL("URL has an invalid label.")
elif host.startswith(("*", ".")):
raise InvalidURL("URL has an invalid label.")
# Carefully reconstruct the network location
netloc = auth or ''
netloc = auth or ""
if netloc:
netloc += '@'
netloc += "@"
netloc += host
if port:
netloc += ':' + str(port)
netloc += f":{port}"
# Bare domains aren't valid URLs.
if not path:
path = '/'
if is_py2:
if isinstance(scheme, str):
scheme = scheme.encode('utf-8')
if isinstance(netloc, str):
netloc = netloc.encode('utf-8')
if isinstance(path, str):
path = path.encode('utf-8')
if isinstance(query, str):
query = query.encode('utf-8')
if isinstance(fragment, str):
fragment = fragment.encode('utf-8')
path = "/"
if isinstance(params, (str, bytes)):
params = to_native_string(params)
@@ -436,7 +474,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
enc_params = self._encode_params(params)
if enc_params:
if query:
query = '%s&%s' % (query, enc_params)
query = f"{query}&{enc_params}"
else:
query = enc_params
@@ -467,7 +505,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
if not data and json is not None:
# urllib3 requires a bytes-like body. Python 2's json.dumps
# provides this natively, but Python 3 gives a Unicode string.
content_type = 'application/json'
content_type = "application/json"
try:
body = complexjson.dumps(json, allow_nan=False)
@@ -475,12 +513,14 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
raise InvalidJSONError(ve, request=self)
if not isinstance(body, bytes):
body = body.encode('utf-8')
body = body.encode("utf-8")
is_stream = all([
hasattr(data, '__iter__'),
not isinstance(data, (basestring, list, tuple, Mapping))
])
is_stream = all(
[
hasattr(data, "__iter__"),
not isinstance(data, (basestring, list, tuple, Mapping)),
]
)
if is_stream:
try:
@@ -490,24 +530,26 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
body = data
if getattr(body, 'tell', None) is not None:
if getattr(body, "tell", None) is not None:
# Record the current file position before reading.
# This will allow us to rewind a file in the event
# of a redirect.
try:
self._body_position = body.tell()
except (IOError, OSError):
except OSError:
# This differentiates from None, allowing us to catch
# a failed `tell()` later when trying to rewind the body
self._body_position = object()
if files:
raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
raise NotImplementedError(
"Streamed bodies and files are mutually exclusive."
)
if length:
self.headers['Content-Length'] = builtin_str(length)
self.headers["Content-Length"] = builtin_str(length)
else:
self.headers['Transfer-Encoding'] = 'chunked'
self.headers["Transfer-Encoding"] = "chunked"
else:
# Multi-part file uploads.
if files:
@@ -515,16 +557,16 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
else:
if data:
body = self._encode_params(data)
if isinstance(data, basestring) or hasattr(data, 'read'):
if isinstance(data, basestring) or hasattr(data, "read"):
content_type = None
else:
content_type = 'application/x-www-form-urlencoded'
content_type = "application/x-www-form-urlencoded"
self.prepare_content_length(body)
# Add content-type if it wasn't explicitly provided.
if content_type and ('content-type' not in self.headers):
self.headers['Content-Type'] = content_type
if content_type and ("content-type" not in self.headers):
self.headers["Content-Type"] = content_type
self.body = body
@@ -535,13 +577,16 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
if length:
# If length exists, set it. Otherwise, we fallback
# to Transfer-Encoding: chunked.
self.headers['Content-Length'] = builtin_str(length)
elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None:
self.headers["Content-Length"] = builtin_str(length)
elif (
self.method not in ("GET", "HEAD")
and self.headers.get("Content-Length") is None
):
# Set Content-Length to 0 for methods that can have a body
# but don't provide one. (i.e. not GET or HEAD)
self.headers['Content-Length'] = '0'
self.headers["Content-Length"] = "0"
def prepare_auth(self, auth, url=''):
def prepare_auth(self, auth, url=""):
"""Prepares the given HTTP auth data."""
# If no Auth is explicitly provided, extract it from the URL first.
@@ -581,7 +626,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
cookie_header = get_cookie_header(self._cookies, self)
if cookie_header is not None:
self.headers['Cookie'] = cookie_header
self.headers["Cookie"] = cookie_header
def prepare_hooks(self, hooks):
"""Prepares the given hooks."""
@@ -593,14 +638,22 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
self.register_hook(event, hooks[event])
class Response(object):
class Response:
"""The :class:`Response <Response>` object, which contains a
server's response to an HTTP request.
"""
__attrs__ = [
'_content', 'status_code', 'headers', 'url', 'history',
'encoding', 'reason', 'cookies', 'elapsed', 'request'
"_content",
"status_code",
"headers",
"url",
"history",
"encoding",
"reason",
"cookies",
"elapsed",
"request",
]
def __init__(self):
@@ -669,11 +722,11 @@ class Response(object):
setattr(self, name, value)
# pickled objects do not have .raw
setattr(self, '_content_consumed', True)
setattr(self, 'raw', None)
setattr(self, "_content_consumed", True)
setattr(self, "raw", None)
def __repr__(self):
return '<Response [%s]>' % (self.status_code)
return f"<Response [{self.status_code}]>"
def __bool__(self):
"""Returns True if :attr:`status_code` is less than 400.
@@ -719,12 +772,15 @@ class Response(object):
"""True if this Response is a well-formed HTTP redirect that could have
been processed automatically (by :meth:`Session.resolve_redirects`).
"""
return ('location' in self.headers and self.status_code in REDIRECT_STATI)
return "location" in self.headers and self.status_code in REDIRECT_STATI
@property
def is_permanent_redirect(self):
"""True if this Response one of the permanent versions of redirect."""
return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
return "location" in self.headers and self.status_code in (
codes.moved_permanently,
codes.permanent_redirect,
)
@property
def next(self):
@@ -734,7 +790,7 @@ class Response(object):
@property
def apparent_encoding(self):
"""The apparent encoding, provided by the charset_normalizer or chardet libraries."""
return chardet.detect(self.content)['encoding']
return chardet.detect(self.content)["encoding"]
def iter_content(self, chunk_size=1, decode_unicode=False):
"""Iterates over the response data. When stream=True is set on the
@@ -755,16 +811,17 @@ class Response(object):
def generate():
# Special case for urllib3.
if hasattr(self.raw, 'stream'):
if hasattr(self.raw, "stream"):
try:
for chunk in self.raw.stream(chunk_size, decode_content=True):
yield chunk
yield from self.raw.stream(chunk_size, decode_content=True)
except ProtocolError as e:
raise ChunkedEncodingError(e)
except DecodeError as e:
raise ContentDecodingError(e)
except ReadTimeoutError as e:
raise ConnectionError(e)
except SSLError as e:
raise RequestsSSLError(e)
else:
# Standard file-like object.
while True:
@@ -778,7 +835,9 @@ class Response(object):
if self._content_consumed and isinstance(self._content, bool):
raise StreamConsumedError()
elif chunk_size is not None and not isinstance(chunk_size, int):
raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size))
raise TypeError(
f"chunk_size must be an int, it is instead a {type(chunk_size)}."
)
# simulate reading small chunks of the content
reused_chunks = iter_slices(self._content, chunk_size)
@@ -791,7 +850,9 @@ class Response(object):
return chunks
def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None):
def iter_lines(
self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None
):
"""Iterates over the response data, one line at a time. When
stream=True is set on the request, this avoids reading the
content at once into memory for large responses.
@@ -801,7 +862,9 @@ class Response(object):
pending = None
for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
for chunk in self.iter_content(
chunk_size=chunk_size, decode_unicode=decode_unicode
):
if pending is not None:
chunk = pending + chunk
@@ -816,8 +879,7 @@ class Response(object):
else:
pending = None
for line in lines:
yield line
yield from lines
if pending is not None:
yield pending
@@ -829,13 +891,12 @@ class Response(object):
if self._content is False:
# Read the contents.
if self._content_consumed:
raise RuntimeError(
'The content for this response was already consumed')
raise RuntimeError("The content for this response was already consumed")
if self.status_code == 0 or self.raw is None:
self._content = None
else:
self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b''
self._content = b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b""
self._content_consumed = True
# don't need to release the connection; that's been handled by urllib3
@@ -860,7 +921,7 @@ class Response(object):
encoding = self.encoding
if not self.content:
return str('')
return ""
# Fallback to auto-detected encoding.
if self.encoding is None:
@@ -868,7 +929,7 @@ class Response(object):
# Decode unicode from given encoding.
try:
content = str(self.content, encoding, errors='replace')
content = str(self.content, encoding, errors="replace")
except (LookupError, TypeError):
# A LookupError is raised if the encoding was not found which could
# indicate a misspelling or similar mistake.
@@ -876,7 +937,7 @@ class Response(object):
# A TypeError can be raised if encoding is None
#
# So we try blindly encoding.
content = str(self.content, errors='replace')
content = str(self.content, errors="replace")
return content
@@ -896,65 +957,65 @@ class Response(object):
encoding = guess_json_utf(self.content)
if encoding is not None:
try:
return complexjson.loads(
self.content.decode(encoding), **kwargs
)
return complexjson.loads(self.content.decode(encoding), **kwargs)
except UnicodeDecodeError:
# Wrong UTF codec detected; usually because it's not UTF-8
# but some other 8-bit codec. This is an RFC violation,
# and the server didn't bother to tell us what codec *was*
# used.
pass
except JSONDecodeError as e:
raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)
try:
return complexjson.loads(self.text, **kwargs)
except JSONDecodeError as e:
# Catch JSON-related errors and raise as requests.JSONDecodeError
# This aliases json.JSONDecodeError and simplejson.JSONDecodeError
if is_py2: # e is a ValueError
raise RequestsJSONDecodeError(e.message)
else:
raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)
raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)
@property
def links(self):
"""Returns the parsed header links of the response, if any."""
header = self.headers.get('link')
header = self.headers.get("link")
# l = MultiDict()
l = {}
resolved_links = {}
if header:
links = parse_header_links(header)
for link in links:
key = link.get('rel') or link.get('url')
l[key] = link
key = link.get("rel") or link.get("url")
resolved_links[key] = link
return l
return resolved_links
def raise_for_status(self):
"""Raises :class:`HTTPError`, if one occurred."""
http_error_msg = ''
http_error_msg = ""
if isinstance(self.reason, bytes):
# We attempt to decode utf-8 first because some servers
# choose to localize their reason strings. If the string
# isn't utf-8, we fall back to iso-8859-1 for all other
# encodings. (See PR #3538)
try:
reason = self.reason.decode('utf-8')
reason = self.reason.decode("utf-8")
except UnicodeDecodeError:
reason = self.reason.decode('iso-8859-1')
reason = self.reason.decode("iso-8859-1")
else:
reason = self.reason
if 400 <= self.status_code < 500:
http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url)
http_error_msg = (
f"{self.status_code} Client Error: {reason} for url: {self.url}"
)
elif 500 <= self.status_code < 600:
http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url)
http_error_msg = (
f"{self.status_code} Server Error: {reason} for url: {self.url}"
)
if http_error_msg:
raise HTTPError(http_error_msg, response=self)
@@ -968,6 +1029,6 @@ class Response(object):
if not self._content_consumed:
self.raw.close()
release_conn = getattr(self.raw, 'release_conn', None)
release_conn = getattr(self.raw, "release_conn", None)
if release_conn is not None:
release_conn()
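
To close, the most user-visible behaviour changes in the models.py diff above are the switch to f-string error messages, the unified raise RequestsJSONDecodeError(e.msg, e.doc, e.pos) in Response.json() now that the Python 2 branch is gone, and the new wrapping of urllib3's SSLError as requests' own SSLError inside iter_content(). A minimal sketch of the JSON case, assuming only that the example URL returns a non-JSON body (the URL is illustrative, not something this commit touches):

import requests

try:
    # example.com serves an HTML page, so .json() cannot parse the body.
    requests.get("https://example.com/", timeout=10).json()
except requests.exceptions.JSONDecodeError as exc:
    # Raised via RequestsJSONDecodeError(e.msg, e.doc, e.pos), as shown in the diff.
    print(f"Body was not JSON: {exc.msg} (position {exc.pos})")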