Mirror of https://github.com/Tautulli/Tautulli.git (synced 2025-07-11 15:56:07 -07:00)
Bump requests from 2.26.0 to 2.27.0 (#1602)
* Bump requests from 2.26.0 to 2.27.0

Bumps [requests](https://github.com/psf/requests) from 2.26.0 to 2.27.0.
- [Release notes](https://github.com/psf/requests/releases)
- [Changelog](https://github.com/psf/requests/blob/main/HISTORY.md)
- [Commits](https://github.com/psf/requests/compare/v2.26.0...v2.27.0)

---
updated-dependencies:
- dependency-name: requests
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

* Update requests==2.27.0

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
This commit is contained in:
parent
2c7a3934cb
commit
bb5ebe0fa5
11 changed files with 156 additions and 83 deletions
@@ -21,6 +21,7 @@ import warnings
 import zipfile
 from collections import OrderedDict
 from urllib3.util import make_headers
+from urllib3.util import parse_url
 
 from .__version__ import __version__
 from . import certs
@@ -124,7 +125,10 @@ def super_len(o):
     elif hasattr(o, 'fileno'):
         try:
             fileno = o.fileno()
-        except io.UnsupportedOperation:
+        except (io.UnsupportedOperation, AttributeError):
+            # AttributeError is a surprising exception, seeing as how we've just checked
+            # that `hasattr(o, 'fileno')`. It happens for objects obtained via
+            # `Tarfile.extractfile()`, per issue 5229.
             pass
         else:
             total_length = os.fstat(fileno).st_size
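For context (not part of the commit): a minimal sketch of the situation this hunk guards against, where hasattr(o, 'fileno') succeeds but calling fileno() still raises AttributeError. The BrokenFileno class is a hypothetical stand-in for the kind of object described in requests issue 5229, not the real tarfile object.

import io
import os

class BrokenFileno:
    """Hypothetical object: advertises fileno() but fails when it is called."""
    def fileno(self):
        raise AttributeError("underlying object has no usable file descriptor")

def length_via_fileno(o):
    """Mirrors the guarded pattern in the hunk above."""
    total_length = None
    if hasattr(o, 'fileno'):
        try:
            fileno = o.fileno()
        except (io.UnsupportedOperation, AttributeError):
            pass  # fall back to other length strategies, as super_len() does
        else:
            total_length = os.fstat(fileno).st_size
    return total_length

print(length_via_fileno(BrokenFileno()))  # None, instead of an uncaught AttributeError
with open(__file__, 'rb') as f:
    print(length_via_fileno(f))           # real size via os.fstat() on the descriptor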
@@ -154,7 +158,7 @@ def super_len(o):
                 current_position = total_length
         else:
             if hasattr(o, 'seek') and total_length is None:
-                # StringIO and BytesIO have seek but no useable fileno
+                # StringIO and BytesIO have seek but no usable fileno
                 try:
                     # seek to end of file
                     o.seek(0, 2)
@@ -251,6 +255,10 @@ def extract_zipped_paths(path):
     archive, member = os.path.split(path)
     while archive and not os.path.exists(archive):
         archive, prefix = os.path.split(archive)
+        if not prefix:
+            # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split),
+            # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users
+            break
         member = '/'.join([prefix, member])
 
     if not zipfile.is_zipfile(archive):
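For context (not part of the commit): the corner case behind the new guard is os.path.split() reaching a fixed point, where the head comes back unchanged and the tail is empty. If that head also does not exist on disk, the old loop never made progress. A hedged sketch using Windows path semantics via ntpath, with a hypothetical drive letter:

import ntpath

# Splitting a bare drive spec makes no progress: head is unchanged, tail is empty.
archive = 'Z:\\'                      # hypothetical drive, assumed not to exist
head, prefix = ntpath.split(archive)
print(head == archive, repr(prefix))  # True '' -> the new 'if not prefix: break' fires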
@@ -826,6 +834,33 @@ def select_proxy(url, proxies):
     return proxy
 
 
+def resolve_proxies(request, proxies, trust_env=True):
+    """This method takes proxy information from a request and configuration
+    input to resolve a mapping of target proxies. This will consider settings
+    such a NO_PROXY to strip proxy configurations.
+
+    :param request: Request or PreparedRequest
+    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
+    :param trust_env: Boolean declaring whether to trust environment configs
+
+    :rtype: dict
+    """
+    proxies = proxies if proxies is not None else {}
+    url = request.url
+    scheme = urlparse(url).scheme
+    no_proxy = proxies.get('no_proxy')
+    new_proxies = proxies.copy()
+
+    if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy):
+        environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)
+
+        proxy = environ_proxies.get(scheme, environ_proxies.get('all'))
+
+        if proxy:
+            new_proxies.setdefault(scheme, proxy)
+    return new_proxies
+
+
 def default_user_agent(name="python-requests"):
     """
     Return a string representing the default user agent.
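For context (not part of the commit): a minimal usage sketch of the new helper, assuming requests 2.27.0 or later is importable; the proxy URL and target host below are placeholders.

import requests
from requests.utils import resolve_proxies

# Merge an explicit proxy mapping with environment proxies (HTTP_PROXY and friends),
# honouring NO_PROXY, as the docstring above describes.
req = requests.Request('GET', 'https://example.com/').prepare()
proxies = resolve_proxies(req, {'https': 'http://proxy.local:3128'}, trust_env=True)
print(proxies)  # at least {'https': 'http://proxy.local:3128'}, plus any environment entries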
@@ -928,15 +963,23 @@ def prepend_scheme_if_needed(url, new_scheme):
 
     :rtype: str
     """
-    scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme)
+    parsed = parse_url(url)
+    scheme, auth, host, port, path, query, fragment = parsed
 
-    # urlparse is a finicky beast, and sometimes decides that there isn't a
-    # netloc present. Assume that it's being over-cautious, and switch netloc
-    # and path if urlparse decided there was no netloc.
+    # A defect in urlparse determines that there isn't a netloc present in some
+    # urls. We previously assumed parsing was overly cautious, and swapped the
+    # netloc and path. Due to a lack of tests on the original defect, this is
+    # maintained with parse_url for backwards compatibility.
+    netloc = parsed.netloc
     if not netloc:
         netloc, path = path, netloc
 
-    return urlunparse((scheme, netloc, path, params, query, fragment))
+    if scheme is None:
+        scheme = new_scheme
+    if path is None:
+        path = ''
+
+    return urlunparse((scheme, netloc, path, '', query, fragment))
 
 
 def get_auth_from_url(url):
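For context (not part of the commit): with the parse_url-based implementation above, a scheme-less proxy string gains the supplied scheme while an explicit scheme is left alone. A small sketch, assuming requests 2.27.0 is importable; the hosts are placeholders.

from requests.utils import prepend_scheme_if_needed

print(prepend_scheme_if_needed('example.com:8080', 'http'))     # expected: http://example.com:8080
print(prepend_scheme_if_needed('https://example.com', 'http'))  # expected: https://example.com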