Mirror of https://github.com/Tautulli/Tautulli.git (synced 2025-07-06 21:21:15 -07:00)
Bump requests-oauthlib from 1.3.0 to 1.3.1 (#1636)
* Bump requests-oauthlib from 1.3.0 to 1.3.1

Bumps [requests-oauthlib](https://github.com/requests/requests-oauthlib) from 1.3.0 to 1.3.1.
- [Release notes](https://github.com/requests/requests-oauthlib/releases)
- [Changelog](https://github.com/requests/requests-oauthlib/blob/master/HISTORY.rst)
- [Commits](https://github.com/requests/requests-oauthlib/compare/v1.3.0...v1.3.1)

---
updated-dependencies:
- dependency-name: requests-oauthlib
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

* Update requests-oauthlib==1.3.1

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>

[skip ci]
This commit is contained in:
parent 5523d4ba88
commit 61960aa744
26 changed files with 464 additions and 77 deletions
@@ -13,7 +13,7 @@ from .cd import (
     mb_encoding_languages,
     merge_coherence_ratios,
 )
-from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE
+from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE
 from .md import mess_ratio
 from .models import CharsetMatch, CharsetMatches
 from .utils import (
@@ -25,6 +25,8 @@ from .utils import (
     should_strip_sig_or_bom,
 )
 
+# Will most likely be controversial
+# logging.addLevelName(TRACE, "TRACE")
 logger = logging.getLogger("charset_normalizer")
 explain_handler = logging.StreamHandler()
 explain_handler.setFormatter(
@@ -70,19 +72,20 @@ def from_bytes(
     if explain:
         previous_logger_level = logger.level  # type: int
         logger.addHandler(explain_handler)
-        logger.setLevel(logging.DEBUG)
+        logger.setLevel(TRACE)
 
     length = len(sequences)  # type: int
 
     if length == 0:
-        logger.warning("Encoding detection on empty bytes, assuming utf_8 intention.")
+        logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.")
         if explain:
             logger.removeHandler(explain_handler)
             logger.setLevel(previous_logger_level or logging.WARNING)
         return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")])
 
     if cp_isolation is not None:
-        logger.debug(
+        logger.log(
+            TRACE,
             "cp_isolation is set. use this flag for debugging purpose. "
             "limited list of encoding allowed : %s.",
             ", ".join(cp_isolation),
@@ -92,7 +95,8 @@ def from_bytes(
         cp_isolation = []
 
     if cp_exclusion is not None:
-        logger.debug(
+        logger.log(
+            TRACE,
             "cp_exclusion is set. use this flag for debugging purpose. "
             "limited list of encoding excluded : %s.",
             ", ".join(cp_exclusion),
@@ -102,7 +106,8 @@ def from_bytes(
         cp_exclusion = []
 
     if length <= (chunk_size * steps):
-        logger.debug(
+        logger.log(
+            TRACE,
            "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.",
             steps,
             chunk_size,
@@ -118,16 +123,18 @@ def from_bytes(
     is_too_large_sequence = len(sequences) >= TOO_BIG_SEQUENCE  # type: bool
 
     if is_too_small_sequence:
-        logger.warning(
+        logger.log(
+            TRACE,
             "Trying to detect encoding from a tiny portion of ({}) byte(s).".format(
                 length
-            )
+            ),
         )
     elif is_too_large_sequence:
-        logger.info(
+        logger.log(
+            TRACE,
             "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format(
                 length
-            )
+            ),
         )
 
     prioritized_encodings = []  # type: List[str]
@@ -138,7 +145,8 @@ def from_bytes(
 
     if specified_encoding is not None:
         prioritized_encodings.append(specified_encoding)
-        logger.info(
+        logger.log(
+            TRACE,
             "Detected declarative mark in sequence. Priority +1 given for %s.",
             specified_encoding,
         )
@@ -157,7 +165,8 @@ def from_bytes(
 
     if sig_encoding is not None:
         prioritized_encodings.append(sig_encoding)
-        logger.info(
+        logger.log(
+            TRACE,
             "Detected a SIG or BOM mark on first %i byte(s). Priority +1 given for %s.",
             len(sig_payload),
             sig_encoding,
@@ -188,7 +197,8 @@ def from_bytes(
         )  # type: bool
 
         if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available:
-            logger.debug(
+            logger.log(
+                TRACE,
                 "Encoding %s wont be tested as-is because it require a BOM. Will try some sub-encoder LE/BE.",
                 encoding_iana,
             )
@@ -197,8 +207,10 @@ def from_bytes(
         try:
            is_multi_byte_decoder = is_multi_byte_encoding(encoding_iana)  # type: bool
         except (ModuleNotFoundError, ImportError):
-            logger.debug(
-                "Encoding %s does not provide an IncrementalDecoder", encoding_iana
+            logger.log(
+                TRACE,
+                "Encoding %s does not provide an IncrementalDecoder",
+                encoding_iana,
             )
             continue
 
@@ -219,7 +231,8 @@ def from_bytes(
             )
         except (UnicodeDecodeError, LookupError) as e:
             if not isinstance(e, LookupError):
-                logger.debug(
+                logger.log(
+                    TRACE,
                     "Code page %s does not fit given bytes sequence at ALL. %s",
                     encoding_iana,
                     str(e),
@@ -235,7 +248,8 @@ def from_bytes(
                 break
 
         if similar_soft_failure_test:
-            logger.debug(
+            logger.log(
+                TRACE,
                 "%s is deemed too similar to code page %s and was consider unsuited already. Continuing!",
                 encoding_iana,
                 encoding_soft_failed,
@@ -255,7 +269,8 @@ def from_bytes(
         )  # type: bool
 
         if multi_byte_bonus:
-            logger.debug(
+            logger.log(
+                TRACE,
                 "Code page %s is a multi byte encoding table and it appear that at least one character "
                 "was encoded using n-bytes.",
                 encoding_iana,
@@ -285,7 +300,8 @@ def from_bytes(
                     errors="ignore" if is_multi_byte_decoder else "strict",
                 )  # type: str
             except UnicodeDecodeError as e:  # Lazy str loading may have missed something there
-                logger.debug(
+                logger.log(
+                    TRACE,
                     "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s",
                     encoding_iana,
                     str(e),
@@ -337,7 +353,8 @@ def from_bytes(
                 try:
                     sequences[int(50e3) :].decode(encoding_iana, errors="strict")
                 except UnicodeDecodeError as e:
-                    logger.debug(
+                    logger.log(
+                        TRACE,
                         "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. %s",
                         encoding_iana,
                         str(e),
@@ -350,7 +367,8 @@ def from_bytes(
         )  # type: float
         if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up:
             tested_but_soft_failure.append(encoding_iana)
-            logger.info(
+            logger.log(
+                TRACE,
                 "%s was excluded because of initial chaos probing. Gave up %i time(s). "
                 "Computed mean chaos is %f %%.",
                 encoding_iana,
@@ -373,7 +391,8 @@ def from_bytes(
                 fallback_u8 = fallback_entry
             continue
 
-        logger.info(
+        logger.log(
+            TRACE,
             "%s passed initial chaos probing. Mean measured chaos is %f %%",
             encoding_iana,
             round(mean_mess_ratio * 100, ndigits=3),
@@ -385,10 +404,11 @@ def from_bytes(
             target_languages = mb_encoding_languages(encoding_iana)
 
         if target_languages:
-            logger.debug(
+            logger.log(
+                TRACE,
                 "{} should target any language(s) of {}".format(
                     encoding_iana, str(target_languages)
-                )
+                ),
             )
 
         cd_ratios = []
@@ -406,10 +426,11 @@ def from_bytes(
         cd_ratios_merged = merge_coherence_ratios(cd_ratios)
 
         if cd_ratios_merged:
-            logger.info(
+            logger.log(
+                TRACE,
                 "We detected language {} using {}".format(
                     cd_ratios_merged, encoding_iana
-                )
+                ),
             )
 
         results.append(
@@ -427,8 +448,8 @@ def from_bytes(
             encoding_iana in [specified_encoding, "ascii", "utf_8"]
             and mean_mess_ratio < 0.1
         ):
-            logger.info(
-                "%s is most likely the one. Stopping the process.", encoding_iana
+            logger.debug(
+                "Encoding detection: %s is most likely the one.", encoding_iana
             )
             if explain:
                 logger.removeHandler(explain_handler)
@@ -436,8 +457,9 @@ def from_bytes(
             return CharsetMatches([results[encoding_iana]])
 
         if encoding_iana == sig_encoding:
-            logger.info(
-                "%s is most likely the one as we detected a BOM or SIG within the beginning of the sequence.",
+            logger.debug(
+                "Encoding detection: %s is most likely the one as we detected a BOM or SIG within "
+                "the beginning of the sequence.",
                 encoding_iana,
             )
             if explain:
@@ -447,13 +469,15 @@ def from_bytes(
 
     if len(results) == 0:
         if fallback_u8 or fallback_ascii or fallback_specified:
-            logger.debug(
-                "Nothing got out of the detection process. Using ASCII/UTF-8/Specified fallback."
+            logger.log(
+                TRACE,
+                "Nothing got out of the detection process. Using ASCII/UTF-8/Specified fallback.",
            )
 
        if fallback_specified:
            logger.debug(
-                "%s will be used as a fallback match", fallback_specified.encoding
+                "Encoding detection: %s will be used as a fallback match",
+                fallback_specified.encoding,
            )
            results.append(fallback_specified)
        elif (
@@ -465,12 +489,21 @@ def from_bytes(
            )
            or (fallback_u8 is not None)
        ):
-            logger.warning("utf_8 will be used as a fallback match")
+            logger.debug("Encoding detection: utf_8 will be used as a fallback match")
            results.append(fallback_u8)
        elif fallback_ascii:
-            logger.warning("ascii will be used as a fallback match")
+            logger.debug("Encoding detection: ascii will be used as a fallback match")
            results.append(fallback_ascii)
 
+    if results:
+        logger.debug(
+            "Encoding detection: Found %s as plausible (best-candidate) for content. With %i alternatives.",
+            results.best().encoding,  # type: ignore
+            len(results) - 1,
+        )
+    else:
+        logger.debug("Encoding detection: Unable to determine any suitable charset.")
+
     if explain:
         logger.removeHandler(explain_handler)
         logger.setLevel(previous_logger_level)

@@ -498,3 +498,6 @@ ZH_NAMES = {"big5", "cp950", "big5hkscs", "hz"}  # type: Set[str]
 NOT_PRINTABLE_PATTERN = re_compile(r"[0-9\W\n\r\t]+")
 
 LANGUAGE_SUPPORTED_COUNT = len(FREQUENCIES)  # type: int
+
+# Logging LEVEL bellow DEBUG
+TRACE = 5  # type: int

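Note (not part of the diff): the hunks above move charset_normalizer's per-candidate probing messages from DEBUG/INFO/WARNING onto a custom TRACE level (numeric value 5, defined in constant.py above), so ordinary DEBUG output stays quiet. A minimal sketch of how a consumer could opt back in to those messages; the logger name and TRACE constant come from the diff, the handler and format choices are illustrative:

import logging

from charset_normalizer.constant import TRACE

# Give the custom numeric level a readable name (the library ships this line commented out).
logging.addLevelName(TRACE, "TRACE")

handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s | %(levelname)s | %(message)s"))

logger = logging.getLogger("charset_normalizer")
logger.addHandler(handler)
logger.setLevel(TRACE)  # opt in to the very verbose per-encoding probing output
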
@@ -2,5 +2,5 @@
 Expose version
 """
 
-__version__ = "2.0.10"
+__version__ = "2.0.11"
 VERSION = __version__.split(".")

@@ -12,7 +12,7 @@ import logging
 from logging import NullHandler
 
 __author__ = 'The OAuthlib Community'
-__version__ = '3.1.1'
+__version__ = '3.2.0'
 
 logging.getLogger('oauthlib').addHandler(NullHandler())
 
@@ -33,3 +33,4 @@ from .rfc6749.grant_types import (
 from .rfc6749.request_validator import RequestValidator
 from .rfc6749.tokens import BearerToken, OAuth2Token
 from .rfc6749.utils import is_secure_transport
+from .rfc8628.clients import DeviceClient

@@ -8,6 +8,10 @@ for consuming OAuth 2.0 RFC6749.
 """
 import time
 import warnings
+import secrets
+import re
+import hashlib
+import base64
 
 from oauthlib.common import generate_token
 from oauthlib.oauth2.rfc6749 import tokens
@@ -61,6 +65,9 @@ class Client:
                  state=None,
                  redirect_url=None,
                  state_generator=generate_token,
+                 code_verifier=None,
+                 code_challenge=None,
+                 code_challenge_method=None,
                  **kwargs):
         """Initialize a client with commonly used attributes.
 
@@ -99,6 +106,15 @@ class Client:
 
         :param state_generator: A no argument state generation callable. Defaults
                                 to :py:meth:`oauthlib.common.generate_token`.
+
+        :param code_verifier: PKCE parameter. A cryptographically random string that is used to correlate the
+                              authorization request to the token request.
+
+        :param code_challenge: PKCE parameter. A challenge derived from the code verifier that is sent in the
+                               authorization request, to be verified against later.
+
+        :param code_challenge_method: PKCE parameter. A method that was used to derive code challenge.
+                                      Defaults to "plain" if not present in the request.
         """
 
         self.client_id = client_id
@@ -113,6 +129,9 @@ class Client:
         self.state_generator = state_generator
         self.state = state
         self.redirect_url = redirect_url
+        self.code_verifier = code_verifier
+        self.code_challenge = code_challenge
+        self.code_challenge_method = code_challenge_method
         self.code = None
         self.expires_in = None
         self._expires_at = None
@@ -471,6 +490,91 @@ class Client:
             raise ValueError("Invalid token placement.")
         return uri, headers, body
 
+    def create_code_verifier(self, length):
+        """Create PKCE **code_verifier** used in computing **code_challenge**.
+
+        :param length: REQUIRED. The length of the code_verifier.
+
+        The client first creates a code verifier, "code_verifier", for each
+        OAuth 2.0 [RFC6749] Authorization Request, in the following manner:
+
+        code_verifier = high-entropy cryptographic random STRING using the
+        unreserved characters [A-Z] / [a-z] / [0-9] / "-" / "." / "_" / "~"
+        from Section 2.3 of [RFC3986], with a minimum length of 43 characters
+        and a maximum length of 128 characters.
+
+        .. _`Section 4.1`: https://tools.ietf.org/html/rfc7636#section-4.1
+        """
+        code_verifier = None
+
+        if not length >= 43:
+            raise ValueError("Length must be greater than or equal to 43")
+
+        if not length <= 128:
+            raise ValueError("Length must be less than or equal to 128")
+
+        allowed_characters = re.compile('^[A-Zaa-z0-9-._~]')
+        code_verifier = secrets.token_urlsafe(length)
+
+        if not re.search(allowed_characters, code_verifier):
+            raise ValueError("code_verifier contains invalid characters")
+
+        self.code_verifier = code_verifier
+
+        return code_verifier
+
+    def create_code_challenge(self, code_verifier, code_challenge_method=None):
+        """Create PKCE **code_challenge** derived from the **code_verifier**.
+
+        :param code_verifier: REQUIRED. The **code_verifier** generated from create_code_verifier().
+        :param code_challenge_method: OPTIONAL. The method used to derive the **code_challenge**. Acceptable
+                                      values include "S256". DEFAULT is "plain".
+
+
+        The client then creates a code challenge derived from the code
+        verifier by using one of the following transformations on the code
+        verifier:
+
+        plain
+            code_challenge = code_verifier
+
+        S256
+            code_challenge = BASE64URL-ENCODE(SHA256(ASCII(code_verifier)))
+
+        If the client is capable of using "S256", it MUST use "S256", as
+        "S256" is Mandatory To Implement (MTI) on the server. Clients are
+        permitted to use "plain" only if they cannot support "S256" for some
+        technical reason and know via out-of-band configuration that the
+        server supports "plain".
+
+        The plain transformation is for compatibility with existing
+        deployments and for constrained environments that can't use the S256
+        transformation.
+
+        .. _`Section 4.2`: https://tools.ietf.org/html/rfc7636#section-4.2
+        """
+        code_challenge = None
+
+        if code_verifier == None:
+            raise ValueError("Invalid code_verifier")
+
+        if code_challenge_method == None:
+            code_challenge_method = "plain"
+            self.code_challenge_method = code_challenge_method
+            code_challenge = code_verifier
+            self.code_challenge = code_challenge
+
+        if code_challenge_method == "S256":
+            h = hashlib.sha256()
+            h.update(code_verifier.encode(encoding='ascii'))
+            sha256_val = h.digest()
+            code_challenge = bytes.decode(base64.urlsafe_b64encode(sha256_val))
+            # replace '+' with '-', '/' with '_', and remove trailing '='
+            code_challenge = code_challenge.replace("+", "-").replace("/", "_").replace("=", "")
+            self.code_challenge = code_challenge
+
+        return code_challenge
+
     def _add_mac_token(self, uri, http_method='GET', body=None,
                        headers=None, token_placement=AUTH_HEADER, ext=None, **kwargs):
         """Add a MAC token to the request authorization header.
@@ -513,7 +617,10 @@ class Client:
             self._expires_at = time.time() + int(self.expires_in)
 
         if 'expires_at' in response:
-            self._expires_at = int(response.get('expires_at'))
+            try:
+                self._expires_at = int(response.get('expires_at'))
+            except:
+                self._expires_at = None
 
         if 'mac_key' in response:
             self.mac_key = response.get('mac_key')

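Note (not part of the diff): the new create_code_verifier()/create_code_challenge() helpers above implement the RFC 7636 (PKCE) derivation. A short sketch of their intended use, assuming the vendored oauthlib >= 3.2.0; the client id is a placeholder:

from oauthlib.oauth2 import WebApplicationClient

client = WebApplicationClient("your_client_id")

# length is validated against the RFC 7636 bounds (43..128); the result is also stored on client.code_verifier
verifier = client.create_code_verifier(length=64)

# "S256" => BASE64URL-ENCODE(SHA256(ASCII(verifier))) with '+'/'/' translated and '=' padding stripped
challenge = client.create_code_challenge(verifier, code_challenge_method="S256")
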
@@ -41,7 +41,7 @@ class WebApplicationClient(Client):
         self.code = code
 
     def prepare_request_uri(self, uri, redirect_uri=None, scope=None,
-                            state=None, **kwargs):
+                            state=None, code_challenge=None, code_challenge_method='plain', **kwargs):
         """Prepare the authorization code request URI
 
         The client constructs the request URI by adding the following
@@ -62,6 +62,13 @@ class WebApplicationClient(Client):
                        to the client. The parameter SHOULD be used for preventing
                        cross-site request forgery as described in `Section 10.12`_.
 
+        :param code_challenge: OPTIONAL. PKCE parameter. REQUIRED if PKCE is enforced.
+                               A challenge derived from the code_verifier that is sent in the
+                               authorization request, to be verified against later.
+
+        :param code_challenge_method: OPTIONAL. PKCE parameter. A method that was used to derive code challenge.
+                                      Defaults to "plain" if not present in the request.
+
         :param kwargs: Extra arguments to include in the request URI.
 
         In addition to supplied parameters, OAuthLib will append the ``client_id``
@@ -76,6 +83,10 @@ class WebApplicationClient(Client):
         'https://example.com?client_id=your_id&response_type=code&redirect_uri=https%3A%2F%2Fa.b%2Fcallback'
         >>> client.prepare_request_uri('https://example.com', scope=['profile', 'pictures'])
         'https://example.com?client_id=your_id&response_type=code&scope=profile+pictures'
+        >>> client.prepare_request_uri('https://example.com', code_challenge='kjasBS523KdkAILD2k78NdcJSk2k3KHG6')
+        'https://example.com?client_id=your_id&response_type=code&code_challenge=kjasBS523KdkAILD2k78NdcJSk2k3KHG6'
+        >>> client.prepare_request_uri('https://example.com', code_challenge_method='S256')
+        'https://example.com?client_id=your_id&response_type=code&code_challenge_method=S256'
         >>> client.prepare_request_uri('https://example.com', foo='bar')
         'https://example.com?client_id=your_id&response_type=code&foo=bar'
 
@@ -87,10 +98,11 @@ class WebApplicationClient(Client):
         """
         scope = self.scope if scope is None else scope
         return prepare_grant_uri(uri, self.client_id, 'code',
-                                 redirect_uri=redirect_uri, scope=scope, state=state, **kwargs)
+                                 redirect_uri=redirect_uri, scope=scope, state=state, code_challenge=code_challenge,
+                                 code_challenge_method=code_challenge_method, **kwargs)
 
     def prepare_request_body(self, code=None, redirect_uri=None, body='',
-                             include_client_id=True, **kwargs):
+                             include_client_id=True, code_verifier=None, **kwargs):
         """Prepare the access token request body.
 
         The client makes a request to the token endpoint by adding the
@@ -113,6 +125,9 @@ class WebApplicationClient(Client):
                                   authorization server as described in `Section 3.2.1`_.
         :type include_client_id: Boolean
 
+        :param code_verifier: OPTIONAL. A cryptographically random string that is used to correlate the
+                              authorization request to the token request.
+
         :param kwargs: Extra parameters to include in the token request.
 
         In addition OAuthLib will add the ``grant_type`` parameter set to
@@ -127,6 +142,8 @@ class WebApplicationClient(Client):
         >>> client = WebApplicationClient('your_id')
         >>> client.prepare_request_body(code='sh35ksdf09sf')
         'grant_type=authorization_code&code=sh35ksdf09sf'
+        >>> client.prepare_request_body(code_verifier='KB46DCKJ873NCGXK5GD682NHDKK34GR')
+        'grant_type=authorization_code&code_verifier=KB46DCKJ873NCGXK5GD682NHDKK34GR'
         >>> client.prepare_request_body(code='sh35ksdf09sf', foo='bar')
         'grant_type=authorization_code&code=sh35ksdf09sf&foo=bar'
 
@@ -154,7 +171,7 @@ class WebApplicationClient(Client):
         kwargs['client_id'] = self.client_id
         kwargs['include_client_id'] = include_client_id
         return prepare_token_request(self.grant_type, code=code, body=body,
-                                     redirect_uri=redirect_uri, **kwargs)
+                                     redirect_uri=redirect_uri, code_verifier=code_verifier, **kwargs)
 
     def parse_request_uri_response(self, uri, state=None):
         """Parse the URI query for code and state.

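Note (not part of the diff): combined with the base-class helpers, the new code_challenge/code_verifier parameters let WebApplicationClient drive a complete PKCE authorization-code exchange. A hedged sketch; the endpoint URLs, redirect URI, and returned code are placeholders:

from oauthlib.oauth2 import WebApplicationClient

client = WebApplicationClient("your_client_id")
verifier = client.create_code_verifier(length=64)
challenge = client.create_code_challenge(verifier, code_challenge_method="S256")

# 1. Send the challenge with the authorization request.
auth_url = client.prepare_request_uri(
    "https://provider.example/authorize",
    redirect_uri="https://app.example/callback",
    code_challenge=challenge,
    code_challenge_method="S256",
)

# 2. After the user returns with ?code=..., prove possession of the verifier at the token endpoint.
token_body = client.prepare_request_body(
    code="code_from_callback",
    redirect_uri="https://app.example/callback",
    code_verifier=verifier,
)
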
@@ -54,7 +54,8 @@ class MetadataEndpoint(BaseEndpoint):
         """Create metadata response
         """
         headers = {
-            'Content-Type': 'application/json'
+            'Content-Type': 'application/json',
+            'Access-Control-Allow-Origin': '*',
         }
         return headers, json.dumps(self.claims), 200
 
@@ -103,15 +103,12 @@ class OAuth2Error(Exception):
             value "Bearer". This scheme MUST be followed by one or more
             auth-param values.
             """
-            authvalues = [
-                "Bearer",
-                'error="{}"'.format(self.error)
-            ]
+            authvalues = ['error="{}"'.format(self.error)]
             if self.description:
                 authvalues.append('error_description="{}"'.format(self.description))
             if self.uri:
                 authvalues.append('error_uri="{}"'.format(self.uri))
-            return {"WWW-Authenticate": ", ".join(authvalues)}
+            return {"WWW-Authenticate": "Bearer " + ", ".join(authvalues)}
         return {}
 
 
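Note (not part of the diff): the change above folds the "Bearer" scheme into the header value itself instead of emitting it as a stray auth-param. A small sketch of the expected effect on a 401-class error, assuming oauthlib 3.2.0; the description text is arbitrary:

from oauthlib.oauth2.rfc6749.errors import InvalidClientError

err = InvalidClientError(description="Client authentication failed.")
# err.headers should now resemble:
# {'WWW-Authenticate': 'Bearer error="invalid_client", error_description="Client authentication failed."'}
print(err.headers)
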
@@ -10,6 +10,7 @@ import logging
 from oauthlib import common
 
 from .. import errors
+from ..utils import is_secure_transport
 from .base import GrantTypeBase
 
 log = logging.getLogger(__name__)
@@ -272,6 +273,8 @@ class AuthorizationCodeGrant(GrantTypeBase):
         grant = self.create_authorization_code(request)
         for modifier in self._code_modifiers:
             grant = modifier(grant, token_handler, request)
+        if 'access_token' in grant:
+            self.request_validator.save_token(grant, request)
         log.debug('Saving grant %r for %r.', grant, request)
         self.request_validator.save_authorization_code(
             request.client_id, grant, request)
@@ -310,6 +313,7 @@ class AuthorizationCodeGrant(GrantTypeBase):
         self.request_validator.save_token(token, request)
         self.request_validator.invalidate_authorization_code(
             request.client_id, request.code, request)
+        headers.update(self._create_cors_headers(request))
         return headers, json.dumps(token), 200
 
     def validate_authorization_request(self, request):
@@ -543,3 +547,20 @@ class AuthorizationCodeGrant(GrantTypeBase):
         if challenge_method in self._code_challenge_methods:
             return self._code_challenge_methods[challenge_method](verifier, challenge)
         raise NotImplementedError('Unknown challenge_method %s' % challenge_method)
+
+    def _create_cors_headers(self, request):
+        """If CORS is allowed, create the appropriate headers."""
+        if 'origin' not in request.headers:
+            return {}
+
+        origin = request.headers['origin']
+        if not is_secure_transport(origin):
+            log.debug('Origin "%s" is not HTTPS, CORS not allowed.', origin)
+            return {}
+        elif not self.request_validator.is_origin_allowed(
+            request.client_id, origin, request):
+            log.debug('Invalid origin "%s", CORS not allowed.', origin)
+            return {}
+        else:
+            log.debug('Valid origin "%s", injecting CORS headers.', origin)
+            return {'Access-Control-Allow-Origin': origin}

@@ -63,7 +63,7 @@ class RefreshTokenGrant(GrantTypeBase):
             refresh_token=self.issue_new_refresh_tokens)
 
         for modifier in self._token_modifiers:
-            token = modifier(token)
+            token = modifier(token, token_handler, request)
 
         self.request_validator.save_token(token, request)
 
@@ -23,7 +23,7 @@ from .utils import is_secure_transport, list_to_scope, scope_to_list
 
 
 def prepare_grant_uri(uri, client_id, response_type, redirect_uri=None,
-                      scope=None, state=None, **kwargs):
+                      scope=None, state=None, code_challenge=None, code_challenge_method='plain', **kwargs):
     """Prepare the authorization grant request URI.
 
     The client constructs the request URI by adding the following
@@ -45,6 +45,11 @@ def prepare_grant_uri(uri, client_id, response_type, redirect_uri=None,
                   back to the client. The parameter SHOULD be used for
                   preventing cross-site request forgery as described in
                   `Section 10.12`_.
+    :param code_challenge: PKCE paramater. A challenge derived from the
+                           code_verifier that is sent in the authorization
+                           request, to be verified against later.
+    :param code_challenge_method: PKCE parameter. A method that was used to derive the
+                                  code_challenge. Defaults to "plain" if not present in the request.
     :param kwargs: Extra arguments to embed in the grant/authorization URL.
 
     An example of an authorization code grant authorization URL:
@@ -52,6 +57,7 @@ def prepare_grant_uri(uri, client_id, response_type, redirect_uri=None,
     .. code-block:: http
 
         GET /authorize?response_type=code&client_id=s6BhdRkqt3&state=xyz
+            &code_challenge=kjasBS523KdkAILD2k78NdcJSk2k3KHG6&code_challenge_method=S256
             &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb HTTP/1.1
         Host: server.example.com
 
@@ -73,6 +79,9 @@ def prepare_grant_uri(uri, client_id, response_type, redirect_uri=None,
         params.append(('scope', list_to_scope(scope)))
     if state:
         params.append(('state', state))
+    if code_challenge is not None:
+        params.append(('code_challenge', code_challenge))
+        params.append(('code_challenge_method', code_challenge_method))
 
     for k in kwargs:
         if kwargs[k]:
@@ -81,7 +90,7 @@ def prepare_grant_uri(uri, client_id, response_type, redirect_uri=None,
     return add_params_to_uri(uri, params)
 
 
-def prepare_token_request(grant_type, body='', include_client_id=True, **kwargs):
+def prepare_token_request(grant_type, body='', include_client_id=True, code_verifier=None, **kwargs):
     """Prepare the access token request.
 
     The client makes a request to the token endpoint by adding the
@@ -116,6 +125,9 @@ def prepare_token_request(grant_type, body='', include_client_id=True, **kwargs)
             authorization request as described in
             `Section 4.1.1`_, and their values MUST be identical. *
 
+    :param code_verifier: PKCE parameter. A cryptographically random string that is used to correlate the
+                          authorization request to the token request.
+
     :param kwargs: Extra arguments to embed in the request body.
 
     Parameters marked with a `*` above are not explicit arguments in the
@@ -142,6 +154,10 @@ def prepare_token_request(grant_type, body='', include_client_id=True, **kwargs)
     if client_id is not None:
         params.append(('client_id', client_id))
 
+    # use code_verifier if code_challenge was passed in the authorization request
+    if code_verifier is not None:
+        params.append(('code_verifier', code_verifier))
+
     # the kwargs iteration below only supports including boolean truth (truthy)
     # values, but some servers may require an empty string for `client_secret`
     client_secret = kwargs.pop('client_secret', None)

@@ -649,3 +649,28 @@ class RequestValidator:
 
         """
         raise NotImplementedError('Subclasses must implement this method.')
+
+    def is_origin_allowed(self, client_id, origin, request, *args, **kwargs):
+        """Indicate if the given origin is allowed to access the token endpoint
+        via Cross-Origin Resource Sharing (CORS). CORS is used by browser-based
+        clients, such as Single-Page Applications, to perform the Authorization
+        Code Grant.
+
+        (Note: If performing Authorization Code Grant via a public client such
+        as a browser, you should use PKCE as well.)
+
+        If this method returns true, the appropriate CORS headers will be added
+        to the response. By default this method always returns False, meaning
+        CORS is disabled.
+
+        :param client_id: Unicode client identifier.
+        :param redirect_uri: Unicode origin.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: bool
+
+        Method is used by:
+            - Authorization Code Grant
+
+        """
+        return False

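Note (not part of the diff): is_origin_allowed() defaults to False, so the new CORS handling in the authorization-code grant stays disabled until a validator opts in. A minimal sketch of an override, assuming an application-specific allow-list:

from oauthlib.oauth2 import RequestValidator

ALLOWED_ORIGINS = {"https://spa.example.com"}  # hypothetical application setting

class MyRequestValidator(RequestValidator):
    def is_origin_allowed(self, client_id, origin, request, *args, **kwargs):
        # Only secure (HTTPS) origins reach this hook; returning True makes the grant
        # add Access-Control-Allow-Origin to the token response.
        return origin in ALLOWED_ORIGINS
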
lib/oauthlib/oauth2/rfc8628/__init__.py (new file, 10 lines)
@@ -0,0 +1,10 @@
+"""
+oauthlib.oauth2.rfc8628
+~~~~~~~~~~~~~~~~~~~~~~~
+
+This module is an implementation of various logic needed
+for consuming and providing OAuth 2.0 Device Authorization RFC8628.
+"""
+import logging
+
+log = logging.getLogger(__name__)

lib/oauthlib/oauth2/rfc8628/clients/__init__.py (new file, 8 lines)
@@ -0,0 +1,8 @@
+"""
+oauthlib.oauth2.rfc8628
+~~~~~~~~~~~~~~~~~~~~~~~
+
+This module is an implementation of various logic needed
+for consuming OAuth 2.0 Device Authorization RFC8628.
+"""
+from .device import DeviceClient

lib/oauthlib/oauth2/rfc8628/clients/device.py (new file, 94 lines)
@@ -0,0 +1,94 @@
+"""
+oauthlib.oauth2.rfc8628
+~~~~~~~~~~~~~~~~~~~~~~~
+
+This module is an implementation of various logic needed
+for consuming and providing OAuth 2.0 Device Authorization RFC8628.
+"""
+
+from oauthlib.oauth2 import BackendApplicationClient, Client
+from oauthlib.oauth2.rfc6749.errors import InsecureTransportError
+from oauthlib.oauth2.rfc6749.parameters import prepare_token_request
+from oauthlib.oauth2.rfc6749.utils import is_secure_transport, list_to_scope
+from oauthlib.common import add_params_to_uri
+
+
+class DeviceClient(Client):
+
+    """A public client utilizing the device authorization workflow.
+
+    The client can request an access token using a device code and
+    a public client id associated with the device code as defined
+    in RFC8628.
+
+    The device authorization grant type can be used to obtain both
+    access tokens and refresh tokens and is intended to be used in
+    a scenario where the device being authorized does not have a
+    user interface that is suitable for performing authentication.
+    """
+
+    grant_type = 'urn:ietf:params:oauth:grant-type:device_code'
+
+    def __init__(self, client_id, **kwargs):
+        super().__init__(client_id, **kwargs)
+        self.client_secret = kwargs.get('client_secret')
+
+    def prepare_request_uri(self, uri, scope=None, **kwargs):
+        if not is_secure_transport(uri):
+            raise InsecureTransportError()
+
+        scope = self.scope if scope is None else scope
+        params = [(('client_id', self.client_id)), (('grant_type', self.grant_type))]
+
+        if self.client_secret is not None:
+            params.append(('client_secret', self.client_secret))
+
+        if scope:
+            params.append(('scope', list_to_scope(scope)))
+
+        for k in kwargs:
+            if kwargs[k]:
+                params.append((str(k), kwargs[k]))
+
+        return add_params_to_uri(uri, params)
+
+    def prepare_request_body(self, device_code, body='', scope=None,
+                             include_client_id=False, **kwargs):
+        """Add device_code to request body
+
+        The client makes a request to the token endpoint by adding the
+        device_code as a parameter using the
+        "application/x-www-form-urlencoded" format to the HTTP request
+        body.
+
+        :param body: Existing request body (URL encoded string) to embed parameters
+                     into. This may contain extra paramters. Default ''.
+        :param scope: The scope of the access request as described by
+                      `Section 3.3`_.
+
+        :param include_client_id: `True` to send the `client_id` in the
+                                  body of the upstream request. This is required
+                                  if the client is not authenticating with the
+                                  authorization server as described in
+                                  `Section 3.2.1`_. False otherwise (default).
+        :type include_client_id: Boolean
+
+        :param kwargs: Extra credentials to include in the token request.
+
+        The prepared body will include all provided device_code as well as
+        the ``grant_type`` parameter set to
+        ``urn:ietf:params:oauth:grant-type:device_code``::
+
+            >>> from oauthlib.oauth2 import DeviceClient
+            >>> client = DeviceClient('your_id', 'your_code')
+            >>> client.prepare_request_body(scope=['hello', 'world'])
+            'grant_type=urn:ietf:params:oauth:grant-type:device_code&scope=hello+world'
+
+        .. _`Section 3.4`: https://datatracker.ietf.org/doc/html/rfc8628#section-3.4
+        """
+
+        kwargs['client_id'] = self.client_id
+        kwargs['include_client_id'] = include_client_id
+        scope = self.scope if scope is None else scope
+        return prepare_token_request(self.grant_type, body=body, device_code=device_code,
+                                     scope=scope, **kwargs)

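Note (not part of the diff): DeviceClient only prepares RFC 8628 requests; sending them is left to the caller. A hedged sketch of the token poll, assuming the requests library is available; the token endpoint and device code are placeholders obtained from the provider's device authorization endpoint:

import requests
from oauthlib.oauth2 import DeviceClient

client = DeviceClient("your_client_id")
body = client.prepare_request_body(device_code="device_code_from_provider")

resp = requests.post(
    "https://provider.example/token",  # placeholder token endpoint
    data=body,
    headers={"Content-Type": "application/x-www-form-urlencoded"},
)
# parse_request_body_response() is inherited from Client; it validates the response and stores the token.
token = client.parse_request_body_response(resp.text)
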
@@ -10,3 +10,4 @@ from .dispatchers import (
 )
 from .hybrid import HybridGrant
 from .implicit import ImplicitGrant
+from .refresh_token import RefreshTokenGrant

@@ -0,0 +1,34 @@
+"""
+oauthlib.openid.connect.core.grant_types
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+"""
+import logging
+
+from oauthlib.oauth2.rfc6749.grant_types.refresh_token import (
+    RefreshTokenGrant as OAuth2RefreshTokenGrant,
+)
+
+from .base import GrantTypeBase
+
+log = logging.getLogger(__name__)
+
+
+class RefreshTokenGrant(GrantTypeBase):
+
+    def __init__(self, request_validator=None, **kwargs):
+        self.proxy_target = OAuth2RefreshTokenGrant(
+            request_validator=request_validator, **kwargs)
+        self.register_token_modifier(self.add_id_token)
+
+    def add_id_token(self, token, token_handler, request):
+        """
+        Construct an initial version of id_token, and let the
+        request_validator sign or encrypt it.
+
+        The authorization_code version of this method is used to
+        retrieve the nonce accordingly to the code storage.
+        """
+        if not self.request_validator.refresh_id_token(request):
+            return token
+
+        return super().add_id_token(token, token_handler, request)

@@ -306,3 +306,15 @@ class RequestValidator(OAuth2RequestValidator):
         Method is used by:
             UserInfoEndpoint
         """
+
+    def refresh_id_token(self, request):
+        """Whether the id token should be refreshed. Default, True
+
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: True or False
+
+        Method is used by:
+            RefreshTokenGrant
+        """
+        return True

@@ -5,7 +5,7 @@ from .oauth1_session import OAuth1Session
 from .oauth2_auth import OAuth2
 from .oauth2_session import OAuth2Session, TokenUpdated
 
-__version__ = "1.3.0"
+__version__ = "1.3.1"
 
 import requests
 
@@ -2,9 +2,9 @@ from __future__ import absolute_import
 
 from .facebook import facebook_compliance_fix
 from .fitbit import fitbit_compliance_fix
-from .linkedin import linkedin_compliance_fix
 from .slack import slack_compliance_fix
 from .instagram import instagram_compliance_fix
 from .mailchimp import mailchimp_compliance_fix
 from .weibo import weibo_compliance_fix
 from .plentymarkets import plentymarkets_compliance_fix
+from .ebay import ebay_compliance_fix

23
lib/requests_oauthlib/compliance_fixes/ebay.py
Normal file
23
lib/requests_oauthlib/compliance_fixes/ebay.py
Normal file
|
@ -0,0 +1,23 @@
|
||||||
|
import json
|
||||||
|
from oauthlib.common import to_unicode
|
||||||
|
|
||||||
|
|
||||||
|
def ebay_compliance_fix(session):
|
||||||
|
def _compliance_fix(response):
|
||||||
|
token = json.loads(response.text)
|
||||||
|
|
||||||
|
# eBay responds with non-compliant token types.
|
||||||
|
# https://developer.ebay.com/api-docs/static/oauth-client-credentials-grant.html
|
||||||
|
# https://developer.ebay.com/api-docs/static/oauth-auth-code-grant-request.html
|
||||||
|
# Modify these to be "Bearer".
|
||||||
|
if token.get("token_type") in ["Application Access Token", "User Access Token"]:
|
||||||
|
token["token_type"] = "Bearer"
|
||||||
|
fixed_token = json.dumps(token)
|
||||||
|
response._content = to_unicode(fixed_token).encode("utf-8")
|
||||||
|
|
||||||
|
return response
|
||||||
|
|
||||||
|
session.register_compliance_hook("access_token_response", _compliance_fix)
|
||||||
|
session.register_compliance_hook("refresh_token_response", _compliance_fix)
|
||||||
|
|
||||||
|
return session
|
|
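Note (not part of the diff): the fix is applied by wrapping an OAuth2Session, the same registration pattern the other compliance fixes use. A minimal sketch with placeholder credentials:

from requests_oauthlib import OAuth2Session
from requests_oauthlib.compliance_fixes import ebay_compliance_fix

session = OAuth2Session(client_id="your_app_id", redirect_uri="https://app.example/callback")
session = ebay_compliance_fix(session)
# Token responses fetched through this session now report token_type "Bearer",
# even though eBay returns "User Access Token" / "Application Access Token".
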
@@ -1,21 +0,0 @@
-from json import loads, dumps
-
-from oauthlib.common import add_params_to_uri, to_unicode
-
-
-def linkedin_compliance_fix(session):
-    def _missing_token_type(r):
-        token = loads(r.text)
-        token["token_type"] = "Bearer"
-        r._content = to_unicode(dumps(token)).encode("UTF-8")
-        return r
-
-    def _non_compliant_param_name(url, headers, data):
-        token = [("oauth2_access_token", session.access_token)]
-        url = add_params_to_uri(url, token)
-        return url, headers, data
-
-    session._client.default_token_placement = "query"
-    session.register_compliance_hook("access_token_response", _missing_token_type)
-    session.register_compliance_hook("protected_request", _non_compliant_param_name)
-    return session

@@ -189,6 +189,7 @@ class OAuth2Session(requests.Session):
         proxies=None,
         include_client_id=None,
         client_secret=None,
+        cert=None,
         **kwargs
     ):
         """Generic method for fetching an access token from the token endpoint.
@@ -229,6 +230,10 @@ class OAuth2Session(requests.Session):
                               `auth` tuple. If the value is `None`, it will be
                               omitted from the request, however if the value is
                               an empty string, an empty string will be sent.
+        :param cert: Client certificate to send for OAuth 2.0 Mutual-TLS Client
+                     Authentication (draft-ietf-oauth-mtls). Can either be the
+                     path of a file containing the private key and certificate or
+                     a tuple of two filenames for certificate and key.
         :param kwargs: Extra parameters to include in the token request.
         :return: A token dict
         """
@@ -341,6 +346,7 @@ class OAuth2Session(requests.Session):
             auth=auth,
             verify=verify,
             proxies=proxies,
+            cert=cert,
             **request_kwargs
         )
 
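Note (not part of the diff): the new cert argument is passed straight through to requests, so mutual-TLS client authentication uses the same conventions as any other requests call. A hedged sketch for a client-credentials flow; the endpoint and file paths are placeholders:

from oauthlib.oauth2 import BackendApplicationClient
from requests_oauthlib import OAuth2Session

client = BackendApplicationClient(client_id="your_client_id")
session = OAuth2Session(client=client)

token = session.fetch_token(
    "https://provider.example/token",                      # placeholder token endpoint
    cert=("/path/to/client.crt", "/path/to/client.key"),   # or one combined PEM file path
)
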
@@ -24,7 +24,6 @@ IPy==1.01
 Mako==1.1.6
 MarkupSafe==2.0.1
 musicbrainzngs==0.7.1
-oauthlib==3.1.1
 packaging==21.3
 paho-mqtt==1.6.1
 plexapi==4.9.1
@@ -36,7 +35,7 @@ python-dateutil==2.8.2
 python-twitter==3.5
 pytz==2021.3
 requests==2.27.1
-requests-oauthlib==1.3.0
+requests-oauthlib==1.3.1
 rumps==0.3.0; platform_system == "Darwin"
 simplejson==3.17.6
 six==1.16.0