Merge branch 'nightly' into dependabot/pip/nightly/certifi-2024.2.2

JonnyWong16 2024-03-24 15:22:58 -07:00 committed by GitHub
commit 107ffbc07f
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
113 changed files with 4926 additions and 2964 deletions

View file

@@ -47,7 +47,7 @@ jobs:
           version: latest
       - name: Cache Docker Layers
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         with:
           path: /tmp/.buildx-cache
           key: ${{ runner.os }}-buildx-${{ github.sha }}

View file

@@ -129,7 +129,7 @@ jobs:
           echo "$EOF" >> $GITHUB_OUTPUT
       - name: Create Release
-        uses: softprops/action-gh-release@v1
+        uses: softprops/action-gh-release@v2
         id: create_release
         env:
           GITHUB_TOKEN: ${{ secrets.GHACTIONS_TOKEN }}

View file

@@ -1 +1 @@
-__version__ = "1.2.3"
+__version__ = "1.3.0"

View file

@@ -168,9 +168,9 @@ class Arrow:
             isinstance(tzinfo, dt_tzinfo)
             and hasattr(tzinfo, "localize")
             and hasattr(tzinfo, "zone")
-            and tzinfo.zone  # type: ignore[attr-defined]
+            and tzinfo.zone
         ):
-            tzinfo = parser.TzinfoParser.parse(tzinfo.zone)  # type: ignore[attr-defined]
+            tzinfo = parser.TzinfoParser.parse(tzinfo.zone)
         elif isinstance(tzinfo, str):
             tzinfo = parser.TzinfoParser.parse(tzinfo)
@@ -495,7 +495,7 @@ class Arrow:
             yield current

             values = [getattr(current, f) for f in cls._ATTRS]
-            current = cls(*values, tzinfo=tzinfo).shift(  # type: ignore
+            current = cls(*values, tzinfo=tzinfo).shift(  # type: ignore[misc]
                 **{frame_relative: relative_steps}
             )
@@ -578,7 +578,7 @@ class Arrow:
         for _ in range(3 - len(values)):
             values.append(1)

-        floor = self.__class__(*values, tzinfo=self.tzinfo)  # type: ignore
+        floor = self.__class__(*values, tzinfo=self.tzinfo)  # type: ignore[misc]

         if frame_absolute == "week":
             # if week_start is greater than self.isoweekday() go back one week by setting delta = 7
@@ -792,7 +792,6 @@ class Arrow:
         return self._datetime.isoformat()

     def __format__(self, formatstr: str) -> str:
-
         if len(formatstr) > 0:
             return self.format(formatstr)
@@ -804,7 +803,6 @@ class Arrow:
     # attributes and properties

     def __getattr__(self, name: str) -> int:
-
         if name == "week":
             return self.isocalendar()[1]
@@ -965,7 +963,6 @@ class Arrow:
         absolute_kwargs = {}

         for key, value in kwargs.items():
             if key in self._ATTRS:
                 absolute_kwargs[key] = value
             elif key in ["week", "quarter"]:
@@ -1022,7 +1019,6 @@ class Arrow:
         additional_attrs = ["weeks", "quarters", "weekday"]

         for key, value in kwargs.items():
             if key in self._ATTRS_PLURAL or key in additional_attrs:
                 relative_kwargs[key] = value
             else:
@@ -1259,11 +1255,10 @@ class Arrow:
                     )

             if trunc(abs(delta)) != 1:
-                granularity += "s"  # type: ignore
+                granularity += "s"  # type: ignore[assignment]

             return locale.describe(granularity, delta, only_distance=only_distance)

         else:
             if not granularity:
                 raise ValueError(
                     "Empty granularity list provided. "
@@ -1314,7 +1309,7 @@ class Arrow:
     def dehumanize(self, input_string: str, locale: str = "en_us") -> "Arrow":
         """Returns a new :class:`Arrow <arrow.arrow.Arrow>` object, that represents
-        the time difference relative to the attrbiutes of the
+        the time difference relative to the attributes of the
         :class:`Arrow <arrow.arrow.Arrow>` object.

         :param timestring: a ``str`` representing a humanized relative time.
@@ -1367,7 +1362,6 @@ class Arrow:
         # Search input string for each time unit within locale
         for unit, unit_object in locale_obj.timeframes.items():
             # Need to check the type of unit_object to create the correct dictionary
             if isinstance(unit_object, Mapping):
                 strings_to_search = unit_object
@@ -1378,7 +1372,6 @@ class Arrow:
             # Needs to cycle all through strings as some locales have strings that
             # could overlap in a regex match, since input validation isn't being performed.
             for time_delta, time_string in strings_to_search.items():
                 # Replace {0} with regex \d representing digits
                 search_string = str(time_string)
                 search_string = search_string.format(r"\d+")
@@ -1419,7 +1412,7 @@ class Arrow:
         # Assert error if string does not modify any units
         if not any([True for k, v in unit_visited.items() if v]):
             raise ValueError(
-                "Input string not valid. Note: Some locales do not support the week granulairty in Arrow. "
+                "Input string not valid. Note: Some locales do not support the week granularity in Arrow. "
                 "If you are attempting to use the week granularity on an unsupported locale, this could be the cause of this error."
             )
@@ -1718,7 +1711,6 @@ class Arrow:
     # math

     def __add__(self, other: Any) -> "Arrow":
-
         if isinstance(other, (timedelta, relativedelta)):
             return self.fromdatetime(self._datetime + other, self._datetime.tzinfo)
@@ -1736,7 +1728,6 @@ class Arrow:
         pass  # pragma: no cover

     def __sub__(self, other: Any) -> Union[timedelta, "Arrow"]:
-
         if isinstance(other, (timedelta, relativedelta)):
             return self.fromdatetime(self._datetime - other, self._datetime.tzinfo)
@@ -1749,7 +1740,6 @@ class Arrow:
         return NotImplemented

     def __rsub__(self, other: Any) -> timedelta:
-
         if isinstance(other, dt_datetime):
             return other - self._datetime
@@ -1758,42 +1748,36 @@ class Arrow:
     # comparisons

     def __eq__(self, other: Any) -> bool:
-
         if not isinstance(other, (Arrow, dt_datetime)):
             return False

         return self._datetime == self._get_datetime(other)

     def __ne__(self, other: Any) -> bool:
-
         if not isinstance(other, (Arrow, dt_datetime)):
             return True

         return not self.__eq__(other)

     def __gt__(self, other: Any) -> bool:
-
         if not isinstance(other, (Arrow, dt_datetime)):
             return NotImplemented

         return self._datetime > self._get_datetime(other)

     def __ge__(self, other: Any) -> bool:
-
         if not isinstance(other, (Arrow, dt_datetime)):
             return NotImplemented

         return self._datetime >= self._get_datetime(other)

     def __lt__(self, other: Any) -> bool:
-
         if not isinstance(other, (Arrow, dt_datetime)):
             return NotImplemented

         return self._datetime < self._get_datetime(other)

     def __le__(self, other: Any) -> bool:
-
         if not isinstance(other, (Arrow, dt_datetime)):
             return NotImplemented
@@ -1865,7 +1849,6 @@ class Arrow:
     def _get_iteration_params(cls, end: Any, limit: Optional[int]) -> Tuple[Any, int]:
         """Sets default end and limit values for range method."""
         if end is None:
             if limit is None:
                 raise ValueError("One of 'end' or 'limit' is required.")
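Editor's note: most of the arrow.py hunks above narrow bare "# type: ignore" comments to specific mypy error codes. A minimal illustration (not part of the commit, assuming mypy with the ignore-without-code check enabled) of why the coded form is preferred:

from typing import Literal

Frame = Literal["day", "week"]

def pluralize(frame: Frame) -> str:
    # Appending "s" yields a plain str, so mypy reports an [assignment]
    # error on the Literal-typed variable; the coded ignore hides only
    # that error class instead of everything on the line.
    frame += "s"  # type: ignore[assignment]
    return frame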

View file

@@ -267,11 +267,9 @@ class ArrowFactory:
                 raise TypeError(f"Cannot parse single argument of type {type(arg)!r}.")

         elif arg_count == 2:
             arg_1, arg_2 = args[0], args[1]

             if isinstance(arg_1, datetime):
                 # (datetime, tzinfo/str) -> fromdatetime @ tzinfo
                 if isinstance(arg_2, (dt_tzinfo, str)):
                     return self.type.fromdatetime(arg_1, tzinfo=arg_2)
@@ -281,7 +279,6 @@ class ArrowFactory:
                 )

             elif isinstance(arg_1, date):
                 # (date, tzinfo/str) -> fromdate @ tzinfo
                 if isinstance(arg_2, (dt_tzinfo, str)):
                     return self.type.fromdate(arg_1, tzinfo=arg_2)

View file

@@ -29,7 +29,6 @@ FORMAT_W3C: Final[str] = "YYYY-MM-DD HH:mm:ssZZ"

 class DateTimeFormatter:
-
     # This pattern matches characters enclosed in square brackets are matched as
     # an atomic group. For more info on atomic groups and how to they are
     # emulated in Python's re library, see https://stackoverflow.com/a/13577411/2701578
@@ -41,18 +40,15 @@ class DateTimeFormatter:
     locale: locales.Locale

     def __init__(self, locale: str = DEFAULT_LOCALE) -> None:
-
         self.locale = locales.get_locale(locale)

     def format(cls, dt: datetime, fmt: str) -> str:
-
         # FIXME: _format_token() is nullable
         return cls._FORMAT_RE.sub(
             lambda m: cast(str, cls._format_token(dt, m.group(0))), fmt
         )

     def _format_token(self, dt: datetime, token: Optional[str]) -> Optional[str]:
-
         if token and token.startswith("[") and token.endswith("]"):
             return token[1:-1]

View file

@@ -129,7 +129,6 @@ class Locale:
         _locale_map[locale_name.lower().replace("_", "-")] = cls

     def __init__(self) -> None:
-
         self._month_name_to_ordinal = None

     def describe(
@@ -174,7 +173,7 @@ class Locale:
             # Needed to determine the correct relative string to use
             timeframe_value = 0

-            for _unit_name, unit_value in timeframes:
+            for _, unit_value in timeframes:
                 if trunc(unit_value) != 0:
                     timeframe_value = trunc(unit_value)
                     break
@@ -285,7 +284,6 @@ class Locale:
         timeframe: TimeFrameLiteral,
         delta: Union[float, int],
     ) -> str:
-
         if timeframe == "now":
             return humanized
@@ -425,7 +423,7 @@ class ItalianLocale(Locale):
         "hours": "{0} ore",
         "day": "un giorno",
         "days": "{0} giorni",
-        "week": "una settimana,",
+        "week": "una settimana",
         "weeks": "{0} settimane",
         "month": "un mese",
         "months": "{0} mesi",
@@ -867,14 +865,16 @@ class FinnishLocale(Locale):
     timeframes: ClassVar[Mapping[TimeFrameLiteral, Union[str, Mapping[str, str]]]] = {
         "now": "juuri nyt",
-        "second": "sekunti",
-        "seconds": {"past": "{0} muutama sekunti", "future": "{0} muutaman sekunnin"},
+        "second": {"past": "sekunti", "future": "sekunnin"},
+        "seconds": {"past": "{0} sekuntia", "future": "{0} sekunnin"},
         "minute": {"past": "minuutti", "future": "minuutin"},
         "minutes": {"past": "{0} minuuttia", "future": "{0} minuutin"},
         "hour": {"past": "tunti", "future": "tunnin"},
         "hours": {"past": "{0} tuntia", "future": "{0} tunnin"},
-        "day": "päivä",
+        "day": {"past": "päivä", "future": "päivän"},
         "days": {"past": "{0} päivää", "future": "{0} päivän"},
+        "week": {"past": "viikko", "future": "viikon"},
+        "weeks": {"past": "{0} viikkoa", "future": "{0} viikon"},
         "month": {"past": "kuukausi", "future": "kuukauden"},
         "months": {"past": "{0} kuukautta", "future": "{0} kuukauden"},
         "year": {"past": "vuosi", "future": "vuoden"},
@@ -1887,7 +1887,7 @@ class GermanBaseLocale(Locale):
     future = "in {0}"
     and_word = "und"

-    timeframes = {
+    timeframes: ClassVar[Dict[TimeFrameLiteral, str]] = {
         "now": "gerade eben",
         "second": "einer Sekunde",
         "seconds": "{0} Sekunden",
@@ -1982,7 +1982,9 @@ class GermanBaseLocale(Locale):
             return super().describe(timeframe, delta, only_distance)

         # German uses a different case without 'in' or 'ago'
-        humanized = self.timeframes_only_distance[timeframe].format(trunc(abs(delta)))
+        humanized: str = self.timeframes_only_distance[timeframe].format(
+            trunc(abs(delta))
+        )

         return humanized
@@ -2547,6 +2549,8 @@ class ArabicLocale(Locale):
         "hours": {"2": "ساعتين", "ten": "{0} ساعات", "higher": "{0} ساعة"},
         "day": "يوم",
         "days": {"2": "يومين", "ten": "{0} أيام", "higher": "{0} يوم"},
+        "week": "اسبوع",
+        "weeks": {"2": "اسبوعين", "ten": "{0} أسابيع", "higher": "{0} اسبوع"},
         "month": "شهر",
         "months": {"2": "شهرين", "ten": "{0} أشهر", "higher": "{0} شهر"},
         "year": "سنة",
@@ -3709,6 +3713,8 @@ class HungarianLocale(Locale):
         "hours": {"past": "{0} órával", "future": "{0} óra"},
         "day": {"past": "egy nappal", "future": "egy nap"},
         "days": {"past": "{0} nappal", "future": "{0} nap"},
+        "week": {"past": "egy héttel", "future": "egy hét"},
+        "weeks": {"past": "{0} héttel", "future": "{0} hét"},
         "month": {"past": "egy hónappal", "future": "egy hónap"},
         "months": {"past": "{0} hónappal", "future": "{0} hónap"},
         "year": {"past": "egy évvel", "future": "egy év"},
@@ -3934,7 +3940,6 @@ class ThaiLocale(Locale):

 class LaotianLocale(Locale):
-
     names = ["lo", "lo-la"]

     past = "{0} ກ່ອນຫນ້ານີ້"
@@ -4119,6 +4124,7 @@ class BengaliLocale(Locale):
             return f"{n}র্থ"
         if n == 6:
             return f"{n}ষ্ঠ"
+        return ""


 class RomanshLocale(Locale):
@@ -4137,6 +4143,8 @@ class RomanshLocale(Locale):
         "hours": "{0} ura",
         "day": "in di",
         "days": "{0} dis",
+        "week": "in'emna",
+        "weeks": "{0} emnas",
         "month": "in mais",
         "months": "{0} mais",
         "year": "in onn",
@@ -5399,7 +5407,7 @@ class LuxembourgishLocale(Locale):
     future = "an {0}"
     and_word = "an"

-    timeframes = {
+    timeframes: ClassVar[Dict[TimeFrameLiteral, str]] = {
         "now": "just elo",
         "second": "enger Sekonn",
         "seconds": "{0} Sekonnen",
@@ -5487,7 +5495,9 @@ class LuxembourgishLocale(Locale):
             return super().describe(timeframe, delta, only_distance)

         # Luxembourgish uses a different case without 'in' or 'ago'
-        humanized = self.timeframes_only_distance[timeframe].format(trunc(abs(delta)))
+        humanized: str = self.timeframes_only_distance[timeframe].format(
+            trunc(abs(delta))
+        )

         return humanized
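Editor's note: the locale hunks above mostly add the previously missing "week"/"weeks" timeframes (Finnish, Arabic, Hungarian, Romansh). A hedged usage sketch, not from the diff, of what those entries enable; exact wording comes from the tables above:

import arrow

two_weeks_ago = arrow.utcnow().shift(weeks=-2)
# Week-granularity humanization now has entries to draw on for these locales.
print(two_weeks_ago.humanize(locale="fi", granularity="week"))
print(two_weeks_ago.humanize(locale="hu", granularity="week"))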

View file

@@ -159,7 +159,6 @@ class DateTimeParser:
     _input_re_map: Dict[_FORMAT_TYPE, Pattern[str]]

     def __init__(self, locale: str = DEFAULT_LOCALE, cache_size: int = 0) -> None:
-
         self.locale = locales.get_locale(locale)
         self._input_re_map = self._BASE_INPUT_RE_MAP.copy()
         self._input_re_map.update(
@@ -196,7 +195,6 @@ class DateTimeParser:
     def parse_iso(
         self, datetime_string: str, normalize_whitespace: bool = False
     ) -> datetime:
-
         if normalize_whitespace:
             datetime_string = re.sub(r"\s+", " ", datetime_string.strip())
@@ -236,13 +234,14 @@ class DateTimeParser:
         ]

         if has_time:
             if has_space_divider:
                 date_string, time_string = datetime_string.split(" ", 1)
             else:
                 date_string, time_string = datetime_string.split("T", 1)

-            time_parts = re.split(r"[\+\-Z]", time_string, 1, re.IGNORECASE)
+            time_parts = re.split(
+                r"[\+\-Z]", time_string, maxsplit=1, flags=re.IGNORECASE
+            )

             time_components: Optional[Match[str]] = self._TIME_RE.match(time_parts[0])
@@ -303,7 +302,6 @@ class DateTimeParser:
         fmt: Union[List[str], str],
         normalize_whitespace: bool = False,
     ) -> datetime:
-
         if normalize_whitespace:
             datetime_string = re.sub(r"\s+", " ", datetime_string)
@@ -341,12 +339,11 @@ class DateTimeParser:
                     f"Unable to find a match group for the specified token {token!r}."
                 )

-            self._parse_token(token, value, parts)  # type: ignore
+            self._parse_token(token, value, parts)  # type: ignore[arg-type]

         return self._build_datetime(parts)

     def _generate_pattern_re(self, fmt: str) -> Tuple[List[_FORMAT_TYPE], Pattern[str]]:
-
         # fmt is a string of tokens like 'YYYY-MM-DD'
         # we construct a new string by replacing each
         # token by its pattern:
@@ -498,7 +495,6 @@ class DateTimeParser:
         value: Any,
         parts: _Parts,
     ) -> None:
-
         if token == "YYYY":
             parts["year"] = int(value)
@@ -508,7 +504,7 @@ class DateTimeParser:
         elif token in ["MMMM", "MMM"]:
             # FIXME: month_number() is nullable
-            parts["month"] = self.locale.month_number(value.lower())  # type: ignore
+            parts["month"] = self.locale.month_number(value.lower())  # type: ignore[typeddict-item]
         elif token in ["MM", "M"]:
             parts["month"] = int(value)
@@ -588,7 +584,6 @@ class DateTimeParser:
         weekdate = parts.get("weekdate")

         if weekdate is not None:
             year, week = int(weekdate[0]), int(weekdate[1])

             if weekdate[2] is not None:
@@ -712,7 +707,6 @@ class DateTimeParser:
             )

     def _parse_multiformat(self, string: str, formats: Iterable[str]) -> datetime:
-
         _datetime: Optional[datetime] = None

         for fmt in formats:
@@ -740,12 +734,11 @@ class DateTimeParser:
 class TzinfoParser:
     _TZINFO_RE: ClassVar[Pattern[str]] = re.compile(
-        r"^([\+\-])?(\d{2})(?:\:?(\d{2}))?$"
+        r"^(?:\(UTC)*([\+\-])?(\d{2})(?:\:?(\d{2}))?"
     )

     @classmethod
     def parse(cls, tzinfo_string: str) -> dt_tzinfo:
-
         tzinfo: Optional[dt_tzinfo] = None

         if tzinfo_string == "local":
@@ -755,7 +748,6 @@ class TzinfoParser:
             tzinfo = tz.tzutc()

         else:
             iso_match = cls._TZINFO_RE.match(tzinfo_string)

             if iso_match:
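Editor's note: two parser changes stand out. re.split now receives maxsplit and flags as keywords (recent CPython deprecates passing them positionally), and the _TZINFO_RE pattern is loosened to tolerate a leading "(UTC" prefix and trailing text. A small, self-contained check of the new pattern (illustrative only, regex taken verbatim from the hunk above):

import re

TZINFO_RE = re.compile(r"^(?:\(UTC)*([\+\-])?(\d{2})(?:\:?(\d{2}))?")

# Both forms match; the dropped end-of-string anchor allows trailing text.
print(TZINFO_RE.match("+09:00").groups())            # ('+', '09', '00')
print(TZINFO_RE.match("(UTC-07:00) PDT").groups())   # ('-', '07', '00')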

View file

@@ -11,9 +11,9 @@ from bleach.sanitizer import (

 # yyyymmdd
-__releasedate__ = "20230123"
+__releasedate__ = "20231006"
 # x.y.z or x.y.z.dev0 -- semver
-__version__ = "6.0.0"
+__version__ = "6.1.0"

 __all__ = ["clean", "linkify"]

View file

@@ -395,10 +395,17 @@ class BleachHTMLTokenizer(HTMLTokenizer):
                 # followed by a series of characters. It's treated as a tag
                 # name that abruptly ends, but we should treat that like
                 # character data
-                yield {
-                    "type": TAG_TOKEN_TYPE_CHARACTERS,
-                    "data": "<" + self.currentToken["name"],
-                }
+                yield {"type": TAG_TOKEN_TYPE_CHARACTERS, "data": self.stream.get_tag()}
+            elif last_error_token["data"] in (
+                "eof-in-attribute-name",
+                "eof-in-attribute-value-no-quotes",
+            ):
+                # Handle the case where the text being parsed ends with <
+                # followed by a series of characters and then space and then
+                # more characters. It's treated as a tag name followed by an
+                # attribute that abruptly ends, but we should treat that like
+                # character data.
+                yield {"type": TAG_TOKEN_TYPE_CHARACTERS, "data": self.stream.get_tag()}
             else:
                 yield last_error_token

View file

@@ -45,8 +45,8 @@ def build_url_re(tlds=TLDS, protocols=html5lib_shim.allowed_protocols):
         r"""\(*  # Match any opening parentheses.
         \b(?<![@.])(?:(?:{0}):/{{0,3}}(?:(?:\w+:)?\w+@)?)?  # http://
         ([\w-]+\.)+(?:{1})(?:\:[0-9]+)?(?!\.\w)\b  # xx.yy.tld(:##)?
-        (?:[/?][^\s\{{\}}\|\\\^\[\]`<>"]*)?
-            # /path/zz (excluding "unsafe" chars from RFC 1738,
+        (?:[/?][^\s\{{\}}\|\\\^`<>"]*)?
+            # /path/zz (excluding "unsafe" chars from RFC 3986,
             # except for # and ~, which happen in practice)
         """.format(
             "|".join(sorted(protocols)), "|".join(sorted(tlds))
@@ -591,7 +591,7 @@ class LinkifyFilter(html5lib_shim.Filter):
                     in_a = False
                     token_buffer = []
                 else:
-                    token_buffer.append(token)
+                    token_buffer.extend(list(self.extract_entities(token)))
                 continue

             if token["type"] in ["StartTag", "EmptyTag"]:

View file

@@ -0,0 +1,6 @@
version = "2.9.*"
upstream_repository = "https://github.com/dateutil/dateutil"
partial_stub = true
[tool.stubtest]
ignore_missing_stub = true

View file

View file

@@ -0,0 +1,9 @@
from typing_extensions import Self
class weekday:
def __init__(self, weekday: int, n: int | None = None) -> None: ...
def __call__(self, n: int) -> Self: ...
def __eq__(self, other: object) -> bool: ...
def __hash__(self) -> int: ...
weekday: int
n: int

View file

@@ -0,0 +1,8 @@
from datetime import date
from typing import Literal
EASTER_JULIAN: Literal[1]
EASTER_ORTHODOX: Literal[2]
EASTER_WESTERN: Literal[3]
def easter(year: int, method: Literal[1, 2, 3] = 3) -> date: ...
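Editor's note: for reference, example calls matching the stub above (illustrative; dates are checkable against a calendar):

from datetime import date
from dateutil.easter import EASTER_ORTHODOX, easter

assert easter(2024) == date(2024, 3, 31)                   # Western Easter 2024
assert easter(2024, method=EASTER_ORTHODOX) == date(2024, 5, 5)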

View file

@@ -0,0 +1,67 @@
from collections.abc import Callable, Mapping
from datetime import datetime, tzinfo
from typing import IO, Any
from typing_extensions import TypeAlias
from .isoparser import isoparse as isoparse, isoparser as isoparser
_FileOrStr: TypeAlias = bytes | str | IO[str] | IO[Any]
_TzData: TypeAlias = tzinfo | int | str | None
_TzInfo: TypeAlias = Mapping[str, _TzData] | Callable[[str, int], _TzData]
class parserinfo:
JUMP: list[str]
WEEKDAYS: list[tuple[str, ...]]
MONTHS: list[tuple[str, ...]]
HMS: list[tuple[str, str, str]]
AMPM: list[tuple[str, str]]
UTCZONE: list[str]
PERTAIN: list[str]
TZOFFSET: dict[str, int]
def __init__(self, dayfirst: bool = False, yearfirst: bool = False) -> None: ...
def jump(self, name: str) -> bool: ...
def weekday(self, name: str) -> int | None: ...
def month(self, name: str) -> int | None: ...
def hms(self, name: str) -> int | None: ...
def ampm(self, name: str) -> int | None: ...
def pertain(self, name: str) -> bool: ...
def utczone(self, name: str) -> bool: ...
def tzoffset(self, name: str) -> int | None: ...
def convertyear(self, year: int) -> int: ...
def validate(self, res: datetime) -> bool: ...
class parser:
def __init__(self, info: parserinfo | None = None) -> None: ...
def parse(
self,
timestr: _FileOrStr,
default: datetime | None = None,
ignoretz: bool = False,
tzinfos: _TzInfo | None = None,
*,
dayfirst: bool | None = ...,
yearfirst: bool | None = ...,
fuzzy: bool = ...,
fuzzy_with_tokens: bool = ...,
) -> datetime: ...
DEFAULTPARSER: parser
def parse(
timestr: _FileOrStr,
parserinfo: parserinfo | None = None,
*,
dayfirst: bool | None = ...,
yearfirst: bool | None = ...,
ignoretz: bool = ...,
fuzzy: bool = ...,
fuzzy_with_tokens: bool = ...,
default: datetime | None = ...,
tzinfos: _TzInfo | None = ...,
) -> datetime: ...
class _tzparser: ...
DEFAULTTZPARSER: _tzparser
class ParserError(ValueError): ...
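Editor's note: illustrative calls exercising the stubbed parse signatures (not part of the stub itself):

from dateutil import parser

dt = parser.parse("2024-03-24 15:22:58 -0700")   # timezone-aware datetime
d = parser.parse("24/03/2024", dayfirst=True)    # day-first parsing
print(dt.isoformat(), d.date())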

View file

@@ -0,0 +1,15 @@
from _typeshed import SupportsRead
from datetime import date, datetime, time, tzinfo
from typing_extensions import TypeAlias
_Readable: TypeAlias = SupportsRead[str | bytes]
_TakesAscii: TypeAlias = str | bytes | _Readable
class isoparser:
def __init__(self, sep: str | bytes | None = None): ...
def isoparse(self, dt_str: _TakesAscii) -> datetime: ...
def parse_isodate(self, datestr: _TakesAscii) -> date: ...
def parse_isotime(self, timestr: _TakesAscii) -> time: ...
def parse_tzstr(self, tzstr: _TakesAscii, zero_as_utc: bool = True) -> tzinfo: ...
def isoparse(dt_str: _TakesAscii) -> datetime: ...

View file

@@ -0,0 +1 @@
partial

View file

@@ -0,0 +1,97 @@
from datetime import date, timedelta
from typing import SupportsFloat, TypeVar, overload
from typing_extensions import Self, TypeAlias
# See #9817 for why we reexport this here
from ._common import weekday as weekday
_DateT = TypeVar("_DateT", bound=date)
# Work around attribute and type having the same name.
_Weekday: TypeAlias = weekday
MO: weekday
TU: weekday
WE: weekday
TH: weekday
FR: weekday
SA: weekday
SU: weekday
class relativedelta:
years: int
months: int
days: int
leapdays: int
hours: int
minutes: int
seconds: int
microseconds: int
year: int | None
month: int | None
weekday: _Weekday | None
day: int | None
hour: int | None
minute: int | None
second: int | None
microsecond: int | None
def __init__(
self,
dt1: date | None = None,
dt2: date | None = None,
years: int | None = 0,
months: int | None = 0,
days: int | None = 0,
leapdays: int | None = 0,
weeks: int | None = 0,
hours: int | None = 0,
minutes: int | None = 0,
seconds: int | None = 0,
microseconds: int | None = 0,
year: int | None = None,
month: int | None = None,
day: int | None = None,
weekday: int | _Weekday | None = None,
yearday: int | None = None,
nlyearday: int | None = None,
hour: int | None = None,
minute: int | None = None,
second: int | None = None,
microsecond: int | None = None,
) -> None: ...
@property
def weeks(self) -> int: ...
@weeks.setter
def weeks(self, value: int) -> None: ...
def normalized(self) -> Self: ...
# TODO: use Union when mypy will handle it properly in overloaded operator
# methods (#2129, #1442, #1264 in mypy)
@overload
def __add__(self, other: relativedelta) -> Self: ...
@overload
def __add__(self, other: timedelta) -> Self: ...
@overload
def __add__(self, other: _DateT) -> _DateT: ...
@overload
def __radd__(self, other: relativedelta) -> Self: ...
@overload
def __radd__(self, other: timedelta) -> Self: ...
@overload
def __radd__(self, other: _DateT) -> _DateT: ...
@overload
def __rsub__(self, other: relativedelta) -> Self: ...
@overload
def __rsub__(self, other: timedelta) -> Self: ...
@overload
def __rsub__(self, other: _DateT) -> _DateT: ...
def __sub__(self, other: relativedelta) -> Self: ...
def __neg__(self) -> Self: ...
def __bool__(self) -> bool: ...
def __nonzero__(self) -> bool: ...
def __mul__(self, other: SupportsFloat) -> Self: ...
def __rmul__(self, other: SupportsFloat) -> Self: ...
def __eq__(self, other: object) -> bool: ...
def __ne__(self, other: object) -> bool: ...
def __div__(self, other: SupportsFloat) -> Self: ...
def __truediv__(self, other: SupportsFloat) -> Self: ...
def __abs__(self) -> Self: ...
def __hash__(self) -> int: ...
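Editor's note: a short example of the stubbed arithmetic (illustrative): month shifts clamp to the last valid day of the target month, and a bare weekday moves forward to the next matching weekday.

from datetime import date
from dateutil.relativedelta import FR, relativedelta

assert date(2024, 1, 31) + relativedelta(months=+1) == date(2024, 2, 29)
print(date(2024, 3, 24) + relativedelta(weekday=FR))   # 2024-03-29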

View file

@@ -0,0 +1,111 @@
import datetime
from _typeshed import Incomplete
from collections.abc import Iterable, Iterator, Sequence
from typing_extensions import TypeAlias
from ._common import weekday as weekdaybase
YEARLY: int
MONTHLY: int
WEEKLY: int
DAILY: int
HOURLY: int
MINUTELY: int
SECONDLY: int
class weekday(weekdaybase): ...
weekdays: tuple[weekday, weekday, weekday, weekday, weekday, weekday, weekday]
MO: weekday
TU: weekday
WE: weekday
TH: weekday
FR: weekday
SA: weekday
SU: weekday
class rrulebase:
def __init__(self, cache: bool = False) -> None: ...
def __iter__(self) -> Iterator[datetime.datetime]: ...
def __getitem__(self, item): ...
def __contains__(self, item): ...
def count(self): ...
def before(self, dt, inc: bool = False): ...
def after(self, dt, inc: bool = False): ...
def xafter(self, dt, count: Incomplete | None = None, inc: bool = False): ...
def between(self, after, before, inc: bool = False, count: int = 1): ...
class rrule(rrulebase):
def __init__(
self,
freq,
dtstart: datetime.date | None = None,
interval: int = 1,
wkst: weekday | int | None = None,
count: int | None = None,
until: datetime.date | int | None = None,
bysetpos: int | Iterable[int] | None = None,
bymonth: int | Iterable[int] | None = None,
bymonthday: int | Iterable[int] | None = None,
byyearday: int | Iterable[int] | None = None,
byeaster: int | Iterable[int] | None = None,
byweekno: int | Iterable[int] | None = None,
byweekday: int | weekday | Iterable[int] | Iterable[weekday] | None = None,
byhour: int | Iterable[int] | None = None,
byminute: int | Iterable[int] | None = None,
bysecond: int | Iterable[int] | None = None,
cache: bool = False,
) -> None: ...
def replace(self, **kwargs): ...
_RRule: TypeAlias = rrule
class _iterinfo:
rrule: _RRule
def __init__(self, rrule: _RRule) -> None: ...
yearlen: int | None
nextyearlen: int | None
yearordinal: int | None
yearweekday: int | None
mmask: Sequence[int] | None
mdaymask: Sequence[int] | None
nmdaymask: Sequence[int] | None
wdaymask: Sequence[int] | None
mrange: Sequence[int] | None
wnomask: Sequence[int] | None
nwdaymask: Sequence[int] | None
eastermask: Sequence[int] | None
lastyear: int | None
lastmonth: int | None
def rebuild(self, year, month): ...
def ydayset(self, year, month, day): ...
def mdayset(self, year, month, day): ...
def wdayset(self, year, month, day): ...
def ddayset(self, year, month, day): ...
def htimeset(self, hour, minute, second): ...
def mtimeset(self, hour, minute, second): ...
def stimeset(self, hour, minute, second): ...
class rruleset(rrulebase):
class _genitem:
dt: Incomplete
genlist: list[Incomplete]
gen: Incomplete
def __init__(self, genlist, gen) -> None: ...
def __next__(self) -> None: ...
next = __next__
def __lt__(self, other) -> bool: ...
def __gt__(self, other) -> bool: ...
def __eq__(self, other) -> bool: ...
def __ne__(self, other) -> bool: ...
def __init__(self, cache: bool = False) -> None: ...
def rrule(self, rrule: _RRule): ...
def rdate(self, rdate): ...
def exrule(self, exrule): ...
def exdate(self, exdate): ...
class _rrulestr:
def __call__(self, s, **kwargs) -> rrule | rruleset: ...
rrulestr: _rrulestr
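Editor's note: illustrative use of the stubbed rrule constructor:

from datetime import datetime
from dateutil.rrule import MO, WE, WEEKLY, rrule

# Three occurrences, Mondays and Wednesdays, starting from the given datetime.
rule = rrule(WEEKLY, byweekday=(MO, WE), count=3, dtstart=datetime(2024, 3, 24))
print(list(rule))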

View file

@@ -0,0 +1,15 @@
from .tz import (
datetime_ambiguous as datetime_ambiguous,
datetime_exists as datetime_exists,
gettz as gettz,
resolve_imaginary as resolve_imaginary,
tzfile as tzfile,
tzical as tzical,
tzlocal as tzlocal,
tzoffset as tzoffset,
tzrange as tzrange,
tzstr as tzstr,
tzutc as tzutc,
)
UTC: tzutc

View file

@@ -0,0 +1,28 @@
import abc
from datetime import datetime, timedelta, tzinfo
from typing import ClassVar
def tzname_in_python2(namefunc): ...
def enfold(dt: datetime, fold: int = 1): ...
class _DatetimeWithFold(datetime):
@property
def fold(self): ...
# Doesn't actually have ABCMeta as the metaclass at runtime,
# but mypy complains if we don't have it in the stub.
# See discussion in #8908
class _tzinfo(tzinfo, metaclass=abc.ABCMeta):
def is_ambiguous(self, dt: datetime) -> bool: ...
def fromutc(self, dt: datetime) -> datetime: ...
class tzrangebase(_tzinfo):
def __init__(self) -> None: ...
def utcoffset(self, dt: datetime | None) -> timedelta | None: ...
def dst(self, dt: datetime | None) -> timedelta | None: ...
def tzname(self, dt: datetime | None) -> str: ...
def fromutc(self, dt: datetime) -> datetime: ...
def is_ambiguous(self, dt: datetime) -> bool: ...
__hash__: ClassVar[None] # type: ignore[assignment]
def __ne__(self, other): ...
__reduce__ = object.__reduce__

View file

@@ -0,0 +1,115 @@
import datetime
from _typeshed import Incomplete
from typing import ClassVar, Literal, Protocol, TypeVar
from ..relativedelta import relativedelta
from ._common import _tzinfo as _tzinfo, enfold as enfold, tzname_in_python2 as tzname_in_python2, tzrangebase as tzrangebase
_DT = TypeVar("_DT", bound=datetime.datetime)
ZERO: datetime.timedelta
EPOCH: datetime.datetime
EPOCHORDINAL: int
class tzutc(datetime.tzinfo):
def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
def tzname(self, dt: datetime.datetime | None) -> str: ...
def is_ambiguous(self, dt: datetime.datetime | None) -> bool: ...
def fromutc(self, dt: _DT) -> _DT: ...
def __eq__(self, other): ...
__hash__: ClassVar[None] # type: ignore[assignment]
def __ne__(self, other): ...
__reduce__ = object.__reduce__
class tzoffset(datetime.tzinfo):
def __init__(self, name, offset) -> None: ...
def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
def is_ambiguous(self, dt: datetime.datetime | None) -> bool: ...
def tzname(self, dt: datetime.datetime | None) -> str: ...
def fromutc(self, dt: _DT) -> _DT: ...
def __eq__(self, other): ...
__hash__: ClassVar[None] # type: ignore[assignment]
def __ne__(self, other): ...
__reduce__ = object.__reduce__
@classmethod
def instance(cls, name, offset) -> tzoffset: ...
class tzlocal(_tzinfo):
def __init__(self) -> None: ...
def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
def tzname(self, dt: datetime.datetime | None) -> str: ...
def is_ambiguous(self, dt: datetime.datetime | None) -> bool: ...
def __eq__(self, other): ...
__hash__: ClassVar[None] # type: ignore[assignment]
def __ne__(self, other): ...
__reduce__ = object.__reduce__
class _ttinfo:
def __init__(self) -> None: ...
def __eq__(self, other): ...
__hash__: ClassVar[None] # type: ignore[assignment]
def __ne__(self, other): ...
class _TZFileReader(Protocol):
# optional attribute:
# name: str
def read(self, size: int, /) -> bytes: ...
def seek(self, target: int, whence: Literal[1], /) -> object: ...
class tzfile(_tzinfo):
def __init__(self, fileobj: str | _TZFileReader, filename: str | None = None) -> None: ...
def is_ambiguous(self, dt: datetime.datetime | None, idx: int | None = None) -> bool: ...
def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
def tzname(self, dt: datetime.datetime | None) -> str: ...
def __eq__(self, other): ...
__hash__: ClassVar[None] # type: ignore[assignment]
def __ne__(self, other): ...
def __reduce__(self): ...
def __reduce_ex__(self, protocol): ...
class tzrange(tzrangebase):
hasdst: bool
def __init__(
self,
stdabbr: str,
stdoffset: int | datetime.timedelta | None = None,
dstabbr: str | None = None,
dstoffset: int | datetime.timedelta | None = None,
start: relativedelta | None = None,
end: relativedelta | None = None,
) -> None: ...
def transitions(self, year: int) -> tuple[datetime.datetime, datetime.datetime]: ...
def __eq__(self, other): ...
class tzstr(tzrange):
hasdst: bool
def __init__(self, s: str, posix_offset: bool = False) -> None: ...
@classmethod
def instance(cls, name, offset) -> tzoffset: ...
class _ICalReader(Protocol):
# optional attribute:
# name: str
def read(self) -> str: ...
class tzical:
def __init__(self, fileobj: str | _ICalReader) -> None: ...
def keys(self): ...
def get(self, tzid: Incomplete | None = None): ...
TZFILES: list[str]
TZPATHS: list[str]
def datetime_exists(dt: datetime.datetime, tz: datetime.tzinfo | None = None) -> bool: ...
def datetime_ambiguous(dt: datetime.datetime, tz: datetime.tzinfo | None = None) -> bool: ...
def resolve_imaginary(dt: datetime.datetime) -> datetime.datetime: ...
class _GetTZ:
def __call__(self, name: str | None = ...) -> datetime.tzinfo | None: ...
def nocache(self, name: str | None) -> datetime.tzinfo | None: ...
gettz: _GetTZ
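Editor's note: illustrative use of the stubbed helpers (the zone name is only an example):

from datetime import datetime
from dateutil import tz

eastern = tz.gettz("America/New_York")
# 2024-11-03 01:30 occurs twice in US Eastern (DST fold), so it is ambiguous.
print(tz.datetime_ambiguous(datetime(2024, 11, 3, 1, 30, tzinfo=eastern)))
print(tz.UTC)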

View file

@@ -0,0 +1,5 @@
from datetime import datetime, timedelta, tzinfo
def default_tzinfo(dt: datetime, tzinfo: tzinfo) -> datetime: ...
def today(tzinfo: tzinfo | None = None) -> datetime: ...
def within_delta(dt1: datetime, dt2: datetime, delta: timedelta) -> bool: ...

View file

@@ -0,0 +1,17 @@
from _typeshed import Incomplete
from typing import IO
from typing_extensions import TypeAlias
__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"]
_MetadataType: TypeAlias = dict[str, Incomplete]
class ZoneInfoFile:
zones: dict[Incomplete, Incomplete]
metadata: _MetadataType | None
def __init__(self, zonefile_stream: IO[bytes] | None = None) -> None: ...
def get(self, name, default: Incomplete | None = None): ...
def get_zonefile_instance(new_instance: bool = False) -> ZoneInfoFile: ...
def gettz(name): ...
def gettz_db_metadata() -> _MetadataType: ...

View file

@@ -0,0 +1,11 @@
from _typeshed import Incomplete, StrOrBytesPath
from collections.abc import Sequence
from tarfile import TarInfo
def rebuild(
filename: StrOrBytesPath,
tag: Incomplete | None = None,
format: str = "gz",
zonegroups: Sequence[str | TarInfo] = [],
metadata: Incomplete | None = None,
) -> None: ...

View file

@@ -1,4 +1,6 @@
 # -*- coding: utf-8 -*-
+import sys
+
 try:
     from ._version import version as __version__
 except ImportError:
@@ -6,3 +8,17 @@ except ImportError:

 __all__ = ['easter', 'parser', 'relativedelta', 'rrule', 'tz',
            'utils', 'zoneinfo']
+
+
+def __getattr__(name):
+    import importlib
+
+    if name in __all__:
+        return importlib.import_module("." + name, __name__)
+    raise AttributeError(
+        "module {!r} has not attribute {!r}".format(__name__, name)
+    )
+
+
+def __dir__():
+    # __dir__ should include all the lazy-importable modules as well.
+    return [x for x in globals() if x not in sys.modules] + __all__

View file

@@ -1,5 +1,4 @@
-# coding: utf-8
 # file generated by setuptools_scm
 # don't change, don't track in version control
-version = '2.8.2'
-version_tuple = (2, 8, 2)
+__version__ = version = '2.9.0.post0'
+__version_tuple__ = version_tuple = (2, 9, 0)

View file

@@ -72,7 +72,7 @@ class isoparser(object):
         Common:

         - ``YYYY``
-        - ``YYYY-MM`` or ``YYYYMM``
+        - ``YYYY-MM``
         - ``YYYY-MM-DD`` or ``YYYYMMDD``

         Uncommon:

View file

@@ -182,7 +182,7 @@ class rrulebase(object):
     # __len__() introduces a large performance penalty.
     def count(self):
         """ Returns the number of recurrences in this set. It will have go
-        trough the whole recurrence, if this hasn't been done before. """
+        through the whole recurrence, if this hasn't been done before. """
         if self._len is None:
             for x in self:
                 pass

View file

@@ -34,7 +34,7 @@ except ImportError:
 from warnings import warn

 ZERO = datetime.timedelta(0)
-EPOCH = datetime.datetime.utcfromtimestamp(0)
+EPOCH = datetime.datetime(1970, 1, 1, 0, 0)
 EPOCHORDINAL = EPOCH.toordinal()
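Editor's note: the EPOCH change avoids datetime.utcfromtimestamp(), which is deprecated on Python 3.12+; the explicit constructor yields the same naive value. Illustration (not part of the diff):

import datetime

old = datetime.datetime.utcfromtimestamp(0)   # DeprecationWarning on 3.12+
new = datetime.datetime(1970, 1, 1, 0, 0)
assert old == new and new.toordinal() == 719163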

View file

@@ -1,8 +1,8 @@
 # mako/__init__.py
-# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

-__version__ = "1.2.4"
+__version__ = "1.3.2"

View file

@@ -1,5 +1,5 @@
 # mako/_ast_util.py
-# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@@ -1,5 +1,5 @@
 # mako/ast.py
-# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@@ -1,5 +1,5 @@
 # mako/cache.py
-# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@@ -1,5 +1,5 @@
 # mako/cmd.py
-# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -25,7 +25,6 @@ def _exit():

 def cmdline(argv=None):
-
     parser = ArgumentParser()
     parser.add_argument(
         "--var",
View file

@@ -1,5 +1,5 @@
 # mako/codegen.py
-# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -816,7 +816,6 @@ class _GenerateRenderMethod:
                 )
                 or len(self.compiler.default_filters)
             ):
                 s = self.create_filter_callable(
                     node.escapes_code.args, "%s" % node.text, True
                 )
@@ -1181,7 +1180,6 @@ class _Identifiers:
     def visitBlockTag(self, node):
-
         if node is not self.node and not node.is_anonymous:
             if isinstance(self.node, parsetree.DefTag):
                 raise exceptions.CompileException(
                     "Named block '%s' not allowed inside of def '%s'"
View file

@@ -1,17 +1,17 @@
 # mako/compat.py
-# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

 import collections
+from importlib import metadata as importlib_metadata
 from importlib import util
 import inspect
 import sys

 win32 = sys.platform.startswith("win")
 pypy = hasattr(sys, "pypy_version_info")
-py38 = sys.version_info >= (3, 8)

 ArgSpec = collections.namedtuple(
     "ArgSpec", ["args", "varargs", "keywords", "defaults"]
@@ -62,12 +62,6 @@ def exception_name(exc):
     return exc.__class__.__name__

-if py38:
-    from importlib import metadata as importlib_metadata
-else:
-    import importlib_metadata  # noqa

 def importlib_metadata_get(group):
     ep = importlib_metadata.entry_points()
     if hasattr(ep, "select"):

View file

@@ -1,5 +1,5 @@
 # mako/exceptions.py
-# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@@ -1,5 +1,5 @@
 # ext/autohandler.py
-# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@@ -1,5 +1,5 @@
 # ext/babelplugin.py
-# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@@ -1,5 +1,5 @@
 # ext/beaker_cache.py
-# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@@ -1,5 +1,5 @@
 # ext/extract.py
-# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@@ -1,5 +1,5 @@
 # ext/linguaplugin.py
-# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@@ -1,5 +1,5 @@
 # ext/preprocessors.py
-# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@@ -1,5 +1,5 @@
 # ext/pygmentplugin.py
-# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@@ -1,5 +1,5 @@
 # ext/turbogears.py
-# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@@ -1,5 +1,5 @@
 # mako/filters.py
-# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@@ -1,5 +1,5 @@
 # mako/lexer.py
-# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -247,6 +247,8 @@ class Lexer:
                 continue
             if self.match_python_block():
                 continue
+            if self.match_percent():
+                continue
             if self.match_text():
                 continue
@@ -352,14 +354,24 @@ class Lexer:
         else:
             return True

+    def match_percent(self):
+        match = self.match(r"(?<=^)(\s*)%%(%*)", re.M)
+        if match:
+            self.append_node(
+                parsetree.Text, match.group(1) + "%" + match.group(2)
+            )
+            return True
+        else:
+            return False
+
     def match_text(self):
         match = self.match(
             r"""
             (.*?)   # anything, followed by:
             (
             (?<=\n)(?=[ \t]*(?=%|\#\#))  # an eval or line-based
-                                         # comment preceded by a
+                                         # comment, preceded by a
                                          # consumed newline and whitespace
             |
             (?=\${)   # an expression
             |
View file

@@ -1,5 +1,5 @@
 # mako/lookup.py
-# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -178,7 +178,6 @@ class TemplateLookup(TemplateCollection):
         lexer_cls=None,
         include_error_handler=None,
     ):
-
         self.directories = [
             posixpath.normpath(d) for d in util.to_list(directories, ())
         ]

View file

@@ -1,5 +1,5 @@
 # mako/parsetree.py
-# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@ -1,5 +1,5 @@
# mako/pygen.py # mako/pygen.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@ -1,5 +1,5 @@
# mako/pyparser.py # mako/pyparser.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php
@ -64,7 +64,6 @@ class FindIdentifiers(_ast_util.NodeVisitor):
self._add_declared(node.name) self._add_declared(node.name)
def visit_Assign(self, node): def visit_Assign(self, node):
# flip around the visiting of Assign so the expression gets # flip around the visiting of Assign so the expression gets
# evaluated first, in the case of a clause like "x=x+5" (x # evaluated first, in the case of a clause like "x=x+5" (x
# is undeclared) # is undeclared)
@ -99,7 +98,6 @@ class FindIdentifiers(_ast_util.NodeVisitor):
yield arg yield arg
def _visit_function(self, node, islambda): def _visit_function(self, node, islambda):
# push function state onto stack. dont log any more # push function state onto stack. dont log any more
# identifiers as "declared" until outside of the function, # identifiers as "declared" until outside of the function,
# but keep logging identifiers as "undeclared". track # but keep logging identifiers as "undeclared". track
@ -122,7 +120,6 @@ class FindIdentifiers(_ast_util.NodeVisitor):
self.local_ident_stack = local_ident_stack self.local_ident_stack = local_ident_stack
def visit_For(self, node): def visit_For(self, node):
# flip around visit # flip around visit
self.visit(node.iter) self.visit(node.iter)

View file

@ -530,7 +530,7 @@ class Namespace:
def _populate(self, d, l): def _populate(self, d, l):
for ident in l: for ident in l:
if ident == "*": if ident == "*":
for (k, v) in self._get_star(): for k, v in self._get_star():
d[k] = v d[k] = v
else: else:
d[ident] = getattr(self, ident) d[ident] = getattr(self, ident)

View file

@ -1,5 +1,5 @@
# mako/template.py # mako/template.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php
@ -26,7 +26,6 @@ from mako.lexer import Lexer
class Template: class Template:
r"""Represents a compiled template. r"""Represents a compiled template.
:class:`.Template` includes a reference to the original :class:`.Template` includes a reference to the original

View file

@ -103,7 +103,6 @@ def _assert_raises(
check_context=False, check_context=False,
cause_cls=None, cause_cls=None,
): ):
with _expect_raises(except_cls, msg, check_context, cause_cls) as ec: with _expect_raises(except_cls, msg, check_context, cause_cls) as ec:
callable_(*args, **kwargs) callable_(*args, **kwargs)
return ec.error return ec.error

View file

@ -19,6 +19,10 @@ def result_lines(result):
] ]
def result_raw_lines(result):
return [x for x in re.split(r"\r?\n", result) if x.strip() != ""]
def make_path( def make_path(
filespec: Union[Path, str], filespec: Union[Path, str],
make_absolute: bool = True, make_absolute: bool = True,

View file

@ -1,5 +1,5 @@
# mako/util.py # mako/util.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@ -1,5 +1,4 @@
import functools import functools
import re
import string import string
import sys import sys
import typing as t import typing as t
@ -14,10 +13,7 @@ if t.TYPE_CHECKING:
_P = te.ParamSpec("_P") _P = te.ParamSpec("_P")
__version__ = "2.1.3" __version__ = "2.1.5"
_strip_comments_re = re.compile(r"<!--.*?-->", re.DOTALL)
_strip_tags_re = re.compile(r"<.*?>", re.DOTALL)
def _simple_escaping_wrapper(func: "t.Callable[_P, str]") -> "t.Callable[_P, Markup]": def _simple_escaping_wrapper(func: "t.Callable[_P, str]") -> "t.Callable[_P, Markup]":
@ -162,9 +158,41 @@ class Markup(str):
>>> Markup("Main &raquo;\t<em>About</em>").striptags() >>> Markup("Main &raquo;\t<em>About</em>").striptags()
'Main » About' 'Main » About'
""" """
# Use two regexes to avoid ambiguous matches. value = str(self)
value = _strip_comments_re.sub("", self)
value = _strip_tags_re.sub("", value) # Look for comments then tags separately. Otherwise, a comment that
# contains a tag would end early, leaving some of the comment behind.
while True:
# keep finding comment start marks
start = value.find("<!--")
if start == -1:
break
# find a comment end mark beyond the start, otherwise stop
end = value.find("-->", start)
if end == -1:
break
value = f"{value[:start]}{value[end + 3:]}"
# remove tags using the same method
while True:
start = value.find("<")
if start == -1:
break
end = value.find(">", start)
if end == -1:
break
value = f"{value[:start]}{value[end + 1:]}"
# collapse spaces
value = " ".join(value.split()) value = " ".join(value.split())
return self.__class__(value).unescape() return self.__class__(value).unescape()
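
The reworked striptags() above removes comments first (so a tag inside a comment cannot cut the comment short), then removes tags, then collapses whitespace before unescaping. A small illustrative call, mirroring the docstring example:

    from markupsafe import Markup

    s = Markup("Main &raquo;\t<em>About</em> <!-- note with a <b>tag</b> inside -->")
    # comment stripped first, then <em>/</em>, then whitespace collapsed and
    # entities unescaped
    print(s.striptags())  # expected: 'Main » About'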

View file

@ -22,8 +22,8 @@ from pytz.tzfile import build_tzinfo
# The IANA (nee Olson) database is updated several times a year. # The IANA (nee Olson) database is updated several times a year.
OLSON_VERSION = '2023c' OLSON_VERSION = '2024a'
VERSION = '2023.3' # pip compatible version number. VERSION = '2024.1' # pip compatible version number.
__version__ = VERSION __version__ = VERSION
OLSEN_VERSION = OLSON_VERSION # Old releases had this misspelling OLSEN_VERSION = OLSON_VERSION # Old releases had this misspelling

View file

@ -24,7 +24,8 @@ def memorized_timedelta(seconds):
_timedelta_cache[seconds] = delta _timedelta_cache[seconds] = delta
return delta return delta
_epoch = datetime.utcfromtimestamp(0)
_epoch = datetime(1970, 1, 1, 0, 0) # datetime.utcfromtimestamp(0)
_datetime_cache = {0: _epoch} _datetime_cache = {0: _epoch}
@ -33,12 +34,13 @@ def memorized_datetime(seconds):
try: try:
return _datetime_cache[seconds] return _datetime_cache[seconds]
except KeyError: except KeyError:
# NB. We can't just do datetime.utcfromtimestamp(seconds) as this # NB. We can't just do datetime.fromtimestamp(seconds, tz=timezone.utc).replace(tzinfo=None)
# fails with negative values under Windows (Bug #90096) # as this fails with negative values under Windows (Bug #90096)
dt = _epoch + timedelta(seconds=seconds) dt = _epoch + timedelta(seconds=seconds)
_datetime_cache[seconds] = dt _datetime_cache[seconds] = dt
return dt return dt
_ttinfo_cache = {} _ttinfo_cache = {}
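
The change above replaces the deprecated datetime.utcfromtimestamp(0) with an explicit epoch constant; naive UTC datetimes are then derived by adding a timedelta, which also sidesteps the Windows failure for negative timestamps noted in the comment. A standalone sketch of the same idea:

    from datetime import datetime, timedelta

    _epoch = datetime(1970, 1, 1)  # naive UTC epoch, equivalent to utcfromtimestamp(0)

    def utc_from_timestamp(seconds):
        # works for negative values too, unlike utcfromtimestamp() on Windows
        return _epoch + timedelta(seconds=seconds)

    print(utc_from_timestamp(0))       # 1970-01-01 00:00:00
    print(utc_from_timestamp(-86400))  # 1969-12-31 00:00:00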
@ -55,6 +57,7 @@ def memorized_ttinfo(*args):
_ttinfo_cache[args] = ttinfo _ttinfo_cache[args] = ttinfo
return ttinfo return ttinfo
_notime = memorized_timedelta(0) _notime = memorized_timedelta(0)
@ -355,7 +358,7 @@ class DstTzInfo(BaseTzInfo):
is_dst=False) + timedelta(hours=6) is_dst=False) + timedelta(hours=6)
# If we get this far, we have multiple possible timezones - this # If we get this far, we have multiple possible timezones - this
# is an ambiguous case occuring during the end-of-DST transition. # is an ambiguous case occurring during the end-of-DST transition.
# If told to be strict, raise an exception since we have an # If told to be strict, raise an exception since we have an
# ambiguous case # ambiguous case

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View file

@ -3,17 +3,22 @@
# This file is in the public domain, so clarified as of # This file is in the public domain, so clarified as of
# 2009-05-17 by Arthur David Olson. # 2009-05-17 by Arthur David Olson.
# #
# From Paul Eggert (2022-11-18): # From Paul Eggert (2023-09-06):
# This file contains a table of two-letter country codes. Columns are # This file contains a table of two-letter country codes. Columns are
# separated by a single tab. Lines beginning with '#' are comments. # separated by a single tab. Lines beginning with '#' are comments.
# All text uses UTF-8 encoding. The columns of the table are as follows: # All text uses UTF-8 encoding. The columns of the table are as follows:
# #
# 1. ISO 3166-1 alpha-2 country code, current as of # 1. ISO 3166-1 alpha-2 country code, current as of
# ISO 3166-1 N1087 (2022-09-02). See: Updates on ISO 3166-1 # ISO/TC 46 N1108 (2023-04-05). See: ISO/TC 46 Documents
# https://isotc.iso.org/livelink/livelink/Open/16944257 # https://www.iso.org/committee/48750.html?view=documents
# 2. The usual English name for the coded region, # 2. The usual English name for the coded region. This sometimes
# chosen so that alphabetic sorting of subsets produces helpful lists. # departs from ISO-listed names, sometimes so that sorted subsets
# This is not the same as the English name in the ISO 3166 tables. # of names are useful (e.g., "Samoa (American)" and "Samoa
# (western)" rather than "American Samoa" and "Samoa"),
# sometimes to avoid confusion among non-experts (e.g.,
# "Czech Republic" and "Turkey" rather than "Czechia" and "Türkiye"),
# and sometimes to omit needless detail or churn (e.g., "Netherlands"
# rather than "Netherlands (the)" or "Netherlands (Kingdom of the)").
# #
# The table is sorted by country code. # The table is sorted by country code.
# #

View file

@ -3,13 +3,10 @@
# This file is in the public domain. # This file is in the public domain.
# This file is generated automatically from the data in the public-domain # This file is generated automatically from the data in the public-domain
# NIST format leap-seconds.list file, which can be copied from # NIST/IERS format leap-seconds.list file, which can be copied from
# <ftp://ftp.nist.gov/pub/time/leap-seconds.list>
# or <ftp://ftp.boulder.nist.gov/pub/time/leap-seconds.list>.
# The NIST file is used instead of its IERS upstream counterpart
# <https://hpiers.obspm.fr/iers/bul/bulc/ntp/leap-seconds.list> # <https://hpiers.obspm.fr/iers/bul/bulc/ntp/leap-seconds.list>
# because under US law the NIST file is public domain # or, in a variant with different comments, from
# whereas the IERS file's copyright and license status is unclear. # <ftp://ftp.boulder.nist.gov/pub/time/leap-seconds.list>.
# For more about leap-seconds.list, please see # For more about leap-seconds.list, please see
# The NTP Timescale and Leap Seconds # The NTP Timescale and Leap Seconds
# <https://www.eecis.udel.edu/~mills/leap.html>. # <https://www.eecis.udel.edu/~mills/leap.html>.
@ -72,11 +69,11 @@ Leap 2016 Dec 31 23:59:60 + S
# Any additional leap seconds will come after this. # Any additional leap seconds will come after this.
# This Expires line is commented out for now, # This Expires line is commented out for now,
# so that pre-2020a zic implementations do not reject this file. # so that pre-2020a zic implementations do not reject this file.
#Expires 2023 Dec 28 00:00:00 #Expires 2024 Dec 28 00:00:00
# POSIX timestamps for the data in this file: # POSIX timestamps for the data in this file:
#updated 1467936000 (2016-07-08 00:00:00 UTC) #updated 1704708379 (2024-01-08 10:06:19 UTC)
#expires 1703721600 (2023-12-28 00:00:00 UTC) #expires 1735344000 (2024-12-28 00:00:00 UTC)
# Updated through IERS Bulletin C65 # Updated through IERS Bulletin C (https://hpiers.obspm.fr/iers/bul/bulc/bulletinc.dat)
# File expires on: 28 December 2023 # File expires on 28 December 2024

File diff suppressed because it is too large Load diff

View file

@ -48,7 +48,7 @@ AR -3124-06411 America/Argentina/Cordoba Argentina (most areas: CB, CC, CN, ER,
AR -2447-06525 America/Argentina/Salta Salta (SA, LP, NQ, RN) AR -2447-06525 America/Argentina/Salta Salta (SA, LP, NQ, RN)
AR -2411-06518 America/Argentina/Jujuy Jujuy (JY) AR -2411-06518 America/Argentina/Jujuy Jujuy (JY)
AR -2649-06513 America/Argentina/Tucuman Tucuman (TM) AR -2649-06513 America/Argentina/Tucuman Tucuman (TM)
AR -2828-06547 America/Argentina/Catamarca Catamarca (CT); Chubut (CH) AR -2828-06547 America/Argentina/Catamarca Catamarca (CT), Chubut (CH)
AR -2926-06651 America/Argentina/La_Rioja La Rioja (LR) AR -2926-06651 America/Argentina/La_Rioja La Rioja (LR)
AR -3132-06831 America/Argentina/San_Juan San Juan (SJ) AR -3132-06831 America/Argentina/San_Juan San Juan (SJ)
AR -3253-06849 America/Argentina/Mendoza Mendoza (MZ) AR -3253-06849 America/Argentina/Mendoza Mendoza (MZ)
@ -87,7 +87,7 @@ BN +0456+11455 Asia/Brunei
BO -1630-06809 America/La_Paz BO -1630-06809 America/La_Paz
BQ +120903-0681636 America/Kralendijk BQ +120903-0681636 America/Kralendijk
BR -0351-03225 America/Noronha Atlantic islands BR -0351-03225 America/Noronha Atlantic islands
BR -0127-04829 America/Belem Para (east); Amapa BR -0127-04829 America/Belem Para (east), Amapa
BR -0343-03830 America/Fortaleza Brazil (northeast: MA, PI, CE, RN, PB) BR -0343-03830 America/Fortaleza Brazil (northeast: MA, PI, CE, RN, PB)
BR -0803-03454 America/Recife Pernambuco BR -0803-03454 America/Recife Pernambuco
BR -0712-04812 America/Araguaina Tocantins BR -0712-04812 America/Araguaina Tocantins
@ -107,21 +107,21 @@ BT +2728+08939 Asia/Thimphu
BW -2439+02555 Africa/Gaborone BW -2439+02555 Africa/Gaborone
BY +5354+02734 Europe/Minsk BY +5354+02734 Europe/Minsk
BZ +1730-08812 America/Belize BZ +1730-08812 America/Belize
CA +4734-05243 America/St_Johns Newfoundland; Labrador (southeast) CA +4734-05243 America/St_Johns Newfoundland, Labrador (SE)
CA +4439-06336 America/Halifax Atlantic - NS (most areas); PE CA +4439-06336 America/Halifax Atlantic - NS (most areas), PE
CA +4612-05957 America/Glace_Bay Atlantic - NS (Cape Breton) CA +4612-05957 America/Glace_Bay Atlantic - NS (Cape Breton)
CA +4606-06447 America/Moncton Atlantic - New Brunswick CA +4606-06447 America/Moncton Atlantic - New Brunswick
CA +5320-06025 America/Goose_Bay Atlantic - Labrador (most areas) CA +5320-06025 America/Goose_Bay Atlantic - Labrador (most areas)
CA +5125-05707 America/Blanc-Sablon AST - QC (Lower North Shore) CA +5125-05707 America/Blanc-Sablon AST - QC (Lower North Shore)
CA +4339-07923 America/Toronto Eastern - ON, QC (most areas) CA +4339-07923 America/Toronto Eastern - ON & QC (most areas)
CA +6344-06828 America/Iqaluit Eastern - NU (most areas) CA +6344-06828 America/Iqaluit Eastern - NU (most areas)
CA +484531-0913718 America/Atikokan EST - ON (Atikokan); NU (Coral H) CA +484531-0913718 America/Atikokan EST - ON (Atikokan), NU (Coral H)
CA +4953-09709 America/Winnipeg Central - ON (west); Manitoba CA +4953-09709 America/Winnipeg Central - ON (west), Manitoba
CA +744144-0944945 America/Resolute Central - NU (Resolute) CA +744144-0944945 America/Resolute Central - NU (Resolute)
CA +624900-0920459 America/Rankin_Inlet Central - NU (central) CA +624900-0920459 America/Rankin_Inlet Central - NU (central)
CA +5024-10439 America/Regina CST - SK (most areas) CA +5024-10439 America/Regina CST - SK (most areas)
CA +5017-10750 America/Swift_Current CST - SK (midwest) CA +5017-10750 America/Swift_Current CST - SK (midwest)
CA +5333-11328 America/Edmonton Mountain - AB; BC (E); NT (E); SK (W) CA +5333-11328 America/Edmonton Mountain - AB, BC(E), NT(E), SK(W)
CA +690650-1050310 America/Cambridge_Bay Mountain - NU (west) CA +690650-1050310 America/Cambridge_Bay Mountain - NU (west)
CA +682059-1334300 America/Inuvik Mountain - NT (west) CA +682059-1334300 America/Inuvik Mountain - NT (west)
CA +4906-11631 America/Creston MST - BC (Creston) CA +4906-11631 America/Creston MST - BC (Creston)
@ -207,8 +207,8 @@ HT +1832-07220 America/Port-au-Prince
HU +4730+01905 Europe/Budapest HU +4730+01905 Europe/Budapest
ID -0610+10648 Asia/Jakarta Java, Sumatra ID -0610+10648 Asia/Jakarta Java, Sumatra
ID -0002+10920 Asia/Pontianak Borneo (west, central) ID -0002+10920 Asia/Pontianak Borneo (west, central)
ID -0507+11924 Asia/Makassar Borneo (east, south); Sulawesi/Celebes, Bali, Nusa Tengarra; Timor (west) ID -0507+11924 Asia/Makassar Borneo (east, south), Sulawesi/Celebes, Bali, Nusa Tengarra, Timor (west)
ID -0232+14042 Asia/Jayapura New Guinea (West Papua / Irian Jaya); Malukus/Moluccas ID -0232+14042 Asia/Jayapura New Guinea (West Papua / Irian Jaya), Malukus/Moluccas
IE +5320-00615 Europe/Dublin IE +5320-00615 Europe/Dublin
IL +314650+0351326 Asia/Jerusalem IL +314650+0351326 Asia/Jerusalem
IM +5409-00428 Europe/Isle_of_Man IM +5409-00428 Europe/Isle_of_Man
@ -355,7 +355,7 @@ RU +4310+13156 Asia/Vladivostok MSK+07 - Amur River
RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky
RU +5934+15048 Asia/Magadan MSK+08 - Magadan RU +5934+15048 Asia/Magadan MSK+08 - Magadan
RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island
RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); N Kuril Is RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E), N Kuril Is
RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka
RU +6445+17729 Asia/Anadyr MSK+09 - Bering Sea RU +6445+17729 Asia/Anadyr MSK+09 - Bering Sea
RW -0157+03004 Africa/Kigali RW -0157+03004 Africa/Kigali
@ -418,7 +418,7 @@ US +470659-1011757 America/North_Dakota/Center Central - ND (Oliver)
US +465042-1012439 America/North_Dakota/New_Salem Central - ND (Morton rural) US +465042-1012439 America/North_Dakota/New_Salem Central - ND (Morton rural)
US +471551-1014640 America/North_Dakota/Beulah Central - ND (Mercer) US +471551-1014640 America/North_Dakota/Beulah Central - ND (Mercer)
US +394421-1045903 America/Denver Mountain (most areas) US +394421-1045903 America/Denver Mountain (most areas)
US +433649-1161209 America/Boise Mountain - ID (south); OR (east) US +433649-1161209 America/Boise Mountain - ID (south), OR (east)
US +332654-1120424 America/Phoenix MST - AZ (except Navajo) US +332654-1120424 America/Phoenix MST - AZ (except Navajo)
US +340308-1181434 America/Los_Angeles Pacific US +340308-1181434 America/Los_Angeles Pacific
US +611305-1495401 America/Anchorage Alaska (most areas) US +611305-1495401 America/Anchorage Alaska (most areas)

View file

@ -37,7 +37,7 @@
#country- #country-
#codes coordinates TZ comments #codes coordinates TZ comments
AD +4230+00131 Europe/Andorra AD +4230+00131 Europe/Andorra
AE,OM,RE,SC,TF +2518+05518 Asia/Dubai Crozet, Scattered Is AE,OM,RE,SC,TF +2518+05518 Asia/Dubai Crozet
AF +3431+06912 Asia/Kabul AF +3431+06912 Asia/Kabul
AL +4120+01950 Europe/Tirane AL +4120+01950 Europe/Tirane
AM +4011+04430 Asia/Yerevan AM +4011+04430 Asia/Yerevan
@ -47,12 +47,13 @@ AQ -6736+06253 Antarctica/Mawson Mawson
AQ -6448-06406 Antarctica/Palmer Palmer AQ -6448-06406 Antarctica/Palmer Palmer
AQ -6734-06808 Antarctica/Rothera Rothera AQ -6734-06808 Antarctica/Rothera Rothera
AQ -720041+0023206 Antarctica/Troll Troll AQ -720041+0023206 Antarctica/Troll Troll
AQ -7824+10654 Antarctica/Vostok Vostok
AR -3436-05827 America/Argentina/Buenos_Aires Buenos Aires (BA, CF) AR -3436-05827 America/Argentina/Buenos_Aires Buenos Aires (BA, CF)
AR -3124-06411 America/Argentina/Cordoba most areas: CB, CC, CN, ER, FM, MN, SE, SF AR -3124-06411 America/Argentina/Cordoba most areas: CB, CC, CN, ER, FM, MN, SE, SF
AR -2447-06525 America/Argentina/Salta Salta (SA, LP, NQ, RN) AR -2447-06525 America/Argentina/Salta Salta (SA, LP, NQ, RN)
AR -2411-06518 America/Argentina/Jujuy Jujuy (JY) AR -2411-06518 America/Argentina/Jujuy Jujuy (JY)
AR -2649-06513 America/Argentina/Tucuman Tucumán (TM) AR -2649-06513 America/Argentina/Tucuman Tucumán (TM)
AR -2828-06547 America/Argentina/Catamarca Catamarca (CT); Chubut (CH) AR -2828-06547 America/Argentina/Catamarca Catamarca (CT), Chubut (CH)
AR -2926-06651 America/Argentina/La_Rioja La Rioja (LR) AR -2926-06651 America/Argentina/La_Rioja La Rioja (LR)
AR -3132-06831 America/Argentina/San_Juan San Juan (SJ) AR -3132-06831 America/Argentina/San_Juan San Juan (SJ)
AR -3253-06849 America/Argentina/Mendoza Mendoza (MZ) AR -3253-06849 America/Argentina/Mendoza Mendoza (MZ)
@ -81,7 +82,7 @@ BG +4241+02319 Europe/Sofia
BM +3217-06446 Atlantic/Bermuda BM +3217-06446 Atlantic/Bermuda
BO -1630-06809 America/La_Paz BO -1630-06809 America/La_Paz
BR -0351-03225 America/Noronha Atlantic islands BR -0351-03225 America/Noronha Atlantic islands
BR -0127-04829 America/Belem Pará (east); Amapá BR -0127-04829 America/Belem Pará (east), Amapá
BR -0343-03830 America/Fortaleza Brazil (northeast: MA, PI, CE, RN, PB) BR -0343-03830 America/Fortaleza Brazil (northeast: MA, PI, CE, RN, PB)
BR -0803-03454 America/Recife Pernambuco BR -0803-03454 America/Recife Pernambuco
BR -0712-04812 America/Araguaina Tocantins BR -0712-04812 America/Araguaina Tocantins
@ -99,19 +100,19 @@ BR -0958-06748 America/Rio_Branco Acre
BT +2728+08939 Asia/Thimphu BT +2728+08939 Asia/Thimphu
BY +5354+02734 Europe/Minsk BY +5354+02734 Europe/Minsk
BZ +1730-08812 America/Belize BZ +1730-08812 America/Belize
CA +4734-05243 America/St_Johns Newfoundland; Labrador (southeast) CA +4734-05243 America/St_Johns Newfoundland, Labrador (SE)
CA +4439-06336 America/Halifax Atlantic - NS (most areas); PE CA +4439-06336 America/Halifax Atlantic - NS (most areas), PE
CA +4612-05957 America/Glace_Bay Atlantic - NS (Cape Breton) CA +4612-05957 America/Glace_Bay Atlantic - NS (Cape Breton)
CA +4606-06447 America/Moncton Atlantic - New Brunswick CA +4606-06447 America/Moncton Atlantic - New Brunswick
CA +5320-06025 America/Goose_Bay Atlantic - Labrador (most areas) CA +5320-06025 America/Goose_Bay Atlantic - Labrador (most areas)
CA,BS +4339-07923 America/Toronto Eastern - ON, QC (most areas) CA,BS +4339-07923 America/Toronto Eastern - ON & QC (most areas)
CA +6344-06828 America/Iqaluit Eastern - NU (most areas) CA +6344-06828 America/Iqaluit Eastern - NU (most areas)
CA +4953-09709 America/Winnipeg Central - ON (west); Manitoba CA +4953-09709 America/Winnipeg Central - ON (west), Manitoba
CA +744144-0944945 America/Resolute Central - NU (Resolute) CA +744144-0944945 America/Resolute Central - NU (Resolute)
CA +624900-0920459 America/Rankin_Inlet Central - NU (central) CA +624900-0920459 America/Rankin_Inlet Central - NU (central)
CA +5024-10439 America/Regina CST - SK (most areas) CA +5024-10439 America/Regina CST - SK (most areas)
CA +5017-10750 America/Swift_Current CST - SK (midwest) CA +5017-10750 America/Swift_Current CST - SK (midwest)
CA +5333-11328 America/Edmonton Mountain - AB; BC (E); NT (E); SK (W) CA +5333-11328 America/Edmonton Mountain - AB, BC(E), NT(E), SK(W)
CA +690650-1050310 America/Cambridge_Bay Mountain - NU (west) CA +690650-1050310 America/Cambridge_Bay Mountain - NU (west)
CA +682059-1334300 America/Inuvik Mountain - NT (west) CA +682059-1334300 America/Inuvik Mountain - NT (west)
CA +5546-12014 America/Dawson_Creek MST - BC (Dawson Cr, Ft St John) CA +5546-12014 America/Dawson_Creek MST - BC (Dawson Cr, Ft St John)
@ -126,7 +127,7 @@ CL -3327-07040 America/Santiago most of Chile
CL -5309-07055 America/Punta_Arenas Region of Magallanes CL -5309-07055 America/Punta_Arenas Region of Magallanes
CL -2709-10926 Pacific/Easter Easter Island CL -2709-10926 Pacific/Easter Easter Island
CN +3114+12128 Asia/Shanghai Beijing Time CN +3114+12128 Asia/Shanghai Beijing Time
CN,AQ +4348+08735 Asia/Urumqi Xinjiang Time, Vostok CN +4348+08735 Asia/Urumqi Xinjiang Time
CO +0436-07405 America/Bogota CO +0436-07405 America/Bogota
CR +0956-08405 America/Costa_Rica CR +0956-08405 America/Costa_Rica
CU +2308-08222 America/Havana CU +2308-08222 America/Havana
@ -171,8 +172,8 @@ HT +1832-07220 America/Port-au-Prince
HU +4730+01905 Europe/Budapest HU +4730+01905 Europe/Budapest
ID -0610+10648 Asia/Jakarta Java, Sumatra ID -0610+10648 Asia/Jakarta Java, Sumatra
ID -0002+10920 Asia/Pontianak Borneo (west, central) ID -0002+10920 Asia/Pontianak Borneo (west, central)
ID -0507+11924 Asia/Makassar Borneo (east, south); Sulawesi/Celebes, Bali, Nusa Tengarra; Timor (west) ID -0507+11924 Asia/Makassar Borneo (east, south), Sulawesi/Celebes, Bali, Nusa Tengarra, Timor (west)
ID -0232+14042 Asia/Jayapura New Guinea (West Papua / Irian Jaya); Malukus/Moluccas ID -0232+14042 Asia/Jayapura New Guinea (West Papua / Irian Jaya), Malukus/Moluccas
IE +5320-00615 Europe/Dublin IE +5320-00615 Europe/Dublin
IL +314650+0351326 Asia/Jerusalem IL +314650+0351326 Asia/Jerusalem
IN +2232+08822 Asia/Kolkata IN +2232+08822 Asia/Kolkata
@ -251,7 +252,7 @@ PK +2452+06703 Asia/Karachi
PL +5215+02100 Europe/Warsaw PL +5215+02100 Europe/Warsaw
PM +4703-05620 America/Miquelon PM +4703-05620 America/Miquelon
PN -2504-13005 Pacific/Pitcairn PN -2504-13005 Pacific/Pitcairn
PR,AG,CA,AI,AW,BL,BQ,CW,DM,GD,GP,KN,LC,MF,MS,SX,TT,VC,VG,VI +182806-0660622 America/Puerto_Rico AST PR,AG,CA,AI,AW,BL,BQ,CW,DM,GD,GP,KN,LC,MF,MS,SX,TT,VC,VG,VI +182806-0660622 America/Puerto_Rico AST - QC (Lower North Shore)
PS +3130+03428 Asia/Gaza Gaza Strip PS +3130+03428 Asia/Gaza Gaza Strip
PS +313200+0350542 Asia/Hebron West Bank PS +313200+0350542 Asia/Hebron West Bank
PT +3843-00908 Europe/Lisbon Portugal (mainland) PT +3843-00908 Europe/Lisbon Portugal (mainland)
@ -287,7 +288,7 @@ RU +4310+13156 Asia/Vladivostok MSK+07 - Amur River
RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky
RU +5934+15048 Asia/Magadan MSK+08 - Magadan RU +5934+15048 Asia/Magadan MSK+08 - Magadan
RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island
RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); N Kuril Is RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E), N Kuril Is
RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka
RU +6445+17729 Asia/Anadyr MSK+09 - Bering Sea RU +6445+17729 Asia/Anadyr MSK+09 - Bering Sea
SA,AQ,KW,YE +2438+04643 Asia/Riyadh Syowa SA,AQ,KW,YE +2438+04643 Asia/Riyadh Syowa
@ -329,7 +330,7 @@ US +470659-1011757 America/North_Dakota/Center Central - ND (Oliver)
US +465042-1012439 America/North_Dakota/New_Salem Central - ND (Morton rural) US +465042-1012439 America/North_Dakota/New_Salem Central - ND (Morton rural)
US +471551-1014640 America/North_Dakota/Beulah Central - ND (Mercer) US +471551-1014640 America/North_Dakota/Beulah Central - ND (Mercer)
US +394421-1045903 America/Denver Mountain (most areas) US +394421-1045903 America/Denver Mountain (most areas)
US +433649-1161209 America/Boise Mountain - ID (south); OR (east) US +433649-1161209 America/Boise Mountain - ID (south), OR (east)
US,CA +332654-1120424 America/Phoenix MST - AZ (most areas), Creston BC US,CA +332654-1120424 America/Phoenix MST - AZ (most areas), Creston BC
US +340308-1181434 America/Los_Angeles Pacific US +340308-1181434 America/Los_Angeles Pacific
US +611305-1495401 America/Anchorage Alaska (most areas) US +611305-1495401 America/Anchorage Alaska (most areas)

View file

@ -0,0 +1,303 @@
# tzdb timezone descriptions, for users who do not care about old timestamps
#
# This file is in the public domain.
#
# From Paul Eggert (2023-12-18):
# This file contains a table where each row stands for a timezone
# where civil timestamps are predicted to agree from now on.
# This file is like zone1970.tab (see zone1970.tab's comments),
# but with the following changes:
#
# 1. Each timezone corresponds to a set of clocks that are planned
# to agree from now on. This is a larger set of clocks than in
# zone1970.tab, where each timezone's clocks must agree from 1970 on.
# 2. The first column is irrelevant and ignored.
# 3. The table is sorted in a different way:
# first by standard time UTC offset;
# then, if DST is used, by daylight saving UTC offset;
# then by time zone abbreviation.
# 4. Every timezone has a nonempty comments column, with wording
# distinguishing the timezone only from other timezones with the
# same UTC offset at some point during the year.
#
# The format of this table is experimental, and may change in future versions.
#
# This table is intended as an aid for users, to help them select timezones
# appropriate for their practical needs. It is not intended to take or
# endorse any position on legal or territorial claims.
#
#XX coordinates TZ comments
#
# -11 - SST
XX -1416-17042 Pacific/Pago_Pago Midway; Samoa ("SST")
#
# -11
XX -1901-16955 Pacific/Niue Niue
#
# -10 - HST
XX +211825-1575130 Pacific/Honolulu Hawaii ("HST")
#
# -10
XX -1732-14934 Pacific/Tahiti Tahiti; Cook Islands
#
# -10/-09 - HST / HDT (North America DST)
XX +515248-1763929 America/Adak western Aleutians in Alaska ("HST/HDT")
#
# -09:30
XX -0900-13930 Pacific/Marquesas Marquesas
#
# -09
XX -2308-13457 Pacific/Gambier Gambier
#
# -09/-08 - AKST/AKDT (North America DST)
XX +611305-1495401 America/Anchorage most of Alaska ("AKST/AKDT")
#
# -08
XX -2504-13005 Pacific/Pitcairn Pitcairn
#
# -08/-07 - PST/PDT (North America DST)
XX +340308-1181434 America/Los_Angeles Pacific ("PST/PDT") - US & Canada; Mexico near US border
#
# -07 - MST
XX +332654-1120424 America/Phoenix Mountain Standard ("MST") - Arizona; western Mexico; Yukon
#
# -07/-06 - MST/MDT (North America DST)
XX +394421-1045903 America/Denver Mountain ("MST/MDT") - US & Canada; Mexico near US border
#
# -06
XX -0054-08936 Pacific/Galapagos Galápagos
#
# -06 - CST
XX +1924-09909 America/Mexico_City Central Standard ("CST") - Saskatchewan; central Mexico; Central America
#
# -06/-05 (Chile DST)
XX -2709-10926 Pacific/Easter Easter Island
#
# -06/-05 - CST/CDT (North America DST)
XX +415100-0873900 America/Chicago Central ("CST/CDT") - US & Canada; Mexico near US border
#
# -05
XX -1203-07703 America/Lima eastern South America
#
# -05 - EST
XX +175805-0764736 America/Jamaica Eastern Standard ("EST") - Caymans; Jamaica; eastern Mexico; Panama
#
# -05/-04 - CST/CDT (Cuba DST)
XX +2308-08222 America/Havana Cuba
#
# -05/-04 - EST/EDT (North America DST)
XX +404251-0740023 America/New_York Eastern ("EST/EDT") - US & Canada
#
# -04
XX +1030-06656 America/Caracas western South America
#
# -04 - AST
XX +1828-06954 America/Santo_Domingo Atlantic Standard ("AST") - eastern Caribbean
#
# -04/-03 (Chile DST)
XX -3327-07040 America/Santiago most of Chile
#
# -04/-03 (Paraguay DST)
XX -2516-05740 America/Asuncion Paraguay
#
# -04/-03 - AST/ADT (North America DST)
XX +4439-06336 America/Halifax Atlantic ("AST/ADT") - Canada; Bermuda
#
# -03:30/-02:30 - NST/NDT (North America DST)
XX +4734-05243 America/St_Johns Newfoundland ("NST/NDT")
#
# -03
XX -2332-04637 America/Sao_Paulo eastern South America
#
# -03/-02 (North America DST)
XX +4703-05620 America/Miquelon St Pierre & Miquelon
#
# -02
XX -0351-03225 America/Noronha Fernando de Noronha; South Georgia
#
# -02/-01 (EU DST)
XX +6411-05144 America/Nuuk most of Greenland
#
# -01
XX +1455-02331 Atlantic/Cape_Verde Cape Verde
#
# -01/+00 (EU DST)
XX +3744-02540 Atlantic/Azores Azores
# -01/+00 (EU DST) until 2024-03-31; then -02/-01 (EU DST)
XX +7029-02158 America/Scoresbysund Ittoqqortoormiit
#
# +00 - GMT
XX +0519-00402 Africa/Abidjan far western Africa; Iceland ("GMT")
#
# +00/+01 - GMT/BST (EU DST)
XX +513030-0000731 Europe/London United Kingdom ("GMT/BST")
#
# +00/+01 - WET/WEST (EU DST)
XX +3843-00908 Europe/Lisbon western Europe ("WET/WEST")
#
# +00/+02 - Troll DST
XX -720041+0023206 Antarctica/Troll Troll Station in Antarctica
#
# +01 - CET
XX +3647+00303 Africa/Algiers Algeria, Tunisia ("CET")
#
# +01 - WAT
XX +0627+00324 Africa/Lagos western Africa ("WAT")
#
# +01/+00 - IST/GMT (EU DST in reverse)
XX +5320-00615 Europe/Dublin Ireland ("IST/GMT")
#
# +01/+00 - (Morocco DST)
XX +3339-00735 Africa/Casablanca Morocco
#
# +01/+02 - CET/CEST (EU DST)
XX +4852+00220 Europe/Paris central Europe ("CET/CEST")
#
# +02 - CAT
XX -2558+03235 Africa/Maputo central Africa ("CAT")
#
# +02 - EET
XX +3254+01311 Africa/Tripoli Libya; Kaliningrad ("EET")
#
# +02 - SAST
XX -2615+02800 Africa/Johannesburg southern Africa ("SAST")
#
# +02/+03 - EET/EEST (EU DST)
XX +3758+02343 Europe/Athens eastern Europe ("EET/EEST")
#
# +02/+03 - EET/EEST (Egypt DST)
XX +3003+03115 Africa/Cairo Egypt
#
# +02/+03 - EET/EEST (Lebanon DST)
XX +3353+03530 Asia/Beirut Lebanon
#
# +02/+03 - EET/EEST (Moldova DST)
XX +4700+02850 Europe/Chisinau Moldova
#
# +02/+03 - EET/EEST (Palestine DST)
XX +3130+03428 Asia/Gaza Palestine
#
# +02/+03 - IST/IDT (Israel DST)
XX +314650+0351326 Asia/Jerusalem Israel
#
# +03
XX +4101+02858 Europe/Istanbul Near East; Belarus
#
# +03 - EAT
XX -0117+03649 Africa/Nairobi eastern Africa ("EAT")
#
# +03 - MSK
XX +554521+0373704 Europe/Moscow Moscow ("MSK")
#
# +03:30
XX +3540+05126 Asia/Tehran Iran
#
# +04
XX +2518+05518 Asia/Dubai Russia; Caucasus; Persian Gulf; Seychelles; Réunion
#
# +04:30
XX +3431+06912 Asia/Kabul Afghanistan
#
# +05
XX +4120+06918 Asia/Tashkent Russia; west Kazakhstan; Tajikistan; Turkmenistan; Uzbekistan; Maldives
#
# +05 - PKT
XX +2452+06703 Asia/Karachi Pakistan ("PKT")
#
# +05:30
XX +0656+07951 Asia/Colombo Sri Lanka
#
# +05:30 - IST
XX +2232+08822 Asia/Kolkata India ("IST")
#
# +05:45
XX +2743+08519 Asia/Kathmandu Nepal
#
# +06
XX +2343+09025 Asia/Dhaka Russia; Kyrgyzstan; Bhutan; Bangladesh; Chagos
# +06 until 2024-03-01; then +05
XX +4315+07657 Asia/Almaty Kazakhstan (except western areas)
#
# +06:30
XX +1647+09610 Asia/Yangon Myanmar; Cocos
#
# +07
XX +1345+10031 Asia/Bangkok Russia; Indochina; Christmas Island
#
# +07 - WIB
XX -0610+10648 Asia/Jakarta Indonesia ("WIB")
#
# +08
XX +0117+10351 Asia/Singapore Russia; Brunei; Malaysia; Singapore
#
# +08 - AWST
XX -3157+11551 Australia/Perth Western Australia ("AWST")
#
# +08 - CST
XX +3114+12128 Asia/Shanghai China ("CST")
#
# +08 - HKT
XX +2217+11409 Asia/Hong_Kong Hong Kong ("HKT")
#
# +08 - PHT
XX +1435+12100 Asia/Manila Philippines ("PHT")
#
# +08 - WITA
XX -0507+11924 Asia/Makassar Indonesia ("WITA")
#
# +08:45
XX -3143+12852 Australia/Eucla Eucla
#
# +09
XX +5203+11328 Asia/Chita Russia; Palau; East Timor
#
# +09 - JST
XX +353916+1394441 Asia/Tokyo Japan ("JST")
#
# +09 - KST
XX +3733+12658 Asia/Seoul Korea ("KST")
#
# +09 - WIT
XX -0232+14042 Asia/Jayapura Indonesia ("WIT")
#
# +09:30 - ACST
XX -1228+13050 Australia/Darwin Northern Territory ("ACST")
#
# +09:30/+10:30 - ACST/ACDT (Australia DST)
XX -3455+13835 Australia/Adelaide South Australia ("ACST/ACDT")
#
# +10
XX +4310+13156 Asia/Vladivostok Russia; Yap; Chuuk; Papua New Guinea; Dumont d'Urville
#
# +10 - AEST
XX -2728+15302 Australia/Brisbane Queensland ("AEST")
#
# +10 - ChST
XX +1328+14445 Pacific/Guam Mariana Islands ("ChST")
#
# +10/+11 - AEST/AEDT (Australia DST)
XX -3352+15113 Australia/Sydney southeast Australia ("AEST/AEDT")
#
# +10:30/+11
XX -3133+15905 Australia/Lord_Howe Lord Howe Island
#
# +11
XX -0613+15534 Pacific/Bougainville Russia; Kosrae; Bougainville; Solomons
#
# +11/+12 (Australia DST)
XX -2903+16758 Pacific/Norfolk Norfolk Island
#
# +12
XX +5301+15839 Asia/Kamchatka Russia; Tuvalu; Fiji; etc.
#
# +12/+13 (New Zealand DST)
XX -3652+17446 Pacific/Auckland New Zealand ("NZST/NZDT")
#
# +12:45/+13:45 (Chatham DST)
XX -4357-17633 Pacific/Chatham Chatham Islands
#
# +13
XX -210800-1751200 Pacific/Tongatapu Kanton; Tokelau; Samoa (western); Tonga
#
# +14
XX +0152-15720 Pacific/Kiritimati Kiritimati
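
As an aside, the new zonenow.tab keeps the zone1970.tab column layout described in its header. A hedged Python sketch of reading it, assuming tab-separated columns and a nonempty comments field on every row (as the header states), with a local copy of the file:

    def read_zonenow(path):
        rows = []
        with open(path, encoding="utf-8") as f:
            for line in f:
                line = line.rstrip("\n")
                if not line or line.startswith("#"):
                    continue
                # columns: ignored code, coordinates, TZ, comments
                code, coordinates, tz, comments = line.split("\t", 3)
                rows.append((coordinates, tz, comments))
        return rows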

View file

@ -118,7 +118,7 @@ Serializing multiple objects to JSON lines (newline-delimited JSON)::
""" """
from __future__ import absolute_import from __future__ import absolute_import
__version__ = '3.19.1' __version__ = '3.19.2'
__all__ = [ __all__ = [
'dump', 'dumps', 'load', 'loads', 'dump', 'dumps', 'load', 'loads',
'JSONDecoder', 'JSONDecodeError', 'JSONEncoder', 'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',

View file

@ -23,4 +23,4 @@ from ._exceptions import *
from ._logging import * from ._logging import *
from ._socket import * from ._socket import *
__version__ = "1.6.2" __version__ = "1.7.0"

View file

@ -2,10 +2,11 @@ import array
import os import os
import struct import struct
import sys import sys
from threading import Lock
from typing import Callable, Optional, Union
from ._exceptions import * from ._exceptions import *
from ._utils import validate_utf8 from ._utils import validate_utf8
from threading import Lock
""" """
_abnf.py _abnf.py
@ -33,8 +34,9 @@ try:
# Note that wsaccel is unmaintained. # Note that wsaccel is unmaintained.
from wsaccel.xormask import XorMaskerSimple from wsaccel.xormask import XorMaskerSimple
def _mask(_m, _d) -> bytes: def _mask(mask_value: array.array, data_value: array.array) -> bytes:
return XorMaskerSimple(_m).process(_d) mask_result: bytes = XorMaskerSimple(mask_value).process(data_value)
return mask_result
except ImportError: except ImportError:
# wsaccel is not available, use websocket-client _mask() # wsaccel is not available, use websocket-client _mask()
@ -42,26 +44,30 @@ except ImportError:
def _mask(mask_value: array.array, data_value: array.array) -> bytes: def _mask(mask_value: array.array, data_value: array.array) -> bytes:
datalen = len(data_value) datalen = len(data_value)
data_value = int.from_bytes(data_value, native_byteorder) int_data_value = int.from_bytes(data_value, native_byteorder)
mask_value = int.from_bytes(mask_value * (datalen // 4) + mask_value[: datalen % 4], native_byteorder) int_mask_value = int.from_bytes(
return (data_value ^ mask_value).to_bytes(datalen, native_byteorder) mask_value * (datalen // 4) + mask_value[: datalen % 4], native_byteorder
)
return (int_data_value ^ int_mask_value).to_bytes(datalen, native_byteorder)
__all__ = [ __all__ = [
'ABNF', 'continuous_frame', 'frame_buffer', "ABNF",
'STATUS_NORMAL', "continuous_frame",
'STATUS_GOING_AWAY', "frame_buffer",
'STATUS_PROTOCOL_ERROR', "STATUS_NORMAL",
'STATUS_UNSUPPORTED_DATA_TYPE', "STATUS_GOING_AWAY",
'STATUS_STATUS_NOT_AVAILABLE', "STATUS_PROTOCOL_ERROR",
'STATUS_ABNORMAL_CLOSED', "STATUS_UNSUPPORTED_DATA_TYPE",
'STATUS_INVALID_PAYLOAD', "STATUS_STATUS_NOT_AVAILABLE",
'STATUS_POLICY_VIOLATION', "STATUS_ABNORMAL_CLOSED",
'STATUS_MESSAGE_TOO_BIG', "STATUS_INVALID_PAYLOAD",
'STATUS_INVALID_EXTENSION', "STATUS_POLICY_VIOLATION",
'STATUS_UNEXPECTED_CONDITION', "STATUS_MESSAGE_TOO_BIG",
'STATUS_BAD_GATEWAY', "STATUS_INVALID_EXTENSION",
'STATUS_TLS_HANDSHAKE_ERROR', "STATUS_UNEXPECTED_CONDITION",
"STATUS_BAD_GATEWAY",
"STATUS_TLS_HANDSHAKE_ERROR",
] ]
# closing frame status codes. # closing frame status codes.
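
The pure-Python fallback for _mask() above XORs every payload byte with the repeating 4-byte masking key, doing the work on big integers for speed. A self-contained sketch of the same routine (names are local to the example):

    import array
    import sys

    native_byteorder = sys.byteorder

    def xor_mask(mask_value: array.array, data_value: array.array) -> bytes:
        datalen = len(data_value)
        int_data = int.from_bytes(data_value, native_byteorder)
        int_mask = int.from_bytes(
            mask_value * (datalen // 4) + mask_value[: datalen % 4], native_byteorder
        )
        return (int_data ^ int_mask).to_bytes(datalen, native_byteorder)

    masked = xor_mask(array.array("B", b"abcd"), array.array("B", b"hello"))
    # masking is its own inverse, so applying it twice restores the payload
    print(xor_mask(array.array("B", b"abcd"), array.array("B", masked)))  # b'hello'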
@ -110,11 +116,17 @@ class ABNF:
OPCODE_BINARY = 0x2 OPCODE_BINARY = 0x2
OPCODE_CLOSE = 0x8 OPCODE_CLOSE = 0x8
OPCODE_PING = 0x9 OPCODE_PING = 0x9
OPCODE_PONG = 0xa OPCODE_PONG = 0xA
# available operation code value tuple # available operation code value tuple
OPCODES = (OPCODE_CONT, OPCODE_TEXT, OPCODE_BINARY, OPCODE_CLOSE, OPCODES = (
OPCODE_PING, OPCODE_PONG) OPCODE_CONT,
OPCODE_TEXT,
OPCODE_BINARY,
OPCODE_CLOSE,
OPCODE_PING,
OPCODE_PONG,
)
# opcode human readable string # opcode human readable string
OPCODE_MAP = { OPCODE_MAP = {
@ -123,16 +135,24 @@ class ABNF:
OPCODE_BINARY: "binary", OPCODE_BINARY: "binary",
OPCODE_CLOSE: "close", OPCODE_CLOSE: "close",
OPCODE_PING: "ping", OPCODE_PING: "ping",
OPCODE_PONG: "pong" OPCODE_PONG: "pong",
} }
# data length threshold. # data length threshold.
LENGTH_7 = 0x7e LENGTH_7 = 0x7E
LENGTH_16 = 1 << 16 LENGTH_16 = 1 << 16
LENGTH_63 = 1 << 63 LENGTH_63 = 1 << 63
def __init__(self, fin: int = 0, rsv1: int = 0, rsv2: int = 0, rsv3: int = 0, def __init__(
opcode: int = OPCODE_TEXT, mask: int = 1, data: str or bytes = "") -> None: self,
fin: int = 0,
rsv1: int = 0,
rsv2: int = 0,
rsv3: int = 0,
opcode: int = OPCODE_TEXT,
mask_value: int = 1,
data: Union[str, bytes, None] = "",
) -> None:
""" """
Constructor for ABNF. Please check RFC for arguments. Constructor for ABNF. Please check RFC for arguments.
""" """
@ -141,7 +161,7 @@ class ABNF:
self.rsv2 = rsv2 self.rsv2 = rsv2
self.rsv3 = rsv3 self.rsv3 = rsv3
self.opcode = opcode self.opcode = opcode
self.mask = mask self.mask_value = mask_value
if data is None: if data is None:
data = "" data = ""
self.data = data self.data = data
@ -173,7 +193,7 @@ class ABNF:
if l > 2 and not skip_utf8_validation and not validate_utf8(self.data[2:]): if l > 2 and not skip_utf8_validation and not validate_utf8(self.data[2:]):
raise WebSocketProtocolException("Invalid close frame.") raise WebSocketProtocolException("Invalid close frame.")
code = 256 * self.data[0] + self.data[1] code = 256 * int(self.data[0]) + int(self.data[1])
if not self._is_valid_close_status(code): if not self._is_valid_close_status(code):
raise WebSocketProtocolException("Invalid close opcode %r", code) raise WebSocketProtocolException("Invalid close opcode %r", code)
@ -182,12 +202,10 @@ class ABNF:
return code in VALID_CLOSE_STATUS or (3000 <= code < 5000) return code in VALID_CLOSE_STATUS or (3000 <= code < 5000)
def __str__(self) -> str: def __str__(self) -> str:
return "fin=" + str(self.fin) \ return f"fin={self.fin} opcode={self.opcode} data={self.data}"
+ " opcode=" + str(self.opcode) \
+ " data=" + str(self.data)
@staticmethod @staticmethod
def create_frame(data: str, opcode: int, fin: int = 1) -> 'ABNF': def create_frame(data: Union[bytes, str], opcode: int, fin: int = 1) -> "ABNF":
""" """
Create frame to send text, binary and other data. Create frame to send text, binary and other data.
@ -219,34 +237,39 @@ class ABNF:
if length >= ABNF.LENGTH_63: if length >= ABNF.LENGTH_63:
raise ValueError("data is too long") raise ValueError("data is too long")
frame_header = chr(self.fin << 7 | frame_header = chr(
self.rsv1 << 6 | self.rsv2 << 5 | self.rsv3 << 4 | self.fin << 7
self.opcode).encode('latin-1') | self.rsv1 << 6
| self.rsv2 << 5
| self.rsv3 << 4
| self.opcode
).encode("latin-1")
if length < ABNF.LENGTH_7: if length < ABNF.LENGTH_7:
frame_header += chr(self.mask << 7 | length).encode('latin-1') frame_header += chr(self.mask_value << 7 | length).encode("latin-1")
elif length < ABNF.LENGTH_16: elif length < ABNF.LENGTH_16:
frame_header += chr(self.mask << 7 | 0x7e).encode('latin-1') frame_header += chr(self.mask_value << 7 | 0x7E).encode("latin-1")
frame_header += struct.pack("!H", length) frame_header += struct.pack("!H", length)
else: else:
frame_header += chr(self.mask << 7 | 0x7f).encode('latin-1') frame_header += chr(self.mask_value << 7 | 0x7F).encode("latin-1")
frame_header += struct.pack("!Q", length) frame_header += struct.pack("!Q", length)
if not self.mask: if not self.mask_value:
if isinstance(self.data, str):
self.data = self.data.encode("utf-8")
return frame_header + self.data return frame_header + self.data
else: mask_key = self.get_mask_key(4)
mask_key = self.get_mask_key(4) return frame_header + self._get_masked(mask_key)
return frame_header + self._get_masked(mask_key)
def _get_masked(self, mask_key: str or bytes) -> bytes: def _get_masked(self, mask_key: Union[str, bytes]) -> bytes:
s = ABNF.mask(mask_key, self.data) s = ABNF.mask(mask_key, self.data)
if isinstance(mask_key, str): if isinstance(mask_key, str):
mask_key = mask_key.encode('utf-8') mask_key = mask_key.encode("utf-8")
return mask_key + s return mask_key + s
@staticmethod @staticmethod
def mask(mask_key: str or bytes, data: str or bytes) -> bytes: def mask(mask_key: Union[str, bytes], data: Union[str, bytes]) -> bytes:
""" """
Mask or unmask data. Just do xor for each byte Mask or unmask data. Just do xor for each byte
@ -261,10 +284,10 @@ class ABNF:
data = "" data = ""
if isinstance(mask_key, str): if isinstance(mask_key, str):
mask_key = mask_key.encode('latin-1') mask_key = mask_key.encode("latin-1")
if isinstance(data, str): if isinstance(data, str):
data = data.encode('latin-1') data = data.encode("latin-1")
return _mask(array.array("B", mask_key), array.array("B", data)) return _mask(array.array("B", mask_key), array.array("B", data))
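
For reference, format() above packs the first two header bytes as fin/rsv/opcode and mask-bit/length. A worked example for an unmasked 5-byte text frame:

    fin, rsv1, rsv2, rsv3, opcode = 1, 0, 0, 0, 0x1   # final text frame
    mask_bit, length = 0, 5                           # unmasked, 5-byte payload

    byte0 = fin << 7 | rsv1 << 6 | rsv2 << 5 | rsv3 << 4 | opcode
    byte1 = mask_bit << 7 | length                    # length below LENGTH_7 (0x7E)
    assert bytes([byte0, byte1]) == b"\x81\x05"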
@ -273,19 +296,21 @@ class frame_buffer:
_HEADER_MASK_INDEX = 5 _HEADER_MASK_INDEX = 5
_HEADER_LENGTH_INDEX = 6 _HEADER_LENGTH_INDEX = 6
def __init__(self, recv_fn: int, skip_utf8_validation: bool) -> None: def __init__(
self, recv_fn: Callable[[int], int], skip_utf8_validation: bool
) -> None:
self.recv = recv_fn self.recv = recv_fn
self.skip_utf8_validation = skip_utf8_validation self.skip_utf8_validation = skip_utf8_validation
# Buffers over the packets from the layer beneath until desired amount # Buffers over the packets from the layer beneath until desired amount
# bytes of bytes are received. # bytes of bytes are received.
self.recv_buffer = [] self.recv_buffer: list = []
self.clear() self.clear()
self.lock = Lock() self.lock = Lock()
def clear(self) -> None: def clear(self) -> None:
self.header = None self.header: Optional[tuple] = None
self.length = None self.length: Optional[int] = None
self.mask = None self.mask_value: Union[bytes, str, None] = None
def has_received_header(self) -> bool: def has_received_header(self) -> bool:
return self.header is None return self.header is None
@ -297,41 +322,41 @@ class frame_buffer:
rsv1 = b1 >> 6 & 1 rsv1 = b1 >> 6 & 1
rsv2 = b1 >> 5 & 1 rsv2 = b1 >> 5 & 1
rsv3 = b1 >> 4 & 1 rsv3 = b1 >> 4 & 1
opcode = b1 & 0xf opcode = b1 & 0xF
b2 = header[1] b2 = header[1]
has_mask = b2 >> 7 & 1 has_mask = b2 >> 7 & 1
length_bits = b2 & 0x7f length_bits = b2 & 0x7F
self.header = (fin, rsv1, rsv2, rsv3, opcode, has_mask, length_bits) self.header = (fin, rsv1, rsv2, rsv3, opcode, has_mask, length_bits)
def has_mask(self) -> bool or int: def has_mask(self) -> Union[bool, int]:
if not self.header: if not self.header:
return False return False
return self.header[frame_buffer._HEADER_MASK_INDEX] header_val: int = self.header[frame_buffer._HEADER_MASK_INDEX]
return header_val
def has_received_length(self) -> bool: def has_received_length(self) -> bool:
return self.length is None return self.length is None
def recv_length(self) -> None: def recv_length(self) -> None:
bits = self.header[frame_buffer._HEADER_LENGTH_INDEX] bits = self.header[frame_buffer._HEADER_LENGTH_INDEX]
length_bits = bits & 0x7f length_bits = bits & 0x7F
if length_bits == 0x7e: if length_bits == 0x7E:
v = self.recv_strict(2) v = self.recv_strict(2)
self.length = struct.unpack("!H", v)[0] self.length = struct.unpack("!H", v)[0]
elif length_bits == 0x7f: elif length_bits == 0x7F:
v = self.recv_strict(8) v = self.recv_strict(8)
self.length = struct.unpack("!Q", v)[0] self.length = struct.unpack("!Q", v)[0]
else: else:
self.length = length_bits self.length = length_bits
def has_received_mask(self) -> bool: def has_received_mask(self) -> bool:
return self.mask is None return self.mask_value is None
def recv_mask(self) -> None: def recv_mask(self) -> None:
self.mask = self.recv_strict(4) if self.has_mask() else "" self.mask_value = self.recv_strict(4) if self.has_mask() else ""
def recv_frame(self) -> ABNF: def recv_frame(self) -> ABNF:
with self.lock: with self.lock:
# Header # Header
if self.has_received_header(): if self.has_received_header():
@ -346,12 +371,12 @@ class frame_buffer:
# Mask # Mask
if self.has_received_mask(): if self.has_received_mask():
self.recv_mask() self.recv_mask()
mask = self.mask mask_value = self.mask_value
# Payload # Payload
payload = self.recv_strict(length) payload = self.recv_strict(length)
if has_mask: if has_mask:
payload = ABNF.mask(mask, payload) payload = ABNF.mask(mask_value, payload)
# Reset for next frame # Reset for next frame
self.clear() self.clear()
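
Going the other way, recv_header() and recv_length() above recover those fields by bit-masking the first two bytes; for example, for header bytes 0x81 0x85 (final text frame, masked, 5-byte payload):

    b1, b2 = 0x81, 0x85
    fin = b1 >> 7 & 1
    opcode = b1 & 0xF
    has_mask = b2 >> 7 & 1
    length_bits = b2 & 0x7F
    print(fin, hex(opcode), has_mask, length_bits)  # 1 0x1 1 5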
@ -385,18 +410,19 @@ class frame_buffer:
class continuous_frame: class continuous_frame:
def __init__(self, fire_cont_frame: bool, skip_utf8_validation: bool) -> None: def __init__(self, fire_cont_frame: bool, skip_utf8_validation: bool) -> None:
self.fire_cont_frame = fire_cont_frame self.fire_cont_frame = fire_cont_frame
self.skip_utf8_validation = skip_utf8_validation self.skip_utf8_validation = skip_utf8_validation
self.cont_data = None self.cont_data: Optional[list] = None
self.recving_frames = None self.recving_frames: Optional[int] = None
def validate(self, frame: ABNF) -> None: def validate(self, frame: ABNF) -> None:
if not self.recving_frames and frame.opcode == ABNF.OPCODE_CONT: if not self.recving_frames and frame.opcode == ABNF.OPCODE_CONT:
raise WebSocketProtocolException("Illegal frame") raise WebSocketProtocolException("Illegal frame")
if self.recving_frames and \ if self.recving_frames and frame.opcode in (
frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY): ABNF.OPCODE_TEXT,
ABNF.OPCODE_BINARY,
):
raise WebSocketProtocolException("Illegal frame") raise WebSocketProtocolException("Illegal frame")
def add(self, frame: ABNF) -> None: def add(self, frame: ABNF) -> None:
@ -410,15 +436,18 @@ class continuous_frame:
if frame.fin: if frame.fin:
self.recving_frames = None self.recving_frames = None
def is_fire(self, frame: ABNF) -> bool or int: def is_fire(self, frame: ABNF) -> Union[bool, int]:
return frame.fin or self.fire_cont_frame return frame.fin or self.fire_cont_frame
def extract(self, frame: ABNF) -> list: def extract(self, frame: ABNF) -> tuple:
data = self.cont_data data = self.cont_data
self.cont_data = None self.cont_data = None
frame.data = data[1] frame.data = data[1]
if not self.fire_cont_frame and data[0] == ABNF.OPCODE_TEXT and not self.skip_utf8_validation and not validate_utf8(frame.data): if (
raise WebSocketPayloadException( not self.fire_cont_frame
"cannot decode: " + repr(frame.data)) and data[0] == ABNF.OPCODE_TEXT
and not self.skip_utf8_validation
return [data[0], frame] and not validate_utf8(frame.data)
):
raise WebSocketPayloadException(f"cannot decode: {repr(frame.data)}")
return data[0], frame

View file

@ -1,18 +1,19 @@
import inspect import inspect
import selectors import selectors
import sys import socket
import threading import threading
import time import time
import traceback from typing import Any, Callable, Optional, Union
import socket
from typing import Callable, Any
from . import _logging from . import _logging
from ._abnf import ABNF from ._abnf import ABNF
from ._url import parse_url
from ._core import WebSocket, getdefaulttimeout from ._core import WebSocket, getdefaulttimeout
from ._exceptions import * from ._exceptions import (
WebSocketConnectionClosedException,
WebSocketException,
WebSocketTimeoutException,
)
from ._url import parse_url
""" """
_app.py _app.py
@ -47,22 +48,24 @@ class DispatcherBase:
""" """
DispatcherBase DispatcherBase
""" """
def __init__(self, app: Any, ping_timeout: float) -> None:
def __init__(self, app: Any, ping_timeout: Union[float, int, None]) -> None:
self.app = app self.app = app
self.ping_timeout = ping_timeout self.ping_timeout = ping_timeout
def timeout(self, seconds: int, callback: Callable) -> None: def timeout(self, seconds: Union[float, int, None], callback: Callable) -> None:
time.sleep(seconds) time.sleep(seconds)
callback() callback()
def reconnect(self, seconds: int, reconnector: Callable) -> None: def reconnect(self, seconds: int, reconnector: Callable) -> None:
try: try:
_logging.info("reconnect() - retrying in {seconds_count} seconds [{frame_count} frames in stack]".format( _logging.info(
seconds_count=seconds, frame_count=len(inspect.stack()))) f"reconnect() - retrying in {seconds} seconds [{len(inspect.stack())} frames in stack]"
)
time.sleep(seconds) time.sleep(seconds)
reconnector(reconnecting=True) reconnector(reconnecting=True)
except KeyboardInterrupt as e: except KeyboardInterrupt as e:
_logging.info("User exited {err}".format(err=e)) _logging.info(f"User exited {e}")
raise e raise e
@ -70,13 +73,18 @@ class Dispatcher(DispatcherBase):
""" """
Dispatcher Dispatcher
""" """
def read(self, sock: socket.socket, read_callback: Callable, check_callback: Callable) -> None:
def read(
self,
sock: socket.socket,
read_callback: Callable,
check_callback: Callable,
) -> None:
sel = selectors.DefaultSelector() sel = selectors.DefaultSelector()
sel.register(self.app.sock.sock, selectors.EVENT_READ) sel.register(self.app.sock.sock, selectors.EVENT_READ)
try: try:
while self.app.keep_running: while self.app.keep_running:
r = sel.select(self.ping_timeout) if sel.select(self.ping_timeout):
if r:
if not read_callback(): if not read_callback():
break break
check_callback() check_callback()
@ -88,24 +96,31 @@ class SSLDispatcher(DispatcherBase):
""" """
SSLDispatcher SSLDispatcher
""" """
def read(self, sock: socket.socket, read_callback: Callable, check_callback: Callable) -> None:
def read(
self,
sock: socket.socket,
read_callback: Callable,
check_callback: Callable,
) -> None:
sock = self.app.sock.sock sock = self.app.sock.sock
sel = selectors.DefaultSelector() sel = selectors.DefaultSelector()
sel.register(sock, selectors.EVENT_READ) sel.register(sock, selectors.EVENT_READ)
try: try:
while self.app.keep_running: while self.app.keep_running:
r = self.select(sock, sel) if self.select(sock, sel):
if r:
if not read_callback(): if not read_callback():
break break
check_callback() check_callback()
finally: finally:
sel.close() sel.close()
def select(self, sock, sel:selectors.DefaultSelector): def select(self, sock, sel: selectors.DefaultSelector):
sock = self.app.sock.sock sock = self.app.sock.sock
if sock.pending(): if sock.pending():
return [sock,] return [
sock,
]
r = sel.select(self.ping_timeout) r = sel.select(self.ping_timeout)
@ -117,17 +132,23 @@ class WrappedDispatcher:
""" """
WrappedDispatcher WrappedDispatcher
""" """
def __init__(self, app, ping_timeout: float, dispatcher: Dispatcher) -> None:
def __init__(self, app, ping_timeout: Union[float, int, None], dispatcher) -> None:
self.app = app self.app = app
self.ping_timeout = ping_timeout self.ping_timeout = ping_timeout
self.dispatcher = dispatcher self.dispatcher = dispatcher
dispatcher.signal(2, dispatcher.abort) # keyboard interrupt dispatcher.signal(2, dispatcher.abort) # keyboard interrupt
def read(self, sock: socket.socket, read_callback: Callable, check_callback: Callable) -> None: def read(
self,
sock: socket.socket,
read_callback: Callable,
check_callback: Callable,
) -> None:
self.dispatcher.read(sock, read_callback) self.dispatcher.read(sock, read_callback)
self.ping_timeout and self.timeout(self.ping_timeout, check_callback) self.ping_timeout and self.timeout(self.ping_timeout, check_callback)
def timeout(self, seconds: int, callback: Callable) -> None: def timeout(self, seconds: float, callback: Callable) -> None:
self.dispatcher.timeout(seconds, callback) self.dispatcher.timeout(seconds, callback)
def reconnect(self, seconds: int, reconnector: Callable) -> None: def reconnect(self, seconds: int, reconnector: Callable) -> None:
@ -139,14 +160,24 @@ class WebSocketApp:
Higher level of APIs are provided. The interface is like JavaScript WebSocket object. Higher level of APIs are provided. The interface is like JavaScript WebSocket object.
""" """
def __init__(self, url: str, header: list or dict or Callable = None, def __init__(
on_open: Callable = None, on_message: Callable = None, on_error: Callable = None, self,
on_close: Callable = None, on_ping: Callable = None, on_pong: Callable = None, url: str,
on_cont_message: Callable = None, header: Union[list, dict, Callable, None] = None,
keep_running: bool = True, get_mask_key: Callable = None, cookie: str = None, on_open: Optional[Callable[[WebSocket], None]] = None,
subprotocols: list = None, on_message: Optional[Callable[[WebSocket, Any], None]] = None,
on_data: Callable = None, on_error: Optional[Callable[[WebSocket, Any], None]] = None,
socket: socket.socket = None) -> None: on_close: Optional[Callable[[WebSocket, Any, Any], None]] = None,
on_ping: Optional[Callable] = None,
on_pong: Optional[Callable] = None,
on_cont_message: Optional[Callable] = None,
keep_running: bool = True,
get_mask_key: Optional[Callable] = None,
cookie: Optional[str] = None,
subprotocols: Optional[list] = None,
on_data: Optional[Callable] = None,
socket: Optional[socket.socket] = None,
) -> None:
""" """
WebSocketApp initialization WebSocketApp initialization
@ -222,13 +253,13 @@ class WebSocketApp:
self.on_cont_message = on_cont_message self.on_cont_message = on_cont_message
self.keep_running = False self.keep_running = False
self.get_mask_key = get_mask_key self.get_mask_key = get_mask_key
self.sock = None self.sock: Optional[WebSocket] = None
self.last_ping_tm = 0 self.last_ping_tm = float(0)
self.last_pong_tm = 0 self.last_pong_tm = float(0)
self.ping_thread = None self.ping_thread: Optional[threading.Thread] = None
self.stop_ping = None self.stop_ping: Optional[threading.Event] = None
self.ping_interval = 0 self.ping_interval = float(0)
self.ping_timeout = None self.ping_timeout: Union[float, int, None] = None
self.ping_payload = "" self.ping_payload = ""
self.subprotocols = subprotocols self.subprotocols = subprotocols
self.prepared_socket = socket self.prepared_socket = socket
@ -236,7 +267,7 @@ class WebSocketApp:
self.has_done_teardown = False self.has_done_teardown = False
self.has_done_teardown_lock = threading.Lock() self.has_done_teardown_lock = threading.Lock()
def send(self, data: str, opcode: int = ABNF.OPCODE_TEXT) -> None: def send(self, data: Union[bytes, str], opcode: int = ABNF.OPCODE_TEXT) -> None:
""" """
send message send message
@ -250,8 +281,21 @@ class WebSocketApp:
""" """
if not self.sock or self.sock.send(data, opcode) == 0: if not self.sock or self.sock.send(data, opcode) == 0:
raise WebSocketConnectionClosedException( raise WebSocketConnectionClosedException("Connection is already closed.")
"Connection is already closed.")
def send_text(self, text_data: str) -> None:
"""
Sends UTF-8 encoded text.
"""
if not self.sock or self.sock.send(text_data, ABNF.OPCODE_TEXT) == 0:
raise WebSocketConnectionClosedException("Connection is already closed.")
def send_bytes(self, data: Union[bytes, bytearray]) -> None:
"""
Sends a sequence of bytes.
"""
if not self.sock or self.sock.send(data, ABNF.OPCODE_BINARY) == 0:
raise WebSocketConnectionClosedException("Connection is already closed.")
def close(self, **kwargs) -> None: def close(self, **kwargs) -> None:
""" """
@ -263,7 +307,7 @@ class WebSocketApp:
self.sock = None self.sock = None
def _start_ping_thread(self) -> None: def _start_ping_thread(self) -> None:
self.last_ping_tm = self.last_pong_tm = 0 self.last_ping_tm = self.last_pong_tm = float(0)
self.stop_ping = threading.Event() self.stop_ping = threading.Event()
self.ping_thread = threading.Thread(target=self._send_ping) self.ping_thread = threading.Thread(target=self._send_ping)
self.ping_thread.daemon = True self.ping_thread.daemon = True
@ -274,7 +318,7 @@ class WebSocketApp:
self.stop_ping.set() self.stop_ping.set()
if self.ping_thread and self.ping_thread.is_alive(): if self.ping_thread and self.ping_thread.is_alive():
self.ping_thread.join(3) self.ping_thread.join(3)
self.last_ping_tm = self.last_pong_tm = 0 self.last_ping_tm = self.last_pong_tm = float(0)
def _send_ping(self) -> None: def _send_ping(self) -> None:
if self.stop_ping.wait(self.ping_interval) or self.keep_running is False: if self.stop_ping.wait(self.ping_interval) or self.keep_running is False:
@ -286,17 +330,28 @@ class WebSocketApp:
_logging.debug("Sending ping") _logging.debug("Sending ping")
self.sock.ping(self.ping_payload) self.sock.ping(self.ping_payload)
except Exception as e: except Exception as e:
_logging.debug("Failed to send ping: {err}".format(err=e)) _logging.debug(f"Failed to send ping: {e}")
def run_forever(self, sockopt: tuple = None, sslopt: dict = None, def run_forever(
ping_interval: float = 0, ping_timeout: float or None = None, self,
ping_payload: str = "", sockopt: tuple = None,
http_proxy_host: str = None, http_proxy_port: int or str = None, sslopt: dict = None,
http_no_proxy: list = None, http_proxy_auth: tuple = None, ping_interval: Union[float, int] = 0,
http_proxy_timeout: float = None, ping_timeout: Union[float, int, None] = None,
skip_utf8_validation: bool = False, ping_payload: str = "",
host: str = None, origin: str = None, dispatcher: Dispatcher = None, http_proxy_host: str = None,
suppress_origin: bool = False, proxy_type: str = None, reconnect: int = None) -> bool: http_proxy_port: Union[int, str] = None,
http_no_proxy: list = None,
http_proxy_auth: tuple = None,
http_proxy_timeout: Optional[float] = None,
skip_utf8_validation: bool = False,
host: str = None,
origin: str = None,
dispatcher=None,
suppress_origin: bool = False,
proxy_type: str = None,
reconnect: int = None,
) -> bool:
""" """
Run event loop for WebSocket framework. Run event loop for WebSocket framework.
@ -360,7 +415,7 @@ class WebSocketApp:
if ping_timeout and ping_interval and ping_interval <= ping_timeout: if ping_timeout and ping_interval and ping_interval <= ping_timeout:
raise WebSocketException("Ensure ping_interval > ping_timeout") raise WebSocketException("Ensure ping_interval > ping_timeout")
if not sockopt: if not sockopt:
sockopt = [] sockopt = ()
if not sslopt: if not sslopt:
sslopt = {} sslopt = {}
if self.sock: if self.sock:
@ -394,7 +449,8 @@ class WebSocketApp:
if self.sock: if self.sock:
self.sock.close() self.sock.close()
close_status_code, close_reason = self._get_close_args( close_status_code, close_reason = self._get_close_args(
close_frame if close_frame else None) close_frame if close_frame else None
)
self.sock = None self.sock = None
# Finally call the callback AFTER all teardown is complete # Finally call the callback AFTER all teardown is complete
@ -405,24 +461,34 @@ class WebSocketApp:
self.sock.shutdown() self.sock.shutdown()
self.sock = WebSocket( self.sock = WebSocket(
self.get_mask_key, sockopt=sockopt, sslopt=sslopt, self.get_mask_key,
sockopt=sockopt,
sslopt=sslopt,
fire_cont_frame=self.on_cont_message is not None, fire_cont_frame=self.on_cont_message is not None,
skip_utf8_validation=skip_utf8_validation, skip_utf8_validation=skip_utf8_validation,
enable_multithread=True) enable_multithread=True,
)
self.sock.settimeout(getdefaulttimeout()) self.sock.settimeout(getdefaulttimeout())
try: try:
header = self.header() if callable(self.header) else self.header header = self.header() if callable(self.header) else self.header
self.sock.connect( self.sock.connect(
self.url, header=header, cookie=self.cookie, self.url,
header=header,
cookie=self.cookie,
http_proxy_host=http_proxy_host, http_proxy_host=http_proxy_host,
http_proxy_port=http_proxy_port, http_no_proxy=http_no_proxy, http_proxy_port=http_proxy_port,
http_proxy_auth=http_proxy_auth, http_proxy_timeout=http_proxy_timeout, http_no_proxy=http_no_proxy,
http_proxy_auth=http_proxy_auth,
http_proxy_timeout=http_proxy_timeout,
subprotocols=self.subprotocols, subprotocols=self.subprotocols,
host=host, origin=origin, suppress_origin=suppress_origin, host=host,
proxy_type=proxy_type, socket=self.prepared_socket) origin=origin,
suppress_origin=suppress_origin,
proxy_type=proxy_type,
socket=self.prepared_socket,
)
_logging.info("Websocket connected") _logging.info("Websocket connected")
@ -432,7 +498,13 @@ class WebSocketApp:
self._callback(self.on_open) self._callback(self.on_open)
dispatcher.read(self.sock.sock, read, check) dispatcher.read(self.sock.sock, read, check)
except (WebSocketConnectionClosedException, ConnectionRefusedError, KeyboardInterrupt, SystemExit, Exception) as e: except (
WebSocketConnectionClosedException,
ConnectionRefusedError,
KeyboardInterrupt,
SystemExit,
Exception,
) as e:
handleDisconnect(e, reconnecting) handleDisconnect(e, reconnecting)
def read() -> bool: def read() -> bool:
@ -441,7 +513,10 @@ class WebSocketApp:
try: try:
op_code, frame = self.sock.recv_data_frame(True) op_code, frame = self.sock.recv_data_frame(True)
except (WebSocketConnectionClosedException, KeyboardInterrupt) as e: except (
WebSocketConnectionClosedException,
KeyboardInterrupt,
) as e:
if custom_dispatcher: if custom_dispatcher:
return handleDisconnect(e) return handleDisconnect(e)
else: else:
@ -455,10 +530,8 @@ class WebSocketApp:
self.last_pong_tm = time.time() self.last_pong_tm = time.time()
self._callback(self.on_pong, frame.data) self._callback(self.on_pong, frame.data)
elif op_code == ABNF.OPCODE_CONT and self.on_cont_message: elif op_code == ABNF.OPCODE_CONT and self.on_cont_message:
self._callback(self.on_data, frame.data, self._callback(self.on_data, frame.data, frame.opcode, frame.fin)
frame.opcode, frame.fin) self._callback(self.on_cont_message, frame.data, frame.fin)
self._callback(self.on_cont_message,
frame.data, frame.fin)
else: else:
data = frame.data data = frame.data
if op_code == ABNF.OPCODE_TEXT and not skip_utf8_validation: if op_code == ABNF.OPCODE_TEXT and not skip_utf8_validation:
@ -469,18 +542,38 @@ class WebSocketApp:
return True return True
def check() -> bool: def check() -> bool:
if (self.ping_timeout): if self.ping_timeout:
has_timeout_expired = time.time() - self.last_ping_tm > self.ping_timeout has_timeout_expired = (
has_pong_not_arrived_after_last_ping = self.last_pong_tm - self.last_ping_tm < 0 time.time() - self.last_ping_tm > self.ping_timeout
has_pong_arrived_too_late = self.last_pong_tm - self.last_ping_tm > self.ping_timeout )
has_pong_not_arrived_after_last_ping = (
self.last_pong_tm - self.last_ping_tm < 0
)
has_pong_arrived_too_late = (
self.last_pong_tm - self.last_ping_tm > self.ping_timeout
)
if (self.last_ping_tm and if (
has_timeout_expired and self.last_ping_tm
(has_pong_not_arrived_after_last_ping or has_pong_arrived_too_late)): and has_timeout_expired
and (
has_pong_not_arrived_after_last_ping
or has_pong_arrived_too_late
)
):
raise WebSocketTimeoutException("ping/pong timed out") raise WebSocketTimeoutException("ping/pong timed out")
return True return True
def handleDisconnect(e: Exception, reconnecting: bool = False) -> bool: def handleDisconnect(
e: Union[
WebSocketConnectionClosedException,
ConnectionRefusedError,
KeyboardInterrupt,
SystemExit,
Exception,
],
reconnecting: bool = False,
) -> bool:
self.has_errored = True self.has_errored = True
self._stop_ping_thread() self._stop_ping_thread()
if not reconnecting: if not reconnecting:
@ -492,25 +585,31 @@ class WebSocketApp:
raise raise
if reconnect: if reconnect:
_logging.info("{err} - reconnect".format(err=e)) _logging.info(f"{e} - reconnect")
if custom_dispatcher: if custom_dispatcher:
_logging.debug("Calling custom dispatcher reconnect [{frame_count} frames in stack]".format(frame_count=len(inspect.stack()))) _logging.debug(
f"Calling custom dispatcher reconnect [{len(inspect.stack())} frames in stack]"
)
dispatcher.reconnect(reconnect, setSock) dispatcher.reconnect(reconnect, setSock)
else: else:
_logging.error("{err} - goodbye".format(err=e)) _logging.error(f"{e} - goodbye")
teardown() teardown()
custom_dispatcher = bool(dispatcher) custom_dispatcher = bool(dispatcher)
dispatcher = self.create_dispatcher(ping_timeout, dispatcher, parse_url(self.url)[3]) dispatcher = self.create_dispatcher(
ping_timeout, dispatcher, parse_url(self.url)[3]
)
try: try:
setSock() setSock()
if not custom_dispatcher and reconnect: if not custom_dispatcher and reconnect:
while self.keep_running: while self.keep_running:
_logging.debug("Calling dispatcher reconnect [{frame_count} frames in stack]".format(frame_count=len(inspect.stack()))) _logging.debug(
f"Calling dispatcher reconnect [{len(inspect.stack())} frames in stack]"
)
dispatcher.reconnect(reconnect, setSock) dispatcher.reconnect(reconnect, setSock)
except (KeyboardInterrupt, Exception) as e: except (KeyboardInterrupt, Exception) as e:
_logging.info("tearing down on exception {err}".format(err=e)) _logging.info(f"tearing down on exception {e}")
teardown() teardown()
finally: finally:
if not custom_dispatcher: if not custom_dispatcher:
@ -519,13 +618,17 @@ class WebSocketApp:
return self.has_errored return self.has_errored
def create_dispatcher(self, ping_timeout: int, dispatcher: Dispatcher = None, is_ssl: bool = False) -> DispatcherBase: def create_dispatcher(
self,
ping_timeout: Union[float, int, None],
dispatcher: Optional[DispatcherBase] = None,
is_ssl: bool = False,
) -> Union[Dispatcher, SSLDispatcher, WrappedDispatcher]:
if dispatcher: # If custom dispatcher is set, use WrappedDispatcher if dispatcher: # If custom dispatcher is set, use WrappedDispatcher
return WrappedDispatcher(self, ping_timeout, dispatcher) return WrappedDispatcher(self, ping_timeout, dispatcher)
timeout = ping_timeout or 10 timeout = ping_timeout or 10
if is_ssl: if is_ssl:
return SSLDispatcher(self, timeout) return SSLDispatcher(self, timeout)
return Dispatcher(self, timeout) return Dispatcher(self, timeout)
def _get_close_args(self, close_frame: ABNF) -> list: def _get_close_args(self, close_frame: ABNF) -> list:
@ -540,8 +643,12 @@ class WebSocketApp:
# Extract close frame status code # Extract close frame status code
if close_frame.data and len(close_frame.data) >= 2: if close_frame.data and len(close_frame.data) >= 2:
close_status_code = 256 * close_frame.data[0] + close_frame.data[1] close_status_code = 256 * int(close_frame.data[0]) + int(
reason = close_frame.data[2:].decode('utf-8') close_frame.data[1]
)
reason = close_frame.data[2:]
if isinstance(reason, bytes):
reason = reason.decode("utf-8")
return [close_status_code, reason] return [close_status_code, reason]
else: else:
# Most likely reached this because len(close_frame_data.data) < 2 # Most likely reached this because len(close_frame_data.data) < 2
@ -553,6 +660,6 @@ class WebSocketApp:
callback(self, *args) callback(self, *args)
except Exception as e: except Exception as e:
_logging.error("error from callback {callback}: {err}".format(callback=callback, err=e)) _logging.error(f"error from callback {callback}: {e}")
if self.on_error: if self.on_error:
self.on_error(self, e) self.on_error(self, e)
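Taken together, the _app.py changes are typed signatures, float-based ping bookkeeping and the new send_text()/send_bytes() helpers on WebSocketApp. A minimal usage sketch (the echo URL is a placeholder):

    import websocket

    def on_open(ws):
        ws.send_text("hello")             # framed as OPCODE_TEXT
        ws.send_bytes(b"\x01\x02\x03")    # framed as OPCODE_BINARY

    def on_message(ws, message):
        print("received:", message)

    app = websocket.WebSocketApp(
        "wss://example.invalid/echo",     # placeholder endpoint
        on_open=on_open,
        on_message=on_message,
    )
    # run_forever() enforces ping_interval > ping_timeout
    app.run_forever(ping_interval=30, ping_timeout=10, ping_payload="keepalive")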
View file
@ -1,4 +1,5 @@
import http.cookies import http.cookies
from typing import Optional
""" """
_cookiejar.py _cookiejar.py
@ -22,18 +23,21 @@ limitations under the License.
class SimpleCookieJar: class SimpleCookieJar:
def __init__(self) -> None: def __init__(self) -> None:
self.jar = dict() self.jar: dict = dict()
def add(self, set_cookie: str) -> None: def add(self, set_cookie: Optional[str]) -> None:
if set_cookie: if set_cookie:
simpleCookie = http.cookies.SimpleCookie(set_cookie) simpleCookie = http.cookies.SimpleCookie(set_cookie)
for k, v in simpleCookie.items(): for k, v in simpleCookie.items():
domain = v.get("domain") if domain := v.get("domain"):
if domain:
if not domain.startswith("."): if not domain.startswith("."):
domain = "." + domain domain = f".{domain}"
cookie = self.jar.get(domain) if self.jar.get(domain) else http.cookies.SimpleCookie() cookie = (
self.jar.get(domain)
if self.jar.get(domain)
else http.cookies.SimpleCookie()
)
cookie.update(simpleCookie) cookie.update(simpleCookie)
self.jar[domain.lower()] = cookie self.jar[domain.lower()] = cookie
@ -42,10 +46,9 @@ class SimpleCookieJar:
simpleCookie = http.cookies.SimpleCookie(set_cookie) simpleCookie = http.cookies.SimpleCookie(set_cookie)
for k, v in simpleCookie.items(): for k, v in simpleCookie.items():
domain = v.get("domain") if domain := v.get("domain"):
if domain:
if not domain.startswith("."): if not domain.startswith("."):
domain = "." + domain domain = f".{domain}"
self.jar[domain.lower()] = simpleCookie self.jar[domain.lower()] = simpleCookie
def get(self, host: str) -> str: def get(self, host: str) -> str:
@ -58,7 +61,15 @@ class SimpleCookieJar:
if host.endswith(domain) or host == domain[1:]: if host.endswith(domain) or host == domain[1:]:
cookies.append(self.jar.get(domain)) cookies.append(self.jar.get(domain))
return "; ".join(filter( return "; ".join(
None, sorted( filter(
["%s=%s" % (k, v.value) for cookie in filter(None, cookies) for k, v in cookie.items()] None,
))) sorted(
[
"%s=%s" % (k, v.value)
for cookie in filter(None, cookies)
for k, v in cookie.items()
]
),
)
)
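The jar keys each cookie under its leading-dot domain, and get() joins every stored cookie whose domain matches the requested host. A small sketch of that behaviour (importing the internal module shown above):

    from websocket._cookiejar import SimpleCookieJar

    jar = SimpleCookieJar()
    jar.add("a=1; domain=example.com")    # stored under ".example.com"
    jar.add("b=2; domain=.example.com")   # merged into the same entry
    print(jar.get("www.example.com"))     # -> "a=1; b=2"
    print(jar.get("other.org"))           # -> ""  (no matching domain)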
View file
@ -2,6 +2,7 @@ import socket
import struct import struct
import threading import threading
import time import time
from typing import Optional, Union
# websocket modules # websocket modules
from ._abnf import * from ._abnf import *
@ -32,7 +33,7 @@ See the License for the specific language governing permissions and
limitations under the License. limitations under the License.
""" """
__all__ = ['WebSocket', 'create_connection'] __all__ = ["WebSocket", "create_connection"]
class WebSocket: class WebSocket:
@ -73,9 +74,16 @@ class WebSocket:
Skip utf8 validation. Skip utf8 validation.
""" """
def __init__(self, get_mask_key=None, sockopt=None, sslopt=None, def __init__(
fire_cont_frame: bool = False, enable_multithread: bool = True, self,
skip_utf8_validation: bool = False, **_): get_mask_key=None,
sockopt=None,
sslopt=None,
fire_cont_frame: bool = False,
enable_multithread: bool = True,
skip_utf8_validation: bool = False,
**_,
):
""" """
Initialize WebSocket object. Initialize WebSocket object.
@ -86,14 +94,13 @@ class WebSocket:
""" """
self.sock_opt = sock_opt(sockopt, sslopt) self.sock_opt = sock_opt(sockopt, sslopt)
self.handshake_response = None self.handshake_response = None
self.sock = None self.sock: Optional[socket.socket] = None
self.connected = False self.connected = False
self.get_mask_key = get_mask_key self.get_mask_key = get_mask_key
# These buffer over the build-up of a single frame. # These buffer over the build-up of a single frame.
self.frame_buffer = frame_buffer(self._recv, skip_utf8_validation) self.frame_buffer = frame_buffer(self._recv, skip_utf8_validation)
self.cont_frame = continuous_frame( self.cont_frame = continuous_frame(fire_cont_frame, skip_utf8_validation)
fire_cont_frame, skip_utf8_validation)
if enable_multithread: if enable_multithread:
self.lock = threading.Lock() self.lock = threading.Lock()
@ -133,7 +140,7 @@ class WebSocket:
""" """
self.get_mask_key = func self.get_mask_key = func
def gettimeout(self) -> float: def gettimeout(self) -> Union[float, int, None]:
""" """
Get the websocket timeout (in seconds) as an int or float Get the websocket timeout (in seconds) as an int or float
@ -144,7 +151,7 @@ class WebSocket:
""" """
return self.sock_opt.timeout return self.sock_opt.timeout
def settimeout(self, timeout: float): def settimeout(self, timeout: Union[float, int, None]):
""" """
Set the timeout to the websocket. Set the timeout to the websocket.
@ -245,19 +252,26 @@ class WebSocket:
socket: socket socket: socket
Pre-initialized stream socket. Pre-initialized stream socket.
""" """
self.sock_opt.timeout = options.get('timeout', self.sock_opt.timeout) self.sock_opt.timeout = options.get("timeout", self.sock_opt.timeout)
self.sock, addrs = connect(url, self.sock_opt, proxy_info(**options), self.sock, addrs = connect(
options.pop('socket', None)) url, self.sock_opt, proxy_info(**options), options.pop("socket", None)
)
try: try:
self.handshake_response = handshake(self.sock, url, *addrs, **options) self.handshake_response = handshake(self.sock, url, *addrs, **options)
for attempt in range(options.pop('redirect_limit', 3)): for attempt in range(options.pop("redirect_limit", 3)):
if self.handshake_response.status in SUPPORTED_REDIRECT_STATUSES: if self.handshake_response.status in SUPPORTED_REDIRECT_STATUSES:
url = self.handshake_response.headers['location'] url = self.handshake_response.headers["location"]
self.sock.close() self.sock.close()
self.sock, addrs = connect(url, self.sock_opt, proxy_info(**options), self.sock, addrs = connect(
options.pop('socket', None)) url,
self.handshake_response = handshake(self.sock, url, *addrs, **options) self.sock_opt,
proxy_info(**options),
options.pop("socket", None),
)
self.handshake_response = handshake(
self.sock, url, *addrs, **options
)
self.connected = True self.connected = True
except: except:
if self.sock: if self.sock:
@ -265,7 +279,7 @@ class WebSocket:
self.sock = None self.sock = None
raise raise
def send(self, payload: bytes or str, opcode: int = ABNF.OPCODE_TEXT) -> int: def send(self, payload: Union[bytes, str], opcode: int = ABNF.OPCODE_TEXT) -> int:
""" """
Send the data as string. Send the data as string.
@ -282,6 +296,18 @@ class WebSocket:
frame = ABNF.create_frame(payload, opcode) frame = ABNF.create_frame(payload, opcode)
return self.send_frame(frame) return self.send_frame(frame)
def send_text(self, text_data: str) -> int:
"""
Sends UTF-8 encoded text.
"""
return self.send(text_data, ABNF.OPCODE_TEXT)
def send_bytes(self, data: Union[bytes, bytearray]) -> int:
"""
Sends a sequence of bytes.
"""
return self.send(data, ABNF.OPCODE_BINARY)
def send_frame(self, frame) -> int: def send_frame(self, frame) -> int:
""" """
Send the data frame. Send the data frame.
@ -303,9 +329,9 @@ class WebSocket:
frame.get_mask_key = self.get_mask_key frame.get_mask_key = self.get_mask_key
data = frame.format() data = frame.format()
length = len(data) length = len(data)
if (isEnabledForTrace()): if isEnabledForTrace():
trace("++Sent raw: " + repr(data)) trace(f"++Sent raw: {repr(data)}")
trace("++Sent decoded: " + frame.__str__()) trace(f"++Sent decoded: {frame.__str__()}")
with self.lock: with self.lock:
while data: while data:
l = self._send(data) l = self._send(data)
@ -324,7 +350,7 @@ class WebSocket:
""" """
return self.send(payload, ABNF.OPCODE_BINARY) return self.send(payload, ABNF.OPCODE_BINARY)
def ping(self, payload: str or bytes = ""): def ping(self, payload: Union[str, bytes] = ""):
""" """
Send ping data. Send ping data.
@ -337,7 +363,7 @@ class WebSocket:
payload = payload.encode("utf-8") payload = payload.encode("utf-8")
self.send(payload, ABNF.OPCODE_PING) self.send(payload, ABNF.OPCODE_PING)
def pong(self, payload: str or bytes = ""): def pong(self, payload: Union[str, bytes] = ""):
""" """
Send pong data. Send pong data.
@ -350,7 +376,7 @@ class WebSocket:
payload = payload.encode("utf-8") payload = payload.encode("utf-8")
self.send(payload, ABNF.OPCODE_PONG) self.send(payload, ABNF.OPCODE_PONG)
def recv(self) -> str or bytes: def recv(self) -> Union[str, bytes]:
""" """
Receive string data (byte array) from the server. Receive string data (byte array) from the server.
@ -361,11 +387,16 @@ class WebSocket:
with self.readlock: with self.readlock:
opcode, data = self.recv_data() opcode, data = self.recv_data()
if opcode == ABNF.OPCODE_TEXT: if opcode == ABNF.OPCODE_TEXT:
return data.decode("utf-8") data_received: Union[bytes, str] = data
elif opcode == ABNF.OPCODE_TEXT or opcode == ABNF.OPCODE_BINARY: if isinstance(data_received, bytes):
return data return data_received.decode("utf-8")
elif isinstance(data_received, str):
return data_received
elif opcode == ABNF.OPCODE_BINARY:
data_binary: bytes = data
return data_binary
else: else:
return '' return ""
def recv_data(self, control_frame: bool = False) -> tuple: def recv_data(self, control_frame: bool = False) -> tuple:
""" """
@ -385,7 +416,7 @@ class WebSocket:
opcode, frame = self.recv_data_frame(control_frame) opcode, frame = self.recv_data_frame(control_frame)
return opcode, frame.data return opcode, frame.data
def recv_data_frame(self, control_frame: bool = False): def recv_data_frame(self, control_frame: bool = False) -> tuple:
""" """
Receive data with operation code. Receive data with operation code.
@ -404,15 +435,18 @@ class WebSocket:
""" """
while True: while True:
frame = self.recv_frame() frame = self.recv_frame()
if (isEnabledForTrace()): if isEnabledForTrace():
trace("++Rcv raw: " + repr(frame.format())) trace(f"++Rcv raw: {repr(frame.format())}")
trace("++Rcv decoded: " + frame.__str__()) trace(f"++Rcv decoded: {frame.__str__()}")
if not frame: if not frame:
# handle error: # handle error:
# 'NoneType' object has no attribute 'opcode' # 'NoneType' object has no attribute 'opcode'
raise WebSocketProtocolException( raise WebSocketProtocolException(f"Not a valid frame {frame}")
"Not a valid frame {frame}".format(frame=frame)) elif frame.opcode in (
elif frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY, ABNF.OPCODE_CONT): ABNF.OPCODE_TEXT,
ABNF.OPCODE_BINARY,
ABNF.OPCODE_CONT,
):
self.cont_frame.validate(frame) self.cont_frame.validate(frame)
self.cont_frame.add(frame) self.cont_frame.add(frame)
@ -426,8 +460,7 @@ class WebSocket:
if len(frame.data) < 126: if len(frame.data) < 126:
self.pong(frame.data) self.pong(frame.data)
else: else:
raise WebSocketProtocolException( raise WebSocketProtocolException("Ping message is too long")
"Ping message is too long")
if control_frame: if control_frame:
return frame.opcode, frame return frame.opcode, frame
elif frame.opcode == ABNF.OPCODE_PONG: elif frame.opcode == ABNF.OPCODE_PONG:
@ -458,9 +491,9 @@ class WebSocket:
if status < 0 or status >= ABNF.LENGTH_16: if status < 0 or status >= ABNF.LENGTH_16:
raise ValueError("code is invalid range") raise ValueError("code is invalid range")
self.connected = False self.connected = False
self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE) self.send(struct.pack("!H", status) + reason, ABNF.OPCODE_CLOSE)
def close(self, status: int = STATUS_NORMAL, reason: bytes = b"", timeout: float = 3): def close(self, status: int = STATUS_NORMAL, reason: bytes = b"", timeout: int = 3):
""" """
Close Websocket object Close Websocket object
@ -474,36 +507,37 @@ class WebSocket:
Timeout until receive a close frame. Timeout until receive a close frame.
If None, it will wait forever until receive a close frame. If None, it will wait forever until receive a close frame.
""" """
if self.connected: if not self.connected:
if status < 0 or status >= ABNF.LENGTH_16: return
raise ValueError("code is invalid range") if status < 0 or status >= ABNF.LENGTH_16:
raise ValueError("code is invalid range")
try: try:
self.connected = False self.connected = False
self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE) self.send(struct.pack("!H", status) + reason, ABNF.OPCODE_CLOSE)
sock_timeout = self.sock.gettimeout() sock_timeout = self.sock.gettimeout()
self.sock.settimeout(timeout) self.sock.settimeout(timeout)
start_time = time.time() start_time = time.time()
while timeout is None or time.time() - start_time < timeout: while timeout is None or time.time() - start_time < timeout:
try: try:
frame = self.recv_frame() frame = self.recv_frame()
if frame.opcode != ABNF.OPCODE_CLOSE: if frame.opcode != ABNF.OPCODE_CLOSE:
continue continue
if isEnabledForError(): if isEnabledForError():
recv_status = struct.unpack("!H", frame.data[0:2])[0] recv_status = struct.unpack("!H", frame.data[0:2])[0]
if recv_status >= 3000 and recv_status <= 4999: if recv_status >= 3000 and recv_status <= 4999:
debug("close status: " + repr(recv_status)) debug(f"close status: {repr(recv_status)}")
elif recv_status != STATUS_NORMAL: elif recv_status != STATUS_NORMAL:
error("close status: " + repr(recv_status)) error(f"close status: {repr(recv_status)}")
break break
except: except:
break break
self.sock.settimeout(sock_timeout) self.sock.settimeout(sock_timeout)
self.sock.shutdown(socket.SHUT_RDWR) self.sock.shutdown(socket.SHUT_RDWR)
except: except:
pass pass
self.shutdown() self.shutdown()
def abort(self): def abort(self):
""" """
@ -521,7 +555,7 @@ class WebSocket:
self.sock = None self.sock = None
self.connected = False self.connected = False
def _send(self, data: str or bytes): def _send(self, data: Union[str, bytes]):
return send(self.sock, data) return send(self.sock, data)
def _recv(self, bufsize): def _recv(self, bufsize):
@ -600,10 +634,14 @@ def create_connection(url: str, timeout=None, class_=WebSocket, **options):
fire_cont_frame = options.pop("fire_cont_frame", False) fire_cont_frame = options.pop("fire_cont_frame", False)
enable_multithread = options.pop("enable_multithread", True) enable_multithread = options.pop("enable_multithread", True)
skip_utf8_validation = options.pop("skip_utf8_validation", False) skip_utf8_validation = options.pop("skip_utf8_validation", False)
websock = class_(sockopt=sockopt, sslopt=sslopt, websock = class_(
fire_cont_frame=fire_cont_frame, sockopt=sockopt,
enable_multithread=enable_multithread, sslopt=sslopt,
skip_utf8_validation=skip_utf8_validation, **options) fire_cont_frame=fire_cont_frame,
enable_multithread=enable_multithread,
skip_utf8_validation=skip_utf8_validation,
**options,
)
websock.settimeout(timeout if timeout is not None else getdefaulttimeout()) websock.settimeout(timeout if timeout is not None else getdefaulttimeout())
websock.connect(url, **options) websock.connect(url, **options)
return websock return websock
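The lower-level WebSocket class picks up the same send_text()/send_bytes() helpers and Union-typed timeouts. A minimal sketch through create_connection() (placeholder URL):

    from websocket import create_connection

    ws = create_connection("wss://example.invalid/echo", timeout=10)
    ws.send_text("ping from client")     # OPCODE_TEXT
    ws.send_bytes(b"\x00\xff")           # OPCODE_BINARY
    print(ws.recv())                     # str for text frames, bytes for binary frames
    ws.close()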
View file
@ -22,6 +22,7 @@ class WebSocketException(Exception):
""" """
WebSocket exception class. WebSocket exception class.
""" """
pass pass
@ -29,6 +30,7 @@ class WebSocketProtocolException(WebSocketException):
""" """
If the WebSocket protocol is invalid, this exception will be raised. If the WebSocket protocol is invalid, this exception will be raised.
""" """
pass pass
@ -36,6 +38,7 @@ class WebSocketPayloadException(WebSocketException):
""" """
If the WebSocket payload is invalid, this exception will be raised. If the WebSocket payload is invalid, this exception will be raised.
""" """
pass pass
@ -44,6 +47,7 @@ class WebSocketConnectionClosedException(WebSocketException):
If remote host closed the connection or some network error happened, If remote host closed the connection or some network error happened,
this exception will be raised. this exception will be raised.
""" """
pass pass
@ -51,6 +55,7 @@ class WebSocketTimeoutException(WebSocketException):
""" """
WebSocketTimeoutException will be raised at socket timeout during read/write data. WebSocketTimeoutException will be raised at socket timeout during read/write data.
""" """
pass pass
@ -58,6 +63,7 @@ class WebSocketProxyException(WebSocketException):
""" """
WebSocketProxyException will be raised when a proxy error occurs. WebSocketProxyException will be raised when a proxy error occurs.
""" """
pass pass
@ -66,7 +72,14 @@ class WebSocketBadStatusException(WebSocketException):
WebSocketBadStatusException will be raised when we get bad handshake status code. WebSocketBadStatusException will be raised when we get bad handshake status code.
""" """
def __init__(self, message: str, status_code: int, status_message=None, resp_headers=None, resp_body=None): def __init__(
self,
message: str,
status_code: int,
status_message=None,
resp_headers=None,
resp_body=None,
):
super().__init__(message) super().__init__(message)
self.status_code = status_code self.status_code = status_code
self.resp_headers = resp_headers self.resp_headers = resp_headers
@ -77,4 +90,5 @@ class WebSocketAddressException(WebSocketException):
""" """
If the websocket address info cannot be found, this exception will be raised. If the websocket address info cannot be found, this exception will be raised.
""" """
pass pass
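The exception hierarchy itself is unchanged; WebSocketBadStatusException still exposes the failed handshake details as attributes. A short sketch of inspecting them (placeholder URL; the except branch only runs if the server answers with a non-101 status):

    import websocket

    try:
        websocket.create_connection("ws://example.invalid/plain-http")
    except websocket.WebSocketBadStatusException as e:
        print(e.status_code, e.resp_headers)   # populated from the handshake response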
View file
@ -20,7 +20,8 @@ import hashlib
import hmac import hmac
import os import os
from base64 import encodebytes as base64encode from base64 import encodebytes as base64encode
from http import client as HTTPStatus from http import HTTPStatus
from ._cookiejar import SimpleCookieJar from ._cookiejar import SimpleCookieJar
from ._exceptions import * from ._exceptions import *
from ._http import * from ._http import *
@ -32,14 +33,19 @@ __all__ = ["handshake_response", "handshake", "SUPPORTED_REDIRECT_STATUSES"]
# websocket supported version. # websocket supported version.
VERSION = 13 VERSION = 13
SUPPORTED_REDIRECT_STATUSES = (HTTPStatus.MOVED_PERMANENTLY, HTTPStatus.FOUND, HTTPStatus.SEE_OTHER,) SUPPORTED_REDIRECT_STATUSES = (
HTTPStatus.MOVED_PERMANENTLY,
HTTPStatus.FOUND,
HTTPStatus.SEE_OTHER,
HTTPStatus.TEMPORARY_REDIRECT,
HTTPStatus.PERMANENT_REDIRECT,
)
SUCCESS_STATUSES = SUPPORTED_REDIRECT_STATUSES + (HTTPStatus.SWITCHING_PROTOCOLS,) SUCCESS_STATUSES = SUPPORTED_REDIRECT_STATUSES + (HTTPStatus.SWITCHING_PROTOCOLS,)
CookieJar = SimpleCookieJar() CookieJar = SimpleCookieJar()
class handshake_response: class handshake_response:
def __init__(self, status: int, headers: dict, subprotocol): def __init__(self, status: int, headers: dict, subprotocol):
self.status = status self.status = status
self.headers = headers self.headers = headers
@ -47,7 +53,9 @@ class handshake_response:
CookieJar.add(headers.get("set-cookie")) CookieJar.add(headers.get("set-cookie"))
def handshake(sock, url: str, hostname: str, port: int, resource: str, **options): def handshake(
sock, url: str, hostname: str, port: int, resource: str, **options
) -> handshake_response:
headers, key = _get_handshake_headers(resource, url, hostname, port, options) headers, key = _get_handshake_headers(resource, url, hostname, port, options)
header_str = "\r\n".join(headers) header_str = "\r\n".join(headers)
@ -66,74 +74,64 @@ def handshake(sock, url: str, hostname: str, port: int, resource: str, **options
def _pack_hostname(hostname: str) -> str: def _pack_hostname(hostname: str) -> str:
# IPv6 address # IPv6 address
if ':' in hostname: if ":" in hostname:
return '[' + hostname + ']' return f"[{hostname}]"
return hostname return hostname
def _get_handshake_headers(resource: str, url: str, host: str, port: int, options: dict): def _get_handshake_headers(
headers = [ resource: str, url: str, host: str, port: int, options: dict
"GET {resource} HTTP/1.1".format(resource=resource), ) -> tuple:
"Upgrade: websocket" headers = [f"GET {resource} HTTP/1.1", "Upgrade: websocket"]
] if port in [80, 443]:
if port == 80 or port == 443:
hostport = _pack_hostname(host) hostport = _pack_hostname(host)
else: else:
hostport = "{h}:{p}".format(h=_pack_hostname(host), p=port) hostport = f"{_pack_hostname(host)}:{port}"
if options.get("host"): if options.get("host"):
headers.append("Host: {h}".format(h=options["host"])) headers.append(f'Host: {options["host"]}')
else: else:
headers.append("Host: {hp}".format(hp=hostport)) headers.append(f"Host: {hostport}")
# scheme indicates whether http or https is used in Origin # scheme indicates whether http or https is used in Origin
# The same approach is used in parse_url of _url.py to set default port # The same approach is used in parse_url of _url.py to set default port
scheme, url = url.split(":", 1) scheme, url = url.split(":", 1)
if not options.get("suppress_origin"): if not options.get("suppress_origin"):
if "origin" in options and options["origin"] is not None: if "origin" in options and options["origin"] is not None:
headers.append("Origin: {origin}".format(origin=options["origin"])) headers.append(f'Origin: {options["origin"]}')
elif scheme == "wss": elif scheme == "wss":
headers.append("Origin: https://{hp}".format(hp=hostport)) headers.append(f"Origin: https://{hostport}")
else: else:
headers.append("Origin: http://{hp}".format(hp=hostport)) headers.append(f"Origin: http://{hostport}")
key = _create_sec_websocket_key() key = _create_sec_websocket_key()
# Append Sec-WebSocket-Key & Sec-WebSocket-Version if not manually specified # Append Sec-WebSocket-Key & Sec-WebSocket-Version if not manually specified
if not options.get('header') or 'Sec-WebSocket-Key' not in options['header']: if not options.get("header") or "Sec-WebSocket-Key" not in options["header"]:
headers.append("Sec-WebSocket-Key: {key}".format(key=key)) headers.append(f"Sec-WebSocket-Key: {key}")
else: else:
key = options['header']['Sec-WebSocket-Key'] key = options["header"]["Sec-WebSocket-Key"]
if not options.get('header') or 'Sec-WebSocket-Version' not in options['header']: if not options.get("header") or "Sec-WebSocket-Version" not in options["header"]:
headers.append("Sec-WebSocket-Version: {version}".format(version=VERSION)) headers.append(f"Sec-WebSocket-Version: {VERSION}")
if not options.get('connection'): if not options.get("connection"):
headers.append('Connection: Upgrade') headers.append("Connection: Upgrade")
else: else:
headers.append(options['connection']) headers.append(options["connection"])
subprotocols = options.get("subprotocols") if subprotocols := options.get("subprotocols"):
if subprotocols: headers.append(f'Sec-WebSocket-Protocol: {",".join(subprotocols)}')
headers.append("Sec-WebSocket-Protocol: {protocols}".format(protocols=",".join(subprotocols)))
header = options.get("header") if header := options.get("header"):
if header:
if isinstance(header, dict): if isinstance(header, dict):
header = [ header = [": ".join([k, v]) for k, v in header.items() if v is not None]
": ".join([k, v])
for k, v in header.items()
if v is not None
]
headers.extend(header) headers.extend(header)
server_cookie = CookieJar.get(host) server_cookie = CookieJar.get(host)
client_cookie = options.get("cookie", None) client_cookie = options.get("cookie", None)
cookie = "; ".join(filter(None, [server_cookie, client_cookie])) if cookie := "; ".join(filter(None, [server_cookie, client_cookie])):
headers.append(f"Cookie: {cookie}")
if cookie:
headers.append("Cookie: {cookie}".format(cookie=cookie))
headers.extend(("", "")) headers.extend(("", ""))
return headers, key return headers, key
@ -142,12 +140,20 @@ def _get_handshake_headers(resource: str, url: str, host: str, port: int, option
def _get_resp_headers(sock, success_statuses: tuple = SUCCESS_STATUSES) -> tuple: def _get_resp_headers(sock, success_statuses: tuple = SUCCESS_STATUSES) -> tuple:
status, resp_headers, status_message = read_headers(sock) status, resp_headers, status_message = read_headers(sock)
if status not in success_statuses: if status not in success_statuses:
content_len = resp_headers.get('content-length') content_len = resp_headers.get("content-length")
if content_len: if content_len:
response_body = sock.recv(int(content_len)) # read the body of the HTTP error message response and include it in the exception response_body = sock.recv(
int(content_len)
) # read the body of the HTTP error message response and include it in the exception
else: else:
response_body = None response_body = None
raise WebSocketBadStatusException("Handshake status {status} {message} -+-+- {headers} -+-+- {body}".format(status=status, message=status_message, headers=resp_headers, body=response_body), status, status_message, resp_headers, response_body) raise WebSocketBadStatusException(
f"Handshake status {status} {status_message} -+-+- {resp_headers} -+-+- {response_body}",
status,
status_message,
resp_headers,
response_body,
)
return status, resp_headers return status, resp_headers
@ -157,20 +163,20 @@ _HEADERS_TO_CHECK = {
} }
def _validate(headers, key: str, subprotocols): def _validate(headers, key: str, subprotocols) -> tuple:
subproto = None subproto = None
for k, v in _HEADERS_TO_CHECK.items(): for k, v in _HEADERS_TO_CHECK.items():
r = headers.get(k, None) r = headers.get(k, None)
if not r: if not r:
return False, None return False, None
r = [x.strip().lower() for x in r.split(',')] r = [x.strip().lower() for x in r.split(",")]
if v not in r: if v not in r:
return False, None return False, None
if subprotocols: if subprotocols:
subproto = headers.get("sec-websocket-protocol", None) subproto = headers.get("sec-websocket-protocol", None)
if not subproto or subproto.lower() not in [s.lower() for s in subprotocols]: if not subproto or subproto.lower() not in [s.lower() for s in subprotocols]:
error("Invalid subprotocol: " + str(subprotocols)) error(f"Invalid subprotocol: {subprotocols}")
return False, None return False, None
subproto = subproto.lower() subproto = subproto.lower()
@ -180,13 +186,12 @@ def _validate(headers, key: str, subprotocols):
result = result.lower() result = result.lower()
if isinstance(result, str): if isinstance(result, str):
result = result.encode('utf-8') result = result.encode("utf-8")
value = (key + "258EAFA5-E914-47DA-95CA-C5AB0DC85B11").encode('utf-8') value = f"{key}258EAFA5-E914-47DA-95CA-C5AB0DC85B11".encode("utf-8")
hashed = base64encode(hashlib.sha1(value).digest()).strip().lower() hashed = base64encode(hashlib.sha1(value).digest()).strip().lower()
success = hmac.compare_digest(hashed, result)
if success: if hmac.compare_digest(hashed, result):
return True, subproto return True, subproto
else: else:
return False, None return False, None
@ -194,4 +199,4 @@ def _validate(headers, key: str, subprotocols):
def _create_sec_websocket_key() -> str: def _create_sec_websocket_key() -> str:
randomness = os.urandom(16) randomness = os.urandom(16)
return base64encode(randomness).decode('utf-8').strip() return base64encode(randomness).decode("utf-8").strip()
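_validate() checks the server's Sec-WebSocket-Accept header by hashing the client key together with the fixed GUID. The same computation, spelled out for the sample key from RFC 6455 section 1.3:

    import hashlib
    from base64 import encodebytes as base64encode

    key = "dGhlIHNhbXBsZSBub25jZQ=="  # sample nonce from RFC 6455
    value = f"{key}258EAFA5-E914-47DA-95CA-C5AB0DC85B11".encode("utf-8")
    accept = base64encode(hashlib.sha1(value).digest()).strip().decode("utf-8")
    print(accept)  # s3pPLMBiTxaQ9kYGzzhZRbK+xOo=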
View file
@ -19,6 +19,7 @@ limitations under the License.
import errno import errno
import os import os
import socket import socket
from base64 import encodebytes as base64encode
from ._exceptions import * from ._exceptions import *
from ._logging import * from ._logging import *
@ -26,14 +27,13 @@ from ._socket import *
from ._ssl_compat import * from ._ssl_compat import *
from ._url import * from ._url import *
from base64 import encodebytes as base64encode
__all__ = ["proxy_info", "connect", "read_headers"] __all__ = ["proxy_info", "connect", "read_headers"]
try: try:
from python_socks.sync import Proxy
from python_socks._errors import * from python_socks._errors import *
from python_socks._types import ProxyType from python_socks._types import ProxyType
from python_socks.sync import Proxy
HAVE_PYTHON_SOCKS = True HAVE_PYTHON_SOCKS = True
except: except:
HAVE_PYTHON_SOCKS = False HAVE_PYTHON_SOCKS = False
@ -49,7 +49,6 @@ except:
class proxy_info: class proxy_info:
def __init__(self, **options): def __init__(self, **options):
self.proxy_host = options.get("http_proxy_host", None) self.proxy_host = options.get("http_proxy_host", None)
if self.proxy_host: if self.proxy_host:
@ -59,8 +58,16 @@ class proxy_info:
self.proxy_protocol = options.get("proxy_type", "http") self.proxy_protocol = options.get("proxy_type", "http")
# Note: If timeout not specified, default python-socks timeout is 60 seconds # Note: If timeout not specified, default python-socks timeout is 60 seconds
self.proxy_timeout = options.get("http_proxy_timeout", None) self.proxy_timeout = options.get("http_proxy_timeout", None)
if self.proxy_protocol not in ['http', 'socks4', 'socks4a', 'socks5', 'socks5h']: if self.proxy_protocol not in [
raise ProxyError("Only http, socks4, socks5 proxy protocols are supported") "http",
"socks4",
"socks4a",
"socks5",
"socks5h",
]:
raise ProxyError(
"Only http, socks4, socks5 proxy protocols are supported"
)
else: else:
self.proxy_port = 0 self.proxy_port = 0
self.auth = None self.auth = None
@ -68,25 +75,28 @@ class proxy_info:
self.proxy_protocol = "http" self.proxy_protocol = "http"
def _start_proxied_socket(url: str, options, proxy): def _start_proxied_socket(url: str, options, proxy) -> tuple:
if not HAVE_PYTHON_SOCKS: if not HAVE_PYTHON_SOCKS:
raise WebSocketException("Python Socks is needed for SOCKS proxying but is not available") raise WebSocketException(
"Python Socks is needed for SOCKS proxying but is not available"
)
hostname, port, resource, is_secure = parse_url(url) hostname, port, resource, is_secure = parse_url(url)
if proxy.proxy_protocol == "socks5":
rdns = False
proxy_type = ProxyType.SOCKS5
if proxy.proxy_protocol == "socks4": if proxy.proxy_protocol == "socks4":
rdns = False rdns = False
proxy_type = ProxyType.SOCKS4 proxy_type = ProxyType.SOCKS4
# socks5h and socks4a send DNS through proxy # socks4a sends DNS through proxy
if proxy.proxy_protocol == "socks5h": elif proxy.proxy_protocol == "socks4a":
rdns = True
proxy_type = ProxyType.SOCKS5
if proxy.proxy_protocol == "socks4a":
rdns = True rdns = True
proxy_type = ProxyType.SOCKS4 proxy_type = ProxyType.SOCKS4
elif proxy.proxy_protocol == "socks5":
rdns = False
proxy_type = ProxyType.SOCKS5
# socks5h sends DNS through proxy
elif proxy.proxy_protocol == "socks5h":
rdns = True
proxy_type = ProxyType.SOCKS5
ws_proxy = Proxy.create( ws_proxy = Proxy.create(
proxy_type=proxy_type, proxy_type=proxy_type,
@ -94,14 +104,16 @@ def _start_proxied_socket(url: str, options, proxy):
port=int(proxy.proxy_port), port=int(proxy.proxy_port),
username=proxy.auth[0] if proxy.auth else None, username=proxy.auth[0] if proxy.auth else None,
password=proxy.auth[1] if proxy.auth else None, password=proxy.auth[1] if proxy.auth else None,
rdns=rdns) rdns=rdns,
)
sock = ws_proxy.connect(hostname, port, timeout=proxy.proxy_timeout) sock = ws_proxy.connect(hostname, port, timeout=proxy.proxy_timeout)
if is_secure and HAVE_SSL: if is_secure:
sock = _ssl_socket(sock, options.sslopt, hostname) if HAVE_SSL:
elif is_secure: sock = _ssl_socket(sock, options.sslopt, hostname)
raise WebSocketException("SSL not available.") else:
raise WebSocketException("SSL not available.")
return sock, (hostname, port, resource) return sock, (hostname, port, resource)
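With the reordering above, socks4/socks4a/socks5/socks5h all map onto python_socks ProxyType values, the *a/*h variants resolving DNS on the proxy. A minimal sketch of a SOCKS connection (requires the optional python_socks package; addresses are placeholders):

    import websocket

    ws = websocket.create_connection(
        "wss://example.invalid/feed",
        http_proxy_host="127.0.0.1",    # placeholder SOCKS proxy
        http_proxy_port=1080,
        proxy_type="socks5h",           # rdns=True: hostname resolved by the proxy
        http_proxy_timeout=10,
    )
    print(ws.recv())
    ws.close()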
@ -110,7 +122,7 @@ def connect(url: str, options, proxy, socket):
# Use _start_proxied_socket() only for socks4 or socks5 proxy # Use _start_proxied_socket() only for socks4 or socks5 proxy
# Use _tunnel() for http proxy # Use _tunnel() for http proxy
# TODO: Use python-socks for http protocol also, to standardize flow # TODO: Use python-socks for http protocol also, to standardize flow
if proxy.proxy_host and not socket and not (proxy.proxy_protocol == "http"): if proxy.proxy_host and not socket and proxy.proxy_protocol != "http":
return _start_proxied_socket(url, options, proxy) return _start_proxied_socket(url, options, proxy)
hostname, port_from_url, resource, is_secure = parse_url(url) hostname, port_from_url, resource, is_secure = parse_url(url)
@ -119,10 +131,10 @@ def connect(url: str, options, proxy, socket):
return socket, (hostname, port_from_url, resource) return socket, (hostname, port_from_url, resource)
addrinfo_list, need_tunnel, auth = _get_addrinfo_list( addrinfo_list, need_tunnel, auth = _get_addrinfo_list(
hostname, port_from_url, is_secure, proxy) hostname, port_from_url, is_secure, proxy
)
if not addrinfo_list: if not addrinfo_list:
raise WebSocketException( raise WebSocketException(f"Host not found.: {hostname}:{port_from_url}")
"Host not found.: " + hostname + ":" + str(port_from_url))
sock = None sock = None
try: try:
@ -143,16 +155,23 @@ def connect(url: str, options, proxy, socket):
raise raise
def _get_addrinfo_list(hostname, port, is_secure, proxy): def _get_addrinfo_list(hostname, port: int, is_secure: bool, proxy) -> tuple:
phost, pport, pauth = get_proxy_info( phost, pport, pauth = get_proxy_info(
hostname, is_secure, proxy.proxy_host, proxy.proxy_port, proxy.auth, proxy.no_proxy) hostname,
is_secure,
proxy.proxy_host,
proxy.proxy_port,
proxy.auth,
proxy.no_proxy,
)
try: try:
# when running on windows 10, getaddrinfo without socktype returns a socktype 0. # when running on windows 10, getaddrinfo without socktype returns a socktype 0.
# This generates an error exception: `_on_error: exception Socket type must be stream or datagram, not 0` # This generates an error exception: `_on_error: exception Socket type must be stream or datagram, not 0`
# or `OSError: [Errno 22] Invalid argument` when creating socket. Force the socket type to SOCK_STREAM. # or `OSError: [Errno 22] Invalid argument` when creating socket. Force the socket type to SOCK_STREAM.
if not phost: if not phost:
addrinfo_list = socket.getaddrinfo( addrinfo_list = socket.getaddrinfo(
hostname, port, 0, socket.SOCK_STREAM, socket.SOL_TCP) hostname, port, 0, socket.SOCK_STREAM, socket.SOL_TCP
)
return addrinfo_list, False, None return addrinfo_list, False, None
else: else:
pport = pport and pport or 80 pport = pport and pport or 80
@ -160,7 +179,9 @@ def _get_addrinfo_list(hostname, port, is_secure, proxy):
# returns a socktype 0. This generates an error exception: # returns a socktype 0. This generates an error exception:
# _on_error: exception Socket type must be stream or datagram, not 0 # _on_error: exception Socket type must be stream or datagram, not 0
# Force the socket type to SOCK_STREAM # Force the socket type to SOCK_STREAM
addrinfo_list = socket.getaddrinfo(phost, pport, 0, socket.SOCK_STREAM, socket.SOL_TCP) addrinfo_list = socket.getaddrinfo(
phost, pport, 0, socket.SOCK_STREAM, socket.SOL_TCP
)
return addrinfo_list, True, pauth return addrinfo_list, True, pauth
except socket.gaierror as e: except socket.gaierror as e:
raise WebSocketAddressException(e) raise WebSocketAddressException(e)
@ -186,14 +207,17 @@ def _open_socket(addrinfo_list, sockopt, timeout):
sock.close() sock.close()
error.remote_ip = str(address[0]) error.remote_ip = str(address[0])
try: try:
eConnRefused = (errno.ECONNREFUSED, errno.WSAECONNREFUSED, errno.ENETUNREACH) eConnRefused = (
errno.ECONNREFUSED,
errno.WSAECONNREFUSED,
errno.ENETUNREACH,
)
except AttributeError: except AttributeError:
eConnRefused = (errno.ECONNREFUSED, errno.ENETUNREACH) eConnRefused = (errno.ECONNREFUSED, errno.ENETUNREACH)
if error.errno in eConnRefused: if error.errno not in eConnRefused:
err = error
continue
else:
raise error raise error
err = error
continue
else: else:
break break
else: else:
@ -206,89 +230,97 @@ def _open_socket(addrinfo_list, sockopt, timeout):
return sock return sock
def _wrap_sni_socket(sock, sslopt, hostname, check_hostname): def _wrap_sni_socket(sock: socket.socket, sslopt: dict, hostname, check_hostname):
context = sslopt.get('context', None) context = sslopt.get("context", None)
if not context: if not context:
context = ssl.SSLContext(sslopt.get('ssl_version', ssl.PROTOCOL_TLS_CLIENT)) context = ssl.SSLContext(sslopt.get("ssl_version", ssl.PROTOCOL_TLS_CLIENT))
# Non default context need to manually enable SSLKEYLOGFILE support by setting the keylog_filename attribute. # Non default context need to manually enable SSLKEYLOGFILE support by setting the keylog_filename attribute.
# For more details see also: # For more details see also:
# * https://docs.python.org/3.8/library/ssl.html?highlight=sslkeylogfile#context-creation # * https://docs.python.org/3.8/library/ssl.html?highlight=sslkeylogfile#context-creation
# * https://docs.python.org/3.8/library/ssl.html?highlight=sslkeylogfile#ssl.SSLContext.keylog_filename # * https://docs.python.org/3.8/library/ssl.html?highlight=sslkeylogfile#ssl.SSLContext.keylog_filename
context.keylog_filename = os.environ.get("SSLKEYLOGFILE", None) context.keylog_filename = os.environ.get("SSLKEYLOGFILE", None)
if sslopt.get('cert_reqs', ssl.CERT_NONE) != ssl.CERT_NONE: if sslopt.get("cert_reqs", ssl.CERT_NONE) != ssl.CERT_NONE:
cafile = sslopt.get('ca_certs', None) cafile = sslopt.get("ca_certs", None)
capath = sslopt.get('ca_cert_path', None) capath = sslopt.get("ca_cert_path", None)
if cafile or capath: if cafile or capath:
context.load_verify_locations(cafile=cafile, capath=capath) context.load_verify_locations(cafile=cafile, capath=capath)
elif hasattr(context, 'load_default_certs'): elif hasattr(context, "load_default_certs"):
context.load_default_certs(ssl.Purpose.SERVER_AUTH) context.load_default_certs(ssl.Purpose.SERVER_AUTH)
if sslopt.get('certfile', None): if sslopt.get("certfile", None):
context.load_cert_chain( context.load_cert_chain(
sslopt['certfile'], sslopt["certfile"],
sslopt.get('keyfile', None), sslopt.get("keyfile", None),
sslopt.get('password', None), sslopt.get("password", None),
) )
# Python 3.10 switch to PROTOCOL_TLS_CLIENT defaults to "cert_reqs = ssl.CERT_REQUIRED" and "check_hostname = True" # Python 3.10 switch to PROTOCOL_TLS_CLIENT defaults to "cert_reqs = ssl.CERT_REQUIRED" and "check_hostname = True"
# If both disabled, set check_hostname before verify_mode # If both disabled, set check_hostname before verify_mode
# see https://github.com/liris/websocket-client/commit/b96a2e8fa765753e82eea531adb19716b52ca3ca#commitcomment-10803153 # see https://github.com/liris/websocket-client/commit/b96a2e8fa765753e82eea531adb19716b52ca3ca#commitcomment-10803153
if sslopt.get('cert_reqs', ssl.CERT_NONE) == ssl.CERT_NONE and not sslopt.get('check_hostname', False): if sslopt.get("cert_reqs", ssl.CERT_NONE) == ssl.CERT_NONE and not sslopt.get(
"check_hostname", False
):
context.check_hostname = False context.check_hostname = False
context.verify_mode = ssl.CERT_NONE context.verify_mode = ssl.CERT_NONE
else: else:
context.check_hostname = sslopt.get('check_hostname', True) context.check_hostname = sslopt.get("check_hostname", True)
context.verify_mode = sslopt.get('cert_reqs', ssl.CERT_REQUIRED) context.verify_mode = sslopt.get("cert_reqs", ssl.CERT_REQUIRED)
if 'ciphers' in sslopt: if "ciphers" in sslopt:
context.set_ciphers(sslopt['ciphers']) context.set_ciphers(sslopt["ciphers"])
if 'cert_chain' in sslopt: if "cert_chain" in sslopt:
certfile, keyfile, password = sslopt['cert_chain'] certfile, keyfile, password = sslopt["cert_chain"]
context.load_cert_chain(certfile, keyfile, password) context.load_cert_chain(certfile, keyfile, password)
if 'ecdh_curve' in sslopt: if "ecdh_curve" in sslopt:
context.set_ecdh_curve(sslopt['ecdh_curve']) context.set_ecdh_curve(sslopt["ecdh_curve"])
return context.wrap_socket( return context.wrap_socket(
sock, sock,
do_handshake_on_connect=sslopt.get('do_handshake_on_connect', True), do_handshake_on_connect=sslopt.get("do_handshake_on_connect", True),
suppress_ragged_eofs=sslopt.get('suppress_ragged_eofs', True), suppress_ragged_eofs=sslopt.get("suppress_ragged_eofs", True),
server_hostname=hostname, server_hostname=hostname,
) )
def _ssl_socket(sock, user_sslopt, hostname): def _ssl_socket(sock: socket.socket, user_sslopt: dict, hostname):
sslopt = dict(cert_reqs=ssl.CERT_REQUIRED) sslopt: dict = dict(cert_reqs=ssl.CERT_REQUIRED)
sslopt.update(user_sslopt) sslopt.update(user_sslopt)
certPath = os.environ.get('WEBSOCKET_CLIENT_CA_BUNDLE') certPath = os.environ.get("WEBSOCKET_CLIENT_CA_BUNDLE")
if certPath and os.path.isfile(certPath) \ if (
and user_sslopt.get('ca_certs', None) is None: certPath
sslopt['ca_certs'] = certPath and os.path.isfile(certPath)
elif certPath and os.path.isdir(certPath) \ and user_sslopt.get("ca_certs", None) is None
and user_sslopt.get('ca_cert_path', None) is None: ):
sslopt['ca_cert_path'] = certPath sslopt["ca_certs"] = certPath
elif (
certPath
and os.path.isdir(certPath)
and user_sslopt.get("ca_cert_path", None) is None
):
sslopt["ca_cert_path"] = certPath
if sslopt.get('server_hostname', None): if sslopt.get("server_hostname", None):
hostname = sslopt['server_hostname'] hostname = sslopt["server_hostname"]
check_hostname = sslopt.get('check_hostname', True) check_hostname = sslopt.get("check_hostname", True)
sock = _wrap_sni_socket(sock, sslopt, hostname, check_hostname) sock = _wrap_sni_socket(sock, sslopt, hostname, check_hostname)
return sock return sock
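The sslopt dict consumed by _ssl_socket()/_wrap_sni_socket() is passed straight through from the public API. A small sketch of the commonly used keys (the CA bundle path is a placeholder):

    import ssl
    import websocket

    ws = websocket.create_connection(
        "wss://example.invalid/secure",
        sslopt={
            "cert_reqs": ssl.CERT_REQUIRED,        # verify the server certificate
            "ca_certs": "/path/to/ca-bundle.pem",  # placeholder CA bundle
            "check_hostname": True,
        },
    )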
def _tunnel(sock, host, port, auth): def _tunnel(sock: socket.socket, host, port: int, auth) -> socket.socket:
debug("Connecting proxy...") debug("Connecting proxy...")
connect_header = "CONNECT {h}:{p} HTTP/1.1\r\n".format(h=host, p=port) connect_header = f"CONNECT {host}:{port} HTTP/1.1\r\n"
connect_header += "Host: {h}:{p}\r\n".format(h=host, p=port) connect_header += f"Host: {host}:{port}\r\n"
# TODO: support digest auth. # TODO: support digest auth.
if auth and auth[0]: if auth and auth[0]:
auth_str = auth[0] auth_str = auth[0]
if auth[1]: if auth[1]:
auth_str += ":" + auth[1] auth_str += f":{auth[1]}"
encoded_str = base64encode(auth_str.encode()).strip().decode().replace('\n', '') encoded_str = base64encode(auth_str.encode()).strip().decode().replace("\n", "")
connect_header += "Proxy-Authorization: Basic {str}\r\n".format(str=encoded_str) connect_header += f"Proxy-Authorization: Basic {encoded_str}\r\n"
connect_header += "\r\n" connect_header += "\r\n"
dump("request header", connect_header) dump("request header", connect_header)
@ -300,40 +332,37 @@ def _tunnel(sock, host, port, auth):
raise WebSocketProxyException(str(e)) raise WebSocketProxyException(str(e))
if status != 200: if status != 200:
raise WebSocketProxyException( raise WebSocketProxyException(f"failed CONNECT via proxy status: {status}")
"failed CONNECT via proxy status: {status}".format(status=status))
return sock return sock
def read_headers(sock): def read_headers(sock: socket.socket) -> tuple:
status = None status = None
status_message = None status_message = None
headers = {} headers: dict = {}
trace("--- response header ---") trace("--- response header ---")
while True: while True:
line = recv_line(sock) line = recv_line(sock)
line = line.decode('utf-8').strip() line = line.decode("utf-8").strip()
if not line: if not line:
break break
trace(line) trace(line)
if not status: if not status:
status_info = line.split(" ", 2) status_info = line.split(" ", 2)
status = int(status_info[1]) status = int(status_info[1])
if len(status_info) > 2: if len(status_info) > 2:
status_message = status_info[2] status_message = status_info[2]
else: else:
kv = line.split(":", 1) kv = line.split(":", 1)
if len(kv) == 2: if len(kv) != 2:
key, value = kv
if key.lower() == "set-cookie" and headers.get("set-cookie"):
headers["set-cookie"] = headers.get("set-cookie") + "; " + value.strip()
else:
headers[key.lower()] = value.strip()
else:
raise WebSocketException("Invalid header") raise WebSocketException("Invalid header")
key, value = kv
if key.lower() == "set-cookie" and headers.get("set-cookie"):
headers["set-cookie"] = headers.get("set-cookie") + "; " + value.strip()
else:
headers[key.lower()] = value.strip()
trace("-----------------------") trace("-----------------------")
View file
@@ -19,25 +19,38 @@ See the License for the specific language governing permissions and limitations under the License.
limitations under the License. limitations under the License.
""" """
_logger = logging.getLogger('websocket') _logger = logging.getLogger("websocket")
try: try:
from logging import NullHandler from logging import NullHandler
except ImportError: except ImportError:
class NullHandler(logging.Handler): class NullHandler(logging.Handler):
def emit(self, record) -> None: def emit(self, record) -> None:
pass pass
_logger.addHandler(NullHandler()) _logger.addHandler(NullHandler())
_traceEnabled = False _traceEnabled = False
__all__ = ["enableTrace", "dump", "error", "warning", "debug", "trace", __all__ = [
"isEnabledForError", "isEnabledForDebug", "isEnabledForTrace"] "enableTrace",
"dump",
"error",
"warning",
"debug",
"trace",
"isEnabledForError",
"isEnabledForDebug",
"isEnabledForTrace",
]
def enableTrace(traceable: bool, def enableTrace(
handler: logging.StreamHandler = logging.StreamHandler(), traceable: bool,
level: str = "DEBUG") -> None: handler: logging.StreamHandler = logging.StreamHandler(),
level: str = "DEBUG",
) -> None:
""" """
Turn on/off the traceability. Turn on/off the traceability.
@@ -55,7 +68,7 @@ def enableTrace(traceable: bool,
def dump(title: str, message: str) -> None: def dump(title: str, message: str) -> None:
if _traceEnabled: if _traceEnabled:
_logger.debug("--- " + title + " ---") _logger.debug(f"--- {title} ---")
_logger.debug(message) _logger.debug(message)
_logger.debug("-----------------------") _logger.debug("-----------------------")
View file
@@ -1,6 +1,7 @@
import errno import errno
import selectors import selectors
import socket import socket
from typing import Union
from ._exceptions import * from ._exceptions import *
from ._ssl_compat import * from ._ssl_compat import *
@@ -37,12 +38,18 @@ if hasattr(socket, "TCP_KEEPCNT"):
_default_timeout = None _default_timeout = None
__all__ = ["DEFAULT_SOCKET_OPTION", "sock_opt", "setdefaulttimeout", "getdefaulttimeout", __all__ = [
"recv", "recv_line", "send"] "DEFAULT_SOCKET_OPTION",
"sock_opt",
"setdefaulttimeout",
"getdefaulttimeout",
"recv",
"recv_line",
"send",
]
class sock_opt: class sock_opt:
def __init__(self, sockopt: list, sslopt: dict) -> None: def __init__(self, sockopt: list, sslopt: dict) -> None:
if sockopt is None: if sockopt is None:
sockopt = [] sockopt = []
@@ -53,7 +60,7 @@ class sock_opt:
self.timeout = None self.timeout = None
def setdefaulttimeout(timeout: int or float) -> None: def setdefaulttimeout(timeout: Union[int, float, None]) -> None:
""" """
Set the global timeout setting to connect. Set the global timeout setting to connect.
@@ -66,7 +73,7 @@ def setdefaulttimeout(timeout: int or float) -> None:
_default_timeout = timeout _default_timeout = timeout
def getdefaulttimeout() -> int or float: def getdefaulttimeout() -> Union[int, float, None]:
""" """
Get default timeout Get default timeout
@@ -89,7 +96,7 @@ def recv(sock: socket.socket, bufsize: int) -> bytes:
pass pass
except socket.error as exc: except socket.error as exc:
error_code = extract_error_code(exc) error_code = extract_error_code(exc)
if error_code != errno.EAGAIN and error_code != errno.EWOULDBLOCK: if error_code not in [errno.EAGAIN, errno.EWOULDBLOCK]:
raise raise
sel = selectors.DefaultSelector() sel = selectors.DefaultSelector()
@@ -113,14 +120,13 @@ def recv(sock: socket.socket, bufsize: int) -> bytes:
raise WebSocketTimeoutException(message) raise WebSocketTimeoutException(message)
except SSLError as e: except SSLError as e:
message = extract_err_message(e) message = extract_err_message(e)
if isinstance(message, str) and 'timed out' in message: if isinstance(message, str) and "timed out" in message:
raise WebSocketTimeoutException(message) raise WebSocketTimeoutException(message)
else: else:
raise raise
if not bytes_: if not bytes_:
raise WebSocketConnectionClosedException( raise WebSocketConnectionClosedException("Connection to remote host was lost.")
"Connection to remote host was lost.")
return bytes_ return bytes_
@@ -130,14 +136,14 @@ def recv_line(sock: socket.socket) -> bytes:
while True: while True:
c = recv(sock, 1) c = recv(sock, 1)
line.append(c) line.append(c)
if c == b'\n': if c == b"\n":
break break
return b''.join(line) return b"".join(line)
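recv_line() keeps its contract of reading one byte at a time until a b"\n" terminator. The sketch below reproduces that contract on a local socketpair; it deliberately omits the selector and timeout handling of the real recv().

import socket

def recv_line_sketch(sock: socket.socket) -> bytes:
    # accumulate single bytes until the newline terminator, inclusive
    line = []
    while True:
        c = sock.recv(1)
        line.append(c)
        if c == b"\n":
            break
    return b"".join(line)

a, b = socket.socketpair()
a.sendall(b"HTTP/1.1 101 Switching Protocols\r\nUpgrade: websocket\r\n")
assert recv_line_sketch(b) == b"HTTP/1.1 101 Switching Protocols\r\n"
a.close()
b.close()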
def send(sock: socket.socket, data: bytes) -> int: def send(sock: socket.socket, data: Union[bytes, str]) -> int:
if isinstance(data, str): if isinstance(data, str):
data = data.encode('utf-8') data = data.encode("utf-8")
if not sock: if not sock:
raise WebSocketConnectionClosedException("socket is already closed.") raise WebSocketConnectionClosedException("socket is already closed.")
@@ -151,7 +157,7 @@ def send(sock: socket.socket, data: bytes) -> int:
error_code = extract_error_code(exc) error_code = extract_error_code(exc)
if error_code is None: if error_code is None:
raise raise
if error_code != errno.EAGAIN and error_code != errno.EWOULDBLOCK: if error_code not in [errno.EAGAIN, errno.EWOULDBLOCK]:
raise raise
sel = selectors.DefaultSelector() sel = selectors.DefaultSelector()
View file
@@ -20,9 +20,8 @@ __all__ = ["HAVE_SSL", "ssl", "SSLError", "SSLWantReadError", "SSLWantWriteError
try: try:
import ssl import ssl
from ssl import SSLError from ssl import SSLError, SSLWantReadError, SSLWantWriteError
from ssl import SSLWantReadError
from ssl import SSLWantWriteError
HAVE_SSL = True HAVE_SSL = True
except ImportError: except ImportError:
# dummy class of SSLError for environment without ssl support # dummy class of SSLError for environment without ssl support
View file
@@ -1,7 +1,7 @@
import os import os
import socket import socket
import struct import struct
from typing import Optional
from urllib.parse import unquote, urlparse from urllib.parse import unquote, urlparse
""" """
@@ -67,7 +67,7 @@ def parse_url(url: str) -> tuple:
resource = "/" resource = "/"
if parsed.query: if parsed.query:
resource += "?" + parsed.query resource += f"?{parsed.query}"
return hostname, port, resource, is_secure return hostname, port, resource, is_secure
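parse_url() still returns a (hostname, port, resource, is_secure) tuple, with the query string folded into the resource. A usage sketch, assuming the upstream websocket-client module layout for the import path:

from websocket._url import parse_url  # module path assumed from upstream websocket-client

# expected shape for a wss:// URL: ('example.org', 443, '/chat?room=1', True)
print(parse_url("wss://example.org/chat?room=1"))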
@@ -93,37 +93,50 @@ def _is_subnet_address(hostname: str) -> bool:
def _is_address_in_network(ip: str, net: str) -> bool: def _is_address_in_network(ip: str, net: str) -> bool:
ipaddr = struct.unpack('!I', socket.inet_aton(ip))[0] ipaddr: int = struct.unpack("!I", socket.inet_aton(ip))[0]
netaddr, netmask = net.split('/') netaddr, netmask = net.split("/")
netaddr = struct.unpack('!I', socket.inet_aton(netaddr))[0] netaddr: int = struct.unpack("!I", socket.inet_aton(netaddr))[0]
netmask = (0xFFFFFFFF << (32 - int(netmask))) & 0xFFFFFFFF netmask = (0xFFFFFFFF << (32 - int(netmask))) & 0xFFFFFFFF
return ipaddr & netmask == netaddr return ipaddr & netmask == netaddr
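The mask arithmetic above can be checked by hand with the same stdlib calls. This worked instance avoids the private helper and simply repeats the unpack-and-mask steps for 10.0.5.7 against 10.0.0.0/16.

import socket
import struct

ip = struct.unpack("!I", socket.inet_aton("10.0.5.7"))[0]
netaddr = struct.unpack("!I", socket.inet_aton("10.0.0.0"))[0]
netmask = (0xFFFFFFFF << (32 - 16)) & 0xFFFFFFFF  # /16 -> 0xFFFF0000
assert ip & netmask == netaddr  # 10.0.5.7 lies inside 10.0.0.0/16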
def _is_no_proxy_host(hostname: str, no_proxy: list) -> bool: def _is_no_proxy_host(hostname: str, no_proxy: Optional[list]) -> bool:
if not no_proxy: if not no_proxy:
v = os.environ.get("no_proxy", os.environ.get("NO_PROXY", "")).replace(" ", "") if v := os.environ.get("no_proxy", os.environ.get("NO_PROXY", "")).replace(
if v: " ", ""
):
no_proxy = v.split(",") no_proxy = v.split(",")
if not no_proxy: if not no_proxy:
no_proxy = DEFAULT_NO_PROXY_HOST no_proxy = DEFAULT_NO_PROXY_HOST
if '*' in no_proxy: if "*" in no_proxy:
return True return True
if hostname in no_proxy: if hostname in no_proxy:
return True return True
if _is_ip_address(hostname): if _is_ip_address(hostname):
return any([_is_address_in_network(hostname, subnet) for subnet in no_proxy if _is_subnet_address(subnet)]) return any(
for domain in [domain for domain in no_proxy if domain.startswith('.')]: [
_is_address_in_network(hostname, subnet)
for subnet in no_proxy
if _is_subnet_address(subnet)
]
)
for domain in [domain for domain in no_proxy if domain.startswith(".")]:
if hostname.endswith(domain): if hostname.endswith(domain):
return True return True
return False return False
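The matching rules are: a bare "*" entry disables proxying everywhere, an exact hostname match wins, leading-dot entries match as domain suffixes, and subnets are checked for IP hostnames. A reduced sketch of the first three rules; no_proxy_match is a hypothetical name and the subnet case is omitted.

def no_proxy_match(hostname: str, no_proxy: list) -> bool:
    # wildcard, exact host, then leading-dot domain suffix
    if "*" in no_proxy:
        return True
    if hostname in no_proxy:
        return True
    return any(hostname.endswith(d) for d in no_proxy if d.startswith("."))

assert no_proxy_match("api.internal.example", [".internal.example"])
assert not no_proxy_match("example.org", ["localhost", "127.0.0.1"])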
def get_proxy_info( def get_proxy_info(
hostname: str, is_secure: bool, proxy_host: str = None, proxy_port: int = 0, proxy_auth: tuple = None, hostname: str,
no_proxy: list = None, proxy_type: str = 'http') -> tuple: is_secure: bool,
proxy_host: Optional[str] = None,
proxy_port: int = 0,
proxy_auth: Optional[tuple] = None,
no_proxy: Optional[list] = None,
proxy_type: str = "http",
) -> tuple:
""" """
Try to retrieve proxy host and port from environment Try to retrieve proxy host and port from environment
if not provided in options. if not provided in options.
@@ -159,10 +172,16 @@ def get_proxy_info(
return proxy_host, port, auth return proxy_host, port, auth
env_key = "https_proxy" if is_secure else "http_proxy" env_key = "https_proxy" if is_secure else "http_proxy"
value = os.environ.get(env_key, os.environ.get(env_key.upper(), "")).replace(" ", "") value = os.environ.get(env_key, os.environ.get(env_key.upper(), "")).replace(
" ", ""
)
if value: if value:
proxy = urlparse(value) proxy = urlparse(value)
auth = (unquote(proxy.username), unquote(proxy.password)) if proxy.username else None auth = (
(unquote(proxy.username), unquote(proxy.password))
if proxy.username
else None
)
return proxy.hostname, proxy.port, auth return proxy.hostname, proxy.port, auth
return None, 0, None return None, 0, None
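When neither explicit proxy arguments nor *_proxy environment variables are present, the function falls through to the final return. A hedged usage sketch, assuming the upstream module layout for the import path:

import os
from websocket._url import get_proxy_info  # module path assumed from upstream

os.environ.pop("https_proxy", None)
os.environ.pop("HTTPS_PROXY", None)
print(get_proxy_info("example.org", is_secure=True))  # (None, 0, None)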
View file
@@ -1,3 +1,5 @@
from typing import Union
""" """
_url.py _url.py
websocket - WebSocket client library for Python websocket - WebSocket client library for Python
@@ -20,7 +22,6 @@ __all__ = ["NoLock", "validate_utf8", "extract_err_message", "extract_error_code
class NoLock: class NoLock:
def __enter__(self) -> None: def __enter__(self) -> None:
pass pass
@@ -33,8 +34,9 @@ try:
# strings. # strings.
from wsaccel.utf8validator import Utf8Validator from wsaccel.utf8validator import Utf8Validator
def _validate_utf8(utfbytes: bytes) -> bool: def _validate_utf8(utfbytes: Union[str, bytes]) -> bool:
return Utf8Validator().validate(utfbytes)[0] result: bool = Utf8Validator().validate(utfbytes)[0]
return result
except ImportError: except ImportError:
# UTF-8 validator # UTF-8 validator
@@ -46,44 +48,396 @@ except ImportError:
_UTF8D = [ _UTF8D = [
    # The first part of the table maps bytes to character classes that
    # to reduce the size of the transition table and create bitmasks.
    0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
    0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
    0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
    0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
    1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,
    7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
    8,8,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
    10,3,3,3,3,3,3,3,3,3,3,3,3,4,3,3, 11,6,6,6,5,8,8,8,8,8,8,8,8,8,8,8,
    # The second part is a transition table that maps a combination
    # of a state of the automaton and a character class to a state.
    0,12,24,36,60,96,84,12,12,12,48,72, 12,12,12,12,12,12,12,12,12,12,12,12,
    12, 0,12,12,12,12,12, 0,12, 0,12,12, 12,24,12,12,12,12,12,24,12,24,12,12,
    12,12,12,12,12,12,12,24,12,12,12,12, 12,24,12,12,12,12,12,12,12,24,12,12,
    12,12,12,12,12,12,12,36,12,36,12,12, 12,36,12,12,12,12,12,36,12,36,12,12,
    12,36,12,12,12,12,12,12,12,12,12,12,
]
# (the updated revision only reflows this table to one value per line; the values themselves are unchanged)
def _decode(state: int, codep: int, ch: int) -> tuple: def _decode(state: int, codep: int, ch: int) -> tuple:
tp = _UTF8D[ch] tp = _UTF8D[ch]
codep = (ch & 0x3f) | (codep << 6) if ( codep = (
state != _UTF8_ACCEPT) else (0xff >> tp) & ch (ch & 0x3F) | (codep << 6) if (state != _UTF8_ACCEPT) else (0xFF >> tp) & ch
)
state = _UTF8D[256 + state + tp] state = _UTF8D[256 + state + tp]
return state, codep return state, codep
def _validate_utf8(utfbytes: str or bytes) -> bool: def _validate_utf8(utfbytes: Union[str, bytes]) -> bool:
state = _UTF8_ACCEPT state = _UTF8_ACCEPT
codep = 0 codep = 0
for i in utfbytes: for i in utfbytes:
state, codep = _decode(state, codep, i) state, codep = _decode(state, codep, int(i))
if state == _UTF8_REJECT: if state == _UTF8_REJECT:
return False return False
return True return True
def validate_utf8(utfbytes: str or bytes) -> bool: def validate_utf8(utfbytes: Union[str, bytes]) -> bool:
""" """
validate utf8 byte string. validate utf8 byte string.
utfbytes: utf byte string to check. utfbytes: utf byte string to check.
@@ -92,13 +446,14 @@ def validate_utf8(utfbytes: str or bytes) -> bool:
return _validate_utf8(utfbytes) return _validate_utf8(utfbytes)
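A quick check of the public wrapper, assuming the upstream module layout for the import path; the second call feeds an illegal continuation byte, so the DFA (or wsaccel, when installed) rejects it.

from websocket._utils import validate_utf8  # module path assumed from upstream

print(validate_utf8("héllo".encode("utf-8")))  # True
print(validate_utf8(b"\xc3\x28"))              # False: 0x28 is not a continuation byte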
def extract_err_message(exception: Exception) -> str or None: def extract_err_message(exception: Exception) -> Union[str, None]:
if exception.args: if exception.args:
return exception.args[0] exception_message: str = exception.args[0]
return exception_message
else: else:
return None return None
def extract_error_code(exception: Exception) -> int or None: def extract_error_code(exception: Exception) -> Union[int, None]:
if exception.args and len(exception.args) > 1: if exception.args and len(exception.args) > 1:
return exception.args[0] if isinstance(exception.args[0], int) else None return exception.args[0] if isinstance(exception.args[0], int) else None
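The two extractors differ only in how strict they are about args: extract_err_message returns args[0] whatever its type, while extract_error_code returns it only when there are at least two args and the first is an int. A small sketch, assuming the upstream module layout for the import path:

from websocket._utils import extract_err_message, extract_error_code  # assumed path

sock_err = OSError(11, "Resource temporarily unavailable")
print(extract_err_message(sock_err))  # 11 -- args[0], whatever its type
print(extract_error_code(sock_err))   # 11 -- args[0] is an int and len(args) > 1

plain_err = ValueError("bad frame")
print(extract_err_message(plain_err)) # 'bad frame'
print(extract_error_code(plain_err))  # None -- fewer than two args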
Some files were not shown because too many files have changed in this diff.