Merge branch 'nightly' into dependabot/pip/nightly/dnspython-2.6.1

JonnyWong16 2024-03-24 15:25:09 -07:00 committed by GitHub
commit 85a23aba49
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
126 changed files with 5448 additions and 3193 deletions


@@ -47,7 +47,7 @@ jobs:
          version: latest
      - name: Cache Docker Layers
-       uses: actions/cache@v3
+       uses: actions/cache@v4
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}


@@ -129,7 +129,7 @@ jobs:
          echo "$EOF" >> $GITHUB_OUTPUT
      - name: Create Release
-       uses: softprops/action-gh-release@v1
+       uses: softprops/action-gh-release@v2
        id: create_release
        env:
          GITHUB_TOKEN: ${{ secrets.GHACTIONS_TOKEN }}


@@ -1 +1 @@
-__version__ = "1.2.3"
+__version__ = "1.3.0"


@@ -168,9 +168,9 @@ class Arrow:
            isinstance(tzinfo, dt_tzinfo)
            and hasattr(tzinfo, "localize")
            and hasattr(tzinfo, "zone")
-           and tzinfo.zone  # type: ignore[attr-defined]
+           and tzinfo.zone
        ):
-           tzinfo = parser.TzinfoParser.parse(tzinfo.zone)  # type: ignore[attr-defined]
+           tzinfo = parser.TzinfoParser.parse(tzinfo.zone)
        elif isinstance(tzinfo, str):
            tzinfo = parser.TzinfoParser.parse(tzinfo)
@@ -495,7 +495,7 @@ class Arrow:
            yield current
            values = [getattr(current, f) for f in cls._ATTRS]
-           current = cls(*values, tzinfo=tzinfo).shift(  # type: ignore
+           current = cls(*values, tzinfo=tzinfo).shift(  # type: ignore[misc]
                **{frame_relative: relative_steps}
            )
@@ -578,7 +578,7 @@ class Arrow:
        for _ in range(3 - len(values)):
            values.append(1)
-       floor = self.__class__(*values, tzinfo=self.tzinfo)  # type: ignore
+       floor = self.__class__(*values, tzinfo=self.tzinfo)  # type: ignore[misc]
        if frame_absolute == "week":
            # if week_start is greater than self.isoweekday() go back one week by setting delta = 7
@@ -792,7 +792,6 @@ class Arrow:
        return self._datetime.isoformat()
    def __format__(self, formatstr: str) -> str:
        if len(formatstr) > 0:
            return self.format(formatstr)
@@ -804,7 +803,6 @@ class Arrow:
    # attributes and properties
    def __getattr__(self, name: str) -> int:
        if name == "week":
            return self.isocalendar()[1]
@@ -965,7 +963,6 @@ class Arrow:
        absolute_kwargs = {}
        for key, value in kwargs.items():
            if key in self._ATTRS:
                absolute_kwargs[key] = value
            elif key in ["week", "quarter"]:
@@ -1022,7 +1019,6 @@ class Arrow:
        additional_attrs = ["weeks", "quarters", "weekday"]
        for key, value in kwargs.items():
            if key in self._ATTRS_PLURAL or key in additional_attrs:
                relative_kwargs[key] = value
            else:
@@ -1259,11 +1255,10 @@ class Arrow:
            )
            if trunc(abs(delta)) != 1:
-               granularity += "s"  # type: ignore
+               granularity += "s"  # type: ignore[assignment]
            return locale.describe(granularity, delta, only_distance=only_distance)
        else:
            if not granularity:
                raise ValueError(
                    "Empty granularity list provided. "
@@ -1314,7 +1309,7 @@ class Arrow:
    def dehumanize(self, input_string: str, locale: str = "en_us") -> "Arrow":
        """Returns a new :class:`Arrow <arrow.arrow.Arrow>` object, that represents
-       the time difference relative to the attrbiutes of the
+       the time difference relative to the attributes of the
        :class:`Arrow <arrow.arrow.Arrow>` object.
        :param timestring: a ``str`` representing a humanized relative time.
@@ -1367,7 +1362,6 @@ class Arrow:
        # Search input string for each time unit within locale
        for unit, unit_object in locale_obj.timeframes.items():
            # Need to check the type of unit_object to create the correct dictionary
            if isinstance(unit_object, Mapping):
                strings_to_search = unit_object
@@ -1378,7 +1372,6 @@ class Arrow:
            # Needs to cycle all through strings as some locales have strings that
            # could overlap in a regex match, since input validation isn't being performed.
            for time_delta, time_string in strings_to_search.items():
                # Replace {0} with regex \d representing digits
                search_string = str(time_string)
                search_string = search_string.format(r"\d+")
@@ -1419,7 +1412,7 @@ class Arrow:
        # Assert error if string does not modify any units
        if not any([True for k, v in unit_visited.items() if v]):
            raise ValueError(
-               "Input string not valid. Note: Some locales do not support the week granulairty in Arrow. "
+               "Input string not valid. Note: Some locales do not support the week granularity in Arrow. "
                "If you are attempting to use the week granularity on an unsupported locale, this could be the cause of this error."
            )
@@ -1718,7 +1711,6 @@ class Arrow:
    # math
    def __add__(self, other: Any) -> "Arrow":
        if isinstance(other, (timedelta, relativedelta)):
            return self.fromdatetime(self._datetime + other, self._datetime.tzinfo)
@@ -1736,7 +1728,6 @@ class Arrow:
        pass  # pragma: no cover
    def __sub__(self, other: Any) -> Union[timedelta, "Arrow"]:
        if isinstance(other, (timedelta, relativedelta)):
            return self.fromdatetime(self._datetime - other, self._datetime.tzinfo)
@@ -1749,7 +1740,6 @@ class Arrow:
        return NotImplemented
    def __rsub__(self, other: Any) -> timedelta:
        if isinstance(other, dt_datetime):
            return other - self._datetime
@@ -1758,42 +1748,36 @@ class Arrow:
    # comparisons
    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, (Arrow, dt_datetime)):
            return False
        return self._datetime == self._get_datetime(other)
    def __ne__(self, other: Any) -> bool:
        if not isinstance(other, (Arrow, dt_datetime)):
            return True
        return not self.__eq__(other)
    def __gt__(self, other: Any) -> bool:
        if not isinstance(other, (Arrow, dt_datetime)):
            return NotImplemented
        return self._datetime > self._get_datetime(other)
    def __ge__(self, other: Any) -> bool:
        if not isinstance(other, (Arrow, dt_datetime)):
            return NotImplemented
        return self._datetime >= self._get_datetime(other)
    def __lt__(self, other: Any) -> bool:
        if not isinstance(other, (Arrow, dt_datetime)):
            return NotImplemented
        return self._datetime < self._get_datetime(other)
    def __le__(self, other: Any) -> bool:
        if not isinstance(other, (Arrow, dt_datetime)):
            return NotImplemented
@@ -1865,7 +1849,6 @@ class Arrow:
    def _get_iteration_params(cls, end: Any, limit: Optional[int]) -> Tuple[Any, int]:
        """Sets default end and limit values for range method."""
        if end is None:
            if limit is None:
                raise ValueError("One of 'end' or 'limit' is required.")
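The hunks above are mostly typing and whitespace cleanups from the arrow 1.3.0 bump, plus two docstring typo fixes in humanize()/dehumanize(). A quick round trip of those two public methods, as a sketch only (the printed strings in the comments are illustrative, not taken from this diff):

import arrow

now = arrow.utcnow()
earlier = now.shift(weeks=-2)

print(earlier.humanize(granularity="week"))  # e.g. "2 weeks ago"
print(now.dehumanize("2 weeks ago"))         # an Arrow roughly two weeks before `now`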


@@ -267,11 +267,9 @@ class ArrowFactory:
            raise TypeError(f"Cannot parse single argument of type {type(arg)!r}.")
        elif arg_count == 2:
            arg_1, arg_2 = args[0], args[1]
            if isinstance(arg_1, datetime):
                # (datetime, tzinfo/str) -> fromdatetime @ tzinfo
                if isinstance(arg_2, (dt_tzinfo, str)):
                    return self.type.fromdatetime(arg_1, tzinfo=arg_2)
@@ -281,7 +279,6 @@ class ArrowFactory:
                )
            elif isinstance(arg_1, date):
                # (date, tzinfo/str) -> fromdate @ tzinfo
                if isinstance(arg_2, (dt_tzinfo, str)):
                    return self.type.fromdate(arg_1, tzinfo=arg_2)


@@ -29,7 +29,6 @@ FORMAT_W3C: Final[str] = "YYYY-MM-DD HH:mm:ssZZ"
class DateTimeFormatter:
    # This pattern matches characters enclosed in square brackets are matched as
    # an atomic group. For more info on atomic groups and how to they are
    # emulated in Python's re library, see https://stackoverflow.com/a/13577411/2701578
@@ -41,18 +40,15 @@ class DateTimeFormatter:
    locale: locales.Locale
    def __init__(self, locale: str = DEFAULT_LOCALE) -> None:
        self.locale = locales.get_locale(locale)
    def format(cls, dt: datetime, fmt: str) -> str:
        # FIXME: _format_token() is nullable
        return cls._FORMAT_RE.sub(
            lambda m: cast(str, cls._format_token(dt, m.group(0))), fmt
        )
    def _format_token(self, dt: datetime, token: Optional[str]) -> Optional[str]:
        if token and token.startswith("[") and token.endswith("]"):
            return token[1:-1]


@@ -129,7 +129,6 @@ class Locale:
        _locale_map[locale_name.lower().replace("_", "-")] = cls
    def __init__(self) -> None:
        self._month_name_to_ordinal = None
    def describe(
@@ -174,7 +173,7 @@ class Locale:
        # Needed to determine the correct relative string to use
        timeframe_value = 0
-       for _unit_name, unit_value in timeframes:
+       for _, unit_value in timeframes:
            if trunc(unit_value) != 0:
                timeframe_value = trunc(unit_value)
                break
@@ -285,7 +284,6 @@ class Locale:
        timeframe: TimeFrameLiteral,
        delta: Union[float, int],
    ) -> str:
        if timeframe == "now":
            return humanized
@@ -425,7 +423,7 @@ class ItalianLocale(Locale):
        "hours": "{0} ore",
        "day": "un giorno",
        "days": "{0} giorni",
-       "week": "una settimana,",
+       "week": "una settimana",
        "weeks": "{0} settimane",
        "month": "un mese",
        "months": "{0} mesi",
@@ -867,14 +865,16 @@ class FinnishLocale(Locale):
    timeframes: ClassVar[Mapping[TimeFrameLiteral, Union[str, Mapping[str, str]]]] = {
        "now": "juuri nyt",
-       "second": "sekunti",
-       "seconds": {"past": "{0} muutama sekunti", "future": "{0} muutaman sekunnin"},
+       "second": {"past": "sekunti", "future": "sekunnin"},
+       "seconds": {"past": "{0} sekuntia", "future": "{0} sekunnin"},
        "minute": {"past": "minuutti", "future": "minuutin"},
        "minutes": {"past": "{0} minuuttia", "future": "{0} minuutin"},
        "hour": {"past": "tunti", "future": "tunnin"},
        "hours": {"past": "{0} tuntia", "future": "{0} tunnin"},
-       "day": "päivä",
+       "day": {"past": "päivä", "future": "päivän"},
        "days": {"past": "{0} päivää", "future": "{0} päivän"},
+       "week": {"past": "viikko", "future": "viikon"},
+       "weeks": {"past": "{0} viikkoa", "future": "{0} viikon"},
        "month": {"past": "kuukausi", "future": "kuukauden"},
        "months": {"past": "{0} kuukautta", "future": "{0} kuukauden"},
        "year": {"past": "vuosi", "future": "vuoden"},
@@ -1887,7 +1887,7 @@ class GermanBaseLocale(Locale):
    future = "in {0}"
    and_word = "und"
-   timeframes = {
+   timeframes: ClassVar[Dict[TimeFrameLiteral, str]] = {
        "now": "gerade eben",
        "second": "einer Sekunde",
        "seconds": "{0} Sekunden",
@@ -1982,7 +1982,9 @@ class GermanBaseLocale(Locale):
            return super().describe(timeframe, delta, only_distance)
        # German uses a different case without 'in' or 'ago'
-       humanized = self.timeframes_only_distance[timeframe].format(trunc(abs(delta)))
+       humanized: str = self.timeframes_only_distance[timeframe].format(
+           trunc(abs(delta))
+       )
        return humanized
@@ -2547,6 +2549,8 @@ class ArabicLocale(Locale):
        "hours": {"2": "ساعتين", "ten": "{0} ساعات", "higher": "{0} ساعة"},
        "day": "يوم",
        "days": {"2": "يومين", "ten": "{0} أيام", "higher": "{0} يوم"},
+       "week": "اسبوع",
+       "weeks": {"2": "اسبوعين", "ten": "{0} أسابيع", "higher": "{0} اسبوع"},
        "month": "شهر",
        "months": {"2": "شهرين", "ten": "{0} أشهر", "higher": "{0} شهر"},
        "year": "سنة",
@@ -3709,6 +3713,8 @@ class HungarianLocale(Locale):
        "hours": {"past": "{0} órával", "future": "{0} óra"},
        "day": {"past": "egy nappal", "future": "egy nap"},
        "days": {"past": "{0} nappal", "future": "{0} nap"},
+       "week": {"past": "egy héttel", "future": "egy hét"},
+       "weeks": {"past": "{0} héttel", "future": "{0} hét"},
        "month": {"past": "egy hónappal", "future": "egy hónap"},
        "months": {"past": "{0} hónappal", "future": "{0} hónap"},
        "year": {"past": "egy évvel", "future": "egy év"},
@@ -3934,7 +3940,6 @@ class ThaiLocale(Locale):
class LaotianLocale(Locale):
    names = ["lo", "lo-la"]
    past = "{0} ກ່ອນຫນ້ານີ້"
@@ -4119,6 +4124,7 @@ class BengaliLocale(Locale):
            return f"{n}র্থ"
        if n == 6:
            return f"{n}ষ্ঠ"
+       return ""
class RomanshLocale(Locale):
@@ -4137,6 +4143,8 @@ class RomanshLocale(Locale):
        "hours": "{0} ura",
        "day": "in di",
        "days": "{0} dis",
+       "week": "in'emna",
+       "weeks": "{0} emnas",
        "month": "in mais",
        "months": "{0} mais",
        "year": "in onn",
@@ -5399,7 +5407,7 @@ class LuxembourgishLocale(Locale):
    future = "an {0}"
    and_word = "an"
-   timeframes = {
+   timeframes: ClassVar[Dict[TimeFrameLiteral, str]] = {
        "now": "just elo",
        "second": "enger Sekonn",
        "seconds": "{0} Sekonnen",
@@ -5487,7 +5495,9 @@ class LuxembourgishLocale(Locale):
            return super().describe(timeframe, delta, only_distance)
        # Luxembourgish uses a different case without 'in' or 'ago'
-       humanized = self.timeframes_only_distance[timeframe].format(trunc(abs(delta)))
+       humanized: str = self.timeframes_only_distance[timeframe].format(
+           trunc(abs(delta))
+       )
        return humanized
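Several locales above (Finnish, Arabic, Hungarian, Romansh) gain "week"/"weeks" timeframes. A minimal sketch of how those entries get exercised through Locale.describe(); the exact output strings depend on the locale data added here and are only indicated approximately in the comment:

from arrow import locales

fi = locales.get_locale("fi")
# A negative delta selects the "past" form added above, e.g. "2 viikkoa sitten"
print(fi.describe("weeks", -2))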


@@ -159,7 +159,6 @@ class DateTimeParser:
    _input_re_map: Dict[_FORMAT_TYPE, Pattern[str]]
    def __init__(self, locale: str = DEFAULT_LOCALE, cache_size: int = 0) -> None:
        self.locale = locales.get_locale(locale)
        self._input_re_map = self._BASE_INPUT_RE_MAP.copy()
        self._input_re_map.update(
@@ -196,7 +195,6 @@ class DateTimeParser:
    def parse_iso(
        self, datetime_string: str, normalize_whitespace: bool = False
    ) -> datetime:
        if normalize_whitespace:
            datetime_string = re.sub(r"\s+", " ", datetime_string.strip())
@@ -236,13 +234,14 @@ class DateTimeParser:
        ]
        if has_time:
            if has_space_divider:
                date_string, time_string = datetime_string.split(" ", 1)
            else:
                date_string, time_string = datetime_string.split("T", 1)
-           time_parts = re.split(r"[\+\-Z]", time_string, 1, re.IGNORECASE)
+           time_parts = re.split(
+               r"[\+\-Z]", time_string, maxsplit=1, flags=re.IGNORECASE
+           )
            time_components: Optional[Match[str]] = self._TIME_RE.match(time_parts[0])
@@ -303,7 +302,6 @@ class DateTimeParser:
        fmt: Union[List[str], str],
        normalize_whitespace: bool = False,
    ) -> datetime:
        if normalize_whitespace:
            datetime_string = re.sub(r"\s+", " ", datetime_string)
@@ -341,12 +339,11 @@ class DateTimeParser:
                    f"Unable to find a match group for the specified token {token!r}."
                )
-           self._parse_token(token, value, parts)  # type: ignore
+           self._parse_token(token, value, parts)  # type: ignore[arg-type]
        return self._build_datetime(parts)
    def _generate_pattern_re(self, fmt: str) -> Tuple[List[_FORMAT_TYPE], Pattern[str]]:
        # fmt is a string of tokens like 'YYYY-MM-DD'
        # we construct a new string by replacing each
        # token by its pattern:
@@ -498,7 +495,6 @@ class DateTimeParser:
        value: Any,
        parts: _Parts,
    ) -> None:
        if token == "YYYY":
            parts["year"] = int(value)
@@ -508,7 +504,7 @@ class DateTimeParser:
        elif token in ["MMMM", "MMM"]:
            # FIXME: month_number() is nullable
-           parts["month"] = self.locale.month_number(value.lower())  # type: ignore
+           parts["month"] = self.locale.month_number(value.lower())  # type: ignore[typeddict-item]
        elif token in ["MM", "M"]:
            parts["month"] = int(value)
@@ -588,7 +584,6 @@ class DateTimeParser:
        weekdate = parts.get("weekdate")
        if weekdate is not None:
            year, week = int(weekdate[0]), int(weekdate[1])
            if weekdate[2] is not None:
@@ -712,7 +707,6 @@ class DateTimeParser:
            )
    def _parse_multiformat(self, string: str, formats: Iterable[str]) -> datetime:
        _datetime: Optional[datetime] = None
        for fmt in formats:
@@ -740,12 +734,11 @@ class DateTimeParser:
class TzinfoParser:
    _TZINFO_RE: ClassVar[Pattern[str]] = re.compile(
-       r"^([\+\-])?(\d{2})(?:\:?(\d{2}))?$"
+       r"^(?:\(UTC)*([\+\-])?(\d{2})(?:\:?(\d{2}))?"
    )
    @classmethod
    def parse(cls, tzinfo_string: str) -> dt_tzinfo:
        tzinfo: Optional[dt_tzinfo] = None
        if tzinfo_string == "local":
@@ -755,7 +748,6 @@ class TzinfoParser:
            tzinfo = tz.tzutc()
        else:
            iso_match = cls._TZINFO_RE.match(tzinfo_string)
            if iso_match:
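The _TZINFO_RE change above drops the trailing "$" anchor and tolerates a leading "(UTC" prefix, and the re.split() call now passes maxsplit/flags as keywords. A standalone check of the new pattern, using plain re independently of arrow:

import re

TZINFO_RE = re.compile(r"^(?:\(UTC)*([\+\-])?(\d{2})(?:\:?(\d{2}))?")

for s in ("+09:00", "-0500", "(UTC+08:00) Beijing"):
    m = TZINFO_RE.match(s)
    # ('+', '09', '00'), ('-', '05', '00'), ('+', '08', '00')
    print(s, "->", m.groups() if m else None)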


@@ -26,6 +26,12 @@ def update_wrapper(
class _HashedSeq(list):
+   """This class guarantees that hash() will be called no more than once
+   per element. This is important because the lru_cache() will hash
+   the key multiple times on a cache miss.
+   """
    __slots__ = 'hashvalue'
    def __init__(self, tup, hash=hash):
@@ -41,45 +47,57 @@ def _make_key(
    kwds,
    typed,
    kwd_mark=(object(),),
-   fasttypes=set([int, str, frozenset, type(None)]),
-   sorted=sorted,
+   fasttypes={int, str},
    tuple=tuple,
    type=type,
    len=len,
):
-   'Make a cache key from optionally typed positional and keyword arguments'
+   """Make a cache key from optionally typed positional and keyword arguments
+   The key is constructed in a way that is flat as possible rather than
+   as a nested structure that would take more memory.
+   If there is only a single argument and its data type is known to cache
+   its hash value, then that argument is returned without a wrapper. This
+   saves space and improves lookup speed.
+   """
+   # All of code below relies on kwds preserving the order input by the user.
+   # Formerly, we sorted() the kwds before looping. The new way is *much*
+   # faster; however, it means that f(x=1, y=2) will now be treated as a
+   # distinct call from f(y=2, x=1) which will be cached separately.
    key = args
    if kwds:
-       sorted_items = sorted(kwds.items())
        key += kwd_mark
-       for item in sorted_items:
+       for item in kwds.items():
            key += item
    if typed:
        key += tuple(type(v) for v in args)
        if kwds:
-           key += tuple(type(v) for k, v in sorted_items)
+           key += tuple(type(v) for v in kwds.values())
    elif len(key) == 1 and type(key[0]) in fasttypes:
        return key[0]
    return _HashedSeq(key)
-def lru_cache(maxsize=100, typed=False):  # noqa: C901
+def lru_cache(maxsize=128, typed=False):
    """Least-recently-used cache decorator.
    If *maxsize* is set to None, the LRU features are disabled and the cache
    can grow without bound.
    If *typed* is True, arguments of different types will be cached separately.
-   For example, f(3.0) and f(3) will be treated as distinct calls with
-   distinct results.
+   For example, f(decimal.Decimal("3.0")) and f(3.0) will be treated as
+   distinct calls with distinct results. Some types such as str and int may
+   be cached separately even when typed is false.
    Arguments to the cached function must be hashable.
-   View the cache statistics named tuple (hits, misses, maxsize, currsize) with
-   f.cache_info(). Clear the cache and statistics with f.cache_clear().
+   View the cache statistics named tuple (hits, misses, maxsize, currsize)
+   with f.cache_info(). Clear the cache and statistics with f.cache_clear().
    Access the underlying function with f.__wrapped__.
-   See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
+   See: https://en.wikipedia.org/wiki/Cache_replacement_policies#Least_recently_used_(LRU)
    """
@@ -88,108 +106,138 @@ def lru_cache(maxsize=100, typed=False):  # noqa: C901
    # The internals of the lru_cache are encapsulated for thread safety and
    # to allow the implementation to change (including a possible C version).
+   if isinstance(maxsize, int):
+       # Negative maxsize is treated as 0
+       if maxsize < 0:
+           maxsize = 0
+   elif callable(maxsize) and isinstance(typed, bool):
+       # The user_function was passed in directly via the maxsize argument
+       user_function, maxsize = maxsize, 128
+       wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
+       wrapper.cache_parameters = lambda: {'maxsize': maxsize, 'typed': typed}
+       return update_wrapper(wrapper, user_function)
+   elif maxsize is not None:
+       raise TypeError('Expected first argument to be an integer, a callable, or None')
    def decorating_function(user_function):
-       cache = dict()
-       stats = [0, 0]  # make statistics updateable non-locally
+       wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
+       wrapper.cache_parameters = lambda: {'maxsize': maxsize, 'typed': typed}
HITS, MISSES = 0, 1 # names for the stats fields
make_key = _make_key
cache_get = cache.get # bound method to lookup key or return None
_len = len # localize the global len() function
lock = RLock() # because linkedlist updates aren't threadsafe
root = [] # root of the circular doubly linked list
root[:] = [root, root, None, None] # initialize by pointing to self
nonlocal_root = [root] # make updateable non-locally
PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields
if maxsize == 0:
def wrapper(*args, **kwds):
# no caching, just do a statistics update after a successful call
result = user_function(*args, **kwds)
stats[MISSES] += 1
return result
elif maxsize is None:
def wrapper(*args, **kwds):
# simple caching without ordering or size limit
key = make_key(args, kwds, typed)
result = cache_get(
key, root
) # root used here as a unique not-found sentinel
if result is not root:
stats[HITS] += 1
return result
result = user_function(*args, **kwds)
cache[key] = result
stats[MISSES] += 1
return result
else:
def wrapper(*args, **kwds):
# size limited caching that tracks accesses by recency
key = make_key(args, kwds, typed) if kwds or typed else args
with lock:
link = cache_get(key)
if link is not None:
# record recent use of the key by moving it
# to the front of the list
(root,) = nonlocal_root
link_prev, link_next, key, result = link
link_prev[NEXT] = link_next
link_next[PREV] = link_prev
last = root[PREV]
last[NEXT] = root[PREV] = link
link[PREV] = last
link[NEXT] = root
stats[HITS] += 1
return result
result = user_function(*args, **kwds)
with lock:
(root,) = nonlocal_root
if key in cache:
# getting here means that this same key was added to the
# cache while the lock was released. since the link
# update is already done, we need only return the
# computed result and update the count of misses.
pass
elif _len(cache) >= maxsize:
# use the old root to store the new key and result
oldroot = root
oldroot[KEY] = key
oldroot[RESULT] = result
# empty the oldest link and make it the new root
root = nonlocal_root[0] = oldroot[NEXT]
oldkey = root[KEY]
root[KEY] = root[RESULT] = None
# now update the cache dictionary for the new links
del cache[oldkey]
cache[key] = oldroot
else:
# put result in a new link at the front of the list
last = root[PREV]
link = [last, root, key, result]
last[NEXT] = root[PREV] = cache[key] = link
stats[MISSES] += 1
return result
def cache_info():
"""Report cache statistics"""
with lock:
return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache))
def cache_clear():
"""Clear the cache and cache statistics"""
with lock:
cache.clear()
root = nonlocal_root[0]
root[:] = [root, root, None, None]
stats[:] = [0, 0]
wrapper.__wrapped__ = user_function
wrapper.cache_info = cache_info
wrapper.cache_clear = cache_clear
        return update_wrapper(wrapper, user_function)
    return decorating_function
def _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo):
# Constants shared by all lru cache instances:
sentinel = object() # unique object used to signal cache misses
make_key = _make_key # build a key from the function arguments
PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields
cache = {}
hits = misses = 0
full = False
cache_get = cache.get # bound method to lookup a key or return None
cache_len = cache.__len__ # get cache size without calling len()
lock = RLock() # because linkedlist updates aren't threadsafe
root = [] # root of the circular doubly linked list
root[:] = [root, root, None, None] # initialize by pointing to self
if maxsize == 0:
def wrapper(*args, **kwds):
# No caching -- just a statistics update
nonlocal misses
misses += 1
result = user_function(*args, **kwds)
return result
elif maxsize is None:
def wrapper(*args, **kwds):
# Simple caching without ordering or size limit
nonlocal hits, misses
key = make_key(args, kwds, typed)
result = cache_get(key, sentinel)
if result is not sentinel:
hits += 1
return result
misses += 1
result = user_function(*args, **kwds)
cache[key] = result
return result
else:
def wrapper(*args, **kwds):
# Size limited caching that tracks accesses by recency
nonlocal root, hits, misses, full
key = make_key(args, kwds, typed)
with lock:
link = cache_get(key)
if link is not None:
# Move the link to the front of the circular queue
link_prev, link_next, _key, result = link
link_prev[NEXT] = link_next
link_next[PREV] = link_prev
last = root[PREV]
last[NEXT] = root[PREV] = link
link[PREV] = last
link[NEXT] = root
hits += 1
return result
misses += 1
result = user_function(*args, **kwds)
with lock:
if key in cache:
# Getting here means that this same key was added to the
# cache while the lock was released. Since the link
# update is already done, we need only return the
# computed result and update the count of misses.
pass
elif full:
# Use the old root to store the new key and result.
oldroot = root
oldroot[KEY] = key
oldroot[RESULT] = result
# Empty the oldest link and make it the new root.
# Keep a reference to the old key and old result to
# prevent their ref counts from going to zero during the
# update. That will prevent potentially arbitrary object
# clean-up code (i.e. __del__) from running while we're
# still adjusting the links.
root = oldroot[NEXT]
oldkey = root[KEY]
root[KEY] = root[RESULT] = None
# Now update the cache dictionary.
del cache[oldkey]
# Save the potentially reentrant cache[key] assignment
# for last, after the root and links have been put in
# a consistent state.
cache[key] = oldroot
else:
# Put result in a new link at the front of the queue.
last = root[PREV]
link = [last, root, key, result]
last[NEXT] = root[PREV] = cache[key] = link
# Use the cache_len bound method instead of the len() function
# which could potentially be wrapped in an lru_cache itself.
full = cache_len() >= maxsize
return result
def cache_info():
"""Report cache statistics"""
with lock:
return _CacheInfo(hits, misses, maxsize, cache_len())
def cache_clear():
"""Clear the cache and cache statistics"""
nonlocal hits, misses, full
with lock:
cache.clear()
root[:] = [root, root, None, None]
hits = misses = 0
full = False
wrapper.cache_info = cache_info
wrapper.cache_clear = cache_clear
return wrapper
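The vendored backport above now mirrors CPython's functools layout: _make_key() keeps keyword order instead of sorting, and the decorator body is split out into _lru_cache_wrapper() with cache_info()/cache_clear() plus a cache_parameters() accessor. Usage is unchanged; a small sketch against the lru_cache defined above (the numbers in the comments are illustrative):

@lru_cache(maxsize=32)
def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)

fib(20)
print(fib.cache_info())        # e.g. CacheInfo(hits=18, misses=21, maxsize=32, currsize=21)
print(fib.cache_parameters())  # {'maxsize': 32, 'typed': False}
fib.cache_clear()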


@@ -11,9 +11,9 @@ from bleach.sanitizer import (
# yyyymmdd
-__releasedate__ = "20230123"
+__releasedate__ = "20231006"
# x.y.z or x.y.z.dev0 -- semver
-__version__ = "6.0.0"
+__version__ = "6.1.0"
__all__ = ["clean", "linkify"]


@@ -395,10 +395,17 @@ class BleachHTMLTokenizer(HTMLTokenizer):
            # followed by a series of characters. It's treated as a tag
            # name that abruptly ends, but we should treat that like
            # character data
-           yield {
-               "type": TAG_TOKEN_TYPE_CHARACTERS,
-               "data": "<" + self.currentToken["name"],
-           }
+           yield {"type": TAG_TOKEN_TYPE_CHARACTERS, "data": self.stream.get_tag()}
+       elif last_error_token["data"] in (
+           "eof-in-attribute-name",
+           "eof-in-attribute-value-no-quotes",
+       ):
+           # Handle the case where the text being parsed ends with <
+           # followed by a series of characters and then space and then
+           # more characters. It's treated as a tag name followed by an
+           # attribute that abruptly ends, but we should treat that like
+           # character data.
+           yield {"type": TAG_TOKEN_TYPE_CHARACTERS, "data": self.stream.get_tag()}
        else:
            yield last_error_token


@@ -45,8 +45,8 @@ def build_url_re(tlds=TLDS, protocols=html5lib_shim.allowed_protocols):
    r"""\(* # Match any opening parentheses.
    \b(?<![@.])(?:(?:{0}):/{{0,3}}(?:(?:\w+:)?\w+@)?)? # http://
    ([\w-]+\.)+(?:{1})(?:\:[0-9]+)?(?!\.\w)\b # xx.yy.tld(:##)?
-   (?:[/?][^\s\{{\}}\|\\\^\[\]`<>"]*)?
-       # /path/zz (excluding "unsafe" chars from RFC 1738,
+   (?:[/?][^\s\{{\}}\|\\\^`<>"]*)?
+       # /path/zz (excluding "unsafe" chars from RFC 3986,
        # except for # and ~, which happen in practice)
    """.format(
        "|".join(sorted(protocols)), "|".join(sorted(tlds))
@@ -591,7 +591,7 @@ class LinkifyFilter(html5lib_shim.Filter):
                in_a = False
                token_buffer = []
            else:
-               token_buffer.append(token)
+               token_buffer.extend(list(self.extract_entities(token)))
            continue
        if token["type"] in ["StartTag", "EmptyTag"]:
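Two behavior changes above: the URL path character class no longer excludes square brackets, and tokens buffered inside an existing <a> tag now pass through extract_entities(). A rough illustration with bleach's public Linker API (output not shown; the point is that the "[1]" is now kept as part of the linkified URL path):

from bleach.linkifier import Linker

linker = Linker()
# With the relaxed path pattern, the bracketed suffix stays inside the link.
print(linker.linkify("docs at http://example.com/page[1]"))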


@@ -1,4 +1,4 @@
from .core import contents, where
__all__ = ["contents", "where"]
-__version__ = "2023.07.22"
+__version__ = "2024.02.02"


@@ -245,34 +245,6 @@ mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
4SVhM7JZG+Ju1zdXtg2pEto=
-----END CERTIFICATE-----
# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1
# Subject: O=SECOM Trust.net OU=Security Communication RootCA1
# Label: "Security Communication Root CA"
# Serial: 0
# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a
# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7
# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c
-----BEGIN CERTIFICATE-----
MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY
MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t
dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5
WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD
VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3
DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8
9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ
DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9
Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N
QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ
xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G
A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T
AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG
kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr
Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5
Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU
JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot
RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw==
-----END CERTIFICATE-----
# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
# Label: "XRamp Global CA Root"
@@ -881,49 +853,6 @@ Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
WD9f
-----END CERTIFICATE-----
# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
# Serial: 6047274297262753887
# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3
# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa
# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef
-----BEGIN CERTIFICATE-----
MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE
BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy
MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD
VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD
VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp
cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv
ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl
AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF
661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9
am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1
ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481
PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS
3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k
SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF
3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM
ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g
StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz
Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB
jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V
-----END CERTIFICATE-----
# Issuer: CN=Izenpe.com O=IZENPE S.A.
# Subject: CN=Izenpe.com O=IZENPE S.A.
# Label: "Izenpe.com"
@@ -4633,3 +4562,253 @@ o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5
dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE
oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ==
-----END CERTIFICATE-----
# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc.
# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc.
# Label: "TrustAsia Global Root CA G3"
# Serial: 576386314500428537169965010905813481816650257167
# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04
# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7
# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08
-----BEGIN CERTIFICATE-----
MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM
BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp
ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe
Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw
IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU
cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC
DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS
T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK
AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1
nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep
qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA
yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs
hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX
zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv
kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT
f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA
uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB
o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih
MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E
BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4
wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2
XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1
JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j
ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV
VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx
xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on
AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d
7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj
gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV
+Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo
FGWsJwt0ivKH
-----END CERTIFICATE-----
# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc.
# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc.
# Label: "TrustAsia Global Root CA G4"
# Serial: 451799571007117016466790293371524403291602933463
# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb
# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a
# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c
-----BEGIN CERTIFICATE-----
MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw
WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs
IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y
MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD
VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz
dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx
s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw
LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij
YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD
pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE
AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR
UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj
/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA==
-----END CERTIFICATE-----
# Issuer: CN=CommScope Public Trust ECC Root-01 O=CommScope
# Subject: CN=CommScope Public Trust ECC Root-01 O=CommScope
# Label: "CommScope Public Trust ECC Root-01"
# Serial: 385011430473757362783587124273108818652468453534
# MD5 Fingerprint: 3a:40:a7:fc:03:8c:9c:38:79:2f:3a:a2:6c:b6:0a:16
# SHA1 Fingerprint: 07:86:c0:d8:dd:8e:c0:80:98:06:98:d0:58:7a:ef:de:a6:cc:a2:5d
# SHA256 Fingerprint: 11:43:7c:da:7b:b4:5e:41:36:5f:45:b3:9a:38:98:6b:0d:e0:0d:ef:34:8e:0c:7b:b0:87:36:33:80:0b:c3:8b
-----BEGIN CERTIFICATE-----
MIICHTCCAaOgAwIBAgIUQ3CCd89NXTTxyq4yLzf39H91oJ4wCgYIKoZIzj0EAwMw
TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t
bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMTAeFw0yMTA0MjgxNzM1NDNa
Fw00NjA0MjgxNzM1NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv
cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDEw
djAQBgcqhkjOPQIBBgUrgQQAIgNiAARLNumuV16ocNfQj3Rid8NeeqrltqLxeP0C
flfdkXmcbLlSiFS8LwS+uM32ENEp7LXQoMPwiXAZu1FlxUOcw5tjnSCDPgYLpkJE
hRGnSjot6dZoL0hOUysHP029uax3OVejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD
VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSOB2LAUN3GGQYARnQE9/OufXVNMDAKBggq
hkjOPQQDAwNoADBlAjEAnDPfQeMjqEI2Jpc1XHvr20v4qotzVRVcrHgpD7oh2MSg
2NED3W3ROT3Ek2DS43KyAjB8xX6I01D1HiXo+k515liWpDVfG2XqYZpwI7UNo5uS
Um9poIyNStDuiw7LR47QjRE=
-----END CERTIFICATE-----
# Issuer: CN=CommScope Public Trust ECC Root-02 O=CommScope
# Subject: CN=CommScope Public Trust ECC Root-02 O=CommScope
# Label: "CommScope Public Trust ECC Root-02"
# Serial: 234015080301808452132356021271193974922492992893
# MD5 Fingerprint: 59:b0:44:d5:65:4d:b8:5c:55:19:92:02:b6:d1:94:b2
# SHA1 Fingerprint: 3c:3f:ef:57:0f:fe:65:93:86:9e:a0:fe:b0:f6:ed:8e:d1:13:c7:e5
# SHA256 Fingerprint: 2f:fb:7f:81:3b:bb:b3:c8:9a:b4:e8:16:2d:0f:16:d7:15:09:a8:30:cc:9d:73:c2:62:e5:14:08:75:d1:ad:4a
-----BEGIN CERTIFICATE-----
MIICHDCCAaOgAwIBAgIUKP2ZYEFHpgE6yhR7H+/5aAiDXX0wCgYIKoZIzj0EAwMw
TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t
bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMjAeFw0yMTA0MjgxNzQ0NTRa
Fw00NjA0MjgxNzQ0NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv
cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDIw
djAQBgcqhkjOPQIBBgUrgQQAIgNiAAR4MIHoYx7l63FRD/cHB8o5mXxO1Q/MMDAL
j2aTPs+9xYa9+bG3tD60B8jzljHz7aRP+KNOjSkVWLjVb3/ubCK1sK9IRQq9qEmU
v4RDsNuESgMjGWdqb8FuvAY5N9GIIvejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD
VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTmGHX/72DehKT1RsfeSlXjMjZ59TAKBggq
hkjOPQQDAwNnADBkAjAmc0l6tqvmSfR9Uj/UQQSugEODZXW5hYA4O9Zv5JOGq4/n
ich/m35rChJVYaoR4HkCMHfoMXGsPHED1oQmHhS48zs73u1Z/GtMMH9ZzkXpc2AV
mkzw5l4lIhVtwodZ0LKOag==
-----END CERTIFICATE-----
# Issuer: CN=CommScope Public Trust RSA Root-01 O=CommScope
# Subject: CN=CommScope Public Trust RSA Root-01 O=CommScope
# Label: "CommScope Public Trust RSA Root-01"
# Serial: 354030733275608256394402989253558293562031411421
# MD5 Fingerprint: 0e:b4:15:bc:87:63:5d:5d:02:73:d4:26:38:68:73:d8
# SHA1 Fingerprint: 6d:0a:5f:f7:b4:23:06:b4:85:b3:b7:97:64:fc:ac:75:f5:33:f2:93
# SHA256 Fingerprint: 02:bd:f9:6e:2a:45:dd:9b:f1:8f:c7:e1:db:df:21:a0:37:9b:a3:c9:c2:61:03:44:cf:d8:d6:06:fe:c1:ed:81
-----BEGIN CERTIFICATE-----
MIIFbDCCA1SgAwIBAgIUPgNJgXUWdDGOTKvVxZAplsU5EN0wDQYJKoZIhvcNAQEL
BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi
Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMTAeFw0yMTA0MjgxNjQ1
NTRaFw00NjA0MjgxNjQ1NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t
U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt
MDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCwSGWjDR1C45FtnYSk
YZYSwu3D2iM0GXb26v1VWvZVAVMP8syMl0+5UMuzAURWlv2bKOx7dAvnQmtVzslh
suitQDy6uUEKBU8bJoWPQ7VAtYXR1HHcg0Hz9kXHgKKEUJdGzqAMxGBWBB0HW0al
DrJLpA6lfO741GIDuZNqihS4cPgugkY4Iw50x2tBt9Apo52AsH53k2NC+zSDO3Oj
WiE260f6GBfZumbCk6SP/F2krfxQapWsvCQz0b2If4b19bJzKo98rwjyGpg/qYFl
P8GMicWWMJoKz/TUyDTtnS+8jTiGU+6Xn6myY5QXjQ/cZip8UlF1y5mO6D1cv547
KI2DAg+pn3LiLCuz3GaXAEDQpFSOm117RTYm1nJD68/A6g3czhLmfTifBSeolz7p
UcZsBSjBAg/pGG3svZwG1KdJ9FQFa2ww8esD1eo9anbCyxooSU1/ZOD6K9pzg4H/
kQO9lLvkuI6cMmPNn7togbGEW682v3fuHX/3SZtS7NJ3Wn2RnU3COS3kuoL4b/JO
Hg9O5j9ZpSPcPYeoKFgo0fEbNttPxP/hjFtyjMcmAyejOQoBqsCyMWCDIqFPEgkB
Ea801M/XrmLTBQe0MXXgDW1XT2mH+VepuhX2yFJtocucH+X8eKg1mp9BFM6ltM6U
CBwJrVbl2rZJmkrqYxhTnCwuwwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUN12mmnQywsL5x6YVEFm45P3luG0wDQYJ
KoZIhvcNAQELBQADggIBAK+nz97/4L1CjU3lIpbfaOp9TSp90K09FlxD533Ahuh6
NWPxzIHIxgvoLlI1pKZJkGNRrDSsBTtXAOnTYtPZKdVUvhwQkZyybf5Z/Xn36lbQ
nmhUQo8mUuJM3y+Xpi/SB5io82BdS5pYV4jvguX6r2yBS5KPQJqTRlnLX3gWsWc+
QgvfKNmwrZggvkN80V4aCRckjXtdlemrwWCrWxhkgPut4AZ9HcpZuPN4KWfGVh2v
trV0KnahP/t1MJ+UXjulYPPLXAziDslg+MkfFoom3ecnf+slpoq9uC02EJqxWE2a
aE9gVOX2RhOOiKy8IUISrcZKiX2bwdgt6ZYD9KJ0DLwAHb/WNyVntHKLr4W96ioD
j8z7PEQkguIBpQtZtjSNMgsSDesnwv1B10A8ckYpwIzqug/xBpMu95yo9GA+o/E4
Xo4TwbM6l4c/ksp4qRyv0LAbJh6+cOx69TOY6lz/KwsETkPdY34Op054A5U+1C0w
lREQKC6/oAI+/15Z0wUOlV9TRe9rh9VIzRamloPh37MG88EU26fsHItdkJANclHn
YfkUyq+Dj7+vsQpZXdxc1+SWrVtgHdqul7I52Qb1dgAT+GhMIbA1xNxVssnBQVoc
icCMb3SgazNNtQEo/a2tiRc7ppqEvOuM6sRxJKi6KfkIsidWNTJf6jn7MZrVGczw
-----END CERTIFICATE-----
# Issuer: CN=CommScope Public Trust RSA Root-02 O=CommScope
# Subject: CN=CommScope Public Trust RSA Root-02 O=CommScope
# Label: "CommScope Public Trust RSA Root-02"
# Serial: 480062499834624527752716769107743131258796508494
# MD5 Fingerprint: e1:29:f9:62:7b:76:e2:96:6d:f3:d4:d7:0f:ae:1f:aa
# SHA1 Fingerprint: ea:b0:e2:52:1b:89:93:4c:11:68:f2:d8:9a:ac:22:4c:a3:8a:57:ae
# SHA256 Fingerprint: ff:e9:43:d7:93:42:4b:4f:7c:44:0c:1c:3d:64:8d:53:63:f3:4b:82:dc:87:aa:7a:9f:11:8f:c5:de:e1:01:f1
-----BEGIN CERTIFICATE-----
MIIFbDCCA1SgAwIBAgIUVBa/O345lXGN0aoApYYNK496BU4wDQYJKoZIhvcNAQEL
BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi
Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMjAeFw0yMTA0MjgxNzE2
NDNaFw00NjA0MjgxNzE2NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t
U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt
MDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDh+g77aAASyE3VrCLE
NQE7xVTlWXZjpX/rwcRqmL0yjReA61260WI9JSMZNRTpf4mnG2I81lDnNJUDMrG0
kyI9p+Kx7eZ7Ti6Hmw0zdQreqjXnfuU2mKKuJZ6VszKWpCtYHu8//mI0SFHRtI1C
rWDaSWqVcN3SAOLMV2MCe5bdSZdbkk6V0/nLKR8YSvgBKtJjCW4k6YnS5cciTNxz
hkcAqg2Ijq6FfUrpuzNPDlJwnZXjfG2WWy09X6GDRl224yW4fKcZgBzqZUPckXk2
LHR88mcGyYnJ27/aaL8j7dxrrSiDeS/sOKUNNwFnJ5rpM9kzXzehxfCrPfp4sOcs
n/Y+n2Dg70jpkEUeBVF4GiwSLFworA2iI540jwXmojPOEXcT1A6kHkIfhs1w/tku
FT0du7jyU1fbzMZ0KZwYszZ1OC4PVKH4kh+Jlk+71O6d6Ts2QrUKOyrUZHk2EOH5
kQMreyBUzQ0ZGshBMjTRsJnhkB4BQDa1t/qp5Xd1pCKBXbCL5CcSD1SIxtuFdOa3
wNemKfrb3vOTlycEVS8KbzfFPROvCgCpLIscgSjX74Yxqa7ybrjKaixUR9gqiC6v
wQcQeKwRoi9C8DfF8rhW3Q5iLc4tVn5V8qdE9isy9COoR+jUKgF4z2rDN6ieZdIs
5fq6M8EGRPbmz6UNp2YINIos8wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUR9DnsSL/nSz12Vdgs7GxcJXvYXowDQYJ
KoZIhvcNAQELBQADggIBAIZpsU0v6Z9PIpNojuQhmaPORVMbc0RTAIFhzTHjCLqB
KCh6krm2qMhDnscTJk3C2OVVnJJdUNjCK9v+5qiXz1I6JMNlZFxHMaNlNRPDk7n3
+VGXu6TwYofF1gbTl4MgqX67tiHCpQ2EAOHyJxCDut0DgdXdaMNmEMjRdrSzbyme
APnCKfWxkxlSaRosTKCL4BWaMS/TiJVZbuXEs1DIFAhKm4sTg7GkcrI7djNB3Nyq
pgdvHSQSn8h2vS/ZjvQs7rfSOBAkNlEv41xdgSGn2rtO/+YHqP65DSdsu3BaVXoT
6fEqSWnHX4dXTEN5bTpl6TBcQe7rd6VzEojov32u5cSoHw2OHG1QAk8mGEPej1WF
sQs3BWDJVTkSBKEqz3EWnzZRSb9wO55nnPt7eck5HHisd5FUmrh1CoFSl+NmYWvt
PjgelmFV4ZFUjO2MJB+ByRCac5krFk5yAD9UG/iNuovnFNa2RU9g7Jauwy8CTl2d
lklyALKrdVwPaFsdZcJfMw8eD/A7hvWwTruc9+olBdytoptLFwG+Qt81IR2tq670
v64fG9PiO/yzcnMcmyiQiRM9HcEARwmWmjgb3bHPDcK0RPOWlc4yOo80nOAXx17O
rg3bhzjlP1v9mxnhMUF6cKojawHhRUzNlM47ni3niAIi9G7oyOzWPPO5std3eqx7
-----END CERTIFICATE-----
# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH
# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH
# Label: "Telekom Security TLS ECC Root 2020"
# Serial: 72082518505882327255703894282316633856
# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd
# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec
# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1
-----BEGIN CERTIFICATE-----
MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw
CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH
bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw
MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx
JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE
AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49
AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O
tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP
f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f
MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA
MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di
z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn
27iQ7t0l
-----END CERTIFICATE-----
# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH
# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH
# Label: "Telekom Security TLS RSA Root 2023"
# Serial: 44676229530606711399881795178081572759
# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2
# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93
# SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46
-----BEGIN CERTIFICATE-----
MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj
MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0
eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy
MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC
REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG
A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ
KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9
cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV
cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA
U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6
Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug
BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy
8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J
co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg
8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8
rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12
mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg
+y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX
gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2
p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ
pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm
9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw
M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd
GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+
CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t
xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+
w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK
L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj
X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q
ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm
dTdmQRCsu/WU48IxK63nI1bMNSWSs1A=
-----END CERTIFICATE-----

View file

@ -5,6 +5,10 @@ certifi.py
This module returns the installation location of cacert.pem or its contents. This module returns the installation location of cacert.pem or its contents.
""" """
import sys import sys
import atexit
def exit_cacert_ctx() -> None:
_CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr]
if sys.version_info >= (3, 11): if sys.version_info >= (3, 11):
@ -35,6 +39,7 @@ if sys.version_info >= (3, 11):
# we will also store that at the global level as well. # we will also store that at the global level as well.
_CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem"))
_CACERT_PATH = str(_CACERT_CTX.__enter__()) _CACERT_PATH = str(_CACERT_CTX.__enter__())
atexit.register(exit_cacert_ctx)
return _CACERT_PATH return _CACERT_PATH
@ -70,6 +75,7 @@ elif sys.version_info >= (3, 7):
# we will also store that at the global level as well. # we will also store that at the global level as well.
_CACERT_CTX = get_path("certifi", "cacert.pem") _CACERT_CTX = get_path("certifi", "cacert.pem")
_CACERT_PATH = str(_CACERT_CTX.__enter__()) _CACERT_PATH = str(_CACERT_CTX.__enter__())
atexit.register(exit_cacert_ctx)
return _CACERT_PATH return _CACERT_PATH
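For reference, a minimal sketch of the cleanup pattern these hunks add, with names simplified for illustration: the resource context is entered once, its path cached, and its __exit__ registered with atexit so the extracted file is removed at interpreter shutdown.

import atexit
from importlib.resources import as_file, files

_ctx = None
_path = None

def _close_ctx() -> None:
    # close the context entered below when the interpreter exits
    _ctx.__exit__(None, None, None)

def resource_path() -> str:
    global _ctx, _path
    if _path is None:
        _ctx = as_file(files("certifi").joinpath("cacert.pem"))
        _path = str(_ctx.__enter__())
        atexit.register(_close_ctx)
    return _path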

View file

@ -0,0 +1,6 @@
version = "2.9.*"
upstream_repository = "https://github.com/dateutil/dateutil"
partial_stub = true
[tool.stubtest]
ignore_missing_stub = true

View file

View file

@ -0,0 +1,9 @@
from typing_extensions import Self
class weekday:
def __init__(self, weekday: int, n: int | None = None) -> None: ...
def __call__(self, n: int) -> Self: ...
def __eq__(self, other: object) -> bool: ...
def __hash__(self) -> int: ...
weekday: int
n: int

View file

@ -0,0 +1,8 @@
from datetime import date
from typing import Literal
EASTER_JULIAN: Literal[1]
EASTER_ORTHODOX: Literal[2]
EASTER_WESTERN: Literal[3]
def easter(year: int, method: Literal[1, 2, 3] = 3) -> date: ...
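A quick usage sketch for the easter() stub above (the dates shown are for 2024):

from dateutil.easter import easter, EASTER_ORTHODOX

easter(2024)                          # datetime.date(2024, 3, 31), Western rule (default)
easter(2024, method=EASTER_ORTHODOX)  # datetime.date(2024, 5, 5)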

View file

@ -0,0 +1,67 @@
from collections.abc import Callable, Mapping
from datetime import datetime, tzinfo
from typing import IO, Any
from typing_extensions import TypeAlias
from .isoparser import isoparse as isoparse, isoparser as isoparser
_FileOrStr: TypeAlias = bytes | str | IO[str] | IO[Any]
_TzData: TypeAlias = tzinfo | int | str | None
_TzInfo: TypeAlias = Mapping[str, _TzData] | Callable[[str, int], _TzData]
class parserinfo:
JUMP: list[str]
WEEKDAYS: list[tuple[str, ...]]
MONTHS: list[tuple[str, ...]]
HMS: list[tuple[str, str, str]]
AMPM: list[tuple[str, str]]
UTCZONE: list[str]
PERTAIN: list[str]
TZOFFSET: dict[str, int]
def __init__(self, dayfirst: bool = False, yearfirst: bool = False) -> None: ...
def jump(self, name: str) -> bool: ...
def weekday(self, name: str) -> int | None: ...
def month(self, name: str) -> int | None: ...
def hms(self, name: str) -> int | None: ...
def ampm(self, name: str) -> int | None: ...
def pertain(self, name: str) -> bool: ...
def utczone(self, name: str) -> bool: ...
def tzoffset(self, name: str) -> int | None: ...
def convertyear(self, year: int) -> int: ...
def validate(self, res: datetime) -> bool: ...
class parser:
def __init__(self, info: parserinfo | None = None) -> None: ...
def parse(
self,
timestr: _FileOrStr,
default: datetime | None = None,
ignoretz: bool = False,
tzinfos: _TzInfo | None = None,
*,
dayfirst: bool | None = ...,
yearfirst: bool | None = ...,
fuzzy: bool = ...,
fuzzy_with_tokens: bool = ...,
) -> datetime: ...
DEFAULTPARSER: parser
def parse(
timestr: _FileOrStr,
parserinfo: parserinfo | None = None,
*,
dayfirst: bool | None = ...,
yearfirst: bool | None = ...,
ignoretz: bool = ...,
fuzzy: bool = ...,
fuzzy_with_tokens: bool = ...,
default: datetime | None = ...,
tzinfos: _TzInfo | None = ...,
) -> datetime: ...
class _tzparser: ...
DEFAULTTZPARSER: _tzparser
class ParserError(ValueError): ...
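The stub above covers dateutil.parser's flexible parse() entry point; a brief usage sketch (input strings are illustrative):

from dateutil import parser

parser.parse("Sun, 24 Mar 2024 15:25:09 -0700")   # timezone-aware datetime
parser.parse("24/03/2024", dayfirst=True)         # datetime.datetime(2024, 3, 24, 0, 0)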

View file

@ -0,0 +1,15 @@
from _typeshed import SupportsRead
from datetime import date, datetime, time, tzinfo
from typing_extensions import TypeAlias
_Readable: TypeAlias = SupportsRead[str | bytes]
_TakesAscii: TypeAlias = str | bytes | _Readable
class isoparser:
def __init__(self, sep: str | bytes | None = None): ...
def isoparse(self, dt_str: _TakesAscii) -> datetime: ...
def parse_isodate(self, datestr: _TakesAscii) -> date: ...
def parse_isotime(self, timestr: _TakesAscii) -> time: ...
def parse_tzstr(self, tzstr: _TakesAscii, zero_as_utc: bool = True) -> tzinfo: ...
def isoparse(dt_str: _TakesAscii) -> datetime: ...
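And the strict ISO 8601 counterpart described by the isoparser stub, again with illustrative inputs:

from dateutil.parser import isoparser

p = isoparser()
p.isoparse("2024-03-24T15:25:09-07:00")   # aware datetime
p.parse_isodate("2024-03-24")             # datetime.date(2024, 3, 24)
p.parse_isotime("15:25:09")               # datetime.time(15, 25, 9)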

View file

@ -0,0 +1 @@
partial

View file

@ -0,0 +1,97 @@
from datetime import date, timedelta
from typing import SupportsFloat, TypeVar, overload
from typing_extensions import Self, TypeAlias
# See #9817 for why we reexport this here
from ._common import weekday as weekday
_DateT = TypeVar("_DateT", bound=date)
# Work around attribute and type having the same name.
_Weekday: TypeAlias = weekday
MO: weekday
TU: weekday
WE: weekday
TH: weekday
FR: weekday
SA: weekday
SU: weekday
class relativedelta:
years: int
months: int
days: int
leapdays: int
hours: int
minutes: int
seconds: int
microseconds: int
year: int | None
month: int | None
weekday: _Weekday | None
day: int | None
hour: int | None
minute: int | None
second: int | None
microsecond: int | None
def __init__(
self,
dt1: date | None = None,
dt2: date | None = None,
years: int | None = 0,
months: int | None = 0,
days: int | None = 0,
leapdays: int | None = 0,
weeks: int | None = 0,
hours: int | None = 0,
minutes: int | None = 0,
seconds: int | None = 0,
microseconds: int | None = 0,
year: int | None = None,
month: int | None = None,
day: int | None = None,
weekday: int | _Weekday | None = None,
yearday: int | None = None,
nlyearday: int | None = None,
hour: int | None = None,
minute: int | None = None,
second: int | None = None,
microsecond: int | None = None,
) -> None: ...
@property
def weeks(self) -> int: ...
@weeks.setter
def weeks(self, value: int) -> None: ...
def normalized(self) -> Self: ...
# TODO: use Union when mypy will handle it properly in overloaded operator
# methods (#2129, #1442, #1264 in mypy)
@overload
def __add__(self, other: relativedelta) -> Self: ...
@overload
def __add__(self, other: timedelta) -> Self: ...
@overload
def __add__(self, other: _DateT) -> _DateT: ...
@overload
def __radd__(self, other: relativedelta) -> Self: ...
@overload
def __radd__(self, other: timedelta) -> Self: ...
@overload
def __radd__(self, other: _DateT) -> _DateT: ...
@overload
def __rsub__(self, other: relativedelta) -> Self: ...
@overload
def __rsub__(self, other: timedelta) -> Self: ...
@overload
def __rsub__(self, other: _DateT) -> _DateT: ...
def __sub__(self, other: relativedelta) -> Self: ...
def __neg__(self) -> Self: ...
def __bool__(self) -> bool: ...
def __nonzero__(self) -> bool: ...
def __mul__(self, other: SupportsFloat) -> Self: ...
def __rmul__(self, other: SupportsFloat) -> Self: ...
def __eq__(self, other: object) -> bool: ...
def __ne__(self, other: object) -> bool: ...
def __div__(self, other: SupportsFloat) -> Self: ...
def __truediv__(self, other: SupportsFloat) -> Self: ...
def __abs__(self) -> Self: ...
def __hash__(self) -> int: ...
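A short sketch of the relativedelta arithmetic the stub above describes (dates are illustrative):

from datetime import date
from dateutil.relativedelta import relativedelta, MO

date(2024, 3, 24) + relativedelta(months=+1)       # date(2024, 4, 24)
date(2024, 3, 24) + relativedelta(weekday=MO(+1))  # next Monday: date(2024, 3, 25)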

View file

@ -0,0 +1,111 @@
import datetime
from _typeshed import Incomplete
from collections.abc import Iterable, Iterator, Sequence
from typing_extensions import TypeAlias
from ._common import weekday as weekdaybase
YEARLY: int
MONTHLY: int
WEEKLY: int
DAILY: int
HOURLY: int
MINUTELY: int
SECONDLY: int
class weekday(weekdaybase): ...
weekdays: tuple[weekday, weekday, weekday, weekday, weekday, weekday, weekday]
MO: weekday
TU: weekday
WE: weekday
TH: weekday
FR: weekday
SA: weekday
SU: weekday
class rrulebase:
def __init__(self, cache: bool = False) -> None: ...
def __iter__(self) -> Iterator[datetime.datetime]: ...
def __getitem__(self, item): ...
def __contains__(self, item): ...
def count(self): ...
def before(self, dt, inc: bool = False): ...
def after(self, dt, inc: bool = False): ...
def xafter(self, dt, count: Incomplete | None = None, inc: bool = False): ...
def between(self, after, before, inc: bool = False, count: int = 1): ...
class rrule(rrulebase):
def __init__(
self,
freq,
dtstart: datetime.date | None = None,
interval: int = 1,
wkst: weekday | int | None = None,
count: int | None = None,
until: datetime.date | int | None = None,
bysetpos: int | Iterable[int] | None = None,
bymonth: int | Iterable[int] | None = None,
bymonthday: int | Iterable[int] | None = None,
byyearday: int | Iterable[int] | None = None,
byeaster: int | Iterable[int] | None = None,
byweekno: int | Iterable[int] | None = None,
byweekday: int | weekday | Iterable[int] | Iterable[weekday] | None = None,
byhour: int | Iterable[int] | None = None,
byminute: int | Iterable[int] | None = None,
bysecond: int | Iterable[int] | None = None,
cache: bool = False,
) -> None: ...
def replace(self, **kwargs): ...
_RRule: TypeAlias = rrule
class _iterinfo:
rrule: _RRule
def __init__(self, rrule: _RRule) -> None: ...
yearlen: int | None
nextyearlen: int | None
yearordinal: int | None
yearweekday: int | None
mmask: Sequence[int] | None
mdaymask: Sequence[int] | None
nmdaymask: Sequence[int] | None
wdaymask: Sequence[int] | None
mrange: Sequence[int] | None
wnomask: Sequence[int] | None
nwdaymask: Sequence[int] | None
eastermask: Sequence[int] | None
lastyear: int | None
lastmonth: int | None
def rebuild(self, year, month): ...
def ydayset(self, year, month, day): ...
def mdayset(self, year, month, day): ...
def wdayset(self, year, month, day): ...
def ddayset(self, year, month, day): ...
def htimeset(self, hour, minute, second): ...
def mtimeset(self, hour, minute, second): ...
def stimeset(self, hour, minute, second): ...
class rruleset(rrulebase):
class _genitem:
dt: Incomplete
genlist: list[Incomplete]
gen: Incomplete
def __init__(self, genlist, gen) -> None: ...
def __next__(self) -> None: ...
next = __next__
def __lt__(self, other) -> bool: ...
def __gt__(self, other) -> bool: ...
def __eq__(self, other) -> bool: ...
def __ne__(self, other) -> bool: ...
def __init__(self, cache: bool = False) -> None: ...
def rrule(self, rrule: _RRule): ...
def rdate(self, rdate): ...
def exrule(self, exrule): ...
def exdate(self, exdate): ...
class _rrulestr:
def __call__(self, s, **kwargs) -> rrule | rruleset: ...
rrulestr: _rrulestr
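A minimal example of the rrule API stubbed above (frequency and dates are illustrative):

from datetime import datetime
from dateutil.rrule import rrule, WEEKLY, MO, WE

# four occurrences on Mondays and Wednesdays starting 2024-03-25
list(rrule(WEEKLY, byweekday=(MO, WE), count=4, dtstart=datetime(2024, 3, 25)))
# [2024-03-25, 2024-03-27, 2024-04-01, 2024-04-03]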

View file

@ -0,0 +1,15 @@
from .tz import (
datetime_ambiguous as datetime_ambiguous,
datetime_exists as datetime_exists,
gettz as gettz,
resolve_imaginary as resolve_imaginary,
tzfile as tzfile,
tzical as tzical,
tzlocal as tzlocal,
tzoffset as tzoffset,
tzrange as tzrange,
tzstr as tzstr,
tzutc as tzutc,
)
UTC: tzutc

View file

@ -0,0 +1,28 @@
import abc
from datetime import datetime, timedelta, tzinfo
from typing import ClassVar
def tzname_in_python2(namefunc): ...
def enfold(dt: datetime, fold: int = 1): ...
class _DatetimeWithFold(datetime):
@property
def fold(self): ...
# Doesn't actually have ABCMeta as the metaclass at runtime,
# but mypy complains if we don't have it in the stub.
# See discussion in #8908
class _tzinfo(tzinfo, metaclass=abc.ABCMeta):
def is_ambiguous(self, dt: datetime) -> bool: ...
def fromutc(self, dt: datetime) -> datetime: ...
class tzrangebase(_tzinfo):
def __init__(self) -> None: ...
def utcoffset(self, dt: datetime | None) -> timedelta | None: ...
def dst(self, dt: datetime | None) -> timedelta | None: ...
def tzname(self, dt: datetime | None) -> str: ...
def fromutc(self, dt: datetime) -> datetime: ...
def is_ambiguous(self, dt: datetime) -> bool: ...
__hash__: ClassVar[None] # type: ignore[assignment]
def __ne__(self, other): ...
__reduce__ = object.__reduce__

View file

@ -0,0 +1,115 @@
import datetime
from _typeshed import Incomplete
from typing import ClassVar, Literal, Protocol, TypeVar
from ..relativedelta import relativedelta
from ._common import _tzinfo as _tzinfo, enfold as enfold, tzname_in_python2 as tzname_in_python2, tzrangebase as tzrangebase
_DT = TypeVar("_DT", bound=datetime.datetime)
ZERO: datetime.timedelta
EPOCH: datetime.datetime
EPOCHORDINAL: int
class tzutc(datetime.tzinfo):
def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
def tzname(self, dt: datetime.datetime | None) -> str: ...
def is_ambiguous(self, dt: datetime.datetime | None) -> bool: ...
def fromutc(self, dt: _DT) -> _DT: ...
def __eq__(self, other): ...
__hash__: ClassVar[None] # type: ignore[assignment]
def __ne__(self, other): ...
__reduce__ = object.__reduce__
class tzoffset(datetime.tzinfo):
def __init__(self, name, offset) -> None: ...
def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
def is_ambiguous(self, dt: datetime.datetime | None) -> bool: ...
def tzname(self, dt: datetime.datetime | None) -> str: ...
def fromutc(self, dt: _DT) -> _DT: ...
def __eq__(self, other): ...
__hash__: ClassVar[None] # type: ignore[assignment]
def __ne__(self, other): ...
__reduce__ = object.__reduce__
@classmethod
def instance(cls, name, offset) -> tzoffset: ...
class tzlocal(_tzinfo):
def __init__(self) -> None: ...
def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
def tzname(self, dt: datetime.datetime | None) -> str: ...
def is_ambiguous(self, dt: datetime.datetime | None) -> bool: ...
def __eq__(self, other): ...
__hash__: ClassVar[None] # type: ignore[assignment]
def __ne__(self, other): ...
__reduce__ = object.__reduce__
class _ttinfo:
def __init__(self) -> None: ...
def __eq__(self, other): ...
__hash__: ClassVar[None] # type: ignore[assignment]
def __ne__(self, other): ...
class _TZFileReader(Protocol):
# optional attribute:
# name: str
def read(self, size: int, /) -> bytes: ...
def seek(self, target: int, whence: Literal[1], /) -> object: ...
class tzfile(_tzinfo):
def __init__(self, fileobj: str | _TZFileReader, filename: str | None = None) -> None: ...
def is_ambiguous(self, dt: datetime.datetime | None, idx: int | None = None) -> bool: ...
def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
def tzname(self, dt: datetime.datetime | None) -> str: ...
def __eq__(self, other): ...
__hash__: ClassVar[None] # type: ignore[assignment]
def __ne__(self, other): ...
def __reduce__(self): ...
def __reduce_ex__(self, protocol): ...
class tzrange(tzrangebase):
hasdst: bool
def __init__(
self,
stdabbr: str,
stdoffset: int | datetime.timedelta | None = None,
dstabbr: str | None = None,
dstoffset: int | datetime.timedelta | None = None,
start: relativedelta | None = None,
end: relativedelta | None = None,
) -> None: ...
def transitions(self, year: int) -> tuple[datetime.datetime, datetime.datetime]: ...
def __eq__(self, other): ...
class tzstr(tzrange):
hasdst: bool
def __init__(self, s: str, posix_offset: bool = False) -> None: ...
@classmethod
def instance(cls, name, offset) -> tzoffset: ...
class _ICalReader(Protocol):
# optional attribute:
# name: str
def read(self) -> str: ...
class tzical:
def __init__(self, fileobj: str | _ICalReader) -> None: ...
def keys(self): ...
def get(self, tzid: Incomplete | None = None): ...
TZFILES: list[str]
TZPATHS: list[str]
def datetime_exists(dt: datetime.datetime, tz: datetime.tzinfo | None = None) -> bool: ...
def datetime_ambiguous(dt: datetime.datetime, tz: datetime.tzinfo | None = None) -> bool: ...
def resolve_imaginary(dt: datetime.datetime) -> datetime.datetime: ...
class _GetTZ:
def __call__(self, name: str | None = ...) -> datetime.tzinfo | None: ...
def nocache(self, name: str | None) -> datetime.tzinfo | None: ...
gettz: _GetTZ
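A brief sketch of the tz helpers stubbed above, assuming a system tz database is available:

from datetime import datetime
from dateutil import tz

eastern = tz.gettz("America/New_York")
datetime(2024, 3, 24, 15, 25, tzinfo=eastern)                    # zone-aware datetime
tz.datetime_ambiguous(datetime(2024, 11, 3, 1, 30), tz=eastern)  # True: falls in the DST fall-back hour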

View file

@ -0,0 +1,5 @@
from datetime import datetime, timedelta, tzinfo
def default_tzinfo(dt: datetime, tzinfo: tzinfo) -> datetime: ...
def today(tzinfo: tzinfo | None = None) -> datetime: ...
def within_delta(dt1: datetime, dt2: datetime, delta: timedelta) -> bool: ...

View file

@ -0,0 +1,17 @@
from _typeshed import Incomplete
from typing import IO
from typing_extensions import TypeAlias
__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"]
_MetadataType: TypeAlias = dict[str, Incomplete]
class ZoneInfoFile:
zones: dict[Incomplete, Incomplete]
metadata: _MetadataType | None
def __init__(self, zonefile_stream: IO[bytes] | None = None) -> None: ...
def get(self, name, default: Incomplete | None = None): ...
def get_zonefile_instance(new_instance: bool = False) -> ZoneInfoFile: ...
def gettz(name): ...
def gettz_db_metadata() -> _MetadataType: ...

View file

@ -0,0 +1,11 @@
from _typeshed import Incomplete, StrOrBytesPath
from collections.abc import Sequence
from tarfile import TarInfo
def rebuild(
filename: StrOrBytesPath,
tag: Incomplete | None = None,
format: str = "gz",
zonegroups: Sequence[str | TarInfo] = [],
metadata: Incomplete | None = None,
) -> None: ...

View file

@ -1,4 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import sys
try: try:
from ._version import version as __version__ from ._version import version as __version__
except ImportError: except ImportError:
@ -6,3 +8,17 @@ except ImportError:
__all__ = ['easter', 'parser', 'relativedelta', 'rrule', 'tz', __all__ = ['easter', 'parser', 'relativedelta', 'rrule', 'tz',
'utils', 'zoneinfo'] 'utils', 'zoneinfo']
def __getattr__(name):
import importlib
if name in __all__:
return importlib.import_module("." + name, __name__)
raise AttributeError(
"module {!r} has not attribute {!r}".format(__name__, name)
)
def __dir__():
# __dir__ should include all the lazy-importable modules as well.
return [x for x in globals() if x not in sys.modules] + __all__
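The __getattr__/__dir__ pair added here implements PEP 562 lazy submodule imports; the observable effect, roughly:

import dateutil

"parser" in dir(dateutil)            # True, the lazily importable submodules are listed
dateutil.parser.parse("2024-03-24")  # attribute access triggers the import, no explicit "from dateutil import parser" needed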

View file

@ -1,5 +1,4 @@
# coding: utf-8
# file generated by setuptools_scm # file generated by setuptools_scm
# don't change, don't track in version control # don't change, don't track in version control
version = '2.8.2' __version__ = version = '2.9.0.post0'
version_tuple = (2, 8, 2) __version_tuple__ = version_tuple = (2, 9, 0)

View file

@ -72,7 +72,7 @@ class isoparser(object):
Common: Common:
- ``YYYY`` - ``YYYY``
- ``YYYY-MM`` or ``YYYYMM`` - ``YYYY-MM``
- ``YYYY-MM-DD`` or ``YYYYMMDD`` - ``YYYY-MM-DD`` or ``YYYYMMDD``
Uncommon: Uncommon:

View file

@ -182,7 +182,7 @@ class rrulebase(object):
# __len__() introduces a large performance penalty. # __len__() introduces a large performance penalty.
def count(self): def count(self):
""" Returns the number of recurrences in this set. It will have go """ Returns the number of recurrences in this set. It will have go
trough the whole recurrence, if this hasn't been done before. """ through the whole recurrence, if this hasn't been done before. """
if self._len is None: if self._len is None:
for x in self: for x in self:
pass pass

View file

@ -34,7 +34,7 @@ except ImportError:
from warnings import warn from warnings import warn
ZERO = datetime.timedelta(0) ZERO = datetime.timedelta(0)
EPOCH = datetime.datetime.utcfromtimestamp(0) EPOCH = datetime.datetime(1970, 1, 1, 0, 0)
EPOCHORDINAL = EPOCH.toordinal() EPOCHORDINAL = EPOCH.toordinal()

View file

@ -1,5 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env python
# Copyright 2015,2016,2017 Nir Cohen # Copyright 2015-2021 Nir Cohen
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
@ -55,7 +55,7 @@ except ImportError:
# Python 3.7 # Python 3.7
TypedDict = dict TypedDict = dict
__version__ = "1.8.0" __version__ = "1.9.0"
class VersionDict(TypedDict): class VersionDict(TypedDict):
@ -125,6 +125,7 @@ _DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$")
# Base file names to be looked up for if _UNIXCONFDIR is not readable. # Base file names to be looked up for if _UNIXCONFDIR is not readable.
_DISTRO_RELEASE_BASENAMES = [ _DISTRO_RELEASE_BASENAMES = [
"SuSE-release", "SuSE-release",
"altlinux-release",
"arch-release", "arch-release",
"base-release", "base-release",
"centos-release", "centos-release",
@ -151,6 +152,8 @@ _DISTRO_RELEASE_IGNORE_BASENAMES = (
"system-release", "system-release",
"plesk-release", "plesk-release",
"iredmail-release", "iredmail-release",
"board-release",
"ec2_version",
) )
@ -243,6 +246,7 @@ def id() -> str:
"rocky" Rocky Linux "rocky" Rocky Linux
"aix" AIX "aix" AIX
"guix" Guix System "guix" Guix System
"altlinux" ALT Linux
============== ========================================= ============== =========================================
If you have a need to get distros for reliable IDs added into this set, If you have a need to get distros for reliable IDs added into this set,
@ -991,10 +995,10 @@ class LinuxDistribution:
For details, see :func:`distro.info`. For details, see :func:`distro.info`.
""" """
return dict( return InfoDict(
id=self.id(), id=self.id(),
version=self.version(pretty, best), version=self.version(pretty, best),
version_parts=dict( version_parts=VersionDict(
major=self.major_version(best), major=self.major_version(best),
minor=self.minor_version(best), minor=self.minor_version(best),
build_number=self.build_number(best), build_number=self.build_number(best),
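info() now returns the InfoDict TypedDict instead of a plain dict; usage is unchanged, roughly (the values shown are illustrative):

import distro

info = distro.info(pretty=False, best=False)
info["id"], info["version_parts"]["major"]   # e.g. 'ubuntu', '22'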

View file

@ -1,8 +1,8 @@
# mako/__init__.py # mako/__init__.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php
__version__ = "1.2.4" __version__ = "1.3.2"

View file

@ -1,5 +1,5 @@
# mako/_ast_util.py # mako/_ast_util.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@ -1,5 +1,5 @@
# mako/ast.py # mako/ast.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@ -1,5 +1,5 @@
# mako/cache.py # mako/cache.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@ -1,5 +1,5 @@
# mako/cmd.py # mako/cmd.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php
@ -25,7 +25,6 @@ def _exit():
def cmdline(argv=None): def cmdline(argv=None):
parser = ArgumentParser() parser = ArgumentParser()
parser.add_argument( parser.add_argument(
"--var", "--var",

View file

@ -1,5 +1,5 @@
# mako/codegen.py # mako/codegen.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php
@ -816,7 +816,6 @@ class _GenerateRenderMethod:
) )
or len(self.compiler.default_filters) or len(self.compiler.default_filters)
): ):
s = self.create_filter_callable( s = self.create_filter_callable(
node.escapes_code.args, "%s" % node.text, True node.escapes_code.args, "%s" % node.text, True
) )
@ -1181,7 +1180,6 @@ class _Identifiers:
def visitBlockTag(self, node): def visitBlockTag(self, node):
if node is not self.node and not node.is_anonymous: if node is not self.node and not node.is_anonymous:
if isinstance(self.node, parsetree.DefTag): if isinstance(self.node, parsetree.DefTag):
raise exceptions.CompileException( raise exceptions.CompileException(
"Named block '%s' not allowed inside of def '%s'" "Named block '%s' not allowed inside of def '%s'"

View file

@ -1,17 +1,17 @@
# mako/compat.py # mako/compat.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php
import collections import collections
from importlib import metadata as importlib_metadata
from importlib import util from importlib import util
import inspect import inspect
import sys import sys
win32 = sys.platform.startswith("win") win32 = sys.platform.startswith("win")
pypy = hasattr(sys, "pypy_version_info") pypy = hasattr(sys, "pypy_version_info")
py38 = sys.version_info >= (3, 8)
ArgSpec = collections.namedtuple( ArgSpec = collections.namedtuple(
"ArgSpec", ["args", "varargs", "keywords", "defaults"] "ArgSpec", ["args", "varargs", "keywords", "defaults"]
@ -62,12 +62,6 @@ def exception_name(exc):
return exc.__class__.__name__ return exc.__class__.__name__
if py38:
from importlib import metadata as importlib_metadata
else:
import importlib_metadata # noqa
def importlib_metadata_get(group): def importlib_metadata_get(group):
ep = importlib_metadata.entry_points() ep = importlib_metadata.entry_points()
if hasattr(ep, "select"): if hasattr(ep, "select"):

View file

@ -1,5 +1,5 @@
# mako/exceptions.py # mako/exceptions.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@ -1,5 +1,5 @@
# ext/autohandler.py # ext/autohandler.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@ -1,5 +1,5 @@
# ext/babelplugin.py # ext/babelplugin.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@ -1,5 +1,5 @@
# ext/beaker_cache.py # ext/beaker_cache.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@ -1,5 +1,5 @@
# ext/extract.py # ext/extract.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@ -1,5 +1,5 @@
# ext/linguaplugin.py # ext/linguaplugin.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@ -1,5 +1,5 @@
# ext/preprocessors.py # ext/preprocessors.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@ -1,5 +1,5 @@
# ext/pygmentplugin.py # ext/pygmentplugin.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@ -1,5 +1,5 @@
# ext/turbogears.py # ext/turbogears.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@ -1,5 +1,5 @@
# mako/filters.py # mako/filters.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@ -1,5 +1,5 @@
# mako/lexer.py # mako/lexer.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php
@ -247,6 +247,8 @@ class Lexer:
continue continue
if self.match_python_block(): if self.match_python_block():
continue continue
if self.match_percent():
continue
if self.match_text(): if self.match_text():
continue continue
@ -352,14 +354,24 @@ class Lexer:
else: else:
return True return True
def match_percent(self):
match = self.match(r"(?<=^)(\s*)%%(%*)", re.M)
if match:
self.append_node(
parsetree.Text, match.group(1) + "%" + match.group(2)
)
return True
else:
return False
def match_text(self): def match_text(self):
match = self.match( match = self.match(
r""" r"""
(.*?) # anything, followed by: (.*?) # anything, followed by:
( (
(?<=\n)(?=[ \t]*(?=%|\#\#)) # an eval or line-based (?<=\n)(?=[ \t]*(?=%|\#\#)) # an eval or line-based
# comment preceded by a # comment, preceded by a
# consumed newline and whitespace # consumed newline and whitespace
| |
(?=\${) # an expression (?=\${) # an expression
| |
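The new match_percent rule handles the escaping of control-line markers; a rough behavioral sketch (template text is illustrative):

from mako.template import Template

# a leading "%%" is emitted as a literal "%" instead of starting a control line
Template("%% literal percent").render()   # -> '% literal percent'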

View file

@ -1,5 +1,5 @@
# mako/lookup.py # mako/lookup.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php
@ -178,7 +178,6 @@ class TemplateLookup(TemplateCollection):
lexer_cls=None, lexer_cls=None,
include_error_handler=None, include_error_handler=None,
): ):
self.directories = [ self.directories = [
posixpath.normpath(d) for d in util.to_list(directories, ()) posixpath.normpath(d) for d in util.to_list(directories, ())
] ]

View file

@ -1,5 +1,5 @@
# mako/parsetree.py # mako/parsetree.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@ -1,5 +1,5 @@
# mako/pygen.py # mako/pygen.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@ -1,5 +1,5 @@
# mako/pyparser.py # mako/pyparser.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php
@ -64,7 +64,6 @@ class FindIdentifiers(_ast_util.NodeVisitor):
self._add_declared(node.name) self._add_declared(node.name)
def visit_Assign(self, node): def visit_Assign(self, node):
# flip around the visiting of Assign so the expression gets # flip around the visiting of Assign so the expression gets
# evaluated first, in the case of a clause like "x=x+5" (x # evaluated first, in the case of a clause like "x=x+5" (x
# is undeclared) # is undeclared)
@ -99,7 +98,6 @@ class FindIdentifiers(_ast_util.NodeVisitor):
yield arg yield arg
def _visit_function(self, node, islambda): def _visit_function(self, node, islambda):
# push function state onto stack. dont log any more # push function state onto stack. dont log any more
# identifiers as "declared" until outside of the function, # identifiers as "declared" until outside of the function,
# but keep logging identifiers as "undeclared". track # but keep logging identifiers as "undeclared". track
@ -122,7 +120,6 @@ class FindIdentifiers(_ast_util.NodeVisitor):
self.local_ident_stack = local_ident_stack self.local_ident_stack = local_ident_stack
def visit_For(self, node): def visit_For(self, node):
# flip around visit # flip around visit
self.visit(node.iter) self.visit(node.iter)

View file

@ -530,7 +530,7 @@ class Namespace:
def _populate(self, d, l): def _populate(self, d, l):
for ident in l: for ident in l:
if ident == "*": if ident == "*":
for (k, v) in self._get_star(): for k, v in self._get_star():
d[k] = v d[k] = v
else: else:
d[ident] = getattr(self, ident) d[ident] = getattr(self, ident)

View file

@ -1,5 +1,5 @@
# mako/template.py # mako/template.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php
@ -26,7 +26,6 @@ from mako.lexer import Lexer
class Template: class Template:
r"""Represents a compiled template. r"""Represents a compiled template.
:class:`.Template` includes a reference to the original :class:`.Template` includes a reference to the original

View file

@ -103,7 +103,6 @@ def _assert_raises(
check_context=False, check_context=False,
cause_cls=None, cause_cls=None,
): ):
with _expect_raises(except_cls, msg, check_context, cause_cls) as ec: with _expect_raises(except_cls, msg, check_context, cause_cls) as ec:
callable_(*args, **kwargs) callable_(*args, **kwargs)
return ec.error return ec.error

View file

@ -19,6 +19,10 @@ def result_lines(result):
] ]
def result_raw_lines(result):
return [x for x in re.split(r"\r?\n", result) if x.strip() != ""]
def make_path( def make_path(
filespec: Union[Path, str], filespec: Union[Path, str],
make_absolute: bool = True, make_absolute: bool = True,

View file

@ -1,5 +1,5 @@
# mako/util.py # mako/util.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file> # Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
# #
# This module is part of Mako and is released under # This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php # the MIT License: http://www.opensource.org/licenses/mit-license.php

View file

@ -1,5 +1,4 @@
import functools import functools
import re
import string import string
import sys import sys
import typing as t import typing as t
@ -14,10 +13,7 @@ if t.TYPE_CHECKING:
_P = te.ParamSpec("_P") _P = te.ParamSpec("_P")
__version__ = "2.1.3" __version__ = "2.1.5"
_strip_comments_re = re.compile(r"<!--.*?-->", re.DOTALL)
_strip_tags_re = re.compile(r"<.*?>", re.DOTALL)
def _simple_escaping_wrapper(func: "t.Callable[_P, str]") -> "t.Callable[_P, Markup]": def _simple_escaping_wrapper(func: "t.Callable[_P, str]") -> "t.Callable[_P, Markup]":
@ -162,9 +158,41 @@ class Markup(str):
>>> Markup("Main &raquo;\t<em>About</em>").striptags() >>> Markup("Main &raquo;\t<em>About</em>").striptags()
'Main » About' 'Main » About'
""" """
# Use two regexes to avoid ambiguous matches. value = str(self)
value = _strip_comments_re.sub("", self)
value = _strip_tags_re.sub("", value) # Look for comments then tags separately. Otherwise, a comment that
# contains a tag would end early, leaving some of the comment behind.
while True:
# keep finding comment start marks
start = value.find("<!--")
if start == -1:
break
# find a comment end mark beyond the start, otherwise stop
end = value.find("-->", start)
if end == -1:
break
value = f"{value[:start]}{value[end + 3:]}"
# remove tags using the same method
while True:
start = value.find("<")
if start == -1:
break
end = value.find(">", start)
if end == -1:
break
value = f"{value[:start]}{value[end + 1:]}"
# collapse spaces
value = " ".join(value.split()) value = " ".join(value.split())
return self.__class__(value).unescape() return self.__class__(value).unescape()
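A quick check of the behavior the rewritten striptags() implements, with an illustrative input:

from markupsafe import Markup

# the comment is removed before tags, so markup inside it cannot end the scan early
Markup("<p>Hello <!-- <b>ignored</b> --> <em>world</em></p>").striptags()
# -> 'Hello world'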

View file

@ -2,6 +2,7 @@
Utilities for determining application-specific dirs. See <https://github.com/platformdirs/platformdirs> for details and Utilities for determining application-specific dirs. See <https://github.com/platformdirs/platformdirs> for details and
usage. usage.
""" """
from __future__ import annotations from __future__ import annotations
import os import os
@ -14,11 +15,7 @@ from .version import __version_tuple__ as __version_info__
if TYPE_CHECKING: if TYPE_CHECKING:
from pathlib import Path from pathlib import Path
from typing import Literal
if sys.version_info >= (3, 8): # pragma: no cover (py38+)
from typing import Literal
else: # pragma: no cover (py38+)
from typing_extensions import Literal
def _set_platform_dir_class() -> type[PlatformDirsABC]: def _set_platform_dir_class() -> type[PlatformDirsABC]:

View file

@ -1,4 +1,5 @@
"""Main entry point.""" """Main entry point."""
from __future__ import annotations from __future__ import annotations
from platformdirs import PlatformDirs, __version__ from platformdirs import PlatformDirs, __version__

View file

@ -1,4 +1,5 @@
"""Android.""" """Android."""
from __future__ import annotations from __future__ import annotations
import os import os

View file

@ -1,4 +1,5 @@
"""Base API.""" """Base API."""
from __future__ import annotations from __future__ import annotations
import os import os
@ -7,12 +8,7 @@ from pathlib import Path
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
if TYPE_CHECKING: if TYPE_CHECKING:
import sys from typing import Iterator, Literal
if sys.version_info >= (3, 8): # pragma: no cover (py38+)
from typing import Literal
else: # pragma: no cover (py38+)
from typing_extensions import Literal
class PlatformDirsABC(ABC): class PlatformDirsABC(ABC):
@ -241,3 +237,43 @@ class PlatformDirsABC(ABC):
def site_runtime_path(self) -> Path: def site_runtime_path(self) -> Path:
""":return: runtime path shared by users""" """:return: runtime path shared by users"""
return Path(self.site_runtime_dir) return Path(self.site_runtime_dir)
def iter_config_dirs(self) -> Iterator[str]:
""":yield: all user and site configuration directories."""
yield self.user_config_dir
yield self.site_config_dir
def iter_data_dirs(self) -> Iterator[str]:
""":yield: all user and site data directories."""
yield self.user_data_dir
yield self.site_data_dir
def iter_cache_dirs(self) -> Iterator[str]:
""":yield: all user and site cache directories."""
yield self.user_cache_dir
yield self.site_cache_dir
def iter_runtime_dirs(self) -> Iterator[str]:
""":yield: all user and site runtime directories."""
yield self.user_runtime_dir
yield self.site_runtime_dir
def iter_config_paths(self) -> Iterator[Path]:
""":yield: all user and site configuration paths."""
for path in self.iter_config_dirs():
yield Path(path)
def iter_data_paths(self) -> Iterator[Path]:
""":yield: all user and site data paths."""
for path in self.iter_data_dirs():
yield Path(path)
def iter_cache_paths(self) -> Iterator[Path]:
""":yield: all user and site cache paths."""
for path in self.iter_cache_dirs():
yield Path(path)
def iter_runtime_paths(self) -> Iterator[Path]:
""":yield: all user and site runtime paths."""
for path in self.iter_runtime_dirs():
yield Path(path)
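The new iter_*_dirs/iter_*_paths helpers walk the user directory first and then the site directories; a minimal sketch (app and author names are illustrative):

from platformdirs import PlatformDirs

dirs = PlatformDirs("MyApp", "MyCompany", multipath=True)
for path in dirs.iter_config_paths():   # user config path, then site config path(s)
    print(path)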

View file

@ -1,4 +1,5 @@
"""macOS.""" """macOS."""
from __future__ import annotations from __future__ import annotations
import os.path import os.path

View file

@ -1,10 +1,12 @@
"""Unix.""" """Unix."""
from __future__ import annotations from __future__ import annotations
import os import os
import sys import sys
from configparser import ConfigParser from configparser import ConfigParser
from pathlib import Path from pathlib import Path
from typing import Iterator
from .api import PlatformDirsABC from .api import PlatformDirsABC
@ -43,24 +45,24 @@ class Unix(PlatformDirsABC):
return self._append_app_name_and_version(path) return self._append_app_name_and_version(path)
@property @property
def site_data_dir(self) -> str: def _site_data_dirs(self) -> list[str]:
"""
:return: data directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>` is
enabled and ``XDG_DATA_DIR`` is set and a multi path the response is also a multi path separated by the OS
path separator), e.g. ``/usr/local/share/$appname/$version`` or ``/usr/share/$appname/$version``
"""
# XDG default for $XDG_DATA_DIRS; only first, if multipath is False
path = os.environ.get("XDG_DATA_DIRS", "") path = os.environ.get("XDG_DATA_DIRS", "")
if not path.strip(): if not path.strip():
path = f"/usr/local/share{os.pathsep}/usr/share" path = f"/usr/local/share{os.pathsep}/usr/share"
return self._with_multi_path(path) return [self._append_app_name_and_version(p) for p in path.split(os.pathsep)]
def _with_multi_path(self, path: str) -> str: @property
path_list = path.split(os.pathsep) def site_data_dir(self) -> str:
"""
:return: data directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>` is
enabled and ``XDG_DATA_DIRS`` is set and a multi path the response is also a multi path separated by the
OS path separator), e.g. ``/usr/local/share/$appname/$version`` or ``/usr/share/$appname/$version``
"""
# XDG default for $XDG_DATA_DIRS; only first, if multipath is False
dirs = self._site_data_dirs
if not self.multipath: if not self.multipath:
path_list = path_list[0:1] return dirs[0]
path_list = [self._append_app_name_and_version(os.path.expanduser(p)) for p in path_list] # noqa: PTH111 return os.pathsep.join(dirs)
return os.pathsep.join(path_list)
@property @property
def user_config_dir(self) -> str: def user_config_dir(self) -> str:
@ -74,17 +76,24 @@ class Unix(PlatformDirsABC):
return self._append_app_name_and_version(path) return self._append_app_name_and_version(path)
@property @property
def site_config_dir(self) -> str: def _site_config_dirs(self) -> list[str]:
"""
:return: config directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>`
is enabled and ``XDG_DATA_DIR`` is set and a multi path the response is also a multi path separated by the OS
path separator), e.g. ``/etc/xdg/$appname/$version``
"""
# XDG default for $XDG_CONFIG_DIRS only first, if multipath is False
path = os.environ.get("XDG_CONFIG_DIRS", "") path = os.environ.get("XDG_CONFIG_DIRS", "")
if not path.strip(): if not path.strip():
path = "/etc/xdg" path = "/etc/xdg"
return self._with_multi_path(path) return [self._append_app_name_and_version(p) for p in path.split(os.pathsep)]
@property
def site_config_dir(self) -> str:
"""
:return: config directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>`
is enabled and ``XDG_CONFIG_DIRS`` is set and a multi path the response is also a multi path separated by
the OS path separator), e.g. ``/etc/xdg/$appname/$version``
"""
# XDG default for $XDG_CONFIG_DIRS only first, if multipath is False
dirs = self._site_config_dirs
if not self.multipath:
return dirs[0]
return os.pathsep.join(dirs)
@property @property
def user_cache_dir(self) -> str: def user_cache_dir(self) -> str:
@ -99,8 +108,8 @@ class Unix(PlatformDirsABC):
@property @property
def site_cache_dir(self) -> str: def site_cache_dir(self) -> str:
""":return: cache directory shared by users, e.g. ``/var/tmp/$appname/$version``""" """:return: cache directory shared by users, e.g. ``/var/cache/$appname/$version``"""
return self._append_app_name_and_version("/var/tmp") # noqa: S108 return self._append_app_name_and_version("/var/cache")
@property @property
def user_state_dir(self) -> str: def user_state_dir(self) -> str:
@ -215,6 +224,16 @@ class Unix(PlatformDirsABC):
directory = directory.split(os.pathsep)[0] directory = directory.split(os.pathsep)[0]
return Path(directory) return Path(directory)
def iter_config_dirs(self) -> Iterator[str]:
""":yield: all user and site configuration directories."""
yield self.user_config_dir
yield from self._site_config_dirs
def iter_data_dirs(self) -> Iterator[str]:
""":yield: all user and site data directories."""
yield self.user_data_dir
yield from self._site_data_dirs
def _get_user_media_dir(env_var: str, fallback_tilde_path: str) -> str: def _get_user_media_dir(env_var: str, fallback_tilde_path: str) -> str:
media_dir = _get_user_dirs_folder(env_var) media_dir = _get_user_dirs_folder(env_var)

View file

@ -12,5 +12,5 @@ __version__: str
__version_tuple__: VERSION_TUPLE __version_tuple__: VERSION_TUPLE
version_tuple: VERSION_TUPLE version_tuple: VERSION_TUPLE
__version__ = version = '3.11.0' __version__ = version = '4.2.0'
__version_tuple__ = version_tuple = (3, 11, 0) __version_tuple__ = version_tuple = (4, 2, 0)

View file

@ -1,4 +1,5 @@
"""Windows.""" """Windows."""
from __future__ import annotations from __future__ import annotations
import ctypes import ctypes

View file

@ -22,8 +22,8 @@ from pytz.tzfile import build_tzinfo
# The IANA (nee Olson) database is updated several times a year. # The IANA (nee Olson) database is updated several times a year.
OLSON_VERSION = '2023c' OLSON_VERSION = '2024a'
VERSION = '2023.3' # pip compatible version number. VERSION = '2024.1' # pip compatible version number.
__version__ = VERSION __version__ = VERSION
OLSEN_VERSION = OLSON_VERSION # Old releases had this misspelling OLSEN_VERSION = OLSON_VERSION # Old releases had this misspelling

View file

@ -24,7 +24,8 @@ def memorized_timedelta(seconds):
_timedelta_cache[seconds] = delta _timedelta_cache[seconds] = delta
return delta return delta
_epoch = datetime.utcfromtimestamp(0)
_epoch = datetime(1970, 1, 1, 0, 0) # datetime.utcfromtimestamp(0)
_datetime_cache = {0: _epoch} _datetime_cache = {0: _epoch}
@ -33,12 +34,13 @@ def memorized_datetime(seconds):
try: try:
return _datetime_cache[seconds] return _datetime_cache[seconds]
except KeyError: except KeyError:
# NB. We can't just do datetime.utcfromtimestamp(seconds) as this # NB. We can't just do datetime.fromtimestamp(seconds, tz=timezone.utc).replace(tzinfo=None)
# fails with negative values under Windows (Bug #90096) # as this fails with negative values under Windows (Bug #90096)
dt = _epoch + timedelta(seconds=seconds) dt = _epoch + timedelta(seconds=seconds)
_datetime_cache[seconds] = dt _datetime_cache[seconds] = dt
return dt return dt
_ttinfo_cache = {} _ttinfo_cache = {}
@ -55,6 +57,7 @@ def memorized_ttinfo(*args):
_ttinfo_cache[args] = ttinfo _ttinfo_cache[args] = ttinfo
return ttinfo return ttinfo
_notime = memorized_timedelta(0) _notime = memorized_timedelta(0)
@ -355,7 +358,7 @@ class DstTzInfo(BaseTzInfo):
is_dst=False) + timedelta(hours=6) is_dst=False) + timedelta(hours=6)
# If we get this far, we have multiple possible timezones - this # If we get this far, we have multiple possible timezones - this
# is an ambiguous case occuring during the end-of-DST transition. # is an ambiguous case occurring during the end-of-DST transition.
# If told to be strict, raise an exception since we have an # If told to be strict, raise an exception since we have an
# ambiguous case # ambiguous case
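The _epoch change above drops datetime.utcfromtimestamp(0) (deprecated in Python 3.12) in favour of a plain naive epoch; a quick equivalence check with an illustrative timestamp:

from datetime import datetime, timedelta, timezone

_epoch = datetime(1970, 1, 1)
dt = _epoch + timedelta(seconds=86400)   # 1970-01-02 00:00:00, also safe for negative timestamps on Windows
assert dt == datetime.fromtimestamp(86400, tz=timezone.utc).replace(tzinfo=None)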

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View file

@ -3,17 +3,22 @@
# This file is in the public domain, so clarified as of # This file is in the public domain, so clarified as of
# 2009-05-17 by Arthur David Olson. # 2009-05-17 by Arthur David Olson.
# #
# From Paul Eggert (2022-11-18): # From Paul Eggert (2023-09-06):
# This file contains a table of two-letter country codes. Columns are # This file contains a table of two-letter country codes. Columns are
# separated by a single tab. Lines beginning with '#' are comments. # separated by a single tab. Lines beginning with '#' are comments.
# All text uses UTF-8 encoding. The columns of the table are as follows: # All text uses UTF-8 encoding. The columns of the table are as follows:
# #
# 1. ISO 3166-1 alpha-2 country code, current as of # 1. ISO 3166-1 alpha-2 country code, current as of
# ISO 3166-1 N1087 (2022-09-02). See: Updates on ISO 3166-1 # ISO/TC 46 N1108 (2023-04-05). See: ISO/TC 46 Documents
# https://isotc.iso.org/livelink/livelink/Open/16944257 # https://www.iso.org/committee/48750.html?view=documents
# 2. The usual English name for the coded region, # 2. The usual English name for the coded region. This sometimes
# chosen so that alphabetic sorting of subsets produces helpful lists. # departs from ISO-listed names, sometimes so that sorted subsets
# This is not the same as the English name in the ISO 3166 tables. # of names are useful (e.g., "Samoa (American)" and "Samoa
# (western)" rather than "American Samoa" and "Samoa"),
# sometimes to avoid confusion among non-experts (e.g.,
# "Czech Republic" and "Turkey" rather than "Czechia" and "Türkiye"),
# and sometimes to omit needless detail or churn (e.g., "Netherlands"
# rather than "Netherlands (the)" or "Netherlands (Kingdom of the)").
# #
# The table is sorted by country code. # The table is sorted by country code.
# #

View file

@ -3,13 +3,10 @@
# This file is in the public domain. # This file is in the public domain.
# This file is generated automatically from the data in the public-domain # This file is generated automatically from the data in the public-domain
# NIST format leap-seconds.list file, which can be copied from # NIST/IERS format leap-seconds.list file, which can be copied from
# <ftp://ftp.nist.gov/pub/time/leap-seconds.list>
# or <ftp://ftp.boulder.nist.gov/pub/time/leap-seconds.list>.
# The NIST file is used instead of its IERS upstream counterpart
# <https://hpiers.obspm.fr/iers/bul/bulc/ntp/leap-seconds.list> # <https://hpiers.obspm.fr/iers/bul/bulc/ntp/leap-seconds.list>
# because under US law the NIST file is public domain # or, in a variant with different comments, from
# whereas the IERS file's copyright and license status is unclear. # <ftp://ftp.boulder.nist.gov/pub/time/leap-seconds.list>.
# For more about leap-seconds.list, please see # For more about leap-seconds.list, please see
# The NTP Timescale and Leap Seconds # The NTP Timescale and Leap Seconds
# <https://www.eecis.udel.edu/~mills/leap.html>. # <https://www.eecis.udel.edu/~mills/leap.html>.
@ -72,11 +69,11 @@ Leap 2016 Dec 31 23:59:60 + S
# Any additional leap seconds will come after this. # Any additional leap seconds will come after this.
# This Expires line is commented out for now, # This Expires line is commented out for now,
# so that pre-2020a zic implementations do not reject this file. # so that pre-2020a zic implementations do not reject this file.
#Expires 2023 Dec 28 00:00:00 #Expires 2024 Dec 28 00:00:00
# POSIX timestamps for the data in this file: # POSIX timestamps for the data in this file:
#updated 1467936000 (2016-07-08 00:00:00 UTC) #updated 1704708379 (2024-01-08 10:06:19 UTC)
#expires 1703721600 (2023-12-28 00:00:00 UTC) #expires 1735344000 (2024-12-28 00:00:00 UTC)
# Updated through IERS Bulletin C65 # Updated through IERS Bulletin C (https://hpiers.obspm.fr/iers/bul/bulc/bulletinc.dat)
# File expires on: 28 December 2023 # File expires on 28 December 2024

File diff suppressed because it is too large.

View file

@@ -48,7 +48,7 @@ AR -3124-06411 America/Argentina/Cordoba Argentina (most areas: CB, CC, CN, ER,
AR -2447-06525 America/Argentina/Salta Salta (SA, LP, NQ, RN) AR -2447-06525 America/Argentina/Salta Salta (SA, LP, NQ, RN)
AR -2411-06518 America/Argentina/Jujuy Jujuy (JY) AR -2411-06518 America/Argentina/Jujuy Jujuy (JY)
AR -2649-06513 America/Argentina/Tucuman Tucuman (TM) AR -2649-06513 America/Argentina/Tucuman Tucuman (TM)
AR -2828-06547 America/Argentina/Catamarca Catamarca (CT); Chubut (CH) AR -2828-06547 America/Argentina/Catamarca Catamarca (CT), Chubut (CH)
AR -2926-06651 America/Argentina/La_Rioja La Rioja (LR) AR -2926-06651 America/Argentina/La_Rioja La Rioja (LR)
AR -3132-06831 America/Argentina/San_Juan San Juan (SJ) AR -3132-06831 America/Argentina/San_Juan San Juan (SJ)
AR -3253-06849 America/Argentina/Mendoza Mendoza (MZ) AR -3253-06849 America/Argentina/Mendoza Mendoza (MZ)
@@ -87,7 +87,7 @@ BN +0456+11455 Asia/Brunei
BO -1630-06809 America/La_Paz BO -1630-06809 America/La_Paz
BQ +120903-0681636 America/Kralendijk BQ +120903-0681636 America/Kralendijk
BR -0351-03225 America/Noronha Atlantic islands BR -0351-03225 America/Noronha Atlantic islands
BR -0127-04829 America/Belem Para (east); Amapa BR -0127-04829 America/Belem Para (east), Amapa
BR -0343-03830 America/Fortaleza Brazil (northeast: MA, PI, CE, RN, PB) BR -0343-03830 America/Fortaleza Brazil (northeast: MA, PI, CE, RN, PB)
BR -0803-03454 America/Recife Pernambuco BR -0803-03454 America/Recife Pernambuco
BR -0712-04812 America/Araguaina Tocantins BR -0712-04812 America/Araguaina Tocantins
@@ -107,21 +107,21 @@ BT +2728+08939 Asia/Thimphu
BW -2439+02555 Africa/Gaborone BW -2439+02555 Africa/Gaborone
BY +5354+02734 Europe/Minsk BY +5354+02734 Europe/Minsk
BZ +1730-08812 America/Belize BZ +1730-08812 America/Belize
CA +4734-05243 America/St_Johns Newfoundland; Labrador (southeast) CA +4734-05243 America/St_Johns Newfoundland, Labrador (SE)
CA +4439-06336 America/Halifax Atlantic - NS (most areas); PE CA +4439-06336 America/Halifax Atlantic - NS (most areas), PE
CA +4612-05957 America/Glace_Bay Atlantic - NS (Cape Breton) CA +4612-05957 America/Glace_Bay Atlantic - NS (Cape Breton)
CA +4606-06447 America/Moncton Atlantic - New Brunswick CA +4606-06447 America/Moncton Atlantic - New Brunswick
CA +5320-06025 America/Goose_Bay Atlantic - Labrador (most areas) CA +5320-06025 America/Goose_Bay Atlantic - Labrador (most areas)
CA +5125-05707 America/Blanc-Sablon AST - QC (Lower North Shore) CA +5125-05707 America/Blanc-Sablon AST - QC (Lower North Shore)
CA +4339-07923 America/Toronto Eastern - ON, QC (most areas) CA +4339-07923 America/Toronto Eastern - ON & QC (most areas)
CA +6344-06828 America/Iqaluit Eastern - NU (most areas) CA +6344-06828 America/Iqaluit Eastern - NU (most areas)
CA +484531-0913718 America/Atikokan EST - ON (Atikokan); NU (Coral H) CA +484531-0913718 America/Atikokan EST - ON (Atikokan), NU (Coral H)
CA +4953-09709 America/Winnipeg Central - ON (west); Manitoba CA +4953-09709 America/Winnipeg Central - ON (west), Manitoba
CA +744144-0944945 America/Resolute Central - NU (Resolute) CA +744144-0944945 America/Resolute Central - NU (Resolute)
CA +624900-0920459 America/Rankin_Inlet Central - NU (central) CA +624900-0920459 America/Rankin_Inlet Central - NU (central)
CA +5024-10439 America/Regina CST - SK (most areas) CA +5024-10439 America/Regina CST - SK (most areas)
CA +5017-10750 America/Swift_Current CST - SK (midwest) CA +5017-10750 America/Swift_Current CST - SK (midwest)
CA +5333-11328 America/Edmonton Mountain - AB; BC (E); NT (E); SK (W) CA +5333-11328 America/Edmonton Mountain - AB, BC(E), NT(E), SK(W)
CA +690650-1050310 America/Cambridge_Bay Mountain - NU (west) CA +690650-1050310 America/Cambridge_Bay Mountain - NU (west)
CA +682059-1334300 America/Inuvik Mountain - NT (west) CA +682059-1334300 America/Inuvik Mountain - NT (west)
CA +4906-11631 America/Creston MST - BC (Creston) CA +4906-11631 America/Creston MST - BC (Creston)
@@ -207,8 +207,8 @@ HT +1832-07220 America/Port-au-Prince
HU +4730+01905 Europe/Budapest HU +4730+01905 Europe/Budapest
ID -0610+10648 Asia/Jakarta Java, Sumatra ID -0610+10648 Asia/Jakarta Java, Sumatra
ID -0002+10920 Asia/Pontianak Borneo (west, central) ID -0002+10920 Asia/Pontianak Borneo (west, central)
ID -0507+11924 Asia/Makassar Borneo (east, south); Sulawesi/Celebes, Bali, Nusa Tengarra; Timor (west) ID -0507+11924 Asia/Makassar Borneo (east, south), Sulawesi/Celebes, Bali, Nusa Tengarra, Timor (west)
ID -0232+14042 Asia/Jayapura New Guinea (West Papua / Irian Jaya); Malukus/Moluccas ID -0232+14042 Asia/Jayapura New Guinea (West Papua / Irian Jaya), Malukus/Moluccas
IE +5320-00615 Europe/Dublin IE +5320-00615 Europe/Dublin
IL +314650+0351326 Asia/Jerusalem IL +314650+0351326 Asia/Jerusalem
IM +5409-00428 Europe/Isle_of_Man IM +5409-00428 Europe/Isle_of_Man
@@ -355,7 +355,7 @@ RU +4310+13156 Asia/Vladivostok MSK+07 - Amur River
RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky
RU +5934+15048 Asia/Magadan MSK+08 - Magadan RU +5934+15048 Asia/Magadan MSK+08 - Magadan
RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island
RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); N Kuril Is RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E), N Kuril Is
RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka
RU +6445+17729 Asia/Anadyr MSK+09 - Bering Sea RU +6445+17729 Asia/Anadyr MSK+09 - Bering Sea
RW -0157+03004 Africa/Kigali RW -0157+03004 Africa/Kigali
@@ -418,7 +418,7 @@ US +470659-1011757 America/North_Dakota/Center Central - ND (Oliver)
US +465042-1012439 America/North_Dakota/New_Salem Central - ND (Morton rural) US +465042-1012439 America/North_Dakota/New_Salem Central - ND (Morton rural)
US +471551-1014640 America/North_Dakota/Beulah Central - ND (Mercer) US +471551-1014640 America/North_Dakota/Beulah Central - ND (Mercer)
US +394421-1045903 America/Denver Mountain (most areas) US +394421-1045903 America/Denver Mountain (most areas)
US +433649-1161209 America/Boise Mountain - ID (south); OR (east) US +433649-1161209 America/Boise Mountain - ID (south), OR (east)
US +332654-1120424 America/Phoenix MST - AZ (except Navajo) US +332654-1120424 America/Phoenix MST - AZ (except Navajo)
US +340308-1181434 America/Los_Angeles Pacific US +340308-1181434 America/Los_Angeles Pacific
US +611305-1495401 America/Anchorage Alaska (most areas) US +611305-1495401 America/Anchorage Alaska (most areas)

View file

@@ -37,7 +37,7 @@
#country- #country-
#codes coordinates TZ comments #codes coordinates TZ comments
AD +4230+00131 Europe/Andorra AD +4230+00131 Europe/Andorra
AE,OM,RE,SC,TF +2518+05518 Asia/Dubai Crozet, Scattered Is AE,OM,RE,SC,TF +2518+05518 Asia/Dubai Crozet
AF +3431+06912 Asia/Kabul AF +3431+06912 Asia/Kabul
AL +4120+01950 Europe/Tirane AL +4120+01950 Europe/Tirane
AM +4011+04430 Asia/Yerevan AM +4011+04430 Asia/Yerevan
@@ -47,12 +47,13 @@ AQ -6736+06253 Antarctica/Mawson Mawson
AQ -6448-06406 Antarctica/Palmer Palmer AQ -6448-06406 Antarctica/Palmer Palmer
AQ -6734-06808 Antarctica/Rothera Rothera AQ -6734-06808 Antarctica/Rothera Rothera
AQ -720041+0023206 Antarctica/Troll Troll AQ -720041+0023206 Antarctica/Troll Troll
AQ -7824+10654 Antarctica/Vostok Vostok
AR -3436-05827 America/Argentina/Buenos_Aires Buenos Aires (BA, CF) AR -3436-05827 America/Argentina/Buenos_Aires Buenos Aires (BA, CF)
AR -3124-06411 America/Argentina/Cordoba most areas: CB, CC, CN, ER, FM, MN, SE, SF AR -3124-06411 America/Argentina/Cordoba most areas: CB, CC, CN, ER, FM, MN, SE, SF
AR -2447-06525 America/Argentina/Salta Salta (SA, LP, NQ, RN) AR -2447-06525 America/Argentina/Salta Salta (SA, LP, NQ, RN)
AR -2411-06518 America/Argentina/Jujuy Jujuy (JY) AR -2411-06518 America/Argentina/Jujuy Jujuy (JY)
AR -2649-06513 America/Argentina/Tucuman Tucumán (TM) AR -2649-06513 America/Argentina/Tucuman Tucumán (TM)
AR -2828-06547 America/Argentina/Catamarca Catamarca (CT); Chubut (CH) AR -2828-06547 America/Argentina/Catamarca Catamarca (CT), Chubut (CH)
AR -2926-06651 America/Argentina/La_Rioja La Rioja (LR) AR -2926-06651 America/Argentina/La_Rioja La Rioja (LR)
AR -3132-06831 America/Argentina/San_Juan San Juan (SJ) AR -3132-06831 America/Argentina/San_Juan San Juan (SJ)
AR -3253-06849 America/Argentina/Mendoza Mendoza (MZ) AR -3253-06849 America/Argentina/Mendoza Mendoza (MZ)
@@ -81,7 +82,7 @@ BG +4241+02319 Europe/Sofia
BM +3217-06446 Atlantic/Bermuda BM +3217-06446 Atlantic/Bermuda
BO -1630-06809 America/La_Paz BO -1630-06809 America/La_Paz
BR -0351-03225 America/Noronha Atlantic islands BR -0351-03225 America/Noronha Atlantic islands
BR -0127-04829 America/Belem Pará (east); Amapá BR -0127-04829 America/Belem Pará (east), Amapá
BR -0343-03830 America/Fortaleza Brazil (northeast: MA, PI, CE, RN, PB) BR -0343-03830 America/Fortaleza Brazil (northeast: MA, PI, CE, RN, PB)
BR -0803-03454 America/Recife Pernambuco BR -0803-03454 America/Recife Pernambuco
BR -0712-04812 America/Araguaina Tocantins BR -0712-04812 America/Araguaina Tocantins
@@ -99,19 +100,19 @@ BR -0958-06748 America/Rio_Branco Acre
BT +2728+08939 Asia/Thimphu BT +2728+08939 Asia/Thimphu
BY +5354+02734 Europe/Minsk BY +5354+02734 Europe/Minsk
BZ +1730-08812 America/Belize BZ +1730-08812 America/Belize
CA +4734-05243 America/St_Johns Newfoundland; Labrador (southeast) CA +4734-05243 America/St_Johns Newfoundland, Labrador (SE)
CA +4439-06336 America/Halifax Atlantic - NS (most areas); PE CA +4439-06336 America/Halifax Atlantic - NS (most areas), PE
CA +4612-05957 America/Glace_Bay Atlantic - NS (Cape Breton) CA +4612-05957 America/Glace_Bay Atlantic - NS (Cape Breton)
CA +4606-06447 America/Moncton Atlantic - New Brunswick CA +4606-06447 America/Moncton Atlantic - New Brunswick
CA +5320-06025 America/Goose_Bay Atlantic - Labrador (most areas) CA +5320-06025 America/Goose_Bay Atlantic - Labrador (most areas)
CA,BS +4339-07923 America/Toronto Eastern - ON, QC (most areas) CA,BS +4339-07923 America/Toronto Eastern - ON & QC (most areas)
CA +6344-06828 America/Iqaluit Eastern - NU (most areas) CA +6344-06828 America/Iqaluit Eastern - NU (most areas)
CA +4953-09709 America/Winnipeg Central - ON (west); Manitoba CA +4953-09709 America/Winnipeg Central - ON (west), Manitoba
CA +744144-0944945 America/Resolute Central - NU (Resolute) CA +744144-0944945 America/Resolute Central - NU (Resolute)
CA +624900-0920459 America/Rankin_Inlet Central - NU (central) CA +624900-0920459 America/Rankin_Inlet Central - NU (central)
CA +5024-10439 America/Regina CST - SK (most areas) CA +5024-10439 America/Regina CST - SK (most areas)
CA +5017-10750 America/Swift_Current CST - SK (midwest) CA +5017-10750 America/Swift_Current CST - SK (midwest)
CA +5333-11328 America/Edmonton Mountain - AB; BC (E); NT (E); SK (W) CA +5333-11328 America/Edmonton Mountain - AB, BC(E), NT(E), SK(W)
CA +690650-1050310 America/Cambridge_Bay Mountain - NU (west) CA +690650-1050310 America/Cambridge_Bay Mountain - NU (west)
CA +682059-1334300 America/Inuvik Mountain - NT (west) CA +682059-1334300 America/Inuvik Mountain - NT (west)
CA +5546-12014 America/Dawson_Creek MST - BC (Dawson Cr, Ft St John) CA +5546-12014 America/Dawson_Creek MST - BC (Dawson Cr, Ft St John)
@@ -126,7 +127,7 @@ CL -3327-07040 America/Santiago most of Chile
CL -5309-07055 America/Punta_Arenas Region of Magallanes CL -5309-07055 America/Punta_Arenas Region of Magallanes
CL -2709-10926 Pacific/Easter Easter Island CL -2709-10926 Pacific/Easter Easter Island
CN +3114+12128 Asia/Shanghai Beijing Time CN +3114+12128 Asia/Shanghai Beijing Time
CN,AQ +4348+08735 Asia/Urumqi Xinjiang Time, Vostok CN +4348+08735 Asia/Urumqi Xinjiang Time
CO +0436-07405 America/Bogota CO +0436-07405 America/Bogota
CR +0956-08405 America/Costa_Rica CR +0956-08405 America/Costa_Rica
CU +2308-08222 America/Havana CU +2308-08222 America/Havana
@@ -171,8 +172,8 @@ HT +1832-07220 America/Port-au-Prince
HU +4730+01905 Europe/Budapest HU +4730+01905 Europe/Budapest
ID -0610+10648 Asia/Jakarta Java, Sumatra ID -0610+10648 Asia/Jakarta Java, Sumatra
ID -0002+10920 Asia/Pontianak Borneo (west, central) ID -0002+10920 Asia/Pontianak Borneo (west, central)
ID -0507+11924 Asia/Makassar Borneo (east, south); Sulawesi/Celebes, Bali, Nusa Tengarra; Timor (west) ID -0507+11924 Asia/Makassar Borneo (east, south), Sulawesi/Celebes, Bali, Nusa Tengarra, Timor (west)
ID -0232+14042 Asia/Jayapura New Guinea (West Papua / Irian Jaya); Malukus/Moluccas ID -0232+14042 Asia/Jayapura New Guinea (West Papua / Irian Jaya), Malukus/Moluccas
IE +5320-00615 Europe/Dublin IE +5320-00615 Europe/Dublin
IL +314650+0351326 Asia/Jerusalem IL +314650+0351326 Asia/Jerusalem
IN +2232+08822 Asia/Kolkata IN +2232+08822 Asia/Kolkata
@@ -251,7 +252,7 @@ PK +2452+06703 Asia/Karachi
PL +5215+02100 Europe/Warsaw PL +5215+02100 Europe/Warsaw
PM +4703-05620 America/Miquelon PM +4703-05620 America/Miquelon
PN -2504-13005 Pacific/Pitcairn PN -2504-13005 Pacific/Pitcairn
PR,AG,CA,AI,AW,BL,BQ,CW,DM,GD,GP,KN,LC,MF,MS,SX,TT,VC,VG,VI +182806-0660622 America/Puerto_Rico AST PR,AG,CA,AI,AW,BL,BQ,CW,DM,GD,GP,KN,LC,MF,MS,SX,TT,VC,VG,VI +182806-0660622 America/Puerto_Rico AST - QC (Lower North Shore)
PS +3130+03428 Asia/Gaza Gaza Strip PS +3130+03428 Asia/Gaza Gaza Strip
PS +313200+0350542 Asia/Hebron West Bank PS +313200+0350542 Asia/Hebron West Bank
PT +3843-00908 Europe/Lisbon Portugal (mainland) PT +3843-00908 Europe/Lisbon Portugal (mainland)
@@ -287,7 +288,7 @@ RU +4310+13156 Asia/Vladivostok MSK+07 - Amur River
RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky
RU +5934+15048 Asia/Magadan MSK+08 - Magadan RU +5934+15048 Asia/Magadan MSK+08 - Magadan
RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island
RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); N Kuril Is RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E), N Kuril Is
RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka
RU +6445+17729 Asia/Anadyr MSK+09 - Bering Sea RU +6445+17729 Asia/Anadyr MSK+09 - Bering Sea
SA,AQ,KW,YE +2438+04643 Asia/Riyadh Syowa SA,AQ,KW,YE +2438+04643 Asia/Riyadh Syowa
@@ -329,7 +330,7 @@ US +470659-1011757 America/North_Dakota/Center Central - ND (Oliver)
US +465042-1012439 America/North_Dakota/New_Salem Central - ND (Morton rural) US +465042-1012439 America/North_Dakota/New_Salem Central - ND (Morton rural)
US +471551-1014640 America/North_Dakota/Beulah Central - ND (Mercer) US +471551-1014640 America/North_Dakota/Beulah Central - ND (Mercer)
US +394421-1045903 America/Denver Mountain (most areas) US +394421-1045903 America/Denver Mountain (most areas)
US +433649-1161209 America/Boise Mountain - ID (south); OR (east) US +433649-1161209 America/Boise Mountain - ID (south), OR (east)
US,CA +332654-1120424 America/Phoenix MST - AZ (most areas), Creston BC US,CA +332654-1120424 America/Phoenix MST - AZ (most areas), Creston BC
US +340308-1181434 America/Los_Angeles Pacific US +340308-1181434 America/Los_Angeles Pacific
US +611305-1495401 America/Anchorage Alaska (most areas) US +611305-1495401 America/Anchorage Alaska (most areas)

View file

@@ -0,0 +1,303 @@
# tzdb timezone descriptions, for users who do not care about old timestamps
#
# This file is in the public domain.
#
# From Paul Eggert (2023-12-18):
# This file contains a table where each row stands for a timezone
# where civil timestamps are predicted to agree from now on.
# This file is like zone1970.tab (see zone1970.tab's comments),
# but with the following changes:
#
# 1. Each timezone corresponds to a set of clocks that are planned
# to agree from now on. This is a larger set of clocks than in
# zone1970.tab, where each timezone's clocks must agree from 1970 on.
# 2. The first column is irrelevant and ignored.
# 3. The table is sorted in a different way:
# first by standard time UTC offset;
# then, if DST is used, by daylight saving UTC offset;
# then by time zone abbreviation.
# 4. Every timezone has a nonempty comments column, with wording
# distinguishing the timezone only from other timezones with the
# same UTC offset at some point during the year.
#
# The format of this table is experimental, and may change in future versions.
#
# This table is intended as an aid for users, to help them select timezones
# appropriate for their practical needs. It is not intended to take or
# endorse any position on legal or territorial claims.
#
#XX coordinates TZ comments
#
# -11 - SST
XX -1416-17042 Pacific/Pago_Pago Midway; Samoa ("SST")
#
# -11
XX -1901-16955 Pacific/Niue Niue
#
# -10 - HST
XX +211825-1575130 Pacific/Honolulu Hawaii ("HST")
#
# -10
XX -1732-14934 Pacific/Tahiti Tahiti; Cook Islands
#
# -10/-09 - HST / HDT (North America DST)
XX +515248-1763929 America/Adak western Aleutians in Alaska ("HST/HDT")
#
# -09:30
XX -0900-13930 Pacific/Marquesas Marquesas
#
# -09
XX -2308-13457 Pacific/Gambier Gambier
#
# -09/-08 - AKST/AKDT (North America DST)
XX +611305-1495401 America/Anchorage most of Alaska ("AKST/AKDT")
#
# -08
XX -2504-13005 Pacific/Pitcairn Pitcairn
#
# -08/-07 - PST/PDT (North America DST)
XX +340308-1181434 America/Los_Angeles Pacific ("PST/PDT") - US & Canada; Mexico near US border
#
# -07 - MST
XX +332654-1120424 America/Phoenix Mountain Standard ("MST") - Arizona; western Mexico; Yukon
#
# -07/-06 - MST/MDT (North America DST)
XX +394421-1045903 America/Denver Mountain ("MST/MDT") - US & Canada; Mexico near US border
#
# -06
XX -0054-08936 Pacific/Galapagos Galápagos
#
# -06 - CST
XX +1924-09909 America/Mexico_City Central Standard ("CST") - Saskatchewan; central Mexico; Central America
#
# -06/-05 (Chile DST)
XX -2709-10926 Pacific/Easter Easter Island
#
# -06/-05 - CST/CDT (North America DST)
XX +415100-0873900 America/Chicago Central ("CST/CDT") - US & Canada; Mexico near US border
#
# -05
XX -1203-07703 America/Lima eastern South America
#
# -05 - EST
XX +175805-0764736 America/Jamaica Eastern Standard ("EST") - Caymans; Jamaica; eastern Mexico; Panama
#
# -05/-04 - CST/CDT (Cuba DST)
XX +2308-08222 America/Havana Cuba
#
# -05/-04 - EST/EDT (North America DST)
XX +404251-0740023 America/New_York Eastern ("EST/EDT") - US & Canada
#
# -04
XX +1030-06656 America/Caracas western South America
#
# -04 - AST
XX +1828-06954 America/Santo_Domingo Atlantic Standard ("AST") - eastern Caribbean
#
# -04/-03 (Chile DST)
XX -3327-07040 America/Santiago most of Chile
#
# -04/-03 (Paraguay DST)
XX -2516-05740 America/Asuncion Paraguay
#
# -04/-03 - AST/ADT (North America DST)
XX +4439-06336 America/Halifax Atlantic ("AST/ADT") - Canada; Bermuda
#
# -03:30/-02:30 - NST/NDT (North America DST)
XX +4734-05243 America/St_Johns Newfoundland ("NST/NDT")
#
# -03
XX -2332-04637 America/Sao_Paulo eastern South America
#
# -03/-02 (North America DST)
XX +4703-05620 America/Miquelon St Pierre & Miquelon
#
# -02
XX -0351-03225 America/Noronha Fernando de Noronha; South Georgia
#
# -02/-01 (EU DST)
XX +6411-05144 America/Nuuk most of Greenland
#
# -01
XX +1455-02331 Atlantic/Cape_Verde Cape Verde
#
# -01/+00 (EU DST)
XX +3744-02540 Atlantic/Azores Azores
# -01/+00 (EU DST) until 2024-03-31; then -02/-01 (EU DST)
XX +7029-02158 America/Scoresbysund Ittoqqortoormiit
#
# +00 - GMT
XX +0519-00402 Africa/Abidjan far western Africa; Iceland ("GMT")
#
# +00/+01 - GMT/BST (EU DST)
XX +513030-0000731 Europe/London United Kingdom ("GMT/BST")
#
# +00/+01 - WET/WEST (EU DST)
XX +3843-00908 Europe/Lisbon western Europe ("WET/WEST")
#
# +00/+02 - Troll DST
XX -720041+0023206 Antarctica/Troll Troll Station in Antarctica
#
# +01 - CET
XX +3647+00303 Africa/Algiers Algeria, Tunisia ("CET")
#
# +01 - WAT
XX +0627+00324 Africa/Lagos western Africa ("WAT")
#
# +01/+00 - IST/GMT (EU DST in reverse)
XX +5320-00615 Europe/Dublin Ireland ("IST/GMT")
#
# +01/+00 - (Morocco DST)
XX +3339-00735 Africa/Casablanca Morocco
#
# +01/+02 - CET/CEST (EU DST)
XX +4852+00220 Europe/Paris central Europe ("CET/CEST")
#
# +02 - CAT
XX -2558+03235 Africa/Maputo central Africa ("CAT")
#
# +02 - EET
XX +3254+01311 Africa/Tripoli Libya; Kaliningrad ("EET")
#
# +02 - SAST
XX -2615+02800 Africa/Johannesburg southern Africa ("SAST")
#
# +02/+03 - EET/EEST (EU DST)
XX +3758+02343 Europe/Athens eastern Europe ("EET/EEST")
#
# +02/+03 - EET/EEST (Egypt DST)
XX +3003+03115 Africa/Cairo Egypt
#
# +02/+03 - EET/EEST (Lebanon DST)
XX +3353+03530 Asia/Beirut Lebanon
#
# +02/+03 - EET/EEST (Moldova DST)
XX +4700+02850 Europe/Chisinau Moldova
#
# +02/+03 - EET/EEST (Palestine DST)
XX +3130+03428 Asia/Gaza Palestine
#
# +02/+03 - IST/IDT (Israel DST)
XX +314650+0351326 Asia/Jerusalem Israel
#
# +03
XX +4101+02858 Europe/Istanbul Near East; Belarus
#
# +03 - EAT
XX -0117+03649 Africa/Nairobi eastern Africa ("EAT")
#
# +03 - MSK
XX +554521+0373704 Europe/Moscow Moscow ("MSK")
#
# +03:30
XX +3540+05126 Asia/Tehran Iran
#
# +04
XX +2518+05518 Asia/Dubai Russia; Caucasus; Persian Gulf; Seychelles; Réunion
#
# +04:30
XX +3431+06912 Asia/Kabul Afghanistan
#
# +05
XX +4120+06918 Asia/Tashkent Russia; west Kazakhstan; Tajikistan; Turkmenistan; Uzbekistan; Maldives
#
# +05 - PKT
XX +2452+06703 Asia/Karachi Pakistan ("PKT")
#
# +05:30
XX +0656+07951 Asia/Colombo Sri Lanka
#
# +05:30 - IST
XX +2232+08822 Asia/Kolkata India ("IST")
#
# +05:45
XX +2743+08519 Asia/Kathmandu Nepal
#
# +06
XX +2343+09025 Asia/Dhaka Russia; Kyrgyzstan; Bhutan; Bangladesh; Chagos
# +06 until 2024-03-01; then +05
XX +4315+07657 Asia/Almaty Kazakhstan (except western areas)
#
# +06:30
XX +1647+09610 Asia/Yangon Myanmar; Cocos
#
# +07
XX +1345+10031 Asia/Bangkok Russia; Indochina; Christmas Island
#
# +07 - WIB
XX -0610+10648 Asia/Jakarta Indonesia ("WIB")
#
# +08
XX +0117+10351 Asia/Singapore Russia; Brunei; Malaysia; Singapore
#
# +08 - AWST
XX -3157+11551 Australia/Perth Western Australia ("AWST")
#
# +08 - CST
XX +3114+12128 Asia/Shanghai China ("CST")
#
# +08 - HKT
XX +2217+11409 Asia/Hong_Kong Hong Kong ("HKT")
#
# +08 - PHT
XX +1435+12100 Asia/Manila Philippines ("PHT")
#
# +08 - WITA
XX -0507+11924 Asia/Makassar Indonesia ("WITA")
#
# +08:45
XX -3143+12852 Australia/Eucla Eucla
#
# +09
XX +5203+11328 Asia/Chita Russia; Palau; East Timor
#
# +09 - JST
XX +353916+1394441 Asia/Tokyo Japan ("JST")
#
# +09 - KST
XX +3733+12658 Asia/Seoul Korea ("KST")
#
# +09 - WIT
XX -0232+14042 Asia/Jayapura Indonesia ("WIT")
#
# +09:30 - ACST
XX -1228+13050 Australia/Darwin Northern Territory ("ACST")
#
# +09:30/+10:30 - ACST/ACDT (Australia DST)
XX -3455+13835 Australia/Adelaide South Australia ("ACST/ACDT")
#
# +10
XX +4310+13156 Asia/Vladivostok Russia; Yap; Chuuk; Papua New Guinea; Dumont d'Urville
#
# +10 - AEST
XX -2728+15302 Australia/Brisbane Queensland ("AEST")
#
# +10 - ChST
XX +1328+14445 Pacific/Guam Mariana Islands ("ChST")
#
# +10/+11 - AEST/AEDT (Australia DST)
XX -3352+15113 Australia/Sydney southeast Australia ("AEST/AEDT")
#
# +10:30/+11
XX -3133+15905 Australia/Lord_Howe Lord Howe Island
#
# +11
XX -0613+15534 Pacific/Bougainville Russia; Kosrae; Bougainville; Solomons
#
# +11/+12 (Australia DST)
XX -2903+16758 Pacific/Norfolk Norfolk Island
#
# +12
XX +5301+15839 Asia/Kamchatka Russia; Tuvalu; Fiji; etc.
#
# +12/+13 (New Zealand DST)
XX -3652+17446 Pacific/Auckland New Zealand ("NZST/NZDT")
#
# +12:45/+13:45 (Chatham DST)
XX -4357-17633 Pacific/Chatham Chatham Islands
#
# +13
XX -210800-1751200 Pacific/Tongatapu Kanton; Tokelau; Samoa (western); Tonga
#
# +14
XX +0152-15720 Pacific/Kiritimati Kiritimati
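Rows in the table above follow the layout described in the file's header: an ignorable first field, coordinates, the TZ identifier, and a comment, with '#' lines carrying the UTC-offset grouping. A small parsing sketch, assuming the columns are tab-separated as in zone1970.tab and the file is saved locally as zonenow.tab (path and names are illustrative):

def read_zonenow(path="zonenow.tab"):
    """Return a list of (coordinates, tz, comment) tuples from a zonenow.tab-style file."""
    rows = []
    with open(path, encoding="utf-8") as f:
        for line in f:
            line = line.rstrip("\n")
            if not line or line.startswith("#"):
                continue  # '#' lines hold the UTC-offset headings, not data
            fields = line.split("\t")
            # the first column is irrelevant by design (see item 2 in the header)
            coordinates, tz = fields[1], fields[2]
            comment = fields[3] if len(fields) > 3 else ""
            rows.append((coordinates, tz, comment))
    return rows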

View file

@@ -118,7 +118,7 @@ Serializing multiple objects to JSON lines (newline-delimited JSON)::
""" """
from __future__ import absolute_import from __future__ import absolute_import
__version__ = '3.19.1' __version__ = '3.19.2'
__all__ = [ __all__ = [
'dump', 'dumps', 'load', 'loads', 'dump', 'dumps', 'load', 'loads',
'JSONDecoder', 'JSONDecodeError', 'JSONEncoder', 'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',

Some files were not shown because too many files have changed in this diff.