feat: Upgrade to Python 3.12 (#4675)

Co-authored-by: Hayden <64056131+hay-kot@users.noreply.github.com>
Michael Genson 2024-12-04 22:31:26 -06:00 committed by GitHub
parent 0e6a40e210
commit 87504fbb05
GPG key ID: B5690EEEBB952194
43 changed files with 128 additions and 163 deletions

@@ -1,8 +1,8 @@
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.224.2/containers/python-3/.devcontainer/base.Dockerfile
# [Choice] Python version (use -bullseye variants on local arm64/Apple Silicon): 3, 3.10, 3.9, 3.8, 3.7, 3.6, 3-bullseye, 3.10-bullseye, 3.9-bullseye, 3.8-bullseye, 3.7-bullseye, 3.6-bullseye, 3-buster, 3.10-buster, 3.9-buster, 3.8-buster, 3.7-buster, 3.6-buster
-ARG VARIANT="3.10-bullseye"
+ARG VARIANT="3.12-bullseye"
-FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT}
+FROM mcr.microsoft.com/devcontainers/python:${VARIANT}
# [Choice] Node.js version: none, lts/*, 16, 14, 12, 10
ARG NODE_VERSION="none"

@@ -9,7 +9,7 @@
// Update 'VARIANT' to pick a Python version: 3, 3.10, 3.9, 3.8, 3.7, 3.6
// Append -bullseye or -buster to pin to an OS version.
// Use -bullseye variants on local on arm64/Apple Silicon.
-"VARIANT": "3.10-bullseye",
+"VARIANT": "3.12-bullseye",
// Options
"NODE_VERSION": "16"
}

@@ -47,7 +47,7 @@ jobs:
- name: Set up python
uses: actions/setup-python@v5
with:
-python-version: "3.10"
+python-version: "3.12"
- name: Install Poetry
uses: snok/install-poetry@v1

@@ -18,7 +18,7 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v5
with:
-python-version: "3.10"
+python-version: "3.12"
- name: Set PY
shell: bash

@@ -17,7 +17,7 @@ RUN yarn generate
###############################################
# Base Image - Python
###############################################
-FROM python:3.10-slim as python-base
+FROM python:3.12-slim as python-base
ENV MEALIE_HOME="/app"

@@ -32,7 +32,7 @@ Make sure the VSCode Dev Containers extension is installed, then select "Dev Con
### Prerequisites
-- [Python 3.10](https://www.python.org/downloads/)
+- [Python 3.12](https://www.python.org/downloads/)
- [Poetry](https://python-poetry.org/docs/#installation)
- [Node v16.x](https://nodejs.org/en/)
- [yarn](https://classic.yarnpkg.com/lang/en/docs/install/#mac-stable)

@@ -6,7 +6,7 @@ Create Date: 2024-03-18 02:28:15.896959
"""
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from textwrap import dedent
from typing import Any
from uuid import uuid4
@@ -34,7 +34,7 @@ def new_user_rating(user_id: Any, recipe_id: Any, rating: float | None = None, i
else:
id = "%.32x" % uuid4().int # noqa: UP031
-now = datetime.now(timezone.utc).isoformat()
+now = datetime.now(UTC).isoformat()
return {
"id": id,
"user_id": user_id,

@@ -6,7 +6,7 @@ Create Date: 2024-07-12 16:16:29.973929
"""
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from textwrap import dedent
from typing import Any
from uuid import uuid4
@@ -89,7 +89,7 @@ def dedupe_cookbook_slugs():
def create_household(session: orm.Session, group_id: str) -> str:
# create/insert household
household_id = generate_id()
-timestamp = datetime.now(timezone.utc).isoformat()
+timestamp = datetime.now(UTC).isoformat()
household_data = {
"id": household_id,
"name": settings.DEFAULT_HOUSEHOLD,

@@ -3,7 +3,7 @@ from functools import lru_cache
import requests
-_LAST_RESET = None
+_LAST_RESET: datetime.datetime | None = None
@lru_cache(maxsize=1)
@@ -32,7 +32,7 @@ def get_latest_version() -> str:
global _LAST_RESET
-now = datetime.datetime.now(datetime.timezone.utc)
+now = datetime.datetime.now(datetime.UTC)
if not _LAST_RESET or now - _LAST_RESET > datetime.timedelta(days=MAX_DAYS_OLD):
_LAST_RESET = now

@@ -1,5 +1,5 @@
import abc
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta
from typing import Generic, TypeVar
import jwt
@@ -45,7 +45,7 @@ class AuthProvider(Generic[T], metaclass=abc.ABCMeta):
to_encode = data.copy()
expires_delta = expires_delta or timedelta(hours=settings.TOKEN_TIME)
-expire = datetime.now(timezone.utc) + expires_delta
+expire = datetime.now(UTC) + expires_delta
to_encode["exp"] = expire
to_encode["iss"] = ISS

@@ -1,5 +1,5 @@
import secrets
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta
from pathlib import Path
import jwt
@@ -34,7 +34,7 @@ def create_access_token(data: dict, expires_delta: timedelta | None = None) -> s
to_encode = data.copy()
expires_delta = expires_delta or timedelta(hours=settings.TOKEN_TIME)
-expire = datetime.now(timezone.utc) + expires_delta
+expire = datetime.now(UTC) + expires_delta
to_encode["exp"] = expire
return jwt.encode(to_encode, settings.SECRET, algorithm=ALGORITHM)

@@ -1,7 +1,7 @@
import logging
import os
import secrets
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from pathlib import Path
from typing import Annotated, Any, NamedTuple
@@ -160,7 +160,7 @@ class AppSettings(AppLoggingSettings):
local_tz = tzlocal()
now = datetime.now(local_tz)
local_time = now.replace(hour=local_hour, minute=local_minute)
-utc_time = local_time.astimezone(timezone.utc)
+utc_time = local_time.astimezone(UTC)
self.logger.debug(f"Local time: {local_hour}:{local_minute} | UTC time: {utc_time.hour}:{utc_time.minute}")
return ScheduleTime(utc_time.hour, utc_time.minute)
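
For context, the hunk above converts a configured local wall-clock schedule into UTC hours and minutes. A minimal standalone sketch of the same idea, using zoneinfo instead of the project's tzlocal helper (illustrative only):

    from datetime import UTC, datetime
    from zoneinfo import ZoneInfo

    def local_schedule_to_utc(hour: int, minute: int, tz_name: str) -> tuple[int, int]:
        """Translate a wall-clock schedule time in tz_name into a UTC hour/minute pair."""
        now = datetime.now(ZoneInfo(tz_name))
        local_time = now.replace(hour=hour, minute=minute)
        utc_time = local_time.astimezone(UTC)
        return utc_time.hour, utc_time.minute

    print(local_schedule_to_utc(23, 45, "America/Chicago"))  # (5, 45) while CST (UTC-6) is in effect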

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from sqlalchemy.types import DateTime, TypeDecorator
@@ -7,14 +7,14 @@ def get_utc_now():
"""
Returns the current time in UTC.
"""
-return datetime.now(timezone.utc)
+return datetime.now(UTC)
def get_utc_today():
"""
Returns the current date in UTC.
"""
-return datetime.now(timezone.utc).date()
+return datetime.now(UTC).date()
class NaiveDateTime(TypeDecorator):
@@ -35,7 +35,7 @@ class NaiveDateTime(TypeDecorator):
try:
if value.tzinfo is not None:
-value = value.astimezone(timezone.utc)
+value = value.astimezone(UTC)
return value.replace(tzinfo=None)
except Exception:
return value
@@ -43,7 +43,7 @@ class NaiveDateTime(TypeDecorator):
def process_result_value(self, value: datetime | None, dialect):
try:
if value is not None:
-value = value.replace(tzinfo=timezone.utc)
+value = value.replace(tzinfo=UTC)
except Exception:
pass
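
For context, this TypeDecorator stores naive UTC values and re-attaches UTC on read. The round-trip it implements, sketched without SQLAlchemy (illustrative only; assumes every stored value is UTC):

    from datetime import UTC, datetime

    def to_naive_utc(value: datetime) -> datetime:
        # bind side: normalize aware values to UTC, then drop tzinfo before storing
        if value.tzinfo is not None:
            value = value.astimezone(UTC)
        return value.replace(tzinfo=None)

    def from_naive_utc(value: datetime) -> datetime:
        # result side: stored values are assumed to be UTC, so re-attach it
        return value.replace(tzinfo=UTC)

    aware = datetime(2024, 12, 4, 22, 31, tzinfo=UTC)
    assert from_naive_utc(to_naive_utc(aware)) == aware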

@@ -1,5 +1,5 @@
from contextvars import ContextVar
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from typing import TYPE_CHECKING, Optional
from pydantic import ConfigDict
@@ -227,7 +227,7 @@ def update_shopping_lists(session: orm.Session, _):
if not shopping_list:
continue
-shopping_list.updated_at = datetime.now(timezone.utc)
+shopping_list.updated_at = datetime.now(UTC)
local_session.commit()
except Exception:
local_session.rollback()

@@ -1,4 +1,4 @@
-from datetime import datetime, time, timezone
+from datetime import UTC, datetime, time
from typing import TYPE_CHECKING, Optional
from sqlalchemy import Boolean, ForeignKey, String, Time, orm
@@ -30,7 +30,7 @@ class GroupWebhooksModel(SqlAlchemyBase, BaseMixins):
# New Fields
webhook_type: Mapped[str | None] = mapped_column(String, default="") # Future use for different types of webhooks
-scheduled_time: Mapped[time | None] = mapped_column(Time, default=lambda: datetime.now(timezone.utc).time())
+scheduled_time: Mapped[time | None] = mapped_column(Time, default=lambda: datetime.now(UTC).time())
# Column is no longer used but is kept for since it's super annoying to
# delete a column in SQLite and it's not a big deal to keep it around

@@ -1,4 +1,4 @@
-from datetime import date, datetime, timezone
+from datetime import UTC, date, datetime
from typing import TYPE_CHECKING
import sqlalchemy as sa
@@ -207,7 +207,7 @@ class RecipeModel(SqlAlchemyBase, BaseMixins):
if notes:
self.notes = [Note(**n) for n in notes]
-self.date_updated = datetime.now(timezone.utc)
+self.date_updated = datetime.now(UTC)
# SQLAlchemy events do not seem to register things that are set during auto_init
if name is not None:

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from typing import TYPE_CHECKING
from sqlalchemy import ForeignKey, String
@@ -48,4 +48,4 @@ class RecipeTimelineEvent(SqlAlchemyBase, BaseMixins):
timestamp=None,
**_,
) -> None:
-self.timestamp = timestamp or datetime.now(timezone.utc)
+self.timestamp = timestamp or datetime.now(UTC)

@@ -1,4 +1,4 @@
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta
from typing import TYPE_CHECKING
from uuid import uuid4
@@ -15,7 +15,7 @@ if TYPE_CHECKING:
def defaut_expires_at_time() -> datetime:
-return datetime.now(timezone.utc) + timedelta(days=30)
+return datetime.now(UTC) + timedelta(days=30)
class RecipeShareTokenModel(SqlAlchemyBase, BaseMixins):

@@ -2,7 +2,7 @@ from __future__ import annotations
import random
from collections.abc import Iterable
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from math import ceil
from typing import Any, Generic, TypeVar
@@ -70,7 +70,7 @@ class RepositoryGeneric(Generic[Schema, Model]):
return self._household_id
def _random_seed(self) -> str:
-return str(datetime.now(tz=timezone.utc))
+return str(datetime.now(tz=UTC))
def _log_exception(self, e: Exception) -> None:
self.logger.error(f"Error processing query for Repo model={self.model.__name__} schema={self.schema.__name__}")

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from sqlalchemy import select
@@ -13,7 +13,7 @@ class RepositoryMeals(HouseholdRepositoryGeneric[ReadPlanEntry, GroupMealPlan]):
if not self.household_id:
raise Exception("household_id not set")
-today = datetime.now(tz=timezone.utc).date()
+today = datetime.now(tz=UTC).date()
stmt = select(GroupMealPlan).filter(
GroupMealPlan.date == today, GroupMealPlan.household_id == self.household_id
)

@@ -1,7 +1,7 @@
import re as re
from collections.abc import Sequence
from random import randint
-from typing import cast
+from typing import Self, cast
from uuid import UUID
import sqlalchemy as sa
@@ -10,7 +10,6 @@ from pydantic import UUID4
from slugify import slugify
from sqlalchemy import orm
from sqlalchemy.exc import IntegrityError
-from typing_extensions import Self
from mealie.db.models.household.household import Household
from mealie.db.models.recipe.category import Category
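
For context, `Self` moved into the standard library's typing module in Python 3.11 (PEP 673), which is why the typing_extensions import can be dropped once Python 3.10 support ends. A minimal sketch (illustrative, not Mealie code):

    from typing import Self

    class QueryBuilder:
        def __init__(self) -> None:
            self.filters: list[str] = []

        def where(self, clause: str) -> Self:
            # returning Self keeps the fluent API correctly typed in subclasses
            self.filters.append(clause)
            return self

    print(QueryBuilder().where("group_id = :gid").where("slug = :slug").filters)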

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from functools import cached_property
from fastapi import APIRouter, BackgroundTasks, Depends
@@ -45,7 +45,7 @@ class ReadWebhookController(BaseUserController):
"""Manually re-fires all previously scheduled webhooks for today"""
start_time = datetime.min.time()
-start_dt = datetime.combine(datetime.now(timezone.utc).date(), start_time)
+start_dt = datetime.combine(datetime.now(UTC).date(), start_time)
post_group_webhooks(start_dt=start_dt, group_id=self.group.id, household_id=self.household.id)
@router.get("/{item_id}", response_model=ReadWebhook)

@@ -3,7 +3,7 @@ From Pydantic V1: https://github.com/pydantic/pydantic/blob/abcf81ec104d2da70894
"""
import re
-from datetime import date, datetime, time, timedelta, timezone
+from datetime import UTC, date, datetime, time, timedelta, timezone
date_expr = r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})"
time_expr = (
@@ -39,7 +39,7 @@ iso8601_duration_re = re.compile(
r"$"
)
-EPOCH = datetime(1970, 1, 1, tzinfo=timezone.utc)
+EPOCH = datetime(1970, 1, 1, tzinfo=UTC)
# if greater than this, the number is in ms, if less than or equal it's in seconds
# (in seconds this is 11th October 2603, in ms it's 20th August 1970)
MS_WATERSHED = int(2e10)
@@ -87,12 +87,12 @@ def from_unix_seconds(seconds: int | float) -> datetime:
while abs(seconds) > MS_WATERSHED:
seconds /= 1000
dt = EPOCH + timedelta(seconds=seconds)
-return dt.replace(tzinfo=timezone.utc)
+return dt.replace(tzinfo=UTC)
def _parse_timezone(value: str | None, error: type[Exception]) -> None | int | timezone:
if value == "Z":
-return timezone.utc
+return UTC
elif value is not None:
offset_mins = int(value[-2:]) if len(value) > 3 else 0
offset = 60 * int(value[1:3]) + offset_mins
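
Note that `timezone` stays in this import even though `timezone.utc` becomes `UTC`: the vendored parser still needs the timezone class to build fixed-offset zones for "+HH:MM" suffixes. A small sketch of that distinction (names are illustrative, not the parser above):

    from datetime import UTC, timedelta, timezone

    def offset_to_tzinfo(value: str) -> timezone:
        # "Z" maps to the UTC singleton; "+HH:MM" still needs timezone(timedelta(...))
        if value == "Z":
            return UTC
        sign = -1 if value.startswith("-") else 1
        hours, minutes = int(value[1:3]), int(value[-2:])
        return timezone(sign * timedelta(hours=hours, minutes=minutes))

    print(offset_to_tzinfo("Z"))       # UTC
    print(offset_to_tzinfo("+05:30"))  # UTC+05:30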

@@ -2,16 +2,15 @@ from __future__ import annotations
import re
from collections.abc import Sequence
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from enum import Enum
-from typing import ClassVar, Protocol, TypeVar
+from typing import ClassVar, Protocol, Self, TypeVar
from humps.main import camelize
from pydantic import UUID4, AliasChoices, BaseModel, ConfigDict, Field, model_validator
from sqlalchemy import Select, desc, func, or_, text
from sqlalchemy.orm import InstrumentedAttribute, Session
from sqlalchemy.orm.interfaces import LoaderOption
-from typing_extensions import Self
from mealie.db.models._model_base import SqlAlchemyBase
@@ -88,7 +87,7 @@ class MealieModel(BaseModel):
if not isinstance(val, datetime):
continue
if not val.tzinfo:
-setattr(self, field, val.replace(tzinfo=timezone.utc))
+setattr(self, field, val.replace(tzinfo=UTC))
return self

@@ -32,7 +32,7 @@ class CreateWebhook(MealieModel):
type: datetime is treated as a value with a timezone
"""
parser_funcs = [
-lambda x: parse_datetime(x).astimezone(datetime.timezone.utc).time(),
+lambda x: parse_datetime(x).astimezone(datetime.UTC).time(),
parse_time,
]

@@ -1,4 +1,4 @@
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta
from pydantic import UUID4, ConfigDict, Field
from sqlalchemy.orm import selectinload
@@ -11,7 +11,7 @@ from .recipe import Recipe
def defaut_expires_at_time() -> datetime:
-return datetime.now(timezone.utc) + timedelta(days=30)
+return datetime.now(UTC) + timedelta(days=30)
class RecipeShareTokenCreate(MealieModel):

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from enum import Enum
from pathlib import Path
from typing import Annotated
@@ -40,7 +40,7 @@ class RecipeTimelineEventIn(MealieModel):
message: str | None = Field(None, alias="eventMessage")
image: Annotated[TimelineEventImage | None, Field(validate_default=True)] = TimelineEventImage.does_not_have_image
-timestamp: datetime = datetime.now(timezone.utc)
+timestamp: datetime = datetime.now(UTC)
model_config = ConfigDict(use_enum_values=True)

@@ -1,4 +1,4 @@
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta
from pathlib import Path
from typing import Annotated, Any, Generic, TypeVar
from uuid import UUID
@@ -218,7 +218,7 @@ class PrivateUser(UserOut):
return False
lockout_expires_at = self.locked_at + timedelta(hours=get_app_settings().SECURITY_USER_LOCKOUT_TIME)
-return lockout_expires_at > datetime.now(timezone.utc)
+return lockout_expires_at > datetime.now(UTC)
def directory(self) -> Path:
return PrivateUser.get_directory(self.id)

@@ -25,7 +25,7 @@ class BackupV2(BaseService):
db_file = self.settings.DB_URL.removeprefix("sqlite:///") # type: ignore
# Create a backup of the SQLite database
-timestamp = datetime.datetime.now(datetime.timezone.utc).strftime("%Y.%m.%d")
+timestamp = datetime.datetime.now(datetime.UTC).strftime("%Y.%m.%d")
shutil.copy(db_file, self.directories.DATA_DIR.joinpath(f"mealie_{timestamp}.bak.db"))
def _postgres(self) -> None:
@@ -37,7 +37,7 @@ class BackupV2(BaseService):
exclude_ext = {".zip"}
exclude_dirs = {"backups", ".temp"}
-timestamp = datetime.datetime.now(datetime.timezone.utc).strftime("%Y.%m.%d.%H.%M.%S")
+timestamp = datetime.datetime.now(datetime.UTC).strftime("%Y.%m.%d.%H.%M.%S")
backup_name = f"mealie_{timestamp}.zip"
backup_file = self.directories.BACKUP_DIR / backup_name

@@ -2,7 +2,7 @@ import contextlib
import json
from abc import ABC, abstractmethod
from collections.abc import Generator
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from typing import cast
from urllib.parse import parse_qs, urlencode, urlsplit, urlunsplit
@@ -163,8 +163,8 @@ class WebhookEventListener(EventListenerBase):
with self.ensure_session() as session:
stmt = select(GroupWebhooksModel).where(
GroupWebhooksModel.enabled == True, # noqa: E712 - required for SQLAlchemy comparison
-GroupWebhooksModel.scheduled_time > start_dt.astimezone(timezone.utc).time(),
+GroupWebhooksModel.scheduled_time > start_dt.astimezone(UTC).time(),
-GroupWebhooksModel.scheduled_time <= end_dt.astimezone(timezone.utc).time(),
+GroupWebhooksModel.scheduled_time <= end_dt.astimezone(UTC).time(),
GroupWebhooksModel.group_id == self.group_id,
GroupWebhooksModel.household_id == self.household_id,
)
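
For context, the query above compares the stored time-of-day column against the UTC time-of-day of the window's endpoints. The same windowing logic without SQLAlchemy (illustrative sketch; assumes scheduled times are stored as UTC time values):

    from datetime import UTC, datetime, time

    def in_utc_window(scheduled: time, start_dt: datetime, end_dt: datetime) -> bool:
        # true when a stored UTC time-of-day falls inside (start_dt, end_dt]
        return start_dt.astimezone(UTC).time() < scheduled <= end_dt.astimezone(UTC).time()

    start = datetime(2024, 12, 4, 13, 0, tzinfo=UTC)
    end = datetime(2024, 12, 4, 14, 0, tzinfo=UTC)
    print(in_utc_window(time(13, 30), start, end))  # True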

@@ -1,5 +1,5 @@
import uuid
-from datetime import date, datetime, timezone
+from datetime import UTC, date, datetime
from enum import Enum, auto
from typing import Any
@@ -193,4 +193,4 @@ class Event(MealieModel):
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.event_id = uuid.uuid4()
-self.timestamp = datetime.now(timezone.utc)
+self.timestamp = datetime.now(UTC)

@@ -43,7 +43,7 @@ class Exporter(BaseService):
name="Data Export",
size=pretty_size(export_path.stat().st_size),
filename=export_path.name,
-expires=datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(days=1),
+expires=datetime.datetime.now(datetime.UTC) + datetime.timedelta(days=1),
)
db.group_exports.create(group_data_export)

@@ -1,6 +1,6 @@
import tempfile
import zipfile
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from pathlib import Path
from bs4 import BeautifulSoup
@@ -35,7 +35,7 @@ class CopyMeThatMigrator(BaseMigrator):
self.name = "copymethat"
self.key_aliases = [
-MigrationAlias(key="last_made", alias="made_this", func=lambda x: datetime.now(timezone.utc)),
+MigrationAlias(key="last_made", alias="made_this", func=lambda x: datetime.now(UTC)),
MigrationAlias(key="notes", alias="recipeNotes"),
MigrationAlias(key="orgURL", alias="original_link"),
MigrationAlias(key="rating", alias="ratingValue"),

@@ -1,7 +1,7 @@
import json
import os
import shutil
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from pathlib import Path
from shutil import copytree, rmtree
from typing import Any
@@ -192,7 +192,7 @@ class RecipeService(RecipeServiceBase):
recipe_id=new_recipe.id,
subject=self.t("recipe.recipe-created"),
event_type=TimelineEventType.system,
-timestamp=new_recipe.created_at or datetime.now(timezone.utc),
+timestamp=new_recipe.created_at or datetime.now(UTC),
)
self.repos.recipe_timeline_events.create(timeline_event_data)

@@ -1,5 +1,5 @@
import asyncio
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta
from pathlib import Path
from mealie.core import root_logger
@@ -28,7 +28,7 @@ class SchedulerService:
async def schedule_daily():
-now = datetime.now(timezone.utc)
+now = datetime.now(UTC)
daily_schedule_time = get_app_settings().DAILY_SCHEDULE_TIME_UTC
logger.debug(f"Current time is {now} and DAILY_SCHEDULE_TIME (in UTC) is {daily_schedule_time}")

@@ -1,4 +1,4 @@
-from datetime import datetime, time, timedelta, timezone
+from datetime import UTC, datetime, time, timedelta
from pydantic import UUID4
from sqlalchemy.orm import Session
@@ -45,7 +45,7 @@ def _create_mealplan_timeline_events_for_household(
else:
event_subject = f"{user.full_name} made this for {mealplan.entry_type.value}"
-query_start_time = datetime.combine(datetime.now(timezone.utc).date(), time.min)
+query_start_time = datetime.combine(datetime.now(UTC).date(), time.min)
query_end_time = query_start_time + timedelta(days=1)
query = PaginationQuery(
query_filter=(
@@ -116,7 +116,7 @@ def _create_mealplan_timeline_events_for_group(event_time: datetime, session: Se
def create_mealplan_timeline_events() -> None:
-event_time = datetime.now(timezone.utc)
+event_time = datetime.now(UTC)
with session_context() as session:
repos = get_repositories(session)

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from pydantic import UUID4
@@ -18,7 +18,7 @@ from mealie.services.event_bus_service.event_types import (
EventWebhookData,
)
-last_ran = datetime.now(timezone.utc)
+last_ran = datetime.now(UTC)
def post_group_webhooks(
@@ -32,7 +32,7 @@ def post_group_webhooks(
start_dt = start_dt or last_ran
# end the query at the current time
-last_ran = end_dt = datetime.now(timezone.utc)
+last_ran = end_dt = datetime.now(UTC)
if group_id is None:
# publish the webhook event to each group's event bus
@@ -80,7 +80,7 @@ def post_group_webhooks(
def post_single_webhook(webhook: ReadWebhook, message: str = "") -> None:
-dt = datetime.min.replace(tzinfo=timezone.utc)
+dt = datetime.min.replace(tzinfo=UTC)
event_type = EventTypes.webhook_task
event_document_data = EventWebhookData(

@@ -17,7 +17,7 @@ def purge_group_data_exports(max_minutes_old=ONE_DAY_AS_MINUTES):
logger = root_logger.get_logger()
logger.debug("purging group data exports")
-limit = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(minutes=max_minutes_old)
+limit = datetime.datetime.now(datetime.UTC) - datetime.timedelta(minutes=max_minutes_old)
with session_context() as session:
stmt = select(GroupDataExportsModel).filter(cast(GroupDataExportsModel.expires, NaiveDateTime) <= limit)
@@ -39,7 +39,7 @@ def purge_excess_files() -> None:
directories = get_app_dirs()
logger = root_logger.get_logger()
-limit = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(minutes=ONE_DAY_AS_MINUTES * 2)
+limit = datetime.datetime.now(datetime.UTC) - datetime.timedelta(minutes=ONE_DAY_AS_MINUTES * 2)
for file in directories.GROUPS_DIR.glob("**/export/*.zip"):
# TODO: fix comparison types

@@ -14,7 +14,7 @@ MAX_DAYS_OLD = 2
def purge_password_reset_tokens():
"""Purges all events after x days"""
logger.debug("purging password reset tokens")
-limit = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(days=MAX_DAYS_OLD)
+limit = datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=MAX_DAYS_OLD)
with session_context() as session:
stmt = delete(PasswordResetModel).filter(PasswordResetModel.created_at <= limit)

@@ -14,7 +14,7 @@ MAX_DAYS_OLD = 4
def purge_group_registration():
"""Purges all events after x days"""
logger.debug("purging expired registration tokens")
-limit = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(days=MAX_DAYS_OLD)
+limit = datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=MAX_DAYS_OLD)
with session_context() as session:
stmt = delete(GroupInviteToken).filter(GroupInviteToken.created_at <= limit)

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from mealie.repos.repository_factory import AllRepositories
from mealie.schema.user.user import PrivateUser
@@ -30,7 +30,7 @@ class UserService(BaseService):
return unlocked
def lock_user(self, user: PrivateUser) -> PrivateUser:
-user.locked_at = datetime.now(timezone.utc)
+user.locked_at = datetime.now(UTC)
return self.repos.users.update(user.id, user)
def unlock_user(self, user: PrivateUser) -> PrivateUser:

poetry.lock (generated)

@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
[[package]]
name = "aiofiles"
@@ -67,7 +67,6 @@ files = [
]
[package.dependencies]
-exceptiongroup = {version = "*", markers = "python_version < \"3.11\""}
idna = ">=2.8"
sniffio = ">=1.1"
@@ -117,9 +116,6 @@ files = [
{file = "astroid-3.3.5.tar.gz", hash = "sha256:5cfc40ae9f68311075d27ef68a4841bdc5cc7f6cf86671b49f00607d30188e2d"},
]
-[package.dependencies]
-typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""}
[[package]]
name = "authlib"
version = "1.3.2"
@@ -565,13 +561,13 @@ graph = ["objgraph (>=1.7.2)"]
[[package]]
name = "distlib"
-version = "0.3.6"
+version = "0.3.9"
description = "Distribution utilities"
optional = false
python-versions = "*"
files = [
-{file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"},
+{file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"},
-{file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"},
+{file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"},
]
[[package]]
@@ -585,20 +581,6 @@ files = [
{file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"},
]
-[[package]]
-name = "exceptiongroup"
-version = "1.1.0"
-description = "Backport of PEP 654 (exception groups)"
-optional = false
-python-versions = ">=3.7"
-files = [
-{file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"},
-{file = "exceptiongroup-1.1.0.tar.gz", hash = "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"},
-]
-[package.extras]
-test = ["pytest (>=6)"]
[[package]]
name = "extruct"
version = "0.18.0"
@@ -645,18 +627,19 @@ standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "htt
[[package]]
name = "filelock"
-version = "3.9.0"
+version = "3.16.1"
description = "A platform independent file lock."
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
-{file = "filelock-3.9.0-py3-none-any.whl", hash = "sha256:f58d535af89bb9ad5cd4df046f741f8553a418c01a7856bf0d173bbc9f6bd16d"},
+{file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"},
-{file = "filelock-3.9.0.tar.gz", hash = "sha256:7b319f24340b51f55a2bf7a12ac0755a9b03e718311dac567a0f4f7fabd2f5de"},
+{file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"},
]
[package.extras]
-docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"]
+docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"]
-testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"]
+typing = ["typing-extensions (>=4.12.2)"]
[[package]]
name = "freezegun"
@@ -1549,7 +1532,6 @@ files = [
[package.dependencies]
mypy-extensions = ">=1.0.0"
-tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typing-extensions = ">=4.6.0"
[package.extras]
@@ -1911,18 +1893,19 @@ tests-min = ["defusedxml", "packaging", "pytest"]
[[package]]
name = "platformdirs"
-version = "2.6.2"
+version = "4.3.6"
-description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
-{file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"},
+{file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"},
-{file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"},
+{file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"},
]
[package.extras]
-docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"]
+docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"]
-test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"]
+type = ["mypy (>=1.11.2)"]
[[package]]
name = "pluggy"
@@ -2295,15 +2278,10 @@ files = [
[package.dependencies]
astroid = ">=3.3.5,<=3.4.0-dev0"
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
-dill = [
+dill = {version = ">=0.3.7", markers = "python_version >= \"3.12\""}
-{version = ">=0.2", markers = "python_version < \"3.11\""},
-{version = ">=0.3.7", markers = "python_version >= \"3.12\""},
-{version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""},
-]
isort = ">=4.2.5,<5.13.0 || >5.13.0,<6"
mccabe = ">=0.6,<0.8"
platformdirs = ">=2.2.0"
-tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
tomlkit = ">=0.10.1"
[package.extras]
@@ -2371,11 +2349,9 @@ files = [
[package.dependencies]
colorama = {version = "*", markers = "sys_platform == \"win32\""}
-exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=1.5,<2"
-tomli = {version = ">=1", markers = "python_version < \"3.11\""}
[package.extras]
dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
@@ -2838,7 +2814,6 @@ files = [
[package.dependencies]
markdown-it-py = ">=2.2.0"
pygments = ">=2.13.0,<3.0.0"
-typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""}
[package.extras]
jupyter = ["ipywidgets (>=7.5.1,<9)"]
@@ -2872,19 +2847,23 @@ files = [
[[package]]
name = "setuptools"
-version = "67.1.0"
+version = "75.6.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.9"
files = [
-{file = "setuptools-67.1.0-py3-none-any.whl", hash = "sha256:a7687c12b444eaac951ea87a9627c4f904ac757e7abdc5aac32833234af90378"},
+{file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"},
-{file = "setuptools-67.1.0.tar.gz", hash = "sha256:e261cdf010c11a41cb5cb5f1bf3338a7433832029f559a6a7614bd42a967c300"},
+{file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"},
]
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"]
-testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"]
-testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+cover = ["pytest-cov"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
+enabler = ["pytest-enabler (>=2.2)"]
+test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
+type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"]
[[package]]
name = "six"
@@ -3042,17 +3021,6 @@ files = [
{file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"},
]
-[[package]]
-name = "tomli"
-version = "1.2.3"
-description = "A lil' TOML parser"
-optional = false
-python-versions = ">=3.6"
-files = [
-{file = "tomli-1.2.3-py3-none-any.whl", hash = "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c"},
-{file = "tomli-1.2.3.tar.gz", hash = "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f"},
-]
[[package]]
name = "tomlkit"
version = "0.11.6"
@@ -3198,7 +3166,6 @@ h11 = ">=0.8"
httptools = {version = ">=0.6.3", optional = true, markers = "extra == \"standard\""}
python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""}
-typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""}
uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""}
watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""}
@@ -3252,23 +3219,23 @@ test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)"
[[package]]
name = "virtualenv"
-version = "20.17.1"
+version = "20.28.0"
description = "Virtual Python Environment builder"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.8"
files = [
-{file = "virtualenv-20.17.1-py3-none-any.whl", hash = "sha256:ce3b1684d6e1a20a3e5ed36795a97dfc6af29bc3970ca8dab93e11ac6094b3c4"},
+{file = "virtualenv-20.28.0-py3-none-any.whl", hash = "sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0"},
-{file = "virtualenv-20.17.1.tar.gz", hash = "sha256:f8b927684efc6f1cc206c9db297a570ab9ad0e51c16fa9e45487d36d1905c058"},
+{file = "virtualenv-20.28.0.tar.gz", hash = "sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa"},
]
[package.dependencies]
-distlib = ">=0.3.6,<1"
+distlib = ">=0.3.7,<1"
-filelock = ">=3.4.1,<4"
+filelock = ">=3.12.2,<4"
-platformdirs = ">=2.4,<3"
+platformdirs = ">=3.9.1,<5"
[package.extras]
-docs = ["proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-argparse (>=0.3.2)", "sphinx-rtd-theme (>=1)", "towncrier (>=22.8)"]
+docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
-testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"]
+test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
[[package]]
name = "w3lib"
@@ -3445,5 +3412,5 @@ pgsql = ["psycopg2-binary"]
[metadata]
lock-version = "2.0"
-python-versions = "^3.10"
+python-versions = "^3.12"
-content-hash = "55dbb0d6a3e28964743f87ae1d3a4ead8428cf1051ea97839edb325f39c526ba"
+content-hash = "70a06c4bc96fda6284e61a84db5770d969ea06e78caaa5860966b53768607929"
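
Most of the lock-file churn above removes backport packages (exceptiongroup, tomli, and conditional typing-extensions pins) whose markers only applied to python_version < "3.11"; on Python 3.11+ the same functionality ships in the standard library. A minimal sketch of the stdlib equivalents (illustrative, unrelated to Mealie's code):

    import tomllib  # stdlib TOML parser since 3.11, replacing the tomli backport

    config = tomllib.loads('[tool.example]\nname = "demo"\n')
    print(config["tool"]["example"]["name"])

    # ExceptionGroup and except* are built in since 3.11, replacing the exceptiongroup backport.
    try:
        raise ExceptionGroup("batch", [ValueError("a"), KeyError("b")])
    except* ValueError as eg:
        print("values:", [str(e) for e in eg.exceptions])
    except* KeyError as eg:
        print("keys:", [str(e) for e in eg.exceptions])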

@@ -27,7 +27,7 @@ orjson = "^3.8.0"
psycopg2-binary = { version = "^2.9.1", optional = true }
pydantic = "^2.6.1"
pyhumps = "^3.5.3"
-python = "^3.10"
+python = "^3.12"
python-dateutil = "^2.8.2"
python-dotenv = "^1.0.0"
python-ldap = "^3.3.1"
@@ -105,7 +105,7 @@ pgsql = ["psycopg2-binary"]
follow_imports = "skip"
ignore_missing_imports = true
plugins = "pydantic.mypy"
-python_version = "3.10"
+python_version = "3.12"
strict_optional = true
[tool.ruff]
@@ -135,8 +135,8 @@ exclude = [
"venv",
]
-# Assume Python 3.10.
+# Assume Python 3.12.
-target-version = "py310"
+target-version = "py312"
[tool.ruff.lint]
# Enable Pyflakes `E` and `F` codes by default.
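
Raising ruff's target-version to py312 (and mypy's python_version to 3.12) lets the tooling accept syntax that only exists on 3.12, such as PEP 695 type parameters. A minimal sketch of what that target now permits (illustrative, not code from this PR):

    # PEP 695 (Python 3.12): generics and type aliases without TypeVar boilerplate
    type Pair[T] = tuple[T, T]

    def first[T](items: list[T]) -> T:
        return items[0]

    class Box[T]:
        def __init__(self, value: T) -> None:
            self.value = value

    p: Pair[int] = (1, 2)
    print(first([p]), Box("hello").value)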