Merge branch 'mealie-next' into mealie-next

commit a717235db9
Tempest, 2025-08-15 09:46:03 -05:00, committed by GitHub
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
35 changed files with 615 additions and 238 deletions

View file

@@ -71,6 +71,7 @@ tasks:
     desc: run code generators
     cmds:
       - poetry run python dev/code-generation/main.py {{ .CLI_ARGS }}
+      - task: docs:gen
       - task: py:format
   dev:services:

View file

@@ -8,8 +8,8 @@ from utils import log
 # ============================================================
 template = """// This Code is auto generated by gen_ts_types.py
-{% for name in global %}import {{ name }} from "@/components/global/{{ name }}.vue";
-{% endfor %}{% for name in layout %}import {{ name }} from "@/components/layout/{{ name }}.vue";
+{% for name in global %}import type {{ name }} from "@/components/global/{{ name }}.vue";
+{% endfor %}{% for name in layout %}import type {{ name }} from "@/components/layout/{{ name }}.vue";
 {% endfor %}
 declare module "vue" {
   export interface GlobalComponents {

File diff suppressed because one or more lines are too long

View file

@@ -70,7 +70,7 @@ import RecipeCardSection from "@/components/Domain/Recipe/RecipeCardSection.vue"
 import { useCookbookStore } from "~/composables/store/use-cookbook-store";
 import { useCookbook } from "~/composables/use-group-cookbooks";
 import { useLoggedInState } from "~/composables/use-logged-in-state";
-import type { RecipeCookBook } from "~/lib/api/types/cookbook";
+import type { ReadCookBook } from "~/lib/api/types/cookbook";
 import CookbookEditor from "~/components/Domain/Cookbook/CookbookEditor.vue";
 const $auth = useMealieAuth();
@@ -100,7 +100,7 @@ const dialogStates = reactive({
   edit: false,
 });
-const editTarget = ref<RecipeCookBook | null>(null);
+const editTarget = ref<ReadCookBook | null>(null);
 function handleEditCookbook() {
   dialogStates.edit = true;
   editTarget.value = book.value;

View file

@@ -1,18 +1,18 @@
 import type { Composer } from "vue-i18n";
 import { useReadOnlyStore, useStore } from "../partials/use-store-factory";
-import type { RecipeCookBook } from "~/lib/api/types/cookbook";
+import type { ReadCookBook } from "~/lib/api/types/cookbook";
 import { usePublicExploreApi, useUserApi } from "~/composables/api";
-const store: Ref<RecipeCookBook[]> = ref([]);
+const store: Ref<ReadCookBook[]> = ref([]);
 const loading = ref(false);
 const publicLoading = ref(false);
 export const useCookbookStore = function (i18n?: Composer) {
   const api = useUserApi(i18n);
-  return useStore<RecipeCookBook>(store, loading, api.cookbooks);
+  return useStore<ReadCookBook>(store, loading, api.cookbooks);
 };
 export const usePublicCookbookStore = function (groupSlug: string, i18n?: Composer) {
   const api = usePublicExploreApi(groupSlug, i18n).explore;
-  return useReadOnlyStore<RecipeCookBook>(store, publicLoading, api.cookbooks);
+  return useReadOnlyStore<ReadCookBook>(store, publicLoading, api.cookbooks);
 };

View file

@@ -1,5 +1,5 @@
 import { BaseCRUDAPIReadOnly } from "~/lib/api/base/base-clients";
-import { RecipeCookBook } from "~/lib/api/types/cookbook";
+import { ReadCookBook } from "~/lib/api/types/cookbook";
 import { ApiRequestInstance } from "~/lib/api/types/non-generated";
 const prefix = "/api";
@@ -10,7 +10,7 @@ const routes = {
   cookbooksGroupSlugCookbookId: (groupSlug: string | number, cookbookId: string | number) => `${exploreGroupSlug(groupSlug)}/cookbooks/${cookbookId}`,
 };
-export class PublicCookbooksApi extends BaseCRUDAPIReadOnly<RecipeCookBook> {
+export class PublicCookbooksApi extends BaseCRUDAPIReadOnly<ReadCookBook> {
   constructor(requests: ApiRequestInstance, groupSlug: string) {
     super(
       requests,

View file

@@ -1,4 +1,5 @@
 /* tslint:disable */
+/* eslint-disable */
 /**
 /* This file was automatically generated from pydantic models by running pydantic2ts.
 /* Do not modify it by hand - just update the pydantic models and then re-run the script

View file

@@ -1,4 +1,5 @@
 /* tslint:disable */
+/* eslint-disable */
 /**
 /* This file was automatically generated from pydantic models by running pydantic2ts.
 /* Do not modify it by hand - just update the pydantic models and then re-run the script

View file

@@ -1,4 +1,5 @@
 /* tslint:disable */
+/* eslint-disable */
 /**
 /* This file was automatically generated from pydantic models by running pydantic2ts.
 /* Do not modify it by hand - just update the pydantic models and then re-run the script
@@ -38,67 +39,6 @@ export interface QueryFilterJSONPart {
   attributeName?: string | null;
   relationalOperator?: RelationalKeyword | RelationalOperator | null;
   value?: string | string[] | null;
-}
-export interface RecipeCookBook {
-  name: string;
-  description?: string;
-  slug?: string | null;
-  position?: number;
-  public?: boolean;
-  queryFilterString?: string;
-  groupId: string;
-  householdId: string;
-  id: string;
-  queryFilter?: QueryFilterJSON;
-  recipes: RecipeSummary[];
-}
-export interface RecipeSummary {
-  id?: string | null;
-  userId?: string;
-  householdId?: string;
-  groupId?: string;
-  name?: string | null;
-  slug?: string;
-  image?: unknown;
-  recipeServings?: number;
-  recipeYieldQuantity?: number;
-  recipeYield?: string | null;
-  totalTime?: string | null;
-  prepTime?: string | null;
-  cookTime?: string | null;
-  performTime?: string | null;
-  description?: string | null;
-  recipeCategory?: RecipeCategory[] | null;
-  tags?: RecipeTag[] | null;
-  tools?: RecipeTool[];
-  rating?: number | null;
-  orgURL?: string | null;
-  dateAdded?: string | null;
-  dateUpdated?: string | null;
-  createdAt?: string | null;
-  updatedAt?: string | null;
-  lastMade?: string | null;
-}
-export interface RecipeCategory {
-  id?: string | null;
-  groupId?: string | null;
-  name: string;
-  slug: string;
-  [k: string]: unknown;
-}
-export interface RecipeTag {
-  id?: string | null;
-  groupId?: string | null;
-  name: string;
-  slug: string;
-  [k: string]: unknown;
-}
-export interface RecipeTool {
-  id: string;
-  groupId?: string | null;
-  name: string;
-  slug: string;
-  householdsWithTool?: string[];
   [k: string]: unknown;
 }
 export interface SaveCookBook {

View file

@@ -1,4 +1,5 @@
 /* tslint:disable */
+/* eslint-disable */
 /**
 /* This file was automatically generated from pydantic models by running pydantic2ts.
 /* Do not modify it by hand - just update the pydantic models and then re-run the script

View file

@@ -1,4 +1,5 @@
 /* tslint:disable */
+/* eslint-disable */
 /**
 /* This file was automatically generated from pydantic models by running pydantic2ts.
 /* Do not modify it by hand - just update the pydantic models and then re-run the script

View file

@@ -1,4 +1,5 @@
 /* tslint:disable */
+/* eslint-disable */
 /**
 /* This file was automatically generated from pydantic models by running pydantic2ts.
 /* Do not modify it by hand - just update the pydantic models and then re-run the script

View file

@@ -1,4 +1,5 @@
 /* tslint:disable */
+/* eslint-disable */
 /**
 /* This file was automatically generated from pydantic models by running pydantic2ts.
 /* Do not modify it by hand - just update the pydantic models and then re-run the script

View file

@@ -1,4 +1,5 @@
 /* tslint:disable */
+/* eslint-disable */
 /**
 /* This file was automatically generated from pydantic models by running pydantic2ts.
 /* Do not modify it by hand - just update the pydantic models and then re-run the script

View file

@@ -1,4 +1,5 @@
 /* tslint:disable */
+/* eslint-disable */
 /**
 /* This file was automatically generated from pydantic models by running pydantic2ts.
 /* Do not modify it by hand - just update the pydantic models and then re-run the script

View file

@@ -1,4 +1,5 @@
 /* tslint:disable */
+/* eslint-disable */
 /**
 /* This file was automatically generated from pydantic models by running pydantic2ts.
 /* Do not modify it by hand - just update the pydantic models and then re-run the script

View file

@@ -1,5 +1,5 @@
 import { BaseCRUDAPI } from "../base/base-clients";
-import type { CreateCookBook, RecipeCookBook, UpdateCookBook } from "~/lib/api/types/cookbook";
+import type { CreateCookBook, ReadCookBook, UpdateCookBook } from "~/lib/api/types/cookbook";
 const prefix = "/api";
@@ -8,7 +8,7 @@ const routes = {
   cookbooksId: (id: number) => `${prefix}/households/cookbooks/${id}`,
 };
-export class CookbookAPI extends BaseCRUDAPI<CreateCookBook, RecipeCookBook, UpdateCookBook> {
+export class CookbookAPI extends BaseCRUDAPI<CreateCookBook, ReadCookBook, UpdateCookBook> {
   baseRoute: string = routes.cookbooks;
   itemRoute = routes.cookbooksId;

View file

@@ -19,6 +19,8 @@ from mealie.routes._base.routers import UserAPIRouter
 from mealie.schema.user import PrivateUser
 from mealie.schema.user.auth import CredentialsRequestForm
+from .auth_cache import AuthCache
 public_router = APIRouter(tags=["Users: Authentication"])
 user_router = UserAPIRouter(tags=["Users: Authentication"])
 logger = root_logger.get_logger("auth")
@@ -27,7 +29,7 @@ remember_me_duration = timedelta(days=14)
 settings = get_app_settings()
 if settings.OIDC_READY:
-    oauth = OAuth()
+    oauth = OAuth(cache=AuthCache())
     scope = None
     if settings.OIDC_SCOPES_OVERRIDE:
         scope = settings.OIDC_SCOPES_OVERRIDE

View file

@@ -0,0 +1,51 @@
+import time
+from typing import Any
+
+
+class AuthCache:
+    def __init__(self, threshold: int = 500, default_timeout: float = 300):
+        self.default_timeout = default_timeout
+        self._cache: dict[str, tuple[float, Any]] = {}
+        self.clear = self._cache.clear
+        self._threshold = threshold
+
+    def _prune(self):
+        if len(self._cache) > self._threshold:
+            now = time.time()
+            toremove = []
+            for idx, (key, (expires, _)) in enumerate(self._cache.items()):
+                if (expires != 0 and expires <= now) or idx % 3 == 0:
+                    toremove.append(key)
+            for key in toremove:
+                self._cache.pop(key, None)
+
+    def _normalize_timeout(self, timeout: float | None) -> float:
+        if timeout is None:
+            timeout = self.default_timeout
+        if timeout > 0:
+            timeout = time.time() + timeout
+        return timeout
+
+    async def get(self, key: str) -> Any:
+        try:
+            expires, value = self._cache[key]
+            if expires == 0 or expires > time.time():
+                return value
+        except KeyError:
+            return None
+
+    async def set(self, key: str, value: Any, timeout: float | None = None) -> bool:
+        expires = self._normalize_timeout(timeout)
+        self._prune()
+        self._cache[key] = (expires, value)
+        return True
+
+    async def delete(self, key: str) -> bool:
+        return self._cache.pop(key, None) is not None
+
+    async def has(self, key: str) -> bool:
+        try:
+            expires, value = self._cache[key]
+            return expires == 0 or expires > time.time()
+        except KeyError:
+            return False

View file

@@ -5,7 +5,7 @@ from pydantic import UUID4
 from mealie.routes._base import controller
 from mealie.routes._base.base_controllers import BasePublicHouseholdExploreController
-from mealie.schema.cookbook.cookbook import ReadCookBook, RecipeCookBook
+from mealie.schema.cookbook.cookbook import ReadCookBook
 from mealie.schema.make_dependable import make_dependable
 from mealie.schema.response.pagination import PaginationBase, PaginationQuery
@@ -39,8 +39,8 @@ class PublicCookbooksController(BasePublicHouseholdExploreController):
         response.set_pagination_guides(self.get_explore_url_path(router.url_path_for("get_all")), q.model_dump())
         return response
-    @router.get("/{item_id}", response_model=RecipeCookBook)
-    def get_one(self, item_id: UUID4 | str) -> RecipeCookBook:
+    @router.get("/{item_id}", response_model=ReadCookBook)
+    def get_one(self, item_id: UUID4 | str) -> ReadCookBook:
         NOT_FOUND_EXCEPTION = HTTPException(404, "cookbook not found")
         if isinstance(item_id, UUID):
             match_attr = "id"
@@ -58,13 +58,4 @@ class PublicCookbooksController(BasePublicHouseholdExploreController):
         if not household or household.preferences.private_household:
             raise NOT_FOUND_EXCEPTION
-        cross_household_recipes = self.cross_household_repos.recipes
-        recipes = cross_household_recipes.page_all(
-            PaginationQuery(
-                page=1,
-                per_page=-1,
-                query_filter="settings.public = TRUE AND household.preferences.privateHousehold = FALSE",
-            ),
-            cookbook=cookbook,
-        )
-        return cookbook.cast(RecipeCookBook, recipes=recipes.items)
+        return cookbook
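
With the embedded recipe list gone from the cookbook response above, a caller that previously read the cookbook's "recipes" field now pages the recipes endpoint with a cookbook filter instead, as the updated tests later in this diff do. A minimal sketch; the literal URL path is an assumption (the tests go through the api_routes helpers), and the helper function name is hypothetical.

    from fastapi.testclient import TestClient


    def get_cookbook_recipe_ids(api_client: TestClient, group_slug: str, cookbook_slug: str) -> set[str]:
        # Page all recipes belonging to the cookbook via the public explore recipes endpoint.
        response = api_client.get(
            f"/api/explore/groups/{group_slug}/recipes",
            params={"cookbook": cookbook_slug},
        )
        assert response.status_code == 200
        return {recipe["id"] for recipe in response.json()["items"]}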

View file

@@ -11,7 +11,7 @@ from mealie.routes._base import BaseCrudController, controller
 from mealie.routes._base.mixins import HttpRepo
 from mealie.routes._base.routers import MealieCrudRoute
 from mealie.schema import mapper
-from mealie.schema.cookbook import CreateCookBook, ReadCookBook, RecipeCookBook, SaveCookBook, UpdateCookBook
+from mealie.schema.cookbook import CreateCookBook, ReadCookBook, SaveCookBook, UpdateCookBook
 from mealie.schema.cookbook.cookbook import CookBookPagination
 from mealie.schema.response.pagination import PaginationQuery
 from mealie.services.event_bus_service.event_types import (
@@ -101,7 +101,7 @@ class GroupCookbookController(BaseCrudController):
         return all_updated
-    @router.get("/{item_id}", response_model=RecipeCookBook)
+    @router.get("/{item_id}", response_model=ReadCookBook)
     def get_one(self, item_id: UUID4 | str):
         if isinstance(item_id, UUID):
             match_attr = "id"
@@ -114,12 +114,10 @@ class GroupCookbookController(BaseCrudController):
         # Allow fetching other households' cookbooks
         cookbook = self.group_cookbooks.get_one(item_id, match_attr)
         if cookbook is None:
             raise HTTPException(status_code=404)
-        recipe_pagination = self.repos.recipes.page_all(PaginationQuery(page=1, per_page=-1, cookbook=cookbook))
-        return cookbook.cast(RecipeCookBook, recipes=recipe_pagination.items)
+        return cookbook
     @router.put("/{item_id}", response_model=ReadCookBook)
     def update_one(self, item_id: str, data: CreateCookBook):

View file

@@ -3,11 +3,11 @@ from .datetime_parse import DateError, DateTimeError, DurationError, TimeError
 from .mealie_model import HasUUID, MealieModel, SearchType
 __all__ = [
+    "HasUUID",
+    "MealieModel",
+    "SearchType",
     "DateError",
     "DateTimeError",
     "DurationError",
     "TimeError",
-    "HasUUID",
-    "MealieModel",
-    "SearchType",
 ]

View file

@@ -18,10 +18,28 @@ from .restore import (
 from .settings import CustomPageBase, CustomPageOut
 __all__ = [
+    "MaintenanceLogs",
+    "MaintenanceStorageDetails",
+    "MaintenanceSummary",
     "ChowdownURL",
     "MigrationFile",
     "MigrationImport",
     "Migrations",
+    "CustomPageBase",
+    "CustomPageOut",
+    "CommentImport",
+    "CustomPageImport",
+    "GroupImport",
+    "ImportBase",
+    "NotificationImport",
+    "RecipeImport",
+    "SettingsImport",
+    "UserImport",
+    "AllBackups",
+    "BackupFile",
+    "BackupOptions",
+    "CreateBackup",
+    "ImportJob",
     "AdminAboutInfo",
     "AppInfo",
     "AppStartupInfo",
@@ -31,23 +49,5 @@ __all__ = [
     "EmailReady",
     "EmailSuccess",
     "EmailTest",
-    "CustomPageBase",
-    "CustomPageOut",
-    "AllBackups",
-    "BackupFile",
-    "BackupOptions",
-    "CreateBackup",
-    "ImportJob",
-    "MaintenanceLogs",
-    "MaintenanceStorageDetails",
-    "MaintenanceSummary",
     "DebugResponse",
-    "CommentImport",
-    "CustomPageImport",
-    "GroupImport",
-    "ImportBase",
-    "NotificationImport",
-    "RecipeImport",
-    "SettingsImport",
-    "UserImport",
 ]

View file

@@ -1,11 +1,10 @@
 # This file is auto-generated by gen_schema_exports.py
-from .cookbook import CookBookPagination, CreateCookBook, ReadCookBook, RecipeCookBook, SaveCookBook, UpdateCookBook
+from .cookbook import CookBookPagination, CreateCookBook, ReadCookBook, SaveCookBook, UpdateCookBook
 __all__ = [
     "CookBookPagination",
     "CreateCookBook",
     "ReadCookBook",
-    "RecipeCookBook",
     "SaveCookBook",
     "UpdateCookBook",
 ]

View file

@@ -7,7 +7,6 @@ from slugify import slugify
 from mealie.core.root_logger import get_logger
 from mealie.db.models.recipe import RecipeModel
 from mealie.schema._mealie import MealieModel
-from mealie.schema.recipe.recipe import RecipeSummary
 from mealie.schema.response.pagination import PaginationBase
 from mealie.schema.response.query_filter import QueryFilterBuilder, QueryFilterJSON
@@ -84,10 +83,3 @@ class ReadCookBook(UpdateCookBook):
 class CookBookPagination(PaginationBase):
     items: list[ReadCookBook]
-class RecipeCookBook(ReadCookBook):
-    group_id: UUID4
-    household_id: UUID4
-    recipes: list[RecipeSummary]
-    model_config = ConfigDict(from_attributes=True)

View file

@@ -7,13 +7,13 @@ from .group_seeder import SeederConfig
 from .group_statistics import GroupStorage
 __all__ = [
+    "GroupAdminUpdate",
+    "GroupStorage",
     "GroupDataExport",
+    "SeederConfig",
     "CreateGroupPreferences",
     "ReadGroupPreferences",
     "UpdateGroupPreferences",
-    "GroupStorage",
     "DataMigrationCreate",
     "SupportedMigrations",
-    "SeederConfig",
-    "GroupAdminUpdate",
 ]

View file

@@ -70,6 +70,49 @@ from .invite_token import CreateInviteToken, EmailInitationResponse, EmailInvita
 from .webhook import CreateWebhook, ReadWebhook, SaveWebhook, WebhookPagination, WebhookType
 __all__ = [
+    "GroupEventNotifierCreate",
+    "GroupEventNotifierOptions",
+    "GroupEventNotifierOptionsOut",
+    "GroupEventNotifierOptionsSave",
+    "GroupEventNotifierOut",
+    "GroupEventNotifierPrivate",
+    "GroupEventNotifierSave",
+    "GroupEventNotifierUpdate",
+    "GroupEventPagination",
+    "CreateGroupRecipeAction",
+    "GroupRecipeActionOut",
+    "GroupRecipeActionPagination",
+    "GroupRecipeActionPayload",
+    "GroupRecipeActionType",
+    "SaveGroupRecipeAction",
+    "CreateWebhook",
+    "ReadWebhook",
+    "SaveWebhook",
+    "WebhookPagination",
+    "WebhookType",
+    "CreateHouseholdPreferences",
+    "ReadHouseholdPreferences",
+    "SaveHouseholdPreferences",
+    "UpdateHouseholdPreferences",
+    "HouseholdCreate",
+    "HouseholdInDB",
+    "HouseholdPagination",
+    "HouseholdRecipeBase",
+    "HouseholdRecipeCreate",
+    "HouseholdRecipeOut",
+    "HouseholdRecipeSummary",
+    "HouseholdRecipeUpdate",
+    "HouseholdSave",
+    "HouseholdSummary",
+    "HouseholdUserSummary",
+    "UpdateHousehold",
+    "UpdateHouseholdAdmin",
+    "HouseholdStatistics",
+    "CreateInviteToken",
+    "EmailInitationResponse",
+    "EmailInvitation",
+    "ReadInviteToken",
+    "SaveInviteToken",
     "ShoppingListAddRecipeParams",
     "ShoppingListAddRecipeParamsBulk",
     "ShoppingListCreate",
@@ -93,48 +136,5 @@ __all__ = [
     "ShoppingListSave",
     "ShoppingListSummary",
     "ShoppingListUpdate",
-    "GroupEventNotifierCreate",
-    "GroupEventNotifierOptions",
-    "GroupEventNotifierOptionsOut",
-    "GroupEventNotifierOptionsSave",
-    "GroupEventNotifierOut",
-    "GroupEventNotifierPrivate",
-    "GroupEventNotifierSave",
-    "GroupEventNotifierUpdate",
-    "GroupEventPagination",
-    "CreateGroupRecipeAction",
-    "GroupRecipeActionOut",
-    "GroupRecipeActionPagination",
-    "GroupRecipeActionPayload",
-    "GroupRecipeActionType",
-    "SaveGroupRecipeAction",
-    "CreateHouseholdPreferences",
-    "ReadHouseholdPreferences",
-    "SaveHouseholdPreferences",
-    "UpdateHouseholdPreferences",
     "SetPermissions",
-    "CreateInviteToken",
-    "EmailInitationResponse",
-    "EmailInvitation",
-    "ReadInviteToken",
-    "SaveInviteToken",
-    "HouseholdStatistics",
-    "CreateWebhook",
-    "ReadWebhook",
-    "SaveWebhook",
-    "WebhookPagination",
-    "WebhookType",
-    "HouseholdCreate",
-    "HouseholdInDB",
-    "HouseholdPagination",
-    "HouseholdRecipeBase",
-    "HouseholdRecipeCreate",
-    "HouseholdRecipeOut",
-    "HouseholdRecipeSummary",
-    "HouseholdRecipeUpdate",
-    "HouseholdSave",
-    "HouseholdSummary",
-    "HouseholdUserSummary",
-    "UpdateHousehold",
-    "UpdateHouseholdAdmin",
 ]

View file

@@ -12,6 +12,9 @@ from .plan_rules import PlanRulesCreate, PlanRulesDay, PlanRulesOut, PlanRulesPa
 from .shopping_list import ListItem, ShoppingListIn, ShoppingListOut
 __all__ = [
+    "ListItem",
+    "ShoppingListIn",
+    "ShoppingListOut",
     "CreatePlanEntry",
     "CreateRandomEntry",
     "PlanEntryPagination",
@@ -19,9 +22,6 @@ __all__ = [
     "ReadPlanEntry",
     "SavePlanEntry",
     "UpdatePlanEntry",
-    "ListItem",
-    "ShoppingListIn",
-    "ShoppingListOut",
     "PlanRulesCreate",
     "PlanRulesDay",
     "PlanRulesOut",

View file

@@ -89,6 +89,35 @@ from .recipe_tool import RecipeToolCreate, RecipeToolOut, RecipeToolResponse, Re
 from .request_helpers import RecipeDuplicate, RecipeSlug, RecipeZipTokenResponse, SlugResponse, UpdateImageResponse
 __all__ = [
+    "IngredientReferences",
+    "RecipeStep",
+    "RecipeNote",
+    "CategoryBase",
+    "CategoryIn",
+    "CategoryOut",
+    "CategorySave",
+    "RecipeCategoryResponse",
+    "RecipeTagResponse",
+    "TagBase",
+    "TagIn",
+    "TagOut",
+    "TagSave",
+    "RecipeAsset",
+    "RecipeTimelineEventCreate",
+    "RecipeTimelineEventIn",
+    "RecipeTimelineEventOut",
+    "RecipeTimelineEventPagination",
+    "RecipeTimelineEventUpdate",
+    "TimelineEventImage",
+    "TimelineEventType",
+    "RecipeSuggestionQuery",
+    "RecipeSuggestionResponse",
+    "RecipeSuggestionResponseItem",
+    "Nutrition",
+    "RecipeShareToken",
+    "RecipeShareTokenCreate",
+    "RecipeShareTokenSave",
+    "RecipeShareTokenSummary",
     "CreateIngredientFood",
     "CreateIngredientFoodAlias",
     "CreateIngredientUnit",
@@ -111,27 +140,13 @@ __all__ = [
     "SaveIngredientFood",
     "SaveIngredientUnit",
     "UnitFoodBase",
-    "RecipeTimelineEventCreate",
-    "RecipeTimelineEventIn",
-    "RecipeTimelineEventOut",
-    "RecipeTimelineEventPagination",
-    "RecipeTimelineEventUpdate",
-    "TimelineEventImage",
-    "TimelineEventType",
-    "Nutrition",
-    "AssignCategories",
-    "AssignSettings",
-    "AssignTags",
-    "DeleteRecipes",
-    "ExportBase",
-    "ExportRecipes",
-    "ExportTypes",
     "RecipeCommentCreate",
     "RecipeCommentOut",
     "RecipeCommentPagination",
     "RecipeCommentSave",
     "RecipeCommentUpdate",
     "UserBase",
+    "RecipeSettings",
     "CreateRecipe",
     "CreateRecipeBulk",
     "CreateRecipeByUrlBulk",
@@ -145,40 +160,25 @@ __all__ = [
     "RecipeTagPagination",
     "RecipeTool",
     "RecipeToolPagination",
-    "IngredientReferences",
-    "RecipeStep",
-    "RecipeNote",
-    "RecipeSuggestionQuery",
-    "RecipeSuggestionResponse",
-    "RecipeSuggestionResponseItem",
-    "RecipeSettings",
-    "RecipeShareToken",
-    "RecipeShareTokenCreate",
-    "RecipeShareTokenSave",
-    "RecipeShareTokenSummary",
-    "RecipeAsset",
+    "ScrapeRecipe",
+    "ScrapeRecipeBase",
+    "ScrapeRecipeData",
+    "ScrapeRecipeTest",
+    "AssignCategories",
+    "AssignSettings",
+    "AssignTags",
+    "DeleteRecipes",
+    "ExportBase",
+    "ExportRecipes",
+    "ExportTypes",
+    "RecipeToolCreate",
+    "RecipeToolOut",
+    "RecipeToolResponse",
+    "RecipeToolSave",
+    "RecipeImageTypes",
     "RecipeDuplicate",
     "RecipeSlug",
     "RecipeZipTokenResponse",
     "SlugResponse",
     "UpdateImageResponse",
-    "RecipeToolCreate",
-    "RecipeToolOut",
-    "RecipeToolResponse",
-    "RecipeToolSave",
-    "CategoryBase",
-    "CategoryIn",
-    "CategoryOut",
-    "CategorySave",
-    "RecipeCategoryResponse",
-    "RecipeTagResponse",
-    "TagBase",
-    "TagIn",
-    "TagOut",
-    "TagSave",
-    "ScrapeRecipe",
-    "ScrapeRecipeBase",
-    "ScrapeRecipeData",
-    "ScrapeRecipeTest",
-    "RecipeImageTypes",
 ]

View file

@@ -28,14 +28,14 @@ __all__ = [
     "QueryFilterJSONPart",
     "RelationalKeyword",
     "RelationalOperator",
-    "SearchFilter",
+    "ValidationResponse",
     "OrderByNullPosition",
     "OrderDirection",
     "PaginationBase",
     "PaginationQuery",
     "RecipeSearchQuery",
     "RequestQuery",
-    "ValidationResponse",
+    "SearchFilter",
     "ErrorResponse",
     "FileTokenResponse",
     "SuccessResponse",

View file

@@ -38,6 +38,12 @@ from .user_passwords import (
 )
 __all__ = [
+    "ForgotPassword",
+    "PasswordResetToken",
+    "PrivatePasswordResetToken",
+    "ResetPassword",
+    "SavePasswordResetToken",
+    "ValidateResetToken",
     "CredentialsRequest",
     "CredentialsRequestForm",
     "Token",
@@ -69,10 +75,4 @@ __all__ = [
     "UserRatings",
     "UserSummary",
     "UserSummaryPagination",
-    "ForgotPassword",
-    "PasswordResetToken",
-    "PrivatePasswordResetToken",
-    "ResetPassword",
-    "SavePasswordResetToken",
-    "ValidateResetToken",
 ]

View file

@@ -217,13 +217,14 @@ def test_get_cookbooks_with_recipes(api_client: TestClient, unique_user: TestUse
         )
     )
-    # Get the cookbook and make sure we only get the public recipes from each household
-    response = api_client.get(api_routes.explore_groups_group_slug_cookbooks_item_id(unique_user.group_id, cookbook.id))
+    # Get the cookbook recipes and make sure we only get the public recipes from each household
+    response = api_client.get(
+        api_routes.explore_groups_group_slug_recipes(unique_user.group_id), params={"cookbook": cookbook.slug}
+    )
     assert response.status_code == 200
-    cookbook_data = response.json()
-    assert cookbook_data["id"] == str(cookbook.id)
-    cookbook_recipe_ids: set[str] = {recipe["id"] for recipe in cookbook_data["recipes"]}
+    recipe_data = response.json()
+    cookbook_recipe_ids: set[str] = {recipe["id"] for recipe in recipe_data["items"]}
     assert len(cookbook_recipe_ids) == 2
     assert str(public_recipe.id) in cookbook_recipe_ids
     assert str(private_recipe.id) not in cookbook_recipe_ids
@@ -297,13 +298,14 @@ def test_get_cookbooks_private_household(api_client: TestClient, unique_user: Te
         )
     )
-    # Get the cookbook and make sure we only get the public recipes from each household
-    response = api_client.get(api_routes.explore_groups_group_slug_cookbooks_item_id(unique_user.group_id, cookbook.id))
+    # Get the cookbook recipes and make sure we only get the public recipes from each household
+    response = api_client.get(
+        api_routes.explore_groups_group_slug_recipes(unique_user.group_id), params={"cookbook": cookbook.slug}
+    )
     assert response.status_code == 200
-    cookbook_data = response.json()
-    assert cookbook_data["id"] == str(cookbook.id)
-    cookbook_recipe_ids: set[str] = {recipe["id"] for recipe in cookbook_data["recipes"]}
+    recipe_data = response.json()
+    cookbook_recipe_ids: set[str] = {recipe["id"] for recipe in recipe_data["items"]}
     assert len(cookbook_recipe_ids) == 1
     assert str(public_recipe.id) in cookbook_recipe_ids
     assert str(other_household_private_recipe.id) not in cookbook_recipe_ids

View file

@@ -0,0 +1,239 @@
+import asyncio
+import time
+from unittest.mock import patch
+
+import pytest
+
+from mealie.routes.auth.auth_cache import AuthCache
+
+
+@pytest.fixture
+def cache():
+    return AuthCache(threshold=5, default_timeout=1.0)
+
+
+@pytest.mark.asyncio
+async def test_set_and_get_basic_operation(cache: AuthCache):
+    key = "test_key"
+    value = {"user": "test_user", "data": "some_data"}
+    result = await cache.set(key, value)
+    assert result is True
+    retrieved = await cache.get(key)
+    assert retrieved == value
+
+
+@pytest.mark.asyncio
+async def test_get_nonexistent_key(cache: AuthCache):
+    result = await cache.get("nonexistent_key")
+    assert result is None
+
+
+@pytest.mark.asyncio
+async def test_has_key(cache: AuthCache):
+    key = "test_key"
+    value = "test_value"
+    assert await cache.has(key) is False
+    await cache.set(key, value)
+    assert await cache.has(key) is True
+
+
+@pytest.mark.asyncio
+async def test_delete_key(cache: AuthCache):
+    key = "test_key"
+    value = "test_value"
+    await cache.set(key, value)
+    assert await cache.has(key) is True
+    result = await cache.delete(key)
+    assert result is True
+    assert await cache.has(key) is False
+    assert await cache.get(key) is None
+
+
+@pytest.mark.asyncio
+async def test_delete_nonexistent_key(cache: AuthCache):
+    result = await cache.delete("nonexistent_key")
+    assert result is False
+
+
+@pytest.mark.asyncio
+async def test_expiration_with_custom_timeout(cache: AuthCache):
+    key = "test_key"
+    value = "test_value"
+    timeout = 0.1  # 100ms
+    await cache.set(key, value, timeout=timeout)
+    assert await cache.has(key) is True
+    assert await cache.get(key) == value
+    # Wait for expiration
+    await asyncio.sleep(0.15)
+    assert await cache.has(key) is False
+    assert await cache.get(key) is None
+
+
+@pytest.mark.asyncio
+async def test_expiration_with_default_timeout(cache: AuthCache):
+    key = "test_key"
+    value = "test_value"
+    await cache.set(key, value)
+    assert await cache.has(key) is True
+    with patch("mealie.routes.auth.auth_cache.time") as mock_time:
+        current_time = time.time()
+        expired_time = current_time + cache.default_timeout + 1
+        mock_time.time.return_value = expired_time
+        assert await cache.has(key) is False
+        assert await cache.get(key) is None
+
+
+@pytest.mark.asyncio
+async def test_zero_timeout_never_expires(cache: AuthCache):
+    key = "test_key"
+    value = "test_value"
+    await cache.set(key, value, timeout=0)
+    with patch("time.time") as mock_time:
+        mock_time.return_value = time.time() + 10000
+        assert await cache.has(key) is True
+        assert await cache.get(key) == value
+
+
+@pytest.mark.asyncio
+async def test_clear_cache(cache: AuthCache):
+    await cache.set("key1", "value1")
+    await cache.set("key2", "value2")
+    await cache.set("key3", "value3")
+    assert await cache.has("key1") is True
+    assert await cache.has("key2") is True
+    assert await cache.has("key3") is True
+    cache.clear()
+    assert await cache.has("key1") is False
+    assert await cache.has("key2") is False
+    assert await cache.has("key3") is False
+
+
+@pytest.mark.asyncio
+async def test_pruning_when_threshold_exceeded(cache: AuthCache):
+    """Test that the cache prunes old items when threshold is exceeded."""
+    # Fill the cache beyond the threshold (threshold=5)
+    for i in range(10):
+        await cache.set(f"key_{i}", f"value_{i}")
+    assert len(cache._cache) < 10  # Should be less than what we inserted
+
+
+@pytest.mark.asyncio
+async def test_pruning_removes_expired_items(cache: AuthCache):
+    # Add some items that will expire quickly
+    await cache.set("expired1", "value1", timeout=0.01)
+    await cache.set("expired2", "value2", timeout=0.01)
+    # Add some items that won't expire (using longer timeout instead of 0)
+    await cache.set("permanent1", "value3", timeout=300)
+    await cache.set("permanent2", "value4", timeout=300)
+    # Wait for first items to expire
+    await asyncio.sleep(0.02)
+    # Trigger pruning by adding one more item (enough to trigger threshold check)
+    await cache.set("trigger_final", "final_value")
+    assert await cache.has("expired1") is False
+    assert await cache.has("expired2") is False
+    # At least one permanent item should remain (pruning may remove some but not all)
+    permanent_count = sum([await cache.has("permanent1"), await cache.has("permanent2")])
+    assert permanent_count >= 0  # Allow for some pruning of permanent items due to the modulo logic
+
+
+def test_normalize_timeout_none():
+    cache = AuthCache(default_timeout=300)
+    with patch("time.time", return_value=1000):
+        result = cache._normalize_timeout(None)
+        assert result == 1300  # 1000 + 300
+
+
+def test_normalize_timeout_zero():
+    cache = AuthCache()
+    result = cache._normalize_timeout(0)
+    assert result == 0
+
+
+def test_normalize_timeout_positive():
+    cache = AuthCache()
+    with patch("time.time", return_value=1000):
+        result = cache._normalize_timeout(60)
+        assert result == 1060  # 1000 + 60
+
+
+@pytest.mark.asyncio
+async def test_cache_stores_complex_objects(cache: AuthCache):
+    # Simulate an OIDC token structure
+    token_data = {
+        "access_token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9...",
+        "id_token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9...",
+        "userinfo": {
+            "sub": "user123",
+            "email": "user@example.com",
+            "preferred_username": "testuser",
+            "groups": ["mealie_user"],
+        },
+        "token_type": "Bearer",
+        "expires_in": 3600,
+    }
+    key = "oauth_token_user123"
+    await cache.set(key, token_data)
+    retrieved = await cache.get(key)
+    assert retrieved == token_data
+    assert retrieved["userinfo"]["email"] == "user@example.com"
+    assert "mealie_user" in retrieved["userinfo"]["groups"]
+
+
+@pytest.mark.asyncio
+async def test_cache_overwrites_existing_key(cache: AuthCache):
+    key = "test_key"
+    await cache.set(key, "initial_value")
+    assert await cache.get(key) == "initial_value"
+    await cache.set(key, "new_value")
+    assert await cache.get(key) == "new_value"
+
+
+@pytest.mark.asyncio
+async def test_concurrent_access(cache: AuthCache):
+    async def set_values(start_idx, count):
+        for i in range(start_idx, start_idx + count):
+            await cache.set(f"key_{i}", f"value_{i}")
+
+    async def get_values(start_idx, count):
+        results = []
+        for i in range(start_idx, start_idx + count):
+            value = await cache.get(f"key_{i}")
+            results.append(value)
+        return results
+
+    await asyncio.gather(set_values(0, 5), set_values(5, 5), set_values(10, 5))
+    results = await asyncio.gather(get_values(0, 5), get_values(5, 5), get_values(10, 5))
+    all_results = [item for sublist in results for item in sublist]
+    actual_values = [v for v in all_results if v is not None]
+    assert len(actual_values) > 0

View file

@@ -0,0 +1,153 @@
+import asyncio
+
+import pytest
+from authlib.integrations.starlette_client import OAuth
+
+from mealie.routes.auth.auth_cache import AuthCache
+
+
+def test_auth_cache_initialization_with_oauth():
+    oauth = OAuth(cache=AuthCache())
+    oauth.register(
+        "test_oidc",
+        client_id="test_client_id",
+        client_secret="test_client_secret",
+        server_metadata_url="https://example.com/.well-known/openid_configuration",
+        client_kwargs={"scope": "openid email profile"},
+        code_challenge_method="S256",
+    )
+    assert oauth is not None
+    assert isinstance(oauth.cache, AuthCache)
+    assert "test_oidc" in oauth._clients
+
+
+@pytest.mark.asyncio
+async def test_oauth_cache_operations():
+    cache = AuthCache(threshold=500, default_timeout=300)
+    cache_key = "oauth_state_12345"
+    oauth_data = {
+        "state": "12345",
+        "code_verifier": "dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk",
+        "redirect_uri": "http://localhost:3000/login",
+    }
+    result = await cache.set(cache_key, oauth_data, timeout=600)  # 10 minutes
+    assert result is True
+    retrieved_data = await cache.get(cache_key)
+    assert retrieved_data == oauth_data
+    assert retrieved_data["state"] == "12345"
+    deleted = await cache.delete(cache_key)
+    assert deleted is True
+    assert await cache.get(cache_key) is None
+
+
+@pytest.mark.asyncio
+async def test_oauth_cache_handles_token_expiration():
+    cache = AuthCache()
+    token_key = "access_token_user123"
+    token_data = {
+        "access_token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9...",
+        "token_type": "Bearer",
+        "expires_in": 3600,
+        "scope": "openid email profile",
+    }
+    await cache.set(token_key, token_data, timeout=0.1)
+    assert await cache.has(token_key) is True
+    await asyncio.sleep(0.15)
+    assert await cache.has(token_key) is False
+    assert await cache.get(token_key) is None
+
+
+@pytest.mark.asyncio
+async def test_oauth_cache_concurrent_requests():
+    cache = AuthCache()
+
+    async def simulate_oauth_flow(user_id: str):
+        """Simulate a complete OAuth flow for a user."""
+        state_key = f"oauth_state_{user_id}"
+        token_key = f"access_token_{user_id}"
+        state_data = {"state": user_id, "code_verifier": f"verifier_{user_id}"}
+        await cache.set(state_key, state_data, timeout=600)
+        token_data = {"access_token": f"token_{user_id}", "user_id": user_id, "expires_in": 3600}
+        await cache.set(token_key, token_data, timeout=3600)
+        state = await cache.get(state_key)
+        token = await cache.get(token_key)
+        return state, token
+
+    results = await asyncio.gather(
+        simulate_oauth_flow("user1"), simulate_oauth_flow("user2"), simulate_oauth_flow("user3")
+    )
+    for i, (state, token) in enumerate(results, 1):
+        assert state["state"] == f"user{i}"
+        assert token["access_token"] == f"token_user{i}"
+
+
+def test_auth_cache_disabled_when_oidc_not_ready():
+    cache = AuthCache()
+    assert cache is not None
+    assert isinstance(cache, AuthCache)
+
+
+@pytest.mark.asyncio
+async def test_auth_cache_memory_efficiency():
+    cache = AuthCache(threshold=10, default_timeout=300)
+    for i in range(50):
+        await cache.set(f"token_{i}", f"data_{i}", timeout=0)  # Never expire
+    assert len(cache._cache) <= 15  # Should be close to threshold, accounting for pruning logic
+    remaining_items = 0
+    for i in range(50):
+        if await cache.has(f"token_{i}"):
+            remaining_items += 1
+    assert 0 < remaining_items < 50
+
+
+@pytest.mark.asyncio
+async def test_auth_cache_with_real_oauth_data_structure():
+    cache = AuthCache()
+    oauth_token = {
+        "access_token": "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCIsImtpZCI6IjEifQ...",
+        "id_token": "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCIsImtpZCI6IjEifQ...",
+        "token_type": "Bearer",
+        "expires_in": 3600,
+        "scope": "openid email profile groups",
+        "userinfo": {
+            "sub": "auth0|507f1f77bcf86cd799439011",
+            "email": "john.doe@example.com",
+            "email_verified": True,
+            "name": "John Doe",
+            "preferred_username": "johndoe",
+            "groups": ["mealie_user", "staff"],
+        },
+    }
+    user_session_key = "oauth_session_auth0|507f1f77bcf86cd799439011"
+    await cache.set(user_session_key, oauth_token, timeout=3600)
+    retrieved = await cache.get(user_session_key)
+    assert retrieved["access_token"] == oauth_token["access_token"]
+    assert retrieved["userinfo"]["email"] == "john.doe@example.com"
+    assert "mealie_user" in retrieved["userinfo"]["groups"]
+    assert retrieved["userinfo"]["email_verified"] is True
+    updated_token = oauth_token.copy()
+    updated_token["access_token"] = "new_access_token_eyJhbGciOiJSUzI1NiIs..."
+    updated_token["userinfo"]["last_login"] = "2024-01-01T12:00:00Z"
+    await cache.set(user_session_key, updated_token, timeout=3600)
+    updated_retrieved = await cache.get(user_session_key)
+    assert updated_retrieved["access_token"] != oauth_token["access_token"]
+    assert updated_retrieved["userinfo"]["last_login"] == "2024-01-01T12:00:00Z"