commit bb96076b22
Author: Andrew Brock (committed by GitHub)
Date:   2025-08-20 13:34:32 -04:00
7 changed files with 30 additions and 41 deletions

Changed file: migration base class (_migration_base, BaseMigrator)

@@ -1,6 +1,7 @@
 import contextlib
 from pathlib import Path
 
+from PIL import UnidentifiedImageError
 from pydantic import UUID4
 
 from mealie.core import root_logger
@@ -24,6 +25,7 @@ from mealie.services.scraper import cleaner
 from .._base_service import BaseService
 from .utils.database_helpers import DatabaseMigrationHelpers
 from .utils.migration_alias import MigrationAlias
+from .utils.migration_helpers import import_image
 
 
 class BaseMigrator(BaseService):
@@ -269,3 +271,9 @@ class BaseMigrator(BaseService):
         recipe = cleaner.clean(recipe_dict, self.translator, url=recipe_dict.get("org_url", None))
 
         return recipe
+
+    def import_image(self, slug: str, src: str | Path, recipe_id: UUID4):
+        try:
+            import_image(src, recipe_id)
+        except UnidentifiedImageError as e:
+            self.logger.error(f"Failed to import image for {slug}: {e}")

Changed file: Chowdown migrator (ChowdownMigrator)

@@ -4,7 +4,7 @@ from pathlib import Path
 
 from ._migration_base import BaseMigrator
 from .utils.migration_alias import MigrationAlias
-from .utils.migration_helpers import MigrationReaders, import_image, split_by_comma
+from .utils.migration_helpers import MigrationReaders, split_by_comma
 
 
 class ChowdownMigrator(BaseMigrator):
@@ -64,4 +64,4 @@ class ChowdownMigrator(BaseMigrator):
                 except StopIteration:
                     continue
                 if cd_image:
-                    import_image(cd_image, recipe_id)
+                    self.import_image(slug, cd_image, recipe_id)

Changed file: Nextcloud migrator (NextcloudMigrator)

@@ -9,13 +9,7 @@ from mealie.schema.reports.reports import ReportEntryCreate
 
 from ._migration_base import BaseMigrator
 from .utils.migration_alias import MigrationAlias
-from .utils.migration_helpers import (
-    MigrationReaders,
-    glob_walker,
-    import_image,
-    parse_iso8601_duration,
-    split_by_comma,
-)
+from .utils.migration_helpers import MigrationReaders, glob_walker, parse_iso8601_duration, split_by_comma
 
 
 @dataclass
@@ -103,4 +97,4 @@ class NextcloudMigrator(BaseMigrator):
             if status:
                 nc_dir = nextcloud_dirs[slug]
                 if nc_dir.image:
-                    import_image(nc_dir.image, recipe_id)
+                    self.import_image(slug, nc_dir.image, recipe_id)

Changed file: Paprika migrator (PaprikaMigrator)

@@ -7,13 +7,10 @@ import zipfile
 from gzip import GzipFile
 from pathlib import Path
 
-from slugify import slugify
-
 from mealie.schema.recipe import RecipeNote
 
 from ._migration_base import BaseMigrator
 from .utils.migration_alias import MigrationAlias
-from .utils.migration_helpers import import_image
 
 
 def paprika_recipes(file: Path):
@@ -67,32 +64,26 @@ class PaprikaMigrator(BaseMigrator):
     ]
 
     def _migrate(self) -> None:
-        recipe_image_urls = {}
-
-        recipes = []
-        for recipe in paprika_recipes(self.archive):
-            if "name" not in recipe:
+        recipes = [r for r in paprika_recipes(self.archive) if "name" in r]
+        recipe_models = [self.clean_recipe_dictionary(r) for r in recipes]
+        results = self.import_recipes_to_database(recipe_models)
+
+        for (slug, recipe_id, status), recipe in zip(results, recipes, strict=True):
+            if not status:
                 continue
 
-            recipe_model = self.clean_recipe_dictionary(recipe)
-
-            if "photo_data" in recipe:
-                recipe_image_urls[slugify(recipe["name"])] = recipe["photo_data"]
-
-            recipes.append(recipe_model)
-
-        results = self.import_recipes_to_database(recipes)
-
-        for slug, recipe_id, status in results:
-            if not status:
+            image_data = recipe.get("photo_data")
+            if image_data is None:
+                self.logger.info(f"Recipe '{recipe['name']}' has no image")
                 continue
 
             try:
                 # Images are stored as base64 encoded strings, so we need to decode them before importing.
-                image = io.BytesIO(base64.b64decode(recipe_image_urls[slug]))
+                image = io.BytesIO(base64.b64decode(image_data))
 
                 with tempfile.NamedTemporaryFile(suffix=".jpeg") as temp_file:
                     temp_file.write(image.read())
+                    temp_file.flush()
                     path = Path(temp_file.name)
-                    import_image(path, recipe_id)
+                    self.import_image(slug, path, recipe_id)
             except Exception as e:
-                self.logger.error(f"Failed to download image for {slug}: {e}")
+                self.logger.error(f"Failed to import image for {slug}: {e}")

Changed file: Recipe Keeper migrator (RecipeKeeperMigrator)

@@ -8,7 +8,7 @@ from mealie.services.scraper import cleaner
 
 from ._migration_base import BaseMigrator
 from .utils.migration_alias import MigrationAlias
-from .utils.migration_helpers import import_image, parse_iso8601_duration
+from .utils.migration_helpers import parse_iso8601_duration
 
 
 def clean_instructions(instructions: list[str]) -> list[str]:
@@ -98,7 +98,7 @@ class RecipeKeeperMigrator(BaseMigrator):
         recipes = [self.clean_recipe_dictionary(x) for x in recipes_as_dicts]
         results = self.import_recipes_to_database(recipes)
 
-        for (_, recipe_id, status), recipe in zip(results, recipes, strict=False):
+        for (slug, recipe_id, status), recipe in zip(results, recipes, strict=False):
             if status:
                 try:
                     if not recipe or not recipe.image:
@@ -107,4 +107,4 @@ class RecipeKeeperMigrator(BaseMigrator):
                 except StopIteration:
                     continue
 
-                import_image(recipe.image, recipe_id)
+                self.import_image(slug, recipe.image, recipe_id)
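
Both loops above pair database results with the parsed recipes via zip, but with different strictness: the Paprika migrator uses strict=True so a length mismatch fails loudly, while the Recipe Keeper migrator keeps strict=False and would silently stop at the shorter sequence. A standalone sketch with illustrative data, not from this commit:

# Standalone sketch: zip(strict=True) raises on mismatched lengths,
# zip(strict=False) silently drops the unpaired items (Python 3.10+).
results = [("pasta", "id-1", True), ("soup", "id-2", True)]
recipes = [{"name": "Pasta"}]  # one recipe short

print(list(zip(results, recipes, strict=False)))  # one pair; the mismatch goes unnoticed
try:
    list(zip(results, recipes, strict=True))
except ValueError as e:
    print(f"mismatch detected: {e}")  # zip() argument 2 is shorter than argument 1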

Changed file: Tandoor migrator (TandoorMigrator)

@@ -10,7 +10,6 @@ from mealie.schema.reports.reports import ReportEntryCreate
 
 from ._migration_base import BaseMigrator
 from .utils.migration_alias import MigrationAlias
-from .utils.migration_helpers import import_image
 
 
 def _build_ingredient_from_ingredient_data(ingredient_data: dict[str, Any], title: str | None = None) -> dict[str, Any]:
@@ -150,4 +149,4 @@ class TandoorMigrator(BaseMigrator):
                 except StopIteration:
                     continue
 
-                import_image(r.image, recipe_id)
+                self.import_image(slug, r.image, recipe_id)

Changed file: migration helpers (utils/migration_helpers)

@@ -104,6 +104,7 @@ def import_image(src: str | Path, recipe_id: UUID4):
     """Read the successful migrations attribute and for each import the image
     appropriately into the image directory. Minification is done in mass
     after the migration occurs.
+    May raise an UnidentifiedImageError if the file is not a recognised format.
     """
 
     if isinstance(src, str):
@@ -113,11 +114,7 @@ def import_image(src: str | Path, recipe_id: UUID4):
         return
 
     data_service = RecipeDataService(recipe_id=recipe_id)
-
-    try:
-        data_service.write_image(src, src.suffix)
-    except UnidentifiedImageError:
-        return
+    data_service.write_image(src, src.suffix)
 
 
 async def scrape_image(image_url: str, recipe_id: UUID4):
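
With the try/except removed here, UnidentifiedImageError now propagates out of the import_image helper, and any caller that does not go through the new BaseMigrator.import_image wrapper must handle it itself. A minimal sketch of such a caller; the absolute module path and the import_image_or_log name are assumptions for illustration, not part of this commit:

# Minimal sketch: mirror the error handling that BaseMigrator.import_image provides.
import logging
from pathlib import Path
from uuid import UUID

from PIL import UnidentifiedImageError

# Assumed absolute module path, based on the relative imports shown above.
from mealie.services.migrations.utils.migration_helpers import import_image

logger = logging.getLogger(__name__)


def import_image_or_log(slug: str, src: Path, recipe_id: UUID) -> None:
    """Hypothetical helper: log unreadable images instead of letting them abort the caller."""
    try:
        import_image(src, recipe_id)
    except UnidentifiedImageError as e:
        logger.error(f"Failed to import image for {slug}: {e}")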