fix import errors on no group

hay-kot 2021-03-21 16:40:16 -08:00
commit 2852ca6f0a
6 changed files with 153 additions and 143 deletions


@@ -23,42 +23,15 @@
     <v-divider></v-divider>
     <v-card-text>
-      <v-row>
-        <v-col>
-          <ImportOptions @update-options="updateOptions" class="mt-5" />
-        </v-col>
-        <!-- <v-col>
-          <v-tooltip top>
-            <template v-slot:activator="{ on, attrs }">
-              <span v-on="on" v-bind="attrs">
-                <v-checkbox
-                  class="mb-n4 mt-1"
-                  dense
-                  label="Force"
-                  v-model="forceImport"
-                ></v-checkbox>
-              </span>
-            </template>
-            <span>Force update existing recipes</span>
-          </v-tooltip>
-          <v-tooltip top>
-            <template v-slot:activator="{ on, attrs }">
-              <span v-on="on" v-bind="attrs">
-                <v-checkbox
-                  class="mb-n4 mt-1"
-                  dense
-                  label="Rebase"
-                  v-model="rebaseImport"
-                ></v-checkbox>
-              </span>
-            </template>
-            <span
-              >Removes all recipes, and then imports recipes from the
-              backup</span
-            >
-          </v-tooltip>
-        </v-col> -->
-      </v-row>
+      <ImportOptions @update-options="updateOptions" class="mt-5 mb-2" />
+      <v-divider></v-divider>
+      <v-checkbox
+        dense
+        label="Remove existing entries matching imported entries"
+        v-model="forceImport"
+      ></v-checkbox>
     </v-card-text>
     <v-divider></v-divider>


@@ -18,6 +18,7 @@ app = FastAPI(
     redoc_url=redoc_url,
 )
 
+
 def start_scheduler():
     import mealie.services.scheduler.scheduled_jobs


@@ -57,7 +57,12 @@ class Group(SqlAlchemyBase, BaseMixins):
 
     @staticmethod
     def get_ref(session: Session, name: str):
-        return session.query(Group).filter(Group.name == name).one()
+        item = session.query(Group).filter(Group.name == name).one()
+
+        if item:
+            return item
+        else:
+            return session.query(Group).filter(Group.id == 1).one()
 
     @staticmethod
     def create_if_not_exist(session, name: str = DEFAULT_GROUP):


@@ -34,14 +34,14 @@ class User(SqlAlchemyBase, BaseMixins):
         group = group if group else DEFAULT_GROUP
         self.full_name = full_name
         self.email = email
-        self.group = Group.create_if_not_exist(session, group)
+        self.group = Group.get_ref(session, group)
         self.admin = admin
         self.password = password
 
     def update(self, full_name, email, group, admin, session=None, id=None, password=None):
         self.full_name = full_name
         self.email = email
-        self.group = Group.create_if_not_exist(session, group)
+        self.group = Group.get_ref(session, group)
         self.admin = admin
 
         if password:


@@ -10,7 +10,7 @@ class ImportBase(BaseModel):
 
 
 class RecipeImport(ImportBase):
-    slug: str
+    slug: Optional[str]
 
 
 class ThemeImport(ImportBase):
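Making slug optional matters because the refactored import path builds failure reports without a slug: import_model constructs the return model with only name, status, and exception, so a RecipeImport for a file that could not be imported no longer fails validation. Two illustrative constructions (the values are made up):

    # Both are valid once slug is Optional[str]; the second used to fail
    # validation because slug was required.
    ok = RecipeImport(name="pasta", slug="pasta", status=True)
    failed = RecipeImport(name="pasta", status=False, exception="entry with matching slug exists")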


@ -1,8 +1,9 @@
import json import json
import shutil import shutil
import zipfile import zipfile
from logging import exception
from pathlib import Path from pathlib import Path
from typing import List from typing import Callable, List
from fastapi.logger import logger from fastapi.logger import logger
from mealie.core.config import BACKUP_DIR, IMG_DIR, TEMP_DIR from mealie.core.config import BACKUP_DIR, IMG_DIR, TEMP_DIR
@ -11,6 +12,8 @@ from mealie.schema.recipe import Recipe
from mealie.schema.restore import GroupImport, RecipeImport, SettingsImport, ThemeImport, UserImport from mealie.schema.restore import GroupImport, RecipeImport, SettingsImport, ThemeImport, UserImport
from mealie.schema.theme import SiteTheme from mealie.schema.theme import SiteTheme
from mealie.schema.user import UpdateGroup, UserInDB from mealie.schema.user import UpdateGroup, UserInDB
from pydantic.main import BaseModel
from schema.settings import SiteSettings
from sqlalchemy.orm.session import Session from sqlalchemy.orm.session import Session
@@ -50,35 +53,24 @@ class ImportDatabase:
         imports = []
         successful_imports = []
 
-        def read_recipe_file(file_path: Path):
-            with open(file_path, "r") as f:
-                try:
-                    recipe_dict = json.loads(f.read())
-                    recipe_dict = ImportDatabase._recipe_migration(recipe_dict)
-                    return Recipe(**recipe_dict)
-                except:
-                    import_status = RecipeImport(name=file_path.stem, slug=file_path.stem, status=False)
-                    imports.append(import_status)
-
-        recipes = [read_recipe_file(r) for r in recipe_dir.glob("*.json")]
+        recipes = ImportDatabase.read_models_file(
+            file_path=recipe_dir, model=Recipe, single_file=False, migrate=ImportDatabase._recipe_migration
+        )
 
         for recipe in recipes:
-            try:
-                db.recipes.create(self.session, recipe.dict())
-                import_status = RecipeImport(name=recipe.name, slug=recipe.slug, status=True)
-                successful_imports.append(recipe.slug)
-                logger.info(f"Imported: {recipe.slug}")
-            except Exception as inst:
-                self.session.rollback()
-                logger.error(inst)
-                logger.info(f"Failed Import: {recipe.slug}")
-                import_status = RecipeImport(
-                    name=recipe.name,
-                    slug=recipe.slug,
-                    status=False,
-                    exception=str(inst),
-                )
+            recipe: Recipe
+
+            import_status = self.import_model(
+                db_table=db.recipes,
+                model=recipe,
+                return_model=RecipeImport,
+                name_attr="name",
+                search_key="slug",
+                slug=recipe.slug,
+            )
+
+            if import_status.status:
+                successful_imports.append(recipe.slug)
 
             imports.append(import_status)
@@ -122,117 +114,156 @@ class ImportDatabase:
     def import_themes(self):
         themes_file = self.import_dir.joinpath("themes", "themes.json")
-        if not themes_file.exists():
-            return []
+        themes = ImportDatabase.read_models_file(themes_file, SiteTheme)
 
         theme_imports = []
-        with open(themes_file, "r") as f:
-            themes: list[dict] = json.loads(f.read())
-            themes: list[SiteTheme] = [SiteTheme(**theme) for theme in themes]
 
         for theme in themes:
             if theme.name == "default":
                 continue
-            item = db.themes.get(self.session, theme.name)
-            if item:
-                import_status = UserImport(name=theme.name, status=False, exception="Theme Exists")
-                theme_imports.append(import_status)
-                continue
-            try:
-                db.themes.create(self.session, theme.dict())
-                theme_imports.append(ThemeImport(name=theme.name, status=True))
-            except Exception as inst:
-                logger.info(f"Unable Import Theme {theme.name}")
-                theme_imports.append(ThemeImport(name=theme.name, status=False, exception=str(inst)))
+
+            import_status = self.import_model(
+                db_table=db.themes,
+                model=theme,
+                return_model=ThemeImport,
+                name_attr="name",
+                search_key="name",
+            )
+
+            theme_imports.append(import_status)
 
         return theme_imports
 
-    def import_settings(self):
+    def import_settings(self):  #! Broken
         settings_file = self.import_dir.joinpath("settings", "settings.json")
-        if not settings_file.exists():
-            return []
-
-        settings_imports = []
-
-        with open(settings_file, "r") as f:
-            settings: dict = json.loads(f.read())
-
-        name = settings.get("name")
+        settings = ImportDatabase.read_models_file(settings_file, SiteSettings)
+        settings = settings[0]
 
         try:
-            db.settings.update(self.session, name, settings)
-            import_status = SettingsImport(name=name, status=True)
+            db.settings.update(self.session, 1, settings.dict())
+            import_status = SettingsImport(name="Site Settings", status=True)
         except Exception as inst:
             self.session.rollback()
-            import_status = SettingsImport(name=name, status=False, exception=str(inst))
-
-        settings_imports.append(import_status)
+            import_status = SettingsImport(name="Site Settings", status=False, exception=str(inst))
 
-        return settings_imports
+        return [import_status]
 
     def import_groups(self):
         groups_file = self.import_dir.joinpath("groups", "groups.json")
-        if not groups_file.exists():
-            return []
+        groups = ImportDatabase.read_models_file(groups_file, UpdateGroup)
 
         group_imports = []
-        with open(groups_file, "r") as f:
-            groups = [UpdateGroup(**g) for g in json.loads(f.read())]
 
         for group in groups:
-            item = db.groups.get(self.session, group.name, "name")
-            if item:
-                import_status = GroupImport(name=group.name, status=False, exception="Group Exists")
-                group_imports.append(import_status)
-                continue
-            try:
-                db.groups.create(self.session, group.dict())
-                import_status = GroupImport(name=group.name, status=True)
-            except Exception as inst:
-                self.session.rollback()
-                import_status = GroupImport(name=group.name, status=False, exception=str(inst))
+            import_status = self.import_model(db.groups, group, GroupImport, search_key="name")
 
             group_imports.append(import_status)
 
         return group_imports
 
     def import_users(self):
         users_file = self.import_dir.joinpath("users", "users.json")
-        if not users_file.exists():
-            return []
+        users = ImportDatabase.read_models_file(users_file, UserInDB)
 
         user_imports = []
-        with open(users_file, "r") as f:
-            users = [UserInDB(**g) for g in json.loads(f.read())]
 
         for user in users:
-            if user.id == 1:
+            if user.id == 1:  # Update Default User
                 db.users.update(self.session, 1, user.dict())
                 import_status = UserImport(name=user.full_name, status=True)
                 user_imports.append(import_status)
                 continue
 
-            item = db.users.get(self.session, user.email, "email")
-            if item:
-                import_status = UserImport(name=user.full_name, status=False, exception="User Email Exists")
-                user_imports.append(import_status)
-                continue
-
-            try:
-                db.users.create(self.session, user.dict())
-                import_status = UserImport(name=user.full_name, status=True)
-            except Exception as inst:
-                self.session.rollback()
-                import_status = UserImport(name=user.full_name, status=False, exception=str(inst))
+            import_status = self.import_model(
+                db_table=db.users,
+                model=user,
+                return_model=UserImport,
+                name_attr="full_name",
+                search_key="email",
+            )
 
             user_imports.append(import_status)
 
         return user_imports
+    @staticmethod
+    def read_models_file(file_path: Path, model: BaseModel, single_file=True, migrate: Callable = None):
+        """A general purpose function used to process a backup `.json` file created by mealie.
+        Note that if the file does not exist, the function returns an empty list.
+
+        Args:
+            file_path (Path): The path to the .json file or directory
+            model (BaseModel): The pydantic model that will be created from the .json file entries
+            single_file (bool, optional): If True, the json data is treated as a list of entries; if False,
+                glob-style matching is used and each file is treated as its own entry. Defaults to True.
+            migrate (Callable, optional): A migration function called on the data prior to creating a model. Defaults to None.
+
+        Returns:
+            list: The pydantic models created from the file(s)
+        """
+        if not file_path.exists():
+            return []
+
+        if single_file:
+            with open(file_path, "r") as f:
+                file_data = json.loads(f.read())
+
+            if migrate:
+                file_data = [migrate(x) for x in file_data]
+
+            return [model(**g) for g in file_data]
+
+        all_models = []
+        for file in file_path.glob("*.json"):
+            with open(file, "r") as f:
+                file_data = json.loads(f.read())
+
+            if migrate:
+                file_data = migrate(file_data)
+
+            all_models.append(model(**file_data))
+
+        return all_models
+
+    def import_model(self, db_table, model, return_model, name_attr="name", search_key="id", **kwargs):
+        """A general purpose function used to insert a pydantic model into the database.
+        If `self.force_imports` is True, any existing entry matching the search key is removed prior to creation.
+
+        Args:
+            db_table ([type]): A database table like `db.users`
+            model ([type]): The pydantic model that matches the database
+            return_model ([type]): The return model that will be used for the 'report'
+            name_attr (str, optional): The name property on the return model. Defaults to "name".
+            search_key (str, optional): The key used to identify whether the entry already exists. Defaults to "id".
+            **kwargs: Any kwargs passed will be used to set attributes on the `return_model`
+
+        Returns:
+            [type]: Returns the `return_model` specified.
+        """
+        model_name = getattr(model, name_attr)
+        search_value = getattr(model, search_key)
+
+        item = db_table.get(self.session, search_value, search_key)
+        if item:
+            if self.force_imports:
+                primary_key = getattr(item, db_table.primary_key)
+                db_table.delete(self.session, primary_key)
+            else:
+                return return_model(
+                    name=model_name,
+                    status=False,
+                    exception=f"Table entry with matching '{search_key}': '{search_value}' exists",
+                )
+
+        try:
+            db_table.create(self.session, model.dict())
+            import_status = return_model(name=model_name, status=True)
+
+        except Exception as inst:
+            self.session.rollback()
+            import_status = return_model(name=model_name, status=False, exception=str(inst))
+
+        for key, value in kwargs.items():
+            setattr(return_model, key, value)
+
+        return import_status
+
     def clean_up(self):
         shutil.rmtree(TEMP_DIR)
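The two helpers above are what every import_* method now delegates to. A hedged usage sketch, written as it would appear inside an ImportDatabase method (so self, db, and the imported schema classes are assumed to be in scope; recipe_dir is the extracted recipes directory referenced earlier in import_recipes):

    # Single JSON file that holds a list of entries (mirrors import_groups above):
    groups = ImportDatabase.read_models_file(
        file_path=self.import_dir.joinpath("groups", "groups.json"),
        model=UpdateGroup,
    )

    # Directory of per-recipe JSON files, with a migration hook applied to each
    # one (mirrors the import_recipes call above):
    recipes = ImportDatabase.read_models_file(
        file_path=recipe_dir,
        model=Recipe,
        single_file=False,
        migrate=ImportDatabase._recipe_migration,
    )

    # One insert per model: with self.force_imports set, a row that already
    # matches the search key is deleted and re-created; otherwise the returned
    # report carries status=False and an "exists" exception message.
    for group in groups:
        report = self.import_model(db.groups, group, GroupImport, search_key="name")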
@@ -248,7 +279,7 @@ def import_database(
     force_import: bool = False,
     rebase: bool = False,
 ):
-    import_session = ImportDatabase(session, archive)
+    import_session = ImportDatabase(session, archive, force_import)
 
     recipe_report = []
     if import_recipes:
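Finally, the force flag now reaches the database layer: import_database hands it to ImportDatabase, where import_model consults it as self.force_imports. A rough sketch of the resulting call flow; only the constructor call and the method names are confirmed by this commit, the surrounding wiring is an assumption:

    # force_import=True: rows that collide with imported entries are deleted and
    # re-created. force_import=False: collisions come back as status=False reports.
    import_session = ImportDatabase(session, archive, force_import)

    group_report = import_session.import_groups()
    user_report = import_session.import_users()
    recipe_report = import_session.import_recipes()

    import_session.clean_up()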