fix import errors on no group

hay-kot 2021-03-21 16:40:16 -08:00
commit 2852ca6f0a
6 changed files with 153 additions and 143 deletions

View file

@@ -23,42 +23,15 @@
<v-divider></v-divider>
<v-card-text>
<v-row>
<v-col>
<ImportOptions @update-options="updateOptions" class="mt-5" />
</v-col>
<!-- <v-col>
<v-tooltip top>
<template v-slot:activator="{ on, attrs }">
<span v-on="on" v-bind="attrs">
<v-checkbox
class="mb-n4 mt-1"
dense
label="Force"
v-model="forceImport"
></v-checkbox>
</span>
</template>
<span>Force update existing recipes</span>
</v-tooltip>
<v-tooltip top>
<template v-slot:activator="{ on, attrs }">
<span v-on="on" v-bind="attrs">
<v-checkbox
class="mb-n4 mt-1"
dense
label="Rebase"
v-model="rebaseImport"
></v-checkbox>
</span>
</template>
<span
>Removes all recipes, and then imports recipes from the
backup</span
>
</v-tooltip>
</v-col> -->
</v-row>
<ImportOptions @update-options="updateOptions" class="mt-5 mb-2" />
<v-divider></v-divider>
<v-checkbox
dense
label="Remove existing entries matching imported entries"
v-model="forceImport"
></v-checkbox>
</v-card-text>
<v-divider></v-divider>

View file

@@ -18,6 +18,7 @@ app = FastAPI(
redoc_url=redoc_url,
)
def start_scheduler():
import mealie.services.scheduler.scheduled_jobs

View file

@@ -57,7 +57,12 @@ class Group(SqlAlchemyBase, BaseMixins):
@staticmethod
def get_ref(session: Session, name: str):
return session.query(Group).filter(Group.name == name).one()
item = session.query(Group).filter(Group.name == name).one()
if item:
return item
else:
return session.query(Group).filter(Group.id == 1).one()
@staticmethod
def create_if_not_exist(session, name: str = DEFAULT_GROUP):
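The fix itself: get_ref now falls back to the default group instead of failing when a backup references a group name that is no longer in the database. A minimal standalone sketch of the same pattern, assuming a SQLAlchemy Session and the Group model above; it uses one_or_none() because query(...).one() raises NoResultFound rather than returning a falsy value when no row matches:

def get_group_or_default(session: Session, name: str):
    # Look the group up by name; one_or_none() yields None when no row matches.
    item = session.query(Group).filter(Group.name == name).one_or_none()
    if item:
        return item
    # Fall back to the default group, which is created at startup with id == 1.
    return session.query(Group).filter(Group.id == 1).one()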

View file

@@ -34,14 +34,14 @@ class User(SqlAlchemyBase, BaseMixins):
group = group if group else DEFAULT_GROUP
self.full_name = full_name
self.email = email
self.group = Group.create_if_not_exist(session, group)
self.group = Group.get_ref(session, group)
self.admin = admin
self.password = password
def update(self, full_name, email, group, admin, session=None, id=None, password=None):
self.full_name = full_name
self.email = email
self.group = Group.create_if_not_exist(session, group)
self.group = Group.get_ref(session, group)
self.admin = admin
if password:

View file

@@ -10,7 +10,7 @@ class ImportBase(BaseModel):
class RecipeImport(ImportBase):
slug: str
slug: Optional[str]
class ThemeImport(ImportBase):
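The slug becomes optional because a recipe file that fails to parse is now reported without one. A minimal sketch of the schema as changed; the ImportBase fields are inferred from how the report models are constructed elsewhere in this commit, so treat them as assumptions (pydantic v1 style, matching the rest of the codebase):

from typing import Optional
from pydantic import BaseModel

class ImportBase(BaseModel):
    name: str                        # assumed field, always set by the importer
    status: bool                     # assumed field, True on success
    exception: Optional[str] = None  # assumed field, populated on failures

class RecipeImport(ImportBase):
    slug: Optional[str]  # may be absent when the recipe file could not be read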

View file

@@ -1,8 +1,9 @@
import json
import shutil
import zipfile
from logging import exception
from pathlib import Path
from typing import List
from typing import Callable, List
from fastapi.logger import logger
from mealie.core.config import BACKUP_DIR, IMG_DIR, TEMP_DIR
@@ -11,6 +12,8 @@ from mealie.schema.recipe import Recipe
from mealie.schema.restore import GroupImport, RecipeImport, SettingsImport, ThemeImport, UserImport
from mealie.schema.theme import SiteTheme
from mealie.schema.user import UpdateGroup, UserInDB
from pydantic.main import BaseModel
from schema.settings import SiteSettings
from sqlalchemy.orm.session import Session
@@ -50,35 +53,24 @@ class ImportDatabase:
imports = []
successful_imports = []
def read_recipe_file(file_path: Path):
with open(file_path, "r") as f:
try:
recipe_dict = json.loads(f.read())
recipe_dict = ImportDatabase._recipe_migration(recipe_dict)
return Recipe(**recipe_dict)
except:
import_status = RecipeImport(name=file_path.stem, slug=file_path.stem, status=False)
imports.append(import_status)
recipes = [read_recipe_file(r) for r in recipe_dir.glob("*.json")]
recipes = ImportDatabase.read_models_file(
file_path=recipe_dir, model=Recipe, single_file=False, migrate=ImportDatabase._recipe_migration
)
for recipe in recipes:
try:
db.recipes.create(self.session, recipe.dict())
import_status = RecipeImport(name=recipe.name, slug=recipe.slug, status=True)
successful_imports.append(recipe.slug)
logger.info(f"Imported: {recipe.slug}")
recipe: Recipe
except Exception as inst:
self.session.rollback()
logger.error(inst)
logger.info(f"Failed Import: {recipe.slug}")
import_status = RecipeImport(
name=recipe.name,
slug=recipe.slug,
status=False,
exception=str(inst),
)
import_status = self.import_model(
db_table=db.recipes,
model=recipe,
return_model=RecipeImport,
name_attr="name",
search_key="slug",
slug=recipe.slug,
)
if import_status.status:
successful_imports.append(recipe.slug)
imports.append(import_status)
@@ -122,117 +114,156 @@ class ImportDatabase:
def import_themes(self):
themes_file = self.import_dir.joinpath("themes", "themes.json")
if not themes_file.exists():
return []
themes = ImportDatabase.read_models_file(themes_file, SiteTheme)
theme_imports = []
with open(themes_file, "r") as f:
themes: list[dict] = json.loads(f.read())
themes: list[SiteTheme] = [SiteTheme(**theme) for theme in themes]
for theme in themes:
if theme.name == "default":
continue
item = db.themes.get(self.session, theme.name)
if item:
import_status = UserImport(name=theme.name, status=False, exception="Theme Exists")
theme_imports.append(import_status)
continue
try:
db.themes.create(self.session, theme.dict())
theme_imports.append(ThemeImport(name=theme.name, status=True))
except Exception as inst:
logger.info(f"Unable Import Theme {theme.name}")
theme_imports.append(ThemeImport(name=theme.name, status=False, exception=str(inst)))
import_status = self.import_model(
db_table=db.themes,
model=theme,
return_model=ThemeImport,
name_attr="name",
search_key="name",
)
theme_imports.append(import_status)
return theme_imports
def import_settings(self):
def import_settings(self): #! Broken
settings_file = self.import_dir.joinpath("settings", "settings.json")
if not settings_file.exists():
return []
settings = ImportDatabase.read_models_file(settings_file, SiteSettings)
settings = settings[0]
settings_imports = []
try:
db.settings.update(self.session, 1, settings.dict())
import_status = SettingsImport(name="Site Settings", status=True)
with open(settings_file, "r") as f:
settings: dict = json.loads(f.read())
except Exception as inst:
self.session.rollback()
import_status = SettingsImport(name="Site Settings", status=False, exception=str(inst))
name = settings.get("name")
try:
db.settings.update(self.session, name, settings)
import_status = SettingsImport(name=name, status=True)
except Exception as inst:
self.session.rollback()
import_status = SettingsImport(name=name, status=False, exception=str(inst))
settings_imports.append(import_status)
return settings_imports
return [import_status]
def import_groups(self):
groups_file = self.import_dir.joinpath("groups", "groups.json")
if not groups_file.exists():
return []
groups = ImportDatabase.read_models_file(groups_file, UpdateGroup)
group_imports = []
with open(groups_file, "r") as f:
groups = [UpdateGroup(**g) for g in json.loads(f.read())]
for group in groups:
item = db.groups.get(self.session, group.name, "name")
if item:
import_status = GroupImport(name=group.name, status=False, exception="Group Exists")
group_imports.append(import_status)
continue
try:
db.groups.create(self.session, group.dict())
import_status = GroupImport(name=group.name, status=True)
except Exception as inst:
self.session.rollback()
import_status = GroupImport(name=group.name, status=False, exception=str(inst))
import_status = self.import_model(db.groups, group, GroupImport, search_key="name")
group_imports.append(import_status)
return group_imports
def import_users(self):
users_file = self.import_dir.joinpath("users", "users.json")
if not users_file.exists():
return []
users = ImportDatabase.read_models_file(users_file, UserInDB)
user_imports = []
with open(users_file, "r") as f:
users = [UserInDB(**g) for g in json.loads(f.read())]
for user in users:
if user.id == 1:
if user.id == 1: # Update Default User
db.users.update(self.session, 1, user.dict())
import_status = UserImport(name=user.full_name, status=True)
user_imports.append(import_status)
continue
item = db.users.get(self.session, user.email, "email")
if item:
import_status = UserImport(name=user.full_name, status=False, exception="User Email Exists")
user_imports.append(import_status)
continue
try:
db.users.create(self.session, user.dict())
import_status = UserImport(name=user.full_name, status=True)
except Exception as inst:
self.session.rollback()
import_status = UserImport(name=user.full_name, status=False, exception=str(inst))
import_status = self.import_model(
db_table=db.users,
model=user,
return_model=UserImport,
name_attr="full_name",
search_key="email",
)
user_imports.append(import_status)
return user_imports
@staticmethod
def read_models_file(file_path: Path, model: BaseModel, single_file=True, migrate: Callable = None):
"""A general purpose function that is used to process a backup `.json` file created by mealie
note that if the file doesn't not exists the function will return any empty list
Args:
file_path (Path): The path to the .json file or directory
model (BaseModel): The pydantic model that will be created from the .json file entries
single_file (bool, optional): If true, the json data is treated as a list; if false, glob style matching is used and each file is treated as its own entry. Defaults to True.
migrate (Callable, optional): A migrate function that will be called on the data prior to creating a model. Defaults to None.
Returns:
list: The parsed pydantic models, or an empty list if the file does not exist.
"""
if not file_path.exists():
return []
if single_file:
with open(file_path, "r") as f:
file_data = json.loads(f.read())
if migrate:
file_data = [migrate(x) for x in file_data]
return [model(**g) for g in file_data]
all_models = []
for file in file_path.glob("*.json"):
with open(file, "r") as f:
file_data = json.loads(f.read())
if migrate:
file_data = migrate(file_data)
all_models.append(model(**file_data))
return all_models
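A usage sketch of read_models_file, drawn from the call sites added in this commit; themes_file, groups_file, and recipe_dir are the backup paths built from self.import_dir:

# Single-file mode: one .json file containing a list of entries.
themes = ImportDatabase.read_models_file(themes_file, SiteTheme)
groups = ImportDatabase.read_models_file(groups_file, UpdateGroup)

# Directory mode: one .json file per recipe, each passed through the migration hook.
recipes = ImportDatabase.read_models_file(
    file_path=recipe_dir, model=Recipe, single_file=False, migrate=ImportDatabase._recipe_migration
)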
def import_model(self, db_table, model, return_model, name_attr="name", search_key="id", **kwargs):
"""A general purpose function used to insert a list of pydantic modelsi into the database.
The assumption at this point is that the models that are inserted. If self.force_imports is true
any existing entries will be removed prior to creation
Args:
db_table ([type]): A database table like `db.users`
model ([type]): The Pydantic model that matches the database
return_model ([type]): The return model that will be used for the 'report'
name_attr (str, optional): The name property on the return model. Defaults to "name".
search_key (str, optional): The key used to identify if the entry already exists. Defaults to "id"
**kwargs (): Any kwargs passed will be used to set attributes on the `return_model`
Returns:
[type]: Returns the `return_model` specified.
"""
model_name = getattr(model, name_attr)
search_value = getattr(model, search_key)
item = db_table.get(self.session, search_value, search_key)
if item:
if self.force_imports:
primary_key = getattr(item, db_table.primary_key)
db_table.delete(self.session, primary_key)
else:
return return_model(
name=model_name,
status=False,
exception=f"Table entry with matching '{search_key}': '{search_value}' exists",
)
try:
db_table.create(self.session, model.dict())
import_status = return_model(name=model_name, status=True)
except Exception as inst:
self.session.rollback()
import_status = return_model(name=model_name, status=False, exception=str(inst))
for key, value in kwargs.items():
setattr(return_model, key, value)
return import_status
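A usage sketch of import_model, mirroring the recipe loop above; the extra slug kwarg is forwarded onto the returned report model:

import_status = self.import_model(
    db_table=db.recipes,        # table accessor with get/create/delete and a primary_key attribute
    model=recipe,               # pydantic model matching the table
    return_model=RecipeImport,  # report model for this entry
    name_attr="name",
    search_key="slug",          # duplicates are detected by slug rather than id
    slug=recipe.slug,           # extra kwarg set on the report
)
if import_status.status:
    successful_imports.append(recipe.slug)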
def clean_up(self):
shutil.rmtree(TEMP_DIR)
@@ -248,7 +279,7 @@ def import_database(
force_import: bool = False,
rebase: bool = False,
):
import_session = ImportDatabase(session, archive)
import_session = ImportDatabase(session, archive, force_import)
recipe_report = []
if import_recipes: