Dropping Mongo From Dev Branch (#89)
* Fix link to Docker Hub
  Found an extra s. DESTROYED it.
* initial pass
* second pass cleanup
* backup card framework
* backup card functionality
* translation
* upload button vile creation
* Release v0.1.0 Candidate (#85)
* Changed uvicorn port to 80
* Changed port in docker-compose to match dockerfile
* Readded environment variables in docker-compose
* production image rework
* Use opengraph metadata to make basic recipe cards when full recipe metadata is not available
* fixed instrucitons on parse
* add last_recipe
* automated testing
* roadmap update
* Sqlite (#75)
* file structure
* auto-test
* take 2
* refactor ap scheduler and startup process
* fixed scraper error
* database abstraction
* database abstraction
* port recipes over to new schema
* meal migration
* start settings migration
* finale mongo port
* backup improvements
* migration imports to new DB structure
* unused import cleanup
* docs strings
* settings and theme import logic
* cleanup
* fixed tinydb error
* requirements
* fuzzy search
* remove scratch file
* sqlalchemy models
* improved search ui
* recipe models almost done
* sql modal population
* del scratch
* rewrite database model mixins
* mostly grabage
* recipe updates
* working sqllite
* remove old files and reorganize
* final cleanup
Co-authored-by: Hayden <hay-kot@pm.me>
* Backup card (#78)
* backup / import dialog
* upgrade to new tag method
* New import card
* rename settings.py to app_config.py
* migrate to poetry for development
* fix failing test
Co-authored-by: Hayden <hay-kot@pm.me>
* added mkdocs to docker-compose
* Translations (#72)
* Translations + danish
* changed back proxy target to use ENV
* Resolved more merge conflicts
* Removed test in translation
* Documentation of translations
* Updated translations
* removed old packages
Co-authored-by: Hayden <64056131+hay-kot@users.noreply.github.com>
* fail to start bug fixes
* feature: prep/cook/total time slots (#80)
Co-authored-by: Hayden <hay-kot@pm.me>
* missing bind attributes
* Bug fixes (#81)
* fix: url remains after succesful import
* docs: changelog + update todos
* arm image
* arm compose
* compose updates
* update poetry
* arm support
Co-authored-by: Hayden <hay-kot@pm.me>
* dockerfile hotfix
* dockerfile hotfix
* Version Release Final Touches (#84)
* Remove slim
* bug: opacity issues
* bug: startup failure with no database
* ci/cd on dev branch
* formatting
* v0.1.0 documentation
Co-authored-by: Hayden <hay-kot@pm.me>
* db init hotfix
* bug: fix crash in mongo
* fix mongo bug
* fixed version notifier
* finale changelog
Co-authored-by: kentora <=>
Co-authored-by: Hayden <hay-kot@pm.me>
Co-authored-by: Richard Mitic <richard.h.mitic@gmail.com>
Co-authored-by: kentora <kentora@kentora.dk>
* build container
* webscraper hotfix
* dev bug: change data location to prevent reloads
* api docs
* api docs bug
* workflow update
Co-authored-by: David Young <davidy@funkypenguin.co.nz>
Co-authored-by: Hayden <hay-kot@pm.me>
Co-authored-by: Richard Mitic <richard.h.mitic@gmail.com>
Co-authored-by: kentora <kentora@kentora.dk>
parent 3a5f99919b
commit 567bff8b42
49 changed files with 514 additions and 885 deletions
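The heart of this commit is that the db_type switch no longer has a working "mongo" branch. A minimal sketch of the selection logic it leaves behind, based on the app_config.py hunk later in this diff (the exact post-removal form and the SQLITE_DIR constant are assumptions, not taken verbatim from the repo):

# Sketch only: mirrors the app_config.py hunk shown in this diff; SQLITE_DIR is hypothetical here.
import os
from pathlib import Path

CWD = Path(__file__).parent
DATA_DIR = CWD.parent.joinpath("app_data")      # data moved outside the app dir in this commit
SQLITE_DIR = DATA_DIR.joinpath("db")            # hypothetical; the real constant is defined elsewhere in app_config.py

DATABASE_TYPE = os.getenv("db_type", "sqlite")  # sqlite is now the default and only supported backend
if DATABASE_TYPE == "sqlite":
    USE_SQL = True
    SQLITE_FILE = SQLITE_DIR.joinpath("mealie.sqlite")
else:
    raise Exception("Unable to determine database type. The only acceptable option is 'sqlite'.")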
49
.github/workflows/dockerbuild.prod.yml
vendored
Normal file
@ -0,0 +1,49 @@
name: Docker Build Dev

on:
  push:
    branches:
      - master

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      #
      # Checkout
      #
      - name: checkout code
        uses: actions/checkout@v2
      #
      # Setup QEMU
      #
      - name: Set up QEMU
        id: qemu
        uses: docker/setup-qemu-action@v1
        with:
          image: tonistiigi/binfmt:latest
          platforms: all
      #
      # Setup Buildx
      #
      - name: install buildx
        id: buildx
        uses: docker/setup-buildx-action@v1
        with:
          install: true
      #
      # Login to Docker Hub
      #
      - name: Login to Docker Hub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      #
      # Build
      #
      - name: build the image
        run: |
          docker build --push \
            --tag hkotel/mealie:latest \
            --platform linux/amd64,linux/arm/v7,linux/arm64 .
1
.github/workflows/pytest.yml
vendored
|
@ -4,7 +4,6 @@ on:
|
|||
branches:
|
||||
- master
|
||||
- dev
|
||||
- cd/cd
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
|
|
19
.gitignore
vendored
|
@ -10,19 +10,18 @@ mealie/temp/api.html
|
|||
.temp/
|
||||
|
||||
|
||||
mealie/data/backups/*
|
||||
mealie/data/debug/*
|
||||
mealie/data/img/*
|
||||
mealie/data/migration/*
|
||||
!mealie/dist/*
|
||||
app_data/backups/*
|
||||
app_data/debug/*
|
||||
app_data/img/*
|
||||
app_data/migration/*
|
||||
|
||||
#Exception to keep folders
|
||||
!mealie/dist/.gitkeep
|
||||
!mealie/data/backups/.gitkeep
|
||||
!mealie/data/backups/dev_sample_data*
|
||||
!mealie/data/debug/.gitkeep
|
||||
!mealie/data/migration/.gitkeep
|
||||
!mealie/data/img/.gitkeep
|
||||
!app_data/backups/.gitkeep
|
||||
!app_data/backups/dev_sample_data*
|
||||
!app_data/debug/.gitkeep
|
||||
!app_data/migration/.gitkeep
|
||||
!app_data/img/.gitkeep
|
||||
|
||||
.DS_Store
|
||||
node_modules
|
||||
|
|
|
@ -1,33 +0,0 @@
|
|||
FROM node:lts-alpine as build-stage
|
||||
WORKDIR /app
|
||||
COPY ./frontend/package*.json ./
|
||||
RUN npm install
|
||||
COPY ./frontend/ .
|
||||
RUN npm run build
|
||||
|
||||
FROM mrnr91/uvicorn-gunicorn-fastapi:python3.8
|
||||
|
||||
|
||||
COPY ./requirements.txt /app/requirements.txt
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN apt-get update -y && \
|
||||
apt-get install -y python-pip python-dev git curl --no-install-recommends
|
||||
|
||||
RUN curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | POETRY_HOME=/opt/poetry python && \
|
||||
cd /usr/local/bin && \
|
||||
ln -s /opt/poetry/bin/poetry && \
|
||||
poetry config virtualenvs.create false
|
||||
|
||||
COPY ./pyproject.toml ./app/poetry.lock* /app/
|
||||
|
||||
COPY ./mealie /app
|
||||
RUN poetry install --no-root --no-dev
|
||||
COPY --from=build-stage /app/dist /app/dist
|
||||
RUN rm -rf /app/test /app/.temp
|
||||
|
||||
ENV ENV prod
|
||||
ENV APP_MODULE "app:app"
|
||||
|
||||
VOLUME [ "/app/data" ]
|
|
@ -8,7 +8,6 @@ RUN curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-
|
|||
ln -s /opt/poetry/bin/poetry && \
|
||||
poetry config virtualenvs.create false
|
||||
|
||||
RUN mkdir /app/
|
||||
COPY ./pyproject.toml ./app/poetry.lock* /app/
|
||||
|
||||
WORKDIR /app
|
||||
|
@ -17,7 +16,6 @@ RUN poetry install --no-root
|
|||
|
||||
COPY ./mealie /app
|
||||
|
||||
|
||||
ENTRYPOINT [ "python" ]
|
||||
|
||||
CMD [ "app.py" ]
|
|
@ -33,7 +33,7 @@
|
|||
Request Feature
|
||||
</a>
|
||||
·
|
||||
<a href="https://hub.docker.com/repository/docker/hkotel/mealies"> Docker Hub
|
||||
<a href="https://hub.docker.com/repository/docker/hkotel/mealie"> Docker Hub
|
||||
</a>
|
||||
</p>
|
||||
|
||||
|
|
|
@ -1,16 +0,0 @@
|
|||
# Use root/example as user/password credentials
|
||||
# Frontend/Backend Served via the same Uvicorn Server
|
||||
version: "3.1"
|
||||
services:
|
||||
mealie:
|
||||
build:
|
||||
context: ./
|
||||
dockerfile: Dockerfile.arm
|
||||
container_name: mealie
|
||||
restart: always
|
||||
ports:
|
||||
- 9090:80
|
||||
environment:
|
||||
db_type: sql
|
||||
volumes:
|
||||
- ./mealie/data/:/app/data
|
|
@ -26,39 +26,17 @@ services:
|
|||
ports:
|
||||
- 9921:9000
|
||||
environment:
|
||||
db_type: sqlite
|
||||
TZ: America/Anchorage # Specify Correct Timezone for Date/Time to line up correctly.
|
||||
db_username: root
|
||||
db_password: example
|
||||
db_host: mongo
|
||||
db_port: 27017
|
||||
volumes:
|
||||
- ./app_data:/app_data
|
||||
- ./mealie:/app
|
||||
|
||||
mealie-docs:
|
||||
image: squidfunk/mkdocs-material
|
||||
restart: always
|
||||
ports:
|
||||
- 9924:8000
|
||||
- 9923:8000
|
||||
volumes:
|
||||
- ./docs:/docs
|
||||
|
||||
# Database
|
||||
mongo:
|
||||
image: mongo
|
||||
restart: always
|
||||
ports:
|
||||
- 9923:27017
|
||||
environment:
|
||||
TZ: America/Anchorage # Specify Correct Timezone for Date/Time to line up correctly.
|
||||
MONGO_INITDB_ROOT_USERNAME: root
|
||||
MONGO_INITDB_ROOT_PASSWORD: example
|
||||
|
||||
# Database UI
|
||||
mongo-express:
|
||||
image: mongo-express
|
||||
restart: always
|
||||
ports:
|
||||
- 9922:8081
|
||||
environment:
|
||||
ME_CONFIG_MONGODB_ADMINUSERNAME: root
|
||||
ME_CONFIG_MONGODB_ADMINPASSWORD: example
|
||||
|
|
|
@ -10,23 +10,7 @@ services:
|
|||
- 9090:80
|
||||
environment:
|
||||
db_type: sql
|
||||
db_username: root
|
||||
db_password: example
|
||||
db_host: mongo
|
||||
db_port: 27017
|
||||
|
||||
# volumes:
|
||||
# - ./mealie/data/:/app/data
|
||||
# mongo:
|
||||
# image: mongo
|
||||
# restart: always
|
||||
# environment:
|
||||
# MONGO_INITDB_ROOT_USERNAME: root
|
||||
# MONGO_INITDB_ROOT_PASSWORD: example
|
||||
# mongo-express: # Optional Mongo GUI
|
||||
# image: mongo-express
|
||||
# restart: always
|
||||
# ports:
|
||||
# - 9091:8081
|
||||
# environment:
|
||||
# ME_CONFIG_MONGODB_ADMINUSERNAME: root
|
||||
# ME_CONFIG_MONGODB_ADMINPASSWORD: example
|
||||
|
||||
|
|
|
@ -45,8 +45,7 @@ Feature placement is not set in stone. This is much more of a guideline than any
|
|||
- [ ] Setup SQL Migrations
|
||||
|
||||
## Breaking Changes
|
||||
- Internal port 9000 changed to port 80 for better Traefik support
|
||||
- MongoDB support dropped
|
||||
## Code Chores
|
||||
- [ ] Remove MongoDB Interface Code
|
||||
- [x] Remove MongoDB Interface Code
|
||||
- [ ] Dockerfile Trim
|
||||
|
|
5
frontend/.vscode/settings.json
vendored
|
@ -1,5 +0,0 @@
|
|||
{
|
||||
"cSpell.enableFiletypes": [
|
||||
"!javascript"
|
||||
]
|
||||
}
|
|
@ -28,15 +28,8 @@ export default {
|
|||
await apiReq.delete(backupURLs.deleteBackup(fileName));
|
||||
},
|
||||
|
||||
async create(tag, template) {
|
||||
if (typeof template == String) {
|
||||
template = [template];
|
||||
}
|
||||
console.log(tag, template);
|
||||
let response = apiReq.post(backupURLs.createBackup, {
|
||||
tag: tag,
|
||||
template: template,
|
||||
});
|
||||
async create(data) {
|
||||
let response = apiReq.post(backupURLs.createBackup, data);
|
||||
return response;
|
||||
},
|
||||
};
|
||||
|
|
|
@ -0,0 +1,90 @@
|
|||
<template>
|
||||
<div>
|
||||
<ImportDialog
|
||||
:name="selectedName"
|
||||
:date="selectedDate"
|
||||
ref="import_dialog"
|
||||
@import="importBackup"
|
||||
@delete="deleteBackup"
|
||||
/>
|
||||
<v-row>
|
||||
<v-col
|
||||
:sm="6"
|
||||
:md="6"
|
||||
:lg="4"
|
||||
:xl="4"
|
||||
v-for="backup in backups"
|
||||
:key="backup.name"
|
||||
>
|
||||
<v-card @click="openDialog(backup)">
|
||||
<v-card-text>
|
||||
<v-row align="center">
|
||||
<v-col cols="12" sm="2">
|
||||
<v-icon color="primary"> mdi-backup-restore </v-icon>
|
||||
</v-col>
|
||||
<v-col cols="12" sm="10">
|
||||
<div>
|
||||
<strong>{{ backup.name }}</strong>
|
||||
</div>
|
||||
<div>{{ readableTime(backup.date) }}</div>
|
||||
</v-col>
|
||||
</v-row>
|
||||
</v-card-text>
|
||||
</v-card>
|
||||
</v-col>
|
||||
</v-row>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import ImportDialog from "./ImportDialog";
|
||||
import api from "../../../api";
|
||||
import utils from "../../../utils";
|
||||
export default {
|
||||
props: {
|
||||
backups: Array,
|
||||
},
|
||||
components: {
|
||||
ImportDialog,
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
selectedName: "",
|
||||
selectedDate: "",
|
||||
loading: false,
|
||||
};
|
||||
},
|
||||
methods: {
|
||||
openDialog(backup) {
|
||||
this.selectedDate = this.readableTime(backup.date);
|
||||
this.selectedName = backup.name;
|
||||
this.$refs.import_dialog.open();
|
||||
},
|
||||
readableTime(timestamp) {
|
||||
let date = new Date(timestamp);
|
||||
return utils.getDateAsText(date);
|
||||
},
|
||||
async importBackup(data) {
|
||||
this.$emit("loading");
|
||||
let response = await api.backups.import(data.name, data);
|
||||
|
||||
let failed = response.data.failed;
|
||||
let succesful = response.data.successful;
|
||||
|
||||
this.$emit("finished", succesful, failed);
|
||||
},
|
||||
deleteBackup(data) {
|
||||
this.$emit("loading");
|
||||
|
||||
api.backups.delete(data.name);
|
||||
this.selectedBackup = null;
|
||||
this.backupLoading = false;
|
||||
|
||||
this.$emit("finished");
|
||||
},
|
||||
},
|
||||
};
|
||||
</script>
|
||||
|
||||
<style>
|
||||
</style>
|
|
@ -8,7 +8,7 @@
|
|||
|
||||
<v-card-text>
|
||||
<v-row>
|
||||
<v-col>
|
||||
<v-col >
|
||||
<v-checkbox
|
||||
class="mb-n4 mt-1"
|
||||
dense
|
||||
|
|
124
frontend/src/components/Settings/Backup/NewBackupCard.vue
Normal file
|
@ -0,0 +1,124 @@
|
|||
<template>
|
||||
<v-card :loading="loading">
|
||||
<v-card-title> {{ $t("settings.backup.create-heading") }} </v-card-title>
|
||||
<v-card-text class="mt-n3">
|
||||
<v-text-field
|
||||
dense
|
||||
:label="$t('settings.backup.backup-tag')"
|
||||
v-model="tag"
|
||||
></v-text-field>
|
||||
</v-card-text>
|
||||
<v-card-actions class="mt-n9">
|
||||
<v-switch v-model="fullBackup" :label="switchLabel"></v-switch>
|
||||
<v-spacer></v-spacer>
|
||||
<v-btn color="success" text @click="createBackup()">
|
||||
{{ $t("general.create") }}
|
||||
</v-btn>
|
||||
</v-card-actions>
|
||||
|
||||
<v-card-text v-if="!fullBackup" class="mt-n6">
|
||||
<v-row>
|
||||
<v-col sm="4">
|
||||
<p>{{ $t("general.options") }}:</p>
|
||||
<v-checkbox
|
||||
v-for="option in options"
|
||||
:key="option.text"
|
||||
class="mb-n4 mt-n3"
|
||||
dense
|
||||
:label="option.text"
|
||||
v-model="option.value"
|
||||
></v-checkbox>
|
||||
</v-col>
|
||||
<v-col>
|
||||
<p>{{ $t("general.templates") }}:</p>
|
||||
<v-checkbox
|
||||
v-for="template in availableTemplates"
|
||||
:key="template"
|
||||
class="mb-n4 mt-n3"
|
||||
dense
|
||||
:label="template"
|
||||
@click="appendTemplate(template)"
|
||||
></v-checkbox>
|
||||
</v-col>
|
||||
</v-row>
|
||||
</v-card-text>
|
||||
</v-card>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import api from "../../../api";
|
||||
export default {
|
||||
data() {
|
||||
return {
|
||||
tag: null,
|
||||
fullBackup: true,
|
||||
loading: false,
|
||||
options: {
|
||||
recipes: {
|
||||
value: true,
|
||||
text: this.$t("general.recipes"),
|
||||
},
|
||||
settings: {
|
||||
value: true,
|
||||
text: this.$t("general.settings"),
|
||||
},
|
||||
themes: {
|
||||
value: true,
|
||||
text: this.$t("general.themes"),
|
||||
},
|
||||
},
|
||||
availableTemplates: [],
|
||||
selectedTemplates: [],
|
||||
};
|
||||
},
|
||||
mounted() {
|
||||
this.getAvailableBackups();
|
||||
},
|
||||
computed: {
|
||||
switchLabel() {
|
||||
if (this.fullBackup) {
|
||||
return "Full Backup";
|
||||
} else return "Partial Backup";
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
async getAvailableBackups() {
|
||||
let response = await api.backups.requestAvailable();
|
||||
response.templates.forEach((element) => {
|
||||
this.availableTemplates.push(element);
|
||||
});
|
||||
},
|
||||
async createBackup() {
|
||||
this.loading = true;
|
||||
|
||||
let data = {
|
||||
tag: this.tag,
|
||||
options: {
|
||||
recipes: this.options.recipes.value,
|
||||
settings: this.options.settings.value,
|
||||
themes: this.options.themes.value,
|
||||
},
|
||||
templates: this.selectedTemplates,
|
||||
};
|
||||
|
||||
console.log(data);
|
||||
|
||||
await api.backups.create(data);
|
||||
this.loading = false;
|
||||
|
||||
this.$emit("created");
|
||||
},
|
||||
appendTemplate(templateName) {
|
||||
if (this.selectedTemplates.includes(templateName)) {
|
||||
let index = this.selectedTemplates.indexOf(templateName);
|
||||
if (index !== -1) {
|
||||
this.selectedTemplates.splice(index, 1);
|
||||
}
|
||||
} else this.selectedTemplates.push(templateName);
|
||||
},
|
||||
},
|
||||
};
|
||||
</script>
|
||||
|
||||
<style>
|
||||
</style>
|
|
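For context on the payload NewBackupCard.vue assembles above, here is a rough sketch of the equivalent call against the reworked export route (the path, status code, and field names come from the backup_routes.py and backup model hunks later in this diff; the host/port mapping and the use of requests are assumptions):

# Sketch only: create a partial backup with the same payload shape the new card sends.
import requests

payload = {
    "tag": "July 23rd 2021",  # example tag from the BackupJob schema_extra
    "options": {"recipes": True, "settings": True, "themes": True},
    "templates": ["recipes.md"],
}

response = requests.post(
    "http://localhost:9090/api/backups/export/database/",  # 9090:80 mapping taken from the compose files in this diff
    json=payload,
)
print(response.status_code)  # the route declares status_code=201 on success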
@ -1,34 +1,33 @@
|
|||
<template>
|
||||
<v-card :loading="backupLoading" class="mt-3">
|
||||
<v-card-title class="headline">
|
||||
{{$t('settings.backup-and-exports')}}
|
||||
{{ $t("settings.backup-and-exports") }}
|
||||
</v-card-title>
|
||||
<v-divider></v-divider>
|
||||
|
||||
<v-card-text>
|
||||
<v-row>
|
||||
<v-col cols="12" md="6" ss="12">
|
||||
<NewBackupCard @created="processFinished" />
|
||||
</v-col>
|
||||
<v-col cols="12" md="6" sm="12">
|
||||
<p>
|
||||
{{$t('settings.backup-info')}}
|
||||
{{ $t("settings.backup-info") }}
|
||||
</p>
|
||||
|
||||
<v-row dense align="center">
|
||||
<v-col dense cols="12" sm="12" md="4">
|
||||
<v-text-field v-model="backupTag" :label="$t('settings.backup-tag')"></v-text-field>
|
||||
</v-col>
|
||||
<v-col cols="12" sm="12" md="3">
|
||||
<v-combobox
|
||||
auto-select-first
|
||||
:label="$t('settings.markdown-template')"
|
||||
:items="availableTemplates"
|
||||
v-model="selectedTemplate"
|
||||
></v-combobox>
|
||||
</v-col>
|
||||
<v-col dense cols="12" sm="12" md="2">
|
||||
<v-btn block text color="accent" @click="createBackup" width="165">
|
||||
{{$t('settings.backup-recipes')}}
|
||||
</v-btn>
|
||||
</v-col>
|
||||
</v-row>
|
||||
<BackupCard
|
||||
<v-divider class="my-3"></v-divider>
|
||||
<v-card-title class="mt-n6">
|
||||
Available Backups
|
||||
<v-spacer></v-spacer>
|
||||
<span>
|
||||
<v-btn color="success" text class="ma-2 white--text">
|
||||
Upload
|
||||
<v-icon right dark> mdi-cloud-upload </v-icon>
|
||||
</v-btn>
|
||||
</span>
|
||||
</v-card-title>
|
||||
<AvailableBackupCard
|
||||
@loading="backupLoading = true"
|
||||
@finished="processFinished"
|
||||
:backups="availableBackups"
|
||||
|
@ -46,23 +45,21 @@
|
|||
<script>
|
||||
import api from "../../../api";
|
||||
import SuccessFailureAlert from "../../UI/SuccessFailureAlert";
|
||||
import BackupCard from "./BackupCard";
|
||||
import AvailableBackupCard from "./AvailableBackupCard";
|
||||
import NewBackupCard from "./NewBackupCard";
|
||||
|
||||
export default {
|
||||
components: {
|
||||
SuccessFailureAlert,
|
||||
BackupCard,
|
||||
AvailableBackupCard,
|
||||
NewBackupCard,
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
failedImports: [],
|
||||
successfulImports: [],
|
||||
backupLoading: false,
|
||||
backupTag: null,
|
||||
selectedBackup: null,
|
||||
selectedTemplate: null,
|
||||
availableBackups: [],
|
||||
availableTemplates: [],
|
||||
};
|
||||
},
|
||||
mounted() {
|
||||
|
@ -85,17 +82,6 @@ export default {
|
|||
this.backupLoading = false;
|
||||
}
|
||||
},
|
||||
async createBackup() {
|
||||
this.backupLoading = true;
|
||||
|
||||
let response = await api.backups.create(this.backupTag, this.templates);
|
||||
|
||||
if (response.status == 201) {
|
||||
this.selectedBackup = null;
|
||||
this.getAvailableBackups();
|
||||
this.backupLoading = false;
|
||||
}
|
||||
},
|
||||
processFinished(successful = null, failed = null) {
|
||||
this.getAvailableBackups();
|
||||
this.backupLoading = false;
|
||||
|
|
|
@ -11,7 +11,7 @@
|
|||
>
|
||||
</v-file-input>
|
||||
</v-form>
|
||||
</template>
|
||||
</template>c
|
||||
|
||||
<script>
|
||||
import api from "../../../api";
|
||||
|
|
25
frontend/src/components/UI/UploadBtn.vue
Normal file
|
@ -0,0 +1,25 @@
|
|||
<template>
|
||||
<v-form ref="file">
|
||||
<v-file-input
|
||||
:loading="loading"
|
||||
:label="$t('migration.upload-an-archive')"
|
||||
v-model="file"
|
||||
accept=".zip"
|
||||
@change="upload"
|
||||
:prepend-icon="icon"
|
||||
class="file-icon"
|
||||
>
|
||||
</v-file-input>
|
||||
<v-btn color="success" text class="ma-2 white--text">
|
||||
Upload
|
||||
<v-icon right dark> mdi-cloud-upload </v-icon>
|
||||
</v-btn>
|
||||
</v-form>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
export default {};
|
||||
</script>
|
||||
|
||||
<style>
|
||||
</style>
|
|
@ -29,7 +29,11 @@
|
|||
"enabled": "Enabled",
|
||||
"download": "Download",
|
||||
"import": "Import",
|
||||
"delete-data": "Delete Data"
|
||||
"delete-data": "Delete Data",
|
||||
"options": "Options",
|
||||
"templates": "Templates",
|
||||
"recipes": "Recipes",
|
||||
"themes": "Themes"
|
||||
},
|
||||
"login": {
|
||||
"stay-logged-in": "Stay logged in?",
|
||||
|
@ -77,7 +81,7 @@
|
|||
"latest": "Latest",
|
||||
"explore-the-docs": "Explore the Docs",
|
||||
"contribute": "Contribute",
|
||||
"backup-and-exports": "Backup and Exports",
|
||||
"backup-and-exports": "Backups",
|
||||
"backup-info": "Backups are exported in standard JSON format along with all the images stored on the file system. In your backup folder you'll find a .zip file that contains all of the recipe JSON and images from the database. Additionally, if you selected a markdown file, those will also be stored in the .zip file. To import a backup, it must be located in your backups folder. Automated backups are done each day at 3:00 AM.",
|
||||
"backup-tag": "Backup Tag",
|
||||
"markdown-template": "Markdown Template",
|
||||
|
@ -114,7 +118,9 @@
|
|||
"backup": {
|
||||
"import-recipes": "Import Recipes",
|
||||
"import-themes": "Import Themes",
|
||||
"import-settings": "Import Settings"
|
||||
"import-settings": "Import Settings",
|
||||
"create-heading": "Create a Backup",
|
||||
"backup-tag": "Backup Tag"
|
||||
}
|
||||
},
|
||||
"migration": {
|
||||
|
|
|
@ -13,7 +13,7 @@ from routes import (
|
|||
static_routes,
|
||||
user_routes,
|
||||
)
|
||||
from utils.api_docs import generate_api_docs
|
||||
# from utils.api_docs import generate_api_docs
|
||||
from utils.logger import logger
|
||||
|
||||
app = FastAPI(
|
||||
|
@ -54,8 +54,8 @@ app.include_router(static_routes.router)
|
|||
|
||||
|
||||
# Generate API Documentation
|
||||
if not PRODUCTION:
|
||||
generate_api_docs(app)
|
||||
# if not PRODUCTION:
|
||||
# generate_api_docs(app)
|
||||
|
||||
if __name__ == "__main__":
|
||||
logger.info("-----SYSTEM STARTUP-----")
|
||||
|
|
|
@ -17,7 +17,7 @@ dotenv.load_dotenv(ENV)
|
|||
|
||||
# Helpful Globals
|
||||
BASE_DIR = CWD
|
||||
DATA_DIR = CWD.joinpath("data")
|
||||
DATA_DIR = CWD.parent.joinpath("app_data")
|
||||
WEB_PATH = CWD.joinpath("dist")
|
||||
IMG_DIR = DATA_DIR.joinpath("img")
|
||||
BACKUP_DIR = DATA_DIR.joinpath("backups")
|
||||
|
@ -55,14 +55,8 @@ SQLITE_FILE = None
|
|||
DATABASE_TYPE = os.getenv("db_type", "sqlite") # mongo, sqlite
|
||||
if DATABASE_TYPE == "sqlite":
|
||||
USE_SQL = True
|
||||
USE_MONGO = False
|
||||
SQLITE_FILE = SQLITE_DIR.joinpath("mealie.sqlite")
|
||||
|
||||
|
||||
elif DATABASE_TYPE == "mongo":
|
||||
USE_MONGO = True
|
||||
USE_SQL = False
|
||||
|
||||
else:
|
||||
raise Exception(
|
||||
"Unable to determine database type. Acceptible options are 'mongo' or 'tinydb' "
|
||||
|
|
|
@ -1,107 +0,0 @@
|
|||
{
|
||||
"@context": "http://schema.org",
|
||||
"@type": "Recipe",
|
||||
"articleBody": "\u201cMy great-grandmothers were Indigenous and mostly nomadic, which means lots of fungi foraging,\u201d says Maricela Vega, the chef at Atlanta restaurant 8ARM and founder of Chicomec\u00f3atl, an organization centering the foodways of Indigenous Mexican diaspora. \u201cWhen I serve lion\u2019s mane mushrooms to vegans they sometimes mistake them for chicken, but they\u2019re more affordable, better for the planet, and help strengthen your immune system! They grow wild during Georgia winters, but at-home cultivation kits are easy for those without forest access. I use pumpkin hot sauce, oil, and fresh herbs as a marinade, then bust out a baby grill or cast-iron skillet to get them nice and crispy.\u201d This is a multistep recipe but perfect for long winter days when you want to flood the house with toasty, irresistible aromas. When you chop the mixed herbs, save the stems and throw them into the blanching water for the vegetables to add flavor.",
|
||||
"alternativeHeadline": "This dish is perfect for long winter days when you want to flood the house with toasty, irresistible aromas.",
|
||||
"dateModified": "2021-01-11 18:32:43.962000",
|
||||
"datePublished": "2021-01-12 04:00:00",
|
||||
"keywords": [
|
||||
"recipes",
|
||||
"healthyish",
|
||||
"vegan",
|
||||
"herb",
|
||||
"apple cider vinegar",
|
||||
"kosher salt",
|
||||
"carrot",
|
||||
"potato",
|
||||
"radicchio",
|
||||
"mushroom",
|
||||
"oyster mushrooms",
|
||||
"oil",
|
||||
"black pepper",
|
||||
"lemon",
|
||||
"web"
|
||||
],
|
||||
"thumbnailUrl": "https://assets.bonappetit.com/photos/5ffc74b39cbb0a3c54d7400f/1:1/w_1199,h_1199,c_limit/HLY-Maricela-Vega-Grilled%20Mushrooms%20and%20Root%20Vegetables.jpg",
|
||||
"publisher": {
|
||||
"@context": "https://schema.org",
|
||||
"@type": "Organization",
|
||||
"name": "Bon App\u00e9tit",
|
||||
"logo": {
|
||||
"@type": "ImageObject",
|
||||
"url": "https://www.bonappetit.com/verso/static/bon-appetit/assets/logo-seo.328de564b950e3d5d1fbe3e42f065290ca1d3844.png",
|
||||
"width": "479px",
|
||||
"height": "100px"
|
||||
},
|
||||
"url": "https://www.bonappetit.com"
|
||||
},
|
||||
"isPartOf": {
|
||||
"@type": [
|
||||
"CreativeWork",
|
||||
"Product"
|
||||
],
|
||||
"name": "Bon App\u00e9tit"
|
||||
},
|
||||
"isAccessibleForFree": true,
|
||||
"author": [
|
||||
{
|
||||
"@type": "Person",
|
||||
"name": "Maricela Vega",
|
||||
"sameAs": "https://bon-appetit.com/contributor/maricela-vega/"
|
||||
}
|
||||
],
|
||||
"description": "This dish is perfect for long winter days when you want to flood the house with toasty, irresistible aromas.",
|
||||
"image": "grilled-mushrooms-and-root-vegetables.jpg",
|
||||
"headline": "Grilled Mushrooms and Root Vegetables",
|
||||
"name": "Grilled Mushrooms and Root Vegetables",
|
||||
"recipeIngredient": [
|
||||
"1 cup Sesame Cr\u00e8me",
|
||||
"2 Tbsp. Allium Confit",
|
||||
"2 Tbsp. finely chopped mixed tender herbs (parsley, oregano, and/or mint), stems reservedfor blanching vegetables",
|
||||
"2 Tbsp. apple cider vinegar",
|
||||
"Kosher salt, freshly ground pepper",
|
||||
"1 cup finely chopped mixed herbs (parsley, oregano, and/or mint), stems reserved for blanching vegetables",
|
||||
"Kosher salt",
|
||||
"4 medium carrots (about 1 lb.), preferably rainbow, scrubbed, halved lengthwise, cut crosswise into thirds",
|
||||
"4 lb. baby potatoes, halved",
|
||||
"1 head of radicchio, coarsely chopped, divided",
|
||||
"1 lb. lion\u2019s mane, king trumpet, or oyster mushrooms, cut into 2\" pieces",
|
||||
"\u00bd cup Pumpkin Hot Sauce",
|
||||
"\u00bd cup grapeseed or vegetable oil",
|
||||
"Freshly ground black pepper",
|
||||
"1 cup Spiced Pecans",
|
||||
"1 lemon, halved"
|
||||
],
|
||||
"recipeInstructions": [
|
||||
{
|
||||
"text": "Pur\u00e9e Sesame Cr\u00e8me, Allium Confit, chopped herbs, and vinegar in a blender on high speed, adding ice water by the tablespoonful as needed to achieve a pourable consistency, until smooth and creamy. Season sauce with salt and pepper."
|
||||
},
|
||||
{
|
||||
"text": "Fill a large pot three quarters full with water, add reserved herb stems, and season heavily with salt. Bring water to a boil, then add carrots and cook until just tender, about 3 minutes. Using a slotted spoon, immediately transfer carrots to a large bowl of ice water and let cool."
|
||||
},
|
||||
{
|
||||
"text": "Place potatoes in same pot and return to a boil. Cook until tender (flesh should be easy to pierce with a fork), about 10 minutes. Using slotted spoon, transfer potatoes to bowl of ice water and let cool. Drain carrots and potatoes; place in a clean large bowl and add half of the radicchio. Place mushrooms in a medium bowl."
|
||||
},
|
||||
{
|
||||
"text": "Whisk Pumpkin Hot Sauce, oil, and chopped herbs in another medium bowl. Pour half of mixture over carrots and potatoes and the other half over mushrooms; toss each to coat. Season with salt and pepper."
|
||||
},
|
||||
{
|
||||
"text": "Prepare a grill for medium-high heat. (Alternatively, heat a large cast-iron skillet over medium-high.) Grill mushrooms, turning occasionally, until deep golden brown and crisp around the edges (or cook in batches, stirring often, if using a skillet), 12\u201314 minutes. Transfer mushrooms to a large shallow serving bowl."
|
||||
},
|
||||
{
|
||||
"text": "Grill carrots, potatoes, and radicchio, turning occasionally, until deep golden brown all over (or cook in batches, tossing often, if using a skillet), about 4 minutes. Transfer vegetables to bowl with mushrooms and toss to combine."
|
||||
},
|
||||
{
|
||||
"text": "To serve, drizzle generously with sesame sauce; top with Spiced Pecans and remaining radicchio. Squeeze juice from each lemon half over."
|
||||
}
|
||||
],
|
||||
"recipeYield": "4 Servings",
|
||||
"url": "https://www.bonappetit.com/recipe/grilled-mushrooms-and-root-vegetables",
|
||||
"slug": "grilled-mushrooms-and-root-vegetables",
|
||||
"orgURL": "https://www.bonappetit.com/recipe/grilled-mushrooms-and-root-vegetables",
|
||||
"categories": [],
|
||||
"tags": [],
|
||||
"dateAdded": null,
|
||||
"notes": [],
|
||||
"extras": []
|
||||
}
|
|
@ -1,7 +1,62 @@
|
|||
from db.db_mealplan import _Meals
|
||||
from db.db_recipes import _Recipes
|
||||
from db.db_settings import _Settings
|
||||
from db.db_themes import _Themes
|
||||
from db.db_base import BaseDocument
|
||||
from db.sql.db_session import create_session
|
||||
from db.sql.meal_models import MealPlanModel
|
||||
from db.sql.recipe_models import RecipeModel
|
||||
from db.sql.settings_models import SiteSettingsModel
|
||||
from db.sql.theme_models import SiteThemeModel
|
||||
|
||||
"""
|
||||
# TODO
|
||||
- [ ] Abstract Classes to use save_new, and update from base models
|
||||
- [ ] Create Category and Tags Table with Many to Many relationship
|
||||
"""
|
||||
class _Recipes(BaseDocument):
|
||||
def __init__(self) -> None:
|
||||
self.primary_key = "slug"
|
||||
self.sql_model = RecipeModel
|
||||
self.create_session = create_session
|
||||
|
||||
def update_image(self, slug: str, extension: str) -> None:
|
||||
pass
|
||||
|
||||
|
||||
class _Meals(BaseDocument):
|
||||
def __init__(self) -> None:
|
||||
self.primary_key = "uid"
|
||||
self.sql_model = MealPlanModel
|
||||
self.create_session = create_session
|
||||
|
||||
|
||||
class _Settings(BaseDocument):
|
||||
def __init__(self) -> None:
|
||||
self.primary_key = "name"
|
||||
self.sql_model = SiteSettingsModel
|
||||
self.create_session = create_session
|
||||
|
||||
def save_new(self, main: dict, webhooks: dict) -> str:
|
||||
session = create_session()
|
||||
new_settings = self.sql_model(main.get("name"), webhooks)
|
||||
|
||||
session.add(new_settings)
|
||||
session.commit()
|
||||
|
||||
return new_settings.dict()
|
||||
|
||||
|
||||
class _Themes(BaseDocument):
|
||||
def __init__(self) -> None:
|
||||
self.primary_key = "name"
|
||||
self.sql_model = SiteThemeModel
|
||||
self.create_session = create_session
|
||||
|
||||
def update(self, data: dict) -> dict:
|
||||
session, theme_model = self._query_one(
|
||||
match_value=data["name"], match_key="name"
|
||||
)
|
||||
|
||||
theme_model.update(**data)
|
||||
session.commit()
|
||||
session.close()
|
||||
|
||||
|
||||
class Database:
|
||||
|
|
|
@ -1,8 +1,5 @@
|
|||
import json
|
||||
from typing import Union
|
||||
|
||||
import mongoengine
|
||||
from app_config import USE_MONGO, USE_SQL
|
||||
from sqlalchemy.orm.session import Session
|
||||
|
||||
from db.sql.db_session import create_session
|
||||
|
@ -13,57 +10,10 @@ class BaseDocument:
|
|||
def __init__(self) -> None:
|
||||
self.primary_key: str
|
||||
self.store: str
|
||||
self.document: mongoengine.Document
|
||||
self.sql_model: SqlAlchemyBase
|
||||
self.create_session = create_session
|
||||
|
||||
@staticmethod # TODO: Probably Put a version in each class to speed up reads?
|
||||
def _unpack_mongo(document) -> dict:
|
||||
document = json.loads(document.to_json())
|
||||
del document["_id"]
|
||||
|
||||
# Recipe Cleanup
|
||||
try:
|
||||
document["dateAdded"] = document["dateAdded"]["$date"]
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
document["uid"] = document["uid"]["$uuid"]
|
||||
except:
|
||||
pass
|
||||
|
||||
# Meal Plan
|
||||
try:
|
||||
document["startDate"] = document["startDate"]["$date"]
|
||||
document["endDate"] = document["endDate"]["$date"]
|
||||
|
||||
meals = []
|
||||
for meal in document["meals"]:
|
||||
meal["date"] = meal["date"]["$date"]
|
||||
meals.append(meal)
|
||||
document["meals"] = meals
|
||||
except:
|
||||
pass
|
||||
|
||||
return document
|
||||
|
||||
def get_all(self, limit: int = None, order_by: str = None):
|
||||
if USE_MONGO:
|
||||
if order_by:
|
||||
documents = self.document.objects.order_by(str(order_by)).limit(limit)
|
||||
elif limit == None:
|
||||
documents = self.document.objects()
|
||||
else:
|
||||
documents = self.document.objects().limit(limit)
|
||||
|
||||
docs = [BaseDocument._unpack_mongo(item) for item in documents]
|
||||
|
||||
if limit == 1:
|
||||
return docs[0]
|
||||
return docs
|
||||
|
||||
elif USE_SQL:
|
||||
session = create_session()
|
||||
list = [x.dict() for x in session.query(self.sql_model).all()]
|
||||
session.close()
|
||||
|
@ -115,68 +65,37 @@ class BaseDocument:
|
|||
if match_key == None:
|
||||
match_key = self.primary_key
|
||||
|
||||
if USE_MONGO:
|
||||
document = self.document.objects.get(**{str(match_key): match_value})
|
||||
db_entry = BaseDocument._unpack_mongo(document)
|
||||
|
||||
elif USE_SQL:
|
||||
session = self.create_session()
|
||||
result = (
|
||||
session.query(self.sql_model)
|
||||
.filter_by(**{match_key: match_value})
|
||||
.one()
|
||||
session.query(self.sql_model).filter_by(**{match_key: match_value}).one()
|
||||
)
|
||||
db_entry = result.dict()
|
||||
session.close()
|
||||
|
||||
return db_entry
|
||||
|
||||
else:
|
||||
raise Exception("No database type established")
|
||||
|
||||
if limit == 1 and type(db_entry) == list:
|
||||
return db_entry[0]
|
||||
else:
|
||||
return db_entry
|
||||
|
||||
def save_new(self, document: dict) -> dict:
|
||||
if USE_MONGO:
|
||||
new_document = self.document(**document)
|
||||
new_document.save()
|
||||
return BaseDocument._unpack_mongo(new_document)
|
||||
elif USE_SQL:
|
||||
session = self.create_session()
|
||||
new_document = self.sql_model(**document)
|
||||
session.add(new_document)
|
||||
return_data = new_document.dict()
|
||||
session.commit()
|
||||
|
||||
|
||||
session.close()
|
||||
return return_data
|
||||
|
||||
def update(self, match_value, new_data) -> dict:
|
||||
if USE_MONGO:
|
||||
return_data = self.update_mongo(match_value, new_data)
|
||||
elif USE_SQL:
|
||||
session, entry = self._query_one(match_value=match_value)
|
||||
entry.update(session=session, **new_data)
|
||||
return_data = entry.dict()
|
||||
session.commit()
|
||||
|
||||
session.close()
|
||||
else:
|
||||
raise Exception("No Database Configured")
|
||||
|
||||
return return_data
|
||||
|
||||
def delete(self, primary_key_value) -> dict:
|
||||
if USE_MONGO:
|
||||
document = self.document.objects.get(
|
||||
**{str(self.primary_key): primary_key_value}
|
||||
)
|
||||
|
||||
if document:
|
||||
document.delete()
|
||||
elif USE_SQL:
|
||||
session = create_session()
|
||||
|
||||
result = (
|
||||
|
|
|
@ -1,63 +0,0 @@
|
|||
from typing import List
|
||||
|
||||
from app_config import USE_MONGO, USE_SQL
|
||||
|
||||
from db.db_base import BaseDocument
|
||||
from db.db_setup import USE_MONGO, USE_SQL
|
||||
from db.mongo.meal_models import MealDocument, MealPlanDocument
|
||||
from db.sql.db_session import create_session
|
||||
from db.sql.meal_models import MealPlanModel
|
||||
|
||||
|
||||
class _Meals(BaseDocument):
|
||||
def __init__(self) -> None:
|
||||
self.primary_key = "uid"
|
||||
if USE_SQL:
|
||||
self.sql_model = MealPlanModel
|
||||
self.create_session = create_session
|
||||
|
||||
self.document = MealPlanDocument
|
||||
|
||||
@staticmethod
|
||||
def _process_meals(meals: List[dict]) -> List[MealDocument]:
|
||||
"""Turns a list of Meals in dictionary form into a list of
|
||||
MealDocuments that can be attached to a MealPlanDocument
|
||||
|
||||
|
||||
Args: \n
|
||||
meals (List[dict]): From a Pydantic Class in meal_services.py \n
|
||||
|
||||
Returns:
|
||||
a List of MealDocuments
|
||||
"""
|
||||
meal_docs = []
|
||||
for meal in meals:
|
||||
meal_doc = MealDocument(**meal)
|
||||
meal_docs.append(meal_doc)
|
||||
|
||||
return meal_docs
|
||||
|
||||
def save_new_mongo(self, plan_data: dict) -> None:
|
||||
"""Saves a new meal plan into the database
|
||||
|
||||
Args: \n
|
||||
plan_data (dict): From a Pydantic Class in meal_services.py \n
|
||||
"""
|
||||
|
||||
if USE_MONGO:
|
||||
plan_data["meals"] = _Meals._process_meals(plan_data["meals"])
|
||||
document = self.document(**plan_data)
|
||||
|
||||
document.save()
|
||||
elif USE_SQL:
|
||||
pass
|
||||
|
||||
def update_mongo(self, uid: str, plan_data: dict) -> dict:
|
||||
if USE_MONGO:
|
||||
document = self.document.objects.get(uid=uid)
|
||||
if document:
|
||||
new_meals = _Meals._process_meals(plan_data["meals"])
|
||||
document.update(set__meals=new_meals)
|
||||
document.save()
|
||||
elif USE_SQL:
|
||||
pass
|
|
@ -1,68 +0,0 @@
|
|||
from app_config import USE_MONGO, USE_SQL
|
||||
|
||||
from db.db_base import BaseDocument
|
||||
from db.mongo.recipe_models import RecipeDocument
|
||||
from db.sql.db_session import create_session
|
||||
from db.sql.recipe_models import RecipeModel
|
||||
|
||||
|
||||
class _Recipes(BaseDocument):
|
||||
def __init__(self) -> None:
|
||||
self.primary_key = "slug"
|
||||
if USE_SQL:
|
||||
self.sql_model = RecipeModel
|
||||
self.create_session = create_session
|
||||
else:
|
||||
self.document = RecipeDocument
|
||||
|
||||
def save_new_sql(self, recipe_data: dict):
|
||||
session = self.create_session()
|
||||
new_recipe = self.sql_model(**recipe_data)
|
||||
session.add(new_recipe)
|
||||
session.commit()
|
||||
|
||||
return recipe_data
|
||||
|
||||
def update_mongo(self, slug: str, new_data: dict) -> None:
|
||||
if USE_MONGO:
|
||||
document = self.document.objects.get(slug=slug)
|
||||
|
||||
if document:
|
||||
document.update(set__name=new_data.get("name"))
|
||||
document.update(set__description=new_data.get("description"))
|
||||
document.update(set__image=new_data.get("image"))
|
||||
document.update(set__recipeYield=new_data.get("recipeYield"))
|
||||
document.update(set__recipeIngredient=new_data.get("recipeIngredient"))
|
||||
document.update(
|
||||
set__recipeInstructions=new_data.get("recipeInstructions")
|
||||
)
|
||||
document.update(set__totalTime=new_data.get("totalTime"))
|
||||
|
||||
document.update(set__slug=new_data.get("slug"))
|
||||
document.update(set__categories=new_data.get("categories"))
|
||||
document.update(set__tags=new_data.get("tags"))
|
||||
document.update(set__notes=new_data.get("notes"))
|
||||
document.update(set__orgURL=new_data.get("orgURL"))
|
||||
document.update(set__rating=new_data.get("rating"))
|
||||
document.update(set__extras=new_data.get("extras"))
|
||||
document.save()
|
||||
|
||||
return new_data
|
||||
# elif USE_SQL:
|
||||
# session, recipe = self._query_one(match_value=slug)
|
||||
# recipe.update(session=session, **new_data)
|
||||
# recipe_dict = recipe.dict()
|
||||
# session.commit()
|
||||
|
||||
# session.close()
|
||||
|
||||
# return recipe_dict
|
||||
|
||||
def update_image(self, slug: str, extension: str) -> None:
|
||||
if USE_MONGO:
|
||||
document = self.document.objects.get(slug=slug)
|
||||
|
||||
if document:
|
||||
document.update(set__image=f"{slug}.{extension}")
|
||||
elif USE_SQL:
|
||||
pass
|
|
@ -1,44 +0,0 @@
|
|||
from app_config import USE_MONGO, USE_SQL
|
||||
|
||||
from db.db_base import BaseDocument
|
||||
from db.db_setup import USE_MONGO, USE_SQL
|
||||
from db.mongo.settings_models import SiteSettingsDocument, WebhooksDocument
|
||||
from db.sql.db_session import create_session
|
||||
from db.sql.settings_models import SiteSettingsModel
|
||||
|
||||
|
||||
class _Settings(BaseDocument):
|
||||
def __init__(self) -> None:
|
||||
|
||||
self.primary_key = "name"
|
||||
|
||||
if USE_SQL:
|
||||
self.sql_model = SiteSettingsModel
|
||||
self.create_session = create_session
|
||||
|
||||
self.document = SiteSettingsDocument
|
||||
|
||||
def save_new(self, main: dict, webhooks: dict) -> str:
|
||||
|
||||
if USE_MONGO:
|
||||
main["webhooks"] = WebhooksDocument(**webhooks)
|
||||
new_doc = self.document(**main)
|
||||
return new_doc.save()
|
||||
|
||||
elif USE_SQL:
|
||||
session = create_session()
|
||||
new_settings = self.sql_model(main.get("name"), webhooks)
|
||||
|
||||
session.add(new_settings)
|
||||
session.commit()
|
||||
|
||||
return new_settings.dict()
|
||||
|
||||
def update_mongo(self, name: str, new_data: dict) -> dict:
|
||||
if USE_MONGO:
|
||||
document = self.document.objects.get(name=name)
|
||||
if document:
|
||||
document.update(set__webhooks=WebhooksDocument(**new_data["webhooks"]))
|
||||
document.save()
|
||||
elif USE_SQL:
|
||||
return
|
|
@ -1,4 +1,4 @@
|
|||
from app_config import SQLITE_FILE, USE_MONGO, USE_SQL
|
||||
from app_config import SQLITE_FILE, USE_SQL
|
||||
|
||||
from db.sql.db_session import globa_init as sql_global_init
|
||||
|
||||
|
@ -10,7 +10,5 @@ if USE_SQL:
|
|||
|
||||
pass
|
||||
|
||||
elif USE_MONGO:
|
||||
from db.mongo.mongo_setup import global_init as mongo_global_init
|
||||
|
||||
mongo_global_init()
|
||||
else:
|
||||
raise Exception("Cannot identify database type")
|
||||
|
|
|
@ -1,56 +0,0 @@
|
|||
from app_config import USE_MONGO, USE_SQL
|
||||
|
||||
from db.db_base import BaseDocument
|
||||
from db.db_setup import USE_MONGO, USE_SQL
|
||||
from db.mongo.settings_models import SiteThemeDocument, ThemeColorsDocument
|
||||
from db.sql.db_session import create_session
|
||||
from db.sql.theme_models import SiteThemeModel
|
||||
|
||||
|
||||
class _Themes(BaseDocument):
|
||||
def __init__(self) -> None:
|
||||
self.primary_key = "name"
|
||||
if USE_SQL:
|
||||
self.sql_model = SiteThemeModel
|
||||
self.create_session = create_session
|
||||
else:
|
||||
self.document = SiteThemeDocument
|
||||
|
||||
def save_new(self, theme_data: dict) -> None:
|
||||
if USE_MONGO:
|
||||
theme_data["colors"] = ThemeColorsDocument(**theme_data["colors"])
|
||||
|
||||
document = self.document(**theme_data)
|
||||
|
||||
document.save()
|
||||
elif USE_SQL:
|
||||
session = self.create_session()
|
||||
new_theme = self.sql_model(**theme_data)
|
||||
|
||||
session.add(new_theme)
|
||||
session.commit()
|
||||
|
||||
return_data = new_theme.dict()
|
||||
|
||||
session.close()
|
||||
return return_data
|
||||
|
||||
def update(self, data: dict) -> dict:
|
||||
if USE_MONGO:
|
||||
colors = ThemeColorsDocument(**data["colors"])
|
||||
theme_document = self.document.objects.get(name=data.get("name"))
|
||||
|
||||
if theme_document:
|
||||
theme_document.update(set__colors=colors)
|
||||
theme_document.save()
|
||||
else:
|
||||
raise Exception("No database entry was found to update")
|
||||
|
||||
elif USE_SQL:
|
||||
session, theme_model = self._query_one(
|
||||
match_value=data["name"], match_key="name"
|
||||
)
|
||||
|
||||
theme_model.update(**data)
|
||||
session.commit()
|
||||
session.close()
|
|
@ -1,24 +0,0 @@
|
|||
import uuid
|
||||
|
||||
import mongoengine
|
||||
|
||||
|
||||
class MealDocument(mongoengine.EmbeddedDocument):
|
||||
slug = mongoengine.StringField()
|
||||
name = mongoengine.StringField()
|
||||
date = mongoengine.DateField()
|
||||
dateText = mongoengine.StringField()
|
||||
image = mongoengine.StringField()
|
||||
description = mongoengine.StringField()
|
||||
|
||||
|
||||
class MealPlanDocument(mongoengine.Document):
|
||||
uid = mongoengine.UUIDField(default=uuid.uuid1)
|
||||
startDate = mongoengine.DateField(required=True)
|
||||
endDate = mongoengine.DateField(required=True)
|
||||
meals = mongoengine.ListField(required=True)
|
||||
|
||||
meta = {
|
||||
"db_alias": "core",
|
||||
"collection": "meals",
|
||||
}
|
|
@ -1,17 +0,0 @@
|
|||
import mongoengine
|
||||
from app_config import DB_HOST, DB_PASSWORD, DB_PORT, DB_USERNAME, MEALIE_DB_NAME
|
||||
from utils.logger import logger
|
||||
|
||||
|
||||
def global_init():
|
||||
mongoengine.register_connection(
|
||||
alias="core",
|
||||
name=MEALIE_DB_NAME,
|
||||
host=DB_HOST,
|
||||
port=int(DB_PORT),
|
||||
username=DB_USERNAME,
|
||||
password=DB_PASSWORD,
|
||||
authentication_source="admin",
|
||||
)
|
||||
|
||||
logger.info("Mongo Data Initialized")
|
|
@ -1,34 +0,0 @@
|
|||
import datetime
|
||||
|
||||
import mongoengine
|
||||
|
||||
|
||||
class RecipeDocument(mongoengine.Document):
|
||||
# Standard Schema
|
||||
# id = mongoengine.UUIDField(primary_key=True)
|
||||
name = mongoengine.StringField(required=True)
|
||||
description = mongoengine.StringField(required=True)
|
||||
image = mongoengine.StringField(required=False)
|
||||
recipeYield = mongoengine.StringField(required=True, default="")
|
||||
recipeIngredient = mongoengine.ListField(required=True, default=[])
|
||||
recipeInstructions = mongoengine.ListField(requiredd=True, default=[])
|
||||
totalTime = mongoengine.StringField(required=False)
|
||||
|
||||
# Mealie Specific
|
||||
slug = mongoengine.StringField(required=True, unique=True)
|
||||
categories = mongoengine.ListField(default=[])
|
||||
tags = mongoengine.ListField(default=[])
|
||||
dateAdded = mongoengine.DateTimeField(binary=True, default=datetime.date.today)
|
||||
notes = mongoengine.ListField(default=[])
|
||||
rating = mongoengine.IntField(required=True, default=0)
|
||||
orgURL = mongoengine.URLField(required=False)
|
||||
extras = mongoengine.DictField(required=False)
|
||||
|
||||
meta = {
|
||||
"db_alias": "core",
|
||||
"collection": "recipes",
|
||||
}
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
pass
|
|
@ -1,37 +0,0 @@
|
|||
import mongoengine
|
||||
|
||||
|
||||
class WebhooksDocument(mongoengine.EmbeddedDocument):
|
||||
webhookURLs = mongoengine.ListField(required=False, default=[])
|
||||
webhookTime = mongoengine.StringField(required=False, default="00:00")
|
||||
enabled = mongoengine.BooleanField(required=False, default=False)
|
||||
|
||||
|
||||
class SiteSettingsDocument(mongoengine.Document):
|
||||
name = mongoengine.StringField(require=True, default="main", unique=True)
|
||||
webhooks = mongoengine.EmbeddedDocumentField(WebhooksDocument, required=True)
|
||||
|
||||
meta = {
|
||||
"db_alias": "core",
|
||||
"collection": "settings",
|
||||
}
|
||||
|
||||
|
||||
class ThemeColorsDocument(mongoengine.EmbeddedDocument):
|
||||
primary = mongoengine.StringField(require=True)
|
||||
accent = mongoengine.StringField(require=True)
|
||||
secondary = mongoengine.StringField(require=True)
|
||||
success = mongoengine.StringField(require=True)
|
||||
info = mongoengine.StringField(require=True)
|
||||
warning = mongoengine.StringField(require=True)
|
||||
error = mongoengine.StringField(require=True)
|
||||
|
||||
|
||||
class SiteThemeDocument(mongoengine.Document):
|
||||
name = mongoengine.StringField(require=True, unique=True)
|
||||
colors = mongoengine.EmbeddedDocumentField(ThemeColorsDocument, required=True)
|
||||
|
||||
meta = {
|
||||
"db_alias": "core",
|
||||
"collection": "themes",
|
||||
}
|
|
@ -1,6 +0,0 @@
|
|||
|
||||
# import mongoengine
|
||||
|
||||
# class User(mongoengine.Document):
|
||||
# username: mongoengine.EmailField()
|
||||
# password: mongoengine.ReferenceField
|
|
@ -4,15 +4,32 @@ from typing import List, Optional
|
|||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class BackupOptions(BaseModel):
|
||||
recipes: bool = True
|
||||
settings: bool = True
|
||||
themes: bool = True
|
||||
|
||||
class Config:
|
||||
schema_extra = {
|
||||
"example": {
|
||||
"recipes": True,
|
||||
"settings": True,
|
||||
"themes": True,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class BackupJob(BaseModel):
|
||||
tag: Optional[str]
|
||||
template: Optional[List[str]]
|
||||
options: BackupOptions
|
||||
templates: Optional[List[str]] = []
|
||||
|
||||
class Config:
|
||||
schema_extra = {
|
||||
"example": {
|
||||
"tag": "July 23rd 2021",
|
||||
"template": "recipes.md",
|
||||
"options": BackupOptions,
|
||||
"template": ["recipes.md"],
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -56,6 +73,6 @@ class ImportJob(BaseModel):
|
|||
"force": False,
|
||||
"rebase": False,
|
||||
"themes": False,
|
||||
"settings": False
|
||||
"settings": False,
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,7 +19,7 @@ def available_imports():
|
|||
backup = LocalBackup(name=archive.name, date=archive.stat().st_ctime)
|
||||
imports.append(backup)
|
||||
|
||||
for template in TEMPLATE_DIR.glob("*.md"):
|
||||
for template in TEMPLATE_DIR.glob("*.*"):
|
||||
templates.append(template.name)
|
||||
|
||||
imports.sort(key=operator.attrgetter("date"), reverse=True)
|
||||
|
@ -30,7 +30,13 @@ def available_imports():
|
|||
@router.post("/api/backups/export/database/", status_code=201)
|
||||
def export_database(data: BackupJob):
|
||||
"""Generates a backup of the recipe database in json format."""
|
||||
export_path = backup_all(data.tag, data.template)
|
||||
export_path = backup_all(
|
||||
tag=data.tag,
|
||||
templates=data.templates,
|
||||
export_recipes=data.options.recipes,
|
||||
export_settings=data.options.settings,
|
||||
export_themes=data.options.themes,
|
||||
)
|
||||
try:
|
||||
return SnackResponse.success("Backup Created at " + export_path)
|
||||
except:
|
||||
|
|
|
@ -3,11 +3,11 @@ import shutil
|
|||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
from app_config import BACKUP_DIR, IMG_DIR, TEMP_DIR, TEMPLATE_DIR
|
||||
from jinja2 import Template
|
||||
from services.meal_services import MealPlan
|
||||
from services.recipe_services import Recipe
|
||||
from services.settings_services import SiteSettings, SiteTheme
|
||||
from app_config import BACKUP_DIR, IMG_DIR, TEMP_DIR, TEMPLATE_DIR
|
||||
from utils.logger import logger
|
||||
|
||||
|
||||
|
@ -123,15 +123,26 @@ class ExportDatabase:
|
|||
return str(zip_path.absolute()) + ".zip"
|
||||
|
||||
|
||||
def backup_all(tag=None, templates=None):
|
||||
def backup_all(
|
||||
tag=None,
|
||||
templates=None,
|
||||
export_recipes=True,
|
||||
export_settings=True,
|
||||
export_themes=True,
|
||||
):
|
||||
db_export = ExportDatabase(tag=tag, templates=templates)
|
||||
|
||||
if export_recipes:
|
||||
db_export.export_recipes()
|
||||
db_export.export_images()
|
||||
|
||||
if export_settings:
|
||||
db_export.export_settings()
|
||||
|
||||
if export_themes:
|
||||
db_export.export_themes()
|
||||
db_export.export_meals()
|
||||
#
|
||||
# db_export.export_meals()
|
||||
|
||||
return db_export.finish_export()
|
||||
|
||||
|
||||
|
|
|
@ -2,9 +2,7 @@ import shutil
|
|||
from pathlib import Path
|
||||
|
||||
import requests
|
||||
|
||||
CWD = Path(__file__).parent
|
||||
IMG_DIR = CWD.parent.joinpath("data", "img")
|
||||
from app_config import IMG_DIR
|
||||
|
||||
|
||||
def read_image(recipe_slug: str) -> Path:
|
||||
|
|
|
@ -57,7 +57,17 @@ def normalize_yield(yld) -> str:
|
|||
return yld
|
||||
|
||||
|
||||
def normalize_time(time_entry) -> str:
|
||||
if type(time_entry) == type(None):
|
||||
return None
|
||||
elif type(time_entry) != str:
|
||||
return str(time_entry)
|
||||
|
||||
|
||||
def normalize_data(recipe_data: dict) -> dict:
|
||||
recipe_data["totalTime"] = normalize_time(recipe_data.get("totalTime"))
|
||||
recipe_data["prepTime"] = normalize_time(recipe_data.get("prepTime"))
|
||||
recipe_data["performTime"] = normalize_time(recipe_data.get("performTime"))
|
||||
recipe_data["recipeYield"] = normalize_yield(recipe_data.get("recipeYield"))
|
||||
recipe_data["recipeInstructions"] = normalize_instructions(
|
||||
recipe_data["recipeInstructions"]
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import json
|
||||
|
||||
from app_config import BASE_DIR
|
||||
from app_config import DATA_DIR
|
||||
|
||||
"""Script to export the ReDoc documentation page into a standalone HTML file."""
|
||||
|
||||
|
@ -33,7 +33,7 @@ HTML_TEMPLATE = """<!DOCTYPE html>
|
|||
|
||||
|
||||
def generate_api_docs(app):
|
||||
out_dir = BASE_DIR.joinpath(".temp")
|
||||
out_dir = DATA_DIR.joinpath(".temp")
|
||||
out_dir.mkdir(parents=True, exist_ok=True)
|
||||
out_path = out_dir.joinpath("index.html")
|
||||
with open(out_path, "w") as fd:
|
||||
|
|
|
@ -1,16 +1,17 @@
|
|||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
from app_config import DATA_DIR
|
||||
|
||||
LOGGER_LEVEL = "INFO"
|
||||
CWD = Path(__file__).parent
|
||||
LOGGER_FILE = CWD.parent.joinpath("data", "mealie.log")
|
||||
LOGGER_FILE = DATA_DIR.joinpath("mealie.log")
|
||||
|
||||
|
||||
logging.basicConfig(
|
||||
level=LOGGER_LEVEL,
|
||||
format="%(asctime)s %(levelname)s: %(message)s",
|
||||
datefmt="%d-%b-%y %H:%M:%S",
|
||||
filename=LOGGER_FILE,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
|
101
poetry.lock
generated
|
@ -395,17 +395,6 @@ BeautifulSoup4 = ">=4.6.0"
|
|||
html5lib = ">=1.0.1"
|
||||
requests = ">=2.18.4"
|
||||
|
||||
[[package]]
|
||||
name = "mongoengine"
|
||||
version = "0.22.1"
|
||||
description = "MongoEngine is a Python Object-Document Mapper for working with MongoDB."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
|
||||
[package.dependencies]
|
||||
pymongo = ">=3.4,<4.0"
|
||||
|
||||
[[package]]
|
||||
name = "mypy-extensions"
|
||||
version = "0.4.3"
|
||||
|
@ -480,24 +469,6 @@ isort = ">=4.2.5,<6"
|
|||
mccabe = ">=0.6,<0.7"
|
||||
toml = ">=0.7.1"
|
||||
|
||||
[[package]]
|
||||
name = "pymongo"
|
||||
version = "3.11.2"
|
||||
description = "Python driver for MongoDB <http://www.mongodb.org>"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
|
||||
[package.extras]
|
||||
aws = ["pymongo-auth-aws (<2.0.0)"]
|
||||
encryption = ["pymongocrypt (<2.0.0)"]
|
||||
gssapi = ["pykerberos"]
|
||||
ocsp = ["pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"]
|
||||
snappy = ["python-snappy"]
|
||||
srv = ["dnspython (>=1.16.0,<1.17.0)"]
|
||||
tls = ["ipaddress"]
|
||||
zstd = ["zstandard"]
|
||||
|
||||
[[package]]
|
||||
name = "pyparsing"
|
||||
version = "2.4.7"
|
||||
|
@ -864,7 +835,7 @@ python-versions = "*"
|
|||
[metadata]
|
||||
lock-version = "1.1"
|
||||
python-versions = "^3.8"
|
||||
content-hash = "41e74af4ccdf3d291de04842718297a83774c80da335894e1eb0fcacb0fddac5"
|
||||
content-hash = "49f7f72b21beae20519bf5c1e86ab21391bbaa7c5fd47d455ee898d86b7d6ee0"
|
||||
|
||||
[metadata.files]
|
||||
aiofiles = [
|
||||
|
@ -1096,10 +1067,6 @@ mccabe = [
|
|||
mf2py = [
|
||||
{file = "mf2py-1.1.2.tar.gz", hash = "sha256:84f1f8f2ff3f1deb1c30be497e7ccd805452996a662fd4a77f09e0105bede2c9"},
|
||||
]
|
||||
mongoengine = [
|
||||
{file = "mongoengine-0.22.1-py3-none-any.whl", hash = "sha256:4d5efb8b6ddffc087d0741fe56fe30637b5629e33c8fae8de53a907ec20c43dd"},
|
||||
{file = "mongoengine-0.22.1.tar.gz", hash = "sha256:620d13db551c849402eb3c362878934f5260ec0028fff2a5d81bf53b21d91572"},
|
||||
]
|
||||
mypy-extensions = [
|
||||
{file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
|
||||
{file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
|
||||
|
@@ -1148,72 +1115,6 @@ pylint = [
    {file = "pylint-2.6.0-py3-none-any.whl", hash = "sha256:bfe68f020f8a0fece830a22dd4d5dddb4ecc6137db04face4c3420a46a52239f"},
    {file = "pylint-2.6.0.tar.gz", hash = "sha256:bb4a908c9dadbc3aac18860550e870f58e1a02c9f2c204fdf5693d73be061210"},
]
pymongo = [
    {file = "pymongo-3.11.2-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:9be785bd4e1ba0148fb00ca84e4dbfbd1c74df3af3a648559adc60b0782f34de"},
    {file = "pymongo-3.11.2-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:646d4d30c5aa7c0ddbfe9b990f0f77a88621024a21ad0b792bd9d58caa9611f0"},
    {file = "pymongo-3.11.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:8d669c720891781e7c82d412cad39f9730ef277e3957b48a3344dae47d3caa03"},
    {file = "pymongo-3.11.2-cp27-cp27m-win32.whl", hash = "sha256:ce53c00be204ec4428d3c1f3c478ae89d388efec575544c27f57b61e9fa4a7f2"},
    {file = "pymongo-3.11.2-cp27-cp27m-win_amd64.whl", hash = "sha256:82d5ded5834b6c92380847860eb28dcaf20b847a27cee5811c4aaceef87fd280"},
    {file = "pymongo-3.11.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:fcc66d17a3363b7bd6d2655de8706e25a3cd1be2bd1b8e8d8a5c504a6ef893ae"},
    {file = "pymongo-3.11.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:b875bb4b438931dce550e170bfb558597189b8d0160f4ac60f14a21955161699"},
    {file = "pymongo-3.11.2-cp34-cp34m-macosx_10_6_intel.whl", hash = "sha256:6700e251c6396cc05d7460dc05ef8e19e60a7b53b62c007725b48e123aaa2b1c"},
    {file = "pymongo-3.11.2-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:c046e09e886f4539f8626afba17fa8f2e6552731f9384e2827154e3e3b7fda4e"},
    {file = "pymongo-3.11.2-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:4942a5659ae927bb764a123a6409870ca5dd572d83b3bfb71412c9a191bbf792"},
    {file = "pymongo-3.11.2-cp34-cp34m-win32.whl", hash = "sha256:422069f2cebf58c9dd9e8040b4768f7be4f228c95bc4505e8fa8e7b4f7191ad8"},
    {file = "pymongo-3.11.2-cp34-cp34m-win_amd64.whl", hash = "sha256:44376a657717de8847d5d71a9305f3595c7e78c91ac77edbb87058d12ede87a6"},
    {file = "pymongo-3.11.2-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:36b9b98a39565a8f33803c81569442b35e749a72fb1aa7d0bcdb1a33052f8bcc"},
    {file = "pymongo-3.11.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:a118a1df7280ffab7fe0f3eab325868339ff1c4d5b8e0750db0f0a796da8f849"},
    {file = "pymongo-3.11.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c812b6e53344e92f10f12235219fb769c491a4a87a02c9c3f93fe632e493bda8"},
    {file = "pymongo-3.11.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:cc421babc687dc52ce0fc19787b2404518ca749d9db59576100946ff886f38ed"},
    {file = "pymongo-3.11.2-cp35-cp35m-manylinux2014_i686.whl", hash = "sha256:6aac7e0e8de92f11a410eb68c24a2decbac6f094e82fd95d22546d0168e7a18b"},
    {file = "pymongo-3.11.2-cp35-cp35m-manylinux2014_ppc64le.whl", hash = "sha256:c6cf288c9e03195d8e12b72a6388b32f18a5e9c2545622417a963e428e1fe496"},
    {file = "pymongo-3.11.2-cp35-cp35m-manylinux2014_s390x.whl", hash = "sha256:5980509801cbd2942df31714d055d89863684b4de26829c349362e610a48694e"},
    {file = "pymongo-3.11.2-cp35-cp35m-manylinux2014_x86_64.whl", hash = "sha256:264843ce2af0640994a4331148ef5312989bc004678c457460758766c9b4decc"},
    {file = "pymongo-3.11.2-cp35-cp35m-win32.whl", hash = "sha256:ef18aa15b1aa18c42933deed5233b3284186e9ed85c25d2704ceff5099a3964c"},
    {file = "pymongo-3.11.2-cp35-cp35m-win_amd64.whl", hash = "sha256:019ddf7ced8e42cc6c8c608927c799be8097237596c94ffe551f6ef70e55237e"},
    {file = "pymongo-3.11.2-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:96c6aef7ffb0d37206c0342abb82d874fa8cdc344267277ec63f562b94335c22"},
    {file = "pymongo-3.11.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:82f6e42ba40440a7e0a20bfe12465a3b62d65966a4c7ad1a21b36ffff88de6fe"},
    {file = "pymongo-3.11.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5ad7b96c27acd7e256b33f47cf3d23bd7dd902f9c033ae43f32ffcbc37bebafd"},
    {file = "pymongo-3.11.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:45728e6aae3023afb5b2829586d1d2bfd9f0d71cfd7d3c924b71a5e9aef617a8"},
    {file = "pymongo-3.11.2-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:ce9964c117cbe5cf6269f30a2b334d28675956e988b7dbd0b4f7370924afda2e"},
    {file = "pymongo-3.11.2-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:1222025db539641071a1b67f6950f65a6342a39db5b454bf306abd6954f1ad8a"},
    {file = "pymongo-3.11.2-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:fc4946acb6cdada08f60aca103b61334995523da65be5fe816ea8571c9967d46"},
    {file = "pymongo-3.11.2-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:76579fcf77052b39796fe4a11818d1289dd48cffe15951b3403288fa163c29f6"},
    {file = "pymongo-3.11.2-cp36-cp36m-win32.whl", hash = "sha256:d6f82e86896a8db70e8ae8fa4b7556a0f188f1d8a6c53b2ba229889d55a59308"},
    {file = "pymongo-3.11.2-cp36-cp36m-win_amd64.whl", hash = "sha256:082832a59da18efab4d9148cca396451bac99da9757f31767f706e828b5b8500"},
    {file = "pymongo-3.11.2-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:3646c2286d889618d43e01d9810ac1fc17709d2b4dec61366df5edc8ba228b3e"},
    {file = "pymongo-3.11.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:3ec8f8e106a1476659d8c020228b45614daabdbdb6c6454a843a1d4f77d13339"},
    {file = "pymongo-3.11.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:202ea1d4edc8a5439fc179802d807b49e7e563207fea5610779e56674ac770c6"},
    {file = "pymongo-3.11.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b50af6701b4a5288b77fb4db44a363aa9485caf2c3e7a40c0373fd45e34440af"},
    {file = "pymongo-3.11.2-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:46792b71ab802d9caf1fc9d52e83399ef8e1a36e91eef4d827c06e36b8df2230"},
    {file = "pymongo-3.11.2-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:21d7b48567a1c80f9266e0ab61c1218a31279d911da345679188733e354f81cc"},
    {file = "pymongo-3.11.2-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:29a6840c2ac778010547cad5870f3db2e080ad7fad01197b07fff993c08692c8"},
    {file = "pymongo-3.11.2-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:6122470dfa61d4909b75c98012c1577404ba4ab860d0095e0c6980560cb3711f"},
    {file = "pymongo-3.11.2-cp37-cp37m-win32.whl", hash = "sha256:047cc2007b280672ddfdf2e7b862aad8d898f481f65bbc9067bfa4e420a019a9"},
    {file = "pymongo-3.11.2-cp37-cp37m-win_amd64.whl", hash = "sha256:1580fad512c678b720784e5c9018621b1b3bd37fb5b1633e874738862d6435c7"},
    {file = "pymongo-3.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e69fa025a1db189443428f345fea5555d16413df6addc056e17bb8c9794b006"},
    {file = "pymongo-3.11.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:813db97e9955b6b1b50b5cebd18cb148580603bb9b067ea4c5cc656b333bc906"},
    {file = "pymongo-3.11.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:523804bd8fcb5255508052b50073a27c701b90a73ea46e29be46dad5fe01bde6"},
    {file = "pymongo-3.11.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fa741e9c805567239f845c7e9a016aff797f9bb02ff9bc8ccd2fbd9eafefedd4"},
    {file = "pymongo-3.11.2-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:b95d2c2829b5956bf54d9a22ffec911dea75abf0f0f7e0a8a57423434bfbde91"},
    {file = "pymongo-3.11.2-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:6e7a6057481a644970e43475292e1c0af095ca39a20fe83781196bd6e6690a38"},
    {file = "pymongo-3.11.2-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:540dafd6f4a0590fc966465c726b80fa7c0804490c39786ef29236fe68c94401"},
    {file = "pymongo-3.11.2-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:d9d3ae537f61011191b2fd6f8527b9f9f8a848b37d4c85a0f7bb28004c42b546"},
    {file = "pymongo-3.11.2-cp38-cp38-win32.whl", hash = "sha256:047c325c4a96e7be7d11acf58639bcf71a81ca212d9c6590e3369bc28678647a"},
    {file = "pymongo-3.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:b4294ddf76452459433ecfa6a93258608b5e462c76ef15e4695ed5e2762f009f"},
    {file = "pymongo-3.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:061d59f525831c4051af0b6dbafa62b0b8b168d4ef5b6e3c46d0811b8499d100"},
    {file = "pymongo-3.11.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ed98683d8f01f1c46ef2d02469e04e9a8fe9a73a9741a4e6e66677a73b59bec8"},
    {file = "pymongo-3.11.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7f0c507e1f108790840d6c4b594019ebf595025c324c9f7e9c9b2b15b41f884e"},
    {file = "pymongo-3.11.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9d19843568df9d263dc92ae4cc2279879add8a26996473f9155590cac635b321"},
    {file = "pymongo-3.11.2-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:6175fd105da74a09adb38f93be96e1f64873294c906e5e722cbbc5bd10c44e3b"},
    {file = "pymongo-3.11.2-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:944ed467feb949e103555863fa934fb84216a096b0004ca364d3ddf9d18e2b9e"},
    {file = "pymongo-3.11.2-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:4be4fe9d18523da98deeb0b554ac76e1dc1562ee879d62572b34dda8593efcc1"},
    {file = "pymongo-3.11.2-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:270a1f6a331eac3a393090af06df68297cb31a8b2df0bdcbd97dc613c5758e78"},
    {file = "pymongo-3.11.2-cp39-cp39-win32.whl", hash = "sha256:e565d1e4388765c135052717f15f9e0314f9d172062444c6b3fc0002e93ed04b"},
    {file = "pymongo-3.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:0a53a751d977ad02f1bd22ddb6288bb4816c4758f44a50225462aeeae9cbf6a0"},
    {file = "pymongo-3.11.2-py2.7-macosx-10.14-intel.egg", hash = "sha256:c1d1992bbdf363b22b5a9543ab7d7c6f27a1498826d50d91319b803ddcf1142e"},
    {file = "pymongo-3.11.2.tar.gz", hash = "sha256:c2b67881392a9e85aa108e75f62cdbe372d5a3f17ea5f8d3436dcf4662052f14"},
]
pyparsing = [
    {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"},
    {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
pyproject.toml
@@ -20,7 +20,6 @@ APScheduler = "^3.6.3"
SQLAlchemy = "^1.3.22"
Jinja2 = "^2.11.2"
python-dotenv = "^0.15.0"
mongoengine = "^0.22.1"
python-slugify = "^4.0.1"
requests = "^2.25.1"
PyYAML = "^5.3.1"
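With the mongoengine line dropped, the dependency block reduces to the remaining packages. A sketch of the resulting section follows, limited to what is visible in this hunk; the [tool.poetry.dependencies] header is the standard Poetry layout rather than something shown in this diff, and the real file likely lists further dependencies (such as the python version constraint) outside the hunk.

[tool.poetry.dependencies]
# Only the dependencies visible in the hunk above are listed here.
APScheduler = "^3.6.3"
SQLAlchemy = "^1.3.22"
Jinja2 = "^2.11.2"
python-dotenv = "^0.15.0"
python-slugify = "^4.0.1"
requests = "^2.25.1"
PyYAML = "^5.3.1"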