Mirror of https://github.com/hay-kot/mealie.git (synced 2025-08-22 14:33:33 -07:00)
debug routes

Commit: 23026a4cc4
Parent: b558a50a7d
3 changed files with 66 additions and 2 deletions
First changed file (two hunks):

@@ -6,6 +6,7 @@ from fastapi.staticfiles import StaticFiles
 from app_config import PORT, PRODUCTION, WEB_PATH, docs_url, redoc_url
 from routes import (
     backup_routes,
+    debug_routes,
     meal_routes,
     migration_routes,
     setting_routes,

@@ -68,6 +69,7 @@ def api_routers():
     app.include_router(user_routes.router)
     # Migration Routes
     app.include_router(migration_routes.router)
+    app.include_router(debug_routes.router)


 if PRODUCTION:
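
These hunks import the new debug_routes module and mount its router on the app. Below is a minimal standalone sketch of that FastAPI wiring, independent of mealie's actual modules; the /ping endpoint is invented purely for illustration:

from fastapi import APIRouter, FastAPI

# Router declared with a URL prefix and tag, mirroring debug_routes.router.
router = APIRouter(prefix="/api/debug", tags=["Debug"])


@router.get("/ping")  # hypothetical endpoint, for illustration only
async def ping():
    return {"status": "ok"}


app = FastAPI()
# include_router mounts every route on the router under its prefix,
# so GET /api/debug/ping is now served by the app.
app.include_router(router)
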
Second changed file (two hunks):

@@ -37,7 +37,7 @@ REQUIRED_DIRS = [
     SQLITE_DIR,
 ]

-
+APP_VERSION = "v0.2.0"
 # General
 PRODUCTION = os.environ.get("ENV")
 PORT = int(os.getenv("mealie_port", 9000))

@@ -55,7 +55,7 @@ SQLITE_FILE = None
 DATABASE_TYPE = os.getenv("db_type", "sqlite")  # mongo, sqlite
 if DATABASE_TYPE == "sqlite":
     USE_SQL = True
-    SQLITE_FILE = SQLITE_DIR.joinpath("mealie.sqlite")
+    SQLITE_FILE = SQLITE_DIR.joinpath(f"mealie_{APP_VERSION}.sqlite")

 else:
     raise Exception(
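
The config change pins the app version and folds it into the SQLite filename, so each release gets its own database file. A quick sketch of how the new path resolves, assuming SQLITE_DIR is a pathlib.Path (its real value is defined elsewhere in the config; the directory below is a placeholder):

from pathlib import Path

APP_VERSION = "v0.2.0"
SQLITE_DIR = Path("/app/data/db")  # placeholder directory, for illustration only

# Same expression as the changed line in the diff.
SQLITE_FILE = SQLITE_DIR.joinpath(f"mealie_{APP_VERSION}.sqlite")
print(SQLITE_FILE)  # /app/data/db/mealie_v0.2.0.sqlite
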
Third changed file: mealie/routes/debug_routes.py (new file, 62 lines added)

@@ -0,0 +1,62 @@
+import json
+import os
+
+from app_config import DEBUG_DIR
+from fastapi import APIRouter
+from fastapi.responses import HTMLResponse
+from utils.logger import LOGGER_FILE
+
+router = APIRouter(prefix="/api/debug", tags=["Debug"])
+
+
+@router.get("/last-recipe-json")
+async def get_last_recipe_json():
+    """ Doc Str """
+
+    with open(DEBUG_DIR.joinpath("last_recipe.json"), "r") as f:
+        return json.loads(f.read())
+
+
+@router.get("/log/{num}", response_class=HTMLResponse)
+async def get_log(num: int):
+    """ Doc Str """
+    with open(LOGGER_FILE, "rb") as f:
+        log_text = tail(f, num)
+    HTML_RESPONSE = f"""
+    <html>
+    <head>
+        <title>Mealie Log</title>
+    </head>
+    <body style="white-space: pre-line">
+    <p>
+    {log_text}
+    </p>
+    </body>
+    </html>
+    """
+
+    return HTML_RESPONSE
+
+
+def tail(f, lines=20):
+    total_lines_wanted = lines
+
+    BLOCK_SIZE = 1024
+    f.seek(0, 2)
+    block_end_byte = f.tell()
+    lines_to_go = total_lines_wanted
+    block_number = -1
+    blocks = []
+    while lines_to_go > 0 and block_end_byte > 0:
+        if block_end_byte - BLOCK_SIZE > 0:
+            f.seek(block_number * BLOCK_SIZE, 2)
+            blocks.append(f.read(BLOCK_SIZE))
+        else:
+            f.seek(0, 0)
+            blocks.append(f.read(block_end_byte))
+        lines_found = blocks[-1].count(b"\n")
+        lines_to_go -= lines_found
+        block_end_byte -= BLOCK_SIZE
+        block_number -= 1
+    all_read_text = b"".join(reversed(blocks))
+    return b"<br/>".join(all_read_text.splitlines()[-total_lines_wanted:])
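
The new module exposes two debug endpoints: /api/debug/last-recipe-json returns the contents of last_recipe.json from DEBUG_DIR, and /api/debug/log/{num} renders the last num log lines as HTML, using the tail() helper to read the log file backwards in 1024-byte blocks. A small self-contained exercise of tail(): it only needs seek/tell/read on a binary file object, so an in-memory io.BytesIO stands in for the real LOGGER_FILE (the import path below is an assumption and depends on how the package is laid out):

import io

from routes.debug_routes import tail  # assumed import path

# 100 numbered lines, each terminated with a newline.
log = io.BytesIO(b"".join(f"line {i}\n".encode() for i in range(100)))

# Last three lines, joined with <br/> exactly as the /log/{num} endpoint expects.
print(tail(log, 3))  # b'line 97<br/>line 98<br/>line 99'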