Mirror of https://github.com/mealie-recipes/mealie.git, synced 2026-02-10 09:53:14 -05:00
Merge branch 'mealie-next' into fix/translation-issues-when-scraping
@@ -1,5 +1,6 @@
import uvicorn
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.middleware.gzip import GZipMiddleware
from fastapi.routing import APIRoute

@@ -48,6 +49,17 @@ app = FastAPI(

app.add_middleware(GZipMiddleware, minimum_size=1000)

if not settings.PRODUCTION:
    allowed_origins = ["http://localhost:3000"]

    app.add_middleware(
        CORSMiddleware,
        allow_origins=allowed_origins,
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

register_debug_handler(app)

150 mealie/db/fixes/fix_migration_data.py Normal file
@@ -0,0 +1,150 @@
from uuid import uuid4

from slugify import slugify
from sqlalchemy.orm import Session

from mealie.core import root_logger
from mealie.db.models.group.group import Group
from mealie.db.models.group.shopping_list import ShoppingList, ShoppingListMultiPurposeLabel
from mealie.db.models.labels import MultiPurposeLabel
from mealie.db.models.recipe.ingredient import IngredientFoodModel, IngredientUnitModel
from mealie.db.models.recipe.recipe import RecipeModel

logger = root_logger.get_logger("init_db")

def fix_recipe_normalized_search_properties(session: Session):
    recipes = session.query(RecipeModel).all()
    recipes_fixed = False

    for recipe in recipes:
        add_to_session = False
        if recipe.name and not recipe.name_normalized:
            recipe.name_normalized = RecipeModel.normalize(recipe.name)
            add_to_session = True
        if recipe.description and not recipe.description_normalized:
            recipe.description_normalized = RecipeModel.normalize(recipe.description)
            add_to_session = True

        for ingredient in recipe.recipe_ingredient:
            if ingredient.note and not ingredient.note_normalized:
                ingredient.note_normalized = RecipeModel.normalize(ingredient.note)
                add_to_session = True
            if ingredient.original_text and not ingredient.original_text_normalized:
                # normalize the original text into the empty normalized column
                ingredient.original_text_normalized = RecipeModel.normalize(ingredient.original_text)
                add_to_session = True

        if add_to_session:
            recipes_fixed = True
            session.add(recipe)

    if recipes_fixed:
        logger.info("Updating recipe normalized search properties")
        session.commit()

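RecipeModel.normalize itself is not part of this diff; as a rough sketch of what such a helper usually does (an assumption: lowercasing plus accent stripping, here via unidecode), the backfill above turns stored text into a search-friendly form:

    # Illustrative only; not the committed implementation
    from unidecode import unidecode

    def normalize(value: str) -> str:
        return unidecode(value).lower().strip()

    print(normalize("Crème Brûlée"))  # -> "creme brulee"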
def fix_shopping_list_label_settings(session: Session):
    shopping_lists = session.query(ShoppingList).all()
    labels = session.query(MultiPurposeLabel).all()
    label_settings_fixed = False

    for shopping_list in shopping_lists:
        labels_by_id = {label.id: label for label in labels if label.group_id == shopping_list.group_id}
        for label_setting in shopping_list.label_settings:
            if not labels_by_id.pop(label_setting.label_id, None):
                # label setting is no longer valid, so delete it
                session.delete(label_setting)
                label_settings_fixed = True

        if not labels_by_id:
            # all labels are accounted for, so we don't need to add any
            continue

        label_settings_fixed = True
        for i, label in enumerate(labels_by_id.values()):
            new_label_setting = ShoppingListMultiPurposeLabel(
                id=uuid4(),
                shopping_list_id=shopping_list.id,
                label_id=label.id,
                position=i + len(shopping_list.label_settings),
            )

            session.add(new_label_setting)

    if label_settings_fixed:
        logger.info("Fixing shopping list label settings")
        session.commit()


def fix_group_slugs(session: Session):
    groups = session.query(Group).all()
    seen_slugs: set[str] = set()
    groups_fixed = False

    for group in groups:
        if not group.slug:
            original_name = group.name
            new_name = original_name
            attempts = 0
            while True:
                slug = slugify(new_name)
                if slug not in seen_slugs:
                    break

                attempts += 1
                new_name = f"{original_name} ({attempts})"

            groups_fixed = True
            group.name = new_name
            group.slug = slug

        # remember the slug so later groups cannot collide with it
        seen_slugs.add(group.slug)

    if groups_fixed:
        logger.info("Adding missing group slugs")
        session.commit()

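A small aside on why the retry above renames with a numeric suffix: slugifying the suffixed name produces a slug that no longer collides (group names below are made up):

    from slugify import slugify

    print(slugify("Family"))      # -> "family"
    print(slugify("Family (1)"))  # -> "family-1"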
def fix_normalized_unit_and_food_names(session: Session):
    units = session.query(IngredientUnitModel).all()
    units_fixed = False

    for unit in units:
        add_to_session = False
        if unit.name and not unit.name_normalized:
            unit.name_normalized = IngredientUnitModel.normalize(unit.name)
            add_to_session = True
        if unit.abbreviation and not unit.abbreviation_normalized:
            unit.abbreviation_normalized = IngredientUnitModel.normalize(unit.abbreviation)
            add_to_session = True

        if add_to_session:
            units_fixed = True
            session.add(unit)

    if units_fixed:
        logger.info("Updating unit normalized search properties")
        session.commit()

    foods = session.query(IngredientFoodModel).all()
    foods_fixed = False

    for food in foods:
        add_to_session = False
        if food.name and not food.name_normalized:
            food.name_normalized = IngredientFoodModel.normalize(food.name)
            add_to_session = True

        if add_to_session:
            foods_fixed = True
            session.add(food)

    if foods_fixed:
        logger.info("Updating food normalized search properties")
        session.commit()


def fix_migration_data(session: Session):
    logger.info("Checking for migration data fixes")
    fix_recipe_normalized_search_properties(session)
    fix_shopping_list_label_settings(session)
    fix_group_slugs(session)
    fix_normalized_unit_and_food_names(session)

@@ -11,6 +11,7 @@ from mealie.core import root_logger
from mealie.core.config import get_app_settings
from mealie.db.db_setup import session_context
from mealie.db.fixes.fix_group_with_no_name import fix_group_with_no_name
+from mealie.db.fixes.fix_migration_data import fix_migration_data
from mealie.db.fixes.fix_slug_foods import fix_slug_food_names
from mealie.repos.all_repositories import get_repositories
from mealie.repos.repository_factory import AllRepositories
@@ -97,6 +98,9 @@ def main():
            session.execute(text("CREATE EXTENSION IF NOT EXISTS pg_trgm;"))

        db = get_repositories(session)
+        safe_try(lambda: fix_migration_data(session))
+        safe_try(lambda: fix_slug_food_names(db))
+        safe_try(lambda: fix_group_with_no_name(session))

        if db.users.get_all():
            logger.debug("Database exists")
@@ -104,9 +108,6 @@ def main():
            logger.info("Database contains no users, initializing...")
            init_db(db)

-            safe_try(lambda: fix_slug_food_names(db))
-            safe_try(lambda: fix_group_with_no_name(session))


if __name__ == "__main__":
    main()

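safe_try is defined elsewhere in this module and is not shown in the hunk; presumably it only logs a failure so a single broken fixer cannot abort startup. A minimal sketch of that assumption:

    # Assumption, not the committed code
    def safe_try(func):
        try:
            func()
        except Exception as e:
            logger.error(f"Error calling '{func.__name__}': {e}")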
@@ -1,5 +1,7 @@
import uuid
+from typing import Any

+from sqlalchemy import Dialect
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.types import CHAR, TypeDecorator

@@ -17,13 +19,8 @@ class GUID(TypeDecorator):
    def generate():
        return uuid.uuid4()

-    def load_dialect_impl(self, dialect):
-        if dialect.name == "postgresql":
-            return dialect.type_descriptor(UUID())
-        else:
-            return dialect.type_descriptor(CHAR(32))
-
-    def process_bind_param(self, value, dialect):
+    @staticmethod
+    def convert_value_to_guid(value: Any, dialect: Dialect) -> str | None:
        if value is None:
            return value
        elif dialect.name == "postgresql":
@@ -35,7 +32,25 @@ class GUID(TypeDecorator):
            # hexstring
            return "%.32x" % value.int

+    def load_dialect_impl(self, dialect):
+        if dialect.name == "postgresql":
+            return dialect.type_descriptor(UUID())
+        else:
+            return dialect.type_descriptor(CHAR(32))
+
+    def process_bind_param(self, value, dialect):
+        return self.convert_value_to_guid(value, dialect)
+
+    def _uuid_value(self, value):
+        if value is None:
+            return value
+        else:
+            if not isinstance(value, uuid.UUID):
+                value = uuid.UUID(value)
+            return value
+
    def process_result_value(self, value, dialect):
-        if value is not None and not isinstance(value, uuid.UUID):
-            value = uuid.UUID(value)
-        return value
+        return self._uuid_value(value)
+
+    def sort_key_function(self, value):
+        return self._uuid_value(value)

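For reference, a minimal sketch of the two storage formats this decorator works with: PostgreSQL presumably keeps the canonical UUID text in its native UUID column, while every other dialect stores a 32-character hex string in a CHAR(32) column. The UUID literal is made up for illustration.

    import uuid

    value = uuid.UUID("9571816a-c4ee-45be-acfc-0abf6c03eff1")
    print(str(value))           # 9571816a-c4ee-45be-acfc-0abf6c03eff1 (PostgreSQL)
    print("%.32x" % value.int)  # 9571816ac4ee45beacfc0abf6c03eff1 (CHAR(32) elsewhere)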
@@ -1,9 +1,9 @@
{
  "generic": {
-    "server-error": "Váratlan hiba történt"
+    "server-error": "Egy váratlan hiba történt"
  },
  "recipe": {
-    "unique-name-error": "A receptek nevének egyedinek kell lennie"
+    "unique-name-error": "A receptek neveinek egyedi értéknek kell lenniük"
  },
  "mealplan": {
    "no-recipes-match-your-rules": "Nem található recept a beállítottt szabályok alapján"
@@ -12,7 +12,7 @@
    "user-updated": "Felhasználó frissítve",
    "password-updated": "Jelszó frissítve",
    "invalid-current-password": "Érvénytelen jelenlegi jelszó",
-    "ldap-update-password-unavailable": "Nem sikerült jelszót váltani, a felhasználó LDAP-al lépett be"
+    "ldap-update-password-unavailable": "A jelszó frissítése sikertelen, a felhasználó LDAP-al lépett be"
  },
  "group": {
    "report-deleted": "A jelentés törlésre került."

@@ -84,9 +84,7 @@ def content_with_meta(group_slug: str, recipe: Recipe) -> str:
            f"{__app_settings.BASE_URL}/api/media/recipes/{recipe.id}/images/original.webp?version={recipe.image}"
        )
    else:
-        image_url = (
-            "https://raw.githubusercontent.com/hay-kot/mealie/dev/frontend/public/img/icons/android-chrome-512x512.png"
-        )
+        image_url = "https://raw.githubusercontent.com/mealie-recipes/mealie/9571816ac4eed5beacfc0abf6c03eff1427fd0eb/frontend/static/icons/android-chrome-512x512.png"

    ingredients: list[str] = []
    if recipe.settings.disable_amount:  # type: ignore

@@ -1,4 +1,5 @@
import datetime
+import uuid
from os import path
from pathlib import Path

@@ -10,6 +11,8 @@ from sqlalchemy.orm import sessionmaker

from alembic import command
from alembic.config import Config
+from mealie.db import init_db
+from mealie.db.models._model_utils import GUID
from mealie.services._base_service import BaseService

PROJECT_DIR = Path(__file__).parent.parent.parent.parent
@@ -38,23 +41,33 @@ class AlchemyExporter(BaseService):
        self.session_maker = sessionmaker(bind=self.engine)

    @staticmethod
-    def convert_to_datetime(data: dict) -> dict:
+    def is_uuid(value: str) -> bool:
+        try:
+            uuid.UUID(value)
+            return True
+        except ValueError:
+            return False
+
+    def convert_types(self, data: dict) -> dict:
        """
-        walks the dictionary to convert all things that look like timestamps to datetime objects
+        walks the dictionary to restore all things that look like string representations of their complex types
        used in the context of reading a json file into a database via SQLAlchemy.
        """
        for key, value in data.items():
            if isinstance(value, dict):
-                data = AlchemyExporter.convert_to_datetime(value)
+                data = self.convert_types(value)
            elif isinstance(value, list):  # assume that this is a list of dictionaries
-                data[key] = [AlchemyExporter.convert_to_datetime(item) for item in value]
+                data[key] = [self.convert_types(item) for item in value]
            elif isinstance(value, str):
-                if key in AlchemyExporter.look_for_datetime:
-                    data[key] = AlchemyExporter.DateTimeParser(dt=value).dt
-                if key in AlchemyExporter.look_for_date:
-                    data[key] = AlchemyExporter.DateTimeParser(date=value).date
-                if key in AlchemyExporter.look_for_time:
-                    data[key] = AlchemyExporter.DateTimeParser(time=value).time
+                if self.is_uuid(value):
+                    # convert the data to the current database's native GUID type
+                    data[key] = GUID.convert_value_to_guid(value, self.engine.dialect)
+                if key in self.look_for_datetime:
+                    data[key] = self.DateTimeParser(dt=value).dt
+                if key in self.look_for_date:
+                    data[key] = self.DateTimeParser(date=value).date
+                if key in self.look_for_time:
+                    data[key] = self.DateTimeParser(time=value).time
        return data

    def dump_schema(self) -> dict:
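A standalone sketch of the restore idea with made-up keys and values; the real method delegates UUID handling to GUID.convert_value_to_guid and timestamp parsing to DateTimeParser:

    from datetime import datetime
    from uuid import UUID

    row = {"id": "9571816a-c4ee-45be-acfc-0abf6c03eff1", "created_at": "2023-02-14T09:53:14"}
    restored = {
        "id": UUID(row["id"]),                                     # UUID string back to a native UUID
        "created_at": datetime.fromisoformat(row["created_at"]),   # ISO string back to a datetime
    }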
@@ -105,7 +118,7 @@ class AlchemyExporter(BaseService):
        del db_dump["alembic_version"]
        """Restores all data from dictionary into the database"""
        with self.engine.begin() as connection:
-            data = AlchemyExporter.convert_to_datetime(db_dump)
+            data = self.convert_types(db_dump)

            self.meta.reflect(bind=self.engine)
            for table_name, rows in data.items():
@@ -139,8 +152,8 @@ SELECT SETVAL('shopping_list_item_extras_id_seq', (SELECT MAX(id) FROM shopping_
                )
            )

-        # Run all migrations up to current version
-        command.upgrade(alembic_cfg, "head")
+        # Re-init database to finish migrations
+        init_db.main()

    def drop_all(self) -> None:
        """Drops all data from the database"""

@@ -83,7 +83,7 @@ class BackupV2(BaseService):
        # Validation
        if not contents.validate():
            self.logger.error(
-                "Invalid backup file. file does not contain required elements (data directory and database.json"
+                "Invalid backup file. file does not contain required elements (data directory and database.json)"
            )
            raise ValueError("Invalid backup file")

@@ -212,7 +212,7 @@
<td style="width: 550px">
  <img
    height="auto"
-    src="https://api-test.emailbuilder.top/saemailbuilder/dc23dc82-ffd7-4f4c-b563-94f23db4c2c3/images/256d8bd6-ffde-4bf2-b577-dd8306dae877/file.png"
+    src="https://raw.githubusercontent.com/mealie-recipes/mealie/9571816ac4eed5beacfc0abf6c03eff1427fd0eb/frontend/static/mealie-email-banner.png"
    style="
      border: 0;
      display: block;

@@ -16,7 +16,7 @@ class ApprisePublisher:
    def __init__(self, hard_fail=False) -> None:
        asset = apprise.AppriseAsset(
            async_mode=True,
-            image_url_mask="https://raw.githubusercontent.com/hay-kot/mealie/dev/frontend/public/img/icons/android-chrome-maskable-512x512.png",
+            image_url_mask="https://raw.githubusercontent.com/mealie-recipes/mealie/9571816ac4eed5beacfc0abf6c03eff1427fd0eb/frontend/static/icons/android-chrome-maskable-512x512.png",
        )
        self.apprise = apprise.Apprise(asset=asset)
        self.hard_fail = hard_fail