Merge branch 'mealie-next' into fix/translation-issues-when-scraping

Michael Genson
2023-12-11 13:01:26 -06:00
committed by GitHub
74 changed files with 505 additions and 672 deletions

View File

@@ -0,0 +1,50 @@
+from slugify import slugify
+from sqlalchemy.exc import IntegrityError
+from sqlalchemy.orm import Session
+
+from mealie.core import root_logger
+from mealie.db.models.group import Group
+
+logger = root_logger.get_logger("init_db")
+
+
+def _do_fix(session: Session, group: Group, counter: int):
+    if counter:
+        new_name = f"{group.id} ({counter})"
+    else:
+        new_name = str(group.id)
+
+    group.name = new_name
+    group.slug = slugify(group.name)
+    session.commit()
+
+
+def fix_group_with_no_name(session: Session):
+    groups = session.query(Group).filter(Group.name == "").all()
+    if not groups:
+        logger.debug("No group found with an empty name; skipping fix")
+        return
+
+    logger.info(
+        f'{len(groups)} {"group" if len(groups) == 1 else "groups"} found with a missing name; '
+        f"applying default name"
+    )
+
+    offset = 0
+    for i, group in enumerate(groups):
+        attempts = 0
+        while True:
+            if attempts >= 3:
+                raise Exception(
+                    f'Unable to fix empty group name for group_id "{group.id}": too many attempts ({attempts})'
+                )
+
+            counter = i + offset
+            try:
+                _do_fix(session, group, counter)
+                break
+            except IntegrityError:
+                session.rollback()
+                attempts += 1
+                offset += 1
+                continue
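
A note on the retry scheme above (an editorial aside, not part of the diff): each empty-named group is renamed to its own id, and on a unique-constraint collision the shared offset advances the counter so the next attempt tries "<id> (1)", "<id> (2)", and so on, giving up after three collisions per group. A minimal standalone sketch of just the naming step, assuming the python-slugify package:

from slugify import slugify

def candidate_name(group_id: str, counter: int) -> tuple[str, str]:
    # Mirrors _do_fix: the bare id on the first attempt, "id (n)" on retries
    name = f"{group_id} ({counter})" if counter else str(group_id)
    return name, slugify(name)

for counter in range(3):
    print(candidate_name("0b8f", counter))
# ('0b8f', '0b8f') -> ('0b8f (1)', '0b8f-1') -> ('0b8f (2)', '0b8f-2')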

View File

@@ -13,7 +13,7 @@ def fix_slug_food_names(db: AllRepositories):
     logger = root_logger.get_logger("init_db")

     if not food:
-        logger.info(f"No food found with slug: '{check_for_food}' skipping fix")
+        logger.debug(f"No food found with slug: '{check_for_food}' skipping fix")
         return

     all_foods = db.ingredient_foods.get_all()

View File

@@ -10,6 +10,7 @@ from alembic.runtime import migration
 from mealie.core import root_logger
 from mealie.core.config import get_app_settings
 from mealie.db.db_setup import session_context
+from mealie.db.fixes.fix_group_with_no_name import fix_group_with_no_name
 from mealie.db.fixes.fix_slug_foods import fix_slug_food_names
 from mealie.repos.all_repositories import get_repositories
 from mealie.repos.repository_factory import AllRepositories
@@ -104,6 +105,7 @@ def main():
         init_db(db)

         safe_try(lambda: fix_slug_food_names(db))
+        safe_try(lambda: fix_group_with_no_name(session))


 if __name__ == "__main__":
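
safe_try is not shown in this diff; judging from its use here, it runs a one-off fix and logs failures instead of letting them abort startup. A hedged sketch of such a helper (an assumption, not the repo's actual implementation):

import logging
from collections.abc import Callable

logger = logging.getLogger("init_db")


def safe_try(func: Callable[[], None]) -> None:
    # Assumed behavior: run a one-off startup fix, log and swallow any failure
    try:
        func()
    except Exception:
        logger.exception("Error running startup fix")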

View File

@@ -1,15 +1,10 @@
-import asyncio
-import random
-import shutil
-import string
-
-from fastapi import APIRouter, BackgroundTasks
+from fastapi import APIRouter
 from recipe_scrapers import __version__ as recipe_scraper_version

 from mealie.core.release_checker import get_latest_version
 from mealie.core.settings.static import APP_VERSION
 from mealie.routes._base import BaseAdminController, controller
-from mealie.schema.admin.about import AdminAboutInfo, AppStatistics, CheckAppConfig, DockerVolumeText
+from mealie.schema.admin.about import AdminAboutInfo, AppStatistics, CheckAppConfig

 router = APIRouter(prefix="/about")
@@ -57,25 +52,3 @@ class AdminAboutController(BaseAdminController):
             base_url_set=settings.BASE_URL != "http://localhost:8080",
             is_up_to_date=APP_VERSION == "develop" or APP_VERSION == "nightly" or get_latest_version() == APP_VERSION,
         )
-
-    @router.get("/docker/validate", response_model=DockerVolumeText)
-    def validate_docker_volume(self, bg: BackgroundTasks):
-        validation_dir = self.folders.DATA_DIR / "docker-validation"
-        validation_dir.mkdir(exist_ok=True)
-
-        random_string = "".join(random.choice(string.ascii_uppercase + string.digits) for _ in range(100))
-
-        with validation_dir.joinpath("validate.txt").open("w") as f:
-            f.write(random_string)
-
-        async def cleanup():
-            await asyncio.sleep(60)
-
-            try:
-                shutil.rmtree(validation_dir)
-            except Exception as e:
-                self.logger.error(f"Failed to remove docker validation directory: {e}")
-
-        bg.add_task(cleanup)
-
-        return DockerVolumeText(text=random_string)

View File

@@ -1,5 +1,5 @@
 # This file is auto-generated by gen_schema_exports.py
-from .about import AdminAboutInfo, AppInfo, AppStartupInfo, AppStatistics, AppTheme, CheckAppConfig, DockerVolumeText
+from .about import AdminAboutInfo, AppInfo, AppStartupInfo, AppStatistics, AppTheme, CheckAppConfig
 from .backup import AllBackups, BackupFile, BackupOptions, CreateBackup, ImportJob
 from .email import EmailReady, EmailSuccess, EmailTest
 from .maintenance import MaintenanceLogs, MaintenanceStorageDetails, MaintenanceSummary
@@ -31,7 +31,6 @@ __all__ = [
     "AppStatistics",
     "AppTheme",
     "CheckAppConfig",
-    "DockerVolumeText",
     "EmailReady",
     "EmailSuccess",
     "EmailTest",

View File

@@ -59,7 +59,3 @@ class CheckAppConfig(MealieModel):
     ldap_ready: bool
     base_url_set: bool
     is_up_to_date: bool
-
-
-class DockerVolumeText(MealieModel):
-    text: str

View File

@@ -61,7 +61,7 @@ class ChangePassword(MealieModel):


 class GroupBase(MealieModel):
-    name: str
+    name: constr(strip_whitespace=True, min_length=1)  # type: ignore

     class Config:
         orm_mode = True
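
With the constr change above, empty and whitespace-only group names are now rejected at validation time instead of being written to the database (which is what made the fix_group_with_no_name fix earlier in this commit necessary for existing rows). A quick sketch of the behavior, assuming Pydantic v1 (implied by the orm_mode config):

from pydantic import BaseModel, ValidationError, constr

class GroupBase(BaseModel):
    name: constr(strip_whitespace=True, min_length=1)  # type: ignore

print(GroupBase(name="  Home  ").name)  # "Home" - surrounding whitespace is stripped

try:
    GroupBase(name="   ")  # strips to "", then fails min_length=1
except ValidationError as exc:
    print(exc.errors()[0]["type"])  # value_error.any_str.min_length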

View File

@@ -90,10 +90,10 @@ def clean_image(image: str | list | dict | None = None, default: str = "no image") -> list[str]:
     image attempts to parse the image field from a recipe and return a string. Currently
     Supported Structures:
-        - `https://exmaple.com` - A string
-        - `{ "url": "https://exmaple.com" }` - A dictionary with a `url` key
-        - `["https://exmaple.com"]` - A list of strings
-        - `[{ "url": "https://exmaple.com" }]` - A list of dictionaries with a `url` key
+        - `https://example.com` - A string
+        - `{ "url": "https://example.com" }` - A dictionary with a `url` key
+        - `["https://example.com"]` - A list of strings
+        - `[{ "url": "https://example.com" }]` - A list of dictionaries with a `url` key

     Raises:
         TypeError: If the image field is not a supported type a TypeError is raised.
@@ -113,8 +113,11 @@ def clean_image(image: str | list | dict | None = None, default: str = "no image") -> list[str]:
             return [x["url"] for x in image]
         case {"url": str(image)}:
             return [image]
+        case [{"@id": str(_)}, *_]:
+            return [x["@id"] for x in image]
         case _:
-            raise TypeError(f"Unexpected type for image: {type(image)}, {image}")
+            logger.exception(f"Unexpected type for image: {type(image)}, {image}")
+            return [default]


 def clean_instructions(steps_object: list | dict | str, default: list | None = None) -> list[dict]:
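
Two behavioral notes on the clean_image change, as a usage sketch (assuming the function is importable from mealie.services.scraper.cleaner, which this hunk appears to come from): a list of schema.org nodes carrying only @id references now resolves to those ids, and unsupported shapes fall back to the placeholder instead of aborting the scrape:

from mealie.services.scraper.cleaner import clean_image

clean_image([{"@id": "https://example.com/photo.jpg"}])  # -> ["https://example.com/photo.jpg"]
clean_image(12345)  # -> ["no image"]; the bad value is logged, no longer raises TypeError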

View File

@@ -212,10 +212,6 @@ class RecipeScraperPackage(ABCScraperStrategy):

 class RecipeScraperOpenGraph(ABCScraperStrategy):
     """
     Abstract class for all recipe parsers.
     """

-    async def get_html(self, url: str) -> str:
-        return await safe_scrape_html(url)
-
-
@@ -245,7 +241,7 @@ class RecipeScraperOpenGraph(ABCScraperStrategy):
             "recipeIngredient": ["Could not detect ingredients"],
             "recipeInstructions": [{"text": "Could not detect instructions"}],
             "slug": slugify(og_field(properties, "og:title")),
-            "orgURL": og_field(properties, "og:url"),
+            "orgURL": self.url,
             "categories": [],
             "tags": og_fields(properties, "og:article:tag"),
             "dateAdded": None,