Release v0.1.0 Candidate (#85)

* Changed uvicorn port to 80

* Changed port in docker-compose to match dockerfile

* Readded environment variables in docker-compose

* production image rework

* Use OpenGraph metadata to make basic recipe cards when full recipe metadata is not available

* fixed instructions on parse

* add last_recipe

* automated testing

* roadmap update

* SQLite (#75)

* file structure

* auto-test

* take 2

* refactor APScheduler and startup process

* fixed scraper error

* database abstraction

* database abstraction

* port recipes over to new schema

* meal migration

* start settings migration

* final mongo port

* backup improvements

* migration imports to new DB structure

* unused import cleanup

* docstrings

* settings and theme import logic

* cleanup

* fixed tinydb error

* requirements

* fuzzy search

* remove scratch file

* sqlalchemy models

* improved search ui

* recipe models almost done

* sql model population

* del scratch

* rewrite database model mixins

* mostly garbage

* recipe updates

* working sqlite

* remove old files and reorganize

* final cleanup

Co-authored-by: Hayden <hay-kot@pm.me>

* Backup card (#78)

* backup / import dialog

* upgrade to new tag method

* New import card

* rename settings.py to app_config.py

* migrate to poetry for development

* fix failing test

Co-authored-by: Hayden <hay-kot@pm.me>

* added mkdocs to docker-compose

* Translations (#72)

* Translations + Danish

* changed back proxy target to use ENV

* Resolved more merge conflicts

* Removed test in translation

* Documentation of translations

* Updated translations

* removed old packages

Co-authored-by: Hayden <64056131+hay-kot@users.noreply.github.com>

* fail to start bug fixes

* feature: prep/cook/total time slots (#80)

Co-authored-by: Hayden <hay-kot@pm.me>

* missing bind attributes

* Bug fixes (#81)

* fix: url remains after successful import

* docs: changelog + update todos

* arm image

* arm compose

* compose updates

* update poetry

* arm support

Co-authored-by: Hayden <hay-kot@pm.me>

* dockerfile hotfix

* dockerfile hotfix

* Version Release Final Touches (#84)

* Remove slim

* bug: opacity issues

* bug: startup failure with no database

* ci/cd on dev branch

* formatting

* v0.1.0 documentation

Co-authored-by: Hayden <hay-kot@pm.me>

* db init hotfix

* bug: fix crash in mongo

* fix mongo bug

* fixed version notifier

* final changelog

Co-authored-by: kentora <=>
Co-authored-by: Hayden <hay-kot@pm.me>
Co-authored-by: Richard Mitic <richard.h.mitic@gmail.com>
Co-authored-by: kentora <kentora@kentora.dk>
Author: Hayden
Date: 2021-01-17 22:22:54 -09:00 (committed by GitHub)
parent f6c1fa0e8b
commit 88dfd40b8d
173 changed files with 10273 additions and 3735 deletions

@@ -1,149 +0,0 @@
import json
import shutil
import zipfile
from datetime import datetime
from pathlib import Path
from db.recipe_models import RecipeDocument
from jinja2 import Template
from utils.logger import logger
from services.recipe_services import IMG_DIR
CWD = Path(__file__).parent
BACKUP_DIR = CWD.parent.joinpath("data", "backups")
TEMPLATE_DIR = CWD.parent.joinpath("data", "templates")
TEMP_DIR = CWD.parent.joinpath("data", "temp")
def auto_backup_job():
for backup in BACKUP_DIR.glob("Auto*.zip"):
backup.unlink()
templates = []
for template in TEMPLATE_DIR.iterdir():
templates.append(template)
export_db(tag="Auto", templates=templates)
logger.info("Auto Backup Called")
def import_migration(recipe_dict: dict) -> dict:
del recipe_dict["_id"]
del recipe_dict["dateAdded"]
# Migration from list to Object Type Data
if type(recipe_dict["extras"]) == list:
recipe_dict["extras"] = {}
return recipe_dict
def import_from_archive(file_name: str) -> list:
successful_imports = []
failed_imports = []
file_path = BACKUP_DIR.joinpath(file_name)
with zipfile.ZipFile(file_path, "r") as zip_ref:
zip_ref.extractall(TEMP_DIR)
recipe_dir = TEMP_DIR.joinpath("recipes")
for recipe in recipe_dir.glob("*.json"):
with open(recipe, "r") as f:
recipe_dict = json.loads(f.read())
try:
recipe_dict = import_migration(recipe_dict)
recipeDoc = RecipeDocument(**recipe_dict)
recipeDoc.save()
successful_imports.append(recipe.stem)
except:
logger.info(f"Failed Import: {recipe.stem}")
failed_imports.append(recipe.stem)
image_dir = TEMP_DIR.joinpath("images")
for image in image_dir.iterdir():
if image.stem in successful_imports:
shutil.copy(image, IMG_DIR)
shutil.rmtree(TEMP_DIR)
return {"successful": successful_imports, "failed": failed_imports}
def export_db(tag=None, templates=None):
if tag:
export_tag = tag + "_" + datetime.now().strftime("%Y-%b-%d")
else:
export_tag = datetime.now().strftime("%Y-%b-%d")
backup_folder = TEMP_DIR.joinpath(export_tag)
backup_folder.mkdir(parents=True, exist_ok=True)
img_folder = backup_folder.joinpath("images")
img_folder.mkdir(parents=True, exist_ok=True)
recipe_folder = backup_folder.joinpath("recipes")
recipe_folder.mkdir(parents=True, exist_ok=True)
export_images(img_folder)
if type(templates) == list:
for template in templates:
export_recipes(recipe_folder, template)
elif type(templates) == str:
export_recipes(recipe_folder, templates)
else:
export_recipes(recipe_folder)
zip_path = BACKUP_DIR.joinpath(f"{export_tag}")
shutil.make_archive(zip_path, "zip", backup_folder)
shutil.rmtree(backup_folder)
shutil.rmtree(TEMP_DIR)
return str(zip_path.absolute()) + ".zip"
def export_images(dest_dir) -> Path:
for file in IMG_DIR.iterdir():
shutil.copy(file, dest_dir.joinpath(file.name))
def export_recipes(dest_dir: Path, template=None) -> Path:
all_recipes = RecipeDocument.objects()
logger.info(f"Backing Up Recipes: {all_recipes}")
for recipe in all_recipes:
json_recipe = recipe.to_json(indent=4)
if template:
md_dest = dest_dir.parent.joinpath("templates")
md_dest.mkdir(parents=True, exist_ok=True)
template = TEMPLATE_DIR.joinpath(template)
export_markdown(md_dest, json_recipe, template)
filename = recipe.slug + ".json"
file_path = dest_dir.joinpath(filename)
with open(file_path, "w") as f:
f.write(json_recipe)
def export_markdown(dest_dir: Path, recipe_data: json, template=Path) -> Path:
recipe_data: dict = json.loads(recipe_data)
recipe_template = TEMPLATE_DIR.joinpath("recipes.md")
with open(recipe_template, "r") as f:
template = Template(f.read())
out_file = dest_dir.joinpath(recipe_data["slug"] + ".md")
content = template.render(recipe=recipe_data)
with open(out_file, "w") as f:
f.write(content)
if __name__ == "__main__":
pass

@@ -0,0 +1,147 @@
import json
import shutil
from datetime import datetime
from pathlib import Path
from jinja2 import Template
from services.meal_services import MealPlan
from services.recipe_services import Recipe
from services.settings_services import SiteSettings, SiteTheme
from app_config import BACKUP_DIR, IMG_DIR, TEMP_DIR, TEMPLATE_DIR
from utils.logger import logger
class ExportDatabase:
def __init__(self, tag=None, templates=None) -> None:
"""Export a Mealie database. Export interacts directly with class objects and can be used
with any supported backend database platform. By default the tag is a timestamp, and no Jinja2 templates are rendered
Args:
tag ([str], optional): A str to be used as a file tag. Defaults to None.
templates (list, optional): A list of template file names. Defaults to None.
"""
if tag:
export_tag = tag + "_" + datetime.now().strftime("%Y-%b-%d")
else:
export_tag = datetime.now().strftime("%Y-%b-%d")
self.main_dir = TEMP_DIR.joinpath(export_tag)
self.img_dir = self.main_dir.joinpath("images")
self.recipe_dir = self.main_dir.joinpath("recipes")
self.themes_dir = self.main_dir.joinpath("themes")
self.settings_dir = self.main_dir.joinpath("settings")
self.templates_dir = self.main_dir.joinpath("templates")
self.mealplans_dir = self.main_dir.joinpath("mealplans")
try:
self.templates = [TEMPLATE_DIR.joinpath(x) for x in templates]
except:
self.templates = False
logger.info("No Jinja2 Templates Registered for Export")
required_dirs = [
self.main_dir,
self.img_dir,
self.recipe_dir,
self.themes_dir,
self.settings_dir,
self.templates_dir,
self.mealplans_dir,
]
for dir in required_dirs:
dir.mkdir(parents=True, exist_ok=True)
def export_recipes(self):
all_recipes = Recipe.get_all()
for recipe in all_recipes:
logger.info(f"Backing Up Recipes: {recipe}")
filename = recipe.get("slug") + ".json"
file_path = self.recipe_dir.joinpath(filename)
ExportDatabase._write_json_file(recipe, file_path)
if self.templates:
self._export_template(recipe)
def _export_template(self, recipe_data: dict):
for template_path in self.templates:
with open(template_path, "r") as f:
template = Template(f.read())
filename = recipe_data.get("name") + template_path.suffix
out_file = self.templates_dir.joinpath(filename)
content = template.render(recipe=recipe_data)
with open(out_file, "w") as f:
f.write(content)
def export_images(self):
for file in IMG_DIR.iterdir():
shutil.copy(file, self.img_dir.joinpath(file.name))
def export_settings(self):
all_settings = SiteSettings.get_site_settings()
out_file = self.settings_dir.joinpath("settings.json")
ExportDatabase._write_json_file(all_settings.dict(), out_file)
def export_themes(self):
all_themes = SiteTheme.get_all()
if all_themes:
all_themes = [x.dict() for x in all_themes]
out_file = self.themes_dir.joinpath("themes.json")
ExportDatabase._write_json_file(all_themes, out_file)
def export_meals(
self,
): #! Problem Parsing Datetime Objects... May come back to this
meal_plans = MealPlan.get_all()
if meal_plans:
meal_plans = [x.dict() for x in meal_plans]
out_file = self.mealplans_dir.joinpath("mealplans.json")
ExportDatabase._write_json_file(meal_plans, out_file)
@staticmethod
def _write_json_file(data, out_file: Path):
json_data = json.dumps(data, indent=4, default=str)
with open(out_file, "w") as f:
f.write(json_data)
def finish_export(self):
zip_path = BACKUP_DIR.joinpath(f"{self.main_dir.name}")
shutil.make_archive(zip_path, "zip", self.main_dir)
shutil.rmtree(TEMP_DIR)
return str(zip_path.absolute()) + ".zip"
def backup_all(tag=None, templates=None):
db_export = ExportDatabase(tag=tag, templates=templates)
db_export.export_recipes()
db_export.export_images()
db_export.export_settings()
db_export.export_themes()
db_export.export_meals()
#
return db_export.finish_export()
def auto_backup_job():
for backup in BACKUP_DIR.glob("Auto*.zip"):
backup.unlink()
templates = []
for template in TEMPLATE_DIR.iterdir():
templates.append(template)
backup_all(tag="Auto", templates=templates)
logger.info("Auto Backup Called")

@@ -0,0 +1,132 @@
import json
import shutil
import zipfile
from pathlib import Path
from typing import List
from app_config import BACKUP_DIR, IMG_DIR, TEMP_DIR
from services.recipe_services import Recipe
from services.settings_services import SiteSettings, SiteTheme
from utils.logger import logger
class ImportDatabase:
def __init__(
self,
zip_archive: str,
import_recipes: bool = True,
import_settings: bool = True,
import_themes: bool = True,
force_import: bool = False,
rebase: bool = False,
) -> None:
"""Import a database.zip file exported from mealie.
Args:
zip_archive (str): The filename contained in the backups directory
import_recipes (bool, optional): Determines if recipes are imported. Defaults to True.
import_settings (bool, optional): Determines if settings are imported. Defaults to True.
import_themes (bool, optional): Determines if themes are imported. Defaults to True.
force_import (bool, optional): Force import will update all existing recipes. If False, existing recipes are skipped. Defaults to False.
rebase (bool, optional): Rebase will first clear the database and then import Recipes. Defaults to False.
Raises:
Exception: If the zip file does not exist, an exception is raised.
"""
self.archive = BACKUP_DIR.joinpath(zip_archive)
self.imp_recipes = import_recipes
self.imp_settings = import_settings
self.imp_themes = import_themes
self.force_imports = force_import
self.force_rebase = rebase
if self.archive.is_file():
self.import_dir = TEMP_DIR.joinpath("active_import")
self.import_dir.mkdir(parents=True, exist_ok=True)
with zipfile.ZipFile(self.archive, "r") as zip_ref:
zip_ref.extractall(self.import_dir)
pass
else:
raise Exception("Import file does not exist")
def run(self):
if self.imp_recipes:
report = self.import_recipes()
if self.imp_settings:
self.import_settings()
if self.imp_themes:
self.import_themes()
self.clean_up()
return report if report else None
def import_recipes(self):
recipe_dir: Path = self.import_dir.joinpath("recipes")
successful_imports = []
failed_imports = []
for recipe in recipe_dir.glob("*.json"):
with open(recipe, "r") as f:
recipe_dict = json.loads(f.read())
recipe_dict = ImportDatabase._recipe_migration(recipe_dict)
try:
recipe_obj = Recipe(**recipe_dict)
recipe_obj.save_to_db()
successful_imports.append(recipe.stem)
logger.info(f"Imported: {recipe.stem}")
except:
logger.info(f"Failed Import: {recipe.stem}")
failed_imports.append(recipe.stem)
self._import_images(successful_imports)
return {"successful": successful_imports, "failed": failed_imports}
@staticmethod
def _recipe_migration(recipe_dict: dict) -> dict:
try:
del recipe_dict["_id"]
del recipe_dict["dateAdded"]
except:
logger.info("Detected new backup Schema, skipping migration...")
return recipe_dict
# Migration from list to Object Type Data
if type(recipe_dict["extras"]) == list:
recipe_dict["extras"] = {}
return recipe_dict
def _import_images(self, successful_imports: List[str]):
image_dir = self.import_dir.joinpath("images")
for image in image_dir.iterdir():
if image.stem in successful_imports:
shutil.copy(image, IMG_DIR)
def import_themes(self):
themes_file = self.import_dir.joinpath("themes", "themes.json")
with open(themes_file, "r") as f:
themes: list = json.loads(f.read())
for theme in themes:
new_theme = SiteTheme(**theme)
try:
new_theme.save_to_db()
except:
logger.info(f"Unable Import Theme {new_theme.name}")
def import_settings(self):
settings_file = self.import_dir.joinpath("settings", "settings.json")
with open(settings_file, "r") as f:
settings: dict = json.loads(f.read())
settings = SiteSettings(**settings)
settings.update()
def clean_up(self):
shutil.rmtree(TEMP_DIR)
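
A minimal restore sketch for the matching import service (not part of the diff), assuming it lives at services.backups.imports; the archive name and flag values are illustrative.

from services.backups.imports import ImportDatabase  # assumed module path

# Restore a zip from BACKUP_DIR, overwriting recipes that already exist.
importer = ImportDatabase(
    zip_archive="Manual_2021-Jan-17.zip",  # illustrative; must exist in BACKUP_DIR
    import_recipes=True,
    import_settings=True,
    import_themes=True,
    force_import=True,
)
report = importer.run()  # {"successful": [...], "failed": [...]} when recipes are imported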

@@ -2,13 +2,12 @@ import shutil
from pathlib import Path
import requests
from fastapi.responses import FileResponse
CWD = Path(__file__).parent
IMG_DIR = CWD.parent.joinpath("data", "img")
def read_image(recipe_slug: str) -> FileResponse:
def read_image(recipe_slug: str) -> Path:
if IMG_DIR.joinpath(recipe_slug).is_file():
return IMG_DIR.joinpath(recipe_slug)
else:

@@ -1,9 +1,8 @@
import json
from datetime import date, timedelta
from pathlib import Path
from typing import List, Optional
from db.meal_models import MealDocument, MealPlanDocument
from db.database import db
from pydantic import BaseModel
from services.recipe_services import Recipe
@@ -80,73 +79,30 @@ class MealPlan(BaseModel):
self.meals = meals
def save_to_db(self):
meal_docs = []
for meal in self.meals:
meal = meal.dict()
meal_doc = MealDocument(**meal)
meal_docs.append(meal_doc)
self.meals = meal_docs
meal_plan = MealPlanDocument(**self.dict())
meal_plan.save()
db.meals.save_new(self.dict())
@staticmethod
def get_all() -> List:
all_meals = []
for plan in MealPlanDocument.objects.order_by("startDate"):
all_meals.append(MealPlan._unpack_doc(plan))
print(all_meals)
all_meals = [MealPlan(**x) for x in db.meals.get_all(order_by="startDate")]
return all_meals
def update(self, uid):
document = MealPlanDocument.objects.get(uid=uid)
meal_docs = []
for meal in self.meals:
meal = meal.dict()
meal_doc = MealDocument(**meal)
meal_docs.append(meal_doc)
self.meals = meal_docs
if document:
document.update(set__meals=self.meals)
document.save()
db.meals.update(uid, self.dict())
@staticmethod
def delete(uid):
document = MealPlanDocument.objects.get(uid=uid)
if document:
document.delete()
@staticmethod
def _unpack_doc(document: MealPlanDocument):
meal_plan = json.loads(document.to_json())
del meal_plan["_id"]["$oid"]
print(meal_plan)
meal_plan["uid"] = meal_plan["uid"]["$uuid"]
meal_plan["startDate"] = meal_plan["startDate"]["$date"]
meal_plan["endDate"] = meal_plan["endDate"]["$date"]
meals = []
for meal in meal_plan["meals"]:
meal["date"] = meal["date"]["$date"]
meals.append(Meal(**meal))
meal_plan["meals"] = meals
return MealPlan(**meal_plan)
db.meals.delete(uid)
@staticmethod
def today() -> str:
""" Returns the meal slug for Today """
meal_plan = MealPlanDocument.objects.order_by("startDate").limit(1)
meal_plan = MealPlan._unpack_doc(meal_plan[0])
meal_plan = db.meals.get_all(limit=1, order_by="startDate")
for meal in meal_plan.meals:
meal_docs = [Meal(**meal) for meal in meal_plan["meals"]]
for meal in meal_docs:
if meal.date == date.today():
return meal.slug
@@ -154,7 +110,6 @@ class MealPlan(BaseModel):
@staticmethod
def this_week():
meal_plan = MealPlanDocument.objects.order_by("startDate").limit(1)
meal_plan = MealPlan._unpack_doc(meal_plan[0])
meal_plan = db.meals.get_all(limit=1, order_by="startDate")
return meal_plan
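
A short read-only sketch of the refactored MealPlan API (not part of the diff); it uses only the static methods visible in the hunk above, which now delegate to the db.meals abstraction.

from services.meal_services import MealPlan

plans = MealPlan.get_all()        # all plans via db.meals, ordered by startDate
todays_slug = MealPlan.today()    # slug of today's meal, if one is planned
this_week = MealPlan.this_week()  # the first plan ordered by startDate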

@@ -3,8 +3,8 @@ from pathlib import Path
import git
import yaml
from services.image_services import IMG_DIR
from services.recipe_services import Recipe
from app_config import IMG_DIR
try:
from yaml import CLoader as Loader

@@ -4,11 +4,11 @@ import shutil
import zipfile
from pathlib import Path
from services.recipe_services import IMG_DIR, Recipe
from services.recipe_services import Recipe
from services.scrape_services import normalize_data, process_recipe_data
from app_config import IMG_DIR, TEMP_DIR
CWD = Path(__file__).parent
TEMP_DIR = CWD.parent.parent.joinpath("data", "temp")
MIGRTAION_DIR = CWD.parent.parent.joinpath("data", "migration")

@@ -3,16 +3,12 @@ import json
from pathlib import Path
from typing import Any, List, Optional
from db.recipe_models import RecipeDocument
from db.database import db
from pydantic import BaseModel, validator
from slugify import slugify
from services.image_services import delete_image
CWD = Path(__file__).parent
ALL_RECIPES = CWD.parent.joinpath("data", "all_recipes.json")
IMG_DIR = CWD.parent.joinpath("data", "img")
class RecipeNote(BaseModel):
title: str
@@ -31,7 +27,10 @@ class Recipe(BaseModel):
recipeYield: Optional[str]
recipeIngredient: Optional[list]
recipeInstructions: Optional[list]
totalTime: Optional[Any]
totalTime: Optional[str] = None
prepTime: Optional[str] = None
performTime: Optional[str] = None
# Mealie Specific
slug: Optional[str] = ""
@@ -67,9 +66,7 @@ class Recipe(BaseModel):
"notes": [{"title": "Watch Out!", "text": "Prep the day before!"}],
"orgURL": "https://www.bonappetit.com/recipe/chicken-and-rice-with-leeks-and-salsa-verde",
"rating": 3,
"extras": {
"message": "Don't forget to defrost the chicken!"
}
"extras": {"message": "Don't forget to defrost the chicken!"},
}
}
@@ -94,12 +91,12 @@ class Recipe(BaseModel):
return cls(**document)
@classmethod
def get_by_slug(_cls, slug: str):
""" Returns a recipe dictionary from the slug """
def get_by_slug(cls, slug: str):
""" Returns a Recipe Object by Slug """
document = RecipeDocument.objects.get(slug=slug)
document = db.recipes.get(slug, "slug")
return Recipe._unpack_doc(document)
return cls(**document)
def save_to_db(self) -> str:
recipe_dict = self.dict()
@@ -110,47 +107,36 @@ class Recipe(BaseModel):
except:
recipe_dict["image"] = "no image"
try:
total_time = recipe_dict.get("totalTime")
recipe_dict["totalTime"] = str(total_time)
except:
pass
# try:
# total_time = recipe_dict.get("totalTime")
# recipe_dict["totalTime"] = str(total_time)
# except:
# pass
recipeDoc = RecipeDocument(**recipe_dict)
recipeDoc.save()
recipe_doc = db.recipes.save_new(recipe_dict)
recipe = Recipe(**recipe_doc)
return recipeDoc.slug
return recipe.slug
@staticmethod
def delete(recipe_slug: str) -> str:
""" Removes the recipe from the database by slug """
delete_image(recipe_slug)
document = RecipeDocument.objects.get(slug=recipe_slug)
if document:
document.delete()
return "Document Deleted"
db.recipes.delete(recipe_slug)
return "Document Deleted"
def update(self, recipe_slug: str):
""" Updates the recipe from the database by slug"""
document = RecipeDocument.objects.get(slug=recipe_slug)
updated_slug = db.recipes.update(recipe_slug, self.dict())
return updated_slug.get("slug")
if document:
document.update(set__name=self.name)
document.update(set__description=self.description)
document.update(set__image=self.image)
document.update(set__recipeYield=self.recipeYield)
document.update(set__recipeIngredient=self.recipeIngredient)
document.update(set__recipeInstructions=self.recipeInstructions)
document.update(set__totalTime=self.totalTime)
@staticmethod
def update_image(slug: str, extension: str):
db.recipes.update_image(slug, extension)
document.update(set__categories=self.categories)
document.update(set__tags=self.tags)
document.update(set__notes=self.notes)
document.update(set__orgURL=self.orgURL)
document.update(set__rating=self.rating)
document.update(set__extras=self.extras)
document.save()
@staticmethod
def get_all():
return db.recipes.get_all()
def read_requested_values(keys: list, max_results: int = 0) -> List[dict]:
@@ -166,7 +152,7 @@ def read_requested_values(keys: list, max_results: int = 0) -> List[dict]:
"""
recipe_list = []
for recipe in RecipeDocument.objects.order_by("dateAdded").limit(max_results):
for recipe in db.recipes.get_all(limit=max_results, order_by="dateAdded"):
recipe_details = {}
for key in keys:
try:

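A brief sketch of the Recipe model working against the new database abstraction (not part of the diff); the slug is illustrative and only methods visible in the hunks above are used.

from services.recipe_services import Recipe

recipe = Recipe.get_by_slug("chicken-and-rice-with-leeks-and-salsa-verde")
recipe.rating = 4
updated_slug = recipe.update(recipe.slug)  # db.recipes.update(...) returns the (possibly new) slug
Recipe.delete(updated_slug)                # also removes the stored image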
@@ -5,7 +5,7 @@ import requests
from apscheduler.schedulers.background import BackgroundScheduler
from utils.logger import logger
from services.backup_services import auto_backup_job
from services.backups.exports import auto_backup_job
from services.meal_services import MealPlan
from services.recipe_services import Recipe
from services.settings_services import SiteSettings

@@ -1,10 +1,13 @@
import json
from pathlib import Path
from typing import List
from typing import List, Tuple
from scrape_schema_recipe import scrape_url
import extruct
import requests
import scrape_schema_recipe
from slugify import slugify
from utils.logger import logger
from w3lib.html import get_base_url
from services.image_services import scrape_image
from services.recipe_services import Recipe
@@ -59,21 +62,10 @@ def normalize_data(recipe_data: dict) -> dict:
recipe_data["recipeInstructions"] = normalize_instructions(
recipe_data["recipeInstructions"]
)
recipe_data["image"] = normalize_image_url(recipe_data["image"])
return recipe_data
def create_from_url(url: str) -> dict:
recipe_data = process_recipe_url(url)
with open(TEMP_FILE, "w") as f:
f.write(json.dumps(recipe_data, indent=4, default=str))
recipe_data = normalize_data(recipe_data)
recipe = Recipe(**recipe_data)
return recipe.save_to_db()
def process_recipe_data(new_recipe: dict, url=None) -> dict:
slug = slugify(new_recipe["name"])
mealie_tags = {
@@ -91,21 +83,84 @@ def process_recipe_data(new_recipe: dict, url=None) -> dict:
return new_recipe
def process_recipe_url(url: str) -> dict:
new_recipe: dict = scrape_url(url, python_objects=True)[0]
logger.info(f"Recipe Scraped From Web: {new_recipe}")
def extract_recipe_from_html(html: str, url: str) -> dict:
scraped_recipes: List[dict] = scrape_schema_recipe.loads(html, python_objects=True)
if not new_recipe:
return "fail" # TODO: Return Better Error Here
new_recipe = process_recipe_data(new_recipe, url)
try:
img_path = scrape_image(
normalize_image_url(new_recipe.get("image")), new_recipe.get("slug")
if not scraped_recipes:
scraped_recipes: List[dict] = scrape_schema_recipe.scrape_url(
url, python_objects=True
)
new_recipe["image"] = img_path.name
except:
new_recipe["image"] = None
if scraped_recipes:
new_recipe: dict = scraped_recipes[0]
logger.info(f"Recipe Scraped From Web: {new_recipe}")
if not new_recipe:
return "fail" # TODO: Return Better Error Here
new_recipe = process_recipe_data(new_recipe, url=url)
new_recipe = normalize_data(new_recipe)
else:
new_recipe = basic_recipe_from_opengraph(html, url)
logger.info(f"Recipe Scraped from opengraph metadata: {new_recipe}")
return new_recipe
def download_image_for_recipe(recipe: dict) -> dict:
try:
img_path = scrape_image(recipe.get("image"), recipe.get("slug"))
recipe["image"] = img_path.name
except:
recipe["image"] = None
return recipe
def og_field(properties: dict, field_name: str) -> str:
return next((val for name, val in properties if name == field_name), None)
def og_fields(properties: List[Tuple[str, str]], field_name: str) -> List[str]:
return list({val for name, val in properties if name == field_name})
def basic_recipe_from_opengraph(html: str, url: str) -> dict:
base_url = get_base_url(html, url)
data = extruct.extract(html, base_url=base_url)
properties = data["opengraph"][0]["properties"]
return {
"name": og_field(properties, "og:title"),
"description": og_field(properties, "og:description"),
"image": og_field(properties, "og:image"),
"recipeYield": "",
# FIXME: If recipeIngredient is an empty list, mongodb's data verification fails.
"recipeIngredient": ["Could not detect ingredients"],
# FIXME: recipeInstructions is allowed to be empty but message this is added for user sanity.
"recipeInstructions": [{"text": "Could not detect instructions"}],
"slug": slugify(og_field(properties, "og:title")),
"orgURL": og_field(properties, "og:url"),
"categories": [],
"tags": og_fields(properties, "og:article:tag"),
"dateAdded": None,
"notes": [],
"extras": [],
}
def process_recipe_url(url: str) -> dict:
r = requests.get(url)
new_recipe = extract_recipe_from_html(r.text, url)
new_recipe = download_image_for_recipe(new_recipe)
return new_recipe
def create_from_url(url: str) -> dict:
recipe_data = process_recipe_url(url)
with open(TEMP_FILE, "w") as f:
f.write(json.dumps(recipe_data, indent=4, default=str))
recipe = Recipe(**recipe_data)
return recipe.save_to_db()
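
A minimal end-to-end sketch of the reworked scraper (not part of the diff); the URL is illustrative. When no schema.org Recipe data is found in the page, basic_recipe_from_opengraph() above supplies a bare-bones card instead of failing.

from services.scrape_services import create_from_url

# Downloads the page, extracts recipe data (schema.org first, OpenGraph fallback),
# fetches the image, saves the recipe, and returns its slug.
slug = create_from_url("https://example.com/some-recipe")  # illustrative URL
print(slug)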

@@ -1,23 +1,15 @@
import json
from typing import List, Optional
from db.settings_models import (
SiteSettingsDocument,
SiteThemeDocument,
ThemeColorsDocument,
WebhooksDocument,
)
from db.database import db
from db.db_setup import sql_exists
from pydantic import BaseModel
from utils.logger import logger
class Webhooks(BaseModel):
webhookTime: str
webhookURLs: Optional[List[str]]
enabled: bool
@staticmethod
def run():
pass
webhookTime: str = "00:00"
webhookURLs: Optional[List[str]] = []
enabled: bool = False
class SiteSettings(BaseModel):
@@ -37,30 +29,22 @@ class SiteSettings(BaseModel):
}
@staticmethod
def _unpack_doc(document: SiteSettingsDocument):
document = json.loads(document.to_json())
del document["_id"]
document["webhhooks"] = Webhooks(**document["webhooks"])
return SiteSettings(**document)
def get_all():
db.settings.get_all()
@staticmethod
def get_site_settings():
@classmethod
def get_site_settings(cls):
try:
document = SiteSettingsDocument.objects.get(name="main")
document = db.settings.get("main")
except:
webhooks = WebhooksDocument()
document = SiteSettingsDocument(name="main", webhooks=webhooks)
document.save()
webhooks = Webhooks()
default_entry = SiteSettings(name="main", webhooks=webhooks)
document = db.settings.save_new(default_entry.dict(), webhooks.dict())
return SiteSettings._unpack_doc(document)
return cls(**document)
def update(self):
document = SiteSettingsDocument.objects.get(name="main")
new_webhooks = WebhooksDocument(**self.webhooks.dict())
document.update(set__webhooks=new_webhooks)
document.save()
db.settings.update("main", new_data=self.dict())
class Colors(BaseModel):
@@ -93,50 +77,67 @@ class SiteTheme(BaseModel):
}
}
@staticmethod
def get_by_name(theme_name):
document = SiteThemeDocument.objects.get(name=theme_name)
return SiteTheme._unpack_doc(document)
@classmethod
def get_by_name(cls, theme_name):
db_entry = db.themes.get(theme_name)
name = db_entry.get("name")
colors = Colors(**db_entry.get("colors"))
@staticmethod
def _unpack_doc(document):
document = json.loads(document.to_json())
del document["_id"]
theme_colors = SiteTheme(**document)
return theme_colors
return cls(name=name, colors=colors)
@staticmethod
def get_all():
all_themes = []
for theme in SiteThemeDocument.objects():
all_themes.append(SiteTheme._unpack_doc(theme))
all_themes = db.themes.get_all()
for index, theme in enumerate(all_themes):
name = theme.get("name")
colors = Colors(**theme.get("colors"))
all_themes[index] = SiteTheme(name=name, colors=colors)
return all_themes
def save_to_db(self):
theme = self.dict()
theme["colors"] = ThemeColorsDocument(**theme["colors"])
theme_document = SiteThemeDocument(**theme)
theme_document.save()
db.themes.save_new(self.dict())
def update_document(self):
theme = self.dict()
theme["colors"] = ThemeColorsDocument(**theme["colors"])
theme_document = SiteThemeDocument.objects.get(name=self.name)
if theme_document:
theme_document.update(set__colors=theme["colors"])
theme_document.save()
db.themes.update(self.dict())
@staticmethod
def delete_theme(theme_name: str) -> str:
""" Removes the theme by name """
document = SiteThemeDocument.objects.get(name=theme_name)
db.themes.delete(theme_name)
if document:
document.delete()
return "Document Deleted"
def default_theme_init():
default_colors = {
"primary": "#E58325",
"accent": "#00457A",
"secondary": "#973542",
"success": "#5AB1BB",
"info": "#4990BA",
"warning": "#FF4081",
"error": "#EF5350",
}
try:
SiteTheme.get_by_name("default")
logger.info("Default theme exists... skipping generation")
except:
logger.info("Generating Default Theme")
colors = Colors(**default_colors)
default_theme = SiteTheme(name="default", colors=colors)
default_theme.save_to_db()
def default_settings_init():
try:
document = db.settings.get("main")
except:
webhooks = Webhooks()
default_entry = SiteSettings(name="main", webhooks=webhooks)
document = db.settings.save_new(default_entry.dict(), webhooks.dict())
if not sql_exists:
default_settings_init()
default_theme_init()
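
A short usage sketch of the refactored settings and theme services (not part of the diff); attribute names follow the models shown above, and the color value is the default primary color from default_theme_init().

from services.settings_services import SiteSettings, SiteTheme

settings = SiteSettings.get_site_settings()  # creates the "main" entry on first run
settings.webhooks.enabled = True             # assumes webhooks is parsed into the Webhooks model above
settings.update()                            # persists via db.settings.update("main", ...)

theme = SiteTheme.get_by_name("default")
print(theme.colors.primary)                  # "#E58325"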