feat: Add Households to Mealie (#3970)
@@ -12,10 +12,10 @@ from sqlalchemy import select
 from sqlalchemy.orm.session import Session
 
 from mealie.db.db_setup import session_context
-from mealie.db.models.group.webhooks import GroupWebhooksModel
+from mealie.db.models.household.webhooks import GroupWebhooksModel
 from mealie.repos.repository_factory import AllRepositories
-from mealie.schema.group.group_events import GroupEventNotifierPrivate
-from mealie.schema.group.webhook import ReadWebhook
+from mealie.schema.household.group_events import GroupEventNotifierPrivate
+from mealie.schema.household.webhook import ReadWebhook
 from mealie.schema.response.pagination import PaginationQuery
 
 from .event_types import Event, EventDocumentType, EventTypes, EventWebhookData
@@ -26,8 +26,9 @@ class EventListenerBase(ABC):
     _session: Session | None = None
     _repos: AllRepositories | None = None
 
-    def __init__(self, group_id: UUID4, publisher: PublisherLike) -> None:
+    def __init__(self, group_id: UUID4, household_id: UUID4, publisher: PublisherLike) -> None:
        self.group_id = group_id
+        self.household_id = household_id
         self.publisher = publisher
         self._session = None
         self._repos = None
@@ -61,24 +62,24 @@ class EventListenerBase(ABC):
             yield self._session
 
     @contextlib.contextmanager
-    def ensure_repos(self) -> Generator[AllRepositories, None, None]:
+    def ensure_repos(self, group_id: UUID4, household_id: UUID4) -> Generator[AllRepositories, None, None]:
         if self._repos is None:
             with self.ensure_session() as session:
-                self._repos = AllRepositories(session)
+                self._repos = AllRepositories(session, group_id=group_id, household_id=household_id)
                 yield self._repos
         else:
             yield self._repos
 
 
 class AppriseEventListener(EventListenerBase):
-    def __init__(self, group_id: UUID4) -> None:
-        super().__init__(group_id, ApprisePublisher())
+    def __init__(self, group_id: UUID4, household_id: UUID4) -> None:
+        super().__init__(group_id, household_id, ApprisePublisher())
 
     def get_subscribers(self, event: Event) -> list[str]:
-        with self.ensure_repos() as repos:
-            notifiers: list[GroupEventNotifierPrivate] = repos.group_event_notifier.by_group(  # type: ignore
-                self.group_id
-            ).multi_query({"enabled": True}, override_schema=GroupEventNotifierPrivate)
+        with self.ensure_repos(self.group_id, self.household_id) as repos:
+            notifiers: list[GroupEventNotifierPrivate] = repos.group_event_notifier.multi_query(
+                {"enabled": True}, override_schema=GroupEventNotifierPrivate
+            )
 
         urls = [notifier.apprise_url for notifier in notifiers if getattr(notifier.options, event.event_type.name)]
         urls = AppriseEventListener.update_urls_with_event_data(urls, event)
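
Taken together, every listener is now constructed with a (group_id, household_id) pair, and ensure_repos() hands back a repository factory that is already filtered to that scope. A minimal usage sketch (not part of the diff; the wrapper function and its parameters are illustrative only):

from pydantic import UUID4

from mealie.services.event_bus_service.event_bus_listeners import AppriseEventListener
from mealie.services.event_bus_service.event_types import Event


def notify_household(group_id: UUID4, household_id: UUID4, event: Event) -> None:
    # Listeners are scoped to a household as well as a group after this change.
    listener = AppriseEventListener(group_id, household_id)
    if subscribers := listener.get_subscribers(event):
        listener.publish_to_subscribers(event, subscribers)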
@@ -126,8 +127,8 @@ class AppriseEventListener(EventListenerBase):
 
 
 class WebhookEventListener(EventListenerBase):
-    def __init__(self, group_id: UUID4) -> None:
-        super().__init__(group_id, WebhookPublisher())
+    def __init__(self, group_id: UUID4, household_id: UUID4) -> None:
+        super().__init__(group_id, household_id, WebhookPublisher())
 
     def get_subscribers(self, event: Event) -> list[ReadWebhook]:
         # we only care about events that contain webhook information
@@ -140,10 +141,10 @@ class WebhookEventListener(EventListenerBase):
         return scheduled_webhooks
 
     def publish_to_subscribers(self, event: Event, subscribers: list[ReadWebhook]) -> None:
-        with self.ensure_repos() as repos:
+        with self.ensure_repos(self.group_id, self.household_id) as repos:
             if event.document_data.document_type == EventDocumentType.mealplan:
                 # TODO: limit mealplan data to a date range instead of returning all mealplans
-                meal_repo = repos.meals.by_group(self.group_id)
+                meal_repo = repos.meals
                 meal_pagination_data = meal_repo.page_all(pagination=PaginationQuery(page=1, per_page=-1))
                 meal_data = meal_pagination_data.items
                 if meal_data:
@@ -158,5 +159,7 @@ class WebhookEventListener(EventListenerBase):
                 GroupWebhooksModel.enabled == True,  # noqa: E712 - required for SQLAlchemy comparison
                 GroupWebhooksModel.scheduled_time > start_dt.astimezone(timezone.utc).time(),
                 GroupWebhooksModel.scheduled_time <= end_dt.astimezone(timezone.utc).time(),
+                GroupWebhooksModel.group_id == self.group_id,
+                GroupWebhooksModel.household_id == self.household_id,
             )
             return session.execute(stmt).scalars().all()
@@ -1,9 +1,11 @@
-from fastapi import BackgroundTasks, Depends, Query
+from fastapi import BackgroundTasks, Depends
 from pydantic import UUID4
 from sqlalchemy.orm.session import Session
 
 from mealie.core.config import get_app_settings
 from mealie.db.db_setup import generate_session
+from mealie.repos.all_repositories import get_repositories
+from mealie.schema.response.pagination import PaginationQuery
 from mealie.services.event_bus_service.event_bus_listeners import (
     AppriseEventListener,
     EventListenerBase,
@@ -40,30 +42,36 @@ class EventSource:
 class EventBusService:
     bg: BackgroundTasks | None = None
     session: Session | None = None
-    group_id: UUID4 | None = None
 
     def __init__(
-        self, bg: BackgroundTasks | None = None, session: Session | None = None, group_id: UUID4 | None = None
+        self,
+        bg: BackgroundTasks | None = None,
+        session: Session | None = None,
     ) -> None:
         self.bg = bg
         self.session = session
-        self.group_id = group_id
 
-        self.listeners: list[EventListenerBase] = [
-            AppriseEventListener(self.group_id),
-            WebhookEventListener(self.group_id),
+    def _get_listeners(self, group_id: UUID4, household_id: UUID4) -> list[EventListenerBase]:
+        return [
+            AppriseEventListener(group_id, household_id),
+            WebhookEventListener(group_id, household_id),
         ]
 
+    def _publish_event(self, event: Event, group_id: UUID4, household_id: UUID4) -> None:
+        """Publishes the event to all listeners"""
+        for listener in self._get_listeners(group_id, household_id):
+            if subscribers := listener.get_subscribers(event):
+                listener.publish_to_subscribers(event, subscribers)
+
     def dispatch(
         self,
         integration_id: str,
         group_id: UUID4,
+        household_id: UUID4 | None,
         event_type: EventTypes,
         document_data: EventDocumentDataBase | None,
         message: str = "",
     ) -> None:
-        self.group_id = group_id
-
         event = Event(
             message=EventBusMessage.from_type(event_type, body=message),
             event_type=event_type,
@@ -71,24 +79,27 @@ class EventBusService:
             document_data=document_data,
         )
 
-        if self.bg:
-            self.bg.add_task(self.publish_event, event=event)
+        if not household_id:
+            if not self.session:
+                raise ValueError("Session is required if household_id is not provided")
 
+            repos = get_repositories(self.session, group_id=group_id)
+            households = repos.households.page_all(PaginationQuery(page=1, per_page=-1)).items
+            household_ids = [household.id for household in households]
         else:
-            self.publish_event(event)
+            household_ids = [household_id]
 
-    def publish_event(self, event: Event) -> None:
-        """Publishes the event to all listeners"""
-        for listener in self.listeners:
-            if subscribers := listener.get_subscribers(event):
-                listener.publish_to_subscribers(event, subscribers)
+        for household_id in household_ids:
+            if self.bg:
+                self.bg.add_task(self._publish_event, event=event, group_id=group_id, household_id=household_id)
+            else:
+                self._publish_event(event, group_id, household_id)
 
     @classmethod
     def as_dependency(
         cls,
         bg: BackgroundTasks,
         session=Depends(generate_session),
-        group_id: UUID4 | None = Query(None, include_in_schema=False),
     ):
         """Convenience method to use as a dependency in FastAPI routes"""
-        return cls(bg, session, group_id)
+        return cls(bg, session)
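
The dispatch() signature is the behavioural core of the change: household_id is required but may be None, in which case the service loads every household in the group and publishes once per household. A hedged sketch of the two calling modes (the wrapper function and its variables are illustrative, not from the PR):

from mealie.schema.user.user import DEFAULT_INTEGRATION_ID
from mealie.services.event_bus_service.event_bus_service import EventBusService
from mealie.services.event_bus_service.event_types import EventTypes


def send(session, group_id, household_id, document_data) -> None:
    bus = EventBusService(session=session)

    # Target one household explicitly.
    bus.dispatch(
        integration_id=DEFAULT_INTEGRATION_ID,
        group_id=group_id,
        household_id=household_id,
        event_type=EventTypes.recipe_updated,
        document_data=document_data,
    )

    # household_id=None fans out to every household in the group; this path
    # needs a session, hence the ValueError guard in dispatch().
    bus.dispatch(
        integration_id=DEFAULT_INTEGRATION_ID,
        group_id=group_id,
        household_id=None,
        event_type=EventTypes.recipe_updated,
        document_data=document_data,
    )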
@@ -1,11 +1,16 @@
 from pydantic import UUID4
 
+from mealie.core.config import get_app_settings
 from mealie.pkgs.stats import fs_stats
+from mealie.repos.all_repositories import get_repositories
 from mealie.repos.repository_factory import AllRepositories
 from mealie.schema.group.group_preferences import CreateGroupPreferences
-from mealie.schema.group.group_statistics import GroupStatistics, GroupStorage
+from mealie.schema.group.group_statistics import GroupStorage
+from mealie.schema.household.household import HouseholdCreate
+from mealie.schema.household.household_preferences import CreateHouseholdPreferences
 from mealie.schema.user.user import GroupBase
 from mealie.services._base_service import BaseService
+from mealie.services.household_services.household_service import HouseholdService
 
 ALLOWED_SIZE = 500 * fs_stats.megabyte
 
@@ -20,7 +25,7 @@ class GroupService(BaseService):
     def create_group(repos: AllRepositories, g_base: GroupBase, prefs: CreateGroupPreferences | None = None):
         """
         Creates a new group in the database with the required associated table references to ensure
-        the group includes required preferences.
+        the group includes required preferences and default household.
         """
         new_group = repos.groups.create(g_base)
 
@@ -29,28 +34,36 @@ class GroupService(BaseService):
         else:
             prefs.group_id = new_group.id
 
-        repos.group_preferences.create(prefs)
+        group_repos = get_repositories(repos.session, group_id=new_group.id, household_id=None)
+        group_preferences = group_repos.group_preferences.create(prefs)
+
+        settings = get_app_settings()
+        household = HouseholdService.create_household(
+            group_repos,
+            HouseholdCreate(name=settings.DEFAULT_HOUSEHOLD, group_id=new_group.id),
+            prefs=CreateHouseholdPreferences(
+                private_household=group_preferences.private_group,
+                recipe_public=not group_preferences.private_group,
+            ),
+        )
+
+        new_group.preferences = group_preferences
+        new_group.households = [household]
 
         return new_group
 
-    def calculate_statistics(self, group_id: None | UUID4 = None) -> GroupStatistics:
-        """
-        calculate_statistics calculates the statistics for the group and returns
-        a GroupStatistics object.
-        """
-        target_id = group_id or self.group_id
-
-        return self.repos.groups.statistics(target_id)
-
     def calculate_group_storage(self, group_id: None | UUID4 = None) -> GroupStorage:
         """
         calculate_group_storage calculates the storage used by the group and returns
         a GroupStorage object.
         """
 
+        # we need all recipes from all households, not just our household
+        group_repos = get_repositories(self.repos.session, group_id=group_id, household_id=None)
+
         target_id = group_id or self.group_id
 
-        all_ids = self.repos.recipes.all_ids(target_id)
+        all_ids = group_repos.recipes.all_ids(target_id)
 
         used_size = sum(
             fs_stats.get_dir_size(f"{self.directories.RECIPE_DATA_DIR}/{recipe_id!s}") for recipe_id in all_ids
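
GroupService.create_group() now provisions a default household alongside the group, with household preferences mirroring the group's privacy flags. A small usage sketch (illustrative only; the group name is a placeholder and GroupBase(name=...) is assumed to be the minimal payload):

from mealie.schema.user.user import GroupBase
from mealie.services.group_services.group_service import GroupService


def bootstrap_group(repos):
    new_group = GroupService.create_group(repos, GroupBase(name="Example Group"))
    # The default household is named after settings.DEFAULT_HOUSEHOLD and is
    # attached to the returned group.
    return new_group.households[0]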
@@ -1,7 +1,6 @@
-from pydantic import UUID4
-
+from mealie.repos.all_repositories import get_repositories
 from mealie.repos.repository_factory import AllRepositories
-from mealie.schema.group.group_shopping_list import ShoppingListMultiPurposeLabelCreate
+from mealie.schema.household.group_shopping_list import ShoppingListMultiPurposeLabelCreate
 from mealie.schema.labels.multi_purpose_label import (
     MultiPurposeLabelCreate,
     MultiPurposeLabelOut,
@@ -11,14 +10,15 @@ from mealie.schema.response.pagination import PaginationQuery
 
 
 class MultiPurposeLabelService:
-    def __init__(self, repos: AllRepositories, group_id: UUID4):
+    def __init__(self, repos: AllRepositories):
         self.repos = repos
-        self.group_id = group_id
         self.labels = repos.group_multi_purpose_labels
 
     def _update_shopping_list_label_references(self, new_labels: list[MultiPurposeLabelOut]) -> None:
-        shopping_lists_repo = self.repos.group_shopping_lists.by_group(self.group_id)
-        shopping_list_multi_purpose_labels_repo = self.repos.shopping_list_multi_purpose_labels
+        # remove the households filter so we get all lists
+        household_repos = get_repositories(self.repos.session, group_id=self.repos.group_id, household_id=None)
+        shopping_lists_repo = household_repos.group_shopping_lists
+        shopping_list_multi_purpose_labels_repo = household_repos.shopping_list_multi_purpose_labels
 
         shopping_lists = shopping_lists_repo.page_all(PaginationQuery(page=1, per_page=-1))
         new_shopping_list_labels: list[ShoppingListMultiPurposeLabelCreate] = []
@@ -35,11 +35,13 @@ class MultiPurposeLabelService:
         shopping_list_multi_purpose_labels_repo.create_many(new_shopping_list_labels)
 
     def create_one(self, data: MultiPurposeLabelCreate) -> MultiPurposeLabelOut:
-        label = self.labels.create(data.cast(MultiPurposeLabelSave, group_id=self.group_id))
+        label = self.labels.create(data.cast(MultiPurposeLabelSave, group_id=self.repos.group_id))
         self._update_shopping_list_label_references([label])
         return label
 
     def create_many(self, data: list[MultiPurposeLabelCreate]) -> list[MultiPurposeLabelOut]:
-        labels = self.labels.create_many([label.cast(MultiPurposeLabelSave, group_id=self.group_id) for label in data])
+        labels = self.labels.create_many(
+            [label.cast(MultiPurposeLabelSave, group_id=self.repos.group_id) for label in data]
+        )
         self._update_shopping_list_label_references(labels)
         return labels
 mealie/services/household_services/__init__.py          |  0 (new file)
 mealie/services/household_services/household_service.py | 50 (new file)
mealie/services/household_services/household_service.py (new file)
@@ -0,0 +1,50 @@
+from pydantic import UUID4
+
+from mealie.repos.all_repositories import get_repositories
+from mealie.repos.repository_factory import AllRepositories
+from mealie.schema.household.household import HouseholdCreate
+from mealie.schema.household.household_preferences import CreateHouseholdPreferences, SaveHouseholdPreferences
+from mealie.schema.household.household_statistics import HouseholdStatistics
+from mealie.services._base_service import BaseService
+
+
+class HouseholdService(BaseService):
+    def __init__(self, group_id: UUID4, household_id: UUID4, repos: AllRepositories):
+        self.group_id = group_id
+        self.household_id = household_id
+        self.repos = repos
+        super().__init__()
+
+    @staticmethod
+    def create_household(
+        repos: AllRepositories, h_base: HouseholdCreate, prefs: CreateHouseholdPreferences | None = None
+    ):
+        new_household = repos.households.create(h_base)
+        if prefs is None:
+            group = repos.groups.get_one(new_household.group_id)
+            if group and group.preferences:
+                prefs = CreateHouseholdPreferences(
+                    private_household=group.preferences.private_group,
+                    recipe_public=not group.preferences.private_group,
+                )
+            else:
+                prefs = CreateHouseholdPreferences()
+        save_prefs = prefs.cast(SaveHouseholdPreferences, household_id=new_household.id)
+
+        household_repos = get_repositories(
+            repos.session, group_id=new_household.group_id, household_id=new_household.id
+        )
+        household_repos.household_preferences.create(save_prefs)
+        return new_household
+
+    def calculate_statistics(
+        self, group_id: UUID4 | None = None, household_id: UUID4 | None = None
+    ) -> HouseholdStatistics:
+        """
+        calculate_statistics calculates the statistics for the group and returns
+        a HouseholdStatistics object.
+        """
+        group_id = group_id or self.group_id
+        household_id = household_id or self.household_id
+
+        return self.repos.households.statistics(group_id, household_id)
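
The new HouseholdService mirrors the group workflow: create_household() derives preferences from the parent group when none are supplied, then builds household-scoped repositories to persist them. An illustrative sketch for adding a second household to an existing group (the helper and its arguments are assumptions, not code from the PR):

from mealie.repos.all_repositories import get_repositories
from mealie.schema.household.household import HouseholdCreate
from mealie.services.household_services.household_service import HouseholdService


def add_household(session, group_id, name: str):
    group_repos = get_repositories(session, group_id=group_id)
    # With prefs omitted, the household inherits the group's privacy settings.
    return HouseholdService.create_household(group_repos, HouseholdCreate(name=name, group_id=group_id))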
@@ -4,10 +4,10 @@ from pydantic import UUID4
 
 from mealie.core.exceptions import UnexpectedNone
 from mealie.repos.repository_factory import AllRepositories
-from mealie.schema.group import ShoppingListItemCreate, ShoppingListOut
-from mealie.schema.group.group_shopping_list import (
+from mealie.schema.household.group_shopping_list import (
     ShoppingListCreate,
     ShoppingListItemBase,
+    ShoppingListItemCreate,
     ShoppingListItemOut,
     ShoppingListItemRecipeRefCreate,
     ShoppingListItemRecipeRefOut,
@@ -15,6 +15,7 @@ from mealie.schema.group.group_shopping_list import (
     ShoppingListItemUpdate,
     ShoppingListItemUpdateBulk,
     ShoppingListMultiPurposeLabelCreate,
+    ShoppingListOut,
     ShoppingListSave,
 )
 from mealie.schema.recipe.recipe_ingredient import (
@@ -23,24 +24,19 @@ from mealie.schema.recipe.recipe_ingredient import (
     RecipeIngredient,
 )
 from mealie.schema.response.pagination import OrderDirection, PaginationQuery
-from mealie.schema.user.user import GroupInDB, UserOut
 from mealie.services.parser_services._base import DataMatcher
 
 
 class ShoppingListService:
     DEFAULT_FOOD_FUZZY_MATCH_THRESHOLD = 80
 
-    def __init__(self, repos: AllRepositories, group: GroupInDB, user: UserOut):
+    def __init__(self, repos: AllRepositories):
         self.repos = repos
-        self.group = group
-        self.user = user
         self.shopping_lists = repos.group_shopping_lists
         self.list_items = repos.group_shopping_list_item
         self.list_item_refs = repos.group_shopping_list_item_references
         self.list_refs = repos.group_shopping_list_recipe_refs
-        self.data_matcher = DataMatcher(
-            self.group.id, self.repos, food_fuzzy_match_threshold=self.DEFAULT_FOOD_FUZZY_MATCH_THRESHOLD
-        )
+        self.data_matcher = DataMatcher(self.repos, food_fuzzy_match_threshold=self.DEFAULT_FOOD_FUZZY_MATCH_THRESHOLD)
 
     @staticmethod
     def can_merge(item1: ShoppingListItemBase, item2: ShoppingListItemBase) -> bool:
@@ -497,11 +493,11 @@ class ShoppingListService:
 
         return self.shopping_lists.get_one(shopping_list.id), items  # type: ignore
 
-    def create_one_list(self, data: ShoppingListCreate):
-        create_data = data.cast(ShoppingListSave, group_id=self.group.id, user_id=self.user.id)
+    def create_one_list(self, data: ShoppingListCreate, owner_id: UUID4):
+        create_data = data.cast(ShoppingListSave, group_id=self.repos.group_id, user_id=owner_id)
         new_list = self.shopping_lists.create(create_data)  # type: ignore
 
-        labels = self.repos.group_multi_purpose_labels.by_group(self.group.id).page_all(
+        labels = self.repos.group_multi_purpose_labels.page_all(
             PaginationQuery(page=1, per_page=-1, order_by="name", order_direction=OrderDirection.asc)
         )
         label_settings = [
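
ShoppingListService no longer carries a group or user; all scoping comes from the repository factory it is given, and the list owner is passed per call. A hedged sketch of the new usage (ShoppingListCreate(name=...) is assumed to be a valid minimal payload; the wrapper is illustrative):

from mealie.repos.all_repositories import get_repositories
from mealie.schema.household.group_shopping_list import ShoppingListCreate
from mealie.services.household_services.shopping_lists import ShoppingListService


def create_groceries_list(session, group_id, household_id, owner_id):
    repos = get_repositories(session, group_id=group_id, household_id=household_id)
    service = ShoppingListService(repos)
    # The owner is supplied per call instead of being stored on the service.
    return service.create_one_list(ShoppingListCreate(name="Groceries"), owner_id)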
@@ -1,6 +1,5 @@
 import contextlib
 from pathlib import Path
-from uuid import UUID
 
 from pydantic import UUID4
 
@@ -42,7 +41,8 @@ class BaseMigrator(BaseService):
         db: AllRepositories,
         session,
         user_id: UUID4,
-        group_id: UUID,
+        household_id: UUID4,
+        group_id: UUID4,
         add_migration_tag: bool,
         translator: Translator,
     ):
@@ -56,11 +56,16 @@ class BaseMigrator(BaseService):
         if not user:
             raise UnexpectedNone(f"Cannot find user {user_id}")
 
+        household = db.households.get_one(household_id)
+        if not household:
+            raise UnexpectedNone(f"Cannot find household {household_id}")
+
         group = db.groups.get_one(group_id)
         if not group:
             raise UnexpectedNone(f"Cannot find group {group_id}")
 
         self.user = user
+        self.household = household
         self.group = group
 
         self.name = "migration"
@@ -69,8 +74,8 @@ class BaseMigrator(BaseService):
 
         self.logger = root_logger.get_logger()
 
-        self.helpers = DatabaseMigrationHelpers(self.db, self.session, self.group.id, self.user.id)
-        self.recipe_service = RecipeService(db, user, group, translator=self.translator)
+        self.helpers = DatabaseMigrationHelpers(self.db, self.session)
+        self.recipe_service = RecipeService(db, user, household, translator=self.translator)
 
         super().__init__()
 
@@ -163,16 +168,16 @@ class BaseMigrator(BaseService):
 
         return_vars: list[tuple[str, UUID4, bool]] = []
 
-        if not self.group.preferences:
-            raise ValueError("Group preferences not found")
+        if not self.household.preferences:
+            raise ValueError("Household preferences not found")
 
         default_settings = RecipeSettings(
-            public=self.group.preferences.recipe_public,
-            show_nutrition=self.group.preferences.recipe_show_nutrition,
-            show_assets=self.group.preferences.recipe_show_assets,
-            landscape_view=self.group.preferences.recipe_landscape_view,
-            disable_comments=self.group.preferences.recipe_disable_comments,
-            disable_amount=self.group.preferences.recipe_disable_amount,
+            public=self.household.preferences.recipe_public,
+            show_nutrition=self.household.preferences.recipe_show_nutrition,
+            show_assets=self.household.preferences.recipe_show_assets,
+            landscape_view=self.household.preferences.recipe_landscape_view,
+            disable_comments=self.household.preferences.recipe_disable_comments,
+            disable_amount=self.household.preferences.recipe_disable_amount,
         )
 
         for recipe in validated_recipes:
@@ -1,23 +1,25 @@
-from collections.abc import Iterable
-from typing import TypeVar
+from __future__ import annotations
 
-from pydantic import UUID4, BaseModel
+from collections.abc import Iterable
+from typing import TYPE_CHECKING, TypeVar
+
+from pydantic import BaseModel
 from slugify import slugify
 from sqlalchemy.orm import Session
 
 from mealie.repos.all_repositories import AllRepositories
-from mealie.repos.repository_factory import RepositoryGeneric
 from mealie.schema.recipe import RecipeCategory
 from mealie.schema.recipe.recipe import RecipeTag
 from mealie.schema.recipe.recipe_category import CategoryOut, CategorySave, TagOut, TagSave
 
 T = TypeVar("T", bound=BaseModel)
 
+if TYPE_CHECKING:
+    from mealie.repos.repository_generic import RepositoryGeneric
+
 
 class DatabaseMigrationHelpers:
-    def __init__(self, db: AllRepositories, session: Session, group_id: UUID4, user_id: UUID4) -> None:
-        self.group_id = group_id
-        self.user_id = user_id
+    def __init__(self, db: AllRepositories, session: Session) -> None:
         self.session = session
         self.db = db
 
@@ -39,7 +41,7 @@ class DatabaseMigrationHelpers:
             if not item_model:
                 item_model = accessor.create(
                     create_model(
-                        group_id=self.group_id,
+                        group_id=self.db.group_id,
                         name=item_name,
                         slug=slug_lookup,
                     )
@@ -50,7 +52,7 @@
 
     def get_or_set_category(self, categories: Iterable[str]) -> list[RecipeCategory]:
         return self._get_or_set_generic(
-            self.db.categories.by_group(self.group_id),
+            self.db.categories,
             categories,
             CategorySave,
             CategoryOut,
@@ -58,7 +60,7 @@
 
     def get_or_set_tags(self, tags: Iterable[str]) -> list[RecipeTag]:
         return self._get_or_set_generic(
-            self.db.tags.by_group(self.group_id),
+            self.db.tags,
             tags,
             TagSave,
             TagOut,
@@ -23,12 +23,10 @@ T = TypeVar("T", bound=BaseModel)
 class DataMatcher:
     def __init__(
         self,
-        group_id: UUID4,
         repos: AllRepositories,
         food_fuzzy_match_threshold: int = 85,
         unit_fuzzy_match_threshold: int = 70,
     ) -> None:
-        self.group_id = group_id
         self.repos = repos
 
         self._food_fuzzy_match_threshold = food_fuzzy_match_threshold
@@ -39,7 +37,7 @@ class DataMatcher:
     @property
     def foods_by_alias(self) -> dict[str, IngredientFood]:
         if self._foods_by_alias is None:
-            foods_repo = self.repos.ingredient_foods.by_group(self.group_id)
+            foods_repo = self.repos.ingredient_foods
             query = PaginationQuery(page=1, per_page=-1)
             all_foods = foods_repo.page_all(query).items
 
@@ -61,7 +59,7 @@ class DataMatcher:
     @property
     def units_by_alias(self) -> dict[str, IngredientUnit]:
         if self._units_by_alias is None:
-            units_repo = self.repos.ingredient_units.by_group(self.group_id)
+            units_repo = self.repos.ingredient_units
             query = PaginationQuery(page=1, per_page=-1)
             all_units = units_repo.page_all(query).items
 
@@ -132,13 +130,11 @@ class ABCIngredientParser(ABC):
     def __init__(self, group_id: UUID4, session: Session) -> None:
         self.group_id = group_id
         self.session = session
-        self.data_matcher = DataMatcher(
-            self.group_id, self._repos, self.food_fuzzy_match_threshold, self.unit_fuzzy_match_threshold
-        )
+        self.data_matcher = DataMatcher(self._repos, self.food_fuzzy_match_threshold, self.unit_fuzzy_match_threshold)
 
     @property
     def _repos(self) -> AllRepositories:
-        return get_repositories(self.session)
+        return get_repositories(self.session, group_id=self.group_id)
 
     @property
     def food_fuzzy_match_threshold(self) -> int:
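
DataMatcher and the ingredient parsers follow the same pattern: the explicit group_id argument is gone, and the repositories passed in are expected to be pre-scoped. A short sketch of the new construction (the wrapper function is illustrative, not from the PR):

from mealie.repos.all_repositories import get_repositories
from mealie.services.parser_services._base import DataMatcher


def build_matcher(session, group_id):
    repos = get_repositories(session, group_id=group_id)
    # ingredient_foods / ingredient_units are already filtered by the repos scope,
    # so no .by_group() call is needed any more.
    return DataMatcher(repos, food_fuzzy_match_threshold=85, unit_fuzzy_match_threshold=70)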
@@ -63,7 +63,7 @@ class InvalidDomainError(Exception):
 class RecipeDataService(BaseService):
     minifier: img.ABCMinifier
 
-    def __init__(self, recipe_id: UUID4, group_id: UUID4 | None = None) -> None:
+    def __init__(self, recipe_id: UUID4) -> None:
         """
         RecipeDataService is a service that consolidates the reading/writing actions related
         to assets, and images for a recipe.
@@ -71,7 +71,6 @@ class RecipeDataService(BaseService):
         super().__init__()
 
         self.recipe_id = recipe_id
-        self.slug = group_id
         self.minifier = img.PillowMinifier(purge=True, logger=self.logger)
 
         self.dir_data = Recipe.directory_from_id(self.recipe_id)
@@ -18,6 +18,7 @@ from mealie.lang.providers import Translator
 from mealie.pkgs import cache
 from mealie.repos.repository_factory import AllRepositories
 from mealie.repos.repository_generic import RepositoryGeneric
+from mealie.schema.household.household import HouseholdInDB
 from mealie.schema.openai.recipe import OpenAIRecipe
 from mealie.schema.recipe.recipe import CreateRecipe, Recipe
 from mealie.schema.recipe.recipe_ingredient import RecipeIngredient
@@ -26,7 +27,7 @@ from mealie.schema.recipe.recipe_settings import RecipeSettings
 from mealie.schema.recipe.recipe_step import RecipeStep
 from mealie.schema.recipe.recipe_timeline_events import RecipeTimelineEventCreate, TimelineEventType
 from mealie.schema.recipe.request_helpers import RecipeDuplicate
-from mealie.schema.user.user import GroupInDB, PrivateUser, UserRatingCreate
+from mealie.schema.user.user import PrivateUser, UserRatingCreate
 from mealie.services._base_service import BaseService
 from mealie.services.openai import OpenAIDataInjection, OpenAILocalImage, OpenAIService
 from mealie.services.recipe.recipe_data_service import RecipeDataService
@@ -35,10 +36,15 @@ from .template_service import TemplateService
 
 
 class RecipeServiceBase(BaseService):
-    def __init__(self, repos: AllRepositories, user: PrivateUser, group: GroupInDB, translator: Translator):
+    def __init__(self, repos: AllRepositories, user: PrivateUser, household: HouseholdInDB, translator: Translator):
         self.repos = repos
         self.user = user
-        self.group = group
+        self.household = household
+
+        if repos.group_id != user.group_id != household.group_id:
+            raise Exception("group ids do not match")
+        if repos.household_id != user.household_id != household.id:
+            raise Exception("household ids do not match")
 
         self.translator = translator
         self.t = translator.t
@@ -48,7 +54,7 @@ class RecipeServiceBase(BaseService):
 
 class RecipeService(RecipeServiceBase):
     def _get_recipe(self, data: str | UUID, key: str | None = None) -> Recipe:
-        recipe = self.repos.recipes.by_group(self.group.id).get_one(data, key)
+        recipe = self.repos.recipes.get_one(data, key)
         if recipe is None:
             raise exceptions.NoEntryFound("Recipe not found.")
         return recipe
@@ -97,7 +103,8 @@ class RecipeService(RecipeServiceBase):
         additional_attrs = additional_attrs or {}
         additional_attrs["name"] = name
         additional_attrs["user_id"] = self.user.id
-        additional_attrs["group_id"] = self.user.group_id
+        additional_attrs["household_id"] = self.household.id
+        additional_attrs["group_id"] = self.household.group_id
 
         if additional_attrs.get("tags"):
             for i in range(len(additional_attrs.get("tags", []))):
@@ -133,14 +140,14 @@ class RecipeService(RecipeServiceBase):
         data: Recipe = self._recipe_creation_factory(name=create_data.name, additional_attrs=create_data.model_dump())
 
         if isinstance(create_data, CreateRecipe) or create_data.settings is None:
-            if self.group.preferences is not None:
+            if self.household.preferences is not None:
                 data.settings = RecipeSettings(
-                    public=self.group.preferences.recipe_public,
-                    show_nutrition=self.group.preferences.recipe_show_nutrition,
-                    show_assets=self.group.preferences.recipe_show_assets,
-                    landscape_view=self.group.preferences.recipe_landscape_view,
-                    disable_comments=self.group.preferences.recipe_disable_comments,
-                    disable_amount=self.group.preferences.recipe_disable_amount,
+                    public=self.household.preferences.recipe_public,
+                    show_nutrition=self.household.preferences.recipe_show_nutrition,
+                    show_assets=self.household.preferences.recipe_show_assets,
+                    landscape_view=self.household.preferences.recipe_landscape_view,
+                    disable_comments=self.household.preferences.recipe_disable_comments,
+                    disable_amount=self.household.preferences.recipe_disable_amount,
                 )
             else:
                 data.settings = RecipeSettings()
@@ -172,7 +179,7 @@ class RecipeService(RecipeServiceBase):
         return new_recipe
 
     def _transform_user_id(self, user_id: str) -> str:
-        query = self.repos.users.by_group(self.group.id).get_one(user_id)
+        query = self.repos.users.get_one(user_id)
         if query:
             return user_id
         else:
@@ -207,14 +214,15 @@ class RecipeService(RecipeServiceBase):
         elif not isinstance(data, dict):
             return data
 
-        # force group_id to match the group id of the current user
-        data["group_id"] = str(self.group.id)
+        # force group_id and household_id to match the group id of the current user
+        data["group_id"] = str(self.user.group_id)
+        data["household_id"] = str(self.user.household_id)
 
         # make sure categories and tags are valid
         if key == "recipe_category":
-            return self._transform_category_or_tag(data, self.repos.categories.by_group(self.group.id))
+            return self._transform_category_or_tag(data, self.repos.categories)
         elif key == "tags":
-            return self._transform_category_or_tag(data, self.repos.tags.by_group(self.group.id))
+            return self._transform_category_or_tag(data, self.repos.tags)
 
         # recursively process other objects
         for k, v in data.items():
@@ -259,7 +267,7 @@ class RecipeService(RecipeServiceBase):
         return recipe
 
     async def create_from_images(self, images: list[UploadFile], translate_language: str | None = None) -> Recipe:
-        openai_recipe_service = OpenAIRecipeService(self.repos, self.user, self.group, self.translator)
+        openai_recipe_service = OpenAIRecipeService(self.repos, self.user, self.household, self.translator)
         with get_temporary_path() as temp_path:
             local_images: list[Path] = []
             for image in images:
@@ -321,8 +329,8 @@ class RecipeService(RecipeServiceBase):
         # Copy all assets (including images) to the new recipe directory
         # This assures that replaced links in recipe steps continue to work when the old recipe is deleted
         try:
-            new_service = RecipeDataService(new_recipe.id, group_id=old_recipe.group_id)
-            old_service = RecipeDataService(old_recipe.id, group_id=old_recipe.group_id)
+            new_service = RecipeDataService(new_recipe.id)
+            old_service = RecipeDataService(old_recipe.id)
             copytree(
                 old_service.dir_data,
                 new_service.dir_data,
@@ -379,9 +387,7 @@ class RecipeService(RecipeServiceBase):
         if recipe is None:
             raise exceptions.NoEntryFound("Recipe not found.")
 
-        new_data = self.repos.recipes.by_group(self.group.id).patch(
-            recipe.slug, patch_data.model_dump(exclude_unset=True)
-        )
+        new_data = self.repos.recipes.patch(recipe.slug, patch_data.model_dump(exclude_unset=True))
 
         self.check_assets(new_data, recipe.slug)
         return new_data
@@ -389,7 +395,7 @@
     def update_last_made(self, slug: str, timestamp: datetime) -> Recipe:
         # we bypass the pre update check since any user can update a recipe's last made date, even if it's locked
         recipe = self._get_recipe(slug)
-        return self.repos.recipes.by_group(self.group.id).patch(recipe.slug, {"last_made": timestamp})
+        return self.repos.recipes.patch(recipe.slug, {"last_made": timestamp})
 
     def delete_one(self, slug) -> Recipe:
         recipe = self._get_recipe(slug)
@@ -414,6 +420,7 @@ class OpenAIRecipeService(RecipeServiceBase):
         return Recipe(
             user_id=self.user.id,
             group_id=self.user.group_id,
+            household_id=self.household.id,
             name=openai_recipe.name,
             slug=slugify(openai_recipe.name),
             description=openai_recipe.description,
@@ -1,6 +1,7 @@
 from datetime import datetime, time, timedelta, timezone
 
 from pydantic import UUID4
+from sqlalchemy.orm import Session
 
 from mealie.db.db_setup import session_context
 from mealie.repos.all_repositories import get_repositories
@@ -18,99 +19,109 @@ from mealie.services.event_bus_service.event_types import (
 )
 
 
-def create_mealplan_timeline_events(group_id: UUID4 | None = None):
+def _create_mealplan_timeline_events_for_household(
+    event_time: datetime, session: Session, group_id: UUID4, household_id: UUID4
+) -> None:
+    repos = get_repositories(session, group_id=group_id, household_id=household_id)
+    event_bus_service = EventBusService(session=session)
+
+    timeline_events_to_create: list[RecipeTimelineEventCreate] = []
+    recipes_to_update: dict[UUID4, RecipeSummary] = {}
+    recipe_id_to_slug_map: dict[UUID4, str] = {}
+
+    mealplans = repos.meals.get_today()
+    for mealplan in mealplans:
+        if not (mealplan.recipe and mealplan.user_id):
+            continue
+
+        user = repos.users.get_one(mealplan.user_id)
+        if not user:
+            continue
+
+        # TODO: make this translatable
+        if mealplan.entry_type == PlanEntryType.side:
+            event_subject = f"{user.full_name} made this as a side"
+
+        else:
+            event_subject = f"{user.full_name} made this for {mealplan.entry_type.value}"
+
+        query_start_time = datetime.combine(datetime.now(timezone.utc).date(), time.min)
+        query_end_time = query_start_time + timedelta(days=1)
+        query = PaginationQuery(
+            query_filter=(
+                f'recipe_id = "{mealplan.recipe_id}" '
+                f'AND timestamp >= "{query_start_time.isoformat()}" '
+                f'AND timestamp < "{query_end_time.isoformat()}" '
+                f'AND subject = "{event_subject}"'
+            )
+        )
+
+        # if this event already exists, don't create it again
+        events = repos.recipe_timeline_events.page_all(pagination=query)
+        if events.items:
+            continue
+
+        # bump up the last made date
+        last_made = mealplan.recipe.last_made
+        if (not last_made or last_made.date() < event_time.date()) and mealplan.recipe_id not in recipes_to_update:
+            recipes_to_update[mealplan.recipe_id] = mealplan.recipe
+
+        timeline_events_to_create.append(
+            RecipeTimelineEventCreate(
+                user_id=user.id,
+                subject=event_subject,
+                event_type=TimelineEventType.info,
+                timestamp=event_time,
+                recipe_id=mealplan.recipe_id,
+            )
+        )
+
+        recipe_id_to_slug_map[mealplan.recipe_id] = mealplan.recipe.slug
+
+    if not timeline_events_to_create:
+        return
+
+    # TODO: use bulk operations
+    for event in timeline_events_to_create:
+        new_event = repos.recipe_timeline_events.create(event)
+        event_bus_service.dispatch(
+            integration_id=DEFAULT_INTEGRATION_ID,
+            group_id=group_id,
+            household_id=household_id,
+            event_type=EventTypes.recipe_updated,
+            document_data=EventRecipeTimelineEventData(
+                operation=EventOperation.create,
+                recipe_slug=recipe_id_to_slug_map[new_event.recipe_id],
+                recipe_timeline_event_id=new_event.id,
+            ),
+        )
+
+    for recipe in recipes_to_update.values():
+        repos.recipes.patch(recipe.slug, {"last_made": event_time})
+        event_bus_service.dispatch(
+            integration_id=DEFAULT_INTEGRATION_ID,
+            group_id=group_id,
+            household_id=household_id,
+            event_type=EventTypes.recipe_updated,
+            document_data=EventRecipeData(operation=EventOperation.update, recipe_slug=recipe.slug),
+        )
+
+
+def _create_mealplan_timeline_events_for_group(event_time: datetime, session: Session, group_id: UUID4) -> None:
+    repos = get_repositories(session, group_id=group_id)
+    households_data = repos.households.page_all(PaginationQuery(page=1, per_page=-1))
+    household_ids = [household.id for household in households_data.items]
+    for household_id in household_ids:
+        _create_mealplan_timeline_events_for_household(event_time, session, group_id, household_id)
+
+
+def create_mealplan_timeline_events() -> None:
     event_time = datetime.now(timezone.utc)
 
     with session_context() as session:
         repos = get_repositories(session)
-        if group_id is None:
-            # if not specified, we check all groups
-            groups_data = repos.groups.page_all(PaginationQuery(page=1, per_page=-1))
-            group_ids = [group.id for group in groups_data.items]
-
-        else:
-            group_ids = [group_id]
+        groups_data = repos.groups.page_all(PaginationQuery(page=1, per_page=-1))
+        group_ids = [group.id for group in groups_data.items]
 
         for group_id in group_ids:
-            event_bus_service = EventBusService(session=session, group_id=group_id)
-
-            timeline_events_to_create: list[RecipeTimelineEventCreate] = []
-            recipes_to_update: dict[UUID4, RecipeSummary] = {}
-            recipe_id_to_slug_map: dict[UUID4, str] = {}
-
-            mealplans = repos.meals.get_today(group_id)
-            for mealplan in mealplans:
-                if not (mealplan.recipe and mealplan.user_id):
-                    continue
-
-                user = repos.users.get_one(mealplan.user_id)
-                if not user:
-                    continue
-
-                # TODO: make this translatable
-                if mealplan.entry_type == PlanEntryType.side:
-                    event_subject = f"{user.full_name} made this as a side"
-
-                else:
-                    event_subject = f"{user.full_name} made this for {mealplan.entry_type.value}"
-
-                query_start_time = datetime.combine(datetime.now(timezone.utc).date(), time.min)
-                query_end_time = query_start_time + timedelta(days=1)
-                query = PaginationQuery(
-                    query_filter=(
-                        f'recipe_id = "{mealplan.recipe_id}" '
-                        f'AND timestamp >= "{query_start_time.isoformat()}" '
-                        f'AND timestamp < "{query_end_time.isoformat()}" '
-                        f'AND subject = "{event_subject}"'
-                    )
-                )
-
-                # if this event already exists, don't create it again
-                events = repos.recipe_timeline_events.page_all(pagination=query)
-                if events.items:
-                    continue
-
-                # bump up the last made date
-                last_made = mealplan.recipe.last_made
-                if (
-                    not last_made or last_made.date() < event_time.date()
-                ) and mealplan.recipe_id not in recipes_to_update:
-                    recipes_to_update[mealplan.recipe_id] = mealplan.recipe
-
-                timeline_events_to_create.append(
-                    RecipeTimelineEventCreate(
-                        user_id=user.id,
-                        subject=event_subject,
-                        event_type=TimelineEventType.info,
-                        timestamp=event_time,
-                        recipe_id=mealplan.recipe_id,
-                    )
-                )
-
-                recipe_id_to_slug_map[mealplan.recipe_id] = mealplan.recipe.slug
-
-            if not timeline_events_to_create:
-                return
-
-            # TODO: use bulk operations
-            for event in timeline_events_to_create:
-                new_event = repos.recipe_timeline_events.create(event)
-                event_bus_service.dispatch(
-                    integration_id=DEFAULT_INTEGRATION_ID,
-                    group_id=group_id,
-                    event_type=EventTypes.recipe_updated,
-                    document_data=EventRecipeTimelineEventData(
-                        operation=EventOperation.create,
-                        recipe_slug=recipe_id_to_slug_map[new_event.recipe_id],
-                        recipe_timeline_event_id=new_event.id,
-                    ),
-                )
-
-            for recipe in recipes_to_update.values():
-                repos.recipes.patch(recipe.slug, {"last_made": event_time})
-                event_bus_service.dispatch(
-                    integration_id=DEFAULT_INTEGRATION_ID,
-                    group_id=group_id,
-                    event_type=EventTypes.recipe_updated,
-                    document_data=EventRecipeData(operation=EventOperation.update, recipe_slug=recipe.slug),
-                )
+            _create_mealplan_timeline_events_for_group(event_time, session, group_id)
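
The scheduler task now splits into three layers: a per-household worker, a per-group wrapper that resolves household ids, and a thin entry point that iterates all groups. The same fan-out pattern can be expressed generically; a hedged sketch (the helper is illustrative, not part of the PR):

from mealie.repos.all_repositories import get_repositories
from mealie.schema.response.pagination import PaginationQuery


def for_each_household(session, group_id, fn) -> None:
    # Resolve the households of one group, then delegate to a per-household worker.
    repos = get_repositories(session, group_id=group_id)
    for household in repos.households.page_all(PaginationQuery(page=1, per_page=-1)).items:
        fn(group_id, household.id)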
@@ -4,21 +4,28 @@ from pydantic import UUID4
 
 from mealie.db.db_setup import session_context
 from mealie.repos.all_repositories import get_repositories
-from mealie.routes.groups.controller_shopping_lists import publish_list_item_events
+from mealie.routes.households.controller_shopping_lists import publish_list_item_events
 from mealie.schema.response.pagination import OrderDirection, PaginationQuery
 from mealie.schema.user.user import DEFAULT_INTEGRATION_ID
 from mealie.services.event_bus_service.event_bus_service import EventBusService
 from mealie.services.event_bus_service.event_types import EventDocumentDataBase, EventTypes
-from mealie.services.group_services.shopping_lists import ShoppingListService
+from mealie.services.household_services.shopping_lists import ShoppingListService
 
 MAX_CHECKED_ITEMS = 100
 
 
-def _create_publish_event(event_bus_service: EventBusService, group_id: UUID4):
-    def publish_event(event_type: EventTypes, document_data: EventDocumentDataBase, message: str = ""):
+def _create_publish_event(event_bus_service: EventBusService):
+    def publish_event(
+        event_type: EventTypes,
+        document_data: EventDocumentDataBase,
+        group_id: UUID4,
+        household_id: UUID4 | None,
+        message: str = "",
+    ):
         event_bus_service.dispatch(
             integration_id=DEFAULT_INTEGRATION_ID,
             group_id=group_id,
+            household_id=household_id,
             event_type=event_type,
             document_data=document_data,
             message=message,
@@ -32,7 +39,7 @@ def _trim_list_items(shopping_list_service: ShoppingListService, shopping_list_i
         page=1,
         per_page=-1,
         query_filter=f'shopping_list_id="{shopping_list_id}" AND checked=true',
-        order_by="update_at",
+        order_by="updated_at",
         order_direction=OrderDirection.desc,
     )
     query = shopping_list_service.list_items.page_all(pagination)
@@ -44,28 +51,25 @@ def _trim_list_items(shopping_list_service: ShoppingListService, shopping_list_i
     publish_list_item_events(event_publisher, items_response)
 
 
-def delete_old_checked_list_items(group_id: UUID4 | None = None):
+def delete_old_checked_list_items():
     with session_context() as session:
         repos = get_repositories(session)
-        if group_id is None:
-            # if not specified, we check all groups
-            groups = repos.groups.page_all(PaginationQuery(page=1, per_page=-1)).items
-
-        else:
-            group = repos.groups.get_one(group_id)
-            if not group:
-                raise Exception(f'Group not found: "{group_id}"')
-
-            groups = [group]
+        groups = repos.groups.page_all(PaginationQuery(page=1, per_page=-1)).items
 
         for group in groups:
-            event_bus_service = EventBusService(session=session, group_id=group.id)
-            # user is passed as None since we don't use it here
-            shopping_list_service = ShoppingListService(repos, group, None)  # type: ignore
-            shopping_list_data = repos.group_shopping_lists.by_group(group.id).page_all(
-                PaginationQuery(page=1, per_page=-1)
-            )
-            for shopping_list in shopping_list_data.items:
-                _trim_list_items(
-                    shopping_list_service, shopping_list.id, _create_publish_event(event_bus_service, group.id)
-                )
+            group_repos = get_repositories(session, group_id=group.id)
+            households = group_repos.households.page_all(PaginationQuery(page=1, per_page=-1)).items
+            event_bus_service = EventBusService(session=session)
+            event_publisher = _create_publish_event(event_bus_service)
+
+            for household in households:
+                household_repos = get_repositories(session, group_id=group.id, household_id=household.id)
+
+                shopping_list_service = ShoppingListService(household_repos)
+                shopping_list_data = household_repos.group_shopping_lists.page_all(PaginationQuery(page=1, per_page=-1))
+                for shopping_list in shopping_list_data.items:
+                    _trim_list_items(
+                        shopping_list_service,
+                        shopping_list.id,
+                        event_publisher,
+                    )
@@ -4,7 +4,7 @@ from pydantic import UUID4
 
 from mealie.db.db_setup import session_context
 from mealie.repos.all_repositories import get_repositories
-from mealie.schema.group.webhook import ReadWebhook
+from mealie.schema.household.webhook import ReadWebhook
 from mealie.schema.response.pagination import PaginationQuery
 from mealie.services.event_bus_service.event_bus_listeners import WebhookEventListener
 from mealie.services.event_bus_service.event_bus_service import EventBusService
@@ -21,7 +21,9 @@ from mealie.services.event_bus_service.event_types import (
 last_ran = datetime.now(timezone.utc)
 
 
-def post_group_webhooks(start_dt: datetime | None = None, group_id: UUID4 | None = None) -> None:
+def post_group_webhooks(
+    start_dt: datetime | None = None, group_id: UUID4 | None = None, household_id: UUID4 | None = None
+) -> None:
     """Post webhook events to specified group, or all groups"""
 
     global last_ran
@@ -58,13 +60,23 @@ def post_group_webhooks(start_dt: datetime | None = None, group_id: UUID4 | None
     )
 
     for group_id in group_ids:
-        event_bus = EventBusService(group_id=group_id)
-        event_bus.dispatch(
-            integration_id=INTERNAL_INTEGRATION_ID,
-            group_id=group_id,
-            event_type=event_type,
-            document_data=event_document_data,
-        )
+        if household_id is None:
+            with session_context() as session:
+                household_repos = get_repositories(session, group_id=group_id)
+                households_data = household_repos.households.page_all(PaginationQuery(page=1, per_page=-1))
+                household_ids = [household.id for household in households_data.items]
+        else:
+            household_ids = [household_id]
+
+        for household_id in household_ids:
+            event_bus = EventBusService()
+            event_bus.dispatch(
+                integration_id=INTERNAL_INTEGRATION_ID,
+                group_id=group_id,
+                household_id=household_id,
+                event_type=event_type,
+                document_data=event_document_data,
+            )
 
 
 def post_single_webhook(webhook: ReadWebhook, message: str = "") -> None:
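
post_group_webhooks() gains an optional household_id with the same semantics as dispatch(): None means every household in the targeted group(s). A usage sketch (the import path of the task module is assumed; it is not shown in this diff):

from pydantic import UUID4

from mealie.services.scheduler.tasks.post_webhooks import post_group_webhooks


def run_for_group(group_id: UUID4) -> None:
    # household_id=None keeps the old behaviour: every household in the group
    # has its scheduled webhooks posted.
    post_group_webhooks(group_id=group_id, household_id=None)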
@@ -84,5 +96,5 @@ def post_single_webhook(webhook: ReadWebhook, message: str = "") -> None:
         document_data=event_document_data,
     )
 
-    listener = WebhookEventListener(webhook.group_id)
+    listener = WebhookEventListener(webhook.group_id, webhook.household_id)
    listener.publish_to_subscribers(event, [webhook])
@@ -4,7 +4,7 @@ from sqlalchemy import delete
 
 from mealie.core import root_logger
 from mealie.db.db_setup import session_context
-from mealie.db.models.group import GroupInviteToken
+from mealie.db.models.household import GroupInviteToken
 
 logger = root_logger.get_logger()
 
@@ -1,6 +1,5 @@
 from dataclasses import dataclass
 
-from pydantic import UUID4
 from slugify import slugify
 
 from mealie.repos.repository_factory import AllRepositories
@@ -14,8 +13,6 @@ class NoContextException(Exception):
 
 @dataclass(slots=True)
 class ScraperContext:
-    user_id: UUID4
-    group_id: UUID4
     repos: AllRepositories
 
 
@@ -30,7 +27,7 @@ class ScrapedExtras:
         if not self._tags:
             return []
 
-        repo = ctx.repos.tags.by_group(ctx.group_id)
+        repo = ctx.repos.tags
 
         tags = []
         seen_tag_slugs: set[str] = set()
@@ -46,7 +43,7 @@ class ScrapedExtras:
                 tags.append(db_tag)
                 continue
 
-            save_data = TagSave(name=tag, group_id=ctx.group_id)
+            save_data = TagSave(name=tag, group_id=ctx.repos.group_id)
             db_tag = repo.create(save_data)
 
             tags.append(db_tag)
@@ -1,24 +1,21 @@
 from mealie.repos.repository_factory import AllRepositories
 from mealie.repos.seed.seeders import IngredientFoodsSeeder, IngredientUnitsSeeder, MultiPurposeLabelSeeder
-from mealie.schema.user.user import GroupInDB, PrivateUser
 from mealie.services._base_service import BaseService
 
 
 class SeederService(BaseService):
-    def __init__(self, repos: AllRepositories, user: PrivateUser, group: GroupInDB):
+    def __init__(self, repos: AllRepositories):
         self.repos = repos
-        self.user = user
-        self.group = group
         super().__init__()
 
     def seed_foods(self, locale: str) -> None:
-        seeder = IngredientFoodsSeeder(self.repos, self.logger, self.group.id)
+        seeder = IngredientFoodsSeeder(self.repos, self.logger)
         seeder.seed(locale)
 
     def seed_labels(self, locale: str) -> None:
-        seeder = MultiPurposeLabelSeeder(self.repos, self.logger, self.group.id)
+        seeder = MultiPurposeLabelSeeder(self.repos, self.logger)
         seeder.seed(locale)
 
     def seed_units(self, locale: str) -> None:
-        seeder = IngredientUnitsSeeder(self.repos, self.logger, self.group.id)
+        seeder = IngredientUnitsSeeder(self.repos, self.logger)
         seeder.seed(locale)
@@ -11,7 +11,7 @@ from mealie.services.email import EmailService
 
 class PasswordResetService(BaseService):
     def __init__(self, session: Session) -> None:
-        self.db = get_repositories(session)
+        self.db = get_repositories(session, group_id=None, household_id=None)
         super().__init__()
 
     def generate_reset_token(self, email: str) -> SavePasswordResetToken | None:
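
Across these call sites get_repositories() is now always called with an explicit scope; passing None for both ids, as the password-reset service does, yields an unscoped factory. A minimal sketch of the two modes (the wrapper is illustrative only):

from mealie.repos.all_repositories import get_repositories


def repos_for(session, group_id=None, household_id=None):
    # None/None gives an unscoped factory (e.g. password resets); supplying ids
    # narrows every repository to that group and household.
    return get_repositories(session, group_id=group_id, household_id=household_id)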
@@ -2,14 +2,20 @@ from logging import Logger
 from uuid import uuid4
 
 from fastapi import HTTPException, status
+from pydantic import UUID4
 
+from mealie.core.config import get_app_settings
 from mealie.core.security import hash_password
 from mealie.lang.providers import Translator
+from mealie.repos.all_repositories import get_repositories
 from mealie.repos.repository_factory import AllRepositories
 from mealie.schema.group.group_preferences import CreateGroupPreferences
+from mealie.schema.household.household import HouseholdCreate, HouseholdInDB
+from mealie.schema.household.household_preferences import CreateHouseholdPreferences
 from mealie.schema.user.registration import CreateUserRegistration
 from mealie.schema.user.user import GroupBase, GroupInDB, PrivateUser, UserIn
 from mealie.services.group_services.group_service import GroupService
+from mealie.services.household_services.household_service import HouseholdService
 from mealie.services.seeder.seeder_service import SeederService
 
 
@@ -22,7 +28,7 @@ class RegistrationService:
         self.repos = db
         self.t = translator.t
 
-    def _create_new_user(self, group: GroupInDB, new_group: bool) -> PrivateUser:
+    def _create_new_user(self, group: GroupInDB, household: HouseholdInDB, new_group: bool) -> PrivateUser:
         new_user = UserIn(
             email=self.registration.email,
             username=self.registration.username,
@@ -30,6 +36,7 @@ class RegistrationService:
             full_name=self.registration.full_name,
             advanced=self.registration.advanced,
             group=group,
+            household=household,
             can_invite=new_group,
             can_manage=new_group,
             can_organize=new_group,
@@ -44,6 +51,22 @@ class RegistrationService:
         group_preferences = CreateGroupPreferences(
             group_id=uuid4(),
             private_group=self.registration.private,
         )
 
         return GroupService.create_group(self.repos, group_data, group_preferences)
+
+    def _fetch_or_register_new_household(self, group_id: UUID4) -> HouseholdInDB:
+        settings = get_app_settings()
+        new_household_name = self.registration.household or settings.DEFAULT_HOUSEHOLD
+
+        group_repos = get_repositories(self.repos.session, group_id=group_id)
+        household_fetch = group_repos.households.get_by_name(new_household_name)
+        if household_fetch:
+            return household_fetch
+
+        household_data = HouseholdCreate(name=new_household_name)
+        household_preferences = CreateHouseholdPreferences(
+            private_household=self.registration.private,
+            first_day_of_week=0,
+            recipe_public=not self.registration.private,
+            recipe_show_nutrition=self.registration.advanced,
@@ -52,8 +75,8 @@ class RegistrationService:
             recipe_disable_comments=self.registration.advanced,
             recipe_disable_amount=self.registration.advanced,
         )
 
-        return GroupService.create_group(self.repos, group_data, group_preferences)
+        return HouseholdService.create_household(group_repos, household_data, household_preferences)
 
     def register_user(self, registration: CreateUserRegistration) -> PrivateUser:
         self.registration = registration
@@ -66,28 +88,32 @@ class RegistrationService:
         token_entry = None
         new_group = False
 
-        if registration.group_token and registration.group_token != "":
+        if registration.group_token:
             token_entry = self.repos.group_invite_tokens.get_one(registration.group_token)
             if not token_entry:
                 raise HTTPException(status.HTTP_400_BAD_REQUEST, {"message": "Invalid group token"})
 
             maybe_none_group = self.repos.groups.get_one(token_entry.group_id)
-
             if maybe_none_group is None:
                 raise HTTPException(status.HTTP_400_BAD_REQUEST, {"message": "Invalid group token"})
-
             group = maybe_none_group
 
+            maybe_none_household = self.repos.households.get_one(token_entry.household_id)
+            if maybe_none_household is None:
+                raise HTTPException(status.HTTP_400_BAD_REQUEST, {"message": "Invalid group token"})
+            household = maybe_none_household
         elif registration.group:
             new_group = True
             group = self._register_new_group()
+            household = self._fetch_or_register_new_household(group.id)
         else:
             raise HTTPException(status.HTTP_400_BAD_REQUEST, {"message": "Missing group"})
 
         self.logger.info(f"Registering user {registration.username}")
-        user = self._create_new_user(group, new_group)
+        user = self._create_new_user(group, household, new_group)
 
         if new_group and registration.seed_data:
-            seeder_service = SeederService(self.repos, user, group)
+            seeder_service = SeederService(self.repos)
             seeder_service.seed_foods(registration.locale)
             seeder_service.seed_labels(registration.locale)
             seeder_service.seed_units(registration.locale)