Mirror of https://github.com/mealie-recipes/mealie.git, synced 2026-01-30 20:43:12 -05:00
feat: Upgrade to Python 3.12 (#4675)
Co-authored-by: Hayden <64056131+hay-kot@users.noreply.github.com>
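The change itself is mechanical: Python 3.11 added datetime.UTC as an alias for datetime.timezone.utc, so every timezone.utc reference in the codebase can use the shorter spelling. A minimal standalone sketch of the equivalence (plain standard library, not Mealie code):

    from datetime import UTC, datetime, timezone

    # On Python 3.11+ both names are the same singleton object,
    # so the swap in the hunks below is behavior-preserving.
    assert UTC is timezone.utc
    print(datetime.now(UTC))  # aware datetime, e.g. 2025-01-01 12:00:00+00:00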
@@ -25,7 +25,7 @@ class BackupV2(BaseService):
         db_file = self.settings.DB_URL.removeprefix("sqlite:///")  # type: ignore

         # Create a backup of the SQLite database
-        timestamp = datetime.datetime.now(datetime.timezone.utc).strftime("%Y.%m.%d")
+        timestamp = datetime.datetime.now(datetime.UTC).strftime("%Y.%m.%d")
         shutil.copy(db_file, self.directories.DATA_DIR.joinpath(f"mealie_{timestamp}.bak.db"))

     def _postgres(self) -> None:
@@ -37,7 +37,7 @@ class BackupV2(BaseService):
         exclude_ext = {".zip"}
         exclude_dirs = {"backups", ".temp"}

-        timestamp = datetime.datetime.now(datetime.timezone.utc).strftime("%Y.%m.%d.%H.%M.%S")
+        timestamp = datetime.datetime.now(datetime.UTC).strftime("%Y.%m.%d.%H.%M.%S")

         backup_name = f"mealie_{timestamp}.zip"
         backup_file = self.directories.BACKUP_DIR / backup_name
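The two backup hunks above use a module-style import (import datetime), so the alias is reached as datetime.UTC rather than a bare UTC. A small standalone sketch showing that both spellings resolve to the same object:

    import datetime
    from datetime import UTC

    # The module attribute and the imported name are the same object.
    assert datetime.UTC is UTC
    print(datetime.datetime.now(datetime.UTC).strftime("%Y.%m.%d"))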
@@ -2,7 +2,7 @@ import contextlib
 import json
 from abc import ABC, abstractmethod
 from collections.abc import Generator
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import cast
 from urllib.parse import parse_qs, urlencode, urlsplit, urlunsplit

@@ -163,8 +163,8 @@ class WebhookEventListener(EventListenerBase):
         with self.ensure_session() as session:
             stmt = select(GroupWebhooksModel).where(
                 GroupWebhooksModel.enabled == True,  # noqa: E712 - required for SQLAlchemy comparison
-                GroupWebhooksModel.scheduled_time > start_dt.astimezone(timezone.utc).time(),
-                GroupWebhooksModel.scheduled_time <= end_dt.astimezone(timezone.utc).time(),
+                GroupWebhooksModel.scheduled_time > start_dt.astimezone(UTC).time(),
+                GroupWebhooksModel.scheduled_time <= end_dt.astimezone(UTC).time(),
                 GroupWebhooksModel.group_id == self.group_id,
                 GroupWebhooksModel.household_id == self.household_id,
             )
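For context on the webhook hunk: the query compares stored scheduled_time values (UTC wall-clock times) against the ends of the polling window. A minimal standalone sketch of that conversion, assuming start_dt and end_dt are timezone-aware (not Mealie code):

    from datetime import UTC, datetime, timedelta

    end_dt = datetime.now(UTC)
    start_dt = end_dt - timedelta(minutes=5)

    # astimezone(UTC) normalizes any aware datetime to UTC; .time() then
    # drops the date, leaving a wall-clock time comparable to a stored
    # scheduled_time column.
    window = (start_dt.astimezone(UTC).time(), end_dt.astimezone(UTC).time())
    print(window)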
@@ -1,5 +1,5 @@
 import uuid
-from datetime import date, datetime, timezone
+from datetime import UTC, date, datetime
 from enum import Enum, auto
 from typing import Any

@@ -193,4 +193,4 @@ class Event(MealieModel):
     def __init__(self, *args, **kwargs) -> None:
         super().__init__(*args, **kwargs)
         self.event_id = uuid.uuid4()
-        self.timestamp = datetime.now(timezone.utc)
+        self.timestamp = datetime.now(UTC)
@@ -43,7 +43,7 @@ class Exporter(BaseService):
             name="Data Export",
             size=pretty_size(export_path.stat().st_size),
             filename=export_path.name,
-            expires=datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(days=1),
+            expires=datetime.datetime.now(datetime.UTC) + datetime.timedelta(days=1),
         )

         db.group_exports.create(group_data_export)
@@ -1,6 +1,6 @@
 import tempfile
 import zipfile
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from pathlib import Path

 from bs4 import BeautifulSoup
@@ -35,7 +35,7 @@ class CopyMeThatMigrator(BaseMigrator):
         self.name = "copymethat"

         self.key_aliases = [
-            MigrationAlias(key="last_made", alias="made_this", func=lambda x: datetime.now(timezone.utc)),
+            MigrationAlias(key="last_made", alias="made_this", func=lambda x: datetime.now(UTC)),
             MigrationAlias(key="notes", alias="recipeNotes"),
             MigrationAlias(key="orgURL", alias="original_link"),
             MigrationAlias(key="rating", alias="ratingValue"),
@@ -1,7 +1,7 @@
 import json
 import os
 import shutil
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from pathlib import Path
 from shutil import copytree, rmtree
 from typing import Any
@@ -192,7 +192,7 @@ class RecipeService(RecipeServiceBase):
             recipe_id=new_recipe.id,
             subject=self.t("recipe.recipe-created"),
             event_type=TimelineEventType.system,
-            timestamp=new_recipe.created_at or datetime.now(timezone.utc),
+            timestamp=new_recipe.created_at or datetime.now(UTC),
         )

         self.repos.recipe_timeline_events.create(timeline_event_data)
@@ -1,5 +1,5 @@
 import asyncio
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta
 from pathlib import Path

 from mealie.core import root_logger
@@ -28,7 +28,7 @@ class SchedulerService:


 async def schedule_daily():
-    now = datetime.now(timezone.utc)
+    now = datetime.now(UTC)
     daily_schedule_time = get_app_settings().DAILY_SCHEDULE_TIME_UTC
     logger.debug(f"Current time is {now} and DAILY_SCHEDULE_TIME (in UTC) is {daily_schedule_time}")

@@ -1,4 +1,4 @@
-from datetime import datetime, time, timedelta, timezone
+from datetime import UTC, datetime, time, timedelta

 from pydantic import UUID4
 from sqlalchemy.orm import Session
@@ -45,7 +45,7 @@ def _create_mealplan_timeline_events_for_household(
     else:
         event_subject = f"{user.full_name} made this for {mealplan.entry_type.value}"

-    query_start_time = datetime.combine(datetime.now(timezone.utc).date(), time.min)
+    query_start_time = datetime.combine(datetime.now(UTC).date(), time.min)
     query_end_time = query_start_time + timedelta(days=1)
     query = PaginationQuery(
         query_filter=(
@@ -116,7 +116,7 @@ def _create_mealplan_timeline_events_for_group(event_time: datetime, session: Se


 def create_mealplan_timeline_events() -> None:
-    event_time = datetime.now(timezone.utc)
+    event_time = datetime.now(UTC)

     with session_context() as session:
         repos = get_repositories(session)
@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime

 from pydantic import UUID4

@@ -18,7 +18,7 @@ from mealie.services.event_bus_service.event_types import (
     EventWebhookData,
 )

-last_ran = datetime.now(timezone.utc)
+last_ran = datetime.now(UTC)


 def post_group_webhooks(
@@ -32,7 +32,7 @@ def post_group_webhooks(
     start_dt = start_dt or last_ran

     # end the query at the current time
-    last_ran = end_dt = datetime.now(timezone.utc)
+    last_ran = end_dt = datetime.now(UTC)

     if group_id is None:
         # publish the webhook event to each group's event bus
@@ -80,7 +80,7 @@ def post_group_webhooks(


 def post_single_webhook(webhook: ReadWebhook, message: str = "") -> None:
-    dt = datetime.min.replace(tzinfo=timezone.utc)
+    dt = datetime.min.replace(tzinfo=UTC)
     event_type = EventTypes.webhook_task

     event_document_data = EventWebhookData(
@@ -17,7 +17,7 @@ def purge_group_data_exports(max_minutes_old=ONE_DAY_AS_MINUTES):
     logger = root_logger.get_logger()

     logger.debug("purging group data exports")
-    limit = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(minutes=max_minutes_old)
+    limit = datetime.datetime.now(datetime.UTC) - datetime.timedelta(minutes=max_minutes_old)

     with session_context() as session:
         stmt = select(GroupDataExportsModel).filter(cast(GroupDataExportsModel.expires, NaiveDateTime) <= limit)
@@ -39,7 +39,7 @@ def purge_excess_files() -> None:
     directories = get_app_dirs()
     logger = root_logger.get_logger()

-    limit = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(minutes=ONE_DAY_AS_MINUTES * 2)
+    limit = datetime.datetime.now(datetime.UTC) - datetime.timedelta(minutes=ONE_DAY_AS_MINUTES * 2)

     for file in directories.GROUPS_DIR.glob("**/export/*.zip"):
         # TODO: fix comparison types
@@ -14,7 +14,7 @@ MAX_DAYS_OLD = 2
 def purge_password_reset_tokens():
     """Purges all events after x days"""
     logger.debug("purging password reset tokens")
-    limit = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(days=MAX_DAYS_OLD)
+    limit = datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=MAX_DAYS_OLD)

     with session_context() as session:
         stmt = delete(PasswordResetModel).filter(PasswordResetModel.created_at <= limit)
@@ -14,7 +14,7 @@ MAX_DAYS_OLD = 4
 def purge_group_registration():
     """Purges all events after x days"""
     logger.debug("purging expired registration tokens")
-    limit = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(days=MAX_DAYS_OLD)
+    limit = datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=MAX_DAYS_OLD)

     with session_context() as session:
         stmt = delete(GroupInviteToken).filter(GroupInviteToken.created_at <= limit)
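The purge tasks above share one cutoff pattern; a minimal standalone sketch, with MAX_DAYS_OLD standing in for either module's constant (2 for password-reset tokens, 4 for registration tokens):

    import datetime

    MAX_DAYS_OLD = 2

    # Rows with created_at <= limit are treated as expired and deleted.
    limit = datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=MAX_DAYS_OLD)
    print(limit)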
@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime

 from mealie.repos.repository_factory import AllRepositories
 from mealie.schema.user.user import PrivateUser
@@ -30,7 +30,7 @@ class UserService(BaseService):
         return unlocked

     def lock_user(self, user: PrivateUser) -> PrivateUser:
-        user.locked_at = datetime.now(timezone.utc)
+        user.locked_at = datetime.now(UTC)
         return self.repos.users.update(user.id, user)

     def unlock_user(self, user: PrivateUser) -> PrivateUser: