feat: Customizable OpenAI prompts (#5146) (#6588)

Co-authored-by: Michael Genson <71845777+michael-genson@users.noreply.github.com>
Co-authored-by: Michael Genson <genson.michael@gmail.com>
Imanuel
2026-01-30 19:00:03 +01:00
committed by GitHub
parent e83891e3ca
commit c7ae67e7cd
4 changed files with 157 additions and 20 deletions

View File

@@ -122,17 +122,18 @@ For usage, see [Usage - OpenID Connect](../authentication/oidc-v2.md)
Mealie supports various integrations using OpenAI. For more information, check out our [OpenAI documentation](./open-ai.md).
For custom mapping variables (e.g. OPENAI_CUSTOM_HEADERS), pass values as JSON-encoded strings (e.g. `OPENAI_CUSTOM_PARAMS='{"k1": "v1", "k2": "v2"}'`)
| Variables | Default | Description |
|---------------------------------------------------|:-------:|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| OPENAI_BASE_URL<super>[&dagger;][secrets]</super> | None | The base URL for the OpenAI API. If you're not sure, leave this empty to use the standard OpenAI platform |
| OPENAI_API_KEY<super>[&dagger;][secrets]</super> | None | Your OpenAI API Key. Enables OpenAI-related features |
| OPENAI_MODEL | gpt-4o | Which OpenAI model to use. If you're not sure, leave this empty |
| OPENAI_CUSTOM_HEADERS | None | Custom HTTP headers to add to all OpenAI requests. This should generally be left empty unless your custom service requires them |
| OPENAI_CUSTOM_PARAMS | None | Custom HTTP query params to add to all OpenAI requests. This should generally be left empty unless your custom service requires them |
| OPENAI_ENABLE_IMAGE_SERVICES | True | Whether to enable OpenAI image services, such as creating recipes via image. Leave this enabled unless your custom model doesn't support it, or you want to reduce costs |
| OPENAI_WORKERS | 2 | Number of OpenAI workers per request. Higher values may increase processing speed, but will incur additional API costs |
| OPENAI_SEND_DATABASE_DATA | True | Whether to send Mealie data to OpenAI to improve request accuracy. This will incur additional API costs |
| OPENAI_REQUEST_TIMEOUT | 300 | The number of seconds to wait for an OpenAI request to complete before cancelling the request. Leave this empty unless you're running into timeout issues on slower hardware |
| OPENAI_CUSTOM_PROMPT_DIR | None | Path to custom prompt files. Only existing files in your custom directory will override the defaults; any missing or empty custom files will automatically fall back to the system defaults. See https://github.com/mealie-recipes/mealie/tree/mealie-next/mealie/services/openai/prompts for expected file names. |
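
As a quick illustration, here is a minimal sketch of what a custom prompt directory could look like. The directory path `/data/openai-prompts` and the prompt text are assumptions for the example; the `recipes/parse-recipe-ingredients.txt` file name mirrors the built-in prompt tree linked above and also appears in the tests later in this commit.

```python
from pathlib import Path

# Assumed example location; point OPENAI_CUSTOM_PROMPT_DIR at this directory.
custom_dir = Path("/data/openai-prompts")

# Overrides must mirror the built-in prompt tree, e.g. recipes/parse-recipe-ingredients.txt
override = custom_dir / "recipes" / "parse-recipe-ingredients.txt"
override.parent.mkdir(parents=True, exist_ok=True)
override.write_text("Illustrative replacement prompt text ...")  # content is an assumption

# Any prompt file that is missing or empty here falls back to Mealie's built-in default.
```

In a containerized setup the directory would additionally need to be mounted into the Mealie container so the path is visible to the backend.
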
### Theming

View File

@@ -412,6 +412,11 @@ class AppSettings(AppLoggingSettings):
"""
The number of seconds to wait for an OpenAI request to complete before cancelling the request
"""
OPENAI_CUSTOM_PROMPT_DIR: str | None = None
"""
Path to a folder containing custom prompt files;
files are individually optional, each prompt name will fall back to the default if no custom file exists
"""
@property
def OPENAI_FEATURE(self) -> FeatureDetails:
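
As a sanity check, the new setting can be read like any other `AppSettings` field. A minimal sketch, using the `get_app_settings` import that appears in the service diff below; the printed value is simply whatever the environment provides:

```python
from mealie.core.config import get_app_settings

settings = get_app_settings()

# None unless OPENAI_CUSTOM_PROMPT_DIR is set in the environment
print(settings.OPENAI_CUSTOM_PROMPT_DIR)
```
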

View File

@@ -10,11 +10,14 @@ from openai import NOT_GIVEN, AsyncOpenAI
from openai.types.chat import ChatCompletion
from pydantic import BaseModel, field_validator

from mealie.core import root_logger
from mealie.core.config import get_app_settings
from mealie.pkgs import img

from .._base_service import BaseService

logger = root_logger.get_logger(__name__)


class OpenAIDataInjection(BaseModel):
    description: str
@@ -85,6 +88,7 @@ class OpenAIService(BaseService):
        self.workers = settings.OPENAI_WORKERS
        self.send_db_data = settings.OPENAI_SEND_DATABASE_DATA
        self.enable_image_services = settings.OPENAI_ENABLE_IMAGE_SERVICES
        self.custom_prompt_dir = settings.OPENAI_CUSTOM_PROMPT_DIR

        self.get_client = lambda: AsyncOpenAI(
            base_url=settings.OPENAI_BASE_URL,
@@ -96,8 +100,64 @@ class OpenAIService(BaseService):
        super().__init__()
    def _get_prompt_file_candidates(self, name: str) -> list[Path]:
        """
        Returns a list of prompt file path candidates.

        The first (optional) entry is the user's custom prompt file, if one is configured and exists;
        the second (or only) entry is the system's default prompt file.
        """
        tree = name.split(".")
        relative_path = Path(*tree[:-1], tree[-1] + ".txt")
        default_prompt_file = Path(self.PROMPTS_DIR, relative_path)

        try:
            # Only include custom files if the custom_dir is configured, is a directory, and the prompt file exists
            custom_dir = Path(self.custom_prompt_dir) if self.custom_prompt_dir else None
            if custom_dir and not custom_dir.is_dir():
                custom_dir = None
        except Exception:
            custom_dir = None

        if custom_dir:
            custom_prompt_file = Path(custom_dir, relative_path)
            if custom_prompt_file.exists():
                logger.debug(f"Found valid custom prompt file: {custom_prompt_file}")
                return [custom_prompt_file, default_prompt_file]
            else:
                logger.debug(f"Custom prompt file doesn't exist: {custom_prompt_file}")
        else:
            logger.debug(f"Custom prompt dir doesn't exist: {custom_dir}")

        # Otherwise, only return the default internal prompt file
        return [default_prompt_file]
    def _load_prompt_from_file(self, name: str) -> str:
        """Attempts to load the custom prompt, falling back to the default otherwise"""
        prompt_file_candidates = self._get_prompt_file_candidates(name)

        content = None
        last_error = None
        for prompt_file in prompt_file_candidates:
            try:
                logger.debug(f"Trying to load prompt file: {prompt_file}")
                with open(prompt_file) as f:
                    content = f.read()
                if content:
                    logger.debug(f"Successfully read prompt from {prompt_file}")
                    break
            except OSError as e:
                last_error = e

        if not content:
            if last_error:
                raise OSError(f"Unable to load prompt {name}") from last_error
            else:
                # No error was raised, but none of the candidate files contained any content
                attempted_paths = ", ".join(map(str, prompt_file_candidates))
                raise OSError(f"Unable to load prompt '{name}'. No valid content found in files: {attempted_paths}")

        return content
    def get_prompt(self, name: str, data_injections: list[OpenAIDataInjection] | None = None) -> str:
        """
        Load stored prompt and inject data into it.
@@ -109,13 +169,7 @@
        if not name:
            raise ValueError("Prompt name cannot be empty")
        content = self._load_prompt_from_file(name)

        if not data_injections:
            return content
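
Putting the two helpers together, the resolution order for a prompt name is: the custom file (only if the configured directory exists and the file exists and is non-empty), then the built-in default. A minimal usage sketch, assuming the OpenAI settings (API key, optional custom prompt directory) are already configured; the import and the no-argument constructor follow the test file below, and the prompt name is the one used there:

```python
from mealie.services.openai.openai import OpenAIService

service = OpenAIService()

# Resolution order for "recipes.parse-recipe-ingredients":
#   1. <OPENAI_CUSTOM_PROMPT_DIR>/recipes/parse-recipe-ingredients.txt  (if present and non-empty)
#   2. <PROMPTS_DIR>/recipes/parse-recipe-ingredients.txt               (built-in default)
prompt = service.get_prompt("recipes.parse-recipe-ingredients")
```
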

View File

@@ -0,0 +1,77 @@
import pytest

import mealie.services.openai.openai as openai_module
from mealie.services.openai.openai import OpenAIService


class _SettingsStub:
    OPENAI_ENABLED = True
    OPENAI_MODEL = "gpt-4o"
    OPENAI_WORKERS = 1
    OPENAI_SEND_DATABASE_DATA = False
    OPENAI_ENABLE_IMAGE_SERVICES = True
    OPENAI_CUSTOM_PROMPT_DIR = None
    OPENAI_BASE_URL = None
    OPENAI_API_KEY = "dummy"
    OPENAI_REQUEST_TIMEOUT = 30
    OPENAI_CUSTOM_HEADERS = {}
    OPENAI_CUSTOM_PARAMS = {}
@pytest.fixture()
def settings_stub(tmp_path, monkeypatch):
    s = _SettingsStub()

    prompts_dir = tmp_path / "prompts"
    (prompts_dir / "recipes").mkdir(parents=True)
    default_prompt = prompts_dir / "recipes" / "parse-recipe-ingredients.txt"
    default_prompt.write_text("DEFAULT PROMPT")
    monkeypatch.setattr(OpenAIService, "PROMPTS_DIR", prompts_dir)

    def _fake_get_app_settings():
        return s

    monkeypatch.setattr(openai_module, "get_app_settings", _fake_get_app_settings)
    return s
def test_get_prompt_default_only(settings_stub):
    svc = OpenAIService()
    out = svc.get_prompt("recipes.parse-recipe-ingredients")
    assert out == "DEFAULT PROMPT"


def test_get_prompt_custom_dir_used(settings_stub, tmp_path):
    custom_dir = tmp_path / "custom"
    (custom_dir / "recipes").mkdir(parents=True)
    (custom_dir / "recipes" / "parse-recipe-ingredients.txt").write_text("CUSTOM PROMPT")
    settings_stub.OPENAI_CUSTOM_PROMPT_DIR = str(custom_dir)

    svc = OpenAIService()
    out = svc.get_prompt("recipes.parse-recipe-ingredients")
    assert out == "CUSTOM PROMPT"


def test_get_prompt_custom_empty_falls_back_to_default(settings_stub, tmp_path):
    custom_dir = tmp_path / "custom"
    (custom_dir / "recipes").mkdir(parents=True)
    (custom_dir / "recipes" / "parse-recipe-ingredients.txt").write_text("")
    settings_stub.OPENAI_CUSTOM_PROMPT_DIR = str(custom_dir)

    svc = OpenAIService()
    out = svc.get_prompt("recipes.parse-recipe-ingredients")
    assert out == "DEFAULT PROMPT"


def test_get_prompt_raises_when_no_files(settings_stub, monkeypatch):
    # Point PROMPTS_DIR to an empty temp folder (already done in fixture) but remove default file
    prompts_dir = OpenAIService.PROMPTS_DIR
    for p in prompts_dir.rglob("*.txt"):
        p.unlink()

    svc = OpenAIService()
    with pytest.raises(OSError) as ei:
        svc.get_prompt("recipes.parse-recipe-ingredients")
    assert "Unable to load prompt" in str(ei.value)