rewrite logger to support custom config files (#3104)

Hayden authored 2024-04-16 10:52:49 -05:00, committed by GitHub
parent cba076b6a4
commit 6bd5a82b92
19 changed files with 285 additions and 294 deletions
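In short: logging is no longer wired up at import time with logging.basicConfig; the root logger is configured lazily from one of three bundled JSON dictConfig files (production, development, testing), ${KEY} placeholders in those files are filled from app settings, and a deployment can bypass the bundled files entirely with its own config. A minimal sketch of that override path as an operator would use it, assuming a working Mealie environment (database and data directory settings already in place); the config contents and paths below are invented for illustration:

import json
import os
import pathlib
import tempfile

# A minimal dictConfig document standing in for an operator's custom file.
conf_path = pathlib.Path(tempfile.mkdtemp()) / "logconf.custom.json"
conf_path.write_text(json.dumps({
    "version": 1,
    "disable_existing_loggers": False,
    "handlers": {"stdout": {"class": "logging.StreamHandler", "stream": "ext://sys.stdout"}},
    "loggers": {"root": {"level": "${LOG_LEVEL}", "handlers": ["stdout"]}},
}))

# Both variable names come from the settings change at the end of this diff.
os.environ["LOG_CONFIG_OVERRIDE"] = str(conf_path)
os.environ["LOG_LEVEL"] = "debug"

from mealie.core.root_logger import get_logger  # noqa: E402

logger = get_logger()  # first call reads the settings, loads the override, applies dictConfig
logger.info("configured from the custom file")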

View File

@@ -0,0 +1,67 @@
import json
import logging
import pathlib
import typing
from logging import config as logging_config

__dir = pathlib.Path(__file__).parent
__conf: dict[str, typing.Any] | None = None


def _load_config(path: pathlib.Path, substitutions: dict[str, str] | None = None) -> dict[str, typing.Any]:
    with open(path) as file:
        if substitutions:
            contents = file.read()

            for key, value in substitutions.items():
                # Replace ${key} tokens with their substitution values.
                #
                # Example:
                #   {"key": "value"}
                #   "/path/to/${key}/file" -> "/path/to/value/file"
                contents = contents.replace(f"${{{key}}}", value)

            json_data = json.loads(contents)
        else:
            json_data = json.load(file)

    return json_data


def log_config() -> dict[str, typing.Any]:
    if __conf is None:
        raise ValueError("logger not configured, must call configured_logger first")

    return __conf


def configured_logger(
    *,
    mode: str,
    config_override: pathlib.Path | None = None,
    substitutions: dict[str, str] | None = None,
) -> logging.Logger:
    """
    Configure the logger based on the mode and return the root logger.

    Args:
        mode (str): The mode to configure the logger for (production, development, testing).
        config_override (pathlib.Path, optional): A path to a custom logging config. Defaults to None.
        substitutions (dict[str, str], optional): A dictionary of substitutions to apply to the logging config.
    """
    global __conf

    if config_override:
        __conf = _load_config(config_override, substitutions)
    else:
        if mode == "production":
            __conf = _load_config(__dir / "logconf.prod.json", substitutions)
        elif mode == "development":
            __conf = _load_config(__dir / "logconf.dev.json", substitutions)
        elif mode == "testing":
            __conf = _load_config(__dir / "logconf.test.json", substitutions)
        else:
            raise ValueError(f"Invalid mode: {mode}")

    logging_config.dictConfig(config=__conf)
    return logging.getLogger()
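The substitution step above is plain string replacement on the raw file contents before json.loads, so any ${KEY} token in a config can be filled from the substitutions mapping, and log_config() exposes the resulting dict once configuration has run. A small sketch of using this module directly with the bundled testing config; the import path mealie.core.logger.config is inferred from the relative import in root_logger.py below and assumes a Mealie checkout:

from mealie.core.logger.config import configured_logger, log_config

# ${LOG_LEVEL} in logconf.test.json is replaced textually before json.loads,
# then the resulting dict is handed to logging.config.dictConfig.
logger = configured_logger(
    mode="testing",
    config_override=None,  # fall back to the bundled logconf.test.json
    substitutions={"LOG_LEVEL": "DEBUG"},
)
logger.debug("root logger configured from the testing profile")

assert log_config()["loggers"]["root"]["level"] == "DEBUG"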

View File

@@ -0,0 +1,17 @@
{
    "version": 1,
    "disable_existing_loggers": false,
    "handlers": {
        "rich": {
            "class": "rich.logging.RichHandler"
        }
    },
    "loggers": {
        "root": {
            "level": "DEBUG",
            "handlers": [
                "rich"
            ]
        }
    }
}

View File

@@ -0,0 +1,74 @@
{
    "version": 1,
    "disable_existing_loggers": false,
    "formatters": {
        "simple": {
            "format": "%(levelname)-8s %(asctime)s - %(message)s",
            "datefmt": "%Y-%m-%dT%H:%M:%S"
        },
        "detailed": {
            "format": "[%(levelname)s|%(module)s|L%(lineno)d] %(asctime)s: %(message)s",
            "datefmt": "%Y-%m-%dT%H:%M:%S"
        },
        "access": {
            "()": "uvicorn.logging.AccessFormatter",
            "fmt": "%(levelname)-8s %(asctime)s - [%(client_addr)s] %(status_code)s \"%(request_line)s\"",
            "datefmt": "%Y-%m-%dT%H:%M:%S"
        }
    },
    "handlers": {
        "stderr": {
            "class": "logging.StreamHandler",
            "level": "WARNING",
            "formatter": "simple",
            "stream": "ext://sys.stderr"
        },
        "stdout": {
            "class": "logging.StreamHandler",
            "level": "${LOG_LEVEL}",
            "formatter": "simple",
            "stream": "ext://sys.stdout"
        },
        "access": {
            "class": "logging.StreamHandler",
            "level": "${LOG_LEVEL}",
            "formatter": "access",
            "stream": "ext://sys.stdout"
        },
        "file": {
            "class": "logging.handlers.RotatingFileHandler",
            "level": "DEBUG",
            "formatter": "detailed",
            "filename": "${DATA_DIR}/mealie.log",
            "maxBytes": 10000,
            "backupCount": 3
        }
    },
    "loggers": {
        "root": {
            "level": "${LOG_LEVEL}",
            "handlers": [
                "stderr",
                "file",
                "stdout"
            ]
        },
        "uvicorn.error": {
            "handlers": [
                "stderr",
                "file",
                "stdout"
            ],
            "level": "${LOG_LEVEL}",
            "propagate": false
        },
        "uvicorn.access": {
            "handlers": [
                "access",
                "file"
            ],
            "level": "${LOG_LEVEL}",
            "propagate": false
        }
    }
}
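Two details of this production config worth noting: every ${LOG_LEVEL} and ${DATA_DIR} placeholder has to be filled through substitutions, otherwise the literal ${...} strings land in the level and filename fields and dictConfig fails, and the rotating file handler keeps logs small (maxBytes of 10000 with three backups, so roughly 40 kB at most). A sketch that loads this config through configured_logger and checks the substituted values; it assumes the package layout from this diff and that uvicorn is installed, since the access formatter references uvicorn.logging.AccessFormatter:

import pathlib
import tempfile

from mealie.core.logger.config import configured_logger, log_config

data_dir = pathlib.Path(tempfile.mkdtemp())  # stand-in for Mealie's real data directory

configured_logger(
    mode="production",
    config_override=None,
    substitutions={"DATA_DIR": data_dir.as_posix(), "LOG_LEVEL": "WARNING"},
)

conf = log_config()
assert conf["loggers"]["root"]["level"] == "WARNING"
assert conf["handlers"]["file"]["filename"] == f"{data_dir.as_posix()}/mealie.log"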

View File

@@ -0,0 +1,26 @@
{
    "version": 1,
    "disable_existing_loggers": false,
    "formatters": {
        "detailed": {
            "format": "[%(levelname)s|%(module)s|L%(lineno)d] %(asctime)s: %(message)s",
            "datefmt": "%Y-%m-%dT%H:%M:%S"
        }
    },
    "handlers": {
        "stdout": {
            "class": "logging.StreamHandler",
            "level": "DEBUG",
            "formatter": "detailed",
            "stream": "ext://sys.stdout"
        }
    },
    "loggers": {
        "root": {
            "level": "${LOG_LEVEL}",
            "handlers": [
                "stdout"
            ]
        }
    }
}

View File

@@ -1,85 +1,46 @@
 import logging
-import sys
-from dataclasses import dataclass
-from functools import lru_cache
-
-from mealie.core.config import determine_data_dir
+
+from .config import get_app_dirs, get_app_settings
+from .logger.config import configured_logger
-
-DATA_DIR = determine_data_dir()
-
-from .config import get_app_settings  # noqa E402
-
-LOGGER_FILE = DATA_DIR.joinpath("mealie.log")
-DATE_FORMAT = "%d-%b-%y %H:%M:%S"
-LOGGER_FORMAT = "%(levelname)s: %(asctime)s \t%(message)s"
-
-
-@dataclass
-class LoggerConfig:
-    handlers: list
-    format: str
-    date_format: str
-    logger_file: str
-    level: int = logging.INFO
-
-
-@lru_cache
-def get_logger_config():
-    settings = get_app_settings()
-    log_level = logging._nameToLevel[settings.LOG_LEVEL]
-
-    if not settings.PRODUCTION:
-        from rich.logging import RichHandler
-
-        return LoggerConfig(
-            handlers=[RichHandler(rich_tracebacks=True, tracebacks_show_locals=True)],
-            format=None,
-            date_format=None,
-            logger_file=None,
-            level=log_level,
-        )
-
-    output_file_handler = logging.FileHandler(LOGGER_FILE)
-    handler_format = logging.Formatter(LOGGER_FORMAT, datefmt=DATE_FORMAT)
-    output_file_handler.setFormatter(handler_format)
-
-    # Stdout
-    stdout_handler = logging.StreamHandler(sys.stdout)
-    stdout_handler.setFormatter(handler_format)
-
-    return LoggerConfig(
-        handlers=[output_file_handler, stdout_handler],
-        format="%(levelname)s: %(asctime)s \t%(message)s",
-        date_format="%d-%b-%y %H:%M:%S",
-        logger_file=LOGGER_FILE,
-        level=log_level,
-    )
-
-
-logger_config = get_logger_config()
-logging.basicConfig(
-    level=logger_config.level,
-    format=logger_config.format,
-    datefmt=logger_config.date_format,
-    handlers=logger_config.handlers,
-)
-
-
-def logger_init() -> logging.Logger:
-    """Returns the Root Logging Object for Mealie"""
-    return logging.getLogger("mealie")
-
-
-root_logger = logger_init()
+__root_logger: None | logging.Logger = None


 def get_logger(module=None) -> logging.Logger:
-    """Returns a child logger for mealie"""
-    global root_logger
+    """
+    Get a logger instance for a module, in most cases module should not be
+    provided. Simply using the root logger is sufficient.
+
+    Cases where you would want to use a module specific logger might be a background
+    task or a long running process where you want to easily identify the source of
+    those messages
+    """
+    global __root_logger
+
+    if __root_logger is None:
+        app_settings = get_app_settings()
+
+        mode = "development"
+        if app_settings.TESTING:
+            mode = "testing"
+        elif app_settings.PRODUCTION:
+            mode = "production"
+
+        dirs = get_app_dirs()
+        substitutions = {
+            "DATA_DIR": dirs.DATA_DIR.as_posix(),
+            "LOG_LEVEL": app_settings.LOG_LEVEL.upper(),
+        }
+
+        __root_logger = configured_logger(
+            mode=mode,
+            config_override=app_settings.LOG_CONFIG_OVERRIDE,
+            substitutions=substitutions,
+        )

     if module is None:
-        return root_logger
+        return __root_logger

-    return root_logger.getChild(module)
+    return __root_logger.getChild(module)
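The docstring above in practice: bare get_logger() returns the configured root logger, which is enough for most call sites, while passing a name returns a child logger whose records still propagate to the root handlers. A short usage sketch; the "scheduler" name is only illustrative:

from mealie.core.root_logger import get_logger

logger = get_logger()  # the configured root logger, the common case
logger.info("request handled")

# A named child for a long-running job; its records still flow through the
# handlers configured on the root logger, but carry an identifiable name.
task_logger = get_logger("scheduler")
task_logger.info("nightly maintenance started")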

View File

@@ -36,13 +36,21 @@ class AppSettings(BaseSettings):
"""path to static files directory (ex. `mealie/dist`)"""
IS_DEMO: bool = False
HOST_IP: str = "*"
API_HOST: str = "0.0.0.0"
API_PORT: int = 9000
API_DOCS: bool = True
TOKEN_TIME: int = 48
"""time in hours"""
SECRET: str
LOG_LEVEL: str = "INFO"
LOG_CONFIG_OVERRIDE: Path | None = None
"""path to custom logging configuration file"""
LOG_LEVEL: str = "info"
"""corresponds to standard Python log levels"""
GIT_COMMIT_HASH: str = "unknown"
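Because AppSettings extends pydantic's BaseSettings, the two new fields are filled from environment variables of the same name (matched case-insensitively), which is how LOG_CONFIG_OVERRIDE travels from a deployment's environment to configured_logger in root_logger.py above. A toy stand-in, not Mealie's real class, showing just that mechanic; it assumes the pydantic-settings package used by pydantic v2 projects:

import os
from pathlib import Path

from pydantic_settings import BaseSettings  # assumption: pydantic v2 style settings


class ToySettings(BaseSettings):
    # Stand-in carrying only the two fields added in this diff.
    LOG_CONFIG_OVERRIDE: Path | None = None
    LOG_LEVEL: str = "info"


os.environ["LOG_CONFIG_OVERRIDE"] = "/config/logconf.json"  # illustrative path
os.environ["log_level"] = "debug"  # env matching is case-insensitive by default

settings = ToySettings()
print(settings.LOG_CONFIG_OVERRIDE)  # PosixPath('/config/logconf.json')
print(settings.LOG_LEVEL)            # debug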