Skip to content

Commit 54d3173

Browse files
authored
Merge pull request #240 from sri-dhurkesh/feature/structured-logging
Partially closes #239
2 parents 39f47b6 + 077375d commit 54d3173

File tree

8 files changed

+931
-664
lines changed

8 files changed

+931
-664
lines changed

pyproject.toml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,8 @@ dependencies = [
3333
"gunicorn>=23.0.0",
3434
"ruff>=0.11.13",
3535
"mypy>=1.16.0",
36+
"structlog>=25.1.0,<=25.5.0",
37+
"rich>=14.2.0",
3638
]
3739

3840
[project.optional-dependencies]

src/app/core/config.py

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,32 @@ class CryptSettings(BaseSettings):
2121
REFRESH_TOKEN_EXPIRE_DAYS: int = 7
2222

2323

24+
class FileLoggerSettings(BaseSettings):
    """Settings controlling the rotating file log handler."""

    # Rotation policy: cap each log file at 10 MiB and keep 5 rolled-over backups.
    FILE_LOG_MAX_BYTES: int = 10 * 1024 * 1024
    FILE_LOG_BACKUP_COUNT: int = 5
    # Emit machine-readable JSON lines rather than console-style text.
    FILE_LOG_FORMAT_JSON: bool = True
    FILE_LOG_LEVEL: str = "INFO"

    # Per-field toggles: whether request ID, path, method, client host,
    # and status code appear in each file log record.
    FILE_LOG_INCLUDE_REQUEST_ID: bool = True
    FILE_LOG_INCLUDE_PATH: bool = True
    FILE_LOG_INCLUDE_METHOD: bool = True
    FILE_LOG_INCLUDE_CLIENT_HOST: bool = True
    FILE_LOG_INCLUDE_STATUS_CODE: bool = True
36+
37+
38+
class ConsoleLoggerSettings(BaseSettings):
    """Settings controlling the console (stream) log handler."""

    CONSOLE_LOG_LEVEL: str = "INFO"
    # Console defaults to human-friendly text, not JSON.
    CONSOLE_LOG_FORMAT_JSON: bool = False

    # Per-field toggles: whether request ID, path, method, client host,
    # and status code appear in each console log record (all off by default
    # to keep the console output compact).
    CONSOLE_LOG_INCLUDE_REQUEST_ID: bool = False
    CONSOLE_LOG_INCLUDE_PATH: bool = False
    CONSOLE_LOG_INCLUDE_METHOD: bool = False
    CONSOLE_LOG_INCLUDE_CLIENT_HOST: bool = False
    CONSOLE_LOG_INCLUDE_STATUS_CODE: bool = False
48+
49+
2450
class DatabaseSettings(BaseSettings):
2551
pass
2652

@@ -164,6 +190,8 @@ class Settings(
164190
CRUDAdminSettings,
165191
EnvironmentSettings,
166192
CORSSettings,
193+
FileLoggerSettings,
194+
ConsoleLoggerSettings,
167195
):
168196
model_config = SettingsConfigDict(
169197
env_file=os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "..", ".env"),

src/app/core/logger.py

Lines changed: 122 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,20 +1,132 @@
1+
"""Logging configuration for the application."""
2+
13
import logging
24
import os
35
from logging.handlers import RotatingFileHandler
46

7+
import structlog
8+
from structlog.dev import ConsoleRenderer
9+
from structlog.processors import JSONRenderer
10+
from structlog.types import EventDict, Processor
11+
12+
from ..core.config import settings
13+
14+
15+
def drop_color_message_key(_, __, event_dict: EventDict) -> EventDict:
    """Remove uvicorn's redundant ``color_message`` key.

    Uvicorn attaches ``color_message`` alongside ``event`` with the same
    content; dropping it avoids logging the message twice.
    """
    if "color_message" in event_dict:
        del event_dict["color_message"]
    return event_dict
22+
23+
24+
def file_log_filter_processors(_, __, event_dict: EventDict) -> EventDict:
    """Drop request-metadata keys that the file-log settings disable.

    Each ``FILE_LOG_INCLUDE_*`` flag gates whether the corresponding key
    (request ID, path, method, client host, status code) survives into the
    file log record.
    """
    gated_keys = {
        "request_id": settings.FILE_LOG_INCLUDE_REQUEST_ID,
        "path": settings.FILE_LOG_INCLUDE_PATH,
        "method": settings.FILE_LOG_INCLUDE_METHOD,
        "client_host": settings.FILE_LOG_INCLUDE_CLIENT_HOST,
        "status_code": settings.FILE_LOG_INCLUDE_STATUS_CODE,
    }
    for key, include in gated_keys.items():
        if not include:
            event_dict.pop(key, None)
    return event_dict
39+
40+
41+
def console_log_filter_processors(_, __, event_dict: EventDict) -> EventDict:
    """Drop request-metadata keys that the console-log settings disable.

    Each ``CONSOLE_LOG_INCLUDE_*`` flag gates whether the corresponding key
    (request ID, path, method, client host, status code) survives into the
    console log record.
    """
    gated_keys = {
        "request_id": settings.CONSOLE_LOG_INCLUDE_REQUEST_ID,
        "path": settings.CONSOLE_LOG_INCLUDE_PATH,
        "method": settings.CONSOLE_LOG_INCLUDE_METHOD,
        "client_host": settings.CONSOLE_LOG_INCLUDE_CLIENT_HOST,
        "status_code": settings.CONSOLE_LOG_INCLUDE_STATUS_CODE,
    }
    for key, include in gated_keys.items():
        if not include:
            event_dict.pop(key, None)
    return event_dict
56+
57+
58+
# Processor chain shared by every handler (file and console alike).
timestamper = structlog.processors.TimeStamper(fmt="iso")
SHARED_PROCESSORS: list[Processor] = [
    structlog.contextvars.merge_contextvars,  # pull in request/job context vars
    structlog.stdlib.add_logger_name,
    structlog.stdlib.add_log_level,
    structlog.stdlib.PositionalArgumentsFormatter(),
    structlog.stdlib.ExtraAdder(),  # fold `extra={...}` kwargs into the event dict
    drop_color_message_key,
    timestamper,
    structlog.processors.StackInfoRenderer(),
]

# Route structlog events through the stdlib logging machinery so that
# structlog calls and plain `logging` calls share one formatting pipeline.
structlog.configure(
    processors=SHARED_PROCESSORS + [structlog.stdlib.ProcessorFormatter.wrap_for_formatter],
    logger_factory=structlog.stdlib.LoggerFactory(),
    cache_logger_on_first_use=True,
)
78+
79+
80+
def build_formatter(*, json_output: bool, pre_chain: list[Processor]) -> structlog.stdlib.ProcessorFormatter:
    """Create a ProcessorFormatter rendering either JSON lines or console text.

    Parameters
    ----------
    json_output: bool
        When True, render each event as a JSON line; otherwise use the
        human-friendly console renderer.
    pre_chain: list[Processor]
        Processors applied to records coming from non-structlog loggers
        before rendering.
    """
    if json_output:
        # ConsoleRenderer formats exception info itself; the JSON renderer
        # needs `format_exc_info` to have run beforehand.
        pre_chain = pre_chain + [structlog.processors.format_exc_info]
        renderer: Processor = JSONRenderer()
    else:
        renderer = ConsoleRenderer()

    final_processors = [structlog.stdlib.ProcessorFormatter.remove_processors_meta, renderer]
    return structlog.stdlib.ProcessorFormatter(foreign_pre_chain=pre_chain, processors=final_processors)
90+
91+
92+
# --- Log destination --------------------------------------------------------
LOG_DIR = os.path.join(os.path.dirname(os.path.dirname(__file__)), "logs")
os.makedirs(LOG_DIR, exist_ok=True)

# --- File handler: rotating app.log ----------------------------------------
file_handler = RotatingFileHandler(
    filename=os.path.join(LOG_DIR, "app.log"),
    maxBytes=settings.FILE_LOG_MAX_BYTES,
    backupCount=settings.FILE_LOG_BACKUP_COUNT,
)
file_handler.setLevel(settings.FILE_LOG_LEVEL)
file_handler.setFormatter(
    build_formatter(
        json_output=settings.FILE_LOG_FORMAT_JSON,
        pre_chain=SHARED_PROCESSORS + [file_log_filter_processors],
    )
)

# --- Console handler --------------------------------------------------------
console_handler = logging.StreamHandler()
console_handler.setLevel(settings.CONSOLE_LOG_LEVEL)
console_handler.setFormatter(
    build_formatter(
        json_output=settings.CONSOLE_LOG_FORMAT_JSON,
        pre_chain=SHARED_PROCESSORS + [console_log_filter_processors],
    )
)

# --- Root logger ------------------------------------------------------------
root_logger = logging.getLogger()
root_logger.setLevel(logging.INFO)
root_logger.handlers.clear()  # drop pre-existing handlers to avoid duplicate log lines
root_logger.addHandler(file_handler)
root_logger.addHandler(console_handler)

# --- Uvicorn integration ----------------------------------------------------
# Strip uvicorn's own handlers and let its records propagate to the root
# logger so they are rendered by the structlog pipeline configured above.
for logger_name in ("uvicorn", "uvicorn.error", "uvicorn.access"):
    uvicorn_logger = logging.getLogger(logger_name)
    uvicorn_logger.handlers.clear()
    uvicorn_logger.propagate = True
    uvicorn_logger.setLevel(logging.INFO)

src/app/core/setup.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515
from ..api.dependencies import get_current_superuser
1616
from ..core.utils.rate_limit import rate_limiter
1717
from ..middleware.client_cache_middleware import ClientCacheMiddleware
18+
from ..middleware.logger_middleware import LoggerMiddleware
1819
from ..models import * # noqa: F403
1920
from .config import (
2021
AppSettings,
@@ -219,7 +220,7 @@ def create_application(
219220
allow_methods=settings.CORS_METHODS,
220221
allow_headers=settings.CORS_HEADERS,
221222
)
222-
223+
application.add_middleware(LoggerMiddleware)
223224
if isinstance(settings, EnvironmentSettings):
224225
if settings.ENVIRONMENT != EnvironmentOption.PRODUCTION:
225226
docs_router = APIRouter()

src/app/core/worker/functions.py

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,13 @@
11
import asyncio
22
import logging
3+
from typing import Any
34

5+
import structlog
46
import uvloop
57
from arq.worker import Worker
68

79
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
810

9-
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
10-
1111

1212
# -------- background tasks --------
1313
async def sample_background_task(ctx: Worker, name: str) -> str:
@@ -22,3 +22,13 @@ async def startup(ctx: Worker) -> None:
2222

2323
async def shutdown(ctx: Worker) -> None:
    """Arq worker shutdown hook: record that the worker stopped."""
    logging.info("Worker end")
25+
26+
27+
async def on_job_start(ctx: dict[str, Any]) -> None:
    """Arq hook run before each job: bind the job ID into the log context.

    Every log line emitted while the job runs will carry ``job_id``.
    """
    structlog.contextvars.bind_contextvars(job_id=ctx["job_id"])
    logging.info("Job Started")
30+
31+
32+
async def on_job_end(ctx: dict[str, Any]) -> None:
    """Arq hook run after each job: log completion and clear the log context.

    Clearing the contextvars prevents this job's ``job_id`` from leaking
    into the log lines of unrelated work on the same worker.
    """
    logging.info("Job Completed")  # fixed typo: was "Job Competed"
    structlog.contextvars.clear_contextvars()

src/app/core/worker/settings.py

Lines changed: 28 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,14 @@
1+
import asyncio
2+
from typing import cast
3+
4+
from arq.cli import watch_reload
15
from arq.connections import RedisSettings
6+
from arq.typing import WorkerSettingsType
7+
from arq.worker import check_health, run_worker
28

39
from ...core.config import settings
4-
from .functions import sample_background_task, shutdown, startup
10+
from ...core.logger import logging # noqa: F401
11+
from .functions import on_job_end, on_job_start, sample_background_task, shutdown, startup
512

613
REDIS_QUEUE_HOST = settings.REDIS_QUEUE_HOST
714
REDIS_QUEUE_PORT = settings.REDIS_QUEUE_PORT
@@ -12,4 +19,24 @@ class WorkerSettings:
1219
redis_settings = RedisSettings(host=REDIS_QUEUE_HOST, port=REDIS_QUEUE_PORT)
1320
on_startup = startup
1421
on_shutdown = shutdown
22+
on_job_start = on_job_start
23+
on_job_end = on_job_end
1524
handle_signals = False
25+
26+
27+
def start_arq_service(check: bool = False, burst: int | None = None, watch: str | None = None):
    """Run the arq worker service from the command line.

    Parameters
    ----------
    check: bool
        When True, only run the worker health check and exit with its status.
    burst: int | None
        Optional burst count forwarded to ``run_worker``; ``None`` means
        arq's default (ignored when ``watch`` is given, matching arq's CLI).
    watch: str | None
        Optional path to watch; when given, runs the worker with auto-reload.
    """
    worker_settings_ = cast("WorkerSettingsType", WorkerSettings)

    if check:
        # `raise SystemExit` instead of the site-injected `exit()` builtin,
        # which is absent when Python runs without the `site` module (-S).
        raise SystemExit(check_health(worker_settings_))

    if watch:
        asyncio.run(watch_reload(watch, worker_settings_))
    else:
        kwargs = {} if burst is None else {"burst": burst}
        run_worker(worker_settings_, **kwargs)


if __name__ == "__main__":
    start_arq_service()
    # python -m src.app.core.worker.settings
Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
# app/middleware/request_id.py
2+
import uuid
3+
4+
import structlog
5+
from fastapi import FastAPI, Request
6+
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
7+
from starlette.responses import Response
8+
9+
10+
class LoggerMiddleware(BaseHTTPMiddleware):
    """Middleware that binds per-request metadata into the structlog context.

    Each request gets a request ID (taken from the incoming ``X-Request-ID``
    header, or freshly generated), plus client host, path, and method bound
    as context variables; the response carries the ID back in its
    ``X-Request-ID`` header.

    Parameters
    ----------
    app: FastAPI
        The FastAPI application instance.
    """

    def __init__(self, app: FastAPI) -> None:
        super().__init__(app)

    async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response:
        """Bind request metadata, run the downstream handler, tag the response."""
        incoming_id = request.headers.get("X-Request-ID")
        request_id = incoming_id if incoming_id is not None else str(uuid.uuid4())

        # Reset context left over from any previous request handled on this
        # task, then bind this request's metadata for every log line it emits.
        structlog.contextvars.clear_contextvars()
        structlog.contextvars.bind_contextvars(
            request_id=request_id,
            client_host=request.client.host if request.client else None,
            status_code=None,  # filled in once the response is known
            path=request.url.path,
            method=request.method,
        )

        response = await call_next(request)
        structlog.contextvars.bind_contextvars(status_code=response.status_code)
        response.headers["X-Request-ID"] = request_id
        return response

0 commit comments

Comments
 (0)