-
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathfactory.py
More file actions
459 lines (388 loc) · 15.5 KB
/
factory.py
File metadata and controls
459 lines (388 loc) · 15.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
import atexit
import dataclasses
import logging
import threading
import time
from dataclasses import dataclass
from enum import StrEnum
from pythonlogs.basic_log import BasicLog as _BasicLogImpl
from pythonlogs.core.constants import LogLevel, RotateWhen
from pythonlogs.core.log_utils import cleanup_logger_handlers
from pythonlogs.core.settings import get_log_settings
from pythonlogs.size_rotating import SizeRotatingLog as _SizeRotatingLogImpl
from pythonlogs.timed_rotating import TimedRotatingLog as _TimedRotatingLogImpl
@dataclass
class LoggerConfig:
    """Configuration class to group logger parameters.

    Every field defaults to None, meaning "use the library/settings default".
    Field names mirror the keyword arguments accepted by the underlying
    BasicLog / SizeRotatingLog / TimedRotatingLog implementations; the factory
    filters them per logger type via LoggerFactory._LOGGER_IMPL.
    """

    level: LogLevel | str | None = None  # log level (enum member or name string)
    name: str | None = None  # logger name; factory falls back to settings appname
    directory: str | None = None  # target directory for file-based loggers
    filenames: list | tuple | None = None  # log file names for rotating loggers
    encoding: str | None = None  # file encoding for file handlers
    datefmt: str | None = None  # timestamp format for log records
    timezone: str | None = None  # timezone used when formatting timestamps
    streamhandler: bool | None = None  # whether to also attach a stream handler
    showlocation: bool | None = None  # whether to include code location in records
    maxmbytes: int | None = None  # size threshold for size-based rotation
    when: RotateWhen | str | None = None  # rotation schedule for timed rotation
    sufix: str | None = None  # rotated-file suffix (sic: "sufix" matches the impl API)
    daystokeep: int | None = None  # retention period for rotated files
class LoggerType(StrEnum):
    """Available logger types.

    StrEnum members compare equal to their string values, so both the enum
    member and its lowercase string form are accepted by LoggerFactory
    (strings are normalized via LoggerType(value.lower()) in create_logger).
    """

    BASIC = "basic"  # simple, non-rotating logger
    SIZE_ROTATING = "size_rotating"  # rotates files by size (maxmbytes)
    TIMED_ROTATING = "timed_rotating"  # rotates files on a schedule (when/sufix)
class LoggerFactory:
    """Factory for creating different types of loggers with optimized instantiation and memory management.

    Loggers are cached by name in a class-level registry guarded by an RLock.
    Cached entries carry a last-access timestamp so the registry can evict by
    TTL and, when full, by LRU order. An atexit hook closes all handlers on
    interpreter shutdown.
    """

    # Logger registry for reusing loggers by name with timestamp tracking:
    # maps name -> (logger, last-access time from time.time()).
    _logger_registry: dict[str, tuple[logging.Logger, float]] = {}
    # Thread lock for registry access. RLock (not Lock) because
    # _enforce_size_limit may call clear_registry while the lock is held.
    _registry_lock = threading.RLock()
    # Memory optimization settings
    _max_loggers = 100  # Maximum number of cached loggers
    _logger_ttl = 3600  # Logger TTL in seconds (1 hour)
    _initialized = False  # Flag to track if memory limits have been initialized
    _atexit_registered = False  # Flag to track if atexit cleanup is registered

    @classmethod
    def _ensure_initialized(cls) -> None:
        """Ensure memory limits are initialized from settings on first use."""
        if not cls._initialized:
            settings = get_log_settings()
            cls._max_loggers = settings.max_loggers
            cls._logger_ttl = settings.logger_ttl_seconds
            cls._initialized = True
        # Register atexit cleanup on first use. Tracked by a separate flag so
        # registration still happens when limits were set manually via
        # set_memory_limits (which flips _initialized without registering).
        if not cls._atexit_registered:
            atexit.register(cls._atexit_cleanup)
            cls._atexit_registered = True

    @classmethod
    def get_or_create_logger(
        cls,
        logger_type: LoggerType | str,
        name: str | None = None,
        **kwargs,
    ) -> logging.Logger:
        """
        Get an existing logger from registry or create a new one.

        Loggers are cached by name for performance. NOTE(review): on a cache
        hit the supplied kwargs are ignored — the previously configured
        logger is returned unchanged.

        Args:
            logger_type: Type of logger to create
            name: Logger name (used as cache key)
            **kwargs: Additional logger configuration

        Returns:
            Cached or newly created logger instance
        """
        # Use the default name if none provided
        if name is None:
            name = get_log_settings().appname
        # Thread-safe check-and-create operation
        with cls._registry_lock:
            # Initialize memory limits from settings on first use
            cls._ensure_initialized()
            # Clean up expired loggers first
            cls._cleanup_expired_loggers()
            # Check if logger already exists in the registry
            if name in cls._logger_registry:
                logger, _ = cls._logger_registry[name]
                # Update timestamp for LRU tracking
                cls._logger_registry[name] = (logger, time.time())
                return logger
            # Ensure registry size limit (evicts oldest entries if full)
            cls._enforce_size_limit()
            # Create a new logger and cache it with timestamp
            logger = cls.create_logger(logger_type, name=name, **kwargs)
            cls._logger_registry[name] = (logger, time.time())
            return logger

    @classmethod
    def clear_registry(cls) -> None:
        """Clear the logger registry with proper resource cleanup."""
        with cls._registry_lock:
            # Close every cached logger's handlers before dropping references.
            for logger, _ in cls._logger_registry.values():
                cls._cleanup_logger(logger)
            cls._logger_registry.clear()

    @classmethod
    def _cleanup_expired_loggers(cls) -> None:
        """Remove expired loggers from registry based on TTL.

        Caller must hold _registry_lock.
        """
        current_time = time.time()
        expired_keys = []
        for name, (logger, timestamp) in cls._logger_registry.items():
            if current_time - timestamp > cls._logger_ttl:
                expired_keys.append(name)
                cls._cleanup_logger(logger)
        # Pop after iterating so the dict is not mutated mid-iteration.
        for key in expired_keys:
            cls._logger_registry.pop(key, None)

    @classmethod
    def _enforce_size_limit(cls) -> None:
        """Enforce maximum registry size by removing the oldest entries (LRU eviction).

        Caller must hold _registry_lock; called before inserting a new entry,
        so it frees one extra slot for the incoming logger.
        """
        if cls._max_loggers <= 0:
            # Special case: if max_loggers is 0 or negative, clear all
            # (re-entry into the RLock-guarded clear_registry is safe).
            cls.clear_registry()
            return
        if len(cls._logger_registry) >= cls._max_loggers:
            # Sort by timestamp (oldest first) and remove the oldest entries
            sorted_entries = sorted(cls._logger_registry.items(), key=lambda x: x[1][1])
            # +1 leaves room for the logger about to be inserted.
            entries_to_remove = len(sorted_entries) - cls._max_loggers + 1
            for i in range(min(entries_to_remove, len(sorted_entries))):
                name, (logger, _) = sorted_entries[i]
                cls._cleanup_logger(logger)
                cls._logger_registry.pop(name, None)

    @classmethod
    def set_memory_limits(cls, max_loggers: int = 100, ttl_seconds: int = 3600) -> None:
        """Configure memory management limits for the logger registry at runtime.

        Args:
            max_loggers: Maximum number of cached loggers
            ttl_seconds: Time-to-live for cached loggers in seconds
        """
        with cls._registry_lock:
            cls._max_loggers = max_loggers
            cls._logger_ttl = ttl_seconds
            cls._initialized = True  # Mark as manually configured
            # Clean up immediately with new settings
            cls._cleanup_expired_loggers()
            cls._enforce_size_limit()

    @classmethod
    def _atexit_cleanup(cls) -> None:
        """Cleanup function registered with atexit to ensure proper resource cleanup."""
        try:
            cls.clear_registry()
        except (OSError, ValueError, RuntimeError):
            # Silently ignore expected exceptions during shutdown cleanup
            # (handlers may already be closed or streams torn down).
            pass

    @staticmethod
    def _cleanup_logger(logger: logging.Logger) -> None:
        """Clean up logger resources by closing all handlers."""
        cleanup_logger_handlers(logger)

    @classmethod
    def shutdown_logger(cls, name: str) -> bool:
        """Shutdown and remove a specific logger from registry.

        Args:
            name: Logger name to shut down

        Returns:
            True if logger was found and shutdown, False otherwise
        """
        with cls._registry_lock:
            if name in cls._logger_registry:
                logger, _ = cls._logger_registry.pop(name)
                cls._cleanup_logger(logger)
                return True
            return False

    @classmethod
    def get_registered_loggers(cls) -> dict[str, logging.Logger]:
        """Get all registered loggers. Returns a copy of the registry
        (timestamps stripped), so mutating it cannot corrupt the cache."""
        with cls._registry_lock:
            return {name: logger for name, (logger, _) in cls._logger_registry.items()}

    @classmethod
    def get_memory_limits(cls) -> dict[str, int]:
        """Get current memory management limits.

        Returns:
            Dictionary with current max_loggers and ttl_seconds settings
        """
        with cls._registry_lock:
            return {"max_loggers": cls._max_loggers, "ttl_seconds": cls._logger_ttl}

    # Mapping of logger types to their implementation classes and the set of
    # LoggerConfig field names each implementation accepts.
    _LOGGER_IMPL = {
        LoggerType.BASIC: (
            _BasicLogImpl,
            {"level", "name", "encoding", "datefmt", "timezone", "showlocation"},
        ),
        LoggerType.SIZE_ROTATING: (
            _SizeRotatingLogImpl,
            {
                "level",
                "name",
                "directory",
                "filenames",
                "maxmbytes",
                "daystokeep",
                "encoding",
                "datefmt",
                "timezone",
                "streamhandler",
                "showlocation",
            },
        ),
        LoggerType.TIMED_ROTATING: (
            _TimedRotatingLogImpl,
            {
                "level",
                "name",
                "directory",
                "filenames",
                "when",
                "sufix",
                "daystokeep",
                "encoding",
                "datefmt",
                "timezone",
                "streamhandler",
                "showlocation",
            },
        ),
    }

    @staticmethod
    def create_logger(logger_type: LoggerType | str, config: LoggerConfig | None = None, **kwargs) -> logging.Logger:
        """
        Factory method to create loggers based on type.

        Args:
            logger_type: Type of logger to create (LoggerType enum or string)
            config: LoggerConfig object with logger parameters
            **kwargs: Individual logger parameters (kwargs take precedence over config)

        Returns:
            Configured logger instance

        Raises:
            ValueError: If invalid logger_type is provided
        """
        # Convert string to enum if needed (case-insensitive)
        if isinstance(logger_type, str):
            try:
                logger_type = LoggerType(logger_type.lower())
            except ValueError as err:
                raise ValueError(
                    f"Invalid logger type: {logger_type}. Valid types: {[t.value for t in LoggerType]}"
                ) from err
        # Merge config and kwargs (kwargs take precedence)
        if config is None:
            config = LoggerConfig()
        merged = {f.name: kwargs.get(f.name, getattr(config, f.name)) for f in dataclasses.fields(LoggerConfig)}
        # Convert enum values to strings for logger classes
        if isinstance(merged.get("level"), LogLevel):
            merged["level"] = merged["level"].value
        if isinstance(merged.get("when"), RotateWhen):
            merged["when"] = merged["when"].value
        # Create logger using table-driven dispatch; pass only the fields the
        # chosen implementation accepts.
        impl_class, valid_fields = LoggerFactory._LOGGER_IMPL[logger_type]
        logger_kwargs = {k: v for k, v in merged.items() if k in valid_fields}
        return impl_class(**logger_kwargs).init()
# Public API wrapper classes - act like logging.Logger with context manager support
class _LoggerMixin:
    """Shared behavior for the public logger wrappers.

    Provides transparent attribute delegation to the wrapped stdlib logger
    and context-manager support that closes the logger's handlers on exit.
    """

    # The wrapped stdlib logger; set by each subclass's __init__.
    _logger: logging.Logger

    def __getattr__(self, attr: str):
        # Anything not found on the wrapper is forwarded to the wrapped
        # logger, so the wrapper can be used like a logging.Logger.
        return getattr(self._logger, attr)

    def __enter__(self):
        # The wrapper itself is the `with` target.
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Close all handler resources; returning False lets any in-flight
        # exception propagate unchanged.
        cleanup_logger_handlers(self._logger)
        return False
class BasicLog(_LoggerMixin):
    """Basic logger wrapper behaving like logging.Logger, with context-manager support.

    Usage:
        # Direct usage
        logger = BasicLog(name="app", level="INFO")
        logger.info("Hello world")

        # Context manager (automatic cleanup)
        with BasicLog(name="app", level="INFO") as logger:
            logger.info("Hello world")
    """

    def __init__(
        self,
        level: LogLevel | str | None = None,
        name: str | None = None,
        encoding: str | None = None,
        datefmt: str | None = None,
        timezone: str | None = None,
        showlocation: bool | None = None,
    ):
        # Bundle the arguments into a LoggerConfig and let the factory build
        # the logger; create_logger merges config fields exactly as it would
        # merge the same values passed as keyword arguments.
        options = LoggerConfig(
            level=level,
            name=name,
            encoding=encoding,
            datefmt=datefmt,
            timezone=timezone,
            showlocation=showlocation,
        )
        self._logger = LoggerFactory.create_logger(LoggerType.BASIC, options)
        self._name = name or get_log_settings().appname
class SizeRotatingLog(_LoggerMixin):
    """Size-based rotating logger wrapper behaving like logging.Logger, with context-manager support.

    Usage:
        # Direct usage
        logger = SizeRotatingLog(name="app", directory="/logs", filenames=["app.log"])
        logger.info("Hello world")

        # Context manager (automatic cleanup)
        with SizeRotatingLog(name="app", directory="/logs", filenames=["app.log"]) as logger:
            logger.info("Hello world")
    """

    def __init__(
        self,
        level: LogLevel | str | None = None,
        name: str | None = None,
        directory: str | None = None,
        filenames: list | tuple | None = None,
        maxmbytes: int | None = None,
        daystokeep: int | None = None,
        encoding: str | None = None,
        datefmt: str | None = None,
        timezone: str | None = None,
        streamhandler: bool | None = None,
        showlocation: bool | None = None,
    ):
        # Bundle the arguments into a LoggerConfig and let the factory build
        # the logger; create_logger merges config fields exactly as it would
        # merge the same values passed as keyword arguments.
        options = LoggerConfig(
            level=level,
            name=name,
            directory=directory,
            filenames=filenames,
            maxmbytes=maxmbytes,
            daystokeep=daystokeep,
            encoding=encoding,
            datefmt=datefmt,
            timezone=timezone,
            streamhandler=streamhandler,
            showlocation=showlocation,
        )
        self._logger = LoggerFactory.create_logger(LoggerType.SIZE_ROTATING, options)
        self._name = name or get_log_settings().appname
class TimedRotatingLog(_LoggerMixin):
    """Time-based rotating logger wrapper behaving like logging.Logger, with context-manager support.

    Usage:
        # Direct usage
        logger = TimedRotatingLog(name="app", directory="/logs", when="midnight")
        logger.info("Hello world")

        # Context manager (automatic cleanup)
        with TimedRotatingLog(name="app", directory="/logs", when="midnight") as logger:
            logger.info("Hello world")
    """

    def __init__(
        self,
        level: LogLevel | str | None = None,
        name: str | None = None,
        directory: str | None = None,
        filenames: list | tuple | None = None,
        when: RotateWhen | str | None = None,
        sufix: str | None = None,
        daystokeep: int | None = None,
        encoding: str | None = None,
        datefmt: str | None = None,
        timezone: str | None = None,
        streamhandler: bool | None = None,
        showlocation: bool | None = None,
    ):
        # Bundle the arguments into a LoggerConfig and let the factory build
        # the logger; create_logger merges config fields exactly as it would
        # merge the same values passed as keyword arguments.
        options = LoggerConfig(
            level=level,
            name=name,
            directory=directory,
            filenames=filenames,
            when=when,
            sufix=sufix,
            daystokeep=daystokeep,
            encoding=encoding,
            datefmt=datefmt,
            timezone=timezone,
            streamhandler=streamhandler,
            showlocation=showlocation,
        )
        self._logger = LoggerFactory.create_logger(LoggerType.TIMED_ROTATING, options)
        self._name = name or get_log_settings().appname
# Convenience functions
def clear_logger_registry() -> None:
    """Clear the logger registry with proper cleanup.

    Module-level convenience wrapper around LoggerFactory.clear_registry();
    closes every cached logger's handlers before emptying the cache.
    """
    LoggerFactory.clear_registry()
def shutdown_logger(name: str) -> bool:
    """Shut down a specific logger.

    Module-level convenience wrapper around LoggerFactory.shutdown_logger().

    Args:
        name: Registry key of the logger to shut down.

    Returns:
        True if the logger was found (and cleaned up), False otherwise.
    """
    return LoggerFactory.shutdown_logger(name)
def get_registered_loggers() -> dict[str, logging.Logger]:
    """Get all registered loggers.

    Module-level convenience wrapper around
    LoggerFactory.get_registered_loggers(); returns a copy, so mutating the
    result does not affect the registry.
    """
    return LoggerFactory.get_registered_loggers()