Skip to content

Commit beeea58

Browse files
Borda authored and claude committed
fix(metrics): address review findings before merge
- Replace all hardcoded test ports (9093-19200) with port=0 and read actual port from server_address[1] to prevent CI port collisions
- Clarify CacheMetrics.window_sizes docstring: windowing is not automatic; callers must pass window= explicitly to get_stats()
- Add README note that entry_count/total_size_bytes are populated for the memory backend only; all other backends report 0
- Standardize MetricsContext guards to 'if self._m is not None:'
- Remove dead _init_prometheus_metrics no-op method and its call site
- Replace deprecated typing.Deque/Dict with deque[...]/dict[...] builtins

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
1 parent 9f3c0ce commit beeea58

4 files changed

Lines changed: 43 additions & 44 deletions

File tree

README.rst

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -364,6 +364,8 @@ The metrics system tracks:
364364
* **Size limit rejections**: Entries rejected due to ``entry_size_limit``
365365
* **Cache size (memory backend only)**: Number of entries and total size in bytes for the in-memory cache core
366366

367+
Note: ``entry_count`` and ``total_size_bytes`` are populated only for the memory backend. Other backends (pickle, redis, sql, mongo) currently always report ``0`` for these fields.
368+
367369
Sampling Rate
368370
-------------
369371

src/cachier/exporters/prometheus.py

Lines changed: 4 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77
# http://www.opensource.org/licenses/MIT-license
88

99
import threading
10-
from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Protocol, cast
10+
from typing import TYPE_CHECKING, Any, Callable, Optional, Protocol, cast
1111

1212
from .base import MetricsExporter
1313

@@ -60,7 +60,7 @@ def collect(self) -> Any:
6060
"""Collect metrics from all registered functions."""
6161
# Snapshot all metrics in one lock acquisition for consistency
6262
with self.exporter._lock:
63-
snapshots: Dict[str, "MetricSnapshot"] = {}
63+
snapshots: dict[str, "MetricSnapshot"] = {}
6464
for func_name, func in self.exporter._registered_functions.items():
6565
m = _get_func_metrics(func)
6666
if m is not None:
@@ -144,7 +144,7 @@ def __init__(self, port: int = 9090, use_prometheus_client: bool = True, host: s
144144
self.port = port
145145
self.host = host
146146
self.use_prometheus_client = use_prometheus_client
147-
self._registered_functions: Dict[str, _MetricsEnabledCallable] = {}
147+
self._registered_functions: dict[str, _MetricsEnabledCallable] = {}
148148
self._lock = threading.Lock()
149149
self._server: Optional[Any] = None
150150
self._server_thread: Optional[threading.Thread] = None
@@ -156,7 +156,6 @@ def __init__(self, port: int = 9090, use_prometheus_client: bool = True, host: s
156156
self._registry: Optional[Any] = None
157157
if use_prometheus_client and PROMETHEUS_CLIENT_AVAILABLE:
158158
self._prom_client = prometheus_client
159-
self._init_prometheus_metrics()
160159
self._setup_collector()
161160

162161
def _setup_collector(self) -> None:
@@ -168,16 +167,6 @@ def _setup_collector(self) -> None:
168167
self._registry = CollectorRegistry()
169168
self._registry.register(CachierCollector(self))
170169

171-
def _init_prometheus_metrics(self) -> None:
172-
"""Initialize Prometheus metrics using prometheus_client.
173-
174-
Note: With custom collector, we don't need to pre-define metrics.
175-
The collector will generate them dynamically at scrape time.
176-
177-
"""
178-
# Metrics are now handled by the custom collector in _setup_collector()
179-
pass
180-
181170
def register_function(self, func: Callable[..., Any]) -> None:
182171
"""Register a cached function for metrics export.
183172
@@ -232,7 +221,7 @@ def _generate_text_metrics(self) -> str:
232221
"""
233222
# Snapshot all metrics in one lock acquisition for consistency
234223
with self._lock:
235-
snapshots: Dict[str, "MetricSnapshot"] = {}
224+
snapshots: dict[str, "MetricSnapshot"] = {}
236225
for func_name, func in self._registered_functions.items():
237226
m = _get_func_metrics(func)
238227
if m is not None:

src/cachier/metrics.py

Lines changed: 15 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
from collections import deque
1313
from dataclasses import dataclass
1414
from datetime import timedelta
15-
from typing import Deque, Optional
15+
from typing import Optional
1616

1717

1818
@dataclass
@@ -96,7 +96,10 @@ class CacheMetrics:
9696
Sampling rate for metrics collection (0.0-1.0), by default 1.0
9797
Lower values reduce overhead at the cost of accuracy
9898
window_sizes : list of timedelta, optional
99-
Time windows to track for aggregated metrics, by default [1 minute, 1 hour, 1 day]
99+
Informational window presets, by default [1 minute, 1 hour, 1 day].
100+
These values are not applied automatically. To filter latency metrics
101+
by time window, pass ``window=timedelta(...)`` explicitly to
102+
``get_stats()``.
100103
101104
Examples
102105
--------
@@ -116,7 +119,9 @@ def __init__(self, sampling_rate: float = 1.0, window_sizes: Optional[list[timed
116119
sampling_rate : float
117120
Sampling rate between 0.0 and 1.0
118121
window_sizes : list of timedelta, optional
119-
Time windows for aggregated metrics
122+
Informational window presets. These values are stored but not
123+
applied automatically; pass ``window=timedelta(...)`` to
124+
``get_stats()`` for time-windowed latency aggregation.
120125
121126
"""
122127
if not 0.0 <= sampling_rate <= 1.0:
@@ -147,7 +152,7 @@ def __init__(self, sampling_rate: float = 1.0, window_sizes: Optional[list[timed
147152
# Assuming ~1000 ops/sec max, keep 1 day of data = 86.4M points
148153
# Limit to 100K points for memory efficiency
149154
max_latency_points = 100000
150-
self._latencies: Deque[_TimestampedMetric] = deque(maxlen=max_latency_points)
155+
self._latencies: deque[_TimestampedMetric] = deque(maxlen=max_latency_points)
151156

152157
# Size tracking
153158
self._entry_count = 0
@@ -355,30 +360,30 @@ def __exit__(self, *_: object) -> None:
355360

356361
def record_hit(self) -> None:
357362
"""Record a cache hit."""
358-
if self._m:
363+
if self._m is not None:
359364
self._m.record_hit()
360365

361366
def record_miss(self) -> None:
362367
"""Record a cache miss."""
363-
if self._m:
368+
if self._m is not None:
364369
self._m.record_miss()
365370

366371
def record_stale_hit(self) -> None:
367372
"""Record a stale cache hit."""
368-
if self._m:
373+
if self._m is not None:
369374
self._m.record_stale_hit()
370375

371376
def record_recalculation(self) -> None:
372377
"""Record a cache recalculation."""
373-
if self._m:
378+
if self._m is not None:
374379
self._m.record_recalculation()
375380

376381
def record_wait_timeout(self) -> None:
377382
"""Record a wait timeout."""
378-
if self._m:
383+
if self._m is not None:
379384
self._m.record_wait_timeout()
380385

381386
def record_size_limit_rejection(self) -> None:
382387
"""Record an entry rejection due to size limit."""
383-
if self._m:
388+
if self._m is not None:
384389
self._m.record_size_limit_rejection()

tests/test_exporters.py

Lines changed: 22 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@ def test_func(x):
5454

5555
test_func.clear_cache()
5656

57-
exporter = PrometheusExporter(port=9093, use_prometheus_client=False)
57+
exporter = PrometheusExporter(port=0, use_prometheus_client=False)
5858
exporter.register_function(test_func)
5959

6060
# Generate some metrics
@@ -89,7 +89,7 @@ def func2(x):
8989
func1.clear_cache()
9090
func2.clear_cache()
9191

92-
exporter = PrometheusExporter(port=9094, use_prometheus_client=False)
92+
exporter = PrometheusExporter(port=0, use_prometheus_client=False)
9393
exporter.register_function(func1)
9494
exporter.register_function(func2)
9595

@@ -132,10 +132,10 @@ def test_func(x):
132132
test_func.clear_cache()
133133
test_func(5)
134134

135-
exporter1 = PrometheusExporter(port=9097, use_prometheus_client=False)
135+
exporter1 = PrometheusExporter(port=0, use_prometheus_client=False)
136136
exporter1.register_function(test_func)
137137

138-
exporter2 = PrometheusExporter(port=9098, use_prometheus_client=False)
138+
exporter2 = PrometheusExporter(port=0, use_prometheus_client=False)
139139
exporter2.register_function(test_func)
140140

141141
# Both should generate valid metrics
@@ -158,7 +158,7 @@ def test_func(x):
158158

159159
test_func.clear_cache()
160160

161-
exporter = PrometheusExporter(port=9099, use_prometheus_client=False)
161+
exporter = PrometheusExporter(port=0, use_prometheus_client=False)
162162
exporter.register_function(test_func)
163163

164164
test_func(5) # miss
@@ -290,7 +290,7 @@ def test_func(x):
290290
test_func.clear_cache()
291291
test_func(5)
292292

293-
exporter = PrometheusExporter(port=19093, use_prometheus_client=True)
293+
exporter = PrometheusExporter(port=0, use_prometheus_client=True)
294294
assert exporter._prom_client is None
295295
exporter.register_function(test_func)
296296
text = exporter._generate_text_metrics()
@@ -303,8 +303,7 @@ def test_func(x):
303303
def test_prometheus_prom_client_available_paths():
304304
"""Cover prometheus_client-available code paths via module-level patching.
305305
306-
Exercises: __init__ branch (L157-160), _setup_collector (L168-169),
307-
_init_prometheus_metrics (L179), CachierCollector.describe (L57), and
306+
Exercises: __init__ branch, _setup_collector, CachierCollector.describe, and
308307
CachierCollector.collect() None-metrics skip (L66 False branch).
309308
310309
"""
@@ -382,7 +381,7 @@ def test_prometheus_module_import_with_prom_client():
382381
@pytest.mark.memory
383382
def test_prometheus_stop_when_not_started():
384383
"""Test that stop() is a no-op when the server was never started."""
385-
exporter = PrometheusExporter(port=19094, use_prometheus_client=False)
384+
exporter = PrometheusExporter(port=0, use_prometheus_client=False)
386385
exporter.stop() # Should not raise
387386

388387

@@ -391,10 +390,11 @@ def test_prometheus_simple_server_404():
391390
"""Test that simple HTTP server returns 404 for non-metrics paths."""
392391
import http.client
393392

394-
exporter = PrometheusExporter(port=19095, use_prometheus_client=False)
393+
exporter = PrometheusExporter(port=0, use_prometheus_client=False)
395394
exporter.start()
395+
actual_port = exporter._server.server_address[1]
396396
try:
397-
conn = http.client.HTTPConnection("127.0.0.1", 19095)
397+
conn = http.client.HTTPConnection("127.0.0.1", actual_port)
398398
conn.request("GET", "/notfound")
399399
response = conn.getresponse()
400400
assert response.status == 404
@@ -410,10 +410,11 @@ def test_prometheus_prometheus_server_404():
410410

411411
pytest.importorskip("prometheus_client")
412412

413-
exporter = PrometheusExporter(port=19096, use_prometheus_client=True)
413+
exporter = PrometheusExporter(port=0, use_prometheus_client=True)
414414
exporter.start()
415+
actual_port = exporter._server.server_address[1]
415416
try:
416-
conn = http.client.HTTPConnection("127.0.0.1", 19096)
417+
conn = http.client.HTTPConnection("127.0.0.1", actual_port)
417418
conn.request("GET", "/notfound")
418419
response = conn.getresponse()
419420
assert response.status == 404
@@ -428,7 +429,7 @@ def test_prometheus_collector_collect_empty():
428429
pytest.importorskip("prometheus_client")
429430
from prometheus_client import generate_latest
430431

431-
exporter = PrometheusExporter(port=19097, use_prometheus_client=True)
432+
exporter = PrometheusExporter(port=0, use_prometheus_client=True)
432433
assert exporter._registry is not None
433434
# No functions registered — collect() should run without error and yield metric families
434435
output = generate_latest(exporter._registry).decode()
@@ -448,11 +449,12 @@ def test_func(x):
448449
test_func.clear_cache()
449450
test_func(5)
450451

451-
exporter = PrometheusExporter(port=19098, use_prometheus_client=False)
452+
exporter = PrometheusExporter(port=0, use_prometheus_client=False)
452453
exporter.register_function(test_func)
453454
exporter.start()
455+
actual_port = exporter._server.server_address[1]
454456
try:
455-
response = urllib.request.urlopen("http://127.0.0.1:19098/metrics", timeout=5)
457+
response = urllib.request.urlopen(f"http://127.0.0.1:{actual_port}/metrics", timeout=5)
456458
body = response.read().decode()
457459
assert "cachier_cache_hits_total" in body
458460
finally:
@@ -474,11 +476,12 @@ def test_func(x):
474476
test_func.clear_cache()
475477
test_func(5)
476478

477-
exporter = PrometheusExporter(port=19099, use_prometheus_client=True)
479+
exporter = PrometheusExporter(port=0, use_prometheus_client=True)
478480
exporter.register_function(test_func)
479481
exporter.start()
482+
actual_port = exporter._server.server_address[1]
480483
try:
481-
response = urllib.request.urlopen("http://127.0.0.1:19099/metrics")
484+
response = urllib.request.urlopen(f"http://127.0.0.1:{actual_port}/metrics")
482485
body = response.read().decode()
483486
assert "cachier_cache_hits_total" in body
484487
finally:
@@ -568,7 +571,7 @@ def test_prometheus_collector_collect_skips_none_metrics():
568571
pytest.importorskip("prometheus_client")
569572
from prometheus_client import generate_latest
570573

571-
exporter = PrometheusExporter(port=19200, use_prometheus_client=True)
574+
exporter = PrometheusExporter(port=0, use_prometheus_client=True)
572575

573576
class _NoMetrics:
574577
__module__ = "test"

0 commit comments

Comments
 (0)