Skip to content

Commit 1af28b6

Browse files
authored
Merge pull request #9 from LAA-Software-Engineering/feat/api-timeline-compare
feat(api): POST /query/timeline and POST /query/compare
2 parents 7c5f4c6 + d530a03 commit 1af28b6

6 files changed

Lines changed: 591 additions & 3 deletions

File tree

README.md

Lines changed: 18 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -768,6 +768,8 @@ make api
768768
| `POST` | `/query/explain` | Explain a time window |
769769
| `POST` | `/query/ask` | Answer a natural language question |
770770
| `POST` | `/query/clusters` | List top clusters |
771+
| `POST` | `/query/timeline` | Reconstruct incident timeline for a window |
772+
| `POST` | `/query/compare` | Diff two time windows (same semantics as `raglogs compare`) |
771773
| `GET` | `/config` | Read effective configuration |
772774

773775
**Example**
@@ -796,6 +798,22 @@ curl -X POST http://localhost:8000/query/explain \
796798
}
797799
```
798800

801+
**Timeline** — `POST /query/timeline` accepts the same window filters as the CLI (`since` or `from_time`/`to_time`, optional `service`, `env`, `all_ingestions`, `ingestion_job_id`). Set `"format": "text"` to include a plain-text `text` field alongside `events`.
802+
803+
```bash
804+
curl -X POST http://localhost:8000/query/timeline \
805+
-H "Content-Type: application/json" \
806+
-d '{"since": "2h", "format": "json"}'
807+
```
808+
809+
**Compare** — `POST /query/compare` matches `raglogs compare`: either `"since"` + `"baseline"` (durations, window A ends at request time) or explicit `window_a_from` / `window_a_to` / `window_b_from` / `window_b_to`. Optional `"format": "text"` adds a rendered `text` field.
810+
811+
```bash
812+
curl -X POST http://localhost:8000/query/compare \
813+
-H "Content-Type: application/json" \
814+
-d '{"since": "30m", "baseline": "24h"}'
815+
```
816+
799817
---
800818

801819
## Development
@@ -862,7 +880,6 @@ New source adapters go in `raglogs/adapters/`. Each adapter yields `ParsedLogLin
862880
- Kubernetes log export ingestion
863881
- Semantic cluster merging via pgvector
864882
- Markdown incident report export (`raglogs explain --format markdown > postmortem.md`)
865-
- `POST /query/timeline` and `POST /query/compare` API endpoints
866883
- Web UI
867884

868885
---

src/api/app.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
from fastapi import FastAPI
44
from fastapi.responses import ORJSONResponse
55

6-
from src.api.routes import ask, clusters, config, explain, health, ingestions
6+
from src.api.routes import ask, clusters, compare_windows, config, explain, health, ingestions, timeline
77

88

99
@asynccontextmanager
@@ -30,4 +30,6 @@ async def lifespan(app: FastAPI):
3030
app.include_router(explain.router, prefix="/query", tags=["query"])
3131
app.include_router(ask.router, prefix="/query", tags=["query"])
3232
app.include_router(clusters.router, prefix="/query", tags=["query"])
33+
app.include_router(timeline.router, prefix="/query", tags=["query"])
34+
app.include_router(compare_windows.router, prefix="/query", tags=["query"])
3335
app.include_router(config.router, prefix="/config", tags=["config"])

src/api/routes/compare_windows.py

Lines changed: 269 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,269 @@
1+
"""POST /query/compare — same pipeline as `raglogs compare`."""
2+
from __future__ import annotations
3+
4+
import uuid
5+
from datetime import datetime, timezone
6+
from typing import Literal, Optional
7+
8+
from fastapi import APIRouter, HTTPException
9+
from pydantic import BaseModel
10+
11+
from src.core.compare.differ import CompareResult
12+
13+
router = APIRouter()
14+
15+
16+
class CompareRequest(BaseModel):
    """Request body for POST /query/compare.

    Exactly one window specification must be supplied: either the relative
    ``since`` + ``baseline`` pair, or all four explicit boundaries.
    Resolution of the two forms happens in ``_resolve_compare_windows``.
    """

    # Relative form: window A covers the last `since` ending at request time;
    # window B is the same length, ending `baseline` ago.
    since: Optional[str] = None
    baseline: Optional[str] = None
    # Explicit form: all four boundaries must be provided together.
    # Naive datetimes are treated as UTC (see _ensure_utc).
    window_a_from: Optional[datetime] = None
    window_a_to: Optional[datetime] = None
    window_b_from: Optional[datetime] = None
    window_b_to: Optional[datetime] = None
    # Optional filters applied identically to both windows.
    service: Optional[str] = None
    env: Optional[str] = None
    # Ingestion scope: an explicit job id wins; otherwise all jobs when
    # `all_ingestions` is true, else the latest ingestion job.
    all_ingestions: bool = False
    ingestion_job_id: Optional[str] = None
    # "text" additionally includes a rendered plain-text `text` field.
    format: Literal["json", "text"] = "json"
29+
30+
def _ensure_utc(dt: datetime) -> datetime:
31+
if dt.tzinfo is None:
32+
return dt.replace(tzinfo=timezone.utc)
33+
return dt
34+
35+
36+
def _resolve_compare_windows(request: CompareRequest) -> tuple[datetime, datetime, datetime, datetime]:
    """Resolve a CompareRequest into ``(a_start, a_end, b_start, b_end)``.

    Two mutually exclusive input shapes are accepted:

    * ``since`` + ``baseline`` durations — window A ends at request time and
      spans ``since``; window B is the same length, ending ``baseline`` ago.
    * four explicit boundaries (``window_a_from/to``, ``window_b_from/to``),
      coerced to UTC when naive.

    Raises:
        ValueError: when neither shape is fully specified.
    """
    from src.utils.time import parse_duration

    if request.since and request.baseline:
        now = datetime.now(tz=timezone.utc)
        span = parse_duration(request.since)
        b_end = now - parse_duration(request.baseline)
        return now - span, now, b_end - span, b_end

    explicit = (
        request.window_a_from,
        request.window_a_to,
        request.window_b_from,
        request.window_b_to,
    )
    if all(explicit):
        a_from, a_to, b_from, b_to = (_ensure_utc(dt) for dt in explicit)
        return a_from, a_to, b_from, b_to

    raise ValueError(
        "Provide either since+baseline, or window_a_from/to and window_b_from/to"
    )
67+
68+
def _diff_to_dict(d) -> dict:
69+
return {
70+
"fingerprint": d.fingerprint,
71+
"message": d.message,
72+
"services": d.services,
73+
"count_a": d.count_a,
74+
"count_b": d.count_b,
75+
}
76+
77+
78+
def _compare_result_to_payload(result: CompareResult) -> dict:
    """Serialize a CompareResult into the JSON response body."""

    def window(start: datetime, end: datetime) -> dict:
        return {"start": start.isoformat(), "end": end.isoformat()}

    def trigger(t) -> dict:
        return {"message": t.message, "service": t.service, "only_in": t.only_in}

    return {
        "window_a": window(result.window_a_start, result.window_a_end),
        "window_b": window(result.window_b_start, result.window_b_end),
        "has_changes": result.has_changes,
        "new_clusters": [_diff_to_dict(d) for d in result.new_clusters],
        "disappeared_clusters": [_diff_to_dict(d) for d in result.disappeared_clusters],
        "increased_clusters": [_diff_to_dict(d) for d in result.increased_clusters],
        "decreased_clusters": [_diff_to_dict(d) for d in result.decreased_clusters],
        "new_triggers": [trigger(t) for t in result.new_triggers],
        "dropped_triggers": [trigger(t) for t in result.dropped_triggers],
    }
103+
104+
def _trunc(text: str, n: int = 72) -> str:
105+
if len(text) <= n:
106+
return text
107+
cut = text[:n]
108+
sp = cut.rfind(" ")
109+
return (cut[:sp] if sp > n // 2 else cut) + "…"
110+
111+
112+
def _format_counts(count_a: Optional[int], count_b: Optional[int]) -> str:
113+
if count_a is not None and count_b is None:
114+
return f"{count_a} events"
115+
if count_b is not None and count_a is None:
116+
return f"{count_b} events"
117+
if count_a is not None and count_b is not None:
118+
return f"{count_b}{count_a}"
119+
return ""
120+
121+
122+
def format_compare_plain(result: CompareResult) -> str:
    """Plain-text layout matching CLI `compare` (no Rich).

    Renders a header with both windows, then one section per diff category
    (new / disappeared / increased / decreased clusters, then triggers seen
    in only one window). Empty sections are omitted. Returns the joined
    multi-line string; used when the request asks for ``format: "text"``.

    NOTE(review): leading-space runs in the f-strings below may have been
    collapsed by the diff view this was copied from — confirm column widths
    against the CLI renderer before relying on exact alignment.
    """
    from src.utils.time import format_window

    lines: list[str] = []
    lines.append("")
    lines.append("Incident comparison")
    lines.append("")
    lines.append(f" Window A (now): {format_window(result.window_a_start, result.window_a_end)}")
    lines.append(f" Window B (baseline): {format_window(result.window_b_start, result.window_b_end)}")
    lines.append("")

    # Early exit: nothing changed between the windows.
    if not result.has_changes:
        lines.append(" No significant changes between windows.")
        lines.append("")
        return "\n".join(lines)

    def row(sigil: str, msg: str, count_a: Optional[int], count_b: Optional[int]) -> None:
        # One table row: sigil, left-padded truncated message, count column.
        # rstrip keeps rows without a count column free of trailing spaces.
        count_str = _format_counts(count_a, count_b)
        lines.append(f" {sigil} {_trunc(msg):<74} {count_str}".rstrip())

    if result.new_clusters:
        lines.append("New error clusters")
        for d in result.new_clusters:
            row("+", d.message, d.count_a, d.count_b)
        lines.append("")

    if result.disappeared_clusters:
        lines.append("Errors that disappeared")
        for d in result.disappeared_clusters:
            row("-", d.message, d.count_a, d.count_b)
        lines.append("")

    if result.increased_clusters:
        lines.append("Errors that increased")
        for d in result.increased_clusters:
            row("↑", d.message, d.count_a, d.count_b)
        lines.append("")

    if result.decreased_clusters:
        lines.append("Errors that decreased")
        for d in result.decreased_clusters:
            row("↓", d.message, d.count_a, d.count_b)
        lines.append("")

    if result.new_triggers:
        lines.append("Triggers in A not seen in B")
        for t in result.new_triggers:
            # Service suffix is appended only when the trigger carries one.
            svc = f" · {t.service}" if t.service else ""
            lines.append(f" +⚡ {_trunc(t.message)}{svc}")
        lines.append("")

    if result.dropped_triggers:
        lines.append("Triggers in B not seen in A")
        for t in result.dropped_triggers:
            svc = f" · {t.service}" if t.service else ""
            lines.append(f" -⚡ {_trunc(t.message)}{svc}")
        lines.append("")

    return "\n".join(lines)
183+
184+
@router.post("/compare")
def compare_endpoint(request: CompareRequest):
    """Compare two time windows and report cluster/trigger differences.

    Mirrors the `raglogs compare` CLI pipeline: cluster each window, assemble
    evidence packets (source of trigger candidates), diff the two, and return
    a JSON payload — optionally with a rendered plain-text ``text`` field.

    Raises:
        HTTPException 400: invalid window specification or ingestion_job_id.
        HTTPException 500: any downstream failure (DB, clustering, diffing).
    """
    # Imported lazily so the route module stays importable without pulling in
    # the heavier core/db dependencies at app startup.
    from src.core.clustering.clusterer import run_clustering
    from src.core.compare.differ import compare_windows as run_compare_windows
    from src.core.explain.evidence import assemble_evidence
    from src.core.explain.summarizer import get_latest_ingestion_job_id
    from src.db.session import get_db

    try:
        a_start, a_end, b_start, b_end = _resolve_compare_windows(request)
    except ValueError as e:
        # Chain the cause (B904) so logs retain the original validation error.
        raise HTTPException(status_code=400, detail=str(e)) from e

    ingestion_job_id: Optional[uuid.UUID] = None
    if request.ingestion_job_id:
        try:
            ingestion_job_id = uuid.UUID(request.ingestion_job_id)
        except ValueError as e:
            raise HTTPException(status_code=400, detail="Invalid ingestion_job_id") from e

    try:
        with get_db() as db:
            # Scope both windows to a single ingestion job unless the caller
            # asked for all ingestions: an explicit job id wins, otherwise
            # default to the most recent job.
            job_id = None
            if ingestion_job_id is not None:
                job_id = ingestion_job_id
            elif not request.all_ingestions:
                job_id = get_latest_ingestion_job_id(db)

            # Cluster each window independently; results are not persisted.
            _, clusters_a = run_clustering(
                db=db,
                window_start=a_start,
                window_end=a_end,
                service=request.service,
                environment=request.env,
                save_to_db=False,
                ingestion_job_id=job_id,
                max_clusters=50,
            )
            _, clusters_b = run_clustering(
                db=db,
                window_start=b_start,
                window_end=b_end,
                service=request.service,
                environment=request.env,
                save_to_db=False,
                ingestion_job_id=job_id,
                max_clusters=50,
            )

            # Evidence packets supply the trigger candidates the diff needs.
            packet_a = assemble_evidence(
                db=db,
                window_start=a_start,
                window_end=a_end,
                clusters=clusters_a,
                service_filter=request.service,
                environment_filter=request.env,
                ingestion_job_id=job_id,
            )
            packet_b = assemble_evidence(
                db=db,
                window_start=b_start,
                window_end=b_end,
                clusters=clusters_b,
                service_filter=request.service,
                environment_filter=request.env,
                ingestion_job_id=job_id,
            )

            result = run_compare_windows(
                clusters_a=clusters_a,
                clusters_b=clusters_b,
                triggers_a=packet_a.trigger_candidates,
                triggers_b=packet_b.trigger_candidates,
                window_a_start=a_start,
                window_a_end=a_end,
                window_b_start=b_start,
                window_b_end=b_end,
            )

    except Exception as e:
        # Boundary handler: surface downstream failures as a 500 with detail,
        # keeping the cause chained for server-side logging.
        raise HTTPException(status_code=500, detail=str(e)) from e

    payload = _compare_result_to_payload(result)
    if request.format == "text":
        payload["text"] = format_compare_plain(result)
    return payload

0 commit comments

Comments
 (0)