-
Notifications
You must be signed in to change notification settings - Fork 192
Expand file tree
/
Copy pathsearch_router.py
More file actions
105 lines (89 loc) · 3.59 KB
/
search_router.py
File metadata and controls
105 lines (89 loc) · 3.59 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
"""V2 router for search operations.
This router uses external_id UUIDs for stable, API-friendly routing.
V1 uses string-based project names which are less efficient and less stable.
"""
from fastapi import APIRouter, HTTPException, Path
from basic_memory import telemetry
from basic_memory.api.v2.utils import to_search_results
from basic_memory.repository.semantic_errors import (
SemanticDependenciesMissingError,
SemanticSearchDisabledError,
)
from basic_memory.schemas.search import SearchQuery, SearchResponse
from basic_memory.deps import (
SearchServiceV2ExternalDep,
EntityServiceV2ExternalDep,
TaskSchedulerDep,
ProjectExternalIdPathDep,
)
# Note: No prefix here - it's added during registration as /v2/{project_id}/search
# (so {project_id} is available as a path parameter to every route below).
router = APIRouter(tags=["search"])
@router.post("/search/", response_model=SearchResponse)
async def search(
query: SearchQuery,
search_service: SearchServiceV2ExternalDep,
entity_service: EntityServiceV2ExternalDep,
project_id: str = Path(..., description="Project external UUID"),
page: int = 1,
page_size: int = 10,
):
"""Search across all knowledge and documents in a project.
V2 uses external_id UUIDs for stable API references.
Args:
project_id: Project external UUID from URL path
query: Search query parameters (text, filters, etc.)
search_service: Search service scoped to project
entity_service: Entity service scoped to project
page: Page number for pagination
page_size: Number of results per page
Returns:
SearchResponse with paginated search results
"""
with telemetry.operation(
"api.request.search",
entrypoint="api",
page=page,
page_size=page_size,
retrieval_mode=query.retrieval_mode.value,
has_text_query=bool(query.text and query.text.strip()),
has_title_query=bool(query.title),
has_permalink_query=bool(query.permalink or query.permalink_match),
):
offset = (page - 1) * page_size
# Fetch one extra item to detect whether more pages exist (N+1 trick)
fetch_limit = page_size + 1
try:
results = await search_service.search(query, limit=fetch_limit, offset=offset)
except SemanticSearchDisabledError as exc:
raise HTTPException(status_code=400, detail=str(exc)) from exc
except SemanticDependenciesMissingError as exc:
raise HTTPException(status_code=400, detail=str(exc)) from exc
except ValueError as exc:
raise HTTPException(status_code=400, detail=str(exc)) from exc
has_more = len(results) > page_size
if has_more:
results = results[:page_size]
search_results = await to_search_results(entity_service, results)
return SearchResponse(
results=search_results,
current_page=page,
page_size=page_size,
has_more=has_more,
)
@router.post("/search/reindex")
async def reindex(
task_scheduler: TaskSchedulerDep,
project_id: ProjectExternalIdPathDep,
):
"""Recreate and populate the search index for a project.
This is a background operation that rebuilds the search index
from scratch. Useful after bulk updates or if the index becomes
corrupted.
Args:
project_id: Project external UUID from URL path
task_scheduler: Task scheduler for background work
Returns:
Status message indicating reindex has been initiated
"""
task_scheduler.schedule("reindex_project", project_id=project_id)
return {"status": "ok", "message": "Reindex initiated"}