Skip to content

Commit abd4a5a

Browse files
committed
Merge branch 'main' of github.com:basicmachines-co/basic-memory
2 parents cc104f7 + a872947 commit abd4a5a

File tree

11 files changed

+1106
-91
lines changed

11 files changed

+1106
-91
lines changed

src/basic_memory/cli/commands/cloud/project_sync.py

Lines changed: 0 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -124,22 +124,6 @@ def sync_project_command(
124124

125125
if success:
126126
console.print(f"[green]{name} synced successfully[/green]")
127-
128-
# Trigger database sync if not a dry run
129-
if not dry_run:
130-
131-
async def _trigger_db_sync():
132-
async with get_client(project_name=name) as client:
133-
return await ProjectClient(client).sync(
134-
project_data.external_id, force_full=False
135-
)
136-
137-
try:
138-
with force_routing(cloud=True):
139-
result = run_with_cleanup(_trigger_db_sync())
140-
console.print(f"[dim]Database sync initiated: {result.get('message')}[/dim]")
141-
except Exception as e:
142-
console.print(f"[yellow]Warning: Could not trigger database sync: {e}[/yellow]")
143127
else:
144128
console.print(f"[red]{name} sync failed[/red]")
145129
raise typer.Exit(1)
@@ -202,22 +186,6 @@ def bisync_project_command(
202186
sync_entry.last_sync = datetime.now()
203187
sync_entry.bisync_initialized = True
204188
ConfigManager().save_config(config)
205-
206-
# Trigger database sync if not a dry run
207-
if not dry_run:
208-
209-
async def _trigger_db_sync():
210-
async with get_client(project_name=name) as client:
211-
return await ProjectClient(client).sync(
212-
project_data.external_id, force_full=False
213-
)
214-
215-
try:
216-
with force_routing(cloud=True):
217-
result = run_with_cleanup(_trigger_db_sync())
218-
console.print(f"[dim]Database sync initiated: {result.get('message')}[/dim]")
219-
except Exception as e:
220-
console.print(f"[yellow]Warning: Could not trigger database sync: {e}[/yellow]")
221189
else:
222190
console.print(f"[red]{name} bisync failed[/red]")
223191
raise typer.Exit(1)

src/basic_memory/cli/commands/project.py

Lines changed: 231 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414

1515
from basic_memory.cli.app import app
1616
from basic_memory.cli.auth import CLIAuth
17+
from basic_memory.cli.commands.cloud.api_client import CloudAPIError, make_api_request
1718
from basic_memory.cli.commands.cloud.bisync_commands import get_mount_info
1819
from basic_memory.cli.commands.cloud.project_sync import (
1920
_has_cloud_credentials,
@@ -26,9 +27,13 @@
2627
from basic_memory.cli.commands.command_utils import get_project_info, run_with_cleanup
2728
from basic_memory.cli.commands.routing import force_routing, validate_routing_flags
2829
from basic_memory.config import ConfigManager, ProjectEntry, ProjectMode
29-
from basic_memory.mcp.async_client import get_client
30+
from basic_memory.mcp.async_client import get_client, resolve_configured_workspace
3031
from basic_memory.mcp.clients import ProjectClient
31-
from basic_memory.schemas.cloud import ProjectVisibility
32+
from basic_memory.schemas.cloud import (
33+
CloudProjectIndexStatus,
34+
CloudTenantIndexStatusResponse,
35+
ProjectVisibility,
36+
)
3237
from basic_memory.schemas.project_info import ProjectItem, ProjectList
3338
from basic_memory.utils import generate_permalink, normalize_project_path
3439

@@ -58,6 +63,211 @@ def make_bar(value: int, max_value: int, width: int = 40) -> Text:
5863
return bar
5964

6065

66+
def _uses_cloud_project_info_route(project_name: str, *, local: bool, cloud: bool) -> bool:
    """Decide whether project info should also attempt the cloud lookup.

    Explicit routing flags always win; otherwise the decision falls back to
    the project's configured mode.
    """
    # Explicit flags short-circuit any configuration lookup.
    if local:
        return False
    if cloud:
        return True

    manager = ConfigManager()
    configured, _ = manager.get_project(project_name)
    name = configured if configured else project_name
    return manager.config.get_project_mode(name) == ProjectMode.CLOUD
77+
78+
79+
def _resolve_cloud_status_workspace_id(project_name: str) -> str:
    """Resolve the tenant/workspace for cloud index status lookup.

    Resolution order: the workspace configured for the project, then the
    globally configured default workspace.

    Raises:
        RuntimeError: when cloud credentials are missing or no workspace
            can be determined for the project.
    """
    manager = ConfigManager()
    cfg = manager.config

    if not _has_cloud_credentials(cfg):
        raise RuntimeError(
            "Cloud credentials not found. Run `bm cloud api-key save <key>` or `bm cloud login` first."
        )

    configured_name, _ = manager.get_project(project_name)
    name = configured_name if configured_name else project_name

    # Prefer a workspace pinned to this project.
    workspace = resolve_configured_workspace(config=cfg, project_name=name)
    if workspace is None:
        # Fall back to the globally configured default workspace.
        workspace = _resolve_workspace_id(cfg, None)
    if workspace is not None:
        return workspace

    raise RuntimeError(
        f"Cloud workspace could not be resolved for project '{name}'. "
        "Set a project workspace with `bm project set-cloud --workspace ...` or configure a "
        "default workspace with `bm cloud workspace set-default ...`."
    )
105+
106+
107+
async def _resolve_cloud_status_workspace_id_async(project_name: str) -> str:
    """Resolve the tenant/workspace for cloud index status lookup in async contexts.

    Like the sync variant, the project-configured workspace wins; the async
    fallback instead asks the cloud for available workspaces and accepts
    only an unambiguous single match.

    Raises:
        RuntimeError: when cloud credentials are missing or no workspace
            can be determined for the project.
    """
    manager = ConfigManager()
    cfg = manager.config

    if not _has_cloud_credentials(cfg):
        raise RuntimeError(
            "Cloud credentials not found. Run `bm cloud api-key save <key>` or `bm cloud login` first."
        )

    configured_name, _ = manager.get_project(project_name)
    name = configured_name if configured_name else project_name

    workspace = resolve_configured_workspace(config=cfg, project_name=name)
    if workspace is not None:
        return workspace

    # NOTE(review): function-scope import — presumably deferred to avoid an
    # import cycle with the MCP layer; confirm before hoisting it.
    from basic_memory.mcp.project_context import get_available_workspaces

    available = await get_available_workspaces()
    # Only an unambiguous single workspace can serve as a fallback.
    if len(available) == 1:
        return available[0].tenant_id

    raise RuntimeError(
        f"Cloud workspace could not be resolved for project '{name}'. "
        "Set a project workspace with `bm project set-cloud --workspace ...` or configure a "
        "default workspace with `bm cloud workspace set-default ...`."
    )
135+
136+
137+
def _match_cloud_index_status_project(
    project_name: str, projects: list[CloudProjectIndexStatus]
) -> CloudProjectIndexStatus | None:
    """Match the requested project against the tenant index-status payload.

    Tries an exact name match first, then a permalink match; a permalink
    match is accepted only when it is unique. Returns None otherwise.
    """
    for candidate in projects:
        if candidate.project_name == project_name:
            return candidate

    target_permalink = generate_permalink(project_name)
    by_permalink = [
        candidate
        for candidate in projects
        if generate_permalink(candidate.project_name) == target_permalink
    ]
    return by_permalink[0] if len(by_permalink) == 1 else None
157+
158+
159+
def _format_cloud_index_status_error(error: Exception) -> str:
    """Convert cloud lookup failures into concise user-facing text.

    CloudAPIError is summarized as "HTTP <code>[: <detail>]" when a status
    code is present; everything else falls back to str(error).
    """
    if not isinstance(error, CloudAPIError):
        return str(error)

    message: str | None = None
    detail = error.detail.get("detail")
    if isinstance(detail, str):
        message = detail
    elif isinstance(detail, dict):
        # Accept either nesting shape: {"message": ...} or {"detail": ...}.
        for key in ("message", "detail"):
            value = detail.get(key)
            if isinstance(value, str):
                message = value
                break

    if error.status_code:
        if message:
            return f"HTTP {error.status_code}: {message}"
        return f"HTTP {error.status_code}"

    return str(error)
178+
179+
180+
async def _fetch_cloud_project_index_status(project_name: str) -> CloudProjectIndexStatus:
    """Fetch cloud index freshness for one project from the admin tenant endpoint.

    Args:
        project_name: Project whose index status is requested.

    Returns:
        The matching project's index status entry from the tenant payload.

    Raises:
        RuntimeError: when credentials are missing, the tenant payload
            reports an error, or the project is absent from the payload.
        typer.Exit: re-raised when make_api_request exits with a success code.
    """
    workspace_id = await _resolve_cloud_status_workspace_id_async(project_name)
    host_url = ConfigManager().config.cloud_host.rstrip("/")

    try:
        response = await make_api_request(
            method="GET",
            url=f"{host_url}/admin/tenants/{workspace_id}/index-status",
        )
    except typer.Exit as exc:
        # make_api_request signals failure by exiting the CLI; convert a
        # failing exit into RuntimeError so the best-effort caller
        # (_load_cloud_project_index_status) can catch and degrade gracefully.
        if exc.exit_code not in (None, 0):
            raise RuntimeError(
                "Cloud credentials not found. Run `bm cloud api-key save <key>` or "
                "`bm cloud login` first."
            ) from exc
        # A zero/None exit code is not an error condition — propagate as-is.
        raise

    tenant_status = CloudTenantIndexStatusResponse.model_validate(response.json())
    if tenant_status.error:
        # The tenant endpoint can return a well-formed payload that still
        # carries an error message; surface it to the caller.
        raise RuntimeError(tenant_status.error)

    project_status = _match_cloud_index_status_project(project_name, tenant_status.projects)
    if project_status is None:
        raise RuntimeError(
            f"Project '{project_name}' was not found in workspace index status "
            f"for tenant '{workspace_id}'."
        )

    return project_status
210+
211+
212+
def _load_cloud_project_index_status(
    project_name: str,
) -> tuple[CloudProjectIndexStatus | None, str | None]:
    """Best-effort wrapper around the cloud index freshness lookup.

    Returns (status, None) on success or (None, error_text) on failure.
    Never raises, so callers can render a warning instead of failing.
    """
    try:
        status = run_with_cleanup(_fetch_cloud_project_index_status(project_name))
    except Exception as exc:  # deliberately broad: the lookup is best-effort
        return None, _format_cloud_index_status_error(exc)
    return status, None
220+
221+
222+
def _build_cloud_index_status_section(
    cloud_index_status: CloudProjectIndexStatus | None,
    cloud_index_status_error: str | None,
) -> Table | None:
    """Render the optional Cloud Index Status block for rich project info.

    Returns None when there is nothing to show; otherwise a two-column grid
    carrying either a single warning row or the index freshness counters.
    """
    if cloud_index_status is None and cloud_index_status_error is None:
        return None

    section = Table.grid(padding=(0, 2))
    section.add_column("property", style="cyan")
    section.add_column("value", style="green")
    section.add_row("[bold]Cloud Index Status[/bold]", "")

    if cloud_index_status_error is not None:
        section.add_row(
            "[yellow]●[/yellow] Warning", f"[yellow]{cloud_index_status_error}[/yellow]"
        )
        return section

    # Unreachable in practice (the both-None case returned above), but it
    # narrows the Optional for type checkers.
    if cloud_index_status is None:
        return section

    file_count = cloud_index_status.current_file_count
    counter_rows = [
        ("Files", str(file_count)),
        ("Note content", f"{cloud_index_status.note_content_synced}/{file_count}"),
        ("Search", f"{cloud_index_status.total_indexed_entities}/{file_count}"),
        ("Embeddable", str(cloud_index_status.embeddable_indexed_entities)),
        (
            "Vectorized",
            f"{cloud_index_status.total_entities_with_chunks}/"
            f"{cloud_index_status.embeddable_indexed_entities}",
        ),
    ]
    for label, value in counter_rows:
        section.add_row(label, value)

    if cloud_index_status.reindex_recommended:
        section.add_row("[yellow]●[/yellow] Status", "[yellow]Reindex recommended[/yellow]")
        if cloud_index_status.reindex_reason:
            section.add_row("Reason", f"[yellow]{cloud_index_status.reindex_reason}[/yellow]")
    else:
        section.add_row("[green]●[/green] Status", "[green]Up to date[/green]")

    return section
269+
270+
61271
def _normalize_project_visibility(visibility: str | None) -> ProjectVisibility:
62272
"""Normalize CLI visibility input to the cloud API contract."""
63273
if visibility is None:
@@ -856,9 +1066,20 @@ def display_project_info(
8561066
with force_routing(local=local, cloud=cloud):
8571067
info = run_with_cleanup(get_project_info(name))
8581068

1069+
cloud_index_status: CloudProjectIndexStatus | None = None
1070+
cloud_index_status_error: str | None = None
1071+
if _uses_cloud_project_info_route(info.project_name, local=local, cloud=cloud):
1072+
cloud_index_status, cloud_index_status_error = _load_cloud_project_index_status(
1073+
info.project_name
1074+
)
1075+
8591076
if json_output:
860-
# Convert to JSON and print
861-
print(json.dumps(info.model_dump(), indent=2, default=str))
1077+
output = info.model_dump()
1078+
output["cloud_index_status"] = (
1079+
cloud_index_status.model_dump() if cloud_index_status is not None else None
1080+
)
1081+
output["cloud_index_status_error"] = cloud_index_status_error
1082+
print(json.dumps(output, indent=2, default=str))
8621083
else:
8631084
# --- Left column: Knowledge Graph stats ---
8641085
left = Table.grid(padding=(0, 2))
@@ -916,6 +1137,10 @@ def display_project_info(
9161137
columns = Table.grid(padding=(0, 4), expand=False)
9171138
columns.add_row(left, right)
9181139

1140+
cloud_section = _build_cloud_index_status_section(
1141+
cloud_index_status, cloud_index_status_error
1142+
)
1143+
9191144
# --- Note Types bar chart (top 5 by count) ---
9201145
bars_section = None
9211146
if info.statistics.note_types:
@@ -954,6 +1179,8 @@ def display_project_info(
9541179

9551180
# --- Assemble dashboard ---
9561181
parts: list = [columns, ""]
1182+
if cloud_section is not None:
1183+
parts.extend([cloud_section, ""])
9571184
if bars_section:
9581185
parts.extend([bars_section, ""])
9591186
parts.append(footer)

src/basic_memory/repository/search_repository.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -70,6 +70,10 @@ async def sync_entity_vectors(self, entity_id: int) -> None:
7070
"""Sync semantic vector chunks for an entity."""
7171
...
7272

73+
async def delete_entity_vector_rows(self, entity_id: int) -> None:
    """Delete semantic vector chunks and embeddings for one entity.

    Interface stub (body is `...`): concrete search repositories supply the
    backend-specific implementation.

    Args:
        entity_id: Primary key of the entity whose vector rows are removed.
    """
    ...
76+
7377
async def sync_entity_vectors_batch(
7478
self,
7579
entity_ids: list[int],

src/basic_memory/repository/search_repository_base.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -454,6 +454,15 @@ async def execute_query(
454454
logger.debug(f"Query executed successfully in {elapsed_time:.2f}s.")
455455
return result
456456

457+
async def delete_entity_vector_rows(self, entity_id: int) -> None:
    """Delete one entity's derived vector rows using the backend's cleanup path.

    Args:
        entity_id: Primary key of the entity whose chunk/embedding rows
            are removed.
    """
    await self._ensure_vector_tables()

    async with db.scoped_session(self.session_maker) as session:
        # Backend-specific session preparation must run on this exact
        # connection before touching vector tables (e.g. sqlite-vec only
        # exposes its virtual tables after the vec0 extension is loaded).
        await self._prepare_vector_session(session)
        await self._delete_entity_chunks(session, entity_id)
        await session.commit()
465+
457466
# ------------------------------------------------------------------
458467
# Shared semantic search: guard, text processing, chunking
459468
# ------------------------------------------------------------------

src/basic_memory/repository/sqlite_search_repository.py

Lines changed: 0 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -565,21 +565,6 @@ async def _delete_stale_chunks(
565565
stale_params,
566566
)
567567

568-
async def delete_entity_vector_rows(self, entity_id: int) -> None:
569-
"""Delete one entity's vec rows on a sqlite-vec-enabled connection."""
570-
await self._ensure_vector_tables()
571-
572-
async with db.scoped_session(self.session_maker) as session:
573-
await self._ensure_sqlite_vec_loaded(session)
574-
575-
# Constraint: sqlite-vec virtual tables are only visible after vec0 is
576-
# loaded on this exact connection.
577-
# Why: generic repository sessions can reach search_vector_chunks but still
578-
# fail with "no such module: vec0" when touching embeddings.
579-
# Outcome: service-level cleanup routes vec-table deletes through this helper.
580-
await self._delete_entity_chunks(session, entity_id)
581-
await session.commit()
582-
583568
async def delete_project_vector_rows(self) -> None:
584569
"""Delete all vector rows for this project on a sqlite-vec-enabled connection."""
585570
await self._ensure_vector_tables()

0 commit comments

Comments
 (0)