Skip to content

Commit 54b968b

Browse files
phernandez authored and claude committed
fix: reduce excessive log volume by demoting per-request noise to DEBUG (#613)
Demote high-frequency per-request/per-item logs from INFO to DEBUG: - 🔇 Client routing decisions (async_client.py) — logged every MCP tool call - 🔇 DB migration checks (db.py) — logged every ASGI client creation - 🔇 Vector table ensure/ready (sqlite + postgres search repos) — logged every search - 🔇 Per-entity search index start/complete (search_service.py) — logged every file sync - 🔇 Incremental scan details + per-file permalink updates (sync_service.py) - 🔇 MCP search tool params and no-results (search.py) - 📉 Log retention: "10 days" → 5 files (~50MB cap) API v2 request/response logs remain at INFO for observability. Closes #613 Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com> Signed-off-by: phernandez <paul@basicmachines.co>
1 parent e59b5cb commit 54b968b

9 files changed

Lines changed: 21 additions & 21 deletions

File tree

src/basic_memory/api/v2/routers/knowledge_router.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -130,7 +130,7 @@ async def resolve_identifier(
130130
resolution_method=resolution_method,
131131
)
132132

133-
logger.info(
133+
logger.debug(
134134
f"API v2 response: resolved '{data.identifier}' to external_id={result.external_id} via {resolution_method}"
135135
)
136136

src/basic_memory/db.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -475,7 +475,7 @@ async def run_migrations(
475475
Note: Alembic tracks which migrations have been applied via the alembic_version table,
476476
so it's safe to call this multiple times - it will only run pending migrations.
477477
"""
478-
logger.info("Running database migrations...")
478+
logger.debug("Running database migrations...")
479479
temp_engine: AsyncEngine | None = None
480480
try:
481481
revisions_before_upgrade: set[str] = set()
@@ -514,7 +514,7 @@ async def run_migrations(
514514
config.set_main_option("sqlalchemy.url", db_url)
515515

516516
command.upgrade(config, "head")
517-
logger.info("Migrations completed successfully")
517+
logger.debug("Migrations completed successfully")
518518

519519
# Get session maker - ensure we don't trigger recursive migration calls
520520
if _session_maker is None:

src/basic_memory/mcp/async_client.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -154,13 +154,13 @@ async def get_client(
154154
# Outcome: route strictly based on explicit flag.
155155
if _explicit_routing():
156156
if _force_local_mode():
157-
logger.info("Explicit local routing enabled - using ASGI client")
157+
logger.debug("Explicit local routing enabled - using ASGI client")
158158
async with _asgi_client(timeout) as client:
159159
yield client
160160
return
161161

162162
if _force_cloud_mode():
163-
logger.info("Explicit cloud routing enabled - using cloud proxy client")
163+
logger.debug("Explicit cloud routing enabled - using cloud proxy client")
164164
async with _cloud_client(config, timeout, workspace=workspace) as client:
165165
yield client
166166
return
@@ -172,7 +172,7 @@ async def get_client(
172172
if project_name is not None and not _explicit_routing():
173173
project_mode = config.get_project_mode(project_name)
174174
if project_mode == ProjectMode.CLOUD:
175-
logger.info(f"Project '{project_name}' is cloud mode - using cloud proxy client")
175+
logger.debug(f"Project '{project_name}' is cloud mode - using cloud proxy client")
176176
try:
177177
async with _cloud_client(config, timeout, workspace=workspace) as client:
178178
yield client
@@ -183,13 +183,13 @@ async def get_client(
183183
) from exc
184184
return
185185

186-
logger.info(f"Project '{project_name}' is local mode - using ASGI client")
186+
logger.debug(f"Project '{project_name}' is local mode - using ASGI client")
187187
async with _asgi_client(timeout) as client:
188188
yield client
189189
return
190190

191191
# --- Default fallback ---
192-
logger.info("Default routing - using ASGI client for local Basic Memory API")
192+
logger.debug("Default routing - using ASGI client for local Basic Memory API")
193193
async with _asgi_client(timeout) as client:
194194
yield client
195195

src/basic_memory/mcp/tools/search.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -513,7 +513,7 @@ async def search_notes(
513513
"`tags`, `status`, `note_types`, `entity_types`, or `after_date`."
514514
)
515515

516-
logger.info(f"Searching for {search_query} in project {active_project.name}")
516+
logger.debug(f"Searching for {search_query} in project {active_project.name}")
517517
# Import here to avoid circular import (tools → clients → utils → tools)
518518
from basic_memory.mcp.clients import SearchClient
519519

@@ -527,7 +527,7 @@ async def search_notes(
527527

528528
# Check if we got no results and provide helpful guidance
529529
if not result.results:
530-
logger.info(
530+
logger.debug(
531531
f"Search returned no results for query: {query} in project {active_project.name}"
532532
)
533533
# Don't treat this as an error, but the user might want guidance

src/basic_memory/repository/postgres_search_repository.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -267,7 +267,7 @@ async def _ensure_vector_tables(self) -> None:
267267
if self._vector_tables_initialized:
268268
return
269269

270-
logger.info("Ensuring Postgres vector tables exist for semantic search")
270+
logger.debug("Ensuring Postgres vector tables exist for semantic search")
271271

272272
async with self._vector_tables_lock:
273273
if self._vector_tables_initialized:
@@ -358,7 +358,7 @@ async def _ensure_vector_tables(self) -> None:
358358
)
359359
await session.commit()
360360

361-
logger.info(f"Postgres vector tables ready (dimensions={self._vector_dimensions})")
361+
logger.debug(f"Postgres vector tables ready (dimensions={self._vector_dimensions})")
362362
self._vector_tables_initialized = True
363363

364364
async def _get_existing_embedding_dims(self, session: AsyncSession) -> int | None:

src/basic_memory/repository/sqlite_search_repository.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -79,7 +79,7 @@ async def init_search_index(self):
7979
across server restarts. Also creates vector tables when semantic search
8080
is enabled so missing dependencies are caught at startup, not first query.
8181
"""
82-
logger.info("Initializing SQLite FTS5 search index")
82+
logger.debug("Initializing SQLite FTS5 search index")
8383
try:
8484
async with db.scoped_session(self.session_maker) as session:
8585
# Create FTS5 virtual table if it doesn't exist
@@ -378,7 +378,7 @@ async def _ensure_vector_tables(self) -> None:
378378
if self._vector_tables_initialized:
379379
return
380380

381-
logger.info("Ensuring SQLite vector tables exist for semantic search")
381+
logger.debug("Ensuring SQLite vector tables exist for semantic search")
382382

383383
async with db.scoped_session(self.session_maker) as session:
384384
await self._ensure_sqlite_vec_loaded(session)
@@ -431,7 +431,7 @@ async def _ensure_vector_tables(self) -> None:
431431
await session.execute(create_sqlite_search_vector_embeddings(self._vector_dimensions))
432432
await session.commit()
433433

434-
logger.info(f"SQLite vector tables ready (dimensions={self._vector_dimensions})")
434+
logger.debug(f"SQLite vector tables ready (dimensions={self._vector_dimensions})")
435435
self._vector_tables_initialized = True
436436

437437
async def _prepare_vector_session(self, session: AsyncSession) -> None:

src/basic_memory/services/search_service.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -347,7 +347,7 @@ async def index_entity_data(
347347
entity: Entity,
348348
content: str | None = None,
349349
) -> None:
350-
logger.info(
350+
logger.debug(
351351
f"[BackgroundTask] Starting search index for entity_id={entity.id} "
352352
f"permalink={entity.permalink} project_id={entity.project_id}"
353353
)
@@ -360,7 +360,7 @@ async def index_entity_data(
360360
entity, content
361361
) if entity.is_markdown else await self.index_entity_file(entity)
362362

363-
logger.info(
363+
logger.debug(
364364
f"[BackgroundTask] Completed search index for entity_id={entity.id} "
365365
f"permalink={entity.permalink}"
366366
)

src/basic_memory/sync/sync_service.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -437,13 +437,13 @@ async def scan(self, directory, force_full: bool = False):
437437
elif project.last_scan_timestamp is not None:
438438
# Incremental scan: only files modified since last scan
439439
scan_type = "incremental"
440-
logger.info(
440+
logger.debug(
441441
f"Running incremental scan for files modified since {project.last_scan_timestamp}"
442442
)
443443
file_paths_to_scan = await self._scan_directory_modified_since(
444444
directory, project.last_scan_timestamp
445445
)
446-
logger.info(
446+
logger.debug(
447447
f"Incremental scan found {len(file_paths_to_scan)} potentially changed files"
448448
)
449449

@@ -705,7 +705,7 @@ async def sync_markdown_file(self, path: str, new: bool = True) -> Tuple[Optiona
705705

706706
# If permalink changed, update the file
707707
if permalink != entity_markdown.frontmatter.permalink:
708-
logger.info(
708+
logger.debug(
709709
f"Updating permalink for path: {path}, old_permalink: {entity_markdown.frontmatter.permalink}, new_permalink: {permalink}"
710710
)
711711

src/basic_memory/utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -281,7 +281,7 @@ def setup_logging(
281281
str(log_path),
282282
level=log_level,
283283
rotation="10 MB",
284-
retention="10 days",
284+
retention=5,
285285
backtrace=True,
286286
diagnose=True,
287287
enqueue=False,

0 commit comments

Comments
 (0)