diff --git a/docs/CLI Reference.md b/docs/CLI Reference.md index 20298e80c..c5580c7b2 100644 --- a/docs/CLI Reference.md +++ b/docs/CLI Reference.md @@ -29,6 +29,22 @@ Options: - `--watch`: Continuously monitor for changes - `--verbose`: Show detailed output +**Note**: + +As of the v0.12.0 release syncing will occur in real time when the mcp process starts. +- The real time sync means that it is no longer necessary to run the `basic-memory sync --watch` process in a a terminal to sync changes to the db (so the AI can see them). This will be done automatically. + +This behavior can be changed via the config. The config file for Basic Memory is in the home directory under `.basic-memory/config.json`. + +To change the properties, set the following values: +``` + ~/.basic-memory/config.json +{ + "sync_changes": false, +} +``` + +Thanks for using Basic Memory! ### import Imports external knowledge sources: diff --git a/docs/Getting Started with Basic Memory.md b/docs/Getting Started with Basic Memory.md index 194288c62..43df77323 100644 --- a/docs/Getting Started with Basic Memory.md +++ b/docs/Getting Started with Basic Memory.md @@ -89,22 +89,11 @@ Replace `/absolute/path/to/uvx` with the actual path you found in Step 1. Close and reopen Claude Desktop for the changes to take effect. -### 3. Start the Sync Service +### 3. Sync changes in real time -> Note the sync service is optional. You can run it if you want your local change to be available in Claude Desktop - -Start the sync service to monitor your files for changes: - -```bash -# One-time sync -basic-memory sync - -# For continuous monitoring (recommended) -basic-memory sync --watch -``` - -The `--watch` flag enables automatic detection of file changes, updating your knowledge in real time. +> **Note**: The service will sync changes from your project directory in real time so they available for the AI assistant. +To disable realtime sync, you can update the config. See [[CLI Reference#sync]]. ### 4. 
Staying Updated To update Basic Memory when new versions are released: @@ -145,8 +134,8 @@ If Claude cannot find Basic Memory tools: 1. **Check absolute paths**: Ensure you're using complete absolute paths to uvx in the Claude Desktop configuration 2. **Verify installation**: Run `basic-memory --version` in Terminal to confirm Basic Memory is installed 3. **Restart applications**: Restart both Terminal and Claude Desktop after making configuration changes -4. **Check sync status**: Ensure `basic-memory sync --watch` is running - +4. **Check sync status**: You can view the sync status by running `basic-memory status +. #### Permission Issues If you encounter permission errors: @@ -282,15 +271,15 @@ basic-memory import claude conversations basic-memory import chatgpt ``` -After importing, run `basic-memory sync` to index everything. +After importing, the changes will be synced. Initial syncs may take a few moments. You can see info about your project by running `basic-memrory project info`. ## Quick Tips -- Keep `basic-memory sync --watch` running in a terminal window +- Basic Memory will sync changes from your project in real time. - Use special prompts (Continue Conversation, Recent Activity, Search) to start contextual discussions - Build connections between notes for a richer knowledge graph -- Use direct memory:// URLs when you need precise context -- Use git to version control your knowledge base +- Use direct `memory://` URLs with a permalink when you need precise context. See [[User Guide#Using memory // URLs]] +- Use git to version control your knowledge base (git integration is on the roadmap) - Review and edit AI-generated notes for accuracy ## Next Steps diff --git a/docs/Knowledge Format.md b/docs/Knowledge Format.md index dbf12449c..0e8c50415 100644 --- a/docs/Knowledge Format.md +++ b/docs/Knowledge Format.md @@ -144,7 +144,19 @@ permalink: auth-approaches-2024 --- ``` -If not specified, one will be generated automatically from the title. 
+If not specified, one will be generated automatically from the title, if the note has has a frontmatter section. + +By default a notes' permalink value will not change if the file is moved. It's a **stable** identifier :). But if you'd rather permalinks are always updated when a file moves, you can set the config setting in the global config. + +The config file for Basic Memory is in the home directory under `.basic-memory/config.json`. + +To change the behavior, set the following value: +``` + ~/.basic-memory/config.json +{ + "update_permalinks_on_move": true +} +``` ### Using memory:// URLs diff --git a/src/basic_memory/api/app.py b/src/basic_memory/api/app.py index 12522da63..cb21d229e 100644 --- a/src/basic_memory/api/app.py +++ b/src/basic_memory/api/app.py @@ -1,6 +1,5 @@ """FastAPI application for basic-memory knowledge graph API.""" -import asyncio from contextlib import asynccontextmanager from fastapi import FastAPI, HTTPException @@ -10,44 +9,14 @@ from basic_memory import db from basic_memory.api.routers import knowledge, memory, project_info, resource, search from basic_memory.config import config as project_config -from basic_memory.config import config_manager -from basic_memory.sync import SyncService, WatchService - - -async def run_background_sync(sync_service: SyncService, watch_service: WatchService): # pragma: no cover - logger.info(f"Starting watch service to sync file changes in dir: {project_config.home}") - # full sync - await sync_service.sync(project_config.home, show_progress=False) - - # watch changes - await watch_service.run() +from basic_memory.services.initialization import initialize_app @asynccontextmanager async def lifespan(app: FastAPI): # pragma: no cover """Lifecycle manager for the FastAPI app.""" - await db.run_migrations(project_config) - - # app config - basic_memory_config = config_manager.load_config() - logger.info(f"Sync changes enabled: {basic_memory_config.sync_changes}") - logger.info(f"Update permalinks on move 
enabled: {basic_memory_config.update_permalinks_on_move}") - - watch_task = None - if basic_memory_config.sync_changes: - # import after migrations have run - from basic_memory.cli.commands.sync import get_sync_service - - sync_service = await get_sync_service() - watch_service = WatchService( - sync_service=sync_service, - file_service=sync_service.entity_service.file_service, - config=project_config, - ) - watch_task = asyncio.create_task(run_background_sync(sync_service, watch_service)) - else: - logger.info("Sync changes disabled. Skipping watch service.") - + # Initialize database and file sync services + watch_task = await initialize_app(project_config) # proceed with startup yield diff --git a/src/basic_memory/cli/app.py b/src/basic_memory/cli/app.py index 92a4e89bb..bb44f51f6 100644 --- a/src/basic_memory/cli/app.py +++ b/src/basic_memory/cli/app.py @@ -7,8 +7,11 @@ def version_callback(value: bool) -> None: """Show version and exit.""" if value: # pragma: no cover import basic_memory + from basic_memory.config import config typer.echo(f"Basic Memory version: {basic_memory.__version__}") + typer.echo(f"Current project: {config.project}") + typer.echo(f"Project path: {config.home}") raise typer.Exit() @@ -17,11 +20,12 @@ def version_callback(value: bool) -> None: @app.callback() def app_callback( + ctx: typer.Context, project: Optional[str] = typer.Option( None, "--project", "-p", - help="Specify which project to use", + help="Specify which project to use 1", envvar="BASIC_MEMORY_PROJECT", ), version: Optional[bool] = typer.Option( @@ -34,6 +38,7 @@ def app_callback( ), ) -> None: """Basic Memory - Local-first personal knowledge management.""" + # We use the project option to set the BASIC_MEMORY_PROJECT environment variable # The config module will pick this up when loading if project: # pragma: no cover @@ -53,6 +58,13 @@ def app_callback( config = new_config + # Run migrations for every command unless --version was specified + if not version and 
ctx.invoked_subcommand is not None: + from basic_memory.config import config + from basic_memory.services.initialization import ensure_initialize_database + + ensure_initialize_database(config) + # Register sub-command groups import_app = typer.Typer(help="Import data from various sources") diff --git a/src/basic_memory/cli/commands/mcp.py b/src/basic_memory/cli/commands/mcp.py index 6a211e138..16bea5eea 100644 --- a/src/basic_memory/cli/commands/mcp.py +++ b/src/basic_memory/cli/commands/mcp.py @@ -1,10 +1,7 @@ """MCP server command.""" -from loguru import logger - import basic_memory from basic_memory.cli.app import app -from basic_memory.config import config, config_manager # Import mcp instance from basic_memory.mcp.server import mcp as mcp_server # pragma: no cover @@ -15,19 +12,24 @@ @app.command() def mcp(): # pragma: no cover - """Run the MCP server for Claude Desktop integration.""" - home_dir = config.home - project_name = config.project + """Run the MCP server""" + from basic_memory.config import config + import asyncio + from basic_memory.services.initialization import initialize_database + + # First, run just the database migrations synchronously + asyncio.run(initialize_database(config)) + + # Load config to check if sync is enabled + from basic_memory.config import config_manager - # app config basic_memory_config = config_manager.load_config() - logger.info(f"Starting Basic Memory MCP server {basic_memory.__version__}") - logger.info(f"Project: {project_name}") - logger.info(f"Project directory: {home_dir}") - logger.info(f"Sync changes enabled: {basic_memory_config.sync_changes}") - logger.info( - f"Update permalinks on move enabled: {basic_memory_config.update_permalinks_on_move}" - ) + if basic_memory_config.sync_changes: + # For now, we'll just log that sync will be handled by the MCP server + from loguru import logger + + logger.info("File sync will be handled by the MCP server") + # Start the MCP server mcp_server.run() diff --git 
a/src/basic_memory/cli/commands/sync.py b/src/basic_memory/cli/commands/sync.py index 9741ebeb3..979b03370 100644 --- a/src/basic_memory/cli/commands/sync.py +++ b/src/basic_memory/cli/commands/sync.py @@ -179,14 +179,14 @@ async def run_sync(verbose: bool = False, watch: bool = False, console_status: b ) # full sync - no progress bars in watch mode - await sync_service.sync(config.home, show_progress=False) + await sync_service.sync(config.home) # watch changes await watch_service.run() # pragma: no cover else: - # one time sync - use progress bars for better UX + # one time sync logger.info("Running one-time sync") - knowledge_changes = await sync_service.sync(config.home, show_progress=True) + knowledge_changes = await sync_service.sync(config.home) # Log results duration_ms = int((time.time() - start_time) * 1000) @@ -237,11 +237,11 @@ def sync( if not isinstance(e, typer.Exit): logger.exception( "Sync command failed", - project=config.project, - error=str(e), - error_type=type(e).__name__, - watch_mode=watch, - directory=str(config.home), + f"project={config.project}," + f"error={str(e)}," + f"error_type={type(e).__name__}," + f"watch_mode={watch}," + f"directory={str(config.home)}", ) typer.echo(f"Error during sync: {e}", err=True) raise typer.Exit(1) diff --git a/src/basic_memory/cli/main.py b/src/basic_memory/cli/main.py index 8338cb842..120a65c4b 100644 --- a/src/basic_memory/cli/main.py +++ b/src/basic_memory/cli/main.py @@ -1,9 +1,5 @@ """Main CLI entry point for basic-memory.""" # pragma: no cover -import asyncio - -import typer - from basic_memory.cli.app import app # pragma: no cover # Register commands @@ -20,48 +16,11 @@ tool, ) from basic_memory.config import config -from basic_memory.db import run_migrations as db_run_migrations - - -# Version command -@app.callback(invoke_without_command=True) -def main( - ctx: typer.Context, - project: str = typer.Option( # noqa - "main", - "--project", - "-p", - help="Specify which project to use", - 
envvar="BASIC_MEMORY_PROJECT", - ), - version: bool = typer.Option( - False, - "--version", - "-V", - help="Show version information and exit.", - is_eager=True, - ), -): - """Basic Memory - Local-first personal knowledge management system.""" - if version: # pragma: no cover - from basic_memory import __version__ - from basic_memory.config import config - - typer.echo(f"Basic Memory v{__version__}") - typer.echo(f"Current project: {config.project}") - typer.echo(f"Project path: {config.home}") - raise typer.Exit() - - # Handle project selection via environment variable - if project: - import os - - os.environ["BASIC_MEMORY_PROJECT"] = project - +from basic_memory.services.initialization import ensure_initialization if __name__ == "__main__": # pragma: no cover - # Run database migrations - asyncio.run(db_run_migrations(config)) + # Run initialization if we are starting as a module + ensure_initialization(config) # start the app app() diff --git a/src/basic_memory/config.py b/src/basic_memory/config.py index 854b24c67..a708c69ef 100644 --- a/src/basic_memory/config.py +++ b/src/basic_memory/config.py @@ -35,7 +35,7 @@ class ProjectConfig(BaseSettings): # Watch service configuration sync_delay: int = Field( - default=500, description="Milliseconds to wait after changes before syncing", gt=0 + default=1000, description="Milliseconds to wait after changes before syncing", gt=0 ) # update permalinks on move @@ -274,7 +274,7 @@ def setup_basic_memory_logging(): # pragma: no cover console=False, ) - logger.info(f"Starting Basic Memory {basic_memory.__version__} (Project: {config.project})") + logger.info(f"Basic Memory {basic_memory.__version__} (Project: {config.project})") _LOGGING_SETUP = True diff --git a/src/basic_memory/mcp/server.py b/src/basic_memory/mcp/server.py index ecd97a177..d1c562e7d 100644 --- a/src/basic_memory/mcp/server.py +++ b/src/basic_memory/mcp/server.py @@ -1,11 +1,37 @@ """Enhanced FastMCP server instance for Basic Memory.""" +import asyncio 
+from contextlib import asynccontextmanager +from typing import AsyncIterator, Optional + from mcp.server.fastmcp import FastMCP -from mcp.server.fastmcp.utilities.logging import configure_logging +from mcp.server.fastmcp.utilities.logging import configure_logging as mcp_configure_logging +from dataclasses import dataclass + +from basic_memory.config import config as project_config +from basic_memory.services.initialization import initialize_app # mcp console logging -configure_logging(level="ERROR") +mcp_configure_logging(level="ERROR") + + +@dataclass +class AppContext: + watch_task: Optional[asyncio.Task] + + +@asynccontextmanager +async def app_lifespan(server: FastMCP) -> AsyncIterator[AppContext]: # pragma: no cover + """Manage application lifecycle with type-safe context""" + # Initialize on startup + watch_task = await initialize_app(project_config) + try: + yield AppContext(watch_task=watch_task) + finally: + # Cleanup on shutdown + if watch_task: + watch_task.cancel() # Create the shared server instance -mcp = FastMCP("Basic Memory", log_level="ERROR") +mcp = FastMCP("Basic Memory", log_level="ERROR", lifespan=app_lifespan) diff --git a/src/basic_memory/repository/repository.py b/src/basic_memory/repository/repository.py index 0e2c421cf..fbc3044f1 100644 --- a/src/basic_memory/repository/repository.py +++ b/src/basic_memory/repository/repository.py @@ -137,8 +137,6 @@ async def find_by_ids(self, ids: List[int]) -> Sequence[T]: async def find_one(self, query: Select[tuple[T]]) -> Optional[T]: """Execute a query and retrieve a single record.""" - logger.debug(f"Finding one {self.Model.__name__} with query: {query}") - # add in load options query = query.options(*self.get_load_options()) result = await self.execute_query(query) @@ -270,11 +268,9 @@ async def execute_query(self, query: Executable, use_query_options: bool = True) """Execute a query asynchronously.""" query = query.options(*self.get_load_options()) if use_query_options else query - 
logger.debug(f"Executing query: {query}") async with db.scoped_session(self.session_maker) as session: result = await session.execute(query) - logger.debug("Query executed successfully") return result def get_load_options(self) -> List[LoaderOption]: diff --git a/src/basic_memory/services/file_service.py b/src/basic_memory/services/file_service.py index 63870f01a..e691331d4 100644 --- a/src/basic_memory/services/file_service.py +++ b/src/basic_memory/services/file_service.py @@ -60,7 +60,7 @@ async def read_entity_content(self, entity: EntityModel) -> str: Returns: Raw content string without metadata sections """ - logger.debug("Reading entity content", entity_id=entity.id, permalink=entity.permalink) + logger.debug(f"Reading entity content, entity_id={entity.id}, permalink={entity.permalink}") file_path = self.get_entity_path(entity) markdown = await self.markdown_processor.read_file(file_path) diff --git a/src/basic_memory/services/initialization.py b/src/basic_memory/services/initialization.py new file mode 100644 index 000000000..3e6d1783a --- /dev/null +++ b/src/basic_memory/services/initialization.py @@ -0,0 +1,143 @@ +"""Shared initialization service for Basic Memory. + +This module provides shared initialization functions used by both CLI and API +to ensure consistent application startup across all entry points. +""" + +import asyncio +from typing import Optional + +from loguru import logger + +from basic_memory import db +from basic_memory.config import ProjectConfig, config_manager +from basic_memory.sync import WatchService + +# Import this inside functions to avoid circular imports +# from basic_memory.cli.commands.sync import get_sync_service + + +async def initialize_database(app_config: ProjectConfig) -> None: + """Run database migrations to ensure schema is up to date. 
+ + Args: + app_config: The Basic Memory project configuration + """ + try: + logger.info("Running database migrations...") + await db.run_migrations(app_config) + logger.info("Migrations completed successfully") + except Exception as e: + logger.error(f"Error running migrations: {e}") + # Allow application to continue - it might still work + # depending on what the error was, and will fail with a + # more specific error if the database is actually unusable + + +async def initialize_file_sync( + app_config: ProjectConfig, +) -> asyncio.Task: + """Initialize file synchronization services. + + Args: + app_config: The Basic Memory project configuration + + Returns: + Tuple of (sync_service, watch_service, watch_task) if sync is enabled, + or (None, None, None) if sync is disabled + """ + # Load app configuration + # Import here to avoid circular imports + from basic_memory.cli.commands.sync import get_sync_service + + # Initialize sync service + sync_service = await get_sync_service() + + # Initialize watch service + watch_service = WatchService( + sync_service=sync_service, + file_service=sync_service.entity_service.file_service, + config=app_config, + quiet=True, + ) + + # Create the background task for running sync + async def run_background_sync(): # pragma: no cover + # Run initial full sync + await sync_service.sync(app_config.home) + logger.info("Sync completed successfully") + + # Start background sync task + logger.info(f"Starting watch service to sync file changes in dir: {app_config.home}") + + # Start watching for changes + await watch_service.run() + + watch_task = asyncio.create_task(run_background_sync()) + logger.info("Watch service started") + return watch_task + + +async def initialize_app( + app_config: ProjectConfig, +) -> Optional[asyncio.Task]: + """Initialize the Basic Memory application. + + This function handles all initialization steps needed for both API and shor lived CLI commands. 
+ For long running commands like mcp, it also starts the background file sync task: + - Running database migrations
+ + Args: + app_config: The Basic Memory project configuration + """ + try: + asyncio.run(initialize_database(app_config)) + except Exception as e: + logger.error(f"Error during initialization: {e}") + # Continue execution even if initialization fails + # The command might still work, or will fail with a + # more specific error message diff --git a/src/basic_memory/sync/sync_service.py b/src/basic_memory/sync/sync_service.py index dbb9f3e59..4a0e4da84 100644 --- a/src/basic_memory/sync/sync_service.py +++ b/src/basic_memory/sync/sync_service.py @@ -19,7 +19,6 @@ from basic_memory.services import EntityService, FileService from basic_memory.services.search_service import SearchService import time -from rich.progress import Progress, TextColumn, BarColumn, TaskProgressColumn @dataclass @@ -83,145 +82,51 @@ def __init__( self.search_service = search_service self.file_service = file_service - async def sync(self, directory: Path, show_progress: bool = True) -> SyncReport: + async def sync(self, directory: Path) -> SyncReport: """Sync all files with database.""" start_time = time.time() - console = None - progress = None # Will be initialized if show_progress is True - - logger.info("Sync operation started", directory=str(directory)) + logger.info(f"Sync operation started for directory: {directory}") # initial paths from db to sync # path -> checksum - if show_progress: - from rich.console import Console - - console = Console() - console.print(f"Scanning directory: {directory}") - report = await self.scan(directory) # Initialize progress tracking if requested - if show_progress and report.total > 0: - progress = Progress( - TextColumn("[bold blue]{task.description}"), - BarColumn(), - TaskProgressColumn(), - console=console, - expand=True, - ) - # order of sync matters to resolve relations effectively logger.info( - "Sync changes detected", - new_files=len(report.new), - modified_files=len(report.modified), - deleted_files=len(report.deleted), - 
moved_files=len(report.moves), + f"Sync changes detected: new_files={len(report.new)}, modified_files={len(report.modified)}, " + + f"deleted_files={len(report.deleted)}, moved_files={len(report.moves)}" ) - if show_progress and report.total > 0: - with progress: # pyright: ignore - # Track each category separately - move_task = None - if report.moves: # pragma: no cover - move_task = progress.add_task("[blue]Moving files...", total=len(report.moves)) # pyright: ignore - - delete_task = None - if report.deleted: # pragma: no cover - delete_task = progress.add_task( # pyright: ignore - "[red]Deleting files...", total=len(report.deleted) - ) - - new_task = None - if report.new: - new_task = progress.add_task( # pyright: ignore - "[green]Adding new files...", total=len(report.new) - ) - - modify_task = None - if report.modified: # pragma: no cover - modify_task = progress.add_task( # pyright: ignore - "[yellow]Updating modified files...", total=len(report.modified) + # sync moves first + for old_path, new_path in report.moves.items(): + # in the case where a file has been deleted and replaced by another file + # it will show up in the move and modified lists, so handle it in modified + if new_path in report.modified: + report.modified.remove(new_path) + logger.debug( + f"File marked as moved and modified: old_path={old_path}, new_path={new_path}" ) + else: + await self.handle_move(old_path, new_path) - # sync moves first - for i, (old_path, new_path) in enumerate(report.moves.items()): - # in the case where a file has been deleted and replaced by another file - # it will show up in the move and modified lists, so handle it in modified - if new_path in report.modified: # pragma: no cover - report.modified.remove(new_path) - logger.debug( - "File marked as moved and modified", - old_path=old_path, - new_path=new_path, - action="processing as modified", - ) - else: # pragma: no cover - await self.handle_move(old_path, new_path) - - if move_task is not None: # pragma: no 
cover - progress.update(move_task, advance=1) # pyright: ignore - - # deleted next - for i, path in enumerate(report.deleted): # pragma: no cover - await self.handle_delete(path) - if delete_task is not None: # pragma: no cover - progress.update(delete_task, advance=1) # pyright: ignore - - # then new and modified - for i, path in enumerate(report.new): - await self.sync_file(path, new=True) - if new_task is not None: - progress.update(new_task, advance=1) # pyright: ignore - - for i, path in enumerate(report.modified): # pragma: no cover - await self.sync_file(path, new=False) - if modify_task is not None: # pragma: no cover - progress.update(modify_task, advance=1) # pyright: ignore - - # Final step - resolving relations - if report.total > 0: - relation_task = progress.add_task("[cyan]Resolving relations...", total=1) # pyright: ignore - await self.resolve_relations() - progress.update(relation_task, advance=1) # pyright: ignore - else: - # No progress display - proceed with normal sync - # sync moves first - for old_path, new_path in report.moves.items(): - # in the case where a file has been deleted and replaced by another file - # it will show up in the move and modified lists, so handle it in modified - if new_path in report.modified: - report.modified.remove(new_path) - logger.debug( - "File marked as moved and modified", - old_path=old_path, - new_path=new_path, - action="processing as modified", - ) - else: - await self.handle_move(old_path, new_path) - - # deleted next - for path in report.deleted: - await self.handle_delete(path) + # deleted next + for path in report.deleted: + await self.handle_delete(path) - # then new and modified - for path in report.new: - await self.sync_file(path, new=True) + # then new and modified + for path in report.new: + await self.sync_file(path, new=True) - for path in report.modified: - await self.sync_file(path, new=False) + for path in report.modified: + await self.sync_file(path, new=False) - await 
self.resolve_relations() + await self.resolve_relations() duration_ms = int((time.time() - start_time) * 1000) logger.info( - "Sync operation completed", - directory=str(directory), - total_changes=report.total, - duration_ms=duration_ms, + f"Sync operation completed: directory={directory}, total_changes={report.total}, duration_ms={duration_ms}" ) return report @@ -230,6 +135,7 @@ async def scan(self, directory): """Scan directory for changes compared to database state.""" db_paths = await self.get_db_file_state() + logger.debug(f"Found {len(db_paths)} db paths") # Track potentially moved files by checksum scan_result = await self.scan_directory(directory) @@ -280,6 +186,7 @@ async def get_db_file_state(self) -> Dict[str, str]: :param db_records: the data from the db """ db_records = await self.entity_repository.find_all() + logger.info(f"Found {len(db_records)} db records") return {r.file_path: r.checksum or "" for r in db_records} async def sync_file( @@ -296,10 +203,7 @@ async def sync_file( """ try: logger.debug( - "Syncing file", - path=path, - is_new=new, - is_markdown=self.file_service.is_markdown(path), + f"Syncing file path={path} is_new={new} is_markdown={self.file_service.is_markdown(path)}" ) if self.file_service.is_markdown(path): @@ -311,7 +215,7 @@ async def sync_file( await self.search_service.index_entity(entity) logger.debug( - "File sync completed", path=path, entity_id=entity.id, checksum=checksum + f"File sync completed, path={path}, entity_id={entity.id}, checksum={checksum[:8]}" ) return entity, checksum @@ -330,7 +234,7 @@ async def sync_markdown_file(self, path: str, new: bool = True) -> Tuple[Optiona Tuple of (entity, checksum) """ # Parse markdown first to get any existing permalink - logger.debug("Parsing markdown file", path=path) + logger.debug(f"Parsing markdown file, path: {path}, new: {new}") file_path = self.entity_parser.base_path / path file_content = file_path.read_text() @@ -347,10 +251,7 @@ async def sync_markdown_file(self, 
path: str, new: bool = True) -> Tuple[Optiona # If permalink changed, update the file if permalink != entity_markdown.frontmatter.permalink: logger.info( - "Updating permalink", - path=path, - old_permalink=entity_markdown.frontmatter.permalink, - new_permalink=permalink, + f"Updating permalink for path: {path}, old_permalink: {entity_markdown.frontmatter.permalink}, new_permalink: {permalink}" ) entity_markdown.frontmatter.metadata["permalink"] = permalink @@ -359,12 +260,12 @@ async def sync_markdown_file(self, path: str, new: bool = True) -> Tuple[Optiona # if the file is new, create an entity if new: # Create entity with final permalink - logger.debug("Creating new entity from markdown", path=path) + logger.debug(f"Creating new entity from markdown, path={path}") await self.entity_service.create_entity_from_markdown(Path(path), entity_markdown) # otherwise we need to update the entity and observations else: - logger.debug("Updating entity from markdown", path=path) + logger.debug(f"Updating entity from markdown, path={path}") await self.entity_service.update_entity_and_observations(Path(path), entity_markdown) # Update relations and search index @@ -379,12 +280,9 @@ async def sync_markdown_file(self, path: str, new: bool = True) -> Tuple[Optiona await self.entity_repository.update(entity.id, {"checksum": final_checksum}) logger.debug( - "Markdown sync completed", - path=path, - entity_id=entity.id, - observation_count=len(entity.observations), - relation_count=len(entity.relations), - checksum=final_checksum, + f"Markdown sync completed: path={path}, entity_id={entity.id}, " + f"observation_count={len(entity.observations)}, relation_count={len(entity.relations)}, " + f"checksum={final_checksum[:8]}" ) # Return the final checksum to ensure everything is consistent @@ -429,7 +327,7 @@ async def sync_regular_file(self, path: str, new: bool = True) -> Tuple[Optional else: entity = await self.entity_repository.get_by_file_path(path) if entity is None: # pragma: no 
cover - logger.error("Entity not found for existing file", path=path) + logger.error(f"Entity not found for existing file, path={path}") raise ValueError(f"Entity not found for existing file: {path}") updated = await self.entity_repository.update( @@ -437,7 +335,7 @@ async def sync_regular_file(self, path: str, new: bool = True) -> Tuple[Optional ) if updated is None: # pragma: no cover - logger.error("Failed to update entity", entity_id=entity.id, path=path) + logger.error(f"Failed to update entity, entity_id={entity.id}, path={path}") raise ValueError(f"Failed to update entity with ID {entity.id}") return updated, checksum @@ -449,10 +347,7 @@ async def handle_delete(self, file_path: str): entity = await self.entity_repository.get_by_file_path(file_path) if entity: logger.info( - "Deleting entity", - file_path=file_path, - entity_id=entity.id, - permalink=entity.permalink, + f"Deleting entity with file_path={file_path}, entity_id={entity.id}, permalink={entity.permalink}" ) # Delete from db (this cascades to observations/relations) @@ -466,10 +361,8 @@ async def handle_delete(self, file_path: str): ) logger.debug( - "Cleaning up search index", - entity_id=entity.id, - file_path=file_path, - index_entries=len(permalinks), + f"Cleaning up search index for entity_id={entity.id}, file_path={file_path}, " + f"index_entries={len(permalinks)}" ) for permalink in permalinks: @@ -479,7 +372,7 @@ async def handle_delete(self, file_path: str): await self.search_service.delete_by_entity_id(entity.id) async def handle_move(self, old_path, new_path): - logger.info("Moving entity", old_path=old_path, new_path=new_path) + logger.debug("Moving entity", old_path=old_path, new_path=new_path) entity = await self.entity_repository.get_by_file_path(old_path) if entity: @@ -500,29 +393,28 @@ async def handle_move(self, old_path, new_path): updates["checksum"] = new_checksum logger.info( - "Updating permalink on move", - old_permalink=entity.permalink, - new_permalink=new_permalink, - 
new_checksum=new_checksum, + f"Updating permalink on move, old_permalink={entity.permalink}, " + f"new_permalink={new_permalink}, " + f"new_checksum={new_checksum}" ) updated = await self.entity_repository.update(entity.id, updates) if updated is None: # pragma: no cover logger.error( - "Failed to update entity path", - entity_id=entity.id, - old_path=old_path, - new_path=new_path, + "Failed to update entity path, " + f"entity_id={entity.id}, " + f"old_path={old_path}, " + f"new_path={new_path}" ) raise ValueError(f"Failed to update entity path for ID {entity.id}") logger.debug( - "Entity path updated", - entity_id=entity.id, - permalink=entity.permalink, - old_path=old_path, - new_path=new_path, + "Entity path updated " + f"entity_id={entity.id} " + f"permalink={entity.permalink} " + f"old_path={old_path} " + f"new_path={new_path} " ) # update search index @@ -537,10 +429,10 @@ async def resolve_relations(self): for relation in unresolved_relations: logger.debug( - "Attempting to resolve relation", - relation_id=relation.id, - from_id=relation.from_id, - to_name=relation.to_name, + "Attempting to resolve relation " + f"relation_id={relation.id} " + f"from_id={relation.from_id} " + f"to_name={relation.to_name}" ) resolved_entity = await self.entity_service.link_resolver.resolve_link(relation.to_name) @@ -548,12 +440,12 @@ async def resolve_relations(self): # ignore reference to self if resolved_entity and resolved_entity.id != relation.from_id: logger.debug( - "Resolved forward reference", - relation_id=relation.id, - from_id=relation.from_id, - to_name=relation.to_name, - resolved_id=resolved_entity.id, - resolved_title=resolved_entity.title, + "Resolved forward reference " + f"relation_id={relation.id} " + f"from_id={relation.from_id} " + f"to_name={relation.to_name} " + f"resolved_id={resolved_entity.id} " + f"resolved_title={resolved_entity.title}", ) try: await self.relation_repository.update( @@ -565,10 +457,10 @@ async def resolve_relations(self): ) except
IntegrityError: # pragma: no cover logger.debug( - "Ignoring duplicate relation", - relation_id=relation.id, - from_id=relation.from_id, - to_name=relation.to_name, + "Ignoring duplicate relation " + f"relation_id={relation.id} " + f"from_id={relation.from_id} " + f"to_name={relation.to_name}" ) # update search index @@ -586,7 +478,7 @@ async def scan_directory(self, directory: Path) -> ScanResult: """ start_time = time.time() - logger.debug("Scanning directory", directory=str(directory)) + logger.debug(f"Scanning directory {directory}") result = ScanResult() for root, dirnames, filenames in os.walk(str(directory)): @@ -608,10 +500,10 @@ async def scan_directory(self, directory: Path) -> ScanResult: duration_ms = int((time.time() - start_time) * 1000) logger.debug( - "Directory scan completed", - directory=str(directory), - files_found=len(result.files), - duration_ms=duration_ms, + f"{directory} scan completed " + f"directory={str(directory)} " + f"files_found={len(result.files)} " + f"duration_ms={duration_ms}" ) return result diff --git a/src/basic_memory/sync/watch_service.py b/src/basic_memory/sync/watch_service.py index 31bf099be..238e2b837 100644 --- a/src/basic_memory/sync/watch_service.py +++ b/src/basic_memory/sync/watch_service.py @@ -70,22 +70,30 @@ def record_error(self, error: str): class WatchService: - def __init__(self, sync_service: SyncService, file_service: FileService, config: ProjectConfig): + def __init__( + self, + sync_service: SyncService, + file_service: FileService, + config: ProjectConfig, + quiet: bool = False, + ): self.sync_service = sync_service self.file_service = file_service self.config = config self.state = WatchServiceState() self.status_path = config.home / ".basic-memory" / WATCH_STATUS_JSON self.status_path.parent.mkdir(parents=True, exist_ok=True) - self.console = Console() + + # quiet mode for mcp so it doesn't mess up stdout + self.console = Console(quiet=quiet) async def run(self): # pragma: no cover """Watch for file 
changes and sync them""" logger.info( "Watch service started", - directory=str(self.config.home), - debounce_ms=self.config.sync_delay, - pid=os.getpid(), + f"directory={str(self.config.home)}", + f"debounce_ms={self.config.sync_delay}", + f"pid={os.getpid()}", ) self.state.running = True @@ -111,8 +119,8 @@ async def run(self): # pragma: no cover finally: logger.info( "Watch service stopped", - directory=str(self.config.home), - runtime_seconds=int((datetime.now() - self.state.start_time).total_seconds()), + f"directory={str(self.config.home)}", + f"runtime_seconds={int((datetime.now() - self.state.start_time).total_seconds())}", ) self.state.running = False @@ -154,7 +162,7 @@ async def handle_changes(self, directory: Path, changes: Set[FileChange]): start_time = time.time() - logger.info("Processing file changes", change_count=len(changes), directory=str(directory)) + logger.info(f"Processing file changes, change_count={len(changes)}, directory={directory}") # Group changes by type adds: List[str] = [] @@ -177,9 +185,17 @@ async def handle_changes(self, directory: Path, changes: Set[FileChange]): modifies.append(relative_path) logger.debug( - "Grouped file changes", added=len(adds), deleted=len(deletes), modified=len(modifies) + f"Grouped file changes, added={len(adds)}, deleted={len(deletes)}, modified={len(modifies)}" ) + # because of our atomic writes on updates, an add may be an existing file + for added_path in list(adds): # pragma: no cover TODO add test + entity = await self.sync_service.entity_repository.get_by_file_path(added_path) + if entity is not None: + logger.debug(f"Existing file will be processed as modified, path={added_path}") + adds.remove(added_path) + modifies.append(added_path) + # Track processed files to avoid duplicates processed: Set[str] = set() @@ -223,15 +239,16 @@ async def handle_changes(self, directory: Path, changes: Set[FileChange]): status="success", ) self.console.print(f"[blue]→[/blue] {deleted_path} → {added_path}") +
logger.info(f"move: {deleted_path} -> {added_path}") processed.add(added_path) processed.add(deleted_path) break except Exception as e: # pragma: no cover logger.warning( "Error checking for move", - old_path=deleted_path, - new_path=added_path, - error=str(e), + f"old_path={deleted_path}", + f"new_path={added_path}", + f"error={str(e)}", ) # Handle remaining changes - group them by type for concise output @@ -247,6 +264,7 @@ async def handle_changes(self, directory: Path, changes: Set[FileChange]): await self.sync_service.handle_delete(path) self.state.add_event(path=path, action="deleted", status="success") self.console.print(f"[red]✕[/red] {path}") + logger.info(f"deleted: {path}") processed.add(path) delete_count += 1 @@ -257,28 +275,27 @@ async def handle_changes(self, directory: Path, changes: Set[FileChange]): full_path = directory / path if not full_path.exists() or full_path.is_dir(): logger.debug( - "Skipping non-existent or directory path", path=path + f"Skipping non-existent or directory path, path={path}" ) # pragma: no cover processed.add(path) # pragma: no cover continue # pragma: no cover - logger.debug("Processing new file", path=path) + logger.debug(f"Processing new file, path={path}") entity, checksum = await self.sync_service.sync_file(path, new=True) if checksum: self.state.add_event( path=path, action="new", status="success", checksum=checksum ) self.console.print(f"[green]✓[/green] {path}") - logger.debug( - "Added file processed", - path=path, - entity_id=entity.id if entity else None, - checksum=checksum, + logger.info( + "new file processed", + f"path={path}", + f"checksum={checksum}", ) processed.add(path) add_count += 1 else: # pragma: no cover - logger.warning("Error syncing new file", path=path) # pragma: no cover + logger.warning(f"Error syncing new file, path={path}") # pragma: no cover self.console.print( f"[orange]?[/orange] Error syncing: {path}" ) # pragma: no cover @@ -296,7 +313,7 @@ async def handle_changes(self, directory: 
Path, changes: Set[FileChange]): processed.add(path) continue - logger.debug("Processing modified file", path=path) + logger.debug(f"Processing modified file: path={path}") entity, checksum = await self.sync_service.sync_file(path, new=False) self.state.add_event( path=path, action="modified", status="success", checksum=checksum @@ -311,17 +328,18 @@ async def handle_changes(self, directory: Path, changes: Set[FileChange]): f"[yellow]...[/yellow] Repeated changes to {path}" ) # pragma: no cover else: - # New file being modified + # haven't processed this file self.console.print(f"[yellow]✎[/yellow] {path}") + logger.info(f"modified: {path}") last_modified_path = path repeat_count = 0 modify_count += 1 logger.debug( - "Modified file processed", - path=path, - entity_id=entity.id if entity else None, - checksum=checksum, + "Modified file processed, " + f"path={path} " + f"entity_id={entity.id if entity else None} " + f"checksum={checksum}", ) processed.add(path) @@ -339,16 +357,17 @@ async def handle_changes(self, directory: Path, changes: Set[FileChange]): if changes: self.console.print(f"{', '.join(changes)}", style="dim") # pyright: ignore + logger.info(f"changes: {len(changes)}") duration_ms = int((time.time() - start_time) * 1000) self.state.last_scan = datetime.now() self.state.synced_files += len(processed) logger.info( - "File change processing completed", - processed_files=len(processed), - total_synced_files=self.state.synced_files, - duration_ms=duration_ms, + "File change processing completed, " + f"processed_files={len(processed)}, " + f"total_synced_files={self.state.synced_files}, " + f"duration_ms={duration_ms}" ) await self.write_status() diff --git a/tests/cli/test_cli_tools.py b/tests/cli/test_cli_tools.py index e57a03343..9e699b4d7 100644 --- a/tests/cli/test_cli_tools.py +++ b/tests/cli/test_cli_tools.py @@ -3,6 +3,8 @@ These tests use real MCP tools with the test environment instead of mocks. 
""" +# Import for testing + import io from datetime import datetime, timedelta import json @@ -410,3 +412,29 @@ def test_continue_conversation_no_results(cli_env): # Check result contains expected content for no results assert "Continuing conversation on: NonexistentTopic" in result.stdout assert "The supplied query did not return any information" in result.stdout + + +@patch("basic_memory.services.initialization.initialize_database") +def test_ensure_migrations_functionality(mock_initialize_database, test_config, monkeypatch): + """Test the database initialization functionality.""" + from basic_memory.services.initialization import ensure_initialization + + # Call the function + ensure_initialization(test_config) + + # The underlying asyncio.run should call our mocked function + mock_initialize_database.assert_called_once() + + +@patch("basic_memory.services.initialization.initialize_database") +def test_ensure_migrations_handles_errors(mock_initialize_database, test_config, monkeypatch): + """Test that initialization handles errors gracefully.""" + from basic_memory.services.initialization import ensure_initialization + + # Configure mock to raise an exception + mock_initialize_database.side_effect = Exception("Test error") + + # Call the function - should not raise exception + ensure_initialization(test_config) + + # We're just making sure it doesn't crash by calling it diff --git a/tests/conftest.py b/tests/conftest.py index f0668038c..7f589a72f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -351,5 +351,5 @@ def test_files(test_config) -> dict[str, Path]: @pytest_asyncio.fixture async def synced_files(sync_service, test_config, test_files): # Initial sync - should create forward reference - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) return test_files diff --git a/tests/services/test_initialization.py b/tests/services/test_initialization.py new file mode 100644 index 000000000..de6066558 
--- /dev/null +++ b/tests/services/test_initialization.py @@ -0,0 +1,49 @@ +"""Tests for the initialization service.""" + +from unittest.mock import patch + +import pytest + +from basic_memory.services.initialization import ( + ensure_initialization, + initialize_app, + initialize_database, +) + + +@pytest.mark.asyncio +@patch("basic_memory.services.initialization.db.run_migrations") +async def test_initialize_database(mock_run_migrations, test_config): + """Test initializing the database.""" + await initialize_database(test_config) + mock_run_migrations.assert_called_once_with(test_config) + + +@pytest.mark.asyncio +@patch("basic_memory.services.initialization.db.run_migrations") +async def test_initialize_database_error(mock_run_migrations, test_config): + """Test handling errors during database initialization.""" + mock_run_migrations.side_effect = Exception("Test error") + await initialize_database(test_config) + mock_run_migrations.assert_called_once_with(test_config) + + +@pytest.mark.asyncio +@patch("basic_memory.services.initialization.initialize_database") +@patch("basic_memory.services.initialization.initialize_file_sync") +async def test_initialize_app(mock_initialize_file_sync, mock_initialize_database, test_config): + """Test app initialization.""" + mock_initialize_file_sync.return_value = "task" + + result = await initialize_app(test_config) + + mock_initialize_database.assert_called_once_with(test_config) + mock_initialize_file_sync.assert_called_once_with(test_config) + assert result == "task" + + +@patch("basic_memory.services.initialization.asyncio.run") +def test_ensure_initialization(mock_run, test_config): + """Test synchronous initialization wrapper.""" + ensure_initialization(test_config) + mock_run.assert_called_once() diff --git a/tests/sync/test_sync_service.py b/tests/sync/test_sync_service.py index dd2463a2e..43fbf7aed 100644 --- a/tests/sync/test_sync_service.py +++ b/tests/sync/test_sync_service.py @@ -46,7 +46,7 @@ async def 
test_forward_reference_resolution( await create_test_file(project_dir / "source.md", source_content) # Initial sync - should create forward reference - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Verify forward reference source = await entity_service.get_by_permalink("source") @@ -65,7 +65,7 @@ async def test_forward_reference_resolution( await create_test_file(project_dir / "target_doc.md", target_content) # Sync again - should resolve the reference - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Verify reference is now resolved source = await entity_service.get_by_permalink("source") @@ -118,7 +118,7 @@ async def test_sync( await entity_service.repository.add(other) # Run sync - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Verify results entities = await entity_service.repository.find_all() @@ -148,7 +148,7 @@ async def test_sync_hidden_file( await create_test_file(project_dir / "concept/.hidden.md", "hidden") # Run sync - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Verify results entities = await entity_service.repository.find_all() @@ -182,7 +182,7 @@ async def test_sync_entity_with_nonexistent_relations( await create_test_file(project_dir / "concept/depends_on_future.md", content) # Sync - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Verify entity created but no relations entity = await sync_service.entity_service.repository.get_by_permalink( @@ -238,7 +238,7 @@ async def test_sync_entity_circular_relations( await create_test_file(project_dir / "concept/entity_b.md", content_b) # Sync - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Verify both entities and their relations entity_a = 
await sync_service.entity_service.repository.get_by_permalink("concept/entity-a") @@ -309,7 +309,7 @@ async def test_sync_entity_duplicate_relations( await create_test_file(project_dir / "concept/duplicate_relations.md", content) # Sync - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Verify duplicates are handled entity = await sync_service.entity_service.repository.get_by_permalink( @@ -351,7 +351,7 @@ async def test_sync_entity_with_random_categories( await create_test_file(project_dir / "concept/invalid_category.md", content) # Sync - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Verify observations entity = await sync_service.entity_service.repository.get_by_permalink( @@ -431,7 +431,7 @@ async def test_sync_entity_with_order_dependent_relations( await create_test_file(project_dir / f"concept/entity_{name}.md", content) # Sync - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Verify all relations are created correctly regardless of order entity_a = await sync_service.entity_service.repository.get_by_permalink("concept/entity-a") @@ -451,7 +451,7 @@ async def test_sync_entity_with_order_dependent_relations( @pytest.mark.asyncio async def test_sync_empty_directories(sync_service: SyncService, test_config: ProjectConfig): """Test syncing empty directories.""" - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Should not raise exceptions for empty dirs assert (test_config.home).exists() @@ -486,7 +486,7 @@ async def modify_file(): doc_path.write_text("Modified during sync") # Run sync and modification concurrently - await asyncio.gather(sync_service.sync(test_config.home, show_progress=False), modify_file()) + await asyncio.gather(sync_service.sync(test_config.home), modify_file()) # Verify final state doc = await 
sync_service.entity_service.repository.get_by_permalink("changing") @@ -494,7 +494,7 @@ async def modify_file(): # if we failed in the middle of a sync, the next one should fix it. if doc.checksum is None: - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) doc = await sync_service.entity_service.repository.get_by_permalink("changing") assert doc.checksum is not None @@ -530,7 +530,7 @@ async def test_permalink_formatting( await create_test_file(test_config.home / filename, content) # Run sync - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Verify permalinks entities = await entity_service.repository.find_all() @@ -601,7 +601,7 @@ async def test_sync_preserves_timestamps( await create_test_file(file_path, file_dates_content) # Run sync - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Check explicit frontmatter dates explicit_entity = await entity_service.get_by_permalink("explicit-dates") @@ -641,7 +641,7 @@ async def test_file_move_updates_search_index( await create_test_file(old_path, content) # Initial sync - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Move the file new_path = project_dir / "new" / "moved_file.md" @@ -649,7 +649,7 @@ async def test_file_move_updates_search_index( old_path.rename(new_path) # Sync again - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Check search index has updated path results = await search_service.search(SearchQuery(text="Content for move test")) @@ -691,7 +691,7 @@ async def test_sync_null_checksum_cleanup( await create_test_file(test_config.home / "concept/incomplete.md", content) # Run sync - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Verify entity was 
properly synced updated = await entity_service.get_by_permalink("concept/incomplete") @@ -720,7 +720,7 @@ async def test_sync_permalink_resolved( await create_test_file(old_path, content) # Initial sync - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Move the file new_path = project_dir / "new" / "moved_file.md" @@ -728,7 +728,7 @@ async def test_sync_permalink_resolved( old_path.rename(new_path) # Sync again - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) file_content, _ = await file_service.read_file(new_path) assert "permalink: old/test-move" in file_content @@ -747,7 +747,7 @@ async def test_sync_permalink_resolved( await create_test_file(old_path, content) # Sync new file - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # assert permalink is unique file_content, _ = await file_service.read_file(old_path) @@ -789,7 +789,7 @@ async def test_sync_permalink_resolved_on_update( ) # Run sync - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Check permalinks file_one_content, _ = await file_service.read_file(one_file) @@ -812,7 +812,7 @@ async def test_sync_permalink_resolved_on_update( two_file.write_text(updated_content) # Run sync - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Check permalinks file_two_content, _ = await file_service.read_file(two_file) @@ -833,7 +833,7 @@ async def test_sync_permalink_resolved_on_update( await create_test_file(new_file, new_content) # Run another time - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Should have deduplicated permalink new_file_content, _ = await file_service.read_file(new_file) @@ -853,7 +853,7 @@ async def 
test_sync_permalink_not_created_if_no_frontmatter( await create_test_file(file) # Run sync - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Check permalink not created file_content, _ = await file_service.read_file(file) @@ -900,7 +900,7 @@ async def test_sync_permalink_updated_on_move( await create_test_file(old_path, content) # Initial sync - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # verify permalink old_content, _ = await file_service.read_file(old_path) @@ -912,7 +912,7 @@ async def test_sync_permalink_updated_on_move( old_path.rename(new_path) # Sync again - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) file_content, _ = await file_service.read_file(new_path) assert "permalink: new/moved-file" in file_content @@ -921,7 +921,7 @@ async def test_sync_permalink_updated_on_move( @pytest.mark.asyncio async def test_sync_non_markdown_files(sync_service, test_config, test_files): """Test syncing non-markdown files.""" - report = await sync_service.sync(test_config.home, show_progress=False) + report = await sync_service.sync(test_config.home) assert report.total == 2 # Check files were detected @@ -944,7 +944,7 @@ async def test_sync_non_markdown_files_modified( sync_service, test_config, test_files, file_service ): """Test syncing non-markdown files.""" - report = await sync_service.sync(test_config.home, show_progress=False) + report = await sync_service.sync(test_config.home) assert report.total == 2 # Check files were detected @@ -954,7 +954,7 @@ async def test_sync_non_markdown_files_modified( test_files["pdf"].write_text("New content") test_files["image"].write_text("New content") - report = await sync_service.sync(test_config.home, show_progress=False) + report = await sync_service.sync(test_config.home) assert len(report.modified) == 2 pdf_file_content, pdf_checksum = 
await file_service.read_file(test_files["pdf"].name) @@ -972,7 +972,7 @@ async def test_sync_non_markdown_files_modified( @pytest.mark.asyncio async def test_sync_non_markdown_files_move(sync_service, test_config, test_files): """Test syncing non-markdown files updates permalink""" - report = await sync_service.sync(test_config.home, show_progress=False) + report = await sync_service.sync(test_config.home) assert report.total == 2 # Check files were detected @@ -980,7 +980,7 @@ async def test_sync_non_markdown_files_move(sync_service, test_config, test_file assert test_files["image"].name in [f for f in report.new] test_files["pdf"].rename(test_config.home / "moved_pdf.pdf") - report2 = await sync_service.sync(test_config.home, show_progress=False) + report2 = await sync_service.sync(test_config.home) assert len(report2.moves) == 1 # Verify entity is updated @@ -992,7 +992,7 @@ async def test_sync_non_markdown_files_move(sync_service, test_config, test_file @pytest.mark.asyncio async def test_sync_non_markdown_files_deleted(sync_service, test_config, test_files): """Test syncing non-markdown files updates permalink""" - report = await sync_service.sync(test_config.home, show_progress=False) + report = await sync_service.sync(test_config.home) assert report.total == 2 # Check files were detected @@ -1000,7 +1000,7 @@ async def test_sync_non_markdown_files_deleted(sync_service, test_config, test_f assert test_files["image"].name in [f for f in report.new] test_files["pdf"].unlink() - report2 = await sync_service.sync(test_config.home, show_progress=False) + report2 = await sync_service.sync(test_config.home) assert len(report2.deleted) == 1 # Verify entity is deleted @@ -1019,14 +1019,14 @@ async def test_sync_non_markdown_files_move_with_delete( await create_test_file(test_config.home / "other/doc-1.pdf", "content2") # Initial sync - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # First move/delete the 
original file to make way for the move (test_config.home / "doc.pdf").unlink() (test_config.home / "other/doc-1.pdf").rename(test_config.home / "doc.pdf") # Sync again - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Verify the changes moved_entity = await sync_service.entity_repository.get_by_file_path("doc.pdf") @@ -1058,7 +1058,7 @@ async def test_sync_relation_to_non_markdown_file( await create_test_file(note_file, content) # Run sync - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Check permalinks file_one_content, _ = await file_service.read_file(note_file) diff --git a/tests/sync/test_sync_wikilink_issue.py b/tests/sync/test_sync_wikilink_issue.py index 13f39d4d8..a0f28e3d5 100644 --- a/tests/sync/test_sync_wikilink_issue.py +++ b/tests/sync/test_sync_wikilink_issue.py @@ -1,8 +1,9 @@ """Test for issue #72 - notes with wikilinks staying in modified status.""" -import pytest from pathlib import Path +import pytest + from basic_memory.sync.sync_service import SyncService @@ -30,12 +31,12 @@ async def test_wikilink_modified_status_issue(sync_service: SyncService, test_co await create_test_file(test_file_path, content) # Initial sync - report1 = await sync_service.sync(test_config.home, show_progress=False) + report1 = await sync_service.sync(test_config.home) assert "test_wikilink.md" in report1.new assert "test_wikilink.md" not in report1.modified # Sync again without changing the file - should not be modified - report2 = await sync_service.sync(test_config.home, show_progress=False) + report2 = await sync_service.sync(test_config.home) assert "test_wikilink.md" not in report2.new assert "test_wikilink.md" not in report2.modified @@ -52,11 +53,11 @@ async def test_wikilink_modified_status_issue(sync_service: SyncService, test_co await create_test_file(target_file_path, target_content) # Sync again after adding target file - report3 = 
await sync_service.sync(test_config.home, show_progress=False) + report3 = await sync_service.sync(test_config.home) assert "another_file.md" in report3.new assert "test_wikilink.md" not in report3.modified # Sync one more time - both files should now be stable - report4 = await sync_service.sync(test_config.home, show_progress=False) + report4 = await sync_service.sync(test_config.home) assert "test_wikilink.md" not in report4.modified assert "another_file.md" not in report4.modified diff --git a/tests/sync/test_watch_service.py b/tests/sync/test_watch_service.py index 9e82a938a..74a97fa40 100644 --- a/tests/sync/test_watch_service.py +++ b/tests/sync/test_watch_service.py @@ -125,7 +125,7 @@ async def test_handle_file_modify(watch_service, test_config): await create_test_file(test_file, initial_content) # Initial sync - await watch_service.sync_service.sync(project_dir, show_progress=False) + await watch_service.sync_service.sync(project_dir) # Modify file modified_content = """--- @@ -169,7 +169,7 @@ async def test_handle_file_delete(watch_service, test_config): await create_test_file(test_file, content) # Initial sync - await watch_service.sync_service.sync(project_dir, show_progress=False) + await watch_service.sync_service.sync(project_dir) # Delete file test_file.unlink() @@ -207,7 +207,7 @@ async def test_handle_file_move(watch_service, test_config): await create_test_file(old_path, content) # Initial sync - await watch_service.sync_service.sync(project_dir, show_progress=False) + await watch_service.sync_service.sync(project_dir) initial_entity = await watch_service.sync_service.entity_repository.get_by_file_path( "old/test_move.md" ) @@ -306,7 +306,7 @@ async def test_handle_rapid_move(watch_service, test_config): Test content for rapid moves """ await create_test_file(original_path, content) - await watch_service.sync_service.sync(project_dir, show_progress=False) + await watch_service.sync_service.sync(project_dir) # Perform rapid moves temp_path = 
project_dir / "temp.md" @@ -393,7 +393,7 @@ async def test_handle_directory_rename(watch_service, test_config): await create_test_file(file_in_dir, content) # Initial sync to add the file to the database - await watch_service.sync_service.sync(project_dir, show_progress=False) + await watch_service.sync_service.sync(project_dir) # Rename the directory new_dir_path = project_dir / "new_dir" diff --git a/tests/utils/test_permalink_formatting.py b/tests/utils/test_permalink_formatting.py index 11a68cb61..71d471ed5 100644 --- a/tests/utils/test_permalink_formatting.py +++ b/tests/utils/test_permalink_formatting.py @@ -1,8 +1,9 @@ """Test permalink formatting during sync.""" -import pytest from pathlib import Path +import pytest + from basic_memory.config import ProjectConfig from basic_memory.services import EntityService from basic_memory.sync.sync_service import SyncService @@ -57,7 +58,7 @@ async def test_permalink_formatting( await create_test_file(project_dir / filename, content) # Run sync - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Verify permalinks for filename, expected_permalink in test_cases: diff --git a/tests/utils/test_utf8_handling.py b/tests/utils/test_utf8_handling.py index 9efe0d42a..c3aac42b9 100644 --- a/tests/utils/test_utf8_handling.py +++ b/tests/utils/test_utf8_handling.py @@ -3,7 +3,7 @@ import pytest from basic_memory import file_utils -from basic_memory.file_utils import write_file_atomic, compute_checksum +from basic_memory.file_utils import compute_checksum, write_file_atomic @pytest.mark.asyncio @@ -130,7 +130,7 @@ async def test_utf8_in_entity_sync(sync_service, test_config): test_file.write_text(utf8_content, encoding="utf-8") # Sync the file - await sync_service.sync(test_config.home, show_progress=False) + await sync_service.sync(test_config.home) # Verify entity was created entity = await sync_service.entity_service.get_by_permalink("i18n/utf8-document") diff --git 
a/uv.lock b/uv.lock index 2838fa3c7..82e5f52eb 100644 --- a/uv.lock +++ b/uv.lock @@ -71,7 +71,7 @@ wheels = [ [[package]] name = "basic-memory" -version = "0.11.0" +version = "0.12.0" source = { editable = "." } dependencies = [ { name = "aiosqlite" },