diff --git a/docs/cloud-cli.md b/docs/cloud-cli.md index 0b0534b02..2bfdc9f0c 100644 --- a/docs/cloud-cli.md +++ b/docs/cloud-cli.md @@ -7,6 +7,7 @@ The Basic Memory Cloud CLI provides seamless integration between local and cloud The cloud CLI enables you to: - **Toggle cloud mode** with `bm cloud login` / `bm cloud logout` - **Use regular commands in cloud mode**: `bm project`, `bm sync`, `bm tool` all work with cloud +- **Upload local files** directly to cloud projects via `bm cloud upload` - **Bidirectional sync** with rclone bisync (recommended for most users) - **Direct file access** via rclone mount (alternative workflow) - **Integrity verification** with `bm cloud check` @@ -160,6 +161,69 @@ bm project list This Dropbox-like workflow means you don't need to manually coordinate projects between local and cloud. +### Uploading Local Files + +You can directly upload local files or directories to cloud projects using `bm cloud upload`. This is useful for: +- Migrating existing local projects to the cloud +- Quickly uploading specific files or directories +- One-time bulk uploads without setting up sync + +**Basic Usage:** + +```bash +# Upload a directory to existing project +bm cloud upload ~/my-notes --project research + +# Upload a single file +bm cloud upload important-doc.md --project research +``` + +**Create Project On-the-Fly:** + +If the target project doesn't exist yet, use `--create-project`: + +```bash +# Upload and create project in one step +bm cloud upload ~/local-project --project new-research --create-project +``` + +**Skip Automatic Sync:** + +By default, the command syncs the project after upload to index the files. 
To skip this: + +```bash +# Upload without triggering sync +bm cloud upload ~/bulk-data --project archives --no-sync +``` + +**File Filtering:** + +The upload command respects `.bmignore` and `.gitignore` patterns, automatically excluding: +- Hidden files (`.git`, `.DS_Store`) +- Build artifacts (`node_modules`, `__pycache__`) +- Database files (`*.db`, `*.db-wal`) +- Environment files (`.env`) + +To customize what gets uploaded, edit `~/.basic-memory/.bmignore`. + +**Complete Example:** + +```bash +# 1. Login to cloud +bm cloud login + +# 2. Upload local project (creates project if needed) +bm cloud upload ~/Documents/research-notes --project research --create-project + +# 3. Verify upload +bm project list +``` + +**Notes:** +- Files are uploaded directly via WebDAV (no sync setup required) +- Uploads are immediate and don't require bisync or mount +- Use this for migration or one-time uploads; use `bm sync` for ongoing synchronization + ## File Synchronization ### The `bm sync` Command (Cloud Mode Aware) @@ -628,6 +692,15 @@ bm cloud check # Full integrity check bm cloud check --one-way # Faster one-way check ``` +### File Upload + +```bash +# Upload files/directories to cloud projects +bm cloud upload --project # Upload to existing project +bm cloud upload -p --create-project # Upload and create project +bm cloud upload -p --no-sync # Upload without syncing +``` + ### Direct File Access (Mount) ```bash diff --git a/pyproject.toml b/pyproject.toml index a90869808..100b298e2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,7 +34,7 @@ dependencies = [ "pyjwt>=2.10.1", "python-dotenv>=1.1.0", "pytest-aio>=1.9.0", - "aiofiles>=24.1.0", # Async file I/O + "aiofiles>=24.1.0", # Async file I/O ] diff --git a/src/basic_memory/cli/commands/cloud/__init__.py b/src/basic_memory/cli/commands/cloud/__init__.py index 85b431990..1b6146e2b 100644 --- a/src/basic_memory/cli/commands/cloud/__init__.py +++ b/src/basic_memory/cli/commands/cloud/__init__.py @@ -2,4 +2,5 @@ # 
Import all commands to register them with typer from basic_memory.cli.commands.cloud.core_commands import * # noqa: F401,F403 -from basic_memory.cli.commands.cloud.api_client import get_authenticated_headers # noqa: F401 +from basic_memory.cli.commands.cloud.api_client import get_authenticated_headers, get_cloud_config # noqa: F401 +from basic_memory.cli.commands.cloud.upload_command import * # noqa: F401,F403 diff --git a/src/basic_memory/cli/commands/cloud/bisync_commands.py b/src/basic_memory/cli/commands/cloud/bisync_commands.py index 4fe4d26ea..25aa1a6de 100644 --- a/src/basic_memory/cli/commands/cloud/bisync_commands.py +++ b/src/basic_memory/cli/commands/cloud/bisync_commands.py @@ -12,6 +12,10 @@ from rich.table import Table from basic_memory.cli.commands.cloud.api_client import CloudAPIError, make_api_request +from basic_memory.cli.commands.cloud.cloud_utils import ( + create_cloud_project, + fetch_cloud_projects, +) from basic_memory.cli.commands.cloud.rclone_config import ( add_tenant_to_rclone_config, ) @@ -21,11 +25,7 @@ from basic_memory.schemas.cloud import ( TenantMountInfo, MountCredentials, - CloudProjectList, - CloudProjectCreateRequest, - CloudProjectCreateResponse, ) -from basic_memory.utils import generate_permalink console = Console() @@ -110,24 +110,6 @@ async def generate_mount_credentials(tenant_id: str) -> MountCredentials: raise BisyncError(f"Failed to generate credentials: {e}") from e -async def fetch_cloud_projects() -> CloudProjectList: - """Fetch list of projects from cloud API. 
- - Returns: - CloudProjectList with projects from cloud - """ - try: - config_manager = ConfigManager() - config = config_manager.config - host_url = config.cloud_host.rstrip("/") - - response = await make_api_request(method="GET", url=f"{host_url}/proxy/projects/projects") - - return CloudProjectList.model_validate(response.json()) - except Exception as e: - raise BisyncError(f"Failed to fetch cloud projects: {e}") from e - - def scan_local_directories(sync_dir: Path) -> list[str]: """Scan local sync directory for project folders. @@ -148,41 +130,6 @@ def scan_local_directories(sync_dir: Path) -> list[str]: return directories -async def create_cloud_project(project_name: str) -> CloudProjectCreateResponse: - """Create a new project on cloud. - - Args: - project_name: Name of project to create - - Returns: - CloudProjectCreateResponse with project details from API - """ - try: - config_manager = ConfigManager() - config = config_manager.config - host_url = config.cloud_host.rstrip("/") - - # Use generate_permalink to ensure consistent naming - project_path = generate_permalink(project_name) - - project_data = CloudProjectCreateRequest( - name=project_name, - path=project_path, - set_default=False, - ) - - response = await make_api_request( - method="POST", - url=f"{host_url}/proxy/projects/projects", - headers={"Content-Type": "application/json"}, - json_data=project_data.model_dump(), - ) - - return CloudProjectCreateResponse.model_validate(response.json()) - except Exception as e: - raise BisyncError(f"Failed to create cloud project '{project_name}': {e}") from e - - def get_bisync_state_path(tenant_id: str) -> Path: """Get path to bisync state directory.""" return Path.home() / ".basic-memory" / "bisync-state" / tenant_id diff --git a/src/basic_memory/cli/commands/cloud/cloud_utils.py b/src/basic_memory/cli/commands/cloud/cloud_utils.py new file mode 100644 index 000000000..294d2613f --- /dev/null +++ b/src/basic_memory/cli/commands/cloud/cloud_utils.py @@ -0,0 
+1,100 @@ +"""Shared utilities for cloud operations.""" + +from basic_memory.cli.commands.cloud.api_client import make_api_request +from basic_memory.config import ConfigManager +from basic_memory.schemas.cloud import ( + CloudProjectList, + CloudProjectCreateRequest, + CloudProjectCreateResponse, +) +from basic_memory.utils import generate_permalink + + +class CloudUtilsError(Exception): + """Exception raised for cloud utility errors.""" + + pass + + +async def fetch_cloud_projects() -> CloudProjectList: + """Fetch list of projects from cloud API. + + Returns: + CloudProjectList with projects from cloud + """ + try: + config_manager = ConfigManager() + config = config_manager.config + host_url = config.cloud_host.rstrip("/") + + response = await make_api_request(method="GET", url=f"{host_url}/proxy/projects/projects") + + return CloudProjectList.model_validate(response.json()) + except Exception as e: + raise CloudUtilsError(f"Failed to fetch cloud projects: {e}") from e + + +async def create_cloud_project(project_name: str) -> CloudProjectCreateResponse: + """Create a new project on cloud. 
+ + Args: + project_name: Name of project to create + + Returns: + CloudProjectCreateResponse with project details from API + """ + try: + config_manager = ConfigManager() + config = config_manager.config + host_url = config.cloud_host.rstrip("/") + + # Use generate_permalink to ensure consistent naming + project_path = generate_permalink(project_name) + + project_data = CloudProjectCreateRequest( + name=project_name, + path=project_path, + set_default=False, + ) + + response = await make_api_request( + method="POST", + url=f"{host_url}/proxy/projects/projects", + headers={"Content-Type": "application/json"}, + json_data=project_data.model_dump(), + ) + + return CloudProjectCreateResponse.model_validate(response.json()) + except Exception as e: + raise CloudUtilsError(f"Failed to create cloud project '{project_name}': {e}") from e + + +async def sync_project(project_name: str) -> None: + """Trigger sync for a specific project on cloud. + + Args: + project_name: Name of project to sync + """ + try: + from basic_memory.cli.commands.command_utils import run_sync + + await run_sync(project=project_name) + except Exception as e: + raise CloudUtilsError(f"Failed to sync project '{project_name}': {e}") from e + + +async def project_exists(project_name: str) -> bool: + """Check if a project exists on cloud. 
+ + Args: + project_name: Name of project to check + + Returns: + True if project exists, False otherwise + """ + try: + projects = await fetch_cloud_projects() + project_names = {p.name for p in projects.projects} + return project_name in project_names + except Exception: + return False diff --git a/src/basic_memory/cli/commands/cloud/upload.py b/src/basic_memory/cli/commands/cloud/upload.py new file mode 100644 index 000000000..87cc6d6fa --- /dev/null +++ b/src/basic_memory/cli/commands/cloud/upload.py @@ -0,0 +1,128 @@ +"""WebDAV upload functionality for basic-memory projects.""" + +import os +from pathlib import Path + +import aiofiles +import httpx + +from basic_memory.ignore_utils import load_gitignore_patterns, should_ignore_path +from basic_memory.mcp.async_client import get_client +from basic_memory.mcp.tools.utils import call_put + + +async def upload_path(local_path: Path, project_name: str) -> bool: + """ + Upload a file or directory to cloud project via WebDAV. + + Args: + local_path: Path to local file or directory + project_name: Name of cloud project (destination) + + Returns: + True if upload succeeded, False otherwise + """ + try: + # Resolve path + local_path = local_path.resolve() + + # Check if path exists + if not local_path.exists(): + print(f"Error: Path does not exist: {local_path}") + return False + + # Get files to upload + if local_path.is_file(): + files_to_upload = [(local_path, local_path.name)] + else: + files_to_upload = _get_files_to_upload(local_path) + + if not files_to_upload: + print("No files found to upload") + return True + + print(f"Found {len(files_to_upload)} file(s) to upload") + + # Upload files using httpx + total_bytes = 0 + + async with get_client() as client: + for i, (file_path, relative_path) in enumerate(files_to_upload, 1): + # Build remote path: /webdav/{project_name}/{relative_path} + remote_path = f"/webdav/{project_name}/{relative_path}" + print(f"Uploading {relative_path} ({i}/{len(files_to_upload)})") + + 
# Read file content asynchronously + async with aiofiles.open(file_path, "rb") as f: + content = await f.read() + + # Upload via HTTP PUT to WebDAV endpoint + response = await call_put(client, remote_path, content=content) + response.raise_for_status() + + total_bytes += file_path.stat().st_size + + # Format size based on magnitude + if total_bytes < 1024: + size_str = f"{total_bytes} bytes" + elif total_bytes < 1024 * 1024: + size_str = f"{total_bytes / 1024:.1f} KB" + else: + size_str = f"{total_bytes / (1024 * 1024):.1f} MB" + + print(f"✓ Upload complete: {len(files_to_upload)} file(s) ({size_str})") + return True + + except httpx.HTTPStatusError as e: + print(f"Upload failed: HTTP {e.response.status_code} - {e.response.text}") + return False + except Exception as e: + print(f"Upload failed: {e}") + return False + + +def _get_files_to_upload(directory: Path) -> list[tuple[Path, str]]: + """ + Get list of files to upload from directory. + + Uses .bmignore and .gitignore patterns for filtering. 
+ + Args: + directory: Directory to scan + + Returns: + List of (absolute_path, relative_path) tuples + """ + files = [] + + # Load ignore patterns from .bmignore and .gitignore + ignore_patterns = load_gitignore_patterns(directory) + + # Walk through directory + for root, dirs, filenames in os.walk(directory): + root_path = Path(root) + + # Filter directories based on ignore patterns + filtered_dirs = [] + for d in dirs: + dir_path = root_path / d + if not should_ignore_path(dir_path, directory, ignore_patterns): + filtered_dirs.append(d) + dirs[:] = filtered_dirs + + # Process files + for filename in filenames: + file_path = root_path / filename + + # Check if file should be ignored + if should_ignore_path(file_path, directory, ignore_patterns): + continue + + # Calculate relative path for remote + rel_path = file_path.relative_to(directory) + # Use forward slashes for WebDAV paths + remote_path = str(rel_path).replace("\\", "/") + + files.append((file_path, remote_path)) + + return files diff --git a/src/basic_memory/cli/commands/cloud/upload_command.py b/src/basic_memory/cli/commands/cloud/upload_command.py new file mode 100644 index 000000000..3de072070 --- /dev/null +++ b/src/basic_memory/cli/commands/cloud/upload_command.py @@ -0,0 +1,94 @@ +"""Upload CLI commands for basic-memory projects.""" + +import asyncio +from pathlib import Path + +import typer +from rich.console import Console + +from basic_memory.cli.app import cloud_app +from basic_memory.cli.commands.cloud.cloud_utils import ( + create_cloud_project, + project_exists, + sync_project, +) +from basic_memory.cli.commands.cloud.upload import upload_path + +console = Console() + + +@cloud_app.command("upload") +def upload( + path: Path = typer.Argument( + ..., + help="Path to local file or directory to upload", + exists=True, + readable=True, + resolve_path=True, + ), + project: str = typer.Option( + ..., + "--project", + "-p", + help="Cloud project name (destination)", + ), + create_project: bool = 
typer.Option( + False, + "--create-project", + "-c", + help="Create project if it doesn't exist", + ), + sync: bool = typer.Option( + True, + "--sync/--no-sync", + help="Sync project after upload (default: true)", + ), +) -> None: + """Upload local files or directories to cloud project via WebDAV. + + Examples: + bm cloud upload ~/my-notes --project research + bm cloud upload notes.md --project research --create-project + bm cloud upload ~/docs --project work --no-sync + """ + + async def _upload(): + # Check if project exists + if not await project_exists(project): + if create_project: + console.print(f"[blue]Creating cloud project '{project}'...[/blue]") + try: + await create_cloud_project(project) + console.print(f"[green]✓ Created project '{project}'[/green]") + except Exception as e: + console.print(f"[red]Failed to create project: {e}[/red]") + raise typer.Exit(1) + else: + console.print( + f"[red]Project '{project}' does not exist.[/red]\n" + f"[yellow]Options:[/yellow]\n" + f" 1. Create it first: bm project add {project}\n" + f" 2. 
Use --create-project flag to create automatically"             )             raise typer.Exit(1)          # Perform upload         console.print(f"[blue]Uploading {path} to project '{project}'...[/blue]")         success = await upload_path(path, project)         if not success:             console.print("[red]Upload failed[/red]")             raise typer.Exit(1)          console.print(f"[green]✅ Successfully uploaded to '{project}'[/green]")          # Sync project if requested         if sync:             console.print(f"[blue]Syncing project '{project}'...[/blue]")             try:                 await sync_project(project)                 console.print("[green]✓ Project synced[/green]")             except Exception as e:                 console.print(f"[yellow]Warning: Sync failed: {e}[/yellow]")                 console.print("[dim]Files uploaded but may not be indexed yet[/dim]")      asyncio.run(_upload()) diff --git a/tests/cli/test_bisync_commands.py b/tests/cli/test_bisync_commands.py index 30c5354ae..7334bd891 100644 --- a/tests/cli/test_bisync_commands.py +++ b/tests/cli/test_bisync_commands.py @@ -404,11 +404,9 @@ class TestCloudProjectAutoRegistration: @pytest.mark.asyncio async def test_extracts_directory_names_from_cloud_paths(self): """Test extraction of directory names from cloud project paths.""" - from basic_memory.cli.commands.cloud.bisync_commands import fetch_cloud_projects + from basic_memory.cli.commands.cloud.cloud_utils import fetch_cloud_projects - with patch( - "basic_memory.cli.commands.cloud.bisync_commands.make_api_request"
- ) as mock_request: + with patch("basic_memory.cli.commands.cloud.cloud_utils.make_api_request") as mock_request: mock_response = Mock() mock_response.json.return_value = { "projects": [ @@ -435,13 +433,11 @@ async def test_extracts_directory_names_from_cloud_paths(self): @pytest.mark.asyncio async def test_create_cloud_project_generates_permalink(self): """Test that create_cloud_project generates correct permalink.""" - from basic_memory.cli.commands.cloud.bisync_commands import create_cloud_project + from basic_memory.cli.commands.cloud.cloud_utils import create_cloud_project - with patch( - "basic_memory.cli.commands.cloud.bisync_commands.make_api_request" - ) as mock_request: + with patch("basic_memory.cli.commands.cloud.cloud_utils.make_api_request") as mock_request: with patch( - "basic_memory.cli.commands.cloud.bisync_commands.generate_permalink" + "basic_memory.cli.commands.cloud.cloud_utils.generate_permalink" ) as mock_permalink: mock_permalink.return_value = "my-new-project" mock_response = Mock() diff --git a/tests/cli/test_cloud_utils.py b/tests/cli/test_cloud_utils.py new file mode 100644 index 000000000..3acef7298 --- /dev/null +++ b/tests/cli/test_cloud_utils.py @@ -0,0 +1,316 @@ +"""Tests for cloud_utils module.""" + +from unittest.mock import AsyncMock, Mock, patch + +import pytest + +from basic_memory.cli.commands.cloud.cloud_utils import ( + CloudUtilsError, + create_cloud_project, + fetch_cloud_projects, + project_exists, + sync_project, +) + + +class TestFetchCloudProjects: + """Tests for fetch_cloud_projects().""" + + @pytest.mark.asyncio + async def test_fetches_projects_successfully(self): + """Test successful fetch of cloud projects.""" + with patch("basic_memory.cli.commands.cloud.cloud_utils.make_api_request") as mock_request: + with patch("basic_memory.cli.commands.cloud.cloud_utils.ConfigManager") as mock_config: + # Setup config + mock_config.return_value.config.cloud_host = "https://example.com" + + # Mock API response + 
mock_response = Mock() + mock_response.json.return_value = { + "projects": [ + {"name": "Project 1", "path": "/app/data/project-1"}, + {"name": "Project 2", "path": "/app/data/project-2"}, + ] + } + mock_request.return_value = mock_response + + result = await fetch_cloud_projects() + + # Verify result + assert len(result.projects) == 2 + assert result.projects[0].name == "Project 1" + assert result.projects[1].name == "Project 2" + + # Verify API was called correctly + mock_request.assert_called_once_with( + method="GET", url="https://example.com/proxy/projects/projects" + ) + + @pytest.mark.asyncio + async def test_strips_trailing_slash_from_host(self): + """Test that trailing slash is stripped from cloud_host.""" + with patch("basic_memory.cli.commands.cloud.cloud_utils.make_api_request") as mock_request: + with patch("basic_memory.cli.commands.cloud.cloud_utils.ConfigManager") as mock_config: + # Setup config with trailing slash + mock_config.return_value.config.cloud_host = "https://example.com/" + + mock_response = Mock() + mock_response.json.return_value = {"projects": []} + mock_request.return_value = mock_response + + await fetch_cloud_projects() + + # Verify trailing slash was removed + call_args = mock_request.call_args + assert call_args[1]["url"] == "https://example.com/proxy/projects/projects" + + @pytest.mark.asyncio + async def test_raises_error_on_api_failure(self): + """Test that CloudUtilsError is raised on API failure.""" + with patch("basic_memory.cli.commands.cloud.cloud_utils.make_api_request") as mock_request: + with patch("basic_memory.cli.commands.cloud.cloud_utils.ConfigManager") as mock_config: + mock_config.return_value.config.cloud_host = "https://example.com" + mock_request.side_effect = Exception("API Error") + + with pytest.raises(CloudUtilsError) as exc_info: + await fetch_cloud_projects() + + assert "Failed to fetch cloud projects" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_handles_empty_project_list(self): + 
"""Test handling of empty project list.""" + with patch("basic_memory.cli.commands.cloud.cloud_utils.make_api_request") as mock_request: + with patch("basic_memory.cli.commands.cloud.cloud_utils.ConfigManager") as mock_config: + mock_config.return_value.config.cloud_host = "https://example.com" + + mock_response = Mock() + mock_response.json.return_value = {"projects": []} + mock_request.return_value = mock_response + + result = await fetch_cloud_projects() + + assert len(result.projects) == 0 + + +class TestCreateCloudProject: + """Tests for create_cloud_project().""" + + @pytest.mark.asyncio + async def test_creates_project_successfully(self): + """Test successful project creation.""" + with patch("basic_memory.cli.commands.cloud.cloud_utils.make_api_request") as mock_request: + with patch("basic_memory.cli.commands.cloud.cloud_utils.ConfigManager") as mock_config: + with patch( + "basic_memory.cli.commands.cloud.cloud_utils.generate_permalink" + ) as mock_permalink: + # Setup mocks + mock_config.return_value.config.cloud_host = "https://example.com" + mock_permalink.return_value = "my-project" + + mock_response = Mock() + mock_response.json.return_value = { + "name": "My Project", + "path": "my-project", + "message": "Created successfully", + } + mock_request.return_value = mock_response + + result = await create_cloud_project("My Project") + + # Verify result + assert result.name == "My Project" + assert result.path == "my-project" + assert result.message == "Created successfully" + + # Verify permalink was generated + mock_permalink.assert_called_once_with("My Project") + + # Verify API request + call_args = mock_request.call_args + assert call_args[1]["method"] == "POST" + assert call_args[1]["url"] == "https://example.com/proxy/projects/projects" + assert call_args[1]["headers"]["Content-Type"] == "application/json" + + json_data = call_args[1]["json_data"] + assert json_data["name"] == "My Project" + assert json_data["path"] == "my-project" + assert 
json_data["set_default"] is False + + @pytest.mark.asyncio + async def test_generates_permalink_from_name(self): + """Test that permalink is generated from project name.""" + with patch("basic_memory.cli.commands.cloud.cloud_utils.make_api_request") as mock_request: + with patch("basic_memory.cli.commands.cloud.cloud_utils.ConfigManager") as mock_config: + with patch( + "basic_memory.cli.commands.cloud.cloud_utils.generate_permalink" + ) as mock_permalink: + mock_config.return_value.config.cloud_host = "https://example.com" + mock_permalink.return_value = "test-project-123" + + mock_response = Mock() + mock_response.json.return_value = { + "name": "Test Project 123", + "path": "test-project-123", + "message": "Created", + } + mock_request.return_value = mock_response + + await create_cloud_project("Test Project 123") + + # Verify generate_permalink was called with project name + mock_permalink.assert_called_once_with("Test Project 123") + + @pytest.mark.asyncio + async def test_raises_error_on_api_failure(self): + """Test that CloudUtilsError is raised on API failure.""" + with patch("basic_memory.cli.commands.cloud.cloud_utils.make_api_request") as mock_request: + with patch("basic_memory.cli.commands.cloud.cloud_utils.ConfigManager") as mock_config: + with patch( + "basic_memory.cli.commands.cloud.cloud_utils.generate_permalink" + ) as mock_permalink: + mock_config.return_value.config.cloud_host = "https://example.com" + mock_permalink.return_value = "project" + mock_request.side_effect = Exception("API Error") + + with pytest.raises(CloudUtilsError) as exc_info: + await create_cloud_project("Test Project") + + assert "Failed to create cloud project 'Test Project'" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_strips_trailing_slash_from_host(self): + """Test that trailing slash is stripped from cloud_host.""" + with patch("basic_memory.cli.commands.cloud.cloud_utils.make_api_request") as mock_request: + with 
patch("basic_memory.cli.commands.cloud.cloud_utils.ConfigManager") as mock_config: + with patch( + "basic_memory.cli.commands.cloud.cloud_utils.generate_permalink" + ) as mock_permalink: + mock_config.return_value.config.cloud_host = "https://example.com/" + mock_permalink.return_value = "project" + + mock_response = Mock() + mock_response.json.return_value = { + "name": "Project", + "path": "project", + "message": "Created", + } + mock_request.return_value = mock_response + + await create_cloud_project("Project") + + # Verify trailing slash was removed + call_args = mock_request.call_args + assert call_args[1]["url"] == "https://example.com/proxy/projects/projects" + + +class TestSyncProject: + """Tests for sync_project().""" + + @pytest.mark.asyncio + async def test_syncs_project_successfully(self): + """Test successful project sync.""" + # Patch at the point where it's imported (inside the function) + with patch("basic_memory.cli.commands.command_utils.run_sync", new_callable=AsyncMock) as mock_sync: + await sync_project("test-project") + + # Verify run_sync was called with project name + mock_sync.assert_called_once_with(project="test-project") + + @pytest.mark.asyncio + async def test_raises_error_on_sync_failure(self): + """Test that CloudUtilsError is raised on sync failure.""" + # Patch at the point where it's imported (inside the function) + with patch("basic_memory.cli.commands.command_utils.run_sync", new_callable=AsyncMock) as mock_sync: + mock_sync.side_effect = Exception("Sync failed") + + with pytest.raises(CloudUtilsError) as exc_info: + await sync_project("test-project") + + assert "Failed to sync project 'test-project'" in str(exc_info.value) + + +class TestProjectExists: + """Tests for project_exists().""" + + @pytest.mark.asyncio + async def test_returns_true_when_project_exists(self): + """Test that True is returned when project exists.""" + from basic_memory.schemas.cloud import CloudProject, CloudProjectList + + with patch( + 
"basic_memory.cli.commands.cloud.cloud_utils.fetch_cloud_projects" + ) as mock_fetch: + # Create actual CloudProject objects + projects = CloudProjectList( + projects=[ + CloudProject(name="project-1", path="/app/data/project-1"), + CloudProject(name="test-project", path="/app/data/test-project"), + CloudProject(name="project-2", path="/app/data/project-2"), + ] + ) + mock_fetch.return_value = projects + + result = await project_exists("test-project") + + assert result is True + + @pytest.mark.asyncio + async def test_returns_false_when_project_not_found(self): + """Test that False is returned when project doesn't exist.""" + with patch( + "basic_memory.cli.commands.cloud.cloud_utils.fetch_cloud_projects" + ) as mock_fetch: + # Mock project list without matching project + mock_projects = Mock() + mock_projects.projects = [ + Mock(name="project-1"), + Mock(name="project-2"), + ] + mock_fetch.return_value = mock_projects + + result = await project_exists("nonexistent-project") + + assert result is False + + @pytest.mark.asyncio + async def test_returns_false_on_api_error(self): + """Test that False is returned on API error.""" + with patch( + "basic_memory.cli.commands.cloud.cloud_utils.fetch_cloud_projects" + ) as mock_fetch: + mock_fetch.side_effect = Exception("API Error") + + result = await project_exists("test-project") + + # Should return False instead of raising exception + assert result is False + + @pytest.mark.asyncio + async def test_handles_empty_project_list(self): + """Test handling of empty project list.""" + with patch( + "basic_memory.cli.commands.cloud.cloud_utils.fetch_cloud_projects" + ) as mock_fetch: + mock_projects = Mock() + mock_projects.projects = [] + mock_fetch.return_value = mock_projects + + result = await project_exists("any-project") + + assert result is False + + @pytest.mark.asyncio + async def test_case_sensitive_matching(self): + """Test that project name matching is case-sensitive.""" + with patch( + 
"basic_memory.cli.commands.cloud.cloud_utils.fetch_cloud_projects" + ) as mock_fetch: + mock_projects = Mock() + mock_projects.projects = [Mock(name="Test-Project")] + mock_fetch.return_value = mock_projects + + # Different case should not match + result = await project_exists("test-project") + + assert result is False diff --git a/tests/cli/test_upload.py b/tests/cli/test_upload.py new file mode 100644 index 000000000..3ed894ad4 --- /dev/null +++ b/tests/cli/test_upload.py @@ -0,0 +1,326 @@ +"""Tests for upload module.""" + +import os +from pathlib import Path +from unittest.mock import AsyncMock, Mock, patch + +import httpx +import pytest + +from basic_memory.cli.commands.cloud.upload import _get_files_to_upload, upload_path + + +class TestGetFilesToUpload: + """Tests for _get_files_to_upload().""" + + def test_collects_files_from_directory(self, tmp_path): + """Test collecting files from a directory.""" + # Create test directory structure + (tmp_path / "file1.txt").write_text("content1") + (tmp_path / "file2.md").write_text("content2") + (tmp_path / "subdir").mkdir() + (tmp_path / "subdir" / "file3.py").write_text("content3") + + # Call with real ignore utils (no mocking) + result = _get_files_to_upload(tmp_path) + + # Should find all 3 files + assert len(result) == 3 + + # Extract just the relative paths for easier assertion + relative_paths = [rel_path for _, rel_path in result] + assert "file1.txt" in relative_paths + assert "file2.md" in relative_paths + assert "subdir/file3.py" in relative_paths + + def test_respects_gitignore_patterns(self, tmp_path): + """Test that gitignore patterns are respected.""" + # Create test files + (tmp_path / "keep.txt").write_text("keep") + (tmp_path / "ignore.pyc").write_text("ignore") + + # Create .gitignore file + gitignore_file = tmp_path / ".gitignore" + gitignore_file.write_text("*.pyc\n") + + result = _get_files_to_upload(tmp_path) + + # Should only find keep.txt (not .pyc or .gitignore itself) + relative_paths = 
[rel_path for _, rel_path in result] + assert "keep.txt" in relative_paths + assert "ignore.pyc" not in relative_paths + + def test_handles_empty_directory(self, tmp_path): + """Test handling of empty directory.""" + empty_dir = tmp_path / "empty" + empty_dir.mkdir() + + result = _get_files_to_upload(empty_dir) + + assert result == [] + + def test_converts_windows_paths_to_forward_slashes(self, tmp_path): + """Test that Windows backslashes are converted to forward slashes.""" + # Create nested structure + (tmp_path / "dir1").mkdir() + (tmp_path / "dir1" / "dir2").mkdir() + (tmp_path / "dir1" / "dir2" / "file.txt").write_text("content") + + result = _get_files_to_upload(tmp_path) + + # Remote path should use forward slashes + _, remote_path = result[0] + assert "\\" not in remote_path # No backslashes + assert "dir1/dir2/file.txt" == remote_path + + +class TestUploadPath: + """Tests for upload_path().""" + + @pytest.mark.asyncio + async def test_uploads_single_file(self, tmp_path): + """Test uploading a single file.""" + test_file = tmp_path / "test.txt" + test_file.write_text("test content") + + # Mock the client and HTTP response + mock_client = AsyncMock() + mock_response = Mock() + mock_response.raise_for_status = Mock() + + with patch("basic_memory.cli.commands.cloud.upload.get_client") as mock_get_client: + with patch("basic_memory.cli.commands.cloud.upload.call_put") as mock_put: + with patch("aiofiles.open", create=True) as mock_aiofiles_open: + # Setup mocks + mock_get_client.return_value.__aenter__.return_value = mock_client + mock_get_client.return_value.__aexit__.return_value = None + mock_put.return_value = mock_response + + # Mock file reading + mock_file = AsyncMock() + mock_file.read.return_value = b"test content" + mock_aiofiles_open.return_value.__aenter__.return_value = mock_file + + result = await upload_path(test_file, "test-project") + + # Verify success + assert result is True + + # Verify PUT was called with correct path + 
mock_put.assert_called_once() + call_args = mock_put.call_args + assert call_args[0][0] == mock_client + assert call_args[0][1] == "/webdav/test-project/test.txt" + assert call_args[1]["content"] == b"test content" + + @pytest.mark.asyncio + async def test_uploads_directory(self, tmp_path): + """Test uploading a directory with multiple files.""" + # Create test files + (tmp_path / "file1.txt").write_text("content1") + (tmp_path / "file2.txt").write_text("content2") + + mock_client = AsyncMock() + mock_response = Mock() + mock_response.raise_for_status = Mock() + + with patch("basic_memory.cli.commands.cloud.upload.get_client") as mock_get_client: + with patch("basic_memory.cli.commands.cloud.upload.call_put") as mock_put: + with patch("basic_memory.cli.commands.cloud.upload._get_files_to_upload") as mock_get_files: + with patch("aiofiles.open", create=True) as mock_aiofiles_open: + # Setup mocks + mock_get_client.return_value.__aenter__.return_value = mock_client + mock_get_client.return_value.__aexit__.return_value = None + mock_put.return_value = mock_response + + # Mock file listing + mock_get_files.return_value = [ + (tmp_path / "file1.txt", "file1.txt"), + (tmp_path / "file2.txt", "file2.txt"), + ] + + # Mock file reading + mock_file = AsyncMock() + mock_file.read.side_effect = [b"content1", b"content2"] + mock_aiofiles_open.return_value.__aenter__.return_value = mock_file + + result = await upload_path(tmp_path, "test-project") + + # Verify success + assert result is True + + # Verify PUT was called twice + assert mock_put.call_count == 2 + + @pytest.mark.asyncio + async def test_handles_nonexistent_path(self, tmp_path): + """Test handling of nonexistent path.""" + nonexistent = tmp_path / "does-not-exist" + + result = await upload_path(nonexistent, "test-project") + + # Should return False + assert result is False + + @pytest.mark.asyncio + async def test_handles_http_error(self, tmp_path): + """Test handling of HTTP errors during upload.""" + test_file = 
tmp_path / "test.txt" + test_file.write_text("test content") + + mock_client = AsyncMock() + mock_response = Mock() + mock_response.status_code = 403 + mock_response.text = "Forbidden" + mock_response.raise_for_status.side_effect = httpx.HTTPStatusError( + "Forbidden", request=Mock(), response=mock_response + ) + + with patch("basic_memory.cli.commands.cloud.upload.get_client") as mock_get_client: + with patch("basic_memory.cli.commands.cloud.upload.call_put") as mock_put: + with patch("aiofiles.open", create=True) as mock_aiofiles_open: + # Setup mocks + mock_get_client.return_value.__aenter__.return_value = mock_client + mock_get_client.return_value.__aexit__.return_value = None + mock_put.return_value = mock_response + + # Mock file reading + mock_file = AsyncMock() + mock_file.read.return_value = b"test content" + mock_aiofiles_open.return_value.__aenter__.return_value = mock_file + + result = await upload_path(test_file, "test-project") + + # Should return False on error + assert result is False + + @pytest.mark.asyncio + async def test_handles_empty_directory(self, tmp_path): + """Test uploading an empty directory.""" + empty_dir = tmp_path / "empty" + empty_dir.mkdir() + + with patch("basic_memory.cli.commands.cloud.upload._get_files_to_upload") as mock_get_files: + mock_get_files.return_value = [] + + result = await upload_path(empty_dir, "test-project") + + # Should return True (no-op success) + assert result is True + + @pytest.mark.asyncio + async def test_formats_file_size_bytes(self, tmp_path, capsys): + """Test file size formatting for small files (bytes).""" + test_file = tmp_path / "small.txt" + test_file.write_text("hi") # 2 bytes + + mock_client = AsyncMock() + mock_response = Mock() + mock_response.raise_for_status = Mock() + + with patch("basic_memory.cli.commands.cloud.upload.get_client") as mock_get_client: + with patch("basic_memory.cli.commands.cloud.upload.call_put") as mock_put: + with patch("aiofiles.open", create=True) as 
mock_aiofiles_open: + mock_get_client.return_value.__aenter__.return_value = mock_client + mock_get_client.return_value.__aexit__.return_value = None + mock_put.return_value = mock_response + + mock_file = AsyncMock() + mock_file.read.return_value = b"hi" + mock_aiofiles_open.return_value.__aenter__.return_value = mock_file + + await upload_path(test_file, "test-project") + + # Check output contains "bytes" + captured = capsys.readouterr() + assert "bytes" in captured.out + + @pytest.mark.asyncio + async def test_formats_file_size_kilobytes(self, tmp_path, capsys): + """Test file size formatting for medium files (KB).""" + test_file = tmp_path / "medium.txt" + # Create file with 2KB of content + test_file.write_text("x" * 2048) + + mock_client = AsyncMock() + mock_response = Mock() + mock_response.raise_for_status = Mock() + + with patch("basic_memory.cli.commands.cloud.upload.get_client") as mock_get_client: + with patch("basic_memory.cli.commands.cloud.upload.call_put") as mock_put: + with patch("aiofiles.open", create=True) as mock_aiofiles_open: + mock_get_client.return_value.__aenter__.return_value = mock_client + mock_get_client.return_value.__aexit__.return_value = None + mock_put.return_value = mock_response + + mock_file = AsyncMock() + mock_file.read.return_value = b"x" * 2048 + mock_aiofiles_open.return_value.__aenter__.return_value = mock_file + + await upload_path(test_file, "test-project") + + # Check output contains "KB" + captured = capsys.readouterr() + assert "KB" in captured.out + + @pytest.mark.asyncio + async def test_formats_file_size_megabytes(self, tmp_path, capsys): + """Test file size formatting for large files (MB).""" + test_file = tmp_path / "large.txt" + # Create file with 2MB of content + test_file.write_text("x" * (2 * 1024 * 1024)) + + mock_client = AsyncMock() + mock_response = Mock() + mock_response.raise_for_status = Mock() + + with patch("basic_memory.cli.commands.cloud.upload.get_client") as mock_get_client: + with 
patch("basic_memory.cli.commands.cloud.upload.call_put") as mock_put: + with patch("aiofiles.open", create=True) as mock_aiofiles_open: + mock_get_client.return_value.__aenter__.return_value = mock_client + mock_get_client.return_value.__aexit__.return_value = None + mock_put.return_value = mock_response + + mock_file = AsyncMock() + mock_file.read.return_value = b"x" * (2 * 1024 * 1024) + mock_aiofiles_open.return_value.__aenter__.return_value = mock_file + + await upload_path(test_file, "test-project") + + # Check output contains "MB" + captured = capsys.readouterr() + assert "MB" in captured.out + + @pytest.mark.asyncio + async def test_builds_correct_webdav_path(self, tmp_path): + """Test that WebDAV path is correctly constructed.""" + # Create nested structure + (tmp_path / "subdir").mkdir() + test_file = tmp_path / "subdir" / "file.txt" + test_file.write_text("content") + + mock_client = AsyncMock() + mock_response = Mock() + mock_response.raise_for_status = Mock() + + with patch("basic_memory.cli.commands.cloud.upload.get_client") as mock_get_client: + with patch("basic_memory.cli.commands.cloud.upload.call_put") as mock_put: + with patch("basic_memory.cli.commands.cloud.upload._get_files_to_upload") as mock_get_files: + with patch("aiofiles.open", create=True) as mock_aiofiles_open: + mock_get_client.return_value.__aenter__.return_value = mock_client + mock_get_client.return_value.__aexit__.return_value = None + mock_put.return_value = mock_response + + # Mock file listing with relative path + mock_get_files.return_value = [(test_file, "subdir/file.txt")] + + mock_file = AsyncMock() + mock_file.read.return_value = b"content" + mock_aiofiles_open.return_value.__aenter__.return_value = mock_file + + await upload_path(tmp_path, "my-project") + + # Verify WebDAV path format: /webdav/{project_name}/{relative_path} + mock_put.assert_called_once() + call_args = mock_put.call_args + assert call_args[0][1] == "/webdav/my-project/subdir/file.txt"