diff --git a/src/index.ts b/src/index.ts
index e958f44..8b05db8 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -32,11 +32,15 @@ import {
   formatExampleContent,
   formatFileContent,
 } from "./utils/format.js";
+import { MCP_VERSION } from "./version.js";
+import { getSyncState, writeAutoResyncAttempt } from "./utils/sync-metadata.js";
+import { getRepoTag } from "./utils/git.js";
+import type { Logger } from "./utils/git.js";
 
 const server = new Server(
   {
     name: "aztec-mcp",
-    version: "1.0.0",
+    version: MCP_VERSION,
   },
   {
     capabilities: {
@@ -189,140 +193,183 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({
   ],
 }));
 
+function validateToolRequest(name: string, args: Record<string, unknown> | undefined): void {
+  switch (name) {
+    case "aztec_sync_repos":
+    case "aztec_status":
+    case "aztec_list_examples":
+      break;
+    case "aztec_search_code":
+    case "aztec_search_docs":
+      if (!args?.query) throw new McpError(ErrorCode.InvalidParams, "query is required");
+      break;
+    case "aztec_read_example":
+      if (!args?.name) throw new McpError(ErrorCode.InvalidParams, "name is required");
+      break;
+    case "aztec_read_file":
+      if (!args?.path) throw new McpError(ErrorCode.InvalidParams, "path is required");
+      break;
+    default:
+      throw new McpError(ErrorCode.MethodNotFound, `Unknown tool: ${name}`);
+  }
+}
+
+// Sync lock — prevents concurrent syncs from racing over filesystem paths
+let syncInFlight: Promise<void> | null = null;
+
+function createSyncLog(): Logger {
+  return (message: string, level: "info" | "debug" | "warning" | "error" = "info") => {
+    server.sendLoggingMessage({
+      level,
+      logger: "aztec-sync",
+      data: message,
+    }).catch(() => {});
+  };
+}
+
+function ensureAutoResync(): void {
+  // If a sync is already in flight, don't start another — the call site
+  // awaits the in-flight sync before running the tool.
+  if (syncInFlight) return;
+
+  const syncState = getSyncState();
+  if (syncState.kind !== "needsAutoResync" && syncState.kind !== "legacyUnknownVersion") {
+    return;
+  }
+
+  const task = (async () => {
+    const log = createSyncLog();
+
+    let version: string | undefined;
+    if (syncState.kind === "needsAutoResync") {
+      version = syncState.aztecVersion;
+      log(`Auto-syncing repos for MCP server v${MCP_VERSION}...`, "info");
+    } else {
+      // Legacy install — try to detect version from existing checkout
+      const detectedTag = await getRepoTag("aztec-packages");
+      if (detectedTag) {
+        version = detectedTag;
+        log(`Auto-syncing repos (detected ${detectedTag} from existing checkout)...`, "info");
+      } else {
+        log("Install predates sync metadata. Run aztec_sync_repos to establish tracked state.", "warning");
+        try { writeAutoResyncAttempt("deferred"); } catch { /* non-fatal */ }
+        return;
+      }
+    }
+
+    const syncResult = await syncRepos({ version, force: true, log });
+    if (syncResult.metadataSafe) {
+      log("Auto-sync complete", "info");
+    } else {
+      // Sync failed or metadata could not be persisted — retry after backoff
+      try { writeAutoResyncAttempt("retryable"); } catch { /* non-fatal */ }
+      if (syncResult.success) {
+        log(`Auto-resync partial: ${syncResult.message}`, "info");
+      } else {
+        log(`Auto-resync failed: ${syncResult.message}. Local tools will use existing checkouts.`, "warning");
+      }
+    }
+  })();
+
+  // Start the sync without awaiting it here — auto-resync is best-effort
+  // background work; tool handlers await syncInFlight before touching repos.
+ syncInFlight = task.finally(() => { syncInFlight = null; }); +} + /** * Handle tool calls */ server.setRequestHandler(CallToolRequestSchema, async (request) => { const { name, arguments: args } = request.params; + // Validate tool name and required arguments before any expensive operations + validateToolRequest(name, args); + + // Auto re-sync if MCP server version changed since last sync. + // ensureAutoResync() starts the sync (fire-and-forget) — we then wait for any + // in-flight sync to finish so read-only tools don't race against filesystem mutations. + if (name !== "aztec_sync_repos") { + ensureAutoResync(); + if (syncInFlight) await syncInFlight.catch(() => {}); + } + try { + // validateToolRequest() above guarantees name is a known tool + let text!: string; + switch (name) { case "aztec_sync_repos": { - const log = (message: string, level: string = "info") => { - server.sendLoggingMessage({ - level: level as "info" | "debug" | "warning" | "error", - logger: "aztec-sync", - data: message, - }).catch(() => {}); - }; - const result = await syncRepos({ + // Wait for any in-flight sync (auto or manual) before starting + while (syncInFlight) await syncInFlight.catch(() => {}); + const log = createSyncLog(); + const task = syncRepos({ version: args?.version as string | undefined, force: args?.force as boolean | undefined, repos: args?.repos as string[] | undefined, log, }); - return { - content: [ - { - type: "text", - text: formatSyncResult(result), - }, - ], - }; + syncInFlight = task.then(() => {}).finally(() => { syncInFlight = null; }); + const result = await task; + text = formatSyncResult(result); + break; } case "aztec_status": { const status = await getStatus(); - return { - content: [ - { - type: "text", - text: formatStatus(status), - }, - ], - }; + text = formatStatus(status); + break; } case "aztec_search_code": { - if (!args?.query) { - throw new McpError(ErrorCode.InvalidParams, "query is required"); - } const result = searchAztecCode({ - query: args.query as string, + query: args!.query as string, filePattern: args?.filePattern as string | undefined, repo: args?.repo as string | undefined, maxResults: args?.maxResults as number | undefined, }); - return { - content: [ - { - type: "text", - text: formatSearchResults(result), - }, - ], - }; + text = formatSearchResults(result); + break; } case "aztec_search_docs": { - if (!args?.query) { - throw new McpError(ErrorCode.InvalidParams, "query is required"); - } const result = searchAztecDocs({ - query: args.query as string, + query: args!.query as string, section: args?.section as string | undefined, maxResults: args?.maxResults as number | undefined, }); - return { - content: [ - { - type: "text", - text: formatSearchResults(result), - }, - ], - }; + text = formatSearchResults(result); + break; } case "aztec_list_examples": { const result = listAztecExamples({ category: args?.category as string | undefined, }); - return { - content: [ - { - type: "text", - text: formatExamplesList(result), - }, - ], - }; + text = formatExamplesList(result); + break; } case "aztec_read_example": { - if (!args?.name) { - throw new McpError(ErrorCode.InvalidParams, "name is required"); - } const result = readAztecExample({ - name: args.name as string, + name: args!.name as string, }); - return { - content: [ - { - type: "text", - text: formatExampleContent(result), - }, - ], - }; + text = formatExampleContent(result); + break; } case "aztec_read_file": { - if (!args?.path) { - throw new McpError(ErrorCode.InvalidParams, "path is required"); - 
} const result = readRepoFile({ - path: args.path as string, + path: args!.path as string, }); - return { - content: [ - { - type: "text", - text: formatFileContent(result), - }, - ], - }; + text = formatFileContent(result); + break; } - default: - throw new McpError(ErrorCode.MethodNotFound, `Unknown tool: ${name}`); } + + return { + content: [{ type: "text", text }], + }; } catch (error) { if (error instanceof McpError) throw error; diff --git a/src/tools/sync.ts b/src/tools/sync.ts index 1f99980..06da6ce 100644 --- a/src/tools/sync.ts +++ b/src/tools/sync.ts @@ -2,11 +2,15 @@ * Repository sync tool - clones and updates Aztec repositories */ +import { existsSync } from "fs"; +import { join } from "path"; import { AZTEC_REPOS, getAztecRepos, DEFAULT_AZTEC_VERSION, RepoConfig } from "../repos/config.js"; -import { cloneRepo, getReposStatus, getNoirCommitFromAztec, REPOS_DIR, Logger } from "../utils/git.js"; +import { cloneRepo, getReposStatus, getNoirCommitFromAztec, getRepoPath, REPOS_DIR, Logger } from "../utils/git.js"; +import { writeSyncMetadata, stampMetadataMcpVersion, readSyncMetadata, SyncMetadata } from "../utils/sync-metadata.js"; export interface SyncResult { success: boolean; + metadataSafe: boolean; message: string; version: string; repos: { @@ -40,6 +44,7 @@ export async function syncRepos(options: { if (reposToSync.length === 0) { return { success: false, + metadataSafe: false, message: "No repositories matched the specified names", version: effectiveVersion, repos: [], @@ -94,8 +99,25 @@ export async function syncRepos(options: { await syncRepo(aztecPackages, nextIndex++, totalRepos); } - // Get the Noir commit from aztec-packages (if available) - const noirCommit = await getNoirCommitFromAztec(); + // Abort if aztec-packages failed during any version-targeted or forced sync — + // cloneRepo does a destructive replacement when the version changes, so a failure + // leaves the old checkout while other repos sync to the new tag, producing a + // mixed-version workspace. + const aztecFailed = results.some( + (r) => r.name === "aztec-packages" && r.status.toLowerCase().includes("error"), + ); + if (aztecFailed && (force || version)) { + return { + success: false, + metadataSafe: false, + message: "Sync aborted: aztec-packages failed to sync", + version: effectiveVersion, + repos: results, + }; + } + + // Only derive noir commit if aztec-packages succeeded + const noirCommit = !aztecFailed ? await getNoirCommitFromAztec() : null; if (noirCommit) { log?.(`Resolved Noir commit from aztec-packages: ${noirCommit.substring(0, 7)}`, "info"); } @@ -133,17 +155,66 @@ export async function syncRepos(options: { parallelBatch.map((item) => syncRepo(item.config, item.index, totalRepos, item.statusTransform)) ); + // Warn if versioned docs paths don't exist after clone + let versionedDocsMissing = false; + for (const repo of syntheticRepos) { + const result = results.find((r) => r.name === repo.name); + if (!result || result.status.toLowerCase().includes("error")) continue; + + for (const sparsePath of repo.sparse || []) { + if (!sparsePath.includes(effectiveVersion)) continue; + const fullPath = join(getRepoPath(repo.name), sparsePath); + if (!existsSync(fullPath)) { + result.status += `. Note: docs not found for ${effectiveVersion} in aztec-packages`; + versionedDocsMissing = true; + break; + } + } + } + const allSuccess = results.every( (r) => !r.status.toLowerCase().includes("error") ); log?.(`Sync complete: ${results.length} repos, ${allSuccess ? 
"all succeeded" : "some failed"}`, "info"); + // Metadata is safe to write when all repos succeeded and every configured + // repo was included (explicit full list counts as full sync). Docs-missing + // is cosmetic — repos are usable and auto-resync should not keep retrying. + const isFullSync = !repoNames || configuredRepos.every((r) => repoNames.includes(r.name)); + let metadataSafe = allSuccess && isFullSync; + let metadataWriteFailed = false; + + if (metadataSafe) { + try { + writeSyncMetadata(effectiveVersion); + } catch { + // Metadata write failed — caller must not treat this as fully persisted + metadataSafe = false; + metadataWriteFailed = true; + } + } else if (allSuccess && !isFullSync) { + // Partial sync succeeded — stamp mcpVersion so the install is not mistaken + // for a legacy or stale-version install that needs a full auto-resync. + try { + stampMetadataMcpVersion(effectiveVersion); + } catch { + // Non-fatal + } + } + + const message = !allSuccess + ? "Some repositories failed to sync" + : metadataWriteFailed + ? `Synced ${results.length} repositories but failed to persist sync metadata — next startup may re-sync` + : versionedDocsMissing + ? `Synced ${results.length} repositories but docs not found for ${effectiveVersion} — version may not exist yet` + : `Successfully synced ${results.length} repositories to ${REPOS_DIR}`; + return { success: allSuccess, - message: allSuccess - ? `Successfully synced ${results.length} repositories to ${REPOS_DIR}` - : "Some repositories failed to sync", + metadataSafe, + message, version: effectiveVersion, repos: results, }; @@ -160,6 +231,7 @@ export async function getStatus(): Promise<{ cloned: boolean; commit?: string; }[]; + syncMetadata: SyncMetadata | null; }> { const statusMap = await getReposStatus(AZTEC_REPOS); @@ -176,5 +248,6 @@ export async function getStatus(): Promise<{ return { reposDir: REPOS_DIR, repos, + syncMetadata: readSyncMetadata(), }; } diff --git a/src/utils/format.ts b/src/utils/format.ts index 2cdc860..d05e9ae 100644 --- a/src/utils/format.ts +++ b/src/utils/format.ts @@ -4,6 +4,7 @@ import type { SyncResult } from "../tools/sync.js"; import type { SearchResult, FileInfo } from "./search.js"; +import type { SyncMetadata } from "./sync-metadata.js"; export function formatSyncResult(result: SyncResult): string { const lines = [ @@ -31,15 +32,23 @@ export function formatStatus(status: { cloned: boolean; commit?: string; }[]; + syncMetadata?: SyncMetadata | null; }): string { const lines = [ "Aztec MCP Server Status", "", `Repos directory: ${status.reposDir}`, - "", - "Repositories:", ]; + if (status.syncMetadata) { + lines.push(`Last synced: ${status.syncMetadata.syncedAt}`); + lines.push(`MCP server version: ${status.syncMetadata.mcpVersion}`); + lines.push(`Aztec version: ${status.syncMetadata.aztecVersion}`); + } + + lines.push(""); + lines.push("Repositories:"); + for (const repo of status.repos) { const icon = repo.cloned ? "✓" : "○"; const commit = repo.commit ? 
` (${repo.commit})` : ""; diff --git a/src/utils/git.ts b/src/utils/git.ts index 72ada50..556db68 100644 --- a/src/utils/git.ts +++ b/src/utils/git.ts @@ -3,7 +3,7 @@ */ import { simpleGit, SimpleGit } from "simple-git"; -import { existsSync, mkdirSync, rmSync } from "fs"; +import { existsSync, mkdirSync, rmSync, renameSync } from "fs"; import { join } from "path"; import { homedir } from "os"; import { RepoConfig } from "../repos/config.js"; @@ -51,15 +51,10 @@ export async function cloneRepo( // Check if we need to re-clone due to version mismatch const versionMismatch = await needsReclone(config); - - // Remove existing if force is set or version changed - if ((force || versionMismatch) && existsSync(repoPath)) { - log?.(`${config.name}: Removing existing clone (force=${force}, versionMismatch=${versionMismatch})`, "debug"); - rmSync(repoPath, { recursive: true, force: true }); - } + const needsForceReclone = (force || versionMismatch) && existsSync(repoPath); // If already cloned and version matches, skip or update - if (isRepoCloned(config.name)) { + if (!needsForceReclone && isRepoCloned(config.name)) { if (config.tag || config.commit) { log?.(`${config.name}: Already cloned at correct ${config.tag ? "tag" : "commit"}, skipping`, "debug"); return `${config.name} already at ${config.commit || config.tag}`; @@ -68,6 +63,15 @@ export async function cloneRepo( return await updateRepo(config.name, log); } + // Clone to a temp dir when replacing an existing repo, so failure leaves the old repo intact + const clonePath = needsForceReclone ? repoPath + ".tmp" : repoPath; + + if (needsForceReclone) { + log?.(`${config.name}: Safe re-clone (force=${force}, versionMismatch=${versionMismatch})`, "debug"); + // Clean up stale temp dir from any previous failed attempt + rmSync(clonePath, { recursive: true, force: true }); + } + // Determine ref to checkout: commit > tag > branch const ref = config.commit || config.tag || config.branch || "default"; const refType = config.commit ? "commit" : config.tag ? 
"tag" : "branch"; @@ -83,83 +87,110 @@ export async function cloneRepo( const git: SimpleGit = simpleGit({ progress: progressHandler }); - if (isSparse) { - // Clone with sparse checkout for large repos - if (config.commit) { - // For commits, we need full history to fetch the commit - await git.clone(config.url, repoPath, [ - "--filter=blob:none", - "--sparse", - "--no-checkout", - ]); - - const repoGit = simpleGit({ baseDir: repoPath, progress: progressHandler }); - await repoGit.raw(["config", "gc.auto", "0"]); - log?.(`${config.name}: Setting sparse checkout paths: ${config.sparse!.join(", ")}`, "debug"); - await repoGit.raw(["sparse-checkout", "set", ...config.sparse!]); - log?.(`${config.name}: Fetching commit ${config.commit.substring(0, 7)}`, "info"); - await repoGit.fetch(["origin", config.commit]); - log?.(`${config.name}: Checking out commit`, "debug"); - await repoGit.checkout(config.commit); - } else if (config.tag) { - await git.clone(config.url, repoPath, [ - "--filter=blob:none", - "--sparse", - "--no-checkout", - ]); - - const repoGit = simpleGit({ baseDir: repoPath, progress: progressHandler }); - await repoGit.raw(["config", "gc.auto", "0"]); - log?.(`${config.name}: Setting sparse checkout paths: ${config.sparse!.join(", ")}`, "debug"); - await repoGit.raw(["sparse-checkout", "set", ...config.sparse!]); - log?.(`${config.name}: Fetching tag ${config.tag}`, "info"); - await repoGit.fetch(["--depth=1", "origin", `refs/tags/${config.tag}:refs/tags/${config.tag}`]); - log?.(`${config.name}: Checking out tag`, "debug"); - await repoGit.checkout(config.tag); + try { + if (isSparse) { + // Clone with sparse checkout for large repos + if (config.commit) { + // For commits, we need full history to fetch the commit + await git.clone(config.url, clonePath, [ + "--filter=blob:none", + "--sparse", + "--no-checkout", + ]); + + const repoGit = simpleGit({ baseDir: clonePath, progress: progressHandler }); + await repoGit.raw(["config", "gc.auto", "0"]); + log?.(`${config.name}: Setting sparse checkout paths: ${config.sparse!.join(", ")}`, "debug"); + await repoGit.raw(["sparse-checkout", "set", ...config.sparse!]); + log?.(`${config.name}: Fetching commit ${config.commit.substring(0, 7)}`, "info"); + await repoGit.fetch(["origin", config.commit]); + log?.(`${config.name}: Checking out commit`, "debug"); + await repoGit.checkout(config.commit); + } else if (config.tag) { + await git.clone(config.url, clonePath, [ + "--filter=blob:none", + "--sparse", + "--no-checkout", + ]); + + const repoGit = simpleGit({ baseDir: clonePath, progress: progressHandler }); + await repoGit.raw(["config", "gc.auto", "0"]); + log?.(`${config.name}: Setting sparse checkout paths: ${config.sparse!.join(", ")}`, "debug"); + await repoGit.raw(["sparse-checkout", "set", ...config.sparse!]); + log?.(`${config.name}: Fetching tag ${config.tag}`, "info"); + await repoGit.fetch(["--depth=1", "origin", `refs/tags/${config.tag}:refs/tags/${config.tag}`]); + log?.(`${config.name}: Checking out tag`, "debug"); + await repoGit.checkout(config.tag); + } else { + await git.clone(config.url, clonePath, [ + "--filter=blob:none", + "--sparse", + "--depth=1", + ...(config.branch ? 
["-b", config.branch] : []), + ]); + + const repoGit = simpleGit({ baseDir: clonePath, progress: progressHandler }); + await repoGit.raw(["config", "gc.auto", "0"]); + log?.(`${config.name}: Setting sparse checkout paths: ${config.sparse!.join(", ")}`, "debug"); + await repoGit.raw(["sparse-checkout", "set", ...config.sparse!]); + } } else { - await git.clone(config.url, repoPath, [ - "--filter=blob:none", - "--sparse", - "--depth=1", - ...(config.branch ? ["-b", config.branch] : []), - ]); - - const repoGit = simpleGit({ baseDir: repoPath, progress: progressHandler }); - await repoGit.raw(["config", "gc.auto", "0"]); - log?.(`${config.name}: Setting sparse checkout paths: ${config.sparse!.join(", ")}`, "debug"); - await repoGit.raw(["sparse-checkout", "set", ...config.sparse!]); + // Clone for smaller repos + if (config.commit) { + // For commits, clone and checkout specific commit + await git.clone(config.url, clonePath, ["--no-checkout"]); + const repoGit = simpleGit({ baseDir: clonePath, progress: progressHandler }); + log?.(`${config.name}: Fetching commit ${config.commit.substring(0, 7)}`, "info"); + await repoGit.fetch(["origin", config.commit]); + log?.(`${config.name}: Checking out commit`, "debug"); + await repoGit.checkout(config.commit); + } else if (config.tag) { + // Clone and checkout tag + await git.clone(config.url, clonePath, ["--no-checkout"]); + const repoGit = simpleGit({ baseDir: clonePath, progress: progressHandler }); + log?.(`${config.name}: Fetching tag ${config.tag}`, "info"); + await repoGit.fetch(["--depth=1", "origin", `refs/tags/${config.tag}:refs/tags/${config.tag}`]); + log?.(`${config.name}: Checking out tag`, "debug"); + await repoGit.checkout(config.tag); + } else { + await git.clone(config.url, clonePath, [ + "--depth=1", + ...(config.branch ? ["-b", config.branch] : []), + ]); + } } - - log?.(`${config.name}: Clone complete`, "info"); - return `Cloned ${config.name} @ ${ref} (${refType}, sparse: ${config.sparse!.join(", ")})`; - } else { - // Clone for smaller repos - if (config.commit) { - // For commits, clone and checkout specific commit - await git.clone(config.url, repoPath, ["--no-checkout"]); - const repoGit = simpleGit({ baseDir: repoPath, progress: progressHandler }); - log?.(`${config.name}: Fetching commit ${config.commit.substring(0, 7)}`, "info"); - await repoGit.fetch(["origin", config.commit]); - log?.(`${config.name}: Checking out commit`, "debug"); - await repoGit.checkout(config.commit); - } else if (config.tag) { - // Clone and checkout tag - await git.clone(config.url, repoPath, ["--no-checkout"]); - const repoGit = simpleGit({ baseDir: repoPath, progress: progressHandler }); - log?.(`${config.name}: Fetching tag ${config.tag}`, "info"); - await repoGit.fetch(["--depth=1", "origin", `refs/tags/${config.tag}:refs/tags/${config.tag}`]); - log?.(`${config.name}: Checking out tag`, "debug"); - await repoGit.checkout(config.tag); - } else { - await git.clone(config.url, repoPath, [ - "--depth=1", - ...(config.branch ? ["-b", config.branch] : []), - ]); + } catch (error) { + // On failure: clean up temp dir, leave original repo intact + if (needsForceReclone) { + rmSync(clonePath, { recursive: true, force: true }); } + throw error; + } - log?.(`${config.name}: Clone complete`, "info"); - return `Cloned ${config.name} @ ${ref} (${refType})`; + // On success: atomic swap — move old out, move new in, then delete old. + // If the new rename fails, restore old from backup so the repo stays available. 
+ if (needsForceReclone) { + const backupPath = repoPath + ".old"; + rmSync(backupPath, { recursive: true, force: true }); + if (existsSync(repoPath)) { + renameSync(repoPath, backupPath); + } + try { + renameSync(clonePath, repoPath); + } catch (swapError) { + // Restore old checkout so the repo isn't left unavailable + if (existsSync(backupPath)) { + try { renameSync(backupPath, repoPath); } catch { /* best-effort restore */ } + } + rmSync(clonePath, { recursive: true, force: true }); + throw swapError; + } + rmSync(backupPath, { recursive: true, force: true }); } + + log?.(`${config.name}: Clone complete`, "info"); + const sparseLabel = isSparse ? `, sparse: ${config.sparse!.join(", ")}` : ""; + return `Cloned ${config.name} @ ${ref} (${refType}${sparseLabel})`; } /** diff --git a/src/utils/sync-metadata.ts b/src/utils/sync-metadata.ts new file mode 100644 index 0000000..f51102e --- /dev/null +++ b/src/utils/sync-metadata.ts @@ -0,0 +1,156 @@ +import { readFileSync, writeFileSync, existsSync, readdirSync } from "fs"; +import { join } from "path"; +import { REPOS_DIR } from "./git.js"; +import { MCP_VERSION } from "../version.js"; + +export interface SyncMetadata { + mcpVersion: string; + syncedAt: string; + aztecVersion: string; + autoResyncAttempt?: { + targetMcpVersion: string; + attemptedAt: string; + result?: "deferred" | "retryable" | "hard_failure"; + }; +} + +export type SyncState = + | { kind: "upToDate" } + | { kind: "needsAutoResync"; aztecVersion: string } + | { kind: "legacyUnknownVersion" } + | { kind: "noRepos" }; + +export function getMetadataPath(): string { + return join(REPOS_DIR, ".sync-metadata.json"); +} + +// In-memory cache — avoids a readFileSync + JSON.parse on every tool call +let cachedSyncState: SyncState | null = null; + +/** Invalidate the cached sync state (called after writes) */ +export function invalidateSyncStateCache(): void { + cachedSyncState = null; +} + +export function writeSyncMetadata(aztecVersion: string): void { + const metadata: SyncMetadata = { + mcpVersion: MCP_VERSION, + syncedAt: new Date().toISOString(), + aztecVersion, + }; + writeFileSync(getMetadataPath(), JSON.stringify(metadata, null, 2)); + invalidateSyncStateCache(); +} + +export function readSyncMetadata(): SyncMetadata | null { + try { + const raw = readFileSync(getMetadataPath(), "utf-8"); + const parsed = JSON.parse(raw); + if (parsed && typeof parsed.mcpVersion === "string") { + return parsed as SyncMetadata; + } + return null; + } catch { + return null; + } +} + +/** + * Record a failed auto-resync attempt so we don't retry on every request. + * Preserves existing metadata fields; creates minimal metadata if none exists. + */ +export function writeAutoResyncAttempt( + result?: "deferred" | "retryable" | "hard_failure", +): void { + const existing = readSyncMetadata(); + const metadata: SyncMetadata = existing ?? { + mcpVersion: "unknown", + syncedAt: "", + aztecVersion: "", + }; + metadata.autoResyncAttempt = { + targetMcpVersion: MCP_VERSION, + attemptedAt: new Date().toISOString(), + result, + }; + writeFileSync(getMetadataPath(), JSON.stringify(metadata, null, 2)); + invalidateSyncStateCache(); +} + +/** + * Update just the mcpVersion in existing metadata (or create minimal metadata). + * Used after partial syncs so the install is not mistaken for a legacy or + * stale-version install that needs a full auto-resync. 
+ */ +export function stampMetadataMcpVersion(aztecVersion: string): void { + const existing = readSyncMetadata(); + const metadata: SyncMetadata = existing ?? { + mcpVersion: MCP_VERSION, + syncedAt: new Date().toISOString(), + aztecVersion, + }; + metadata.mcpVersion = MCP_VERSION; + if (!metadata.aztecVersion) { + metadata.aztecVersion = aztecVersion; + } + delete metadata.autoResyncAttempt; + writeFileSync(getMetadataPath(), JSON.stringify(metadata, null, 2)); + invalidateSyncStateCache(); +} + +/** + * Determine whether auto-resync is needed based on persisted metadata. + * + * States: + * - noRepos: fresh install, no repos dir exists + * - legacyUnknownVersion: repos exist but no metadata (pre-metadata install), + * or metadata exists with unknown aztecVersion from a prior failed attempt + * - needsAutoResync: metadata version doesn't match current MCP version + * - upToDate: versions match, or auto-resync already attempted for this version + */ +export function getSyncState(): SyncState { + if (cachedSyncState) return cachedSyncState; + + const result = computeSyncState(); + cachedSyncState = result; + return result; +} + +function computeSyncState(): SyncState { + const metadata = readSyncMetadata(); + if (!metadata) { + if (!existsSync(REPOS_DIR)) return { kind: "noRepos" }; + // A failed initial sync can leave REPOS_DIR empty — treat that as noRepos + // rather than legacyUnknownVersion (which would trigger auto-resync and + // then writeAutoResyncAttempt, permanently suppressing retries). + try { + const hasRepos = readdirSync(REPOS_DIR).some((e) => !e.startsWith(".")); + return hasRepos ? { kind: "legacyUnknownVersion" } : { kind: "noRepos" }; + } catch { + return { kind: "noRepos" }; + } + } + if (metadata.mcpVersion === MCP_VERSION) { + return { kind: "upToDate" }; + } + // Version mismatch — already attempted auto-resync for this MCP version? 
+ if (metadata.autoResyncAttempt?.targetMcpVersion === MCP_VERSION) { + const attempt = metadata.autoResyncAttempt; + // Retryable failures back off for 30 minutes then allow a retry + if (attempt.result === "retryable") { + const elapsed = Date.now() - new Date(attempt.attemptedAt).getTime(); + if (elapsed < 30 * 60 * 1000) { + return { kind: "upToDate" }; + } + // Backoff expired — fall through to needsAutoResync + } else { + // hard_failure, deferred, or undefined (backwards compat) — permanent suppress + return { kind: "upToDate" }; + } + } + // Version mismatch with unknown aztec version — treat as legacy + if (!metadata.aztecVersion) { + return { kind: "legacyUnknownVersion" }; + } + return { kind: "needsAutoResync", aztecVersion: metadata.aztecVersion }; +} diff --git a/src/version.ts b/src/version.ts new file mode 100644 index 0000000..4163e8a --- /dev/null +++ b/src/version.ts @@ -0,0 +1,4 @@ +import { createRequire } from "module"; + +const require = createRequire(import.meta.url); +export const MCP_VERSION: string = require("../package.json").version; diff --git a/tests/tools/sync.test.ts b/tests/tools/sync.test.ts index 7bcc37f..1b8ebf9 100644 --- a/tests/tools/sync.test.ts +++ b/tests/tools/sync.test.ts @@ -3,6 +3,10 @@ import { describe, it, expect, vi, beforeEach } from "vitest"; const mockCloneRepo = vi.fn(); const mockGetReposStatus = vi.fn(); const mockGetNoirCommitFromAztec = vi.fn(); +const mockWriteSyncMetadata = vi.fn(); +const mockStampMetadataMcpVersion = vi.fn(); +const mockReadSyncMetadata = vi.fn(); +const mockExistsSync = vi.fn(); vi.mock("../../src/repos/config.js", () => ({ AZTEC_REPOS: [ @@ -77,9 +81,25 @@ vi.mock("../../src/utils/git.js", () => ({ cloneRepo: (...args: any[]) => mockCloneRepo(...args), getReposStatus: (...args: any[]) => mockGetReposStatus(...args), getNoirCommitFromAztec: () => mockGetNoirCommitFromAztec(), + getRepoPath: (name: string) => `/fake/repos/${name}`, REPOS_DIR: "/fake/repos", })); +vi.mock("../../src/utils/sync-metadata.js", () => ({ + writeSyncMetadata: (...args: any[]) => mockWriteSyncMetadata(...args), + stampMetadataMcpVersion: (...args: any[]) => mockStampMetadataMcpVersion(...args), + readSyncMetadata: () => mockReadSyncMetadata(), +})); + +vi.mock("fs", () => ({ + existsSync: (...args: any[]) => mockExistsSync(...args), +})); + +vi.mock("path", async () => { + const actual = await vi.importActual("path"); + return actual; +}); + import { getAztecRepos } from "../../src/repos/config.js"; import { syncRepos, getStatus } from "../../src/tools/sync.js"; @@ -89,6 +109,8 @@ beforeEach(() => { vi.clearAllMocks(); mockCloneRepo.mockResolvedValue("Cloned"); mockGetNoirCommitFromAztec.mockResolvedValue(null); + mockWriteSyncMetadata.mockReset(); + mockExistsSync.mockReturnValue(true); }); describe("syncRepos", () => { @@ -194,6 +216,162 @@ describe("syncRepos", () => { true ); }); + + it("returns metadataSafe:true on full sync success", async () => { + mockCloneRepo.mockResolvedValue("Cloned"); + const result = await syncRepos({}); + + expect(result.metadataSafe).toBe(true); + expect(mockWriteSyncMetadata).toHaveBeenCalledWith("v1.0.0"); + }); + + it("writes sync metadata with custom version on full sync success", async () => { + mockCloneRepo.mockResolvedValue("Cloned"); + await syncRepos({ version: "v2.0.0" }); + + expect(mockWriteSyncMetadata).toHaveBeenCalledWith("v2.0.0"); + }); + + it("does not write sync metadata on failure", async () => { + mockCloneRepo + .mockResolvedValueOnce("Cloned ok") + .mockRejectedValueOnce(new 
Error("fail")) + .mockResolvedValue("Cloned ok"); + + await syncRepos({}); + + expect(mockWriteSyncMetadata).not.toHaveBeenCalled(); + }); + + it("does not write sync metadata on partial sync", async () => { + mockCloneRepo.mockResolvedValue("Cloned"); + await syncRepos({ repos: ["aztec-packages"] }); + + expect(mockWriteSyncMetadata).not.toHaveBeenCalled(); + }); + + it("does not fail sync if metadata write throws but clears metadataSafe and reports it", async () => { + mockCloneRepo.mockResolvedValue("Cloned"); + mockWriteSyncMetadata.mockImplementation(() => { + throw new Error("write failed"); + }); + + const result = await syncRepos({}); + expect(result.success).toBe(true); + expect(result.metadataSafe).toBe(false); + expect(result.message).toContain("failed to persist sync metadata"); + }); + + it("warns when versioned docs path does not exist after clone", async () => { + mockExistsSync.mockReturnValue(false); + + const result = await syncRepos({}); + + const docsRepo = result.repos.find((r) => r.name === "aztec-packages-docs"); + expect(docsRepo).toBeDefined(); + expect(docsRepo!.status).toContain("docs not found for v1.0.0"); + // Docs missing is cosmetic — repos are usable, metadata should be written + expect(result.success).toBe(true); + expect(result.metadataSafe).toBe(true); + expect(mockWriteSyncMetadata).toHaveBeenCalledWith("v1.0.0"); + }); + + it("provides distinct message when docs missing but clones succeed", async () => { + mockExistsSync.mockReturnValue(false); + + const result = await syncRepos({}); + + expect(result.success).toBe(true); + expect(result.message).toContain("docs not found for v1.0.0"); + expect(result.message).toContain("version may not exist yet"); + expect(result.message).not.toContain("failed to sync"); + }); + + it("does not warn when versioned docs path exists", async () => { + mockExistsSync.mockReturnValue(true); + + const result = await syncRepos({}); + + const docsRepo = result.repos.find((r) => r.name === "aztec-packages-docs"); + expect(docsRepo).toBeDefined(); + expect(docsRepo!.status).not.toContain("docs not found"); + }); + + it("aborts when aztec-packages fails with force", async () => { + mockCloneRepo.mockImplementation(async (config: any) => { + if (config.name === "aztec-packages") throw new Error("clone failed"); + return "Cloned"; + }); + + const result = await syncRepos({ force: true }); + + expect(result.success).toBe(false); + expect(result.metadataSafe).toBe(false); + expect(result.message).toContain("aztec-packages failed"); + // Should only have attempted aztec-packages, not the rest + expect(result.repos).toHaveLength(1); + expect(result.repos[0].name).toBe("aztec-packages"); + }); + + it("does not derive noir commit when aztec-packages fails", async () => { + mockCloneRepo.mockImplementation(async (config: any) => { + if (config.name === "aztec-packages") throw new Error("clone failed"); + return "Cloned"; + }); + + await syncRepos({ force: true }); + + expect(mockGetNoirCommitFromAztec).not.toHaveBeenCalled(); + }); + + it("aborts when aztec-packages fails with version (no force)", async () => { + mockCloneRepo.mockImplementation(async (config: any) => { + if (config.name === "aztec-packages") throw new Error("clone failed"); + return "Cloned"; + }); + + const result = await syncRepos({ version: "v2.0.0" }); + + expect(result.success).toBe(false); + expect(result.metadataSafe).toBe(false); + expect(result.message).toContain("aztec-packages failed"); + expect(result.repos).toHaveLength(1); + }); + + it("continues when 
aztec-packages fails without force or version", async () => { + mockCloneRepo.mockImplementation(async (config: any) => { + if (config.name === "aztec-packages") throw new Error("clone failed"); + return "Cloned"; + }); + + const result = await syncRepos({}); + + // Should have attempted all repos, not just aztec-packages + expect(result.repos.length).toBeGreaterThan(1); + expect(result.success).toBe(false); + // Should not derive noir commit from failed aztec-packages + expect(mockGetNoirCommitFromAztec).not.toHaveBeenCalled(); + }); + + it("returns metadataSafe:false on partial sync but stamps mcpVersion", async () => { + mockCloneRepo.mockResolvedValue("Cloned"); + const result = await syncRepos({ repos: ["aztec-packages"] }); + + expect(result.success).toBe(true); + expect(result.metadataSafe).toBe(false); + expect(mockWriteSyncMetadata).not.toHaveBeenCalled(); + expect(mockStampMetadataMcpVersion).toHaveBeenCalledWith("v1.0.0"); + }); + + it("treats explicit full repo list as full sync for metadata", async () => { + mockCloneRepo.mockResolvedValue("Cloned"); + const allRepoNames = ["aztec-packages", "aztec-examples", "noir", "noir-examples", "aztec-starter"]; + const result = await syncRepos({ repos: allRepoNames }); + + expect(result.success).toBe(true); + expect(result.metadataSafe).toBe(true); + expect(mockWriteSyncMetadata).toHaveBeenCalledWith("v1.0.0"); + }); }); describe("getStatus", () => { @@ -234,4 +412,28 @@ describe("getStatus", () => { expect(examples?.cloned).toBe(false); expect(examples?.commit).toBeUndefined(); }); + + it("includes syncMetadata when available", async () => { + mockGetReposStatus.mockResolvedValue(new Map()); + mockReadSyncMetadata.mockReturnValue({ + mcpVersion: "1.5.0", + syncedAt: "2025-01-01T00:00:00.000Z", + aztecVersion: "v1.0.0", + }); + + const status = await getStatus(); + expect(status.syncMetadata).toEqual({ + mcpVersion: "1.5.0", + syncedAt: "2025-01-01T00:00:00.000Z", + aztecVersion: "v1.0.0", + }); + }); + + it("includes null syncMetadata when no file exists", async () => { + mockGetReposStatus.mockResolvedValue(new Map()); + mockReadSyncMetadata.mockReturnValue(null); + + const status = await getStatus(); + expect(status.syncMetadata).toBeNull(); + }); }); diff --git a/tests/utils/format.test.ts b/tests/utils/format.test.ts index 6b8341f..c0a5441 100644 --- a/tests/utils/format.test.ts +++ b/tests/utils/format.test.ts @@ -12,6 +12,7 @@ describe("formatSyncResult", () => { it("shows checkmark for success", () => { const result = formatSyncResult({ success: true, + metadataSafe: true, message: "All good", version: "v1.0.0", repos: [{ name: "repo1", status: "Cloned repo1" }], @@ -23,6 +24,7 @@ describe("formatSyncResult", () => { it("shows warning icon for failure", () => { const result = formatSyncResult({ success: false, + metadataSafe: false, message: "Some failed", version: "v1.0.0", repos: [], @@ -33,6 +35,7 @@ describe("formatSyncResult", () => { it("shows per-repo icons based on error in status", () => { const result = formatSyncResult({ success: false, + metadataSafe: false, message: "Mixed", version: "v1.0.0", repos: [ @@ -76,6 +79,31 @@ describe("formatStatus", () => { }); expect(result).toContain("No repositories cloned"); }); + + it("displays sync metadata when present", () => { + const result = formatStatus({ + reposDir: "/repos", + repos: [], + syncMetadata: { + mcpVersion: "1.5.0", + syncedAt: "2025-01-01T00:00:00.000Z", + aztecVersion: "v1.0.0", + }, + }); + expect(result).toContain("Last synced: 2025-01-01T00:00:00.000Z"); + 
expect(result).toContain("MCP server version: 1.5.0"); + expect(result).toContain("Aztec version: v1.0.0"); + }); + + it("omits sync metadata lines when not present", () => { + const result = formatStatus({ + reposDir: "/repos", + repos: [], + }); + expect(result).not.toContain("Last synced"); + expect(result).not.toContain("MCP server version"); + expect(result).not.toContain("Aztec version"); + }); }); describe("formatSearchResults", () => { diff --git a/tests/utils/git.test.ts b/tests/utils/git.test.ts index 9485c18..6781651 100644 --- a/tests/utils/git.test.ts +++ b/tests/utils/git.test.ts @@ -19,6 +19,7 @@ vi.mock("fs", () => ({ existsSync: vi.fn(), mkdirSync: vi.fn(), rmSync: vi.fn(), + renameSync: vi.fn(), })); vi.mock("os", () => ({ @@ -29,7 +30,7 @@ vi.mock("os", () => ({ process.env.AZTEC_MCP_REPOS_DIR = "/tmp/test-repos"; import { simpleGit } from "simple-git"; -import { existsSync, mkdirSync, rmSync } from "fs"; +import { existsSync, mkdirSync, rmSync, renameSync } from "fs"; import { REPOS_DIR, ensureReposDir, @@ -48,6 +49,7 @@ import type { RepoConfig } from "../../src/repos/config.js"; const mockExistsSync = vi.mocked(existsSync); const mockMkdirSync = vi.mocked(mkdirSync); const mockRmSync = vi.mocked(rmSync); +const mockRenameSync = vi.mocked(renameSync); const mockSimpleGit = vi.mocked(simpleGit); beforeEach(() => { @@ -208,14 +210,15 @@ describe("cloneRepo", () => { expect(sparseCheckoutCalls).toHaveLength(0); }); - it("force=true removes existing directory", async () => { - // First call: not cloned (needsReclone check) - make it return true - // isRepoCloned checks existsSync for .git dir - // existsSync calls: 1) needsReclone->isRepoCloned(.git), 2) repoPath exists, 3) isRepoCloned(.git) + it("force=true clones to temp dir then swaps", async () => { + // existsSync calls: + // 1) needsReclone -> isRepoCloned(.git) -> false (needs reclone) + // 2) existsSync(repoPath) for needsForceReclone -> true (repo exists) + // 3) existsSync(repoPath) before swap -> true (old checkout exists) mockExistsSync - .mockReturnValueOnce(false) // needsReclone -> isRepoCloned -> false -> needs clone - .mockReturnValueOnce(true) // existsSync(repoPath) for rmSync guard - .mockReturnValueOnce(false); // isRepoCloned -> not cloned after removal + .mockReturnValueOnce(false) // needsReclone -> isRepoCloned -> not at right version + .mockReturnValueOnce(true) // existsSync(repoPath) -> repo exists, so needsForceReclone=true + .mockReturnValueOnce(true); // existsSync(repoPath) before swap -> old checkout exists mockGitInstance.clone.mockResolvedValue(undefined); mockGitInstance.raw.mockResolvedValue(undefined); @@ -223,10 +226,110 @@ describe("cloneRepo", () => { mockGitInstance.checkout.mockResolvedValue(undefined); await cloneRepo(sparseConfig, true); + + // Clone goes to .tmp path + expect(mockGitInstance.clone).toHaveBeenCalledWith( + sparseConfig.url, + expect.stringContaining("aztec-packages.tmp"), + expect.any(Array) + ); + // On success: move old to .old backup, rename temp into place, delete backup + expect(mockRenameSync).toHaveBeenCalledWith( + expect.stringMatching(/aztec-packages$/), + expect.stringContaining("aztec-packages.old") + ); + expect(mockRenameSync).toHaveBeenCalledWith( + expect.stringContaining("aztec-packages.tmp"), + expect.stringMatching(/aztec-packages$/) + ); expect(mockRmSync).toHaveBeenCalledWith( - expect.stringContaining("aztec-packages"), + expect.stringContaining("aztec-packages.old"), + { recursive: true, force: true } + ); + }); + + it("clone failure 
preserves existing repo", async () => { + mockExistsSync + .mockReturnValueOnce(false) // needsReclone -> isRepoCloned -> needs reclone + .mockReturnValueOnce(true); // existsSync(repoPath) -> repo exists + + mockGitInstance.clone.mockRejectedValue(new Error("network error")); + + await expect(cloneRepo(sparseConfig, true)).rejects.toThrow("network error"); + + // Only the temp dir is cleaned up, not the original + expect(mockRmSync).toHaveBeenCalledWith( + expect.stringContaining("aztec-packages.tmp"), { recursive: true, force: true } ); + // Original repo not deleted, rename not called + expect(mockRmSync).not.toHaveBeenCalledWith( + expect.stringMatching(/aztec-packages$/), + expect.anything() + ); + expect(mockRenameSync).not.toHaveBeenCalled(); + }); + + it("stale temp dir is cleaned before clone", async () => { + mockExistsSync + .mockReturnValueOnce(false) // needsReclone -> isRepoCloned -> needs reclone + .mockReturnValueOnce(true) // existsSync(repoPath) -> repo exists + .mockReturnValueOnce(true); // existsSync(repoPath) before swap -> old checkout exists + + mockGitInstance.clone.mockResolvedValue(undefined); + mockGitInstance.raw.mockResolvedValue(undefined); + mockGitInstance.fetch.mockResolvedValue(undefined); + mockGitInstance.checkout.mockResolvedValue(undefined); + + await cloneRepo(sparseConfig, true); + + // First rmSync call cleans stale temp, before clone + const rmCalls = mockRmSync.mock.calls; + const staleTempCleanup = rmCalls.find( + (c) => typeof c[0] === "string" && c[0].endsWith(".tmp") + ); + expect(staleTempCleanup).toBeDefined(); + + // Clone still proceeds to .tmp + expect(mockGitInstance.clone).toHaveBeenCalledWith( + sparseConfig.url, + expect.stringContaining("aztec-packages.tmp"), + expect.any(Array) + ); + }); + + it("version mismatch uses safe re-clone via temp dir", async () => { + // Config with a tag that doesn't match what's cloned + const mismatchConfig: RepoConfig = { + name: "aztec-packages", + url: "https://github.com/AztecProtocol/aztec-packages", + tag: "v2.0.0", + sparse: ["docs"], + description: "test", + }; + + // needsReclone calls: isRepoCloned(.git), then getRepoTag which also calls isRepoCloned(.git) + mockExistsSync.mockReturnValueOnce(true); // needsReclone -> isRepoCloned -> true (cloned) + mockExistsSync.mockReturnValueOnce(true); // getRepoTag -> isRepoCloned -> true + mockGitInstance.raw.mockResolvedValueOnce("v1.0.0\n"); // getRepoTag -> v1.0.0 (mismatch!) 
+ mockExistsSync.mockReturnValueOnce(true); // existsSync(repoPath) -> repo exists + mockExistsSync.mockReturnValueOnce(true); // existsSync(repoPath) before swap -> exists + + mockGitInstance.clone.mockResolvedValue(undefined); + mockGitInstance.raw.mockResolvedValue(undefined); + mockGitInstance.fetch.mockResolvedValue(undefined); + mockGitInstance.checkout.mockResolvedValue(undefined); + + await cloneRepo(mismatchConfig); + + // Should clone to .tmp path (safe re-clone, not destructive) + expect(mockGitInstance.clone).toHaveBeenCalledWith( + mismatchConfig.url, + expect.stringContaining("aztec-packages.tmp"), + expect.any(Array) + ); + // Should swap on success + expect(mockRenameSync).toHaveBeenCalled(); }); it("already cloned + version match skips update for tag-pinned repos", async () => { diff --git a/tests/utils/sync-metadata.test.ts b/tests/utils/sync-metadata.test.ts new file mode 100644 index 0000000..7a81690 --- /dev/null +++ b/tests/utils/sync-metadata.test.ts @@ -0,0 +1,392 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; + +const mockReadFileSync = vi.fn(); +const mockWriteFileSync = vi.fn(); +const mockExistsSync = vi.fn(); +const mockReaddirSync = vi.fn(); + +vi.mock("fs", () => ({ + readFileSync: (...args: any[]) => mockReadFileSync(...args), + writeFileSync: (...args: any[]) => mockWriteFileSync(...args), + existsSync: (...args: any[]) => mockExistsSync(...args), + readdirSync: (...args: any[]) => mockReaddirSync(...args), +})); + +vi.mock("../../src/utils/git.js", () => ({ + REPOS_DIR: "/fake/repos", +})); + +vi.mock("../../src/version.js", () => ({ + MCP_VERSION: "2.0.0", +})); + +import { + getMetadataPath, + writeSyncMetadata, + readSyncMetadata, + getSyncState, + writeAutoResyncAttempt, + stampMetadataMcpVersion, + invalidateSyncStateCache, +} from "../../src/utils/sync-metadata.js"; + +beforeEach(() => { + vi.clearAllMocks(); + invalidateSyncStateCache(); +}); + +describe("getMetadataPath", () => { + it("returns path in REPOS_DIR", () => { + expect(getMetadataPath()).toBe("/fake/repos/.sync-metadata.json"); + }); +}); + +describe("writeSyncMetadata", () => { + it("writes JSON with mcpVersion, syncedAt, and aztecVersion", () => { + writeSyncMetadata("v1.0.0"); + + expect(mockWriteFileSync).toHaveBeenCalledOnce(); + const [path, content] = mockWriteFileSync.mock.calls[0]; + expect(path).toBe("/fake/repos/.sync-metadata.json"); + + const parsed = JSON.parse(content); + expect(parsed.mcpVersion).toBe("2.0.0"); + expect(parsed.aztecVersion).toBe("v1.0.0"); + expect(parsed.syncedAt).toBeDefined(); + }); +}); + +describe("readSyncMetadata", () => { + it("returns parsed metadata when file exists", () => { + mockReadFileSync.mockReturnValue( + JSON.stringify({ + mcpVersion: "1.5.0", + syncedAt: "2025-01-01T00:00:00.000Z", + aztecVersion: "v1.0.0", + }) + ); + + const result = readSyncMetadata(); + expect(result).toEqual({ + mcpVersion: "1.5.0", + syncedAt: "2025-01-01T00:00:00.000Z", + aztecVersion: "v1.0.0", + }); + }); + + it("returns null when file does not exist", () => { + mockReadFileSync.mockImplementation(() => { + throw new Error("ENOENT"); + }); + + expect(readSyncMetadata()).toBeNull(); + }); + + it("returns null when file contains invalid JSON", () => { + mockReadFileSync.mockReturnValue("not json"); + expect(readSyncMetadata()).toBeNull(); + }); + + it("returns null when JSON lacks mcpVersion", () => { + mockReadFileSync.mockReturnValue(JSON.stringify({ foo: "bar" })); + expect(readSyncMetadata()).toBeNull(); + }); +}); + 
+describe("writeAutoResyncAttempt", () => { + it("preserves existing metadata and adds attempt with result", () => { + mockReadFileSync.mockReturnValue( + JSON.stringify({ + mcpVersion: "1.5.0", + syncedAt: "2025-01-01T00:00:00.000Z", + aztecVersion: "v1.0.0", + }) + ); + + writeAutoResyncAttempt("hard_failure"); + + expect(mockWriteFileSync).toHaveBeenCalledOnce(); + const parsed = JSON.parse(mockWriteFileSync.mock.calls[0][1]); + expect(parsed.mcpVersion).toBe("1.5.0"); + expect(parsed.aztecVersion).toBe("v1.0.0"); + expect(parsed.autoResyncAttempt).toEqual({ + targetMcpVersion: "2.0.0", + attemptedAt: expect.any(String), + result: "hard_failure", + }); + }); + + it("creates minimal metadata when no file exists", () => { + mockReadFileSync.mockImplementation(() => { + throw new Error("ENOENT"); + }); + + writeAutoResyncAttempt("retryable"); + + expect(mockWriteFileSync).toHaveBeenCalledOnce(); + const parsed = JSON.parse(mockWriteFileSync.mock.calls[0][1]); + expect(parsed.mcpVersion).toBe("unknown"); + expect(parsed.aztecVersion).toBe(""); + expect(parsed.autoResyncAttempt.targetMcpVersion).toBe("2.0.0"); + expect(parsed.autoResyncAttempt.result).toBe("retryable"); + }); + + it("stores undefined result when no argument passed", () => { + mockReadFileSync.mockImplementation(() => { + throw new Error("ENOENT"); + }); + + writeAutoResyncAttempt(); + + const parsed = JSON.parse(mockWriteFileSync.mock.calls[0][1]); + expect(parsed.autoResyncAttempt.result).toBeUndefined(); + }); +}); + +describe("stampMetadataMcpVersion", () => { + it("updates mcpVersion in existing metadata and clears autoResyncAttempt", () => { + mockReadFileSync.mockReturnValue( + JSON.stringify({ + mcpVersion: "1.5.0", + syncedAt: "2025-01-01T00:00:00.000Z", + aztecVersion: "v1.0.0", + autoResyncAttempt: { + targetMcpVersion: "2.0.0", + attemptedAt: "2025-06-01T00:00:00.000Z", + result: "retryable", + }, + }) + ); + + stampMetadataMcpVersion("v1.0.0"); + + const parsed = JSON.parse(mockWriteFileSync.mock.calls[0][1]); + expect(parsed.mcpVersion).toBe("2.0.0"); + expect(parsed.aztecVersion).toBe("v1.0.0"); + expect(parsed.syncedAt).toBe("2025-01-01T00:00:00.000Z"); + expect(parsed.autoResyncAttempt).toBeUndefined(); + }); + + it("creates minimal metadata when no file exists", () => { + mockReadFileSync.mockImplementation(() => { + throw new Error("ENOENT"); + }); + + stampMetadataMcpVersion("v2.0.0"); + + const parsed = JSON.parse(mockWriteFileSync.mock.calls[0][1]); + expect(parsed.mcpVersion).toBe("2.0.0"); + expect(parsed.aztecVersion).toBe("v2.0.0"); + expect(parsed.syncedAt).toBeDefined(); + }); + + it("fills in empty aztecVersion from argument", () => { + mockReadFileSync.mockReturnValue( + JSON.stringify({ + mcpVersion: "unknown", + syncedAt: "", + aztecVersion: "", + }) + ); + + stampMetadataMcpVersion("v3.0.0"); + + const parsed = JSON.parse(mockWriteFileSync.mock.calls[0][1]); + expect(parsed.mcpVersion).toBe("2.0.0"); + expect(parsed.aztecVersion).toBe("v3.0.0"); + }); + + it("preserves existing aztecVersion when already set", () => { + mockReadFileSync.mockReturnValue( + JSON.stringify({ + mcpVersion: "1.0.0", + syncedAt: "2025-01-01T00:00:00.000Z", + aztecVersion: "v1.0.0", + }) + ); + + stampMetadataMcpVersion("v2.0.0"); + + const parsed = JSON.parse(mockWriteFileSync.mock.calls[0][1]); + expect(parsed.aztecVersion).toBe("v1.0.0"); + }); +}); + +describe("getSyncState", () => { + it("returns noRepos when no metadata and no repos dir", () => { + mockReadFileSync.mockImplementation(() => { + throw new 
Error("ENOENT"); + }); + mockExistsSync.mockReturnValue(false); + + expect(getSyncState()).toEqual({ kind: "noRepos" }); + }); + + it("returns legacyUnknownVersion when no metadata but repos dir has cloned repos", () => { + mockReadFileSync.mockImplementation(() => { + throw new Error("ENOENT"); + }); + mockExistsSync.mockReturnValue(true); + mockReaddirSync.mockReturnValue(["aztec-packages", "noir"]); + + expect(getSyncState()).toEqual({ kind: "legacyUnknownVersion" }); + }); + + it("returns noRepos when repos dir exists but is empty (failed initial sync)", () => { + mockReadFileSync.mockImplementation(() => { + throw new Error("ENOENT"); + }); + mockExistsSync.mockReturnValue(true); + mockReaddirSync.mockReturnValue([]); + + expect(getSyncState()).toEqual({ kind: "noRepos" }); + }); + + it("returns noRepos when repos dir has only hidden files", () => { + mockReadFileSync.mockImplementation(() => { + throw new Error("ENOENT"); + }); + mockExistsSync.mockReturnValue(true); + mockReaddirSync.mockReturnValue([".sync-metadata.json"]); + + expect(getSyncState()).toEqual({ kind: "noRepos" }); + }); + + it("returns upToDate when version matches", () => { + mockReadFileSync.mockReturnValue( + JSON.stringify({ + mcpVersion: "2.0.0", + syncedAt: "2025-01-01T00:00:00.000Z", + aztecVersion: "v1.0.0", + }) + ); + + expect(getSyncState()).toEqual({ kind: "upToDate" }); + }); + + it("returns needsAutoResync when version mismatches", () => { + mockReadFileSync.mockReturnValue( + JSON.stringify({ + mcpVersion: "1.5.0", + syncedAt: "2025-01-01T00:00:00.000Z", + aztecVersion: "v1.0.0", + }) + ); + + expect(getSyncState()).toEqual({ kind: "needsAutoResync", aztecVersion: "v1.0.0" }); + }); + + it("returns upToDate when hard_failure attempt for this version", () => { + mockReadFileSync.mockReturnValue( + JSON.stringify({ + mcpVersion: "1.5.0", + syncedAt: "2025-01-01T00:00:00.000Z", + aztecVersion: "v1.0.0", + autoResyncAttempt: { + targetMcpVersion: "2.0.0", + attemptedAt: "2025-06-01T00:00:00.000Z", + result: "hard_failure", + }, + }) + ); + + expect(getSyncState()).toEqual({ kind: "upToDate" }); + }); + + it("returns upToDate when old metadata lacks result field (backwards compat)", () => { + mockReadFileSync.mockReturnValue( + JSON.stringify({ + mcpVersion: "1.5.0", + syncedAt: "2025-01-01T00:00:00.000Z", + aztecVersion: "v1.0.0", + autoResyncAttempt: { + targetMcpVersion: "2.0.0", + attemptedAt: "2025-06-01T00:00:00.000Z", + }, + }) + ); + + expect(getSyncState()).toEqual({ kind: "upToDate" }); + }); + + it("returns upToDate when deferred attempt for this version", () => { + mockReadFileSync.mockReturnValue( + JSON.stringify({ + mcpVersion: "1.5.0", + syncedAt: "2025-01-01T00:00:00.000Z", + aztecVersion: "v1.0.0", + autoResyncAttempt: { + targetMcpVersion: "2.0.0", + attemptedAt: "2025-06-01T00:00:00.000Z", + result: "deferred", + }, + }) + ); + + expect(getSyncState()).toEqual({ kind: "upToDate" }); + }); + + it("returns upToDate when retryable attempt within 30 minutes", () => { + const recentTime = new Date(Date.now() - 10 * 60 * 1000).toISOString(); // 10 min ago + mockReadFileSync.mockReturnValue( + JSON.stringify({ + mcpVersion: "1.5.0", + syncedAt: "2025-01-01T00:00:00.000Z", + aztecVersion: "v1.0.0", + autoResyncAttempt: { + targetMcpVersion: "2.0.0", + attemptedAt: recentTime, + result: "retryable", + }, + }) + ); + + expect(getSyncState()).toEqual({ kind: "upToDate" }); + }); + + it("returns needsAutoResync when retryable attempt after 30+ minutes", () => { + const oldTime = new Date(Date.now() 
- 31 * 60 * 1000).toISOString(); // 31 min ago + mockReadFileSync.mockReturnValue( + JSON.stringify({ + mcpVersion: "1.5.0", + syncedAt: "2025-01-01T00:00:00.000Z", + aztecVersion: "v1.0.0", + autoResyncAttempt: { + targetMcpVersion: "2.0.0", + attemptedAt: oldTime, + result: "retryable", + }, + }) + ); + + expect(getSyncState()).toEqual({ kind: "needsAutoResync", aztecVersion: "v1.0.0" }); + }); + + it("returns needsAutoResync when attempt was for a different version", () => { + mockReadFileSync.mockReturnValue( + JSON.stringify({ + mcpVersion: "1.0.0", + syncedAt: "2025-01-01T00:00:00.000Z", + aztecVersion: "v1.0.0", + autoResyncAttempt: { + targetMcpVersion: "1.5.0", + attemptedAt: "2025-06-01T00:00:00.000Z", + }, + }) + ); + + expect(getSyncState()).toEqual({ kind: "needsAutoResync", aztecVersion: "v1.0.0" }); + }); + + it("returns legacyUnknownVersion when metadata has empty aztecVersion", () => { + mockReadFileSync.mockReturnValue( + JSON.stringify({ + mcpVersion: "unknown", + syncedAt: "", + aztecVersion: "", + }) + ); + + expect(getSyncState()).toEqual({ kind: "legacyUnknownVersion" }); + }); +}); diff --git a/tests/version.test.ts b/tests/version.test.ts new file mode 100644 index 0000000..1497394 --- /dev/null +++ b/tests/version.test.ts @@ -0,0 +1,15 @@ +import { describe, it, expect } from "vitest"; +import { MCP_VERSION } from "../src/version.js"; + +describe("MCP_VERSION", () => { + it("is a valid semver string", () => { + expect(MCP_VERSION).toMatch(/^\d+\.\d+\.\d+/); + }); + + it("matches package.json version", async () => { + const { createRequire } = await import("module"); + const require = createRequire(import.meta.url); + const pkg = require("../package.json"); + expect(MCP_VERSION).toBe(pkg.version); + }); +});
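
Usage sketch (illustrative, not part of the patch): how the persisted sync state above is meant to drive the auto-resync decision. This mirrors the branching in ensureAutoResync() minus logging, locking, and the getRepoTag fallback for legacy installs; imports are assumed to resolve as in src/index.ts.

import { getSyncState, writeAutoResyncAttempt } from "./utils/sync-metadata.js";
import { syncRepos } from "./tools/sync.js";

async function autoResyncOnce(): Promise<void> {
  const state = getSyncState();
  // upToDate also covers a retryable failure still inside its 30-minute backoff
  if (state.kind === "upToDate" || state.kind === "noRepos") return;

  if (state.kind === "legacyUnknownVersion") {
    // No recorded Aztec version to target — defer to a manual aztec_sync_repos
    // and record the attempt so every tool call doesn't re-trigger this path.
    writeAutoResyncAttempt("deferred");
    return;
  }

  // needsAutoResync: re-pin all repos to the last synced Aztec version.
  const result = await syncRepos({ version: state.aztecVersion, force: true });
  if (!result.metadataSafe) {
    // Metadata not persisted — record a retryable attempt; getSyncState()
    // then reports upToDate for 30 minutes before allowing another try.
    writeAutoResyncAttempt("retryable");
  }
}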