diff --git a/packages/cli/src/ui/hooks/useAtCompletion.test.ts b/packages/cli/src/ui/hooks/useAtCompletion.test.ts
index 27e779acef0..582e8f1fdb4 100644
--- a/packages/cli/src/ui/hooks/useAtCompletion.test.ts
+++ b/packages/cli/src/ui/hooks/useAtCompletion.test.ts
@@ -7,6 +7,8 @@
 import { describe, it, expect, beforeEach, vi, afterEach } from 'vitest';
 import { act, useState } from 'react';
 import * as path from 'node:path';
+import * as fs from 'node:fs';
+import * as crawlCache from '../../../../core/src/utils/filesearch/crawlCache.js';
 import { renderHook } from '../../test-utils/render.js';
 import { waitFor } from '../../test-utils/async.js';
 import { useAtCompletion } from './useAtCompletion.js';
@@ -69,7 +71,9 @@ describe('useAtCompletion', () => {
     if (testRootDir) {
       await cleanupTmpDir(testRootDir);
     }
+    crawlCache.clear();
     vi.restoreAllMocks();
+    vi.useRealTimers();
   });
 
   describe('File Search Logic', () => {
@@ -186,6 +190,49 @@ describe('useAtCompletion', () => {
         ]);
       });
     });
+
+    it('should pick up newly created files when re-triggered (@ menu re-opened)', async () => {
+      vi.useFakeTimers();
+      const structure: FileSystemStructure = { 'initial.txt': '' };
+      testRootDir = await createTmpDir(structure);
+
+      const { result, rerender } = await renderHook(
+        ({ enabled }) =>
+          useTestHarnessForAtCompletion(enabled, '', mockConfig, testRootDir),
+        { initialProps: { enabled: true } },
+      );
+
+      // Wait for initial suggestions
+      await waitFor(() => {
+        expect(result.current.suggestions.map((s) => s.value)).toContain(
+          'initial.txt',
+        );
+      });
+
+      // Add a new file to the disk
+      const newFile = path.join(testRootDir, 'newfile.txt');
+      await fs.promises.writeFile(newFile, '');
+
+      // Advance timers by >5s to expire the crawler cache
+      act(() => {
+        vi.advanceTimersByTime(5100);
+      });
+
+      // Toggle enabled to re-trigger initialize (close and open @ menu)
+      act(() => {
+        rerender({ enabled: false });
+      });
+      act(() => {
+        rerender({ enabled: true });
+      });
+
+      // Wait for the new file to appear in suggestions
+      await waitFor(() => {
+        expect(result.current.suggestions.map((s) => s.value)).toContain(
+          'newfile.txt',
+        );
+      });
+    });
   });
 
   describe('MCP resource suggestions', () => {
diff --git a/packages/cli/src/ui/hooks/useAtCompletion.ts b/packages/cli/src/ui/hooks/useAtCompletion.ts
index 4a7b9ebc130..3ad77e0aabd 100644
--- a/packages/cli/src/ui/hooks/useAtCompletion.ts
+++ b/packages/cli/src/ui/hooks/useAtCompletion.ts
@@ -284,27 +284,34 @@ export function useAtCompletion(props: UseAtCompletionProps): void {
     const initPromises: Array<Promise<void>> = [];
 
     for (const dir of directories) {
-      if (fileSearchMap.current.has(dir)) continue;
-
-      const searcher = FileSearchFactory.create({
-        projectRoot: dir,
-        ignoreDirs: [],
-        fileDiscoveryService: new FileDiscoveryService(
-          dir,
-          config?.getFileFilteringOptions(),
-        ),
-        cache: true,
-        cacheTtl: 30,
-        enableRecursiveFileSearch:
-          config?.getEnableRecursiveFileSearch() ?? true,
-        enableFuzzySearch:
-          config?.getFileFilteringEnableFuzzySearch() ?? true,
-        maxFiles: config?.getFileFilteringOptions()?.maxFileCount,
-      });
+      let searcher = fileSearchMap.current.get(dir);
+      if (!searcher) {
+        searcher = FileSearchFactory.create({
+          projectRoot: dir,
+          ignoreDirs: [],
+          fileDiscoveryService: new FileDiscoveryService(
+            dir,
+            config?.getFileFilteringOptions(),
+          ),
+          cache: true,
+          // use a short 5s TTL for the @ menu to ensure newly created files
+          // (by the agent or user) are discovered quickly, while still
+          // protecting against redundant crawls in very large repositories.
+          cacheTtl: 5,
+          enableRecursiveFileSearch:
+            config?.getEnableRecursiveFileSearch() ?? true,
+          enableFuzzySearch:
+            config?.getFileFilteringEnableFuzzySearch() ?? true,
+          maxFiles: config?.getFileFilteringOptions()?.maxFileCount,
+        });
+        fileSearchMap.current.set(dir, searcher);
+      }
 
       initPromises.push(
         searcher.initialize().then(() => {
           if (initEpoch.current === currentEpoch) {
+            // Ensure we still have the latest version in the map
+            // (though set() above already did it, this handles potential concurrent resets)
             fileSearchMap.current.set(dir, searcher);
           }
         }),
diff --git a/packages/core/src/utils/filesearch/fileSearch.test.ts b/packages/core/src/utils/filesearch/fileSearch.test.ts
index 33906fcb0a2..14dfa16f8ca 100644
--- a/packages/core/src/utils/filesearch/fileSearch.test.ts
+++ b/packages/core/src/utils/filesearch/fileSearch.test.ts
@@ -799,6 +799,81 @@ describe('FileSearch', () => {
     ]);
   });
 
+  it('should skip re-building the Fzf index if files have not changed (referential equality)', async () => {
+    tmpDir = await createTmpDir({
+      'file1.js': '',
+    });
+
+    const fileSearch = FileSearchFactory.create({
+      projectRoot: tmpDir,
+      fileDiscoveryService: new FileDiscoveryService(tmpDir, {
+        respectGitIgnore: false,
+        respectGeminiIgnore: false,
+      }),
+      ignoreDirs: [],
+      cache: true,
+      cacheTtl: 10000,
+      enableRecursiveFileSearch: true,
+      enableFuzzySearch: true,
+      // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    }) as any;
+
+    await fileSearch.initialize();
+    const firstFzf = fileSearch.fzf;
+    const firstResultCache = fileSearch.resultCache;
+
+    expect(firstFzf).toBeDefined();
+    expect(firstResultCache).toBeDefined();
+
+    // Mock crawl to return the exact same array instance
+    const crawlSpy = vi
+      .spyOn(crawler, 'crawl')
+      .mockResolvedValue(fileSearch.allFiles);
+
+    await fileSearch.initialize();
+
+    expect(fileSearch.resultCache).toBe(firstResultCache);
+    expect(crawlSpy).toHaveBeenCalled();
+  });
+
+  it('should prevent concurrent initialization calls', async () => {
+    tmpDir = await createTmpDir({});
+    const fileSearch = FileSearchFactory.create({
+      projectRoot: tmpDir,
+      fileDiscoveryService: new FileDiscoveryService(tmpDir, {}),
+      ignoreDirs: [],
+      cache: true,
+      cacheTtl: 10000,
+      enableRecursiveFileSearch: true,
+      enableFuzzySearch: true,
+    });
+
+    let crawlResolve: (value: string[]) => void;
+    const crawlPromise = new Promise<string[]>((resolve) => {
+      crawlResolve = resolve;
+    });
+
+    const crawlSpy = vi.spyOn(crawler, 'crawl').mockReturnValue(crawlPromise);
+
+    // Start first initialization
+    const init1 = fileSearch.initialize();
+    expect(crawlSpy).toHaveBeenCalledTimes(1);
+
+    // Start second initialization while first is still "Initializing"
+    const init2 = fileSearch.initialize();
+
+    // Should NOT have called crawl again
+    expect(crawlSpy).toHaveBeenCalledTimes(1);
+
+    // Resolve the first one
+    crawlResolve!(['file1.js']);
+    await Promise.all([init1, init2]);
+
+    expect(crawlSpy).toHaveBeenCalledTimes(1);
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    expect((fileSearch as any).allFiles).toEqual(['file1.js']);
+  });
+
   describe('DirectoryFileSearch', () => {
     it('should search for files in the current directory', async () => {
       tmpDir = await createTmpDir({
diff --git a/packages/core/src/utils/filesearch/fileSearch.ts b/packages/core/src/utils/filesearch/fileSearch.ts
index e3f608e5088..86e55c4cfa7 100644
--- a/packages/core/src/utils/filesearch/fileSearch.ts
+++ b/packages/core/src/utils/filesearch/fileSearch.ts
@@ -63,6 +63,12 @@ export interface FileSearchOptions {
   maxFiles?: number;
 }
 
+enum InitializationState {
+  Uninitialized,
+  Initializing,
+  Initialized,
+}
+
 export class AbortError extends Error {
   constructor(message = 'Search aborted') {
     super(message);
@@ -133,26 +139,62 @@ class RecursiveFileSearch implements FileSearch {
   private resultCache: ResultCache | undefined;
   private allFiles: string[] = [];
   private fzf: AsyncFzf<string[]> | undefined;
+  private initializationState = InitializationState.Uninitialized;
+  private initPromise: Promise<void> | null = null;
 
   constructor(private readonly options: FileSearchOptions) {}
 
   async initialize(): Promise<void> {
-    this.ignore = loadIgnoreRules(
-      this.options.fileDiscoveryService,
-      this.options.ignoreDirs,
-    );
+    if (this.initializationState === InitializationState.Initializing) {
+      return this.initPromise || Promise.resolve();
+    }
 
-    this.allFiles = await crawl({
-      crawlDirectory: this.options.projectRoot,
-      cwd: this.options.projectRoot,
-      ignore: this.ignore,
-      cache: this.options.cache,
-      cacheTtl: this.options.cacheTtl,
-      maxDepth: this.options.maxDepth,
-      maxFiles: this.options.maxFiles ?? 20000,
-    });
+    this.initPromise = (async () => {
+      const prevState = this.initializationState;
+      this.initializationState = InitializationState.Initializing;
+
+      try {
+        const nextIgnore = loadIgnoreRules(
+          this.options.fileDiscoveryService,
+          this.options.ignoreDirs,
+        );
+
+        const nextFiles = await crawl({
+          crawlDirectory: this.options.projectRoot,
+          cwd: this.options.projectRoot,
+          ignore: nextIgnore,
+          cache: this.options.cache,
+          cacheTtl: this.options.cacheTtl,
+          maxDepth: this.options.maxDepth,
+          maxFiles: this.options.maxFiles ?? 20000,
+        });
+
+        if (
+          nextFiles === this.allFiles &&
+          this.ignore &&
+          prevState === InitializationState.Initialized
+        ) {
+          this.initializationState = InitializationState.Initialized;
+          // optimization: if the file list is referentially equal (from crawl cache)
+          // and we already have ignore rules, skip rebuilding the FZF index.
+          return;
+        }
+
+        this.ignore = nextIgnore;
+        this.allFiles = nextFiles;
+        this.buildResultCache();
+        this.initializationState = InitializationState.Initialized;
+      } catch (e) {
+        this.initializationState = prevState;
+        throw e;
+      }
+    })();
 
-    this.buildResultCache();
+    try {
+      await this.initPromise;
+    } finally {
+      this.initPromise = null;
+    }
   }
 
   async search(