feat(core, cli): Introduce high-performance FileSearch engine (#5136)
Co-authored-by: Jacob Richman <jacob314@gmail.com>
This commit is contained in:
parent
2141b39c3d
commit
12a9bc3ed9
|
@ -932,6 +932,10 @@
|
|||
"resolved": "packages/core",
|
||||
"link": true
|
||||
},
|
||||
"node_modules/@google/gemini-cli-test-utils": {
|
||||
"resolved": "packages/test-utils",
|
||||
"link": true
|
||||
},
|
||||
"node_modules/@grpc/grpc-js": {
|
||||
"version": "1.13.4",
|
||||
"resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.13.4.tgz",
|
||||
|
@ -2401,6 +2405,13 @@
|
|||
"integrity": "sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/picomatch": {
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/picomatch/-/picomatch-4.0.1.tgz",
|
||||
"integrity": "sha512-dLqxmi5VJRC9XTvc/oaTtk+bDb4RRqxLZPZ3jIpYBHEnDXX8lu02w2yWI6NsPPsELuVK298Z2iR8jgoWKRdUVQ==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/qs": {
|
||||
"version": "6.14.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.14.0.tgz",
|
||||
|
@ -11694,6 +11705,7 @@
|
|||
},
|
||||
"devDependencies": {
|
||||
"@babel/runtime": "^7.27.6",
|
||||
"@google/gemini-cli-test-utils": "file:../test-utils",
|
||||
"@testing-library/react": "^16.3.0",
|
||||
"@types/command-exists": "^1.2.3",
|
||||
"@types/diff": "^7.0.2",
|
||||
|
@ -11876,6 +11888,7 @@
|
|||
"chardet": "^2.1.0",
|
||||
"diff": "^7.0.0",
|
||||
"dotenv": "^17.1.0",
|
||||
"fdir": "^6.4.6",
|
||||
"glob": "^10.4.5",
|
||||
"google-auth-library": "^9.11.0",
|
||||
"html-to-text": "^9.0.5",
|
||||
|
@ -11884,6 +11897,7 @@
|
|||
"marked": "^15.0.12",
|
||||
"micromatch": "^4.0.8",
|
||||
"open": "^10.1.2",
|
||||
"picomatch": "^4.0.1",
|
||||
"shell-quote": "^1.8.3",
|
||||
"simple-git": "^3.28.0",
|
||||
"strip-ansi": "^7.1.0",
|
||||
|
@ -11891,10 +11905,12 @@
|
|||
"ws": "^8.18.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@google/gemini-cli-test-utils": "file:../test-utils",
|
||||
"@types/diff": "^7.0.2",
|
||||
"@types/dotenv": "^6.1.1",
|
||||
"@types/micromatch": "^4.0.8",
|
||||
"@types/minimatch": "^5.1.2",
|
||||
"@types/picomatch": "^4.0.1",
|
||||
"@types/ws": "^8.5.10",
|
||||
"typescript": "^5.3.3",
|
||||
"vitest": "^3.1.1"
|
||||
|
@ -11940,6 +11956,20 @@
|
|||
"url": "https://github.com/sponsors/epoberezkin"
|
||||
}
|
||||
},
|
||||
"packages/core/node_modules/fdir": {
|
||||
"version": "6.4.6",
|
||||
"resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.6.tgz",
|
||||
"integrity": "sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w==",
|
||||
"license": "MIT",
|
||||
"peerDependencies": {
|
||||
"picomatch": "^3 || ^4"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"picomatch": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"packages/core/node_modules/ignore": {
|
||||
"version": "7.0.5",
|
||||
"license": "MIT",
|
||||
|
@ -11953,6 +11983,29 @@
|
|||
"integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"packages/core/node_modules/picomatch": {
|
||||
"version": "4.0.3",
|
||||
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz",
|
||||
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/jonschlinkert"
|
||||
}
|
||||
},
|
||||
"packages/test-utils": {
|
||||
"name": "@google/gemini-cli-test-utils",
|
||||
"version": "0.1.0",
|
||||
"license": "Apache-2.0",
|
||||
"devDependencies": {
|
||||
"typescript": "^5.3.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=20"
|
||||
}
|
||||
},
|
||||
"packages/vscode-ide-companion": {
|
||||
"name": "gemini-cli-vscode-ide-companion",
|
||||
"version": "0.0.1",
|
||||
|
|
|
@ -73,7 +73,8 @@
|
|||
"pretty-format": "^30.0.2",
|
||||
"react-dom": "^19.1.0",
|
||||
"typescript": "^5.3.3",
|
||||
"vitest": "^3.1.1"
|
||||
"vitest": "^3.1.1",
|
||||
"@google/gemini-cli-test-utils": "file:../test-utils"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=20"
|
||||
|
|
|
@ -0,0 +1,380 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
/** @vitest-environment jsdom */
|
||||
|
||||
import { describe, it, expect, beforeEach, vi, afterEach } from 'vitest';
|
||||
import { renderHook, waitFor, act } from '@testing-library/react';
|
||||
import { useAtCompletion } from './useAtCompletion.js';
|
||||
import { Config, FileSearch } from '@google/gemini-cli-core';
|
||||
import {
|
||||
createTmpDir,
|
||||
cleanupTmpDir,
|
||||
FileSystemStructure,
|
||||
} from '@google/gemini-cli-test-utils';
|
||||
import { useState } from 'react';
|
||||
import { Suggestion } from '../components/SuggestionsDisplay.js';
|
||||
|
||||
// Test harness to capture the state from the hook's callbacks.
|
||||
function useTestHarnessForAtCompletion(
|
||||
enabled: boolean,
|
||||
pattern: string,
|
||||
config: Config | undefined,
|
||||
cwd: string,
|
||||
) {
|
||||
const [suggestions, setSuggestions] = useState<Suggestion[]>([]);
|
||||
const [isLoadingSuggestions, setIsLoadingSuggestions] = useState(false);
|
||||
|
||||
useAtCompletion({
|
||||
enabled,
|
||||
pattern,
|
||||
config,
|
||||
cwd,
|
||||
setSuggestions,
|
||||
setIsLoadingSuggestions,
|
||||
});
|
||||
|
||||
return { suggestions, isLoadingSuggestions };
|
||||
}
|
||||
|
||||
describe('useAtCompletion', () => {
|
||||
let testRootDir: string;
|
||||
let mockConfig: Config;
|
||||
|
||||
beforeEach(() => {
|
||||
mockConfig = {
|
||||
getFileFilteringOptions: vi.fn(() => ({
|
||||
respectGitIgnore: true,
|
||||
respectGeminiIgnore: true,
|
||||
})),
|
||||
} as unknown as Config;
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
if (testRootDir) {
|
||||
await cleanupTmpDir(testRootDir);
|
||||
}
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe('File Search Logic', () => {
|
||||
it('should perform a recursive search for an empty pattern', async () => {
|
||||
const structure: FileSystemStructure = {
|
||||
'file.txt': '',
|
||||
src: {
|
||||
'index.js': '',
|
||||
components: ['Button.tsx', 'Button with spaces.tsx'],
|
||||
},
|
||||
};
|
||||
testRootDir = await createTmpDir(structure);
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useTestHarnessForAtCompletion(true, '', mockConfig, testRootDir),
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.suggestions.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
expect(result.current.suggestions.map((s) => s.value)).toEqual([
|
||||
'src/',
|
||||
'src/components/',
|
||||
'file.txt',
|
||||
'src/components/Button\\ with\\ spaces.tsx',
|
||||
'src/components/Button.tsx',
|
||||
'src/index.js',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should correctly filter the recursive list based on a pattern', async () => {
|
||||
const structure: FileSystemStructure = {
|
||||
'file.txt': '',
|
||||
src: {
|
||||
'index.js': '',
|
||||
components: {
|
||||
'Button.tsx': '',
|
||||
},
|
||||
},
|
||||
};
|
||||
testRootDir = await createTmpDir(structure);
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useTestHarnessForAtCompletion(true, 'src/', mockConfig, testRootDir),
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.suggestions.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
expect(result.current.suggestions.map((s) => s.value)).toEqual([
|
||||
'src/',
|
||||
'src/components/',
|
||||
'src/components/Button.tsx',
|
||||
'src/index.js',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should append a trailing slash to directory paths in suggestions', async () => {
|
||||
const structure: FileSystemStructure = {
|
||||
'file.txt': '',
|
||||
dir: {},
|
||||
};
|
||||
testRootDir = await createTmpDir(structure);
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useTestHarnessForAtCompletion(true, '', mockConfig, testRootDir),
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.suggestions.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
expect(result.current.suggestions.map((s) => s.value)).toEqual([
|
||||
'dir/',
|
||||
'file.txt',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('UI State and Loading Behavior', () => {
|
||||
it('should be in a loading state during initial file system crawl', async () => {
|
||||
testRootDir = await createTmpDir({});
|
||||
const { result } = renderHook(() =>
|
||||
useTestHarnessForAtCompletion(true, '', mockConfig, testRootDir),
|
||||
);
|
||||
|
||||
// It's initially true because the effect runs synchronously.
|
||||
expect(result.current.isLoadingSuggestions).toBe(true);
|
||||
|
||||
// Wait for the loading to complete.
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoadingSuggestions).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
it('should NOT show a loading indicator for subsequent searches that complete under 100ms', async () => {
|
||||
const structure: FileSystemStructure = { 'a.txt': '', 'b.txt': '' };
|
||||
testRootDir = await createTmpDir(structure);
|
||||
|
||||
const { result, rerender } = renderHook(
|
||||
({ pattern }) =>
|
||||
useTestHarnessForAtCompletion(true, pattern, mockConfig, testRootDir),
|
||||
{ initialProps: { pattern: 'a' } },
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.suggestions.map((s) => s.value)).toEqual([
|
||||
'a.txt',
|
||||
]);
|
||||
});
|
||||
expect(result.current.isLoadingSuggestions).toBe(false);
|
||||
|
||||
rerender({ pattern: 'b' });
|
||||
|
||||
// Wait for the final result
|
||||
await waitFor(() => {
|
||||
expect(result.current.suggestions.map((s) => s.value)).toEqual([
|
||||
'b.txt',
|
||||
]);
|
||||
});
|
||||
|
||||
expect(result.current.isLoadingSuggestions).toBe(false);
|
||||
});
|
||||
|
||||
it('should show a loading indicator and clear old suggestions for subsequent searches that take longer than 100ms', async () => {
|
||||
const structure: FileSystemStructure = { 'a.txt': '', 'b.txt': '' };
|
||||
testRootDir = await createTmpDir(structure);
|
||||
|
||||
// Spy on the search method to introduce an artificial delay
|
||||
const originalSearch = FileSearch.prototype.search;
|
||||
vi.spyOn(FileSearch.prototype, 'search').mockImplementation(
|
||||
async function (...args) {
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
return originalSearch.apply(this, args);
|
||||
},
|
||||
);
|
||||
|
||||
const { result, rerender } = renderHook(
|
||||
({ pattern }) =>
|
||||
useTestHarnessForAtCompletion(true, pattern, mockConfig, testRootDir),
|
||||
{ initialProps: { pattern: 'a' } },
|
||||
);
|
||||
|
||||
// Wait for the initial (slow) search to complete
|
||||
await waitFor(() => {
|
||||
expect(result.current.suggestions.map((s) => s.value)).toEqual([
|
||||
'a.txt',
|
||||
]);
|
||||
});
|
||||
|
||||
// Now, rerender to trigger the second search
|
||||
rerender({ pattern: 'b' });
|
||||
|
||||
// Wait for the loading indicator to appear
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoadingSuggestions).toBe(true);
|
||||
});
|
||||
|
||||
// Suggestions should be cleared while loading
|
||||
expect(result.current.suggestions).toEqual([]);
|
||||
|
||||
// Wait for the final (slow) search to complete
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(result.current.suggestions.map((s) => s.value)).toEqual([
|
||||
'b.txt',
|
||||
]);
|
||||
},
|
||||
{ timeout: 1000 },
|
||||
); // Increase timeout for the slow search
|
||||
|
||||
expect(result.current.isLoadingSuggestions).toBe(false);
|
||||
});
|
||||
|
||||
it('should abort the previous search when a new one starts', async () => {
|
||||
const structure: FileSystemStructure = { 'a.txt': '', 'b.txt': '' };
|
||||
testRootDir = await createTmpDir(structure);
|
||||
|
||||
const abortSpy = vi.spyOn(AbortController.prototype, 'abort');
|
||||
const searchSpy = vi
|
||||
.spyOn(FileSearch.prototype, 'search')
|
||||
.mockImplementation(async (...args) => {
|
||||
const delay = args[0] === 'a' ? 500 : 50;
|
||||
await new Promise((resolve) => setTimeout(resolve, delay));
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
return [args[0] as any];
|
||||
});
|
||||
|
||||
const { result, rerender } = renderHook(
|
||||
({ pattern }) =>
|
||||
useTestHarnessForAtCompletion(true, pattern, mockConfig, testRootDir),
|
||||
{ initialProps: { pattern: 'a' } },
|
||||
);
|
||||
|
||||
// Wait for the hook to be ready (initialization is complete)
|
||||
await waitFor(() => {
|
||||
expect(searchSpy).toHaveBeenCalledWith('a', expect.any(Object));
|
||||
});
|
||||
|
||||
// Now that the first search is in-flight, trigger the second one.
|
||||
act(() => {
|
||||
rerender({ pattern: 'b' });
|
||||
});
|
||||
|
||||
// The abort should have been called for the first search.
|
||||
expect(abortSpy).toHaveBeenCalledTimes(1);
|
||||
|
||||
// Wait for the final result, which should be from the second, faster search.
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(result.current.suggestions.map((s) => s.value)).toEqual(['b']);
|
||||
},
|
||||
{ timeout: 1000 },
|
||||
);
|
||||
|
||||
// The search spy should have been called for both patterns.
|
||||
expect(searchSpy).toHaveBeenCalledWith('b', expect.any(Object));
|
||||
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Filtering and Configuration', () => {
|
||||
it('should respect .gitignore files', async () => {
|
||||
const gitignoreContent = ['dist/', '*.log'].join('\n');
|
||||
const structure: FileSystemStructure = {
|
||||
'.git': {},
|
||||
'.gitignore': gitignoreContent,
|
||||
dist: {},
|
||||
'test.log': '',
|
||||
src: {},
|
||||
};
|
||||
testRootDir = await createTmpDir(structure);
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useTestHarnessForAtCompletion(true, '', mockConfig, testRootDir),
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.suggestions.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
expect(result.current.suggestions.map((s) => s.value)).toEqual([
|
||||
'src/',
|
||||
'.gitignore',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should work correctly when config is undefined', async () => {
|
||||
const structure: FileSystemStructure = {
|
||||
node_modules: {},
|
||||
src: {},
|
||||
};
|
||||
testRootDir = await createTmpDir(structure);
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useTestHarnessForAtCompletion(true, '', undefined, testRootDir),
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.suggestions.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
expect(result.current.suggestions.map((s) => s.value)).toEqual([
|
||||
'node_modules/',
|
||||
'src/',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should reset and re-initialize when the cwd changes', async () => {
|
||||
const structure1: FileSystemStructure = { 'file1.txt': '' };
|
||||
const rootDir1 = await createTmpDir(structure1);
|
||||
const structure2: FileSystemStructure = { 'file2.txt': '' };
|
||||
const rootDir2 = await createTmpDir(structure2);
|
||||
|
||||
const { result, rerender } = renderHook(
|
||||
({ cwd, pattern }) =>
|
||||
useTestHarnessForAtCompletion(true, pattern, mockConfig, cwd),
|
||||
{
|
||||
initialProps: {
|
||||
cwd: rootDir1,
|
||||
pattern: 'file',
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
// Wait for initial suggestions from the first directory
|
||||
await waitFor(() => {
|
||||
expect(result.current.suggestions.map((s) => s.value)).toEqual([
|
||||
'file1.txt',
|
||||
]);
|
||||
});
|
||||
|
||||
// Change the CWD
|
||||
act(() => {
|
||||
rerender({ cwd: rootDir2, pattern: 'file' });
|
||||
});
|
||||
|
||||
// After CWD changes, suggestions should be cleared and it should load again.
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoadingSuggestions).toBe(true);
|
||||
expect(result.current.suggestions).toEqual([]);
|
||||
});
|
||||
|
||||
// Wait for the new suggestions from the second directory
|
||||
await waitFor(() => {
|
||||
expect(result.current.suggestions.map((s) => s.value)).toEqual([
|
||||
'file2.txt',
|
||||
]);
|
||||
});
|
||||
expect(result.current.isLoadingSuggestions).toBe(false);
|
||||
|
||||
await cleanupTmpDir(rootDir1);
|
||||
await cleanupTmpDir(rootDir2);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,228 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { useEffect, useReducer, useRef } from 'react';
|
||||
import { Config, FileSearch, escapePath } from '@google/gemini-cli-core';
|
||||
import {
|
||||
Suggestion,
|
||||
MAX_SUGGESTIONS_TO_SHOW,
|
||||
} from '../components/SuggestionsDisplay.js';
|
||||
|
||||
export enum AtCompletionStatus {
|
||||
IDLE = 'idle',
|
||||
INITIALIZING = 'initializing',
|
||||
READY = 'ready',
|
||||
SEARCHING = 'searching',
|
||||
ERROR = 'error',
|
||||
}
|
||||
|
||||
interface AtCompletionState {
|
||||
status: AtCompletionStatus;
|
||||
suggestions: Suggestion[];
|
||||
isLoading: boolean;
|
||||
pattern: string | null;
|
||||
}
|
||||
|
||||
type AtCompletionAction =
|
||||
| { type: 'INITIALIZE' }
|
||||
| { type: 'INITIALIZE_SUCCESS' }
|
||||
| { type: 'SEARCH'; payload: string }
|
||||
| { type: 'SEARCH_SUCCESS'; payload: Suggestion[] }
|
||||
| { type: 'SET_LOADING'; payload: boolean }
|
||||
| { type: 'ERROR' }
|
||||
| { type: 'RESET' };
|
||||
|
||||
const initialState: AtCompletionState = {
|
||||
status: AtCompletionStatus.IDLE,
|
||||
suggestions: [],
|
||||
isLoading: false,
|
||||
pattern: null,
|
||||
};
|
||||
|
||||
function atCompletionReducer(
|
||||
state: AtCompletionState,
|
||||
action: AtCompletionAction,
|
||||
): AtCompletionState {
|
||||
switch (action.type) {
|
||||
case 'INITIALIZE':
|
||||
return {
|
||||
...state,
|
||||
status: AtCompletionStatus.INITIALIZING,
|
||||
isLoading: true,
|
||||
};
|
||||
case 'INITIALIZE_SUCCESS':
|
||||
return { ...state, status: AtCompletionStatus.READY, isLoading: false };
|
||||
case 'SEARCH':
|
||||
// Keep old suggestions, don't set loading immediately
|
||||
return {
|
||||
...state,
|
||||
status: AtCompletionStatus.SEARCHING,
|
||||
pattern: action.payload,
|
||||
};
|
||||
case 'SEARCH_SUCCESS':
|
||||
return {
|
||||
...state,
|
||||
status: AtCompletionStatus.READY,
|
||||
suggestions: action.payload,
|
||||
isLoading: false,
|
||||
};
|
||||
case 'SET_LOADING':
|
||||
// Only show loading if we are still in a searching state
|
||||
if (state.status === AtCompletionStatus.SEARCHING) {
|
||||
return { ...state, isLoading: action.payload, suggestions: [] };
|
||||
}
|
||||
return state;
|
||||
case 'ERROR':
|
||||
return {
|
||||
...state,
|
||||
status: AtCompletionStatus.ERROR,
|
||||
isLoading: false,
|
||||
suggestions: [],
|
||||
};
|
||||
case 'RESET':
|
||||
return initialState;
|
||||
default:
|
||||
return state;
|
||||
}
|
||||
}
|
||||
|
||||
export interface UseAtCompletionProps {
|
||||
enabled: boolean;
|
||||
pattern: string;
|
||||
config: Config | undefined;
|
||||
cwd: string;
|
||||
setSuggestions: (suggestions: Suggestion[]) => void;
|
||||
setIsLoadingSuggestions: (isLoading: boolean) => void;
|
||||
}
|
||||
|
||||
export function useAtCompletion(props: UseAtCompletionProps): void {
|
||||
const {
|
||||
enabled,
|
||||
pattern,
|
||||
config,
|
||||
cwd,
|
||||
setSuggestions,
|
||||
setIsLoadingSuggestions,
|
||||
} = props;
|
||||
const [state, dispatch] = useReducer(atCompletionReducer, initialState);
|
||||
const fileSearch = useRef<FileSearch | null>(null);
|
||||
const searchAbortController = useRef<AbortController | null>(null);
|
||||
const slowSearchTimer = useRef<NodeJS.Timeout | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
setSuggestions(state.suggestions);
|
||||
}, [state.suggestions, setSuggestions]);
|
||||
|
||||
useEffect(() => {
|
||||
setIsLoadingSuggestions(state.isLoading);
|
||||
}, [state.isLoading, setIsLoadingSuggestions]);
|
||||
|
||||
useEffect(() => {
|
||||
dispatch({ type: 'RESET' });
|
||||
}, [cwd, config]);
|
||||
|
||||
// Reacts to user input (`pattern`) ONLY.
|
||||
useEffect(() => {
|
||||
if (!enabled) {
|
||||
return;
|
||||
}
|
||||
if (pattern === null) {
|
||||
dispatch({ type: 'RESET' });
|
||||
return;
|
||||
}
|
||||
|
||||
if (state.status === AtCompletionStatus.IDLE) {
|
||||
dispatch({ type: 'INITIALIZE' });
|
||||
} else if (
|
||||
(state.status === AtCompletionStatus.READY ||
|
||||
state.status === AtCompletionStatus.SEARCHING) &&
|
||||
pattern !== state.pattern // Only search if the pattern has changed
|
||||
) {
|
||||
dispatch({ type: 'SEARCH', payload: pattern });
|
||||
}
|
||||
}, [enabled, pattern, state.status, state.pattern]);
|
||||
|
||||
// The "Worker" that performs async operations based on status.
|
||||
useEffect(() => {
|
||||
const initialize = async () => {
|
||||
try {
|
||||
const searcher = new FileSearch({
|
||||
projectRoot: cwd,
|
||||
ignoreDirs: [],
|
||||
useGitignore:
|
||||
config?.getFileFilteringOptions()?.respectGitIgnore ?? true,
|
||||
useGeminiignore:
|
||||
config?.getFileFilteringOptions()?.respectGeminiIgnore ?? true,
|
||||
cache: true,
|
||||
cacheTtl: 30, // 30 seconds
|
||||
});
|
||||
await searcher.initialize();
|
||||
fileSearch.current = searcher;
|
||||
dispatch({ type: 'INITIALIZE_SUCCESS' });
|
||||
if (state.pattern !== null) {
|
||||
dispatch({ type: 'SEARCH', payload: state.pattern });
|
||||
}
|
||||
} catch (_) {
|
||||
dispatch({ type: 'ERROR' });
|
||||
}
|
||||
};
|
||||
|
||||
const search = async () => {
|
||||
if (!fileSearch.current || state.pattern === null) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (slowSearchTimer.current) {
|
||||
clearTimeout(slowSearchTimer.current);
|
||||
}
|
||||
|
||||
const controller = new AbortController();
|
||||
searchAbortController.current = controller;
|
||||
|
||||
slowSearchTimer.current = setTimeout(() => {
|
||||
dispatch({ type: 'SET_LOADING', payload: true });
|
||||
}, 100);
|
||||
|
||||
try {
|
||||
const results = await fileSearch.current.search(state.pattern, {
|
||||
signal: controller.signal,
|
||||
maxResults: MAX_SUGGESTIONS_TO_SHOW * 3,
|
||||
});
|
||||
|
||||
if (slowSearchTimer.current) {
|
||||
clearTimeout(slowSearchTimer.current);
|
||||
}
|
||||
|
||||
if (controller.signal.aborted) {
|
||||
return;
|
||||
}
|
||||
|
||||
const suggestions = results.map((p) => ({
|
||||
label: p,
|
||||
value: escapePath(p),
|
||||
}));
|
||||
dispatch({ type: 'SEARCH_SUCCESS', payload: suggestions });
|
||||
} catch (error) {
|
||||
if (!(error instanceof Error && error.name === 'AbortError')) {
|
||||
dispatch({ type: 'ERROR' });
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if (state.status === AtCompletionStatus.INITIALIZING) {
|
||||
initialize();
|
||||
} else if (state.status === AtCompletionStatus.SEARCHING) {
|
||||
search();
|
||||
}
|
||||
|
||||
return () => {
|
||||
searchAbortController.current?.abort();
|
||||
if (slowSearchTimer.current) {
|
||||
clearTimeout(slowSearchTimer.current);
|
||||
}
|
||||
};
|
||||
}, [state.status, state.pattern, config, cwd]);
|
||||
}
|
File diff suppressed because it is too large
Load Diff
|
@ -4,20 +4,7 @@
|
|||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { useEffect, useCallback, useMemo, useRef } from 'react';
|
||||
import * as fs from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
import { glob } from 'glob';
|
||||
import {
|
||||
isNodeError,
|
||||
escapePath,
|
||||
unescapePath,
|
||||
getErrorMessage,
|
||||
Config,
|
||||
FileDiscoveryService,
|
||||
DEFAULT_FILE_FILTERING_OPTIONS,
|
||||
SHELL_SPECIAL_CHARS,
|
||||
} from '@google/gemini-cli-core';
|
||||
import { useCallback, useMemo, useEffect } from 'react';
|
||||
import { Suggestion } from '../components/SuggestionsDisplay.js';
|
||||
import { CommandContext, SlashCommand } from '../commands/types.js';
|
||||
import {
|
||||
|
@ -26,8 +13,17 @@ import {
|
|||
} from '../components/shared/text-buffer.js';
|
||||
import { isSlashCommand } from '../utils/commandUtils.js';
|
||||
import { toCodePoints } from '../utils/textUtils.js';
|
||||
import { useAtCompletion } from './useAtCompletion.js';
|
||||
import { useSlashCompletion } from './useSlashCompletion.js';
|
||||
import { Config } from '@google/gemini-cli-core';
|
||||
import { useCompletion } from './useCompletion.js';
|
||||
|
||||
export enum CompletionMode {
|
||||
IDLE = 'IDLE',
|
||||
AT = 'AT',
|
||||
SLASH = 'SLASH',
|
||||
}
|
||||
|
||||
export interface UseCommandCompletionReturn {
|
||||
suggestions: Suggestion[];
|
||||
activeSuggestionIndex: number;
|
||||
|
@ -72,541 +68,109 @@ export function useCommandCompletion(
|
|||
navigateDown,
|
||||
} = useCompletion();
|
||||
|
||||
const completionStart = useRef(-1);
|
||||
const completionEnd = useRef(-1);
|
||||
|
||||
const cursorRow = buffer.cursor[0];
|
||||
const cursorCol = buffer.cursor[1];
|
||||
|
||||
// Check if cursor is after @ or / without unescaped spaces
|
||||
const commandIndex = useMemo(() => {
|
||||
const currentLine = buffer.lines[cursorRow] || '';
|
||||
if (cursorRow === 0 && isSlashCommand(currentLine.trim())) {
|
||||
return currentLine.indexOf('/');
|
||||
}
|
||||
|
||||
// For other completions like '@', we search backwards from the cursor.
|
||||
|
||||
const codePoints = toCodePoints(currentLine);
|
||||
for (let i = cursorCol - 1; i >= 0; i--) {
|
||||
const char = codePoints[i];
|
||||
|
||||
if (char === ' ') {
|
||||
// Check for unescaped spaces.
|
||||
let backslashCount = 0;
|
||||
for (let j = i - 1; j >= 0 && codePoints[j] === '\\'; j--) {
|
||||
backslashCount++;
|
||||
}
|
||||
if (backslashCount % 2 === 0) {
|
||||
return -1; // Inactive on unescaped space.
|
||||
}
|
||||
} else if (char === '@') {
|
||||
// Active if we find an '@' before any unescaped space.
|
||||
return i;
|
||||
const { completionMode, query, completionStart, completionEnd } =
|
||||
useMemo(() => {
|
||||
const currentLine = buffer.lines[cursorRow] || '';
|
||||
if (cursorRow === 0 && isSlashCommand(currentLine.trim())) {
|
||||
return {
|
||||
completionMode: CompletionMode.SLASH,
|
||||
query: currentLine,
|
||||
completionStart: 0,
|
||||
completionEnd: currentLine.length,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return -1;
|
||||
}, [cursorRow, cursorCol, buffer.lines]);
|
||||
const codePoints = toCodePoints(currentLine);
|
||||
for (let i = cursorCol - 1; i >= 0; i--) {
|
||||
const char = codePoints[i];
|
||||
|
||||
if (char === ' ') {
|
||||
let backslashCount = 0;
|
||||
for (let j = i - 1; j >= 0 && codePoints[j] === '\\'; j--) {
|
||||
backslashCount++;
|
||||
}
|
||||
if (backslashCount % 2 === 0) {
|
||||
return {
|
||||
completionMode: CompletionMode.IDLE,
|
||||
query: null,
|
||||
completionStart: -1,
|
||||
completionEnd: -1,
|
||||
};
|
||||
}
|
||||
} else if (char === '@') {
|
||||
let end = codePoints.length;
|
||||
for (let i = cursorCol; i < codePoints.length; i++) {
|
||||
if (codePoints[i] === ' ') {
|
||||
let backslashCount = 0;
|
||||
for (let j = i - 1; j >= 0 && codePoints[j] === '\\'; j--) {
|
||||
backslashCount++;
|
||||
}
|
||||
|
||||
if (backslashCount % 2 === 0) {
|
||||
end = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
const pathStart = i + 1;
|
||||
const partialPath = currentLine.substring(pathStart, end);
|
||||
return {
|
||||
completionMode: CompletionMode.AT,
|
||||
query: partialPath,
|
||||
completionStart: pathStart,
|
||||
completionEnd: end,
|
||||
};
|
||||
}
|
||||
}
|
||||
return {
|
||||
completionMode: CompletionMode.IDLE,
|
||||
query: null,
|
||||
completionStart: -1,
|
||||
completionEnd: -1,
|
||||
};
|
||||
}, [cursorRow, cursorCol, buffer.lines]);
|
||||
|
||||
useAtCompletion({
|
||||
enabled: completionMode === CompletionMode.AT,
|
||||
pattern: query || '',
|
||||
config,
|
||||
cwd,
|
||||
setSuggestions,
|
||||
setIsLoadingSuggestions,
|
||||
});
|
||||
|
||||
const slashCompletionRange = useSlashCompletion({
|
||||
enabled: completionMode === CompletionMode.SLASH,
|
||||
query,
|
||||
slashCommands,
|
||||
commandContext,
|
||||
setSuggestions,
|
||||
setIsLoadingSuggestions,
|
||||
setIsPerfectMatch,
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
if (commandIndex === -1 || reverseSearchActive) {
|
||||
setTimeout(resetCompletionState, 0);
|
||||
return;
|
||||
}
|
||||
setActiveSuggestionIndex(suggestions.length > 0 ? 0 : -1);
|
||||
setVisibleStartIndex(0);
|
||||
}, [suggestions, setActiveSuggestionIndex, setVisibleStartIndex]);
|
||||
|
||||
const currentLine = buffer.lines[cursorRow] || '';
|
||||
const codePoints = toCodePoints(currentLine);
|
||||
|
||||
if (codePoints[commandIndex] === '/') {
|
||||
// Always reset perfect match at the beginning of processing.
|
||||
setIsPerfectMatch(false);
|
||||
|
||||
const fullPath = currentLine.substring(commandIndex + 1);
|
||||
const hasTrailingSpace = currentLine.endsWith(' ');
|
||||
|
||||
// Get all non-empty parts of the command.
|
||||
const rawParts = fullPath.split(/\s+/).filter((p) => p);
|
||||
|
||||
let commandPathParts = rawParts;
|
||||
let partial = '';
|
||||
|
||||
// If there's no trailing space, the last part is potentially a partial segment.
|
||||
// We tentatively separate it.
|
||||
if (!hasTrailingSpace && rawParts.length > 0) {
|
||||
partial = rawParts[rawParts.length - 1];
|
||||
commandPathParts = rawParts.slice(0, -1);
|
||||
}
|
||||
|
||||
// Traverse the Command Tree using the tentative completed path
|
||||
let currentLevel: readonly SlashCommand[] | undefined = slashCommands;
|
||||
let leafCommand: SlashCommand | null = null;
|
||||
|
||||
for (const part of commandPathParts) {
|
||||
if (!currentLevel) {
|
||||
leafCommand = null;
|
||||
currentLevel = [];
|
||||
break;
|
||||
}
|
||||
const found: SlashCommand | undefined = currentLevel.find(
|
||||
(cmd) => cmd.name === part || cmd.altNames?.includes(part),
|
||||
);
|
||||
if (found) {
|
||||
leafCommand = found;
|
||||
currentLevel = found.subCommands as
|
||||
| readonly SlashCommand[]
|
||||
| undefined;
|
||||
} else {
|
||||
leafCommand = null;
|
||||
currentLevel = [];
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let exactMatchAsParent: SlashCommand | undefined;
|
||||
// Handle the Ambiguous Case
|
||||
if (!hasTrailingSpace && currentLevel) {
|
||||
exactMatchAsParent = currentLevel.find(
|
||||
(cmd) =>
|
||||
(cmd.name === partial || cmd.altNames?.includes(partial)) &&
|
||||
cmd.subCommands,
|
||||
);
|
||||
|
||||
if (exactMatchAsParent) {
|
||||
// It's a perfect match for a parent command. Override our initial guess.
|
||||
// Treat it as a completed command path.
|
||||
leafCommand = exactMatchAsParent;
|
||||
currentLevel = exactMatchAsParent.subCommands;
|
||||
partial = ''; // We now want to suggest ALL of its sub-commands.
|
||||
}
|
||||
}
|
||||
|
||||
// Check for perfect, executable match
|
||||
if (!hasTrailingSpace) {
|
||||
if (leafCommand && partial === '' && leafCommand.action) {
|
||||
// Case: /command<enter> - command has action, no sub-commands were suggested
|
||||
setIsPerfectMatch(true);
|
||||
} else if (currentLevel) {
|
||||
// Case: /command subcommand<enter>
|
||||
const perfectMatch = currentLevel.find(
|
||||
(cmd) =>
|
||||
(cmd.name === partial || cmd.altNames?.includes(partial)) &&
|
||||
cmd.action,
|
||||
);
|
||||
if (perfectMatch) {
|
||||
setIsPerfectMatch(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const depth = commandPathParts.length;
|
||||
const isArgumentCompletion =
|
||||
leafCommand?.completion &&
|
||||
(hasTrailingSpace ||
|
||||
(rawParts.length > depth && depth > 0 && partial !== ''));
|
||||
|
||||
// Set completion range
|
||||
if (hasTrailingSpace || exactMatchAsParent) {
|
||||
completionStart.current = currentLine.length;
|
||||
completionEnd.current = currentLine.length;
|
||||
} else if (partial) {
|
||||
if (isArgumentCompletion) {
|
||||
const commandSoFar = `/${commandPathParts.join(' ')}`;
|
||||
const argStartIndex =
|
||||
commandSoFar.length + (commandPathParts.length > 0 ? 1 : 0);
|
||||
completionStart.current = argStartIndex;
|
||||
} else {
|
||||
completionStart.current = currentLine.length - partial.length;
|
||||
}
|
||||
completionEnd.current = currentLine.length;
|
||||
} else {
|
||||
// e.g. /
|
||||
completionStart.current = commandIndex + 1;
|
||||
completionEnd.current = currentLine.length;
|
||||
}
|
||||
|
||||
// Provide Suggestions based on the now-corrected context
|
||||
if (isArgumentCompletion) {
|
||||
const fetchAndSetSuggestions = async () => {
|
||||
setIsLoadingSuggestions(true);
|
||||
const argString = rawParts.slice(depth).join(' ');
|
||||
const results =
|
||||
(await leafCommand!.completion!(commandContext, argString)) || [];
|
||||
const finalSuggestions = results.map((s) => ({ label: s, value: s }));
|
||||
setSuggestions(finalSuggestions);
|
||||
setShowSuggestions(finalSuggestions.length > 0);
|
||||
setActiveSuggestionIndex(finalSuggestions.length > 0 ? 0 : -1);
|
||||
setIsLoadingSuggestions(false);
|
||||
};
|
||||
fetchAndSetSuggestions();
|
||||
return;
|
||||
}
|
||||
|
||||
// Command/Sub-command Completion
|
||||
const commandsToSearch = currentLevel || [];
|
||||
if (commandsToSearch.length > 0) {
|
||||
let potentialSuggestions = commandsToSearch.filter(
|
||||
(cmd) =>
|
||||
cmd.description &&
|
||||
(cmd.name.startsWith(partial) ||
|
||||
cmd.altNames?.some((alt) => alt.startsWith(partial))),
|
||||
);
|
||||
|
||||
// If a user's input is an exact match and it is a leaf command,
|
||||
// enter should submit immediately.
|
||||
if (potentialSuggestions.length > 0 && !hasTrailingSpace) {
|
||||
const perfectMatch = potentialSuggestions.find(
|
||||
(s) => s.name === partial || s.altNames?.includes(partial),
|
||||
);
|
||||
if (perfectMatch && perfectMatch.action) {
|
||||
potentialSuggestions = [];
|
||||
}
|
||||
}
|
||||
|
||||
const finalSuggestions = potentialSuggestions.map((cmd) => ({
|
||||
label: cmd.name,
|
||||
value: cmd.name,
|
||||
description: cmd.description,
|
||||
}));
|
||||
|
||||
setSuggestions(finalSuggestions);
|
||||
setShowSuggestions(finalSuggestions.length > 0);
|
||||
setActiveSuggestionIndex(finalSuggestions.length > 0 ? 0 : -1);
|
||||
setIsLoadingSuggestions(false);
|
||||
return;
|
||||
}
|
||||
|
||||
// If we fall through, no suggestions are available.
|
||||
useEffect(() => {
|
||||
if (completionMode === CompletionMode.IDLE || reverseSearchActive) {
|
||||
resetCompletionState();
|
||||
return;
|
||||
}
|
||||
|
||||
// Handle At Command Completion
|
||||
completionEnd.current = codePoints.length;
|
||||
for (let i = cursorCol; i < codePoints.length; i++) {
|
||||
if (codePoints[i] === ' ') {
|
||||
let backslashCount = 0;
|
||||
for (let j = i - 1; j >= 0 && codePoints[j] === '\\'; j--) {
|
||||
backslashCount++;
|
||||
}
|
||||
|
||||
if (backslashCount % 2 === 0) {
|
||||
completionEnd.current = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const pathStart = commandIndex + 1;
|
||||
const partialPath = currentLine.substring(pathStart, completionEnd.current);
|
||||
const lastSlashIndex = partialPath.lastIndexOf('/');
|
||||
completionStart.current =
|
||||
lastSlashIndex === -1 ? pathStart : pathStart + lastSlashIndex + 1;
|
||||
const baseDirRelative =
|
||||
lastSlashIndex === -1
|
||||
? '.'
|
||||
: partialPath.substring(0, lastSlashIndex + 1);
|
||||
const prefix = unescapePath(
|
||||
lastSlashIndex === -1
|
||||
? partialPath
|
||||
: partialPath.substring(lastSlashIndex + 1),
|
||||
);
|
||||
|
||||
let isMounted = true;
|
||||
|
||||
const findFilesRecursively = async (
|
||||
startDir: string,
|
||||
searchPrefix: string,
|
||||
fileDiscovery: FileDiscoveryService | null,
|
||||
filterOptions: {
|
||||
respectGitIgnore?: boolean;
|
||||
respectGeminiIgnore?: boolean;
|
||||
},
|
||||
currentRelativePath = '',
|
||||
depth = 0,
|
||||
maxDepth = 10, // Limit recursion depth
|
||||
maxResults = 50, // Limit number of results
|
||||
): Promise<Suggestion[]> => {
|
||||
if (depth > maxDepth) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const lowerSearchPrefix = searchPrefix.toLowerCase();
|
||||
let foundSuggestions: Suggestion[] = [];
|
||||
try {
|
||||
const entries = await fs.readdir(startDir, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
if (foundSuggestions.length >= maxResults) break;
|
||||
|
||||
const entryPathRelative = path.join(currentRelativePath, entry.name);
|
||||
const entryPathFromRoot = path.relative(
|
||||
startDir,
|
||||
path.join(startDir, entry.name),
|
||||
);
|
||||
|
||||
// Conditionally ignore dotfiles
|
||||
if (!searchPrefix.startsWith('.') && entry.name.startsWith('.')) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if this entry should be ignored by filtering options
|
||||
if (
|
||||
fileDiscovery &&
|
||||
fileDiscovery.shouldIgnoreFile(entryPathFromRoot, filterOptions)
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (entry.name.toLowerCase().startsWith(lowerSearchPrefix)) {
|
||||
foundSuggestions.push({
|
||||
label: entryPathRelative + (entry.isDirectory() ? '/' : ''),
|
||||
value: escapePath(
|
||||
entryPathRelative + (entry.isDirectory() ? '/' : ''),
|
||||
),
|
||||
});
|
||||
}
|
||||
if (
|
||||
entry.isDirectory() &&
|
||||
entry.name !== 'node_modules' &&
|
||||
!entry.name.startsWith('.')
|
||||
) {
|
||||
if (foundSuggestions.length < maxResults) {
|
||||
foundSuggestions = foundSuggestions.concat(
|
||||
await findFilesRecursively(
|
||||
path.join(startDir, entry.name),
|
||||
searchPrefix, // Pass original searchPrefix for recursive calls
|
||||
fileDiscovery,
|
||||
filterOptions,
|
||||
entryPathRelative,
|
||||
depth + 1,
|
||||
maxDepth,
|
||||
maxResults - foundSuggestions.length,
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (_err) {
|
||||
// Ignore errors like permission denied or ENOENT during recursive search
|
||||
}
|
||||
return foundSuggestions.slice(0, maxResults);
|
||||
};
|
||||
|
||||
const findFilesWithGlob = async (
|
||||
searchPrefix: string,
|
||||
fileDiscoveryService: FileDiscoveryService,
|
||||
filterOptions: {
|
||||
respectGitIgnore?: boolean;
|
||||
respectGeminiIgnore?: boolean;
|
||||
},
|
||||
searchDir: string,
|
||||
maxResults = 50,
|
||||
): Promise<Suggestion[]> => {
|
||||
const globPattern = `**/${searchPrefix}*`;
|
||||
const files = await glob(globPattern, {
|
||||
cwd: searchDir,
|
||||
dot: searchPrefix.startsWith('.'),
|
||||
nocase: true,
|
||||
});
|
||||
|
||||
const suggestions: Suggestion[] = files
|
||||
.filter((file) => {
|
||||
if (fileDiscoveryService) {
|
||||
return !fileDiscoveryService.shouldIgnoreFile(file, filterOptions);
|
||||
}
|
||||
return true;
|
||||
})
|
||||
.map((file: string) => {
|
||||
const absolutePath = path.resolve(searchDir, file);
|
||||
const label = path.relative(cwd, absolutePath);
|
||||
return {
|
||||
label,
|
||||
value: escapePath(label),
|
||||
};
|
||||
})
|
||||
.slice(0, maxResults);
|
||||
|
||||
return suggestions;
|
||||
};
|
||||
|
||||
const fetchSuggestions = async () => {
|
||||
setIsLoadingSuggestions(true);
|
||||
let fetchedSuggestions: Suggestion[] = [];
|
||||
|
||||
const fileDiscoveryService = config ? config.getFileService() : null;
|
||||
const enableRecursiveSearch =
|
||||
config?.getEnableRecursiveFileSearch() ?? true;
|
||||
const filterOptions =
|
||||
config?.getFileFilteringOptions() ?? DEFAULT_FILE_FILTERING_OPTIONS;
|
||||
|
||||
try {
|
||||
// If there's no slash, or it's the root, do a recursive search from workspace directories
|
||||
for (const dir of dirs) {
|
||||
let fetchedSuggestionsPerDir: Suggestion[] = [];
|
||||
if (
|
||||
partialPath.indexOf('/') === -1 &&
|
||||
prefix &&
|
||||
enableRecursiveSearch
|
||||
) {
|
||||
if (fileDiscoveryService) {
|
||||
fetchedSuggestionsPerDir = await findFilesWithGlob(
|
||||
prefix,
|
||||
fileDiscoveryService,
|
||||
filterOptions,
|
||||
dir,
|
||||
);
|
||||
} else {
|
||||
fetchedSuggestionsPerDir = await findFilesRecursively(
|
||||
dir,
|
||||
prefix,
|
||||
null,
|
||||
filterOptions,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// Original behavior: list files in the specific directory
|
||||
const lowerPrefix = prefix.toLowerCase();
|
||||
const baseDirAbsolute = path.resolve(dir, baseDirRelative);
|
||||
const entries = await fs.readdir(baseDirAbsolute, {
|
||||
withFileTypes: true,
|
||||
});
|
||||
|
||||
// Filter entries using git-aware filtering
|
||||
const filteredEntries = [];
|
||||
for (const entry of entries) {
|
||||
// Conditionally ignore dotfiles
|
||||
if (!prefix.startsWith('.') && entry.name.startsWith('.')) {
|
||||
continue;
|
||||
}
|
||||
if (!entry.name.toLowerCase().startsWith(lowerPrefix)) continue;
|
||||
|
||||
const relativePath = path.relative(
|
||||
dir,
|
||||
path.join(baseDirAbsolute, entry.name),
|
||||
);
|
||||
if (
|
||||
fileDiscoveryService &&
|
||||
fileDiscoveryService.shouldIgnoreFile(
|
||||
relativePath,
|
||||
filterOptions,
|
||||
)
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
|
||||
filteredEntries.push(entry);
|
||||
}
|
||||
|
||||
fetchedSuggestionsPerDir = filteredEntries.map((entry) => {
|
||||
const absolutePath = path.resolve(baseDirAbsolute, entry.name);
|
||||
const label =
|
||||
cwd === dir ? entry.name : path.relative(cwd, absolutePath);
|
||||
const suggestionLabel = entry.isDirectory() ? label + '/' : label;
|
||||
return {
|
||||
label: suggestionLabel,
|
||||
value: escapePath(suggestionLabel),
|
||||
};
|
||||
});
|
||||
}
|
||||
fetchedSuggestions = [
|
||||
...fetchedSuggestions,
|
||||
...fetchedSuggestionsPerDir,
|
||||
];
|
||||
}
|
||||
|
||||
// Like glob, we always return forward slashes for path separators, even on Windows.
|
||||
// But preserve backslash escaping for special characters.
|
||||
const specialCharsLookahead = `(?![${SHELL_SPECIAL_CHARS.source.slice(1, -1)}])`;
|
||||
const pathSeparatorRegex = new RegExp(
|
||||
`\\\\${specialCharsLookahead}`,
|
||||
'g',
|
||||
);
|
||||
fetchedSuggestions = fetchedSuggestions.map((suggestion) => ({
|
||||
...suggestion,
|
||||
label: suggestion.label.replace(pathSeparatorRegex, '/'),
|
||||
value: suggestion.value.replace(pathSeparatorRegex, '/'),
|
||||
}));
|
||||
|
||||
// Sort by depth, then directories first, then alphabetically
|
||||
fetchedSuggestions.sort((a, b) => {
|
||||
const depthA = (a.label.match(/\//g) || []).length;
|
||||
const depthB = (b.label.match(/\//g) || []).length;
|
||||
|
||||
if (depthA !== depthB) {
|
||||
return depthA - depthB;
|
||||
}
|
||||
|
||||
const aIsDir = a.label.endsWith('/');
|
||||
const bIsDir = b.label.endsWith('/');
|
||||
if (aIsDir && !bIsDir) return -1;
|
||||
if (!aIsDir && bIsDir) return 1;
|
||||
|
||||
// exclude extension when comparing
|
||||
const filenameA = a.label.substring(
|
||||
0,
|
||||
a.label.length - path.extname(a.label).length,
|
||||
);
|
||||
const filenameB = b.label.substring(
|
||||
0,
|
||||
b.label.length - path.extname(b.label).length,
|
||||
);
|
||||
|
||||
return (
|
||||
filenameA.localeCompare(filenameB) || a.label.localeCompare(b.label)
|
||||
);
|
||||
});
|
||||
|
||||
if (isMounted) {
|
||||
setSuggestions(fetchedSuggestions);
|
||||
setShowSuggestions(fetchedSuggestions.length > 0);
|
||||
setActiveSuggestionIndex(fetchedSuggestions.length > 0 ? 0 : -1);
|
||||
setVisibleStartIndex(0);
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
if (isNodeError(error) && error.code === 'ENOENT') {
|
||||
if (isMounted) {
|
||||
setSuggestions([]);
|
||||
setShowSuggestions(false);
|
||||
}
|
||||
} else {
|
||||
console.error(
|
||||
`Error fetching completion suggestions for ${partialPath}: ${getErrorMessage(error)}`,
|
||||
);
|
||||
if (isMounted) {
|
||||
resetCompletionState();
|
||||
}
|
||||
}
|
||||
}
|
||||
if (isMounted) {
|
||||
setIsLoadingSuggestions(false);
|
||||
}
|
||||
};
|
||||
|
||||
const debounceTimeout = setTimeout(fetchSuggestions, 100);
|
||||
|
||||
return () => {
|
||||
isMounted = false;
|
||||
clearTimeout(debounceTimeout);
|
||||
};
|
||||
// Show suggestions if we are loading OR if there are results to display.
|
||||
setShowSuggestions(isLoadingSuggestions || suggestions.length > 0);
|
||||
}, [
|
||||
buffer.text,
|
||||
cursorRow,
|
||||
cursorCol,
|
||||
buffer.lines,
|
||||
dirs,
|
||||
cwd,
|
||||
commandIndex,
|
||||
resetCompletionState,
|
||||
slashCommands,
|
||||
commandContext,
|
||||
config,
|
||||
completionMode,
|
||||
suggestions.length,
|
||||
isLoadingSuggestions,
|
||||
reverseSearchActive,
|
||||
setSuggestions,
|
||||
resetCompletionState,
|
||||
setShowSuggestions,
|
||||
setActiveSuggestionIndex,
|
||||
setIsLoadingSuggestions,
|
||||
setIsPerfectMatch,
|
||||
setVisibleStartIndex,
|
||||
]);
|
||||
|
||||
const handleAutocomplete = useCallback(
|
||||
|
@ -616,18 +180,23 @@ export function useCommandCompletion(
|
|||
}
|
||||
const suggestion = suggestions[indexToUse].value;
|
||||
|
||||
if (completionStart.current === -1 || completionEnd.current === -1) {
|
||||
let start = completionStart;
|
||||
let end = completionEnd;
|
||||
if (completionMode === CompletionMode.SLASH) {
|
||||
start = slashCompletionRange.completionStart;
|
||||
end = slashCompletionRange.completionEnd;
|
||||
}
|
||||
|
||||
if (start === -1 || end === -1) {
|
||||
return;
|
||||
}
|
||||
|
||||
const isSlash = (buffer.lines[cursorRow] || '')[commandIndex] === '/';
|
||||
let suggestionText = suggestion;
|
||||
if (isSlash) {
|
||||
// If we are inserting (not replacing), and the preceding character is not a space, add one.
|
||||
if (completionMode === CompletionMode.SLASH) {
|
||||
if (
|
||||
completionStart.current === completionEnd.current &&
|
||||
completionStart.current > commandIndex + 1 &&
|
||||
(buffer.lines[cursorRow] || '')[completionStart.current - 1] !== ' '
|
||||
start === end &&
|
||||
start > 1 &&
|
||||
(buffer.lines[cursorRow] || '')[start - 1] !== ' '
|
||||
) {
|
||||
suggestionText = ' ' + suggestionText;
|
||||
}
|
||||
|
@ -636,12 +205,20 @@ export function useCommandCompletion(
|
|||
suggestionText += ' ';
|
||||
|
||||
buffer.replaceRangeByOffset(
|
||||
logicalPosToOffset(buffer.lines, cursorRow, completionStart.current),
|
||||
logicalPosToOffset(buffer.lines, cursorRow, completionEnd.current),
|
||||
logicalPosToOffset(buffer.lines, cursorRow, start),
|
||||
logicalPosToOffset(buffer.lines, cursorRow, end),
|
||||
suggestionText,
|
||||
);
|
||||
},
|
||||
[cursorRow, buffer, suggestions, commandIndex],
|
||||
[
|
||||
cursorRow,
|
||||
buffer,
|
||||
suggestions,
|
||||
completionMode,
|
||||
completionStart,
|
||||
completionEnd,
|
||||
slashCompletionRange,
|
||||
],
|
||||
);
|
||||
|
||||
return {
|
||||
|
|
|
@ -41,12 +41,17 @@ export function useReverseSearchCompletion(
|
|||
navigateDown,
|
||||
} = useCompletion();
|
||||
|
||||
// whenever reverseSearchActive is on, filter history
|
||||
useEffect(() => {
|
||||
if (!reverseSearchActive) {
|
||||
resetCompletionState();
|
||||
}
|
||||
}, [reverseSearchActive, resetCompletionState]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!reverseSearchActive) {
|
||||
return;
|
||||
}
|
||||
|
||||
const q = buffer.text.toLowerCase();
|
||||
const matches = shellHistory.reduce<Suggestion[]>((acc, cmd) => {
|
||||
const idx = cmd.toLowerCase().indexOf(q);
|
||||
|
@ -55,6 +60,7 @@ export function useReverseSearchCompletion(
|
|||
}
|
||||
return acc;
|
||||
}, []);
|
||||
|
||||
setSuggestions(matches);
|
||||
setShowSuggestions(matches.length > 0);
|
||||
setActiveSuggestionIndex(matches.length > 0 ? 0 : -1);
|
||||
|
@ -62,7 +68,6 @@ export function useReverseSearchCompletion(
|
|||
buffer.text,
|
||||
shellHistory,
|
||||
reverseSearchActive,
|
||||
resetCompletionState,
|
||||
setActiveSuggestionIndex,
|
||||
setShowSuggestions,
|
||||
setSuggestions,
|
||||
|
|
|
@ -0,0 +1,434 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
/** @vitest-environment jsdom */
|
||||
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { useSlashCompletion } from './useSlashCompletion.js';
|
||||
import { CommandContext, SlashCommand } from '../commands/types.js';
|
||||
import { useState } from 'react';
|
||||
import { Suggestion } from '../components/SuggestionsDisplay.js';
|
||||
|
||||
// Test harness to capture the state from the hook's callbacks.
|
||||
function useTestHarnessForSlashCompletion(
|
||||
enabled: boolean,
|
||||
query: string | null,
|
||||
slashCommands: readonly SlashCommand[],
|
||||
commandContext: CommandContext,
|
||||
) {
|
||||
const [suggestions, setSuggestions] = useState<Suggestion[]>([]);
|
||||
const [isLoadingSuggestions, setIsLoadingSuggestions] = useState(false);
|
||||
const [isPerfectMatch, setIsPerfectMatch] = useState(false);
|
||||
|
||||
const { completionStart, completionEnd } = useSlashCompletion({
|
||||
enabled,
|
||||
query,
|
||||
slashCommands,
|
||||
commandContext,
|
||||
setSuggestions,
|
||||
setIsLoadingSuggestions,
|
||||
setIsPerfectMatch,
|
||||
});
|
||||
|
||||
return {
|
||||
suggestions,
|
||||
isLoadingSuggestions,
|
||||
isPerfectMatch,
|
||||
completionStart,
|
||||
completionEnd,
|
||||
};
|
||||
}
|
||||
|
||||
describe('useSlashCompletion', () => {
|
||||
// A minimal mock is sufficient for these tests.
|
||||
const mockCommandContext = {} as CommandContext;
|
||||
|
||||
describe('Top-Level Commands', () => {
|
||||
it('should suggest all top-level commands for the root slash', async () => {
|
||||
const slashCommands = [
|
||||
{ name: 'help', altNames: ['?'], description: 'Show help' },
|
||||
{
|
||||
name: 'stats',
|
||||
altNames: ['usage'],
|
||||
description: 'check session stats. Usage: /stats [model|tools]',
|
||||
},
|
||||
{ name: 'clear', description: 'Clear the screen' },
|
||||
{
|
||||
name: 'memory',
|
||||
description: 'Manage memory',
|
||||
subCommands: [{ name: 'show', description: 'Show memory' }],
|
||||
},
|
||||
{ name: 'chat', description: 'Manage chat history' },
|
||||
] as unknown as SlashCommand[];
|
||||
const { result } = renderHook(() =>
|
||||
useTestHarnessForSlashCompletion(
|
||||
true,
|
||||
'/',
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
),
|
||||
);
|
||||
|
||||
expect(result.current.suggestions.length).toBe(slashCommands.length);
|
||||
expect(result.current.suggestions.map((s) => s.label)).toEqual(
|
||||
expect.arrayContaining(['help', 'clear', 'memory', 'chat', 'stats']),
|
||||
);
|
||||
});
|
||||
|
||||
it('should filter commands based on partial input', async () => {
|
||||
const slashCommands = [
|
||||
{ name: 'memory', description: 'Manage memory' },
|
||||
] as unknown as SlashCommand[];
|
||||
const { result } = renderHook(() =>
|
||||
useTestHarnessForSlashCompletion(
|
||||
true,
|
||||
'/mem',
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
),
|
||||
);
|
||||
|
||||
expect(result.current.suggestions).toEqual([
|
||||
{ label: 'memory', value: 'memory', description: 'Manage memory' },
|
||||
]);
|
||||
});
|
||||
|
||||
it('should suggest commands based on partial altNames', async () => {
|
||||
const slashCommands = [
|
||||
{
|
||||
name: 'stats',
|
||||
altNames: ['usage'],
|
||||
description: 'check session stats. Usage: /stats [model|tools]',
|
||||
},
|
||||
] as unknown as SlashCommand[];
|
||||
const { result } = renderHook(() =>
|
||||
useTestHarnessForSlashCompletion(
|
||||
true,
|
||||
'/usag',
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
),
|
||||
);
|
||||
|
||||
expect(result.current.suggestions).toEqual([
|
||||
{
|
||||
label: 'stats',
|
||||
value: 'stats',
|
||||
description: 'check session stats. Usage: /stats [model|tools]',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('should NOT provide suggestions for a perfectly typed command that is a leaf node', async () => {
|
||||
const slashCommands = [
|
||||
{ name: 'clear', description: 'Clear the screen', action: vi.fn() },
|
||||
] as unknown as SlashCommand[];
|
||||
const { result } = renderHook(() =>
|
||||
useTestHarnessForSlashCompletion(
|
||||
true,
|
||||
'/clear',
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
),
|
||||
);
|
||||
|
||||
expect(result.current.suggestions).toHaveLength(0);
|
||||
});
|
||||
|
||||
it.each([['/?'], ['/usage']])(
|
||||
'should not suggest commands when altNames is fully typed',
|
||||
async (query) => {
|
||||
const mockSlashCommands = [
|
||||
{
|
||||
name: 'help',
|
||||
altNames: ['?'],
|
||||
description: 'Show help',
|
||||
action: vi.fn(),
|
||||
},
|
||||
{
|
||||
name: 'stats',
|
||||
altNames: ['usage'],
|
||||
description: 'check session stats. Usage: /stats [model|tools]',
|
||||
action: vi.fn(),
|
||||
},
|
||||
] as unknown as SlashCommand[];
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useTestHarnessForSlashCompletion(
|
||||
true,
|
||||
query,
|
||||
mockSlashCommands,
|
||||
mockCommandContext,
|
||||
),
|
||||
);
|
||||
|
||||
expect(result.current.suggestions).toHaveLength(0);
|
||||
},
|
||||
);
|
||||
|
||||
it('should not provide suggestions for a fully typed command that has no sub-commands or argument completion', async () => {
|
||||
const slashCommands = [
|
||||
{ name: 'clear', description: 'Clear the screen' },
|
||||
] as unknown as SlashCommand[];
|
||||
const { result } = renderHook(() =>
|
||||
useTestHarnessForSlashCompletion(
|
||||
true,
|
||||
'/clear ',
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
),
|
||||
);
|
||||
|
||||
expect(result.current.suggestions).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should not provide suggestions for an unknown command', async () => {
|
||||
const slashCommands = [
|
||||
{ name: 'help', description: 'Show help' },
|
||||
] as unknown as SlashCommand[];
|
||||
const { result } = renderHook(() =>
|
||||
useTestHarnessForSlashCompletion(
|
||||
true,
|
||||
'/unknown-command',
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
),
|
||||
);
|
||||
|
||||
expect(result.current.suggestions).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Sub-Commands', () => {
|
||||
it('should suggest sub-commands for a parent command', async () => {
|
||||
const slashCommands = [
|
||||
{
|
||||
name: 'memory',
|
||||
description: 'Manage memory',
|
||||
subCommands: [
|
||||
{ name: 'show', description: 'Show memory' },
|
||||
{ name: 'add', description: 'Add to memory' },
|
||||
],
|
||||
},
|
||||
] as unknown as SlashCommand[];
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useTestHarnessForSlashCompletion(
|
||||
true,
|
||||
'/memory',
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
),
|
||||
);
|
||||
|
||||
expect(result.current.suggestions).toHaveLength(2);
|
||||
expect(result.current.suggestions).toEqual(
|
||||
expect.arrayContaining([
|
||||
{ label: 'show', value: 'show', description: 'Show memory' },
|
||||
{ label: 'add', value: 'add', description: 'Add to memory' },
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
it('should suggest all sub-commands when the query ends with the parent command and a space', async () => {
|
||||
const slashCommands = [
|
||||
{
|
||||
name: 'memory',
|
||||
description: 'Manage memory',
|
||||
subCommands: [
|
||||
{ name: 'show', description: 'Show memory' },
|
||||
{ name: 'add', description: 'Add to memory' },
|
||||
],
|
||||
},
|
||||
] as unknown as SlashCommand[];
|
||||
const { result } = renderHook(() =>
|
||||
useTestHarnessForSlashCompletion(
|
||||
true,
|
||||
'/memory ',
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
),
|
||||
);
|
||||
|
||||
expect(result.current.suggestions).toHaveLength(2);
|
||||
expect(result.current.suggestions).toEqual(
|
||||
expect.arrayContaining([
|
||||
{ label: 'show', value: 'show', description: 'Show memory' },
|
||||
{ label: 'add', value: 'add', description: 'Add to memory' },
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
it('should filter sub-commands by prefix', async () => {
|
||||
const slashCommands = [
|
||||
{
|
||||
name: 'memory',
|
||||
description: 'Manage memory',
|
||||
subCommands: [
|
||||
{ name: 'show', description: 'Show memory' },
|
||||
{ name: 'add', description: 'Add to memory' },
|
||||
],
|
||||
},
|
||||
] as unknown as SlashCommand[];
|
||||
const { result } = renderHook(() =>
|
||||
useTestHarnessForSlashCompletion(
|
||||
true,
|
||||
'/memory a',
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
),
|
||||
);
|
||||
|
||||
expect(result.current.suggestions).toEqual([
|
||||
{ label: 'add', value: 'add', description: 'Add to memory' },
|
||||
]);
|
||||
});
|
||||
|
||||
it('should provide no suggestions for an invalid sub-command', async () => {
|
||||
const slashCommands = [
|
||||
{
|
||||
name: 'memory',
|
||||
description: 'Manage memory',
|
||||
subCommands: [
|
||||
{ name: 'show', description: 'Show memory' },
|
||||
{ name: 'add', description: 'Add to memory' },
|
||||
],
|
||||
},
|
||||
] as unknown as SlashCommand[];
|
||||
const { result } = renderHook(() =>
|
||||
useTestHarnessForSlashCompletion(
|
||||
true,
|
||||
'/memory dothisnow',
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
),
|
||||
);
|
||||
|
||||
expect(result.current.suggestions).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Argument Completion', () => {
|
||||
it('should call the command.completion function for argument suggestions', async () => {
|
||||
const availableTags = [
|
||||
'my-chat-tag-1',
|
||||
'my-chat-tag-2',
|
||||
'another-channel',
|
||||
];
|
||||
const mockCompletionFn = vi
|
||||
.fn()
|
||||
.mockImplementation(
|
||||
async (_context: CommandContext, partialArg: string) =>
|
||||
availableTags.filter((tag) => tag.startsWith(partialArg)),
|
||||
);
|
||||
|
||||
const slashCommands = [
|
||||
{
|
||||
name: 'chat',
|
||||
description: 'Manage chat history',
|
||||
subCommands: [
|
||||
{
|
||||
name: 'resume',
|
||||
description: 'Resume a saved chat',
|
||||
completion: mockCompletionFn,
|
||||
},
|
||||
],
|
||||
},
|
||||
] as unknown as SlashCommand[];
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useTestHarnessForSlashCompletion(
|
||||
true,
|
||||
'/chat resume my-ch',
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
),
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockCompletionFn).toHaveBeenCalledWith(
|
||||
mockCommandContext,
|
||||
'my-ch',
|
||||
);
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.suggestions).toEqual([
|
||||
{ label: 'my-chat-tag-1', value: 'my-chat-tag-1' },
|
||||
{ label: 'my-chat-tag-2', value: 'my-chat-tag-2' },
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
it('should call command.completion with an empty string when args start with a space', async () => {
|
||||
const mockCompletionFn = vi
|
||||
.fn()
|
||||
.mockResolvedValue(['my-chat-tag-1', 'my-chat-tag-2', 'my-channel']);
|
||||
|
||||
const slashCommands = [
|
||||
{
|
||||
name: 'chat',
|
||||
description: 'Manage chat history',
|
||||
subCommands: [
|
||||
{
|
||||
name: 'resume',
|
||||
description: 'Resume a saved chat',
|
||||
completion: mockCompletionFn,
|
||||
},
|
||||
],
|
||||
},
|
||||
] as unknown as SlashCommand[];
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useTestHarnessForSlashCompletion(
|
||||
true,
|
||||
'/chat resume ',
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
),
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockCompletionFn).toHaveBeenCalledWith(mockCommandContext, '');
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.suggestions).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle completion function that returns null', async () => {
|
||||
const completionFn = vi.fn().mockResolvedValue(null);
|
||||
const slashCommands = [
|
||||
{
|
||||
name: 'chat',
|
||||
description: 'Manage chat history',
|
||||
subCommands: [
|
||||
{
|
||||
name: 'resume',
|
||||
description: 'Resume a saved chat',
|
||||
completion: completionFn,
|
||||
},
|
||||
],
|
||||
},
|
||||
] as unknown as SlashCommand[];
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useTestHarnessForSlashCompletion(
|
||||
true,
|
||||
'/chat resume ',
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
),
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.suggestions).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,187 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { useState, useEffect } from 'react';
|
||||
import { Suggestion } from '../components/SuggestionsDisplay.js';
|
||||
import { CommandContext, SlashCommand } from '../commands/types.js';
|
||||
|
||||
export interface UseSlashCompletionProps {
|
||||
enabled: boolean;
|
||||
query: string | null;
|
||||
slashCommands: readonly SlashCommand[];
|
||||
commandContext: CommandContext;
|
||||
setSuggestions: (suggestions: Suggestion[]) => void;
|
||||
setIsLoadingSuggestions: (isLoading: boolean) => void;
|
||||
setIsPerfectMatch: (isMatch: boolean) => void;
|
||||
}
|
||||
|
||||
export function useSlashCompletion(props: UseSlashCompletionProps): {
|
||||
completionStart: number;
|
||||
completionEnd: number;
|
||||
} {
|
||||
const {
|
||||
enabled,
|
||||
query,
|
||||
slashCommands,
|
||||
commandContext,
|
||||
setSuggestions,
|
||||
setIsLoadingSuggestions,
|
||||
setIsPerfectMatch,
|
||||
} = props;
|
||||
const [completionStart, setCompletionStart] = useState(-1);
|
||||
const [completionEnd, setCompletionEnd] = useState(-1);
|
||||
|
||||
useEffect(() => {
|
||||
if (!enabled || query === null) {
|
||||
return;
|
||||
}
|
||||
|
||||
const fullPath = query?.substring(1) || '';
|
||||
const hasTrailingSpace = !!query?.endsWith(' ');
|
||||
const rawParts = fullPath.split(/\s+/).filter((p) => p);
|
||||
let commandPathParts = rawParts;
|
||||
let partial = '';
|
||||
|
||||
if (!hasTrailingSpace && rawParts.length > 0) {
|
||||
partial = rawParts[rawParts.length - 1];
|
||||
commandPathParts = rawParts.slice(0, -1);
|
||||
}
|
||||
|
||||
let currentLevel: readonly SlashCommand[] | undefined = slashCommands;
|
||||
let leafCommand: SlashCommand | null = null;
|
||||
|
||||
for (const part of commandPathParts) {
|
||||
if (!currentLevel) {
|
||||
leafCommand = null;
|
||||
currentLevel = [];
|
||||
break;
|
||||
}
|
||||
const found: SlashCommand | undefined = currentLevel.find(
|
||||
(cmd) => cmd.name === part || cmd.altNames?.includes(part),
|
||||
);
|
||||
if (found) {
|
||||
leafCommand = found;
|
||||
currentLevel = found.subCommands as readonly SlashCommand[] | undefined;
|
||||
} else {
|
||||
leafCommand = null;
|
||||
currentLevel = [];
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let exactMatchAsParent: SlashCommand | undefined;
|
||||
if (!hasTrailingSpace && currentLevel) {
|
||||
exactMatchAsParent = currentLevel.find(
|
||||
(cmd) =>
|
||||
(cmd.name === partial || cmd.altNames?.includes(partial)) &&
|
||||
cmd.subCommands,
|
||||
);
|
||||
|
||||
if (exactMatchAsParent) {
|
||||
leafCommand = exactMatchAsParent;
|
||||
currentLevel = exactMatchAsParent.subCommands;
|
||||
partial = '';
|
||||
}
|
||||
}
|
||||
|
||||
setIsPerfectMatch(false);
|
||||
if (!hasTrailingSpace) {
|
||||
if (leafCommand && partial === '' && leafCommand.action) {
|
||||
setIsPerfectMatch(true);
|
||||
} else if (currentLevel) {
|
||||
const perfectMatch = currentLevel.find(
|
||||
(cmd) =>
|
||||
(cmd.name === partial || cmd.altNames?.includes(partial)) &&
|
||||
cmd.action,
|
||||
);
|
||||
if (perfectMatch) {
|
||||
setIsPerfectMatch(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const depth = commandPathParts.length;
|
||||
const isArgumentCompletion =
|
||||
leafCommand?.completion &&
|
||||
(hasTrailingSpace ||
|
||||
(rawParts.length > depth && depth > 0 && partial !== ''));
|
||||
|
||||
if (hasTrailingSpace || exactMatchAsParent) {
|
||||
setCompletionStart(query.length);
|
||||
setCompletionEnd(query.length);
|
||||
} else if (partial) {
|
||||
if (isArgumentCompletion) {
|
||||
const commandSoFar = `/${commandPathParts.join(' ')}`;
|
||||
const argStartIndex =
|
||||
commandSoFar.length + (commandPathParts.length > 0 ? 1 : 0);
|
||||
setCompletionStart(argStartIndex);
|
||||
} else {
|
||||
setCompletionStart(query.length - partial.length);
|
||||
}
|
||||
setCompletionEnd(query.length);
|
||||
} else {
|
||||
setCompletionStart(1);
|
||||
setCompletionEnd(query.length);
|
||||
}
|
||||
|
||||
if (isArgumentCompletion) {
|
||||
const fetchAndSetSuggestions = async () => {
|
||||
setIsLoadingSuggestions(true);
|
||||
const argString = rawParts.slice(depth).join(' ');
|
||||
const results =
|
||||
(await leafCommand!.completion!(commandContext, argString)) || [];
|
||||
const finalSuggestions = results.map((s) => ({ label: s, value: s }));
|
||||
setSuggestions(finalSuggestions);
|
||||
setIsLoadingSuggestions(false);
|
||||
};
|
||||
fetchAndSetSuggestions();
|
||||
return;
|
||||
}
|
||||
|
||||
const commandsToSearch = currentLevel || [];
|
||||
if (commandsToSearch.length > 0) {
|
||||
let potentialSuggestions = commandsToSearch.filter(
|
||||
(cmd) =>
|
||||
cmd.description &&
|
||||
(cmd.name.startsWith(partial) ||
|
||||
cmd.altNames?.some((alt) => alt.startsWith(partial))),
|
||||
);
|
||||
|
||||
if (potentialSuggestions.length > 0 && !hasTrailingSpace) {
|
||||
const perfectMatch = potentialSuggestions.find(
|
||||
(s) => s.name === partial || s.altNames?.includes(partial),
|
||||
);
|
||||
if (perfectMatch && perfectMatch.action) {
|
||||
potentialSuggestions = [];
|
||||
}
|
||||
}
|
||||
|
||||
const finalSuggestions = potentialSuggestions.map((cmd) => ({
|
||||
label: cmd.name,
|
||||
value: cmd.name,
|
||||
description: cmd.description,
|
||||
}));
|
||||
|
||||
setSuggestions(finalSuggestions);
|
||||
return;
|
||||
}
|
||||
|
||||
setSuggestions([]);
|
||||
}, [
|
||||
enabled,
|
||||
query,
|
||||
slashCommands,
|
||||
commandContext,
|
||||
setSuggestions,
|
||||
setIsLoadingSuggestions,
|
||||
setIsPerfectMatch,
|
||||
]);
|
||||
|
||||
return {
|
||||
completionStart,
|
||||
completionEnd,
|
||||
};
|
||||
}
|
|
@ -34,6 +34,7 @@
|
|||
"chardet": "^2.1.0",
|
||||
"diff": "^7.0.0",
|
||||
"dotenv": "^17.1.0",
|
||||
"fdir": "^6.4.6",
|
||||
"glob": "^10.4.5",
|
||||
"google-auth-library": "^9.11.0",
|
||||
"html-to-text": "^9.0.5",
|
||||
|
@ -42,6 +43,7 @@
|
|||
"marked": "^15.0.12",
|
||||
"micromatch": "^4.0.8",
|
||||
"open": "^10.1.2",
|
||||
"picomatch": "^4.0.1",
|
||||
"shell-quote": "^1.8.3",
|
||||
"simple-git": "^3.28.0",
|
||||
"strip-ansi": "^7.1.0",
|
||||
|
@ -49,10 +51,12 @@
|
|||
"ws": "^8.18.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@google/gemini-cli-test-utils": "file:../test-utils",
|
||||
"@types/diff": "^7.0.2",
|
||||
"@types/dotenv": "^6.1.1",
|
||||
"@types/micromatch": "^4.0.8",
|
||||
"@types/minimatch": "^5.1.2",
|
||||
"@types/picomatch": "^4.0.1",
|
||||
"@types/ws": "^8.5.10",
|
||||
"typescript": "^5.3.3",
|
||||
"vitest": "^3.1.1"
|
||||
|
|
|
@ -40,6 +40,7 @@ export * from './utils/shell-utils.js';
|
|||
export * from './utils/systemEncoding.js';
|
||||
export * from './utils/textUtils.js';
|
||||
export * from './utils/formatters.js';
|
||||
export * from './utils/filesearch/fileSearch.js';
|
||||
|
||||
// Export services
|
||||
export * from './services/fileDiscoveryService.js';
|
||||
|
|
|
@ -0,0 +1,112 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect, vi, afterEach, beforeEach } from 'vitest';
|
||||
import { getCacheKey, read, write, clear } from './crawlCache.js';
|
||||
|
||||
describe('CrawlCache', () => {
  // Key derivation is pure: same inputs must hash identically, any input
  // change must produce a different key (cache invalidation relies on this).
  describe('getCacheKey', () => {
    it('should generate a consistent hash', () => {
      const key1 = getCacheKey('/foo', 'bar');
      const key2 = getCacheKey('/foo', 'bar');
      expect(key1).toBe(key2);
    });

    it('should generate a different hash for different directories', () => {
      const key1 = getCacheKey('/foo', 'bar');
      const key2 = getCacheKey('/bar', 'bar');
      expect(key1).not.toBe(key2);
    });

    it('should generate a different hash for different ignore content', () => {
      const key1 = getCacheKey('/foo', 'bar');
      const key2 = getCacheKey('/foo', 'baz');
      expect(key1).not.toBe(key2);
    });
  });

  describe('in-memory cache operations', () => {
    beforeEach(() => {
      // Ensure a clean slate before each test
      clear();
    });

    afterEach(() => {
      // Restore real timers after each test that uses fake ones
      vi.useRealTimers();
    });

    it('should write and read data from the cache', () => {
      const key = 'test-key';
      const data = ['foo', 'bar'];
      write(key, data, 10000); // 10 second TTL
      const cachedData = read(key);
      expect(cachedData).toEqual(data);
    });

    it('should return undefined for a nonexistent key', () => {
      const cachedData = read('nonexistent-key');
      expect(cachedData).toBeUndefined();
    });

    it('should clear the cache', () => {
      const key = 'test-key';
      const data = ['foo', 'bar'];
      write(key, data, 10000);
      clear();
      const cachedData = read(key);
      expect(cachedData).toBeUndefined();
    });

    // TTL eviction is timer-driven, so these tests use vitest fake timers
    // and advance the clock explicitly instead of sleeping.
    it('should automatically evict a cache entry after its TTL expires', async () => {
      vi.useFakeTimers();
      const key = 'ttl-key';
      const data = ['foo'];
      const ttl = 5000; // 5 seconds

      write(key, data, ttl);

      // Should exist immediately after writing
      expect(read(key)).toEqual(data);

      // Advance time just before expiration
      await vi.advanceTimersByTimeAsync(ttl - 1);
      expect(read(key)).toEqual(data);

      // Advance time past expiration
      await vi.advanceTimersByTimeAsync(1);
      expect(read(key)).toBeUndefined();
    });

    it('should reset the timer when an entry is updated', async () => {
      vi.useFakeTimers();
      const key = 'update-key';
      const initialData = ['initial'];
      const updatedData = ['updated'];
      const ttl = 5000; // 5 seconds

      // Write initial data
      write(key, initialData, ttl);

      // Advance time, but not enough to expire
      await vi.advanceTimersByTimeAsync(3000);
      expect(read(key)).toEqual(initialData);

      // Update the data, which should reset the timer
      write(key, updatedData, ttl);
      expect(read(key)).toEqual(updatedData);

      // Advance time again. If the timer wasn't reset, the total elapsed
      // time (3000 + 3000 = 6000) would cause an eviction.
      await vi.advanceTimersByTimeAsync(3000);
      expect(read(key)).toEqual(updatedData);

      // Advance past the new expiration time
      await vi.advanceTimersByTimeAsync(2001);
      expect(read(key)).toBeUndefined();
    });
  });
});
|
|
@ -0,0 +1,65 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import crypto from 'node:crypto';
|
||||
|
||||
const crawlCache = new Map<string, string[]>();
|
||||
const cacheTimers = new Map<string, NodeJS.Timeout>();
|
||||
|
||||
/**
|
||||
* Generates a unique cache key based on the project directory and the content
|
||||
* of ignore files. This ensures that the cache is invalidated if the project
|
||||
* or ignore rules change.
|
||||
*/
|
||||
export const getCacheKey = (
|
||||
directory: string,
|
||||
ignoreContent: string,
|
||||
): string => {
|
||||
const hash = crypto.createHash('sha256');
|
||||
hash.update(directory);
|
||||
hash.update(ignoreContent);
|
||||
return hash.digest('hex');
|
||||
};
|
||||
|
||||
/**
|
||||
* Reads cached data from the in-memory cache.
|
||||
* Returns undefined if the key is not found.
|
||||
*/
|
||||
export const read = (key: string): string[] | undefined => crawlCache.get(key);
|
||||
|
||||
/**
|
||||
* Writes data to the in-memory cache and sets a timer to evict it after the TTL.
|
||||
*/
|
||||
export const write = (key: string, results: string[], ttlMs: number): void => {
|
||||
// Clear any existing timer for this key to prevent premature deletion
|
||||
if (cacheTimers.has(key)) {
|
||||
clearTimeout(cacheTimers.get(key)!);
|
||||
}
|
||||
|
||||
// Store the new data
|
||||
crawlCache.set(key, results);
|
||||
|
||||
// Set a timer to automatically delete the cache entry after the TTL
|
||||
const timerId = setTimeout(() => {
|
||||
crawlCache.delete(key);
|
||||
cacheTimers.delete(key);
|
||||
}, ttlMs);
|
||||
|
||||
// Store the timer handle so we can clear it if the entry is updated
|
||||
cacheTimers.set(key, timerId);
|
||||
};
|
||||
|
||||
/**
|
||||
* Clears the entire cache and all active timers.
|
||||
* Primarily used for testing.
|
||||
*/
|
||||
export const clear = (): void => {
|
||||
for (const timerId of cacheTimers.values()) {
|
||||
clearTimeout(timerId);
|
||||
}
|
||||
crawlCache.clear();
|
||||
cacheTimers.clear();
|
||||
};
|
|
@ -0,0 +1,642 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
|
||||
import * as fs from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
import * as cache from './crawlCache.js';
|
||||
import { FileSearch, AbortError, filter } from './fileSearch.js';
|
||||
import { createTmpDir, cleanupTmpDir } from '@google/gemini-cli-test-utils';
|
||||
|
||||
// Test-only view of FileSearch that exposes the private performCrawl()
// method, so the tests below can spy on crawl invocations with vi.spyOn.
type FileSearchWithPrivateMethods = FileSearch & {
  performCrawl: () => Promise<void>;
};
|
||||
|
||||
describe('FileSearch', () => {
  // Each test builds its own on-disk fixture; afterEach removes it and
  // restores any spies/mocks installed during the test.
  let tmpDir: string;
  afterEach(async () => {
    if (tmpDir) {
      await cleanupTmpDir(tmpDir);
    }
    vi.restoreAllMocks();
  });

  it('should use .geminiignore rules', async () => {
    tmpDir = await createTmpDir({
      '.geminiignore': 'dist/',
      dist: ['ignored.js'],
      src: ['not-ignored.js'],
    });

    const fileSearch = new FileSearch({
      projectRoot: tmpDir,
      useGitignore: false,
      useGeminiignore: true,
      ignoreDirs: [],
      cache: false,
      cacheTtl: 0,
    });

    await fileSearch.initialize();
    const results = await fileSearch.search('');

    // Note: results list directories (trailing '/') before files.
    expect(results).toEqual(['src/', '.geminiignore', 'src/not-ignored.js']);
  });

  it('should combine .gitignore and .geminiignore rules', async () => {
    tmpDir = await createTmpDir({
      '.gitignore': 'dist/',
      '.geminiignore': 'build/',
      dist: ['ignored-by-git.js'],
      build: ['ignored-by-gemini.js'],
      src: ['not-ignored.js'],
    });

    const fileSearch = new FileSearch({
      projectRoot: tmpDir,
      useGitignore: true,
      useGeminiignore: true,
      ignoreDirs: [],
      cache: false,
      cacheTtl: 0,
    });

    await fileSearch.initialize();
    const results = await fileSearch.search('');

    expect(results).toEqual([
      'src/',
      '.geminiignore',
      '.gitignore',
      'src/not-ignored.js',
    ]);
  });

  it('should use ignoreDirs option', async () => {
    tmpDir = await createTmpDir({
      logs: ['some.log'],
      src: ['main.js'],
    });

    const fileSearch = new FileSearch({
      projectRoot: tmpDir,
      useGitignore: false,
      useGeminiignore: false,
      ignoreDirs: ['logs'],
      cache: false,
      cacheTtl: 0,
    });

    await fileSearch.initialize();
    const results = await fileSearch.search('');

    expect(results).toEqual(['src/', 'src/main.js']);
  });

  it('should handle negated directories', async () => {
    tmpDir = await createTmpDir({
      '.gitignore': ['build/**', '!build/public', '!build/public/**'].join(
        '\n',
      ),
      build: {
        'private.js': '',
        public: ['index.html'],
      },
      src: ['main.js'],
    });

    const fileSearch = new FileSearch({
      projectRoot: tmpDir,
      useGitignore: true,
      useGeminiignore: false,
      ignoreDirs: [],
      cache: false,
      cacheTtl: 0,
    });

    await fileSearch.initialize();
    const results = await fileSearch.search('');

    expect(results).toEqual([
      'build/',
      'build/public/',
      'src/',
      '.gitignore',
      'build/public/index.html',
      'src/main.js',
    ]);
  });

  it('should filter results with a search pattern', async () => {
    tmpDir = await createTmpDir({
      src: {
        'main.js': '',
        'util.ts': '',
        'style.css': '',
      },
    });

    const fileSearch = new FileSearch({
      projectRoot: tmpDir,
      useGitignore: false,
      useGeminiignore: false,
      ignoreDirs: [],
      cache: false,
      cacheTtl: 0,
    });

    await fileSearch.initialize();
    const results = await fileSearch.search('**/*.js');

    expect(results).toEqual(['src/main.js']);
  });

  it('should handle root-level file negation', async () => {
    tmpDir = await createTmpDir({
      '.gitignore': ['*.mk', '!Foo.mk'].join('\n'),
      'bar.mk': '',
      'Foo.mk': '',
    });

    const fileSearch = new FileSearch({
      projectRoot: tmpDir,
      useGitignore: true,
      useGeminiignore: false,
      ignoreDirs: [],
      cache: false,
      cacheTtl: 0,
    });

    await fileSearch.initialize();
    const results = await fileSearch.search('');

    expect(results).toEqual(['.gitignore', 'Foo.mk']);
  });

  it('should handle directory negation with glob', async () => {
    tmpDir = await createTmpDir({
      '.gitignore': [
        'third_party/**',
        '!third_party/foo',
        '!third_party/foo/bar',
        '!third_party/foo/bar/baz_buffer',
      ].join('\n'),
      third_party: {
        foo: {
          bar: {
            baz_buffer: '',
          },
        },
        ignore_this: '',
      },
    });

    const fileSearch = new FileSearch({
      projectRoot: tmpDir,
      useGitignore: true,
      useGeminiignore: false,
      ignoreDirs: [],
      cache: false,
      cacheTtl: 0,
    });

    await fileSearch.initialize();
    const results = await fileSearch.search('');

    expect(results).toEqual([
      'third_party/',
      'third_party/foo/',
      'third_party/foo/bar/',
      '.gitignore',
      'third_party/foo/bar/baz_buffer',
    ]);
  });

  it('should correctly handle negated patterns in .gitignore', async () => {
    tmpDir = await createTmpDir({
      '.gitignore': ['dist/**', '!dist/keep.js'].join('\n'),
      dist: ['ignore.js', 'keep.js'],
      src: ['main.js'],
    });

    const fileSearch = new FileSearch({
      projectRoot: tmpDir,
      useGitignore: true,
      useGeminiignore: false,
      ignoreDirs: [],
      cache: false,
      cacheTtl: 0,
    });

    await fileSearch.initialize();
    const results = await fileSearch.search('');

    expect(results).toEqual([
      'dist/',
      'src/',
      '.gitignore',
      'dist/keep.js',
      'src/main.js',
    ]);
  });

  // New test cases start here

  it('should initialize correctly when ignore files are missing', async () => {
    tmpDir = await createTmpDir({
      src: ['file1.js'],
    });

    const fileSearch = new FileSearch({
      projectRoot: tmpDir,
      useGitignore: true,
      useGeminiignore: true,
      ignoreDirs: [],
      cache: false,
      cacheTtl: 0,
    });

    // Expect no errors to be thrown during initialization
    await expect(fileSearch.initialize()).resolves.toBeUndefined();
    const results = await fileSearch.search('');
    expect(results).toEqual(['src/', 'src/file1.js']);
  });

  it('should respect maxResults option in search', async () => {
    tmpDir = await createTmpDir({
      src: {
        'file1.js': '',
        'file2.js': '',
        'file3.js': '',
        'file4.js': '',
      },
    });

    const fileSearch = new FileSearch({
      projectRoot: tmpDir,
      useGitignore: false,
      useGeminiignore: false,
      ignoreDirs: [],
      cache: false,
      cacheTtl: 0,
    });

    await fileSearch.initialize();
    const results = await fileSearch.search('**/*.js', { maxResults: 2 });

    expect(results).toEqual(['src/file1.js', 'src/file2.js']); // Assuming alphabetical sort
  });

  it('should return empty array when no matches are found', async () => {
    tmpDir = await createTmpDir({
      src: ['file1.js'],
    });

    const fileSearch = new FileSearch({
      projectRoot: tmpDir,
      useGitignore: false,
      useGeminiignore: false,
      ignoreDirs: [],
      cache: false,
      cacheTtl: 0,
    });

    await fileSearch.initialize();
    const results = await fileSearch.search('nonexistent-file.xyz');

    expect(results).toEqual([]);
  });

  it('should throw AbortError when filter is aborted', async () => {
    const controller = new AbortController();
    const dummyPaths = Array.from({ length: 5000 }, (_, i) => `file${i}.js`); // Large array to ensure yielding

    const filterPromise = filter(dummyPaths, '*.js', controller.signal);

    // Abort after a short delay to ensure filter has started
    setTimeout(() => controller.abort(), 1);

    await expect(filterPromise).rejects.toThrow(AbortError);
  });

  describe('with in-memory cache', () => {
    beforeEach(() => {
      // Crawl results are cached process-wide, so reset between tests.
      cache.clear();
    });

    afterEach(() => {
      vi.useRealTimers();
    });

    it('should throw an error if search is called before initialization', async () => {
      tmpDir = await createTmpDir({});
      const fileSearch = new FileSearch({
        projectRoot: tmpDir,
        useGitignore: false,
        useGeminiignore: false,
        ignoreDirs: [],
        cache: false,
        cacheTtl: 0,
      });

      await expect(fileSearch.search('')).rejects.toThrow(
        'Engine not initialized. Call initialize() first.',
      );
    });

    it('should hit the cache for subsequent searches', async () => {
      tmpDir = await createTmpDir({ 'file1.js': '' });
      const getOptions = () => ({
        projectRoot: tmpDir,
        useGitignore: false,
        useGeminiignore: false,
        ignoreDirs: [],
        cache: true,
        cacheTtl: 10,
      });

      const fs1 = new FileSearch(getOptions());
      const crawlSpy1 = vi.spyOn(
        fs1 as FileSearchWithPrivateMethods,
        'performCrawl',
      );
      await fs1.initialize();
      expect(crawlSpy1).toHaveBeenCalledTimes(1);

      // Second search should hit the cache because the options are identical
      const fs2 = new FileSearch(getOptions());
      const crawlSpy2 = vi.spyOn(
        fs2 as FileSearchWithPrivateMethods,
        'performCrawl',
      );
      await fs2.initialize();
      expect(crawlSpy2).not.toHaveBeenCalled();
    });

    it('should miss the cache when ignore rules change', async () => {
      tmpDir = await createTmpDir({
        '.gitignore': 'a.txt',
        'a.txt': '',
        'b.txt': '',
      });
      const options = {
        projectRoot: tmpDir,
        useGitignore: true,
        useGeminiignore: false,
        ignoreDirs: [],
        cache: true,
        cacheTtl: 10000,
      };

      // Initial search to populate the cache
      const fs1 = new FileSearch(options);
      const crawlSpy1 = vi.spyOn(
        fs1 as FileSearchWithPrivateMethods,
        'performCrawl',
      );
      await fs1.initialize();
      const results1 = await fs1.search('');
      expect(crawlSpy1).toHaveBeenCalledTimes(1);
      expect(results1).toEqual(['.gitignore', 'b.txt']);

      // Modify the ignore file
      await fs.writeFile(path.join(tmpDir, '.gitignore'), 'b.txt');

      // Second search should miss the cache and trigger a recrawl
      const fs2 = new FileSearch(options);
      const crawlSpy2 = vi.spyOn(
        fs2 as FileSearchWithPrivateMethods,
        'performCrawl',
      );
      await fs2.initialize();
      const results2 = await fs2.search('');
      expect(crawlSpy2).toHaveBeenCalledTimes(1);
      expect(results2).toEqual(['.gitignore', 'a.txt']);
    });

    it('should miss the cache after TTL expires', async () => {
      vi.useFakeTimers();
      tmpDir = await createTmpDir({ 'file1.js': '' });
      const options = {
        projectRoot: tmpDir,
        useGitignore: false,
        useGeminiignore: false,
        ignoreDirs: [],
        cache: true,
        cacheTtl: 10, // 10 seconds
      };

      // Initial search to populate the cache
      const fs1 = new FileSearch(options);
      await fs1.initialize();

      // Advance time past the TTL
      await vi.advanceTimersByTimeAsync(11000);

      // Second search should miss the cache and trigger a recrawl
      const fs2 = new FileSearch(options);
      const crawlSpy = vi.spyOn(
        fs2 as FileSearchWithPrivateMethods,
        'performCrawl',
      );
      await fs2.initialize();

      expect(crawlSpy).toHaveBeenCalledTimes(1);
    });
  });

  it('should handle empty or commented-only ignore files', async () => {
    tmpDir = await createTmpDir({
      '.gitignore': '# This is a comment\n\n \n',
      src: ['main.js'],
    });

    const fileSearch = new FileSearch({
      projectRoot: tmpDir,
      useGitignore: true,
      useGeminiignore: false,
      ignoreDirs: [],
      cache: false,
      cacheTtl: 0,
    });

    await fileSearch.initialize();
    const results = await fileSearch.search('');

    expect(results).toEqual(['src/', '.gitignore', 'src/main.js']);
  });

  it('should always ignore the .git directory', async () => {
    tmpDir = await createTmpDir({
      '.git': ['config', 'HEAD'],
      src: ['main.js'],
    });

    const fileSearch = new FileSearch({
      projectRoot: tmpDir,
      useGitignore: false, // Explicitly disable .gitignore to isolate this rule
      useGeminiignore: false,
      ignoreDirs: [],
      cache: false,
      cacheTtl: 0,
    });

    await fileSearch.initialize();
    const results = await fileSearch.search('');

    expect(results).toEqual(['src/', 'src/main.js']);
  });

  it('should be cancellable via AbortSignal', async () => {
    const largeDir: Record<string, string> = {};
    for (let i = 0; i < 100; i++) {
      largeDir[`file${i}.js`] = '';
    }
    tmpDir = await createTmpDir(largeDir);

    const fileSearch = new FileSearch({
      projectRoot: tmpDir,
      useGitignore: false,
      useGeminiignore: false,
      ignoreDirs: [],
      cache: false,
      cacheTtl: 0,
    });

    await fileSearch.initialize();

    const controller = new AbortController();
    const searchPromise = fileSearch.search('**/*.js', {
      signal: controller.signal,
    });

    // Yield to allow the search to start before aborting.
    await new Promise((resolve) => setImmediate(resolve));

    controller.abort();

    await expect(searchPromise).rejects.toThrow(AbortError);
  });

  it('should leverage ResultCache for bestBaseQuery optimization', async () => {
    tmpDir = await createTmpDir({
      src: {
        'foo.js': '',
        'bar.ts': '',
        nested: {
          'baz.js': '',
        },
      },
    });

    const fileSearch = new FileSearch({
      projectRoot: tmpDir,
      useGitignore: false,
      useGeminiignore: false,
      ignoreDirs: [],
      cache: true, // Enable caching for this test
      cacheTtl: 0,
    });

    await fileSearch.initialize();

    // Perform a broad search to prime the cache
    const broadResults = await fileSearch.search('src/**');
    expect(broadResults).toEqual([
      'src/',
      'src/nested/',
      'src/bar.ts',
      'src/foo.js',
      'src/nested/baz.js',
    ]);

    // Perform a more specific search that should leverage the broad search's cached results
    const specificResults = await fileSearch.search('src/**/*.js');
    expect(specificResults).toEqual(['src/foo.js', 'src/nested/baz.js']);

    // Although we can't directly inspect ResultCache.hits/misses from here,
    // the correctness of specificResults after a broad search implicitly
    // verifies that the caching mechanism, including bestBaseQuery, is working.
  });

  it('should be case-insensitive by default', async () => {
    tmpDir = await createTmpDir({
      'File1.Js': '',
      'file2.js': '',
      'FILE3.JS': '',
      'other.txt': '',
    });

    const fileSearch = new FileSearch({
      projectRoot: tmpDir,
      useGitignore: false,
      useGeminiignore: false,
      ignoreDirs: [],
      cache: false,
      cacheTtl: 0,
    });

    await fileSearch.initialize();

    // Search with a lowercase pattern
    let results = await fileSearch.search('file*.js');
    expect(results).toHaveLength(3);
    expect(results).toEqual(
      expect.arrayContaining(['File1.Js', 'file2.js', 'FILE3.JS']),
    );

    // Search with an uppercase pattern
    results = await fileSearch.search('FILE*.JS');
    expect(results).toHaveLength(3);
    expect(results).toEqual(
      expect.arrayContaining(['File1.Js', 'file2.js', 'FILE3.JS']),
    );

    // Search with a mixed-case pattern
    results = await fileSearch.search('FiLe*.Js');
    expect(results).toHaveLength(3);
    expect(results).toEqual(
      expect.arrayContaining(['File1.Js', 'file2.js', 'FILE3.JS']),
    );
  });

  it('should respect maxResults even when the cache returns an exact match', async () => {
    tmpDir = await createTmpDir({
      'file1.js': '',
      'file2.js': '',
      'file3.js': '',
      'file4.js': '',
      'file5.js': '',
    });

    const fileSearch = new FileSearch({
      projectRoot: tmpDir,
      useGitignore: false,
      useGeminiignore: false,
      ignoreDirs: [],
      cache: true, // Ensure caching is enabled
      cacheTtl: 10000,
    });

    await fileSearch.initialize();

    // 1. Perform a broad search to populate the cache with an exact match.
    const initialResults = await fileSearch.search('*.js');
    expect(initialResults).toEqual([
      'file1.js',
      'file2.js',
      'file3.js',
      'file4.js',
      'file5.js',
    ]);

    // 2. Perform the same search again, but this time with a maxResults limit.
    const limitedResults = await fileSearch.search('*.js', { maxResults: 2 });

    // 3. Assert that the maxResults limit was respected, even with a cache hit.
    expect(limitedResults).toEqual(['file1.js', 'file2.js']);
  });
});
|
|
@ -0,0 +1,269 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import path from 'node:path';
|
||||
import fs from 'node:fs';
|
||||
import { fdir } from 'fdir';
|
||||
import picomatch from 'picomatch';
|
||||
import { Ignore } from './ignore.js';
|
||||
import { ResultCache } from './result-cache.js';
|
||||
import * as cache from './crawlCache.js';
|
||||
|
||||
/** Configuration for constructing a {@link FileSearch} instance. */
export type FileSearchOptions = {
  // Root directory to crawl; search results are reported relative to it.
  projectRoot: string;
  // Additional directory names to exclude from the crawl.
  ignoreDirs: string[];
  // Whether to apply .gitignore rules.
  useGitignore: boolean;
  // Whether to apply .geminiignore rules.
  useGeminiignore: boolean;
  // Whether to reuse crawl results via the shared in-memory crawl cache.
  cache: boolean;
  // Time-to-live for cached crawl results. NOTE(review): tests pass 10 with
  // a "10 seconds" comment while crawlCache.write takes milliseconds —
  // confirm the unit conversion at the call site.
  cacheTtl: number;
};
||||
|
||||
export class AbortError extends Error {
|
||||
constructor(message = 'Search aborted') {
|
||||
super(message);
|
||||
this.name = 'AbortError';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Filters a list of paths based on a given pattern.
|
||||
* @param allPaths The list of all paths to filter.
|
||||
* @param pattern The picomatch pattern to filter by.
|
||||
* @param signal An AbortSignal to cancel the operation.
|
||||
* @returns A promise that resolves to the filtered and sorted list of paths.
|
||||
*/
|
||||
export async function filter(
|
||||
allPaths: string[],
|
||||
pattern: string,
|
||||
signal: AbortSignal | undefined,
|
||||
): Promise<string[]> {
|
||||
const patternFilter = picomatch(pattern, {
|
||||
dot: true,
|
||||
contains: true,
|
||||
nocase: true,
|
||||
});
|
||||
|
||||
const results: string[] = [];
|
||||
for (const [i, p] of allPaths.entries()) {
|
||||
// Yield control to the event loop periodically to prevent blocking.
|
||||
if (i % 1000 === 0) {
|
||||
await new Promise((resolve) => setImmediate(resolve));
|
||||
if (signal?.aborted) {
|
||||
throw new AbortError();
|
||||
}
|
||||
}
|
||||
|
||||
if (patternFilter(p)) {
|
||||
results.push(p);
|
||||
}
|
||||
}
|
||||
|
||||
results.sort((a, b) => {
|
||||
const aIsDir = a.endsWith('/');
|
||||
const bIsDir = b.endsWith('/');
|
||||
|
||||
if (aIsDir && !bIsDir) return -1;
|
||||
if (!aIsDir && bIsDir) return 1;
|
||||
|
||||
// This is 40% faster than localeCompare and the only thing we would really
|
||||
// gain from localeCompare is case-sensitive sort
|
||||
return a < b ? -1 : a > b ? 1 : 0;
|
||||
});
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/** Per-call options for FileSearch.search(). */
export type SearchOptions = {
  // Optional cancellation signal; an aborted search rejects with AbortError.
  signal?: AbortSignal;
  // Upper bound on the number of results returned, applied after filtering.
  maxResults?: number;
};
|
||||
|
||||
/**
 * Provides a fast and efficient way to search for files within a project,
 * respecting .gitignore and .geminiignore rules, and utilizing caching
 * for improved performance.
 */
export class FileSearch {
  // Project root resolved to an absolute path; crawl results are relative to it.
  private readonly absoluteDir: string;
  // Combined ignore rules gathered from .gitignore/.geminiignore and options.
  private readonly ignore: Ignore = new Ignore();
  // Built by initialize(); search() refuses to run until it exists.
  private resultCache: ResultCache | undefined;
  // Every path produced by the crawl. Directory entries carry a trailing '/'
  // (the pattern filter's sort relies on this to group directories first).
  private allFiles: string[] = [];

  /**
   * Constructs a new `FileSearch` instance.
   * @param options Configuration options for the file search.
   */
  constructor(private readonly options: FileSearchOptions) {
    this.absoluteDir = path.resolve(options.projectRoot);
  }

  /**
   * Initializes the file search engine by loading ignore rules, crawling the
   * file system, and building the in-memory cache. This method must be called
   * before performing any searches.
   */
  async initialize(): Promise<void> {
    this.loadIgnoreRules();
    await this.crawlFiles();
    this.buildResultCache();
  }

  /**
   * Searches for files matching a given pattern.
   * @param pattern The picomatch pattern to search for (e.g., '*.js', 'src/**').
   * @param options Search options, including an AbortSignal and maxResults.
   * @returns A promise that resolves to a list of matching file paths, relative
   * to the project root.
   * @throws {Error} If called before `initialize()` has completed.
   * @throws {AbortError} If `options.signal` is aborted during the search.
   */
  async search(
    pattern: string,
    options: SearchOptions = {},
  ): Promise<string[]> {
    if (!this.resultCache) {
      throw new Error('Engine not initialized. Call initialize() first.');
    }

    // An empty pattern means "match everything".
    pattern = pattern || '*';

    const { files: candidates, isExactMatch } =
      await this.resultCache!.get(pattern);

    let filteredCandidates;
    if (isExactMatch) {
      filteredCandidates = candidates;
    } else {
      // Apply the user's picomatch pattern filter.
      filteredCandidates = await filter(candidates, pattern, options.signal);
      // Memoize so a repeated (or extended, e.g. "foo" -> "foobar") query
      // starts from this already-narrowed list.
      this.resultCache!.set(pattern, filteredCandidates);
    }

    // Trade-off: We apply a two-stage filtering process.
    // 1. During the file system crawl (`performCrawl`), we only apply directory-level
    //    ignore rules (e.g., `node_modules/`, `dist/`). This is because applying
    //    a full ignore filter (which includes file-specific patterns like `*.log`)
    //    during the crawl can significantly slow down `fdir`.
    // 2. Here, in the `search` method, we apply the full ignore filter
    //    (including file patterns) to the `filteredCandidates` (which have already
    //    been filtered by the user's search pattern and sorted). For autocomplete,
    //    the number of displayed results is small (MAX_SUGGESTIONS_TO_SHOW),
    //    so applying the full filter to this truncated list is much more efficient
    //    than applying it to every file during the initial crawl.
    const fileFilter = this.ignore.getFileFilter();
    const results: string[] = [];
    for (const [i, candidate] of filteredCandidates.entries()) {
      // Yield to the event loop to avoid blocking on large result sets.
      // Abort is only observed at these yield points (every 1000 items).
      if (i % 1000 === 0) {
        await new Promise((resolve) => setImmediate(resolve));
        if (options.signal?.aborted) {
          throw new AbortError();
        }
      }

      if (results.length >= (options.maxResults ?? Infinity)) {
        break;
      }
      // The `ignore` library throws an error if the path is '.', so we skip it.
      if (candidate === '.') {
        continue;
      }
      // fileFilter returns true for ignored paths, so keep the non-ignored ones.
      if (!fileFilter(candidate)) {
        results.push(candidate);
      }
    }
    return results;
  }

  /**
   * Loads ignore rules from .gitignore and .geminiignore files, and applies
   * any additional ignore directories specified in the options.
   */
  private loadIgnoreRules(): void {
    if (this.options.useGitignore) {
      const gitignorePath = path.join(this.absoluteDir, '.gitignore');
      if (fs.existsSync(gitignorePath)) {
        this.ignore.add(fs.readFileSync(gitignorePath, 'utf8'));
      }
    }

    if (this.options.useGeminiignore) {
      const geminiignorePath = path.join(this.absoluteDir, '.geminiignore');
      if (fs.existsSync(geminiignorePath)) {
        this.ignore.add(fs.readFileSync(geminiignorePath, 'utf8'));
      }
    }

    // '.git' is always excluded; extra directories come from the options.
    const ignoreDirs = ['.git', ...this.options.ignoreDirs];
    this.ignore.add(
      ignoreDirs.map((dir) => {
        // Normalize to a trailing slash so Ignore treats these as explicit
        // directory patterns (see Ignore.add).
        if (dir.endsWith('/')) {
          return dir;
        }
        return `${dir}/`;
      }),
    );
  }

  /**
   * Crawls the file system to get a list of all files and directories,
   * optionally using a cache for faster initialization.
   * The cache key incorporates the ignore-rule fingerprint, so a change to
   * any ignore file invalidates previous crawl results.
   */
  private async crawlFiles(): Promise<void> {
    if (this.options.cache) {
      const cacheKey = cache.getCacheKey(
        this.absoluteDir,
        this.ignore.getFingerprint(),
      );
      const cachedResults = cache.read(cacheKey);

      if (cachedResults) {
        this.allFiles = cachedResults;
        return;
      }
    }

    this.allFiles = await this.performCrawl();

    if (this.options.cache) {
      const cacheKey = cache.getCacheKey(
        this.absoluteDir,
        this.ignore.getFingerprint(),
      );
      // cacheTtl is in seconds; cache.write expects milliseconds.
      cache.write(cacheKey, this.allFiles, this.options.cacheTtl * 1000);
    }
  }

  /**
   * Performs the actual file system crawl using `fdir`, applying directory
   * ignore rules.
   * @returns A promise that resolves to a list of all files and directories.
   */
  private async performCrawl(): Promise<string[]> {
    const dirFilter = this.ignore.getDirectoryFilter();

    // We use `fdir` for fast file system traversal. A key performance
    // optimization for large workspaces is to exclude entire directories
    // early in the traversal process. This is why we apply directory-specific
    // ignore rules (e.g., `node_modules/`, `dist/`) directly to `fdir`'s
    // exclude filter.
    const api = new fdir()
      .withRelativePaths()
      .withDirs()
      .withPathSeparator('/') // Always use unix style paths
      .exclude((_, dirPath) => {
        // NOTE(review): path.relative uses the platform separator; on Windows
        // this yields backslashes while the ignore rules use '/'. Confirm
        // behavior on Windows.
        const relativePath = path.relative(this.absoluteDir, dirPath);
        return dirFilter(`${relativePath}/`);
      });

    return api.crawl(this.absoluteDir).withPromise();
  }

  /**
   * Builds the in-memory cache for fast pattern matching.
   */
  private buildResultCache(): void {
    this.resultCache = new ResultCache(this.allFiles, this.absoluteDir);
  }
}
|
|
@ -0,0 +1,65 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { Ignore } from './ignore.js';
|
||||
|
||||
describe('Ignore', () => {
|
||||
describe('getDirectoryFilter', () => {
|
||||
it('should ignore directories matching directory patterns', () => {
|
||||
const ig = new Ignore().add(['foo/', 'bar/']);
|
||||
const dirFilter = ig.getDirectoryFilter();
|
||||
expect(dirFilter('foo/')).toBe(true);
|
||||
expect(dirFilter('bar/')).toBe(true);
|
||||
expect(dirFilter('baz/')).toBe(false);
|
||||
});
|
||||
|
||||
it('should not ignore directories with file patterns', () => {
|
||||
const ig = new Ignore().add(['foo.js', '*.log']);
|
||||
const dirFilter = ig.getDirectoryFilter();
|
||||
expect(dirFilter('foo.js')).toBe(false);
|
||||
expect(dirFilter('foo.log')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getFileFilter', () => {
|
||||
it('should not ignore files with directory patterns', () => {
|
||||
const ig = new Ignore().add(['foo/', 'bar/']);
|
||||
const fileFilter = ig.getFileFilter();
|
||||
expect(fileFilter('foo')).toBe(false);
|
||||
expect(fileFilter('foo/file.txt')).toBe(false);
|
||||
});
|
||||
|
||||
it('should ignore files matching file patterns', () => {
|
||||
const ig = new Ignore().add(['*.log', 'foo.js']);
|
||||
const fileFilter = ig.getFileFilter();
|
||||
expect(fileFilter('foo.log')).toBe(true);
|
||||
expect(fileFilter('foo.js')).toBe(true);
|
||||
expect(fileFilter('bar.txt')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
it('should accumulate patterns across multiple add() calls', () => {
|
||||
const ig = new Ignore().add('foo.js');
|
||||
ig.add('bar.js');
|
||||
const fileFilter = ig.getFileFilter();
|
||||
expect(fileFilter('foo.js')).toBe(true);
|
||||
expect(fileFilter('bar.js')).toBe(true);
|
||||
expect(fileFilter('baz.js')).toBe(false);
|
||||
});
|
||||
|
||||
it('should return a stable and consistent fingerprint', () => {
|
||||
const ig1 = new Ignore().add(['foo', '!bar']);
|
||||
const ig2 = new Ignore().add('foo\n!bar');
|
||||
|
||||
// Fingerprints should be identical for the same rules.
|
||||
expect(ig1.getFingerprint()).toBe(ig2.getFingerprint());
|
||||
|
||||
// Adding a new rule should change the fingerprint.
|
||||
ig2.add('baz');
|
||||
expect(ig1.getFingerprint()).not.toBe(ig2.getFingerprint());
|
||||
});
|
||||
});
|
|
@ -0,0 +1,93 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import ignore from 'ignore';
|
||||
import picomatch from 'picomatch';
|
||||
|
||||
// Heuristic matcher used to classify ignore patterns: true when the final
// path segment contains a '.' (or a '*', since both appear in the character
// class [*.]), i.e. the pattern "looks like" a file name rather than a bare
// directory name. NOTE(review): despite the name, a lone '*' in the last
// segment also matches — confirm that is intended.
const hasFileExtension = picomatch('**/*[*.]*');
|
||||
|
||||
export class Ignore {
|
||||
private readonly allPatterns: string[] = [];
|
||||
private dirIgnorer = ignore();
|
||||
private fileIgnorer = ignore();
|
||||
|
||||
/**
|
||||
* Adds one or more ignore patterns.
|
||||
* @param patterns A single pattern string or an array of pattern strings.
|
||||
* Each pattern can be a glob-like string similar to .gitignore rules.
|
||||
* @returns The `Ignore` instance for chaining.
|
||||
*/
|
||||
add(patterns: string | string[]): this {
|
||||
if (typeof patterns === 'string') {
|
||||
patterns = patterns.split(/\r?\n/);
|
||||
}
|
||||
|
||||
for (const p of patterns) {
|
||||
const pattern = p.trim();
|
||||
|
||||
if (pattern === '' || pattern.startsWith('#')) {
|
||||
continue;
|
||||
}
|
||||
|
||||
this.allPatterns.push(pattern);
|
||||
|
||||
const isPositiveDirPattern =
|
||||
pattern.endsWith('/') && !pattern.startsWith('!');
|
||||
|
||||
if (isPositiveDirPattern) {
|
||||
this.dirIgnorer.add(pattern);
|
||||
} else {
|
||||
// An ambiguous pattern (e.g., "build") could match a file or a
|
||||
// directory. To optimize the file system crawl, we use a heuristic:
|
||||
// patterns without a dot in the last segment are included in the
|
||||
// directory exclusion check.
|
||||
//
|
||||
// This heuristic can fail. For example, an ignore pattern of "my.assets"
|
||||
// intended to exclude a directory will not be treated as a directory
|
||||
// pattern because it contains a ".". This results in crawling a
|
||||
// directory that should have been excluded, reducing efficiency.
|
||||
// Correctness is still maintained. The incorrectly crawled directory
|
||||
// will be filtered out by the final ignore check.
|
||||
//
|
||||
// For maximum crawl efficiency, users should explicitly mark directory
|
||||
// patterns with a trailing slash (e.g., "my.assets/").
|
||||
this.fileIgnorer.add(pattern);
|
||||
if (!hasFileExtension(pattern)) {
|
||||
this.dirIgnorer.add(pattern);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a predicate that matches explicit directory ignore patterns (patterns ending with '/').
|
||||
* @returns {(dirPath: string) => boolean}
|
||||
*/
|
||||
getDirectoryFilter(): (dirPath: string) => boolean {
|
||||
return (dirPath: string) => this.dirIgnorer.ignores(dirPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a predicate that matches file ignore patterns (all patterns not ending with '/').
|
||||
* Note: This may also match directories if a file pattern matches a directory name, but all explicit directory patterns are handled by getDirectoryFilter.
|
||||
* @returns {(filePath: string) => boolean}
|
||||
*/
|
||||
getFileFilter(): (filePath: string) => boolean {
|
||||
return (filePath: string) => this.fileIgnorer.ignores(filePath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a string representing the current set of ignore patterns.
|
||||
* This can be used to generate a unique identifier for the ignore configuration,
|
||||
* useful for caching purposes.
|
||||
* @returns A string fingerprint of the ignore patterns.
|
||||
*/
|
||||
getFingerprint(): string {
|
||||
return this.allPatterns.join('\n');
|
||||
}
|
||||
}
|
|
@ -0,0 +1,56 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import path from 'node:path';
|
||||
import { test, expect } from 'vitest';
|
||||
import { ResultCache } from './result-cache.js';
|
||||
|
||||
test('ResultCache basic usage', async () => {
|
||||
const files = [
|
||||
'foo.txt',
|
||||
'bar.js',
|
||||
'baz.md',
|
||||
'subdir/file.txt',
|
||||
'subdir/other.js',
|
||||
'subdir/nested/file.md',
|
||||
];
|
||||
const cache = new ResultCache(files, path.resolve('.'));
|
||||
const { files: resultFiles, isExactMatch } = await cache.get('*.js');
|
||||
expect(resultFiles).toEqual(files);
|
||||
expect(isExactMatch).toBe(false);
|
||||
});
|
||||
|
||||
test('ResultCache cache hit/miss', async () => {
|
||||
const files = ['foo.txt', 'bar.js', 'baz.md'];
|
||||
const cache = new ResultCache(files, path.resolve('.'));
|
||||
// First call: miss
|
||||
const { files: result1Files, isExactMatch: isExactMatch1 } =
|
||||
await cache.get('*.js');
|
||||
expect(result1Files).toEqual(files);
|
||||
expect(isExactMatch1).toBe(false);
|
||||
|
||||
// Simulate FileSearch applying the filter and setting the result
|
||||
cache.set('*.js', ['bar.js']);
|
||||
|
||||
// Second call: hit
|
||||
const { files: result2Files, isExactMatch: isExactMatch2 } =
|
||||
await cache.get('*.js');
|
||||
expect(result2Files).toEqual(['bar.js']);
|
||||
expect(isExactMatch2).toBe(true);
|
||||
});
|
||||
|
||||
test('ResultCache best base query', async () => {
|
||||
const files = ['foo.txt', 'foobar.js', 'baz.md'];
|
||||
const cache = new ResultCache(files, path.resolve('.'));
|
||||
|
||||
// Cache a broader query
|
||||
cache.set('foo', ['foo.txt', 'foobar.js']);
|
||||
|
||||
// Search for a more specific query that starts with the broader one
|
||||
const { files: resultFiles, isExactMatch } = await cache.get('foobar');
|
||||
expect(resultFiles).toEqual(['foo.txt', 'foobar.js']);
|
||||
expect(isExactMatch).toBe(false);
|
||||
});
|
|
@ -0,0 +1,70 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
/**
|
||||
* Implements an in-memory cache for file search results.
|
||||
* This cache optimizes subsequent searches by leveraging previously computed results.
|
||||
*/
|
||||
export class ResultCache {
|
||||
private readonly cache: Map<string, string[]>;
|
||||
private hits = 0;
|
||||
private misses = 0;
|
||||
|
||||
constructor(
|
||||
private readonly allFiles: string[],
|
||||
private readonly absoluteDir: string,
|
||||
) {
|
||||
this.cache = new Map();
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves cached search results for a given query, or provides a base set
|
||||
* of files to search from.
|
||||
* @param query The search query pattern.
|
||||
* @returns An object containing the files to search and a boolean indicating
|
||||
* if the result is an exact cache hit.
|
||||
*/
|
||||
async get(
|
||||
query: string,
|
||||
): Promise<{ files: string[]; isExactMatch: boolean }> {
|
||||
const isCacheHit = this.cache.has(query);
|
||||
|
||||
if (isCacheHit) {
|
||||
this.hits++;
|
||||
return { files: this.cache.get(query)!, isExactMatch: true };
|
||||
}
|
||||
|
||||
this.misses++;
|
||||
|
||||
// This is the core optimization of the memory cache.
|
||||
// If a user first searches for "foo", and then for "foobar",
|
||||
// we don't need to search through all files again. We can start
|
||||
// from the results of the "foo" search.
|
||||
// This finds the most specific, already-cached query that is a prefix
|
||||
// of the current query.
|
||||
let bestBaseQuery = '';
|
||||
for (const key of this.cache?.keys?.() ?? []) {
|
||||
if (query.startsWith(key) && key.length > bestBaseQuery.length) {
|
||||
bestBaseQuery = key;
|
||||
}
|
||||
}
|
||||
|
||||
const filesToSearch = bestBaseQuery
|
||||
? this.cache.get(bestBaseQuery)!
|
||||
: this.allFiles;
|
||||
|
||||
return { files: filesToSearch, isExactMatch: false };
|
||||
}
|
||||
|
||||
/**
|
||||
* Stores search results in the cache.
|
||||
* @param query The search query pattern.
|
||||
* @param results The matching file paths to cache.
|
||||
*/
|
||||
set(query: string, results: string[]): void {
|
||||
this.cache.set(query, results);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,7 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
export * from './src/file-system-test-helpers.js';
|
|
@ -0,0 +1,18 @@
|
|||
{
|
||||
"name": "@google/gemini-cli-test-utils",
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"main": "src/index.ts",
|
||||
"license": "Apache-2.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"build": "node ../../scripts/build_package.js",
|
||||
"typecheck": "tsc --noEmit"
|
||||
},
|
||||
"devDependencies": {
|
||||
"typescript": "^5.3.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=20"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,98 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import * as fs from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
import * as os from 'os';
|
||||
|
||||
/**
|
||||
* Defines the structure of a virtual file system to be created for testing.
|
||||
* Keys are file or directory names, and values can be:
|
||||
* - A string: The content of a file.
|
||||
* - A `FileSystemStructure` object: Represents a subdirectory with its own structure.
|
||||
* - An array of strings or `FileSystemStructure` objects: Represents a directory
|
||||
* where strings are empty files and objects are subdirectories.
|
||||
*
|
||||
* @example
|
||||
* // Example 1: Simple files and directories
|
||||
* const structure1 = {
|
||||
* 'file1.txt': 'Hello, world!',
|
||||
* 'empty-dir': [],
|
||||
* 'src': {
|
||||
* 'main.js': '// Main application file',
|
||||
* 'utils.ts': '// Utility functions',
|
||||
* },
|
||||
* };
|
||||
*
|
||||
* @example
|
||||
* // Example 2: Nested directories and empty files within an array
|
||||
* const structure2 = {
|
||||
* 'config.json': '{ "port": 3000 }',
|
||||
* 'data': [
|
||||
* 'users.csv',
|
||||
* 'products.json',
|
||||
* {
|
||||
* 'logs': [
|
||||
* 'error.log',
|
||||
* 'access.log',
|
||||
* ],
|
||||
* },
|
||||
* ],
|
||||
* };
|
||||
*/
|
||||
export type FileSystemStructure = {
|
||||
[name: string]:
|
||||
| string
|
||||
| FileSystemStructure
|
||||
| Array<string | FileSystemStructure>;
|
||||
};
|
||||
|
||||
/**
|
||||
* Recursively creates files and directories based on the provided `FileSystemStructure`.
|
||||
* @param dir The base directory where the structure will be created.
|
||||
* @param structure The `FileSystemStructure` defining the files and directories.
|
||||
*/
|
||||
async function create(dir: string, structure: FileSystemStructure) {
|
||||
for (const [name, content] of Object.entries(structure)) {
|
||||
const newPath = path.join(dir, name);
|
||||
if (typeof content === 'string') {
|
||||
await fs.writeFile(newPath, content);
|
||||
} else if (Array.isArray(content)) {
|
||||
await fs.mkdir(newPath, { recursive: true });
|
||||
for (const item of content) {
|
||||
if (typeof item === 'string') {
|
||||
await fs.writeFile(path.join(newPath, item), '');
|
||||
} else {
|
||||
await create(newPath, item as FileSystemStructure);
|
||||
}
|
||||
}
|
||||
} else if (typeof content === 'object' && content !== null) {
|
||||
await fs.mkdir(newPath, { recursive: true });
|
||||
await create(newPath, content as FileSystemStructure);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a temporary directory and populates it with a given file system structure.
|
||||
* @param structure The `FileSystemStructure` to create within the temporary directory.
|
||||
* @returns A promise that resolves to the absolute path of the created temporary directory.
|
||||
*/
|
||||
export async function createTmpDir(
|
||||
structure: FileSystemStructure,
|
||||
): Promise<string> {
|
||||
const tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'gemini-cli-test-'));
|
||||
await create(tmpDir, structure);
|
||||
return tmpDir;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleans up (deletes) a temporary directory and its contents.
|
||||
* @param dir The absolute path to the temporary directory to clean up.
|
||||
*/
|
||||
export async function cleanupTmpDir(dir: string) {
|
||||
await fs.rm(dir, { recursive: true, force: true });
|
||||
}
|
|
@ -0,0 +1,7 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
export * from './file-system-test-helpers.js';
|
|
@ -0,0 +1,11 @@
|
|||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "dist",
|
||||
"lib": ["DOM", "DOM.Iterable", "ES2021"],
|
||||
"composite": true,
|
||||
"types": ["node"]
|
||||
},
|
||||
"include": ["index.ts", "src/**/*.ts", "src/**/*.json"],
|
||||
"exclude": ["node_modules", "dist"]
|
||||
}
|
Loading…
Reference in New Issue