feat: Multi-Directory Workspace Support (part1: add `--include-directories` option) (#4605)
Co-authored-by: Allen Hutchison <adh@google.com>
Parent: 21965f986c
Commit: c1fe688956
@@ -387,6 +387,11 @@ Arguments passed directly when running the CLI can override other configurations
- **`--proxy`**:
  - Sets the proxy for the CLI.
  - Example: `--proxy http://localhost:7890`.
- **`--include-directories <dir1,dir2,...>`**:
  - Includes additional directories in the workspace for multi-directory support.
  - Can be specified multiple times or as comma-separated values.
  - A maximum of 5 directories can be added.
  - Example: `--include-directories /path/to/project1,/path/to/project2` or `--include-directories /path/to/project1 --include-directories /path/to/project2`
- **`--version`**:
  - Displays the version of the CLI.
@@ -61,6 +61,7 @@ export interface CliArgs {
  listExtensions: boolean | undefined;
  ideMode: boolean | undefined;
  proxy: string | undefined;
  includeDirectories: string[] | undefined;
}

export async function parseArguments(): Promise<CliArgs> {
@@ -199,6 +200,15 @@ export async function parseArguments(): Promise<CliArgs> {
      description:
        'Proxy for gemini client, like schema://user:password@host:port',
    })
    .option('include-directories', {
      type: 'array',
      string: true,
      description:
        'Additional directories to include in the workspace (comma-separated or multiple --include-directories)',
      coerce: (dirs: string[]) =>
        // Handle comma-separated values
        dirs.flatMap((dir) => dir.split(',').map((d) => d.trim())),
    })
    .version(await getCliVersion()) // This will enable the --version flag based on package.json
    .alias('v', 'version')
    .help()
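The `coerce` handler above is what makes the comma-separated and repeated-flag forms equivalent. A minimal sketch of its behavior, with an illustrative helper name and inputs:

```ts
const coerceIncludeDirectories = (dirs: string[]) =>
  dirs.flatMap((dir) => dir.split(',').map((d) => d.trim()));

// e.g. --include-directories /a,/b --include-directories /c
coerceIncludeDirectories(['/a,/b', '/c']); // => ['/a', '/b', '/c']
```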
@@ -366,6 +376,7 @@ export async function loadCliConfig(
    embeddingModel: DEFAULT_GEMINI_EMBEDDING_MODEL,
    sandbox: sandboxConfig,
    targetDir: process.cwd(),
    includeDirectories: argv.includeDirectories,
    debugMode,
    question: argv.promptInteractive || argv.prompt || '',
    fullContext: argv.allFiles || argv.all_files || false,
@@ -199,7 +199,7 @@ export async function main() {
        process.exit(1);
      }
    }
-   await start_sandbox(sandboxConfig, memoryArgs);
+   await start_sandbox(sandboxConfig, memoryArgs, config);
    process.exit(0);
  } else {
    // Not in a sandbox and not entering one, so relaunch with additional
@ -152,6 +152,9 @@ vi.mock('@google/gemini-cli-core', async (importOriginal) => {
|
|||
getSessionId: vi.fn(() => 'test-session-id'),
|
||||
getUserTier: vi.fn().mockResolvedValue(undefined),
|
||||
getIdeMode: vi.fn(() => false),
|
||||
getWorkspaceContext: vi.fn(() => ({
|
||||
getDirectories: vi.fn(() => []),
|
||||
})),
|
||||
};
|
||||
});
|
||||
|
||||
|
@ -292,6 +295,13 @@ describe('App UI', () => {
|
|||
|
||||
// Ensure a theme is set so the theme dialog does not appear.
|
||||
mockSettings = createMockSettings({ workspace: { theme: 'Default' } });
|
||||
|
||||
// Ensure getWorkspaceContext is available if not added by the constructor
|
||||
if (!mockConfig.getWorkspaceContext) {
|
||||
mockConfig.getWorkspaceContext = vi.fn(() => ({
|
||||
getDirectories: vi.fn(() => ['/test/dir']),
|
||||
}));
|
||||
}
|
||||
vi.mocked(ideContext.getIdeContext).mockReturnValue(undefined);
|
||||
});
|
||||
|
||||
|
|
|
@ -62,6 +62,7 @@ describe('aboutCommand', () => {
|
|||
});
|
||||
|
||||
it('should call addItem with all version info', async () => {
|
||||
process.env.SANDBOX = '';
|
||||
if (!aboutCommand.action) {
|
||||
throw new Error('The about command must have an action.');
|
||||
}
|
||||
|
|
|
@ -172,6 +172,9 @@ describe('InputPrompt', () => {
|
|||
getProjectRoot: () => path.join('test', 'project'),
|
||||
getTargetDir: () => path.join('test', 'project', 'src'),
|
||||
getVimMode: () => false,
|
||||
getWorkspaceContext: () => ({
|
||||
getDirectories: () => ['/test/project/src'],
|
||||
}),
|
||||
} as unknown as Config,
|
||||
slashCommands: mockSlashCommands,
|
||||
commandContext: mockCommandContext,
|
||||
|
@ -731,6 +734,7 @@ describe('InputPrompt', () => {
|
|||
// Verify useCompletion was called with correct signature
|
||||
expect(mockedUseCompletion).toHaveBeenCalledWith(
|
||||
mockBuffer,
|
||||
['/test/project/src'],
|
||||
path.join('test', 'project', 'src'),
|
||||
mockSlashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -756,6 +760,7 @@ describe('InputPrompt', () => {
|
|||
|
||||
expect(mockedUseCompletion).toHaveBeenCalledWith(
|
||||
mockBuffer,
|
||||
['/test/project/src'],
|
||||
path.join('test', 'project', 'src'),
|
||||
mockSlashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -781,6 +786,7 @@ describe('InputPrompt', () => {
|
|||
|
||||
expect(mockedUseCompletion).toHaveBeenCalledWith(
|
||||
mockBuffer,
|
||||
['/test/project/src'],
|
||||
path.join('test', 'project', 'src'),
|
||||
mockSlashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -806,6 +812,7 @@ describe('InputPrompt', () => {
|
|||
|
||||
expect(mockedUseCompletion).toHaveBeenCalledWith(
|
||||
mockBuffer,
|
||||
['/test/project/src'],
|
||||
path.join('test', 'project', 'src'),
|
||||
mockSlashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -831,6 +838,7 @@ describe('InputPrompt', () => {
|
|||
|
||||
expect(mockedUseCompletion).toHaveBeenCalledWith(
|
||||
mockBuffer,
|
||||
['/test/project/src'],
|
||||
path.join('test', 'project', 'src'),
|
||||
mockSlashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -857,6 +865,7 @@ describe('InputPrompt', () => {
|
|||
// Verify useCompletion was called with the buffer
|
||||
expect(mockedUseCompletion).toHaveBeenCalledWith(
|
||||
mockBuffer,
|
||||
['/test/project/src'],
|
||||
path.join('test', 'project', 'src'),
|
||||
mockSlashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -882,6 +891,7 @@ describe('InputPrompt', () => {
|
|||
|
||||
expect(mockedUseCompletion).toHaveBeenCalledWith(
|
||||
mockBuffer,
|
||||
['/test/project/src'],
|
||||
path.join('test', 'project', 'src'),
|
||||
mockSlashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -908,6 +918,7 @@ describe('InputPrompt', () => {
|
|||
|
||||
expect(mockedUseCompletion).toHaveBeenCalledWith(
|
||||
mockBuffer,
|
||||
['/test/project/src'],
|
||||
path.join('test', 'project', 'src'),
|
||||
mockSlashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -934,6 +945,7 @@ describe('InputPrompt', () => {
|
|||
|
||||
expect(mockedUseCompletion).toHaveBeenCalledWith(
|
||||
mockBuffer,
|
||||
['/test/project/src'],
|
||||
path.join('test', 'project', 'src'),
|
||||
mockSlashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -960,6 +972,7 @@ describe('InputPrompt', () => {
|
|||
|
||||
expect(mockedUseCompletion).toHaveBeenCalledWith(
|
||||
mockBuffer,
|
||||
['/test/project/src'],
|
||||
path.join('test', 'project', 'src'),
|
||||
mockSlashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -986,6 +999,7 @@ describe('InputPrompt', () => {
|
|||
|
||||
expect(mockedUseCompletion).toHaveBeenCalledWith(
|
||||
mockBuffer,
|
||||
['/test/project/src'],
|
||||
path.join('test', 'project', 'src'),
|
||||
mockSlashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -1014,6 +1028,7 @@ describe('InputPrompt', () => {
|
|||
|
||||
expect(mockedUseCompletion).toHaveBeenCalledWith(
|
||||
mockBuffer,
|
||||
['/test/project/src'],
|
||||
path.join('test', 'project', 'src'),
|
||||
mockSlashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -1040,6 +1055,7 @@ describe('InputPrompt', () => {
|
|||
|
||||
expect(mockedUseCompletion).toHaveBeenCalledWith(
|
||||
mockBuffer,
|
||||
['/test/project/src'],
|
||||
path.join('test', 'project', 'src'),
|
||||
mockSlashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -1068,6 +1084,7 @@ describe('InputPrompt', () => {
|
|||
|
||||
expect(mockedUseCompletion).toHaveBeenCalledWith(
|
||||
mockBuffer,
|
||||
['/test/project/src'],
|
||||
path.join('test', 'project', 'src'),
|
||||
mockSlashCommands,
|
||||
mockCommandContext,
|
||||
|
|
|
@@ -60,8 +60,19 @@ export const InputPrompt: React.FC<InputPromptProps> = ({
}) => {
  const [justNavigatedHistory, setJustNavigatedHistory] = useState(false);

  const [dirs, setDirs] = useState<readonly string[]>(
    config.getWorkspaceContext().getDirectories(),
  );
  const dirsChanged = config.getWorkspaceContext().getDirectories();
  useEffect(() => {
    if (dirs.length !== dirsChanged.length) {
      setDirs(dirsChanged);
    }
  }, [dirs.length, dirsChanged]);

  const completion = useCompletion(
    buffer,
    dirs,
    config.getTargetDir(),
    slashCommands,
    commandContext,
@ -57,6 +57,10 @@ describe('handleAtCommand', () => {
|
|||
respectGeminiIgnore: true,
|
||||
}),
|
||||
getEnableRecursiveFileSearch: vi.fn(() => true),
|
||||
getWorkspaceContext: () => ({
|
||||
isPathWithinWorkspace: () => true,
|
||||
getDirectories: () => [testRootDir],
|
||||
}),
|
||||
} as unknown as Config;
|
||||
|
||||
const registry = new ToolRegistry(mockConfig);
|
||||
|
|
|
@ -188,6 +188,14 @@ export async function handleAtCommand({
|
|||
|
||||
// Check if path should be ignored based on filtering options
|
||||
|
||||
const workspaceContext = config.getWorkspaceContext();
|
||||
if (!workspaceContext.isPathWithinWorkspace(pathName)) {
|
||||
onDebugMessage(
|
||||
`Path ${pathName} is not in the workspace and will be skipped.`,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
const gitIgnored =
|
||||
respectFileIgnore.respectGitIgnore &&
|
||||
fileDiscovery.shouldIgnoreFile(pathName, {
|
||||
|
@ -215,90 +223,88 @@ export async function handleAtCommand({
|
|||
continue;
|
||||
}
|
||||
|
||||
let currentPathSpec = pathName;
|
||||
let resolvedSuccessfully = false;
|
||||
|
||||
try {
|
||||
const absolutePath = path.resolve(config.getTargetDir(), pathName);
|
||||
const stats = await fs.stat(absolutePath);
|
||||
if (stats.isDirectory()) {
|
||||
currentPathSpec =
|
||||
pathName + (pathName.endsWith(path.sep) ? `**` : `/**`);
|
||||
onDebugMessage(
|
||||
`Path ${pathName} resolved to directory, using glob: ${currentPathSpec}`,
|
||||
);
|
||||
} else {
|
||||
onDebugMessage(`Path ${pathName} resolved to file: ${absolutePath}`);
|
||||
}
|
||||
resolvedSuccessfully = true;
|
||||
} catch (error) {
|
||||
if (isNodeError(error) && error.code === 'ENOENT') {
|
||||
if (config.getEnableRecursiveFileSearch() && globTool) {
|
||||
for (const dir of config.getWorkspaceContext().getDirectories()) {
|
||||
let currentPathSpec = pathName;
|
||||
let resolvedSuccessfully = false;
|
||||
try {
|
||||
const absolutePath = path.resolve(dir, pathName);
|
||||
const stats = await fs.stat(absolutePath);
|
||||
if (stats.isDirectory()) {
|
||||
currentPathSpec =
|
||||
pathName + (pathName.endsWith(path.sep) ? `**` : `/**`);
|
||||
onDebugMessage(
|
||||
`Path ${pathName} not found directly, attempting glob search.`,
|
||||
`Path ${pathName} resolved to directory, using glob: ${currentPathSpec}`,
|
||||
);
|
||||
try {
|
||||
const globResult = await globTool.execute(
|
||||
{
|
||||
pattern: `**/*${pathName}*`,
|
||||
path: config.getTargetDir(),
|
||||
},
|
||||
signal,
|
||||
} else {
|
||||
onDebugMessage(`Path ${pathName} resolved to file: ${absolutePath}`);
|
||||
}
|
||||
resolvedSuccessfully = true;
|
||||
} catch (error) {
|
||||
if (isNodeError(error) && error.code === 'ENOENT') {
|
||||
if (config.getEnableRecursiveFileSearch() && globTool) {
|
||||
onDebugMessage(
|
||||
`Path ${pathName} not found directly, attempting glob search.`,
|
||||
);
|
||||
if (
|
||||
globResult.llmContent &&
|
||||
typeof globResult.llmContent === 'string' &&
|
||||
!globResult.llmContent.startsWith('No files found') &&
|
||||
!globResult.llmContent.startsWith('Error:')
|
||||
) {
|
||||
const lines = globResult.llmContent.split('\n');
|
||||
if (lines.length > 1 && lines[1]) {
|
||||
const firstMatchAbsolute = lines[1].trim();
|
||||
currentPathSpec = path.relative(
|
||||
config.getTargetDir(),
|
||||
firstMatchAbsolute,
|
||||
);
|
||||
onDebugMessage(
|
||||
`Glob search for ${pathName} found ${firstMatchAbsolute}, using relative path: ${currentPathSpec}`,
|
||||
);
|
||||
resolvedSuccessfully = true;
|
||||
try {
|
||||
const globResult = await globTool.execute(
|
||||
{
|
||||
pattern: `**/*${pathName}*`,
|
||||
path: dir,
|
||||
},
|
||||
signal,
|
||||
);
|
||||
if (
|
||||
globResult.llmContent &&
|
||||
typeof globResult.llmContent === 'string' &&
|
||||
!globResult.llmContent.startsWith('No files found') &&
|
||||
!globResult.llmContent.startsWith('Error:')
|
||||
) {
|
||||
const lines = globResult.llmContent.split('\n');
|
||||
if (lines.length > 1 && lines[1]) {
|
||||
const firstMatchAbsolute = lines[1].trim();
|
||||
currentPathSpec = path.relative(dir, firstMatchAbsolute);
|
||||
onDebugMessage(
|
||||
`Glob search for ${pathName} found ${firstMatchAbsolute}, using relative path: ${currentPathSpec}`,
|
||||
);
|
||||
resolvedSuccessfully = true;
|
||||
} else {
|
||||
onDebugMessage(
|
||||
`Glob search for '**/*${pathName}*' did not return a usable path. Path ${pathName} will be skipped.`,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
onDebugMessage(
|
||||
`Glob search for '**/*${pathName}*' did not return a usable path. Path ${pathName} will be skipped.`,
|
||||
`Glob search for '**/*${pathName}*' found no files or an error. Path ${pathName} will be skipped.`,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
} catch (globError) {
|
||||
console.error(
|
||||
`Error during glob search for ${pathName}: ${getErrorMessage(globError)}`,
|
||||
);
|
||||
onDebugMessage(
|
||||
`Glob search for '**/*${pathName}*' found no files or an error. Path ${pathName} will be skipped.`,
|
||||
`Error during glob search for ${pathName}. Path ${pathName} will be skipped.`,
|
||||
);
|
||||
}
|
||||
} catch (globError) {
|
||||
console.error(
|
||||
`Error during glob search for ${pathName}: ${getErrorMessage(globError)}`,
|
||||
);
|
||||
} else {
|
||||
onDebugMessage(
|
||||
`Error during glob search for ${pathName}. Path ${pathName} will be skipped.`,
|
||||
`Glob tool not found. Path ${pathName} will be skipped.`,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
console.error(
|
||||
`Error stating path ${pathName}: ${getErrorMessage(error)}`,
|
||||
);
|
||||
onDebugMessage(
|
||||
`Glob tool not found. Path ${pathName} will be skipped.`,
|
||||
`Error stating path ${pathName}. Path ${pathName} will be skipped.`,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
console.error(
|
||||
`Error stating path ${pathName}: ${getErrorMessage(error)}`,
|
||||
);
|
||||
onDebugMessage(
|
||||
`Error stating path ${pathName}. Path ${pathName} will be skipped.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (resolvedSuccessfully) {
|
||||
pathSpecsToRead.push(currentPathSpec);
|
||||
atPathToResolvedSpecMap.set(originalAtPath, currentPathSpec);
|
||||
contentLabelsForDisplay.push(pathName);
|
||||
if (resolvedSuccessfully) {
|
||||
pathSpecsToRead.push(currentPathSpec);
|
||||
atPathToResolvedSpecMap.set(originalAtPath, currentPathSpec);
|
||||
contentLabelsForDisplay.push(pathName);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -22,6 +22,7 @@ describe('useCompletion', () => {
|
|||
|
||||
// A minimal mock is sufficient for these tests.
|
||||
const mockCommandContext = {} as CommandContext;
|
||||
let testDirs: string[];
|
||||
|
||||
async function createEmptyDir(...pathSegments: string[]) {
|
||||
const fullPath = path.join(testRootDir, ...pathSegments);
|
||||
|
@ -51,8 +52,12 @@ describe('useCompletion', () => {
|
|||
testRootDir = await fs.mkdtemp(
|
||||
path.join(os.tmpdir(), 'completion-unit-test-'),
|
||||
);
|
||||
testDirs = [testRootDir];
|
||||
mockConfig = {
|
||||
getTargetDir: () => testRootDir,
|
||||
getWorkspaceContext: () => ({
|
||||
getDirectories: () => testDirs,
|
||||
}),
|
||||
getProjectRoot: () => testRootDir,
|
||||
getFileFilteringOptions: vi.fn(() => ({
|
||||
respectGitIgnore: true,
|
||||
|
@ -79,6 +84,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest(''),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -108,6 +114,7 @@ describe('useCompletion', () => {
|
|||
const textBuffer = useTextBufferForTest(text);
|
||||
return useCompletion(
|
||||
textBuffer,
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -138,6 +145,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('/help'),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -170,6 +178,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest(''),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -191,6 +200,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest(''),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -215,6 +225,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('/h'),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -242,6 +253,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('/h'),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -270,6 +282,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('/'),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -315,6 +328,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('/command'),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
largeMockCommands,
|
||||
mockCommandContext,
|
||||
|
@ -372,6 +386,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('/'),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -394,6 +409,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('/mem'),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -417,6 +433,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('/usag'), // part of the word "usage"
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -443,6 +460,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('/clear'), // No trailing space
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -474,6 +492,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest(query),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
mockSlashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -494,6 +513,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('/clear '),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -514,6 +534,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('/unknown-command'),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -547,6 +568,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('/memory'), // Note: no trailing space
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -584,6 +606,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('/memory'),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -619,6 +642,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('/memory a'),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -650,6 +674,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('/memory dothisnow'),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -692,6 +717,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('/chat resume my-ch'),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -735,6 +761,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('/chat resume '),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -769,6 +796,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('/chat resume '),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -796,6 +824,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('@s'),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
[],
|
||||
mockCommandContext,
|
||||
|
@ -829,6 +858,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('@src/comp'),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
[],
|
||||
mockCommandContext,
|
||||
|
@ -854,6 +884,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('@.'),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
[],
|
||||
mockCommandContext,
|
||||
|
@ -885,6 +916,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('@d'),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
[],
|
||||
mockCommandContext,
|
||||
|
@ -910,6 +942,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('@'),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
[],
|
||||
mockCommandContext,
|
||||
|
@ -944,6 +977,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('@'),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
[],
|
||||
mockCommandContext,
|
||||
|
@ -974,6 +1008,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('@d'),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
[],
|
||||
mockCommandContext,
|
||||
|
@ -1007,6 +1042,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('@'),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
[],
|
||||
mockCommandContext,
|
||||
|
@ -1039,6 +1075,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
useTextBufferForTest('@t'),
|
||||
testDirs,
|
||||
testRootDir,
|
||||
[],
|
||||
mockCommandContext,
|
||||
|
@ -1085,6 +1122,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
mockBuffer,
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -1128,6 +1166,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
mockBuffer,
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -1173,6 +1212,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
mockBuffer,
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
@ -1221,6 +1261,7 @@ describe('useCompletion', () => {
|
|||
const { result } = renderHook(() =>
|
||||
useCompletion(
|
||||
mockBuffer,
|
||||
testDirs,
|
||||
testRootDir,
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
|
|
|
@ -43,6 +43,7 @@ export interface UseCompletionReturn {
|
|||
|
||||
export function useCompletion(
|
||||
buffer: TextBuffer,
|
||||
dirs: readonly string[],
|
||||
cwd: string,
|
||||
slashCommands: readonly SlashCommand[],
|
||||
commandContext: CommandContext,
|
||||
|
@ -328,8 +329,6 @@ export function useCompletion(
|
|||
: partialPath.substring(lastSlashIndex + 1),
|
||||
);
|
||||
|
||||
const baseDirAbsolute = path.resolve(cwd, baseDirRelative);
|
||||
|
||||
let isMounted = true;
|
||||
|
||||
const findFilesRecursively = async (
|
||||
|
@ -358,7 +357,7 @@ export function useCompletion(
|
|||
|
||||
const entryPathRelative = path.join(currentRelativePath, entry.name);
|
||||
const entryPathFromRoot = path.relative(
|
||||
cwd,
|
||||
startDir,
|
||||
path.join(startDir, entry.name),
|
||||
);
|
||||
|
||||
|
@ -417,29 +416,31 @@ export function useCompletion(
|
|||
respectGitIgnore?: boolean;
|
||||
respectGeminiIgnore?: boolean;
|
||||
},
|
||||
searchDir: string,
|
||||
maxResults = 50,
|
||||
): Promise<Suggestion[]> => {
|
||||
const globPattern = `**/${searchPrefix}*`;
|
||||
const files = await glob(globPattern, {
|
||||
cwd,
|
||||
cwd: searchDir,
|
||||
dot: searchPrefix.startsWith('.'),
|
||||
nocase: true,
|
||||
});
|
||||
|
||||
const suggestions: Suggestion[] = files
|
||||
.map((file: string) => ({
|
||||
label: file,
|
||||
value: escapePath(file),
|
||||
}))
|
||||
.filter((s) => {
|
||||
.filter((file) => {
|
||||
if (fileDiscoveryService) {
|
||||
return !fileDiscoveryService.shouldIgnoreFile(
|
||||
s.label,
|
||||
filterOptions,
|
||||
); // relative path
|
||||
return !fileDiscoveryService.shouldIgnoreFile(file, filterOptions);
|
||||
}
|
||||
return true;
|
||||
})
|
||||
.map((file: string) => {
|
||||
const absolutePath = path.resolve(searchDir, file);
|
||||
const label = path.relative(cwd, absolutePath);
|
||||
return {
|
||||
label,
|
||||
value: escapePath(label),
|
||||
};
|
||||
})
|
||||
.slice(0, maxResults);
|
||||
|
||||
return suggestions;
|
||||
|
@ -456,63 +457,78 @@ export function useCompletion(
|
|||
config?.getFileFilteringOptions() ?? DEFAULT_FILE_FILTERING_OPTIONS;
|
||||
|
||||
try {
|
||||
// If there's no slash, or it's the root, do a recursive search from cwd
|
||||
if (
|
||||
partialPath.indexOf('/') === -1 &&
|
||||
prefix &&
|
||||
enableRecursiveSearch
|
||||
) {
|
||||
if (fileDiscoveryService) {
|
||||
fetchedSuggestions = await findFilesWithGlob(
|
||||
prefix,
|
||||
fileDiscoveryService,
|
||||
filterOptions,
|
||||
);
|
||||
// If there's no slash, or it's the root, do a recursive search from workspace directories
|
||||
for (const dir of dirs) {
|
||||
let fetchedSuggestionsPerDir: Suggestion[] = [];
|
||||
if (
|
||||
partialPath.indexOf('/') === -1 &&
|
||||
prefix &&
|
||||
enableRecursiveSearch
|
||||
) {
|
||||
if (fileDiscoveryService) {
|
||||
fetchedSuggestionsPerDir = await findFilesWithGlob(
|
||||
prefix,
|
||||
fileDiscoveryService,
|
||||
filterOptions,
|
||||
dir,
|
||||
);
|
||||
} else {
|
||||
fetchedSuggestionsPerDir = await findFilesRecursively(
|
||||
dir,
|
||||
prefix,
|
||||
null,
|
||||
filterOptions,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
fetchedSuggestions = await findFilesRecursively(
|
||||
cwd,
|
||||
prefix,
|
||||
null,
|
||||
filterOptions,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// Original behavior: list files in the specific directory
|
||||
const lowerPrefix = prefix.toLowerCase();
|
||||
const entries = await fs.readdir(baseDirAbsolute, {
|
||||
withFileTypes: true,
|
||||
});
|
||||
// Original behavior: list files in the specific directory
|
||||
const lowerPrefix = prefix.toLowerCase();
|
||||
const baseDirAbsolute = path.resolve(dir, baseDirRelative);
|
||||
const entries = await fs.readdir(baseDirAbsolute, {
|
||||
withFileTypes: true,
|
||||
});
|
||||
|
||||
// Filter entries using git-aware filtering
|
||||
const filteredEntries = [];
|
||||
for (const entry of entries) {
|
||||
// Conditionally ignore dotfiles
|
||||
if (!prefix.startsWith('.') && entry.name.startsWith('.')) {
|
||||
continue;
|
||||
}
|
||||
if (!entry.name.toLowerCase().startsWith(lowerPrefix)) continue;
|
||||
// Filter entries using git-aware filtering
|
||||
const filteredEntries = [];
|
||||
for (const entry of entries) {
|
||||
// Conditionally ignore dotfiles
|
||||
if (!prefix.startsWith('.') && entry.name.startsWith('.')) {
|
||||
continue;
|
||||
}
|
||||
if (!entry.name.toLowerCase().startsWith(lowerPrefix)) continue;
|
||||
|
||||
const relativePath = path.relative(
|
||||
cwd,
|
||||
path.join(baseDirAbsolute, entry.name),
|
||||
);
|
||||
if (
|
||||
fileDiscoveryService &&
|
||||
fileDiscoveryService.shouldIgnoreFile(relativePath, filterOptions)
|
||||
) {
|
||||
continue;
|
||||
const relativePath = path.relative(
|
||||
dir,
|
||||
path.join(baseDirAbsolute, entry.name),
|
||||
);
|
||||
if (
|
||||
fileDiscoveryService &&
|
||||
fileDiscoveryService.shouldIgnoreFile(
|
||||
relativePath,
|
||||
filterOptions,
|
||||
)
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
|
||||
filteredEntries.push(entry);
|
||||
}
|
||||
|
||||
filteredEntries.push(entry);
|
||||
fetchedSuggestionsPerDir = filteredEntries.map((entry) => {
|
||||
const absolutePath = path.resolve(baseDirAbsolute, entry.name);
|
||||
const label =
|
||||
cwd === dir ? entry.name : path.relative(cwd, absolutePath);
|
||||
const suggestionLabel = entry.isDirectory() ? label + '/' : label;
|
||||
return {
|
||||
label: suggestionLabel,
|
||||
value: escapePath(suggestionLabel),
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
fetchedSuggestions = filteredEntries.map((entry) => {
|
||||
const label = entry.isDirectory() ? entry.name + '/' : entry.name;
|
||||
return {
|
||||
label,
|
||||
value: escapePath(label), // Value for completion should be just the name part
|
||||
};
|
||||
});
|
||||
fetchedSuggestions = [
|
||||
...fetchedSuggestions,
|
||||
...fetchedSuggestionsPerDir,
|
||||
];
|
||||
}
|
||||
|
||||
// Like glob, we always return forwardslashes, even in windows.
|
||||
|
@ -585,6 +601,7 @@ export function useCompletion(
|
|||
};
|
||||
}, [
|
||||
buffer.text,
|
||||
dirs,
|
||||
cwd,
|
||||
isActive,
|
||||
resetCompletionState,
|
||||
|
|
|
@ -13,6 +13,12 @@
|
|||
(subpath (string-append (param "HOME_DIR") "/.npm"))
|
||||
(subpath (string-append (param "HOME_DIR") "/.cache"))
|
||||
(subpath (string-append (param "HOME_DIR") "/.gitconfig"))
|
||||
;; Allow writes to included directories from --include-directories
|
||||
(subpath (param "INCLUDE_DIR_0"))
|
||||
(subpath (param "INCLUDE_DIR_1"))
|
||||
(subpath (param "INCLUDE_DIR_2"))
|
||||
(subpath (param "INCLUDE_DIR_3"))
|
||||
(subpath (param "INCLUDE_DIR_4"))
|
||||
(literal "/dev/stdout")
|
||||
(literal "/dev/stderr")
|
||||
(literal "/dev/null")
|
||||
|
|
|
@ -13,6 +13,12 @@
|
|||
(subpath (string-append (param "HOME_DIR") "/.npm"))
|
||||
(subpath (string-append (param "HOME_DIR") "/.cache"))
|
||||
(subpath (string-append (param "HOME_DIR") "/.gitconfig"))
|
||||
;; Allow writes to included directories from --include-directories
|
||||
(subpath (param "INCLUDE_DIR_0"))
|
||||
(subpath (param "INCLUDE_DIR_1"))
|
||||
(subpath (param "INCLUDE_DIR_2"))
|
||||
(subpath (param "INCLUDE_DIR_3"))
|
||||
(subpath (param "INCLUDE_DIR_4"))
|
||||
(literal "/dev/stdout")
|
||||
(literal "/dev/stderr")
|
||||
(literal "/dev/null")
|
||||
|
|
|
@ -13,6 +13,12 @@
|
|||
(subpath (string-append (param "HOME_DIR") "/.npm"))
|
||||
(subpath (string-append (param "HOME_DIR") "/.cache"))
|
||||
(subpath (string-append (param "HOME_DIR") "/.gitconfig"))
|
||||
;; Allow writes to included directories from --include-directories
|
||||
(subpath (param "INCLUDE_DIR_0"))
|
||||
(subpath (param "INCLUDE_DIR_1"))
|
||||
(subpath (param "INCLUDE_DIR_2"))
|
||||
(subpath (param "INCLUDE_DIR_3"))
|
||||
(subpath (param "INCLUDE_DIR_4"))
|
||||
(literal "/dev/stdout")
|
||||
(literal "/dev/stderr")
|
||||
(literal "/dev/null")
|
||||
|
|
|
@ -71,6 +71,12 @@
|
|||
(subpath (string-append (param "HOME_DIR") "/.npm"))
|
||||
(subpath (string-append (param "HOME_DIR") "/.cache"))
|
||||
(subpath (string-append (param "HOME_DIR") "/.gitconfig"))
|
||||
;; Allow writes to included directories from --include-directories
|
||||
(subpath (param "INCLUDE_DIR_0"))
|
||||
(subpath (param "INCLUDE_DIR_1"))
|
||||
(subpath (param "INCLUDE_DIR_2"))
|
||||
(subpath (param "INCLUDE_DIR_3"))
|
||||
(subpath (param "INCLUDE_DIR_4"))
|
||||
(literal "/dev/stdout")
|
||||
(literal "/dev/stderr")
|
||||
(literal "/dev/null")
|
||||
|
|
|
@ -71,6 +71,12 @@
|
|||
(subpath (string-append (param "HOME_DIR") "/.npm"))
|
||||
(subpath (string-append (param "HOME_DIR") "/.cache"))
|
||||
(subpath (string-append (param "HOME_DIR") "/.gitconfig"))
|
||||
;; Allow writes to included directories from --include-directories
|
||||
(subpath (param "INCLUDE_DIR_0"))
|
||||
(subpath (param "INCLUDE_DIR_1"))
|
||||
(subpath (param "INCLUDE_DIR_2"))
|
||||
(subpath (param "INCLUDE_DIR_3"))
|
||||
(subpath (param "INCLUDE_DIR_4"))
|
||||
(literal "/dev/stdout")
|
||||
(literal "/dev/stderr")
|
||||
(literal "/dev/null")
|
||||
|
|
|
@ -71,6 +71,12 @@
|
|||
(subpath (string-append (param "HOME_DIR") "/.npm"))
|
||||
(subpath (string-append (param "HOME_DIR") "/.cache"))
|
||||
(subpath (string-append (param "HOME_DIR") "/.gitconfig"))
|
||||
;; Allow writes to included directories from --include-directories
|
||||
(subpath (param "INCLUDE_DIR_0"))
|
||||
(subpath (param "INCLUDE_DIR_1"))
|
||||
(subpath (param "INCLUDE_DIR_2"))
|
||||
(subpath (param "INCLUDE_DIR_3"))
|
||||
(subpath (param "INCLUDE_DIR_4"))
|
||||
(literal "/dev/stdout")
|
||||
(literal "/dev/stderr")
|
||||
(literal "/dev/null")
|
||||
|
|
|
@@ -15,7 +15,7 @@ import {
  SETTINGS_DIRECTORY_NAME,
} from '../config/settings.js';
import { promisify } from 'util';
-import { SandboxConfig } from '@google/gemini-cli-core';
+import { Config, SandboxConfig } from '@google/gemini-cli-core';

const execAsync = promisify(exec);
@@ -183,6 +183,7 @@ function entrypoint(workdir: string): string[] {
export async function start_sandbox(
  config: SandboxConfig,
  nodeArgs: string[] = [],
  cliConfig?: Config,
) {
  if (config.command === 'sandbox-exec') {
    // disallow BUILD_SANDBOX
@@ -223,6 +224,38 @@ export async function start_sandbox(
      `HOME_DIR=${fs.realpathSync(os.homedir())}`,
      '-D',
      `CACHE_DIR=${fs.realpathSync(execSync(`getconf DARWIN_USER_CACHE_DIR`).toString().trim())}`,
    ];

    // Add included directories from the workspace context
    // Always add 5 INCLUDE_DIR parameters to ensure .sb files can reference them
    const MAX_INCLUDE_DIRS = 5;
    const targetDir = fs.realpathSync(cliConfig?.getTargetDir() || '');
    const includedDirs: string[] = [];

    if (cliConfig) {
      const workspaceContext = cliConfig.getWorkspaceContext();
      const directories = workspaceContext.getDirectories();

      // Filter out TARGET_DIR
      for (const dir of directories) {
        const realDir = fs.realpathSync(dir);
        if (realDir !== targetDir) {
          includedDirs.push(realDir);
        }
      }
    }

    for (let i = 0; i < MAX_INCLUDE_DIRS; i++) {
      let dirPath = '/dev/null'; // Default to a safe path that won't cause issues

      if (i < includedDirs.length) {
        dirPath = includedDirs[i];
      }

      args.push('-D', `INCLUDE_DIR_${i}=${dirPath}`);
    }

    args.push(
      '-f',
      profileFile,
      'sh',
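For illustration, a self-contained sketch of the padding behavior above, assuming a single extra included directory (the path is made up). Unused slots fall back to `/dev/null`, so every `INCLUDE_DIR_0`–`INCLUDE_DIR_4` parameter referenced by the `.sb` profiles is always defined:

```ts
const MAX_INCLUDE_DIRS = 5;
const includedDirs = ['/path/to/project2']; // assumed extra workspace directory
const args: string[] = [];
for (let i = 0; i < MAX_INCLUDE_DIRS; i++) {
  const dirPath = i < includedDirs.length ? includedDirs[i] : '/dev/null';
  args.push('-D', `INCLUDE_DIR_${i}=${dirPath}`);
}
// args => ['-D', 'INCLUDE_DIR_0=/path/to/project2', '-D', 'INCLUDE_DIR_1=/dev/null', ...]
```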
@@ -232,7 +265,7 @@ export async function start_sandbox(
        `NODE_OPTIONS="${nodeOptions}"`,
        ...process.argv.map((arg) => quote([arg])),
      ].join(' '),
-   ];
+   );
    // start and set up proxy if GEMINI_SANDBOX_PROXY_COMMAND is set
    const proxyCommand = process.env.GEMINI_SANDBOX_PROXY_COMMAND;
    let proxyProcess: ChildProcess | undefined = undefined;
@ -19,6 +19,18 @@ import {
|
|||
import { GeminiClient } from '../core/client.js';
|
||||
import { GitService } from '../services/gitService.js';
|
||||
|
||||
vi.mock('fs', async (importOriginal) => {
|
||||
const actual = await importOriginal<typeof import('fs')>();
|
||||
return {
|
||||
...actual,
|
||||
existsSync: vi.fn().mockReturnValue(true),
|
||||
statSync: vi.fn().mockReturnValue({
|
||||
isDirectory: vi.fn().mockReturnValue(true),
|
||||
}),
|
||||
realpathSync: vi.fn((path) => path),
|
||||
};
|
||||
});
|
||||
|
||||
// Mock dependencies that might be called during Config construction or createServerConfig
|
||||
vi.mock('../tools/tool-registry', () => {
|
||||
const ToolRegistryMock = vi.fn();
|
||||
|
@ -219,6 +231,23 @@ describe('Server Config (config.ts)', () => {
|
|||
expect(config.getFileFilteringRespectGitIgnore()).toBe(false);
|
||||
});
|
||||
|
||||
it('should initialize WorkspaceContext with includeDirectories', () => {
|
||||
const includeDirectories = ['/path/to/dir1', '/path/to/dir2'];
|
||||
const paramsWithIncludeDirs: ConfigParameters = {
|
||||
...baseParams,
|
||||
includeDirectories,
|
||||
};
|
||||
const config = new Config(paramsWithIncludeDirs);
|
||||
const workspaceContext = config.getWorkspaceContext();
|
||||
const directories = workspaceContext.getDirectories();
|
||||
|
||||
// Should include the target directory plus the included directories
|
||||
expect(directories).toHaveLength(3);
|
||||
expect(directories).toContain(path.resolve(baseParams.targetDir));
|
||||
expect(directories).toContain('/path/to/dir1');
|
||||
expect(directories).toContain('/path/to/dir2');
|
||||
});
|
||||
|
||||
it('Config constructor should set telemetry to true when provided as true', () => {
|
||||
const paramsWithTelemetry: ConfigParameters = {
|
||||
...baseParams,
|
||||
|
|
|
@@ -50,6 +50,7 @@ import { IdeClient } from '../ide/ide-client.js';

// Re-export OAuth config type
export type { MCPOAuthConfig };
import { WorkspaceContext } from '../utils/workspaceContext.js';

export enum ApprovalMode {
  DEFAULT = 'default',
@@ -172,6 +173,7 @@ export interface ConfigParameters {
  proxy?: string;
  cwd: string;
  fileDiscoveryService?: FileDiscoveryService;
  includeDirectories?: string[];
  bugCommand?: BugCommandSettings;
  model: string;
  extensionContextFilePaths?: string[];
@@ -194,6 +196,7 @@ export class Config {
  private readonly embeddingModel: string;
  private readonly sandbox: SandboxConfig | undefined;
  private readonly targetDir: string;
  private readonly workspaceContext: WorkspaceContext;
  private readonly debugMode: boolean;
  private readonly question: string | undefined;
  private readonly fullContext: boolean;
@@ -248,6 +251,10 @@ export class Config {
      params.embeddingModel ?? DEFAULT_GEMINI_EMBEDDING_MODEL;
    this.sandbox = params.sandbox;
    this.targetDir = path.resolve(params.targetDir);
    this.workspaceContext = new WorkspaceContext(
      this.targetDir,
      params.includeDirectories ?? [],
    );
    this.debugMode = params.debugMode;
    this.question = params.question;
    this.fullContext = params.fullContext ?? false;
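The `WorkspaceContext` class itself is not part of this excerpt; a minimal sketch of the surface the rest of the commit relies on (constructor, `getDirectories`, `isPathWithinWorkspace`, `addDirectory`) could look roughly like the following. This is an illustrative assumption, not the actual implementation:

```ts
import * as path from 'node:path';

// Hypothetical sketch of the WorkspaceContext surface used by this commit.
export class WorkspaceContext {
  private readonly directories: string[] = [];

  constructor(targetDir: string, includeDirectories: string[] = []) {
    this.addDirectory(targetDir);
    for (const dir of includeDirectories) {
      this.addDirectory(dir);
    }
  }

  addDirectory(dir: string): void {
    const resolved = path.resolve(dir);
    if (!this.directories.includes(resolved)) {
      this.directories.push(resolved);
    }
  }

  getDirectories(): readonly string[] {
    return [...this.directories];
  }

  isPathWithinWorkspace(candidate: string): boolean {
    const resolved = path.resolve(candidate);
    return this.directories.some(
      (dir) => resolved === dir || resolved.startsWith(dir + path.sep),
    );
  }
}
```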
@@ -392,6 +399,10 @@ export class Config {
    return this.targetDir;
  }

  getWorkspaceContext(): WorkspaceContext {
    return this.workspaceContext;
  }

  getToolRegistry(): Promise<ToolRegistry> {
    return Promise.resolve(this.toolRegistry);
  }
|
|
@ -4,14 +4,21 @@
|
|||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach } from 'vitest';
|
||||
import { describe, it, expect, beforeEach, vi } from 'vitest';
|
||||
import { Config } from './config.js';
|
||||
import { DEFAULT_GEMINI_MODEL, DEFAULT_GEMINI_FLASH_MODEL } from './models.js';
|
||||
import fs from 'node:fs';
|
||||
|
||||
vi.mock('node:fs');
|
||||
|
||||
describe('Flash Model Fallback Configuration', () => {
|
||||
let config: Config;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.statSync).mockReturnValue({
|
||||
isDirectory: () => true,
|
||||
} as fs.Stats);
|
||||
config = new Config({
|
||||
sessionId: 'test-session',
|
||||
targetDir: '/test',
|
||||
|
|
|
@ -200,6 +200,9 @@ describe('Gemini Client (client.ts)', () => {
|
|||
getNoBrowser: vi.fn().mockReturnValue(false),
|
||||
getUsageStatisticsEnabled: vi.fn().mockReturnValue(true),
|
||||
getIdeMode: vi.fn().mockReturnValue(false),
|
||||
getWorkspaceContext: vi.fn().mockReturnValue({
|
||||
getDirectories: vi.fn().mockReturnValue(['/test/dir']),
|
||||
}),
|
||||
getGeminiClient: vi.fn(),
|
||||
setFallbackMode: vi.fn(),
|
||||
};
|
||||
|
|
|
@@ -172,7 +172,6 @@ export class GeminiClient {
  }

  private async getEnvironment(): Promise<Part[]> {
-   const cwd = this.config.getWorkingDir();
    const today = new Date().toLocaleDateString(undefined, {
      weekday: 'long',
      year: 'numeric',
@@ -180,14 +179,35 @@ export class GeminiClient {
      day: 'numeric',
    });
    const platform = process.platform;
-   const folderStructure = await getFolderStructure(cwd, {
-     fileService: this.config.getFileService(),
-   });
+
+   const workspaceContext = this.config.getWorkspaceContext();
+   const workspaceDirectories = workspaceContext.getDirectories();
+
+   const folderStructures = await Promise.all(
+     workspaceDirectories.map((dir) =>
+       getFolderStructure(dir, {
+         fileService: this.config.getFileService(),
+       }),
+     ),
+   );
+
+   const folderStructure = folderStructures.join('\n');
+
+   let workingDirPreamble: string;
+   if (workspaceDirectories.length === 1) {
+     workingDirPreamble = `I'm currently working in the directory: ${workspaceDirectories[0]}`;
+   } else {
+     const dirList = workspaceDirectories
+       .map((dir) => `  - ${dir}`)
+       .join('\n');
+     workingDirPreamble = `I'm currently working in the following directories:\n${dirList}`;
+   }
+
    const context = `
  This is the Gemini CLI. We are setting up the context for our chat.
  Today's date is ${today}.
  My operating system is: ${platform}
-  I'm currently working in the directory: ${cwd}
+  ${workingDirPreamble}
  ${folderStructure}
          `.trim();
|
|
@@ -0,0 +1,33 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { vi } from 'vitest';
import { WorkspaceContext } from '../utils/workspaceContext.js';

/**
 * Creates a mock WorkspaceContext for testing
 * @param rootDir The root directory to use for the mock
 * @param additionalDirs Optional additional directories to include in the workspace
 * @returns A mock WorkspaceContext instance
 */
export function createMockWorkspaceContext(
  rootDir: string,
  additionalDirs: string[] = [],
): WorkspaceContext {
  const allDirs = [rootDir, ...additionalDirs];

  const mockWorkspaceContext = {
    addDirectory: vi.fn(),
    getDirectories: vi.fn().mockReturnValue(allDirs),
    isPathWithinWorkspace: vi
      .fn()
      .mockImplementation((path: string) =>
        allDirs.some((dir) => path.startsWith(dir)),
      ),
  } as unknown as WorkspaceContext;

  return mockWorkspaceContext;
}
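A hypothetical usage of this helper in a tool test (the paths are illustrative):

```ts
const workspace = createMockWorkspaceContext('/repo/root', ['/repo/docs']);

workspace.getDirectories(); // ['/repo/root', '/repo/docs']
workspace.isPathWithinWorkspace('/repo/docs/readme.md'); // true
workspace.isPathWithinWorkspace('/etc/passwd'); // false
```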
@ -32,6 +32,7 @@ import fs from 'fs';
|
|||
import os from 'os';
|
||||
import { ApprovalMode, Config } from '../config/config.js';
|
||||
import { Content, Part, SchemaUnion } from '@google/genai';
|
||||
import { createMockWorkspaceContext } from '../test-utils/mockWorkspaceContext.js';
|
||||
|
||||
describe('EditTool', () => {
|
||||
let tool: EditTool;
|
||||
|
@ -41,6 +42,7 @@ describe('EditTool', () => {
|
|||
let geminiClient: any;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'edit-tool-test-'));
|
||||
rootDir = path.join(tempDir, 'root');
|
||||
fs.mkdirSync(rootDir);
|
||||
|
@ -54,6 +56,7 @@ describe('EditTool', () => {
|
|||
getTargetDir: () => rootDir,
|
||||
getApprovalMode: vi.fn(),
|
||||
setApprovalMode: vi.fn(),
|
||||
getWorkspaceContext: () => createMockWorkspaceContext(rootDir),
|
||||
// getGeminiConfig: () => ({ apiKey: 'test-api-key' }), // This was not a real Config method
|
||||
// Add other properties/methods of Config if EditTool uses them
|
||||
// Minimal other methods to satisfy Config type if needed by EditTool constructor or other direct uses:
|
||||
|
@ -215,8 +218,9 @@ describe('EditTool', () => {
|
|||
old_string: 'old',
|
||||
new_string: 'new',
|
||||
};
|
||||
expect(tool.validateToolParams(params)).toMatch(
|
||||
/File path must be within the root directory/,
|
||||
const error = tool.validateToolParams(params);
|
||||
expect(error).toContain(
|
||||
'File path must be within one of the workspace directories',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
@ -675,4 +679,28 @@ describe('EditTool', () => {
|
|||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('workspace boundary validation', () => {
|
||||
it('should validate paths are within workspace root', () => {
|
||||
const validPath = {
|
||||
file_path: path.join(rootDir, 'file.txt'),
|
||||
old_string: 'old',
|
||||
new_string: 'new',
|
||||
};
|
||||
expect(tool.validateToolParams(validPath)).toBeNull();
|
||||
});
|
||||
|
||||
it('should reject paths outside workspace root', () => {
|
||||
const invalidPath = {
|
||||
file_path: '/etc/passwd',
|
||||
old_string: 'root',
|
||||
new_string: 'hacked',
|
||||
};
|
||||
const error = tool.validateToolParams(invalidPath);
|
||||
expect(error).toContain(
|
||||
'File path must be within one of the workspace directories',
|
||||
);
|
||||
expect(error).toContain(rootDir);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -26,7 +26,6 @@ import { ensureCorrectEdit } from '../utils/editCorrector.js';
|
|||
import { DEFAULT_DIFF_OPTIONS } from './diffOptions.js';
|
||||
import { ReadFileTool } from './read-file.js';
|
||||
import { ModifiableTool, ModifyContext } from './modifiable-tool.js';
|
||||
import { isWithinRoot } from '../utils/fileUtils.js';
|
||||
|
||||
/**
|
||||
* Parameters for the Edit tool
|
||||
|
@@ -137,8 +136,10 @@ Expectation for required parameters:
      return `File path must be absolute: ${params.file_path}`;
    }

-   if (!isWithinRoot(params.file_path, this.config.getTargetDir())) {
-     return `File path must be within the root directory (${this.config.getTargetDir()}): ${params.file_path}`;
+   const workspaceContext = this.config.getWorkspaceContext();
+   if (!workspaceContext.isPathWithinWorkspace(params.file_path)) {
+     const directories = workspaceContext.getDirectories();
+     return `File path must be within one of the workspace directories: ${directories.join(', ')}`;
    }

    return null;
|
|
@ -9,9 +9,10 @@ import { partListUnionToString } from '../core/geminiRequest.js';
|
|||
import path from 'path';
|
||||
import fs from 'fs/promises';
|
||||
import os from 'os';
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest'; // Removed vi
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { FileDiscoveryService } from '../services/fileDiscoveryService.js';
|
||||
import { Config } from '../config/config.js';
|
||||
import { createMockWorkspaceContext } from '../test-utils/mockWorkspaceContext.js';
|
||||
|
||||
describe('GlobTool', () => {
|
||||
let tempRootDir: string; // This will be the rootDirectory for the GlobTool instance
|
||||
|
@ -23,6 +24,7 @@ describe('GlobTool', () => {
|
|||
getFileService: () => new FileDiscoveryService(tempRootDir),
|
||||
getFileFilteringRespectGitIgnore: () => true,
|
||||
getTargetDir: () => tempRootDir,
|
||||
getWorkspaceContext: () => createMockWorkspaceContext(tempRootDir),
|
||||
} as unknown as Config;
|
||||
|
||||
beforeEach(async () => {
|
||||
|
@ -243,7 +245,7 @@ describe('GlobTool', () => {
|
|||
path: '../../../../../../../../../../tmp',
|
||||
}; // Definitely outside
|
||||
expect(specificGlobTool.validateToolParams(paramsOutside)).toContain(
|
||||
"resolves outside the tool's root directory",
|
||||
'resolves outside the allowed workspace directories',
|
||||
);
|
||||
});
|
||||
|
||||
|
@ -264,6 +266,37 @@ describe('GlobTool', () => {
|
|||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('workspace boundary validation', () => {
|
||||
it('should validate search paths are within workspace boundaries', () => {
|
||||
const validPath = { pattern: '*.ts', path: 'sub' };
|
||||
const invalidPath = { pattern: '*.ts', path: '../..' };
|
||||
|
||||
expect(globTool.validateToolParams(validPath)).toBeNull();
|
||||
expect(globTool.validateToolParams(invalidPath)).toContain(
|
||||
'resolves outside the allowed workspace directories',
|
||||
);
|
||||
});
|
||||
|
||||
it('should provide clear error messages when path is outside workspace', () => {
|
||||
const invalidPath = { pattern: '*.ts', path: '/etc' };
|
||||
const error = globTool.validateToolParams(invalidPath);
|
||||
|
||||
expect(error).toContain(
|
||||
'resolves outside the allowed workspace directories',
|
||||
);
|
||||
expect(error).toContain(tempRootDir);
|
||||
});
|
||||
|
||||
it('should work with paths in workspace subdirectories', async () => {
|
||||
const params: GlobToolParams = { pattern: '*.md', path: 'sub' };
|
||||
const result = await globTool.execute(params, abortSignal);
|
||||
|
||||
expect(result.llmContent).toContain('Found 2 file(s)');
|
||||
expect(result.llmContent).toContain('fileC.md');
|
||||
expect(result.llmContent).toContain('FileD.MD');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('sortFileEntries', () => {
|
||||
|
|
|
@ -11,7 +11,6 @@ import { SchemaValidator } from '../utils/schemaValidator.js';
|
|||
import { BaseTool, Icon, ToolResult } from './tools.js';
|
||||
import { Type } from '@google/genai';
|
||||
import { shortenPath, makeRelative } from '../utils/paths.js';
|
||||
import { isWithinRoot } from '../utils/fileUtils.js';
|
||||
import { Config } from '../config/config.js';
|
||||
|
||||
// Subset of 'Path' interface provided by 'glob' that we can implement for testing
|
||||
|
@ -130,8 +129,10 @@ export class GlobTool extends BaseTool<GlobToolParams, ToolResult> {
|
|||
params.path || '.',
|
||||
);
|
||||
|
||||
if (!isWithinRoot(searchDirAbsolute, this.config.getTargetDir())) {
|
||||
return `Search path ("${searchDirAbsolute}") resolves outside the tool's root directory ("${this.config.getTargetDir()}").`;
|
||||
const workspaceContext = this.config.getWorkspaceContext();
|
||||
if (!workspaceContext.isPathWithinWorkspace(searchDirAbsolute)) {
|
||||
const directories = workspaceContext.getDirectories();
|
||||
return `Search path ("${searchDirAbsolute}") resolves outside the allowed workspace directories: ${directories.join(', ')}`;
|
||||
}
|
||||
|
||||
const targetDir = searchDirAbsolute || this.config.getTargetDir();
|
||||
|
@ -189,10 +190,27 @@ export class GlobTool extends BaseTool<GlobToolParams, ToolResult> {
|
|||
}
|
||||
|
||||
try {
|
||||
const searchDirAbsolute = path.resolve(
|
||||
this.config.getTargetDir(),
|
||||
params.path || '.',
|
||||
);
|
||||
const workspaceContext = this.config.getWorkspaceContext();
|
||||
const workspaceDirectories = workspaceContext.getDirectories();
|
||||
|
||||
// If a specific path is provided, resolve it and check if it's within workspace
|
||||
let searchDirectories: readonly string[];
|
||||
if (params.path) {
|
||||
const searchDirAbsolute = path.resolve(
|
||||
this.config.getTargetDir(),
|
||||
params.path,
|
||||
);
|
||||
if (!workspaceContext.isPathWithinWorkspace(searchDirAbsolute)) {
|
||||
return {
|
||||
llmContent: `Error: Path "${params.path}" is not within any workspace directory`,
|
||||
returnDisplay: `Path is not within workspace`,
|
||||
};
|
||||
}
|
||||
searchDirectories = [searchDirAbsolute];
|
||||
} else {
|
||||
// Search across all workspace directories
|
||||
searchDirectories = workspaceDirectories;
|
||||
}
|
||||
|
||||
// Get centralized file discovery service
|
||||
const respectGitIgnore =
|
||||
|
@ -200,17 +218,26 @@ export class GlobTool extends BaseTool<GlobToolParams, ToolResult> {
|
|||
this.config.getFileFilteringRespectGitIgnore();
|
||||
const fileDiscovery = this.config.getFileService();
|
||||
|
||||
const entries = (await glob(params.pattern, {
|
||||
cwd: searchDirAbsolute,
|
||||
withFileTypes: true,
|
||||
nodir: true,
|
||||
stat: true,
|
||||
nocase: !params.case_sensitive,
|
||||
dot: true,
|
||||
ignore: ['**/node_modules/**', '**/.git/**'],
|
||||
follow: false,
|
||||
signal,
|
||||
})) as GlobPath[];
|
||||
// Collect entries from all search directories
|
||||
let allEntries: GlobPath[] = [];
|
||||
|
||||
for (const searchDir of searchDirectories) {
|
||||
const entries = (await glob(params.pattern, {
|
||||
cwd: searchDir,
|
||||
withFileTypes: true,
|
||||
nodir: true,
|
||||
stat: true,
|
||||
nocase: !params.case_sensitive,
|
||||
dot: true,
|
||||
ignore: ['**/node_modules/**', '**/.git/**'],
|
||||
follow: false,
|
||||
signal,
|
||||
})) as GlobPath[];
|
||||
|
||||
allEntries = allEntries.concat(entries);
|
||||
}
|
||||
|
||||
const entries = allEntries;
|
||||
|
||||
// Apply git-aware filtering if enabled and in git repository
|
||||
let filteredEntries = entries;
|
||||
|
@ -236,7 +263,12 @@ export class GlobTool extends BaseTool<GlobToolParams, ToolResult> {
|
|||
}
|
||||
|
||||
if (!filteredEntries || filteredEntries.length === 0) {
|
||||
let message = `No files found matching pattern "${params.pattern}" within ${searchDirAbsolute}.`;
|
||||
let message = `No files found matching pattern "${params.pattern}"`;
|
||||
if (searchDirectories.length === 1) {
|
||||
message += ` within ${searchDirectories[0]}`;
|
||||
} else {
|
||||
message += ` within ${searchDirectories.length} workspace directories`;
|
||||
}
|
||||
if (gitIgnoredCount > 0) {
|
||||
message += ` (${gitIgnoredCount} files were git-ignored)`;
|
||||
}
|
||||
|
@ -263,7 +295,12 @@ export class GlobTool extends BaseTool<GlobToolParams, ToolResult> {
|
|||
const fileListDescription = sortedAbsolutePaths.join('\n');
|
||||
const fileCount = sortedAbsolutePaths.length;
|
||||
|
||||
let resultMessage = `Found ${fileCount} file(s) matching "${params.pattern}" within ${searchDirAbsolute}`;
|
||||
let resultMessage = `Found ${fileCount} file(s) matching "${params.pattern}"`;
|
||||
if (searchDirectories.length === 1) {
|
||||
resultMessage += ` within ${searchDirectories[0]}`;
|
||||
} else {
|
||||
resultMessage += ` across ${searchDirectories.length} workspace directories`;
|
||||
}
|
||||
if (gitIgnoredCount > 0) {
|
||||
resultMessage += ` (${gitIgnoredCount} additional files were git-ignored)`;
|
||||
}
|
||||
|
|
|
@ -10,6 +10,7 @@ import path from 'path';
|
|||
import fs from 'fs/promises';
|
||||
import os from 'os';
|
||||
import { Config } from '../config/config.js';
|
||||
import { createMockWorkspaceContext } from '../test-utils/mockWorkspaceContext.js';
|
||||
|
||||
// Mock the child_process module to control grep/git grep behavior
|
||||
vi.mock('child_process', () => ({
|
||||
|
@ -33,6 +34,7 @@ describe('GrepTool', () => {
|
|||
|
||||
const mockConfig = {
|
||||
getTargetDir: () => tempRootDir,
|
||||
getWorkspaceContext: () => createMockWorkspaceContext(tempRootDir),
|
||||
} as unknown as Config;
|
||||
|
||||
beforeEach(async () => {
|
||||
|
@ -120,7 +122,7 @@ describe('GrepTool', () => {
|
|||
const params: GrepToolParams = { pattern: 'world' };
|
||||
const result = await grepTool.execute(params, abortSignal);
|
||||
expect(result.llmContent).toContain(
|
||||
'Found 3 matches for pattern "world" in path "."',
|
||||
'Found 3 matches for pattern "world" in the workspace directory',
|
||||
);
|
||||
expect(result.llmContent).toContain('File: fileA.txt');
|
||||
expect(result.llmContent).toContain('L1: hello world');
|
||||
|
@ -147,7 +149,7 @@ describe('GrepTool', () => {
|
|||
const params: GrepToolParams = { pattern: 'hello', include: '*.js' };
|
||||
const result = await grepTool.execute(params, abortSignal);
|
||||
expect(result.llmContent).toContain(
|
||||
'Found 1 match for pattern "hello" in path "." (filter: "*.js")',
|
||||
'Found 1 match for pattern "hello" in the workspace directory (filter: "*.js"):',
|
||||
);
|
||||
expect(result.llmContent).toContain('File: fileB.js');
|
||||
expect(result.llmContent).toContain(
|
||||
|
@ -179,7 +181,7 @@ describe('GrepTool', () => {
|
|||
const params: GrepToolParams = { pattern: 'nonexistentpattern' };
|
||||
const result = await grepTool.execute(params, abortSignal);
|
||||
expect(result.llmContent).toContain(
|
||||
'No matches found for pattern "nonexistentpattern" in path "."',
|
||||
'No matches found for pattern "nonexistentpattern" in the workspace directory.',
|
||||
);
|
||||
expect(result.returnDisplay).toBe('No matches found');
|
||||
});
|
||||
|
@ -188,7 +190,7 @@ describe('GrepTool', () => {
|
|||
const params: GrepToolParams = { pattern: 'foo.*bar' }; // Matches 'const foo = "bar";'
|
||||
const result = await grepTool.execute(params, abortSignal);
|
||||
expect(result.llmContent).toContain(
|
||||
'Found 1 match for pattern "foo.*bar" in path "."',
|
||||
'Found 1 match for pattern "foo.*bar" in the workspace directory:',
|
||||
);
|
||||
expect(result.llmContent).toContain('File: fileB.js');
|
||||
expect(result.llmContent).toContain('L1: const foo = "bar";');
|
||||
|
@ -198,7 +200,7 @@ describe('GrepTool', () => {
|
|||
const params: GrepToolParams = { pattern: 'HELLO' };
|
||||
const result = await grepTool.execute(params, abortSignal);
|
||||
expect(result.llmContent).toContain(
|
||||
'Found 2 matches for pattern "HELLO" in path "."',
|
||||
'Found 2 matches for pattern "HELLO" in the workspace directory:',
|
||||
);
|
||||
expect(result.llmContent).toContain('File: fileA.txt');
|
||||
expect(result.llmContent).toContain('L1: hello world');
|
||||
|
@ -220,6 +222,98 @@ describe('GrepTool', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('multi-directory workspace', () => {
|
||||
it('should search across all workspace directories when no path is specified', async () => {
|
||||
// Create additional directory with test files
|
||||
const secondDir = await fs.mkdtemp(
|
||||
path.join(os.tmpdir(), 'grep-tool-second-'),
|
||||
);
|
||||
await fs.writeFile(
|
||||
path.join(secondDir, 'other.txt'),
|
||||
'hello from second directory\nworld in second',
|
||||
);
|
||||
await fs.writeFile(
|
||||
path.join(secondDir, 'another.js'),
|
||||
'function world() { return "test"; }',
|
||||
);
|
||||
|
||||
// Create a mock config with multiple directories
|
||||
const multiDirConfig = {
|
||||
getTargetDir: () => tempRootDir,
|
||||
getWorkspaceContext: () =>
|
||||
createMockWorkspaceContext(tempRootDir, [secondDir]),
|
||||
} as unknown as Config;
|
||||
|
||||
const multiDirGrepTool = new GrepTool(multiDirConfig);
|
||||
const params: GrepToolParams = { pattern: 'world' };
|
||||
const result = await multiDirGrepTool.execute(params, abortSignal);
|
||||
|
||||
// Should find matches in both directories
|
||||
expect(result.llmContent).toContain(
|
||||
'Found 5 matches for pattern "world"',
|
||||
);
|
||||
|
||||
// Matches from first directory
|
||||
expect(result.llmContent).toContain('fileA.txt');
|
||||
expect(result.llmContent).toContain('L1: hello world');
|
||||
expect(result.llmContent).toContain('L2: second line with world');
|
||||
expect(result.llmContent).toContain('fileC.txt');
|
||||
expect(result.llmContent).toContain('L1: another world in sub dir');
|
||||
|
||||
// Matches from second directory (with directory name prefix)
|
||||
const secondDirName = path.basename(secondDir);
|
||||
expect(result.llmContent).toContain(
|
||||
`File: ${path.join(secondDirName, 'other.txt')}`,
|
||||
);
|
||||
expect(result.llmContent).toContain('L2: world in second');
|
||||
expect(result.llmContent).toContain(
|
||||
`File: ${path.join(secondDirName, 'another.js')}`,
|
||||
);
|
||||
expect(result.llmContent).toContain('L1: function world()');
|
||||
|
||||
// Clean up
|
||||
await fs.rm(secondDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should search only specified path within workspace directories', async () => {
|
||||
// Create additional directory
|
||||
const secondDir = await fs.mkdtemp(
|
||||
path.join(os.tmpdir(), 'grep-tool-second-'),
|
||||
);
|
||||
await fs.mkdir(path.join(secondDir, 'sub'));
|
||||
await fs.writeFile(
|
||||
path.join(secondDir, 'sub', 'test.txt'),
|
||||
'hello from second sub directory',
|
||||
);
|
||||
|
||||
// Create a mock config with multiple directories
|
||||
const multiDirConfig = {
|
||||
getTargetDir: () => tempRootDir,
|
||||
getWorkspaceContext: () =>
|
||||
createMockWorkspaceContext(tempRootDir, [secondDir]),
|
||||
} as unknown as Config;
|
||||
|
||||
const multiDirGrepTool = new GrepTool(multiDirConfig);
|
||||
|
||||
// Search only in the 'sub' directory of the first workspace
|
||||
const params: GrepToolParams = { pattern: 'world', path: 'sub' };
|
||||
const result = await multiDirGrepTool.execute(params, abortSignal);
|
||||
|
||||
// Should only find matches in the specified sub directory
|
||||
expect(result.llmContent).toContain(
|
||||
'Found 1 match for pattern "world" in path "sub"',
|
||||
);
|
||||
expect(result.llmContent).toContain('File: fileC.txt');
|
||||
expect(result.llmContent).toContain('L1: another world in sub dir');
|
||||
|
||||
// Should not contain matches from second directory
|
||||
expect(result.llmContent).not.toContain('test.txt');
|
||||
|
||||
// Clean up
|
||||
await fs.rm(secondDir, { recursive: true, force: true });
|
||||
});
|
||||
});
|
||||
|
||||
describe('getDescription', () => {
|
||||
it('should generate correct description with pattern only', () => {
|
||||
const params: GrepToolParams = { pattern: 'testPattern' };
|
||||
|
@ -246,6 +340,21 @@ describe('GrepTool', () => {
|
|||
);
|
||||
});
|
||||
|
||||
it('should indicate searching across all workspace directories when no path specified', () => {
|
||||
// Create a mock config with multiple directories
|
||||
const multiDirConfig = {
|
||||
getTargetDir: () => tempRootDir,
|
||||
getWorkspaceContext: () =>
|
||||
createMockWorkspaceContext(tempRootDir, ['/another/dir']),
|
||||
} as unknown as Config;
|
||||
|
||||
const multiDirGrepTool = new GrepTool(multiDirConfig);
|
||||
const params: GrepToolParams = { pattern: 'testPattern' };
|
||||
expect(multiDirGrepTool.getDescription(params)).toBe(
|
||||
"'testPattern' across all workspace directories",
|
||||
);
|
||||
});
|
||||
|
||||
it('should generate correct description with pattern, include, and path', () => {
|
||||
const params: GrepToolParams = {
|
||||
pattern: 'testPattern',
|
||||
|
|
|
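Several test files in this commit import `createMockWorkspaceContext` from `../test-utils/mockWorkspaceContext.js`, but the helper's source is not part of this excerpt. A plausible minimal shape, inferred only from how the tests call it (a primary directory plus optional extra directories), might look like the sketch below; the implementation details are assumptions, not the actual helper.

```ts
import { vi } from 'vitest';
import * as path from 'path';
import type { WorkspaceContext } from '../utils/workspaceContext.js';

// Assumed shape of the test helper: just enough of WorkspaceContext for the tools under test.
export function createMockWorkspaceContext(
  primaryDir: string,
  additionalDirs: string[] = [],
): WorkspaceContext {
  const directories = [primaryDir, ...additionalDirs];
  return {
    getDirectories: vi.fn(() => directories),
    isPathWithinWorkspace: vi.fn((p: string) =>
      directories.some((dir) => {
        const rel = path.relative(dir, p);
        return rel === '' || (!rel.startsWith('..') && !path.isAbsolute(rel));
      }),
    ),
    addDirectory: vi.fn((dir: string) => {
      directories.push(dir);
    }),
  } as unknown as WorkspaceContext;
}
```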
@@ -92,22 +92,23 @@ export class GrepTool extends BaseTool<GrepToolParams, ToolResult> {
  /**
   * Checks if a path is within the root directory and resolves it.
   * @param relativePath Path relative to the root directory (or undefined for root).
   * @returns The absolute path if valid and exists.
   * @returns The absolute path if valid and exists, or null if no path specified (to search all directories).
   * @throws {Error} If path is outside root, doesn't exist, or isn't a directory.
   */
  private resolveAndValidatePath(relativePath?: string): string {
    const targetPath = path.resolve(
      this.config.getTargetDir(),
      relativePath || '.',
    );
  private resolveAndValidatePath(relativePath?: string): string | null {
    // If no path specified, return null to indicate searching all workspace directories
    if (!relativePath) {
      return null;
    }

    // Security Check: Ensure the resolved path is still within the root directory.
    if (
      !targetPath.startsWith(this.config.getTargetDir()) &&
      targetPath !== this.config.getTargetDir()
    ) {
    const targetPath = path.resolve(this.config.getTargetDir(), relativePath);

    // Security Check: Ensure the resolved path is within workspace boundaries
    const workspaceContext = this.config.getWorkspaceContext();
    if (!workspaceContext.isPathWithinWorkspace(targetPath)) {
      const directories = workspaceContext.getDirectories();
      throw new Error(
        `Path validation failed: Attempted path "${relativePath || '.'}" resolves outside the allowed root directory "${this.config.getTargetDir()}".`,
        `Path validation failed: Attempted path "${relativePath}" resolves outside the allowed workspace directories: ${directories.join(', ')}`,
      );
    }

@@ -146,10 +147,13 @@ export class GrepTool extends BaseTool<GrepToolParams, ToolResult> {
      return `Invalid regular expression pattern provided: ${params.pattern}. Error: ${getErrorMessage(error)}`;
    }

    try {
      this.resolveAndValidatePath(params.path);
    } catch (error) {
      return getErrorMessage(error);
    // Only validate path if one is provided
    if (params.path) {
      try {
        this.resolveAndValidatePath(params.path);
      } catch (error) {
        return getErrorMessage(error);
      }
    }

    return null; // Parameters are valid
|
@ -174,44 +178,78 @@ export class GrepTool extends BaseTool<GrepToolParams, ToolResult> {
|
|||
};
|
||||
}
|
||||
|
||||
let searchDirAbs: string;
|
||||
try {
|
||||
searchDirAbs = this.resolveAndValidatePath(params.path);
|
||||
const workspaceContext = this.config.getWorkspaceContext();
|
||||
const searchDirAbs = this.resolveAndValidatePath(params.path);
|
||||
const searchDirDisplay = params.path || '.';
|
||||
|
||||
const matches: GrepMatch[] = await this.performGrepSearch({
|
||||
pattern: params.pattern,
|
||||
path: searchDirAbs,
|
||||
include: params.include,
|
||||
signal,
|
||||
});
|
||||
// Determine which directories to search
|
||||
let searchDirectories: readonly string[];
|
||||
if (searchDirAbs === null) {
|
||||
// No path specified - search all workspace directories
|
||||
searchDirectories = workspaceContext.getDirectories();
|
||||
} else {
|
||||
// Specific path provided - search only that directory
|
||||
searchDirectories = [searchDirAbs];
|
||||
}
|
||||
|
||||
if (matches.length === 0) {
|
||||
const noMatchMsg = `No matches found for pattern "${params.pattern}" in path "${searchDirDisplay}"${params.include ? ` (filter: "${params.include}")` : ''}.`;
|
||||
// Collect matches from all search directories
|
||||
let allMatches: GrepMatch[] = [];
|
||||
for (const searchDir of searchDirectories) {
|
||||
const matches = await this.performGrepSearch({
|
||||
pattern: params.pattern,
|
||||
path: searchDir,
|
||||
include: params.include,
|
||||
signal,
|
||||
});
|
||||
|
||||
// Add directory prefix if searching multiple directories
|
||||
if (searchDirectories.length > 1) {
|
||||
const dirName = path.basename(searchDir);
|
||||
matches.forEach((match) => {
|
||||
match.filePath = path.join(dirName, match.filePath);
|
||||
});
|
||||
}
|
||||
|
||||
allMatches = allMatches.concat(matches);
|
||||
}
|
||||
|
||||
let searchLocationDescription: string;
|
||||
if (searchDirAbs === null) {
|
||||
const numDirs = workspaceContext.getDirectories().length;
|
||||
searchLocationDescription =
|
||||
numDirs > 1
|
||||
? `across ${numDirs} workspace directories`
|
||||
: `in the workspace directory`;
|
||||
} else {
|
||||
searchLocationDescription = `in path "${searchDirDisplay}"`;
|
||||
}
|
||||
|
||||
if (allMatches.length === 0) {
|
||||
const noMatchMsg = `No matches found for pattern "${params.pattern}" ${searchLocationDescription}${params.include ? ` (filter: "${params.include}")` : ''}.`;
|
||||
return { llmContent: noMatchMsg, returnDisplay: `No matches found` };
|
||||
}
|
||||
|
||||
const matchesByFile = matches.reduce(
|
||||
// Group matches by file
|
||||
const matchesByFile = allMatches.reduce(
|
||||
(acc, match) => {
|
||||
const relativeFilePath =
|
||||
path.relative(
|
||||
searchDirAbs,
|
||||
path.resolve(searchDirAbs, match.filePath),
|
||||
) || path.basename(match.filePath);
|
||||
if (!acc[relativeFilePath]) {
|
||||
acc[relativeFilePath] = [];
|
||||
const fileKey = match.filePath;
|
||||
if (!acc[fileKey]) {
|
||||
acc[fileKey] = [];
|
||||
}
|
||||
acc[relativeFilePath].push(match);
|
||||
acc[relativeFilePath].sort((a, b) => a.lineNumber - b.lineNumber);
|
||||
acc[fileKey].push(match);
|
||||
acc[fileKey].sort((a, b) => a.lineNumber - b.lineNumber);
|
||||
return acc;
|
||||
},
|
||||
{} as Record<string, GrepMatch[]>,
|
||||
);
|
||||
|
||||
const matchCount = matches.length;
|
||||
const matchCount = allMatches.length;
|
||||
const matchTerm = matchCount === 1 ? 'match' : 'matches';
|
||||
|
||||
let llmContent = `Found ${matchCount} ${matchTerm} for pattern "${params.pattern}" in path "${searchDirDisplay}"${params.include ? ` (filter: "${params.include}")` : ''}:\n---\n`;
|
||||
let llmContent = `Found ${matchCount} ${matchTerm} for pattern "${params.pattern}" ${searchLocationDescription}${params.include ? ` (filter: "${params.include}")` : ''}:
|
||||
---
|
||||
`;
|
||||
|
||||
for (const filePath in matchesByFile) {
|
||||
llmContent += `File: ${filePath}\n`;
|
||||
|
@ -334,6 +372,13 @@ export class GrepTool extends BaseTool<GrepToolParams, ToolResult> {
|
|||
);
|
||||
description += ` within ${shortenPath(relativePath)}`;
|
||||
}
|
||||
} else {
|
||||
// When no path is specified, indicate searching all workspace directories
|
||||
const workspaceContext = this.config.getWorkspaceContext();
|
||||
const directories = workspaceContext.getDirectories();
|
||||
if (directories.length > 1) {
|
||||
description += ` across all workspace directories`;
|
||||
}
|
||||
}
|
||||
return description;
|
||||
}
|
||||
|
|
|
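When GrepTool searches more than one workspace directory (see the execute changes above), each match's `filePath` is prefixed with the basename of the directory it came from, so results from different roots stay distinguishable in the grouped output. A small illustrative sketch of that convention follows; the `GrepMatch` fields beyond `filePath` and `lineNumber` are assumed.

```ts
import * as path from 'path';

interface GrepMatch {
  filePath: string; // relative to the directory that was searched
  lineNumber: number;
  line: string;
}

// Prefix matches with their workspace directory's basename when more than
// one directory was searched, as the diff above does.
function prefixMatches(
  searchDir: string,
  matches: GrepMatch[],
  multipleDirs: boolean,
): GrepMatch[] {
  if (!multipleDirs) return matches;
  const dirName = path.basename(searchDir);
  return matches.map((m) => ({ ...m, filePath: path.join(dirName, m.filePath) }));
}

// e.g. a match found as "other.txt" under /home/user/second-project
// is reported as "second-project/other.txt".
```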
@ -0,0 +1,496 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
|
||||
import { describe, it, expect, beforeEach, vi } from 'vitest';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
|
||||
vi.mock('fs', () => ({
|
||||
default: {
|
||||
statSync: vi.fn(),
|
||||
readdirSync: vi.fn(),
|
||||
},
|
||||
statSync: vi.fn(),
|
||||
readdirSync: vi.fn(),
|
||||
}));
|
||||
import { LSTool } from './ls.js';
|
||||
import { Config } from '../config/config.js';
|
||||
import { WorkspaceContext } from '../utils/workspaceContext.js';
|
||||
import { FileDiscoveryService } from '../services/fileDiscoveryService.js';
|
||||
|
||||
describe('LSTool', () => {
|
||||
let lsTool: LSTool;
|
||||
let mockConfig: Config;
|
||||
let mockWorkspaceContext: WorkspaceContext;
|
||||
let mockFileService: FileDiscoveryService;
|
||||
const mockPrimaryDir = '/home/user/project';
|
||||
const mockSecondaryDir = '/home/user/other-project';
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
|
||||
// Mock WorkspaceContext
|
||||
mockWorkspaceContext = {
|
||||
getDirectories: vi
|
||||
.fn()
|
||||
.mockReturnValue([mockPrimaryDir, mockSecondaryDir]),
|
||||
isPathWithinWorkspace: vi
|
||||
.fn()
|
||||
.mockImplementation(
|
||||
(path) =>
|
||||
path.startsWith(mockPrimaryDir) ||
|
||||
path.startsWith(mockSecondaryDir),
|
||||
),
|
||||
addDirectory: vi.fn(),
|
||||
} as unknown as WorkspaceContext;
|
||||
|
||||
// Mock FileService
|
||||
mockFileService = {
|
||||
shouldGitIgnoreFile: vi.fn().mockReturnValue(false),
|
||||
shouldGeminiIgnoreFile: vi.fn().mockReturnValue(false),
|
||||
} as unknown as FileDiscoveryService;
|
||||
|
||||
// Mock Config
|
||||
mockConfig = {
|
||||
getTargetDir: vi.fn().mockReturnValue(mockPrimaryDir),
|
||||
getWorkspaceContext: vi.fn().mockReturnValue(mockWorkspaceContext),
|
||||
getFileService: vi.fn().mockReturnValue(mockFileService),
|
||||
getFileFilteringOptions: vi.fn().mockReturnValue({
|
||||
respectGitIgnore: true,
|
||||
respectGeminiIgnore: true,
|
||||
}),
|
||||
} as unknown as Config;
|
||||
|
||||
lsTool = new LSTool(mockConfig);
|
||||
});
|
||||
|
||||
describe('parameter validation', () => {
|
||||
it('should accept valid absolute paths within workspace', () => {
|
||||
const params = {
|
||||
path: '/home/user/project/src',
|
||||
};
|
||||
|
||||
const error = lsTool.validateToolParams(params);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
it('should reject relative paths', () => {
|
||||
const params = {
|
||||
path: './src',
|
||||
};
|
||||
|
||||
const error = lsTool.validateToolParams(params);
|
||||
expect(error).toBe('Path must be absolute: ./src');
|
||||
});
|
||||
|
||||
it('should reject paths outside workspace with clear error message', () => {
|
||||
const params = {
|
||||
path: '/etc/passwd',
|
||||
};
|
||||
|
||||
const error = lsTool.validateToolParams(params);
|
||||
expect(error).toBe(
|
||||
'Path must be within one of the workspace directories: /home/user/project, /home/user/other-project',
|
||||
);
|
||||
});
|
||||
|
||||
it('should accept paths in secondary workspace directory', () => {
|
||||
const params = {
|
||||
path: '/home/user/other-project/lib',
|
||||
};
|
||||
|
||||
const error = lsTool.validateToolParams(params);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('execute', () => {
|
||||
it('should list files in a directory', async () => {
|
||||
const testPath = '/home/user/project/src';
|
||||
const mockFiles = ['file1.ts', 'file2.ts', 'subdir'];
|
||||
const mockStats = {
|
||||
isDirectory: vi.fn(),
|
||||
mtime: new Date(),
|
||||
size: 1024,
|
||||
};
|
||||
|
||||
vi.mocked(fs.statSync).mockImplementation((path: any) => {
|
||||
const pathStr = path.toString();
|
||||
if (pathStr === testPath) {
|
||||
return { isDirectory: () => true } as fs.Stats;
|
||||
}
|
||||
// For individual files
|
||||
if (pathStr.toString().endsWith('subdir')) {
|
||||
return { ...mockStats, isDirectory: () => true, size: 0 } as fs.Stats;
|
||||
}
|
||||
return { ...mockStats, isDirectory: () => false } as fs.Stats;
|
||||
});
|
||||
|
||||
vi.mocked(fs.readdirSync).mockReturnValue(mockFiles as any);
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toContain('[DIR] subdir');
|
||||
expect(result.llmContent).toContain('file1.ts');
|
||||
expect(result.llmContent).toContain('file2.ts');
|
||||
expect(result.returnDisplay).toBe('Listed 3 item(s).');
|
||||
});
|
||||
|
||||
it('should list files from secondary workspace directory', async () => {
|
||||
const testPath = '/home/user/other-project/lib';
|
||||
const mockFiles = ['module1.js', 'module2.js'];
|
||||
|
||||
vi.mocked(fs.statSync).mockImplementation((path: any) => {
|
||||
if (path.toString() === testPath) {
|
||||
return { isDirectory: () => true } as fs.Stats;
|
||||
}
|
||||
return {
|
||||
isDirectory: () => false,
|
||||
mtime: new Date(),
|
||||
size: 2048,
|
||||
} as fs.Stats;
|
||||
});
|
||||
|
||||
vi.mocked(fs.readdirSync).mockReturnValue(mockFiles as any);
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toContain('module1.js');
|
||||
expect(result.llmContent).toContain('module2.js');
|
||||
expect(result.returnDisplay).toBe('Listed 2 item(s).');
|
||||
});
|
||||
|
||||
it('should handle empty directories', async () => {
|
||||
const testPath = '/home/user/project/empty';
|
||||
|
||||
vi.mocked(fs.statSync).mockReturnValue({
|
||||
isDirectory: () => true,
|
||||
} as fs.Stats);
|
||||
vi.mocked(fs.readdirSync).mockReturnValue([]);
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toBe(
|
||||
'Directory /home/user/project/empty is empty.',
|
||||
);
|
||||
expect(result.returnDisplay).toBe('Directory is empty.');
|
||||
});
|
||||
|
||||
it('should respect ignore patterns', async () => {
|
||||
const testPath = '/home/user/project/src';
|
||||
const mockFiles = ['test.js', 'test.spec.js', 'index.js'];
|
||||
|
||||
vi.mocked(fs.statSync).mockImplementation((path: any) => {
|
||||
const pathStr = path.toString();
|
||||
if (pathStr === testPath) {
|
||||
return { isDirectory: () => true } as fs.Stats;
|
||||
}
|
||||
return {
|
||||
isDirectory: () => false,
|
||||
mtime: new Date(),
|
||||
size: 1024,
|
||||
} as fs.Stats;
|
||||
});
|
||||
vi.mocked(fs.readdirSync).mockReturnValue(mockFiles as any);
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath, ignore: ['*.spec.js'] },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toContain('test.js');
|
||||
expect(result.llmContent).toContain('index.js');
|
||||
expect(result.llmContent).not.toContain('test.spec.js');
|
||||
expect(result.returnDisplay).toBe('Listed 2 item(s).');
|
||||
});
|
||||
|
||||
it('should respect gitignore patterns', async () => {
|
||||
const testPath = '/home/user/project/src';
|
||||
const mockFiles = ['file1.js', 'file2.js', 'ignored.js'];
|
||||
|
||||
vi.mocked(fs.statSync).mockImplementation((path: any) => {
|
||||
const pathStr = path.toString();
|
||||
if (pathStr === testPath) {
|
||||
return { isDirectory: () => true } as fs.Stats;
|
||||
}
|
||||
return {
|
||||
isDirectory: () => false,
|
||||
mtime: new Date(),
|
||||
size: 1024,
|
||||
} as fs.Stats;
|
||||
});
|
||||
vi.mocked(fs.readdirSync).mockReturnValue(mockFiles as any);
|
||||
(mockFileService.shouldGitIgnoreFile as any).mockImplementation(
|
||||
(path: string) => path.includes('ignored.js'),
|
||||
);
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toContain('file1.js');
|
||||
expect(result.llmContent).toContain('file2.js');
|
||||
expect(result.llmContent).not.toContain('ignored.js');
|
||||
expect(result.returnDisplay).toBe('Listed 2 item(s). (1 git-ignored)');
|
||||
});
|
||||
|
||||
it('should respect geminiignore patterns', async () => {
|
||||
const testPath = '/home/user/project/src';
|
||||
const mockFiles = ['file1.js', 'file2.js', 'private.js'];
|
||||
|
||||
vi.mocked(fs.statSync).mockImplementation((path: any) => {
|
||||
const pathStr = path.toString();
|
||||
if (pathStr === testPath) {
|
||||
return { isDirectory: () => true } as fs.Stats;
|
||||
}
|
||||
return {
|
||||
isDirectory: () => false,
|
||||
mtime: new Date(),
|
||||
size: 1024,
|
||||
} as fs.Stats;
|
||||
});
|
||||
vi.mocked(fs.readdirSync).mockReturnValue(mockFiles as any);
|
||||
(mockFileService.shouldGeminiIgnoreFile as any).mockImplementation(
|
||||
(path: string) => path.includes('private.js'),
|
||||
);
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toContain('file1.js');
|
||||
expect(result.llmContent).toContain('file2.js');
|
||||
expect(result.llmContent).not.toContain('private.js');
|
||||
expect(result.returnDisplay).toBe('Listed 2 item(s). (1 gemini-ignored)');
|
||||
});
|
||||
|
||||
it('should handle non-directory paths', async () => {
|
||||
const testPath = '/home/user/project/file.txt';
|
||||
|
||||
vi.mocked(fs.statSync).mockReturnValue({
|
||||
isDirectory: () => false,
|
||||
} as fs.Stats);
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toContain('Path is not a directory');
|
||||
expect(result.returnDisplay).toBe('Error: Path is not a directory.');
|
||||
});
|
||||
|
||||
it('should handle non-existent paths', async () => {
|
||||
const testPath = '/home/user/project/does-not-exist';
|
||||
|
||||
vi.mocked(fs.statSync).mockImplementation(() => {
|
||||
throw new Error('ENOENT: no such file or directory');
|
||||
});
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toContain('Error listing directory');
|
||||
expect(result.returnDisplay).toBe('Error: Failed to list directory.');
|
||||
});
|
||||
|
||||
it('should sort directories first, then files alphabetically', async () => {
|
||||
const testPath = '/home/user/project/src';
|
||||
const mockFiles = ['z-file.ts', 'a-dir', 'b-file.ts', 'c-dir'];
|
||||
|
||||
vi.mocked(fs.statSync).mockImplementation((path: any) => {
|
||||
if (path.toString() === testPath) {
|
||||
return { isDirectory: () => true } as fs.Stats;
|
||||
}
|
||||
if (path.toString().endsWith('-dir')) {
|
||||
return {
|
||||
isDirectory: () => true,
|
||||
mtime: new Date(),
|
||||
size: 0,
|
||||
} as fs.Stats;
|
||||
}
|
||||
return {
|
||||
isDirectory: () => false,
|
||||
mtime: new Date(),
|
||||
size: 1024,
|
||||
} as fs.Stats;
|
||||
});
|
||||
|
||||
vi.mocked(fs.readdirSync).mockReturnValue(mockFiles as any);
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
const lines = (
|
||||
typeof result.llmContent === 'string' ? result.llmContent : ''
|
||||
).split('\n');
|
||||
const entries = lines.slice(1).filter((line: string) => line.trim()); // Skip header
|
||||
expect(entries[0]).toBe('[DIR] a-dir');
|
||||
expect(entries[1]).toBe('[DIR] c-dir');
|
||||
expect(entries[2]).toBe('b-file.ts');
|
||||
expect(entries[3]).toBe('z-file.ts');
|
||||
});
|
||||
|
||||
it('should handle permission errors gracefully', async () => {
|
||||
const testPath = '/home/user/project/restricted';
|
||||
|
||||
vi.mocked(fs.statSync).mockReturnValue({
|
||||
isDirectory: () => true,
|
||||
} as fs.Stats);
|
||||
vi.mocked(fs.readdirSync).mockImplementation(() => {
|
||||
throw new Error('EACCES: permission denied');
|
||||
});
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toContain('Error listing directory');
|
||||
expect(result.llmContent).toContain('permission denied');
|
||||
expect(result.returnDisplay).toBe('Error: Failed to list directory.');
|
||||
});
|
||||
|
||||
it('should validate parameters and return error for invalid params', async () => {
|
||||
const result = await lsTool.execute(
|
||||
{ path: '../outside' },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toContain('Invalid parameters provided');
|
||||
expect(result.returnDisplay).toBe('Error: Failed to execute tool.');
|
||||
});
|
||||
|
||||
it('should handle errors accessing individual files during listing', async () => {
|
||||
const testPath = '/home/user/project/src';
|
||||
const mockFiles = ['accessible.ts', 'inaccessible.ts'];
|
||||
|
||||
vi.mocked(fs.statSync).mockImplementation((path: any) => {
|
||||
if (path.toString() === testPath) {
|
||||
return { isDirectory: () => true } as fs.Stats;
|
||||
}
|
||||
if (path.toString().endsWith('inaccessible.ts')) {
|
||||
throw new Error('EACCES: permission denied');
|
||||
}
|
||||
return {
|
||||
isDirectory: () => false,
|
||||
mtime: new Date(),
|
||||
size: 1024,
|
||||
} as fs.Stats;
|
||||
});
|
||||
|
||||
vi.mocked(fs.readdirSync).mockReturnValue(mockFiles as any);
|
||||
|
||||
// Spy on console.error to verify it's called
|
||||
const consoleErrorSpy = vi
|
||||
.spyOn(console, 'error')
|
||||
.mockImplementation(() => {});
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
// Should still list the accessible file
|
||||
expect(result.llmContent).toContain('accessible.ts');
|
||||
expect(result.llmContent).not.toContain('inaccessible.ts');
|
||||
expect(result.returnDisplay).toBe('Listed 1 item(s).');
|
||||
|
||||
// Verify error was logged
|
||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Error accessing'),
|
||||
);
|
||||
|
||||
consoleErrorSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getDescription', () => {
|
||||
it('should return shortened relative path', () => {
|
||||
const params = {
|
||||
path: path.join(mockPrimaryDir, 'deeply', 'nested', 'directory'),
|
||||
};
|
||||
|
||||
const description = lsTool.getDescription(params);
|
||||
expect(description).toBe(path.join('deeply', 'nested', 'directory'));
|
||||
});
|
||||
|
||||
it('should handle paths in secondary workspace', () => {
|
||||
const params = {
|
||||
path: path.join(mockSecondaryDir, 'lib'),
|
||||
};
|
||||
|
||||
const description = lsTool.getDescription(params);
|
||||
expect(description).toBe(path.join('..', 'other-project', 'lib'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('workspace boundary validation', () => {
|
||||
it('should accept paths in primary workspace directory', () => {
|
||||
const params = { path: `${mockPrimaryDir}/src` };
|
||||
expect(lsTool.validateToolParams(params)).toBeNull();
|
||||
});
|
||||
|
||||
it('should accept paths in secondary workspace directory', () => {
|
||||
const params = { path: `${mockSecondaryDir}/lib` };
|
||||
expect(lsTool.validateToolParams(params)).toBeNull();
|
||||
});
|
||||
|
||||
it('should reject paths outside all workspace directories', () => {
|
||||
const params = { path: '/etc/passwd' };
|
||||
const error = lsTool.validateToolParams(params);
|
||||
expect(error).toContain(
|
||||
'Path must be within one of the workspace directories',
|
||||
);
|
||||
expect(error).toContain(mockPrimaryDir);
|
||||
expect(error).toContain(mockSecondaryDir);
|
||||
});
|
||||
|
||||
it('should list files from secondary workspace directory', async () => {
|
||||
const testPath = `${mockSecondaryDir}/tests`;
|
||||
const mockFiles = ['test1.spec.ts', 'test2.spec.ts'];
|
||||
|
||||
vi.mocked(fs.statSync).mockImplementation((path: any) => {
|
||||
if (path.toString() === testPath) {
|
||||
return { isDirectory: () => true } as fs.Stats;
|
||||
}
|
||||
return {
|
||||
isDirectory: () => false,
|
||||
mtime: new Date(),
|
||||
size: 512,
|
||||
} as fs.Stats;
|
||||
});
|
||||
|
||||
vi.mocked(fs.readdirSync).mockReturnValue(mockFiles as any);
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toContain('test1.spec.ts');
|
||||
expect(result.llmContent).toContain('test2.spec.ts');
|
||||
expect(result.returnDisplay).toBe('Listed 2 item(s).');
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -11,7 +11,6 @@ import { Type } from '@google/genai';
import { SchemaValidator } from '../utils/schemaValidator.js';
import { makeRelative, shortenPath } from '../utils/paths.js';
import { Config, DEFAULT_FILE_FILTERING_OPTIONS } from '../config/config.js';
import { isWithinRoot } from '../utils/fileUtils.js';

/**
 * Parameters for the LS tool
@@ -129,8 +128,11 @@ export class LSTool extends BaseTool<LSToolParams, ToolResult> {
    if (!path.isAbsolute(params.path)) {
      return `Path must be absolute: ${params.path}`;
    }
    if (!isWithinRoot(params.path, this.config.getTargetDir())) {
      return `Path must be within the root directory (${this.config.getTargetDir()}): ${params.path}`;

    const workspaceContext = this.config.getWorkspaceContext();
    if (!workspaceContext.isPathWithinWorkspace(params.path)) {
      const directories = workspaceContext.getDirectories();
      return `Path must be within one of the workspace directories: ${directories.join(', ')}`;
    }
    return null;
  }
|
||||
|
|
|
@ -12,6 +12,7 @@ import fs from 'fs';
|
|||
import fsp from 'fs/promises';
|
||||
import { Config } from '../config/config.js';
|
||||
import { FileDiscoveryService } from '../services/fileDiscoveryService.js';
|
||||
import { createMockWorkspaceContext } from '../test-utils/mockWorkspaceContext.js';
|
||||
|
||||
describe('ReadFileTool', () => {
|
||||
let tempRootDir: string;
|
||||
|
@ -27,6 +28,7 @@ describe('ReadFileTool', () => {
|
|||
const mockConfigInstance = {
|
||||
getFileService: () => new FileDiscoveryService(tempRootDir),
|
||||
getTargetDir: () => tempRootDir,
|
||||
getWorkspaceContext: () => createMockWorkspaceContext(tempRootDir),
|
||||
} as unknown as Config;
|
||||
tool = new ReadFileTool(mockConfigInstance);
|
||||
});
|
||||
|
@ -65,8 +67,9 @@ describe('ReadFileTool', () => {
|
|||
it('should return error for path outside root', () => {
|
||||
const outsidePath = path.resolve(os.tmpdir(), 'outside-root.txt');
|
||||
const params: ReadFileToolParams = { absolute_path: outsidePath };
|
||||
expect(tool.validateToolParams(params)).toMatch(
|
||||
/File path must be within the root directory/,
|
||||
const error = tool.validateToolParams(params);
|
||||
expect(error).toContain(
|
||||
'File path must be within one of the workspace directories',
|
||||
);
|
||||
});
|
||||
|
||||
|
@ -261,4 +264,36 @@ describe('ReadFileTool', () => {
|
|||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('workspace boundary validation', () => {
|
||||
it('should validate paths are within workspace root', () => {
|
||||
const params: ReadFileToolParams = {
|
||||
absolute_path: path.join(tempRootDir, 'file.txt'),
|
||||
};
|
||||
expect(tool.validateToolParams(params)).toBeNull();
|
||||
});
|
||||
|
||||
it('should reject paths outside workspace root', () => {
|
||||
const params: ReadFileToolParams = {
|
||||
absolute_path: '/etc/passwd',
|
||||
};
|
||||
const error = tool.validateToolParams(params);
|
||||
expect(error).toContain(
|
||||
'File path must be within one of the workspace directories',
|
||||
);
|
||||
expect(error).toContain(tempRootDir);
|
||||
});
|
||||
|
||||
it('should provide clear error message with workspace directories', () => {
|
||||
const outsidePath = path.join(os.tmpdir(), 'outside-workspace.txt');
|
||||
const params: ReadFileToolParams = {
|
||||
absolute_path: outsidePath,
|
||||
};
|
||||
const error = tool.validateToolParams(params);
|
||||
expect(error).toContain(
|
||||
'File path must be within one of the workspace directories',
|
||||
);
|
||||
expect(error).toContain(tempRootDir);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@@ -10,7 +10,6 @@ import { makeRelative, shortenPath } from '../utils/paths.js';
import { BaseTool, Icon, ToolLocation, ToolResult } from './tools.js';
import { Type } from '@google/genai';
import {
  isWithinRoot,
  processSingleFileContent,
  getSpecificMimeType,
} from '../utils/fileUtils.js';
@@ -86,8 +85,11 @@ export class ReadFileTool extends BaseTool<ReadFileToolParams, ToolResult> {
    if (!path.isAbsolute(filePath)) {
      return `File path must be absolute, but was relative: ${filePath}. You must provide an absolute path.`;
    }
    if (!isWithinRoot(filePath, this.config.getTargetDir())) {
      return `File path must be within the root directory (${this.config.getTargetDir()}): ${filePath}`;

    const workspaceContext = this.config.getWorkspaceContext();
    if (!workspaceContext.isPathWithinWorkspace(filePath)) {
      const directories = workspaceContext.getDirectories();
      return `File path must be within one of the workspace directories: ${directories.join(', ')}`;
    }
    if (params.offset !== undefined && params.offset < 0) {
      return 'Offset must be a non-negative number';
@@ -145,7 +147,7 @@ export class ReadFileTool extends BaseTool<ReadFileToolParams, ToolResult> {
    if (result.error) {
      return {
        llmContent: result.error, // The detailed error for LLM
        returnDisplay: result.returnDisplay, // User-friendly error
        returnDisplay: result.returnDisplay || 'Error reading file', // User-friendly error
      };
    }

@@ -163,8 +165,8 @@ export class ReadFileTool extends BaseTool<ReadFileToolParams, ToolResult> {
    );

    return {
      llmContent: result.llmContent,
      returnDisplay: result.returnDisplay,
      llmContent: result.llmContent || '',
      returnDisplay: result.returnDisplay || '',
    };
  }
}
|
||||
|
|
|
@ -13,6 +13,7 @@ import path from 'path';
|
|||
import fs from 'fs'; // Actual fs for setup
|
||||
import os from 'os';
|
||||
import { Config } from '../config/config.js';
|
||||
import { WorkspaceContext } from '../utils/workspaceContext.js';
|
||||
|
||||
vi.mock('mime-types', () => {
|
||||
const lookup = (filename: string) => {
|
||||
|
@ -48,11 +49,11 @@ describe('ReadManyFilesTool', () => {
|
|||
let mockReadFileFn: Mock;
|
||||
|
||||
beforeEach(async () => {
|
||||
tempRootDir = fs.mkdtempSync(
|
||||
path.join(os.tmpdir(), 'read-many-files-root-'),
|
||||
tempRootDir = fs.realpathSync(
|
||||
fs.mkdtempSync(path.join(os.tmpdir(), 'read-many-files-root-')),
|
||||
);
|
||||
tempDirOutsideRoot = fs.mkdtempSync(
|
||||
path.join(os.tmpdir(), 'read-many-files-external-'),
|
||||
tempDirOutsideRoot = fs.realpathSync(
|
||||
fs.mkdtempSync(path.join(os.tmpdir(), 'read-many-files-external-')),
|
||||
);
|
||||
fs.writeFileSync(path.join(tempRootDir, '.geminiignore'), 'foo.*');
|
||||
const fileService = new FileDiscoveryService(tempRootDir);
|
||||
|
@ -64,6 +65,8 @@ describe('ReadManyFilesTool', () => {
|
|||
respectGeminiIgnore: true,
|
||||
}),
|
||||
getTargetDir: () => tempRootDir,
|
||||
getWorkspaceDirs: () => [tempRootDir],
|
||||
getWorkspaceContext: () => new WorkspaceContext(tempRootDir),
|
||||
} as Partial<Config> as Config;
|
||||
tool = new ReadManyFilesTool(mockConfig);
|
||||
|
||||
|
@ -424,5 +427,54 @@ describe('ReadManyFilesTool', () => {
|
|||
expect(result.returnDisplay).not.toContain('foo.quux');
|
||||
expect(result.returnDisplay).toContain('bar.ts');
|
||||
});
|
||||
|
||||
it('should read files from multiple workspace directories', async () => {
|
||||
const tempDir1 = fs.realpathSync(
|
||||
fs.mkdtempSync(path.join(os.tmpdir(), 'multi-dir-1-')),
|
||||
);
|
||||
const tempDir2 = fs.realpathSync(
|
||||
fs.mkdtempSync(path.join(os.tmpdir(), 'multi-dir-2-')),
|
||||
);
|
||||
const fileService = new FileDiscoveryService(tempDir1);
|
||||
const mockConfig = {
|
||||
getFileService: () => fileService,
|
||||
getFileFilteringOptions: () => ({
|
||||
respectGitIgnore: true,
|
||||
respectGeminiIgnore: true,
|
||||
}),
|
||||
getWorkspaceContext: () => new WorkspaceContext(tempDir1, [tempDir2]),
|
||||
getTargetDir: () => tempDir1,
|
||||
} as Partial<Config> as Config;
|
||||
tool = new ReadManyFilesTool(mockConfig);
|
||||
|
||||
fs.writeFileSync(path.join(tempDir1, 'file1.txt'), 'Content1');
|
||||
fs.writeFileSync(path.join(tempDir2, 'file2.txt'), 'Content2');
|
||||
|
||||
const params = { paths: ['*.txt'] };
|
||||
const result = await tool.execute(params, new AbortController().signal);
|
||||
const content = result.llmContent as string[];
|
||||
if (!Array.isArray(content)) {
|
||||
throw new Error(`llmContent is not an array: ${content}`);
|
||||
}
|
||||
const expectedPath1 = path.join(tempDir1, 'file1.txt');
|
||||
const expectedPath2 = path.join(tempDir2, 'file2.txt');
|
||||
|
||||
expect(
|
||||
content.some((c) =>
|
||||
c.includes(`--- ${expectedPath1} ---\n\nContent1\n\n`),
|
||||
),
|
||||
).toBe(true);
|
||||
expect(
|
||||
content.some((c) =>
|
||||
c.includes(`--- ${expectedPath2} ---\n\nContent2\n\n`),
|
||||
),
|
||||
).toBe(true);
|
||||
expect(result.returnDisplay).toContain(
|
||||
'Successfully read and concatenated content from **2 file(s)**',
|
||||
);
|
||||
|
||||
fs.rmSync(tempDir1, { recursive: true, force: true });
|
||||
fs.rmSync(tempDir2, { recursive: true, force: true });
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -302,18 +302,27 @@ Use this tool when the user's query implies needing the content of several files
|
|||
}
|
||||
|
||||
try {
|
||||
const entries = await glob(
|
||||
searchPatterns.map((p) => p.replace(/\\/g, '/')),
|
||||
{
|
||||
cwd: this.config.getTargetDir(),
|
||||
ignore: effectiveExcludes,
|
||||
nodir: true,
|
||||
dot: true,
|
||||
absolute: true,
|
||||
nocase: true,
|
||||
signal,
|
||||
},
|
||||
);
|
||||
const allEntries = new Set<string>();
|
||||
const workspaceDirs = this.config.getWorkspaceContext().getDirectories();
|
||||
|
||||
for (const dir of workspaceDirs) {
|
||||
const entriesInDir = await glob(
|
||||
searchPatterns.map((p) => p.replace(/\\/g, '/')),
|
||||
{
|
||||
cwd: dir,
|
||||
ignore: effectiveExcludes,
|
||||
nodir: true,
|
||||
dot: true,
|
||||
absolute: true,
|
||||
nocase: true,
|
||||
signal,
|
||||
},
|
||||
);
|
||||
for (const entry of entriesInDir) {
|
||||
allEntries.add(entry);
|
||||
}
|
||||
}
|
||||
const entries = Array.from(allEntries);
|
||||
|
||||
const gitFilteredEntries = fileFilteringOptions.respectGitIgnore
|
||||
? fileDiscovery
|
||||
|
@ -346,11 +355,15 @@ Use this tool when the user's query implies needing the content of several files
|
|||
let geminiIgnoredCount = 0;
|
||||
|
||||
for (const absoluteFilePath of entries) {
|
||||
// Security check: ensure the glob library didn't return something outside targetDir.
|
||||
if (!absoluteFilePath.startsWith(this.config.getTargetDir())) {
|
||||
// Security check: ensure the glob library didn't return something outside the workspace.
|
||||
if (
|
||||
!this.config
|
||||
.getWorkspaceContext()
|
||||
.isPathWithinWorkspace(absoluteFilePath)
|
||||
) {
|
||||
skippedFiles.push({
|
||||
path: absoluteFilePath,
|
||||
reason: `Security: Glob library returned path outside target directory. Base: ${this.config.getTargetDir()}, Path: ${absoluteFilePath}`,
|
||||
reason: `Security: Glob library returned path outside workspace. Path: ${absoluteFilePath}`,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
|
|
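Because ReadManyFilesTool now globs with `absolute: true` once per workspace directory, the same file could in principle be matched from two overlapping roots; collecting results in a Set, as the hunk above does, keeps each absolute path once. A small self-contained illustration of that pattern, with an injected stand-in for the glob call:

```ts
// Illustrative only: gather absolute matches from several roots without duplicates.
async function globAcrossDirs(
  runGlob: (cwd: string) => Promise<string[]>, // stand-in for the glob call in the diff
  workspaceDirs: readonly string[],
): Promise<string[]> {
  const allEntries = new Set<string>();
  for (const dir of workspaceDirs) {
    for (const entry of await runGlob(dir)) {
      allEntries.add(entry);
    }
  }
  return Array.from(allEntries);
}
```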
@ -37,6 +37,7 @@ import * as crypto from 'crypto';
|
|||
import * as summarizer from '../utils/summarizer.js';
|
||||
import { ToolConfirmationOutcome } from './tools.js';
|
||||
import { OUTPUT_UPDATE_INTERVAL_MS } from './shell.js';
|
||||
import { createMockWorkspaceContext } from '../test-utils/mockWorkspaceContext.js';
|
||||
|
||||
describe('ShellTool', () => {
|
||||
let shellTool: ShellTool;
|
||||
|
@ -53,6 +54,7 @@ describe('ShellTool', () => {
|
|||
getDebugMode: vi.fn().mockReturnValue(false),
|
||||
getTargetDir: vi.fn().mockReturnValue('/test/dir'),
|
||||
getSummarizeToolOutputConfig: vi.fn().mockReturnValue(undefined),
|
||||
getWorkspaceContext: () => createMockWorkspaceContext('.'),
|
||||
getGeminiClient: vi.fn(),
|
||||
} as unknown as Config;
|
||||
|
||||
|
@ -105,7 +107,7 @@ describe('ShellTool', () => {
|
|||
vi.mocked(fs.existsSync).mockReturnValue(false);
|
||||
expect(
|
||||
shellTool.validateToolParams({ command: 'ls', directory: 'rel/path' }),
|
||||
).toBe('Directory must exist.');
|
||||
).toBe("Directory 'rel/path' is not a registered workspace directory.");
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -385,3 +387,37 @@ describe('ShellTool', () => {
|
|||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('validateToolParams', () => {
|
||||
it('should return null for valid directory', () => {
|
||||
const config = {
|
||||
getCoreTools: () => undefined,
|
||||
getExcludeTools: () => undefined,
|
||||
getTargetDir: () => '/root',
|
||||
getWorkspaceContext: () =>
|
||||
createMockWorkspaceContext('/root', ['/users/test']),
|
||||
} as unknown as Config;
|
||||
const shellTool = new ShellTool(config);
|
||||
const result = shellTool.validateToolParams({
|
||||
command: 'ls',
|
||||
directory: 'test',
|
||||
});
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('should return error for directory outside workspace', () => {
|
||||
const config = {
|
||||
getCoreTools: () => undefined,
|
||||
getExcludeTools: () => undefined,
|
||||
getTargetDir: () => '/root',
|
||||
getWorkspaceContext: () =>
|
||||
createMockWorkspaceContext('/root', ['/users/test']),
|
||||
} as unknown as Config;
|
||||
const shellTool = new ShellTool(config);
|
||||
const result = shellTool.validateToolParams({
|
||||
command: 'ls',
|
||||
directory: 'test2',
|
||||
});
|
||||
expect(result).toContain('is not a registered workspace directory');
|
||||
});
|
||||
});
|
||||
|
|
|
@@ -124,14 +124,19 @@ export class ShellTool extends BaseTool<ShellToolParams, ToolResult> {
    }
    if (params.directory) {
      if (path.isAbsolute(params.directory)) {
        return 'Directory cannot be absolute. Must be relative to the project root directory.';
        return 'Directory cannot be absolute. Please refer to workspace directories by their name.';
      }
      const directory = path.resolve(
        this.config.getTargetDir(),
        params.directory,
      const workspaceDirs = this.config.getWorkspaceContext().getDirectories();
      const matchingDirs = workspaceDirs.filter(
        (dir) => path.basename(dir) === params.directory,
      );
      if (!fs.existsSync(directory)) {
        return 'Directory must exist.';

      if (matchingDirs.length === 0) {
        return `Directory '${params.directory}' is not a registered workspace directory.`;
      }

      if (matchingDirs.length > 1) {
        return `Directory name '${params.directory}' is ambiguous as it matches multiple workspace directories.`;
      }
    }
    return null;
|
||||
|
|
|
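The ShellTool validation above only checks that `params.directory` names exactly one registered workspace directory by basename; the execution side still has to map that name back to an absolute path. A hedged sketch of how that lookup could work is below; the function name and its use are assumptions, not part of this excerpt.

```ts
import * as path from 'path';

// Hypothetical helper: resolve a validated workspace-directory name to its absolute path.
// Assumes validation has already rejected missing or ambiguous names.
function resolveWorkspaceDirectory(
  workspaceDirs: readonly string[],
  directoryName: string,
): string {
  const matches = workspaceDirs.filter(
    (dir) => path.basename(dir) === directoryName,
  );
  if (matches.length !== 1) {
    throw new Error(`Directory '${directoryName}' is not a unique workspace directory.`);
  }
  return matches[0];
}

// Example: with workspaces ['/root', '/users/test'], 'test' resolves to '/users/test'.
```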
@ -30,6 +30,9 @@ import {
|
|||
Schema,
|
||||
} from '@google/genai';
|
||||
import { spawn } from 'node:child_process';
|
||||
import fs from 'node:fs';
|
||||
|
||||
vi.mock('node:fs');
|
||||
|
||||
// Use vi.hoisted to define the mock function so it can be used in the vi.mock factory
|
||||
const mockDiscoverMcpTools = vi.hoisted(() => vi.fn());
|
||||
|
@ -144,6 +147,10 @@ describe('ToolRegistry', () => {
|
|||
let mockConfigGetToolDiscoveryCommand: ReturnType<typeof vi.spyOn>;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.statSync).mockReturnValue({
|
||||
isDirectory: () => true,
|
||||
} as fs.Stats);
|
||||
config = new Config(baseConfigParams);
|
||||
toolRegistry = new ToolRegistry(config);
|
||||
vi.spyOn(console, 'warn').mockImplementation(() => {});
|
||||
|
|
|
@ -31,6 +31,7 @@ import {
|
|||
ensureCorrectFileContent,
|
||||
CorrectedEditResult,
|
||||
} from '../utils/editCorrector.js';
|
||||
import { createMockWorkspaceContext } from '../test-utils/mockWorkspaceContext.js';
|
||||
|
||||
const rootDir = path.resolve(os.tmpdir(), 'gemini-cli-test-root');
|
||||
|
||||
|
@ -54,6 +55,7 @@ const mockConfigInternal = {
|
|||
getApprovalMode: vi.fn(() => ApprovalMode.DEFAULT),
|
||||
setApprovalMode: vi.fn(),
|
||||
getGeminiClient: vi.fn(), // Initialize as a plain mock function
|
||||
getWorkspaceContext: () => createMockWorkspaceContext(rootDir),
|
||||
getApiKey: () => 'test-key',
|
||||
getModel: () => 'test-model',
|
||||
getSandbox: () => false,
|
||||
|
@ -83,6 +85,7 @@ describe('WriteFileTool', () => {
|
|||
let tempDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
// Create a unique temporary directory for files created outside the root
|
||||
tempDir = fs.mkdtempSync(
|
||||
path.join(os.tmpdir(), 'write-file-test-external-'),
|
||||
|
@ -98,6 +101,11 @@ describe('WriteFileTool', () => {
|
|||
) as Mocked<GeminiClient>;
|
||||
vi.mocked(GeminiClient).mockImplementation(() => mockGeminiClientInstance);
|
||||
|
||||
vi.mocked(ensureCorrectEdit).mockImplementation(mockEnsureCorrectEdit);
|
||||
vi.mocked(ensureCorrectFileContent).mockImplementation(
|
||||
mockEnsureCorrectFileContent,
|
||||
);
|
||||
|
||||
// Now that mockGeminiClientInstance is initialized, set the mock implementation for getGeminiClient
|
||||
mockConfigInternal.getGeminiClient.mockReturnValue(
|
||||
mockGeminiClientInstance,
|
||||
|
@ -177,8 +185,9 @@ describe('WriteFileTool', () => {
|
|||
file_path: outsidePath,
|
||||
content: 'hello',
|
||||
};
|
||||
expect(tool.validateToolParams(params)).toMatch(
|
||||
/File path must be within the root directory/,
|
||||
const error = tool.validateToolParams(params);
|
||||
expect(error).toContain(
|
||||
'File path must be within one of the workspace directories',
|
||||
);
|
||||
});
|
||||
|
||||
|
@ -427,8 +436,8 @@ describe('WriteFileTool', () => {
|
|||
const params = { file_path: outsidePath, content: 'test' };
|
||||
const result = await tool.execute(params, abortSignal);
|
||||
expect(result.llmContent).toMatch(/Error: Invalid parameters provided/);
|
||||
expect(result.returnDisplay).toMatch(
|
||||
/Error: File path must be within the root directory/,
|
||||
expect(result.returnDisplay).toContain(
|
||||
'Error: File path must be within one of the workspace directories',
|
||||
);
|
||||
});
|
||||
|
||||
|
@ -616,4 +625,39 @@ describe('WriteFileTool', () => {
|
|||
expect(result.llmContent).not.toMatch(/User modified the `content`/);
|
||||
});
|
||||
});
|
||||
|
||||
describe('workspace boundary validation', () => {
|
||||
it('should validate paths are within workspace root', () => {
|
||||
const params = {
|
||||
file_path: path.join(rootDir, 'file.txt'),
|
||||
content: 'test content',
|
||||
};
|
||||
expect(tool.validateToolParams(params)).toBeNull();
|
||||
});
|
||||
|
||||
it('should reject paths outside workspace root', () => {
|
||||
const params = {
|
||||
file_path: '/etc/passwd',
|
||||
content: 'malicious',
|
||||
};
|
||||
const error = tool.validateToolParams(params);
|
||||
expect(error).toContain(
|
||||
'File path must be within one of the workspace directories',
|
||||
);
|
||||
expect(error).toContain(rootDir);
|
||||
});
|
||||
|
||||
it('should provide clear error message with workspace directories', () => {
|
||||
const outsidePath = path.join(tempDir, 'outside-root.txt');
|
||||
const params = {
|
||||
file_path: outsidePath,
|
||||
content: 'test',
|
||||
};
|
||||
const error = tool.validateToolParams(params);
|
||||
expect(error).toContain(
|
||||
'File path must be within one of the workspace directories',
|
||||
);
|
||||
expect(error).toContain(rootDir);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@@ -27,7 +27,7 @@ import {
} from '../utils/editCorrector.js';
import { DEFAULT_DIFF_OPTIONS } from './diffOptions.js';
import { ModifiableTool, ModifyContext } from './modifiable-tool.js';
import { getSpecificMimeType, isWithinRoot } from '../utils/fileUtils.js';
import { getSpecificMimeType } from '../utils/fileUtils.js';
import {
  recordFileOperationMetric,
  FileOperation,
@@ -105,8 +105,11 @@ export class WriteFileTool
    if (!path.isAbsolute(filePath)) {
      return `File path must be absolute: ${filePath}`;
    }
    if (!isWithinRoot(filePath, this.config.getTargetDir())) {
      return `File path must be within the root directory (${this.config.getTargetDir()}): ${filePath}`;

    const workspaceContext = this.config.getWorkspaceContext();
    if (!workspaceContext.isPathWithinWorkspace(filePath)) {
      const directories = workspaceContext.getDirectories();
      return `File path must be within one of the workspace directories: ${directories.join(', ')}`;
    }

    try {
|
||||
|
|
|
@ -6,6 +6,7 @@
|
|||
|
||||
import { describe, it, expect, beforeEach, vi } from 'vitest';
|
||||
import { Config } from '../config/config.js';
|
||||
import fs from 'node:fs';
|
||||
import {
|
||||
setSimulate429,
|
||||
disableSimulationAfterFallback,
|
||||
|
@ -17,10 +18,16 @@ import { DEFAULT_GEMINI_FLASH_MODEL } from '../config/models.js';
|
|||
import { retryWithBackoff } from './retry.js';
|
||||
import { AuthType } from '../core/contentGenerator.js';
|
||||
|
||||
vi.mock('node:fs');
|
||||
|
||||
describe('Flash Fallback Integration', () => {
|
||||
let config: Config;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.statSync).mockReturnValue({
|
||||
isDirectory: () => true,
|
||||
} as fs.Stats);
|
||||
config = new Config({
|
||||
sessionId: 'test-session',
|
||||
targetDir: '/test',
|
||||
|
|
|
@ -0,0 +1,283 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach, vi } from 'vitest';
import * as fs from 'fs';
import * as path from 'path';
import { WorkspaceContext } from './workspaceContext.js';

vi.mock('fs');

describe('WorkspaceContext', () => {
  let workspaceContext: WorkspaceContext;
  // Use path module to create platform-agnostic paths
  const mockCwd = path.resolve(path.sep, 'home', 'user', 'project');
  const mockExistingDir = path.resolve(
    path.sep,
    'home',
    'user',
    'other-project',
  );
  const mockNonExistentDir = path.resolve(
    path.sep,
    'home',
    'user',
    'does-not-exist',
  );
  const mockSymlinkDir = path.resolve(path.sep, 'home', 'user', 'symlink');
  const mockRealPath = path.resolve(path.sep, 'home', 'user', 'real-directory');

  beforeEach(() => {
    vi.resetAllMocks();

    // Mock fs.existsSync
    vi.mocked(fs.existsSync).mockImplementation((path) => {
      const pathStr = path.toString();
      return (
        pathStr === mockCwd ||
        pathStr === mockExistingDir ||
        pathStr === mockSymlinkDir ||
        pathStr === mockRealPath
      );
    });

    // Mock fs.statSync
    vi.mocked(fs.statSync).mockImplementation((path) => {
      const pathStr = path.toString();
      if (pathStr === mockNonExistentDir) {
        throw new Error('ENOENT');
      }
      return {
        isDirectory: () => true,
      } as fs.Stats;
    });

    // Mock fs.realpathSync
    vi.mocked(fs.realpathSync).mockImplementation((path) => {
      const pathStr = path.toString();
      if (pathStr === mockSymlinkDir) {
        return mockRealPath;
      }
      return pathStr;
    });
  });

  describe('initialization', () => {
    it('should initialize with a single directory (cwd)', () => {
      workspaceContext = new WorkspaceContext(mockCwd);
      const directories = workspaceContext.getDirectories();
      expect(directories).toHaveLength(1);
      expect(directories[0]).toBe(mockCwd);
    });

    it('should validate and resolve directories to absolute paths', () => {
      const absolutePath = path.join(mockCwd, 'subdir');
      vi.mocked(fs.existsSync).mockImplementation(
        (p) => p === mockCwd || p === absolutePath,
      );
      vi.mocked(fs.realpathSync).mockImplementation((p) => p.toString());

      workspaceContext = new WorkspaceContext(mockCwd, [absolutePath]);
      const directories = workspaceContext.getDirectories();
      expect(directories).toContain(absolutePath);
    });

    it('should reject non-existent directories', () => {
      expect(() => {
        new WorkspaceContext(mockCwd, [mockNonExistentDir]);
      }).toThrow('Directory does not exist');
    });

    it('should handle empty initialization', () => {
      workspaceContext = new WorkspaceContext(mockCwd, []);
      const directories = workspaceContext.getDirectories();
      expect(directories).toHaveLength(1);
      expect(directories[0]).toBe(mockCwd);
    });
  });

  describe('adding directories', () => {
    beforeEach(() => {
      workspaceContext = new WorkspaceContext(mockCwd);
    });

    it('should add valid directories', () => {
      workspaceContext.addDirectory(mockExistingDir);
      const directories = workspaceContext.getDirectories();
      expect(directories).toHaveLength(2);
      expect(directories).toContain(mockExistingDir);
    });

    it('should resolve relative paths to absolute', () => {
      // Since we can't mock path.resolve, we'll test with absolute paths
      workspaceContext.addDirectory(mockExistingDir);
      const directories = workspaceContext.getDirectories();
      expect(directories).toContain(mockExistingDir);
    });

    it('should reject non-existent directories', () => {
      expect(() => {
        workspaceContext.addDirectory(mockNonExistentDir);
      }).toThrow('Directory does not exist');
    });

    it('should prevent duplicate directories', () => {
      workspaceContext.addDirectory(mockExistingDir);
      workspaceContext.addDirectory(mockExistingDir);
      const directories = workspaceContext.getDirectories();
      expect(directories.filter((d) => d === mockExistingDir)).toHaveLength(1);
    });

    it('should handle symbolic links correctly', () => {
      workspaceContext.addDirectory(mockSymlinkDir);
      const directories = workspaceContext.getDirectories();
      expect(directories).toContain(mockRealPath);
      expect(directories).not.toContain(mockSymlinkDir);
    });
  });

  describe('path validation', () => {
    beforeEach(() => {
      workspaceContext = new WorkspaceContext(mockCwd, [mockExistingDir]);
    });

    it('should accept paths within workspace directories', () => {
      const validPath1 = path.join(mockCwd, 'src', 'file.ts');
      const validPath2 = path.join(mockExistingDir, 'lib', 'module.js');

      expect(workspaceContext.isPathWithinWorkspace(validPath1)).toBe(true);
      expect(workspaceContext.isPathWithinWorkspace(validPath2)).toBe(true);
    });

    it('should reject paths outside workspace', () => {
      const invalidPath = path.resolve(
        path.dirname(mockCwd),
        'outside-workspace',
        'file.txt',
      );
      expect(workspaceContext.isPathWithinWorkspace(invalidPath)).toBe(false);
    });

    it('should resolve symbolic links before validation', () => {
      const symlinkPath = path.join(mockCwd, 'symlink-file');
      const realPath = path.join(mockCwd, 'real-file');

      vi.mocked(fs.existsSync).mockReturnValue(true);
      vi.mocked(fs.realpathSync).mockImplementation((p) => {
        if (p === symlinkPath) {
          return realPath;
        }
        return p.toString();
      });

      expect(workspaceContext.isPathWithinWorkspace(symlinkPath)).toBe(true);
    });

    it('should handle nested directories correctly', () => {
      const nestedPath = path.join(
        mockCwd,
        'deeply',
        'nested',
        'path',
        'file.txt',
      );
      expect(workspaceContext.isPathWithinWorkspace(nestedPath)).toBe(true);
    });

    it('should handle edge cases (root, parent references)', () => {
      const rootPath = '/';
      const parentPath = path.dirname(mockCwd);

      expect(workspaceContext.isPathWithinWorkspace(rootPath)).toBe(false);
      expect(workspaceContext.isPathWithinWorkspace(parentPath)).toBe(false);
    });

    it('should handle non-existent paths correctly', () => {
      const nonExistentPath = path.join(mockCwd, 'does-not-exist.txt');
      vi.mocked(fs.existsSync).mockImplementation((p) => p !== nonExistentPath);

      // Should still validate based on path structure
      expect(workspaceContext.isPathWithinWorkspace(nonExistentPath)).toBe(
        true,
      );
    });
  });

  describe('getDirectories', () => {
    it('should return a copy of directories array', () => {
      workspaceContext = new WorkspaceContext(mockCwd);
      const dirs1 = workspaceContext.getDirectories();
      const dirs2 = workspaceContext.getDirectories();

      expect(dirs1).not.toBe(dirs2); // Different array instances
      expect(dirs1).toEqual(dirs2); // Same content
    });
  });

  describe('symbolic link security', () => {
    beforeEach(() => {
      workspaceContext = new WorkspaceContext(mockCwd);
    });

    it('should follow symlinks but validate resolved path', () => {
      const symlinkInsideWorkspace = path.join(mockCwd, 'link-to-subdir');
      const resolvedInsideWorkspace = path.join(mockCwd, 'subdir');

      vi.mocked(fs.existsSync).mockReturnValue(true);
      vi.mocked(fs.realpathSync).mockImplementation((p) => {
        if (p === symlinkInsideWorkspace) {
          return resolvedInsideWorkspace;
        }
        return p.toString();
      });

      expect(
        workspaceContext.isPathWithinWorkspace(symlinkInsideWorkspace),
      ).toBe(true);
    });

    it('should prevent sandbox escape via symlinks', () => {
      const symlinkEscape = path.join(mockCwd, 'escape-link');
      const resolvedOutside = path.resolve(mockCwd, '..', 'outside-file');

      vi.mocked(fs.existsSync).mockImplementation((p) => {
        const pathStr = p.toString();
        return (
          pathStr === symlinkEscape ||
          pathStr === resolvedOutside ||
          pathStr === mockCwd
        );
      });
      vi.mocked(fs.realpathSync).mockImplementation((p) => {
        if (p.toString() === symlinkEscape) {
          return resolvedOutside;
        }
        return p.toString();
      });
      vi.mocked(fs.statSync).mockImplementation(
        (p) =>
          ({
            isDirectory: () => p.toString() !== resolvedOutside,
          }) as fs.Stats,
      );

      workspaceContext = new WorkspaceContext(mockCwd);
      expect(workspaceContext.isPathWithinWorkspace(symlinkEscape)).toBe(false);
    });

    it('should handle circular symlinks', () => {
      const circularLink = path.join(mockCwd, 'circular');

      vi.mocked(fs.existsSync).mockReturnValue(true);
      vi.mocked(fs.realpathSync).mockImplementation(() => {
        throw new Error('ELOOP: too many symbolic links encountered');
      });

      // Should handle the error gracefully
      expect(workspaceContext.isPathWithinWorkspace(circularLink)).toBe(false);
    });
  });
});
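
To make the "prevent sandbox escape via symlinks" expectation above concrete, here is a minimal, standalone sketch of the relative-path check it ultimately relies on. The paths are illustrative and POSIX-style, and the variable names are not part of the implementation; the real logic lives in the WorkspaceContext class introduced below.

import * as path from 'path';

// Workspace root and the target a malicious symlink resolves to (illustrative values).
const workspaceRoot = '/home/user/project';
const symlinkTargetOutside = '/home/user/outside-file'; // what fs.realpathSync would return

// A '..'-prefixed relative path means the resolved target escapes the root,
// so validation rejects the symlinked path.
const rel = path.relative(workspaceRoot, symlinkTargetOutside);
console.log(rel); // '../outside-file' -> rejected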

@@ -0,0 +1,127 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import * as fs from 'fs';
import * as path from 'path';

/**
 * WorkspaceContext manages multiple workspace directories and validates paths
 * against them. This allows the CLI to operate on files from multiple directories
 * in a single session.
 */
export class WorkspaceContext {
  private directories: Set<string>;

  /**
   * Creates a new WorkspaceContext with the given initial directory and optional additional directories.
   * @param initialDirectory The initial working directory (usually cwd)
   * @param additionalDirectories Optional array of additional directories to include
   */
  constructor(initialDirectory: string, additionalDirectories: string[] = []) {
    this.directories = new Set<string>();

    this.addDirectoryInternal(initialDirectory);

    for (const dir of additionalDirectories) {
      this.addDirectoryInternal(dir);
    }
  }

  /**
   * Adds a directory to the workspace.
   * @param directory The directory path to add (can be relative or absolute)
   * @param basePath Optional base path for resolving relative paths (defaults to cwd)
   */
  addDirectory(directory: string, basePath: string = process.cwd()): void {
    this.addDirectoryInternal(directory, basePath);
  }

  /**
   * Internal method to add a directory with validation.
   */
  private addDirectoryInternal(
    directory: string,
    basePath: string = process.cwd(),
  ): void {
    const absolutePath = path.isAbsolute(directory)
      ? directory
      : path.resolve(basePath, directory);

    if (!fs.existsSync(absolutePath)) {
      throw new Error(`Directory does not exist: ${absolutePath}`);
    }

    const stats = fs.statSync(absolutePath);
    if (!stats.isDirectory()) {
      throw new Error(`Path is not a directory: ${absolutePath}`);
    }

    let realPath: string;
    try {
      realPath = fs.realpathSync(absolutePath);
    } catch (_error) {
      throw new Error(`Failed to resolve path: ${absolutePath}`);
    }

    this.directories.add(realPath);
  }

  /**
   * Gets a copy of all workspace directories.
   * @returns Array of absolute directory paths
   */
  getDirectories(): readonly string[] {
    return Array.from(this.directories);
  }

  /**
   * Checks if a given path is within any of the workspace directories.
   * @param pathToCheck The path to validate
   * @returns True if the path is within the workspace, false otherwise
   */
  isPathWithinWorkspace(pathToCheck: string): boolean {
    try {
      const absolutePath = path.resolve(pathToCheck);

      let resolvedPath = absolutePath;
      if (fs.existsSync(absolutePath)) {
        try {
          resolvedPath = fs.realpathSync(absolutePath);
        } catch (_error) {
          return false;
        }
      }

      for (const dir of this.directories) {
        if (this.isPathWithinRoot(resolvedPath, dir)) {
          return true;
        }
      }

      return false;
    } catch (_error) {
      return false;
    }
  }

  /**
   * Checks if a path is within a given root directory.
   * @param pathToCheck The absolute path to check
   * @param rootDirectory The absolute root directory
   * @returns True if the path is within the root directory, false otherwise
   */
  private isPathWithinRoot(
    pathToCheck: string,
    rootDirectory: string,
  ): boolean {
    const relative = path.relative(rootDirectory, pathToCheck);
    return (
      !relative.startsWith(`..${path.sep}`) &&
      relative !== '..' &&
      !path.isAbsolute(relative)
    );
  }
}
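
As a hedged usage sketch of how this class ties back to the new `--include-directories` flag: the constructor takes the current working directory plus the extra directories, and callers gate file access with `isPathWithinWorkspace`. The import specifier and the literal paths below are assumptions for illustration; only the API itself comes from the file above.

import { WorkspaceContext } from './workspaceContext.js';

// e.g. parsed from: --include-directories /path/to/project1,/path/to/project2
const includeDirectories = ['/path/to/project1', '/path/to/project2'];

// cwd is always the first workspace root; extra roots are validated and de-duplicated.
const workspace = new WorkspaceContext(process.cwd(), includeDirectories);
console.log(workspace.getDirectories()); // cwd plus the two extra roots, symlinks resolved

// Reject operations on files outside every workspace root.
const target = '/path/to/project1/src/index.ts';
if (!workspace.isPathWithinWorkspace(target)) {
  throw new Error(`Refusing to touch a file outside the workspace: ${target}`);
}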