Fix: Prevent hang in large directories by using BFS for getFolderStructure (#470)

Co-authored-by: N. Taylor Mullen <ntaylormullen@google.com>
Allen Hutchison committed 2025-05-22 10:47:21 -07:00 (committed by GitHub)
commit 0c192555bb · parent 7eaf850489
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
5 changed files with 500 additions and 252 deletions
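
The core idea behind both changes, as a minimal standalone sketch (plain Node.js; walkDirsBreadthFirst and its parameters are illustrative names for this note, not code from the commit): an explicit FIFO queue plus a hard cap bounds the number of directories visited no matter how large or deep the tree is, and breadth-first order records the shallowest, usually most relevant, directories before the cap is hit.

// Sketch only: a capped breadth-first directory walk cannot hang on a huge
// tree, because it does a bounded amount of work and never recurses.
import * as fs from 'fs/promises';
import * as path from 'path';

async function walkDirsBreadthFirst(
  root: string,
  maxDirs: number, // hard cap on directories visited (assumed parameter)
): Promise<string[]> {
  const visited: string[] = [];
  const queue: string[] = [root];
  while (queue.length > 0 && visited.length < maxDirs) {
    const dir = queue.shift()!; // FIFO: shallow directories come out first
    visited.push(dir);
    let entries;
    try {
      entries = await fs.readdir(dir, { withFileTypes: true });
    } catch {
      continue; // unreadable directory: skip it rather than abort the walk
    }
    for (const entry of entries) {
      if (entry.isDirectory()) queue.push(path.join(dir, entry.name));
    }
  }
  return visited; // at most maxDirs entries, in breadth-first order
}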

View File

@@ -130,8 +130,22 @@ async function collectDownwardGeminiFiles(
   directory: string,
   debugMode: boolean,
   ignoreDirs: string[],
+  scannedDirCount: { count: number },
+  maxScanDirs: number,
 ): Promise<string[]> {
-  if (debugMode) logger.debug(`Recursively scanning downward in: ${directory}`);
+  if (scannedDirCount.count >= maxScanDirs) {
+    if (debugMode)
+      logger.debug(
+        `Max directory scan limit (${maxScanDirs}) reached. Stopping downward scan at: ${directory}`,
+      );
+    return [];
+  }
+  scannedDirCount.count++;
+
+  if (debugMode)
+    logger.debug(
+      `Scanning downward for ${GEMINI_MD_FILENAME} files in: ${directory} (scanned: ${scannedDirCount.count}/${maxScanDirs})`,
+    );
   const collectedPaths: string[] = [];
   try {
     const entries = await fs.readdir(directory, { withFileTypes: true });
@@ -147,6 +161,8 @@ async function collectDownwardGeminiFiles(
           fullPath,
           debugMode,
           ignoreDirs,
+          scannedDirCount,
+          maxScanDirs,
         );
         collectedPaths.push(...subDirPaths);
       } else if (entry.isFile() && entry.name === GEMINI_MD_FILENAME) {
@@ -154,11 +170,13 @@ async function collectDownwardGeminiFiles(
           await fs.access(fullPath, fsSync.constants.R_OK);
           collectedPaths.push(fullPath);
           if (debugMode)
-            logger.debug(`Found readable downward GEMINI.md: ${fullPath}`);
+            logger.debug(
+              `Found readable downward ${GEMINI_MD_FILENAME}: ${fullPath}`,
+            );
         } catch {
           if (debugMode)
             logger.debug(
-              `Downward GEMINI.md not readable, skipping: ${fullPath}`,
+              `Downward ${GEMINI_MD_FILENAME} not readable, skipping: ${fullPath}`,
             );
         }
       }
@@ -186,18 +204,22 @@ export async function getGeminiMdFilePaths(
   const paths: string[] = [];
   if (debugMode)
-    logger.debug(`Searching for GEMINI.md starting from CWD: ${resolvedCwd}`);
+    logger.debug(
+      `Searching for ${GEMINI_MD_FILENAME} starting from CWD: ${resolvedCwd}`,
+    );
   if (debugMode) logger.debug(`User home directory: ${resolvedHome}`);

   try {
     await fs.access(globalMemoryPath, fsSync.constants.R_OK);
     paths.push(globalMemoryPath);
     if (debugMode)
-      logger.debug(`Found readable global GEMINI.md: ${globalMemoryPath}`);
+      logger.debug(
+        `Found readable global ${GEMINI_MD_FILENAME}: ${globalMemoryPath}`,
+      );
   } catch {
     if (debugMode)
       logger.debug(
-        `Global GEMINI.md not found or not readable: ${globalMemoryPath}`,
+        `Global ${GEMINI_MD_FILENAME} not found or not readable: ${globalMemoryPath}`,
       );
   }
@@ -215,7 +237,9 @@ export async function getGeminiMdFilePaths(
     currentDir !== path.dirname(currentDir)
   ) {
     if (debugMode)
-      logger.debug(`Checking for GEMINI.md in (upward scan): ${currentDir}`);
+      logger.debug(
+        `Checking for ${GEMINI_MD_FILENAME} in (upward scan): ${currentDir}`,
+      );
     if (currentDir === path.join(resolvedHome, GEMINI_CONFIG_DIR)) {
       if (debugMode)
         logger.debug(`Skipping check inside global config dir: ${currentDir}`);
@@ -226,11 +250,13 @@ export async function getGeminiMdFilePaths(
       await fs.access(potentialPath, fsSync.constants.R_OK);
       upwardPaths.unshift(potentialPath);
       if (debugMode)
-        logger.debug(`Found readable upward GEMINI.md: ${potentialPath}`);
+        logger.debug(
+          `Found readable upward ${GEMINI_MD_FILENAME}: ${potentialPath}`,
+        );
     } catch {
       if (debugMode)
         logger.debug(
-          `Upward GEMINI.md not found or not readable in: ${currentDir}`,
+          `Upward ${GEMINI_MD_FILENAME} not found or not readable in: ${currentDir}`,
         );
     }
     const parentDir = path.dirname(currentDir);
@@ -245,15 +271,19 @@ export async function getGeminiMdFilePaths(
   if (debugMode)
     logger.debug(`Starting downward scan from CWD: ${resolvedCwd}`);
+  const MAX_DIRECTORIES_TO_SCAN_FOR_MEMORY = 200; // Define the cap
+  const scannedDirCount = { count: 0 };
   const downwardPaths = await collectDownwardGeminiFiles(
     resolvedCwd,
     debugMode,
     DEFAULT_IGNORE_DIRECTORIES,
+    scannedDirCount,
+    MAX_DIRECTORIES_TO_SCAN_FOR_MEMORY,
   );
   downwardPaths.sort();
   if (debugMode && downwardPaths.length > 0)
     logger.debug(
-      `Found downward GEMINI.md files (sorted): ${JSON.stringify(downwardPaths)}`,
+      `Found downward ${GEMINI_MD_FILENAME} files (sorted): ${JSON.stringify(downwardPaths)}`,
     );
   for (const dPath of downwardPaths) {
     if (!paths.includes(dPath)) {
@@ -263,7 +293,7 @@ export async function getGeminiMdFilePaths(
   if (debugMode)
     logger.debug(
-      `Final ordered GEMINI.md paths to read: ${JSON.stringify(paths)}`,
+      `Final ordered ${GEMINI_MD_FILENAME} paths to read: ${JSON.stringify(paths)}`,
    );
   return paths;
 }
@@ -289,7 +319,7 @@ async function readGeminiMdFiles(
     } catch (error: unknown) {
       const message = error instanceof Error ? error.message : String(error);
       logger.warn(
-        `Warning: Could not read GEMINI.md file at ${filePath}. Error: ${message}`,
+        `Warning: Could not read ${GEMINI_MD_FILENAME} file at ${filePath}. Error: ${message}`,
       );
       results.push({ filePath, content: null });
       if (debugMode) logger.debug(`Failed to read: ${filePath}`);
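
How the new parameters compose at a call site, as a hedged sketch (assumes an async context; the literal arguments are illustrative, only the parameter order comes from the hunks above): the counter is a single shared mutable object, so every recursive call advances the same count and the cap applies to the whole tree rather than per branch.

// Illustrative call shape only, mirroring the change to getGeminiMdFilePaths:
const scannedDirCount = { count: 0 }; // one shared object for the whole scan
const found = await collectDownwardGeminiFiles(
  '/path/to/project',  // directory (illustrative)
  false,               // debugMode
  ['node_modules'],    // ignoreDirs (illustrative subset)
  scannedDirCount,     // mutated in place by every level of the recursion
  200,                 // maxScanDirs, the commit's MAX_DIRECTORIES_TO_SCAN_FOR_MEMORY
);
// scannedDirCount.count now reports how many directories were actually entered.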

View File

@@ -0,0 +1,278 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect, vi, beforeEach, afterEach, Mock } from 'vitest';
import fsPromises from 'fs/promises';
import { Dirent as FSDirent } from 'fs';
import * as nodePath from 'path';
import { getFolderStructure } from './getFolderStructure.js';

vi.mock('path', async (importOriginal) => {
  const original = (await importOriginal()) as typeof nodePath;
  return {
    ...original,
    resolve: vi.fn((str) => str),
    // Other path functions (basename, join, normalize, etc.) will use original implementation
  };
});

vi.mock('fs/promises');

// Import 'path' again here, it will be the mocked version
import * as path from 'path';

// Helper to create Dirent-like objects for mocking fs.readdir
const createDirent = (name: string, type: 'file' | 'dir'): FSDirent => ({
  name,
  isFile: () => type === 'file',
  isDirectory: () => type === 'dir',
  isBlockDevice: () => false,
  isCharacterDevice: () => false,
  isSymbolicLink: () => false,
  isFIFO: () => false,
  isSocket: () => false,
  parentPath: '',
  path: '',
});

describe('getFolderStructure', () => {
  beforeEach(() => {
    vi.resetAllMocks();

    // path.resolve is now a vi.fn() due to the top-level vi.mock.
    // We ensure its implementation is set for each test (or rely on the one from vi.mock).
    // vi.resetAllMocks() clears call history but not the implementation set by vi.fn() in vi.mock.
    // If we needed to change it per test, we would do it here:
    (path.resolve as Mock).mockImplementation((str: string) => str);

    // Re-apply/define the mock implementation for fsPromises.readdir for each test
    (fsPromises.readdir as Mock).mockImplementation(
      async (dirPath: string | Buffer | URL) => {
        // path.normalize here will use the mocked path module.
        // Since normalize is spread from original, it should be the real one.
        const normalizedPath = path.normalize(dirPath.toString());
        if (mockFsStructure[normalizedPath]) {
          return mockFsStructure[normalizedPath];
        }
        throw Object.assign(
          new Error(
            `ENOENT: no such file or directory, scandir '${normalizedPath}'`,
          ),
          { code: 'ENOENT' },
        );
      },
    );
  });

  afterEach(() => {
    vi.restoreAllMocks(); // Restores spies (like fsPromises.readdir) and resets vi.fn mocks (like path.resolve)
  });

  const mockFsStructure: Record<string, FSDirent[]> = {
    '/testroot': [
      createDirent('file1.txt', 'file'),
      createDirent('subfolderA', 'dir'),
      createDirent('emptyFolder', 'dir'),
      createDirent('.hiddenfile', 'file'),
      createDirent('node_modules', 'dir'),
    ],
    '/testroot/subfolderA': [
      createDirent('fileA1.ts', 'file'),
      createDirent('fileA2.js', 'file'),
      createDirent('subfolderB', 'dir'),
    ],
    '/testroot/subfolderA/subfolderB': [createDirent('fileB1.md', 'file')],
    '/testroot/emptyFolder': [],
    '/testroot/node_modules': [createDirent('somepackage', 'dir')],
    '/testroot/manyFilesFolder': Array.from({ length: 10 }, (_, i) =>
      createDirent(`file-${i}.txt`, 'file'),
    ),
    '/testroot/manyFolders': Array.from({ length: 5 }, (_, i) =>
      createDirent(`folder-${i}`, 'dir'),
    ),
    ...Array.from({ length: 5 }, (_, i) => ({
      [`/testroot/manyFolders/folder-${i}`]: [
        createDirent('child.txt', 'file'),
      ],
    })).reduce((acc, val) => ({ ...acc, ...val }), {}),
    '/testroot/deepFolders': [createDirent('level1', 'dir')],
    '/testroot/deepFolders/level1': [createDirent('level2', 'dir')],
    '/testroot/deepFolders/level1/level2': [createDirent('level3', 'dir')],
    '/testroot/deepFolders/level1/level2/level3': [
      createDirent('file.txt', 'file'),
    ],
  };

  it('should return basic folder structure', async () => {
    const structure = await getFolderStructure('/testroot/subfolderA');
    const expected = `
Showing up to 200 items (files + folders).

/testroot/subfolderA/
├───fileA1.ts
├───fileA2.js
└───subfolderB/
    └───fileB1.md
`.trim();
    expect(structure.trim()).toBe(expected);
  });

  it('should handle an empty folder', async () => {
    const structure = await getFolderStructure('/testroot/emptyFolder');
    const expected = `
Showing up to 200 items (files + folders).

/testroot/emptyFolder/
`.trim();
    expect(structure.trim()).toBe(expected.trim());
  });

  it('should ignore folders specified in ignoredFolders (default)', async () => {
    const structure = await getFolderStructure('/testroot');
    const expected = `
Showing up to 200 items (files + folders). Folders or files indicated with ... contain more items not shown, were ignored, or the display limit (200 items) was reached.

/testroot/
├───.hiddenfile
├───file1.txt
├───emptyFolder/
├───node_modules/...
└───subfolderA/
    ├───fileA1.ts
    ├───fileA2.js
    └───subfolderB/
        └───fileB1.md
`.trim();
    expect(structure.trim()).toBe(expected);
  });

  it('should ignore folders specified in custom ignoredFolders', async () => {
    const structure = await getFolderStructure('/testroot', {
      ignoredFolders: new Set(['subfolderA', 'node_modules']),
    });
    const expected = `
Showing up to 200 items (files + folders). Folders or files indicated with ... contain more items not shown, were ignored, or the display limit (200 items) was reached.

/testroot/
├───.hiddenfile
├───file1.txt
├───emptyFolder/
├───node_modules/...
└───subfolderA/...
`.trim();
    expect(structure.trim()).toBe(expected);
  });

  it('should filter files by fileIncludePattern', async () => {
    const structure = await getFolderStructure('/testroot/subfolderA', {
      fileIncludePattern: /\.ts$/,
    });
    const expected = `
Showing up to 200 items (files + folders).

/testroot/subfolderA/
├───fileA1.ts
└───subfolderB/
`.trim();
    expect(structure.trim()).toBe(expected);
  });

  it('should handle maxItems truncation for files within a folder', async () => {
    const structure = await getFolderStructure('/testroot/subfolderA', {
      maxItems: 3,
    });
    const expected = `
Showing up to 3 items (files + folders).

/testroot/subfolderA/
├───fileA1.ts
├───fileA2.js
└───subfolderB/
`.trim();
    expect(structure.trim()).toBe(expected);
  });

  it('should handle maxItems truncation for subfolders', async () => {
    const structure = await getFolderStructure('/testroot/manyFolders', {
      maxItems: 4,
    });
    const expectedRevised = `
Showing up to 4 items (files + folders). Folders or files indicated with ... contain more items not shown, were ignored, or the display limit (4 items) was reached.

/testroot/manyFolders/
├───folder-0/
├───folder-1/
├───folder-2/
├───folder-3/
└───...
`.trim();
    expect(structure.trim()).toBe(expectedRevised);
  });

  it('should handle maxItems that only allows the root folder itself', async () => {
    const structure = await getFolderStructure('/testroot/subfolderA', {
      maxItems: 1,
    });
    const expectedRevisedMax1 = `
Showing up to 1 items (files + folders). Folders or files indicated with ... contain more items not shown, were ignored, or the display limit (1 items) was reached.

/testroot/subfolderA/
├───fileA1.ts
├───...
└───...
`.trim();
    expect(structure.trim()).toBe(expectedRevisedMax1);
  });

  it('should handle non-existent directory', async () => {
    // Temporarily make fsPromises.readdir throw ENOENT for this specific path
    const originalReaddir = fsPromises.readdir;
    (fsPromises.readdir as Mock).mockImplementation(
      async (p: string | Buffer | URL) => {
        if (p === '/nonexistent') {
          throw Object.assign(new Error('ENOENT'), { code: 'ENOENT' });
        }
        return originalReaddir(p);
      },
    );

    const structure = await getFolderStructure('/nonexistent');
    expect(structure).toContain(
      'Error: Could not read directory "/nonexistent"',
    );
  });

  it('should handle deep folder structure within limits', async () => {
    const structure = await getFolderStructure('/testroot/deepFolders', {
      maxItems: 10,
    });
    const expected = `
Showing up to 10 items (files + folders).

/testroot/deepFolders/
└───level1/
    └───level2/
        └───level3/
            └───file.txt
`.trim();
    expect(structure.trim()).toBe(expected);
  });

  it('should truncate deep folder structure if maxItems is small', async () => {
    const structure = await getFolderStructure('/testroot/deepFolders', {
      maxItems: 3,
    });
    const expected = `
Showing up to 3 items (files + folders).

/testroot/deepFolders/
└───level1/
    └───level2/
        └───level3/
`.trim();
    expect(structure.trim()).toBe(expected);
  });
});

View File

@@ -5,6 +5,7 @@
  */

 import * as fs from 'fs/promises';
+import { Dirent } from 'fs';
 import * as path from 'path';
 import { getErrorMessage, isNodeError } from './errors.js';
@@ -37,286 +38,220 @@ interface FullFolderInfo {
   path: string;
   files: string[];
   subFolders: FullFolderInfo[];
-  totalChildren: number; // Total files + subfolders recursively
-  totalFiles: number; // Total files recursively
-  isIgnored?: boolean; // Flag to easily identify ignored folders later
-}
-
-/** Represents the potentially truncated structure used for display. */
-interface ReducedFolderNode {
-  name: string; // Folder name
-  isRoot?: boolean;
-  files: string[]; // File names, might end with '...'
-  subFolders: ReducedFolderNode[]; // Subfolders, might be truncated
+  totalChildren: number; // Number of files and subfolders included from this folder during BFS scan
+  totalFiles: number; // Number of files included from this folder during BFS scan
+  isIgnored?: boolean; // Flag to easily identify ignored folders later
   hasMoreFiles?: boolean; // Indicates if files were truncated for this specific folder
   hasMoreSubfolders?: boolean; // Indicates if subfolders were truncated for this specific folder
 }
-
-// --- Interfaces ---

 // --- Helper Functions ---

-/**
- * Recursively reads the full directory structure without truncation.
- * Ignored folders are included but not recursed into.
- * @param folderPath The absolute path to the folder.
- * @param options Configuration options.
- * @returns A promise resolving to the FullFolderInfo or null if access denied/not found.
- */
 async function readFullStructure(
-  folderPath: string,
+  rootPath: string,
   options: MergedFolderStructureOptions,
 ): Promise<FullFolderInfo | null> {
-  const name = path.basename(folderPath);
-  const folderInfo: Omit<FullFolderInfo, 'totalChildren' | 'totalFiles'> = {
-    name,
-    path: folderPath,
+  const rootName = path.basename(rootPath);
+  const rootNode: FullFolderInfo = {
+    name: rootName,
+    path: rootPath,
     files: [],
     subFolders: [],
-    isIgnored: false,
+    totalChildren: 0,
+    totalFiles: 0,
   };
-  let totalChildrenCount = 0;
-  let totalFileCount = 0;
-
-  try {
-    const entries = await fs.readdir(folderPath, { withFileTypes: true });
-
-    // Process directories first
-    for (const entry of entries) {
-      if (entry.isDirectory()) {
-        const subFolderName = entry.name;
-        const subFolderPath = path.join(folderPath, subFolderName);
-
-        // Check if the folder should be ignored
-        if (options.ignoredFolders.has(subFolderName)) {
-          // Add ignored folder node but don't recurse
-          const ignoredFolderInfo: FullFolderInfo = {
-            name: subFolderName,
-            path: subFolderPath,
-            files: [],
-            subFolders: [],
-            totalChildren: 0, // No children explored
-            totalFiles: 0, // No files explored
-            isIgnored: true,
-          };
-          folderInfo.subFolders.push(ignoredFolderInfo);
-          // Skip recursion for this folder
-          continue;
-        }
-
-        // If not ignored, recurse as before
-        const subFolderInfo = await readFullStructure(subFolderPath, options);
-        // Add non-empty folders OR explicitly ignored folders
-        if (
-          subFolderInfo &&
-          (subFolderInfo.totalChildren > 0 ||
-            subFolderInfo.files.length > 0 ||
-            subFolderInfo.isIgnored)
-        ) {
-          folderInfo.subFolders.push(subFolderInfo);
-        }
-      }
-    }
-
-    // Then process files (only if the current folder itself isn't marked as ignored)
+
+  const queue: Array<{ folderInfo: FullFolderInfo; currentPath: string }> = [
+    { folderInfo: rootNode, currentPath: rootPath },
+  ];
+  let currentItemCount = 0;
+  // Count the root node itself as one item if we are not just listing its content
+
+  const processedPaths = new Set<string>(); // To avoid processing same path if symlinks create loops
+
+  while (queue.length > 0) {
+    const { folderInfo, currentPath } = queue.shift()!;
+
+    if (processedPaths.has(currentPath)) {
+      continue;
+    }
+    processedPaths.add(currentPath);
+
+    if (currentItemCount >= options.maxItems) {
+      // If the root itself caused us to exceed, we can't really show anything.
+      // Otherwise, this folder won't be processed further.
+      // The parent that queued this would have set its own hasMoreSubfolders flag.
+      continue;
+    }
+
+    let entries: Dirent[];
+    try {
+      const rawEntries = await fs.readdir(currentPath, { withFileTypes: true });
+      // Sort entries alphabetically by name for consistent processing order
+      entries = rawEntries.sort((a, b) => a.name.localeCompare(b.name));
+    } catch (error: unknown) {
+      if (
+        isNodeError(error) &&
+        (error.code === 'EACCES' || error.code === 'ENOENT')
+      ) {
+        console.warn(
+          `Warning: Could not read directory ${currentPath}: ${error.message}`,
+        );
+        if (currentPath === rootPath && error.code === 'ENOENT') {
+          return null; // Root directory itself not found
+        }
+        // For other EACCES/ENOENT on subdirectories, just skip them.
+        continue;
+      }
+      throw error;
+    }
+
+    const filesInCurrentDir: string[] = [];
+    const subFoldersInCurrentDir: FullFolderInfo[] = [];
+
+    // Process files first in the current directory
     for (const entry of entries) {
       if (entry.isFile()) {
+        if (currentItemCount >= options.maxItems) {
+          folderInfo.hasMoreFiles = true;
+          break;
+        }
         const fileName = entry.name;
         if (
           !options.fileIncludePattern ||
           options.fileIncludePattern.test(fileName)
         ) {
-          folderInfo.files.push(fileName);
+          filesInCurrentDir.push(fileName);
+          currentItemCount++;
+          folderInfo.totalFiles++;
+          folderInfo.totalChildren++;
         }
       }
     }
+    folderInfo.files = filesInCurrentDir;

-    // Calculate totals *after* processing children
-    // Ignored folders contribute 0 to counts here because we didn't look inside.
-    totalFileCount =
-      folderInfo.files.length +
-      folderInfo.subFolders.reduce((sum, sf) => sum + sf.totalFiles, 0);
-    // Count the ignored folder itself as one child item in the parent's count.
-    totalChildrenCount =
-      folderInfo.files.length +
-      folderInfo.subFolders.length +
-      folderInfo.subFolders.reduce((sum, sf) => sum + sf.totalChildren, 0);
-  } catch (error: unknown) {
-    if (
-      isNodeError(error) &&
-      (error.code === 'EACCES' || error.code === 'ENOENT')
-    ) {
-      console.warn(
-        `Warning: Could not read directory ${folderPath}: ${error.message}`,
-      );
-      return null;
-    }
-    throw error;
-  }
-
-  return {
-    ...folderInfo,
-    totalChildren: totalChildrenCount,
-    totalFiles: totalFileCount,
-  };
-}
-
-/**
- * Reduces the full folder structure based on the maxItems limit using BFS.
- * Handles explicitly ignored folders by showing them with a truncation indicator.
- * @param fullInfo The complete folder structure info.
- * @param maxItems The maximum number of items (files + folders) to include.
- * @param ignoredFolders The set of folder names that were ignored during the read phase.
- * @returns The root node of the reduced structure.
- */
-function reduceStructure(
-  fullInfo: FullFolderInfo,
-  maxItems: number,
-): ReducedFolderNode {
-  const rootReducedNode: ReducedFolderNode = {
-    name: fullInfo.name,
-    files: [],
-    subFolders: [],
-    isRoot: true,
-  };
-
-  const queue: Array<{
-    original: FullFolderInfo;
-    reduced: ReducedFolderNode;
-  }> = [];
-  // Don't count the root itself towards the limit initially
-  queue.push({ original: fullInfo, reduced: rootReducedNode });
-
-  let itemCount = 0; // Count folders + files added to the reduced structure
-
-  while (queue.length > 0) {
-    const { original: originalFolder, reduced: reducedFolder } = queue.shift()!;
-
-    // If the folder being processed was itself marked as ignored (shouldn't happen for root)
-    if (originalFolder.isIgnored) {
-      continue;
-    }
-
-    // Process Files
-    let fileLimitReached = false;
-    for (const file of originalFolder.files) {
-      // Check limit *before* adding the file
-      if (itemCount >= maxItems) {
-        if (!fileLimitReached) {
-          reducedFolder.files.push(TRUNCATION_INDICATOR);
-          reducedFolder.hasMoreFiles = true;
-          fileLimitReached = true;
-        }
-        break;
-      }
-      reducedFolder.files.push(file);
-      itemCount++;
-    }
-
-    // Process Subfolders
-    let subfolderLimitReached = false;
-    for (const subFolder of originalFolder.subFolders) {
-      // Count the folder itself towards the limit
-      itemCount++;
-      if (itemCount > maxItems) {
-        if (!subfolderLimitReached) {
-          // Add a placeholder node ONLY if we haven't already added one
-          const truncatedSubfolderNode: ReducedFolderNode = {
-            name: subFolder.name,
-            files: [TRUNCATION_INDICATOR], // Generic truncation
-            subFolders: [],
-            hasMoreFiles: true,
-          };
-          reducedFolder.subFolders.push(truncatedSubfolderNode);
-          reducedFolder.hasMoreSubfolders = true;
-          subfolderLimitReached = true;
-        }
-        continue; // Stop processing further subfolders for this parent
-      }
-
-      // Handle explicitly ignored folders identified during the read phase
-      if (subFolder.isIgnored) {
-        const ignoredReducedNode: ReducedFolderNode = {
-          name: subFolder.name,
-          files: [TRUNCATION_INDICATOR], // Indicate contents ignored/truncated
-          subFolders: [],
-          hasMoreFiles: true, // Mark as truncated
-        };
-        reducedFolder.subFolders.push(ignoredReducedNode);
-        // DO NOT add the ignored folder to the queue for further processing
-      } else {
-        // If not ignored and within limit, create the reduced node and add to queue
-        const reducedSubFolder: ReducedFolderNode = {
-          name: subFolder.name,
-          files: [],
-          subFolders: [],
-        };
-        reducedFolder.subFolders.push(reducedSubFolder);
-        queue.push({ original: subFolder, reduced: reducedSubFolder });
-      }
-    }
-  }
-
-  return rootReducedNode;
-}
-
-/** Calculates the total number of items present in the reduced structure. */
-function countReducedItems(node: ReducedFolderNode): number {
-  let count = 0;
-  // Count files, treating '...' as one item if present
-  count += node.files.length;
-  // Count subfolders and recursively count their contents
-  count += node.subFolders.length;
-  for (const sub of node.subFolders) {
-    // Check if it's a placeholder ignored/truncated node
-    const isTruncatedPlaceholder =
-      sub.files.length === 1 &&
-      sub.files[0] === TRUNCATION_INDICATOR &&
-      sub.subFolders.length === 0;
-    if (!isTruncatedPlaceholder) {
-      count += countReducedItems(sub);
-    }
-    // Don't add count for items *inside* the placeholder node itself.
-  }
-  return count;
-}
+    // Then process directories and queue them
+    for (const entry of entries) {
+      if (entry.isDirectory()) {
+        // Check if adding this directory ITSELF would meet or exceed maxItems
+        // (currentItemCount refers to items *already* added before this one)
+        if (currentItemCount >= options.maxItems) {
+          folderInfo.hasMoreSubfolders = true;
+          break; // Already at limit, cannot add this folder or any more
+        }
+        // If adding THIS folder makes us hit the limit exactly, and it might have children,
+        // it's better to show '...' for the parent, unless this is the very last item slot.
+        // This logic is tricky. Let's try a simpler: if we can't add this item, mark and break.
+
+        const subFolderName = entry.name;
+        const subFolderPath = path.join(currentPath, subFolderName);
+
+        if (options.ignoredFolders.has(subFolderName)) {
+          const ignoredSubFolder: FullFolderInfo = {
+            name: subFolderName,
+            path: subFolderPath,
+            files: [],
+            subFolders: [],
+            totalChildren: 0,
+            totalFiles: 0,
+            isIgnored: true,
+          };
+          subFoldersInCurrentDir.push(ignoredSubFolder);
+          currentItemCount++; // Count the ignored folder itself
+          folderInfo.totalChildren++; // Also counts towards parent's children
+          continue;
+        }
+
+        const subFolderNode: FullFolderInfo = {
+          name: subFolderName,
+          path: subFolderPath,
+          files: [],
+          subFolders: [],
+          totalChildren: 0,
+          totalFiles: 0,
+        };
+        subFoldersInCurrentDir.push(subFolderNode);
+        currentItemCount++;
+        folderInfo.totalChildren++; // Counts towards parent's children
+        // Add to queue for processing its children later
+        queue.push({ folderInfo: subFolderNode, currentPath: subFolderPath });
+      }
+    }
+    folderInfo.subFolders = subFoldersInCurrentDir;
+  }
+
+  return rootNode;
+}

 /**
- * Formats the reduced folder structure into a tree-like string.
+ * Reads the directory structure using BFS, respecting maxItems.
  * @param node The current node in the reduced structure.
  * @param indent The current indentation string.
  * @param isLast Sibling indicator.
  * @param builder Array to build the string lines.
  */
-function formatReducedStructure(
-  node: ReducedFolderNode,
-  indent: string,
-  isLast: boolean,
+function formatStructure(
+  node: FullFolderInfo,
+  currentIndent: string,
+  isLastChildOfParent: boolean,
+  isProcessingRootNode: boolean,
   builder: string[],
 ): void {
-  const connector = isLast ? '└───' : '├───';
-  const linePrefix = indent + connector;
+  const connector = isLastChildOfParent ? '└───' : '├───';

-  // Don't print the root node's name directly, only its contents
-  if (!node.isRoot) {
-    builder.push(`${linePrefix}${node.name}/`);
+  // The root node of the structure (the one passed initially to getFolderStructure)
+  // is not printed with a connector line itself, only its name as a header.
+  // Its children are printed relative to that conceptual root.
+  // Ignored root nodes ARE printed with a connector.
+  if (!isProcessingRootNode || node.isIgnored) {
+    builder.push(
+      `${currentIndent}${connector}${node.name}/${node.isIgnored ? TRUNCATION_INDICATOR : ''}`,
+    );
   }

-  const childIndent = indent + (isLast || node.isRoot ? '    ' : '│   '); // Use "    " if last, "│   " otherwise
+  // Determine the indent for the children of *this* node.
+  // If *this* node was the root of the whole structure, its children start with no indent before their connectors.
+  // Otherwise, children's indent extends from the current node's indent.
+  const indentForChildren = isProcessingRootNode
+    ? ''
+    : currentIndent + (isLastChildOfParent ? '    ' : '│   ');

-  // Render files
+  // Render files of the current node
   const fileCount = node.files.length;
   for (let i = 0; i < fileCount; i++) {
-    const isLastFile = i === fileCount - 1 && node.subFolders.length === 0;
-    const fileConnector = isLastFile ? '└───' : '├───';
-    builder.push(`${childIndent}${fileConnector}${node.files[i]}`);
+    const isLastFileAmongSiblings =
+      i === fileCount - 1 &&
+      node.subFolders.length === 0 &&
+      !node.hasMoreSubfolders;
+    const fileConnector = isLastFileAmongSiblings ? '└───' : '├───';
+    builder.push(`${indentForChildren}${fileConnector}${node.files[i]}`);
+  }
+  if (node.hasMoreFiles) {
+    const isLastIndicatorAmongSiblings =
+      node.subFolders.length === 0 && !node.hasMoreSubfolders;
+    const fileConnector = isLastIndicatorAmongSiblings ? '└───' : '├───';
+    builder.push(`${indentForChildren}${fileConnector}${TRUNCATION_INDICATOR}`);
   }

-  // Render subfolders
+  // Render subfolders of the current node
   const subFolderCount = node.subFolders.length;
   for (let i = 0; i < subFolderCount; i++) {
-    const isLastSub = i === subFolderCount - 1;
-    formatReducedStructure(node.subFolders[i], childIndent, isLastSub, builder);
+    const isLastSubfolderAmongSiblings =
+      i === subFolderCount - 1 && !node.hasMoreSubfolders;
+    // Children are never the root node being processed initially.
+    formatStructure(
+      node.subFolders[i],
+      indentForChildren,
+      isLastSubfolderAmongSiblings,
+      false,
+      builder,
+    );
+  }
+  if (node.hasMoreSubfolders) {
+    builder.push(`${indentForChildren}└───${TRUNCATION_INDICATOR}`);
   }
 }
@@ -343,40 +278,44 @@ export async function getFolderStructure(
   };

   try {
-    // 1. Read the full structure (includes ignored folders marked as such)
-    const fullInfo = await readFullStructure(resolvedPath, mergedOptions);
+    // 1. Read the structure using BFS, respecting maxItems
+    const structureRoot = await readFullStructure(resolvedPath, mergedOptions);

-    if (!fullInfo) {
+    if (!structureRoot) {
       return `Error: Could not read directory "${resolvedPath}". Check path and permissions.`;
     }

-    // 2. Reduce the structure (handles ignored folders specifically)
-    const reducedRoot = reduceStructure(fullInfo, mergedOptions.maxItems);
-
-    // 3. Count items in the *reduced* structure for the summary
-    const rootNodeItselfCount = 0; // Don't count the root node in the items summary
-    const reducedItemCount =
-      countReducedItems(reducedRoot) - rootNodeItselfCount;
-
-    // 4. Format the reduced structure into a string
+    // 2. Format the structure into a string
     const structureLines: string[] = [];
-    formatReducedStructure(reducedRoot, '', true, structureLines);
+    // Pass true for isRoot for the initial call
+    formatStructure(structureRoot, '', true, true, structureLines);

-    // 5. Build the final output string
+    // 3. Build the final output string
     const displayPath = resolvedPath.replace(/\\/g, '/');
-    const totalOriginalChildren = fullInfo.totalChildren;

     let disclaimer = '';
-    // Check if any truncation happened OR if ignored folders were present
-    if (
-      reducedItemCount < totalOriginalChildren ||
-      fullInfo.subFolders.some((sf) => sf.isIgnored)
-    ) {
-      disclaimer = `Folders or files indicated with ${TRUNCATION_INDICATOR} contain more items not shown or were ignored.`;
+    // Check if truncation occurred anywhere or if ignored folders are present.
+    // A simple check: if any node indicates more files/subfolders, or is ignored.
+    let truncationOccurred = false;
+    function checkForTruncation(node: FullFolderInfo) {
+      if (node.hasMoreFiles || node.hasMoreSubfolders || node.isIgnored) {
+        truncationOccurred = true;
+      }
+      if (!truncationOccurred) {
+        for (const sub of node.subFolders) {
+          checkForTruncation(sub);
+          if (truncationOccurred) break;
+        }
+      }
+    }
+    checkForTruncation(structureRoot);
+
+    if (truncationOccurred) {
+      disclaimer = `Folders or files indicated with ${TRUNCATION_INDICATOR} contain more items not shown, were ignored, or the display limit (${mergedOptions.maxItems} items) was reached.`;
     }

     const summary =
-      `Showing ${reducedItemCount} of ${totalOriginalChildren} items (files + folders). ${disclaimer}`.trim();
+      `Showing up to ${mergedOptions.maxItems} items (files + folders). ${disclaimer}`.trim();

     return `${summary}\n\n${displayPath}/\n${structureLines.join('\n')}`;
   } catch (error: unknown) {
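
Hedged usage sketch of the reworked function (assuming an ES module where top-level await is available; the path is made up, while the option name and summary line come from the hunks and tests above): because the cap is now enforced while reading, a call on an arbitrarily large tree performs at most maxItems item insertions plus one readdir per queued directory, instead of reading the entire tree before truncating.

import { getFolderStructure } from './getFolderStructure.js';

// Illustrative: '/some/huge/checkout' stands in for any very large directory.
const text = await getFolderStructure('/some/huge/checkout', {
  maxItems: 200, // matches the "Showing up to 200 items" default in the tests
});
console.log(text); // summary line, blank line, then the ├───/└─── tree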

View File

@@ -3,7 +3,8 @@
   "compilerOptions": {
     "outDir": "dist",
     "lib": ["DOM", "DOM.Iterable", "ES2021"],
-    "composite": true
+    "composite": true,
+    "types": ["node", "vitest/globals"]
   },
   "include": ["index.ts", "src/**/*.ts", "src/**/*.json"],
   "exclude": ["node_modules", "dist"]

View File

@@ -14,6 +14,6 @@
     "module": "NodeNext",
     "moduleResolution": "nodenext",
     "target": "es2022",
-    "types": ["node"]
+    "types": ["node", "vitest/globals"]
   }
 }