Refactor: Centralize storage file management (#4078)

Co-authored-by: Taylor Mullen <ntaylormullen@google.com>
Yuki Okita, 2025-08-20 10:55:47 +09:00 (committed via GitHub)
parent 1049d38845
commit 21c6480b65
50 changed files with 889 additions and 532 deletions
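In short, the commit folds the scattered path helpers (getProjectTempDir(projectRoot), getUserCommandsDir(), getWorkspaceSettingsPath(workspaceDir), getCachedCredentialPath(), ...) into a single Storage class, and Config now exposes a ready-made instance as config.storage. A rough sketch of the resulting call sites, distilled from the diffs below; the comments showing resolved paths are illustrative, and config stands for an existing Config instance:

import { Config, Storage } from '@google/gemini-cli-core';

// Global (per-user) locations are static helpers:
const settingsPath = Storage.getGlobalSettingsPath();        // ~/.gemini/settings.json
const userCommandsDir = Storage.getUserCommandsDir();        // ~/.gemini/commands

// Per-project locations hang off an instance keyed by the project root:
const storage = new Storage(process.cwd());
const workspaceSettings = storage.getWorkspaceSettingsPath(); // <root>/.gemini/settings.json
const checkpointsDir = storage.getProjectTempCheckpointsDir(); // ~/.gemini/tmp/<hash>/checkpoints

// Config owns an instance, replacing the removed Config.getProjectTempDir()/getGeminiDir():
declare const config: Config;
const tempDir = config.storage.getProjectTempDir();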

View File

@@ -11,9 +11,27 @@ import { loadExtensions } from '../../config/extension.js';
import { createTransport } from '@google/gemini-cli-core';
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
-vi.mock('../../config/settings.js');
-vi.mock('../../config/extension.js');
-vi.mock('@google/gemini-cli-core');
+vi.mock('../../config/settings.js', () => ({
+  loadSettings: vi.fn(),
+}));
+vi.mock('../../config/extension.js', () => ({
+  loadExtensions: vi.fn(),
+}));
+vi.mock('@google/gemini-cli-core', () => ({
+  createTransport: vi.fn(),
+  MCPServerStatus: {
+    CONNECTED: 'CONNECTED',
+    CONNECTING: 'CONNECTING',
+    DISCONNECTED: 'DISCONNECTED',
+  },
+  Storage: vi.fn().mockImplementation((_cwd: string) => ({
+    getGlobalSettingsPath: () => '/tmp/gemini/settings.json',
+    getWorkspaceSettingsPath: () => '/tmp/gemini/workspace-settings.json',
+    getProjectTempDir: () => '/test/home/.gemini/tmp/mocked_hash',
+  })),
+  GEMINI_CONFIG_DIR: '.gemini',
+  getErrorMessage: (e: unknown) => (e instanceof Error ? e.message : String(e)),
+}));
vi.mock('@modelcontextprotocol/sdk/client/index.js');
const mockedLoadSettings = loadSettings as vi.Mock;

View File

@@ -6,7 +6,6 @@
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import * as os from 'os';
-import * as fs from 'fs';
import * as path from 'path';
import { ShellTool, EditTool, WriteFileTool } from '@google/gemini-cli-core';
import { loadCliConfig, parseArguments } from './config.js';
@@ -19,6 +18,38 @@ vi.mock('./trustedFolders.js', () => ({
  isWorkspaceTrusted: vi.fn(),
}));
+vi.mock('fs', async (importOriginal) => {
+  const actualFs = await importOriginal<typeof import('fs')>();
+  const pathMod = await import('path');
+  const mockHome = '/mock/home/user';
+  const MOCK_CWD1 = process.cwd();
+  const MOCK_CWD2 = pathMod.resolve(pathMod.sep, 'home', 'user', 'project');
+  const mockPaths = new Set([
+    MOCK_CWD1,
+    MOCK_CWD2,
+    pathMod.resolve(pathMod.sep, 'cli', 'path1'),
+    pathMod.resolve(pathMod.sep, 'settings', 'path1'),
+    pathMod.join(mockHome, 'settings', 'path2'),
+    pathMod.join(MOCK_CWD2, 'cli', 'path2'),
+    pathMod.join(MOCK_CWD2, 'settings', 'path3'),
+  ]);
+  return {
+    ...actualFs,
+    mkdirSync: vi.fn(),
+    writeFileSync: vi.fn(),
+    existsSync: vi.fn((p) => mockPaths.has(p.toString())),
+    statSync: vi.fn((p) => {
+      if (mockPaths.has(p.toString())) {
+        return { isDirectory: () => true } as unknown as import('fs').Stats;
+      }
+      return (actualFs as typeof import('fs')).statSync(p as unknown as string);
+    }),
+    realpathSync: vi.fn((p) => p),
+  };
+});
vi.mock('os', async (importOriginal) => {
  const actualOs = await importOriginal<typeof os>();
  return {
@@ -1441,35 +1472,6 @@ describe('loadCliConfig folderTrust', () => {
  });
});
-vi.mock('fs', async () => {
-  const actualFs = await vi.importActual<typeof fs>('fs');
-  const MOCK_CWD1 = process.cwd();
-  const MOCK_CWD2 = path.resolve(path.sep, 'home', 'user', 'project');
-  const mockPaths = new Set([
-    MOCK_CWD1,
-    MOCK_CWD2,
-    path.resolve(path.sep, 'cli', 'path1'),
-    path.resolve(path.sep, 'settings', 'path1'),
-    path.join(os.homedir(), 'settings', 'path2'),
-    path.join(MOCK_CWD2, 'cli', 'path2'),
-    path.join(MOCK_CWD2, 'settings', 'path3'),
-  ]);
-  return {
-    ...actualFs,
-    existsSync: vi.fn((p) => mockPaths.has(p.toString())),
-    statSync: vi.fn((p) => {
-      if (mockPaths.has(p.toString())) {
-        return { isDirectory: () => true };
-      }
-      // Fallback for other paths if needed, though the test should be specific.
-      return actualFs.statSync(p);
-    }),
-    realpathSync: vi.fn((p) => p),
-  };
-});
describe('loadCliConfig with includeDirectories', () => {
  const originalArgv = process.argv;

View File

@@ -10,7 +10,6 @@ import * as os from 'os';
import * as path from 'path';
import {
  EXTENSIONS_CONFIG_FILENAME,
-  EXTENSIONS_DIRECTORY_NAME,
  annotateActiveExtensions,
  loadExtensions,
} from './extension.js';
@@ -23,6 +22,8 @@ vi.mock('os', async (importOriginal) => {
  };
});
+const EXTENSIONS_DIRECTORY_NAME = path.join('.gemini', 'extensions');
describe('loadExtensions', () => {
  let tempWorkspaceDir: string;
  let tempHomeDir: string;

View File

@@ -4,12 +4,15 @@
 * SPDX-License-Identifier: Apache-2.0
 */
-import { MCPServerConfig, GeminiCLIExtension } from '@google/gemini-cli-core';
+import {
+  MCPServerConfig,
+  GeminiCLIExtension,
+  Storage,
+} from '@google/gemini-cli-core';
import * as fs from 'fs';
import * as path from 'path';
import * as os from 'os';
-export const EXTENSIONS_DIRECTORY_NAME = path.join('.gemini', 'extensions');
export const EXTENSIONS_CONFIG_FILENAME = 'gemini-extension.json';
export interface Extension {
@@ -43,7 +46,8 @@ export function loadExtensions(workspaceDir: string): Extension[] {
}
function loadExtensionsFromDir(dir: string): Extension[] {
-  const extensionsDir = path.join(dir, EXTENSIONS_DIRECTORY_NAME);
+  const storage = new Storage(dir);
+  const extensionsDir = storage.getExtensionsDir();
  if (!fs.existsSync(extensionsDir)) {
    return [];
  }

View File

@@ -11,6 +11,7 @@ import * as dotenv from 'dotenv';
import {
  GEMINI_CONFIG_DIR as GEMINI_DIR,
  getErrorMessage,
+  Storage,
} from '@google/gemini-cli-core';
import stripJsonComments from 'strip-json-comments';
import { DefaultLight } from '../ui/themes/default-light.js';
@@ -20,8 +21,9 @@ import { Settings, MemoryImportFormat } from './settingsSchema.js';
export type { Settings, MemoryImportFormat };
export const SETTINGS_DIRECTORY_NAME = '.gemini';
-export const USER_SETTINGS_DIR = path.join(homedir(), SETTINGS_DIRECTORY_NAME);
-export const USER_SETTINGS_PATH = path.join(USER_SETTINGS_DIR, 'settings.json');
+export const USER_SETTINGS_PATH = Storage.getGlobalSettingsPath();
+export const USER_SETTINGS_DIR = path.dirname(USER_SETTINGS_PATH);
export const DEFAULT_EXCLUDED_ENV_VARS = ['DEBUG', 'DEBUG_MODE'];
export function getSystemSettingsPath(): string {
@@ -37,10 +39,6 @@
  }
}
-export function getWorkspaceSettingsPath(workspaceDir: string): string {
-  return path.join(workspaceDir, SETTINGS_DIRECTORY_NAME, 'settings.json');
-}
export type { DnsResolutionOrder } from './settingsSchema.js';
export enum SettingScope {
@@ -269,7 +267,9 @@ export function loadEnvironment(settings?: Settings): void {
  // If no settings provided, try to load workspace settings for exclusions
  let resolvedSettings = settings;
  if (!resolvedSettings) {
-    const workspaceSettingsPath = getWorkspaceSettingsPath(process.cwd());
+    const workspaceSettingsPath = new Storage(
+      process.cwd(),
+    ).getWorkspaceSettingsPath();
    try {
      if (fs.existsSync(workspaceSettingsPath)) {
        const workspaceContent = fs.readFileSync(
@@ -342,7 +342,9 @@ export function loadSettings(workspaceDir: string): LoadedSettings {
  // We expect homedir to always exist and be resolvable.
  const realHomeDir = fs.realpathSync(resolvedHomeDir);
-  const workspaceSettingsPath = getWorkspaceSettingsPath(workspaceDir);
+  const workspaceSettingsPath = new Storage(
+    workspaceDir,
+  ).getWorkspaceSettingsPath();
  // Load system settings
  try {

View File

@@ -5,11 +5,7 @@
 */
import * as path from 'node:path';
-import {
-  Config,
-  getProjectCommandsDir,
-  getUserCommandsDir,
-} from '@google/gemini-cli-core';
+import { Config, Storage } from '@google/gemini-cli-core';
import mock from 'mock-fs';
import { FileCommandLoader } from './FileCommandLoader.js';
import { assert, vi } from 'vitest';
@@ -57,6 +53,7 @@ vi.mock('@google/gemini-cli-core', async (importOriginal) => {
    await importOriginal<typeof import('@google/gemini-cli-core')>();
  return {
    ...original,
+    Storage: original.Storage,
    isCommandAllowed: vi.fn(),
    ShellExecutionService: {
      execute: vi.fn(),
@ -86,7 +83,7 @@ describe('FileCommandLoader', () => {
}); });
it('loads a single command from a file', async () => { it('loads a single command from a file', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
'test.toml': 'prompt = "This is a test prompt"', 'test.toml': 'prompt = "This is a test prompt"',
@ -127,7 +124,7 @@ describe('FileCommandLoader', () => {
itif(process.platform !== 'win32')( itif(process.platform !== 'win32')(
'loads commands from a symlinked directory', 'loads commands from a symlinked directory',
async () => { async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
const realCommandsDir = '/real/commands'; const realCommandsDir = '/real/commands';
mock({ mock({
[realCommandsDir]: { [realCommandsDir]: {
@ -152,7 +149,7 @@ describe('FileCommandLoader', () => {
itif(process.platform !== 'win32')( itif(process.platform !== 'win32')(
'loads commands from a symlinked subdirectory', 'loads commands from a symlinked subdirectory',
async () => { async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
const realNamespacedDir = '/real/namespaced-commands'; const realNamespacedDir = '/real/namespaced-commands';
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
@ -176,7 +173,7 @@ describe('FileCommandLoader', () => {
); );
it('loads multiple commands', async () => { it('loads multiple commands', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
'test1.toml': 'prompt = "Prompt 1"', 'test1.toml': 'prompt = "Prompt 1"',
@ -191,7 +188,7 @@ describe('FileCommandLoader', () => {
}); });
it('creates deeply nested namespaces correctly', async () => { it('creates deeply nested namespaces correctly', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
@ -205,7 +202,7 @@ describe('FileCommandLoader', () => {
const mockConfig = { const mockConfig = {
getProjectRoot: vi.fn(() => '/path/to/project'), getProjectRoot: vi.fn(() => '/path/to/project'),
getExtensions: vi.fn(() => []), getExtensions: vi.fn(() => []),
} as unknown as Config; } as Config;
const loader = new FileCommandLoader(mockConfig); const loader = new FileCommandLoader(mockConfig);
const commands = await loader.loadCommands(signal); const commands = await loader.loadCommands(signal);
expect(commands).toHaveLength(1); expect(commands).toHaveLength(1);
@ -213,7 +210,7 @@ describe('FileCommandLoader', () => {
}); });
it('creates namespaces from nested directories', async () => { it('creates namespaces from nested directories', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
git: { git: {
@@ -232,8 +229,10 @@ describe('FileCommandLoader', () => {
  });
  it('returns both user and project commands in order', async () => {
-    const userCommandsDir = getUserCommandsDir();
-    const projectCommandsDir = getProjectCommandsDir(process.cwd());
+    const userCommandsDir = Storage.getUserCommandsDir();
+    const projectCommandsDir = new Storage(
+      process.cwd(),
+    ).getProjectCommandsDir();
    mock({
      [userCommandsDir]: {
        'test.toml': 'prompt = "User prompt"',
@ -246,7 +245,7 @@ describe('FileCommandLoader', () => {
const mockConfig = { const mockConfig = {
getProjectRoot: vi.fn(() => process.cwd()), getProjectRoot: vi.fn(() => process.cwd()),
getExtensions: vi.fn(() => []), getExtensions: vi.fn(() => []),
} as unknown as Config; } as Config;
const loader = new FileCommandLoader(mockConfig); const loader = new FileCommandLoader(mockConfig);
const commands = await loader.loadCommands(signal); const commands = await loader.loadCommands(signal);
@ -284,7 +283,7 @@ describe('FileCommandLoader', () => {
}); });
it('ignores files with TOML syntax errors', async () => { it('ignores files with TOML syntax errors', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
'invalid.toml': 'this is not valid toml', 'invalid.toml': 'this is not valid toml',
@ -300,7 +299,7 @@ describe('FileCommandLoader', () => {
}); });
it('ignores files that are semantically invalid (missing prompt)', async () => { it('ignores files that are semantically invalid (missing prompt)', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
'no_prompt.toml': 'description = "This file is missing a prompt"', 'no_prompt.toml': 'description = "This file is missing a prompt"',
@ -316,7 +315,7 @@ describe('FileCommandLoader', () => {
}); });
it('handles filename edge cases correctly', async () => { it('handles filename edge cases correctly', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
'test.v1.toml': 'prompt = "Test prompt"', 'test.v1.toml': 'prompt = "Test prompt"',
@ -338,7 +337,7 @@ describe('FileCommandLoader', () => {
}); });
it('uses a default description if not provided', async () => { it('uses a default description if not provided', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
'test.toml': 'prompt = "Test prompt"', 'test.toml': 'prompt = "Test prompt"',
@ -353,7 +352,7 @@ describe('FileCommandLoader', () => {
}); });
it('uses the provided description', async () => { it('uses the provided description', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
'test.toml': 'prompt = "Test prompt"\ndescription = "My test command"', 'test.toml': 'prompt = "Test prompt"\ndescription = "My test command"',
@ -368,7 +367,7 @@ describe('FileCommandLoader', () => {
}); });
it('should sanitize colons in filenames to prevent namespace conflicts', async () => { it('should sanitize colons in filenames to prevent namespace conflicts', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
'legacy:command.toml': 'prompt = "This is a legacy command"', 'legacy:command.toml': 'prompt = "This is a legacy command"',
@ -388,7 +387,7 @@ describe('FileCommandLoader', () => {
describe('Processor Instantiation Logic', () => { describe('Processor Instantiation Logic', () => {
it('instantiates only DefaultArgumentProcessor if no {{args}} or !{} are present', async () => { it('instantiates only DefaultArgumentProcessor if no {{args}} or !{} are present', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
'simple.toml': `prompt = "Just a regular prompt"`, 'simple.toml': `prompt = "Just a regular prompt"`,
@ -403,7 +402,7 @@ describe('FileCommandLoader', () => {
}); });
it('instantiates only ShellProcessor if {{args}} is present (but not !{})', async () => { it('instantiates only ShellProcessor if {{args}} is present (but not !{})', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
'args.toml': `prompt = "Prompt with {{args}}"`, 'args.toml': `prompt = "Prompt with {{args}}"`,
@ -418,7 +417,7 @@ describe('FileCommandLoader', () => {
}); });
it('instantiates ShellProcessor and DefaultArgumentProcessor if !{} is present (but not {{args}})', async () => { it('instantiates ShellProcessor and DefaultArgumentProcessor if !{} is present (but not {{args}})', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
'shell.toml': `prompt = "Prompt with !{cmd}"`, 'shell.toml': `prompt = "Prompt with !{cmd}"`,
@ -433,7 +432,7 @@ describe('FileCommandLoader', () => {
}); });
it('instantiates only ShellProcessor if both {{args}} and !{} are present', async () => { it('instantiates only ShellProcessor if both {{args}} and !{} are present', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
'both.toml': `prompt = "Prompt with {{args}} and !{cmd}"`, 'both.toml': `prompt = "Prompt with {{args}} and !{cmd}"`,
@@ -450,8 +449,10 @@ describe('FileCommandLoader', () => {
  describe('Extension Command Loading', () => {
    it('loads commands from active extensions', async () => {
-      const userCommandsDir = getUserCommandsDir();
-      const projectCommandsDir = getProjectCommandsDir(process.cwd());
+      const userCommandsDir = Storage.getUserCommandsDir();
+      const projectCommandsDir = new Storage(
+        process.cwd(),
+      ).getProjectCommandsDir();
      const extensionDir = path.join(
        process.cwd(),
        '.gemini/extensions/test-ext',
@ -485,7 +486,7 @@ describe('FileCommandLoader', () => {
path: extensionDir, path: extensionDir,
}, },
]), ]),
} as unknown as Config; } as Config;
const loader = new FileCommandLoader(mockConfig); const loader = new FileCommandLoader(mockConfig);
const commands = await loader.loadCommands(signal); const commands = await loader.loadCommands(signal);
@@ -499,8 +500,10 @@
    });
    it('extension commands have extensionName metadata for conflict resolution', async () => {
-      const userCommandsDir = getUserCommandsDir();
-      const projectCommandsDir = getProjectCommandsDir(process.cwd());
+      const userCommandsDir = Storage.getUserCommandsDir();
+      const projectCommandsDir = new Storage(
+        process.cwd(),
+      ).getProjectCommandsDir();
      const extensionDir = path.join(
        process.cwd(),
        '.gemini/extensions/test-ext',
@ -534,7 +537,7 @@ describe('FileCommandLoader', () => {
path: extensionDir, path: extensionDir,
}, },
]), ]),
} as unknown as Config; } as Config;
const loader = new FileCommandLoader(mockConfig); const loader = new FileCommandLoader(mockConfig);
const commands = await loader.loadCommands(signal); const commands = await loader.loadCommands(signal);
@ -641,7 +644,7 @@ describe('FileCommandLoader', () => {
path: extensionDir2, path: extensionDir2,
}, },
]), ]),
} as unknown as Config; } as Config;
const loader = new FileCommandLoader(mockConfig); const loader = new FileCommandLoader(mockConfig);
const commands = await loader.loadCommands(signal); const commands = await loader.loadCommands(signal);
@ -677,7 +680,7 @@ describe('FileCommandLoader', () => {
path: extensionDir, path: extensionDir,
}, },
]), ]),
} as unknown as Config; } as Config;
const loader = new FileCommandLoader(mockConfig); const loader = new FileCommandLoader(mockConfig);
const commands = await loader.loadCommands(signal); const commands = await loader.loadCommands(signal);
expect(commands).toHaveLength(0); expect(commands).toHaveLength(0);
@ -709,7 +712,7 @@ describe('FileCommandLoader', () => {
getExtensions: vi.fn(() => [ getExtensions: vi.fn(() => [
{ name: 'a', version: '1.0.0', isActive: true, path: extensionDir }, { name: 'a', version: '1.0.0', isActive: true, path: extensionDir },
]), ]),
} as unknown as Config; } as Config;
const loader = new FileCommandLoader(mockConfig); const loader = new FileCommandLoader(mockConfig);
const commands = await loader.loadCommands(signal); const commands = await loader.loadCommands(signal);
@ -742,7 +745,7 @@ describe('FileCommandLoader', () => {
describe('Argument Handling Integration (via ShellProcessor)', () => { describe('Argument Handling Integration (via ShellProcessor)', () => {
it('correctly processes a command with {{args}}', async () => { it('correctly processes a command with {{args}}', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
'shorthand.toml': 'shorthand.toml':
@ -774,7 +777,7 @@ describe('FileCommandLoader', () => {
describe('Default Argument Processor Integration', () => { describe('Default Argument Processor Integration', () => {
it('correctly processes a command without {{args}}', async () => { it('correctly processes a command without {{args}}', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
'model_led.toml': 'model_led.toml':
@ -808,7 +811,7 @@ describe('FileCommandLoader', () => {
describe('Shell Processor Integration', () => { describe('Shell Processor Integration', () => {
it('instantiates ShellProcessor if {{args}} is present (even without shell trigger)', async () => { it('instantiates ShellProcessor if {{args}} is present (even without shell trigger)', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
'args_only.toml': `prompt = "Hello {{args}}"`, 'args_only.toml': `prompt = "Hello {{args}}"`,
@ -821,7 +824,7 @@ describe('FileCommandLoader', () => {
expect(ShellProcessor).toHaveBeenCalledWith('args_only'); expect(ShellProcessor).toHaveBeenCalledWith('args_only');
}); });
it('instantiates ShellProcessor if the trigger is present', async () => { it('instantiates ShellProcessor if the trigger is present', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
'shell.toml': `prompt = "Run this: ${SHELL_INJECTION_TRIGGER}echo hello}"`, 'shell.toml': `prompt = "Run this: ${SHELL_INJECTION_TRIGGER}echo hello}"`,
@ -835,7 +838,7 @@ describe('FileCommandLoader', () => {
}); });
it('does not instantiate ShellProcessor if no triggers ({{args}} or !{}) are present', async () => { it('does not instantiate ShellProcessor if no triggers ({{args}} or !{}) are present', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
'regular.toml': `prompt = "Just a regular prompt"`, 'regular.toml': `prompt = "Just a regular prompt"`,
@ -849,7 +852,7 @@ describe('FileCommandLoader', () => {
}); });
it('returns a "submit_prompt" action if shell processing succeeds', async () => { it('returns a "submit_prompt" action if shell processing succeeds', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
'shell.toml': `prompt = "Run !{echo 'hello'}"`, 'shell.toml': `prompt = "Run !{echo 'hello'}"`,
@ -876,7 +879,7 @@ describe('FileCommandLoader', () => {
}); });
it('returns a "confirm_shell_commands" action if shell processing requires it', async () => { it('returns a "confirm_shell_commands" action if shell processing requires it', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
const rawInvocation = '/shell rm -rf /'; const rawInvocation = '/shell rm -rf /';
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
@ -910,7 +913,7 @@ describe('FileCommandLoader', () => {
}); });
it('re-throws other errors from the processor', async () => { it('re-throws other errors from the processor', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
'shell.toml': `prompt = "Run !{something}"`, 'shell.toml': `prompt = "Run !{something}"`,
@ -935,7 +938,7 @@ describe('FileCommandLoader', () => {
).rejects.toThrow('Something else went wrong'); ).rejects.toThrow('Something else went wrong');
}); });
it('assembles the processor pipeline in the correct order (Shell -> Default)', async () => { it('assembles the processor pipeline in the correct order (Shell -> Default)', async () => {
const userCommandsDir = getUserCommandsDir(); const userCommandsDir = Storage.getUserCommandsDir();
mock({ mock({
[userCommandsDir]: { [userCommandsDir]: {
// This prompt uses !{} but NOT {{args}}, so both processors should be active. // This prompt uses !{} but NOT {{args}}, so both processors should be active.

View File

@@ -9,11 +9,7 @@ import path from 'path';
import toml from '@iarna/toml';
import { glob } from 'glob';
import { z } from 'zod';
-import {
-  Config,
-  getProjectCommandsDir,
-  getUserCommandsDir,
-} from '@google/gemini-cli-core';
+import { Config, Storage } from '@google/gemini-cli-core';
import { ICommandLoader } from './types.js';
import {
  CommandContext,
@@ -130,11 +126,13 @@ export class FileCommandLoader implements ICommandLoader {
  private getCommandDirectories(): CommandDirectory[] {
    const dirs: CommandDirectory[] = [];
+    const storage = this.config?.storage ?? new Storage(this.projectRoot);
    // 1. User commands
-    dirs.push({ path: getUserCommandsDir() });
+    dirs.push({ path: Storage.getUserCommandsDir() });
    // 2. Project commands (override user commands)
-    dirs.push({ path: getProjectCommandsDir(this.projectRoot) });
+    dirs.push({ path: storage.getProjectCommandsDir() });
    // 3. Extension commands (processed last to detect all conflicts)
    if (this.config) {

View File

@@ -742,7 +742,7 @@ const App = ({ config, settings, startupWarnings = [], version }: AppProps) => {
    }
  }, [config, config.getGeminiMdFileCount]);
-  const logger = useLogger();
+  const logger = useLogger(config.storage);
  useEffect(() => {
    const fetchUserMessages = async () => {

View File

@@ -67,11 +67,14 @@ describe('chatCommand', () => {
    mockContext = createMockCommandContext({
      services: {
        config: {
-          getProjectTempDir: () => '/tmp/gemini',
+          getProjectRoot: () => '/project/root',
          getGeminiClient: () =>
            ({
              getChat: mockGetChat,
            }) as unknown as GeminiClient,
+          storage: {
+            getProjectTempDir: () => '/project/root/.gemini/tmp/mockhash',
+          },
        },
        logger: {
          saveCheckpoint: mockSaveCheckpoint,

View File

@@ -28,7 +28,8 @@ const getSavedChatTags = async (
  context: CommandContext,
  mtSortDesc: boolean,
): Promise<ChatDetail[]> => {
-  const geminiDir = context.services.config?.getProjectTempDir();
+  const cfg = context.services.config;
+  const geminiDir = cfg?.storage?.getProjectTempDir();
  if (!geminiDir) {
    return [];
  }

View File

@@ -20,7 +20,14 @@ import * as core from '@google/gemini-cli-core';
vi.mock('child_process');
vi.mock('glob');
-vi.mock('@google/gemini-cli-core');
+vi.mock('@google/gemini-cli-core', async (importOriginal) => {
+  const original = await importOriginal<typeof core>();
+  return {
+    ...original,
+    getOauthClient: vi.fn(original.getOauthClient),
+    getIdeInstaller: vi.fn(original.getIdeInstaller),
+  };
+});
describe('ideCommand', () => {
  let mockContext: CommandContext;

View File

@@ -39,7 +39,10 @@ describe('restoreCommand', () => {
    mockConfig = {
      getCheckpointingEnabled: vi.fn().mockReturnValue(true),
-      getProjectTempDir: vi.fn().mockReturnValue(geminiTempDir),
+      storage: {
+        getProjectTempCheckpointsDir: vi.fn().mockReturnValue(checkpointsDir),
+        getProjectTempDir: vi.fn().mockReturnValue(geminiTempDir),
+      },
      getGeminiClient: vi.fn().mockReturnValue({
        setHistory: mockSetHistory,
      }),
@@ -77,7 +80,9 @@ describe('restoreCommand', () => {
  describe('action', () => {
    it('should return an error if temp dir is not found', async () => {
-      vi.mocked(mockConfig.getProjectTempDir).mockReturnValue('');
+      vi.mocked(
+        mockConfig.storage.getProjectTempCheckpointsDir,
+      ).mockReturnValue('');
      expect(
        await restoreCommand(mockConfig)?.action?.(mockContext, ''),
@@ -219,7 +224,7 @@ describe('restoreCommand', () => {
  describe('completion', () => {
    it('should return an empty array if temp dir is not found', async () => {
-      vi.mocked(mockConfig.getProjectTempDir).mockReturnValue('');
+      vi.mocked(mockConfig.storage.getProjectTempDir).mockReturnValue('');
      const command = restoreCommand(mockConfig);
      expect(await command?.completion?.(mockContext, '')).toEqual([]);

View File

@@ -22,9 +22,7 @@ async function restoreAction(
  const { config, git: gitService } = services;
  const { addItem, loadHistory } = ui;
-  const checkpointDir = config?.getProjectTempDir()
-    ? path.join(config.getProjectTempDir(), 'checkpoints')
-    : undefined;
+  const checkpointDir = config?.storage.getProjectTempCheckpointsDir();
  if (!checkpointDir) {
    return {
@@ -125,9 +123,7 @@ async function completion(
): Promise<string[]> {
  const { services } = context;
  const { config } = services;
-  const checkpointDir = config?.getProjectTempDir()
-    ? path.join(config.getProjectTempDir(), 'checkpoints')
-    : undefined;
+  const checkpointDir = config?.storage.getProjectTempCheckpointsDir();
  if (!checkpointDir) {
    return [];
  }

View File

@@ -81,7 +81,7 @@ export const InputPrompt: React.FC<InputPromptProps> = ({
  const [cursorPosition, setCursorPosition] = useState<[number, number]>([
    0, 0,
  ]);
-  const shellHistory = useShellHistory(config.getProjectRoot());
+  const shellHistory = useShellHistory(config.getProjectRoot(), config.storage);
  const historyData = shellHistory.history;
  const completion = useCommandCompletion(

View File

@@ -17,15 +17,10 @@ import {
const mockIsBinary = vi.hoisted(() => vi.fn());
const mockShellExecutionService = vi.hoisted(() => vi.fn());
-vi.mock('@google/gemini-cli-core', async (importOriginal) => {
-  const original =
-    await importOriginal<typeof import('@google/gemini-cli-core')>();
-  return {
-    ...original,
-    ShellExecutionService: { execute: mockShellExecutionService },
-    isBinary: mockIsBinary,
-  };
-});
+vi.mock('@google/gemini-cli-core', () => ({
+  ShellExecutionService: { execute: mockShellExecutionService },
+  isBinary: mockIsBinary,
+}));
vi.mock('fs');
vi.mock('os');
vi.mock('crypto');

View File

@@ -16,6 +16,7 @@ import {
  makeSlashCommandEvent,
  SlashCommandStatus,
  ToolConfirmationOutcome,
+  Storage,
} from '@google/gemini-cli-core';
import { useSessionStats } from '../contexts/SessionContext.js';
import { runExitCleanup } from '../../utils/cleanup.js';
@@ -82,11 +83,14 @@ export const useSlashCommandProcessor = (
    if (!config?.getProjectRoot()) {
      return;
    }
-    return new GitService(config.getProjectRoot());
+    return new GitService(config.getProjectRoot(), config.storage);
  }, [config]);
  const logger = useMemo(() => {
-    const l = new Logger(config?.getSessionId() || '');
+    const l = new Logger(
+      config?.getSessionId() || '',
+      config?.storage ?? new Storage(process.cwd()),
+    );
    // The logger's initialize is async, but we can create the instance
    // synchronously. Commands that use it will await its initialization.
    return l;

View File

@@ -105,13 +105,14 @@ export const useGeminiStream = (
    useStateAndRef<HistoryItemWithoutId | null>(null);
  const processedMemoryToolsRef = useRef<Set<string>>(new Set());
  const { startNewPrompt, getPromptCount } = useSessionStats();
-  const logger = useLogger();
+  const storage = config.storage;
+  const logger = useLogger(storage);
  const gitService = useMemo(() => {
    if (!config.getProjectRoot()) {
      return;
    }
-    return new GitService(config.getProjectRoot());
-  }, [config]);
+    return new GitService(config.getProjectRoot(), storage);
+  }, [config, storage]);
  const [toolCalls, scheduleToolCalls, markToolsAsSubmitted] =
    useReactToolScheduler(
@@ -877,9 +878,7 @@
      );
      if (restorableToolCalls.length > 0) {
-        const checkpointDir = config.getProjectTempDir()
-          ? path.join(config.getProjectTempDir(), 'checkpoints')
-          : undefined;
+        const checkpointDir = storage.getProjectTempCheckpointsDir();
        if (!checkpointDir) {
          return;
@@ -962,7 +961,15 @@
      }
    };
    saveRestorableToolCalls();
-  }, [toolCalls, config, onDebugMessage, gitService, history, geminiClient]);
+  }, [
+    toolCalls,
+    config,
+    onDebugMessage,
+    gitService,
+    history,
+    geminiClient,
+    storage,
+  ]);
  return {
    streamingState,

View File

@@ -5,16 +5,16 @@
 */
import { useState, useEffect } from 'react';
-import { sessionId, Logger } from '@google/gemini-cli-core';
+import { sessionId, Logger, Storage } from '@google/gemini-cli-core';
/**
 * Hook to manage the logger instance.
 */
-export const useLogger = () => {
+export const useLogger = (storage: Storage) => {
  const [logger, setLogger] = useState<Logger | null>(null);
  useEffect(() => {
-    const newLogger = new Logger(sessionId);
+    const newLogger = new Logger(sessionId, storage);
    /**
     * Start async initialization, no need to await. Using await slows down the
     * time from launch to see the gemini-cli prompt and it's better to not save
@@ -26,7 +26,7 @@ export const useLogger = () => {
        setLogger(newLogger);
      })
      .catch(() => {});
-  }, []);
+  }, [storage]);
  return logger;
};

View File

@@ -11,9 +11,41 @@ import * as path from 'path';
import * as os from 'os';
import * as crypto from 'crypto';
-vi.mock('fs/promises');
+vi.mock('fs/promises', () => ({
+  readFile: vi.fn(),
+  writeFile: vi.fn(),
+  mkdir: vi.fn(),
+}));
vi.mock('os');
vi.mock('crypto');
+vi.mock('fs', async (importOriginal) => {
+  const actualFs = await importOriginal<typeof import('fs')>();
+  return {
+    ...actualFs,
+    mkdirSync: vi.fn(),
+  };
+});
+vi.mock('@google/gemini-cli-core', () => {
+  class Storage {
+    getProjectTempDir(): string {
+      return path.join('/test/home/', '.gemini', 'tmp', 'mocked_hash');
+    }
+    getHistoryFilePath(): string {
+      return path.join(
+        '/test/home/',
+        '.gemini',
+        'tmp',
+        'mocked_hash',
+        'shell_history',
+      );
+    }
+  }
+  return {
+    isNodeError: (err: unknown): err is NodeJS.ErrnoException =>
+      typeof err === 'object' && err !== null && 'code' in err,
+    Storage,
+  };
+});
const MOCKED_PROJECT_ROOT = '/test/project';
const MOCKED_HOME_DIR = '/test/home';

View File

@@ -7,9 +7,8 @@
import { useState, useEffect, useCallback } from 'react';
import * as fs from 'fs/promises';
import * as path from 'path';
-import { isNodeError, getProjectTempDir } from '@google/gemini-cli-core';
+import { isNodeError, Storage } from '@google/gemini-cli-core';
-const HISTORY_FILE = 'shell_history';
const MAX_HISTORY_LENGTH = 100;
export interface UseShellHistoryReturn {
@@ -20,9 +19,12 @@
  resetHistoryPosition: () => void;
}
-async function getHistoryFilePath(projectRoot: string): Promise<string> {
-  const historyDir = getProjectTempDir(projectRoot);
-  return path.join(historyDir, HISTORY_FILE);
+async function getHistoryFilePath(
+  projectRoot: string,
+  configStorage?: Storage,
+): Promise<string> {
+  const storage = configStorage ?? new Storage(projectRoot);
+  return storage.getHistoryFilePath();
}
// Handle multiline commands
@@ -67,20 +69,23 @@ async function writeHistoryFile(
  }
}
-export function useShellHistory(projectRoot: string): UseShellHistoryReturn {
+export function useShellHistory(
+  projectRoot: string,
+  storage?: Storage,
+): UseShellHistoryReturn {
  const [history, setHistory] = useState<string[]>([]);
  const [historyIndex, setHistoryIndex] = useState(-1);
  const [historyFilePath, setHistoryFilePath] = useState<string | null>(null);
  useEffect(() => {
    async function loadHistory() {
-      const filePath = await getHistoryFilePath(projectRoot);
+      const filePath = await getHistoryFilePath(projectRoot, storage);
      setHistoryFilePath(filePath);
      const loadedHistory = await readHistoryFile(filePath);
      setHistory(loadedHistory.reverse()); // Newest first
    }
    loadHistory();
-  }, [projectRoot]);
+  }, [projectRoot, storage]);
  const addCommandToHistory = useCallback(
    (command: string) => {

View File

@@ -6,7 +6,7 @@
import { promises as fs } from 'fs';
import { join } from 'path';
-import { getProjectTempDir } from '@google/gemini-cli-core';
+import { Storage } from '@google/gemini-cli-core';
const cleanupFunctions: Array<(() => void) | (() => Promise<void>)> = [];
@@ -26,7 +26,8 @@
}
export async function cleanupCheckpoints() {
-  const tempDir = getProjectTempDir(process.cwd());
+  const storage = new Storage(process.cwd());
+  const tempDir = storage.getProjectTempDir();
  const checkpointsDir = join(tempDir, 'checkpoints');
  try {
    await fs.rm(checkpointsDir, { recursive: true, force: true });

View File

@@ -11,7 +11,7 @@ import {
  clearCachedCredentialFile,
  clearOauthClientCache,
} from './oauth2.js';
-import { getCachedGoogleAccount } from '../utils/user_account.js';
+import { UserAccountManager } from '../utils/userAccountManager.js';
import { OAuth2Client, Compute } from 'google-auth-library';
import * as fs from 'fs';
import * as path from 'path';
@@ -180,7 +180,10 @@
    });
    // Verify the getCachedGoogleAccount function works
-    expect(getCachedGoogleAccount()).toBe('test-google-account@gmail.com');
+    const userAccountManager = new UserAccountManager();
+    expect(userAccountManager.getCachedGoogleAccount()).toBe(
+      'test-google-account@gmail.com',
+    );
  });
  it('should perform login with user code', async () => {
@@ -533,14 +536,17 @@
      googleAccountPath,
      JSON.stringify(accountData),
    );
+    const userAccountManager = new UserAccountManager();
    expect(fs.existsSync(credsPath)).toBe(true);
    expect(fs.existsSync(googleAccountPath)).toBe(true);
-    expect(getCachedGoogleAccount()).toBe('test@example.com');
+    expect(userAccountManager.getCachedGoogleAccount()).toBe(
+      'test@example.com',
+    );
    await clearCachedCredentialFile();
    expect(fs.existsSync(credsPath)).toBe(false);
-    expect(getCachedGoogleAccount()).toBeNull();
+    expect(userAccountManager.getCachedGoogleAccount()).toBeNull();
    const updatedAccountData = JSON.parse(
      fs.readFileSync(googleAccountPath, 'utf-8'),
    );

View File

@@ -17,16 +17,14 @@ import * as net from 'net';
import open from 'open';
import path from 'node:path';
import { promises as fs } from 'node:fs';
-import * as os from 'os';
import { Config } from '../config/config.js';
import { getErrorMessage } from '../utils/errors.js';
-import {
-  cacheGoogleAccount,
-  getCachedGoogleAccount,
-  clearCachedGoogleAccount,
-} from '../utils/user_account.js';
+import { UserAccountManager } from '../utils/userAccountManager.js';
import { AuthType } from '../core/contentGenerator.js';
import readline from 'node:readline';
+import { Storage } from '../config/storage.js';
+const userAccountManager = new UserAccountManager();
// OAuth Client ID used to initiate OAuth2Client class.
const OAUTH_CLIENT_ID =
@@ -53,9 +51,6 @@ const SIGN_IN_SUCCESS_URL =
const SIGN_IN_FAILURE_URL =
  'https://developers.google.com/gemini-code-assist/auth_failure_gemini';
-const GEMINI_DIR = '.gemini';
-const CREDENTIAL_FILENAME = 'oauth_creds.json';
/**
 * An Authentication URL for updating the credentials of a Oauth2Client
 * as well as a promise that will resolve when the credentials have
@@ -99,7 +94,7 @@ async function initOauthClient(
  if (await loadCachedCredentials(client)) {
    // Found valid cached credentials.
    // Check if we need to retrieve Google Account ID or Email
-    if (!getCachedGoogleAccount()) {
+    if (!userAccountManager.getCachedGoogleAccount()) {
      try {
        await fetchAndCacheUserInfo(client);
      } catch {
@@ -352,7 +347,7 @@ export function getAvailablePort(): Promise<number> {
async function loadCachedCredentials(client: OAuth2Client): Promise<boolean> {
  const pathsToTry = [
-    getCachedCredentialPath(),
+    Storage.getOAuthCredsPath(),
    process.env['GOOGLE_APPLICATION_CREDENTIALS'],
  ].filter((p): p is string => !!p);
@@ -380,26 +375,22 @@ async function loadCachedCredentials(client: OAuth2Client): Promise<boolean> {
}
async function cacheCredentials(credentials: Credentials) {
-  const filePath = getCachedCredentialPath();
+  const filePath = Storage.getOAuthCredsPath();
  await fs.mkdir(path.dirname(filePath), { recursive: true });
  const credString = JSON.stringify(credentials, null, 2);
  await fs.writeFile(filePath, credString, { mode: 0o600 });
}
-function getCachedCredentialPath(): string {
-  return path.join(os.homedir(), GEMINI_DIR, CREDENTIAL_FILENAME);
-}
export function clearOauthClientCache() {
  oauthClientPromises.clear();
}
export async function clearCachedCredentialFile() {
  try {
-    await fs.rm(getCachedCredentialPath(), { force: true });
+    await fs.rm(Storage.getOAuthCredsPath(), { force: true });
    // Clear the Google Account ID cache when credentials are cleared
-    await clearCachedGoogleAccount();
+    await userAccountManager.clearCachedGoogleAccount();
    // Clear the in-memory OAuth client cache to force re-authentication
    clearOauthClientCache();
  } catch (e) {
@@ -433,9 +424,7 @@ async function fetchAndCacheUserInfo(client: OAuth2Client): Promise<void> {
    }
    const userInfo = await response.json();
-    if (userInfo.email) {
-      await cacheGoogleAccount(userInfo.email);
-    }
+    await userAccountManager.cacheGoogleAccount(userInfo.email);
  } catch (error) {
    console.error('Error retrieving user info:', error);
  }
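As a side note, the account-caching helpers previously exported from utils/user_account.js now live on the UserAccountManager class. A minimal sketch of the calls used above, with the email value purely illustrative:

const accounts = new UserAccountManager();
if (!accounts.getCachedGoogleAccount()) {
  // In oauth2.ts the email comes from the Google userinfo endpoint.
  await accounts.cacheGoogleAccount('user@example.com');
}
await accounts.clearCachedGoogleAccount(); // invoked from clearCachedCredentialFile()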

View File

@@ -22,16 +22,11 @@ import { ShellTool } from '../tools/shell.js';
import { WriteFileTool } from '../tools/write-file.js';
import { WebFetchTool } from '../tools/web-fetch.js';
import { ReadManyFilesTool } from '../tools/read-many-files.js';
-import {
-  MemoryTool,
-  setGeminiMdFilename,
-  GEMINI_CONFIG_DIR as GEMINI_DIR,
-} from '../tools/memoryTool.js';
+import { MemoryTool, setGeminiMdFilename } from '../tools/memoryTool.js';
import { WebSearchTool } from '../tools/web-search.js';
import { GeminiClient } from '../core/client.js';
import { FileDiscoveryService } from '../services/fileDiscoveryService.js';
import { GitService } from '../services/gitService.js';
-import { getProjectTempDir } from '../utils/paths.js';
import {
  initializeTelemetry,
  DEFAULT_TELEMETRY_TARGET,
@@ -57,6 +52,7 @@ import { IdeConnectionEvent, IdeConnectionType } from '../telemetry/types.js';
// Re-export OAuth config type
export type { MCPOAuthConfig };
import { WorkspaceContext } from '../utils/workspaceContext.js';
+import { Storage } from './storage.js';
export enum ApprovalMode {
  DEFAULT = 'default',
@@ -272,6 +268,7 @@ export class Config {
  private readonly shouldUseNodePtyShell: boolean;
  private readonly skipNextSpeakerCheck: boolean;
  private initialized: boolean = false;
+  readonly storage: Storage;
  constructor(params: ConfigParameters) {
    this.sessionId = params.sessionId;
@@ -340,6 +337,7 @@
    this.trustedFolder = params.trustedFolder;
    this.shouldUseNodePtyShell = params.shouldUseNodePtyShell ?? false;
    this.skipNextSpeakerCheck = params.skipNextSpeakerCheck ?? false;
+    this.storage = new Storage(this.targetDir);
    if (params.contextFileName) {
      setGeminiMdFilename(params.contextFileName);
@@ -591,14 +589,6 @@
    return this.geminiClient;
  }
-  getGeminiDir(): string {
-    return path.join(this.targetDir, GEMINI_DIR);
-  }
-  getProjectTempDir(): string {
-    return getProjectTempDir(this.getProjectRoot());
-  }
  getEnableRecursiveFileSearch(): boolean {
    return this.fileFiltering.enableRecursiveFileSearch;
  }
@@ -744,7 +734,7 @@
  async getGitService(): Promise<GitService> {
    if (!this.gitService) {
-      this.gitService = new GitService(this.targetDir);
+      this.gitService = new GitService(this.targetDir, this.storage);
      await this.gitService.initialize();
    }
    return this.gitService;

View File

@ -0,0 +1,55 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, vi } from 'vitest';
import * as os from 'os';
import * as path from 'node:path';
vi.mock('fs', async (importOriginal) => {
const actual = await importOriginal<typeof import('fs')>();
return {
...actual,
mkdirSync: vi.fn(),
};
});
import { Storage } from './storage.js';
describe('Storage getGlobalSettingsPath', () => {
it('returns path to ~/.gemini/settings.json', () => {
const expected = path.join(os.homedir(), '.gemini', 'settings.json');
expect(Storage.getGlobalSettingsPath()).toBe(expected);
});
});
describe('Storage additional helpers', () => {
const projectRoot = '/tmp/project';
const storage = new Storage(projectRoot);
it('getWorkspaceSettingsPath returns project/.gemini/settings.json', () => {
const expected = path.join(projectRoot, '.gemini', 'settings.json');
expect(storage.getWorkspaceSettingsPath()).toBe(expected);
});
it('getUserCommandsDir returns ~/.gemini/commands', () => {
const expected = path.join(os.homedir(), '.gemini', 'commands');
expect(Storage.getUserCommandsDir()).toBe(expected);
});
it('getProjectCommandsDir returns project/.gemini/commands', () => {
const expected = path.join(projectRoot, '.gemini', 'commands');
expect(storage.getProjectCommandsDir()).toBe(expected);
});
it('getMcpOAuthTokensPath returns ~/.gemini/mcp-oauth-tokens.json', () => {
const expected = path.join(
os.homedir(),
'.gemini',
'mcp-oauth-tokens.json',
);
expect(Storage.getMcpOAuthTokensPath()).toBe(expected);
});
});

View File

@ -0,0 +1,114 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import * as path from 'node:path';
import * as os from 'os';
import * as crypto from 'crypto';
import * as fs from 'fs';
export const GEMINI_DIR = '.gemini';
export const GOOGLE_ACCOUNTS_FILENAME = 'google_accounts.json';
const TMP_DIR_NAME = 'tmp';
export class Storage {
private readonly targetDir: string;
constructor(targetDir: string) {
this.targetDir = targetDir;
}
static getGlobalGeminiDir(): string {
const homeDir = os.homedir();
if (!homeDir) {
return path.join(os.tmpdir(), '.gemini');
}
return path.join(homeDir, GEMINI_DIR);
}
static getMcpOAuthTokensPath(): string {
return path.join(Storage.getGlobalGeminiDir(), 'mcp-oauth-tokens.json');
}
static getGlobalSettingsPath(): string {
return path.join(Storage.getGlobalGeminiDir(), 'settings.json');
}
static getInstallationIdPath(): string {
return path.join(Storage.getGlobalGeminiDir(), 'installation_id');
}
static getGoogleAccountsPath(): string {
return path.join(Storage.getGlobalGeminiDir(), GOOGLE_ACCOUNTS_FILENAME);
}
static getUserCommandsDir(): string {
return path.join(Storage.getGlobalGeminiDir(), 'commands');
}
static getGlobalMemoryFilePath(): string {
return path.join(Storage.getGlobalGeminiDir(), 'memory.md');
}
static getGlobalTempDir(): string {
return path.join(Storage.getGlobalGeminiDir(), TMP_DIR_NAME);
}
getGeminiDir(): string {
return path.join(this.targetDir, GEMINI_DIR);
}
getProjectTempDir(): string {
const hash = this.getFilePathHash(this.getProjectRoot());
const tempDir = Storage.getGlobalTempDir();
return path.join(tempDir, hash);
}
ensureProjectTempDirExists(): void {
fs.mkdirSync(this.getProjectTempDir(), { recursive: true });
}
static getOAuthCredsPath(): string {
return path.join(Storage.getGlobalGeminiDir(), 'oauth_creds.json');
}
getProjectRoot(): string {
return this.targetDir;
}
private getFilePathHash(filePath: string): string {
return crypto.createHash('sha256').update(filePath).digest('hex');
}
getHistoryDir(): string {
const hash = this.getFilePathHash(this.getProjectRoot());
const historyDir = path.join(Storage.getGlobalGeminiDir(), 'history');
return path.join(historyDir, hash);
}
getWorkspaceSettingsPath(): string {
return path.join(this.getGeminiDir(), 'settings.json');
}
getProjectCommandsDir(): string {
return path.join(this.getGeminiDir(), 'commands');
}
getProjectTempCheckpointsDir(): string {
return path.join(this.getProjectTempDir(), 'checkpoints');
}
getExtensionsDir(): string {
return path.join(this.getGeminiDir(), 'extensions');
}
getExtensionsConfigPath(): string {
return path.join(this.getExtensionsDir(), 'gemini-extension.json');
}
getHistoryFilePath(): string {
return path.join(this.getProjectTempDir(), 'shell_history');
}
}
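
As a rough usage sketch (not part of the commit), calling code can resolve every well-known path through the class above; the project root below is illustrative, and the import relies on the re-export added to the package index later in this diff.

import { Storage } from '@google/gemini-cli-core';

const storage = new Storage('/work/app'); // illustrative project root

// User-level paths are static and live under ~/.gemini.
Storage.getGlobalSettingsPath();      // ~/.gemini/settings.json
Storage.getUserCommandsDir();         // ~/.gemini/commands

// Project-level paths hang off <projectRoot>/.gemini or a hashed temp dir.
storage.getWorkspaceSettingsPath();   // /work/app/.gemini/settings.json
storage.getProjectTempDir();          // ~/.gemini/tmp/<sha256 of "/work/app">
storage.ensureProjectTempDirExists(); // mkdir -p on the hashed temp dir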

View File

@ -19,7 +19,7 @@ import { Config } from '../config/config.js';
import { UserTierId } from '../code_assist/types.js'; import { UserTierId } from '../code_assist/types.js';
import { LoggingContentGenerator } from './loggingContentGenerator.js'; import { LoggingContentGenerator } from './loggingContentGenerator.js';
import { getInstallationId } from '../utils/user_id.js'; import { InstallationManager } from '../utils/installationManager.js';
/** /**
* Interface abstracting the core functionalities for generating content and counting tokens. * Interface abstracting the core functionalities for generating content and counting tokens.
@ -136,7 +136,8 @@ export async function createContentGenerator(
) { ) {
let headers: Record<string, string> = { ...baseHeaders }; let headers: Record<string, string> = { ...baseHeaders };
if (gcConfig?.getUsageStatisticsEnabled()) { if (gcConfig?.getUsageStatisticsEnabled()) {
const installationId = getInstallationId(); const installationManager = new InstallationManager();
const installationId = installationManager.getInstallationId();
headers = { headers = {
...headers, ...headers,
'x-gemini-api-privileged-user-id': `${installationId}`, 'x-gemini-api-privileged-user-id': `${installationId}`,

View File

@ -20,6 +20,7 @@ import {
encodeTagName, encodeTagName,
decodeTagName, decodeTagName,
} from './logger.js'; } from './logger.js';
import { Storage } from '../config/storage.js';
import { promises as fs, existsSync } from 'node:fs'; import { promises as fs, existsSync } from 'node:fs';
import path from 'node:path'; import path from 'node:path';
import { Content } from '@google/genai'; import { Content } from '@google/genai';
@ -83,7 +84,7 @@ describe('Logger', () => {
await cleanupLogAndCheckpointFiles(); await cleanupLogAndCheckpointFiles();
// Ensure the directory exists for the test // Ensure the directory exists for the test
await fs.mkdir(TEST_GEMINI_DIR, { recursive: true }); await fs.mkdir(TEST_GEMINI_DIR, { recursive: true });
logger = new Logger(testSessionId); logger = new Logger(testSessionId, new Storage(process.cwd()));
await logger.initialize(); await logger.initialize();
}); });
@ -150,7 +151,10 @@ describe('Logger', () => {
TEST_LOG_FILE_PATH, TEST_LOG_FILE_PATH,
JSON.stringify(existingLogs, null, 2), JSON.stringify(existingLogs, null, 2),
); );
const newLogger = new Logger(currentSessionId); const newLogger = new Logger(
currentSessionId,
new Storage(process.cwd()),
);
await newLogger.initialize(); await newLogger.initialize();
expect(newLogger['messageId']).toBe(2); expect(newLogger['messageId']).toBe(2);
expect(newLogger['logs']).toEqual(existingLogs); expect(newLogger['logs']).toEqual(existingLogs);
@ -171,7 +175,7 @@ describe('Logger', () => {
TEST_LOG_FILE_PATH, TEST_LOG_FILE_PATH,
JSON.stringify(existingLogs, null, 2), JSON.stringify(existingLogs, null, 2),
); );
const newLogger = new Logger('a-new-session'); const newLogger = new Logger('a-new-session', new Storage(process.cwd()));
await newLogger.initialize(); await newLogger.initialize();
expect(newLogger['messageId']).toBe(0); expect(newLogger['messageId']).toBe(0);
newLogger.close(); newLogger.close();
@ -196,7 +200,7 @@ describe('Logger', () => {
.spyOn(console, 'debug') .spyOn(console, 'debug')
.mockImplementation(() => {}); .mockImplementation(() => {});
const newLogger = new Logger(testSessionId); const newLogger = new Logger(testSessionId, new Storage(process.cwd()));
await newLogger.initialize(); await newLogger.initialize();
expect(consoleDebugSpy).toHaveBeenCalledWith( expect(consoleDebugSpy).toHaveBeenCalledWith(
@ -224,7 +228,7 @@ describe('Logger', () => {
.spyOn(console, 'debug') .spyOn(console, 'debug')
.mockImplementation(() => {}); .mockImplementation(() => {});
const newLogger = new Logger(testSessionId); const newLogger = new Logger(testSessionId, new Storage(process.cwd()));
await newLogger.initialize(); await newLogger.initialize();
expect(consoleDebugSpy).toHaveBeenCalledWith( expect(consoleDebugSpy).toHaveBeenCalledWith(
@ -274,7 +278,10 @@ describe('Logger', () => {
}); });
it('should handle logger not initialized', async () => { it('should handle logger not initialized', async () => {
const uninitializedLogger = new Logger(testSessionId); const uninitializedLogger = new Logger(
testSessionId,
new Storage(process.cwd()),
);
uninitializedLogger.close(); // Ensure it's treated as uninitialized uninitializedLogger.close(); // Ensure it's treated as uninitialized
const consoleDebugSpy = vi const consoleDebugSpy = vi
.spyOn(console, 'debug') .spyOn(console, 'debug')
@ -289,10 +296,16 @@ describe('Logger', () => {
it('should simulate concurrent writes from different logger instances to the same file', async () => { it('should simulate concurrent writes from different logger instances to the same file', async () => {
const concurrentSessionId = 'concurrent-session'; const concurrentSessionId = 'concurrent-session';
const logger1 = new Logger(concurrentSessionId); const logger1 = new Logger(
concurrentSessionId,
new Storage(process.cwd()),
);
await logger1.initialize(); await logger1.initialize();
const logger2 = new Logger(concurrentSessionId); const logger2 = new Logger(
concurrentSessionId,
new Storage(process.cwd()),
);
await logger2.initialize(); await logger2.initialize();
expect(logger2['sessionId']).toEqual(logger1['sessionId']); expect(logger2['sessionId']).toEqual(logger1['sessionId']);
@ -345,14 +358,14 @@ describe('Logger', () => {
describe('getPreviousUserMessages', () => { describe('getPreviousUserMessages', () => {
it('should retrieve all user messages from logs, sorted newest first', async () => { it('should retrieve all user messages from logs, sorted newest first', async () => {
const loggerSort = new Logger('session-1'); const loggerSort = new Logger('session-1', new Storage(process.cwd()));
await loggerSort.initialize(); await loggerSort.initialize();
await loggerSort.logMessage(MessageSenderType.USER, 'S1M0_ts100000'); await loggerSort.logMessage(MessageSenderType.USER, 'S1M0_ts100000');
vi.advanceTimersByTime(1000); vi.advanceTimersByTime(1000);
await loggerSort.logMessage(MessageSenderType.USER, 'S1M1_ts101000'); await loggerSort.logMessage(MessageSenderType.USER, 'S1M1_ts101000');
vi.advanceTimersByTime(1000); vi.advanceTimersByTime(1000);
// Switch to a different session to log // Switch to a different session to log
const loggerSort2 = new Logger('session-2'); const loggerSort2 = new Logger('session-2', new Storage(process.cwd()));
await loggerSort2.initialize(); await loggerSort2.initialize();
await loggerSort2.logMessage(MessageSenderType.USER, 'S2M0_ts102000'); await loggerSort2.logMessage(MessageSenderType.USER, 'S2M0_ts102000');
vi.advanceTimersByTime(1000); vi.advanceTimersByTime(1000);
@ -365,7 +378,10 @@ describe('Logger', () => {
loggerSort.close(); loggerSort.close();
loggerSort2.close(); loggerSort2.close();
const finalLogger = new Logger('final-session'); const finalLogger = new Logger(
'final-session',
new Storage(process.cwd()),
);
await finalLogger.initialize(); await finalLogger.initialize();
const messages = await finalLogger.getPreviousUserMessages(); const messages = await finalLogger.getPreviousUserMessages();
@ -385,7 +401,10 @@ describe('Logger', () => {
}); });
it('should return empty array if logger not initialized', async () => { it('should return empty array if logger not initialized', async () => {
const uninitializedLogger = new Logger(testSessionId); const uninitializedLogger = new Logger(
testSessionId,
new Storage(process.cwd()),
);
uninitializedLogger.close(); uninitializedLogger.close();
const messages = await uninitializedLogger.getPreviousUserMessages(); const messages = await uninitializedLogger.getPreviousUserMessages();
expect(messages).toEqual([]); expect(messages).toEqual([]);
@ -428,7 +447,10 @@ describe('Logger', () => {
}); });
it('should not throw if logger is not initialized', async () => { it('should not throw if logger is not initialized', async () => {
const uninitializedLogger = new Logger(testSessionId); const uninitializedLogger = new Logger(
testSessionId,
new Storage(process.cwd()),
);
uninitializedLogger.close(); uninitializedLogger.close();
const consoleErrorSpy = vi const consoleErrorSpy = vi
.spyOn(console, 'error') .spyOn(console, 'error')
@ -525,7 +547,10 @@ describe('Logger', () => {
}); });
it('should return an empty array if logger is not initialized', async () => { it('should return an empty array if logger is not initialized', async () => {
const uninitializedLogger = new Logger(testSessionId); const uninitializedLogger = new Logger(
testSessionId,
new Storage(process.cwd()),
);
uninitializedLogger.close(); uninitializedLogger.close();
const consoleErrorSpy = vi const consoleErrorSpy = vi
.spyOn(console, 'error') .spyOn(console, 'error')
@ -613,7 +638,10 @@ describe('Logger', () => {
}); });
it('should return false if logger is not initialized', async () => { it('should return false if logger is not initialized', async () => {
const uninitializedLogger = new Logger(testSessionId); const uninitializedLogger = new Logger(
testSessionId,
new Storage(process.cwd()),
);
uninitializedLogger.close(); uninitializedLogger.close();
const consoleErrorSpy = vi const consoleErrorSpy = vi
.spyOn(console, 'error') .spyOn(console, 'error')
@ -651,7 +679,10 @@ describe('Logger', () => {
}); });
it('should throw an error if logger is not initialized', async () => { it('should throw an error if logger is not initialized', async () => {
const uninitializedLogger = new Logger(testSessionId); const uninitializedLogger = new Logger(
testSessionId,
new Storage(process.cwd()),
);
uninitializedLogger.close(); uninitializedLogger.close();
await expect(uninitializedLogger.checkpointExists(tag)).rejects.toThrow( await expect(uninitializedLogger.checkpointExists(tag)).rejects.toThrow(

View File

@ -7,7 +7,7 @@
import path from 'node:path'; import path from 'node:path';
import { promises as fs } from 'node:fs'; import { promises as fs } from 'node:fs';
import { Content } from '@google/genai'; import { Content } from '@google/genai';
import { getProjectTempDir } from '../utils/paths.js'; import { Storage } from '../config/storage.js';
const LOG_FILE_NAME = 'logs.json'; const LOG_FILE_NAME = 'logs.json';
@ -67,7 +67,10 @@ export class Logger {
private initialized = false; private initialized = false;
private logs: LogEntry[] = []; // In-memory cache, ideally reflects the last known state of the file private logs: LogEntry[] = []; // In-memory cache, ideally reflects the last known state of the file
constructor(sessionId: string) { constructor(
sessionId: string,
private readonly storage: Storage,
) {
this.sessionId = sessionId; this.sessionId = sessionId;
} }
@ -130,7 +133,7 @@ export class Logger {
return; return;
} }
this.geminiDir = getProjectTempDir(process.cwd()); this.geminiDir = this.storage.getProjectTempDir();
this.logFilePath = path.join(this.geminiDir, LOG_FILE_NAME); this.logFilePath = path.join(this.geminiDir, LOG_FILE_NAME);
try { try {

View File

@ -101,3 +101,4 @@ export { OAuthUtils } from './mcp/oauth-utils.js';
export * from './telemetry/index.js'; export * from './telemetry/index.js';
export { sessionId } from './utils/session.js'; export { sessionId } from './utils/session.js';
export * from './utils/browser.js'; export * from './utils/browser.js';
export { Storage } from './config/storage.js';

View File

@ -21,6 +21,7 @@ vi.mock('node:fs', () => ({
mkdir: vi.fn(), mkdir: vi.fn(),
unlink: vi.fn(), unlink: vi.fn(),
}, },
mkdirSync: vi.fn(),
})); }));
vi.mock('node:os', () => ({ vi.mock('node:os', () => ({

View File

@ -6,7 +6,7 @@
import { promises as fs } from 'node:fs'; import { promises as fs } from 'node:fs';
import * as path from 'node:path'; import * as path from 'node:path';
import * as os from 'node:os'; import { Storage } from '../config/storage.js';
import { getErrorMessage } from '../utils/errors.js'; import { getErrorMessage } from '../utils/errors.js';
/** /**
@ -36,17 +36,13 @@ export interface MCPOAuthCredentials {
* Class for managing MCP OAuth token storage and retrieval. * Class for managing MCP OAuth token storage and retrieval.
*/ */
export class MCPOAuthTokenStorage { export class MCPOAuthTokenStorage {
private static readonly TOKEN_FILE = 'mcp-oauth-tokens.json';
private static readonly CONFIG_DIR = '.gemini';
/** /**
* Get the path to the token storage file. * Get the path to the token storage file.
* *
* @returns The full path to the token storage file * @returns The full path to the token storage file
*/ */
private static getTokenFilePath(): string { private static getTokenFilePath(): string {
const homeDir = os.homedir(); return Storage.getMcpOAuthTokensPath();
return path.join(homeDir, this.CONFIG_DIR, this.TOKEN_FILE);
} }
/** /**

View File

@ -40,9 +40,11 @@ describe('ChatRecordingService', () => {
mockConfig = { mockConfig = {
getSessionId: vi.fn().mockReturnValue('test-session-id'), getSessionId: vi.fn().mockReturnValue('test-session-id'),
getProjectRoot: vi.fn().mockReturnValue('/test/project/root'), getProjectRoot: vi.fn().mockReturnValue('/test/project/root'),
getProjectTempDir: vi storage: {
.fn() getProjectTempDir: vi
.mockReturnValue('/test/project/root/.gemini/tmp'), .fn()
.mockReturnValue('/test/project/root/.gemini/tmp'),
},
getModel: vi.fn().mockReturnValue('gemini-pro'), getModel: vi.fn().mockReturnValue('gemini-pro'),
getDebugMode: vi.fn().mockReturnValue(false), getDebugMode: vi.fn().mockReturnValue(false),
} as unknown as Config; } as unknown as Config;

View File

@ -136,7 +136,10 @@ export class ChatRecordingService {
this.cachedLastConvData = null; this.cachedLastConvData = null;
} else { } else {
// Create new session // Create new session
const chatsDir = path.join(this.config.getProjectTempDir(), 'chats'); const chatsDir = path.join(
this.config.storage.getProjectTempDir(),
'chats',
);
fs.mkdirSync(chatsDir, { recursive: true }); fs.mkdirSync(chatsDir, { recursive: true });
const timestamp = new Date() const timestamp = new Date()
@ -422,7 +425,10 @@ export class ChatRecordingService {
*/ */
deleteSession(sessionId: string): void { deleteSession(sessionId: string): void {
try { try {
const chatsDir = path.join(this.config.getProjectTempDir(), 'chats'); const chatsDir = path.join(
this.config.storage.getProjectTempDir(),
'chats',
);
const sessionPath = path.join(chatsDir, `${sessionId}.json`); const sessionPath = path.join(chatsDir, `${sessionId}.json`);
fs.unlinkSync(sessionPath); fs.unlinkSync(sessionPath);
} catch (error) { } catch (error) {
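
A hedged illustration of where chat recordings now land, assuming the Storage behaviour defined earlier in this commit; the project root and hash are placeholders.

import * as path from 'node:path';
import { Storage } from '@google/gemini-cli-core';

// With Config exposing a Storage instance, the chats directory resolves to the
// centralized per-project temp dir instead of a hand-built path.
const storage = new Storage('/work/app'); // illustrative project root
const chatsDir = path.join(storage.getProjectTempDir(), 'chats');
// e.g. ~/.gemini/tmp/<sha256 of "/work/app">/chats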

View File

@ -6,6 +6,7 @@
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { GitService } from './gitService.js'; import { GitService } from './gitService.js';
import { Storage } from '../config/storage.js';
import * as path from 'path'; import * as path from 'path';
import * as fs from 'fs/promises'; import * as fs from 'fs/promises';
import * as os from 'os'; import * as os from 'os';
@ -55,6 +56,7 @@ describe('GitService', () => {
let projectRoot: string; let projectRoot: string;
let homedir: string; let homedir: string;
let hash: string; let hash: string;
let storage: Storage;
beforeEach(async () => { beforeEach(async () => {
testRootDir = await fs.mkdtemp(path.join(os.tmpdir(), 'git-service-test-')); testRootDir = await fs.mkdtemp(path.join(os.tmpdir(), 'git-service-test-'));
@ -100,6 +102,7 @@ describe('GitService', () => {
hoistedMockCommit.mockResolvedValue({ hoistedMockCommit.mockResolvedValue({
commit: 'initial', commit: 'initial',
}); });
storage = new Storage(projectRoot);
}); });
afterEach(async () => { afterEach(async () => {
@ -109,13 +112,13 @@ describe('GitService', () => {
describe('constructor', () => { describe('constructor', () => {
it('should successfully create an instance', () => { it('should successfully create an instance', () => {
expect(() => new GitService(projectRoot)).not.toThrow(); expect(() => new GitService(projectRoot, storage)).not.toThrow();
}); });
}); });
describe('verifyGitAvailability', () => { describe('verifyGitAvailability', () => {
it('should resolve true if git --version command succeeds', async () => { it('should resolve true if git --version command succeeds', async () => {
const service = new GitService(projectRoot); const service = new GitService(projectRoot, storage);
await expect(service.verifyGitAvailability()).resolves.toBe(true); await expect(service.verifyGitAvailability()).resolves.toBe(true);
}); });
@ -124,7 +127,7 @@ describe('GitService', () => {
callback(new Error('git not found')); callback(new Error('git not found'));
return {} as ChildProcess; return {} as ChildProcess;
}); });
const service = new GitService(projectRoot); const service = new GitService(projectRoot, storage);
await expect(service.verifyGitAvailability()).resolves.toBe(false); await expect(service.verifyGitAvailability()).resolves.toBe(false);
}); });
}); });
@ -135,14 +138,14 @@ describe('GitService', () => {
callback(new Error('git not found')); callback(new Error('git not found'));
return {} as ChildProcess; return {} as ChildProcess;
}); });
const service = new GitService(projectRoot); const service = new GitService(projectRoot, storage);
await expect(service.initialize()).rejects.toThrow( await expect(service.initialize()).rejects.toThrow(
'Checkpointing is enabled, but Git is not installed. Please install Git or disable checkpointing to continue.', 'Checkpointing is enabled, but Git is not installed. Please install Git or disable checkpointing to continue.',
); );
}); });
it('should call setupShadowGitRepository if Git is available', async () => { it('should call setupShadowGitRepository if Git is available', async () => {
const service = new GitService(projectRoot); const service = new GitService(projectRoot, storage);
const setupSpy = vi const setupSpy = vi
.spyOn(service, 'setupShadowGitRepository') .spyOn(service, 'setupShadowGitRepository')
.mockResolvedValue(undefined); .mockResolvedValue(undefined);
@ -162,14 +165,14 @@ describe('GitService', () => {
}); });
it('should create history and repository directories', async () => { it('should create history and repository directories', async () => {
const service = new GitService(projectRoot); const service = new GitService(projectRoot, storage);
await service.setupShadowGitRepository(); await service.setupShadowGitRepository();
const stats = await fs.stat(repoDir); const stats = await fs.stat(repoDir);
expect(stats.isDirectory()).toBe(true); expect(stats.isDirectory()).toBe(true);
}); });
it('should create a .gitconfig file with the correct content', async () => { it('should create a .gitconfig file with the correct content', async () => {
const service = new GitService(projectRoot); const service = new GitService(projectRoot, storage);
await service.setupShadowGitRepository(); await service.setupShadowGitRepository();
const expectedConfigContent = const expectedConfigContent =
@ -180,7 +183,7 @@ describe('GitService', () => {
it('should initialize git repo in historyDir if not already initialized', async () => { it('should initialize git repo in historyDir if not already initialized', async () => {
hoistedMockCheckIsRepo.mockResolvedValue(false); hoistedMockCheckIsRepo.mockResolvedValue(false);
const service = new GitService(projectRoot); const service = new GitService(projectRoot, storage);
await service.setupShadowGitRepository(); await service.setupShadowGitRepository();
expect(hoistedMockSimpleGit).toHaveBeenCalledWith(repoDir); expect(hoistedMockSimpleGit).toHaveBeenCalledWith(repoDir);
expect(hoistedMockInit).toHaveBeenCalled(); expect(hoistedMockInit).toHaveBeenCalled();
@ -188,7 +191,7 @@ describe('GitService', () => {
it('should not initialize git repo if already initialized', async () => { it('should not initialize git repo if already initialized', async () => {
hoistedMockCheckIsRepo.mockResolvedValue(true); hoistedMockCheckIsRepo.mockResolvedValue(true);
const service = new GitService(projectRoot); const service = new GitService(projectRoot, storage);
await service.setupShadowGitRepository(); await service.setupShadowGitRepository();
expect(hoistedMockInit).not.toHaveBeenCalled(); expect(hoistedMockInit).not.toHaveBeenCalled();
}); });
@ -198,7 +201,7 @@ describe('GitService', () => {
const visibleGitIgnorePath = path.join(projectRoot, '.gitignore'); const visibleGitIgnorePath = path.join(projectRoot, '.gitignore');
await fs.writeFile(visibleGitIgnorePath, gitignoreContent); await fs.writeFile(visibleGitIgnorePath, gitignoreContent);
const service = new GitService(projectRoot); const service = new GitService(projectRoot, storage);
await service.setupShadowGitRepository(); await service.setupShadowGitRepository();
const hiddenGitIgnorePath = path.join(repoDir, '.gitignore'); const hiddenGitIgnorePath = path.join(repoDir, '.gitignore');
@ -207,7 +210,7 @@ describe('GitService', () => {
}); });
it('should not create a .gitignore in shadow repo if project .gitignore does not exist', async () => { it('should not create a .gitignore in shadow repo if project .gitignore does not exist', async () => {
const service = new GitService(projectRoot); const service = new GitService(projectRoot, storage);
await service.setupShadowGitRepository(); await service.setupShadowGitRepository();
const hiddenGitIgnorePath = path.join(repoDir, '.gitignore'); const hiddenGitIgnorePath = path.join(repoDir, '.gitignore');
@ -221,7 +224,7 @@ describe('GitService', () => {
// Create a directory instead of a file to cause a read error // Create a directory instead of a file to cause a read error
await fs.mkdir(visibleGitIgnorePath); await fs.mkdir(visibleGitIgnorePath);
const service = new GitService(projectRoot); const service = new GitService(projectRoot, storage);
// EISDIR is the expected error code on Unix-like systems // EISDIR is the expected error code on Unix-like systems
await expect(service.setupShadowGitRepository()).rejects.toThrow( await expect(service.setupShadowGitRepository()).rejects.toThrow(
/EISDIR: illegal operation on a directory, read|EBUSY: resource busy or locked, read/, /EISDIR: illegal operation on a directory, read|EBUSY: resource busy or locked, read/,
@ -230,7 +233,7 @@ describe('GitService', () => {
it('should make an initial commit if no commits exist in history repo', async () => { it('should make an initial commit if no commits exist in history repo', async () => {
hoistedMockCheckIsRepo.mockResolvedValue(false); hoistedMockCheckIsRepo.mockResolvedValue(false);
const service = new GitService(projectRoot); const service = new GitService(projectRoot, storage);
await service.setupShadowGitRepository(); await service.setupShadowGitRepository();
expect(hoistedMockCommit).toHaveBeenCalledWith('Initial commit', { expect(hoistedMockCommit).toHaveBeenCalledWith('Initial commit', {
'--allow-empty': null, '--allow-empty': null,
@ -239,7 +242,7 @@ describe('GitService', () => {
it('should not make an initial commit if commits already exist', async () => { it('should not make an initial commit if commits already exist', async () => {
hoistedMockCheckIsRepo.mockResolvedValue(true); hoistedMockCheckIsRepo.mockResolvedValue(true);
const service = new GitService(projectRoot); const service = new GitService(projectRoot, storage);
await service.setupShadowGitRepository(); await service.setupShadowGitRepository();
expect(hoistedMockCommit).not.toHaveBeenCalled(); expect(hoistedMockCommit).not.toHaveBeenCalled();
}); });

View File

@ -6,22 +6,22 @@
import * as fs from 'fs/promises'; import * as fs from 'fs/promises';
import * as path from 'path'; import * as path from 'path';
import * as os from 'os';
import { isNodeError } from '../utils/errors.js'; import { isNodeError } from '../utils/errors.js';
import { exec } from 'node:child_process'; import { exec } from 'node:child_process';
import { simpleGit, SimpleGit, CheckRepoActions } from 'simple-git'; import { simpleGit, SimpleGit, CheckRepoActions } from 'simple-git';
import { getProjectHash, GEMINI_DIR } from '../utils/paths.js'; import { Storage } from '../config/storage.js';
export class GitService { export class GitService {
private projectRoot: string; private projectRoot: string;
private storage: Storage;
constructor(projectRoot: string) { constructor(projectRoot: string, storage: Storage) {
this.projectRoot = path.resolve(projectRoot); this.projectRoot = path.resolve(projectRoot);
this.storage = storage;
} }
private getHistoryDir(): string { private getHistoryDir(): string {
const hash = getProjectHash(this.projectRoot); return this.storage.getHistoryDir();
return path.join(os.homedir(), GEMINI_DIR, 'history', hash);
} }
async initialize(): Promise<void> { async initialize(): Promise<void> {
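
A minimal sketch of the new construction pattern, mirroring what Config.getGitService() does after this change; the project root and the relative import specifiers are assumptions for illustration.

import { GitService } from './gitService.js';
import { Storage } from '../config/storage.js';

const projectRoot = '/work/app'; // illustrative
const storage = new Storage(projectRoot);

// The shadow-repo location now comes from storage.getHistoryDir()
// (~/.gemini/history/<sha256 of projectRoot>) rather than being rebuilt
// from os.homedir() inside GitService.
const gitService = new GitService(projectRoot, storage);
await gitService.initialize(); // rejects if git is unavailable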

View File

@ -22,13 +22,13 @@ import {
TEST_ONLY, TEST_ONLY,
} from './clearcut-logger.js'; } from './clearcut-logger.js';
import { ConfigParameters } from '../../config/config.js'; import { ConfigParameters } from '../../config/config.js';
import * as userAccount from '../../utils/user_account.js';
import * as userId from '../../utils/user_id.js';
import { EventMetadataKey } from './event-metadata-key.js'; import { EventMetadataKey } from './event-metadata-key.js';
import { makeFakeConfig } from '../../test-utils/config.js'; import { makeFakeConfig } from '../../test-utils/config.js';
import { http, HttpResponse } from 'msw'; import { http, HttpResponse } from 'msw';
import { server } from '../../mocks/msw.js'; import { server } from '../../mocks/msw.js';
import { makeChatCompressionEvent } from '../types.js'; import { makeChatCompressionEvent } from '../types.js';
import { UserAccountManager } from '../../utils/userAccountManager.js';
import { InstallationManager } from '../../utils/installationManager.js';
interface CustomMatchers<R = unknown> { interface CustomMatchers<R = unknown> {
toHaveMetadataValue: ([key, value]: [EventMetadataKey, string]) => R; toHaveMetadataValue: ([key, value]: [EventMetadataKey, string]) => R;
@ -71,11 +71,11 @@ expect.extend({
}, },
}); });
vi.mock('../../utils/user_account'); vi.mock('../../utils/userAccountManager.js');
vi.mock('../../utils/user_id'); vi.mock('../../utils/installationManager.js');
const mockUserAccount = vi.mocked(userAccount); const mockUserAccount = vi.mocked(UserAccountManager.prototype);
const mockUserId = vi.mocked(userId); const mockInstallMgr = vi.mocked(InstallationManager.prototype);
// TODO(richieforeman): Consider moving this to test setup globally. // TODO(richieforeman): Consider moving this to test setup globally.
beforeAll(() => { beforeAll(() => {
@ -113,7 +113,6 @@ describe('ClearcutLogger', () => {
config = {} as Partial<ConfigParameters>, config = {} as Partial<ConfigParameters>,
lifetimeGoogleAccounts = 1, lifetimeGoogleAccounts = 1,
cachedGoogleAccount = 'test@google.com', cachedGoogleAccount = 'test@google.com',
installationId = 'test-installation-id',
} = {}) { } = {}) {
server.resetHandlers( server.resetHandlers(
http.post(CLEARCUT_URL, () => HttpResponse.text(EXAMPLE_RESPONSE)), http.post(CLEARCUT_URL, () => HttpResponse.text(EXAMPLE_RESPONSE)),
@ -131,7 +130,9 @@ describe('ClearcutLogger', () => {
mockUserAccount.getLifetimeGoogleAccounts.mockReturnValue( mockUserAccount.getLifetimeGoogleAccounts.mockReturnValue(
lifetimeGoogleAccounts, lifetimeGoogleAccounts,
); );
mockUserId.getInstallationId.mockReturnValue(installationId); mockInstallMgr.getInstallationId = vi
.fn()
.mockReturnValue('test-installation-id');
const logger = ClearcutLogger.getInstance(loggerConfig); const logger = ClearcutLogger.getInstance(loggerConfig);

View File

@ -22,12 +22,9 @@ import {
} from '../types.js'; } from '../types.js';
import { EventMetadataKey } from './event-metadata-key.js'; import { EventMetadataKey } from './event-metadata-key.js';
import { Config } from '../../config/config.js'; import { Config } from '../../config/config.js';
import { InstallationManager } from '../../utils/installationManager.js';
import { UserAccountManager } from '../../utils/userAccountManager.js';
import { safeJsonStringify } from '../../utils/safeJsonStringify.js'; import { safeJsonStringify } from '../../utils/safeJsonStringify.js';
import {
getCachedGoogleAccount,
getLifetimeGoogleAccounts,
} from '../../utils/user_account.js';
import { getInstallationId } from '../../utils/user_id.js';
import { FixedDeque } from 'mnemonist'; import { FixedDeque } from 'mnemonist';
import { GIT_COMMIT_INFO, CLI_VERSION } from '../../generated/git-commit.js'; import { GIT_COMMIT_INFO, CLI_VERSION } from '../../generated/git-commit.js';
import { DetectedIde, detectIde } from '../../ide/detect-ide.js'; import { DetectedIde, detectIde } from '../../ide/detect-ide.js';
@ -129,6 +126,8 @@ export class ClearcutLogger {
private config?: Config; private config?: Config;
private sessionData: EventValue[] = []; private sessionData: EventValue[] = [];
private promptId: string = ''; private promptId: string = '';
private readonly installationManager: InstallationManager;
private readonly userAccountManager: UserAccountManager;
/** /**
* Queue of pending events that need to be flushed to the server. New events * Queue of pending events that need to be flushed to the server. New events
@ -152,10 +151,12 @@ export class ClearcutLogger {
*/ */
private pendingFlush: boolean = false; private pendingFlush: boolean = false;
private constructor(config?: Config) { private constructor(config: Config) {
this.config = config; this.config = config;
this.events = new FixedDeque<LogEventEntry[]>(Array, MAX_EVENTS); this.events = new FixedDeque<LogEventEntry[]>(Array, MAX_EVENTS);
this.promptId = config?.getSessionId() ?? ''; this.promptId = config?.getSessionId() ?? '';
this.installationManager = new InstallationManager();
this.userAccountManager = new UserAccountManager();
} }
static getInstance(config?: Config): ClearcutLogger | undefined { static getInstance(config?: Config): ClearcutLogger | undefined {
@ -202,12 +203,14 @@ export class ClearcutLogger {
} }
createLogEvent(eventName: EventNames, data: EventValue[] = []): LogEvent { createLogEvent(eventName: EventNames, data: EventValue[] = []): LogEvent {
const email = getCachedGoogleAccount(); const email = this.userAccountManager.getCachedGoogleAccount();
if (eventName !== EventNames.START_SESSION) { if (eventName !== EventNames.START_SESSION) {
data.push(...this.sessionData); data.push(...this.sessionData);
} }
data = this.addDefaultFields(data); const totalAccounts = this.userAccountManager.getLifetimeGoogleAccounts();
data = this.addDefaultFields(data, totalAccounts);
const logEvent: LogEvent = { const logEvent: LogEvent = {
console_type: 'GEMINI_CLI', console_type: 'GEMINI_CLI',
@ -220,7 +223,7 @@ export class ClearcutLogger {
if (email) { if (email) {
logEvent.client_email = email; logEvent.client_email = email;
} else { } else {
logEvent.client_install_id = getInstallationId(); logEvent.client_install_id = this.installationManager.getInstallationId();
} }
return logEvent; return logEvent;
@ -679,8 +682,7 @@ export class ClearcutLogger {
* Adds default fields to data, and returns a new data array. This fields * Adds default fields to data, and returns a new data array. This fields
* should exist on all log events. * should exist on all log events.
*/ */
addDefaultFields(data: EventValue[]): EventValue[] { addDefaultFields(data: EventValue[], totalAccounts: number): EventValue[] {
const totalAccounts = getLifetimeGoogleAccounts();
const surface = determineSurface(); const surface = determineSurface();
const defaultLogMetadata: EventValue[] = [ const defaultLogMetadata: EventValue[] = [

View File

@ -17,6 +17,7 @@ vi.mock('fs', () => ({
}, },
statSync: vi.fn(), statSync: vi.fn(),
readdirSync: vi.fn(), readdirSync: vi.fn(),
mkdirSync: vi.fn(),
})); }));
import { LSTool } from './ls.js'; import { LSTool } from './ls.js';
import { Config } from '../config/config.js'; import { Config } from '../config/config.js';

View File

@ -18,7 +18,19 @@ import * as os from 'os';
import { ToolConfirmationOutcome } from './tools.js'; import { ToolConfirmationOutcome } from './tools.js';
// Mock dependencies // Mock dependencies
vi.mock('fs/promises'); vi.mock(import('fs/promises'), async (importOriginal) => {
const actual = await importOriginal();
return {
...actual,
mkdir: vi.fn(),
readFile: vi.fn(),
};
});
vi.mock('fs', () => ({
mkdirSync: vi.fn(),
}));
vi.mock('os'); vi.mock('os');
const MEMORY_SECTION_HEADER = '## Gemini Added Memories'; const MEMORY_SECTION_HEADER = '## Gemini Added Memories';

View File

@ -15,7 +15,7 @@ import {
import { FunctionDeclaration } from '@google/genai'; import { FunctionDeclaration } from '@google/genai';
import * as fs from 'fs/promises'; import * as fs from 'fs/promises';
import * as path from 'path'; import * as path from 'path';
import { homedir } from 'os'; import { Storage } from '../config/storage.js';
import * as Diff from 'diff'; import * as Diff from 'diff';
import { DEFAULT_DIFF_OPTIONS } from './diffOptions.js'; import { DEFAULT_DIFF_OPTIONS } from './diffOptions.js';
import { tildeifyPath } from '../utils/paths.js'; import { tildeifyPath } from '../utils/paths.js';
@ -96,7 +96,7 @@ interface SaveMemoryParams {
} }
function getGlobalMemoryFilePath(): string { function getGlobalMemoryFilePath(): string {
return path.join(homedir(), GEMINI_CONFIG_DIR, getCurrentGeminiMdFilename()); return path.join(Storage.getGlobalGeminiDir(), getCurrentGeminiMdFilename());
} }
/** /**

View File

@ -27,6 +27,7 @@ let mockSendMessageStream: any;
vi.mock('fs', () => ({ vi.mock('fs', () => ({
statSync: vi.fn(), statSync: vi.fn(),
mkdirSync: vi.fn(),
})); }));
vi.mock('../core/client.js', () => ({ vi.mock('../core/client.js', () => ({

View File

@ -0,0 +1,102 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { vi, describe, it, expect, beforeEach, afterEach, Mock } from 'vitest';
import { InstallationManager } from './installationManager.js';
import * as fs from 'node:fs';
import * as os from 'node:os';
import path from 'node:path';
import { randomUUID } from 'crypto';
vi.mock('node:fs', async (importOriginal) => {
const actual = await importOriginal<typeof import('node:fs')>();
return {
...actual,
readFileSync: vi.fn(actual.readFileSync),
existsSync: vi.fn(actual.existsSync),
} as typeof actual;
});
vi.mock('os', async (importOriginal) => {
const os = await importOriginal<typeof import('os')>();
return {
...os,
homedir: vi.fn(),
};
});
vi.mock('crypto', async (importOriginal) => {
const crypto = await importOriginal<typeof import('crypto')>();
return {
...crypto,
randomUUID: vi.fn(),
};
});
describe('InstallationManager', () => {
let tempHomeDir: string;
let installationManager: InstallationManager;
const installationIdFile = () =>
path.join(tempHomeDir, '.gemini', 'installation_id');
beforeEach(() => {
tempHomeDir = fs.mkdtempSync(
path.join(os.tmpdir(), 'gemini-cli-test-home-'),
);
(os.homedir as Mock).mockReturnValue(tempHomeDir);
installationManager = new InstallationManager();
});
afterEach(() => {
fs.rmSync(tempHomeDir, { recursive: true, force: true });
vi.clearAllMocks();
});
describe('getInstallationId', () => {
it('should create and write a new installation ID if one does not exist', () => {
const newId = 'new-uuid-123';
(randomUUID as Mock).mockReturnValue(newId);
const installationId = installationManager.getInstallationId();
expect(installationId).toBe(newId);
expect(fs.existsSync(installationIdFile())).toBe(true);
expect(fs.readFileSync(installationIdFile(), 'utf-8')).toBe(newId);
});
it('should read an existing installation ID from a file', () => {
const existingId = 'existing-uuid-123';
fs.mkdirSync(path.dirname(installationIdFile()), { recursive: true });
fs.writeFileSync(installationIdFile(), existingId);
const installationId = installationManager.getInstallationId();
expect(installationId).toBe(existingId);
});
it('should return the same ID on subsequent calls', () => {
const firstId = installationManager.getInstallationId();
const secondId = installationManager.getInstallationId();
expect(secondId).toBe(firstId);
});
it('should handle read errors and return a fallback ID', () => {
vi.mocked(fs.existsSync).mockReturnValueOnce(true);
const readSpy = vi.mocked(fs.readFileSync);
readSpy.mockImplementationOnce(() => {
throw new Error('Read error');
});
const consoleErrorSpy = vi
.spyOn(console, 'error')
.mockImplementation(() => {});
const id = installationManager.getInstallationId();
expect(id).toBe('123456789');
expect(consoleErrorSpy).toHaveBeenCalled();
});
});
});

View File

@ -0,0 +1,58 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import * as fs from 'fs';
import { randomUUID } from 'crypto';
import * as path from 'node:path';
import { Storage } from '../config/storage.js';
export class InstallationManager {
private getInstallationIdPath(): string {
return Storage.getInstallationIdPath();
}
private readInstallationIdFromFile(): string | null {
const installationIdFile = this.getInstallationIdPath();
if (fs.existsSync(installationIdFile)) {
const installationid = fs
.readFileSync(installationIdFile, 'utf-8')
.trim();
return installationid || null;
}
return null;
}
private writeInstallationIdToFile(installationId: string) {
const installationIdFile = this.getInstallationIdPath();
const dir = path.dirname(installationIdFile);
fs.mkdirSync(dir, { recursive: true });
fs.writeFileSync(installationIdFile, installationId, 'utf-8');
}
/**
* Retrieves the installation ID from a file, creating it if it doesn't exist.
* This ID is used for unique user installation tracking.
* @returns A UUID string for the user.
*/
getInstallationId(): string {
try {
let installationId = this.readInstallationIdFromFile();
if (!installationId) {
installationId = randomUUID();
this.writeInstallationIdToFile(installationId);
}
return installationId;
} catch (error) {
console.error(
'Error accessing installation ID file, generating ephemeral ID:',
error,
);
return '123456789';
}
}
}
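
A short usage sketch, assuming the relative import path used inside the core package; the logged value is whatever UUID gets persisted at ~/.gemini/installation_id.

import { InstallationManager } from './utils/installationManager.js';

const installationManager = new InstallationManager();

// First call generates a UUID and writes it to Storage.getInstallationIdPath();
// later calls read the same value back. On a read/write failure the method
// logs the error and returns the fixed fallback '123456789'.
const installationId = installationManager.getInstallationId();
console.log(installationId);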

View File

@ -10,8 +10,6 @@ import * as crypto from 'crypto';
export const GEMINI_DIR = '.gemini'; export const GEMINI_DIR = '.gemini';
export const GOOGLE_ACCOUNTS_FILENAME = 'google_accounts.json'; export const GOOGLE_ACCOUNTS_FILENAME = 'google_accounts.json';
const TMP_DIR_NAME = 'tmp';
const COMMANDS_DIR_NAME = 'commands';
/** /**
* Special characters that need to be escaped in file paths for shell compatibility. * Special characters that need to be escaped in file paths for shell compatibility.
@ -174,33 +172,6 @@ export function getProjectHash(projectRoot: string): string {
return crypto.createHash('sha256').update(projectRoot).digest('hex'); return crypto.createHash('sha256').update(projectRoot).digest('hex');
} }
/**
* Generates a unique temporary directory path for a project.
* @param projectRoot The absolute path to the project's root directory.
* @returns The path to the project's temporary directory.
*/
export function getProjectTempDir(projectRoot: string): string {
const hash = getProjectHash(projectRoot);
return path.join(os.homedir(), GEMINI_DIR, TMP_DIR_NAME, hash);
}
/**
* Returns the absolute path to the user-level commands directory.
* @returns The path to the user's commands directory.
*/
export function getUserCommandsDir(): string {
return path.join(os.homedir(), GEMINI_DIR, COMMANDS_DIR_NAME);
}
/**
* Returns the absolute path to the project-level commands directory.
* @param projectRoot The absolute path to the project's root directory.
* @returns The path to the project's commands directory.
*/
export function getProjectCommandsDir(projectRoot: string): string {
return path.join(projectRoot, GEMINI_DIR, COMMANDS_DIR_NAME);
}
/** /**
* Checks if a path is a subpath of another path. * Checks if a path is a subpath of another path.
* @param parentPath The parent path. * @param parentPath The parent path.

View File

@ -5,12 +5,7 @@
*/ */
import { vi, describe, it, expect, beforeEach, afterEach, Mock } from 'vitest'; import { vi, describe, it, expect, beforeEach, afterEach, Mock } from 'vitest';
import { import { UserAccountManager } from './userAccountManager.js';
cacheGoogleAccount,
getCachedGoogleAccount,
clearCachedGoogleAccount,
getLifetimeGoogleAccounts,
} from './user_account.js';
import * as fs from 'node:fs'; import * as fs from 'node:fs';
import * as os from 'node:os'; import * as os from 'node:os';
import path from 'node:path'; import path from 'node:path';
@ -23,16 +18,21 @@ vi.mock('os', async (importOriginal) => {
}; };
}); });
describe('user_account', () => { describe('UserAccountManager', () => {
let tempHomeDir: string; let tempHomeDir: string;
const accountsFile = () => let userAccountManager: UserAccountManager;
path.join(tempHomeDir, '.gemini', 'google_accounts.json'); let accountsFile: () => string;
beforeEach(() => { beforeEach(() => {
tempHomeDir = fs.mkdtempSync( tempHomeDir = fs.mkdtempSync(
path.join(os.tmpdir(), 'gemini-cli-test-home-'), path.join(os.tmpdir(), 'gemini-cli-test-home-'),
); );
(os.homedir as Mock).mockReturnValue(tempHomeDir); (os.homedir as Mock).mockReturnValue(tempHomeDir);
accountsFile = () =>
path.join(tempHomeDir, '.gemini', 'google_accounts.json');
userAccountManager = new UserAccountManager();
}); });
afterEach(() => { afterEach(() => {
fs.rmSync(tempHomeDir, { recursive: true, force: true }); fs.rmSync(tempHomeDir, { recursive: true, force: true });
vi.clearAllMocks(); vi.clearAllMocks();
@ -40,7 +40,7 @@ describe('user_account', () => {
describe('cacheGoogleAccount', () => { describe('cacheGoogleAccount', () => {
it('should create directory and write initial account file', async () => { it('should create directory and write initial account file', async () => {
await cacheGoogleAccount('test1@google.com'); await userAccountManager.cacheGoogleAccount('test1@google.com');
// Verify Google Account ID was cached // Verify Google Account ID was cached
expect(fs.existsSync(accountsFile())).toBe(true); expect(fs.existsSync(accountsFile())).toBe(true);
@ -60,7 +60,7 @@ describe('user_account', () => {
), ),
); );
await cacheGoogleAccount('test3@google.com'); await userAccountManager.cacheGoogleAccount('test3@google.com');
expect(fs.readFileSync(accountsFile(), 'utf-8')).toBe( expect(fs.readFileSync(accountsFile(), 'utf-8')).toBe(
JSON.stringify( JSON.stringify(
@ -84,8 +84,8 @@ describe('user_account', () => {
2, 2,
), ),
); );
await cacheGoogleAccount('test2@google.com'); await userAccountManager.cacheGoogleAccount('test2@google.com');
await cacheGoogleAccount('test1@google.com'); await userAccountManager.cacheGoogleAccount('test1@google.com');
expect(fs.readFileSync(accountsFile(), 'utf-8')).toBe( expect(fs.readFileSync(accountsFile(), 'utf-8')).toBe(
JSON.stringify( JSON.stringify(
@ -103,7 +103,7 @@ describe('user_account', () => {
.spyOn(console, 'log') .spyOn(console, 'log')
.mockImplementation(() => {}); .mockImplementation(() => {});
await cacheGoogleAccount('test1@google.com'); await userAccountManager.cacheGoogleAccount('test1@google.com');
expect(consoleLogSpy).toHaveBeenCalled(); expect(consoleLogSpy).toHaveBeenCalled();
expect(JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'))).toEqual({ expect(JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'))).toEqual({
@ -122,7 +122,7 @@ describe('user_account', () => {
.spyOn(console, 'log') .spyOn(console, 'log')
.mockImplementation(() => {}); .mockImplementation(() => {});
await cacheGoogleAccount('test2@google.com'); await userAccountManager.cacheGoogleAccount('test2@google.com');
expect(consoleLogSpy).toHaveBeenCalled(); expect(consoleLogSpy).toHaveBeenCalled();
expect(JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'))).toEqual({ expect(JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'))).toEqual({
@ -139,19 +139,19 @@ describe('user_account', () => {
accountsFile(), accountsFile(),
JSON.stringify({ active: 'active@google.com', old: [] }, null, 2), JSON.stringify({ active: 'active@google.com', old: [] }, null, 2),
); );
const account = getCachedGoogleAccount(); const account = userAccountManager.getCachedGoogleAccount();
expect(account).toBe('active@google.com'); expect(account).toBe('active@google.com');
}); });
it('should return null if file does not exist', () => { it('should return null if file does not exist', () => {
const account = getCachedGoogleAccount(); const account = userAccountManager.getCachedGoogleAccount();
expect(account).toBeNull(); expect(account).toBeNull();
}); });
it('should return null if file is empty', () => { it('should return null if file is empty', () => {
fs.mkdirSync(path.dirname(accountsFile()), { recursive: true }); fs.mkdirSync(path.dirname(accountsFile()), { recursive: true });
fs.writeFileSync(accountsFile(), ''); fs.writeFileSync(accountsFile(), '');
const account = getCachedGoogleAccount(); const account = userAccountManager.getCachedGoogleAccount();
expect(account).toBeNull(); expect(account).toBeNull();
}); });
@ -162,7 +162,7 @@ describe('user_account', () => {
.spyOn(console, 'log') .spyOn(console, 'log')
.mockImplementation(() => {}); .mockImplementation(() => {});
const account = getCachedGoogleAccount(); const account = userAccountManager.getCachedGoogleAccount();
expect(account).toBeNull(); expect(account).toBeNull();
expect(consoleLogSpy).toHaveBeenCalled(); expect(consoleLogSpy).toHaveBeenCalled();
@ -171,7 +171,7 @@ describe('user_account', () => {
it('should return null if active key is missing', () => { it('should return null if active key is missing', () => {
fs.mkdirSync(path.dirname(accountsFile()), { recursive: true }); fs.mkdirSync(path.dirname(accountsFile()), { recursive: true });
fs.writeFileSync(accountsFile(), JSON.stringify({ old: [] })); fs.writeFileSync(accountsFile(), JSON.stringify({ old: [] }));
const account = getCachedGoogleAccount(); const account = userAccountManager.getCachedGoogleAccount();
expect(account).toBeNull(); expect(account).toBeNull();
}); });
}); });
@ -188,7 +188,7 @@ describe('user_account', () => {
), ),
); );
await clearCachedGoogleAccount(); await userAccountManager.clearCachedGoogleAccount();
const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8')); const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'));
expect(stored.active).toBeNull(); expect(stored.active).toBeNull();
@ -198,7 +198,7 @@ describe('user_account', () => {
it('should handle empty file gracefully', async () => { it('should handle empty file gracefully', async () => {
fs.mkdirSync(path.dirname(accountsFile()), { recursive: true }); fs.mkdirSync(path.dirname(accountsFile()), { recursive: true });
fs.writeFileSync(accountsFile(), ''); fs.writeFileSync(accountsFile(), '');
await clearCachedGoogleAccount(); await userAccountManager.clearCachedGoogleAccount();
const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8')); const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'));
expect(stored.active).toBeNull(); expect(stored.active).toBeNull();
expect(stored.old).toEqual([]); expect(stored.old).toEqual([]);
@ -211,7 +211,7 @@ describe('user_account', () => {
.spyOn(console, 'log') .spyOn(console, 'log')
.mockImplementation(() => {}); .mockImplementation(() => {});
await clearCachedGoogleAccount(); await userAccountManager.clearCachedGoogleAccount();
expect(consoleLogSpy).toHaveBeenCalled(); expect(consoleLogSpy).toHaveBeenCalled();
const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8')); const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'));
@ -226,7 +226,7 @@ describe('user_account', () => {
JSON.stringify({ active: null, old: ['old1@google.com'] }, null, 2), JSON.stringify({ active: null, old: ['old1@google.com'] }, null, 2),
); );
await clearCachedGoogleAccount(); await userAccountManager.clearCachedGoogleAccount();
const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8')); const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'));
expect(stored.active).toBeNull(); expect(stored.active).toBeNull();
@ -247,7 +247,7 @@ describe('user_account', () => {
), ),
); );
await clearCachedGoogleAccount(); await userAccountManager.clearCachedGoogleAccount();
const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8')); const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'));
expect(stored.active).toBeNull(); expect(stored.active).toBeNull();
@ -257,24 +257,24 @@ describe('user_account', () => {
describe('getLifetimeGoogleAccounts', () => { describe('getLifetimeGoogleAccounts', () => {
it('should return 0 if the file does not exist', () => { it('should return 0 if the file does not exist', () => {
expect(getLifetimeGoogleAccounts()).toBe(0); expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(0);
}); });
it('should return 0 if the file is empty', () => { it('should return 0 if the file is empty', () => {
fs.mkdirSync(path.dirname(accountsFile()), { recursive: true }); fs.mkdirSync(path.dirname(accountsFile()), { recursive: true });
fs.writeFileSync(accountsFile(), ''); fs.writeFileSync(accountsFile(), '');
expect(getLifetimeGoogleAccounts()).toBe(0); expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(0);
}); });
it('should return 0 if the file is corrupted', () => { it('should return 0 if the file is corrupted', () => {
fs.mkdirSync(path.dirname(accountsFile()), { recursive: true }); fs.mkdirSync(path.dirname(accountsFile()), { recursive: true });
fs.writeFileSync(accountsFile(), 'invalid json'); fs.writeFileSync(accountsFile(), 'invalid json');
const consoleLogSpy = vi const consoleDebugSpy = vi
.spyOn(console, 'log') .spyOn(console, 'log')
.mockImplementation(() => {}); .mockImplementation(() => {});
expect(getLifetimeGoogleAccounts()).toBe(0); expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(0);
expect(consoleLogSpy).toHaveBeenCalled(); expect(consoleDebugSpy).toHaveBeenCalled();
}); });
it('should return 1 if there is only an active account', () => { it('should return 1 if there is only an active account', () => {
@ -283,7 +283,7 @@ describe('user_account', () => {
accountsFile(), accountsFile(),
JSON.stringify({ active: 'test1@google.com', old: [] }), JSON.stringify({ active: 'test1@google.com', old: [] }),
); );
expect(getLifetimeGoogleAccounts()).toBe(1); expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(1);
}); });
it('should correctly count old accounts when active is null', () => { it('should correctly count old accounts when active is null', () => {
@ -295,7 +295,7 @@ describe('user_account', () => {
old: ['test1@google.com', 'test2@google.com'], old: ['test1@google.com', 'test2@google.com'],
}), }),
); );
expect(getLifetimeGoogleAccounts()).toBe(2); expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(2);
}); });
it('should correctly count both active and old accounts', () => { it('should correctly count both active and old accounts', () => {
@ -307,7 +307,7 @@ describe('user_account', () => {
old: ['test1@google.com', 'test2@google.com'], old: ['test1@google.com', 'test2@google.com'],
}), }),
); );
expect(getLifetimeGoogleAccounts()).toBe(3); expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(3);
}); });
it('should handle valid JSON with incorrect schema by returning 0', () => { it('should handle valid JSON with incorrect schema by returning 0', () => {
@ -320,7 +320,7 @@ describe('user_account', () => {
.spyOn(console, 'log') .spyOn(console, 'log')
.mockImplementation(() => {}); .mockImplementation(() => {});
expect(getLifetimeGoogleAccounts()).toBe(0); expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(0);
expect(consoleLogSpy).toHaveBeenCalled(); expect(consoleLogSpy).toHaveBeenCalled();
}); });
@ -333,7 +333,7 @@ describe('user_account', () => {
old: ['test1@google.com', 'test2@google.com'], old: ['test1@google.com', 'test2@google.com'],
}), }),
); );
expect(getLifetimeGoogleAccounts()).toBe(2); expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(2);
}); });
}); });
}); });

View File

@ -0,0 +1,140 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import path from 'node:path';
import { promises as fsp, readFileSync } from 'node:fs';
import { Storage } from '../config/storage.js';
interface UserAccounts {
active: string | null;
old: string[];
}
export class UserAccountManager {
private getGoogleAccountsCachePath(): string {
return Storage.getGoogleAccountsPath();
}
/**
* Parses and validates the string content of an accounts file.
* @param content The raw string content from the file.
* @returns A valid UserAccounts object.
*/
private parseAndValidateAccounts(content: string): UserAccounts {
const defaultState = { active: null, old: [] };
if (!content.trim()) {
return defaultState;
}
const parsed = JSON.parse(content);
// Inlined validation logic
if (typeof parsed !== 'object' || parsed === null) {
console.log('Invalid accounts file schema, starting fresh.');
return defaultState;
}
const { active, old } = parsed as Partial<UserAccounts>;
const isValid =
(active === undefined || active === null || typeof active === 'string') &&
(old === undefined ||
(Array.isArray(old) && old.every((i) => typeof i === 'string')));
if (!isValid) {
console.log('Invalid accounts file schema, starting fresh.');
return defaultState;
}
return {
active: parsed.active ?? null,
old: parsed.old ?? [],
};
}
private readAccountsSync(filePath: string): UserAccounts {
const defaultState = { active: null, old: [] };
try {
const content = readFileSync(filePath, 'utf-8');
return this.parseAndValidateAccounts(content);
} catch (error) {
if (
error instanceof Error &&
'code' in error &&
error.code === 'ENOENT'
) {
return defaultState;
}
console.log('Error during sync read of accounts, starting fresh.', error);
return defaultState;
}
}
private async readAccounts(filePath: string): Promise<UserAccounts> {
const defaultState = { active: null, old: [] };
try {
const content = await fsp.readFile(filePath, 'utf-8');
return this.parseAndValidateAccounts(content);
} catch (error) {
if (
error instanceof Error &&
'code' in error &&
error.code === 'ENOENT'
) {
return defaultState;
}
console.log('Could not parse accounts file, starting fresh.', error);
return defaultState;
}
}
async cacheGoogleAccount(email: string): Promise<void> {
const filePath = this.getGoogleAccountsCachePath();
await fsp.mkdir(path.dirname(filePath), { recursive: true });
const accounts = await this.readAccounts(filePath);
if (accounts.active && accounts.active !== email) {
if (!accounts.old.includes(accounts.active)) {
accounts.old.push(accounts.active);
}
}
// If the new email was in the old list, remove it
accounts.old = accounts.old.filter((oldEmail) => oldEmail !== email);
accounts.active = email;
await fsp.writeFile(filePath, JSON.stringify(accounts, null, 2), 'utf-8');
}
getCachedGoogleAccount(): string | null {
const filePath = this.getGoogleAccountsCachePath();
const accounts = this.readAccountsSync(filePath);
return accounts.active;
}
getLifetimeGoogleAccounts(): number {
const filePath = this.getGoogleAccountsCachePath();
const accounts = this.readAccountsSync(filePath);
const allAccounts = new Set(accounts.old);
if (accounts.active) {
allAccounts.add(accounts.active);
}
return allAccounts.size;
}
async clearCachedGoogleAccount(): Promise<void> {
const filePath = this.getGoogleAccountsCachePath();
const accounts = await this.readAccounts(filePath);
if (accounts.active) {
if (!accounts.old.includes(accounts.active)) {
accounts.old.push(accounts.active);
}
accounts.active = null;
}
await fsp.writeFile(filePath, JSON.stringify(accounts, null, 2), 'utf-8');
}
}
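For reference, a minimal caller-side sketch of the new class. The import path and the construction site are assumptions; this hunk does not show where the instance is created.

// Illustrative usage only — not part of userAccountManager.ts.
import { UserAccountManager } from './userAccountManager.js';

const accounts = new UserAccountManager();
await accounts.cacheGoogleAccount('user@example.com'); // persists to Storage.getGoogleAccountsPath()
const active = accounts.getCachedGoogleAccount(); // 'user@example.com'
const lifetime = accounts.getLifetimeGoogleAccounts(); // distinct accounts ever cached
await accounts.clearCachedGoogleAccount(); // demotes the active account into the history list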

View File

@ -1,131 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import path from 'node:path';
import { promises as fsp, readFileSync } from 'node:fs';
import * as os from 'os';
import { GEMINI_DIR, GOOGLE_ACCOUNTS_FILENAME } from './paths.js';
interface UserAccounts {
active: string | null;
old: string[];
}
function getGoogleAccountsCachePath(): string {
return path.join(os.homedir(), GEMINI_DIR, GOOGLE_ACCOUNTS_FILENAME);
}
/**
* Parses and validates the string content of an accounts file.
* @param content The raw string content from the file.
* @returns A valid UserAccounts object.
*/
function parseAndValidateAccounts(content: string): UserAccounts {
const defaultState = { active: null, old: [] };
if (!content.trim()) {
return defaultState;
}
const parsed = JSON.parse(content);
// Inlined validation logic
if (typeof parsed !== 'object' || parsed === null) {
console.log('Invalid accounts file schema, starting fresh.');
return defaultState;
}
const { active, old } = parsed as Partial<UserAccounts>;
const isValid =
(active === undefined || active === null || typeof active === 'string') &&
(old === undefined ||
(Array.isArray(old) && old.every((i) => typeof i === 'string')));
if (!isValid) {
console.log('Invalid accounts file schema, starting fresh.');
return defaultState;
}
return {
active: parsed.active ?? null,
old: parsed.old ?? [],
};
}
function readAccountsSync(filePath: string): UserAccounts {
const defaultState = { active: null, old: [] };
try {
const content = readFileSync(filePath, 'utf-8');
return parseAndValidateAccounts(content);
} catch (error) {
if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {
return defaultState;
}
console.log('Error during sync read of accounts, starting fresh.', error);
return defaultState;
}
}
async function readAccounts(filePath: string): Promise<UserAccounts> {
const defaultState = { active: null, old: [] };
try {
const content = await fsp.readFile(filePath, 'utf-8');
return parseAndValidateAccounts(content);
} catch (error) {
if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {
return defaultState;
}
console.log('Could not parse accounts file, starting fresh.', error);
return defaultState;
}
}
export async function cacheGoogleAccount(email: string): Promise<void> {
const filePath = getGoogleAccountsCachePath();
await fsp.mkdir(path.dirname(filePath), { recursive: true });
const accounts = await readAccounts(filePath);
if (accounts.active && accounts.active !== email) {
if (!accounts.old.includes(accounts.active)) {
accounts.old.push(accounts.active);
}
}
// If the new email was in the old list, remove it
accounts.old = accounts.old.filter((oldEmail) => oldEmail !== email);
accounts.active = email;
await fsp.writeFile(filePath, JSON.stringify(accounts, null, 2), 'utf-8');
}
export function getCachedGoogleAccount(): string | null {
const filePath = getGoogleAccountsCachePath();
const accounts = readAccountsSync(filePath);
return accounts.active;
}
export function getLifetimeGoogleAccounts(): number {
const filePath = getGoogleAccountsCachePath();
const accounts = readAccountsSync(filePath);
const allAccounts = new Set(accounts.old);
if (accounts.active) {
allAccounts.add(accounts.active);
}
return allAccounts.size;
}
export async function clearCachedGoogleAccount(): Promise<void> {
const filePath = getGoogleAccountsCachePath();
const accounts = await readAccounts(filePath);
if (accounts.active) {
if (!accounts.old.includes(accounts.active)) {
accounts.old.push(accounts.active);
}
accounts.active = null;
}
await fsp.writeFile(filePath, JSON.stringify(accounts, null, 2), 'utf-8');
}

View File

@ -1,24 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect } from 'vitest';
import { getInstallationId } from './user_id.js';
describe('user_id', () => {
describe('getInstallationId', () => {
it('should return a valid UUID format string', () => {
const installationId = getInstallationId();
expect(installationId).toBeDefined();
expect(typeof installationId).toBe('string');
expect(installationId.length).toBeGreaterThan(0);
// Should return the same ID on subsequent calls (consistent)
const secondCall = getInstallationId();
expect(secondCall).toBe(installationId);
});
});
});

View File

@ -1,58 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import * as os from 'os';
import * as fs from 'fs';
import * as path from 'path';
import { randomUUID } from 'crypto';
import { GEMINI_DIR } from './paths.js';
const homeDir = os.homedir() ?? '';
const geminiDir = path.join(homeDir, GEMINI_DIR);
const installationIdFile = path.join(geminiDir, 'installation_id');
function ensureGeminiDirExists() {
if (!fs.existsSync(geminiDir)) {
fs.mkdirSync(geminiDir, { recursive: true });
}
}
function readInstallationIdFromFile(): string | null {
if (fs.existsSync(installationIdFile)) {
const installationid = fs.readFileSync(installationIdFile, 'utf-8').trim();
return installationid || null;
}
return null;
}
function writeInstallationIdToFile(installationId: string) {
fs.writeFileSync(installationIdFile, installationId, 'utf-8');
}
/**
* Retrieves the installation ID from a file, creating it if it doesn't exist.
* This ID is used for unique user installation tracking.
* @returns A UUID string for the user.
*/
export function getInstallationId(): string {
try {
ensureGeminiDirExists();
let installationId = readInstallationIdFromFile();
if (!installationId) {
installationId = randomUUID();
writeInstallationIdToFile(installationId);
}
return installationId;
} catch (error) {
console.error(
'Error accessing installation ID file, generating ephemeral ID:',
error,
);
return '123456789';
}
}
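The user_id module above is deleted by this commit, and its replacement is not part of this hunk. If the same logic were rebuilt against the centralized Storage paths, a minimal sketch could look like the following — the Storage.getInstallationIdPath() accessor is an assumption made for illustration, not something this diff confirms:

// Hypothetical sketch only; assumes Storage exposes a static installation-id path accessor.
import * as fs from 'node:fs';
import * as path from 'node:path';
import { randomUUID } from 'node:crypto';
import { Storage } from '../config/storage.js';

export function getInstallationId(): string {
  const idFile = Storage.getInstallationIdPath(); // assumed accessor, mirroring getGoogleAccountsPath()
  try {
    fs.mkdirSync(path.dirname(idFile), { recursive: true });
    if (fs.existsSync(idFile)) {
      const existing = fs.readFileSync(idFile, 'utf-8').trim();
      if (existing) return existing;
    }
    const fresh = randomUUID();
    fs.writeFileSync(idFile, fresh, 'utf-8');
    return fresh;
  } catch (error) {
    console.error('Error accessing installation ID file, generating ephemeral ID:', error);
    return randomUUID(); // ephemeral fallback, never persisted
  }
}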