Refac: Centralize storage file management (#4078)
Co-authored-by: Taylor Mullen <ntaylormullen@google.com>
This commit is contained in:
parent
1049d38845
commit
21c6480b65
|
@ -11,9 +11,27 @@ import { loadExtensions } from '../../config/extension.js';
|
|||
import { createTransport } from '@google/gemini-cli-core';
|
||||
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
|
||||
|
||||
vi.mock('../../config/settings.js');
|
||||
vi.mock('../../config/extension.js');
|
||||
vi.mock('@google/gemini-cli-core');
|
||||
vi.mock('../../config/settings.js', () => ({
|
||||
loadSettings: vi.fn(),
|
||||
}));
|
||||
vi.mock('../../config/extension.js', () => ({
|
||||
loadExtensions: vi.fn(),
|
||||
}));
|
||||
vi.mock('@google/gemini-cli-core', () => ({
|
||||
createTransport: vi.fn(),
|
||||
MCPServerStatus: {
|
||||
CONNECTED: 'CONNECTED',
|
||||
CONNECTING: 'CONNECTING',
|
||||
DISCONNECTED: 'DISCONNECTED',
|
||||
},
|
||||
Storage: vi.fn().mockImplementation((_cwd: string) => ({
|
||||
getGlobalSettingsPath: () => '/tmp/gemini/settings.json',
|
||||
getWorkspaceSettingsPath: () => '/tmp/gemini/workspace-settings.json',
|
||||
getProjectTempDir: () => '/test/home/.gemini/tmp/mocked_hash',
|
||||
})),
|
||||
GEMINI_CONFIG_DIR: '.gemini',
|
||||
getErrorMessage: (e: unknown) => (e instanceof Error ? e.message : String(e)),
|
||||
}));
|
||||
vi.mock('@modelcontextprotocol/sdk/client/index.js');
|
||||
|
||||
const mockedLoadSettings = loadSettings as vi.Mock;
|
||||
|
|
|
@ -6,7 +6,6 @@
|
|||
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import * as os from 'os';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import { ShellTool, EditTool, WriteFileTool } from '@google/gemini-cli-core';
|
||||
import { loadCliConfig, parseArguments } from './config.js';
|
||||
|
@ -19,6 +18,38 @@ vi.mock('./trustedFolders.js', () => ({
|
|||
isWorkspaceTrusted: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('fs', async (importOriginal) => {
|
||||
const actualFs = await importOriginal<typeof import('fs')>();
|
||||
const pathMod = await import('path');
|
||||
const mockHome = '/mock/home/user';
|
||||
const MOCK_CWD1 = process.cwd();
|
||||
const MOCK_CWD2 = pathMod.resolve(pathMod.sep, 'home', 'user', 'project');
|
||||
|
||||
const mockPaths = new Set([
|
||||
MOCK_CWD1,
|
||||
MOCK_CWD2,
|
||||
pathMod.resolve(pathMod.sep, 'cli', 'path1'),
|
||||
pathMod.resolve(pathMod.sep, 'settings', 'path1'),
|
||||
pathMod.join(mockHome, 'settings', 'path2'),
|
||||
pathMod.join(MOCK_CWD2, 'cli', 'path2'),
|
||||
pathMod.join(MOCK_CWD2, 'settings', 'path3'),
|
||||
]);
|
||||
|
||||
return {
|
||||
...actualFs,
|
||||
mkdirSync: vi.fn(),
|
||||
writeFileSync: vi.fn(),
|
||||
existsSync: vi.fn((p) => mockPaths.has(p.toString())),
|
||||
statSync: vi.fn((p) => {
|
||||
if (mockPaths.has(p.toString())) {
|
||||
return { isDirectory: () => true } as unknown as import('fs').Stats;
|
||||
}
|
||||
return (actualFs as typeof import('fs')).statSync(p as unknown as string);
|
||||
}),
|
||||
realpathSync: vi.fn((p) => p),
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('os', async (importOriginal) => {
|
||||
const actualOs = await importOriginal<typeof os>();
|
||||
return {
|
||||
|
@ -1441,35 +1472,6 @@ describe('loadCliConfig folderTrust', () => {
|
|||
});
|
||||
});
|
||||
|
||||
vi.mock('fs', async () => {
|
||||
const actualFs = await vi.importActual<typeof fs>('fs');
|
||||
const MOCK_CWD1 = process.cwd();
|
||||
const MOCK_CWD2 = path.resolve(path.sep, 'home', 'user', 'project');
|
||||
|
||||
const mockPaths = new Set([
|
||||
MOCK_CWD1,
|
||||
MOCK_CWD2,
|
||||
path.resolve(path.sep, 'cli', 'path1'),
|
||||
path.resolve(path.sep, 'settings', 'path1'),
|
||||
path.join(os.homedir(), 'settings', 'path2'),
|
||||
path.join(MOCK_CWD2, 'cli', 'path2'),
|
||||
path.join(MOCK_CWD2, 'settings', 'path3'),
|
||||
]);
|
||||
|
||||
return {
|
||||
...actualFs,
|
||||
existsSync: vi.fn((p) => mockPaths.has(p.toString())),
|
||||
statSync: vi.fn((p) => {
|
||||
if (mockPaths.has(p.toString())) {
|
||||
return { isDirectory: () => true };
|
||||
}
|
||||
// Fallback for other paths if needed, though the test should be specific.
|
||||
return actualFs.statSync(p);
|
||||
}),
|
||||
realpathSync: vi.fn((p) => p),
|
||||
};
|
||||
});
|
||||
|
||||
describe('loadCliConfig with includeDirectories', () => {
|
||||
const originalArgv = process.argv;
|
||||
|
||||
|
|
|
@ -10,7 +10,6 @@ import * as os from 'os';
|
|||
import * as path from 'path';
|
||||
import {
|
||||
EXTENSIONS_CONFIG_FILENAME,
|
||||
EXTENSIONS_DIRECTORY_NAME,
|
||||
annotateActiveExtensions,
|
||||
loadExtensions,
|
||||
} from './extension.js';
|
||||
|
@ -23,6 +22,8 @@ vi.mock('os', async (importOriginal) => {
|
|||
};
|
||||
});
|
||||
|
||||
const EXTENSIONS_DIRECTORY_NAME = path.join('.gemini', 'extensions');
|
||||
|
||||
describe('loadExtensions', () => {
|
||||
let tempWorkspaceDir: string;
|
||||
let tempHomeDir: string;
|
||||
|
|
|
@ -4,12 +4,15 @@
|
|||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { MCPServerConfig, GeminiCLIExtension } from '@google/gemini-cli-core';
|
||||
import {
|
||||
MCPServerConfig,
|
||||
GeminiCLIExtension,
|
||||
Storage,
|
||||
} from '@google/gemini-cli-core';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as os from 'os';
|
||||
|
||||
export const EXTENSIONS_DIRECTORY_NAME = path.join('.gemini', 'extensions');
|
||||
export const EXTENSIONS_CONFIG_FILENAME = 'gemini-extension.json';
|
||||
|
||||
export interface Extension {
|
||||
|
@ -43,7 +46,8 @@ export function loadExtensions(workspaceDir: string): Extension[] {
|
|||
}
|
||||
|
||||
function loadExtensionsFromDir(dir: string): Extension[] {
|
||||
const extensionsDir = path.join(dir, EXTENSIONS_DIRECTORY_NAME);
|
||||
const storage = new Storage(dir);
|
||||
const extensionsDir = storage.getExtensionsDir();
|
||||
if (!fs.existsSync(extensionsDir)) {
|
||||
return [];
|
||||
}
|
||||
|
|
|
@ -11,6 +11,7 @@ import * as dotenv from 'dotenv';
|
|||
import {
|
||||
GEMINI_CONFIG_DIR as GEMINI_DIR,
|
||||
getErrorMessage,
|
||||
Storage,
|
||||
} from '@google/gemini-cli-core';
|
||||
import stripJsonComments from 'strip-json-comments';
|
||||
import { DefaultLight } from '../ui/themes/default-light.js';
|
||||
|
@ -20,8 +21,9 @@ import { Settings, MemoryImportFormat } from './settingsSchema.js';
|
|||
export type { Settings, MemoryImportFormat };
|
||||
|
||||
export const SETTINGS_DIRECTORY_NAME = '.gemini';
|
||||
export const USER_SETTINGS_DIR = path.join(homedir(), SETTINGS_DIRECTORY_NAME);
|
||||
export const USER_SETTINGS_PATH = path.join(USER_SETTINGS_DIR, 'settings.json');
|
||||
|
||||
export const USER_SETTINGS_PATH = Storage.getGlobalSettingsPath();
|
||||
export const USER_SETTINGS_DIR = path.dirname(USER_SETTINGS_PATH);
|
||||
export const DEFAULT_EXCLUDED_ENV_VARS = ['DEBUG', 'DEBUG_MODE'];
|
||||
|
||||
export function getSystemSettingsPath(): string {
|
||||
|
@ -37,10 +39,6 @@ export function getSystemSettingsPath(): string {
|
|||
}
|
||||
}
|
||||
|
||||
export function getWorkspaceSettingsPath(workspaceDir: string): string {
|
||||
return path.join(workspaceDir, SETTINGS_DIRECTORY_NAME, 'settings.json');
|
||||
}
|
||||
|
||||
export type { DnsResolutionOrder } from './settingsSchema.js';
|
||||
|
||||
export enum SettingScope {
|
||||
|
@ -269,7 +267,9 @@ export function loadEnvironment(settings?: Settings): void {
|
|||
// If no settings provided, try to load workspace settings for exclusions
|
||||
let resolvedSettings = settings;
|
||||
if (!resolvedSettings) {
|
||||
const workspaceSettingsPath = getWorkspaceSettingsPath(process.cwd());
|
||||
const workspaceSettingsPath = new Storage(
|
||||
process.cwd(),
|
||||
).getWorkspaceSettingsPath();
|
||||
try {
|
||||
if (fs.existsSync(workspaceSettingsPath)) {
|
||||
const workspaceContent = fs.readFileSync(
|
||||
|
@ -342,7 +342,9 @@ export function loadSettings(workspaceDir: string): LoadedSettings {
|
|||
// We expect homedir to always exist and be resolvable.
|
||||
const realHomeDir = fs.realpathSync(resolvedHomeDir);
|
||||
|
||||
const workspaceSettingsPath = getWorkspaceSettingsPath(workspaceDir);
|
||||
const workspaceSettingsPath = new Storage(
|
||||
workspaceDir,
|
||||
).getWorkspaceSettingsPath();
|
||||
|
||||
// Load system settings
|
||||
try {
|
||||
|
|
|
@ -5,11 +5,7 @@
|
|||
*/
|
||||
|
||||
import * as path from 'node:path';
|
||||
import {
|
||||
Config,
|
||||
getProjectCommandsDir,
|
||||
getUserCommandsDir,
|
||||
} from '@google/gemini-cli-core';
|
||||
import { Config, Storage } from '@google/gemini-cli-core';
|
||||
import mock from 'mock-fs';
|
||||
import { FileCommandLoader } from './FileCommandLoader.js';
|
||||
import { assert, vi } from 'vitest';
|
||||
|
@ -57,6 +53,7 @@ vi.mock('@google/gemini-cli-core', async (importOriginal) => {
|
|||
await importOriginal<typeof import('@google/gemini-cli-core')>();
|
||||
return {
|
||||
...original,
|
||||
Storage: original.Storage,
|
||||
isCommandAllowed: vi.fn(),
|
||||
ShellExecutionService: {
|
||||
execute: vi.fn(),
|
||||
|
@ -86,7 +83,7 @@ describe('FileCommandLoader', () => {
|
|||
});
|
||||
|
||||
it('loads a single command from a file', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
'test.toml': 'prompt = "This is a test prompt"',
|
||||
|
@ -127,7 +124,7 @@ describe('FileCommandLoader', () => {
|
|||
itif(process.platform !== 'win32')(
|
||||
'loads commands from a symlinked directory',
|
||||
async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
const realCommandsDir = '/real/commands';
|
||||
mock({
|
||||
[realCommandsDir]: {
|
||||
|
@ -152,7 +149,7 @@ describe('FileCommandLoader', () => {
|
|||
itif(process.platform !== 'win32')(
|
||||
'loads commands from a symlinked subdirectory',
|
||||
async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
const realNamespacedDir = '/real/namespaced-commands';
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
|
@ -176,7 +173,7 @@ describe('FileCommandLoader', () => {
|
|||
);
|
||||
|
||||
it('loads multiple commands', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
'test1.toml': 'prompt = "Prompt 1"',
|
||||
|
@ -191,7 +188,7 @@ describe('FileCommandLoader', () => {
|
|||
});
|
||||
|
||||
it('creates deeply nested namespaces correctly', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
|
@ -205,7 +202,7 @@ describe('FileCommandLoader', () => {
|
|||
const mockConfig = {
|
||||
getProjectRoot: vi.fn(() => '/path/to/project'),
|
||||
getExtensions: vi.fn(() => []),
|
||||
} as unknown as Config;
|
||||
} as Config;
|
||||
const loader = new FileCommandLoader(mockConfig);
|
||||
const commands = await loader.loadCommands(signal);
|
||||
expect(commands).toHaveLength(1);
|
||||
|
@ -213,7 +210,7 @@ describe('FileCommandLoader', () => {
|
|||
});
|
||||
|
||||
it('creates namespaces from nested directories', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
git: {
|
||||
|
@ -232,8 +229,10 @@ describe('FileCommandLoader', () => {
|
|||
});
|
||||
|
||||
it('returns both user and project commands in order', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const projectCommandsDir = getProjectCommandsDir(process.cwd());
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
const projectCommandsDir = new Storage(
|
||||
process.cwd(),
|
||||
).getProjectCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
'test.toml': 'prompt = "User prompt"',
|
||||
|
@ -246,7 +245,7 @@ describe('FileCommandLoader', () => {
|
|||
const mockConfig = {
|
||||
getProjectRoot: vi.fn(() => process.cwd()),
|
||||
getExtensions: vi.fn(() => []),
|
||||
} as unknown as Config;
|
||||
} as Config;
|
||||
const loader = new FileCommandLoader(mockConfig);
|
||||
const commands = await loader.loadCommands(signal);
|
||||
|
||||
|
@ -284,7 +283,7 @@ describe('FileCommandLoader', () => {
|
|||
});
|
||||
|
||||
it('ignores files with TOML syntax errors', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
'invalid.toml': 'this is not valid toml',
|
||||
|
@ -300,7 +299,7 @@ describe('FileCommandLoader', () => {
|
|||
});
|
||||
|
||||
it('ignores files that are semantically invalid (missing prompt)', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
'no_prompt.toml': 'description = "This file is missing a prompt"',
|
||||
|
@ -316,7 +315,7 @@ describe('FileCommandLoader', () => {
|
|||
});
|
||||
|
||||
it('handles filename edge cases correctly', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
'test.v1.toml': 'prompt = "Test prompt"',
|
||||
|
@ -338,7 +337,7 @@ describe('FileCommandLoader', () => {
|
|||
});
|
||||
|
||||
it('uses a default description if not provided', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
'test.toml': 'prompt = "Test prompt"',
|
||||
|
@ -353,7 +352,7 @@ describe('FileCommandLoader', () => {
|
|||
});
|
||||
|
||||
it('uses the provided description', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
'test.toml': 'prompt = "Test prompt"\ndescription = "My test command"',
|
||||
|
@ -368,7 +367,7 @@ describe('FileCommandLoader', () => {
|
|||
});
|
||||
|
||||
it('should sanitize colons in filenames to prevent namespace conflicts', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
'legacy:command.toml': 'prompt = "This is a legacy command"',
|
||||
|
@ -388,7 +387,7 @@ describe('FileCommandLoader', () => {
|
|||
|
||||
describe('Processor Instantiation Logic', () => {
|
||||
it('instantiates only DefaultArgumentProcessor if no {{args}} or !{} are present', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
'simple.toml': `prompt = "Just a regular prompt"`,
|
||||
|
@ -403,7 +402,7 @@ describe('FileCommandLoader', () => {
|
|||
});
|
||||
|
||||
it('instantiates only ShellProcessor if {{args}} is present (but not !{})', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
'args.toml': `prompt = "Prompt with {{args}}"`,
|
||||
|
@ -418,7 +417,7 @@ describe('FileCommandLoader', () => {
|
|||
});
|
||||
|
||||
it('instantiates ShellProcessor and DefaultArgumentProcessor if !{} is present (but not {{args}})', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
'shell.toml': `prompt = "Prompt with !{cmd}"`,
|
||||
|
@ -433,7 +432,7 @@ describe('FileCommandLoader', () => {
|
|||
});
|
||||
|
||||
it('instantiates only ShellProcessor if both {{args}} and !{} are present', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
'both.toml': `prompt = "Prompt with {{args}} and !{cmd}"`,
|
||||
|
@ -450,8 +449,10 @@ describe('FileCommandLoader', () => {
|
|||
|
||||
describe('Extension Command Loading', () => {
|
||||
it('loads commands from active extensions', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const projectCommandsDir = getProjectCommandsDir(process.cwd());
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
const projectCommandsDir = new Storage(
|
||||
process.cwd(),
|
||||
).getProjectCommandsDir();
|
||||
const extensionDir = path.join(
|
||||
process.cwd(),
|
||||
'.gemini/extensions/test-ext',
|
||||
|
@ -485,7 +486,7 @@ describe('FileCommandLoader', () => {
|
|||
path: extensionDir,
|
||||
},
|
||||
]),
|
||||
} as unknown as Config;
|
||||
} as Config;
|
||||
const loader = new FileCommandLoader(mockConfig);
|
||||
const commands = await loader.loadCommands(signal);
|
||||
|
||||
|
@ -499,8 +500,10 @@ describe('FileCommandLoader', () => {
|
|||
});
|
||||
|
||||
it('extension commands have extensionName metadata for conflict resolution', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const projectCommandsDir = getProjectCommandsDir(process.cwd());
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
const projectCommandsDir = new Storage(
|
||||
process.cwd(),
|
||||
).getProjectCommandsDir();
|
||||
const extensionDir = path.join(
|
||||
process.cwd(),
|
||||
'.gemini/extensions/test-ext',
|
||||
|
@ -534,7 +537,7 @@ describe('FileCommandLoader', () => {
|
|||
path: extensionDir,
|
||||
},
|
||||
]),
|
||||
} as unknown as Config;
|
||||
} as Config;
|
||||
const loader = new FileCommandLoader(mockConfig);
|
||||
const commands = await loader.loadCommands(signal);
|
||||
|
||||
|
@ -641,7 +644,7 @@ describe('FileCommandLoader', () => {
|
|||
path: extensionDir2,
|
||||
},
|
||||
]),
|
||||
} as unknown as Config;
|
||||
} as Config;
|
||||
const loader = new FileCommandLoader(mockConfig);
|
||||
const commands = await loader.loadCommands(signal);
|
||||
|
||||
|
@ -677,7 +680,7 @@ describe('FileCommandLoader', () => {
|
|||
path: extensionDir,
|
||||
},
|
||||
]),
|
||||
} as unknown as Config;
|
||||
} as Config;
|
||||
const loader = new FileCommandLoader(mockConfig);
|
||||
const commands = await loader.loadCommands(signal);
|
||||
expect(commands).toHaveLength(0);
|
||||
|
@ -709,7 +712,7 @@ describe('FileCommandLoader', () => {
|
|||
getExtensions: vi.fn(() => [
|
||||
{ name: 'a', version: '1.0.0', isActive: true, path: extensionDir },
|
||||
]),
|
||||
} as unknown as Config;
|
||||
} as Config;
|
||||
const loader = new FileCommandLoader(mockConfig);
|
||||
const commands = await loader.loadCommands(signal);
|
||||
|
||||
|
@ -742,7 +745,7 @@ describe('FileCommandLoader', () => {
|
|||
|
||||
describe('Argument Handling Integration (via ShellProcessor)', () => {
|
||||
it('correctly processes a command with {{args}}', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
'shorthand.toml':
|
||||
|
@ -774,7 +777,7 @@ describe('FileCommandLoader', () => {
|
|||
|
||||
describe('Default Argument Processor Integration', () => {
|
||||
it('correctly processes a command without {{args}}', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
'model_led.toml':
|
||||
|
@ -808,7 +811,7 @@ describe('FileCommandLoader', () => {
|
|||
|
||||
describe('Shell Processor Integration', () => {
|
||||
it('instantiates ShellProcessor if {{args}} is present (even without shell trigger)', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
'args_only.toml': `prompt = "Hello {{args}}"`,
|
||||
|
@ -821,7 +824,7 @@ describe('FileCommandLoader', () => {
|
|||
expect(ShellProcessor).toHaveBeenCalledWith('args_only');
|
||||
});
|
||||
it('instantiates ShellProcessor if the trigger is present', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
'shell.toml': `prompt = "Run this: ${SHELL_INJECTION_TRIGGER}echo hello}"`,
|
||||
|
@ -835,7 +838,7 @@ describe('FileCommandLoader', () => {
|
|||
});
|
||||
|
||||
it('does not instantiate ShellProcessor if no triggers ({{args}} or !{}) are present', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
'regular.toml': `prompt = "Just a regular prompt"`,
|
||||
|
@ -849,7 +852,7 @@ describe('FileCommandLoader', () => {
|
|||
});
|
||||
|
||||
it('returns a "submit_prompt" action if shell processing succeeds', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
'shell.toml': `prompt = "Run !{echo 'hello'}"`,
|
||||
|
@ -876,7 +879,7 @@ describe('FileCommandLoader', () => {
|
|||
});
|
||||
|
||||
it('returns a "confirm_shell_commands" action if shell processing requires it', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
const rawInvocation = '/shell rm -rf /';
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
|
@ -910,7 +913,7 @@ describe('FileCommandLoader', () => {
|
|||
});
|
||||
|
||||
it('re-throws other errors from the processor', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
'shell.toml': `prompt = "Run !{something}"`,
|
||||
|
@ -935,7 +938,7 @@ describe('FileCommandLoader', () => {
|
|||
).rejects.toThrow('Something else went wrong');
|
||||
});
|
||||
it('assembles the processor pipeline in the correct order (Shell -> Default)', async () => {
|
||||
const userCommandsDir = getUserCommandsDir();
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
mock({
|
||||
[userCommandsDir]: {
|
||||
// This prompt uses !{} but NOT {{args}}, so both processors should be active.
|
||||
|
|
|
@ -9,11 +9,7 @@ import path from 'path';
|
|||
import toml from '@iarna/toml';
|
||||
import { glob } from 'glob';
|
||||
import { z } from 'zod';
|
||||
import {
|
||||
Config,
|
||||
getProjectCommandsDir,
|
||||
getUserCommandsDir,
|
||||
} from '@google/gemini-cli-core';
|
||||
import { Config, Storage } from '@google/gemini-cli-core';
|
||||
import { ICommandLoader } from './types.js';
|
||||
import {
|
||||
CommandContext,
|
||||
|
@ -130,11 +126,13 @@ export class FileCommandLoader implements ICommandLoader {
|
|||
private getCommandDirectories(): CommandDirectory[] {
|
||||
const dirs: CommandDirectory[] = [];
|
||||
|
||||
const storage = this.config?.storage ?? new Storage(this.projectRoot);
|
||||
|
||||
// 1. User commands
|
||||
dirs.push({ path: getUserCommandsDir() });
|
||||
dirs.push({ path: Storage.getUserCommandsDir() });
|
||||
|
||||
// 2. Project commands (override user commands)
|
||||
dirs.push({ path: getProjectCommandsDir(this.projectRoot) });
|
||||
dirs.push({ path: storage.getProjectCommandsDir() });
|
||||
|
||||
// 3. Extension commands (processed last to detect all conflicts)
|
||||
if (this.config) {
|
||||
|
|
|
@ -742,7 +742,7 @@ const App = ({ config, settings, startupWarnings = [], version }: AppProps) => {
|
|||
}
|
||||
}, [config, config.getGeminiMdFileCount]);
|
||||
|
||||
const logger = useLogger();
|
||||
const logger = useLogger(config.storage);
|
||||
|
||||
useEffect(() => {
|
||||
const fetchUserMessages = async () => {
|
||||
|
|
|
@ -67,11 +67,14 @@ describe('chatCommand', () => {
|
|||
mockContext = createMockCommandContext({
|
||||
services: {
|
||||
config: {
|
||||
getProjectTempDir: () => '/tmp/gemini',
|
||||
getProjectRoot: () => '/project/root',
|
||||
getGeminiClient: () =>
|
||||
({
|
||||
getChat: mockGetChat,
|
||||
}) as unknown as GeminiClient,
|
||||
storage: {
|
||||
getProjectTempDir: () => '/project/root/.gemini/tmp/mockhash',
|
||||
},
|
||||
},
|
||||
logger: {
|
||||
saveCheckpoint: mockSaveCheckpoint,
|
||||
|
|
|
@ -28,7 +28,8 @@ const getSavedChatTags = async (
|
|||
context: CommandContext,
|
||||
mtSortDesc: boolean,
|
||||
): Promise<ChatDetail[]> => {
|
||||
const geminiDir = context.services.config?.getProjectTempDir();
|
||||
const cfg = context.services.config;
|
||||
const geminiDir = cfg?.storage?.getProjectTempDir();
|
||||
if (!geminiDir) {
|
||||
return [];
|
||||
}
|
||||
|
|
|
@ -20,7 +20,14 @@ import * as core from '@google/gemini-cli-core';
|
|||
|
||||
vi.mock('child_process');
|
||||
vi.mock('glob');
|
||||
vi.mock('@google/gemini-cli-core');
|
||||
vi.mock('@google/gemini-cli-core', async (importOriginal) => {
|
||||
const original = await importOriginal<typeof core>();
|
||||
return {
|
||||
...original,
|
||||
getOauthClient: vi.fn(original.getOauthClient),
|
||||
getIdeInstaller: vi.fn(original.getIdeInstaller),
|
||||
};
|
||||
});
|
||||
|
||||
describe('ideCommand', () => {
|
||||
let mockContext: CommandContext;
|
||||
|
|
|
@ -39,7 +39,10 @@ describe('restoreCommand', () => {
|
|||
|
||||
mockConfig = {
|
||||
getCheckpointingEnabled: vi.fn().mockReturnValue(true),
|
||||
getProjectTempDir: vi.fn().mockReturnValue(geminiTempDir),
|
||||
storage: {
|
||||
getProjectTempCheckpointsDir: vi.fn().mockReturnValue(checkpointsDir),
|
||||
getProjectTempDir: vi.fn().mockReturnValue(geminiTempDir),
|
||||
},
|
||||
getGeminiClient: vi.fn().mockReturnValue({
|
||||
setHistory: mockSetHistory,
|
||||
}),
|
||||
|
@ -77,7 +80,9 @@ describe('restoreCommand', () => {
|
|||
|
||||
describe('action', () => {
|
||||
it('should return an error if temp dir is not found', async () => {
|
||||
vi.mocked(mockConfig.getProjectTempDir).mockReturnValue('');
|
||||
vi.mocked(
|
||||
mockConfig.storage.getProjectTempCheckpointsDir,
|
||||
).mockReturnValue('');
|
||||
|
||||
expect(
|
||||
await restoreCommand(mockConfig)?.action?.(mockContext, ''),
|
||||
|
@ -219,7 +224,7 @@ describe('restoreCommand', () => {
|
|||
|
||||
describe('completion', () => {
|
||||
it('should return an empty array if temp dir is not found', async () => {
|
||||
vi.mocked(mockConfig.getProjectTempDir).mockReturnValue('');
|
||||
vi.mocked(mockConfig.storage.getProjectTempDir).mockReturnValue('');
|
||||
const command = restoreCommand(mockConfig);
|
||||
|
||||
expect(await command?.completion?.(mockContext, '')).toEqual([]);
|
||||
|
|
|
@ -22,9 +22,7 @@ async function restoreAction(
|
|||
const { config, git: gitService } = services;
|
||||
const { addItem, loadHistory } = ui;
|
||||
|
||||
const checkpointDir = config?.getProjectTempDir()
|
||||
? path.join(config.getProjectTempDir(), 'checkpoints')
|
||||
: undefined;
|
||||
const checkpointDir = config?.storage.getProjectTempCheckpointsDir();
|
||||
|
||||
if (!checkpointDir) {
|
||||
return {
|
||||
|
@ -125,9 +123,7 @@ async function completion(
|
|||
): Promise<string[]> {
|
||||
const { services } = context;
|
||||
const { config } = services;
|
||||
const checkpointDir = config?.getProjectTempDir()
|
||||
? path.join(config.getProjectTempDir(), 'checkpoints')
|
||||
: undefined;
|
||||
const checkpointDir = config?.storage.getProjectTempCheckpointsDir();
|
||||
if (!checkpointDir) {
|
||||
return [];
|
||||
}
|
||||
|
|
|
@ -81,7 +81,7 @@ export const InputPrompt: React.FC<InputPromptProps> = ({
|
|||
const [cursorPosition, setCursorPosition] = useState<[number, number]>([
|
||||
0, 0,
|
||||
]);
|
||||
const shellHistory = useShellHistory(config.getProjectRoot());
|
||||
const shellHistory = useShellHistory(config.getProjectRoot(), config.storage);
|
||||
const historyData = shellHistory.history;
|
||||
|
||||
const completion = useCommandCompletion(
|
||||
|
|
|
@ -17,15 +17,10 @@ import {
|
|||
|
||||
const mockIsBinary = vi.hoisted(() => vi.fn());
|
||||
const mockShellExecutionService = vi.hoisted(() => vi.fn());
|
||||
vi.mock('@google/gemini-cli-core', async (importOriginal) => {
|
||||
const original =
|
||||
await importOriginal<typeof import('@google/gemini-cli-core')>();
|
||||
return {
|
||||
...original,
|
||||
ShellExecutionService: { execute: mockShellExecutionService },
|
||||
isBinary: mockIsBinary,
|
||||
};
|
||||
});
|
||||
vi.mock('@google/gemini-cli-core', () => ({
|
||||
ShellExecutionService: { execute: mockShellExecutionService },
|
||||
isBinary: mockIsBinary,
|
||||
}));
|
||||
vi.mock('fs');
|
||||
vi.mock('os');
|
||||
vi.mock('crypto');
|
||||
|
|
|
@ -16,6 +16,7 @@ import {
|
|||
makeSlashCommandEvent,
|
||||
SlashCommandStatus,
|
||||
ToolConfirmationOutcome,
|
||||
Storage,
|
||||
} from '@google/gemini-cli-core';
|
||||
import { useSessionStats } from '../contexts/SessionContext.js';
|
||||
import { runExitCleanup } from '../../utils/cleanup.js';
|
||||
|
@ -82,11 +83,14 @@ export const useSlashCommandProcessor = (
|
|||
if (!config?.getProjectRoot()) {
|
||||
return;
|
||||
}
|
||||
return new GitService(config.getProjectRoot());
|
||||
return new GitService(config.getProjectRoot(), config.storage);
|
||||
}, [config]);
|
||||
|
||||
const logger = useMemo(() => {
|
||||
const l = new Logger(config?.getSessionId() || '');
|
||||
const l = new Logger(
|
||||
config?.getSessionId() || '',
|
||||
config?.storage ?? new Storage(process.cwd()),
|
||||
);
|
||||
// The logger's initialize is async, but we can create the instance
|
||||
// synchronously. Commands that use it will await its initialization.
|
||||
return l;
|
||||
|
|
|
@ -105,13 +105,14 @@ export const useGeminiStream = (
|
|||
useStateAndRef<HistoryItemWithoutId | null>(null);
|
||||
const processedMemoryToolsRef = useRef<Set<string>>(new Set());
|
||||
const { startNewPrompt, getPromptCount } = useSessionStats();
|
||||
const logger = useLogger();
|
||||
const storage = config.storage;
|
||||
const logger = useLogger(storage);
|
||||
const gitService = useMemo(() => {
|
||||
if (!config.getProjectRoot()) {
|
||||
return;
|
||||
}
|
||||
return new GitService(config.getProjectRoot());
|
||||
}, [config]);
|
||||
return new GitService(config.getProjectRoot(), storage);
|
||||
}, [config, storage]);
|
||||
|
||||
const [toolCalls, scheduleToolCalls, markToolsAsSubmitted] =
|
||||
useReactToolScheduler(
|
||||
|
@ -877,9 +878,7 @@ export const useGeminiStream = (
|
|||
);
|
||||
|
||||
if (restorableToolCalls.length > 0) {
|
||||
const checkpointDir = config.getProjectTempDir()
|
||||
? path.join(config.getProjectTempDir(), 'checkpoints')
|
||||
: undefined;
|
||||
const checkpointDir = storage.getProjectTempCheckpointsDir();
|
||||
|
||||
if (!checkpointDir) {
|
||||
return;
|
||||
|
@ -962,7 +961,15 @@ export const useGeminiStream = (
|
|||
}
|
||||
};
|
||||
saveRestorableToolCalls();
|
||||
}, [toolCalls, config, onDebugMessage, gitService, history, geminiClient]);
|
||||
}, [
|
||||
toolCalls,
|
||||
config,
|
||||
onDebugMessage,
|
||||
gitService,
|
||||
history,
|
||||
geminiClient,
|
||||
storage,
|
||||
]);
|
||||
|
||||
return {
|
||||
streamingState,
|
||||
|
|
|
@ -5,16 +5,16 @@
|
|||
*/
|
||||
|
||||
import { useState, useEffect } from 'react';
|
||||
import { sessionId, Logger } from '@google/gemini-cli-core';
|
||||
import { sessionId, Logger, Storage } from '@google/gemini-cli-core';
|
||||
|
||||
/**
|
||||
* Hook to manage the logger instance.
|
||||
*/
|
||||
export const useLogger = () => {
|
||||
export const useLogger = (storage: Storage) => {
|
||||
const [logger, setLogger] = useState<Logger | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
const newLogger = new Logger(sessionId);
|
||||
const newLogger = new Logger(sessionId, storage);
|
||||
/**
|
||||
* Start async initialization, no need to await. Using await slows down the
|
||||
* time from launch to see the gemini-cli prompt and it's better to not save
|
||||
|
@ -26,7 +26,7 @@ export const useLogger = () => {
|
|||
setLogger(newLogger);
|
||||
})
|
||||
.catch(() => {});
|
||||
}, []);
|
||||
}, [storage]);
|
||||
|
||||
return logger;
|
||||
};
|
||||
|
|
|
@ -11,9 +11,41 @@ import * as path from 'path';
|
|||
import * as os from 'os';
|
||||
import * as crypto from 'crypto';
|
||||
|
||||
vi.mock('fs/promises');
|
||||
vi.mock('fs/promises', () => ({
|
||||
readFile: vi.fn(),
|
||||
writeFile: vi.fn(),
|
||||
mkdir: vi.fn(),
|
||||
}));
|
||||
vi.mock('os');
|
||||
vi.mock('crypto');
|
||||
vi.mock('fs', async (importOriginal) => {
|
||||
const actualFs = await importOriginal<typeof import('fs')>();
|
||||
return {
|
||||
...actualFs,
|
||||
mkdirSync: vi.fn(),
|
||||
};
|
||||
});
|
||||
vi.mock('@google/gemini-cli-core', () => {
|
||||
class Storage {
|
||||
getProjectTempDir(): string {
|
||||
return path.join('/test/home/', '.gemini', 'tmp', 'mocked_hash');
|
||||
}
|
||||
getHistoryFilePath(): string {
|
||||
return path.join(
|
||||
'/test/home/',
|
||||
'.gemini',
|
||||
'tmp',
|
||||
'mocked_hash',
|
||||
'shell_history',
|
||||
);
|
||||
}
|
||||
}
|
||||
return {
|
||||
isNodeError: (err: unknown): err is NodeJS.ErrnoException =>
|
||||
typeof err === 'object' && err !== null && 'code' in err,
|
||||
Storage,
|
||||
};
|
||||
});
|
||||
|
||||
const MOCKED_PROJECT_ROOT = '/test/project';
|
||||
const MOCKED_HOME_DIR = '/test/home';
|
||||
|
|
|
@ -7,9 +7,8 @@
|
|||
import { useState, useEffect, useCallback } from 'react';
|
||||
import * as fs from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
import { isNodeError, getProjectTempDir } from '@google/gemini-cli-core';
|
||||
import { isNodeError, Storage } from '@google/gemini-cli-core';
|
||||
|
||||
const HISTORY_FILE = 'shell_history';
|
||||
const MAX_HISTORY_LENGTH = 100;
|
||||
|
||||
export interface UseShellHistoryReturn {
|
||||
|
@ -20,9 +19,12 @@ export interface UseShellHistoryReturn {
|
|||
resetHistoryPosition: () => void;
|
||||
}
|
||||
|
||||
async function getHistoryFilePath(projectRoot: string): Promise<string> {
|
||||
const historyDir = getProjectTempDir(projectRoot);
|
||||
return path.join(historyDir, HISTORY_FILE);
|
||||
async function getHistoryFilePath(
|
||||
projectRoot: string,
|
||||
configStorage?: Storage,
|
||||
): Promise<string> {
|
||||
const storage = configStorage ?? new Storage(projectRoot);
|
||||
return storage.getHistoryFilePath();
|
||||
}
|
||||
|
||||
// Handle multiline commands
|
||||
|
@ -67,20 +69,23 @@ async function writeHistoryFile(
|
|||
}
|
||||
}
|
||||
|
||||
export function useShellHistory(projectRoot: string): UseShellHistoryReturn {
|
||||
export function useShellHistory(
|
||||
projectRoot: string,
|
||||
storage?: Storage,
|
||||
): UseShellHistoryReturn {
|
||||
const [history, setHistory] = useState<string[]>([]);
|
||||
const [historyIndex, setHistoryIndex] = useState(-1);
|
||||
const [historyFilePath, setHistoryFilePath] = useState<string | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
async function loadHistory() {
|
||||
const filePath = await getHistoryFilePath(projectRoot);
|
||||
const filePath = await getHistoryFilePath(projectRoot, storage);
|
||||
setHistoryFilePath(filePath);
|
||||
const loadedHistory = await readHistoryFile(filePath);
|
||||
setHistory(loadedHistory.reverse()); // Newest first
|
||||
}
|
||||
loadHistory();
|
||||
}, [projectRoot]);
|
||||
}, [projectRoot, storage]);
|
||||
|
||||
const addCommandToHistory = useCallback(
|
||||
(command: string) => {
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
|
||||
import { promises as fs } from 'fs';
|
||||
import { join } from 'path';
|
||||
import { getProjectTempDir } from '@google/gemini-cli-core';
|
||||
import { Storage } from '@google/gemini-cli-core';
|
||||
|
||||
const cleanupFunctions: Array<(() => void) | (() => Promise<void>)> = [];
|
||||
|
||||
|
@ -26,7 +26,8 @@ export async function runExitCleanup() {
|
|||
}
|
||||
|
||||
export async function cleanupCheckpoints() {
|
||||
const tempDir = getProjectTempDir(process.cwd());
|
||||
const storage = new Storage(process.cwd());
|
||||
const tempDir = storage.getProjectTempDir();
|
||||
const checkpointsDir = join(tempDir, 'checkpoints');
|
||||
try {
|
||||
await fs.rm(checkpointsDir, { recursive: true, force: true });
|
||||
|
|
|
@ -11,7 +11,7 @@ import {
|
|||
clearCachedCredentialFile,
|
||||
clearOauthClientCache,
|
||||
} from './oauth2.js';
|
||||
import { getCachedGoogleAccount } from '../utils/user_account.js';
|
||||
import { UserAccountManager } from '../utils/userAccountManager.js';
|
||||
import { OAuth2Client, Compute } from 'google-auth-library';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
@ -180,7 +180,10 @@ describe('oauth2', () => {
|
|||
});
|
||||
|
||||
// Verify the getCachedGoogleAccount function works
|
||||
expect(getCachedGoogleAccount()).toBe('test-google-account@gmail.com');
|
||||
const userAccountManager = new UserAccountManager();
|
||||
expect(userAccountManager.getCachedGoogleAccount()).toBe(
|
||||
'test-google-account@gmail.com',
|
||||
);
|
||||
});
|
||||
|
||||
it('should perform login with user code', async () => {
|
||||
|
@ -533,14 +536,17 @@ describe('oauth2', () => {
|
|||
googleAccountPath,
|
||||
JSON.stringify(accountData),
|
||||
);
|
||||
const userAccountManager = new UserAccountManager();
|
||||
|
||||
expect(fs.existsSync(credsPath)).toBe(true);
|
||||
expect(fs.existsSync(googleAccountPath)).toBe(true);
|
||||
expect(getCachedGoogleAccount()).toBe('test@example.com');
|
||||
expect(userAccountManager.getCachedGoogleAccount()).toBe(
|
||||
'test@example.com',
|
||||
);
|
||||
|
||||
await clearCachedCredentialFile();
|
||||
expect(fs.existsSync(credsPath)).toBe(false);
|
||||
expect(getCachedGoogleAccount()).toBeNull();
|
||||
expect(userAccountManager.getCachedGoogleAccount()).toBeNull();
|
||||
const updatedAccountData = JSON.parse(
|
||||
fs.readFileSync(googleAccountPath, 'utf-8'),
|
||||
);
|
||||
|
|
|
@ -17,16 +17,14 @@ import * as net from 'net';
|
|||
import open from 'open';
|
||||
import path from 'node:path';
|
||||
import { promises as fs } from 'node:fs';
|
||||
import * as os from 'os';
|
||||
import { Config } from '../config/config.js';
|
||||
import { getErrorMessage } from '../utils/errors.js';
|
||||
import {
|
||||
cacheGoogleAccount,
|
||||
getCachedGoogleAccount,
|
||||
clearCachedGoogleAccount,
|
||||
} from '../utils/user_account.js';
|
||||
import { UserAccountManager } from '../utils/userAccountManager.js';
|
||||
import { AuthType } from '../core/contentGenerator.js';
|
||||
import readline from 'node:readline';
|
||||
import { Storage } from '../config/storage.js';
|
||||
|
||||
const userAccountManager = new UserAccountManager();
|
||||
|
||||
// OAuth Client ID used to initiate OAuth2Client class.
|
||||
const OAUTH_CLIENT_ID =
|
||||
|
@ -53,9 +51,6 @@ const SIGN_IN_SUCCESS_URL =
|
|||
const SIGN_IN_FAILURE_URL =
|
||||
'https://developers.google.com/gemini-code-assist/auth_failure_gemini';
|
||||
|
||||
const GEMINI_DIR = '.gemini';
|
||||
const CREDENTIAL_FILENAME = 'oauth_creds.json';
|
||||
|
||||
/**
|
||||
* An Authentication URL for updating the credentials of a Oauth2Client
|
||||
* as well as a promise that will resolve when the credentials have
|
||||
|
@ -99,7 +94,7 @@ async function initOauthClient(
|
|||
if (await loadCachedCredentials(client)) {
|
||||
// Found valid cached credentials.
|
||||
// Check if we need to retrieve Google Account ID or Email
|
||||
if (!getCachedGoogleAccount()) {
|
||||
if (!userAccountManager.getCachedGoogleAccount()) {
|
||||
try {
|
||||
await fetchAndCacheUserInfo(client);
|
||||
} catch {
|
||||
|
@ -352,7 +347,7 @@ export function getAvailablePort(): Promise<number> {
|
|||
|
||||
async function loadCachedCredentials(client: OAuth2Client): Promise<boolean> {
|
||||
const pathsToTry = [
|
||||
getCachedCredentialPath(),
|
||||
Storage.getOAuthCredsPath(),
|
||||
process.env['GOOGLE_APPLICATION_CREDENTIALS'],
|
||||
].filter((p): p is string => !!p);
|
||||
|
||||
|
@ -380,26 +375,22 @@ async function loadCachedCredentials(client: OAuth2Client): Promise<boolean> {
|
|||
}
|
||||
|
||||
async function cacheCredentials(credentials: Credentials) {
|
||||
const filePath = getCachedCredentialPath();
|
||||
const filePath = Storage.getOAuthCredsPath();
|
||||
await fs.mkdir(path.dirname(filePath), { recursive: true });
|
||||
|
||||
const credString = JSON.stringify(credentials, null, 2);
|
||||
await fs.writeFile(filePath, credString, { mode: 0o600 });
|
||||
}
|
||||
|
||||
function getCachedCredentialPath(): string {
|
||||
return path.join(os.homedir(), GEMINI_DIR, CREDENTIAL_FILENAME);
|
||||
}
|
||||
|
||||
export function clearOauthClientCache() {
|
||||
oauthClientPromises.clear();
|
||||
}
|
||||
|
||||
export async function clearCachedCredentialFile() {
|
||||
try {
|
||||
await fs.rm(getCachedCredentialPath(), { force: true });
|
||||
await fs.rm(Storage.getOAuthCredsPath(), { force: true });
|
||||
// Clear the Google Account ID cache when credentials are cleared
|
||||
await clearCachedGoogleAccount();
|
||||
await userAccountManager.clearCachedGoogleAccount();
|
||||
// Clear the in-memory OAuth client cache to force re-authentication
|
||||
clearOauthClientCache();
|
||||
} catch (e) {
|
||||
|
@ -433,9 +424,7 @@ async function fetchAndCacheUserInfo(client: OAuth2Client): Promise<void> {
|
|||
}
|
||||
|
||||
const userInfo = await response.json();
|
||||
if (userInfo.email) {
|
||||
await cacheGoogleAccount(userInfo.email);
|
||||
}
|
||||
await userAccountManager.cacheGoogleAccount(userInfo.email);
|
||||
} catch (error) {
|
||||
console.error('Error retrieving user info:', error);
|
||||
}
|
||||
|
|
|
@ -22,16 +22,11 @@ import { ShellTool } from '../tools/shell.js';
|
|||
import { WriteFileTool } from '../tools/write-file.js';
|
||||
import { WebFetchTool } from '../tools/web-fetch.js';
|
||||
import { ReadManyFilesTool } from '../tools/read-many-files.js';
|
||||
import {
|
||||
MemoryTool,
|
||||
setGeminiMdFilename,
|
||||
GEMINI_CONFIG_DIR as GEMINI_DIR,
|
||||
} from '../tools/memoryTool.js';
|
||||
import { MemoryTool, setGeminiMdFilename } from '../tools/memoryTool.js';
|
||||
import { WebSearchTool } from '../tools/web-search.js';
|
||||
import { GeminiClient } from '../core/client.js';
|
||||
import { FileDiscoveryService } from '../services/fileDiscoveryService.js';
|
||||
import { GitService } from '../services/gitService.js';
|
||||
import { getProjectTempDir } from '../utils/paths.js';
|
||||
import {
|
||||
initializeTelemetry,
|
||||
DEFAULT_TELEMETRY_TARGET,
|
||||
|
@ -57,6 +52,7 @@ import { IdeConnectionEvent, IdeConnectionType } from '../telemetry/types.js';
|
|||
// Re-export OAuth config type
|
||||
export type { MCPOAuthConfig };
|
||||
import { WorkspaceContext } from '../utils/workspaceContext.js';
|
||||
import { Storage } from './storage.js';
|
||||
|
||||
export enum ApprovalMode {
|
||||
DEFAULT = 'default',
|
||||
|
@ -272,6 +268,7 @@ export class Config {
|
|||
private readonly shouldUseNodePtyShell: boolean;
|
||||
private readonly skipNextSpeakerCheck: boolean;
|
||||
private initialized: boolean = false;
|
||||
readonly storage: Storage;
|
||||
|
||||
constructor(params: ConfigParameters) {
|
||||
this.sessionId = params.sessionId;
|
||||
|
@ -340,6 +337,7 @@ export class Config {
|
|||
this.trustedFolder = params.trustedFolder;
|
||||
this.shouldUseNodePtyShell = params.shouldUseNodePtyShell ?? false;
|
||||
this.skipNextSpeakerCheck = params.skipNextSpeakerCheck ?? false;
|
||||
this.storage = new Storage(this.targetDir);
|
||||
|
||||
if (params.contextFileName) {
|
||||
setGeminiMdFilename(params.contextFileName);
|
||||
|
@ -591,14 +589,6 @@ export class Config {
|
|||
return this.geminiClient;
|
||||
}
|
||||
|
||||
getGeminiDir(): string {
|
||||
return path.join(this.targetDir, GEMINI_DIR);
|
||||
}
|
||||
|
||||
getProjectTempDir(): string {
|
||||
return getProjectTempDir(this.getProjectRoot());
|
||||
}
|
||||
|
||||
getEnableRecursiveFileSearch(): boolean {
|
||||
return this.fileFiltering.enableRecursiveFileSearch;
|
||||
}
|
||||
|
@ -744,7 +734,7 @@ export class Config {
|
|||
|
||||
async getGitService(): Promise<GitService> {
|
||||
if (!this.gitService) {
|
||||
this.gitService = new GitService(this.targetDir);
|
||||
this.gitService = new GitService(this.targetDir, this.storage);
|
||||
await this.gitService.initialize();
|
||||
}
|
||||
return this.gitService;
|
||||
|
|
|
@ -0,0 +1,55 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
import * as os from 'os';
|
||||
import * as path from 'node:path';
|
||||
|
||||
vi.mock('fs', async (importOriginal) => {
|
||||
const actual = await importOriginal<typeof import('fs')>();
|
||||
return {
|
||||
...actual,
|
||||
mkdirSync: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
import { Storage } from './storage.js';
|
||||
|
||||
describe('Storage – getGlobalSettingsPath', () => {
|
||||
it('returns path to ~/.gemini/settings.json', () => {
|
||||
const expected = path.join(os.homedir(), '.gemini', 'settings.json');
|
||||
expect(Storage.getGlobalSettingsPath()).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Storage – additional helpers', () => {
|
||||
const projectRoot = '/tmp/project';
|
||||
const storage = new Storage(projectRoot);
|
||||
|
||||
it('getWorkspaceSettingsPath returns project/.gemini/settings.json', () => {
|
||||
const expected = path.join(projectRoot, '.gemini', 'settings.json');
|
||||
expect(storage.getWorkspaceSettingsPath()).toBe(expected);
|
||||
});
|
||||
|
||||
it('getUserCommandsDir returns ~/.gemini/commands', () => {
|
||||
const expected = path.join(os.homedir(), '.gemini', 'commands');
|
||||
expect(Storage.getUserCommandsDir()).toBe(expected);
|
||||
});
|
||||
|
||||
it('getProjectCommandsDir returns project/.gemini/commands', () => {
|
||||
const expected = path.join(projectRoot, '.gemini', 'commands');
|
||||
expect(storage.getProjectCommandsDir()).toBe(expected);
|
||||
});
|
||||
|
||||
it('getMcpOAuthTokensPath returns ~/.gemini/mcp-oauth-tokens.json', () => {
|
||||
const expected = path.join(
|
||||
os.homedir(),
|
||||
'.gemini',
|
||||
'mcp-oauth-tokens.json',
|
||||
);
|
||||
expect(Storage.getMcpOAuthTokensPath()).toBe(expected);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,114 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import * as path from 'node:path';
|
||||
import * as os from 'os';
|
||||
import * as crypto from 'crypto';
|
||||
import * as fs from 'fs';
|
||||
|
||||
export const GEMINI_DIR = '.gemini';
|
||||
export const GOOGLE_ACCOUNTS_FILENAME = 'google_accounts.json';
|
||||
const TMP_DIR_NAME = 'tmp';
|
||||
|
||||
export class Storage {
|
||||
private readonly targetDir: string;
|
||||
|
||||
constructor(targetDir: string) {
|
||||
this.targetDir = targetDir;
|
||||
}
|
||||
|
||||
static getGlobalGeminiDir(): string {
|
||||
const homeDir = os.homedir();
|
||||
if (!homeDir) {
|
||||
return path.join(os.tmpdir(), '.gemini');
|
||||
}
|
||||
return path.join(homeDir, GEMINI_DIR);
|
||||
}
|
||||
|
||||
static getMcpOAuthTokensPath(): string {
|
||||
return path.join(Storage.getGlobalGeminiDir(), 'mcp-oauth-tokens.json');
|
||||
}
|
||||
|
||||
static getGlobalSettingsPath(): string {
|
||||
return path.join(Storage.getGlobalGeminiDir(), 'settings.json');
|
||||
}
|
||||
|
||||
static getInstallationIdPath(): string {
|
||||
return path.join(Storage.getGlobalGeminiDir(), 'installation_id');
|
||||
}
|
||||
|
||||
static getGoogleAccountsPath(): string {
|
||||
return path.join(Storage.getGlobalGeminiDir(), GOOGLE_ACCOUNTS_FILENAME);
|
||||
}
|
||||
|
||||
static getUserCommandsDir(): string {
|
||||
return path.join(Storage.getGlobalGeminiDir(), 'commands');
|
||||
}
|
||||
|
||||
static getGlobalMemoryFilePath(): string {
|
||||
return path.join(Storage.getGlobalGeminiDir(), 'memory.md');
|
||||
}
|
||||
|
||||
static getGlobalTempDir(): string {
|
||||
return path.join(Storage.getGlobalGeminiDir(), TMP_DIR_NAME);
|
||||
}
|
||||
|
||||
getGeminiDir(): string {
|
||||
return path.join(this.targetDir, GEMINI_DIR);
|
||||
}
|
||||
|
||||
getProjectTempDir(): string {
|
||||
const hash = this.getFilePathHash(this.getProjectRoot());
|
||||
const tempDir = Storage.getGlobalTempDir();
|
||||
return path.join(tempDir, hash);
|
||||
}
|
||||
|
||||
ensureProjectTempDirExists(): void {
|
||||
fs.mkdirSync(this.getProjectTempDir(), { recursive: true });
|
||||
}
|
||||
|
||||
static getOAuthCredsPath(): string {
|
||||
return path.join(Storage.getGlobalGeminiDir(), 'oauth_creds.json');
|
||||
}
|
||||
|
||||
getProjectRoot(): string {
|
||||
return this.targetDir;
|
||||
}
|
||||
|
||||
private getFilePathHash(filePath: string): string {
|
||||
return crypto.createHash('sha256').update(filePath).digest('hex');
|
||||
}
|
||||
|
||||
getHistoryDir(): string {
|
||||
const hash = this.getFilePathHash(this.getProjectRoot());
|
||||
const historyDir = path.join(Storage.getGlobalGeminiDir(), 'history');
|
||||
return path.join(historyDir, hash);
|
||||
}
|
||||
|
||||
getWorkspaceSettingsPath(): string {
|
||||
return path.join(this.getGeminiDir(), 'settings.json');
|
||||
}
|
||||
|
||||
getProjectCommandsDir(): string {
|
||||
return path.join(this.getGeminiDir(), 'commands');
|
||||
}
|
||||
|
||||
getProjectTempCheckpointsDir(): string {
|
||||
return path.join(this.getProjectTempDir(), 'checkpoints');
|
||||
}
|
||||
|
||||
getExtensionsDir(): string {
|
||||
return path.join(this.getGeminiDir(), 'extensions');
|
||||
}
|
||||
|
||||
getExtensionsConfigPath(): string {
|
||||
return path.join(this.getExtensionsDir(), 'gemini-extension.json');
|
||||
}
|
||||
|
||||
getHistoryFilePath(): string {
|
||||
return path.join(this.getProjectTempDir(), 'shell_history');
|
||||
}
|
||||
}
|
|
@ -19,7 +19,7 @@ import { Config } from '../config/config.js';
|
|||
|
||||
import { UserTierId } from '../code_assist/types.js';
|
||||
import { LoggingContentGenerator } from './loggingContentGenerator.js';
|
||||
import { getInstallationId } from '../utils/user_id.js';
|
||||
import { InstallationManager } from '../utils/installationManager.js';
|
||||
|
||||
/**
|
||||
* Interface abstracting the core functionalities for generating content and counting tokens.
|
||||
|
@ -136,7 +136,8 @@ export async function createContentGenerator(
|
|||
) {
|
||||
let headers: Record<string, string> = { ...baseHeaders };
|
||||
if (gcConfig?.getUsageStatisticsEnabled()) {
|
||||
const installationId = getInstallationId();
|
||||
const installationManager = new InstallationManager();
|
||||
const installationId = installationManager.getInstallationId();
|
||||
headers = {
|
||||
...headers,
|
||||
'x-gemini-api-privileged-user-id': `${installationId}`,
|
||||
|
|
|
@ -20,6 +20,7 @@ import {
|
|||
encodeTagName,
|
||||
decodeTagName,
|
||||
} from './logger.js';
|
||||
import { Storage } from '../config/storage.js';
|
||||
import { promises as fs, existsSync } from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import { Content } from '@google/genai';
|
||||
|
@ -83,7 +84,7 @@ describe('Logger', () => {
|
|||
await cleanupLogAndCheckpointFiles();
|
||||
// Ensure the directory exists for the test
|
||||
await fs.mkdir(TEST_GEMINI_DIR, { recursive: true });
|
||||
logger = new Logger(testSessionId);
|
||||
logger = new Logger(testSessionId, new Storage(process.cwd()));
|
||||
await logger.initialize();
|
||||
});
|
||||
|
||||
|
@ -150,7 +151,10 @@ describe('Logger', () => {
|
|||
TEST_LOG_FILE_PATH,
|
||||
JSON.stringify(existingLogs, null, 2),
|
||||
);
|
||||
const newLogger = new Logger(currentSessionId);
|
||||
const newLogger = new Logger(
|
||||
currentSessionId,
|
||||
new Storage(process.cwd()),
|
||||
);
|
||||
await newLogger.initialize();
|
||||
expect(newLogger['messageId']).toBe(2);
|
||||
expect(newLogger['logs']).toEqual(existingLogs);
|
||||
|
@ -171,7 +175,7 @@ describe('Logger', () => {
|
|||
TEST_LOG_FILE_PATH,
|
||||
JSON.stringify(existingLogs, null, 2),
|
||||
);
|
||||
const newLogger = new Logger('a-new-session');
|
||||
const newLogger = new Logger('a-new-session', new Storage(process.cwd()));
|
||||
await newLogger.initialize();
|
||||
expect(newLogger['messageId']).toBe(0);
|
||||
newLogger.close();
|
||||
|
@ -196,7 +200,7 @@ describe('Logger', () => {
|
|||
.spyOn(console, 'debug')
|
||||
.mockImplementation(() => {});
|
||||
|
||||
const newLogger = new Logger(testSessionId);
|
||||
const newLogger = new Logger(testSessionId, new Storage(process.cwd()));
|
||||
await newLogger.initialize();
|
||||
|
||||
expect(consoleDebugSpy).toHaveBeenCalledWith(
|
||||
|
@ -224,7 +228,7 @@ describe('Logger', () => {
|
|||
.spyOn(console, 'debug')
|
||||
.mockImplementation(() => {});
|
||||
|
||||
const newLogger = new Logger(testSessionId);
|
||||
const newLogger = new Logger(testSessionId, new Storage(process.cwd()));
|
||||
await newLogger.initialize();
|
||||
|
||||
expect(consoleDebugSpy).toHaveBeenCalledWith(
|
||||
|
@ -274,7 +278,10 @@ describe('Logger', () => {
|
|||
});
|
||||
|
||||
it('should handle logger not initialized', async () => {
|
||||
const uninitializedLogger = new Logger(testSessionId);
|
||||
const uninitializedLogger = new Logger(
|
||||
testSessionId,
|
||||
new Storage(process.cwd()),
|
||||
);
|
||||
uninitializedLogger.close(); // Ensure it's treated as uninitialized
|
||||
const consoleDebugSpy = vi
|
||||
.spyOn(console, 'debug')
|
||||
|
@ -289,10 +296,16 @@ describe('Logger', () => {
|
|||
|
||||
it('should simulate concurrent writes from different logger instances to the same file', async () => {
|
||||
const concurrentSessionId = 'concurrent-session';
|
||||
const logger1 = new Logger(concurrentSessionId);
|
||||
const logger1 = new Logger(
|
||||
concurrentSessionId,
|
||||
new Storage(process.cwd()),
|
||||
);
|
||||
await logger1.initialize();
|
||||
|
||||
const logger2 = new Logger(concurrentSessionId);
|
||||
const logger2 = new Logger(
|
||||
concurrentSessionId,
|
||||
new Storage(process.cwd()),
|
||||
);
|
||||
await logger2.initialize();
|
||||
expect(logger2['sessionId']).toEqual(logger1['sessionId']);
|
||||
|
||||
|
@ -345,14 +358,14 @@ describe('Logger', () => {
|
|||
|
||||
describe('getPreviousUserMessages', () => {
|
||||
it('should retrieve all user messages from logs, sorted newest first', async () => {
|
||||
const loggerSort = new Logger('session-1');
|
||||
const loggerSort = new Logger('session-1', new Storage(process.cwd()));
|
||||
await loggerSort.initialize();
|
||||
await loggerSort.logMessage(MessageSenderType.USER, 'S1M0_ts100000');
|
||||
vi.advanceTimersByTime(1000);
|
||||
await loggerSort.logMessage(MessageSenderType.USER, 'S1M1_ts101000');
|
||||
vi.advanceTimersByTime(1000);
|
||||
// Switch to a different session to log
|
||||
const loggerSort2 = new Logger('session-2');
|
||||
const loggerSort2 = new Logger('session-2', new Storage(process.cwd()));
|
||||
await loggerSort2.initialize();
|
||||
await loggerSort2.logMessage(MessageSenderType.USER, 'S2M0_ts102000');
|
||||
vi.advanceTimersByTime(1000);
|
||||
|
@ -365,7 +378,10 @@ describe('Logger', () => {
|
|||
loggerSort.close();
|
||||
loggerSort2.close();
|
||||
|
||||
const finalLogger = new Logger('final-session');
|
||||
const finalLogger = new Logger(
|
||||
'final-session',
|
||||
new Storage(process.cwd()),
|
||||
);
|
||||
await finalLogger.initialize();
|
||||
|
||||
const messages = await finalLogger.getPreviousUserMessages();
|
||||
|
@ -385,7 +401,10 @@ describe('Logger', () => {
|
|||
});
|
||||
|
||||
it('should return empty array if logger not initialized', async () => {
|
||||
const uninitializedLogger = new Logger(testSessionId);
|
||||
const uninitializedLogger = new Logger(
|
||||
testSessionId,
|
||||
new Storage(process.cwd()),
|
||||
);
|
||||
uninitializedLogger.close();
|
||||
const messages = await uninitializedLogger.getPreviousUserMessages();
|
||||
expect(messages).toEqual([]);
|
||||
|
@ -428,7 +447,10 @@ describe('Logger', () => {
|
|||
});
|
||||
|
||||
it('should not throw if logger is not initialized', async () => {
|
||||
const uninitializedLogger = new Logger(testSessionId);
|
||||
const uninitializedLogger = new Logger(
|
||||
testSessionId,
|
||||
new Storage(process.cwd()),
|
||||
);
|
||||
uninitializedLogger.close();
|
||||
const consoleErrorSpy = vi
|
||||
.spyOn(console, 'error')
|
||||
|
@ -525,7 +547,10 @@ describe('Logger', () => {
|
|||
});
|
||||
|
||||
it('should return an empty array if logger is not initialized', async () => {
|
||||
const uninitializedLogger = new Logger(testSessionId);
|
||||
const uninitializedLogger = new Logger(
|
||||
testSessionId,
|
||||
new Storage(process.cwd()),
|
||||
);
|
||||
uninitializedLogger.close();
|
||||
const consoleErrorSpy = vi
|
||||
.spyOn(console, 'error')
|
||||
|
@ -613,7 +638,10 @@ describe('Logger', () => {
|
|||
});
|
||||
|
||||
it('should return false if logger is not initialized', async () => {
|
||||
const uninitializedLogger = new Logger(testSessionId);
|
||||
const uninitializedLogger = new Logger(
|
||||
testSessionId,
|
||||
new Storage(process.cwd()),
|
||||
);
|
||||
uninitializedLogger.close();
|
||||
const consoleErrorSpy = vi
|
||||
.spyOn(console, 'error')
|
||||
|
@ -651,7 +679,10 @@ describe('Logger', () => {
|
|||
});
|
||||
|
||||
it('should throw an error if logger is not initialized', async () => {
|
||||
const uninitializedLogger = new Logger(testSessionId);
|
||||
const uninitializedLogger = new Logger(
|
||||
testSessionId,
|
||||
new Storage(process.cwd()),
|
||||
);
|
||||
uninitializedLogger.close();
|
||||
|
||||
await expect(uninitializedLogger.checkpointExists(tag)).rejects.toThrow(
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
import path from 'node:path';
|
||||
import { promises as fs } from 'node:fs';
|
||||
import { Content } from '@google/genai';
|
||||
import { getProjectTempDir } from '../utils/paths.js';
|
||||
import { Storage } from '../config/storage.js';
|
||||
|
||||
const LOG_FILE_NAME = 'logs.json';
|
||||
|
||||
|
@ -67,7 +67,10 @@ export class Logger {
|
|||
private initialized = false;
|
||||
private logs: LogEntry[] = []; // In-memory cache, ideally reflects the last known state of the file
|
||||
|
||||
constructor(sessionId: string) {
|
||||
constructor(
|
||||
sessionId: string,
|
||||
private readonly storage: Storage,
|
||||
) {
|
||||
this.sessionId = sessionId;
|
||||
}
|
||||
|
||||
|
@ -130,7 +133,7 @@ export class Logger {
|
|||
return;
|
||||
}
|
||||
|
||||
this.geminiDir = getProjectTempDir(process.cwd());
|
||||
this.geminiDir = this.storage.getProjectTempDir();
|
||||
this.logFilePath = path.join(this.geminiDir, LOG_FILE_NAME);
|
||||
|
||||
try {
|
||||
|
|
|
@ -101,3 +101,4 @@ export { OAuthUtils } from './mcp/oauth-utils.js';
|
|||
export * from './telemetry/index.js';
|
||||
export { sessionId } from './utils/session.js';
|
||||
export * from './utils/browser.js';
|
||||
export { Storage } from './config/storage.js';
|
||||
|
|
|
@ -21,6 +21,7 @@ vi.mock('node:fs', () => ({
|
|||
mkdir: vi.fn(),
|
||||
unlink: vi.fn(),
|
||||
},
|
||||
mkdirSync: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('node:os', () => ({
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
|
||||
import { promises as fs } from 'node:fs';
|
||||
import * as path from 'node:path';
|
||||
import * as os from 'node:os';
|
||||
import { Storage } from '../config/storage.js';
|
||||
import { getErrorMessage } from '../utils/errors.js';
|
||||
|
||||
/**
|
||||
|
@ -36,17 +36,13 @@ export interface MCPOAuthCredentials {
|
|||
* Class for managing MCP OAuth token storage and retrieval.
|
||||
*/
|
||||
export class MCPOAuthTokenStorage {
|
||||
private static readonly TOKEN_FILE = 'mcp-oauth-tokens.json';
|
||||
private static readonly CONFIG_DIR = '.gemini';
|
||||
|
||||
/**
|
||||
* Get the path to the token storage file.
|
||||
*
|
||||
* @returns The full path to the token storage file
|
||||
*/
|
||||
private static getTokenFilePath(): string {
|
||||
const homeDir = os.homedir();
|
||||
return path.join(homeDir, this.CONFIG_DIR, this.TOKEN_FILE);
|
||||
return Storage.getMcpOAuthTokensPath();
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -40,9 +40,11 @@ describe('ChatRecordingService', () => {
|
|||
mockConfig = {
|
||||
getSessionId: vi.fn().mockReturnValue('test-session-id'),
|
||||
getProjectRoot: vi.fn().mockReturnValue('/test/project/root'),
|
||||
getProjectTempDir: vi
|
||||
.fn()
|
||||
.mockReturnValue('/test/project/root/.gemini/tmp'),
|
||||
storage: {
|
||||
getProjectTempDir: vi
|
||||
.fn()
|
||||
.mockReturnValue('/test/project/root/.gemini/tmp'),
|
||||
},
|
||||
getModel: vi.fn().mockReturnValue('gemini-pro'),
|
||||
getDebugMode: vi.fn().mockReturnValue(false),
|
||||
} as unknown as Config;
|
||||
|
|
|
@ -136,7 +136,10 @@ export class ChatRecordingService {
|
|||
this.cachedLastConvData = null;
|
||||
} else {
|
||||
// Create new session
|
||||
const chatsDir = path.join(this.config.getProjectTempDir(), 'chats');
|
||||
const chatsDir = path.join(
|
||||
this.config.storage.getProjectTempDir(),
|
||||
'chats',
|
||||
);
|
||||
fs.mkdirSync(chatsDir, { recursive: true });
|
||||
|
||||
const timestamp = new Date()
|
||||
|
@ -422,7 +425,10 @@ export class ChatRecordingService {
|
|||
*/
|
||||
deleteSession(sessionId: string): void {
|
||||
try {
|
||||
const chatsDir = path.join(this.config.getProjectTempDir(), 'chats');
|
||||
const chatsDir = path.join(
|
||||
this.config.storage.getProjectTempDir(),
|
||||
'chats',
|
||||
);
|
||||
const sessionPath = path.join(chatsDir, `${sessionId}.json`);
|
||||
fs.unlinkSync(sessionPath);
|
||||
} catch (error) {
|
||||
|
|
|
@ -6,6 +6,7 @@
|
|||
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import { GitService } from './gitService.js';
|
||||
import { Storage } from '../config/storage.js';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs/promises';
|
||||
import * as os from 'os';
|
||||
|
@ -55,6 +56,7 @@ describe('GitService', () => {
|
|||
let projectRoot: string;
|
||||
let homedir: string;
|
||||
let hash: string;
|
||||
let storage: Storage;
|
||||
|
||||
beforeEach(async () => {
|
||||
testRootDir = await fs.mkdtemp(path.join(os.tmpdir(), 'git-service-test-'));
|
||||
|
@ -100,6 +102,7 @@ describe('GitService', () => {
|
|||
hoistedMockCommit.mockResolvedValue({
|
||||
commit: 'initial',
|
||||
});
|
||||
storage = new Storage(projectRoot);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
|
@ -109,13 +112,13 @@ describe('GitService', () => {
|
|||
|
||||
describe('constructor', () => {
|
||||
it('should successfully create an instance', () => {
|
||||
expect(() => new GitService(projectRoot)).not.toThrow();
|
||||
expect(() => new GitService(projectRoot, storage)).not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('verifyGitAvailability', () => {
|
||||
it('should resolve true if git --version command succeeds', async () => {
|
||||
const service = new GitService(projectRoot);
|
||||
const service = new GitService(projectRoot, storage);
|
||||
await expect(service.verifyGitAvailability()).resolves.toBe(true);
|
||||
});
|
||||
|
||||
|
@ -124,7 +127,7 @@ describe('GitService', () => {
|
|||
callback(new Error('git not found'));
|
||||
return {} as ChildProcess;
|
||||
});
|
||||
const service = new GitService(projectRoot);
|
||||
const service = new GitService(projectRoot, storage);
|
||||
await expect(service.verifyGitAvailability()).resolves.toBe(false);
|
||||
});
|
||||
});
|
||||
|
@ -135,14 +138,14 @@ describe('GitService', () => {
|
|||
callback(new Error('git not found'));
|
||||
return {} as ChildProcess;
|
||||
});
|
||||
const service = new GitService(projectRoot);
|
||||
const service = new GitService(projectRoot, storage);
|
||||
await expect(service.initialize()).rejects.toThrow(
|
||||
'Checkpointing is enabled, but Git is not installed. Please install Git or disable checkpointing to continue.',
|
||||
);
|
||||
});
|
||||
|
||||
it('should call setupShadowGitRepository if Git is available', async () => {
|
||||
const service = new GitService(projectRoot);
|
||||
const service = new GitService(projectRoot, storage);
|
||||
const setupSpy = vi
|
||||
.spyOn(service, 'setupShadowGitRepository')
|
||||
.mockResolvedValue(undefined);
|
||||
|
@ -162,14 +165,14 @@ describe('GitService', () => {
|
|||
});
|
||||
|
||||
it('should create history and repository directories', async () => {
|
||||
const service = new GitService(projectRoot);
|
||||
const service = new GitService(projectRoot, storage);
|
||||
await service.setupShadowGitRepository();
|
||||
const stats = await fs.stat(repoDir);
|
||||
expect(stats.isDirectory()).toBe(true);
|
||||
});
|
||||
|
||||
it('should create a .gitconfig file with the correct content', async () => {
|
||||
const service = new GitService(projectRoot);
|
||||
const service = new GitService(projectRoot, storage);
|
||||
await service.setupShadowGitRepository();
|
||||
|
||||
const expectedConfigContent =
|
||||
|
@ -180,7 +183,7 @@ describe('GitService', () => {
|
|||
|
||||
it('should initialize git repo in historyDir if not already initialized', async () => {
|
||||
hoistedMockCheckIsRepo.mockResolvedValue(false);
|
||||
const service = new GitService(projectRoot);
|
||||
const service = new GitService(projectRoot, storage);
|
||||
await service.setupShadowGitRepository();
|
||||
expect(hoistedMockSimpleGit).toHaveBeenCalledWith(repoDir);
|
||||
expect(hoistedMockInit).toHaveBeenCalled();
|
||||
|
@ -188,7 +191,7 @@ describe('GitService', () => {
|
|||
|
||||
it('should not initialize git repo if already initialized', async () => {
|
||||
hoistedMockCheckIsRepo.mockResolvedValue(true);
|
||||
const service = new GitService(projectRoot);
|
||||
const service = new GitService(projectRoot, storage);
|
||||
await service.setupShadowGitRepository();
|
||||
expect(hoistedMockInit).not.toHaveBeenCalled();
|
||||
});
|
||||
|
@ -198,7 +201,7 @@ describe('GitService', () => {
|
|||
const visibleGitIgnorePath = path.join(projectRoot, '.gitignore');
|
||||
await fs.writeFile(visibleGitIgnorePath, gitignoreContent);
|
||||
|
||||
const service = new GitService(projectRoot);
|
||||
const service = new GitService(projectRoot, storage);
|
||||
await service.setupShadowGitRepository();
|
||||
|
||||
const hiddenGitIgnorePath = path.join(repoDir, '.gitignore');
|
||||
|
@ -207,7 +210,7 @@ describe('GitService', () => {
|
|||
});
|
||||
|
||||
it('should not create a .gitignore in shadow repo if project .gitignore does not exist', async () => {
|
||||
const service = new GitService(projectRoot);
|
||||
const service = new GitService(projectRoot, storage);
|
||||
await service.setupShadowGitRepository();
|
||||
|
||||
const hiddenGitIgnorePath = path.join(repoDir, '.gitignore');
|
||||
|
@ -221,7 +224,7 @@ describe('GitService', () => {
|
|||
// Create a directory instead of a file to cause a read error
|
||||
await fs.mkdir(visibleGitIgnorePath);
|
||||
|
||||
const service = new GitService(projectRoot);
|
||||
const service = new GitService(projectRoot, storage);
|
||||
// EISDIR is the expected error code on Unix-like systems
|
||||
await expect(service.setupShadowGitRepository()).rejects.toThrow(
|
||||
/EISDIR: illegal operation on a directory, read|EBUSY: resource busy or locked, read/,
|
||||
|
@ -230,7 +233,7 @@ describe('GitService', () => {
|
|||
|
||||
it('should make an initial commit if no commits exist in history repo', async () => {
|
||||
hoistedMockCheckIsRepo.mockResolvedValue(false);
|
||||
const service = new GitService(projectRoot);
|
||||
const service = new GitService(projectRoot, storage);
|
||||
await service.setupShadowGitRepository();
|
||||
expect(hoistedMockCommit).toHaveBeenCalledWith('Initial commit', {
|
||||
'--allow-empty': null,
|
||||
|
@ -239,7 +242,7 @@ describe('GitService', () => {
|
|||
|
||||
it('should not make an initial commit if commits already exist', async () => {
|
||||
hoistedMockCheckIsRepo.mockResolvedValue(true);
|
||||
const service = new GitService(projectRoot);
|
||||
const service = new GitService(projectRoot, storage);
|
||||
await service.setupShadowGitRepository();
|
||||
expect(hoistedMockCommit).not.toHaveBeenCalled();
|
||||
});
|
||||
|
|
|
@ -6,22 +6,22 @@
|
|||
|
||||
import * as fs from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
import * as os from 'os';
|
||||
import { isNodeError } from '../utils/errors.js';
|
||||
import { exec } from 'node:child_process';
|
||||
import { simpleGit, SimpleGit, CheckRepoActions } from 'simple-git';
|
||||
import { getProjectHash, GEMINI_DIR } from '../utils/paths.js';
|
||||
import { Storage } from '../config/storage.js';
|
||||
|
||||
export class GitService {
|
||||
private projectRoot: string;
|
||||
private storage: Storage;
|
||||
|
||||
constructor(projectRoot: string) {
|
||||
constructor(projectRoot: string, storage: Storage) {
|
||||
this.projectRoot = path.resolve(projectRoot);
|
||||
this.storage = storage;
|
||||
}
|
||||
|
||||
private getHistoryDir(): string {
|
||||
const hash = getProjectHash(this.projectRoot);
|
||||
return path.join(os.homedir(), GEMINI_DIR, 'history', hash);
|
||||
return this.storage.getHistoryDir();
|
||||
}
|
||||
|
||||
async initialize(): Promise<void> {
|
||||
|
|
|
@ -22,13 +22,13 @@ import {
|
|||
TEST_ONLY,
|
||||
} from './clearcut-logger.js';
|
||||
import { ConfigParameters } from '../../config/config.js';
|
||||
import * as userAccount from '../../utils/user_account.js';
|
||||
import * as userId from '../../utils/user_id.js';
|
||||
import { EventMetadataKey } from './event-metadata-key.js';
|
||||
import { makeFakeConfig } from '../../test-utils/config.js';
|
||||
import { http, HttpResponse } from 'msw';
|
||||
import { server } from '../../mocks/msw.js';
|
||||
import { makeChatCompressionEvent } from '../types.js';
|
||||
import { UserAccountManager } from '../../utils/userAccountManager.js';
|
||||
import { InstallationManager } from '../../utils/installationManager.js';
|
||||
|
||||
interface CustomMatchers<R = unknown> {
|
||||
toHaveMetadataValue: ([key, value]: [EventMetadataKey, string]) => R;
|
||||
|
@ -71,11 +71,11 @@ expect.extend({
|
|||
},
|
||||
});
|
||||
|
||||
vi.mock('../../utils/user_account');
|
||||
vi.mock('../../utils/user_id');
|
||||
vi.mock('../../utils/userAccountManager.js');
|
||||
vi.mock('../../utils/installationManager.js');
|
||||
|
||||
const mockUserAccount = vi.mocked(userAccount);
|
||||
const mockUserId = vi.mocked(userId);
|
||||
const mockUserAccount = vi.mocked(UserAccountManager.prototype);
|
||||
const mockInstallMgr = vi.mocked(InstallationManager.prototype);
|
||||
|
||||
// TODO(richieforeman): Consider moving this to test setup globally.
|
||||
beforeAll(() => {
|
||||
|
@ -113,7 +113,6 @@ describe('ClearcutLogger', () => {
|
|||
config = {} as Partial<ConfigParameters>,
|
||||
lifetimeGoogleAccounts = 1,
|
||||
cachedGoogleAccount = 'test@google.com',
|
||||
installationId = 'test-installation-id',
|
||||
} = {}) {
|
||||
server.resetHandlers(
|
||||
http.post(CLEARCUT_URL, () => HttpResponse.text(EXAMPLE_RESPONSE)),
|
||||
|
@ -131,7 +130,9 @@ describe('ClearcutLogger', () => {
|
|||
mockUserAccount.getLifetimeGoogleAccounts.mockReturnValue(
|
||||
lifetimeGoogleAccounts,
|
||||
);
|
||||
mockUserId.getInstallationId.mockReturnValue(installationId);
|
||||
mockInstallMgr.getInstallationId = vi
|
||||
.fn()
|
||||
.mockReturnValue('test-installation-id');
|
||||
|
||||
const logger = ClearcutLogger.getInstance(loggerConfig);
|
||||
|
||||
|
|
|
@ -22,12 +22,9 @@ import {
|
|||
} from '../types.js';
|
||||
import { EventMetadataKey } from './event-metadata-key.js';
|
||||
import { Config } from '../../config/config.js';
|
||||
import { InstallationManager } from '../../utils/installationManager.js';
|
||||
import { UserAccountManager } from '../../utils/userAccountManager.js';
|
||||
import { safeJsonStringify } from '../../utils/safeJsonStringify.js';
|
||||
import {
|
||||
getCachedGoogleAccount,
|
||||
getLifetimeGoogleAccounts,
|
||||
} from '../../utils/user_account.js';
|
||||
import { getInstallationId } from '../../utils/user_id.js';
|
||||
import { FixedDeque } from 'mnemonist';
|
||||
import { GIT_COMMIT_INFO, CLI_VERSION } from '../../generated/git-commit.js';
|
||||
import { DetectedIde, detectIde } from '../../ide/detect-ide.js';
|
||||
|
@ -129,6 +126,8 @@ export class ClearcutLogger {
|
|||
private config?: Config;
|
||||
private sessionData: EventValue[] = [];
|
||||
private promptId: string = '';
|
||||
private readonly installationManager: InstallationManager;
|
||||
private readonly userAccountManager: UserAccountManager;
|
||||
|
||||
/**
|
||||
* Queue of pending events that need to be flushed to the server. New events
|
||||
|
@ -152,10 +151,12 @@ export class ClearcutLogger {
|
|||
*/
|
||||
private pendingFlush: boolean = false;
|
||||
|
||||
private constructor(config?: Config) {
|
||||
private constructor(config: Config) {
|
||||
this.config = config;
|
||||
this.events = new FixedDeque<LogEventEntry[]>(Array, MAX_EVENTS);
|
||||
this.promptId = config?.getSessionId() ?? '';
|
||||
this.installationManager = new InstallationManager();
|
||||
this.userAccountManager = new UserAccountManager();
|
||||
}
|
||||
|
||||
static getInstance(config?: Config): ClearcutLogger | undefined {
|
||||
|
@ -202,12 +203,14 @@ export class ClearcutLogger {
|
|||
}
|
||||
|
||||
createLogEvent(eventName: EventNames, data: EventValue[] = []): LogEvent {
|
||||
const email = getCachedGoogleAccount();
|
||||
const email = this.userAccountManager.getCachedGoogleAccount();
|
||||
|
||||
if (eventName !== EventNames.START_SESSION) {
|
||||
data.push(...this.sessionData);
|
||||
}
|
||||
data = this.addDefaultFields(data);
|
||||
const totalAccounts = this.userAccountManager.getLifetimeGoogleAccounts();
|
||||
|
||||
data = this.addDefaultFields(data, totalAccounts);
|
||||
|
||||
const logEvent: LogEvent = {
|
||||
console_type: 'GEMINI_CLI',
|
||||
|
@ -220,7 +223,7 @@ export class ClearcutLogger {
|
|||
if (email) {
|
||||
logEvent.client_email = email;
|
||||
} else {
|
||||
logEvent.client_install_id = getInstallationId();
|
||||
logEvent.client_install_id = this.installationManager.getInstallationId();
|
||||
}
|
||||
|
||||
return logEvent;
|
||||
|
@ -679,8 +682,7 @@ export class ClearcutLogger {
|
|||
* Adds default fields to data, and returns a new data array. This fields
|
||||
* should exist on all log events.
|
||||
*/
|
||||
addDefaultFields(data: EventValue[]): EventValue[] {
|
||||
const totalAccounts = getLifetimeGoogleAccounts();
|
||||
addDefaultFields(data: EventValue[], totalAccounts: number): EventValue[] {
|
||||
const surface = determineSurface();
|
||||
|
||||
const defaultLogMetadata: EventValue[] = [
|
||||
|
|
|
@ -17,6 +17,7 @@ vi.mock('fs', () => ({
|
|||
},
|
||||
statSync: vi.fn(),
|
||||
readdirSync: vi.fn(),
|
||||
mkdirSync: vi.fn(),
|
||||
}));
|
||||
import { LSTool } from './ls.js';
|
||||
import { Config } from '../config/config.js';
|
||||
|
|
|
@ -18,7 +18,19 @@ import * as os from 'os';
|
|||
import { ToolConfirmationOutcome } from './tools.js';
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('fs/promises');
|
||||
vi.mock(import('fs/promises'), async (importOriginal) => {
|
||||
const actual = await importOriginal();
|
||||
return {
|
||||
...actual,
|
||||
mkdir: vi.fn(),
|
||||
readFile: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('fs', () => ({
|
||||
mkdirSync: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('os');
|
||||
|
||||
const MEMORY_SECTION_HEADER = '## Gemini Added Memories';
|
||||
|
|
|
@ -15,7 +15,7 @@ import {
|
|||
import { FunctionDeclaration } from '@google/genai';
|
||||
import * as fs from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
import { homedir } from 'os';
|
||||
import { Storage } from '../config/storage.js';
|
||||
import * as Diff from 'diff';
|
||||
import { DEFAULT_DIFF_OPTIONS } from './diffOptions.js';
|
||||
import { tildeifyPath } from '../utils/paths.js';
|
||||
|
@ -96,7 +96,7 @@ interface SaveMemoryParams {
|
|||
}
|
||||
|
||||
function getGlobalMemoryFilePath(): string {
|
||||
return path.join(homedir(), GEMINI_CONFIG_DIR, getCurrentGeminiMdFilename());
|
||||
return path.join(Storage.getGlobalGeminiDir(), getCurrentGeminiMdFilename());
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -27,6 +27,7 @@ let mockSendMessageStream: any;
|
|||
|
||||
vi.mock('fs', () => ({
|
||||
statSync: vi.fn(),
|
||||
mkdirSync: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('../core/client.js', () => ({
|
||||
|
|
|
@ -0,0 +1,102 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { vi, describe, it, expect, beforeEach, afterEach, Mock } from 'vitest';
|
||||
import { InstallationManager } from './installationManager.js';
|
||||
import * as fs from 'node:fs';
|
||||
import * as os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import { randomUUID } from 'crypto';
|
||||
|
||||
vi.mock('node:fs', async (importOriginal) => {
|
||||
const actual = await importOriginal<typeof import('node:fs')>();
|
||||
return {
|
||||
...actual,
|
||||
readFileSync: vi.fn(actual.readFileSync),
|
||||
existsSync: vi.fn(actual.existsSync),
|
||||
} as typeof actual;
|
||||
});
|
||||
|
||||
vi.mock('os', async (importOriginal) => {
|
||||
const os = await importOriginal<typeof import('os')>();
|
||||
return {
|
||||
...os,
|
||||
homedir: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('crypto', async (importOriginal) => {
|
||||
const crypto = await importOriginal<typeof import('crypto')>();
|
||||
return {
|
||||
...crypto,
|
||||
randomUUID: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
describe('InstallationManager', () => {
|
||||
let tempHomeDir: string;
|
||||
let installationManager: InstallationManager;
|
||||
const installationIdFile = () =>
|
||||
path.join(tempHomeDir, '.gemini', 'installation_id');
|
||||
|
||||
beforeEach(() => {
|
||||
tempHomeDir = fs.mkdtempSync(
|
||||
path.join(os.tmpdir(), 'gemini-cli-test-home-'),
|
||||
);
|
||||
(os.homedir as Mock).mockReturnValue(tempHomeDir);
|
||||
installationManager = new InstallationManager();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
fs.rmSync(tempHomeDir, { recursive: true, force: true });
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('getInstallationId', () => {
|
||||
it('should create and write a new installation ID if one does not exist', () => {
|
||||
const newId = 'new-uuid-123';
|
||||
(randomUUID as Mock).mockReturnValue(newId);
|
||||
|
||||
const installationId = installationManager.getInstallationId();
|
||||
|
||||
expect(installationId).toBe(newId);
|
||||
expect(fs.existsSync(installationIdFile())).toBe(true);
|
||||
expect(fs.readFileSync(installationIdFile(), 'utf-8')).toBe(newId);
|
||||
});
|
||||
|
||||
it('should read an existing installation ID from a file', () => {
|
||||
const existingId = 'existing-uuid-123';
|
||||
fs.mkdirSync(path.dirname(installationIdFile()), { recursive: true });
|
||||
fs.writeFileSync(installationIdFile(), existingId);
|
||||
|
||||
const installationId = installationManager.getInstallationId();
|
||||
|
||||
expect(installationId).toBe(existingId);
|
||||
});
|
||||
|
||||
it('should return the same ID on subsequent calls', () => {
|
||||
const firstId = installationManager.getInstallationId();
|
||||
const secondId = installationManager.getInstallationId();
|
||||
expect(secondId).toBe(firstId);
|
||||
});
|
||||
|
||||
it('should handle read errors and return a fallback ID', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValueOnce(true);
|
||||
const readSpy = vi.mocked(fs.readFileSync);
|
||||
readSpy.mockImplementationOnce(() => {
|
||||
throw new Error('Read error');
|
||||
});
|
||||
const consoleErrorSpy = vi
|
||||
.spyOn(console, 'error')
|
||||
.mockImplementation(() => {});
|
||||
|
||||
const id = installationManager.getInstallationId();
|
||||
|
||||
expect(id).toBe('123456789');
|
||||
expect(consoleErrorSpy).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,58 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import * as fs from 'fs';
|
||||
import { randomUUID } from 'crypto';
|
||||
import * as path from 'node:path';
|
||||
import { Storage } from '../config/storage.js';
|
||||
|
||||
export class InstallationManager {
|
||||
private getInstallationIdPath(): string {
|
||||
return Storage.getInstallationIdPath();
|
||||
}
|
||||
|
||||
private readInstallationIdFromFile(): string | null {
|
||||
const installationIdFile = this.getInstallationIdPath();
|
||||
if (fs.existsSync(installationIdFile)) {
|
||||
const installationid = fs
|
||||
.readFileSync(installationIdFile, 'utf-8')
|
||||
.trim();
|
||||
return installationid || null;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private writeInstallationIdToFile(installationId: string) {
|
||||
const installationIdFile = this.getInstallationIdPath();
|
||||
const dir = path.dirname(installationIdFile);
|
||||
fs.mkdirSync(dir, { recursive: true });
|
||||
fs.writeFileSync(installationIdFile, installationId, 'utf-8');
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the installation ID from a file, creating it if it doesn't exist.
|
||||
* This ID is used for unique user installation tracking.
|
||||
* @returns A UUID string for the user.
|
||||
*/
|
||||
getInstallationId(): string {
|
||||
try {
|
||||
let installationId = this.readInstallationIdFromFile();
|
||||
|
||||
if (!installationId) {
|
||||
installationId = randomUUID();
|
||||
this.writeInstallationIdToFile(installationId);
|
||||
}
|
||||
|
||||
return installationId;
|
||||
} catch (error) {
|
||||
console.error(
|
||||
'Error accessing installation ID file, generating ephemeral ID:',
|
||||
error,
|
||||
);
|
||||
return '123456789';
|
||||
}
|
||||
}
|
||||
}
|
|
@ -10,8 +10,6 @@ import * as crypto from 'crypto';
|
|||
|
||||
export const GEMINI_DIR = '.gemini';
|
||||
export const GOOGLE_ACCOUNTS_FILENAME = 'google_accounts.json';
|
||||
const TMP_DIR_NAME = 'tmp';
|
||||
const COMMANDS_DIR_NAME = 'commands';
|
||||
|
||||
/**
|
||||
* Special characters that need to be escaped in file paths for shell compatibility.
|
||||
|
@ -174,33 +172,6 @@ export function getProjectHash(projectRoot: string): string {
|
|||
return crypto.createHash('sha256').update(projectRoot).digest('hex');
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a unique temporary directory path for a project.
|
||||
* @param projectRoot The absolute path to the project's root directory.
|
||||
* @returns The path to the project's temporary directory.
|
||||
*/
|
||||
export function getProjectTempDir(projectRoot: string): string {
|
||||
const hash = getProjectHash(projectRoot);
|
||||
return path.join(os.homedir(), GEMINI_DIR, TMP_DIR_NAME, hash);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the absolute path to the user-level commands directory.
|
||||
* @returns The path to the user's commands directory.
|
||||
*/
|
||||
export function getUserCommandsDir(): string {
|
||||
return path.join(os.homedir(), GEMINI_DIR, COMMANDS_DIR_NAME);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the absolute path to the project-level commands directory.
|
||||
* @param projectRoot The absolute path to the project's root directory.
|
||||
* @returns The path to the project's commands directory.
|
||||
*/
|
||||
export function getProjectCommandsDir(projectRoot: string): string {
|
||||
return path.join(projectRoot, GEMINI_DIR, COMMANDS_DIR_NAME);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a path is a subpath of another path.
|
||||
* @param parentPath The parent path.
|
||||
|
|
|
@ -5,12 +5,7 @@
|
|||
*/
|
||||
|
||||
import { vi, describe, it, expect, beforeEach, afterEach, Mock } from 'vitest';
|
||||
import {
|
||||
cacheGoogleAccount,
|
||||
getCachedGoogleAccount,
|
||||
clearCachedGoogleAccount,
|
||||
getLifetimeGoogleAccounts,
|
||||
} from './user_account.js';
|
||||
import { UserAccountManager } from './userAccountManager.js';
|
||||
import * as fs from 'node:fs';
|
||||
import * as os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
@ -23,16 +18,21 @@ vi.mock('os', async (importOriginal) => {
|
|||
};
|
||||
});
|
||||
|
||||
describe('user_account', () => {
|
||||
describe('UserAccountManager', () => {
|
||||
let tempHomeDir: string;
|
||||
const accountsFile = () =>
|
||||
path.join(tempHomeDir, '.gemini', 'google_accounts.json');
|
||||
let userAccountManager: UserAccountManager;
|
||||
let accountsFile: () => string;
|
||||
|
||||
beforeEach(() => {
|
||||
tempHomeDir = fs.mkdtempSync(
|
||||
path.join(os.tmpdir(), 'gemini-cli-test-home-'),
|
||||
);
|
||||
(os.homedir as Mock).mockReturnValue(tempHomeDir);
|
||||
accountsFile = () =>
|
||||
path.join(tempHomeDir, '.gemini', 'google_accounts.json');
|
||||
userAccountManager = new UserAccountManager();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
fs.rmSync(tempHomeDir, { recursive: true, force: true });
|
||||
vi.clearAllMocks();
|
||||
|
@ -40,7 +40,7 @@ describe('user_account', () => {
|
|||
|
||||
describe('cacheGoogleAccount', () => {
|
||||
it('should create directory and write initial account file', async () => {
|
||||
await cacheGoogleAccount('test1@google.com');
|
||||
await userAccountManager.cacheGoogleAccount('test1@google.com');
|
||||
|
||||
// Verify Google Account ID was cached
|
||||
expect(fs.existsSync(accountsFile())).toBe(true);
|
||||
|
@ -60,7 +60,7 @@ describe('user_account', () => {
|
|||
),
|
||||
);
|
||||
|
||||
await cacheGoogleAccount('test3@google.com');
|
||||
await userAccountManager.cacheGoogleAccount('test3@google.com');
|
||||
|
||||
expect(fs.readFileSync(accountsFile(), 'utf-8')).toBe(
|
||||
JSON.stringify(
|
||||
|
@ -84,8 +84,8 @@ describe('user_account', () => {
|
|||
2,
|
||||
),
|
||||
);
|
||||
await cacheGoogleAccount('test2@google.com');
|
||||
await cacheGoogleAccount('test1@google.com');
|
||||
await userAccountManager.cacheGoogleAccount('test2@google.com');
|
||||
await userAccountManager.cacheGoogleAccount('test1@google.com');
|
||||
|
||||
expect(fs.readFileSync(accountsFile(), 'utf-8')).toBe(
|
||||
JSON.stringify(
|
||||
|
@ -103,7 +103,7 @@ describe('user_account', () => {
|
|||
.spyOn(console, 'log')
|
||||
.mockImplementation(() => {});
|
||||
|
||||
await cacheGoogleAccount('test1@google.com');
|
||||
await userAccountManager.cacheGoogleAccount('test1@google.com');
|
||||
|
||||
expect(consoleLogSpy).toHaveBeenCalled();
|
||||
expect(JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'))).toEqual({
|
||||
|
@ -122,7 +122,7 @@ describe('user_account', () => {
|
|||
.spyOn(console, 'log')
|
||||
.mockImplementation(() => {});
|
||||
|
||||
await cacheGoogleAccount('test2@google.com');
|
||||
await userAccountManager.cacheGoogleAccount('test2@google.com');
|
||||
|
||||
expect(consoleLogSpy).toHaveBeenCalled();
|
||||
expect(JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'))).toEqual({
|
||||
|
@ -139,19 +139,19 @@ describe('user_account', () => {
|
|||
accountsFile(),
|
||||
JSON.stringify({ active: 'active@google.com', old: [] }, null, 2),
|
||||
);
|
||||
const account = getCachedGoogleAccount();
|
||||
const account = userAccountManager.getCachedGoogleAccount();
|
||||
expect(account).toBe('active@google.com');
|
||||
});
|
||||
|
||||
it('should return null if file does not exist', () => {
|
||||
const account = getCachedGoogleAccount();
|
||||
const account = userAccountManager.getCachedGoogleAccount();
|
||||
expect(account).toBeNull();
|
||||
});
|
||||
|
||||
it('should return null if file is empty', () => {
|
||||
fs.mkdirSync(path.dirname(accountsFile()), { recursive: true });
|
||||
fs.writeFileSync(accountsFile(), '');
|
||||
const account = getCachedGoogleAccount();
|
||||
const account = userAccountManager.getCachedGoogleAccount();
|
||||
expect(account).toBeNull();
|
||||
});
|
||||
|
||||
|
@ -162,7 +162,7 @@ describe('user_account', () => {
|
|||
.spyOn(console, 'log')
|
||||
.mockImplementation(() => {});
|
||||
|
||||
const account = getCachedGoogleAccount();
|
||||
const account = userAccountManager.getCachedGoogleAccount();
|
||||
|
||||
expect(account).toBeNull();
|
||||
expect(consoleLogSpy).toHaveBeenCalled();
|
||||
|
@ -171,7 +171,7 @@ describe('user_account', () => {
|
|||
it('should return null if active key is missing', () => {
|
||||
fs.mkdirSync(path.dirname(accountsFile()), { recursive: true });
|
||||
fs.writeFileSync(accountsFile(), JSON.stringify({ old: [] }));
|
||||
const account = getCachedGoogleAccount();
|
||||
const account = userAccountManager.getCachedGoogleAccount();
|
||||
expect(account).toBeNull();
|
||||
});
|
||||
});
|
||||
|
@ -188,7 +188,7 @@ describe('user_account', () => {
|
|||
),
|
||||
);
|
||||
|
||||
await clearCachedGoogleAccount();
|
||||
await userAccountManager.clearCachedGoogleAccount();
|
||||
|
||||
const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'));
|
||||
expect(stored.active).toBeNull();
|
||||
|
@ -198,7 +198,7 @@ describe('user_account', () => {
|
|||
it('should handle empty file gracefully', async () => {
|
||||
fs.mkdirSync(path.dirname(accountsFile()), { recursive: true });
|
||||
fs.writeFileSync(accountsFile(), '');
|
||||
await clearCachedGoogleAccount();
|
||||
await userAccountManager.clearCachedGoogleAccount();
|
||||
const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'));
|
||||
expect(stored.active).toBeNull();
|
||||
expect(stored.old).toEqual([]);
|
||||
|
@ -211,7 +211,7 @@ describe('user_account', () => {
|
|||
.spyOn(console, 'log')
|
||||
.mockImplementation(() => {});
|
||||
|
||||
await clearCachedGoogleAccount();
|
||||
await userAccountManager.clearCachedGoogleAccount();
|
||||
|
||||
expect(consoleLogSpy).toHaveBeenCalled();
|
||||
const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'));
|
||||
|
@ -226,7 +226,7 @@ describe('user_account', () => {
|
|||
JSON.stringify({ active: null, old: ['old1@google.com'] }, null, 2),
|
||||
);
|
||||
|
||||
await clearCachedGoogleAccount();
|
||||
await userAccountManager.clearCachedGoogleAccount();
|
||||
|
||||
const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'));
|
||||
expect(stored.active).toBeNull();
|
||||
|
@ -247,7 +247,7 @@ describe('user_account', () => {
|
|||
),
|
||||
);
|
||||
|
||||
await clearCachedGoogleAccount();
|
||||
await userAccountManager.clearCachedGoogleAccount();
|
||||
|
||||
const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'));
|
||||
expect(stored.active).toBeNull();
|
||||
|
@ -257,24 +257,24 @@ describe('user_account', () => {
|
|||
|
||||
describe('getLifetimeGoogleAccounts', () => {
|
||||
it('should return 0 if the file does not exist', () => {
|
||||
expect(getLifetimeGoogleAccounts()).toBe(0);
|
||||
expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(0);
|
||||
});
|
||||
|
||||
it('should return 0 if the file is empty', () => {
|
||||
fs.mkdirSync(path.dirname(accountsFile()), { recursive: true });
|
||||
fs.writeFileSync(accountsFile(), '');
|
||||
expect(getLifetimeGoogleAccounts()).toBe(0);
|
||||
expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(0);
|
||||
});
|
||||
|
||||
it('should return 0 if the file is corrupted', () => {
|
||||
fs.mkdirSync(path.dirname(accountsFile()), { recursive: true });
|
||||
fs.writeFileSync(accountsFile(), 'invalid json');
|
||||
const consoleLogSpy = vi
|
||||
const consoleDebugSpy = vi
|
||||
.spyOn(console, 'log')
|
||||
.mockImplementation(() => {});
|
||||
|
||||
expect(getLifetimeGoogleAccounts()).toBe(0);
|
||||
expect(consoleLogSpy).toHaveBeenCalled();
|
||||
expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(0);
|
||||
expect(consoleDebugSpy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return 1 if there is only an active account', () => {
|
||||
|
@ -283,7 +283,7 @@ describe('user_account', () => {
|
|||
accountsFile(),
|
||||
JSON.stringify({ active: 'test1@google.com', old: [] }),
|
||||
);
|
||||
expect(getLifetimeGoogleAccounts()).toBe(1);
|
||||
expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(1);
|
||||
});
|
||||
|
||||
it('should correctly count old accounts when active is null', () => {
|
||||
|
@ -295,7 +295,7 @@ describe('user_account', () => {
|
|||
old: ['test1@google.com', 'test2@google.com'],
|
||||
}),
|
||||
);
|
||||
expect(getLifetimeGoogleAccounts()).toBe(2);
|
||||
expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(2);
|
||||
});
|
||||
|
||||
it('should correctly count both active and old accounts', () => {
|
||||
|
@ -307,7 +307,7 @@ describe('user_account', () => {
|
|||
old: ['test1@google.com', 'test2@google.com'],
|
||||
}),
|
||||
);
|
||||
expect(getLifetimeGoogleAccounts()).toBe(3);
|
||||
expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(3);
|
||||
});
|
||||
|
||||
it('should handle valid JSON with incorrect schema by returning 0', () => {
|
||||
|
@ -320,7 +320,7 @@ describe('user_account', () => {
|
|||
.spyOn(console, 'log')
|
||||
.mockImplementation(() => {});
|
||||
|
||||
expect(getLifetimeGoogleAccounts()).toBe(0);
|
||||
expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(0);
|
||||
expect(consoleLogSpy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
|
@ -333,7 +333,7 @@ describe('user_account', () => {
|
|||
old: ['test1@google.com', 'test2@google.com'],
|
||||
}),
|
||||
);
|
||||
expect(getLifetimeGoogleAccounts()).toBe(2);
|
||||
expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(2);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,140 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import path from 'node:path';
|
||||
import { promises as fsp, readFileSync } from 'node:fs';
|
||||
import { Storage } from '../config/storage.js';
|
||||
|
||||
interface UserAccounts {
|
||||
active: string | null;
|
||||
old: string[];
|
||||
}
|
||||
|
||||
export class UserAccountManager {
|
||||
private getGoogleAccountsCachePath(): string {
|
||||
return Storage.getGoogleAccountsPath();
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses and validates the string content of an accounts file.
|
||||
* @param content The raw string content from the file.
|
||||
* @returns A valid UserAccounts object.
|
||||
*/
|
||||
private parseAndValidateAccounts(content: string): UserAccounts {
|
||||
const defaultState = { active: null, old: [] };
|
||||
if (!content.trim()) {
|
||||
return defaultState;
|
||||
}
|
||||
|
||||
const parsed = JSON.parse(content);
|
||||
|
||||
// Inlined validation logic
|
||||
if (typeof parsed !== 'object' || parsed === null) {
|
||||
console.log('Invalid accounts file schema, starting fresh.');
|
||||
return defaultState;
|
||||
}
|
||||
const { active, old } = parsed as Partial<UserAccounts>;
|
||||
const isValid =
|
||||
(active === undefined || active === null || typeof active === 'string') &&
|
||||
(old === undefined ||
|
||||
(Array.isArray(old) && old.every((i) => typeof i === 'string')));
|
||||
|
||||
if (!isValid) {
|
||||
console.log('Invalid accounts file schema, starting fresh.');
|
||||
return defaultState;
|
||||
}
|
||||
|
||||
return {
|
||||
active: parsed.active ?? null,
|
||||
old: parsed.old ?? [],
|
||||
};
|
||||
}
|
||||
|
||||
private readAccountsSync(filePath: string): UserAccounts {
|
||||
const defaultState = { active: null, old: [] };
|
||||
try {
|
||||
const content = readFileSync(filePath, 'utf-8');
|
||||
return this.parseAndValidateAccounts(content);
|
||||
} catch (error) {
|
||||
if (
|
||||
error instanceof Error &&
|
||||
'code' in error &&
|
||||
error.code === 'ENOENT'
|
||||
) {
|
||||
return defaultState;
|
||||
}
|
||||
console.log('Error during sync read of accounts, starting fresh.', error);
|
||||
return defaultState;
|
||||
}
|
||||
}
|
||||
|
||||
private async readAccounts(filePath: string): Promise<UserAccounts> {
|
||||
const defaultState = { active: null, old: [] };
|
||||
try {
|
||||
const content = await fsp.readFile(filePath, 'utf-8');
|
||||
return this.parseAndValidateAccounts(content);
|
||||
} catch (error) {
|
||||
if (
|
||||
error instanceof Error &&
|
||||
'code' in error &&
|
||||
error.code === 'ENOENT'
|
||||
) {
|
||||
return defaultState;
|
||||
}
|
||||
console.log('Could not parse accounts file, starting fresh.', error);
|
||||
return defaultState;
|
||||
}
|
||||
}
|
||||
|
||||
async cacheGoogleAccount(email: string): Promise<void> {
|
||||
const filePath = this.getGoogleAccountsCachePath();
|
||||
await fsp.mkdir(path.dirname(filePath), { recursive: true });
|
||||
|
||||
const accounts = await this.readAccounts(filePath);
|
||||
|
||||
if (accounts.active && accounts.active !== email) {
|
||||
if (!accounts.old.includes(accounts.active)) {
|
||||
accounts.old.push(accounts.active);
|
||||
}
|
||||
}
|
||||
|
||||
// If the new email was in the old list, remove it
|
||||
accounts.old = accounts.old.filter((oldEmail) => oldEmail !== email);
|
||||
|
||||
accounts.active = email;
|
||||
await fsp.writeFile(filePath, JSON.stringify(accounts, null, 2), 'utf-8');
|
||||
}
|
||||
|
||||
getCachedGoogleAccount(): string | null {
|
||||
const filePath = this.getGoogleAccountsCachePath();
|
||||
const accounts = this.readAccountsSync(filePath);
|
||||
return accounts.active;
|
||||
}
|
||||
|
||||
getLifetimeGoogleAccounts(): number {
|
||||
const filePath = this.getGoogleAccountsCachePath();
|
||||
const accounts = this.readAccountsSync(filePath);
|
||||
const allAccounts = new Set(accounts.old);
|
||||
if (accounts.active) {
|
||||
allAccounts.add(accounts.active);
|
||||
}
|
||||
return allAccounts.size;
|
||||
}
|
||||
|
||||
async clearCachedGoogleAccount(): Promise<void> {
|
||||
const filePath = this.getGoogleAccountsCachePath();
|
||||
const accounts = await this.readAccounts(filePath);
|
||||
|
||||
if (accounts.active) {
|
||||
if (!accounts.old.includes(accounts.active)) {
|
||||
accounts.old.push(accounts.active);
|
||||
}
|
||||
accounts.active = null;
|
||||
}
|
||||
|
||||
await fsp.writeFile(filePath, JSON.stringify(accounts, null, 2), 'utf-8');
|
||||
}
|
||||
}
|
|
@ -1,131 +0,0 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import path from 'node:path';
|
||||
import { promises as fsp, readFileSync } from 'node:fs';
|
||||
import * as os from 'os';
|
||||
import { GEMINI_DIR, GOOGLE_ACCOUNTS_FILENAME } from './paths.js';
|
||||
|
||||
interface UserAccounts {
|
||||
active: string | null;
|
||||
old: string[];
|
||||
}
|
||||
|
||||
function getGoogleAccountsCachePath(): string {
|
||||
return path.join(os.homedir(), GEMINI_DIR, GOOGLE_ACCOUNTS_FILENAME);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses and validates the string content of an accounts file.
|
||||
* @param content The raw string content from the file.
|
||||
* @returns A valid UserAccounts object.
|
||||
*/
|
||||
function parseAndValidateAccounts(content: string): UserAccounts {
|
||||
const defaultState = { active: null, old: [] };
|
||||
if (!content.trim()) {
|
||||
return defaultState;
|
||||
}
|
||||
|
||||
const parsed = JSON.parse(content);
|
||||
|
||||
// Inlined validation logic
|
||||
if (typeof parsed !== 'object' || parsed === null) {
|
||||
console.log('Invalid accounts file schema, starting fresh.');
|
||||
return defaultState;
|
||||
}
|
||||
const { active, old } = parsed as Partial<UserAccounts>;
|
||||
const isValid =
|
||||
(active === undefined || active === null || typeof active === 'string') &&
|
||||
(old === undefined ||
|
||||
(Array.isArray(old) && old.every((i) => typeof i === 'string')));
|
||||
|
||||
if (!isValid) {
|
||||
console.log('Invalid accounts file schema, starting fresh.');
|
||||
return defaultState;
|
||||
}
|
||||
|
||||
return {
|
||||
active: parsed.active ?? null,
|
||||
old: parsed.old ?? [],
|
||||
};
|
||||
}
|
||||
|
||||
function readAccountsSync(filePath: string): UserAccounts {
|
||||
const defaultState = { active: null, old: [] };
|
||||
try {
|
||||
const content = readFileSync(filePath, 'utf-8');
|
||||
return parseAndValidateAccounts(content);
|
||||
} catch (error) {
|
||||
if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {
|
||||
return defaultState;
|
||||
}
|
||||
console.log('Error during sync read of accounts, starting fresh.', error);
|
||||
return defaultState;
|
||||
}
|
||||
}
|
||||
|
||||
async function readAccounts(filePath: string): Promise<UserAccounts> {
|
||||
const defaultState = { active: null, old: [] };
|
||||
try {
|
||||
const content = await fsp.readFile(filePath, 'utf-8');
|
||||
return parseAndValidateAccounts(content);
|
||||
} catch (error) {
|
||||
if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {
|
||||
return defaultState;
|
||||
}
|
||||
console.log('Could not parse accounts file, starting fresh.', error);
|
||||
return defaultState;
|
||||
}
|
||||
}
|
||||
|
||||
export async function cacheGoogleAccount(email: string): Promise<void> {
|
||||
const filePath = getGoogleAccountsCachePath();
|
||||
await fsp.mkdir(path.dirname(filePath), { recursive: true });
|
||||
|
||||
const accounts = await readAccounts(filePath);
|
||||
|
||||
if (accounts.active && accounts.active !== email) {
|
||||
if (!accounts.old.includes(accounts.active)) {
|
||||
accounts.old.push(accounts.active);
|
||||
}
|
||||
}
|
||||
|
||||
// If the new email was in the old list, remove it
|
||||
accounts.old = accounts.old.filter((oldEmail) => oldEmail !== email);
|
||||
|
||||
accounts.active = email;
|
||||
await fsp.writeFile(filePath, JSON.stringify(accounts, null, 2), 'utf-8');
|
||||
}
|
||||
|
||||
export function getCachedGoogleAccount(): string | null {
|
||||
const filePath = getGoogleAccountsCachePath();
|
||||
const accounts = readAccountsSync(filePath);
|
||||
return accounts.active;
|
||||
}
|
||||
|
||||
export function getLifetimeGoogleAccounts(): number {
|
||||
const filePath = getGoogleAccountsCachePath();
|
||||
const accounts = readAccountsSync(filePath);
|
||||
const allAccounts = new Set(accounts.old);
|
||||
if (accounts.active) {
|
||||
allAccounts.add(accounts.active);
|
||||
}
|
||||
return allAccounts.size;
|
||||
}
|
||||
|
||||
export async function clearCachedGoogleAccount(): Promise<void> {
|
||||
const filePath = getGoogleAccountsCachePath();
|
||||
const accounts = await readAccounts(filePath);
|
||||
|
||||
if (accounts.active) {
|
||||
if (!accounts.old.includes(accounts.active)) {
|
||||
accounts.old.push(accounts.active);
|
||||
}
|
||||
accounts.active = null;
|
||||
}
|
||||
|
||||
await fsp.writeFile(filePath, JSON.stringify(accounts, null, 2), 'utf-8');
|
||||
}
|
|
@ -1,24 +0,0 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { getInstallationId } from './user_id.js';
|
||||
|
||||
describe('user_id', () => {
|
||||
describe('getInstallationId', () => {
|
||||
it('should return a valid UUID format string', () => {
|
||||
const installationId = getInstallationId();
|
||||
|
||||
expect(installationId).toBeDefined();
|
||||
expect(typeof installationId).toBe('string');
|
||||
expect(installationId.length).toBeGreaterThan(0);
|
||||
|
||||
// Should return the same ID on subsequent calls (consistent)
|
||||
const secondCall = getInstallationId();
|
||||
expect(secondCall).toBe(installationId);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,58 +0,0 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import * as os from 'os';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import { randomUUID } from 'crypto';
|
||||
import { GEMINI_DIR } from './paths.js';
|
||||
|
||||
// Resolved once at module load; falls back to '' if no home dir is available.
const homeDir = os.homedir() ?? '';
// Per-user Gemini state directory (e.g. ~/.gemini).
const geminiDir = path.join(homeDir, GEMINI_DIR);
// File that persists the stable installation id across runs.
const installationIdFile = path.join(geminiDir, 'installation_id');
|
||||
|
||||
function ensureGeminiDirExists() {
|
||||
if (!fs.existsSync(geminiDir)) {
|
||||
fs.mkdirSync(geminiDir, { recursive: true });
|
||||
}
|
||||
}
|
||||
|
||||
function readInstallationIdFromFile(): string | null {
|
||||
if (fs.existsSync(installationIdFile)) {
|
||||
const installationid = fs.readFileSync(installationIdFile, 'utf-8').trim();
|
||||
return installationid || null;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
// Persists the installation id so later runs reuse the same value.
function writeInstallationIdToFile(installationId: string) {
  fs.writeFileSync(installationIdFile, installationId, 'utf-8');
}
|
||||
|
||||
/**
|
||||
* Retrieves the installation ID from a file, creating it if it doesn't exist.
|
||||
* This ID is used for unique user installation tracking.
|
||||
* @returns A UUID string for the user.
|
||||
*/
|
||||
export function getInstallationId(): string {
|
||||
try {
|
||||
ensureGeminiDirExists();
|
||||
let installationId = readInstallationIdFromFile();
|
||||
|
||||
if (!installationId) {
|
||||
installationId = randomUUID();
|
||||
writeInstallationIdToFile(installationId);
|
||||
}
|
||||
|
||||
return installationId;
|
||||
} catch (error) {
|
||||
console.error(
|
||||
'Error accessing installation ID file, generating ephemeral ID:',
|
||||
error,
|
||||
);
|
||||
return '123456789';
|
||||
}
|
||||
}
|
Loading…
Reference in New Issue