feat: Replace SQLite with JSON logging for macOS sandbox compatibility
- Removes the sqlite3 dependency and refactors the logging mechanism to use a JSON file (logs.json) instead of a database.
- This change is a temporary workaround to address issues with macOS sandboxing that were caused by the SQLite native module.
- Storing all logs in a single JSON file may introduce scalability concerns in the future.

Fixes https://github.com/google-gemini/gemini-cli/issues/522
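For orientation, the whole log now lives in a single JSON array of `LogEntry` objects at `.gemini/logs.json` (the `LogEntry` interface is added in `logger.ts` below). A minimal sketch of the on-disk shape, with illustrative values only:

import { LogEntry, MessageSenderType } from './logger.js';

// Illustrative contents of .gemini/logs.json: one flat JSON array of entries.
const exampleLog: LogEntry[] = [
  {
    sessionId: 1735732800, // Math.floor(Date.now() / 1000) at initialize()
    messageId: 0, // per-session counter, recalculated against disk on each append
    timestamp: '2025-01-01T12:00:00.000Z',
    type: MessageSenderType.USER,
    message: 'Hello, world!',
  },
];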
Commit b3f52e215a (parent 9e1cfca53f)
File diff suppressed because it is too large
@@ -24,7 +24,7 @@
     "auth:docker": "gcloud auth configure-docker us-west1-docker.pkg.dev",
     "auth": "npm run auth:npm && npm run auth:docker",
     "prerelease:dev": "npm run prerelease:version --workspaces && npm run prerelease:deps --workspaces",
-    "bundle": "node_modules/.bin/esbuild packages/cli/index.ts --bundle --outfile=bundle/gemini.js --platform=node --format=esm --external:sqlite3 --banner:js=\"import { createRequire } from 'module'; const require = createRequire(import.meta.url); globalThis.__filename = require('url').fileURLToPath(import.meta.url); globalThis.__dirname = require('path').dirname(globalThis.__filename);\" && bash scripts/copy_bundle_assets.sh"
+    "bundle": "node_modules/.bin/esbuild packages/cli/index.ts --bundle --outfile=bundle/gemini.js --platform=node --format=esm --banner:js=\"import { createRequire } from 'module'; const require = createRequire(import.meta.url); globalThis.__filename = require('url').fileURLToPath(import.meta.url); globalThis.__dirname = require('path').dirname(globalThis.__filename);\" && bash scripts/copy_bundle_assets.sh"
   },
   "bin": {
     "gemini": "bundle/gemini.js"
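Background on the dropped flag: esbuild cannot inline native Node addons, so `sqlite3` (which ships a compiled `.node` binary) had to be listed as `--external` and resolved from `node_modules` at runtime; with the dependency gone, the flag goes too. A rough equivalent of the npm script via esbuild's JS API, shown only to illustrate where `external` fits (not part of this commit):

// build.mjs-style sketch; the commented-out line is what the old script needed.
import { build } from 'esbuild';

await build({
  entryPoints: ['packages/cli/index.ts'],
  bundle: true,
  platform: 'node',
  format: 'esm',
  outfile: 'bundle/gemini.js',
  // external: ['sqlite3'], // native addons cannot be inlined into the bundle
});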
@@ -34,9 +34,7 @@
     "README.md",
     "LICENSE"
   ],
-  "dependencies": {
-    "sqlite3": "^5.1.7"
-  },
+  "dependencies": {},
   "devDependencies": {
     "@types/mime-types": "^2.1.4",
     "@vitest/coverage-v8": "^3.1.1",
@@ -27,8 +27,7 @@
     "diff": "^7.0.0",
     "dotenv": "^16.4.7",
     "fast-glob": "^3.3.3",
-    "shell-quote": "^1.8.2",
-    "sqlite3": "^5.1.7"
+    "shell-quote": "^1.8.2"
   },
   "devDependencies": {
     "@types/diff": "^7.0.2",
@@ -4,194 +4,429 @@
  * SPDX-License-Identifier: Apache-2.0
  */

-import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
-import { Logger, MessageSenderType } from './logger.js';
+import {
+  describe,
+  it,
+  expect,
+  vi,
+  beforeEach,
+  afterEach,
+  afterAll,
+} from 'vitest';
+import { Logger, MessageSenderType, LogEntry } from './logger.js';
+import { promises as fs } from 'node:fs';
+import path from 'node:path';

-// Mocks
-const mockDb = {
-  exec: vi.fn((_sql, callback) => callback?.(null)),
-  all: vi.fn((_sql, _params, callback) => callback?.(null, [])),
-  run: vi.fn((_sql, _params, callback) => callback?.(null)),
-  close: vi.fn((callback) => callback?.(null)),
-};
-
-vi.mock('sqlite3', () => ({
-  Database: vi.fn((_dbPath, _options, callback) => {
-    process.nextTick(() => callback?.(null));
-    return mockDb;
-  }),
-  default: {
-    Database: vi.fn((_dbPath, _options, callback) => {
-      process.nextTick(() => callback?.(null));
-      return mockDb;
-    }),
-  },
-}));
+const GEMINI_DIR = '.gemini';
+const LOG_FILE_NAME = 'logs.json';
+const TEST_LOG_FILE_PATH = path.join(process.cwd(), GEMINI_DIR, LOG_FILE_NAME);
+
+async function cleanupLogFile() {
+  try {
+    await fs.unlink(TEST_LOG_FILE_PATH);
+  } catch (error) {
+    if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
+      // Other errors during unlink are ignored for cleanup purposes
+    }
+  }
+  try {
+    const geminiDirPath = path.join(process.cwd(), GEMINI_DIR);
+    const dirContents = await fs.readdir(geminiDirPath);
+    for (const file of dirContents) {
+      if (file.startsWith(LOG_FILE_NAME + '.') && file.endsWith('.bak')) {
+        try {
+          await fs.unlink(path.join(geminiDirPath, file));
+        } catch (_e) {
+          /* ignore */
+        }
+      }
+    }
+  } catch (error) {
+    if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
+      /* ignore if .gemini dir itself is missing */
+    }
+  }
+}
+
+async function readLogFile(): Promise<LogEntry[]> {
+  try {
+    const content = await fs.readFile(TEST_LOG_FILE_PATH, 'utf-8');
+    return JSON.parse(content) as LogEntry[];
+  } catch (error) {
+    if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
+      return [];
+    }
+    throw error;
+  }
+}

 describe('Logger', () => {
   let logger: Logger;

   beforeEach(async () => {
-    vi.resetAllMocks();
-    // Get a new instance for each test to ensure isolation,
+    vi.useFakeTimers();
+    vi.setSystemTime(new Date('2025-01-01T12:00:00.000Z'));
+    await cleanupLogFile();
     logger = new Logger();
-    // We need to wait for the async initialize to complete
-    await logger.initialize().catch((err) => {
-      console.error('Error initializing logger:', err);
-    });
+    // Initialize is usually called here, but some tests initialize their own instances.
+    // For tests that use the global `logger`, it will be initialized here.
+    await logger.initialize();
   });

-  afterEach(() => {
-    vi.restoreAllMocks();
-    logger.close(); // Close the database connection after each test
+  afterEach(async () => {
+    logger.close();
+    await cleanupLogFile();
+    vi.useRealTimers();
+    vi.resetAllMocks(); // Ensure mocks are reset for every test
+  });
+
+  afterAll(async () => {
+    await cleanupLogFile();
   });

   describe('initialize', () => {
-    it('should execute create tables if not exists', async () => {
-      expect(mockDb.exec).toHaveBeenCalledWith(
-        expect.stringMatching(/CREATE TABLE IF NOT EXISTS messages/),
-        expect.any(Function),
-      );
+    it('should create .gemini directory and an empty log file if none exist', async () => {
+      await cleanupLogFile();
+      const geminiDirPath = path.join(process.cwd(), GEMINI_DIR);
+      try {
+        await fs.rm(geminiDirPath, { recursive: true, force: true });
+      } catch (_e) {
+        /* ignore */
+      }
+
+      const newLogger = new Logger();
+      await newLogger.initialize();
+
+      const dirExists = await fs
+        .access(geminiDirPath)
+        .then(() => true)
+        .catch(() => false);
+      expect(dirExists).toBe(true);
+      const fileExists = await fs
+        .access(TEST_LOG_FILE_PATH)
+        .then(() => true)
+        .catch(() => false);
+      expect(fileExists).toBe(true);
+      const logContent = await readLogFile();
+      expect(logContent).toEqual([]);
+      newLogger.close();
+    });
+
+    it('should load existing logs and set correct messageId for the current session', async () => {
+      const fixedTime = new Date('2025-01-01T10:00:00.000Z');
+      vi.setSystemTime(fixedTime);
+      const currentSessionId = Math.floor(fixedTime.getTime() / 1000);
+      const existingLogs: LogEntry[] = [
+        {
+          sessionId: currentSessionId,
+          messageId: 0,
+          timestamp: new Date('2025-01-01T10:00:05.000Z').toISOString(),
+          type: MessageSenderType.USER,
+          message: 'Msg1',
+        },
+        {
+          sessionId: currentSessionId - 100,
+          messageId: 5,
+          timestamp: new Date('2025-01-01T09:00:00.000Z').toISOString(),
+          type: MessageSenderType.USER,
+          message: 'OldMsg',
+        },
+        {
+          sessionId: currentSessionId,
+          messageId: 1,
+          timestamp: new Date('2025-01-01T10:00:10.000Z').toISOString(),
+          type: MessageSenderType.USER,
+          message: 'Msg2',
+        },
+      ];
+      await fs.mkdir(path.join(process.cwd(), GEMINI_DIR), { recursive: true });
+      await fs.writeFile(TEST_LOG_FILE_PATH, JSON.stringify(existingLogs));
+      const newLogger = new Logger();
+      await newLogger.initialize();
+      expect(newLogger['messageId']).toBe(2);
+      expect(newLogger['logs']).toEqual(existingLogs);
+      newLogger.close();
+    });
+
+    it('should set messageId to 0 for a new session if log file exists but has no logs for current session', async () => {
+      const fixedTime = new Date('2025-01-01T14:00:00.000Z');
+      vi.setSystemTime(fixedTime);
+      const existingLogs: LogEntry[] = [
+        {
+          sessionId: Math.floor(fixedTime.getTime() / 1000) - 500,
+          messageId: 5,
+          timestamp: new Date().toISOString(),
+          type: MessageSenderType.USER,
+          message: 'OldMsg',
+        },
+      ];
+      await fs.mkdir(path.join(process.cwd(), GEMINI_DIR), { recursive: true });
+      await fs.writeFile(TEST_LOG_FILE_PATH, JSON.stringify(existingLogs));
+      const newLogger = new Logger();
+      await newLogger.initialize();
+      expect(newLogger['messageId']).toBe(0);
+      newLogger.close();
     });

     it('should be idempotent', async () => {
-      mockDb.exec.mockClear();
-
-      await logger.initialize(); // Second call
-
-      expect(mockDb.exec).not.toHaveBeenCalled();
+      // logger is initialized in beforeEach
+      await logger.logMessage(MessageSenderType.USER, 'test message');
+      const initialMessageId = logger['messageId'];
+      const initialLogCount = logger['logs'].length;
+      await logger.initialize(); // Second call should not change state
+      expect(logger['messageId']).toBe(initialMessageId);
+      expect(logger['logs'].length).toBe(initialLogCount);
+      const logsFromFile = await readLogFile();
+      expect(logsFromFile.length).toBe(1);
+    });
+
+    it('should handle invalid JSON in log file by backing it up and starting fresh', async () => {
+      await fs.mkdir(path.join(process.cwd(), GEMINI_DIR), { recursive: true });
+      await fs.writeFile(TEST_LOG_FILE_PATH, 'invalid json');
+      const consoleDebugSpy = vi
+        .spyOn(console, 'debug')
+        .mockImplementation(() => {});
+      const newLogger = new Logger();
+      await newLogger.initialize();
+      expect(consoleDebugSpy).toHaveBeenCalledWith(
+        expect.stringContaining('Invalid JSON in log file'),
+        expect.any(SyntaxError),
+      );
+      const logContent = await readLogFile();
+      expect(logContent).toEqual([]);
+      const dirContents = await fs.readdir(
+        path.join(process.cwd(), GEMINI_DIR),
+      );
+      expect(
+        dirContents.some(
+          (f) =>
+            f.startsWith(LOG_FILE_NAME + '.invalid_json') && f.endsWith('.bak'),
+        ),
+      ).toBe(true);
+      consoleDebugSpy.mockRestore();
+      newLogger.close();
+    });
+
+    it('should handle non-array JSON in log file by backing it up and starting fresh', async () => {
+      await fs.mkdir(path.join(process.cwd(), GEMINI_DIR), { recursive: true });
+      await fs.writeFile(
+        TEST_LOG_FILE_PATH,
+        JSON.stringify({ not: 'an array' }),
+      );
+      const consoleDebugSpy = vi
+        .spyOn(console, 'debug')
+        .mockImplementation(() => {});
+      const newLogger = new Logger();
+      await newLogger.initialize();
+      expect(consoleDebugSpy).toHaveBeenCalledWith(
+        `Log file at ${TEST_LOG_FILE_PATH} is not a valid JSON array. Starting with empty logs.`,
+      );
+      const logContent = await readLogFile();
+      expect(logContent).toEqual([]);
+      const dirContents = await fs.readdir(
+        path.join(process.cwd(), GEMINI_DIR),
+      );
+      expect(
+        dirContents.some(
+          (f) =>
+            f.startsWith(LOG_FILE_NAME + '.malformed_array') &&
+            f.endsWith('.bak'),
+        ),
+      ).toBe(true);
+      consoleDebugSpy.mockRestore();
+      newLogger.close();
     });
   });

   describe('logMessage', () => {
-    it('should insert a message into the database', async () => {
-      const type = MessageSenderType.USER;
-      const message = 'Hello, world!';
-      await logger.logMessage(type, message);
-      expect(mockDb.run).toHaveBeenCalledWith(
-        "INSERT INTO messages (session_id, message_id, type, message, timestamp) VALUES (?, ?, ?, ?, datetime('now'))",
-        [expect.any(Number), 0, type, message], // sessionId, messageId, type, message
-        expect.any(Function),
-      );
+    it('should append a message to the log file and update in-memory logs', async () => {
+      await logger.logMessage(MessageSenderType.USER, 'Hello, world!');
+      const logsFromFile = await readLogFile();
+      expect(logsFromFile.length).toBe(1);
+      expect(logsFromFile[0]).toMatchObject({
+        sessionId: logger['sessionId'],
+        messageId: 0,
+        type: MessageSenderType.USER,
+        message: 'Hello, world!',
+        timestamp: new Date('2025-01-01T12:00:00.000Z').toISOString(),
+      });
+      expect(logger['logs'].length).toBe(1);
+      expect(logger['logs'][0]).toEqual(logsFromFile[0]);
+      expect(logger['messageId']).toBe(1);
     });

-    it('should increment messageId for subsequent messages', async () => {
-      await logger.logMessage(MessageSenderType.USER, 'First message');
-      expect(mockDb.run).toHaveBeenCalledWith(
-        expect.any(String),
-        [expect.any(Number), 0, MessageSenderType.USER, 'First message'],
-        expect.any(Function),
-      );
-      await logger.logMessage(MessageSenderType.USER, 'Second message');
-      expect(mockDb.run).toHaveBeenCalledWith(
-        expect.any(String),
-        [expect.any(Number), 1, MessageSenderType.USER, 'Second message'], // messageId is now 1
-        expect.any(Function),
-      );
+    it('should correctly increment messageId for subsequent messages in the same session', async () => {
+      await logger.logMessage(MessageSenderType.USER, 'First');
+      vi.advanceTimersByTime(1000);
+      await logger.logMessage(MessageSenderType.USER, 'Second');
+      const logs = await readLogFile();
+      expect(logs.length).toBe(2);
+      expect(logs[0].messageId).toBe(0);
+      expect(logs[1].messageId).toBe(1);
+      expect(logs[1].timestamp).not.toBe(logs[0].timestamp);
+      expect(logger['messageId']).toBe(2);
     });

-    it('should handle database not initialized', async () => {
+    it('should handle logger not initialized', async () => {
       const uninitializedLogger = new Logger();
-      // uninitializedLogger.initialize() is not called
-      const consoleErrorSpy = vi
-        .spyOn(console, 'error')
+      const consoleDebugSpy = vi
+        .spyOn(console, 'debug')
         .mockImplementation(() => {});

       await uninitializedLogger.logMessage(MessageSenderType.USER, 'test');
-
-      expect(consoleErrorSpy).toHaveBeenCalledWith('Database not initialized.');
-      expect(mockDb.run).not.toHaveBeenCalled();
-      consoleErrorSpy.mockRestore();
+      expect(consoleDebugSpy).toHaveBeenCalledWith(
+        'Logger not initialized or session ID missing. Cannot log message.',
+      );
+      expect((await readLogFile()).length).toBe(0);
+      consoleDebugSpy.mockRestore();
+      uninitializedLogger.close();
     });

-    it('should handle error during db.run', async () => {
-      const error = new Error('db.run failed');
-      mockDb.run.mockImplementationOnce(
-        // eslint-disable-next-line @typescript-eslint/no-explicit-any
-        (_sql: any, _params: any, callback: any) => callback?.(error),
-      );
-
-      await expect(
-        logger.logMessage(MessageSenderType.USER, 'test'),
-      ).rejects.toThrow('db.run failed');
+    it('should simulate concurrent writes from different logger instances to the same file', async () => {
+      const logger1 = new Logger(); // logger1
+      vi.setSystemTime(new Date('2025-01-01T13:00:00.000Z'));
+      await logger1.initialize();
+      const s1 = logger1['sessionId'];
+
+      const logger2 = new Logger(); // logger2, will share same session if time is same
+      vi.setSystemTime(new Date('2025-01-01T13:00:00.000Z'));
+      await logger2.initialize();
+      expect(logger2['sessionId']).toEqual(s1);
+
+      // Log from logger1
+      await logger1.logMessage(MessageSenderType.USER, 'L1M1'); // L1 internal msgId becomes 1, writes {s1, 0}
+      vi.advanceTimersByTime(10);
+
+      // Log from logger2. It reads file (sees {s1,0}), its internal msgId for s1 is 1.
+      await logger2.logMessage(MessageSenderType.USER, 'L2M1'); // L2 internal msgId becomes 2, writes {s1, 1}
+      vi.advanceTimersByTime(10);
+
+      // Log from logger1. It reads file (sees {s1,0}, {s1,1}), its internal msgId for s1 is 2.
+      await logger1.logMessage(MessageSenderType.USER, 'L1M2'); // L1 internal msgId becomes 3, writes {s1, 2}
+      vi.advanceTimersByTime(10);
+
+      // Log from logger2. It reads file (sees {s1,0}, {s1,1}, {s1,2}), its internal msgId for s1 is 3.
+      await logger2.logMessage(MessageSenderType.USER, 'L2M2'); // L2 internal msgId becomes 4, writes {s1, 3}
+
+      const logsFromFile = await readLogFile();
+      expect(logsFromFile.length).toBe(4);
+      const messageIdsInFile = logsFromFile
+        .map((log) => log.messageId)
+        .sort((a, b) => a - b);
+      expect(messageIdsInFile).toEqual([0, 1, 2, 3]);
+
+      const messagesInFile = logsFromFile
+        .sort((a, b) => a.messageId - b.messageId)
+        .map((l) => l.message);
+      expect(messagesInFile).toEqual(['L1M1', 'L2M1', 'L1M2', 'L2M2']);
+
+      // Check internal state (next messageId each logger would use for that session)
+      expect(logger1['messageId']).toBe(3); // L1 wrote 0, then 2. Next is 3.
+      expect(logger2['messageId']).toBe(4); // L2 wrote 1, then 3. Next is 4.
+
+      logger1.close();
+      logger2.close();
+    });
+
+    it('should not throw, not increment messageId, and log error if writing to file fails', async () => {
+      const writeFileSpy = vi
+        .spyOn(fs, 'writeFile')
+        .mockRejectedValueOnce(new Error('Disk full'));
+      const consoleDebugSpy = vi
+        .spyOn(console, 'debug')
+        .mockImplementation(() => {});
+      const initialMessageId = logger['messageId'];
+      const initialLogCount = logger['logs'].length;
+
+      await logger.logMessage(MessageSenderType.USER, 'test fail write');
+
+      expect(consoleDebugSpy).toHaveBeenCalledWith(
+        'Error writing to log file:',
+        expect.any(Error),
+      );
+      expect(logger['messageId']).toBe(initialMessageId); // Not incremented
+      expect(logger['logs'].length).toBe(initialLogCount); // Log not added to in-memory cache
+
+      writeFileSpy.mockRestore();
+      consoleDebugSpy.mockRestore();
     });
   });

   describe('getPreviousUserMessages', () => {
-    it('should query the database for messages', async () => {
-      mockDb.all.mockImplementationOnce(
-        // eslint-disable-next-line @typescript-eslint/no-explicit-any
-        (_sql: any, params: any, callback: any) =>
-          callback?.(null, [{ message: 'msg1' }, { message: 'msg2' }]),
-      );
-
+    it('should retrieve user messages, sorted newest first by session, then timestamp, then messageId', async () => {
+      const loggerSort = new Logger();
+      vi.setSystemTime(new Date('2025-01-01T10:00:00.000Z'));
+      await loggerSort.initialize();
+      const s1 = loggerSort['sessionId']!;
+      await loggerSort.logMessage(MessageSenderType.USER, 'S1M0_ts100000'); // msgId 0
+      vi.advanceTimersByTime(10);
+      await loggerSort.logMessage(MessageSenderType.USER, 'S1M1_ts100010'); // msgId 1
+      loggerSort.close(); // Close to ensure next initialize starts a new session if time changed
+
+      vi.setSystemTime(new Date('2025-01-01T11:00:00.000Z'));
+      await loggerSort.initialize(); // Re-initialize for a new session
+      const s2 = loggerSort['sessionId']!;
+      expect(s2).not.toEqual(s1);
+      await loggerSort.logMessage(MessageSenderType.USER, 'S2M0_ts110000'); // msgId 0 for s2
+      vi.advanceTimersByTime(10);
+      await loggerSort.logMessage(
+        'model' as MessageSenderType,
+        'S2_Model_ts110010',
+      );
+      vi.advanceTimersByTime(10);
+      await loggerSort.logMessage(MessageSenderType.USER, 'S2M1_ts110020'); // msgId 1 for s2
+      loggerSort.close();
+
+      // To test the sorting thoroughly, especially the session part, we'll read the file
+      // as if it was written by multiple sessions and then initialize a new logger to load them.
+      const combinedLogs = await readLogFile();
+      const finalLogger = new Logger();
+      // Manually set its internal logs to simulate loading from a file with mixed sessions
+      finalLogger['logs'] = combinedLogs;
+      finalLogger['initialized'] = true; // Mark as initialized to allow getPreviousUserMessages to run
+
+      const messages = await finalLogger.getPreviousUserMessages();
+      expect(messages).toEqual([
+        'S2M1_ts110020',
+        'S2M0_ts110000',
+        'S1M1_ts100010',
+        'S1M0_ts100000',
+      ]);
+      finalLogger.close();
+    });
+
+    it('should return empty array if no user messages exist', async () => {
+      await logger.logMessage('system' as MessageSenderType, 'System boot');
       const messages = await logger.getPreviousUserMessages();
-
-      expect(mockDb.all).toHaveBeenCalledWith(
-        expect.stringMatching(/SELECT message FROM messages/),
-        [],
-        expect.any(Function),
-      );
-      expect(messages).toEqual(['msg1', 'msg2']);
-    });
-
-    it('should handle database not initialized', async () => {
-      const uninitializedLogger = new Logger();
-      // uninitializedLogger.initialize() is not called
-      const consoleErrorSpy = vi
-        .spyOn(console, 'error')
-        .mockImplementation(() => {});
-
-      const messages = await uninitializedLogger.getPreviousUserMessages();
-
-      expect(consoleErrorSpy).toHaveBeenCalledWith('Database not initialized.');
       expect(messages).toEqual([]);
-      expect(mockDb.all).not.toHaveBeenCalled();
-      consoleErrorSpy.mockRestore();
     });

-    it('should handle error during db.all', async () => {
-      const error = new Error('db.all failed');
-      mockDb.all.mockImplementationOnce(
-        // eslint-disable-next-line @typescript-eslint/no-explicit-any
-        (_sql: any, _params: any, callback: any) => callback?.(error, []),
-      );
-
-      await expect(logger.getPreviousUserMessages()).rejects.toThrow(
-        'db.all failed',
-      );
+    it('should return empty array if logger not initialized', async () => {
+      const uninitializedLogger = new Logger();
+      const messages = await uninitializedLogger.getPreviousUserMessages();
+      expect(messages).toEqual([]);
+      uninitializedLogger.close();
     });
   });

   describe('close', () => {
-    it('should close the database connection', () => {
+    it('should reset logger state', async () => {
+      await logger.logMessage(MessageSenderType.USER, 'A message');
       logger.close();
-      expect(mockDb.close).toHaveBeenCalled();
-    });
-
-    it('should handle database not initialized', () => {
-      const uninitializedLogger = new Logger();
-      // uninitializedLogger.initialize() is not called
-      uninitializedLogger.close();
-      expect(() => uninitializedLogger.close()).not.toThrow();
-    });
-
-    it('should handle error during db.close', () => {
-      const error = new Error('db.close failed');
-      mockDb.close.mockImplementationOnce((callback: (error: Error) => void) =>
-        callback?.(error),
-      );
-      const consoleErrorSpy = vi
-        .spyOn(console, 'error')
+      const consoleDebugSpy = vi
+        .spyOn(console, 'debug')
         .mockImplementation(() => {});
-      logger.close();
-      expect(consoleErrorSpy).toHaveBeenCalledWith(
-        'Error closing database:',
-        error.message,
+      await logger.logMessage(MessageSenderType.USER, 'Another message');
+      expect(consoleDebugSpy).toHaveBeenCalledWith(
+        'Logger not initialized or session ID missing. Cannot log message.',
       );
-      consoleErrorSpy.mockRestore();
+      const messages = await logger.getPreviousUserMessages();
+      expect(messages).toEqual([]);
+      expect(logger['initialized']).toBe(false);
+      expect(logger['logFilePath']).toBeUndefined();
+      expect(logger['logs']).toEqual([]);
+      expect(logger['sessionId']).toBeUndefined();
+      expect(logger['messageId']).toBe(0);
+      consoleDebugSpy.mockRestore();
     });
   });
 });
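The concurrency test above exercises the append protocol implemented in logger.ts below: before each write, the instance re-reads the file and recomputes the entry's messageId from what is actually on disk, so two instances sharing a session interleave without reusing an ID. A condensed sketch of that read-recalculate-append cycle (assumes the LogEntry shape from this diff; backup and duplicate handling elided):

import { promises as fs } from 'node:fs';
import { LogEntry } from './logger.js';

async function appendEntry(filePath: string, entry: LogEntry): Promise<LogEntry> {
  // Re-read the file so another instance's writes are visible before picking an ID.
  const onDisk: LogEntry[] = JSON.parse(await fs.readFile(filePath, 'utf-8'));
  const sessionLogs = onDisk.filter((e) => e.sessionId === entry.sessionId);
  entry.messageId =
    sessionLogs.length > 0
      ? Math.max(...sessionLogs.map((e) => e.messageId)) + 1
      : 0;
  onDisk.push(entry);
  // Rewrite the whole array; there is no file lock, so a race between the
  // read and this write can still lose an entry.
  await fs.writeFile(filePath, JSON.stringify(onDisk, null, 2), 'utf-8');
  return entry;
}

Note this is read-modify-write without locking, which narrows rather than eliminates the race window; the commit message already flags the single-file approach as a temporary workaround.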
@@ -5,127 +5,235 @@
  */

 import path from 'node:path';
-import sqlite3 from 'sqlite3';
 import { promises as fs } from 'node:fs';

 const GEMINI_DIR = '.gemini';
-const DB_NAME = 'logs.db';
-const CREATE_TABLE_SQL = `
-  CREATE TABLE IF NOT EXISTS messages (
-    session_id INTEGER,
-    message_id INTEGER,
-    timestamp TEXT,
-    type TEXT,
-    message TEXT
-  );`;
+const LOG_FILE_NAME = 'logs.json';

 export enum MessageSenderType {
   USER = 'user',
 }

+export interface LogEntry {
+  sessionId: number;
+  messageId: number;
+  timestamp: string;
+  type: MessageSenderType;
+  message: string;
+}
+
 export class Logger {
-  private db: sqlite3.Database | undefined;
+  private logFilePath: string | undefined;
   private sessionId: number | undefined;
-  private messageId: number | undefined;
+  private messageId = 0; // Instance-specific counter for the next messageId
+  private initialized = false;
+  private logs: LogEntry[] = []; // In-memory cache, ideally reflects the last known state of the file

   constructor() {}

-  async initialize(): Promise<void> {
-    if (this.db) {
-      return;
-    }
-
-    this.sessionId = Math.floor(Date.now() / 1000);
-    this.messageId = 0;
-
-    // Could be cleaner if our sqlite package supported promises.
-    return new Promise((resolve, reject) => {
-      const DB_DIR = path.resolve(process.cwd(), GEMINI_DIR);
-      const DB_PATH = path.join(DB_DIR, DB_NAME);
-      fs.mkdir(DB_DIR, { recursive: true })
-        .then(() => {
-          this.db = new sqlite3.Database(
-            DB_PATH,
-            sqlite3.OPEN_READWRITE |
-              sqlite3.OPEN_CREATE |
-              sqlite3.OPEN_FULLMUTEX,
-            (err: Error | null) => {
-              if (err) {
-                reject(err);
-              }
-
-              // Read and execute the SQL script in create_tables.sql
-              this.db?.exec(CREATE_TABLE_SQL, (err: Error | null) => {
-                if (err) {
-                  this.db?.close();
-                  reject(err);
-                }
-                resolve();
-              });
-            },
-          );
-        })
-        .catch(reject);
-    });
-  }
-
-  /**
-   * Get list of previous user inputs sorted most recent first.
-   * @returns list of messages.
-   */
-  async getPreviousUserMessages(): Promise<string[]> {
-    if (!this.db) {
-      console.error('Database not initialized.');
-      return [];
-    }
-
-    return new Promise((resolve, reject) => {
-      // Most recent messages first
-      const query = `SELECT message FROM messages
-        WHERE type = '${MessageSenderType.USER}'
-        ORDER BY session_id DESC, message_id DESC`;
-      // eslint-disable-next-line @typescript-eslint/no-explicit-any
-      this.db!.all(query, [], (err: Error | null, rows: any[]) => {
-        if (err) {
-          reject(err);
-        } else {
-          resolve(rows.map((row) => row.message));
-        }
-      });
-    });
+  private async _readLogFile(): Promise<LogEntry[]> {
+    if (!this.logFilePath) {
+      throw new Error('Log file path not set during read attempt.');
+    }
+    try {
+      const fileContent = await fs.readFile(this.logFilePath, 'utf-8');
+      const parsedLogs = JSON.parse(fileContent);
+      if (!Array.isArray(parsedLogs)) {
+        console.debug(
+          `Log file at ${this.logFilePath} is not a valid JSON array. Starting with empty logs.`,
+        );
+        await this._backupCorruptedLogFile('malformed_array');
+        return [];
+      }
+      return parsedLogs.filter(
+        (entry) =>
+          typeof entry.sessionId === 'number' &&
+          typeof entry.messageId === 'number' &&
+          typeof entry.timestamp === 'string' &&
+          typeof entry.type === 'string' &&
+          typeof entry.message === 'string',
+      ) as LogEntry[];
+    } catch (error) {
+      const nodeError = error as NodeJS.ErrnoException;
+      if (nodeError.code === 'ENOENT') {
+        return [];
+      }
+      if (error instanceof SyntaxError) {
+        console.debug(
+          `Invalid JSON in log file ${this.logFilePath}. Backing up and starting fresh.`,
+          error,
+        );
+        await this._backupCorruptedLogFile('invalid_json');
+        return [];
+      }
+      console.debug(
+        `Failed to read or parse log file ${this.logFilePath}:`,
+        error,
+      );
+      throw error;
+    }
+  }
+
+  private async _backupCorruptedLogFile(reason: string): Promise<void> {
+    if (!this.logFilePath) return;
+    const backupPath = `${this.logFilePath}.${reason}.${Date.now()}.bak`;
+    try {
+      await fs.rename(this.logFilePath, backupPath);
+      console.debug(`Backed up corrupted log file to ${backupPath}`);
+    } catch (_backupError) {
+      // If rename fails (e.g. file doesn't exist), no need to log an error here as the primary error (e.g. invalid JSON) is already handled.
+    }
+  }
+
+  async initialize(): Promise<void> {
+    if (this.initialized) {
+      return;
+    }
+    this.sessionId = Math.floor(Date.now() / 1000);
+    const geminiDir = path.resolve(process.cwd(), GEMINI_DIR);
+    this.logFilePath = path.join(geminiDir, LOG_FILE_NAME);
+
+    try {
+      await fs.mkdir(geminiDir, { recursive: true });
+      let fileExisted = true;
+      try {
+        await fs.access(this.logFilePath);
+      } catch (_e) {
+        fileExisted = false;
+      }
+      this.logs = await this._readLogFile();
+      if (!fileExisted && this.logs.length === 0) {
+        await fs.writeFile(this.logFilePath, '[]', 'utf-8');
+      }
+      const sessionLogs = this.logs.filter(
+        (entry) => entry.sessionId === this.sessionId,
+      );
+      this.messageId =
+        sessionLogs.length > 0
+          ? Math.max(...sessionLogs.map((entry) => entry.messageId)) + 1
+          : 0;
+      this.initialized = true;
+    } catch (err) {
+      console.error('Failed to initialize logger:', err);
+      this.initialized = false;
+    }
+  }
+
+  private async _updateLogFile(
+    entryToAppend: LogEntry,
+  ): Promise<LogEntry | null> {
+    if (!this.logFilePath) {
+      console.debug('Log file path not set. Cannot persist log entry.');
+      throw new Error('Log file path not set during update attempt.');
+    }
+
+    let currentLogsOnDisk: LogEntry[];
+    try {
+      currentLogsOnDisk = await this._readLogFile();
+    } catch (readError) {
+      console.debug(
+        'Critical error reading log file before append:',
+        readError,
+      );
+      throw readError;
+    }
+
+    // Determine the correct messageId for the new entry based on current disk state for its session
+    const sessionLogsOnDisk = currentLogsOnDisk.filter(
+      (e) => e.sessionId === entryToAppend.sessionId,
+    );
+    const nextMessageIdForSession =
+      sessionLogsOnDisk.length > 0
+        ? Math.max(...sessionLogsOnDisk.map((e) => e.messageId)) + 1
+        : 0;
+
+    // Update the messageId of the entry we are about to append
+    entryToAppend.messageId = nextMessageIdForSession;
+
+    // Check if this entry (same session, same *recalculated* messageId, same content) might already exist
+    // This is a stricter check for true duplicates if multiple instances try to log the exact same thing
+    // at the exact same calculated messageId slot.
+    const entryExists = currentLogsOnDisk.some(
+      (e) =>
+        e.sessionId === entryToAppend.sessionId &&
+        e.messageId === entryToAppend.messageId &&
+        e.timestamp === entryToAppend.timestamp && // Timestamps are good for distinguishing
+        e.message === entryToAppend.message,
+    );
+
+    if (entryExists) {
+      console.debug(
+        `Duplicate log entry detected and skipped: session ${entryToAppend.sessionId}, messageId ${entryToAppend.messageId}`,
+      );
+      this.logs = currentLogsOnDisk; // Ensure in-memory is synced with disk
+      return null; // Indicate that no new entry was actually added
+    }
+
+    currentLogsOnDisk.push(entryToAppend);
+
+    try {
+      await fs.writeFile(
+        this.logFilePath,
+        JSON.stringify(currentLogsOnDisk, null, 2),
+        'utf-8',
+      );
+      this.logs = currentLogsOnDisk;
+      return entryToAppend; // Return the successfully appended entry
+    } catch (error) {
+      console.debug('Error writing to log file:', error);
+      throw error;
+    }
+  }
+
+  async getPreviousUserMessages(): Promise<string[]> {
+    if (!this.initialized) return [];
+    return this.logs
+      .filter((entry) => entry.type === MessageSenderType.USER)
+      .sort((a, b) => {
+        if (b.sessionId !== a.sessionId) return b.sessionId - a.sessionId;
+        const dateA = new Date(a.timestamp).getTime();
+        const dateB = new Date(b.timestamp).getTime();
+        if (dateB !== dateA) return dateB - dateA;
+        return b.messageId - a.messageId;
+      })
+      .map((entry) => entry.message);
   }

   async logMessage(type: MessageSenderType, message: string): Promise<void> {
-    if (!this.db) {
-      console.error('Database not initialized.');
+    if (!this.initialized || this.sessionId === undefined) {
+      console.debug(
+        'Logger not initialized or session ID missing. Cannot log message.',
+      );
       return;
     }

-    return new Promise((resolve, reject) => {
-      const query = `INSERT INTO messages (session_id, message_id, type, message, timestamp) VALUES (?, ?, ?, ?, datetime('now'))`;
-      this.messageId = this.messageId! + 1;
-      this.db!.run(
-        query,
-        [this.sessionId || 0, this.messageId - 1, type, message],
-        (err: Error | null) => {
-          if (err) {
-            reject(err);
-          } else {
-            resolve();
-          }
-        },
-      );
-    });
+    // The messageId used here is the instance's idea of the next ID.
+    // _updateLogFile will verify and potentially recalculate based on the file's actual state.
+    const newEntryObject: LogEntry = {
+      sessionId: this.sessionId,
+      messageId: this.messageId, // This will be recalculated in _updateLogFile
+      type,
+      message,
+      timestamp: new Date().toISOString(),
+    };
+
+    try {
+      const writtenEntry = await this._updateLogFile(newEntryObject);
+      if (writtenEntry) {
+        // If an entry was actually written (not a duplicate skip),
+        // then this instance can increment its idea of the next messageId for this session.
+        this.messageId = writtenEntry.messageId + 1;
+      }
+    } catch (_error) {
+      // Error already logged by _updateLogFile or _readLogFile
+    }
   }

   close(): void {
-    if (this.db) {
-      this.db.close((err: Error | null) => {
-        if (err) {
-          console.error('Error closing database:', err.message);
-        }
-      });
-      this.db = undefined;
-    }
+    this.initialized = false;
+    this.logFilePath = undefined;
+    this.logs = [];
+    this.sessionId = undefined;
+    this.messageId = 0;
   }
 }
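Taken together, the reworked Logger is used roughly like this (a minimal sketch; the message strings are illustrative):

import { Logger, MessageSenderType } from './logger.js';

const logger = new Logger();
await logger.initialize(); // creates .gemini/logs.json on first run
await logger.logMessage(MessageSenderType.USER, 'first prompt');
const history = await logger.getPreviousUserMessages(); // newest first
logger.close(); // resets in-memory state; the JSON file stays on disk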