Fix telemetry token-count bug (#1250)

Co-authored-by: N. Taylor Mullen <ntaylormullen@google.com>
This commit is contained in:
Abhi 2025-06-20 01:45:29 -04:00 committed by GitHub
parent 05b1c8101f
commit fbbb6f2611
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 85 additions and 39 deletions

View File

@ -24,7 +24,7 @@ import {
logApiRequest, logApiRequest,
logApiResponse, logApiResponse,
logApiError, logApiError,
combinedUsageMetadata, getFinalUsageMetadata,
} from '../telemetry/loggers.js'; } from '../telemetry/loggers.js';
import { import {
getStructuredResponse, getStructuredResponse,
@ -444,7 +444,7 @@ export class GeminiChat {
const fullText = getStructuredResponseFromParts(allParts); const fullText = getStructuredResponseFromParts(allParts);
await this._logApiResponse( await this._logApiResponse(
durationMs, durationMs,
combinedUsageMetadata(chunks), getFinalUsageMetadata(chunks),
fullText, fullText,
); );
} }

View File

@ -25,7 +25,7 @@ export {
logApiRequest, logApiRequest,
logApiError, logApiError,
logApiResponse, logApiResponse,
combinedUsageMetadata, getFinalUsageMetadata,
} from './loggers.js'; } from './loggers.js';
export { export {
UserPromptEvent, UserPromptEvent,

View File

@ -20,10 +20,12 @@ import {
logUserPrompt, logUserPrompt,
logToolCall, logToolCall,
ToolCallDecision, ToolCallDecision,
getFinalUsageMetadata,
} from './loggers.js'; } from './loggers.js';
import * as metrics from './metrics.js'; import * as metrics from './metrics.js';
import * as sdk from './sdk.js'; import * as sdk from './sdk.js';
import { vi, describe, beforeEach, it, expect } from 'vitest'; import { vi, describe, beforeEach, it, expect } from 'vitest';
import { GenerateContentResponse } from '@google/genai';
vi.mock('@gemini-cli/cli/dist/src/utils/version', () => ({ vi.mock('@gemini-cli/cli/dist/src/utils/version', () => ({
getCliVersion: () => 'test-version', getCliVersion: () => 'test-version',
@ -520,3 +522,75 @@ describe('loggers', () => {
}); });
}); });
}); });
describe('getFinalUsageMetadata', () => {
  // Builds a minimal response stub carrying only the members the code
  // under test touches; the cast is needed because the real
  // GenerateContentResponse type has many more fields than the tests use.
  const createMockResponse = (
    usageMetadata?: GenerateContentResponse['usageMetadata'],
  ): GenerateContentResponse => {
    const stub = {
      text: () => '',
      data: () => ({}) as Record<string, unknown>,
      functionCalls: () => [],
      executableCode: () => [],
      codeExecutionResult: () => [],
      usageMetadata,
    };
    return stub as unknown as GenerateContentResponse;
  };

  it('should return the usageMetadata from the last chunk that has it', () => {
    const earlier = {
      promptTokenCount: 10,
      candidatesTokenCount: 20,
      totalTokenCount: 30,
    };
    const latest = {
      promptTokenCount: 15,
      candidatesTokenCount: 25,
      totalTokenCount: 40,
    };
    const chunks: GenerateContentResponse[] = [
      createMockResponse(earlier),
      createMockResponse(),
      createMockResponse(latest),
      createMockResponse(),
    ];

    // Trailing metadata-less chunks must be skipped; the earlier totals
    // must not leak through.
    expect(getFinalUsageMetadata(chunks)).toEqual(latest);
  });

  it('should return undefined if no chunks have usageMetadata', () => {
    const chunks: GenerateContentResponse[] = [
      createMockResponse(),
      createMockResponse(),
      createMockResponse(),
    ];

    expect(getFinalUsageMetadata(chunks)).toBeUndefined();
  });

  it('should return the metadata from the only chunk if it has it', () => {
    const only = {
      promptTokenCount: 1,
      candidatesTokenCount: 2,
      totalTokenCount: 3,
    };

    expect(getFinalUsageMetadata([createMockResponse(only)])).toEqual(only);
  });

  it('should return undefined for an empty array of chunks', () => {
    expect(getFinalUsageMetadata([])).toBeUndefined();
  });
});

View File

@ -288,42 +288,14 @@ export function logApiResponse(
recordTokenUsageMetrics(config, event.model, event.tool_token_count, 'tool'); recordTokenUsageMetrics(config, event.model, event.tool_token_count, 'tool');
} }
export function combinedUsageMetadata( export function getFinalUsageMetadata(
chunks: GenerateContentResponse[], chunks: GenerateContentResponse[],
): GenerateContentResponseUsageMetadata { ): GenerateContentResponseUsageMetadata | undefined {
const metadataKeys: Array<keyof GenerateContentResponseUsageMetadata> = [ // Only the last streamed item has the final token count.
'promptTokenCount', const lastChunkWithMetadata = chunks
'candidatesTokenCount', .slice()
'cachedContentTokenCount', .reverse()
'thoughtsTokenCount', .find((chunk) => chunk.usageMetadata);
'toolUsePromptTokenCount',
'totalTokenCount',
];
const totals: Record<keyof GenerateContentResponseUsageMetadata, number> = { return lastChunkWithMetadata?.usageMetadata;
promptTokenCount: 0,
candidatesTokenCount: 0,
cachedContentTokenCount: 0,
thoughtsTokenCount: 0,
toolUsePromptTokenCount: 0,
totalTokenCount: 0,
cacheTokensDetails: 0,
candidatesTokensDetails: 0,
promptTokensDetails: 0,
toolUsePromptTokensDetails: 0,
trafficType: 0,
};
for (const chunk of chunks) {
if (chunk.usageMetadata) {
for (const key of metadataKeys) {
const chunkValue = chunk.usageMetadata[key];
if (typeof chunkValue === 'number') {
totals[key] += chunkValue;
}
}
}
}
return totals as unknown as GenerateContentResponseUsageMetadata;
} }