diff --git a/packages/core/src/core/geminiChat.ts b/packages/core/src/core/geminiChat.ts
index cb0dd07b..3929dd26 100644
--- a/packages/core/src/core/geminiChat.ts
+++ b/packages/core/src/core/geminiChat.ts
@@ -24,7 +24,7 @@ import {
   logApiRequest,
   logApiResponse,
   logApiError,
-  combinedUsageMetadata,
+  getFinalUsageMetadata,
 } from '../telemetry/loggers.js';
 import {
   getStructuredResponse,
@@ -444,7 +444,7 @@ export class GeminiChat {
         const fullText = getStructuredResponseFromParts(allParts);
         await this._logApiResponse(
           durationMs,
-          combinedUsageMetadata(chunks),
+          getFinalUsageMetadata(chunks),
           fullText,
         );
       }
diff --git a/packages/core/src/telemetry/index.ts b/packages/core/src/telemetry/index.ts
index 32e98144..6329b401 100644
--- a/packages/core/src/telemetry/index.ts
+++ b/packages/core/src/telemetry/index.ts
@@ -25,7 +25,7 @@ export {
   logApiRequest,
   logApiError,
   logApiResponse,
-  combinedUsageMetadata,
+  getFinalUsageMetadata,
 } from './loggers.js';
 export {
   UserPromptEvent,
diff --git a/packages/core/src/telemetry/loggers.test.ts b/packages/core/src/telemetry/loggers.test.ts
index 5b1cb707..3f61530f 100644
--- a/packages/core/src/telemetry/loggers.test.ts
+++ b/packages/core/src/telemetry/loggers.test.ts
@@ -20,10 +20,12 @@ import {
   logUserPrompt,
   logToolCall,
   ToolCallDecision,
+  getFinalUsageMetadata,
 } from './loggers.js';
 import * as metrics from './metrics.js';
 import * as sdk from './sdk.js';
 import { vi, describe, beforeEach, it, expect } from 'vitest';
+import { GenerateContentResponse } from '@google/genai';
 
 vi.mock('@gemini-cli/cli/dist/src/utils/version', () => ({
   getCliVersion: () => 'test-version',
@@ -520,3 +522,75 @@
     });
   });
 });
+
+describe('getFinalUsageMetadata', () => {
+  const createMockResponse = (
+    usageMetadata?: GenerateContentResponse['usageMetadata'],
+  ): GenerateContentResponse =>
+    ({
+      text: () => '',
+      data: () => ({}) as Record<string, unknown>,
+      functionCalls: () => [],
+      executableCode: () => [],
+      codeExecutionResult: () => [],
+      usageMetadata,
+    }) as unknown as GenerateContentResponse;
+
+  it('should return the usageMetadata from the last chunk that has it', () => {
+    const chunks: GenerateContentResponse[] = [
+      createMockResponse({
+        promptTokenCount: 10,
+        candidatesTokenCount: 20,
+        totalTokenCount: 30,
+      }),
+      createMockResponse(),
+      createMockResponse({
+        promptTokenCount: 15,
+        candidatesTokenCount: 25,
+        totalTokenCount: 40,
+      }),
+      createMockResponse(),
+    ];
+
+    const result = getFinalUsageMetadata(chunks);
+    expect(result).toEqual({
+      promptTokenCount: 15,
+      candidatesTokenCount: 25,
+      totalTokenCount: 40,
+    });
+  });
+
+  it('should return undefined if no chunks have usageMetadata', () => {
+    const chunks: GenerateContentResponse[] = [
+      createMockResponse(),
+      createMockResponse(),
+      createMockResponse(),
+    ];
+
+    const result = getFinalUsageMetadata(chunks);
+    expect(result).toBeUndefined();
+  });
+
+  it('should return the metadata from the only chunk if it has it', () => {
+    const chunks: GenerateContentResponse[] = [
+      createMockResponse({
+        promptTokenCount: 1,
+        candidatesTokenCount: 2,
+        totalTokenCount: 3,
+      }),
+    ];
+
+    const result = getFinalUsageMetadata(chunks);
+    expect(result).toEqual({
+      promptTokenCount: 1,
+      candidatesTokenCount: 2,
+      totalTokenCount: 3,
+    });
+  });
+
+  it('should return undefined for an empty array of chunks', () => {
+    const chunks: GenerateContentResponse[] = [];
+    const result = getFinalUsageMetadata(chunks);
+    expect(result).toBeUndefined();
+  });
+});
diff --git a/packages/core/src/telemetry/loggers.ts b/packages/core/src/telemetry/loggers.ts
index 76b91f7b..f7edca5e 100644
--- a/packages/core/src/telemetry/loggers.ts
+++ b/packages/core/src/telemetry/loggers.ts
@@ -288,42 +288,14 @@ export function logApiResponse(
   recordTokenUsageMetrics(config, event.model, event.tool_token_count, 'tool');
 }
 
-export function combinedUsageMetadata(
+export function getFinalUsageMetadata(
   chunks: GenerateContentResponse[],
-): GenerateContentResponseUsageMetadata {
-  const metadataKeys: Array<keyof GenerateContentResponseUsageMetadata> = [
-    'promptTokenCount',
-    'candidatesTokenCount',
-    'cachedContentTokenCount',
-    'thoughtsTokenCount',
-    'toolUsePromptTokenCount',
-    'totalTokenCount',
-  ];
+): GenerateContentResponseUsageMetadata | undefined {
+  // Only the last streamed item has the final token count.
+  const lastChunkWithMetadata = chunks
+    .slice()
+    .reverse()
+    .find((chunk) => chunk.usageMetadata);
 
-  const totals: Record<keyof GenerateContentResponseUsageMetadata, number> = {
-    promptTokenCount: 0,
-    candidatesTokenCount: 0,
-    cachedContentTokenCount: 0,
-    thoughtsTokenCount: 0,
-    toolUsePromptTokenCount: 0,
-    totalTokenCount: 0,
-    cacheTokensDetails: 0,
-    candidatesTokensDetails: 0,
-    promptTokensDetails: 0,
-    toolUsePromptTokensDetails: 0,
-    trafficType: 0,
-  };
-
-  for (const chunk of chunks) {
-    if (chunk.usageMetadata) {
-      for (const key of metadataKeys) {
-        const chunkValue = chunk.usageMetadata[key];
-        if (typeof chunkValue === 'number') {
-          totals[key] += chunkValue;
-        }
-      }
-    }
-  }
-
-  return totals as unknown as GenerateContentResponseUsageMetadata;
+  return lastChunkWithMetadata?.usageMetadata;
 }