fix(cli): add logging for response and error in LoggingContentGenerator (#5842)

Co-authored-by: Shi Shu <shii@google.com>
This commit is contained in:
shishu314 2025-08-08 15:58:33 -04:00 committed by GitHub
parent 34b5dc7f28
commit 60bde58f29
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 111 additions and 89 deletions

View File

@@ -14,15 +14,12 @@ import {
SendMessageParameters, SendMessageParameters,
createUserContent, createUserContent,
Part, Part,
GenerateContentResponseUsageMetadata,
Tool, Tool,
} from '@google/genai'; } from '@google/genai';
import { retryWithBackoff } from '../utils/retry.js'; import { retryWithBackoff } from '../utils/retry.js';
import { isFunctionResponse } from '../utils/messageInspectors.js'; import { isFunctionResponse } from '../utils/messageInspectors.js';
import { ContentGenerator, AuthType } from './contentGenerator.js'; import { ContentGenerator, AuthType } from './contentGenerator.js';
import { Config } from '../config/config.js'; import { Config } from '../config/config.js';
import { logApiResponse, logApiError } from '../telemetry/loggers.js';
import { ApiErrorEvent, ApiResponseEvent } from '../telemetry/types.js';
import { DEFAULT_GEMINI_FLASH_MODEL } from '../config/models.js'; import { DEFAULT_GEMINI_FLASH_MODEL } from '../config/models.js';
import { hasCycleInSchema } from '../tools/tools.js'; import { hasCycleInSchema } from '../tools/tools.js';
import { StructuredError } from './turn.js'; import { StructuredError } from './turn.js';
@@ -131,46 +128,6 @@ export class GeminiChat {
validateHistory(history); validateHistory(history);
} }
/**
 * Records a successful API response to telemetry.
 *
 * @param durationMs Wall-clock time of the API call in milliseconds.
 * @param prompt_id Identifier correlating this response with its prompt.
 * @param usageMetadata Token-usage metadata reported by the model, if any.
 * @param responseText Serialized response payload, if available.
 */
private async _logApiResponse(
  durationMs: number,
  prompt_id: string,
  usageMetadata?: GenerateContentResponseUsageMetadata,
  responseText?: string,
): Promise<void> {
  // Snapshot the current model and auth type at log time.
  const responseEvent = new ApiResponseEvent(
    this.config.getModel(),
    durationMs,
    prompt_id,
    this.config.getContentGeneratorConfig()?.authType,
    usageMetadata,
    responseText,
  );
  logApiResponse(this.config, responseEvent);
}
/**
 * Records a failed API call to telemetry.
 *
 * @param durationMs Wall-clock time spent before the failure, in milliseconds.
 * @param error The thrown value; non-Error throwables are stringified.
 * @param prompt_id Identifier correlating this failure with its prompt.
 */
private _logApiError(
  durationMs: number,
  error: unknown,
  prompt_id: string,
): void {
  // Derive a printable message and a type name; anything that is not an
  // Error instance is reported with type 'unknown'.
  let errorMessage: string;
  let errorType: string;
  if (error instanceof Error) {
    errorMessage = error.message;
    errorType = error.name;
  } else {
    errorMessage = String(error);
    errorType = 'unknown';
  }
  logApiError(
    this.config,
    new ApiErrorEvent(
      this.config.getModel(),
      errorMessage,
      durationMs,
      prompt_id,
      this.config.getContentGeneratorConfig()?.authType,
      errorType,
    ),
  );
}
/** /**
* Handles falling back to Flash model when persistent 429 errors occur for OAuth users. * Handles falling back to Flash model when persistent 429 errors occur for OAuth users.
* Uses a fallback handler if provided by the config; otherwise, returns null. * Uses a fallback handler if provided by the config; otherwise, returns null.
@@ -249,7 +206,6 @@ export class GeminiChat {
const userContent = createUserContent(params.message); const userContent = createUserContent(params.message);
const requestContents = this.getHistory(true).concat(userContent); const requestContents = this.getHistory(true).concat(userContent);
const startTime = Date.now();
let response: GenerateContentResponse; let response: GenerateContentResponse;
try { try {
@@ -290,13 +246,6 @@ export class GeminiChat {
await this.handleFlashFallback(authType, error), await this.handleFlashFallback(authType, error),
authType: this.config.getContentGeneratorConfig()?.authType, authType: this.config.getContentGeneratorConfig()?.authType,
}); });
const durationMs = Date.now() - startTime;
await this._logApiResponse(
durationMs,
prompt_id,
response.usageMetadata,
JSON.stringify(response),
);
this.sendPromise = (async () => { this.sendPromise = (async () => {
const outputContent = response.candidates?.[0]?.content; const outputContent = response.candidates?.[0]?.content;
@@ -324,8 +273,6 @@ export class GeminiChat {
}); });
return response; return response;
} catch (error) { } catch (error) {
const durationMs = Date.now() - startTime;
this._logApiError(durationMs, error, prompt_id);
this.sendPromise = Promise.resolve(); this.sendPromise = Promise.resolve();
throw error; throw error;
} }
@@ -361,8 +308,6 @@ export class GeminiChat {
const userContent = createUserContent(params.message); const userContent = createUserContent(params.message);
const requestContents = this.getHistory(true).concat(userContent); const requestContents = this.getHistory(true).concat(userContent);
const startTime = Date.now();
try { try {
const apiCall = () => { const apiCall = () => {
const modelToUse = this.config.getModel(); const modelToUse = this.config.getModel();
@@ -413,16 +358,9 @@ export class GeminiChat {
.then(() => undefined) .then(() => undefined)
.catch(() => undefined); .catch(() => undefined);
const result = this.processStreamResponse( const result = this.processStreamResponse(streamResponse, userContent);
streamResponse,
userContent,
startTime,
prompt_id,
);
return result; return result;
} catch (error) { } catch (error) {
const durationMs = Date.now() - startTime;
this._logApiError(durationMs, error, prompt_id);
this.sendPromise = Promise.resolve(); this.sendPromise = Promise.resolve();
throw error; throw error;
} }
@@ -483,17 +421,6 @@ export class GeminiChat {
this.generationConfig.tools = tools; this.generationConfig.tools = tools;
} }
/**
 * Returns the usage metadata from the last stream chunk that carries one,
 * or undefined when no chunk reported usage.
 *
 * @param chunks All response chunks received for a streaming call.
 * @returns The most recent non-empty usageMetadata, if any.
 */
getFinalUsageMetadata(
  chunks: GenerateContentResponse[],
): GenerateContentResponseUsageMetadata | undefined {
  // Walk backwards so the latest metadata wins, without copying the array.
  for (let i = chunks.length - 1; i >= 0; i--) {
    const metadata = chunks[i].usageMetadata;
    if (metadata) {
      return metadata;
    }
  }
  return undefined;
}
async maybeIncludeSchemaDepthContext(error: StructuredError): Promise<void> { async maybeIncludeSchemaDepthContext(error: StructuredError): Promise<void> {
// Check for potentially problematic cyclic tools with cyclic schemas // Check for potentially problematic cyclic tools with cyclic schemas
// and include a recommendation to remove potentially problematic tools. // and include a recommendation to remove potentially problematic tools.
@@ -525,8 +452,6 @@ export class GeminiChat {
private async *processStreamResponse( private async *processStreamResponse(
streamResponse: AsyncGenerator<GenerateContentResponse>, streamResponse: AsyncGenerator<GenerateContentResponse>,
inputContent: Content, inputContent: Content,
startTime: number,
prompt_id: string,
) { ) {
const outputContent: Content[] = []; const outputContent: Content[] = [];
const chunks: GenerateContentResponse[] = []; const chunks: GenerateContentResponse[] = [];
@@ -549,25 +474,16 @@ export class GeminiChat {
} }
} catch (error) { } catch (error) {
errorOccurred = true; errorOccurred = true;
const durationMs = Date.now() - startTime;
this._logApiError(durationMs, error, prompt_id);
throw error; throw error;
} }
if (!errorOccurred) { if (!errorOccurred) {
const durationMs = Date.now() - startTime;
const allParts: Part[] = []; const allParts: Part[] = [];
for (const content of outputContent) { for (const content of outputContent) {
if (content.parts) { if (content.parts) {
allParts.push(...content.parts); allParts.push(...content.parts);
} }
} }
await this._logApiResponse(
durationMs,
prompt_id,
this.getFinalUsageMetadata(chunks),
JSON.stringify(chunks),
);
} }
this.recordHistory(inputContent, outputContent); this.recordHistory(inputContent, outputContent);
} }

View File

@@ -11,11 +11,20 @@ import {
EmbedContentParameters, EmbedContentParameters,
EmbedContentResponse, EmbedContentResponse,
GenerateContentParameters, GenerateContentParameters,
GenerateContentResponseUsageMetadata,
GenerateContentResponse, GenerateContentResponse,
} from '@google/genai'; } from '@google/genai';
import { ApiRequestEvent } from '../telemetry/types.js'; import {
ApiRequestEvent,
ApiResponseEvent,
ApiErrorEvent,
} from '../telemetry/types.js';
import { Config } from '../config/config.js'; import { Config } from '../config/config.js';
import { logApiRequest } from '../telemetry/loggers.js'; import {
logApiError,
logApiRequest,
logApiResponse,
} from '../telemetry/loggers.js';
import { ContentGenerator } from './contentGenerator.js'; import { ContentGenerator } from './contentGenerator.js';
import { toContents } from '../code_assist/converter.js'; import { toContents } from '../code_assist/converter.js';
@@ -40,20 +49,117 @@ export class LoggingContentGenerator implements ContentGenerator {
); );
} }
/**
 * Emits an ApiResponseEvent for a completed call made through this
 * logging wrapper.
 *
 * @param durationMs Wall-clock time of the API call in milliseconds.
 * @param prompt_id Identifier correlating this response with its prompt.
 * @param usageMetadata Token-usage metadata reported by the model, if any.
 * @param responseText Serialized response payload, if available.
 */
private _logApiResponse(
  durationMs: number,
  prompt_id: string,
  usageMetadata?: GenerateContentResponseUsageMetadata,
  responseText?: string,
): void {
  // Auth type is read lazily so the event reflects the active config.
  const authType = this.config.getContentGeneratorConfig()?.authType;
  logApiResponse(
    this.config,
    new ApiResponseEvent(
      this.config.getModel(),
      durationMs,
      prompt_id,
      authType,
      usageMetadata,
      responseText,
    ),
  );
}
/**
 * Emits an ApiErrorEvent for a call that failed inside this logging wrapper.
 *
 * @param durationMs Wall-clock time spent before the failure, in milliseconds.
 * @param error The thrown value; non-Error throwables are stringified.
 * @param prompt_id Identifier correlating this failure with its prompt.
 */
private _logApiError(
  durationMs: number,
  error: unknown,
  prompt_id: string,
): void {
  // A single instanceof check drives both the message and the type name.
  const isErr = error instanceof Error;
  const event = new ApiErrorEvent(
    this.config.getModel(),
    isErr ? error.message : String(error),
    durationMs,
    prompt_id,
    this.config.getContentGeneratorConfig()?.authType,
    isErr ? error.name : 'unknown',
  );
  logApiError(this.config, event);
}
async generateContent( async generateContent(
req: GenerateContentParameters, req: GenerateContentParameters,
userPromptId: string, userPromptId: string,
): Promise<GenerateContentResponse> { ): Promise<GenerateContentResponse> {
const startTime = Date.now();
this.logApiRequest(toContents(req.contents), req.model, userPromptId); this.logApiRequest(toContents(req.contents), req.model, userPromptId);
return this.wrapped.generateContent(req, userPromptId); try {
const response = await this.wrapped.generateContent(req, userPromptId);
const durationMs = Date.now() - startTime;
this._logApiResponse(
durationMs,
userPromptId,
response.usageMetadata,
JSON.stringify(response),
);
return response;
} catch (error) {
const durationMs = Date.now() - startTime;
this._logApiError(durationMs, error, userPromptId);
throw error;
}
} }
async generateContentStream( async generateContentStream(
req: GenerateContentParameters, req: GenerateContentParameters,
userPromptId: string, userPromptId: string,
): Promise<AsyncGenerator<GenerateContentResponse>> { ): Promise<AsyncGenerator<GenerateContentResponse>> {
const startTime = Date.now();
this.logApiRequest(toContents(req.contents), req.model, userPromptId); this.logApiRequest(toContents(req.contents), req.model, userPromptId);
return this.wrapped.generateContentStream(req, userPromptId);
let stream: AsyncGenerator<GenerateContentResponse>;
try {
stream = await this.wrapped.generateContentStream(req, userPromptId);
} catch (error) {
const durationMs = Date.now() - startTime;
this._logApiError(durationMs, error, userPromptId);
throw error;
}
return this.loggingStreamWrapper(stream, startTime, userPromptId);
}
/**
 * Re-yields every chunk of the wrapped stream while tracking the last
 * response and the last usageMetadata seen, so that a single telemetry
 * event can be logged once the stream ends (or an error event if it throws).
 *
 * @param stream The underlying response stream to pass through.
 * @param startTime Epoch millis captured before the request was issued.
 * @param userPromptId Identifier correlating events with the user prompt.
 */
private async *loggingStreamWrapper(
stream: AsyncGenerator<GenerateContentResponse>,
startTime: number,
userPromptId: string,
): AsyncGenerator<GenerateContentResponse> {
let lastResponse: GenerateContentResponse | undefined;
// Tracked separately: a late chunk may lack usageMetadata even though an
// earlier one carried it.
let lastUsageMetadata: GenerateContentResponseUsageMetadata | undefined;
try {
for await (const response of stream) {
lastResponse = response;
if (response.usageMetadata) {
lastUsageMetadata = response.usageMetadata;
}
// Forward the chunk unchanged to the consumer.
yield response;
}
} catch (error) {
// Stream failed mid-flight: log the error with total elapsed time, then
// propagate so the caller still sees the failure.
const durationMs = Date.now() - startTime;
this._logApiError(durationMs, error, userPromptId);
throw error;
}
const durationMs = Date.now() - startTime;
// Only log a response event if at least one chunk arrived.
if (lastResponse) {
this._logApiResponse(
durationMs,
userPromptId,
lastUsageMetadata,
JSON.stringify(lastResponse),
);
}
} }
async countTokens(req: CountTokensParameters): Promise<CountTokensResponse> { async countTokens(req: CountTokensParameters): Promise<CountTokensResponse> {