code review followup for compress command (#1097)
Followup to https://github.com/google-gemini/gemini-cli/pull/986
parent c3971754bf, commit e59c872b3d
@@ -23,7 +23,8 @@ export const CompressionMessage: React.FC<CompressionDisplayProps> = ({
 }) => {
   const text = compression.isPending
     ? 'Compressing chat history'
-    : `Chat history compressed from ${compression.originalTokenCount} to ${compression.newTokenCount} tokens.`;
+    : `Chat history compressed from ${compression.originalTokenCount ?? 'unknown'}` +
+      ` to ${compression.newTokenCount ?? 'unknown'} tokens.`;

   return (
     <Box flexDirection="row">

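The new fallback keeps the completion message well-formed when the token counts are still null (the "pending" state introduced by the CompressionProps change further down in this diff). A minimal sketch of the resulting behaviour using the same nullish-coalescing pattern; describeCompression is a hypothetical helper, not code from the repo:

interface CompressionProps {
  isPending: boolean;
  originalTokenCount: number | null;
  newTokenCount: number | null;
}

// Mirrors the ternary in CompressionMessage above.
const describeCompression = (c: CompressionProps): string =>
  c.isPending
    ? 'Compressing chat history'
    : `Chat history compressed from ${c.originalTokenCount ?? 'unknown'}` +
      ` to ${c.newTokenCount ?? 'unknown'} tokens.`;

describeCompression({ isPending: false, originalTokenCount: 100, newTokenCount: 50 });
// => 'Chat history compressed from 100 to 50 tokens.'
describeCompression({ isPending: false, originalTokenCount: null, newTokenCount: null });
// => 'Chat history compressed from unknown to unknown tokens.'
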
@@ -159,8 +159,8 @@ describe('useSlashCommandProcessor', () => {
     process.env = { ...globalThis.process.env };
   });

-  const getProcessor = (showToolDescriptions: boolean = false) => {
-    const { result } = renderHook(() =>
+  const getProcessorHook = (showToolDescriptions: boolean = false) =>
+    renderHook(() =>
       useSlashCommandProcessor(
         mockConfig,
         [],

@@ -178,8 +178,9 @@ describe('useSlashCommandProcessor', () => {
         mockSetQuittingMessages,
       ),
     );
-    return result.current;
-  };
+
+  const getProcessor = (showToolDescriptions: boolean = false) =>
+    getProcessorHook(showToolDescriptions).result.current;

   describe('/memory add', () => {
     it('should return tool scheduling info on valid input', async () => {

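Splitting the old helper in two means tests that only need the hook's return value keep the terse getProcessor(), while tests that need to rerender and watch state change over time (like the /compress test below) can hold on to the full renderHook handle from getProcessorHook(). A rough sketch of the same pattern against a hypothetical stand-in hook; renderHook and act come from @testing-library/react, everything else here is illustrative:

import { renderHook, act } from '@testing-library/react';
import { useCallback, useState } from 'react';
import { expect, it } from 'vitest';

// Stand-in hook; the real tests render useSlashCommandProcessor instead.
const useCounter = () => {
  const [count, setCount] = useState(0);
  const increment = useCallback(() => setCount((c) => c + 1), []);
  return { count, increment };
};

// Helper pair mirroring the refactor above.
const getCounterHook = () => renderHook(() => useCounter());
const getCounter = () => getCounterHook().result.current;

it('can observe state across rerenders via the hook handle', async () => {
  const hook = getCounterHook();
  await act(async () => {
    hook.result.current.increment();
  });
  expect(hook.result.current.count).toBe(1); // result.current is refreshed after act()
});

it('still works when only the initial return value is needed', () => {
  expect(getCounter().count).toBe(0);
});
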
@@ -1132,10 +1133,20 @@ Add any other context about the problem here.

   describe('/compress command', () => {
     it('should call tryCompressChat(true)', async () => {
-      const { handleSlashCommand } = getProcessor();
+      const hook = getProcessorHook();
       mockTryCompressChat.mockImplementationOnce(async (force?: boolean) => {
-        // TODO: Check that we have a pending compression item in the history.
         expect(force).toBe(true);
+        await act(async () => {
+          hook.rerender();
+        });
+        expect(hook.result.current.pendingHistoryItems).toContainEqual({
+          type: MessageType.COMPRESSION,
+          compression: {
+            isPending: true,
+            originalTokenCount: null,
+            newTokenCount: null,
+          },
+        });
         return {
           originalTokenCount: 100,
           newTokenCount: 50,

@@ -1143,8 +1154,12 @@ Add any other context about the problem here.
       });

       await act(async () => {
-        handleSlashCommand('/compress');
+        hook.result.current.handleSlashCommand('/compress');
       });
+      await act(async () => {
+        hook.rerender();
+      });
+      expect(hook.result.current.pendingHistoryItems).toEqual([]);
       expect(mockGeminiClient.tryCompressChat).toHaveBeenCalledWith(true);
       expect(mockAddItem).toHaveBeenNthCalledWith(
         2,

@@ -760,6 +760,8 @@ Add any other context about the problem here.
             type: MessageType.COMPRESSION,
             compression: {
               isPending: true,
+              originalTokenCount: null,
+              newTokenCount: null,
             },
           });
           try {

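Taken together with the test above, the intended lifecycle is: the slash command first records a pending COMPRESSION history item with null counts, and only after tryCompressChat resolves does the final item (with real or unknown counts) replace it. A condensed, hypothetical sketch of that flow; the setPending/addItem/tryCompressChat signatures below are stand-ins, not the real hook wiring:

interface CompressionProps {
  isPending: boolean;
  originalTokenCount: number | null;
  newTokenCount: number | null;
}

type ChatCompressionInfo = { originalTokenCount: number; newTokenCount: number };

async function runCompress(
  tryCompressChat: (force?: boolean) => Promise<ChatCompressionInfo | null>,
  setPending: (item: CompressionProps | null) => void,
  addItem: (item: CompressionProps) => void,
): Promise<void> {
  // Surface "Compressing chat history" immediately via the pending item.
  setPending({ isPending: true, originalTokenCount: null, newTokenCount: null });
  try {
    const stats = await tryCompressChat(true);
    // Replace the pending entry with the final message; counts may be unknown.
    addItem({
      isPending: false,
      originalTokenCount: stats?.originalTokenCount ?? null,
      newTokenCount: stats?.newTokenCount ?? null,
    });
  } finally {
    setPending(null);
  }
}
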
@@ -379,7 +379,8 @@ export const useGeminiStream = (
                 text:
                   `IMPORTANT: This conversation approached the input token limit for ${config.getModel()}. ` +
                   `A compressed context will be sent for future messages (compressed from: ` +
-                  `${eventValue.originalTokenCount} to ${eventValue.newTokenCount} tokens).`,
+                  `${eventValue?.originalTokenCount ?? 'unknown'} to ` +
+                  `${eventValue?.newTokenCount ?? 'unknown'} tokens).`,
               },
               Date.now(),
             ),

@@ -55,8 +55,8 @@ export interface IndividualToolCallDisplay {

 export interface CompressionProps {
   isPending: boolean;
-  originalTokenCount?: number;
-  newTokenCount?: number;
+  originalTokenCount: number | null;
+  newTokenCount: number | null;
 }

 export interface HistoryItemBase {

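Moving from optional fields to required-but-nullable fields makes the "counts not known yet" state something producers of CompressionProps have to spell out rather than something they can silently omit. A small, self-contained illustration of the difference (the interface names here are hypothetical):

interface WithOptionalCounts {
  originalTokenCount?: number; // may be omitted entirely
}

interface WithNullableCounts {
  originalTokenCount: number | null; // must always be written, even when unknown
}

const a: WithOptionalCounts = {}; // compiles: easy to forget the field
// const b: WithNullableCounts = {}; // error: property 'originalTokenCount' is missing
const c: WithNullableCounts = { originalTokenCount: null }; // the unknown state is explicit
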
@@ -475,9 +475,11 @@ export class GeminiClient {
       await cg.countTokens({ model: this.model, contents: newHistory })
     ).totalTokens;

-    return {
-      originalTokenCount,
-      newTokenCount,
-    };
+    return originalTokenCount && newTokenCount
+      ? {
+          originalTokenCount,
+          newTokenCount,
+        }
+      : null;
   }
 }

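tryCompressChat now returns null instead of an object with possibly-missing counts; note that because the guard is a plain truthiness check, an undefined totalTokens (or a count of 0) also collapses to null. A sketch of how a caller can narrow the nullable result; the helper below is illustrative, and in the real code the consumer is the ChatCompressed event handling in useGeminiStream:

interface ChatCompressionInfo {
  originalTokenCount: number;
  newTokenCount: number;
}

function describeCompressionResult(info: ChatCompressionInfo | null): string {
  if (info === null) {
    // countTokens produced no usable numbers, so report the compression without counts.
    return 'Chat history compressed (token counts unavailable).';
  }
  return `Chat history compressed from ${info.originalTokenCount} to ${info.newTokenCount} tokens.`;
}
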
@@ -110,13 +110,13 @@ export type ServerGeminiErrorEvent = {
 };

 export interface ChatCompressionInfo {
-  originalTokenCount: number | undefined;
-  newTokenCount: number | undefined;
+  originalTokenCount: number;
+  newTokenCount: number;
 }

 export type ServerGeminiChatCompressedEvent = {
   type: GeminiEventType.ChatCompressed;
-  value: ChatCompressionInfo;
+  value: ChatCompressionInfo | null;
 };

 export type ServerGeminiUsageMetadataEvent = {

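With the fields tightened to plain number and the nullability moved onto the event value itself, a single null check on value is enough for consumers; after that check both counts are guaranteed numbers. An illustrative narrowing of the new shape (a string literal stands in for GeminiEventType.ChatCompressed; the summarize helper is not from the repo):

type ChatCompressedEvent = {
  type: 'chat_compressed'; // stands in for GeminiEventType.ChatCompressed
  value: { originalTokenCount: number; newTokenCount: number } | null;
};

function summarizeCompression(event: ChatCompressedEvent): string {
  if (event.value === null) {
    return 'Chat history compressed (token counts unavailable).';
  }
  const { originalTokenCount, newTokenCount } = event.value;
  return `compressed from ${originalTokenCount} to ${newTokenCount} tokens`;
}
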