Skip to content

Commit a248ac4

Browse files
wenshao and claude committed
fix: build errors and Copilot round 4 feedback
1. Fix NO_TOOLS type: Object.freeze produces readonly array incompatible with ToolUnion[]. Use Readonly&lt;Pick&lt;&gt;&gt; instead; spread in requestConfig already creates a fresh mutable copy per call.
2. Fix test missing required 'model' field in ContentGeneratorConfig.
3. Track firstResponseId/firstModelVersion in loggingStreamWrapper so _logApiResponse/_logApiError have accurate values even when full response collection is skipped for internal prompts.
4. Strengthen OpenAI logger test assertion: assert OpenAILogger was constructed (not guarded by if), then assert logInteraction was not called.

Co-Authored-By: Claude Opus 4.6 (1M context) &lt;noreply@anthropic.com&gt;
1 parent 59656d9 commit a248ac4

File tree

3 files changed

+19
-11
lines changed

3 files changed

+19
-11
lines changed

packages/core/src/core/loggingContentGenerator/loggingContentGenerator.test.ts

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -496,6 +496,7 @@ describe('LoggingContentGenerator', () => {
496496
} as unknown as ContentGenerator;
497497

498498
const gen = new LoggingContentGenerator(mockWrapped, createConfig(), {
499+
model: 'test-model',
499500
enableOpenAILogging: true,
500501
openAILoggingDir: '/tmp/test-logs',
501502
});
@@ -511,13 +512,12 @@ describe('LoggingContentGenerator', () => {
511512
expect(logApiRequest).not.toHaveBeenCalled();
512513
// logApiResponse SHOULD be called (for /stats token tracking)
513514
expect(logApiResponse).toHaveBeenCalled();
514-
// OpenAI logger should NOT be called
515+
// OpenAI logger should be constructed, but no interaction should be logged
516+
expect(OpenAILogger).toHaveBeenCalled();
515517
const loggerInstance = (
516518
OpenAILogger as unknown as ReturnType<typeof vi.fn>
517519
).mock.results[0]?.value;
518-
if (loggerInstance) {
519-
expect(loggerInstance.logInteraction).not.toHaveBeenCalled();
520-
}
520+
expect(loggerInstance.logInteraction).not.toHaveBeenCalled();
521521
},
522522
);
523523
});

packages/core/src/core/loggingContentGenerator/loggingContentGenerator.ts

Lines changed: 14 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -229,9 +229,19 @@ export class LoggingContentGenerator implements ContentGenerator {
229229
// skip collecting full responses to avoid unnecessary memory overhead.
230230
const responses: GenerateContentResponse[] = [];
231231

232+
// Track first-seen IDs so _logApiResponse/_logApiError have accurate
233+
// values even when we skip collecting full responses for internal prompts.
234+
let firstResponseId = '';
235+
let firstModelVersion = '';
232236
let lastUsageMetadata: GenerateContentResponseUsageMetadata | undefined;
233237
try {
234238
for await (const response of stream) {
239+
if (!firstResponseId && response.responseId) {
240+
firstResponseId = response.responseId;
241+
}
242+
if (!firstModelVersion && response.modelVersion) {
243+
firstModelVersion = response.modelVersion;
244+
}
235245
if (!isInternal) {
236246
responses.push(response);
237247
}
@@ -243,9 +253,9 @@ export class LoggingContentGenerator implements ContentGenerator {
243253
// Only log successful API response if no error occurred
244254
const durationMs = Date.now() - startTime;
245255
this._logApiResponse(
246-
responses[0]?.responseId ?? '',
256+
firstResponseId,
247257
durationMs,
248-
responses[0]?.modelVersion || model,
258+
firstModelVersion || model,
249259
userPromptId,
250260
lastUsageMetadata,
251261
);
@@ -257,10 +267,10 @@ export class LoggingContentGenerator implements ContentGenerator {
257267
} catch (error) {
258268
const durationMs = Date.now() - startTime;
259269
this._logApiError(
260-
responses[0]?.responseId ?? '',
270+
firstResponseId,
261271
durationMs,
262272
error,
263-
responses[0]?.modelVersion || model,
273+
firstModelVersion || model,
264274
userPromptId,
265275
);
266276
if (!isInternal) {

packages/core/src/followup/forkedQuery.ts

Lines changed: 1 addition & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -26,9 +26,7 @@ import { GeminiChat, StreamEventType } from '../core/geminiChat.js';
2626
import type { Config } from '../config/config.js';
2727

2828
/** Per-request config that strips tools so the model never produces function calls. */
29-
const NO_TOOLS: GenerateContentConfig = Object.freeze({
30-
tools: Object.freeze([]),
31-
});
29+
const NO_TOOLS: Readonly<Pick<GenerateContentConfig, 'tools'>> = { tools: [] };
3230

3331
/**
3432
* Snapshot of the main conversation's cache-critical parameters.

0 commit comments

Comments (0)