Skip to content
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,15 @@ export class LoggingContentGenerator implements ContentGenerator {
return this.wrapped;
}

/**
 * Determines whether a prompt id identifies an internal background
 * operation (e.g. suggestion generation, forked queries). Requests and
 * responses belonging to such operations must be kept out of OpenAI logs
 * and any other persistent stores.
 */
private isInternalPromptId(promptId: string): boolean {
  switch (promptId) {
    case 'prompt_suggestion':
    case 'forked_query':
      return true;
    default:
      return false;
  }
}

private logApiRequest(
contents: Content[],
model: string,
Expand Down Expand Up @@ -143,8 +152,17 @@ export class LoggingContentGenerator implements ContentGenerator {
userPromptId: string,
): Promise<GenerateContentResponse> {
const startTime = Date.now();
this.logApiRequest(this.toContents(req.contents), req.model, userPromptId);
const openaiRequest = await this.buildOpenAIRequestForLogging(req);
const isInternal = this.isInternalPromptId(userPromptId);
if (!isInternal) {
this.logApiRequest(
this.toContents(req.contents),
req.model,
userPromptId,
);
}
const openaiRequest = isInternal
? undefined
: await this.buildOpenAIRequestForLogging(req);
try {
const response = await this.wrapped.generateContent(req, userPromptId);
const durationMs = Date.now() - startTime;
Expand All @@ -155,12 +173,16 @@ export class LoggingContentGenerator implements ContentGenerator {
userPromptId,
response.usageMetadata,
);
await this.logOpenAIInteraction(openaiRequest, response);
if (!isInternal) {
await this.logOpenAIInteraction(openaiRequest, response);
}
return response;
} catch (error) {
const durationMs = Date.now() - startTime;
this._logApiError('', durationMs, error, req.model, userPromptId);
await this.logOpenAIInteraction(openaiRequest, undefined, error);
if (!isInternal) {
await this.logOpenAIInteraction(openaiRequest, undefined, error);
}
throw error;
}
}
Expand All @@ -170,16 +192,27 @@ export class LoggingContentGenerator implements ContentGenerator {
userPromptId: string,
): Promise<AsyncGenerator<GenerateContentResponse>> {
const startTime = Date.now();
this.logApiRequest(this.toContents(req.contents), req.model, userPromptId);
const openaiRequest = await this.buildOpenAIRequestForLogging(req);
const isInternal = this.isInternalPromptId(userPromptId);
if (!isInternal) {
this.logApiRequest(
this.toContents(req.contents),
req.model,
userPromptId,
);
}
const openaiRequest = isInternal
? undefined
: await this.buildOpenAIRequestForLogging(req);

let stream: AsyncGenerator<GenerateContentResponse>;
try {
stream = await this.wrapped.generateContentStream(req, userPromptId);
} catch (error) {
const durationMs = Date.now() - startTime;
this._logApiError('', durationMs, error, req.model, userPromptId);
await this.logOpenAIInteraction(openaiRequest, undefined, error);
if (!isInternal) {
await this.logOpenAIInteraction(openaiRequest, undefined, error);
}
throw error;
}

Expand Down
6 changes: 4 additions & 2 deletions packages/core/src/followup/forkedQuery.ts
Original file line number Diff line number Diff line change
Expand Up @@ -191,8 +191,10 @@ export async function runForkedQuery(
const model = options?.model ?? params.model;
const chat = createForkedChat(config, params);

// Build per-request config overrides for JSON schema if needed
const requestConfig: GenerateContentConfig = {};
// Build per-request config overrides for JSON schema if needed.
// Strip tools — forked queries are pure text completion and must never
// produce function calls or appear inside tool-call UI elements.
const requestConfig: GenerateContentConfig = { tools: [] };
if (options?.abortSignal) {
requestConfig.abortSignal = options.abortSignal;
}
Expand Down
17 changes: 15 additions & 2 deletions packages/core/src/telemetry/loggers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,15 @@ function getCommonAttributes(config: Config): LogAttributes {

export { getCommonAttributes };

/**
 * Internal background operations (suggestion generation, forked queries)
 * are identified by a fixed set of prompt_ids; events carrying one of
 * these ids must not be recorded to the chatRecordingService.
 */
function isInternalPromptId(promptId: string): boolean {
  const internalPromptIds: readonly string[] = [
    'prompt_suggestion',
    'forked_query',
  ];
  return internalPromptIds.includes(promptId);
}

export function logStartSession(
config: Config,
event: StartSessionEvent,
Expand Down Expand Up @@ -382,7 +391,9 @@ export function logApiError(config: Config, event: ApiErrorEvent): void {
'event.timestamp': new Date().toISOString(),
} as UiEvent;
uiTelemetryService.addEvent(uiEvent);
config.getChatRecordingService()?.recordUiTelemetryEvent(uiEvent);
if (!isInternalPromptId(event.prompt_id)) {
config.getChatRecordingService()?.recordUiTelemetryEvent(uiEvent);
}
QwenLogger.getInstance(config)?.logApiErrorEvent(event);
if (!isTelemetrySdkInitialized()) return;

Expand Down Expand Up @@ -449,7 +460,9 @@ export function logApiResponse(config: Config, event: ApiResponseEvent): void {
'event.timestamp': new Date().toISOString(),
} as UiEvent;
uiTelemetryService.addEvent(uiEvent);
config.getChatRecordingService()?.recordUiTelemetryEvent(uiEvent);
if (!isInternalPromptId(event.prompt_id)) {
config.getChatRecordingService()?.recordUiTelemetryEvent(uiEvent);
}
QwenLogger.getInstance(config)?.logApiResponseEvent(event);
if (!isTelemetrySdkInitialized()) return;
const attributes: LogAttributes = {
Expand Down
Loading