Skip to content

Commit f3e1b05

Browse files
committed
chore: Add non-streaming fallback for non-streaming OpenAI-compatible APIs, such as the Z.AI GLM coding API
1 parent 7187a53 commit f3e1b05

File tree

1 file changed

+33
-2
lines changed

1 file changed

+33
-2
lines changed

internal/llm/provider/openai.go

Lines changed: 33 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -437,10 +437,41 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
437437
err := openaiStream.Err()
438438
if err == nil || errors.Is(err, io.EOF) {
439439
if len(acc.Choices) == 0 {
440+
// Some OpenAI-compatible APIs (e.g., Z.AI GLM models with stream=false)
441+
// return complete responses instead of streaming chunks.
442+
// Fall back to non-streaming API call to handle this case.
443+
slog.Warn("Empty streaming response, falling back to non-streaming call")
444+
response, sendErr := o.send(ctx, messages, tools)
445+
if sendErr != nil {
446+
eventChan <- ProviderEvent{
447+
Type: EventError,
448+
Error: fmt.Errorf("streaming and non-streaming calls both failed: %w", sendErr),
449+
}
450+
return
451+
}
452+
// Emit the complete response as streaming events
453+
if response.Content != "" {
454+
eventChan <- ProviderEvent{
455+
Type: EventContentDelta,
456+
Content: response.Content,
457+
}
458+
}
459+
// Emit tool call start events if present
460+
for _, toolCall := range response.ToolCalls {
461+
eventChan <- ProviderEvent{
462+
Type: EventToolUseStart,
463+
ToolCall: &message.ToolCall{
464+
ID: toolCall.ID,
465+
Name: toolCall.Name,
466+
Finished: false,
467+
},
468+
}
469+
}
440470
eventChan <- ProviderEvent{
441-
Type: EventError,
442-
Error: fmt.Errorf("received empty streaming response from OpenAI API - check endpoint configuration"),
471+
Type: EventComplete,
472+
Response: response,
443473
}
474+
close(eventChan)
444475
return
445476
}
446477

0 commit comments

Comments
 (0)