Skip to content

Commit 0d2004d

Browse files
Kirsch and vercel-ai-sdk[bot]
authored and committed
Backport conflicts for PR #9857 to release-v5.0
1 parent 936916e commit 0d2004d

File tree

15 files changed

+2375
-163
lines changed

15 files changed

+2375
-163
lines changed

.changeset/six-roses-peel.md

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
---
2+
'@ai-sdk/react': patch
3+
'ai': patch
4+
---
5+
6+
Added finishReason on useChat onFinish callback

content/docs/07-reference/02-ai-sdk-ui/01-use-chat.mdx

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -275,6 +275,13 @@ Allows you to easily create a conversational user interface for your chatbot app
275275
type: 'boolean',
276276
description: `True if errors during streaming caused the response to stop early.`,
277277
},
278+
{
279+
name: 'finishReason',
280+
type: "'stop' | 'length' | 'content-filter' | 'tool-calls' | 'error' | 'other' | 'unknown'",
281+
isOptional: true,
282+
description:
283+
'The reason why the model finished generating the response. Undefined if the finish reason was not provided by the model.',
284+
},
278285
],
279286
},
280287
],

examples/next-openai/app/use-chat-data-ui-parts/page.tsx

Lines changed: 14 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,8 @@
22

33
import ChatInput from '@/components/chat-input';
44
import { useChat } from '@ai-sdk/react';
5-
import { DefaultChatTransport, UIMessage } from 'ai';
5+
import { DefaultChatTransport, UIMessage, type FinishReason } from 'ai';
6+
import { useState } from 'react';
67

78
type MyMessage = UIMessage<
89
never,
@@ -16,6 +17,9 @@ type MyMessage = UIMessage<
1617
>;
1718

1819
export default function Chat() {
20+
const [lastFinishReason, setLastFinishReason] = useState<
21+
FinishReason | undefined
22+
>(undefined);
1923
const { error, status, sendMessage, messages, regenerate, stop } =
2024
useChat<MyMessage>({
2125
transport: new DefaultChatTransport({
@@ -24,6 +28,9 @@ export default function Chat() {
2428
onData: dataPart => {
2529
console.log('dataPart', JSON.stringify(dataPart, null, 2));
2630
},
31+
onFinish: ({ finishReason }) => {
32+
setLastFinishReason(finishReason);
33+
},
2734
});
2835

2936
return (
@@ -94,6 +101,12 @@ export default function Chat() {
94101
</div>
95102
)}
96103

104+
{messages.length > 0 && (
105+
<div className="mt-4 text-gray-500">
106+
Finish reason: {String(lastFinishReason)}
107+
</div>
108+
)}
109+
97110
<ChatInput status={status} onSubmit={text => sendMessage({ text })} />
98111
</div>
99112
);
Lines changed: 250 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,250 @@
1+
import { LanguageModelV3CallOptions } from '@ai-sdk/provider';
2+
import { tool } from '@ai-sdk/provider-utils';
3+
import {
4+
convertArrayToReadableStream,
5+
convertReadableStreamToArray,
6+
} from '@ai-sdk/provider-utils/test';
7+
import { beforeEach, describe, expect, it } from 'vitest';
8+
import { z } from 'zod/v4';
9+
import { MockLanguageModelV3 } from '../test/mock-language-model-v3';
10+
import { createAgentUIStreamResponse } from './create-agent-ui-stream-response';
11+
import { ToolLoopAgent } from './tool-loop-agent';
12+
13+
describe('createAgentUIStreamResponse', () => {
14+
describe('when using tools toModelOutput', () => {
15+
let recordedInputs: LanguageModelV3CallOptions[];
16+
let response: Response;
17+
let decodedChunks: string[];
18+
19+
beforeEach(async () => {
20+
recordedInputs = [];
21+
22+
const agent = new ToolLoopAgent({
23+
model: new MockLanguageModelV3({
24+
doStream: async input => {
25+
recordedInputs.push(input);
26+
return {
27+
stream: convertArrayToReadableStream([
28+
{
29+
type: 'stream-start',
30+
warnings: [],
31+
},
32+
{
33+
type: 'response-metadata',
34+
id: 'id-0',
35+
modelId: 'mock-model-id',
36+
timestamp: new Date(0),
37+
},
38+
{ type: 'text-start', id: '1' },
39+
{ type: 'text-delta', id: '1', delta: 'Hello' },
40+
{ type: 'text-delta', id: '1', delta: ', ' },
41+
{ type: 'text-delta', id: '1', delta: `world!` },
42+
{ type: 'text-end', id: '1' },
43+
{
44+
type: 'finish',
45+
finishReason: 'stop',
46+
usage: {
47+
inputTokens: 10,
48+
outputTokens: 10,
49+
totalTokens: 20,
50+
},
51+
providerMetadata: {
52+
testProvider: { testKey: 'testValue' },
53+
},
54+
},
55+
]),
56+
};
57+
},
58+
}),
59+
tools: {
60+
example: tool({
61+
description: 'Example tool',
62+
inputSchema: z.object({
63+
input: z.string(),
64+
}),
65+
outputSchema: z.object({
66+
value: z.string(),
67+
}),
68+
// important: tool has toModelOutput that needs to be called
69+
toModelOutput: output => ({
70+
type: 'content',
71+
value: [{ type: 'text', text: output.value }],
72+
}),
73+
}),
74+
},
75+
});
76+
77+
response = await createAgentUIStreamResponse({
78+
agent,
79+
messages: [
80+
{
81+
role: 'user',
82+
id: 'msg-1',
83+
parts: [
84+
{
85+
type: 'text' as const,
86+
text: 'Hello, world!',
87+
},
88+
],
89+
},
90+
{
91+
role: 'assistant',
92+
id: 'msg-2',
93+
parts: [
94+
{
95+
type: 'tool-example' as const,
96+
toolCallId: 'call-1',
97+
state: 'output-available',
98+
input: {
99+
input: 'Hello, world!',
100+
},
101+
output: {
102+
value: 'Example tool: Hello, world!',
103+
},
104+
},
105+
],
106+
},
107+
],
108+
});
109+
110+
// consume the response
111+
const decoder = new TextDecoder();
112+
const encodedStream = response.body!;
113+
const chunks = await convertReadableStreamToArray(encodedStream);
114+
decodedChunks = chunks.map(chunk => decoder.decode(chunk));
115+
});
116+
117+
it('should have a single call that contains the tool result as text', () => {
118+
expect(recordedInputs).toMatchInlineSnapshot(`
119+
[
120+
{
121+
"abortSignal": undefined,
122+
"frequencyPenalty": undefined,
123+
"headers": undefined,
124+
"includeRawChunks": false,
125+
"maxOutputTokens": undefined,
126+
"presencePenalty": undefined,
127+
"prompt": [
128+
{
129+
"content": [
130+
{
131+
"providerOptions": undefined,
132+
"text": "Hello, world!",
133+
"type": "text",
134+
},
135+
],
136+
"providerOptions": undefined,
137+
"role": "user",
138+
},
139+
{
140+
"content": [
141+
{
142+
"input": {
143+
"input": "Hello, world!",
144+
},
145+
"providerExecuted": undefined,
146+
"providerOptions": undefined,
147+
"toolCallId": "call-1",
148+
"toolName": "example",
149+
"type": "tool-call",
150+
},
151+
],
152+
"providerOptions": undefined,
153+
"role": "assistant",
154+
},
155+
{
156+
"content": [
157+
{
158+
"output": {
159+
"type": "content",
160+
"value": [
161+
{
162+
"text": "Example tool: Hello, world!",
163+
"type": "text",
164+
},
165+
],
166+
},
167+
"providerOptions": undefined,
168+
"toolCallId": "call-1",
169+
"toolName": "example",
170+
"type": "tool-result",
171+
},
172+
],
173+
"providerOptions": undefined,
174+
"role": "tool",
175+
},
176+
],
177+
"providerOptions": undefined,
178+
"responseFormat": undefined,
179+
"seed": undefined,
180+
"stopSequences": undefined,
181+
"temperature": undefined,
182+
"toolChoice": {
183+
"type": "auto",
184+
},
185+
"tools": [
186+
{
187+
"description": "Example tool",
188+
"inputSchema": {
189+
"$schema": "http://json-schema.org/draft-07/schema#",
190+
"additionalProperties": false,
191+
"properties": {
192+
"input": {
193+
"type": "string",
194+
},
195+
},
196+
"required": [
197+
"input",
198+
],
199+
"type": "object",
200+
},
201+
"name": "example",
202+
"providerOptions": undefined,
203+
"type": "function",
204+
},
205+
],
206+
"topK": undefined,
207+
"topP": undefined,
208+
},
209+
]
210+
`);
211+
});
212+
213+
it('should return the UI message stream response', () => {
214+
expect(decodedChunks).toMatchInlineSnapshot(`
215+
[
216+
"data: {"type":"start"}
217+
218+
",
219+
"data: {"type":"start-step"}
220+
221+
",
222+
"data: {"type":"text-start","id":"1"}
223+
224+
",
225+
"data: {"type":"text-delta","id":"1","delta":"Hello"}
226+
227+
",
228+
"data: {"type":"text-delta","id":"1","delta":", "}
229+
230+
",
231+
"data: {"type":"text-delta","id":"1","delta":"world!"}
232+
233+
",
234+
"data: {"type":"text-end","id":"1"}
235+
236+
",
237+
"data: {"type":"finish-step"}
238+
239+
",
240+
"data: {"type":"finish","finishReason":"stop"}
241+
242+
",
243+
"data: [DONE]
244+
245+
",
246+
]
247+
`);
248+
});
249+
});
250+
});

packages/ai/src/generate-text/__snapshots__/stream-text.test.ts.snap

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -433,7 +433,7 @@ exports[`streamText > result.pipeUIMessageStreamToResponse > should mask error m
433433
"data: {"type":"finish-step"}
434434
435435
",
436-
"data: {"type":"finish"}
436+
"data: {"type":"finish","finishReason":"error"}
437437
438438
",
439439
"data: [DONE]
@@ -456,7 +456,7 @@ exports[`streamText > result.pipeUIMessageStreamToResponse > should support cust
456456
"data: {"type":"finish-step"}
457457
458458
",
459-
"data: {"type":"finish"}
459+
"data: {"type":"finish","finishReason":"error"}
460460
461461
",
462462
"data: [DONE]
@@ -579,6 +579,7 @@ exports[`streamText > result.toUIMessageStream > should mask error messages by d
579579
"type": "finish-step",
580580
},
581581
{
582+
"finishReason": "error",
582583
"type": "finish",
583584
},
584585
]
@@ -624,6 +625,7 @@ exports[`streamText > result.toUIMessageStream > should send tool call, tool cal
624625
"type": "finish-step",
625626
},
626627
{
628+
"finishReason": "stop",
627629
"type": "finish",
628630
},
629631
]
@@ -645,6 +647,7 @@ exports[`streamText > result.toUIMessageStream > should support custom error mes
645647
"type": "finish-step",
646648
},
647649
{
650+
"finishReason": "error",
648651
"type": "finish",
649652
},
650653
]
@@ -664,7 +667,7 @@ exports[`streamText > result.toUIMessageStreamResponse > should mask error messa
664667
"data: {"type":"finish-step"}
665668
666669
",
667-
"data: {"type":"finish"}
670+
"data: {"type":"finish","finishReason":"error"}
668671
669672
",
670673
"data: [DONE]
@@ -687,7 +690,7 @@ exports[`streamText > result.toUIMessageStreamResponse > should support custom e
687690
"data: {"type":"finish-step"}
688691
689692
",
690-
"data: {"type":"finish"}
693+
"data: {"type":"finish","finishReason":"error"}
691694
692695
",
693696
"data: [DONE]

0 commit comments

Comments
 (0)