Commit e188f5a

Merge pull request #100 from cnblogs/adds-thinking-option
feat: support reasoning switch for qwen3 models
2 parents 9f2f8a1 + 2bae32e commit e188f5a

14 files changed: +224 -1631 lines

README.md

Lines changed: 31 additions & 14 deletions
@@ -123,19 +123,6 @@ var completion = await client.GetQWenCompletionAsync(QWenLlm.QWenMax, prompt);
 Console.WriteLine(completion.Output.Text);
 ```
 
-## Reasoning
-
-Use `completion.Output.Choices![0].Message.ReasoningContent` to access the reasoning content from model.
-
-```csharp
-var history = new List<ChatMessage>
-{
-    ChatMessage.User("Calculate 1+1")
-};
-var completion = await client.GetDeepSeekChatCompletionAsync(DeepSeekLlm.DeepSeekR1, history);
-Console.WriteLine(completion.Output.Choices[0]!.Message.ReasoningContent);
-```
-
 ## Multi-round chat
 
 ```csharp
@@ -153,6 +140,36 @@ var completion = await client.GetQWenChatCompletionAsync(QWenLlm.QWenMax, histor
 Console.WriteLine(completion.Output.Choices[0].Message.Content); // The number is 42
 ```
 
+## Reasoning
+
+Use `completion.Output.Choices![0].Message.ReasoningContent` to access the thoughts from the reasoning model.
+
+```csharp
+var history = new List<ChatMessage>
+{
+    ChatMessage.User("Calculate 1+1")
+};
+var completion = await client.GetDeepSeekChatCompletionAsync(DeepSeekLlm.DeepSeekR1, history);
+Console.WriteLine(completion.Output.Choices[0]!.Message.ReasoningContent);
+```
+
+### QWen3
+
+Use `TextGenerationParameters.EnableThinking` to toggle reasoning.
+
+```csharp
+var stream = dashScopeClient
+    .GetQWenChatStreamAsync(
+        QWenLlm.QWenPlusLatest,
+        history,
+        new TextGenerationParameters
+        {
+            IncrementalOutput = true,
+            ResultFormat = ResultFormats.Message,
+            EnableThinking = true
+        });
+```
+
 ## Function Call
 
 Creates a function with parameters
@@ -182,7 +199,7 @@ public enum TemperatureUnit
 }
 ```
 
-Append tool information to chat messages.
+Append tool information to chat messages (here we use `JsonSchema.Net` to generate the JSON Schema).
 
 ```csharp
 var tools = new List<ToolDefinition>()
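
Taken together, the new README sections can be folded into one streaming loop that turns thinking on, keeps the model's thoughts apart from the final answer, and only surfaces the answer at the end. This is a minimal sketch, not part of the commit: it assumes `dashScopeClient` and `history` are set up as in the snippets above and that streamed chunks share the `Output.Choices[0].Message` shape of the non-streaming example.

```csharp
using System.Text;

// Sketch only: dashScopeClient and history are assumed to exist as in the README snippets above.
var stream = dashScopeClient.GetQWenChatStreamAsync(
    QWenLlm.QWenPlusLatest,
    history,
    new TextGenerationParameters
    {
        IncrementalOutput = true,
        ResultFormat = ResultFormats.Message,
        EnableThinking = true
    });

var thoughts = new StringBuilder();
var answer = new StringBuilder();
await foreach (var chunk in stream)
{
    // Assumption: streamed chunks expose the same Output.Choices[0].Message shape as completions.
    var message = chunk.Output.Choices![0].Message;
    if (!string.IsNullOrEmpty(message.ReasoningContent))
    {
        thoughts.Append(message.ReasoningContent); // the model's reasoning
    }
    else
    {
        answer.Append(message.Content);            // the user-facing answer
    }
}

Console.WriteLine(answer.ToString());
```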

README.zh-Hans.md

Lines changed: 18 additions & 1 deletion
@@ -157,6 +157,23 @@ var completion = await client.GetDeepSeekChatCompletionAsync(DeepSeekLlm.DeepSee
 Console.WriteLine(completion.Output.Choices[0]!.Message.ReasoningContent);
 ```
 
+### QWen3
+
+Use `TextGenerationParameters.EnableThinking` to control whether the model's reasoning capability is used.
+
+```csharp
+var stream = dashScopeClient
+    .GetQWenChatStreamAsync(
+        QWenLlm.QWenPlusLatest,
+        history,
+        new TextGenerationParameters
+        {
+            IncrementalOutput = true,
+            ResultFormat = ResultFormats.Message,
+            EnableThinking = true
+        });
+```
+
 ## Function Call
 
 Create a method that the model can use.
@@ -182,7 +199,7 @@ public enum TemperatureUnit
 }
 ```
 
-When chatting, include the method's name, description, and parameter list; the parameter list is provided as a JSON Schema.
+When chatting, include the method's name, description, and parameter list; the parameter list is provided as a JSON Schema (here we use the `JsonSchema.Net` library, but any other library with similar functionality works).
 
 ```csharp
 var tools = new List<ToolDefinition>()

sample/Cnblogs.DashScope.Sample/Program.cs

Lines changed: 13 additions & 5 deletions
@@ -32,12 +32,12 @@
 switch (type)
 {
     case SampleType.TextCompletion:
-        Console.WriteLine("Prompt > ");
+        Console.Write("Prompt > ");
         userInput = Console.ReadLine()!;
         await TextCompletionAsync(userInput);
         break;
     case SampleType.TextCompletionSse:
-        Console.WriteLine("Prompt > ");
+        Console.Write("Prompt > ");
         userInput = Console.ReadLine()!;
         await TextCompletionStreamAsync(userInput);
         break;
@@ -97,9 +97,14 @@ async Task ChatStreamAsync()
     history.Add(TextChatMessage.User(input));
     var stream = dashScopeClient
         .GetQWenChatStreamAsync(
-            QWenLlm.QWenMax,
+            QWenLlm.QWenPlusLatest,
             history,
-            new TextGenerationParameters { IncrementalOutput = true, ResultFormat = ResultFormats.Message });
+            new TextGenerationParameters
+            {
+                IncrementalOutput = true,
+                ResultFormat = ResultFormats.Message,
+                EnableThinking = true
+            });
     var role = string.Empty;
     var message = new StringBuilder();
     await foreach (var modelResponse in stream)
@@ -112,7 +117,10 @@ async Task ChatStreamAsync()
         }
 
         message.Append(chunk.Message.Content);
-        Console.Write(chunk.Message.Content);
+        var write = string.IsNullOrEmpty(chunk.Message.ReasoningContent)
+            ? chunk.Message.Content
+            : chunk.Message.ReasoningContent;
+        Console.Write(write);
     }
 
     Console.WriteLine();
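
With this change the sample writes reasoning chunks and answer chunks to the same console stream with nothing marking the transition. A possible refinement, sketched under the same assumptions as the sample (a chunk whose `Message` carries `ReasoningContent` and `Content`), prints a divider once when the stream switches from thoughts to the answer; the helper below is hypothetical and not part of the commit.

```csharp
// Hypothetical helper, not part of the commit: mirrors the sample's write logic but
// marks the point where the stream switches from reasoning to the final answer.
var inThoughts = false;
void WriteChunk(string? reasoningContent, string? content)
{
    if (!string.IsNullOrEmpty(reasoningContent))
    {
        inThoughts = true;
        Console.Write(reasoningContent);
        return;
    }

    if (inThoughts)
    {
        // First answer chunk after the thoughts: print the divider once.
        Console.WriteLine();
        Console.WriteLine("--- answer ---");
        inThoughts = false;
    }

    Console.Write(content);
}

// Inside the sample's loop this would replace the Console.Write(write) call:
// WriteChunk(chunk.Message.ReasoningContent, chunk.Message.Content);
```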

src/Cnblogs.DashScope.Core/ITextGenerationParameters.cs

Lines changed: 5 additions & 0 deletions
@@ -40,6 +40,11 @@ public interface ITextGenerationParameters
     /// </summary>
     public bool? EnableSearch { get; }
 
+    /// <summary>
+    /// Thinking option. Valid for supported models (e.g. qwen3).
+    /// </summary>
+    public bool? EnableThinking { get; }
+
     /// <summary>
     /// Available tools for model to call.
     /// </summary>
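
Like `EnableSearch`, the new `EnableThinking` is a nullable flag. Judging from the request fixtures further down in this commit, an option left at `null` is simply omitted from the serialized `parameters` object, so the three states look like this (an illustration of the nullable contract only, not new API surface):

```csharp
using Cnblogs.DashScope.Core;

// Illustration only; comments describe the expected serialization based on the fixtures below.
var notSet = new TextGenerationParameters();                               // null: "enable_thinking" is omitted from the request body
var thinkingOn = new TextGenerationParameters { EnableThinking = true };   // sent as "enable_thinking": true
var thinkingOff = new TextGenerationParameters { EnableThinking = false }; // sent as "enable_thinking": false
```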

src/Cnblogs.DashScope.Core/TextGenerationOutputTokenDetails.cs

Lines changed: 7 additions & 0 deletions
@@ -0,0 +1,7 @@
+namespace Cnblogs.DashScope.Core;
+
+/// <summary>
+/// Output details for text generation api.
+/// </summary>
+/// <param name="ReasoningTokens">Token count of reasoning content.</param>
+public record TextGenerationOutputTokenDetails(int ReasoningTokens);

src/Cnblogs.DashScope.Core/TextGenerationParameters.cs

Lines changed: 3 additions & 0 deletions
@@ -38,6 +38,9 @@ public class TextGenerationParameters : ITextGenerationParameters
     /// <inheritdoc />
     public bool? EnableSearch { get; set; }
 
+    /// <inheritdoc />
+    public bool? EnableThinking { get; set; }
+
     /// <inheritdoc />
     public IEnumerable<ToolDefinition>? Tools { get; set; }
 
src/Cnblogs.DashScope.Core/TextGenerationPromptTokenDetails.cs

Lines changed: 7 additions & 0 deletions
@@ -0,0 +1,7 @@
+namespace Cnblogs.DashScope.Core;
+
+/// <summary>
+/// Token usage details.
+/// </summary>
+/// <param name="CachedTokens">Token count of cached input.</param>
+public record TextGenerationPromptTokenDetails(int CachedTokens);

src/Cnblogs.DashScope.Core/TextGenerationTokenDetails.cs

Lines changed: 0 additions & 7 deletions
This file was deleted.

src/Cnblogs.DashScope.Core/TextGenerationTokenUsage.cs

Lines changed: 6 additions & 1 deletion
@@ -14,7 +14,12 @@ public class TextGenerationTokenUsage
     /// <summary>
     /// Input token details.
     /// </summary>
-    public TextGenerationTokenDetails? PromptTokensDetails { get; set; }
+    public TextGenerationPromptTokenDetails? PromptTokensDetails { get; set; }
+
+    /// <summary>
+    /// Output token details.
+    /// </summary>
+    public TextGenerationOutputTokenDetails? OutputTokensDetails { get; set; }
 
     /// <summary>
     /// The number of output token.
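
With the split into `TextGenerationPromptTokenDetails` and `TextGenerationOutputTokenDetails`, cached prompt tokens and reasoning tokens are now reported separately. A hedged sketch of reading them follows; it assumes the response exposes this class through a `Usage` property, so adjust the property path if it differs.

```csharp
// Sketch only: `completion` is assumed to be a text-generation response whose
// Usage property is the TextGenerationTokenUsage shown above.
var usage = completion.Usage;
if (usage is not null)
{
    // CachedTokens: prompt tokens served from cache; ReasoningTokens: tokens spent on thoughts.
    Console.WriteLine($"cached prompt tokens: {usage.PromptTokensDetails?.CachedTokens}");
    Console.WriteLine($"reasoning tokens:     {usage.OutputTokensDetails?.ReasoningTokens}");
}
```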

test/Cnblogs.DashScope.Tests.Shared/RawHttpData/single-generation-message-reasoning-nosse.request.body.json

Lines changed: 2 additions & 1 deletion
@@ -9,6 +9,7 @@
     ]
   },
   "parameters": {
-    "incremental_output": false
+    "incremental_output": false,
+    "result_format": "message"
   }
 }

@@ -1,5 +1,5 @@
 {
-  "model": "deepseek-r1",
+  "model": "qwen-plus-latest",
   "input": {
     "messages": [
       {
@@ -9,6 +9,8 @@
     ]
   },
   "parameters": {
-    "incremental_output": true
+    "incremental_output": true,
+    "result_format": "message",
+    "enable_thinking": true
   }
 }
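
The fixtures show how the new option reaches the wire: `EnableThinking = true` becomes `"enable_thinking": true` inside `parameters`, next to `incremental_output` and `result_format`. As a rough illustration of that name mapping only (the SDK uses its own serializer settings, so this is not its actual code path), snake-case System.Text.Json serialization of the parameters object produces the same shape:

```csharp
using System.Text.Json;
using System.Text.Json.Serialization;
using Cnblogs.DashScope.Core;

// Illustration of the property-name mapping only; not the SDK's real serializer configuration.
var parameters = new TextGenerationParameters
{
    IncrementalOutput = true,
    ResultFormat = ResultFormats.Message,
    EnableThinking = true
};

var options = new JsonSerializerOptions
{
    PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower, // .NET 8+
    DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
};

// Prints something like: {"incremental_output":true,"result_format":"message","enable_thinking":true}
Console.WriteLine(JsonSerializer.Serialize(parameters, options));
```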
