
Commit f6f7457

feat: Added ability to pass settings inside LLM chain.
1 parent 1100736 commit f6f7457

4 files changed: +14 -6 lines changed

src/Core/src/Chains/Chain.cs

Lines changed: 4 additions & 2 deletions
@@ -75,13 +75,15 @@ public static DoChain Do(
     /// <param name="llm"></param>
     /// <param name="inputKey"></param>
     /// <param name="outputKey"></param>
+    /// <param name="settings"></param>
     /// <returns></returns>
     public static LLMChain LLM(
         IChatModel llm,
         string inputKey = "text",
-        string outputKey = "text")
+        string outputKey = "text",
+        ChatSettings? settings = null)
     {
-        return new LLMChain(llm, inputKey, outputKey);
+        return new LLMChain(llm, inputKey, outputKey, settings);
     }
 
     /// <inheritdoc cref="LLM"/>
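
A usage sketch (assumed, not part of this commit) of the new overload, reusing the Set / Template / | chain operators that appear in the ReActAgentExecutorChain diff below; llm stands for any IChatModel and the template string is made up for illustration:

    // Sketch: constrain generation for a single chain via the new settings parameter.
    var chain =
        Set("2 + 2 = ?", "input")                  // seed the "input" value
        | Template("Question: {input}\nAnswer:")   // hypothetical prompt template
        | Chain.LLM(llm, settings: new ChatSettings
        {
            StopSequences = ["\n"],                // stop at the first newline
        });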

src/Core/src/Chains/StackableChains/Agents/ReActAgentExecutorChain.cs

Lines changed: 4 additions & 1 deletion
@@ -108,7 +108,10 @@ private void InitializeChain()
             | Set(toolNames, "tool_names")
             | LoadMemory(_conversationBufferMemory, outputKey: "history")
             | Template(_reActPrompt)
-            | Chain.LLM(_model).UseCache(_useCache)
+            | Chain.LLM(_model, settings: new ChatSettings
+            {
+                StopSequences = ["Observation", "[END]"],
+            }).UseCache(_useCache)
             | UpdateMemory(_conversationBufferMemory, requestKey: "input", responseKey: "text")
             | ReActParser(inputKey: "text", outputKey: ReActAnswer);
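
Note on the stop sequences above: in a ReAct loop the model is expected to emit Thought / Action / Action Input and then halt, so cutting generation at "Observation" keeps the model from hallucinating tool output; the executor runs the tool and feeds the real observation back in via memory on the next pass.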

src/Core/src/Chains/StackableChains/LLMChain.cs

Lines changed: 5 additions & 2 deletions
@@ -10,19 +10,22 @@ public class LLMChain : BaseStackableChain
 {
     private readonly IChatModel _llm;
     private bool _useCache;
+    private ChatSettings _settings;
 
     private const string CACHE_DIR = "cache";
 
     /// <inheritdoc/>
     public LLMChain(
         IChatModel llm,
         string inputKey = "prompt",
-        string outputKey = "text"
+        string outputKey = "text",
+        ChatSettings? settings = null
     )
     {
         InputKeys = new[] { inputKey };
         OutputKeys = new[] { outputKey };
         _llm = llm;
+        _settings = settings ?? new ChatSettings();
     }
 
     string? GetCachedAnswer(string prompt)
@@ -63,7 +66,7 @@ protected override async Task<IChainValues> InternalCallAsync(
             }
         }
 
-        var response = await _llm.GenerateAsync(prompt, cancellationToken: cancellationToken).ConfigureAwait(false);
+        var response = await _llm.GenerateAsync(prompt, settings: _settings, cancellationToken: cancellationToken).ConfigureAwait(false);
         responseContent = response.Messages.Last().Content;
         if (_useCache)
             SaveCachedAnswer(prompt, responseContent);
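
A minimal sketch (assumed usage, not from this commit) of the constructor's null-coalescing default: omitting settings yields a fresh ChatSettings, so existing call sites keep their old behavior.

    // Assumed usage: both forms are valid against the constructor above.
    var plain = new LLMChain(llm);                  // _settings becomes new ChatSettings()
    var bounded = new LLMChain(llm, settings: new ChatSettings
    {
        StopSequences = ["[END]"],                  // cut generation at the marker
    });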

src/Meta/test/WikiTests.cs

Lines changed: 1 addition & 1 deletion
@@ -50,7 +50,7 @@ public async Task AgentWithOllamaReact()
         var apiKey =
             Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
             throw new InvalidOperationException("OpenAI API key is not set");
-        var llm = new Gpt35TurboModel(apiKey);
+        var llm = new Gpt35TurboModel(apiKey).UseConsoleForDebug();
 
         // create a google search
         var searchApiKey =
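
The UseConsoleForDebug() change is a small unrelated tweak; it presumably mirrors the model's requests and responses to the console, which is handy for inspecting the ReAct prompt and stop-sequence behavior introduced above.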
