Skip to content
Open
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
57 changes: 56 additions & 1 deletion core/llm/llms/Ollama.ts
Original file line number Diff line number Diff line change
Expand Up @@ -114,6 +114,18 @@ type OllamaErrorResponse = {
error: string;
};

// Shape of one streamed chat chunk emitted by an n8n workflow endpoint
// (an alternative backend speaking a different wire format than the
// native Ollama /api/chat responses).
// NOTE(review): the type name is misspelled ("Reponse" -> "Response");
// it is referenced elsewhere in this file, so it is left unrenamed here.
type N8nChatReponse = {
  // Chunk discriminator; the presence of a "type" field is what the
  // stream handler uses to tell n8n payloads apart from native Ollama
  // responses — TODO confirm the full set of values n8n emits.
  type: string;
  // Streamed token text; may be the literal "<think>" / "</think>"
  // reasoning markers (see the thinking handling in _streamChat).
  content?: string;
  // Provenance of the chunk within the n8n workflow execution.
  metadata: {
    nodeId: string;
    nodeName: string;
    itemIndex: number;
    runIndex: number;
    // NOTE(review): plural name but scalar value; presumably an epoch
    // timestamp — confirm against the n8n emitter.
    timestamps: number;
  };
};

type OllamaRawResponse =
| OllamaErrorResponse
| (OllamaBaseResponse & {
Expand All @@ -124,7 +136,8 @@ type OllamaChatResponse =
| OllamaErrorResponse
| (OllamaBaseResponse & {
message: OllamaChatMessage;
});
})
| N8nChatReponse;

interface OllamaTool {
type: "function";
Expand All @@ -146,6 +159,7 @@ class Ollama extends BaseLLM implements ModelInstaller {
private static modelsBeingInstalled: Set<string> = new Set();
private static modelsBeingInstalledMutex = new Mutex();

private static _isThinking: boolean = false;
private fimSupported: boolean = false;
constructor(options: LLMOptions) {
super(options);
Expand Down Expand Up @@ -388,6 +402,15 @@ class Ollama extends BaseLLM implements ModelInstaller {
}
}

// Returns whether the stream is currently inside a <think>...</think>
// section. NOTE(review): this flag is static, i.e. shared across all
// Ollama instances and concurrent streams — confirm only one stream is
// active at a time, otherwise interleaved streams will corrupt it.
static GetIsThinking(): boolean {
  return this._isThinking;
}
static SetIsThinking(newValue: boolean): void {
if (this._isThinking !== newValue) {
this._isThinking = newValue;
}
}

protected async *_streamChat(
messages: ChatMessage[],
signal: AbortSignal,
Expand Down Expand Up @@ -433,6 +456,38 @@ class Ollama extends BaseLLM implements ModelInstaller {
throw new Error(res.error);
}

// Handle chunks from an n8n endpoint rather than the native Ollama API:
// n8n payloads carry a "type" discriminator and stream model reasoning
// wrapped in literal <think> ... </think> marker tokens.
if ("type" in res) {
  const { content } = res;

  // Opening marker: everything from here until "</think>" is reasoning.
  if (content === "<think>") {
    Ollama.SetIsThinking(true);
  }

  if (Ollama.GetIsThinking() && content) {
    // NOTE(review): the "<think>"/"</think>" marker tokens themselves are
    // forwarded as thinking content — confirm consumers expect that.
    const thinkingMessage: ThinkingChatMessage = {
      role: "thinking",
      content: content,
    };

    // Closing marker: subsequent chunks revert to assistant output.
    // (Removed a dead `if (thinkingMessage)` guard here — an object
    // literal just assigned above is always truthy.)
    if (content === "</think>") {
      Ollama.SetIsThinking(false);
    }

    // While streaming, a chunk is either thinking or content, never both.
    return [thinkingMessage];
  }

  // Ordinary assistant output outside a thinking section.
  if (content) {
    const chatMessage: ChatMessage = {
      role: "assistant",
      content: content,
    };
    return [chatMessage];
  }

  // Content-less chunks (e.g. metadata-only) produce no messages.
  return [];
}

const { role, content, thinking, tool_calls: toolCalls } = res.message;

if (role === "tool") {
Expand Down
Loading