From 67f4db8501d321bd1b7732ce9b38f494416de9d7 Mon Sep 17 00:00:00 2001
From: Alex Yang <himself65@outlook.com>
Date: Sun, 10 Nov 2024 23:27:09 -0800
Subject: [PATCH] fix: streaming chat in ollama (#1463)

---
 packages/providers/ollama/src/llm.ts | 17 ++++++++++++++++-
 1 file changed, 16 insertions(+), 1 deletion(-)

diff --git a/packages/providers/ollama/src/llm.ts b/packages/providers/ollama/src/llm.ts
index f82da2ff3..3878a9678 100644
--- a/packages/providers/ollama/src/llm.ts
+++ b/packages/providers/ollama/src/llm.ts
@@ -23,7 +23,22 @@ import {
   type Options,
 } from "ollama/browser";
 
-const messageAccessor = (part: OllamaChatResponse): ChatResponseChunk => {
+const messageAccessor = (
+  part: OllamaChatResponse,
+): ChatResponseChunk<ToolCallLLMMessageOptions> => {
+  if (part.message.tool_calls) {
+    return {
+      raw: part,
+      delta: part.message.content,
+      options: {
+        toolCall: part.message.tool_calls.map((toolCall) => ({
+          name: toolCall.function.name,
+          input: toolCall.function.arguments,
+          id: randomUUID(),
+        })),
+      },
+    };
+  }
   return {
     raw: part,
     delta: part.message.content,
-- 
GitLab