From c80bf3311f5e33b87b32b81ab6362bd28217eb72 Mon Sep 17 00:00:00 2001 From: Alex Yang <himself65@outlook.com> Date: Wed, 10 Apr 2024 02:54:36 -0500 Subject: [PATCH] fix: response.raw should be null (#705) --- packages/core/e2e/node/snapshot/llm.snap | 5 +++-- packages/core/src/llm/base.ts | 1 + packages/core/src/llm/ollama.ts | 3 ++- packages/core/src/llm/types.ts | 11 +++++++++-- packages/core/src/llm/utils.ts | 9 ++++++++- 5 files changed, 23 insertions(+), 6 deletions(-) diff --git a/packages/core/e2e/node/snapshot/llm.snap b/packages/core/e2e/node/snapshot/llm.snap index a27bcffc1..6074f1524 100644 --- a/packages/core/e2e/node/snapshot/llm.snap +++ b/packages/core/e2e/node/snapshot/llm.snap @@ -56,10 +56,11 @@ { "id": "de46b84e-7345-430f-b8fa-423354b630c9", "response": { - "raw": {}, + "raw": null, "message": { "content": "Hello! How can I assist you today?", - "role": "assistant" + "role": "assistant", + "options": {} } } } diff --git a/packages/core/src/llm/base.ts b/packages/core/src/llm/base.ts index 8c245f437..d0f434909 100644 --- a/packages/core/src/llm/base.ts +++ b/packages/core/src/llm/base.ts @@ -41,6 +41,7 @@ export abstract class BaseLLM< }); return streamConverter(stream, (chunk) => { return { + raw: null, text: chunk.delta, }; }); diff --git a/packages/core/src/llm/ollama.ts b/packages/core/src/llm/ollama.ts index aad435628..85e475cf2 100644 --- a/packages/core/src/llm/ollama.ts +++ b/packages/core/src/llm/ollama.ts @@ -17,8 +17,9 @@ const messageAccessor = (data: any): ChatResponseChunk => { delta: data.message.content, }; }; + const completionAccessor = (data: any): CompletionResponse => { - return { text: data.response }; + return { text: data.response, raw: null }; }; // https://github.com/jmorganca/ollama diff --git a/packages/core/src/llm/types.ts b/packages/core/src/llm/types.ts index 1231885b1..1b27b1309 100644 --- a/packages/core/src/llm/types.ts +++ b/packages/core/src/llm/types.ts @@ -126,8 +126,10 @@ export interface ChatResponse< 
message: ChatMessage<AdditionalMessageOptions>; /** * Raw response from the LLM + * + * It's possible that this is `null` if the LLM responds with an iterable of chunks */ - raw: object; + raw: object | null; } export type ChatResponseChunk< @@ -148,7 +150,12 @@ export interface CompletionResponse { text: string; - raw?: Record<string, any>; + /** + * Raw response from the LLM + * + * It's possible that this is `null` if the LLM responds with an iterable of chunks + */ + raw: object | null; } export type LLMMetadata = { diff --git a/packages/core/src/llm/utils.ts b/packages/core/src/llm/utils.ts index fbbc0c6c7..b37cc05da 100644 --- a/packages/core/src/llm/utils.ts +++ b/packages/core/src/llm/utils.ts @@ -84,10 +84,11 @@ export function wrapLLMEvent( }; response[Symbol.asyncIterator] = async function* () { const finalResponse: ChatResponse = { - raw: response, + raw: null, message: { content: "", role: "assistant", + options: {}, }, }; let firstOne = false; @@ -98,6 +99,12 @@ } else { finalResponse.message.content += chunk.delta; } + if (chunk.options) { + finalResponse.message.options = { + ...finalResponse.message.options, + ...chunk.options, + }; + } yield chunk; } snapshot(() => { -- GitLab