From e9f7b9b79e8e4ddf242cf56a2a6710f09ba27da7 Mon Sep 17 00:00:00 2001
From: Timothy Carambat <rambat1010@gmail.com>
Date: Thu, 4 Jan 2024 18:05:06 -0800
Subject: [PATCH] Handle undefined stream chunk for native LLM (#534)

---
 server/utils/chats/stream.js | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/server/utils/chats/stream.js b/server/utils/chats/stream.js
index 293e3b410..a6ade1819 100644
--- a/server/utils/chats/stream.js
+++ b/server/utils/chats/stream.js
@@ -268,6 +268,11 @@ function handleStreamResponses(response, stream, responseProps) {
   return new Promise(async (resolve) => {
     let fullText = "";
     for await (const chunk of stream) {
+      if (chunk === undefined)
+        throw new Error(
+          "Stream returned undefined chunk. Aborting reply - check model provider logs."
+        );
+
       const content = chunk.hasOwnProperty("content") ? chunk.content : chunk;
       fullText += content;
       writeResponseChunk(response, {
-- 
GitLab