From 37cdb845a4e447f01ee0e783d842c0e1657069e5 Mon Sep 17 00:00:00 2001
From: Timothy Carambat <rambat1010@gmail.com>
Date: Tue, 12 Dec 2023 16:20:06 -0800
Subject: [PATCH] patch: implement @lunamidori hotfix for LocalAI streaming
 chunk overflows (#433)

* patch: implement @lunamidori hotfix for LocalAI streaming chunk overflows
resolves #416

* change log to error log

* log trace

* lint
---
 server/utils/chats/stream.js | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/server/utils/chats/stream.js b/server/utils/chats/stream.js
index 643f3d439..4eb9cf022 100644
--- a/server/utils/chats/stream.js
+++ b/server/utils/chats/stream.js
@@ -253,7 +253,16 @@ function handleStreamResponses(response, stream, responseProps) {
         } catch {}
 
         if (!validJSON) {
-          chunk += message;
+          // It is possible for chunk decoding to run away, causing the
+          // message append to fail once the maximum string length is hit.
+          // In this case abort the chunk and reset so we can continue.
+          // ref: https://github.com/Mintplex-Labs/anything-llm/issues/416
+          try {
+            chunk += message;
+          } catch (e) {
+            console.error(`Chunk appending error`, e);
+            chunk = "";
+          }
           continue;
         } else {
           chunk = "";
-- 
GitLab