From 910eb36cfe4fa5a59e16024054fda2125009852d Mon Sep 17 00:00:00 2001 From: Sean Hatfield <seanhatfield5@gmail.com> Date: Fri, 28 Jun 2024 14:23:16 -0700 Subject: [PATCH] [FIX] OpenAI compatible endpoints query mode developer API bug fix (#1789) fix query mode always responding with refusal message on developer API OpenAI compatible endpoints --- server/utils/chats/openaiCompatible.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/server/utils/chats/openaiCompatible.js b/server/utils/chats/openaiCompatible.js index 4c78eeb75..bd984b76e 100644 --- a/server/utils/chats/openaiCompatible.js +++ b/server/utils/chats/openaiCompatible.js @@ -112,8 +112,8 @@ async function chatSync({ } // For OpenAI Compatible chats, we cannot do backfilling so we simply aggregate results here. - contextTexts = [...contextTexts]; - sources = [...sources]; + contextTexts = [...contextTexts, ...vectorSearchResults.contextTexts]; + sources = [...sources, ...vectorSearchResults.sources]; // If in query mode and no context chunks are found from search, backfill, or pins - do not // let the LLM try to hallucinate a response or use general knowledge and exit early @@ -328,8 +328,8 @@ async function streamChat({ } // For OpenAI Compatible chats, we cannot do backfilling so we simply aggregate results here. - contextTexts = [...contextTexts]; - sources = [...sources]; + contextTexts = [...contextTexts, ...vectorSearchResults.contextTexts]; + sources = [...sources, ...vectorSearchResults.sources]; // If in query mode and no context chunks are found from search, backfill, or pins - do not // let the LLM try to hallucinate a response or use general knowledge and exit early -- GitLab