From d02013fd71f454279ca89631fcb2748b8178f70e Mon Sep 17 00:00:00 2001
From: Sean Hatfield <seanhatfield5@gmail.com>
Date: Thu, 2 May 2024 10:27:09 -0700
Subject: [PATCH] [FIX] Document pinning does not count in query mode (#1250)

* If a document is pinned, do not return the queryRefusalResponse message even when the vector search yields no sources

* Apply the same pinned-document check to embed.js, which was missed in the initial commit

---------

Co-authored-by: timothycarambat <rambat1010@gmail.com>
---
 server/utils/chats/embed.js  | 6 +++++-
 server/utils/chats/index.js  | 8 ++++++--
 server/utils/chats/stream.js | 8 ++++++--
 3 files changed, 17 insertions(+), 5 deletions(-)

diff --git a/server/utils/chats/embed.js b/server/utils/chats/embed.js
index 533ea0c34..98b096fb1 100644
--- a/server/utils/chats/embed.js
+++ b/server/utils/chats/embed.js
@@ -131,7 +131,11 @@ async function streamChatWithForEmbed(
 
   // If in query mode and no sources are found, do not
   // let the LLM try to hallucinate a response or use general knowledge
-  if (chatMode === "query" && sources.length === 0) {
+  if (
+    chatMode === "query" &&
+    sources.length === 0 &&
+    pinnedDocIdentifiers.length === 0
+  ) {
     writeResponseChunk(response, {
       id: uuid,
       type: "textResponse",
diff --git a/server/utils/chats/index.js b/server/utils/chats/index.js
index 38ce6c9bd..76f98e0df 100644
--- a/server/utils/chats/index.js
+++ b/server/utils/chats/index.js
@@ -140,9 +140,13 @@ async function chatWithWorkspace(
   contextTexts = [...contextTexts, ...vectorSearchResults.contextTexts];
   sources = [...sources, ...vectorSearchResults.sources];
 
-  // If in query mode and no sources are found, do not
+  // If in query mode and no sources are found from the vector search and no pinned documents, do not
   // let the LLM try to hallucinate a response or use general knowledge and exit early
-  if (chatMode === "query" && sources.length === 0) {
+  if (
+    chatMode === "query" &&
+    vectorSearchResults.sources.length === 0 &&
+    pinnedDocIdentifiers.length === 0
+  ) {
     return {
       id: uuid,
       type: "textResponse",
diff --git a/server/utils/chats/stream.js b/server/utils/chats/stream.js
index 57f326664..ba4dea163 100644
--- a/server/utils/chats/stream.js
+++ b/server/utils/chats/stream.js
@@ -160,9 +160,13 @@ async function streamChatWithWorkspace(
   contextTexts = [...contextTexts, ...vectorSearchResults.contextTexts];
   sources = [...sources, ...vectorSearchResults.sources];
 
-  // If in query mode and no sources are found, do not
+  // If in query mode and no sources are found from the vector search and no pinned documents, do not
   // let the LLM try to hallucinate a response or use general knowledge and exit early
-  if (chatMode === "query" && sources.length === 0) {
+  if (
+    chatMode === "query" &&
+    sources.length === 0 &&
+    pinnedDocIdentifiers.length === 0
+  ) {
     writeResponseChunk(response, {
       id: uuid,
       type: "textResponse",
-- 
GitLab