From c2523a9593c7ac22a3ca43e82a1681c12d8765d4 Mon Sep 17 00:00:00 2001
From: Sean Hatfield <seanhatfield5@gmail.com>
Date: Thu, 20 Jun 2024 15:44:19 -0700
Subject: [PATCH] [FEAT] Persist query mode refusal responses as chat history
 (#1727)

* log query refusals to workspace chats but hide in ui

* linting

---------

Co-authored-by: timothycarambat <rambat1010@gmail.com>
---
 server/models/workspaceChats.js |  2 ++
 server/utils/chats/index.js     | 42 ++++++++++++++++++++++++++++-----
 server/utils/chats/stream.js    | 39 +++++++++++++++++++++++++-----
 3 files changed, 71 insertions(+), 12 deletions(-)

diff --git a/server/models/workspaceChats.js b/server/models/workspaceChats.js
index bda40064d..951245204 100644
--- a/server/models/workspaceChats.js
+++ b/server/models/workspaceChats.js
@@ -7,6 +7,7 @@ const WorkspaceChats = {
     response = {},
     user = null,
     threadId = null,
+    include = true,
   }) {
     try {
       const chat = await prisma.workspace_chats.create({
@@ -16,6 +17,7 @@ const WorkspaceChats = {
         response: JSON.stringify(response),
         user_id: user?.id || null,
         thread_id: threadId,
+        include,
       },
     });
     return { chat, message: null };
diff --git a/server/utils/chats/index.js b/server/utils/chats/index.js
index b6258c2e3..f3e0baae2 100644
--- a/server/utils/chats/index.js
+++ b/server/utils/chats/index.js
@@ -77,15 +77,30 @@ async function chatWithWorkspace(
   // User is trying to query-mode chat a workspace that has no data in it - so
   // we should exit early as no information can be found under these conditions.
   if ((!hasVectorizedSpace || embeddingsCount === 0) && chatMode === "query") {
+    const textResponse =
+      workspace?.queryRefusalResponse ??
+      "There is no relevant information in this workspace to answer your query.";
+
+    await WorkspaceChats.new({
+      workspaceId: workspace.id,
+      prompt: message,
+      response: {
+        text: textResponse,
+        sources: [],
+        type: chatMode,
+      },
+      threadId: thread?.id || null,
+      include: false,
+      user,
+    });
+
     return {
       id: uuid,
       type: "textResponse",
       sources: [],
       close: true,
       error: null,
-      textResponse:
-        workspace?.queryRefusalResponse ??
-        "There is no relevant information in this workspace to answer your query.",
+      textResponse,
     };
   }
 
@@ -172,15 +187,30 @@ async function chatWithWorkspace(
   // If in query mode and no context chunks are found from search, backfill, or pins - do not
   // let the LLM try to hallucinate a response or use general knowledge and exit early
   if (chatMode === "query" && contextTexts.length === 0) {
+    const textResponse =
+      workspace?.queryRefusalResponse ??
+      "There is no relevant information in this workspace to answer your query.";
+
+    await WorkspaceChats.new({
+      workspaceId: workspace.id,
+      prompt: message,
+      response: {
+        text: textResponse,
+        sources: [],
+        type: chatMode,
+      },
+      threadId: thread?.id || null,
+      include: false,
+      user,
+    });
+
     return {
       id: uuid,
       type: "textResponse",
       sources: [],
       close: true,
       error: null,
-      textResponse:
-        workspace?.queryRefusalResponse ??
-        "There is no relevant information in this workspace to answer your query.",
+      textResponse,
     };
   }
 
diff --git a/server/utils/chats/stream.js b/server/utils/chats/stream.js
index ced9a9710..770e6cb6b 100644
--- a/server/utils/chats/stream.js
+++ b/server/utils/chats/stream.js
@@ -75,16 +75,29 @@ async function streamChatWithWorkspace(
   // User is trying to query-mode chat a workspace that has no data in it - so
   // we should exit early as no information can be found under these conditions.
   if ((!hasVectorizedSpace || embeddingsCount === 0) && chatMode === "query") {
+    const textResponse =
+      workspace?.queryRefusalResponse ??
+      "There is no relevant information in this workspace to answer your query.";
     writeResponseChunk(response, {
       id: uuid,
       type: "textResponse",
-      textResponse:
-        workspace?.queryRefusalResponse ??
-        "There is no relevant information in this workspace to answer your query.",
+      textResponse,
       sources: [],
       close: true,
       error: null,
     });
+    await WorkspaceChats.new({
+      workspaceId: workspace.id,
+      prompt: message,
+      response: {
+        text: textResponse,
+        sources: [],
+        type: chatMode,
+      },
+      threadId: thread?.id || null,
+      include: false,
+      user,
+    });
     return;
   }
 
@@ -177,16 +190,30 @@ async function streamChatWithWorkspace(
   // If in query mode and no context chunks are found from search, backfill, or pins - do not
   // let the LLM try to hallucinate a response or use general knowledge and exit early
   if (chatMode === "query" && contextTexts.length === 0) {
+    const textResponse =
+      workspace?.queryRefusalResponse ??
+      "There is no relevant information in this workspace to answer your query.";
     writeResponseChunk(response, {
      id: uuid,
       type: "textResponse",
-      textResponse:
-        workspace?.queryRefusalResponse ??
-        "There is no relevant information in this workspace to answer your query.",
+      textResponse,
       sources: [],
       close: true,
       error: null,
     });
+
+    await WorkspaceChats.new({
+      workspaceId: workspace.id,
+      prompt: message,
+      response: {
+        text: textResponse,
+        sources: [],
+        type: chatMode,
+      },
+      threadId: thread?.id || null,
+      include: false,
+      user,
+    });
     return;
   }
 
-- 
GitLab
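Note on the read side: the patch above only touches the write path, persisting
each refusal with include: false. A minimal sketch of how a history query could
honor that flag so hidden refusals never reach the chat UI follows. This is
illustrative only, not part of the patch: the function name forWorkspace, the
require path, and the workspaceId column are assumptions inferred from the
model code shown above.

  // Hypothetical read-side sketch, assuming the prisma client and the
  // workspace_chats schema from the patch above.
  const prisma = require("../utils/prisma");

  async function forWorkspace(workspaceId, limit = 20) {
    // Rows written with include: false (query-mode refusals) stay in the
    // table for history purposes but are filtered out here, so they are
    // never rendered in the chat UI.
    return await prisma.workspace_chats.findMany({
      where: { workspaceId: Number(workspaceId), include: true },
      orderBy: { id: "asc" },
      take: limit,
    });
  }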