diff --git a/server/models/workspaceChats.js b/server/models/workspaceChats.js
index bda40064d5bb59ad307ee4c585a57f308b58b90a..951245204fe42d8bc9ea5d3e6ede5e34d176ac6d 100644
--- a/server/models/workspaceChats.js
+++ b/server/models/workspaceChats.js
@@ -7,6 +7,7 @@ const WorkspaceChats = {
     response = {},
     user = null,
     threadId = null,
+    include = true,
   }) {
     try {
       const chat = await prisma.workspace_chats.create({
@@ -16,6 +17,7 @@ const WorkspaceChats = {
           response: JSON.stringify(response),
           user_id: user?.id || null,
           thread_id: threadId,
+          include,
         },
       });
       return { chat, message: null };
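
The new `include` flag defaults to `true`, so existing callers of
`WorkspaceChats.new` are unaffected; only the query-mode refusals below are
written with `include: false`. The effect is that a refusal row is still
persisted as part of the workspace's record, but any history reader that
filters on the flag skips it when assembling context for the LLM. A minimal
sketch of such a reader, assuming Prisma and a `workspaceId` column on
`workspace_chats`; the `historyForWorkspace` name and exact filter shape are
illustrative, not necessarily the project's real API:

const WorkspaceChatsHistory = {
  async historyForWorkspace(workspaceId, limit = 20) {
    // Rows saved with include: false (e.g. query-mode refusals) stay in the
    // table but never surface in history fed back to the LLM.
    return await prisma.workspace_chats.findMany({
      where: { workspaceId, include: true },
      orderBy: { id: "asc" },
      take: limit,
    });
  },
};
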
diff --git a/server/utils/chats/index.js b/server/utils/chats/index.js
index b6258c2e336cc6dd90738488dcf89e6285aca2f9..f3e0baae224e8ffca10880d3c13cc91f3e1b8773 100644
--- a/server/utils/chats/index.js
+++ b/server/utils/chats/index.js
@@ -77,15 +77,30 @@ async function chatWithWorkspace(
   // User is trying to query-mode chat a workspace that has no data in it - so
   // we should exit early as no information can be found under these conditions.
   if ((!hasVectorizedSpace || embeddingsCount === 0) && chatMode === "query") {
+    const textResponse =
+      workspace?.queryRefusalResponse ??
+      "There is no relevant information in this workspace to answer your query.";
+
+    await WorkspaceChats.new({
+      workspaceId: workspace.id,
+      prompt: message,
+      response: {
+        text: textResponse,
+        sources: [],
+        type: chatMode,
+      },
+      threadId: thread?.id || null,
+      include: false,
+      user,
+    });
+
     return {
       id: uuid,
       type: "textResponse",
       sources: [],
       close: true,
       error: null,
-      textResponse:
-        workspace?.queryRefusalResponse ??
-        "There is no relevant information in this workspace to answer your query.",
+      textResponse,
     };
   }
 
@@ -172,15 +187,30 @@ async function chatWithWorkspace(
   // If in query mode and no context chunks are found from search, backfill, or pins - do not
   // let the LLM try to hallucinate a response or use general knowledge and exit early
   if (chatMode === "query" && contextTexts.length === 0) {
+    const textResponse =
+      workspace?.queryRefusalResponse ??
+      "There is no relevant information in this workspace to answer your query.";
+
+    await WorkspaceChats.new({
+      workspaceId: workspace.id,
+      prompt: message,
+      response: {
+        text: textResponse,
+        sources: [],
+        type: chatMode,
+      },
+      threadId: thread?.id || null,
+      include: false,
+      user,
+    });
+
     return {
       id: uuid,
       type: "textResponse",
       sources: [],
       close: true,
       error: null,
-      textResponse:
-        workspace?.queryRefusalResponse ??
-        "There is no relevant information in this workspace to answer your query.",
+      textResponse,
     };
   }
 
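Both query-mode exits in chatWithWorkspace now share the same shape: resolve
the refusal text, persist it with `include: false`, and return it to the
caller. Since the block is duplicated verbatim, it could be hoisted into a
helper along these lines; `recordQueryRefusal` is a hypothetical name, and
the body is assembled only from calls already present in this diff:

async function recordQueryRefusal({ workspace, message, thread, user, chatMode }) {
  const textResponse =
    workspace?.queryRefusalResponse ??
    "There is no relevant information in this workspace to answer your query.";
  // Persist the refusal for the record; include: false keeps it out of any
  // history query that filters on the flag.
  await WorkspaceChats.new({
    workspaceId: workspace.id,
    prompt: message,
    response: { text: textResponse, sources: [], type: chatMode },
    threadId: thread?.id || null,
    include: false,
    user,
  });
  return textResponse;
}
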
diff --git a/server/utils/chats/stream.js b/server/utils/chats/stream.js
index ced9a97109430a7b28a4215b9f17357dd14b7d1b..770e6cb6b8253f5a1a42f5bdecf4c9ebaba04a77 100644
--- a/server/utils/chats/stream.js
+++ b/server/utils/chats/stream.js
@@ -75,16 +75,29 @@ async function streamChatWithWorkspace(
   // User is trying to query-mode chat a workspace that has no data in it - so
   // we should exit early as no information can be found under these conditions.
   if ((!hasVectorizedSpace || embeddingsCount === 0) && chatMode === "query") {
+    const textResponse =
+      workspace?.queryRefusalResponse ??
+      "There is no relevant information in this workspace to answer your query.";
     writeResponseChunk(response, {
       id: uuid,
       type: "textResponse",
-      textResponse:
-        workspace?.queryRefusalResponse ??
-        "There is no relevant information in this workspace to answer your query.",
+      textResponse,
       sources: [],
       close: true,
       error: null,
     });
+    await WorkspaceChats.new({
+      workspaceId: workspace.id,
+      prompt: message,
+      response: {
+        text: textResponse,
+        sources: [],
+        type: chatMode,
+      },
+      threadId: thread?.id || null,
+      include: false,
+      user,
+    });
     return;
   }
 
@@ -177,16 +190,30 @@ async function streamChatWithWorkspace(
   // If in query mode and no context chunks are found from search, backfill, or pins - do not
   // let the LLM try to hallucinate a response or use general knowledge and exit early
   if (chatMode === "query" && contextTexts.length === 0) {
+    const textResponse =
+      workspace?.queryRefusalResponse ??
+      "There is no relevant information in this workspace to answer your query.";
     writeResponseChunk(response, {
       id: uuid,
       type: "textResponse",
-      textResponse:
-        workspace?.queryRefusalResponse ??
-        "There is no relevant information in this workspace to answer your query.",
+      textResponse,
       sources: [],
       close: true,
       error: null,
     });
+
+    await WorkspaceChats.new({
+      workspaceId: workspace.id,
+      prompt: message,
+      response: {
+        text: textResponse,
+        sources: [],
+        type: chatMode,
+      },
+      threadId: thread?.id || null,
+      include: false,
+      user,
+    });
     return;
   }
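
Note the ordering difference between the two paths: stream.js flushes the
refusal chunk to the client before awaiting the database write, so a slow
insert never delays the visible response, while index.js persists first
because nothing is returned until the function exits. With the
recordQueryRefusal helper sketched above, each streaming exit would reduce
to roughly the following; this variant awaits the write before flushing, so
it trades the diff's flush-first ordering for brevity:

const textResponse = await recordQueryRefusal({
  workspace,
  message,
  thread,
  user,
  chatMode,
});
writeResponseChunk(response, {
  id: uuid,
  type: "textResponse",
  textResponse,
  sources: [],
  close: true,
  error: null,
});
return;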