diff --git a/server/utils/chats/embed.js b/server/utils/chats/embed.js
index 533ea0c347318aeb68024d7600f278c2038123fc..98b096fb1c30c34d6ccb861da783764446401b00 100644
--- a/server/utils/chats/embed.js
+++ b/server/utils/chats/embed.js
@@ -131,7 +131,11 @@ async function streamChatWithForEmbed(
 
   // If in query mode and no sources are found, do not
   // let the LLM try to hallucinate a response or use general knowledge
-  if (chatMode === "query" && sources.length === 0) {
+  if (
+    chatMode === "query" &&
+    sources.length === 0 &&
+    pinnedDocIdentifiers.length === 0
+  ) {
     writeResponseChunk(response, {
       id: uuid,
       type: "textResponse",
diff --git a/server/utils/chats/index.js b/server/utils/chats/index.js
index 38ce6c9bd2694a9df06526072541463dd9bfb472..76f98e0df861ef04647dcff28b3e8b6972ec627d 100644
--- a/server/utils/chats/index.js
+++ b/server/utils/chats/index.js
@@ -140,9 +140,13 @@ async function chatWithWorkspace(
   contextTexts = [...contextTexts, ...vectorSearchResults.contextTexts];
   sources = [...sources, ...vectorSearchResults.sources];
 
-  // If in query mode and no sources are found, do not
+  // If in query mode, no sources were found from the vector search, and no documents are pinned, do not
   // let the LLM try to hallucinate a response or use general knowledge and exit early
-  if (chatMode === "query" && sources.length === 0) {
+  if (
+    chatMode === "query" &&
+    vectorSearchResults.sources.length === 0 &&
+    pinnedDocIdentifiers.length === 0
+  ) {
     return {
       id: uuid,
       type: "textResponse",
diff --git a/server/utils/chats/stream.js b/server/utils/chats/stream.js
index 57f3266645361554e93c5dc9867c54b76635fafb..ba4dea163f7e98462808a26bd753fbdb4efef384 100644
--- a/server/utils/chats/stream.js
+++ b/server/utils/chats/stream.js
@@ -160,9 +160,13 @@ async function streamChatWithWorkspace(
   contextTexts = [...contextTexts, ...vectorSearchResults.contextTexts];
   sources = [...sources, ...vectorSearchResults.sources];
 
-  // If in query mode and no sources are found, do not
+  // If in query mode, no sources were found, and no documents are pinned, do not
   // let the LLM try to hallucinate a response or use general knowledge and exit early
-  if (chatMode === "query" && sources.length === 0) {
+  if (
+    chatMode === "query" &&
+    sources.length === 0 &&
+    pinnedDocIdentifiers.length === 0
+  ) {
     writeResponseChunk(response, {
       id: uuid,
       type: "textResponse",