diff --git a/server/endpoints/api/index.js b/server/endpoints/api/index.js
index c5a2b8a8d32fd1f6dd190abad62d2f4665a3e7bb..fdf225b85323f198000c98693d33a404d4fed365 100644
--- a/server/endpoints/api/index.js
+++ b/server/endpoints/api/index.js
@@ -4,6 +4,7 @@ const { apiAuthEndpoints } = require("./auth");
 const { apiDocumentEndpoints } = require("./document");
 const { apiSystemEndpoints } = require("./system");
 const { apiWorkspaceEndpoints } = require("./workspace");
+const { apiWorkspaceThreadEndpoints } = require("./workspaceThread");
 const { apiUserManagementEndpoints } = require("./userManagement");
 
 // All endpoints must be documented and pass through the validApiKey Middleware.
@@ -17,6 +18,7 @@ function developerEndpoints(app, router) {
   apiSystemEndpoints(router);
   apiWorkspaceEndpoints(router);
   apiDocumentEndpoints(router);
+  apiWorkspaceThreadEndpoints(router);
   apiUserManagementEndpoints(router);
 }
 
diff --git a/server/endpoints/api/workspaceThread/index.js b/server/endpoints/api/workspaceThread/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..a8c859a80b878aed39d8f6743b424a1c7b8047c9
--- /dev/null
+++ b/server/endpoints/api/workspaceThread/index.js
@@ -0,0 +1,608 @@
+const { v4: uuidv4 } = require("uuid");
+const { WorkspaceThread } = require("../../../models/workspaceThread");
+const { Workspace } = require("../../../models/workspace");
+const { validApiKey } = require("../../../utils/middleware/validApiKey");
+const { reqBody, multiUserMode } = require("../../../utils/http");
+const { chatWithWorkspace } = require("../../../utils/chats");
+const {
+  streamChatWithWorkspace,
+  VALID_CHAT_MODE,
+} = require("../../../utils/chats/stream");
+const { Telemetry } = require("../../../models/telemetry");
+const { EventLogs } = require("../../../models/eventLogs");
+const {
+  writeResponseChunk,
+  convertToChatHistory,
+} = require("../../../utils/helpers/chat/responses");
+const { WorkspaceChats } = require("../../../models/workspaceChats");
+const { User } = require("../../../models/user");
+
+function apiWorkspaceThreadEndpoints(app) {
+  if (!app) return;
+
+  app.post(
+    "/v1/workspace/:slug/thread/new",
+    [validApiKey],
+    async (request, response) => {
+      /*
+      #swagger.tags = ['Workspace Threads']
+      #swagger.description = 'Create a new workspace thread'
+      #swagger.parameters['slug'] = {
+          in: 'path',
+          description: 'Unique slug of workspace',
+          required: true,
+          type: 'string'
+      }
+      #swagger.requestBody = {
+        description: 'Optional userId associated with the thread',
+        required: false,
+        type: 'object',
+        content: {
+          "application/json": {
+            example: {
+              userId: 1
+            }
+          }
+        }
+      }
+      #swagger.responses[200] = {
+        content: {
+          "application/json": {
+            schema: {
+              type: 'object',
+              example: {
+                thread: {
+                  "id": 1,
+                  "name": "Thread",
+                  "slug": "thread-uuid",
+                  "user_id": 1,
+                  "workspace_id": 1
+                },
+                message: null
+              }
+            }
+          }
+        }
+      }
+      #swagger.responses[403] = {
+        schema: {
+          "$ref": "#/definitions/InvalidAPIKey"
+        }
+      }
+      */
+      try {
+        const { slug } = request.params;
+        const { userId } = reqBody(request);
+        const workspace = await Workspace.get({ slug });
+
+        if (!workspace) {
+          response.sendStatus(400).end();
+          return;
+        }
+
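+        // Create the thread in this workspace, optionally owned by the provided user.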
+        const { thread, message } = await WorkspaceThread.new(
+          workspace,
+          userId ? Number(userId) : null
+        );
+
+        await Telemetry.sendTelemetry("workspace_thread_created", {
+          multiUserMode: multiUserMode(response),
+          LLMSelection: process.env.LLM_PROVIDER || "openai",
+          Embedder: process.env.EMBEDDING_ENGINE || "inherit",
+          VectorDbSelection: process.env.VECTOR_DB || "lancedb",
+        });
+        await EventLogs.logEvent("api_workspace_thread_created", {
+          workspaceName: workspace?.name || "Unknown Workspace",
+        });
+        response.status(200).json({ thread, message });
+      } catch (e) {
+        console.log(e.message, e);
+        response.sendStatus(500).end();
+      }
+    }
+  );
+
+  app.post(
+    "/v1/workspace/:slug/thread/:threadSlug/update",
+    [validApiKey],
+    async (request, response) => {
+      /*
+      #swagger.tags = ['Workspace Threads']
+      #swagger.description = 'Update thread name by its unique slug.'
+      #swagger.path = '/v1/workspace/{slug}/thread/{threadSlug}/update'
+      #swagger.parameters['slug'] = {
+          in: 'path',
+          description: 'Unique slug of workspace',
+          required: true,
+          type: 'string'
+      }
+      #swagger.parameters['threadSlug'] = {
+          in: 'path',
+          description: 'Unique slug of thread',
+          required: true,
+          type: 'string'
+      }
+      #swagger.requestBody = {
+        description: 'JSON object containing the new name for the thread.',
+        required: true,
+        type: 'object',
+        content: {
+          "application/json": {
+            example: {
+              "name": 'Updated Thread Name'
+            }
+          }
+        }
+      }
+      #swagger.responses[200] = {
+        content: {
+          "application/json": {
+            schema: {
+              type: 'object',
+              example: {
+                thread: {
+                  "id": 1,
+                  "name": "Updated Thread Name",
+                  "slug": "thread-uuid",
+                  "user_id": 1,
+                  "workspace_id": 1
+                },
+                message: null,
+              }
+            }
+          }
+        }
+      }
+      #swagger.responses[403] = {
+        schema: {
+          "$ref": "#/definitions/InvalidAPIKey"
+        }
+      }
+      */
+      try {
+        const { slug, threadSlug } = request.params;
+        const { name } = reqBody(request);
+        const workspace = await Workspace.get({ slug });
+        const thread = workspace
+          ? await WorkspaceThread.get({
+              slug: threadSlug,
+              workspace_id: workspace.id,
+            })
+          : null;
+
+        if (!workspace || !thread) {
+          response.sendStatus(400).end();
+          return;
+        }
+
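+        // Only the thread's name is updatable through this endpoint.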
+        const { thread: updatedThread, message } = await WorkspaceThread.update(
+          thread,
+          { name }
+        );
+        response.status(200).json({ thread: updatedThread, message });
+      } catch (e) {
+        console.log(e.message, e);
+        response.sendStatus(500).end();
+      }
+    }
+  );
+
+  app.delete(
+    "/v1/workspace/:slug/thread/:threadSlug",
+    [validApiKey],
+    async (request, response) => {
+      /*
+    #swagger.tags = ['Workspace Threads']
+    #swagger.description = 'Delete a workspace thread'
+    #swagger.parameters['slug'] = {
+        in: 'path',
+        description: 'Unique slug of workspace',
+        required: true,
+        type: 'string'
+    }
+    #swagger.parameters['threadSlug'] = {
+        in: 'path',
+        description: 'Unique slug of thread',
+        required: true,
+        type: 'string'
+    }
+    #swagger.responses[200] = {
+      description: 'Thread deleted successfully'
+    }
+    #swagger.responses[403] = {
+      schema: {
+        "$ref": "#/definitions/InvalidAPIKey"
+      }
+    }
+    */
+      try {
+        const { slug, threadSlug } = request.params;
+        const workspace = await Workspace.get({ slug });
+
+        if (!workspace) {
+          response.sendStatus(400).end();
+          return;
+        }
+
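+        // Scope the delete by workspace_id so only this workspace's thread can be removed.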
+        await WorkspaceThread.delete({
+          slug: threadSlug,
+          workspace_id: workspace.id,
+        });
+        response.sendStatus(200).end();
+      } catch (e) {
+        console.log(e.message, e);
+        response.sendStatus(500).end();
+      }
+    }
+  );
+
+  app.get(
+    "/v1/workspace/:slug/thread/:threadSlug/chats",
+    [validApiKey],
+    async (request, response) => {
+      /*
+      #swagger.tags = ['Workspace Threads']
+      #swagger.description = 'Get chats for a workspace thread'
+      #swagger.parameters['slug'] = {
+          in: 'path',
+          description: 'Unique slug of workspace',
+          required: true,
+          type: 'string'
+      }
+      #swagger.parameters['threadSlug'] = {
+          in: 'path',
+          description: 'Unique slug of thread',
+          required: true,
+          type: 'string'
+      }
+      #swagger.responses[200] = {
+        content: {
+          "application/json": {
+            schema: {
+              type: 'object',
+              example: {
+                history: [
+                  {
+                    "role": "user",
+                    "content": "What is AnythingLLM?",
+                    "sentAt": 1692851630
+                  },
+                  {
+                    "role": "assistant",
+                    "content": "AnythingLLM is a platform that allows you to convert notes, PDFs, and other source materials into a chatbot. It ensures privacy, cites its answers, and allows multiple people to interact with the same documents simultaneously. It is particularly useful for businesses to enhance the visibility and readability of various written communications such as SOPs, contracts, and sales calls. You can try it out with a free trial to see if it meets your business needs.",
+                    "sources": [{"source": "object about source document and snippets used"}]
+                  }
+                ]
+              }
+            }
+          }
+        }
+      }
+      #swagger.responses[403] = {
+        schema: {
+          "$ref": "#/definitions/InvalidAPIKey"
+        }
+      }
+      */
+      try {
+        const { slug, threadSlug } = request.params;
+        const workspace = await Workspace.get({ slug });
+        const thread = workspace
+          ? await WorkspaceThread.get({
+              slug: threadSlug,
+              workspace_id: workspace.id,
+            })
+          : null;
+
+        if (!workspace || !thread) {
+          response.sendStatus(400).end();
+          return;
+        }
+
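+        // Fetch only visible chats (include: true) for this thread, ordered oldest to newest.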
+        const history = await WorkspaceChats.where(
+          {
+            workspaceId: workspace.id,
+            thread_id: thread.id,
+            include: true,
+          },
+          null,
+          { id: "asc" }
+        );
+
+        response.status(200).json({ history: convertToChatHistory(history) });
+      } catch (e) {
+        console.log(e.message, e);
+        response.sendStatus(500).end();
+      }
+    }
+  );
+
+  app.post(
+    "/v1/workspace/:slug/thread/:threadSlug/chat",
+    [validApiKey],
+    async (request, response) => {
+      /*
+      #swagger.tags = ['Workspace Threads']
+      #swagger.description = 'Chat with a workspace thread'
+      #swagger.parameters['slug'] = {
+          in: 'path',
+          description: 'Unique slug of workspace',
+          required: true,
+          type: 'string'
+      }
+      #swagger.parameters['threadSlug'] = {
+          in: 'path',
+          description: 'Unique slug of thread',
+          required: true,
+          type: 'string'
+      }
+      #swagger.requestBody = {
+        description: 'Send a prompt to the workspace thread and the type of conversation (query or chat).',
+        required: true,
+        type: 'object',
+        content: {
+          "application/json": {
+            example: {
+              message: "What is AnythingLLM?",
+              mode: "query | chat",
+              userId: 1
+            }
+          }
+        }
+      }
+      #swagger.responses[200] = {
+        content: {
+          "application/json": {
+            schema: {
+              type: 'object',
+              example: {
+                id: 'chat-uuid',
+                type: "abort | textResponse",
+                textResponse: "Response to your query",
+                sources: [{title: "anythingllm.txt", chunk: "This is a context chunk used in the answer of the prompt by the LLM."}],
+                close: true,
+                error: "null | text string of the failure mode."
+              }
+            }
+          }
+        }
+      }
+      #swagger.responses[403] = {
+        schema: {
+          "$ref": "#/definitions/InvalidAPIKey"
+        }
+      }
+      */
+      try {
+        const { slug, threadSlug } = request.params;
+        const { message, mode = "query", userId } = reqBody(request);
+        const workspace = await Workspace.get({ slug });
+        const thread = workspace
+          ? await WorkspaceThread.get({
+              slug: threadSlug,
+              workspace_id: workspace.id,
+            })
+          : null;
+
+        if (!workspace || !thread) {
+          response.status(400).json({
+            id: uuidv4(),
+            type: "abort",
+            textResponse: null,
+            sources: [],
+            close: true,
+            error: `Workspace ${slug} or thread ${threadSlug} is not valid.`,
+          });
+          return;
+        }
+
+        if (!message?.length || !VALID_CHAT_MODE.includes(mode)) {
+          response.status(400).json({
+            id: uuidv4(),
+            type: "abort",
+            textResponse: null,
+            sources: [],
+            close: true,
+            error: !message?.length
+              ? "message parameter cannot be empty."
+              : `${mode} is not a valid mode.`,
+          });
+          return;
+        }
+
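+        // Optionally attribute the chat to an existing user when a userId is provided.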
+        const user = userId ? await User.get({ id: Number(userId) }) : null;
+        const result = await chatWithWorkspace(
+          workspace,
+          message,
+          mode,
+          user,
+          thread
+        );
+        await Telemetry.sendTelemetry("sent_chat", {
+          LLMSelection: process.env.LLM_PROVIDER || "openai",
+          Embedder: process.env.EMBEDDING_ENGINE || "inherit",
+          VectorDbSelection: process.env.VECTOR_DB || "lancedb",
+        });
+        await EventLogs.logEvent("api_sent_chat", {
+          workspaceName: workspace?.name,
+          chatModel: workspace?.chatModel || "System Default",
+          threadName: thread?.name,
+          userId: user?.id,
+        });
+        response.status(200).json({ ...result });
+      } catch (e) {
+        console.log(e.message, e);
+        response.status(500).json({
+          id: uuidv4(),
+          type: "abort",
+          textResponse: null,
+          sources: [],
+          close: true,
+          error: e.message,
+        });
+      }
+    }
+  );
+
+  app.post(
+    "/v1/workspace/:slug/thread/:threadSlug/stream-chat",
+    [validApiKey],
+    async (request, response) => {
+      /*
+      #swagger.tags = ['Workspace Threads']
+      #swagger.description = 'Stream chat with a workspace thread'
+      #swagger.parameters['slug'] = {
+          in: 'path',
+          description: 'Unique slug of workspace',
+          required: true,
+          type: 'string'
+      }
+      #swagger.parameters['threadSlug'] = {
+          in: 'path',
+          description: 'Unique slug of thread',
+          required: true,
+          type: 'string'
+      }
+      #swagger.requestBody = {
+        description: 'Send a prompt to the workspace thread and the type of conversation (query or chat).',
+        required: true,
+        type: 'object',
+        content: {
+          "application/json": {
+            example: {
+              message: "What is AnythingLLM?",
+              mode: "query | chat",
+              userId: 1
+            }
+          }
+        }
+      }
+      #swagger.responses[200] = {
+        content: {
+          "text/event-stream": {
+            schema: {
+              type: 'array',
+              example: [
+                {
+                  id: 'uuid-123',
+                  type: "abort | textResponseChunk",
+                  textResponse: "First chunk",
+                  sources: [],
+                  close: false,
+                  error: "null | text string of the failure mode."
+                },
+                {
+                  id: 'uuid-123',
+                  type: "abort | textResponseChunk",
+                  textResponse: "chunk two",
+                  sources: [],
+                  close: false,
+                  error: "null | text string of the failure mode."
+                },
+                {
+                  id: 'uuid-123',
+                  type: "abort | textResponseChunk",
+                  textResponse: "final chunk of LLM output!",
+                  sources: [{title: "anythingllm.txt", chunk: "This is a context chunk used in the answer of the prompt by the LLM. This will only return in the final chunk."}],
+                  close: true,
+                  error: "null | text string of the failure mode."
+                }
+              ]
+            }
+          }
+        }
+      }
+      #swagger.responses[403] = {
+        schema: {
+          "$ref": "#/definitions/InvalidAPIKey"
+        }
+      }
+      */
+      try {
+        const { slug, threadSlug } = request.params;
+        const { message, mode = "query", userId } = reqBody(request);
+        const workspace = await Workspace.get({ slug });
+        const thread = workspace
+          ? await WorkspaceThread.get({
+              slug: threadSlug,
+              workspace_id: workspace.id,
+            })
+          : null;
+
+        if (!workspace || !thread) {
+          response.status(400).json({
+            id: uuidv4(),
+            type: "abort",
+            textResponse: null,
+            sources: [],
+            close: true,
+            error: `Workspace ${slug} or thread ${threadSlug} is not valid.`,
+          });
+          return;
+        }
+
+        if (!message?.length || !VALID_CHAT_MODE.includes(mode)) {
+          response.status(400).json({
+            id: uuidv4(),
+            type: "abort",
+            textResponse: null,
+            sources: [],
+            close: true,
+            error: !message?.length
+              ? "message parameter cannot be empty."
+              : `${mode} is not a valid mode.`,
+          });
+          return;
+        }
+
+        const user = userId ? await User.get({ id: Number(userId) }) : null;
+
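+        // Switch the response into server-sent-events mode before streaming chunks back.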
+        response.setHeader("Cache-Control", "no-cache");
+        response.setHeader("Content-Type", "text/event-stream");
+        response.setHeader("Access-Control-Allow-Origin", "*");
+        response.setHeader("Connection", "keep-alive");
+        response.flushHeaders();
+
+        await streamChatWithWorkspace(
+          response,
+          workspace,
+          message,
+          mode,
+          user,
+          thread
+        );
+        await Telemetry.sendTelemetry("sent_chat", {
+          LLMSelection: process.env.LLM_PROVIDER || "openai",
+          Embedder: process.env.EMBEDDING_ENGINE || "inherit",
+          VectorDbSelection: process.env.VECTOR_DB || "lancedb",
+        });
+        await EventLogs.logEvent("api_sent_chat", {
+          workspaceName: workspace?.name,
+          chatModel: workspace?.chatModel || "System Default",
+          threadName: thread?.name,
+          userId: user?.id,
+        });
+        response.end();
+      } catch (e) {
+        console.log(e.message, e);
+        writeResponseChunk(response, {
+          id: uuidv4(),
+          type: "abort",
+          textResponse: null,
+          sources: [],
+          close: true,
+          error: e.message,
+        });
+        response.end();
+      }
+    }
+  );
+}
+
+module.exports = { apiWorkspaceThreadEndpoints };
diff --git a/server/swagger/init.js b/server/swagger/init.js
index 064814565a37f9524c4b7e3df0edfda855b4e8dc..31edcf1c47c72037947fddd4be4a7d398136c431 100644
--- a/server/swagger/init.js
+++ b/server/swagger/init.js
@@ -35,6 +35,7 @@ const endpointsFiles = [
   "../endpoints/api/document/index.js",
   "../endpoints/api/workspace/index.js",
   "../endpoints/api/system/index.js",
+  "../endpoints/api/workspaceThread/index.js",
   "../endpoints/api/userManagement/index.js",
 ];
 
diff --git a/server/swagger/openapi.json b/server/swagger/openapi.json
index 2a1b55437529eceaa8fd24df484420b3e9d82104..d27504aaebc12568bf62cb057a740421dd1a4e39 100644
--- a/server/swagger/openapi.json
+++ b/server/swagger/openapi.json
@@ -2371,6 +2371,487 @@
         }
       }
     },
+    "/v1/workspace/{slug}/thread/new": {
+      "post": {
+        "tags": [
+          "Workspace Threads"
+        ],
+        "description": "Create a new workspace thread",
+        "parameters": [
+          {
+            "name": "slug",
+            "in": "path",
+            "required": true,
+            "schema": {
+              "type": "string"
+            },
+            "description": "Unique slug of workspace"
+          }
+        ],
+        "responses": {
+          "200": {
+            "description": "OK",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "type": "object",
+                  "example": {
+                    "thread": {
+                      "id": 1,
+                      "name": "Thread",
+                      "slug": "thread-uuid",
+                      "user_id": 1,
+                      "workspace_id": 1
+                    },
+                    "message": null
+                  }
+                }
+              }
+            }
+          },
+          "400": {
+            "description": "Bad Request"
+          },
+          "403": {
+            "description": "Forbidden",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/InvalidAPIKey"
+                }
+              },
+              "application/xml": {
+                "schema": {
+                  "$ref": "#/components/schemas/InvalidAPIKey"
+                }
+              }
+            }
+          },
+          "500": {
+            "description": "Internal Server Error"
+          }
+        },
+        "requestBody": {
+          "description": "Optional userId associated with the thread",
+          "required": false,
+          "type": "object",
+          "content": {
+            "application/json": {
+              "example": {
+                "userId": 1
+              }
+            }
+          }
+        }
+      }
+    },
+    "/v1/workspace/{slug}/thread/{threadSlug}/update": {
+      "post": {
+        "tags": [
+          "Workspace Threads"
+        ],
+        "description": "Update thread name by its unique slug.",
+        "parameters": [
+          {
+            "name": "slug",
+            "in": "path",
+            "required": true,
+            "schema": {
+              "type": "string"
+            },
+            "description": "Unique slug of workspace"
+          },
+          {
+            "name": "threadSlug",
+            "in": "path",
+            "required": true,
+            "schema": {
+              "type": "string"
+            },
+            "description": "Unique slug of thread"
+          }
+        ],
+        "responses": {
+          "200": {
+            "description": "OK",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "type": "object",
+                  "example": {
+                    "thread": {
+                      "id": 1,
+                      "name": "Updated Thread Name",
+                      "slug": "thread-uuid",
+                      "user_id": 1,
+                      "workspace_id": 1
+                    },
+                    "message": null
+                  }
+                }
+              }
+            }
+          },
+          "400": {
+            "description": "Bad Request"
+          },
+          "403": {
+            "description": "Forbidden",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/InvalidAPIKey"
+                }
+              },
+              "application/xml": {
+                "schema": {
+                  "$ref": "#/components/schemas/InvalidAPIKey"
+                }
+              }
+            }
+          },
+          "500": {
+            "description": "Internal Server Error"
+          }
+        },
+        "requestBody": {
+          "description": "JSON object containing new name to update the thread.",
+          "required": true,
+          "type": "object",
+          "content": {
+            "application/json": {
+              "example": {
+                "name": "Updated Thread Name"
+              }
+            }
+          }
+        }
+      }
+    },
+    "/v1/workspace/{slug}/thread/{threadSlug}": {
+      "delete": {
+        "tags": [
+          "Workspace Threads"
+        ],
+        "description": "Delete a workspace thread",
+        "parameters": [
+          {
+            "name": "slug",
+            "in": "path",
+            "required": true,
+            "schema": {
+              "type": "string"
+            },
+            "description": "Unique slug of workspace"
+          },
+          {
+            "name": "threadSlug",
+            "in": "path",
+            "required": true,
+            "schema": {
+              "type": "string"
+            },
+            "description": "Unique slug of thread"
+          }
+        ],
+        "responses": {
+          "200": {
+            "description": "Thread deleted successfully"
+          },
+          "400": {
+            "description": "Bad Request"
+          },
+          "403": {
+            "description": "Forbidden",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/InvalidAPIKey"
+                }
+              },
+              "application/xml": {
+                "schema": {
+                  "$ref": "#/components/schemas/InvalidAPIKey"
+                }
+              }
+            }
+          },
+          "500": {
+            "description": "Internal Server Error"
+          }
+        }
+      }
+    },
+    "/v1/workspace/{slug}/thread/{threadSlug}/chats": {
+      "get": {
+        "tags": [
+          "Workspace Threads"
+        ],
+        "description": "Get chats for a workspace thread",
+        "parameters": [
+          {
+            "name": "slug",
+            "in": "path",
+            "required": true,
+            "schema": {
+              "type": "string"
+            },
+            "description": "Unique slug of workspace"
+          },
+          {
+            "name": "threadSlug",
+            "in": "path",
+            "required": true,
+            "schema": {
+              "type": "string"
+            },
+            "description": "Unique slug of thread"
+          }
+        ],
+        "responses": {
+          "200": {
+            "description": "OK",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "type": "object",
+                  "example": {
+                    "history": [
+                      {
+                        "role": "user",
+                        "content": "What is AnythingLLM?",
+                        "sentAt": 1692851630
+                      },
+                      {
+                        "role": "assistant",
+                        "content": "AnythingLLM is a platform that allows you to convert notes, PDFs, and other source materials into a chatbot. It ensures privacy, cites its answers, and allows multiple people to interact with the same documents simultaneously. It is particularly useful for businesses to enhance the visibility and readability of various written communications such as SOPs, contracts, and sales calls. You can try it out with a free trial to see if it meets your business needs.",
+                        "sources": [
+                          {
+                            "source": "object about source document and snippets used"
+                          }
+                        ]
+                      }
+                    ]
+                  }
+                }
+              }
+            }
+          },
+          "400": {
+            "description": "Bad Request"
+          },
+          "403": {
+            "description": "Forbidden",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/InvalidAPIKey"
+                }
+              },
+              "application/xml": {
+                "schema": {
+                  "$ref": "#/components/schemas/InvalidAPIKey"
+                }
+              }
+            }
+          },
+          "500": {
+            "description": "Internal Server Error"
+          }
+        }
+      }
+    },
+    "/v1/workspace/{slug}/thread/{threadSlug}/chat": {
+      "post": {
+        "tags": [
+          "Workspace Threads"
+        ],
+        "description": "Chat with a workspace thread",
+        "parameters": [
+          {
+            "name": "slug",
+            "in": "path",
+            "required": true,
+            "schema": {
+              "type": "string"
+            },
+            "description": "Unique slug of workspace"
+          },
+          {
+            "name": "threadSlug",
+            "in": "path",
+            "required": true,
+            "schema": {
+              "type": "string"
+            },
+            "description": "Unique slug of thread"
+          }
+        ],
+        "responses": {
+          "200": {
+            "description": "OK",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "type": "object",
+                  "example": {
+                    "id": "chat-uuid",
+                    "type": "abort | textResponse",
+                    "textResponse": "Response to your query",
+                    "sources": [
+                      {
+                        "title": "anythingllm.txt",
+                        "chunk": "This is a context chunk used in the answer of the prompt by the LLM."
+                      }
+                    ],
+                    "close": true,
+                    "error": "null | text string of the failure mode."
+                  }
+                }
+              }
+            }
+          },
+          "400": {
+            "description": "Bad Request"
+          },
+          "403": {
+            "description": "Forbidden",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/InvalidAPIKey"
+                }
+              },
+              "application/xml": {
+                "schema": {
+                  "$ref": "#/components/schemas/InvalidAPIKey"
+                }
+              }
+            }
+          },
+          "500": {
+            "description": "Internal Server Error"
+          }
+        },
+        "requestBody": {
+          "description": "Send a prompt to the workspace thread and the type of conversation (query or chat).",
+          "required": true,
+          "type": "object",
+          "content": {
+            "application/json": {
+              "example": {
+                "message": "What is AnythingLLM?",
+                "mode": "query | chat",
+                "userId": 1
+              }
+            }
+          }
+        }
+      }
+    },
+    "/v1/workspace/{slug}/thread/{threadSlug}/stream-chat": {
+      "post": {
+        "tags": [
+          "Workspace Threads"
+        ],
+        "description": "Stream chat with a workspace thread",
+        "parameters": [
+          {
+            "name": "slug",
+            "in": "path",
+            "required": true,
+            "schema": {
+              "type": "string"
+            },
+            "description": "Unique slug of workspace"
+          },
+          {
+            "name": "threadSlug",
+            "in": "path",
+            "required": true,
+            "schema": {
+              "type": "string"
+            },
+            "description": "Unique slug of thread"
+          }
+        ],
+        "responses": {
+          "200": {
+            "content": {
+              "text/event-stream": {
+                "schema": {
+                  "type": "array",
+                  "example": [
+                    {
+                      "id": "uuid-123",
+                      "type": "abort | textResponseChunk",
+                      "textResponse": "First chunk",
+                      "sources": [],
+                      "close": false,
+                      "error": "null | text string of the failure mode."
+                    },
+                    {
+                      "id": "uuid-123",
+                      "type": "abort | textResponseChunk",
+                      "textResponse": "chunk two",
+                      "sources": [],
+                      "close": false,
+                      "error": "null | text string of the failure mode."
+                    },
+                    {
+                      "id": "uuid-123",
+                      "type": "abort | textResponseChunk",
+                      "textResponse": "final chunk of LLM output!",
+                      "sources": [
+                        {
+                          "title": "anythingllm.txt",
+                          "chunk": "This is a context chunk used in the answer of the prompt by the LLM. This will only return in the final chunk."
+                        }
+                      ],
+                      "close": true,
+                      "error": "null | text string of the failure mode."
+                    }
+                  ]
+                }
+              }
+            },
+            "description": "OK"
+          },
+          "400": {
+            "description": "Bad Request"
+          },
+          "403": {
+            "description": "Forbidden",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/InvalidAPIKey"
+                }
+              },
+              "application/xml": {
+                "schema": {
+                  "$ref": "#/components/schemas/InvalidAPIKey"
+                }
+              }
+            }
+          }
+        },
+        "requestBody": {
+          "description": "Send a prompt to the workspace thread and the type of conversation (query or chat).",
+          "required": true,
+          "type": "object",
+          "content": {
+            "application/json": {
+              "example": {
+                "message": "What is AnythingLLM?",
+                "mode": "query | chat",
+                "userId": 1
+              }
+            }
+          }
+        }
+      }
+    },
     "/v1/users": {
       "get": {
         "tags": [