diff --git a/.github/workflows/dev-build.yaml b/.github/workflows/dev-build.yaml
index e81d99c58363337b62ac66b82b23091dacfd0218..a7632dfd04a59a5a62804dd4d94d26efe9423adb 100644
--- a/.github/workflows/dev-build.yaml
+++ b/.github/workflows/dev-build.yaml
@@ -6,7 +6,7 @@ concurrency:
 
 on:
   push:
-    branches: ['vex'] # put your current branch to create a build. Core team only.
+    branches: ['558-multi-modal-support'] # put your current branch to create a build. Core team only.
     paths-ignore:
       - '**.md'
       - 'cloud-deployments/*'
diff --git a/README.md b/README.md
index a0b41a7628215c9de04143cf66352b0ed0ea2daa..d7812265ddf971fe6455014adaf0560e09d43cb8 100644
--- a/README.md
+++ b/README.md
@@ -53,19 +53,19 @@ AnythingLLM is a full-stack application where you can use commercial off-the-she
 
 AnythingLLM divides your documents into objects called `workspaces`. A Workspace functions a lot like a thread, but with the addition of containerization of your documents. Workspaces can share documents, but they do not talk to each other so you can keep your context for each workspace clean.
 
-Some cool features of AnythingLLM
-
-- **Multi-user instance support and permissioning**
-- Agents inside your workspace (browse the web, run code, etc)
-- [Custom Embeddable Chat widget for your website](./embed/README.md)
-- Multiple document type support (PDF, TXT, DOCX, etc)
-- Manage documents in your vector database from a simple UI
-- Two chat modes `conversation` and `query`. Conversation retains previous questions and amendments. Query is simple QA against your documents
-- In-chat citations
+## Cool features of AnythingLLM
+
+- 🆕 **Multi-modal support (both closed and open-source LLMs!)**
+- 👤 Multi-user instance support and permissioning _Docker version only_
+- 🦾 Agents inside your workspace (browse the web, run code, etc)
+- 💬 [Custom Embeddable Chat widget for your website](./embed/README.md) _Docker version only_
+- 📖 Multiple document type support (PDF, TXT, DOCX, etc)
+- Simple chat UI with Drag-n-Drop functionality and clear citations.
 - 100% Cloud deployment ready.
-- "Bring your own LLM" model.
-- Extremely efficient cost-saving measures for managing very large documents. You'll never pay to embed a massive document or transcript more than once. 90% more cost effective than other document chatbot solutions.
+- Works with all popular [closed and open-source LLM providers](#supported-llms-embedder-models-speech-models-and-vector-databases).
+- Built-in cost & time-saving measures for managing very large documents compared to any other chat UI.
 - Full Developer API for custom integrations!
+- Much more...install and find out!
 
 ### Supported LLMs, Embedder Models, Speech models, and Vector Databases
 
diff --git a/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/HistoricalMessage/Actions/EditMessage/index.jsx b/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/HistoricalMessage/Actions/EditMessage/index.jsx
index 34a77dea8239afa6ee1fac77797a5898a0d13f7a..811de87d6391d4a54860f7be0570790ae86953d8 100644
--- a/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/HistoricalMessage/Actions/EditMessage/index.jsx
+++ b/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/HistoricalMessage/Actions/EditMessage/index.jsx
@@ -69,6 +69,7 @@ export function EditMessageForm({
   role,
   chatId,
   message,
+  attachments = [],
   adjustTextArea,
   saveChanges,
 }) {
@@ -77,15 +78,15 @@ export function EditMessageForm({
     e.preventDefault();
     const form = new FormData(e.target);
     const editedMessage = form.get("editedMessage");
-    saveChanges({ editedMessage, chatId, role });
+    saveChanges({ editedMessage, chatId, role, attachments });
     window.dispatchEvent(
-      new CustomEvent(EDIT_EVENT, { detail: { chatId, role } })
+      new CustomEvent(EDIT_EVENT, { detail: { chatId, role, attachments } })
     );
   }
 
   function cancelEdits() {
     window.dispatchEvent(
-      new CustomEvent(EDIT_EVENT, { detail: { chatId, role } })
+      new CustomEvent(EDIT_EVENT, { detail: { chatId, role, attachments } })
     );
     return false;
   }
diff --git a/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/HistoricalMessage/index.jsx b/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/HistoricalMessage/index.jsx
index 7446b166cf566186174e34f6cdd487ba45abb61d..f6920fa152c9d5a07a50a5efd48915e58400c55b 100644
--- a/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/HistoricalMessage/index.jsx
+++ b/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/HistoricalMessage/index.jsx
@@ -19,6 +19,7 @@ const HistoricalMessage = ({
   role,
   workspace,
   sources = [],
+  attachments = [],
   error = false,
   feedbackScore = null,
   chatId = null,
@@ -92,16 +93,20 @@ const HistoricalMessage = ({
               role={role}
               chatId={chatId}
               message={message}
+              attachments={attachments}
               adjustTextArea={adjustTextArea}
               saveChanges={saveEditedMessage}
             />
           ) : (
-            <span
-              className={`flex flex-col gap-y-1`}
-              dangerouslySetInnerHTML={{
-                __html: DOMPurify.sanitize(renderMarkdown(message)),
-              }}
-            />
+            <div>
+              <span
+                className={`flex flex-col gap-y-1`}
+                dangerouslySetInnerHTML={{
+                  __html: DOMPurify.sanitize(renderMarkdown(message)),
+                }}
+              />
+              <ChatAttachments attachments={attachments} />
+            </div>
           )}
         </div>
         <div className="flex gap-x-5 ml-14">
@@ -160,3 +165,18 @@ export default memo(
     );
   }
 );
+
+function ChatAttachments({ attachments = [] }) {
+  if (!attachments.length) return null;
+  return (
+    <div className="flex flex-wrap gap-2">
+      {attachments.map((item) => (
+        <img
+          key={item.name}
+          src={item.contentString}
+          className="max-w-[300px] rounded-md"
+        />
+      ))}
+    </div>
+  );
+}
diff --git a/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/index.jsx b/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/index.jsx
index 647d104f306a450efb470a7be8ebbf7a80429c4a..8a5697f94346da5f9e0de3ef4f195c768073f264 100644
--- a/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/index.jsx
+++ b/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/index.jsx
@@ -93,7 +93,12 @@ export default function ChatHistory({
     sendCommand(`${heading} ${message}`, true);
   };
 
-  const saveEditedMessage = async ({ editedMessage, chatId, role }) => {
+  const saveEditedMessage = async ({
+    editedMessage,
+    chatId,
+    role,
+    attachments = [],
+  }) => {
     if (!editedMessage) return; // Don't save empty edits.
 
     // if the edit was a user message, we will auto-regenerate the response and delete all
@@ -110,7 +115,7 @@ export default function ChatHistory({
       updatedHistory[updatedHistory.length - 1].content = editedMessage;
       // remove all edited messages after the edited message in backend
       await Workspace.deleteEditedChats(workspace.slug, threadSlug, chatId);
-      sendCommand(editedMessage, true, updatedHistory);
+      sendCommand(editedMessage, true, updatedHistory, attachments);
       return;
     }
 
@@ -228,6 +233,7 @@ export default function ChatHistory({
             feedbackScore={props.feedbackScore}
             chatId={props.chatId}
             error={props.error}
+            attachments={props.attachments}
             regenerateMessage={regenerateAssistantMessage}
             isLastMessage={isLastBotReply}
             saveEditedMessage={saveEditedMessage}
diff --git a/frontend/src/components/WorkspaceChat/ChatContainer/DnDWrapper/index.jsx b/frontend/src/components/WorkspaceChat/ChatContainer/DnDWrapper/index.jsx
index d7e4edb62810f1f71db51211d2f150b8a890609c..6d6f345350f0299c7803ec5dbe54e4a4561a5733 100644
--- a/frontend/src/components/WorkspaceChat/ChatContainer/DnDWrapper/index.jsx
+++ b/frontend/src/components/WorkspaceChat/ChatContainer/DnDWrapper/index.jsx
@@ -1,4 +1,4 @@
-import { useState, useEffect } from "react";
+import { useState, useEffect, createContext, useContext } from "react";
 import { v4 } from "uuid";
 import System from "@/models/system";
 import { useDropzone } from "react-dropzone";
@@ -6,6 +6,7 @@ import DndIcon from "./dnd-icon.png";
 import Workspace from "@/models/workspace";
 import useUser from "@/hooks/useUser";
 
+export const DndUploaderContext = createContext();
 export const REMOVE_ATTACHMENT_EVENT = "ATTACHMENT_REMOVE";
 export const CLEAR_ATTACHMENTS_EVENT = "ATTACHMENT_CLEAR";
 
@@ -14,13 +15,14 @@ export const CLEAR_ATTACHMENTS_EVENT = "ATTACHMENT_CLEAR";
  * @typedef Attachment
  * @property {string} uid - unique file id.
  * @property {File} file - native File object
+ * @property {string|null} contentString - base64 encoded string of file
  * @property {('in_progress'|'failed'|'success')} status - the automatic upload status.
  * @property {string|null} error - Error message
  * @property {{id:string, location:string}|null} document - uploaded document details
+ * @property {('attachment'|'upload')} type - The type of upload. Attachments are chat-specific, uploads go to the workspace.
  */
 
-export default function DnDFileUploaderWrapper({ workspace, children }) {
-  /** @type {[Attachment[], Function]} */
+export function DnDFileUploaderProvider({ workspace, children }) {
   const [files, setFiles] = useState([]);
   const [ready, setReady] = useState(false);
   const [dragging, setDragging] = useState(false);
@@ -49,7 +51,7 @@ export default function DnDFileUploaderWrapper({ workspace, children }) {
     /** @type {{uid: Attachment['uid'], document: Attachment['document']}} */
     const { uid, document } = event.detail;
     setFiles((prev) => prev.filter((prevFile) => prevFile.uid !== uid));
-    if (!document.location) return;
+    if (!document?.location) return;
     await Workspace.deleteAndUnembedFile(workspace.slug, document.location);
   }
 
@@ -60,20 +62,68 @@ export default function DnDFileUploaderWrapper({ workspace, children }) {
     setFiles([]);
   }
 
+  /**
+   * Turns files into attachments we can send as body request to backend
+   * for a chat.
+   * @returns {{name:string,mime:string,contentString:string}[]}
+   */
+  function parseAttachments() {
+    return (
+      files
+        ?.filter((file) => file.type === "attachment")
+        ?.map(
+          (
+            /** @type {Attachment} */
+            attachment
+          ) => {
+            return {
+              name: attachment.file.name,
+              mime: attachment.file.type,
+              contentString: attachment.contentString,
+            };
+          }
+        ) || []
+    );
+  }
+
+  /**
+   * Handle dropped files.
+   * @param {Attachment[]} acceptedFiles
+   * @param {any[]} _rejections
+   */
   async function onDrop(acceptedFiles, _rejections) {
     setDragging(false);
+
     /** @type {Attachment[]} */
-    const newAccepted = acceptedFiles.map((file) => {
-      return {
-        uid: v4(),
-        file,
-        status: "in_progress",
-        error: null,
-      };
-    });
+    const newAccepted = [];
+    for (const file of acceptedFiles) {
+      if (file.type.startsWith("image/")) {
+        newAccepted.push({
+          uid: v4(),
+          file,
+          contentString: await toBase64(file),
+          status: "success",
+          error: null,
+          type: "attachment",
+        });
+      } else {
+        newAccepted.push({
+          uid: v4(),
+          file,
+          contentString: null,
+          status: "in_progress",
+          error: null,
+          type: "upload",
+        });
+      }
+    }
+
     setFiles((prev) => [...prev, ...newAccepted]);
 
     for (const attachment of newAccepted) {
+      // Images/attachments are chat specific.
+      if (attachment.type === "attachment") continue;
+
       const formData = new FormData();
       formData.append("file", attachment.file, attachment.file.name);
       Workspace.uploadAndEmbedFile(workspace.slug, formData).then(
@@ -100,6 +150,18 @@ export default function DnDFileUploaderWrapper({ workspace, children }) {
     }
   }
 
+  return (
+    <DndUploaderContext.Provider
+      value={{ files, ready, dragging, setDragging, onDrop, parseAttachments }}
+    >
+      {children}
+    </DndUploaderContext.Provider>
+  );
+}
+
+export default function DnDFileUploaderWrapper({ children }) {
+  const { onDrop, ready, dragging, setDragging } =
+    useContext(DndUploaderContext);
   const { getRootProps, getInputProps } = useDropzone({
     onDrop,
     disabled: !ready,
@@ -129,8 +191,25 @@ export default function DnDFileUploaderWrapper({ workspace, children }) {
           </div>
         </div>
       </div>
-      <input {...getInputProps()} />
-      {children(files, setFiles)}
+      <input id="dnd-chat-file-uploader" {...getInputProps()} />
+      {children}
     </div>
   );
 }
+
+/**
+ * Convert image types into Base64 strings for requests.
+ * @param {File} file
+ * @returns {Promise<string>}
+ */
+async function toBase64(file) {
+  return new Promise((resolve, reject) => {
+    const reader = new FileReader();
+    reader.onload = () => {
+      const base64String = reader.result.split(",")[1];
+      resolve(`data:${file.type};base64,${base64String}`);
+    };
+    reader.onerror = (error) => reject(error);
+    reader.readAsDataURL(file);
+  });
+}
diff --git a/frontend/src/components/WorkspaceChat/ChatContainer/PromptInput/AttachItem/index.jsx b/frontend/src/components/WorkspaceChat/ChatContainer/PromptInput/AttachItem/index.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..74f22f90c500960db82e2c2efde3256866180d6b
--- /dev/null
+++ b/frontend/src/components/WorkspaceChat/ChatContainer/PromptInput/AttachItem/index.jsx
@@ -0,0 +1,34 @@
+import { PaperclipHorizontal } from "@phosphor-icons/react";
+import { Tooltip } from "react-tooltip";
+
+/**
+ * This is a simple proxy component that clicks on the DnD file uploader for the user.
+ * @returns
+ */
+export default function AttachItem() {
+  return (
+    <>
+      <button
+        id="attach-item-btn"
+        data-tooltip-id="attach-item-btn"
+        data-tooltip-content="Attach a file to this chat"
+        aria-label="Attach a file to this chat"
+        type="button"
+        onClick={(e) => {
+          e?.target?.blur();
+          document?.getElementById("dnd-chat-file-uploader")?.click();
+          return;
+        }}
+        className={`border-none relative flex justify-center items-center opacity-60 hover:opacity-100 cursor-pointer`}
+      >
+        <PaperclipHorizontal className="w-6 h-6 pointer-events-none text-white rotate-90 -scale-y-100" />
+        <Tooltip
+          id="attach-item-btn"
+          place="top"
+          delayShow={300}
+          className="tooltip !text-xs z-99"
+        />
+      </button>
+    </>
+  );
+}
diff --git a/frontend/src/components/WorkspaceChat/ChatContainer/PromptInput/Attachments/index.jsx b/frontend/src/components/WorkspaceChat/ChatContainer/PromptInput/Attachments/index.jsx
index 4d64ad53ec97fb949c40aea78120032485f91a74..f93ebebd25f4ef1814661dda6163ab4166b8f115 100644
--- a/frontend/src/components/WorkspaceChat/ChatContainer/PromptInput/Attachments/index.jsx
+++ b/frontend/src/components/WorkspaceChat/ChatContainer/PromptInput/Attachments/index.jsx
@@ -5,6 +5,7 @@ import {
   FileDoc,
   FileHtml,
   FileText,
+  FileImage,
   FilePdf,
   WarningOctagon,
   X,
@@ -32,7 +33,7 @@ export default function AttachmentManager({ attachments }) {
  * @param {{attachment: import("../../DnDWrapper").Attachment}}
  */
 function AttachmentItem({ attachment }) {
-  const { uid, file, status, error, document } = attachment;
+  const { uid, file, status, error, document, type } = attachment;
   const { iconBgColor, Icon } = displayFromFile(file);
 
   function removeFileFromQueue() {
@@ -106,6 +107,48 @@ function AttachmentItem({ attachment }) {
     );
   }
 
+  if (type === "attachment") {
+    return (
+      <>
+        <div
+          data-tooltip-id={`attachment-uid-${uid}-success`}
+          data-tooltip-content={`${file.name} will be attached to this prompt. It will not be embedded into the workspace permanently.`}
+          className={`relative h-14 px-2 py-2 flex items-center gap-x-4 rounded-lg bg-zinc-800 border border-white/20 w-[200px] group`}
+        >
+          <div className="invisible group-hover:visible absolute -top-[5px] -right-[5px] w-fit h-fit z-[10]">
+            <button
+              onClick={removeFileFromQueue}
+              type="button"
+              className="bg-zinc-700 hover:bg-red-400 rounded-full p-1 flex items-center justify-center hover:border-transparent border border-white/40"
+            >
+              <X
+                size={10}
+                className="flex-shrink-0 text-zinc-200 group-hover:text-white"
+              />
+            </button>
+          </div>
+          <div
+            className={`${iconBgColor} rounded-lg flex items-center justify-center flex-shrink-0 p-1`}
+          >
+            <Icon size={30} className="text-white" />
+          </div>
+          <div className="flex flex-col w-[130px]">
+            <p className="text-white text-xs font-medium truncate">
+              {file.name}
+            </p>
+            <p className="text-white/80 text-xs font-medium">Image attached!</p>
+          </div>
+        </div>
+        <Tooltip
+          id={`attachment-uid-${uid}-success`}
+          place="top"
+          delayShow={300}
+          className="allm-tooltip !allm-text-xs"
+        />
+      </>
+    );
+  }
+
   return (
     <>
       <div
@@ -170,6 +213,10 @@ function displayFromFile(file) {
     case "c":
     case "c":
       return { iconBgColor: "bg-warn", Icon: FileCode };
+    case "png":
+    case "jpg":
+    case "jpeg":
+      return { iconBgColor: "bg-royalblue", Icon: FileImage };
     default:
       return { iconBgColor: "bg-royalblue", Icon: FileText };
   }
diff --git a/frontend/src/components/WorkspaceChat/ChatContainer/PromptInput/index.jsx b/frontend/src/components/WorkspaceChat/ChatContainer/PromptInput/index.jsx
index 253f158f58deee8d998c416a60e0acf6cb0a609b..031d71ae1655222b3ac1e7251d23ce355a84bcbc 100644
--- a/frontend/src/components/WorkspaceChat/ChatContainer/PromptInput/index.jsx
+++ b/frontend/src/components/WorkspaceChat/ChatContainer/PromptInput/index.jsx
@@ -14,6 +14,7 @@ import TextSizeButton from "./TextSizeMenu";
 import SpeechToText from "./SpeechToText";
 import { Tooltip } from "react-tooltip";
 import AttachmentManager from "./Attachments";
+import AttachItem from "./AttachItem";
 
 export const PROMPT_INPUT_EVENT = "set_prompt_input";
 export default function PromptInput({
@@ -161,6 +162,7 @@ export default function PromptInput({
             </div>
             <div className="flex justify-between py-3.5">
               <div className="flex gap-x-2">
+                <AttachItem />
                 <SlashCommandsButton
                   showing={showSlashCommand}
                   setShowSlashCommand={setShowSlashCommand}
diff --git a/frontend/src/components/WorkspaceChat/ChatContainer/index.jsx b/frontend/src/components/WorkspaceChat/ChatContainer/index.jsx
index 2d6be099b83f1ece56f2a46ef856d4b407f13930..d42ace39a3b69ce28d8de18a3905c81e4e075051 100644
--- a/frontend/src/components/WorkspaceChat/ChatContainer/index.jsx
+++ b/frontend/src/components/WorkspaceChat/ChatContainer/index.jsx
@@ -1,6 +1,6 @@
-import { useState, useEffect } from "react";
+import { useState, useEffect, useContext } from "react";
 import ChatHistory from "./ChatHistory";
-import DnDFileUploadWrapper, { CLEAR_ATTACHMENTS_EVENT } from "./DnDWrapper";
+import { CLEAR_ATTACHMENTS_EVENT, DndUploaderContext } from "./DnDWrapper";
 import PromptInput, { PROMPT_INPUT_EVENT } from "./PromptInput";
 import Workspace from "@/models/workspace";
 import handleChat, { ABORT_STREAM_EVENT } from "@/utils/chat";
@@ -13,6 +13,7 @@ import handleSocketResponse, {
   AGENT_SESSION_END,
   AGENT_SESSION_START,
 } from "@/utils/chat/agent";
+import DnDFileUploaderWrapper from "./DnDWrapper";
 
 export default function ChatContainer({ workspace, knownHistory = [] }) {
   const { threadSlug = null } = useParams();
@@ -21,6 +22,7 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
   const [chatHistory, setChatHistory] = useState(knownHistory);
   const [socketId, setSocketId] = useState(null);
   const [websocket, setWebsocket] = useState(null);
+  const { files, parseAttachments } = useContext(DndUploaderContext);
 
   // Maintain state of message from whatever is in PromptInput
   const handleMessageChange = (event) => {
@@ -41,7 +43,11 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
     if (!message || message === "") return false;
     const prevChatHistory = [
       ...chatHistory,
-      { content: message, role: "user" },
+      {
+        content: message,
+        role: "user",
+        attachments: parseAttachments(),
+      },
       {
         content: "",
         role: "assistant",
@@ -60,11 +66,23 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
     const updatedHistory = chatHistory.slice(0, -1);
     const lastUserMessage = updatedHistory.slice(-1)[0];
     Workspace.deleteChats(workspace.slug, [chatId])
-      .then(() => sendCommand(lastUserMessage.content, true, updatedHistory))
+      .then(() =>
+        sendCommand(
+          lastUserMessage.content,
+          true,
+          updatedHistory,
+          lastUserMessage?.attachments
+        )
+      )
       .catch((e) => console.error(e));
   };
 
-  const sendCommand = async (command, submit = false, history = []) => {
+  const sendCommand = async (
+    command,
+    submit = false,
+    history = [],
+    attachments = []
+  ) => {
     if (!command || command === "") return false;
     if (!submit) {
       setMessageEmit(command);
@@ -81,13 +99,18 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
           role: "assistant",
           pending: true,
           userMessage: command,
+          attachments,
           animate: true,
         },
       ];
     } else {
       prevChatHistory = [
         ...chatHistory,
-        { content: command, role: "user" },
+        {
+          content: command,
+          role: "user",
+          attachments,
+        },
         {
           content: "",
           role: "assistant",
@@ -123,7 +146,12 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
       }
 
       if (!promptMessage || !promptMessage?.userMessage) return false;
+
+      // If running and edit or regeneration, this history will already have attachments
+      // so no need to parse the current state.
+      const attachments = promptMessage?.attachments ?? parseAttachments();
       window.dispatchEvent(new CustomEvent(CLEAR_ATTACHMENTS_EVENT));
+
       await Workspace.multiplexStream({
         workspaceSlug: workspace.slug,
         threadSlug,
@@ -137,6 +165,7 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
             _chatHistory,
             setSocketId
           ),
+        attachments,
       });
       return;
     }
@@ -218,31 +247,27 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
   return (
     <div
       style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
-      className="transition-all duration-500 relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll border-2 border-outline"
+      className="transition-all duration-500 relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll border-2 border-outline no-scroll"
     >
       {isMobile && <SidebarMobileHeader />}
-      <DnDFileUploadWrapper workspace={workspace}>
-        {(files) => (
-          <>
-            <ChatHistory
-              history={chatHistory}
-              workspace={workspace}
-              sendCommand={sendCommand}
-              updateHistory={setChatHistory}
-              regenerateAssistantMessage={regenerateAssistantMessage}
-              hasAttachments={files.length > 0}
-            />
-            <PromptInput
-              submit={handleSubmit}
-              onChange={handleMessageChange}
-              inputDisabled={loadingResponse}
-              buttonDisabled={loadingResponse}
-              sendCommand={sendCommand}
-              attachments={files}
-            />
-          </>
-        )}
-      </DnDFileUploadWrapper>
+      <DnDFileUploaderWrapper>
+        <ChatHistory
+          history={chatHistory}
+          workspace={workspace}
+          sendCommand={sendCommand}
+          updateHistory={setChatHistory}
+          regenerateAssistantMessage={regenerateAssistantMessage}
+          hasAttachments={files.length > 0}
+        />
+        <PromptInput
+          submit={handleSubmit}
+          onChange={handleMessageChange}
+          inputDisabled={loadingResponse}
+          buttonDisabled={loadingResponse}
+          sendCommand={sendCommand}
+          attachments={files}
+        />
+      </DnDFileUploaderWrapper>
     </div>
   );
 }
diff --git a/frontend/src/components/WorkspaceChat/index.jsx b/frontend/src/components/WorkspaceChat/index.jsx
index dec4c541f9e230796059166f2f6da58d2feb386b..c37ecb0c84e60ac42811889d31f7cc790658c4fe 100644
--- a/frontend/src/components/WorkspaceChat/index.jsx
+++ b/frontend/src/components/WorkspaceChat/index.jsx
@@ -5,6 +5,9 @@ import ChatContainer from "./ChatContainer";
 import paths from "@/utils/paths";
 import ModalWrapper from "../ModalWrapper";
 import { useParams } from "react-router-dom";
+import {
+  DnDFileUploaderProvider,
+} from "./ChatContainer/DnDWrapper";
 
 export default function WorkspaceChat({ loading, workspace }) {
   const { threadSlug = null } = useParams();
@@ -62,7 +65,11 @@ export default function WorkspaceChat({ loading, workspace }) {
   }
 
   setEventDelegatorForCodeSnippets();
-  return <ChatContainer workspace={workspace} knownHistory={history} />;
+  return (
+    <DnDFileUploaderProvider workspace={workspace}>
+      <ChatContainer workspace={workspace} knownHistory={history} />
+    </DnDFileUploaderProvider>
+  );
 }
 
 // Enables us to safely markdown and sanitize all responses without risk of injection
diff --git a/frontend/src/models/workspace.js b/frontend/src/models/workspace.js
index c45502580084a2e5f5a6ba3203ec7046b7d35b06..c27c087804ca1bdc344e358c782c3827075cefc0 100644
--- a/frontend/src/models/workspace.js
+++ b/frontend/src/models/workspace.js
@@ -115,16 +115,23 @@ const Workspace = {
     threadSlug = null,
     prompt,
     chatHandler,
+    attachments = [],
   }) {
     if (!!threadSlug)
       return this.threads.streamChat(
         { workspaceSlug, threadSlug },
         prompt,
-        chatHandler
+        chatHandler,
+        attachments
       );
-    return this.streamChat({ slug: workspaceSlug }, prompt, chatHandler);
+    return this.streamChat(
+      { slug: workspaceSlug },
+      prompt,
+      chatHandler,
+      attachments
+    );
   },
-  streamChat: async function ({ slug }, message, handleChat) {
+  streamChat: async function ({ slug }, message, handleChat, attachments = []) {
     const ctrl = new AbortController();
 
     // Listen for the ABORT_STREAM_EVENT key to be emitted by the client
@@ -138,7 +145,7 @@ const Workspace = {
 
     await fetchEventSource(`${API_BASE}/workspace/${slug}/stream-chat`, {
       method: "POST",
-      body: JSON.stringify({ message }),
+      body: JSON.stringify({ message, attachments }),
       headers: baseHeaders(),
       signal: ctrl.signal,
       openWhenHidden: true,
diff --git a/frontend/src/models/workspaceThread.js b/frontend/src/models/workspaceThread.js
index a73006c99ec3742bdcb214b3eb261a45b2d0a625..8a200f24c7b2bdff79591fd5f934011b3d3a2d51 100644
--- a/frontend/src/models/workspaceThread.js
+++ b/frontend/src/models/workspaceThread.js
@@ -90,7 +90,8 @@ const WorkspaceThread = {
   streamChat: async function (
     { workspaceSlug, threadSlug },
     message,
-    handleChat
+    handleChat,
+    attachments = []
   ) {
     const ctrl = new AbortController();
 
@@ -107,7 +108,7 @@ const WorkspaceThread = {
       `${API_BASE}/workspace/${workspaceSlug}/thread/${threadSlug}/stream-chat`,
       {
         method: "POST",
-        body: JSON.stringify({ message }),
+        body: JSON.stringify({ message, attachments }),
         headers: baseHeaders(),
         signal: ctrl.signal,
         openWhenHidden: true,
diff --git a/server/endpoints/chat.js b/server/endpoints/chat.js
index c7e70265fcc01731cf5e353359440d87761f6e87..787aba574b0c759ab6c16d2bdfa4a9759833923c 100644
--- a/server/endpoints/chat.js
+++ b/server/endpoints/chat.js
@@ -27,7 +27,7 @@ function chatEndpoints(app) {
     async (request, response) => {
       try {
         const user = await userFromSession(request, response);
-        const { message } = reqBody(request);
+        const { message, attachments = [] } = reqBody(request);
         const workspace = response.locals.workspace;
 
         if (!message?.length) {
@@ -88,13 +88,16 @@ function chatEndpoints(app) {
           workspace,
           message,
           workspace?.chatMode,
-          user
+          user,
+          null,
+          attachments
         );
         await Telemetry.sendTelemetry("sent_chat", {
           multiUserMode: multiUserMode(response),
           LLMSelection: process.env.LLM_PROVIDER || "openai",
           Embedder: process.env.EMBEDDING_ENGINE || "inherit",
           VectorDbSelection: process.env.VECTOR_DB || "lancedb",
+          multiModal: Array.isArray(attachments) && attachments?.length !== 0,
         });
 
         await EventLogs.logEvent(
@@ -131,7 +134,7 @@ function chatEndpoints(app) {
     async (request, response) => {
       try {
         const user = await userFromSession(request, response);
-        const { message } = reqBody(request);
+        const { message, attachments = [] } = reqBody(request);
         const workspace = response.locals.workspace;
         const thread = response.locals.thread;
 
@@ -196,7 +199,8 @@ function chatEndpoints(app) {
           message,
           workspace?.chatMode,
           user,
-          thread
+          thread,
+          attachments
         );
 
         // If thread was renamed emit event to frontend via special `action` response.
@@ -221,6 +225,7 @@ function chatEndpoints(app) {
           LLMSelection: process.env.LLM_PROVIDER || "openai",
           Embedder: process.env.EMBEDDING_ENGINE || "inherit",
           VectorDbSelection: process.env.VECTOR_DB || "lancedb",
+          multiModal: Array.isArray(attachments) && attachments?.length !== 0,
         });
 
         await EventLogs.logEvent(
diff --git a/server/utils/AiProviders/anthropic/index.js b/server/utils/AiProviders/anthropic/index.js
index ad3694d8897c42cc35cd96e2e12ba176088ad42e..5702fc839ceecf7309a0cb44f9fea9aeb0932293 100644
--- a/server/utils/AiProviders/anthropic/index.js
+++ b/server/utils/AiProviders/anthropic/index.js
@@ -66,18 +66,50 @@ class AnthropicLLM {
     return validModels.includes(modelName);
   }
 
+  /**
+   * Generates appropriate content array for a message + attachments.
+   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
+   * @returns {string|object[]}
+   */
+  #generateContent({ userPrompt, attachments = [] }) {
+    if (!attachments.length) {
+      return userPrompt;
+    }
+
+    const content = [{ type: "text", text: userPrompt }];
+    for (let attachment of attachments) {
+      content.push({
+        type: "image",
+        source: {
+          type: "base64",
+          media_type: attachment.mime,
+          data: attachment.contentString.split("base64,")[1],
+        },
+      });
+    }
+    return content.flat();
+  }
+
   constructPrompt({
     systemPrompt = "",
     contextTexts = [],
     chatHistory = [],
     userPrompt = "",
+    attachments = [], // Attachments passed with this specific prompt only
   }) {
     const prompt = {
       role: "system",
       content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
     };
 
-    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
+    return [
+      prompt,
+      ...chatHistory,
+      {
+        role: "user",
+        content: this.#generateContent({ userPrompt, attachments }),
+      },
+    ];
   }
 
   async getChatCompletion(messages = null, { temperature = 0.7 }) {
diff --git a/server/utils/AiProviders/bedrock/index.js b/server/utils/AiProviders/bedrock/index.js
index 2ea439ae62ffdba723f0b39d4a7124fc488b33b7..f579c03310a2244012c0dbe8b241eeb92ca40554 100644
--- a/server/utils/AiProviders/bedrock/index.js
+++ b/server/utils/AiProviders/bedrock/index.js
@@ -95,21 +95,60 @@ class AWSBedrockLLM {
     return true;
   }
 
+  /**
+   * Generates appropriate content array for a message + attachments.
+   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
+   * @returns {{content: string|object[]}}
+   */
+  #generateContent({ userPrompt, attachments = [] }) {
+    if (!attachments.length) {
+      return { content: userPrompt };
+    }
+
+    const content = [{ type: "text", text: userPrompt }];
+    for (let attachment of attachments) {
+      content.push({
+        type: "image_url",
+        image_url: attachment.contentString,
+      });
+    }
+    return { content: content.flat() };
+  }
+
+  /**
+   * Construct the user prompt for this model.
+   * @param {{attachments: import("../../helpers").Attachment[]}} param0
+   * @returns {object[]} Ordered message array for the chat completion.
+   */
   constructPrompt({
     systemPrompt = "",
     contextTexts = [],
     chatHistory = [],
     userPrompt = "",
+    attachments = [],
   }) {
     // AWS Mistral models do not support system prompts
     if (this.model.startsWith("mistral"))
-      return [...chatHistory, { role: "user", content: userPrompt }];
+      return [
+        ...chatHistory,
+        {
+          role: "user",
+          ...this.#generateContent({ userPrompt, attachments }),
+        },
+      ];
 
     const prompt = {
       role: "system",
       content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
     };
-    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
+    return [
+      prompt,
+      ...chatHistory,
+      {
+        role: "user",
+        ...this.#generateContent({ userPrompt, attachments }),
+      },
+    ];
   }
 
   async getChatCompletion(messages = null, { temperature = 0.7 }) {
diff --git a/server/utils/AiProviders/gemini/index.js b/server/utils/AiProviders/gemini/index.js
index e49f6b8b98e6df10f77d4bb5ea9c521b54212596..f2a948428cfcdb52b32e60b92a37a3b3556a3869 100644
--- a/server/utils/AiProviders/gemini/index.js
+++ b/server/utils/AiProviders/gemini/index.js
@@ -112,11 +112,34 @@ class GeminiLLM {
     return validModels.includes(modelName);
   }
 
+  /**
+   * Generates appropriate content array for a message + attachments.
+   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
+   * @returns {string|object[]}
+   */
+  #generateContent({ userPrompt, attachments = [] }) {
+    if (!attachments.length) {
+      return userPrompt;
+    }
+
+    const content = [{ text: userPrompt }];
+    for (let attachment of attachments) {
+      content.push({
+        inlineData: {
+          data: attachment.contentString.split("base64,")[1],
+          mimeType: attachment.mime,
+        },
+      });
+    }
+    return content.flat();
+  }
+
   constructPrompt({
     systemPrompt = "",
     contextTexts = [],
     chatHistory = [],
     userPrompt = "",
+    attachments = [],
   }) {
     const prompt = {
       role: "system",
@@ -126,7 +149,10 @@ class GeminiLLM {
       prompt,
       { role: "assistant", content: "Okay." },
       ...chatHistory,
-      { role: "USER_PROMPT", content: userPrompt },
+      {
+        role: "USER_PROMPT",
+        content: this.#generateContent({ userPrompt, attachments }),
+      },
     ];
   }
 
diff --git a/server/utils/AiProviders/koboldCPP/index.js b/server/utils/AiProviders/koboldCPP/index.js
index f29b65879f59b6f1bd645e226260130526e748bc..5c67103d303520de1a8471bcb10bdb3de1edce5e 100644
--- a/server/utils/AiProviders/koboldCPP/index.js
+++ b/server/utils/AiProviders/koboldCPP/index.js
@@ -66,17 +66,52 @@ class KoboldCPPLLM {
     return true;
   }
 
+  /**
+   * Generates appropriate content array for a message + attachments.
+   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
+   * @returns {string|object[]}
+   */
+  #generateContent({ userPrompt, attachments = [] }) {
+    if (!attachments.length) {
+      return userPrompt;
+    }
+
+    const content = [{ type: "text", text: userPrompt }];
+    for (let attachment of attachments) {
+      content.push({
+        type: "image_url",
+        image_url: {
+          url: attachment.contentString,
+        },
+      });
+    }
+    return content.flat();
+  }
+
+  /**
+   * Construct the user prompt for this model.
+   * @param {{attachments: import("../../helpers").Attachment[]}} param0
+   * @returns {object[]} Ordered message array for the chat completion.
+   */
   constructPrompt({
     systemPrompt = "",
     contextTexts = [],
     chatHistory = [],
     userPrompt = "",
+    attachments = [],
   }) {
     const prompt = {
       role: "system",
       content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
     };
-    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
+    return [
+      prompt,
+      ...chatHistory,
+      {
+        role: "user",
+        content: this.#generateContent({ userPrompt, attachments }),
+      },
+    ];
   }
 
   async getChatCompletion(messages = null, { temperature = 0.7 }) {
diff --git a/server/utils/AiProviders/liteLLM/index.js b/server/utils/AiProviders/liteLLM/index.js
index 884049876947cc21821f65cc6ffad6ce5a3aa686..897a484dd19628cf5965bb7f937c2f42043d3165 100644
--- a/server/utils/AiProviders/liteLLM/index.js
+++ b/server/utils/AiProviders/liteLLM/index.js
@@ -65,17 +65,52 @@ class LiteLLM {
     return true;
   }
 
+  /**
+   * Generates appropriate content array for a message + attachments.
+   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
+   * @returns {string|object[]}
+   */
+  #generateContent({ userPrompt, attachments = [] }) {
+    if (!attachments.length) {
+      return userPrompt;
+    }
+
+    const content = [{ type: "text", text: userPrompt }];
+    for (let attachment of attachments) {
+      content.push({
+        type: "image_url",
+        image_url: {
+          url: attachment.contentString,
+        },
+      });
+    }
+    return content.flat();
+  }
+
+  /**
+   * Construct the user prompt for this model.
+   * @param {{attachments: import("../../helpers").Attachment[]}} param0
+   * @returns {object[]} Ordered message array for the chat completion.
+   */
   constructPrompt({
     systemPrompt = "",
     contextTexts = [],
     chatHistory = [],
     userPrompt = "",
+    attachments = [],
   }) {
     const prompt = {
       role: "system",
       content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
     };
-    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
+    return [
+      prompt,
+      ...chatHistory,
+      {
+        role: "user",
+        content: this.#generateContent({ userPrompt, attachments }),
+      },
+    ];
   }
 
   async getChatCompletion(messages = null, { temperature = 0.7 }) {
diff --git a/server/utils/AiProviders/lmStudio/index.js b/server/utils/AiProviders/lmStudio/index.js
index bdafd0d639fcccf8b58f22b797a0cb86d55b09e9..6ff025884c73736a3cd122a1b96e2ec2ddd27b41 100644
--- a/server/utils/AiProviders/lmStudio/index.js
+++ b/server/utils/AiProviders/lmStudio/index.js
@@ -63,17 +63,53 @@ class LMStudioLLM {
     return true;
   }
 
+  /**
+   * Generates appropriate content array for a message + attachments.
+   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
+   * @returns {string|object[]}
+   */
+  #generateContent({ userPrompt, attachments = [] }) {
+    if (!attachments.length) {
+      return userPrompt;
+    }
+
+    const content = [{ type: "text", text: userPrompt }];
+    for (let attachment of attachments) {
+      content.push({
+        type: "image_url",
+        image_url: {
+          url: attachment.contentString,
+          detail: "auto",
+        },
+      });
+    }
+    return content.flat();
+  }
+
+  /**
+   * Construct the user prompt for this model.
+   * @param {{attachments: import("../../helpers").Attachment[]}} param0
+   * @returns {object[]} Ordered message array for the chat completion.
+   */
   constructPrompt({
     systemPrompt = "",
     contextTexts = [],
     chatHistory = [],
     userPrompt = "",
+    attachments = [],
   }) {
     const prompt = {
       role: "system",
       content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
     };
-    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
+    return [
+      prompt,
+      ...chatHistory,
+      {
+        role: "user",
+        content: this.#generateContent({ userPrompt, attachments }),
+      },
+    ];
   }
 
   async getChatCompletion(messages = null, { temperature = 0.7 }) {
diff --git a/server/utils/AiProviders/localAi/index.js b/server/utils/AiProviders/localAi/index.js
index be3f2516f582804bd32f04af4df90851856504a5..2275e1e8ded96b82afc531d780c2ee5922de5fcb 100644
--- a/server/utils/AiProviders/localAi/index.js
+++ b/server/utils/AiProviders/localAi/index.js
@@ -53,17 +53,52 @@ class LocalAiLLM {
     return true;
   }
 
+  /**
+   * Generates appropriate content array for a message + attachments.
+   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
+   * @returns {string|object[]}
+   */
+  #generateContent({ userPrompt, attachments = [] }) {
+    if (!attachments.length) {
+      return userPrompt;
+    }
+
+    const content = [{ type: "text", text: userPrompt }];
+    for (let attachment of attachments) {
+      content.push({
+        type: "image_url",
+        image_url: {
+          url: attachment.contentString,
+        },
+      });
+    }
+    return content.flat();
+  }
+
+  /**
+   * Construct the user prompt for this model.
+   * @param {{attachments: import("../../helpers").Attachment[]}} param0
+   * @returns {object[]} Ordered message array for the chat completion.
+   */
   constructPrompt({
     systemPrompt = "",
     contextTexts = [],
     chatHistory = [],
     userPrompt = "",
+    attachments = [],
   }) {
     const prompt = {
       role: "system",
       content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
     };
-    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
+    return [
+      prompt,
+      ...chatHistory,
+      {
+        role: "user",
+        content: this.#generateContent({ userPrompt, attachments }),
+      },
+    ];
   }
 
   async getChatCompletion(messages = null, { temperature = 0.7 }) {
diff --git a/server/utils/AiProviders/ollama/index.js b/server/utils/AiProviders/ollama/index.js
index 174670f2c79827f6f978a5a941928b107883c3e2..a4e99f7883ab79e3d2705ef2b214cc85fc19bb24 100644
--- a/server/utils/AiProviders/ollama/index.js
+++ b/server/utils/AiProviders/ollama/index.js
@@ -90,17 +90,50 @@ class OllamaAILLM {
     return true;
   }
 
+  /**
+   * Generates appropriate content array for a message + attachments.
+   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
+   * @returns {{content: string|object[]}}
+   */
+  #generateContent({ userPrompt, attachments = [] }) {
+    if (!attachments.length) {
+      return { content: userPrompt };
+    }
+
+    const content = [{ type: "text", text: userPrompt }];
+    for (let attachment of attachments) {
+      content.push({
+        type: "image_url",
+        image_url: attachment.contentString,
+      });
+    }
+    return { content: content.flat() };
+  }
+
+  /**
+   * Construct the user prompt for this model.
+   * @param {{attachments: import("../../helpers").Attachment[]}} param0
+   * @returns {object[]} Ordered message array for the chat completion.
+   */
   constructPrompt({
     systemPrompt = "",
     contextTexts = [],
     chatHistory = [],
     userPrompt = "",
+    attachments = [],
   }) {
     const prompt = {
       role: "system",
       content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
     };
-    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
+    return [
+      prompt,
+      ...chatHistory,
+      {
+        role: "user",
+        ...this.#generateContent({ userPrompt, attachments }),
+      },
+    ];
   }
 
   async getChatCompletion(messages = null, { temperature = 0.7 }) {
diff --git a/server/utils/AiProviders/openAi/index.js b/server/utils/AiProviders/openAi/index.js
index 1c79aac127d17de69deb5cff65ab5813cb70434e..c4cf11f16830eeb2e87f0b7ae057fef63f37a266 100644
--- a/server/utils/AiProviders/openAi/index.js
+++ b/server/utils/AiProviders/openAi/index.js
@@ -73,17 +73,53 @@ class OpenAiLLM {
     return !!model;
   }
 
+  /**
+   * Generates appropriate content array for a message + attachments.
+   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
+   * @returns {string|object[]}
+   */
+  #generateContent({ userPrompt, attachments = [] }) {
+    if (!attachments.length) {
+      return userPrompt;
+    }
+
+    const content = [{ type: "text", text: userPrompt }];
+    for (let attachment of attachments) {
+      content.push({
+        type: "image_url",
+        image_url: {
+          url: attachment.contentString,
+          detail: "high",
+        },
+      });
+    }
+    return content.flat();
+  }
+
+  /**
+   * Construct the user prompt for this model.
+   * @param {{attachments: import("../../helpers").Attachment[]}} param0
+   * @returns {object[]} Ordered message array for the chat completion.
+   */
   constructPrompt({
     systemPrompt = "",
     contextTexts = [],
     chatHistory = [],
     userPrompt = "",
+    attachments = [], // Attachments passed with this specific prompt only
   }) {
     const prompt = {
       role: "system",
       content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
     };
-    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
+    return [
+      prompt,
+      ...chatHistory,
+      {
+        role: "user",
+        content: this.#generateContent({ userPrompt, attachments }),
+      },
+    ];
   }
 
   async getChatCompletion(messages = null, { temperature = 0.7 }) {
diff --git a/server/utils/AiProviders/openRouter/index.js b/server/utils/AiProviders/openRouter/index.js
index c7d4dfb0b6c60ede62edec8c7788d25ae4175dd3..d9b971b6f63f03a302bced8fff682ae073bc535a 100644
--- a/server/utils/AiProviders/openRouter/index.js
+++ b/server/utils/AiProviders/openRouter/index.js
@@ -128,17 +128,49 @@ class OpenRouterLLM {
     return availableModels.hasOwnProperty(model);
   }
 
+  /**
+   * Generates appropriate content array for a message + attachments.
+   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
+   * @returns {string|object[]}
+   */
+  #generateContent({ userPrompt, attachments = [] }) {
+    if (!attachments.length) {
+      return userPrompt;
+    }
+
+    const content = [{ type: "text", text: userPrompt }];
+    for (let attachment of attachments) {
+      content.push({
+        type: "image_url",
+        image_url: {
+          url: attachment.contentString,
+          detail: "auto",
+        },
+      });
+    }
+
+    return content.flat();
+  }
+
   constructPrompt({
     systemPrompt = "",
     contextTexts = [],
     chatHistory = [],
     userPrompt = "",
+    attachments = [],
   }) {
     const prompt = {
       role: "system",
       content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
     };
-    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
+    return [
+      prompt,
+      ...chatHistory,
+      {
+        role: "user",
+        content: this.#generateContent({ userPrompt, attachments }),
+      },
+    ];
   }
 
   async getChatCompletion(messages = null, { temperature = 0.7 }) {
diff --git a/server/utils/AiProviders/textGenWebUI/index.js b/server/utils/AiProviders/textGenWebUI/index.js
index 484a35c8bdc576ec442892a0aead1eb7ca791e39..9400a12f4b981acac129f4a9eea76a752199fdde 100644
--- a/server/utils/AiProviders/textGenWebUI/index.js
+++ b/server/utils/AiProviders/textGenWebUI/index.js
@@ -63,17 +63,52 @@ class TextGenWebUILLM {
     return true;
   }
 
+  /**
+   * Generates appropriate content array for a message + attachments.
+   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
+   * @returns {string|object[]}
+   */
+  #generateContent({ userPrompt, attachments = [] }) {
+    if (!attachments.length) {
+      return userPrompt;
+    }
+
+    const content = [{ type: "text", text: userPrompt }];
+    for (let attachment of attachments) {
+      content.push({
+        type: "image_url",
+        image_url: {
+          url: attachment.contentString,
+        },
+      });
+    }
+    return content.flat();
+  }
+
+  /**
+   * Construct the user prompt for this model.
+   * @param {{attachments: import("../../helpers").Attachment[]}} param0
+   * @returns {object[]} Ordered message array for the chat completion.
+   */
   constructPrompt({
     systemPrompt = "",
     contextTexts = [],
     chatHistory = [],
     userPrompt = "",
+    attachments = [],
   }) {
     const prompt = {
       role: "system",
       content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
     };
-    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
+    return [
+      prompt,
+      ...chatHistory,
+      {
+        role: "user",
+        content: this.#generateContent({ userPrompt, attachments }),
+      },
+    ];
   }
 
   async getChatCompletion(messages = null, { temperature = 0.7 }) {
diff --git a/server/utils/chats/stream.js b/server/utils/chats/stream.js
index a3f4f3b1d5dbe7e4aed67e5cb284251b42fa4472..bc13833631182a55cae1ec3d75842f7269e3c4c4 100644
--- a/server/utils/chats/stream.js
+++ b/server/utils/chats/stream.js
@@ -20,7 +20,8 @@ async function streamChatWithWorkspace(
   message,
   chatMode = "chat",
   user = null,
-  thread = null
+  thread = null,
+  attachments = []
 ) {
   const uuid = uuidv4();
   const updatedMessage = await grepCommand(message, user);
@@ -69,6 +70,7 @@ async function streamChatWithWorkspace(
       type: "textResponse",
       textResponse,
       sources: [],
+      attachments,
       close: true,
       error: null,
     });
@@ -79,6 +81,7 @@ async function streamChatWithWorkspace(
         text: textResponse,
         sources: [],
         type: chatMode,
+        attachments,
       },
       threadId: thread?.id || null,
       include: false,
@@ -195,6 +198,7 @@ async function streamChatWithWorkspace(
         text: textResponse,
         sources: [],
         type: chatMode,
+        attachments,
       },
       threadId: thread?.id || null,
       include: false,
@@ -211,6 +215,7 @@ async function streamChatWithWorkspace(
       userPrompt: updatedMessage,
       contextTexts,
       chatHistory,
+      attachments,
     },
     rawHistory
   );
@@ -246,7 +251,7 @@ async function streamChatWithWorkspace(
     const { chat } = await WorkspaceChats.new({
       workspaceId: workspace.id,
       prompt: message,
-      response: { text: completeText, sources, type: chatMode },
+      response: { text: completeText, sources, type: chatMode, attachments },
       threadId: thread?.id || null,
       user,
     });
diff --git a/server/utils/helpers/chat/responses.js b/server/utils/helpers/chat/responses.js
index 4140e1e2feb5a5bfbc82ff6e5197b2abd234567d..5eec61823e23f212dd2a26dd41545da84b623823 100644
--- a/server/utils/helpers/chat/responses.js
+++ b/server/utils/helpers/chat/responses.js
@@ -71,6 +71,7 @@ function convertToChatHistory(history = []) {
         role: "user",
         content: prompt,
         sentAt: moment(createdAt).unix(),
+        attachments: data?.attachments ?? [],
         chatId: id,
       },
       {
diff --git a/server/utils/helpers/index.js b/server/utils/helpers/index.js
index 2f6ea51461fdeff69f495cd0f23389717e6f5eb7..765e7226fdee3d3a9d0514ac8bde174528aa644c 100644
--- a/server/utils/helpers/index.js
+++ b/server/utils/helpers/index.js
@@ -1,3 +1,11 @@
+/**
+ * File Attachment for automatic upload on the chat container page.
+ * @typedef Attachment
+ * @property {string} name - the given file name
+ * @property {string} mime - the given file mime
+ * @property {string} contentString - full base64 encoded string of file
+ */
+
 /**
  * @typedef {Object} BaseLLMProvider - A basic llm provider object
  * @property {Function} streamingEnabled - Checks if streaming is enabled for chat completions.