diff --git a/docker/.env.example b/docker/.env.example
index 74ab3ef62c8ca516b3ad0d6691f15b25f863c2ba..f5bf26aee305806fb20b0db51ffdef29fece8459 100644
--- a/docker/.env.example
+++ b/docker/.env.example
@@ -27,6 +27,7 @@ CACHE_VECTORS="true"
 # LOCAL_AI_BASE_PATH='http://host.docker.internal:8080/v1'
 # LOCAL_AI_MODEL_PREF='luna-ai-llama2'
 # LOCAL_AI_MODEL_TOKEN_LIMIT=4096
+# LOCAL_AI_API_KEY="sk-123abc"
 
 ###########################################
 ######## Embedding API SElECTION ##########
diff --git a/frontend/src/components/LLMSelection/LocalAiOptions/index.jsx b/frontend/src/components/LLMSelection/LocalAiOptions/index.jsx
index a09a47d7c8e00ddd92b4fcceacf0470e5af85fe3..8adad7e50e383eb6b08b9443db8db201ae2d8716 100644
--- a/frontend/src/components/LLMSelection/LocalAiOptions/index.jsx
+++ b/frontend/src/components/LLMSelection/LocalAiOptions/index.jsx
@@ -6,12 +6,11 @@ import System from "../../../models/system";
 export default function LocalAiOptions({ settings, showAlert = false }) {
   const [basePathValue, setBasePathValue] = useState(settings?.LocalAiBasePath);
   const [basePath, setBasePath] = useState(settings?.LocalAiBasePath);
-  function updateBasePath() {
-    setBasePath(basePathValue);
-  }
+  const [apiKeyValue, setApiKeyValue] = useState(settings?.LocalAiApiKey);
+  const [apiKey, setApiKey] = useState(settings?.LocalAiApiKey);
 
   return (
-    <div className="w-full flex flex-col">
+    <div className="w-full flex flex-col gap-y-4">
       {showAlert && (
         <div className="flex flex-col md:flex-row md:items-center gap-x-2 text-white mb-6 bg-blue-800/30 w-fit rounded-lg px-4 py-2">
           <div className="gap-x-2 flex items-center">
@@ -44,10 +43,14 @@ export default function LocalAiOptions({ settings, showAlert = false }) {
             autoComplete="off"
             spellCheck={false}
             onChange={(e) => setBasePathValue(e.target.value)}
-            onBlur={updateBasePath}
+            onBlur={() => setBasePath(basePathValue)}
           />
         </div>
-        <LocalAIModelSelection settings={settings} basePath={basePath} />
+        <LocalAIModelSelection
+          settings={settings}
+          basePath={basePath}
+          apiKey={apiKey}
+        />
         <div className="flex flex-col w-60">
           <label className="text-white text-sm font-semibold block mb-4">
             Token context window
@@ -65,11 +68,35 @@ export default function LocalAiOptions({ settings, showAlert = false }) {
           />
         </div>
       </div>
+      <div className="w-full flex items-center gap-4">
+        <div className="flex flex-col w-60">
+          <div className="flex flex-col gap-y-1 mb-4">
+            <label className="text-white text-sm font-semibold block">
+              Local AI API Key
+            </label>
+            <p className="text-xs italic text-white/60">
+              Optional API key to use if running LocalAI with API keys.
+            </p>
+          </div>
+
+          <input
+            type="password"
+            name="LocalAiApiKey"
+            className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
+            placeholder="sk-mysecretkey"
+            defaultValue={settings?.LocalAiApiKey ? "*".repeat(20) : ""}
+            autoComplete="off"
+            spellCheck={false}
+            onChange={(e) => setApiKeyValue(e.target.value)}
+            onBlur={() => setApiKey(apiKeyValue)}
+          />
+        </div>
+      </div>
     </div>
   );
 }
 
-function LocalAIModelSelection({ settings, basePath = null }) {
+function LocalAIModelSelection({ settings, basePath = null, apiKey = null }) {
   const [customModels, setCustomModels] = useState([]);
   const [loading, setLoading] = useState(true);
 
@@ -81,12 +108,12 @@ function LocalAIModelSelection({ settings, basePath = null }) {
         return;
       }
       setLoading(true);
-      const { models } = await System.customModels("localai", null, basePath);
+      const { models } = await System.customModels("localai", apiKey, basePath);
       setCustomModels(models || []);
       setLoading(false);
     }
     findCustomModels();
-  }, [basePath]);
+  }, [basePath, apiKey]);
 
   if (loading || customModels.length == 0) {
     return (
diff --git a/server/.env.example b/server/.env.example
index 03d1eb9bcefc0f3e5079bb4c564b1314dd8ddf2d..f83c5e72a6f453cddb9265b68d62b4d7de66f238 100644
--- a/server/.env.example
+++ b/server/.env.example
@@ -27,6 +27,7 @@ JWT_SECRET="my-random-string-for-seeding" # Please generate random string at lea
 # LOCAL_AI_BASE_PATH='http://localhost:8080/v1'
 # LOCAL_AI_MODEL_PREF='luna-ai-llama2'
 # LOCAL_AI_MODEL_TOKEN_LIMIT=4096
+# LOCAL_AI_API_KEY="sk-123abc"
 
 ###########################################
 ######## Embedding API SElECTION ##########
diff --git a/server/endpoints/system.js b/server/endpoints/system.js
index fd07d03c098df40ebff908d47afaa4e8164e3005..22ce8ef10763c019d254eedc9dfc341508c6c3e9 100644
--- a/server/endpoints/system.js
+++ b/server/endpoints/system.js
@@ -8,7 +8,7 @@ const {
   acceptedFileTypes,
 } = require("../utils/files/documentProcessor");
 const { purgeDocument } = require("../utils/files/purgeDocument");
-const { getVectorDbClass, getLLMProvider } = require("../utils/helpers");
+const { getVectorDbClass } = require("../utils/helpers");
 const { updateENV, dumpENV } = require("../utils/helpers/updateENV");
 const {
   reqBody,
diff --git a/server/models/systemSettings.js b/server/models/systemSettings.js
index ec40cb7f189138b218c92ecfb5703d91637ee1a8..7556d8f22421091d89ac0e077e534b728cb97a17 100644
--- a/server/models/systemSettings.js
+++ b/server/models/systemSettings.js
@@ -103,6 +103,7 @@ const SystemSettings = {
             LocalAiBasePath: process.env.LOCAL_AI_BASE_PATH,
             LocalAiModelPref: process.env.LOCAL_AI_MODEL_PREF,
             LocalAiTokenLimit: process.env.LOCAL_AI_MODEL_TOKEN_LIMIT,
+            LocalAiApiKey: !!process.env.LOCAL_AI_API_KEY,
 
             // For embedding credentials when localai is selected.
             OpenAiKey: !!process.env.OPEN_AI_KEY,
diff --git a/server/utils/AiProviders/localAi/index.js b/server/utils/AiProviders/localAi/index.js
index 616213a236c89a48374d06df0847d02297752d20..6c7a3263fb1bea34385fcc566275bbe1c28fdaa1 100644
--- a/server/utils/AiProviders/localAi/index.js
+++ b/server/utils/AiProviders/localAi/index.js
@@ -8,6 +8,11 @@ class LocalAiLLM {
     const { Configuration, OpenAIApi } = require("openai");
     const config = new Configuration({
       basePath: process.env.LOCAL_AI_BASE_PATH,
+      ...(!!process.env.LOCAL_AI_API_KEY
+        ? {
+            apiKey: process.env.LOCAL_AI_API_KEY,
+          }
+        : {}),
     });
     this.openai = new OpenAIApi(config);
     this.model = process.env.LOCAL_AI_MODEL_PREF;
diff --git a/server/utils/helpers/customModels.js b/server/utils/helpers/customModels.js
index e5bc1fcfb5d2af026f26b061faca524d74a0dca0..cd6adccafd00eba633d266be161d705c35de29d5 100644
--- a/server/utils/helpers/customModels.js
+++ b/server/utils/helpers/customModels.js
@@ -35,10 +35,11 @@ async function openAiModels(apiKey = null) {
   return { models, error: null };
 }
 
-async function localAIModels(basePath = null) {
+async function localAIModels(basePath = null, apiKey = null) {
   const { Configuration, OpenAIApi } = require("openai");
   const config = new Configuration({
     basePath,
+    ...(!!apiKey ? { apiKey } : {}),
   });
   const openai = new OpenAIApi(config);
   const models = await openai
diff --git a/server/utils/helpers/updateENV.js b/server/utils/helpers/updateENV.js
index b7ecffa140613455d04eff8bb9291bba8fd94503..6e0b84970b8c892431e782efc6593af33e6634ce 100644
--- a/server/utils/helpers/updateENV.js
+++ b/server/utils/helpers/updateENV.js
@@ -67,6 +67,10 @@ const KEY_MAPPING = {
     envKey: "LOCAL_AI_MODEL_TOKEN_LIMIT",
     checks: [nonZero],
   },
+  LocalAiApiKey: {
+    envKey: "LOCAL_AI_API_KEY",
+    checks: [],
+  },
 
   EmbeddingEngine: {
     envKey: "EMBEDDING_ENGINE",
diff --git a/server/utils/http/index.js b/server/utils/http/index.js
index 5b61236f0591258b7982c924cd090e8e60edf34f..cb57c4a2894a01c9811363e8cff29453fbf96c2c 100644
--- a/server/utils/http/index.js
+++ b/server/utils/http/index.js
@@ -52,6 +52,13 @@ function multiUserMode(response) {
   return response?.locals?.multiUserMode;
 }
 
+function parseAuthHeader(headerValue = null, apiKey = null) {
+  if (headerValue === null || apiKey === null) return {};
+  if (headerValue === "Authorization")
+    return { Authorization: `Bearer ${apiKey}` };
+  return { [headerValue]: apiKey };
+}
+
 module.exports = {
   reqBody,
   multiUserMode,
@@ -59,4 +66,5 @@ module.exports = {
   makeJWT,
   decodeJWT,
   userFromSession,
+  parseAuthHeader,
 };
diff --git a/server/utils/vectorDbProviders/chroma/index.js b/server/utils/vectorDbProviders/chroma/index.js
index c2f0257dd501d6ffb4fd424697cbddfe70b69083..0e75fa07fb056a9e7acd1122a89ee4afeb04fef0 100644
--- a/server/utils/vectorDbProviders/chroma/index.js
+++ b/server/utils/vectorDbProviders/chroma/index.js
@@ -15,10 +15,10 @@ const Chroma = {
       ...(!!process.env.CHROMA_API_HEADER && !!process.env.CHROMA_API_KEY
         ? {
             fetchOptions: {
-              headers: {
-                [process.env.CHROMA_API_HEADER || "X-Api-Key"]:
-                  process.env.CHROMA_API_KEY,
-              },
+              headers: parseAuthHeader(
+                process.env.CHROMA_API_HEADER || "X-Api-Key",
+                process.env.CHROMA_API_KEY
+              ),
             },
           }
         : {}),