diff --git a/frontend/src/components/EmbeddingSelection/LocalAiOptions/index.jsx b/frontend/src/components/EmbeddingSelection/LocalAiOptions/index.jsx
index 232c33205fb07f706d9b58082cf3618cb7994b00..5871e288f47864f7407c5fd3bcf4a0cd5e527d52 100644
--- a/frontend/src/components/EmbeddingSelection/LocalAiOptions/index.jsx
+++ b/frontend/src/components/EmbeddingSelection/LocalAiOptions/index.jsx
@@ -6,51 +6,80 @@ export default function LocalAiOptions({ settings }) {
     settings?.EmbeddingBasePath
   );
   const [basePath, setBasePath] = useState(settings?.EmbeddingBasePath);
-  function updateBasePath() {
-    setBasePath(basePathValue);
-  }
+  const [apiKeyValue, setApiKeyValue] = useState(settings?.LocalAiApiKey);
+  const [apiKey, setApiKey] = useState(settings?.LocalAiApiKey);
 
   return (
     <>
-      <div className="flex flex-col w-60">
-        <label className="text-white text-sm font-semibold block mb-4">
-          LocalAI Base URL
-        </label>
-        <input
-          type="url"
-          name="EmbeddingBasePath"
-          className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
-          placeholder="http://localhost:8080/v1"
-          defaultValue={settings?.EmbeddingBasePath}
-          onChange={(e) => setBasePathValue(e.target.value)}
-          onBlur={updateBasePath}
-          required={true}
-          autoComplete="off"
-          spellCheck={false}
+      <div className="w-full flex items-center gap-4">
+        <div className="flex flex-col w-60">
+          <label className="text-white text-sm font-semibold block mb-4">
+            LocalAI Base URL
+          </label>
+          <input
+            type="url"
+            name="EmbeddingBasePath"
+            className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
+            placeholder="http://localhost:8080/v1"
+            defaultValue={settings?.EmbeddingBasePath}
+            onChange={(e) => setBasePathValue(e.target.value)}
+            onBlur={() => setBasePath(basePathValue)}
+            required={true}
+            autoComplete="off"
+            spellCheck={false}
+          />
+        </div>
+        <LocalAIModelSelection
+          settings={settings}
+          apiKey={apiKey}
+          basePath={basePath}
         />
+        <div className="flex flex-col w-60">
+          <label className="text-white text-sm font-semibold block mb-4">
+            Max embedding chunk length
+          </label>
+          <input
+            type="number"
+            name="EmbeddingModelMaxChunkLength"
+            className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
+            placeholder="1000"
+            min={1}
+            onScroll={(e) => e.target.blur()}
+            defaultValue={settings?.EmbeddingModelMaxChunkLength}
+            required={false}
+            autoComplete="off"
+          />
+        </div>
       </div>
-      <LocalAIModelSelection settings={settings} basePath={basePath} />
-      <div className="flex flex-col w-60">
-        <label className="text-white text-sm font-semibold block mb-4">
-          Max embedding chunk length
-        </label>
-        <input
-          type="number"
-          name="EmbeddingModelMaxChunkLength"
-          className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
-          placeholder="1000"
-          min={1}
-          onScroll={(e) => e.target.blur()}
-          defaultValue={settings?.EmbeddingModelMaxChunkLength}
-          required={false}
-          autoComplete="off"
-        />
+      <div className="w-full flex items-center gap-4">
+        <div className="flex flex-col w-60">
+          <div className="flex flex-col gap-y-1 mb-4">
+            <label className="text-white text-sm font-semibold block">
+              LocalAI API Key
+            </label>
+            <p className="text-xs italic text-white/60">
+              Optional API key to use if running LocalAI with API keys enabled.
+            </p>
+          </div>
+
+          <input
+            type="password"
+            name="LocalAiApiKey"
+            className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
+            placeholder="sk-mysecretkey"
+            defaultValue={settings?.LocalAiApiKey ? "*".repeat(20) : ""}
+            autoComplete="off"
+            spellCheck={false}
+            onChange={(e) => setApiKeyValue(e.target.value)}
+            onBlur={() => setApiKey(apiKeyValue)}
+          />
+        </div>
       </div>
     </>
   );
 }
 
-function LocalAIModelSelection({ settings, basePath = null }) {
+function LocalAIModelSelection({ settings, apiKey = null, basePath = null }) {
   const [customModels, setCustomModels] = useState([]);
   const [loading, setLoading] = useState(true);
 
@@ -62,12 +91,12 @@ function LocalAIModelSelection({ settings, basePath = null }) {
         return;
       }
       setLoading(true);
-      const { models } = await System.customModels("localai", null, basePath);
+      const { models } = await System.customModels("localai", apiKey, basePath);
       setCustomModels(models || []);
       setLoading(false);
     }
     findCustomModels();
-  }, [basePath]);
+  }, [basePath, apiKey]);
 
   if (loading || customModels.length == 0) {
     return (
diff --git a/server/models/systemSettings.js b/server/models/systemSettings.js
index 66f7108bd838497d187446c368f74b54cf15b79e..068359bb023051bde099d9c35d0a00aa7a11b45c 100644
--- a/server/models/systemSettings.js
+++ b/server/models/systemSettings.js
@@ -29,6 +29,7 @@ const SystemSettings = {
       EmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
       EmbeddingModelMaxChunkLength:
         process.env.EMBEDDING_MODEL_MAX_CHUNK_LENGTH,
+      LocalAiApiKey: !!process.env.LOCAL_AI_API_KEY,
       ...(vectorDB === "pinecone"
         ? {
             PineConeEnvironment: process.env.PINECONE_ENVIRONMENT,
@@ -98,13 +99,11 @@ const SystemSettings = {
             AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
           }
         : {}),
-
       ...(llmProvider === "localai"
         ? {
             LocalAiBasePath: process.env.LOCAL_AI_BASE_PATH,
             LocalAiModelPref: process.env.LOCAL_AI_MODEL_PREF,
             LocalAiTokenLimit: process.env.LOCAL_AI_MODEL_TOKEN_LIMIT,
-            LocalAiApiKey: !!process.env.LOCAL_AI_API_KEY,
 
             // For embedding credentials when localai is selected.
             OpenAiKey: !!process.env.OPEN_AI_KEY,
diff --git a/server/utils/EmbeddingEngines/localAi/index.js b/server/utils/EmbeddingEngines/localAi/index.js
index aa36b5d13309c8226dac69c20e59fa9f7db753dd..0f43cc7dc6579514caea931e3468a4193a802ec3 100644
--- a/server/utils/EmbeddingEngines/localAi/index.js
+++ b/server/utils/EmbeddingEngines/localAi/index.js
@@ -9,6 +9,11 @@ class LocalAiEmbedder {
       throw new Error("No embedding model was set.");
     const config = new Configuration({
       basePath: process.env.EMBEDDING_BASE_PATH,
+      ...(!!process.env.LOCAL_AI_API_KEY
+        ? {
+            apiKey: process.env.LOCAL_AI_API_KEY,
+          }
+        : {}),
     });
     this.openai = new OpenAIApi(config);
 
diff --git a/server/utils/helpers/customModels.js b/server/utils/helpers/customModels.js
index 8d46ab6856d01bb7696d56750b53a16691b94cee..03e373774c716971fc351025dbbe9b552fea9925 100644
--- a/server/utils/helpers/customModels.js
+++ b/server/utils/helpers/customModels.js
@@ -8,7 +8,7 @@ async function getCustomModels(provider = "", apiKey = null, basePath = null) {
     case "openai":
       return await openAiModels(apiKey);
     case "localai":
-      return await localAIModels(basePath);
+      return await localAIModels(basePath, apiKey);
     case "native-llm":
       return nativeLLMModels();
     default: