From 6fa8b0ce9308ba8dfaf8cf8e7159b2e4aed0e25a Mon Sep 17 00:00:00 2001
From: Timothy Carambat <rambat1010@gmail.com>
Date: Mon, 4 Dec 2023 08:38:15 -0800
Subject: [PATCH] Add API key option to LocalAI (#407)

* Add API key option to LocalAI

* Add API key for model dropdown selector
---
 docker/.env.example                           |  1 +
 .../LLMSelection/LocalAiOptions/index.jsx     | 45 +++++++++++++++----
 server/.env.example                           |  1 +
 server/endpoints/system.js                    |  2 +-
 server/models/systemSettings.js               |  1 +
 server/utils/AiProviders/localAi/index.js     |  5 +++
 server/utils/helpers/customModels.js          |  3 +-
 server/utils/helpers/updateENV.js             |  4 ++
 server/utils/http/index.js                    |  8 ++++
 .../utils/vectorDbProviders/chroma/index.js   |  8 ++--
 10 files changed, 63 insertions(+), 15 deletions(-)

diff --git a/docker/.env.example b/docker/.env.example
index 74ab3ef62..f5bf26aee 100644
--- a/docker/.env.example
+++ b/docker/.env.example
@@ -27,6 +27,7 @@ CACHE_VECTORS="true"
 # LOCAL_AI_BASE_PATH='http://host.docker.internal:8080/v1'
 # LOCAL_AI_MODEL_PREF='luna-ai-llama2'
 # LOCAL_AI_MODEL_TOKEN_LIMIT=4096
+# LOCAL_AI_API_KEY="sk-123abc"
 
 ###########################################
 ######## Embedding API SElECTION ##########
diff --git a/frontend/src/components/LLMSelection/LocalAiOptions/index.jsx b/frontend/src/components/LLMSelection/LocalAiOptions/index.jsx
index a09a47d7c..8adad7e50 100644
--- a/frontend/src/components/LLMSelection/LocalAiOptions/index.jsx
+++ b/frontend/src/components/LLMSelection/LocalAiOptions/index.jsx
@@ -6,12 +6,11 @@ import System from "../../../models/system";
 export default function LocalAiOptions({ settings, showAlert = false }) {
   const [basePathValue, setBasePathValue] = useState(settings?.LocalAiBasePath);
   const [basePath, setBasePath] = useState(settings?.LocalAiBasePath);
-  function updateBasePath() {
-    setBasePath(basePathValue);
-  }
+  const [apiKeyValue, setApiKeyValue] = useState(settings?.LocalAiApiKey);
+  const [apiKey, setApiKey] = useState(settings?.LocalAiApiKey);
 
   return (
-    <div className="w-full flex flex-col">
+    <div className="w-full flex flex-col gap-y-4">
       {showAlert && (
         <div className="flex flex-col md:flex-row md:items-center gap-x-2 text-white mb-6 bg-blue-800/30 w-fit rounded-lg px-4 py-2">
           <div className="gap-x-2 flex items-center">
@@ -44,10 +43,14 @@ export default function LocalAiOptions({ settings, showAlert = false }) {
             autoComplete="off"
             spellCheck={false}
             onChange={(e) => setBasePathValue(e.target.value)}
-            onBlur={updateBasePath}
+            onBlur={() => setBasePath(basePathValue)}
           />
         </div>
-        <LocalAIModelSelection settings={settings} basePath={basePath} />
+        <LocalAIModelSelection
+          settings={settings}
+          basePath={basePath}
+          apiKey={apiKey}
+        />
         <div className="flex flex-col w-60">
           <label className="text-white text-sm font-semibold block mb-4">
             Token context window
@@ -65,11 +68,35 @@ export default function LocalAiOptions({ settings, showAlert = false }) {
           />
         </div>
       </div>
+      <div className="w-full flex items-center gap-4">
+        <div className="flex flex-col w-60">
+          <div className="flex flex-col gap-y-1 mb-4">
+            <label className="text-white text-sm font-semibold block">
+              Local AI API Key
+            </label>
+            <p className="text-xs italic text-white/60">
+              optional API key to use if running LocalAI with API keys.
+            </p>
+          </div>
+
+          <input
+            type="password"
+            name="LocalAiApiKey"
+            className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
+            placeholder="sk-mysecretkey"
+            defaultValue={settings?.LocalAiApiKey ? "*".repeat(20) : ""}
+            autoComplete="off"
+            spellCheck={false}
+            onChange={(e) => setApiKeyValue(e.target.value)}
+            onBlur={() => setApiKey(apiKeyValue)}
+          />
+        </div>
+      </div>
     </div>
   );
 }
 
-function LocalAIModelSelection({ settings, basePath = null }) {
+function LocalAIModelSelection({ settings, basePath = null, apiKey = null }) {
   const [customModels, setCustomModels] = useState([]);
   const [loading, setLoading] = useState(true);
 
@@ -81,12 +108,12 @@ function LocalAIModelSelection({ settings, basePath = null }) {
         return;
       }
       setLoading(true);
-      const { models } = await System.customModels("localai", null, basePath);
+      const { models } = await System.customModels("localai", apiKey, basePath);
       setCustomModels(models || []);
       setLoading(false);
     }
     findCustomModels();
-  }, [basePath]);
+  }, [basePath, apiKey]);
 
   if (loading || customModels.length == 0) {
     return (
diff --git a/server/.env.example b/server/.env.example
index 03d1eb9bc..f83c5e72a 100644
--- a/server/.env.example
+++ b/server/.env.example
@@ -27,6 +27,7 @@ JWT_SECRET="my-random-string-for-seeding" # Please generate random string at lea
 # LOCAL_AI_BASE_PATH='http://localhost:8080/v1'
 # LOCAL_AI_MODEL_PREF='luna-ai-llama2'
 # LOCAL_AI_MODEL_TOKEN_LIMIT=4096
+# LOCAL_AI_API_KEY="sk-123abc"
 
 ###########################################
 ######## Embedding API SElECTION ##########
diff --git a/server/endpoints/system.js b/server/endpoints/system.js
index fd07d03c0..22ce8ef10 100644
--- a/server/endpoints/system.js
+++ b/server/endpoints/system.js
@@ -8,7 +8,7 @@ const {
   acceptedFileTypes,
 } = require("../utils/files/documentProcessor");
 const { purgeDocument } = require("../utils/files/purgeDocument");
-const { getVectorDbClass, getLLMProvider } = require("../utils/helpers");
+const { getVectorDbClass } = require("../utils/helpers");
 const { updateENV, dumpENV } = require("../utils/helpers/updateENV");
 const {
   reqBody,
diff --git a/server/models/systemSettings.js b/server/models/systemSettings.js
index ec40cb7f1..7556d8f22 100644
--- a/server/models/systemSettings.js
+++ b/server/models/systemSettings.js
@@ -103,6 +103,7 @@ const SystemSettings = {
             LocalAiBasePath: process.env.LOCAL_AI_BASE_PATH,
             LocalAiModelPref: process.env.LOCAL_AI_MODEL_PREF,
             LocalAiTokenLimit: process.env.LOCAL_AI_MODEL_TOKEN_LIMIT,
+            LocalAiApiKey: !!process.env.LOCAL_AI_API_KEY,
 
             // For embedding credentials when localai is selected.
             OpenAiKey: !!process.env.OPEN_AI_KEY,
diff --git a/server/utils/AiProviders/localAi/index.js b/server/utils/AiProviders/localAi/index.js
index 616213a23..6c7a3263f 100644
--- a/server/utils/AiProviders/localAi/index.js
+++ b/server/utils/AiProviders/localAi/index.js
@@ -8,6 +8,11 @@ class LocalAiLLM {
     const { Configuration, OpenAIApi } = require("openai");
     const config = new Configuration({
       basePath: process.env.LOCAL_AI_BASE_PATH,
+      ...(!!process.env.LOCAL_AI_API_KEY
+        ? {
+            apiKey: process.env.LOCAL_AI_API_KEY,
+          }
+        : {}),
     });
     this.openai = new OpenAIApi(config);
     this.model = process.env.LOCAL_AI_MODEL_PREF;
diff --git a/server/utils/helpers/customModels.js b/server/utils/helpers/customModels.js
index e5bc1fcfb..cd6adccaf 100644
--- a/server/utils/helpers/customModels.js
+++ b/server/utils/helpers/customModels.js
@@ -35,10 +35,11 @@ async function openAiModels(apiKey = null) {
   return { models, error: null };
 }
 
-async function localAIModels(basePath = null) {
+async function localAIModels(basePath = null, apiKey = null) {
   const { Configuration, OpenAIApi } = require("openai");
   const config = new Configuration({
     basePath,
+    ...(!!apiKey ? { apiKey } : {}),
   });
   const openai = new OpenAIApi(config);
   const models = await openai
diff --git a/server/utils/helpers/updateENV.js b/server/utils/helpers/updateENV.js
index b7ecffa14..6e0b84970 100644
--- a/server/utils/helpers/updateENV.js
+++ b/server/utils/helpers/updateENV.js
@@ -67,6 +67,10 @@ const KEY_MAPPING = {
     envKey: "LOCAL_AI_MODEL_TOKEN_LIMIT",
     checks: [nonZero],
   },
+  LocalAiApiKey: {
+    envKey: "LOCAL_AI_API_KEY",
+    checks: [],
+  },
 
   EmbeddingEngine: {
     envKey: "EMBEDDING_ENGINE",
diff --git a/server/utils/http/index.js b/server/utils/http/index.js
index 5b61236f0..cb57c4a28 100644
--- a/server/utils/http/index.js
+++ b/server/utils/http/index.js
@@ -52,6 +52,13 @@ function multiUserMode(response) {
   return response?.locals?.multiUserMode;
 }
 
+function parseAuthHeader(headerValue = null, apiKey = null) {
+  if (headerValue === null || apiKey === null) return {};
+  if (headerValue === "Authorization")
+    return { Authorization: `Bearer ${apiKey}` };
+  return { [headerValue]: apiKey };
+}
+
 module.exports = {
   reqBody,
   multiUserMode,
@@ -59,4 +66,5 @@ module.exports = {
   makeJWT,
   decodeJWT,
   userFromSession,
+  parseAuthHeader,
 };
diff --git a/server/utils/vectorDbProviders/chroma/index.js b/server/utils/vectorDbProviders/chroma/index.js
index c2f0257dd..0e75fa07f 100644
--- a/server/utils/vectorDbProviders/chroma/index.js
+++ b/server/utils/vectorDbProviders/chroma/index.js
@@ -15,10 +15,10 @@ const Chroma = {
       ...(!!process.env.CHROMA_API_HEADER && !!process.env.CHROMA_API_KEY
         ? {
             fetchOptions: {
-              headers: {
-                [process.env.CHROMA_API_HEADER || "X-Api-Key"]:
-                  process.env.CHROMA_API_KEY,
-              },
+              headers: parseAuthHeader(
+                process.env.CHROMA_API_HEADER || "X-Api-Key",
+                process.env.CHROMA_API_KEY
+              ),
             },
           }
         : {}),
-- 
GitLab