From e2567ffc037802302351378bcfc2deffbf21af33 Mon Sep 17 00:00:00 2001
From: Marcus Schiesser <mail@marcusschiesser.de>
Date: Fri, 26 Apr 2024 15:17:53 +0800
Subject: [PATCH] feat: use TOP_K env variable also for TS (#67)

---
 helpers/env-variables.ts                      | 36 +++++++++++--------
 .../engines/typescript/chat/chat.ts           |  4 ++-
 2 files changed, 25 insertions(+), 15 deletions(-)

diff --git a/helpers/env-variables.ts b/helpers/env-variables.ts
index 9cd3f5ad..5451d435 100644
--- a/helpers/env-variables.ts
+++ b/helpers/env-variables.ts
@@ -163,6 +163,14 @@ const getModelEnvs = (modelConfig: ModelConfig): EnvVar[] => {
             description: "The OpenAI API key to use.",
             value: modelConfig.apiKey,
           },
+          {
+            name: "LLM_TEMPERATURE",
+            description: "Temperature for sampling from the model.",
+          },
+          {
+            name: "LLM_MAX_TOKENS",
+            description: "Maximum number of tokens to generate.",
+          },
         ]
       : []),
   ];
@@ -186,20 +194,7 @@ const getFrameworkEnvs = (
       description: "The port to start the backend app.",
       value: port?.toString() || "8000",
     },
-    {
-      name: "LLM_TEMPERATURE",
-      description: "Temperature for sampling from the model.",
-    },
-    {
-      name: "LLM_MAX_TOKENS",
-      description: "Maximum number of tokens to generate.",
-    },
-    {
-      name: "TOP_K",
-      description:
-        "The number of similar embeddings to return when retrieving documents.",
-      value: "3",
-    },
+    // TODO: Once LlamaIndexTS supports string templates, move this to `getEngineEnvs`
     {
       name: "SYSTEM_PROMPT",
       description: `Custom system prompt.
@@ -215,6 +210,17 @@ Given this information, please answer the question: {query_str}
   ];
 };
 
+const getEngineEnvs = (): EnvVar[] => {
+  return [
+    {
+      name: "TOP_K",
+      description:
+        "The number of similar embeddings to return when retrieving documents.",
+      value: "3",
+    },
+  ];
+};
+
 export const createBackendEnvFile = async (
   root: string,
   opts: {
@@ -236,6 +242,8 @@ export const createBackendEnvFile = async (
     },
     // Add model environment variables
     ...getModelEnvs(opts.modelConfig),
+    // Add engine environment variables
+    ...getEngineEnvs(),
     // Add vector database environment variables
     ...getVectorDBEnvs(opts.vectorDb),
     ...getFrameworkEnvs(opts.framework, opts.port),
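
For orientation between the two files: the `EnvVar` entries assembled in `createBackendEnvFile` are ultimately serialized into a `.env` file for the generated backend. A minimal sketch of that serialization step, assuming an `EnvVar` shape of `{ name, description?, value? }` inferred from the entries in this diff (`renderEnvFile` is a hypothetical helper, not the repository's actual implementation):

```ts
// Hypothetical sketch: serialize EnvVar entries into .env file content.
// The EnvVar shape is inferred from the entries added in this patch.
interface EnvVar {
  name: string;
  description?: string;
  value?: string;
}

const renderEnvFile = (envVars: EnvVar[]): string =>
  envVars
    .map(({ name, description, value }) => {
      // Emit the description as a comment block above the variable.
      const comment = (description ?? "")
        .split("\n")
        .filter((line) => line.length > 0)
        .map((line) => `# ${line}`)
        .join("\n");
      // Entries without a value (e.g. LLM_TEMPERATURE above) are written
      // commented out so users can opt in (an assumption of this sketch).
      const assignment =
        value !== undefined ? `${name}=${value}` : `# ${name}=`;
      return [comment, assignment].filter(Boolean).join("\n");
    })
    .join("\n\n");

// Example: the TOP_K entry moved into getEngineEnvs above.
console.log(
  renderEnvFile([
    {
      name: "TOP_K",
      description:
        "The number of similar embeddings to return when retrieving documents.",
      value: "3",
    },
  ]),
);
```
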
diff --git a/templates/components/engines/typescript/chat/chat.ts b/templates/components/engines/typescript/chat/chat.ts
index 62cc77df..5b47fe58 100644
--- a/templates/components/engines/typescript/chat/chat.ts
+++ b/templates/components/engines/typescript/chat/chat.ts
@@ -9,7 +9,9 @@ export async function createChatEngine() {
     );
   }
   const retriever = index.asRetriever();
-  retriever.similarityTopK = 3;
+  retriever.similarityTopK = process.env.TOP_K
+    ? parseInt(process.env.TOP_K, 10)
+    : 3;
 
   return new ContextChatEngine({
     chatModel: Settings.llm,
-- 
GitLab
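
One note on the `chat.ts` change: the ternary falls back to 3 when `TOP_K` is unset or empty, but a malformed value such as `TOP_K=abc` would still assign `NaN` to `similarityTopK`. A hedged sketch of a stricter variant (`parseEnvInt` is a hypothetical helper, not part of this patch):

```ts
// Hypothetical helper: read a positive integer from an env var,
// falling back to a default on missing or malformed input.
function parseEnvInt(raw: string | undefined, fallback: number): number {
  if (!raw) return fallback;
  const parsed = parseInt(raw, 10);
  return Number.isNaN(parsed) || parsed <= 0 ? fallback : parsed;
}

// Usage mirroring the chat.ts hunk above:
// retriever.similarityTopK = parseEnvInt(process.env.TOP_K, 3);
console.log(parseEnvInt(process.env.TOP_K, 3)); // prints 3 when TOP_K is unset
```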