Skip to content
Snippets Groups Projects
Unverified Commit e2567ffc authored by Marcus Schiesser's avatar Marcus Schiesser Committed by GitHub
Browse files

feat: use TOP_K env variable also for TS (#67)

parent 5d8d752b
No related branches found
No related tags found
No related merge requests found
......@@ -163,6 +163,14 @@ const getModelEnvs = (modelConfig: ModelConfig): EnvVar[] => {
description: "The OpenAI API key to use.",
value: modelConfig.apiKey,
},
{
name: "LLM_TEMPERATURE",
description: "Temperature for sampling from the model.",
},
{
name: "LLM_MAX_TOKENS",
description: "Maximum number of tokens to generate.",
},
]
: []),
];
......@@ -186,20 +194,7 @@ const getFrameworkEnvs = (
description: "The port to start the backend app.",
value: port?.toString() || "8000",
},
{
name: "LLM_TEMPERATURE",
description: "Temperature for sampling from the model.",
},
{
name: "LLM_MAX_TOKENS",
description: "Maximum number of tokens to generate.",
},
{
name: "TOP_K",
description:
"The number of similar embeddings to return when retrieving documents.",
value: "3",
},
// TODO: Once LlamaIndexTS supports string templates, move this to `getEngineEnvs`
{
name: "SYSTEM_PROMPT",
description: `Custom system prompt.
......@@ -215,6 +210,17 @@ Given this information, please answer the question: {query_str}
];
};
/**
 * Environment variables consumed by the chat engine itself (as opposed to
 * model- or framework-level configuration).
 *
 * @returns The engine-specific `EnvVar` entries; currently only `TOP_K`,
 *          which controls how many similar embeddings the retriever returns
 *          (defaults to "3").
 */
const getEngineEnvs = (): EnvVar[] => {
  // Read at runtime by the chat engine to set `retriever.similarityTopK`.
  const topKVar: EnvVar = {
    name: "TOP_K",
    description:
      "The number of similar embeddings to return when retrieving documents.",
    value: "3",
  };
  return [topKVar];
};
export const createBackendEnvFile = async (
root: string,
opts: {
......@@ -236,6 +242,8 @@ export const createBackendEnvFile = async (
},
// Add model environment variables
...getModelEnvs(opts.modelConfig),
// Add engine environment variables
...getEngineEnvs(),
// Add vector database environment variables
...getVectorDBEnvs(opts.vectorDb),
...getFrameworkEnvs(opts.framework, opts.port),
......
......@@ -9,7 +9,9 @@ export async function createChatEngine() {
);
}
const retriever = index.asRetriever();
retriever.similarityTopK = 3;
retriever.similarityTopK = process.env.TOP_K
? parseInt(process.env.TOP_K)
: 3;
return new ContextChatEngine({
chatModel: Settings.llm,
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment