import fs from "fs/promises";
import path from "path";
import {
  FileSourceConfig,
  TemplateDataSource,
  TemplateFramework,
  TemplateVectorDB,
  WebSourceConfig,
} from "./types";
// One entry of a generated `.env` file; all fields optional so callers can
// supply partial entries (see renderEnvVar for how each case is rendered).
type EnvVar = {
// Variable name; entries without a name render only their description.
name?: string;
// Human-readable note, rendered as `# ...` comment line(s) above the variable.
description?: string;
// Concrete value; when absent the variable is rendered commented out (`# NAME=`).
value?: string;
};
const renderEnvVar = (envVars: EnvVar[]): string => {
return envVars.reduce(
(prev, env) =>
prev +
(env.description
? `# ${env.description.replaceAll("\n", "\n# ")}\n`
: "") +
(env.name
? env.value
? `${env.name}=${env.value}\n\n`
: `# ${env.name}=\n\n`
: ""),
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
"",
);
};
/**
 * Environment variables required by the selected vector database.
 * Unknown (or absent) vector DBs yield an empty list.
 */
const getVectorDBEnvs = (vectorDb: TemplateVectorDB) => {
  switch (vectorDb) {
    case "mongo":
      return [
        {
          name: "MONGO_URI",
          // Fixed: the old description pointed at Timescale (PostgreSQL)
          // docs, copy-pasted from the "pg" case below.
          description: "The MongoDB connection URI.",
        },
        {
          name: "MONGODB_DATABASE",
        },
        {
          name: "MONGODB_VECTORS",
        },
        {
          name: "MONGODB_VECTOR_INDEX",
        },
      ];
    case "pg":
      return [
        {
          name: "PG_CONNECTION_STRING",
          description:
            "For generating a connection URI, see https://docs.timescale.com/use-timescale/latest/services/create-a-service\nThe PostgreSQL connection string.",
        },
      ];
    case "pinecone":
      return [
        {
          name: "PINECONE_API_KEY",
          description:
            "Configuration for Pinecone vector store\nThe Pinecone API key.",
        },
        {
          name: "PINECONE_ENVIRONMENT",
        },
        {
          name: "PINECONE_INDEX_NAME",
        },
      ];
    case "milvus":
      return [
        {
          name: "MILVUS_ADDRESS",
          description:
            "The address of the Milvus server. Eg: http://localhost:19530",
          value: "http://localhost:19530",
        },
        {
          name: "MILVUS_COLLECTION",
          description:
            "The name of the Milvus collection to store the vectors.",
          value: "llamacollection",
        },
        {
          name: "MILVUS_USERNAME",
          description: "The username to access the Milvus server.",
        },
        {
          name: "MILVUS_PASSWORD",
          description: "The password to access the Milvus server.",
        },
      ];
    default:
      return [];
  }
};
const getDataSourceEnvs = (
dataSource: TemplateDataSource,
llamaCloudKey?: string,
) => {
switch (dataSource.type) {
case "web":
Huu Le (Lee)
committed
const config = dataSource.config as WebSourceConfig;
return [
{
name: "BASE_URL",
description: "The base URL to start web scraping.",
Huu Le (Lee)
committed
value: config.baseUrl,
},
{
name: "URL_PREFIX",
description: "The prefix of the URL to start web scraping.",
Huu Le (Lee)
committed
value: config.baseUrl,
},
{
name: "MAX_DEPTH",
description: "The maximum depth to scrape.",
Huu Le (Lee)
committed
value: config.depth?.toString(),
},
];
case "file":
case "folder":
return [
...((dataSource?.config as FileSourceConfig).useLlamaParse
? [
{
name: "LLAMA_CLOUD_API_KEY",
description: `The Llama Cloud API key.`,
value: llamaCloudKey,
},
]
: []),
];
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
default:
return [];
}
};
export const createBackendEnvFile = async (
root: string,
opts: {
openAiKey?: string;
llamaCloudKey?: string;
vectorDb?: TemplateVectorDB;
model?: string;
embeddingModel?: string;
framework?: TemplateFramework;
dataSource?: TemplateDataSource;
port?: number;
},
) => {
// Init env values
const envFileName = ".env";
const defaultEnvs = [
{
render: true,
name: "MODEL",
description: "The name of LLM model to use.",
value: opts.model || "gpt-3.5-turbo",
},
{
render: true,
name: "OPENAI_API_KEY",
description: "The OpenAI API key to use.",
value: opts.openAiKey,
},
// Add vector database environment variables
...(opts.vectorDb ? getVectorDBEnvs(opts.vectorDb) : []),
// Add data source environment variables
...(opts.dataSource
? getDataSourceEnvs(opts.dataSource, opts.llamaCloudKey)
: []),
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
];
let envVars: EnvVar[] = [];
if (opts.framework === "fastapi") {
envVars = [
...defaultEnvs,
...[
{
name: "APP_HOST",
description: "The address to start the backend app.",
value: "0.0.0.0",
},
{
name: "APP_PORT",
description: "The port to start the backend app.",
value: opts.port?.toString() || "8000",
},
{
name: "EMBEDDING_MODEL",
description: "Name of the embedding model to use.",
value: opts.embeddingModel,
},
{
name: "EMBEDDING_DIM",
description: "Dimension of the embedding model to use.",
},
{
name: "LLM_TEMPERATURE",
description: "Temperature for sampling from the model.",
},
{
name: "LLM_MAX_TOKENS",
description: "Maximum number of tokens to generate.",
},
{
name: "TOP_K",
description:
"The number of similar embeddings to return when retrieving documents.",
value: "3",
},
{
name: "SYSTEM_PROMPT",
description: `Custom system prompt.
Example:
SYSTEM_PROMPT="
We have provided context information below.
---------------------
{context_str}
---------------------
Given this information, please answer the question: {query_str}
"`,
},
],
];
} else {
envVars = [
...defaultEnvs,
...[
opts.framework === "nextjs"
? {
name: "NEXT_PUBLIC_MODEL",
description:
"The LLM model to use (hardcode to front-end artifact).",
value: opts.model || "gpt-3.5-turbo",
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
}
: {},
],
];
}
// Render and write env file
const content = renderEnvVar(envVars);
await fs.writeFile(path.join(root, envFileName), content);
console.log(`Created '${envFileName}' file. Please check the settings.`);
};
/**
 * Write the frontend `.env` file under `root`, pointing the UI at the
 * chosen model and the backend chat endpoint.
 *
 * @param root directory the `.env` file is written into
 * @param opts optional custom chat API path and model name
 */
export const createFrontendEnvFile = async (
  root: string,
  opts: {
    customApiPath?: string;
    model?: string;
  },
) => {
  // Fall back to the local dev backend when no custom API path is given.
  const chatApi = opts.customApiPath
    ? opts.customApiPath
    : "http://localhost:8000/api/chat";
  const frontendEnvs = [
    {
      name: "MODEL",
      description: "The OpenAI model to use.",
      value: opts.model,
    },
    {
      name: "NEXT_PUBLIC_MODEL",
      description: "The OpenAI model to use (hardcode to front-end artifact).",
      value: opts.model,
    },
    {
      name: "NEXT_PUBLIC_CHAT_API",
      description: "The backend API for chat endpoint.",
      value: chatApi,
    },
  ];
  await fs.writeFile(path.join(root, ".env"), renderEnvVar(frontendEnvs));
};