import { copy } from "./copy";
import { callPackageManager } from "./install";

import fs from "fs/promises";
import path from "path";
import { cyan } from "picocolors";

import { COMMUNITY_OWNER, COMMUNITY_REPO } from "./constant";
import { templatesDir } from "./dir"; // assumed helper module exporting the bundled templates directory
import { PackageManager } from "./get-pkg-manager";
import { installLlamapackProject } from "./llama-pack";
import { isHavingPoetryLockFile, tryPoetryRun } from "./poetry";
import { installPythonTemplate } from "./python";
import { downloadAndExtractRepo } from "./repo";
import {
  FileSourceConfig,
  InstallTemplateArgs,
  TemplateDataSource,
  TemplateFramework,
  TemplateVectorDB,
  WebSourceConfig,
} from "./types";
import { installTSTemplate } from "./typescript";
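
// Writes the backend's `.env` file from the options collected by the CLI:
// model name, API keys, vector DB settings, and data source settings.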
const createEnvLocalFile = async (
  root: string,
  opts?: {
    openAiKey?: string;
    llamaCloudKey?: string;
    vectorDb?: TemplateVectorDB;
    model?: string;
    embeddingModel?: string;
    dataSource?: TemplateDataSource;
  },
) => {
  const envFileName = ".env";
  let content = "";

  const model = opts?.model || "gpt-3.5-turbo";
  content += `MODEL=${model}\n`;
  console.log("\nUsing OpenAI model: ", model, "\n");

  if (opts?.openAiKey) {
    content += `OPENAI_API_KEY=${opts?.openAiKey}\n`;
  }

  if (opts?.embeddingModel) {
    content += `EMBEDDING_MODEL=${opts?.embeddingModel}\n`;
  }
  if ((opts?.dataSource?.config as FileSourceConfig)?.useLlamaParse) {
    if (opts?.llamaCloudKey) {
      content += `LLAMA_CLOUD_API_KEY=${opts?.llamaCloudKey}\n`;
    } else {
      content += `# Please obtain the Llama Cloud API key from https://cloud.llamaindex.ai/api-key
# and set it to the LLAMA_CLOUD_API_KEY variable below.
# LLAMA_CLOUD_API_KEY=`;
    }
  }
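
  // Add placeholders for the selected vector database's connection settings.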
case "mongo": {
content += `# For generating a connection URI, see https://www.mongodb.com/docs/guides/atlas/connection-string\n`;
content += `MONGODB_DATABASE=\n`;
content += `MONGODB_VECTORS=\n`;
content += `MONGODB_VECTOR_INDEX=\n`;
break;
}
case "pg": {
content += `# For generating a connection URI, see https://docs.timescale.com/use-timescale/latest/services/create-a-service\n`;
content += `PG_CONNECTION_STRING=\n`;
break;
}
case "pinecone": {
content += `PINECONE_API_KEY=\n`;
content += `PINECONE_ENVIRONMENT=\n`;
content += `PINECONE_INDEX_NAME=\n`;
break;
}
}
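
  // Add settings for the selected data source (currently only the web loader
  // needs extra configuration).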
  switch (opts?.dataSource?.type) {
    case "web": {
      const webConfig = opts?.dataSource.config as WebSourceConfig;
      content += `# web loader config\n`;
      content += `BASE_URL=${webConfig.baseUrl}\n`;
      content += `URL_PREFIX=${webConfig.baseUrl}\n`;
      content += `MAX_DEPTH=${webConfig.depth}\n`;
      break;
    }
  }
  if (content) {
    await fs.writeFile(path.join(root, envFileName), content);
    console.log(`Created '${envFileName}' file. Please check the settings.`);
  }
};
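
// Builds the context data (vector index) by running the template's generate
// script, but only when the required keys are already configured; otherwise it
// prints instructions telling the user what to set before running it manually.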
// eslint-disable-next-line max-params
async function generateContextData(
  framework: TemplateFramework,
  packageManager?: PackageManager,
  openAiKey?: string,
  vectorDb?: TemplateVectorDB,
  dataSource?: TemplateDataSource,
  llamaCloudKey?: string,
) {
  const runGenerate = `${cyan(
    framework === "fastapi"
      ? "poetry run python app/engine/generate.py"
      : `${packageManager} run generate`,
  )}`;
  const openAiKeyConfigured = openAiKey || process.env["OPENAI_API_KEY"];
  const llamaCloudKeyConfigured = (dataSource?.config as FileSourceConfig)
    ?.useLlamaParse
    ? llamaCloudKey || process.env["LLAMA_CLOUD_API_KEY"]
    : true;
  const hasVectorDb = vectorDb && vectorDb !== "none";
  if (framework === "fastapi") {
    if (
      openAiKeyConfigured &&
      llamaCloudKeyConfigured &&
      !hasVectorDb &&
      isHavingPoetryLockFile()
    ) {
      console.log(`Running ${runGenerate} to generate the context data.`);
      const result = tryPoetryRun("python app/engine/generate.py");
      if (!result) {
        console.log(`Failed to run ${runGenerate}.`);
        process.exit(1);
      }
      console.log(`Generated context data`);
      return;
    }
  } else {
    if (openAiKeyConfigured && vectorDb === "none") {
      console.log(`Running ${runGenerate} to generate the context data.`);
      await callPackageManager(packageManager, true, ["run", "generate"]);
      return;
    }
    const settings = [];
    if (!openAiKeyConfigured) settings.push("your OpenAI key");
    if (!llamaCloudKeyConfigured) settings.push("your Llama Cloud key");
    if (hasVectorDb) settings.push("your Vector DB environment variables");
    const settingsMessage =
      settings.length > 0 ? `After setting ${settings.join(" and ")}, ` : "";
    const generateMessage = `run ${runGenerate} to generate the context data.`;
    console.log(`\n${settingsMessage}${generateMessage}\n\n`);
  }
}
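
// Copies the user's data source (a single file or a folder) into the
// project's `data` directory so the generate script can pick it up.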
const copyContextData = async (
  root: string,
  dataSource?: TemplateDataSource,
) => {
  const destPath = path.join(root, "data");

  const dataSourceConfig = dataSource?.config as FileSourceConfig;

  // Copy file
  if (dataSource?.type === "file") {
    if (dataSourceConfig.path) {
      console.log(`\nCopying file to ${cyan(destPath)}\n`);
      await fs.mkdir(destPath, { recursive: true });
      await fs.copyFile(
        dataSourceConfig.path,
        path.join(destPath, path.basename(dataSourceConfig.path)),
      );
    } else {
      console.log("Missing file path in config");
      process.exit(1);
    }
    return;
  }
  // Copy folder
  if (dataSource?.type === "folder") {
    const srcPath =
      dataSourceConfig.path ?? path.join(templatesDir, "components", "data");
    console.log(`\nCopying data to ${cyan(destPath)}\n`);
    await copy("**", destPath, {
      parents: true,
      cwd: srcPath,
    });
    return;
  }
};
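
// Downloads a community project from the community repo on GitHub and
// extracts it into the target directory.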
const installCommunityProject = async ({
  root,
  communityProjectPath,
}: Pick<InstallTemplateArgs, "root" | "communityProjectPath">) => {
  console.log("\nInstalling community project:", communityProjectPath!);
  await downloadAndExtractRepo(root, {
    username: COMMUNITY_OWNER,
    name: COMMUNITY_REPO,
    branch: "main",
    filePath: communityProjectPath!,
  });
};
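
// Entry point: installs the selected template into `props.root`. Community
// projects and llama-packs are downloaded as-is; otherwise the Python or
// TypeScript template is installed and, for a backend, the .env file and the
// context data are set up as well. Rough usage sketch (`answers` is a
// hypothetical object holding the InstallTemplateArgs collected by the CLI
// prompts):
//
//   await installTemplate({ ...answers, backend: true });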
export const installTemplate = async (
  props: InstallTemplateArgs & { backend: boolean },
) => {
  process.chdir(props.root);

  if (props.template === "community" && props.communityProjectPath) {
    await installCommunityProject(props);
    return;
  }

  if (props.template === "llamapack" && props.llamapack) {
    await installLlamapackProject(props);
    return;
  }

  if (props.framework === "fastapi") {
    await installPythonTemplate(props);
  } else {
    await installTSTemplate(props);
  }

  if (props.backend) {
    // This is a backend, so we need to copy the test data and create the env file.

    // Copy the environment file to the target directory.
    await createEnvLocalFile(props.root, {
      openAiKey: props.openAiKey,
      llamaCloudKey: props.llamaCloudKey,
      vectorDb: props.vectorDb,
      model: props.model,
      embeddingModel: props.embeddingModel,
      dataSource: props.dataSource,
    });

    await copyContextData(props.root, props.dataSource);
    if (
      props.postInstallAction === "runApp" ||
      props.postInstallAction === "dependencies"
    ) {
      await generateContextData(
        props.framework,
        props.packageManager,
        props.openAiKey,
        props.vectorDb,
        props.dataSource,
        props.llamaCloudKey,
      );
    }
  } else {
    // This is a frontend for a full-stack app, so just create an .env file with the model information.
    const content = `MODEL=${props.model}\nNEXT_PUBLIC_MODEL=${props.model}\n`;
    await fs.writeFile(path.join(props.root, ".env"), content);
  }
};