import fsExtra from "fs-extra";
import path from "path";
import { cyan } from "picocolors";

import { callPackageManager } from "./install";
import { writeLoadersConfig } from "./datasources";
import { createBackendEnvFile, createFrontendEnvFile } from "./env-variables";
import { PackageManager } from "./get-pkg-manager";
import { installLlamapackProject } from "./llama-pack";
import { makeDir } from "./make-dir";
import { isHavingPoetryLockFile, tryPoetryRun } from "./poetry";
import { installPythonTemplate } from "./python";
import { downloadAndExtractRepo } from "./repo";
import { ConfigFileType, writeToolsConfig } from "./tools";
import {
  FileSourceConfig,
  InstallTemplateArgs,
  ModelConfig,
  TemplateDataSource,
  TemplateFramework,
  TemplateVectorDB,
} from "./types";
import { installTSTemplate } from "./typescript";
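
// Check which settings (model provider API key, LlamaCloud key, vector DB
// environment variables) are still missing before the generate script can run.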
const checkForGenerateScript = (
modelConfig: ModelConfig,
vectorDb?: TemplateVectorDB,
llamaCloudKey?: string,
useLlamaParse?: boolean,
) => {
const missingSettings = [];
if (!modelConfig.isConfigured()) {
missingSettings.push("your model provider API key");
}
const llamaCloudApiKey = llamaCloudKey ?? process.env["LLAMA_CLOUD_API_KEY"];
const isRequiredLlamaCloudKey = useLlamaParse || vectorDb === "llamacloud";
if (isRequiredLlamaCloudKey && !llamaCloudApiKey) {
missingSettings.push("your LLAMA_CLOUD_API_KEY");
}
if (vectorDb !== "none" && vectorDb !== "llamacloud") {
missingSettings.push("your Vector DB environment variables");
}
return missingSettings;
};
// eslint-disable-next-line max-params
async function generateContextData(
framework: TemplateFramework,
modelConfig: ModelConfig,
packageManager?: PackageManager,
vectorDb?: TemplateVectorDB,
llamaCloudKey?: string,
  useLlamaParse?: boolean,
) {
  const runGenerate = `${cyan(
    framework === "fastapi"
      ? "poetry run generate"
      : `${packageManager} run generate`,
  )}`;
const missingSettings = checkForGenerateScript(
modelConfig,
vectorDb,
llamaCloudKey,
useLlamaParse,
);
if (!missingSettings.length) {
// If all the required environment variables are set, run the generate script
if (framework === "fastapi") {
if (isHavingPoetryLockFile()) {
console.log(`Running ${runGenerate} to generate the context data.`);
const result = tryPoetryRun("poetry run generate");
if (!result) {
console.log(`Failed to run ${runGenerate}.`);
process.exit(1);
}
console.log(`Generated context data`);
return;
      }
    } else {
      console.log(`Running ${runGenerate} to generate the context data.`);
      await callPackageManager(packageManager, true, ["run", "generate"]);
      return;
    }
  }

  const settingsMessage = `After setting ${missingSettings.join(" and ")}, run ${runGenerate} to generate the context data.`;
  console.log(`\n${settingsMessage}\n\n`);
}
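
// Fetch a remote file and write it to destPath.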
const downloadFile = async (url: string, destPath: string) => {
const response = await fetch(url);
const fileBuffer = await response.arrayBuffer();
await fsExtra.writeFile(destPath, Buffer.from(fileBuffer));
};
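
// Gather all file data sources into the project's `data` directory:
// URL sources are downloaded, local paths are copied.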
const prepareContextData = async (
  root: string,
  dataSources: TemplateDataSource[],
) => {
await makeDir(path.join(root, "data"));
for (const dataSource of dataSources) {
const dataSourceConfig = dataSource?.config as FileSourceConfig;
// If the path is URLs, download the data and save it to the data directory
if ("url" in dataSourceConfig) {
console.log(
"Downloading file from URL:",
dataSourceConfig.url.toString(),
);
const destPath = path.join(
root,
"data",
dataSourceConfig.filename ??
path.basename(dataSourceConfig.url.toString()),
);
await downloadFile(dataSourceConfig.url.toString(), destPath);
} else {
// Copy local data
console.log("Copying data from path:", dataSourceConfig.path);
const destPath = path.join(
root,
"data",
path.basename(dataSourceConfig.path),
);
await fsExtra.copy(dataSourceConfig.path, destPath);
    }
  }
};
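
// Download a community project (optionally a sub-folder of its GitHub repo) into the project root.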
const installCommunityProject = async ({
root,
communityProjectConfig,
}: Pick<InstallTemplateArgs, "root" | "communityProjectConfig">) => {
const { owner, repo, branch, filePath } = communityProjectConfig!;
console.log("\nInstalling community project:", filePath || repo);
await downloadAndExtractRepo(root, {
username: owner,
name: repo,
branch,
    filePath: filePath || "",
  });
};
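
/**
 * Install the selected template (community project, LlamaPack, Python, or
 * TypeScript) into props.root and prepare its configuration and data.
 */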
export const installTemplate = async (
props: InstallTemplateArgs & { backend: boolean },
) => {
process.chdir(props.root);
if (props.template === "community" && props.communityProjectConfig) {
await installCommunityProject(props);
return;
}
if (props.template === "llamapack" && props.llamapack) {
await installLlamapackProject(props);
return;
}
if (props.framework === "fastapi") {
await installPythonTemplate(props);
if (props.vectorDb !== "llamacloud") {
// write loaders configuration (currently Python only)
// not needed for LlamaCloud as it has its own loaders
await writeLoadersConfig(
props.root,
props.dataSources,
props.useLlamaParse,
);
}
} else {
await installTSTemplate(props);
}
// write tools configuration
await writeToolsConfig(
props.root,
props.tools,
props.framework === "fastapi" ? ConfigFileType.YAML : ConfigFileType.JSON,
);
if (props.backend) {
// This is a backend, so we need to copy the test data and create the env file.
// Copy the environment file to the target directory.
    if (
      props.template === "streaming" ||
      props.template === "multiagent" ||
      props.template === "extractor"
    ) {
      await createBackendEnvFile(props.root, props);
    }

await prepareContextData(
props.root,
props.dataSources.filter((ds) => ds.type === "file"),
);
if (
props.dataSources.length > 0 &&
(props.postInstallAction === "runApp" ||
props.postInstallAction === "dependencies")
) {
console.log("\nGenerating context data...\n");
await generateContextData(
props.framework,
props.modelConfig,
props.packageManager,
props.vectorDb,
props.llamaCloudKey,
        props.useLlamaParse,
      );
    }

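    // Create the output directories expected by the generated app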
await makeDir(path.join(props.root, "output/tools"));
await makeDir(path.join(props.root, "output/uploaded"));
await makeDir(path.join(props.root, "output/llamacloud"));
} else {
// this is a frontend for a full-stack app, create .env file with model information
await createFrontendEnvFile(props.root, {
vectorDb: props.vectorDb,
    });
  }
};