Newer
Older
import { execSync } from "child_process";
import ciInfo from "ci-info";
import fs from "fs";
import path from "path";
import { blue, green, red } from "picocolors";
import prompts from "prompts";
import { InstallAppArgs } from "./create-app";
TemplateDataSourceType,
TemplateFramework,
} from "./helpers";
import { COMMUNITY_OWNER, COMMUNITY_REPO } from "./helpers/constant";
Marcus Schiesser
committed
import { EXAMPLE_FILE } from "./helpers/datasources";
import { templatesDir } from "./helpers/dir";
import { getAvailableLlamapackOptions } from "./helpers/llama-pack";
Thuc Pham
committed
import { askModelConfig } from "./helpers/providers";
import { getProjectOptions } from "./helpers/repo";
import {
supportedTools,
toolRequiresConfig,
toolsRequireConfig,
} from "./helpers/tools";
Huu Le (Lee)
committed
export type QuestionArgs = Omit<
InstallAppArgs,
"appPath" | "packageManager"
askModels?: boolean;
Huu Le (Lee)
committed
// File extensions accepted by the local-file data source; checked against
// path.extname() of each picked file in selectLocalContextData, and listed
// verbatim in the "Use local files" prompt choice.
const supportedContextFileTypes = [
".pdf",
".doc",
".docx",
".xls",
".xlsx",
".csv",
];
const MACOS_FILE_SELECTION_SCRIPT = `
osascript -l JavaScript -e '
a = Application.currentApplication();
a.includeStandardAdditions = true;
a.chooseFile({ withPrompt: "Please select files to process:", multipleSelectionsAllowed: true }).map(file => file.toString())
Huu Le (Lee)
committed
// macOS: JXA one-liner opening the native multi-select folder picker; prints
// the chosen POSIX paths the same way as the file-picker script above.
// NOTE(review): two VCS-blame residue lines sat INSIDE the template literal in
// the original (between the chooseFolder call and the closing `'`), which
// would have been executed as part of the shell command; removed here.
const MACOS_FOLDER_SELECTION_SCRIPT = `
osascript -l JavaScript -e '
a = Application.currentApplication();
a.includeStandardAdditions = true;
a.chooseFolder({ withPrompt: "Please select folders to process:", multipleSelectionsAllowed: true }).map(folder => folder.toString())
'`;
const WINDOWS_FILE_SELECTION_SCRIPT = `
Add-Type -AssemblyName System.Windows.Forms
$openFileDialog = New-Object System.Windows.Forms.OpenFileDialog
$openFileDialog.InitialDirectory = [Environment]::GetFolderPath('Desktop')
$openFileDialog.Multiselect = $true
$result = $openFileDialog.ShowDialog()
if ($result -eq 'OK') {
Huu Le (Lee)
committed
// Windows: PowerShell script opening the native folder-browser dialog; prints
// the selected folder path on OK. (Assembly name casing differs from the file
// script above — harmless, Add-Type -AssemblyName is case-insensitive.)
const WINDOWS_FOLDER_SELECTION_SCRIPT = `
Add-Type -AssemblyName System.windows.forms
$folderBrowser = New-Object System.Windows.Forms.FolderBrowserDialog
$dialogResult = $folderBrowser.ShowDialog()
if ($dialogResult -eq [System.Windows.Forms.DialogResult]::OK)
{
$folderBrowser.SelectedPath
}
`;
// Fallback answers used by getPrefOrDefault (in askQuestions) when neither
// CLI flags nor saved preferences supply a value — primarily the CI path.
// NOTE(review): fields read via getPrefOrDefault elsewhere (postInstallAction,
// dataSources, observability, vectorDb, tools, useLlamaParse, llamaCloudKey)
// have no entry here and so fall back to undefined — verify whether entries
// were lost from this object.
const defaults: Omit<QuestionArgs, "modelConfig"> = {
template: "streaming",
framework: "nextjs",
ui: "shadcn",
frontend: false,
communityProjectConfig: undefined,
};
// Shared `prompts` options object: when the user cancels (Ctrl-C / Esc),
// abort the entire CLI run instead of continuing with partial answers.
export const questionHandlers = {
  onCancel() {
    console.error("Exiting.");
    process.exit(1);
  },
};
const getVectorDbChoices = (framework: TemplateFramework) => {
const choices = [
{
title: "No, just store the data in the file system",
value: "none",
},
{ title: "MongoDB", value: "mongo" },
{ title: "PostgreSQL", value: "pg" },
{ title: "Pinecone", value: "pinecone" },
{ title: "Milvus", value: "milvus" },
{ title: "ChromaDB", value: "chroma" },
{ title: "Weaviate", value: "weaviate" },
const vectordbLang = framework === "fastapi" ? "python" : "typescript";
const compPath = path.join(templatesDir, "components");
const vectordbPath = path.join(compPath, "vectordbs", vectordbLang);
const availableChoices = fs
.readdirSync(vectordbPath)
.filter((file) => fs.statSync(path.join(vectordbPath, file)).isDirectory());
const displayedChoices = choices.filter((choice) =>
availableChoices.includes(choice.value),
);
return displayedChoices;
};
// Builds the data-source prompt choices, shaped by framework, template, and
// which sources are already selected (LlamaCloud is exclusive; "No" appears
// once something is selected; file/folder pickers are skipped on Linux).
// NOTE(review): this block is corrupted by VCS-blame residue — the parameter
// list is truncated (no closing ") => {", and a `template` parameter is
// referenced below but not declared), and the `choices` array initializer is
// missing. Reconstruct from upstream history before compiling.
export const getDataSourceChoices = (
framework: TemplateFramework,
selectedDataSource: TemplateDataSource[],
// If LlamaCloud is already selected, don't show any other options
if (selectedDataSource.find((s) => s.type === "llamacloud")) {
return [];
}
if (selectedDataSource.length > 0) {
// NOTE(review): next 2 lines are blame residue, not code; a
// `const choices = [...]` initializer appears to have been lost near here.
Huu Le (Lee)
committed
choices.push({
title: "No",
value: "no",
});
}
if (selectedDataSource === undefined || selectedDataSource.length === 0) {
if (template !== "multiagent") {
choices.push({
// NOTE(review): next 2 lines are blame residue; the body of the push above
// (and its closing) was lost — the example-PDF push below may belong to a
// separate choices.push call.
Huu Le (Lee)
committed
choices.push({
title:
process.platform !== "linux"
? "Use an example PDF"
: "Use an example PDF (you can add your own data files later)",
value: "exampleFile",
});
}
// Linux has many distros so we won't support file/folder picker for now
if (process.platform !== "linux") {
choices.push(
{
title: `Use local files (${supportedContextFileTypes.join(", ")})`,
value: "file",
},
{
title:
process.platform === "win32"
? "Use a local folder"
: "Use local folders",
value: "folder",
},
);
}
// Web scraping and database sources are Python-only (FastAPI backend).
if (framework === "fastapi" && template !== "extractor") {
choices.push({
title: "Use website content (requires Chrome)",
value: "web",
});
choices.push({
title: "Use data from a database (Mysql, PostgreSQL)",
value: "db",
});
// NOTE(review): next 2 lines are blame residue, not code.
Huu Le (Lee)
committed
}
// Offer LlamaCloud only as the first data source (it is exclusive).
if (!selectedDataSource.length && template !== "extractor") {
choices.push({
title: "Use managed index from LlamaCloud",
value: "llamacloud",
});
}
// NOTE(review): next 2 lines are blame residue, not code.
Huu Le (Lee)
committed
return choices;
};
// Opens a native OS file/folder picker (PowerShell on Windows, osascript on
// macOS; Linux unsupported) and validates each picked file's extension against
// supportedContextFileTypes, exiting the process on any failure.
// NOTE(review): corrupted by VCS-blame residue — the `try {` matching the
// `catch` below is missing, as is (presumably) a `return paths;` after the
// validation loop. Reconstruct from upstream history before compiling.
const selectLocalContextData = async (type: TemplateDataSourceType) => {
// NOTE(review): next 2 lines are blame residue, not code.
Huu Le (Lee)
committed
let selectedPath: string = "";
let execScript: string;
let execOpts: any = {};
switch (process.platform) {
case "win32": // Windows
// NOTE(review): next 2 lines are blame residue, not code.
Huu Le (Lee)
committed
execScript =
type === "file"
? WINDOWS_FILE_SELECTION_SCRIPT
: WINDOWS_FOLDER_SELECTION_SCRIPT;
execOpts = { shell: "powershell.exe" };
break;
case "darwin": // MacOS
// NOTE(review): next 2 lines are blame residue, not code.
Huu Le (Lee)
committed
execScript =
type === "file"
? MACOS_FILE_SELECTION_SCRIPT
: MACOS_FOLDER_SELECTION_SCRIPT;
break;
default: // Unsupported OS
console.log(red("Unsupported OS error!"));
process.exit(1);
}
// NOTE(review): next 2 lines are blame residue; a `try {` opener for the
// catch below appears to have been lost here.
Huu Le (Lee)
committed
selectedPath = execSync(execScript, execOpts).toString().trim();
// Windows prints one path per line; macOS joins paths with ", ".
const paths =
process.platform === "win32"
? selectedPath.split("\r\n")
: selectedPath.split(", ");
for (const p of paths) {
if (
!supportedContextFileTypes.includes(path.extname(p))
) {
// NOTE(review): next 2 lines are blame residue, not code.
Huu Le (Lee)
committed
console.log(
red(
`Please select a supported file type: ${supportedContextFileTypes}`,
),
);
process.exit(1);
}
// NOTE(review): a `return paths;` (and the try-block close) presumably
// belongs between the loop and the catch below — verify against upstream.
} catch (error) {
console.log(
red(
// NOTE(review): next 2 lines are blame residue, not code.
Huu Le (Lee)
committed
"Got an error when trying to select local context data! Please try again or select another data source option.",
),
);
process.exit(1);
}
};
// prompts onState hook: when the user aborts mid-prompt, restore the terminal
// cursor before exiting — prompts hides it, and without this the shell would
// be left with an invisible cursor.
export const onPromptState = (state: any) => {
  if (!state.aborted) return;
  // Show-cursor escape sequence, then a fresh line, then bail out.
  process.stdout.write("\x1B[?25h");
  process.stdout.write("\n");
  process.exit(1);
};
// Top-level interactive flow: walks the user through template, framework,
// frontend, observability, model, data-source, LlamaParse/LlamaCloud,
// vector-DB, tool, and post-install questions, mutating both `program`
// (effective config) and `preferences` (remembered answers). CI runs take
// defaults via getPrefOrDefault instead of prompting.
// NOTE(review): this block contains extensive VCS-blame residue ("<name> /
// committed" lines and a run of bare line numbers) and several statements,
// call-closers, and braces are missing. Kept byte-identical below with
// markers; reconstruct the lost pieces from upstream history before compiling.
export const askQuestions = async (
program: QuestionArgs,
preferences: QuestionArgs,
openAiKey?: string,
) => {
// Resolve an answer: saved preference first, then the module-level defaults.
const getPrefOrDefault = <K extends keyof Omit<QuestionArgs, "modelConfig">>(
field: K,
): Omit<QuestionArgs, "modelConfig">[K] =>
preferences[field] ?? defaults[field];
// Ask for next action after installation
async function askPostInstallAction() {
if (program.postInstallAction === undefined) {
if (ciInfo.isCI) {
program.postInstallAction = getPrefOrDefault("postInstallAction");
} else {
// NOTE(review): an array opener (e.g. `const actionChoices = [`) appears
// to have been lost here — the `];` below closes it and `actionChoices`
// is referenced further down.
{
title: "Just generate code (~1 sec)",
value: "none",
},
{
title: "Start in VSCode (~1 sec)",
value: "VSCode",
},
{
title: "Generate code and install dependencies (~2 min)",
value: "dependencies",
},
];
if (program.template !== "multiagent") {
const modelConfigured =
!program.llamapack && program.modelConfig.isConfigured();
// If using LlamaParse, require LlamaCloud API key
const llamaCloudKeyConfigured = program.useLlamaParse
? program.llamaCloudKey || process.env["LLAMA_CLOUD_API_KEY"]
: true;
const hasVectorDb = program.vectorDb && program.vectorDb !== "none";
// Can run the app if all tools do not require configuration
if (
!hasVectorDb &&
modelConfigured &&
llamaCloudKeyConfigured &&
!toolsRequireConfig(program.tools)
) {
actionChoices.push({
title:
"Generate code, install dependencies, and run the app (~2 min)",
value: "runApp",
});
}
}
const { action } = await prompts(
{
type: "select",
name: "action",
message: "How would you like to proceed?",
choices: actionChoices,
initial: 1,
},
questionHandlers,
);
program.postInstallAction = action;
}
}
}
if (!program.template) {
if (ciInfo.isCI) {
program.template = getPrefOrDefault("template");
} else {
const styledRepo = blue(
`https://github.com/${COMMUNITY_OWNER}/${COMMUNITY_REPO}`,
);
const { template } = await prompts(
{
type: "select",
name: "template",
message: "Which template would you like to use?",
choices: [
{ title: "Agentic RAG (single agent)", value: "streaming" },
{
title: "Multi-agent app (using llama-agents)",
value: "multiagent",
},
{ title: "Structured Extractor", value: "extractor" },
{
title: `Community template from ${styledRepo}`,
value: "community",
},
{
title: "Example using a LlamaPack",
value: "llamapack",
},
],
},
questionHandlers,
);
program.template = template;
preferences.template = template;
}
}
if (program.template === "community") {
const projectOptions = await getProjectOptions(
COMMUNITY_OWNER,
COMMUNITY_REPO,
);
const { communityProjectConfig } = await prompts(
{
type: "select",
name: "communityProjectConfig",
message: "Select community template",
choices: projectOptions.map(({ title, value }) => ({
title,
value: JSON.stringify(value), // serialize value to string in terminal
})),
initial: 0,
},
questionHandlers,
);
const projectConfig = JSON.parse(communityProjectConfig);
program.communityProjectConfig = projectConfig;
preferences.communityProjectConfig = projectConfig;
return; // early return - no further questions needed for community projects
}
if (program.template === "llamapack") {
const availableLlamaPacks = await getAvailableLlamapackOptions();
const { llamapack } = await prompts(
{
type: "select",
name: "llamapack",
message: "Select LlamaPack",
choices: availableLlamaPacks.map((pack) => ({
title: pack.name,
value: pack.folderPath,
})),
initial: 0,
},
questionHandlers,
);
program.llamapack = llamapack;
preferences.llamapack = llamapack;
await askPostInstallAction();
return; // early return - no further questions needed for llamapack projects
}
if (program.template === "multiagent") {
// TODO: multi-agents currently only supports FastAPI
program.framework = preferences.framework = "fastapi";
} else if (program.template === "extractor") {
// Extractor template only supports FastAPI, empty data sources, and llamacloud
// So we just use example file for extractor template, this allows user to choose vector database later
program.dataSources = [EXAMPLE_FILE];
program.framework = preferences.framework = "fastapi";
// NOTE(review): a closing `}` for the else-if branch above appears to be
// missing before this framework question.
if (!program.framework) {
if (ciInfo.isCI) {
program.framework = getPrefOrDefault("framework");
} else {
const choices = [
{ title: "NextJS", value: "nextjs" },
{ title: "Express", value: "express" },
{ title: "FastAPI (Python)", value: "fastapi" },
];
const { framework } = await prompts(
{
type: "select",
name: "framework",
message: "Which framework would you like to use?",
choices,
initial: 0,
},
questionHandlers,
);
program.framework = framework;
preferences.framework = framework;
}
}
if (
(program.framework === "express" || program.framework === "fastapi") &&
program.template === "streaming"
) {
// if a backend-only framework is selected, ask whether we should create a frontend
if (program.frontend === undefined) {
if (ciInfo.isCI) {
program.frontend = getPrefOrDefault("frontend");
} else {
const styledNextJS = blue("NextJS");
const styledBackend = green(
program.framework === "express"
? "Express "
: program.framework === "fastapi"
? "FastAPI (Python) "
: "",
);
const { frontend } = await prompts({
onState: onPromptState,
type: "toggle",
name: "frontend",
message: `Would you like to generate a ${styledNextJS} frontend for your ${styledBackend}backend?`,
initial: getPrefOrDefault("frontend"),
active: "Yes",
inactive: "No",
});
program.frontend = Boolean(frontend);
preferences.frontend = Boolean(frontend);
}
}
} else {
program.frontend = false;
}
if (program.framework === "nextjs" || program.frontend) {
if (!program.ui) {
program.ui = defaults.ui;
}
}
if (!program.observability && program.template === "streaming") {
if (ciInfo.isCI) {
program.observability = getPrefOrDefault("observability");
} else {
const { observability } = await prompts(
{
type: "select",
name: "observability",
message: "Would you like to set up observability?",
choices: [
{ title: "No", value: "none" },
...(program.framework === "fastapi"
? [{ title: "LlamaTrace", value: "llamatrace" }]
: []),
{ title: "Traceloop", value: "traceloop" },
],
initial: 0,
},
questionHandlers,
// NOTE(review): the prompts() call above is missing its closing `);`.
program.observability = observability;
preferences.observability = observability;
// NOTE(review): closing braces for the observability if/else appear to be
// missing before the model-config question below.
if (!program.modelConfig) {
const modelConfig = await askModelConfig({
openAiKey,
askModels: program.askModels ?? false,
framework: program.framework,
});
program.modelConfig = modelConfig;
preferences.modelConfig = modelConfig;
// NOTE(review): next 2 lines are blame residue, not code.
Marcus Schiesser
committed
if (!program.dataSources) {
program.dataSources = getPrefOrDefault("dataSources");
// NOTE(review): next 2 lines are blame residue; a loop opener (e.g.
// `while (true) {`) appears to have been lost before the data-source
// question below — note the `break` statements further down.
Marcus Schiesser
committed
// continue asking user for data sources if none are initially provided
const firstQuestion = program.dataSources.length === 0;
const choices = getDataSourceChoices(
program.framework,
program.dataSources,
);
if (choices.length === 0) break;
const { selectedSource } = await prompts(
{
type: "select",
name: "selectedSource",
message: firstQuestion
? "Which data source would you like to use?"
: "Would you like to add another data source?",
// NOTE(review): the prompt object is missing its `choices`/closer before
// questionHandlers, and the prompts() call is missing its `);`.
questionHandlers,
// NOTE(review): next 2 lines are blame residue, not code.
Marcus Schiesser
committed
if (selectedSource === "no" || selectedSource === "none") {
// user doesn't want another data source or any data source
// NOTE(review): the following bare numbers are blame-view residue (original
// line numbers 551-589), not code; the real statements they replaced
// (likely a `break;` and surrounding control flow) are lost.
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
switch (selectedSource) {
case "exampleFile": {
program.dataSources.push(EXAMPLE_FILE);
break;
}
case "file":
case "folder": {
const selectedPaths = await selectLocalContextData(selectedSource);
for (const p of selectedPaths) {
program.dataSources.push({
type: "file",
config: {
path: p,
},
});
}
break;
}
case "web": {
const { baseUrl } = await prompts(
{
type: "text",
name: "baseUrl",
message: "Please provide base URL of the website: ",
initial: "https://www.llamaindex.ai",
validate: (value: string) => {
if (!value.includes("://")) {
value = `https://${value}`;
}
const urlObj = new URL(value);
if (
urlObj.protocol !== "https:" &&
urlObj.protocol !== "http:"
) {
return `URL=${value} has invalid protocol, only allow http or https`;
}
return true;
},
},
questionHandlers,
// NOTE(review): next 2 lines are blame residue; the prompts() call's `);`
// and a `program.dataSources.push({ type: "web", ...` opener for the
// config object below appear to have been lost.
Huu Le (Lee)
committed
config: {
baseUrl,
prefix: baseUrl,
depth: 1,
// NOTE(review): next 2 lines are blame residue; the push call's closers
// and this case's `break;` appear to have been lost.
Huu Le (Lee)
committed
},
case "db": {
const dbPrompts: prompts.PromptObject<string>[] = [
{
type: "text",
name: "uri",
message:
"Please enter the connection string (URI) for the database.",
initial: "mysql+pymysql://user:pass@localhost:3306/mydb",
validate: (value: string) => {
if (!value) {
return "Please provide a valid connection string";
} else if (
!(
value.startsWith("mysql+pymysql://") ||
value.startsWith("postgresql+psycopg://")
)
) {
return "The connection string must start with 'mysql+pymysql://' for MySQL or 'postgresql+psycopg://' for PostgreSQL";
}
return true;
},
// NOTE(review): next 2 lines are blame residue, not code.
Huu Le (Lee)
committed
},
// Only ask for a query, user can provide more complex queries in the config file later
{
type: (prev) => (prev ? "text" : null),
name: "queries",
message: "Please enter the SQL query to fetch data:",
initial: "SELECT * FROM mytable",
},
];
program.dataSources.push({
type: "db",
config: await prompts(dbPrompts, questionHandlers),
// NOTE(review): the push call's closers and this case's `break;` appear to
// have been lost before the llamacloud case below.
case "llamacloud": {
program.dataSources.push({
type: "llamacloud",
config: {},
});
program.dataSources.push(EXAMPLE_FILE);
break;
}
// NOTE(review): next 2 lines are blame residue, not code.
Huu Le (Lee)
committed
}
}
const isUsingLlamaCloud = program.dataSources.some(
(ds) => ds.type === "llamacloud",
);
// Asking for LlamaParse if user selected file data source
if (isUsingLlamaCloud) {
// default to use LlamaParse if using LlamaCloud
program.useLlamaParse = preferences.useLlamaParse = true;
} else {
// Extractor template doesn't support LlamaParse and LlamaCloud right now (cannot use asyncio loop in Reflex)
if (
program.useLlamaParse === undefined &&
program.template !== "extractor"
) {
// if already set useLlamaParse, don't ask again
if (program.dataSources.some((ds) => ds.type === "file")) {
if (ciInfo.isCI) {
program.useLlamaParse = getPrefOrDefault("useLlamaParse");
} else {
const { useLlamaParse } = await prompts(
{
type: "toggle",
name: "useLlamaParse",
message:
"Would you like to use LlamaParse (improved parser for RAG - requires API key)?",
initial: false,
active: "yes",
inactive: "no",
},
questionHandlers,
);
program.useLlamaParse = useLlamaParse;
preferences.useLlamaParse = useLlamaParse;
}
}
}
}
// Ask for LlamaCloud API key when using a LlamaCloud index or LlamaParse
if (isUsingLlamaCloud || program.useLlamaParse) {
if (!program.llamaCloudKey) {
// if already set, don't ask again
if (ciInfo.isCI) {
program.llamaCloudKey = getPrefOrDefault("llamaCloudKey");
} else {
// Ask for LlamaCloud API key
const { llamaCloudKey } = await prompts(
{
type: "text",
name: "llamaCloudKey",
message:
"Please provide your LlamaCloud API key (leave blank to skip):",
},
questionHandlers,
);
program.llamaCloudKey = preferences.llamaCloudKey =
llamaCloudKey || process.env.LLAMA_CLOUD_API_KEY;
}
// NOTE(review): closing braces for the llamaCloudKey block appear to be
// missing before the vector-DB selection below.
if (isUsingLlamaCloud) {
// When using a LlamaCloud index, don't ask for vector database and use code in `llamacloud` folder for vector database
const vectorDb = "llamacloud";
program.vectorDb = vectorDb;
preferences.vectorDb = vectorDb;
} else if (program.dataSources.length > 0 && !program.vectorDb) {
// NOTE(review): next 2 lines are blame residue, not code.
Huu Le (Lee)
committed
if (ciInfo.isCI) {
program.vectorDb = getPrefOrDefault("vectorDb");
} else {
const { vectorDb } = await prompts(
// NOTE(review): next 2 lines are blame residue; the prompt object's `{`
// opener was lost here.
Huu Le (Lee)
committed
type: "select",
name: "vectorDb",
message: "Would you like to use a vector database?",
choices: getVectorDbChoices(program.framework),
initial: 0,
questionHandlers,
// NOTE(review): next 2 lines are blame residue; the prompts() call's
// closers were lost here.
Huu Le (Lee)
committed
program.vectorDb = vectorDb;
preferences.vectorDb = vectorDb;
}
if (!program.tools && program.template === "streaming") {
// TODO: allow to select tools also for multi-agent framework
if (ciInfo.isCI) {
program.tools = getPrefOrDefault("tools");
} else {
const options = supportedTools.filter((t) =>
t.supportedFrameworks?.includes(program.framework),
);
const toolChoices = options.map((tool) => ({
title: `${tool.display}${toolRequiresConfig(tool) ? " (needs configuration)" : ""}`,
// NOTE(review): the map callback's object (likely a `value:` field) and its
// closers were lost here, along with the multiselect `type` of the prompt.
const { toolsName } = await prompts({
name: "toolsName",
message:
"Would you like to build an agent using tools? If so, select the tools here, otherwise just press enter",
choices: toolChoices,
});
const tools = toolsName?.map((tool: string) =>
supportedTools.find((t) => t.name === tool),
);
program.tools = tools;
preferences.tools = tools;
}
}
};
// Wraps a plain string as a `prompts` choice whose title and value coincide.
export const toChoice = (value: string) => ({ title: value, value });