// Interactive CLI question flow for scaffolding a LlamaIndex app.
import { execSync } from "child_process";
import ciInfo from "ci-info";
import fs from "fs";
import got from "got";
import ora from "ora";
import path from "path";
import { blue, green, red } from "picocolors";
import prompts from "prompts";
import { InstallAppArgs } from "./create-app";
import {
  TemplateDataSource,
  TemplateDataSourceType,
  TemplateFramework,
} from "./helpers";
import { COMMUNITY_OWNER, COMMUNITY_REPO } from "./helpers/constant";
import { EXAMPLE_FILE } from "./helpers/datasources";
import { templatesDir } from "./helpers/dir";
import { getAvailableLlamapackOptions } from "./helpers/llama-pack";
import { getProjectOptions } from "./helpers/repo";
import { supportedTools, toolsRequireConfig } from "./helpers/tools";
// Base URL of the OpenAI REST API; used to list available models.
const OPENAI_API_URL = "https://api.openai.com/v1";
// Answers collected by the interactive question flow. appPath and
// packageManager are resolved elsewhere, so they are omitted here.
export type QuestionArgs = Omit<
  InstallAppArgs,
  "appPath" | "packageManager"
> & {
  // When true (and an API key is available), fetch the model list from the
  // OpenAI API instead of using the hard-coded defaults.
  listServerModels?: boolean;
};
// File extensions accepted as local context data sources; selections with
// other extensions are rejected in selectLocalContextData.
const supportedContextFileTypes = [
  ".pdf",
  ".doc",
  ".docx",
  ".xls",
  ".xlsx",
  ".csv",
];
const MACOS_FILE_SELECTION_SCRIPT = `
osascript -l JavaScript -e '
a = Application.currentApplication();
a.includeStandardAdditions = true;
a.chooseFile({ withPrompt: "Please select files to process:", multipleSelectionsAllowed: true }).map(file => file.toString())
Huu Le (Lee)
committed
const MACOS_FOLDER_SELECTION_SCRIPT = `
osascript -l JavaScript -e '
a = Application.currentApplication();
a.includeStandardAdditions = true;
a.chooseFolder({ withPrompt: "Please select folders to process:", multipleSelectionsAllowed: true }).map(folder => folder.toString())
Huu Le (Lee)
committed
'`;
const WINDOWS_FILE_SELECTION_SCRIPT = `
Add-Type -AssemblyName System.Windows.Forms
$openFileDialog = New-Object System.Windows.Forms.OpenFileDialog
$openFileDialog.InitialDirectory = [Environment]::GetFolderPath('Desktop')
$openFileDialog.Multiselect = $true
$result = $openFileDialog.ShowDialog()
if ($result -eq 'OK') {
Huu Le (Lee)
committed
const WINDOWS_FOLDER_SELECTION_SCRIPT = `
Add-Type -AssemblyName System.windows.forms
$folderBrowser = New-Object System.Windows.Forms.FolderBrowserDialog
$dialogResult = $folderBrowser.ShowDialog()
if ($dialogResult -eq [System.Windows.Forms.DialogResult]::OK)
{
$folderBrowser.SelectedPath
}
`;
// Fallback answers used when a preference is not set (consumed via
// getPrefOrDefault in askQuestions, e.g. for non-interactive CI runs).
const defaults: QuestionArgs = {
  template: "streaming",
  framework: "nextjs",
  ui: "shadcn",
  frontend: false,
  model: "gpt-3.5-turbo",
  embeddingModel: "text-embedding-ada-002",
  communityProjectConfig: undefined,
};
/**
 * Shared options passed to every `prompts()` call: abort the whole CLI run
 * when the user cancels a question (e.g. Ctrl+C).
 */
const handlers = {
  onCancel() {
    console.error("Exiting.");
    process.exit(1);
  },
};
const getVectorDbChoices = (framework: TemplateFramework) => {
const choices = [
{
title: "No, just store the data in the file system",
value: "none",
},
{ title: "MongoDB", value: "mongo" },
{ title: "PostgreSQL", value: "pg" },
{ title: "Pinecone", value: "pinecone" },
{ title: "Milvus", value: "milvus" },
const vectordbLang = framework === "fastapi" ? "python" : "typescript";
const compPath = path.join(templatesDir, "components");
const vectordbPath = path.join(compPath, "vectordbs", vectordbLang);
const availableChoices = fs
.readdirSync(vectordbPath)
.filter((file) => fs.statSync(path.join(vectordbPath, file)).isDirectory());
const displayedChoices = choices.filter((choice) =>
availableChoices.includes(choice.value),
);
return displayedChoices;
};
export const getDataSourceChoices = (
framework: TemplateFramework,
selectedDataSource: TemplateDataSource[],
) => {
const choices = [];
if (selectedDataSource.length > 0) {
Huu Le (Lee)
committed
choices.push({
title: "No",
value: "no",
});
}
if (selectedDataSource === undefined || selectedDataSource.length === 0) {
choices.push({
title: "No data, just a simple chat",
value: "none",
Huu Le (Lee)
committed
});
choices.push({
title: "Use an example PDF",
value: "exampleFile",
});
}
choices.push(
{
title: `Use local files (${supportedContextFileTypes.join(", ")})`,
value: "file",
},
{
title:
process.platform === "win32"
? "Use a local folder"
: "Use local folders",
Huu Le (Lee)
committed
if (framework === "fastapi") {
choices.push({
title: "Use website content (requires Chrome)",
value: "web",
});
choices.push({
title: "Use data from a database (Mysql, PostgreSQL)",
value: "db",
});
Huu Le (Lee)
committed
}
return choices;
};
const selectLocalContextData = async (type: TemplateDataSourceType) => {
Huu Le (Lee)
committed
let selectedPath: string = "";
let execScript: string;
let execOpts: any = {};
switch (process.platform) {
case "win32": // Windows
Huu Le (Lee)
committed
execScript =
type === "file"
? WINDOWS_FILE_SELECTION_SCRIPT
: WINDOWS_FOLDER_SELECTION_SCRIPT;
execOpts = { shell: "powershell.exe" };
break;
case "darwin": // MacOS
Huu Le (Lee)
committed
execScript =
type === "file"
? MACOS_FILE_SELECTION_SCRIPT
: MACOS_FOLDER_SELECTION_SCRIPT;
break;
default: // Unsupported OS
console.log(red("Unsupported OS error!"));
process.exit(1);
}
Huu Le (Lee)
committed
selectedPath = execSync(execScript, execOpts).toString().trim();
const paths =
process.platform === "win32"
? selectedPath.split("\r\n")
: selectedPath.split(", ");
for (const p of paths) {
if (
!supportedContextFileTypes.includes(path.extname(p))
) {
Huu Le (Lee)
committed
console.log(
red(
`Please select a supported file type: ${supportedContextFileTypes}`,
),
);
process.exit(1);
}
} catch (error) {
console.log(
red(
Huu Le (Lee)
committed
"Got an error when trying to select local context data! Please try again or select another data source option.",
),
);
process.exit(1);
}
};
/**
 * `onState` hook for prompts: when the user aborts a question, restore the
 * terminal cursor (which prompts hides while active) before exiting.
 */
export const onPromptState = (state: any) => {
  if (!state.aborted) return;
  // Re-enable the terminal cursor; otherwise it stays hidden after exit.
  process.stdout.write("\x1B[?25h");
  process.stdout.write("\n");
  process.exit(1);
};
// NOTE(review): original lines 233-304 are missing from this excerpt
// (replaced by copy artifacts); restore them from version history.
/**
 * Builds the model choices for the model-selection prompts.
 *
 * When an API key is provided and `listServerModels` is set, the list is
 * fetched live from the OpenAI `/models` endpoint (filtered to chat models or
 * embedding models depending on `selectEmbedding`); otherwise a hard-coded
 * default list is returned. Exits the process when the live request fails.
 *
 * @param selectEmbedding true to list embedding models, false for LLMs
 * @param apiKey OpenAI API key used for the live request
 * @param listServerModels whether to query the server instead of defaults
 * @returns prompt choices of shape { title, value }
 */
const getAvailableModelChoices = async (
  selectEmbedding: boolean,
  apiKey?: string,
  listServerModels?: boolean,
) => {
  const defaultLLMModels = [
    "gpt-3.5-turbo-0125",
    "gpt-4-turbo-preview",
    "gpt-4",
    "gpt-4-vision-preview",
  ];
  const defaultEmbeddingModels = [
    "text-embedding-ada-002",
    "text-embedding-3-small",
    "text-embedding-3-large",
  ];
  const isLLMModel = (modelId: string) => modelId.startsWith("gpt");
  const isEmbeddingModel = (modelId: string) =>
    modelId.includes("embedding") || defaultEmbeddingModels.includes(modelId);

  if (apiKey && listServerModels) {
    const spinner = ora("Fetching available models").start();
    try {
      const response = await got(`${OPENAI_API_URL}/models`, {
        headers: {
          Authorization: "Bearer " + apiKey,
        },
        timeout: 5000,
        responseType: "json",
      });
      spinner.stop();
      // With responseType "json" the body is already parsed — no await needed.
      const data = response.body as any;
      return data.data
        .filter((model: any) =>
          selectEmbedding ? isEmbeddingModel(model.id) : isLLMModel(model.id),
        )
        .map((el: any) => {
          return {
            title: el.id,
            value: el.id,
          };
        });
    } catch (error) {
      spinner.stop();
      if ((error as any).response?.statusCode === 401) {
        console.log(
          red(
            "Invalid OpenAI API key provided! Please provide a valid key and try again!",
          ),
        );
      } else {
        console.log(red("Request failed: " + error));
      }
      process.exit(1);
    }
  }
  // Offline / no-key path: fall back to the static lists.
  const data = selectEmbedding ? defaultEmbeddingModels : defaultLLMModels;
  return data.map((model) => ({
    title: model,
    value: model,
  }));
};
export const askQuestions = async (
program: QuestionArgs,
preferences: QuestionArgs,
) => {
const getPrefOrDefault = <K extends keyof QuestionArgs>(
field: K,
): QuestionArgs[K] => preferences[field] ?? defaults[field];
// Ask for next action after installation
async function askPostInstallAction() {
if (program.postInstallAction === undefined) {
if (ciInfo.isCI) {
program.postInstallAction = getPrefOrDefault("postInstallAction");
} else {
{
title: "Just generate code (~1 sec)",
value: "none",
},
{
title: "Start in VSCode (~1 sec)",
value: "VSCode",
},
{
title: "Generate code and install dependencies (~2 min)",
value: "dependencies",
},
];
Huu Le (Lee)
committed
const openAiKeyConfigured =
program.openAiKey || process.env["OPENAI_API_KEY"];
// If using LlamaParse, require LlamaCloud API key
const llamaCloudKeyConfigured = program.useLlamaParse
Huu Le (Lee)
committed
? program.llamaCloudKey || process.env["LLAMA_CLOUD_API_KEY"]
: true;
const hasVectorDb = program.vectorDb && program.vectorDb !== "none";
// Can run the app if all tools do not require configuration
if (
!hasVectorDb &&
Huu Le (Lee)
committed
openAiKeyConfigured &&
llamaCloudKeyConfigured &&
!toolsRequireConfig(program.tools) &&
!program.llamapack
actionChoices.push({
title:
"Generate code, install dependencies, and run the app (~2 min)",
value: "runApp",
});
}
const { action } = await prompts(
{
type: "select",
name: "action",
message: "How would you like to proceed?",
choices: actionChoices,
initial: 1,
},
handlers,
);
program.postInstallAction = action;
}
}
}
if (!program.template) {
if (ciInfo.isCI) {
program.template = getPrefOrDefault("template");
} else {
const styledRepo = blue(
`https://github.com/${COMMUNITY_OWNER}/${COMMUNITY_REPO}`,
);
const { template } = await prompts(
{
type: "select",
name: "template",
message: "Which template would you like to use?",
choices: [
{ title: "Chat", value: "streaming" },
{
title: `Community template from ${styledRepo}`,
value: "community",
},
{
title: "Example using a LlamaPack",
value: "llamapack",
},
],
},
handlers,
);
program.template = template;
preferences.template = template;
}
}
if (program.template === "community") {
const projectOptions = await getProjectOptions(
COMMUNITY_OWNER,
COMMUNITY_REPO,
);
const { communityProjectConfig } = await prompts(
{
type: "select",
name: "communityProjectConfig",
message: "Select community template",
choices: projectOptions.map(({ title, value }) => ({
title,
value: JSON.stringify(value), // serialize value to string in terminal
})),
initial: 0,
},
);
const projectConfig = JSON.parse(communityProjectConfig);
program.communityProjectConfig = projectConfig;
preferences.communityProjectConfig = projectConfig;
return; // early return - no further questions needed for community projects
}
if (program.template === "llamapack") {
const availableLlamaPacks = await getAvailableLlamapackOptions();
const { llamapack } = await prompts(
{
type: "select",
name: "llamapack",
message: "Select LlamaPack",
choices: availableLlamaPacks.map((pack) => ({
title: pack.name,
value: pack.folderPath,
})),
initial: 0,
},
handlers,
);
program.llamapack = llamapack;
preferences.llamapack = llamapack;
await askPostInstallAction();
return; // early return - no further questions needed for llamapack projects
}
if (!program.framework) {
if (ciInfo.isCI) {
program.framework = getPrefOrDefault("framework");
} else {
const choices = [
{ title: "NextJS", value: "nextjs" },
{ title: "Express", value: "express" },
{ title: "FastAPI (Python)", value: "fastapi" },
];
const { framework } = await prompts(
{
type: "select",
name: "framework",
message: "Which framework would you like to use?",
choices,
initial: 0,
},
handlers,
);
program.framework = framework;
preferences.framework = framework;
}
}
if (program.framework === "express" || program.framework === "fastapi") {
// if a backend-only framework is selected, ask whether we should create a frontend
// (only for streaming backends)
if (program.frontend === undefined) {
if (ciInfo.isCI) {
program.frontend = getPrefOrDefault("frontend");
} else {
const styledNextJS = blue("NextJS");
const styledBackend = green(
program.framework === "express"
? "Express "
: program.framework === "fastapi"
? "FastAPI (Python) "
: "",
);
const { frontend } = await prompts({
onState: onPromptState,
type: "toggle",
name: "frontend",
message: `Would you like to generate a ${styledNextJS} frontend for your ${styledBackend}backend?`,
initial: getPrefOrDefault("frontend"),
active: "Yes",
inactive: "No",
});
program.frontend = Boolean(frontend);
preferences.frontend = Boolean(frontend);
}
}
} else {
program.frontend = false;
}
if (program.framework === "nextjs" || program.frontend) {
if (!program.ui) {
program.ui = defaults.ui;
}
}
if (program.framework === "express" || program.framework === "nextjs") {
if (!program.observability) {
if (ciInfo.isCI) {
program.observability = getPrefOrDefault("observability");
} else {
const { observability } = await prompts(
{
type: "select",
name: "observability",
message: "Would you like to set up observability?",
choices: [
{ title: "No", value: "none" },
{ title: "OpenTelemetry", value: "opentelemetry" },
],
initial: 0,
},
handlers,
);
program.observability = observability;
preferences.observability = observability;
}
if (!program.openAiKey) {
const { key } = await prompts(
{
type: "text",
name: "key",
message: program.listServerModels
? "Please provide your OpenAI API key (or reuse OPENAI_API_KEY env variable):"
: "Please provide your OpenAI API key (leave blank to skip):",
validate: (value: string) => {
if (program.listServerModels && !value) {
if (process.env.OPENAI_API_KEY) {
return true;
}
return "OpenAI API key is required";
}
return true;
},
},
handlers,
);
program.openAiKey = key || process.env.OPENAI_API_KEY;
preferences.openAiKey = key || process.env.OPENAI_API_KEY;
}
if (!program.model) {
if (ciInfo.isCI) {
program.model = getPrefOrDefault("model");
} else {
const { model } = await prompts(
{
type: "select",
name: "model",
message: "Which model would you like to use?",
choices: await getAvailableModelChoices(
false,
program.openAiKey,
program.listServerModels,
),
initial: 0,
},
handlers,
);
program.model = model;
preferences.model = model;
}
}
if (!program.embeddingModel && program.framework === "fastapi") {
if (ciInfo.isCI) {
program.embeddingModel = getPrefOrDefault("embeddingModel");
} else {
const { embeddingModel } = await prompts(
{
type: "select",
name: "embeddingModel",
message: "Which embedding model would you like to use?",
choices: await getAvailableModelChoices(
true,
program.openAiKey,
program.listServerModels,
),
initial: 0,
},
handlers,
);
program.embeddingModel = embeddingModel;
preferences.embeddingModel = embeddingModel;
}
}
Marcus Schiesser
committed
if (!program.dataSources) {
program.dataSources = getPrefOrDefault("dataSources");
Marcus Schiesser
committed
// continue asking user for data sources if none are initially provided
const firstQuestion = program.dataSources.length === 0;
const { selectedSource } = await prompts(
{
type: "select",
name: "selectedSource",
message: firstQuestion
? "Which data source would you like to use?"
: "Would you like to add another data source?",
choices: getDataSourceChoices(
program.framework,
program.dataSources,
),
Marcus Schiesser
committed
if (selectedSource === "no" || selectedSource === "none") {
// user doesn't want another data source or any data source
// NOTE(review): original lines 636-677 are missing from this excerpt
// (replaced by copy artifacts); restore them from version history.
switch (selectedSource) {
case "exampleFile": {
program.dataSources.push(EXAMPLE_FILE);
break;
}
case "file":
case "folder": {
const selectedPaths = await selectLocalContextData(selectedSource);
for (const p of selectedPaths) {
program.dataSources.push({
type: "file",
config: {
path: p,
},
});
}
break;
}
case "web": {
const { baseUrl } = await prompts(
{
type: "text",
name: "baseUrl",
message: "Please provide base URL of the website: ",
initial: "https://www.llamaindex.ai",
validate: (value: string) => {
if (!value.includes("://")) {
value = `https://${value}`;
}
const urlObj = new URL(value);
if (
urlObj.protocol !== "https:" &&
urlObj.protocol !== "http:"
) {
return `URL=${value} has invalid protocol, only allow http or https`;
}
return true;
},
},
handlers,
);
Huu Le (Lee)
committed
config: {
baseUrl,
prefix: baseUrl,
depth: 1,
Huu Le (Lee)
committed
},
case "db": {
const dbPrompts: prompts.PromptObject<string>[] = [
{
type: "text",
name: "uri",
message:
"Please enter the connection string (URI) for the database.",
initial: "mysql+pymysql://user:pass@localhost:3306/mydb",
validate: (value: string) => {
if (!value) {
return "Please provide a valid connection string";
} else if (
!(
value.startsWith("mysql+pymysql://") ||
value.startsWith("postgresql+psycopg://")
)
) {
return "The connection string must start with 'mysql+pymysql://' for MySQL or 'postgresql+psycopg://' for PostgreSQL";
}
return true;
},
Huu Le (Lee)
committed
},
// Only ask for a query, user can provide more complex queries in the config file later
{
type: (prev) => (prev ? "text" : null),
name: "queries",
message: "Please enter the SQL query to fetch data:",
initial: "SELECT * FROM mytable",
},
];
program.dataSources.push({
type: "db",
config: await prompts(dbPrompts, handlers),
});
}
Huu Le (Lee)
committed
}
}
// Asking for LlamaParse if user selected file or folder data source
program.dataSources.some((ds) => ds.type === "file") &&
Marcus Schiesser
committed
program.useLlamaParse === undefined
) {
if (ciInfo.isCI) {
program.useLlamaParse = getPrefOrDefault("useLlamaParse");
program.llamaCloudKey = getPrefOrDefault("llamaCloudKey");
} else {
const { useLlamaParse } = await prompts(
{
type: "toggle",
name: "useLlamaParse",
message:
"Would you like to use LlamaParse (improved parser for RAG - requires API key)?",
initial: false,
active: "yes",
inactive: "no",
},
handlers,
);
program.useLlamaParse = useLlamaParse;
// Ask for LlamaCloud API key
if (useLlamaParse && program.llamaCloudKey === undefined) {
const { llamaCloudKey } = await prompts(
{
type: "text",
name: "llamaCloudKey",
Huu Le (Lee)
committed
message:
"Please provide your LlamaIndex Cloud API key (leave blank to skip):",
},
handlers,
);
program.llamaCloudKey = llamaCloudKey;
}
}
}
Marcus Schiesser
committed
if (program.dataSources.length > 0 && !program.vectorDb) {
Huu Le (Lee)
committed
if (ciInfo.isCI) {
program.vectorDb = getPrefOrDefault("vectorDb");
} else {
const { vectorDb } = await prompts(
Huu Le (Lee)
committed
type: "select",
name: "vectorDb",
message: "Would you like to use a vector database?",
choices: getVectorDbChoices(program.framework),
initial: 0,
Huu Le (Lee)
committed
program.vectorDb = vectorDb;
preferences.vectorDb = vectorDb;
}
Marcus Schiesser
committed
// TODO: allow tools also without datasources
if (!program.tools && program.dataSources.length > 0) {
if (ciInfo.isCI) {
program.tools = getPrefOrDefault("tools");
} else {
const options = supportedTools.filter((t) =>
t.supportedFrameworks?.includes(program.framework),
);
const toolChoices = options.map((tool) => ({
title: tool.display,
value: tool.name,
}));
const { toolsName } = await prompts({
name: "toolsName",
message:
"Would you like to build an agent using tools? If so, select the tools here, otherwise just press enter",
choices: toolChoices,
});
const tools = toolsName?.map((tool: string) =>
supportedTools.find((t) => t.name === tool),
);
program.tools = tools;
preferences.tools = tools;
}
}