Commit 71bd256f authored by Thuc Pham, committed by GitHub

feat: generate llama pack example (#429)

parent bfd6ae10
@@ -32,6 +32,7 @@ export async function createApp({
   openAiKey,
   model,
   communityProjectPath,
+  llamapack,
   vectorDb,
   externalPort,
   postInstallAction,
@@ -75,6 +76,7 @@ export async function createApp({
     openAiKey,
     model,
     communityProjectPath,
+    llamapack,
     vectorDb,
     externalPort,
     postInstallAction,
...
 export const COMMUNITY_OWNER = "run-llama";
 export const COMMUNITY_REPO = "create_llama_projects";
+export const LLAMA_PACK_OWNER = "run-llama";
+export const LLAMA_PACK_REPO = "llama-hub";
+export const LLAMA_HUB_FOLDER_PATH = `${LLAMA_PACK_OWNER}/${LLAMA_PACK_REPO}/main/llama_hub`;
+export const LLAMA_PACK_CONFIG_PATH = `${LLAMA_HUB_FOLDER_PATH}/llama_packs/library.json`;
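For reference, a small illustration of what the new path constants expand to, derived directly from the template literals above:

```typescript
// Illustration only: expanded values of the constants defined above.
const LLAMA_HUB_FOLDER_PATH = "run-llama/llama-hub/main/llama_hub";
const LLAMA_PACK_CONFIG_PATH = `${LLAMA_HUB_FOLDER_PATH}/llama_packs/library.json`;
console.log(LLAMA_PACK_CONFIG_PATH);
// => "run-llama/llama-hub/main/llama_hub/llama_packs/library.json"
```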
@@ -7,6 +7,7 @@ import { cyan } from "picocolors";
 import { COMMUNITY_OWNER, COMMUNITY_REPO } from "./constant";
 import { PackageManager } from "./get-pkg-manager";
+import { installLlamapackProject } from "./llama-pack";
 import { isHavingPoetryLockFile, tryPoetryRun } from "./poetry";
 import { installPythonTemplate } from "./python";
 import { downloadAndExtractRepo } from "./repo";
@@ -153,6 +154,11 @@ export const installTemplate = async (
     return;
   }

+  if (props.template === "llamapack" && props.llamapack) {
+    await installLlamapackProject(props);
+    return;
+  }
+
   if (props.framework === "fastapi") {
     await installPythonTemplate(props);
   } else {
...
import fs from "fs/promises";
import path from "path";
import { LLAMA_HUB_FOLDER_PATH, LLAMA_PACK_CONFIG_PATH } from "./constant";
import { copy } from "./copy";
import { installPythonDependencies } from "./python";
import { getRepoRawContent } from "./repo";
import { InstallTemplateArgs } from "./types";

export async function getAvailableLlamapackOptions(): Promise<
  {
    name: string;
    folderPath: string;
    example: boolean | undefined;
  }[]
> {
  const libraryJsonRaw = await getRepoRawContent(LLAMA_PACK_CONFIG_PATH);
  const libraryJson = JSON.parse(libraryJsonRaw);
  const llamapackKeys = Object.keys(libraryJson);
  return llamapackKeys
    .map((key) => ({
      name: key,
      folderPath: libraryJson[key].id,
      example: libraryJson[key].example,
    }))
    .filter((item) => !!item.example);
}

const copyLlamapackEmptyProject = async ({
  root,
}: Pick<InstallTemplateArgs, "root">) => {
  const templatePath = path.join(
    __dirname,
    "..",
    "templates/components/sample-projects/llamapack",
  );
  await copy("**", root, {
    parents: true,
    cwd: templatePath,
  });
};

const copyData = async ({
  root,
}: Pick<InstallTemplateArgs, "root" | "llamapack">) => {
  const dataPath = path.join(__dirname, "..", "templates/components/data");
  await copy("**", path.join(root, "data"), {
    parents: true,
    cwd: dataPath,
  });
};

const installLlamapackExample = async ({
  root,
  llamapack,
}: Pick<InstallTemplateArgs, "root" | "llamapack">) => {
  const exampleFileName = "example.py";
  const readmeFileName = "README.md";
  const exampleFilePath = `${LLAMA_HUB_FOLDER_PATH}/${llamapack}/${exampleFileName}`;
  const readmeFilePath = `${LLAMA_HUB_FOLDER_PATH}/${llamapack}/${readmeFileName}`;

  // Download example.py from llamapack and save to root
  const exampleContent = await getRepoRawContent(exampleFilePath);
  await fs.writeFile(path.join(root, exampleFileName), exampleContent);

  // Download README.md from llamapack and combine with README-template.md,
  // save to root and then delete template file
  const readmeContent = await getRepoRawContent(readmeFilePath);
  const readmeTemplateContent = await fs.readFile(
    path.join(root, "README-template.md"),
    "utf-8",
  );
  await fs.writeFile(
    path.join(root, readmeFileName),
    `${readmeContent}\n${readmeTemplateContent}`,
  );
  await fs.unlink(path.join(root, "README-template.md"));
};

export const installLlamapackProject = async ({
  root,
  llamapack,
  postInstallAction,
}: Pick<InstallTemplateArgs, "root" | "llamapack" | "postInstallAction">) => {
  console.log("\nInstalling Llamapack project:", llamapack!);
  await copyLlamapackEmptyProject({ root });
  await copyData({ root });
  await installLlamapackExample({ root, llamapack });
  if (postInstallAction !== "none") {
    installPythonDependencies(root);
  }
};
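The `getAvailableLlamapackOptions` helper above parses library.json and keeps only packs that ship an example. A minimal sketch of an entry shape it can consume, limited to the fields the code actually reads (`id` and `example`) — the pack names below are made-up placeholders, not real library entries:

```typescript
// Hypothetical library.json entries (names are placeholders); only the fields
// read by getAvailableLlamapackOptions are shown.
type LibraryJson = Record<string, { id: string; example?: boolean }>;

const libraryJson: LibraryJson = {
  SomeExamplePack: { id: "llama_packs/some_example_pack", example: true }, // kept
  SomeOtherPack: { id: "llama_packs/some_other_pack" }, // filtered out: no example flag
};

// Mirrors the map/filter logic in getAvailableLlamapackOptions:
const options = Object.keys(libraryJson)
  .map((key) => ({
    name: key,
    folderPath: libraryJson[key].id,
    example: libraryJson[key].example,
  }))
  .filter((item) => !!item.example);

console.log(options);
// => [{ name: "SomeExamplePack", folderPath: "llama_packs/some_example_pack", example: true }]
```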
@@ -61,3 +61,11 @@ export async function getRepoRootFolders(
   const folders = data.filter((item) => item.type === "dir");
   return folders.map((item) => item.name);
 }
+
+export async function getRepoRawContent(repoFilePath: string) {
+  const url = `https://raw.githubusercontent.com/${repoFilePath}`;
+  const response = await got(url, {
+    responseType: "text",
+  });
+  return response.body;
+}
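`getRepoRawContent` simply fetches a repo-relative path from raw.githubusercontent.com. As an illustration of how the new helpers fit together — the import paths are assumptions based on the helper layout implied by the other files in this diff — fetching the pack library might look like:

```typescript
// Sketch only: combining getRepoRawContent with the new constants.
// Import paths are assumptions inferred from the other files in this commit.
import { LLAMA_PACK_CONFIG_PATH } from "./constant";
import { getRepoRawContent } from "./repo";

async function fetchLlamapackLibrary() {
  // Resolves to:
  // https://raw.githubusercontent.com/run-llama/llama-hub/main/llama_hub/llama_packs/library.json
  const raw = await getRepoRawContent(LLAMA_PACK_CONFIG_PATH);
  return JSON.parse(raw);
}
```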
 import { PackageManager } from "../helpers/get-pkg-manager";
-export type TemplateType = "simple" | "streaming" | "community";
+export type TemplateType = "simple" | "streaming" | "community" | "llamapack";
 export type TemplateFramework = "nextjs" | "express" | "fastapi";
 export type TemplateEngine = "simple" | "context";
 export type TemplateUI = "html" | "shadcn";
@@ -23,6 +23,7 @@ export interface InstallTemplateArgs {
   forBackend?: string;
   model: string;
   communityProjectPath?: string;
+  llamapack?: string;
   vectorDb?: TemplateVectorDB;
   externalPort?: number;
   postInstallAction?: TemplatePostInstallAction;
...
@@ -237,6 +237,7 @@ async function run(): Promise<void> {
     openAiKey: program.openAiKey,
     model: program.model,
     communityProjectPath: program.communityProjectPath,
+    llamapack: program.llamapack,
     vectorDb: program.vectorDb,
     externalPort: program.externalPort,
     postInstallAction: program.postInstallAction,
...
@@ -7,6 +7,7 @@ import prompts from "prompts";
 import { InstallAppArgs } from "./create-app";
 import { TemplateFramework } from "./helpers";
 import { COMMUNITY_OWNER, COMMUNITY_REPO } from "./helpers/constant";
+import { getAvailableLlamapackOptions } from "./helpers/llama-pack";
 import { getRepoRootFolders } from "./helpers/repo";

 export type QuestionArgs = Omit<InstallAppArgs, "appPath" | "packageManager">;
@@ -37,6 +38,7 @@ const defaults: QuestionArgs = {
   openAiKey: "",
   model: "gpt-3.5-turbo",
   communityProjectPath: "",
+  llamapack: "",
   postInstallAction: "dependencies",
 };
@@ -129,6 +131,48 @@ export const askQuestions = async (
     field: K,
   ): QuestionArgs[K] => preferences[field] ?? defaults[field];

+  // Ask for next action after installation
+  async function askPostInstallAction() {
+    if (program.postInstallAction === undefined) {
+      if (ciInfo.isCI) {
+        program.postInstallAction = getPrefOrDefault("postInstallAction");
+      } else {
+        let actionChoices = [
+          {
+            title: "Just generate code (~1 sec)",
+            value: "none",
+          },
+          {
+            title: "Generate code and install dependencies (~2 min)",
+            value: "dependencies",
+          },
+        ];
+
+        const hasOpenAiKey = program.openAiKey || process.env["OPENAI_API_KEY"];
+        if (program.vectorDb === "none" && hasOpenAiKey) {
+          actionChoices.push({
+            title:
+              "Generate code, install dependencies, and run the app (~2 min)",
+            value: "runApp",
+          });
+        }
+
+        const { action } = await prompts(
+          {
+            type: "select",
+            name: "action",
+            message: "How would you like to proceed?",
+            choices: actionChoices,
+            initial: 1,
+          },
+          handlers,
+        );
+
+        program.postInstallAction = action;
+      }
+    }
+  }
+
   if (!program.template) {
     if (ciInfo.isCI) {
       program.template = getPrefOrDefault("template");
@@ -148,6 +192,10 @@ export const askQuestions = async (
           {
             title: `Community template from ${styledRepo}`,
             value: "community",
           },
+          {
+            title: "Example using a LlamaPack",
+            value: "llamapack",
+          },
         ],
         initial: 1,
       },
@@ -181,6 +229,27 @@ export const askQuestions = async (
     return; // early return - no further questions needed for community projects
   }

+  if (program.template === "llamapack") {
+    const availableLlamaPacks = await getAvailableLlamapackOptions();
+    const { llamapack } = await prompts(
+      {
+        type: "select",
+        name: "llamapack",
+        message: "Select LlamaPack",
+        choices: availableLlamaPacks.map((pack) => ({
+          title: pack.name,
+          value: pack.folderPath,
+        })),
+        initial: 0,
+      },
+      handlers,
+    );
+    program.llamapack = llamapack;
+    preferences.llamapack = llamapack;
+    await askPostInstallAction();
+    return; // early return - no further questions needed for llamapack projects
+  }
+
   if (!program.framework) {
     if (ciInfo.isCI) {
       program.framework = getPrefOrDefault("framework");
@@ -386,45 +455,7 @@ export const askQuestions = async (
     }
   }

-  // Ask for next action after installation
-  if (program.postInstallAction === undefined) {
-    if (ciInfo.isCI) {
-      program.postInstallAction = getPrefOrDefault("postInstallAction");
-    } else {
-      let actionChoices = [
-        {
-          title: "Just generate code (~1 sec)",
-          value: "none",
-        },
-        {
-          title: "Generate code and install dependencies (~2 min)",
-          value: "dependencies",
-        },
-      ];
-
-      const hasOpenAiKey = program.openAiKey || process.env["OPENAI_API_KEY"];
-      if (program.vectorDb === "none" && hasOpenAiKey) {
-        actionChoices.push({
-          title:
-            "Generate code, install dependencies, and run the app (~2 min)",
-          value: "runApp",
-        });
-      }
-
-      const { action } = await prompts(
-        {
-          type: "select",
-          name: "action",
-          message: "How would you like to proceed?",
-          choices: actionChoices,
-          initial: 1,
-        },
-        handlers,
-      );
-
-      program.postInstallAction = action;
-    }
-  }
+  await askPostInstallAction();

   // TODO: consider using zod to validate the input (doesn't work like this as not every option is required)
   // templateUISchema.parse(program.ui);
...
---
## Quickstart
1. Check the instructions above for setting up your environment and exporting the required environment variables. For example, if you are using bash, you can run the following command to set your OpenAI API key:

   ```bash
   export OPENAI_API_KEY=your_api_key
   ```

2. Run the example:

   ```
   poetry run python example.py
   ```
[tool.poetry]
name = "app"
version = "0.1.0"
description = "Llama Pack Example"
authors = ["Marcus Schiesser <mail@marcusschiesser.de>"]
readme = "README.md"

[tool.poetry.dependencies]
python = "^3.11,<3.12"
llama-index = "^0.9.19"
python-dotenv = "^1.0.0"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"