Skip to content
Snippets Groups Projects
Unverified Commit fe03aaae authored by Thuc Pham's avatar Thuc Pham Committed by GitHub
Browse files

feat: generate llama pack example (#429)

parent 9ce7d3d6
Branches
Tags
No related merge requests found
---
"create-llama": patch
---
feat: generate llama pack example
......@@ -32,6 +32,7 @@ export async function createApp({
openAiKey,
model,
communityProjectPath,
llamapack,
vectorDb,
externalPort,
postInstallAction,
......@@ -75,6 +76,7 @@ export async function createApp({
openAiKey,
model,
communityProjectPath,
llamapack,
vectorDb,
externalPort,
postInstallAction,
......
// GitHub owner/repo hosting the community example projects.
export const COMMUNITY_OWNER = "run-llama";
export const COMMUNITY_REPO = "create_llama_projects";
// GitHub owner/repo hosting the LlamaPack sources (llama-hub).
export const LLAMA_PACK_OWNER = "run-llama";
export const LLAMA_PACK_REPO = "llama-hub";
// Path fragments appended to the raw.githubusercontent.com base URL
// (see getRepoRawContent) to fetch files from the main branch.
export const LLAMA_HUB_FOLDER_PATH = `${LLAMA_PACK_OWNER}/${LLAMA_PACK_REPO}/main/llama_hub`;
export const LLAMA_PACK_CONFIG_PATH = `${LLAMA_HUB_FOLDER_PATH}/llama_packs/library.json`;
......@@ -7,6 +7,7 @@ import { cyan } from "picocolors";
import { COMMUNITY_OWNER, COMMUNITY_REPO } from "./constant";
import { PackageManager } from "./get-pkg-manager";
import { installLlamapackProject } from "./llama-pack";
import { isHavingPoetryLockFile, tryPoetryRun } from "./poetry";
import { installPythonTemplate } from "./python";
import { downloadAndExtractRepo } from "./repo";
......@@ -153,6 +154,11 @@ export const installTemplate = async (
return;
}
if (props.template === "llamapack" && props.llamapack) {
await installLlamapackProject(props);
return;
}
if (props.framework === "fastapi") {
await installPythonTemplate(props);
} else {
......
import fs from "fs/promises";
import path from "path";
import { LLAMA_HUB_FOLDER_PATH, LLAMA_PACK_CONFIG_PATH } from "./constant";
import { copy } from "./copy";
import { installPythonDependencies } from "./python";
import { getRepoRawContent } from "./repo";
import { InstallTemplateArgs } from "./types";
/**
 * Fetches llama-hub's pack registry (library.json) and returns the packs
 * that ship an example, so the CLI can offer them as template choices.
 *
 * @returns one entry per pack with a truthy `example` flag:
 *   `name` (registry key), `folderPath` (the pack's `id`, i.e. its folder
 *   inside llama_hub), and the raw `example` flag.
 */
export async function getAvailableLlamapackOptions(): Promise<
  {
    name: string;
    folderPath: string;
    example: boolean | undefined;
  }[]
> {
  const libraryJsonRaw = await getRepoRawContent(LLAMA_PACK_CONFIG_PATH);
  // library.json maps pack name -> { id: <folder path>, example?: boolean, ... }.
  // NOTE(review): shape asserted, not validated — assumes the registry format
  // is stable; a schema check would be safer for external data.
  const libraryJson = JSON.parse(libraryJsonRaw) as Record<
    string,
    { id: string; example?: boolean }
  >;
  // Filter before mapping so no throwaway objects are built for packs
  // without an example.
  return Object.entries(libraryJson)
    .filter(([, pack]) => !!pack.example)
    .map(([name, pack]) => ({
      name,
      folderPath: pack.id,
      example: pack.example,
    }));
}
/**
 * Scaffolds the bare LlamaPack sample project (everything under the bundled
 * sample-projects/llamapack template) into the target directory.
 */
const copyLlamapackEmptyProject = async ({
  root,
}: Pick<InstallTemplateArgs, "root">) => {
  const sampleProjectDir = path.join(
    __dirname,
    "..",
    "templates/components/sample-projects/llamapack",
  );
  await copy("**", root, {
    cwd: sampleProjectDir,
    parents: true,
  });
};
/**
 * Copies the bundled sample data files into `<root>/data` so the generated
 * example has documents to work with.
 */
// Type narrowed to Pick<..., "root">: the previous "llamapack" key was never
// used by this function. Callers passing extra properties remain compatible.
const copyData = async ({ root }: Pick<InstallTemplateArgs, "root">) => {
  const dataPath = path.join(__dirname, "..", "templates/components/data");
  await copy("**", path.join(root, "data"), {
    parents: true,
    cwd: dataPath,
  });
};
/**
 * Downloads the selected pack's example.py and README.md from llama-hub,
 * writes example.py to the project root, prepends the pack README to the
 * local README-template.md as the project README, then deletes the template.
 *
 * Precondition: README-template.md exists in `root` (copied by
 * copyLlamapackEmptyProject).
 */
const installLlamapackExample = async ({
  root,
  llamapack,
}: Pick<InstallTemplateArgs, "root" | "llamapack">) => {
  const exampleFileName = "example.py";
  const readmeFileName = "README.md";
  const readmeTemplatePath = path.join(root, "README-template.md");
  const exampleFilePath = `${LLAMA_HUB_FOLDER_PATH}/${llamapack}/${exampleFileName}`;
  const readmeFilePath = `${LLAMA_HUB_FOLDER_PATH}/${llamapack}/${readmeFileName}`;
  // The two downloads and the local template read are independent of each
  // other — fetch them in parallel instead of sequentially.
  const [exampleContent, readmeContent, readmeTemplateContent] =
    await Promise.all([
      getRepoRawContent(exampleFilePath),
      getRepoRawContent(readmeFilePath),
      fs.readFile(readmeTemplatePath, "utf-8"),
    ]);
  await fs.writeFile(path.join(root, exampleFileName), exampleContent);
  // Pack README first, then the quickstart section from the template.
  await fs.writeFile(
    path.join(root, readmeFileName),
    `${readmeContent}\n${readmeTemplateContent}`,
  );
  await fs.unlink(readmeTemplatePath);
};
/**
 * Entry point for the "llamapack" template: scaffolds an empty project,
 * adds sample data, downloads the pack's example files, and — unless the
 * post-install action is "none" — installs the Python dependencies.
 */
export const installLlamapackProject = async ({
  root,
  llamapack,
  postInstallAction,
}: Pick<InstallTemplateArgs, "root" | "llamapack" | "postInstallAction">) => {
  console.log("\nInstalling Llamapack project:", llamapack!);
  // Order matters: the empty project provides README-template.md, which
  // installLlamapackExample consumes (and deletes).
  await copyLlamapackEmptyProject({ root });
  await copyData({ root });
  await installLlamapackExample({ root, llamapack });
  if (postInstallAction !== "none") {
    // Covers both "dependencies" and "runApp" actions.
    // NOTE(review): call is not awaited — presumably synchronous; confirm
    // against its definition in ./python.
    installPythonDependencies(root);
  }
};
......@@ -61,3 +61,11 @@ export async function getRepoRootFolders(
const folders = data.filter((item) => item.type === "dir");
return folders.map((item) => item.name);
}
/**
 * Fetches a file's raw text content from GitHub.
 *
 * @param repoFilePath path of the form `<owner>/<repo>/<branch>/<path>`,
 *   appended to the raw.githubusercontent.com base URL
 * @returns the file body as a string
 */
export async function getRepoRawContent(repoFilePath: string) {
  const rawUrl = `https://raw.githubusercontent.com/${repoFilePath}`;
  const { body } = await got(rawUrl, { responseType: "text" });
  return body;
}
import { PackageManager } from "../helpers/get-pkg-manager";
export type TemplateType = "simple" | "streaming" | "community";
export type TemplateType = "simple" | "streaming" | "community" | "llamapack";
export type TemplateFramework = "nextjs" | "express" | "fastapi";
export type TemplateEngine = "simple" | "context";
export type TemplateUI = "html" | "shadcn";
......@@ -23,6 +23,7 @@ export interface InstallTemplateArgs {
forBackend?: string;
model: string;
communityProjectPath?: string;
llamapack?: string;
vectorDb?: TemplateVectorDB;
externalPort?: number;
postInstallAction?: TemplatePostInstallAction;
......
......@@ -237,6 +237,7 @@ async function run(): Promise<void> {
openAiKey: program.openAiKey,
model: program.model,
communityProjectPath: program.communityProjectPath,
llamapack: program.llamapack,
vectorDb: program.vectorDb,
externalPort: program.externalPort,
postInstallAction: program.postInstallAction,
......
......@@ -7,6 +7,7 @@ import prompts from "prompts";
import { InstallAppArgs } from "./create-app";
import { TemplateFramework } from "./helpers";
import { COMMUNITY_OWNER, COMMUNITY_REPO } from "./helpers/constant";
import { getAvailableLlamapackOptions } from "./helpers/llama-pack";
import { getRepoRootFolders } from "./helpers/repo";
export type QuestionArgs = Omit<InstallAppArgs, "appPath" | "packageManager">;
......@@ -37,6 +38,7 @@ const defaults: QuestionArgs = {
openAiKey: "",
model: "gpt-3.5-turbo",
communityProjectPath: "",
llamapack: "",
postInstallAction: "dependencies",
};
......@@ -129,6 +131,48 @@ export const askQuestions = async (
field: K,
): QuestionArgs[K] => preferences[field] ?? defaults[field];
// Ask for next action after installation.
// Extracted as a nested helper so both the llamapack early-return path and
// the regular question flow can trigger the same prompt. Mutates
// program.postInstallAction; no-op when it is already set.
async function askPostInstallAction() {
  if (program.postInstallAction === undefined) {
    if (ciInfo.isCI) {
      // Non-interactive environments fall back to the stored preference.
      program.postInstallAction = getPrefOrDefault("postInstallAction");
    } else {
      // const: the array reference is never reassigned, only appended to.
      const actionChoices = [
        {
          title: "Just generate code (~1 sec)",
          value: "none",
        },
        {
          title: "Generate code and install dependencies (~2 min)",
          value: "dependencies",
        },
      ];
      // Offer "run the app" only when no external vector DB needs setup and
      // an OpenAI key is available (flag or environment).
      const hasOpenAiKey = program.openAiKey || process.env["OPENAI_API_KEY"];
      if (program.vectorDb === "none" && hasOpenAiKey) {
        actionChoices.push({
          title:
            "Generate code, install dependencies, and run the app (~2 min)",
          value: "runApp",
        });
      }
      const { action } = await prompts(
        {
          type: "select",
          name: "action",
          message: "How would you like to proceed?",
          choices: actionChoices,
          initial: 1, // default to "dependencies"
        },
        handlers,
      );
      program.postInstallAction = action;
    }
  }
}
if (!program.template) {
if (ciInfo.isCI) {
program.template = getPrefOrDefault("template");
......@@ -148,6 +192,10 @@ export const askQuestions = async (
title: `Community template from ${styledRepo}`,
value: "community",
},
{
title: "Example using a LlamaPack",
value: "llamapack",
},
],
initial: 1,
},
......@@ -181,6 +229,27 @@ export const askQuestions = async (
return; // early return - no further questions needed for community projects
}
// LlamaPack templates take a shortcut: pick a pack, decide the post-install
// action, and skip all framework/engine/UI questions.
if (program.template === "llamapack") {
  // Packs that ship an example, fetched from llama-hub's library.json.
  const availableLlamaPacks = await getAvailableLlamapackOptions();
  const { llamapack } = await prompts(
    {
      type: "select",
      name: "llamapack",
      message: "Select LlamaPack",
      // value is the pack's folder path inside llama_hub, used later to
      // download its example.py/README.md.
      choices: availableLlamaPacks.map((pack) => ({
        title: pack.name,
        value: pack.folderPath,
      })),
      initial: 0,
    },
    handlers,
  );
  program.llamapack = llamapack;
  // Persist the choice as a preference for future runs.
  preferences.llamapack = llamapack;
  await askPostInstallAction();
  return; // early return - no further questions needed for llamapack projects
}
if (!program.framework) {
if (ciInfo.isCI) {
program.framework = getPrefOrDefault("framework");
......@@ -386,45 +455,7 @@ export const askQuestions = async (
}
}
// Ask for next action after installation
if (program.postInstallAction === undefined) {
if (ciInfo.isCI) {
program.postInstallAction = getPrefOrDefault("postInstallAction");
} else {
let actionChoices = [
{
title: "Just generate code (~1 sec)",
value: "none",
},
{
title: "Generate code and install dependencies (~2 min)",
value: "dependencies",
},
];
const hasOpenAiKey = program.openAiKey || process.env["OPENAI_API_KEY"];
if (program.vectorDb === "none" && hasOpenAiKey) {
actionChoices.push({
title:
"Generate code, install dependencies, and run the app (~2 min)",
value: "runApp",
});
}
const { action } = await prompts(
{
type: "select",
name: "action",
message: "How would you like to proceed?",
choices: actionChoices,
initial: 1,
},
handlers,
);
program.postInstallAction = action;
}
}
await askPostInstallAction();
// TODO: consider using zod to validate the input (doesn't work like this as not every option is required)
// templateUISchema.parse(program.ui);
......
---
## Quickstart
1. Check the instructions above for setting up your environment and exporting the required environment variables.
   For example, if you are using bash, you can run the following command to set the OpenAI API key:
```bash
export OPENAI_API_KEY=your_api_key
```
2. Run the example
```
poetry run python example.py
```
[tool.poetry]
name = "app"
version = "0.1.0"
description = "Llama Pack Example"
authors = ["Marcus Schiesser <mail@marcusschiesser.de>"]
readme = "README.md"
[tool.poetry.dependencies]
python = "^3.11,<3.12"
llama-index = "^0.9.19"
python-dotenv = "^1.0.0"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment