diff --git a/packages/create-llama/create-app.ts b/packages/create-llama/create-app.ts index ea5a0ac815f40b0529aefd30e0555fd4c11edbb2..1e0663be160abdae8165302ea64c9bdce971ee5d 100644 --- a/packages/create-llama/create-app.ts +++ b/packages/create-llama/create-app.ts @@ -14,7 +14,7 @@ import type { TemplateType, TemplateUI, } from "./templates"; -import { installTemplate } from "./templates"; +import { installPythonTemplate, installTemplate } from "./templates"; export async function createApp({ template, @@ -60,17 +60,21 @@ export async function createApp({ process.chdir(root); - await installTemplate({ - appName, - root, - template, - framework, - engine, - ui, - packageManager, - isOnline, - eslint, - }); + if (framework === "fastapi") { + await installPythonTemplate({ appName, root, template, framework }); + } else { + await installTemplate({ + appName, + root, + template, + framework, + engine, + ui, + packageManager, + isOnline, + eslint, + }); + } if (tryGitInit(root)) { console.log("Initialized a git repository."); diff --git a/packages/create-llama/index.ts b/packages/create-llama/index.ts index 6aeb13ecba52916ffdbe2760b0620aed99a6dbf4..a287cdfef763db6252fe6808ad1d4a2e6579bd64 100644 --- a/packages/create-llama/index.ts +++ b/packages/create-llama/index.ts @@ -225,6 +225,7 @@ async function run(): Promise<void> { choices: [ { title: "NextJS", value: "nextjs" }, { title: "Express", value: "express" }, + { title: "FastAPI (Python)", value: "fastapi" }, ], initial: 0, }, @@ -299,6 +300,7 @@ async function run(): Promise<void> { } if ( + program.framework !== "fastapi" && !process.argv.includes("--eslint") && !process.argv.includes("--no-eslint") ) { diff --git a/packages/create-llama/templates/index.ts b/packages/create-llama/templates/index.ts index 6f338b635f4b69588cfa630c33004568cf5282f8..d2bf52688e5b85361d6dd6babf254f7da7f1594e 100644 --- a/packages/create-llama/templates/index.ts +++ b/packages/create-llama/templates/index.ts @@ -7,7 +7,7 @@ import path 
from "path"; import { bold, cyan } from "picocolors"; import { version } from "../package.json"; -import { InstallTemplateArgs } from "./types"; +import { InstallPythonTemplateArgs, InstallTemplateArgs } from "./types"; /** * Install a LlamaIndex internal template to a given `root` directory. @@ -166,4 +166,36 @@ export const installTemplate = async ({ await install(packageManager, isOnline); }; +export const installPythonTemplate = async ({ + appName, + root, + template, + framework, +}: InstallPythonTemplateArgs) => { + console.log("\nInitializing Python project with template:", template, "\n"); + const templatePath = path.join(__dirname, template, framework); + await copy("**", root, { + parents: true, + cwd: templatePath, + rename(name) { + switch (name) { + case "gitignore": { + return `.${name}`; + } + // README.md is ignored by webpack-asset-relocator-loader used by ncc: + // https://github.com/vercel/webpack-asset-relocator-loader/blob/e9308683d47ff507253e37c9bcbb99474603192b/src/asset-relocator.js#L227 + case "README-template.md": { + return "README.md"; + } + default: { + return name; + } + } + }, + }); + console.log( + "\nPython project, dependencies won't be installed automatically.\n", + ); +}; + export * from "./types"; diff --git a/packages/create-llama/templates/simple/fastapi/README-template.md b/packages/create-llama/templates/simple/fastapi/README-template.md new file mode 100644 index 0000000000000000000000000000000000000000..baa5fa63fcb1c07f8d74af3aa2eabd7bb493fda2 --- /dev/null +++ b/packages/create-llama/templates/simple/fastapi/README-template.md @@ -0,0 +1,36 @@ +This is a [LlamaIndex](https://www.llamaindex.ai/) project using [FastAPI](https://fastapi.tiangolo.com/) bootstrapped with [`create-llama`](https://github.com/run-llama/LlamaIndexTS/tree/main/packages/create-llama). 
+ +## Getting Started + +First, set up the environment: + +``` +poetry install +poetry shell +``` + +Second, run the development server: + +``` +python main.py +``` + +Then call the API endpoint `/api/chat` to see the result: + +``` +curl --location 'localhost:8000/api/chat' \ +--header 'Content-Type: application/json' \ +--data '{ "messages": [{ "role": "user", "content": "Hello" }] }' +``` + +You can start editing the API by modifying `app/api/routers/chat.py`. The endpoint auto-updates as you save the file. + +Open [http://localhost:8000/docs](http://localhost:8000/docs) with your browser to see the Swagger UI of the API. + +## Learn More + +To learn more about LlamaIndex, take a look at the following resources: + +- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex. + +You can check out [the LlamaIndex GitHub repository](https://github.com/run-llama/llama_index) - your feedback and contributions are welcome! diff --git a/packages/create-llama/templates/simple/fastapi/app/__init__.py b/packages/create-llama/templates/simple/fastapi/app/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/packages/create-llama/templates/simple/fastapi/app/api/__init__.py b/packages/create-llama/templates/simple/fastapi/app/api/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/packages/create-llama/templates/simple/fastapi/app/api/routers/__init__.py b/packages/create-llama/templates/simple/fastapi/app/api/routers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/packages/create-llama/templates/simple/fastapi/app/api/routers/chat.py b/packages/create-llama/templates/simple/fastapi/app/api/routers/chat.py new file mode 100644 index
0000000000000000000000000000000000000000..bd6a38c515df2f612ec2665e201329f950da80be --- /dev/null +++ b/packages/create-llama/templates/simple/fastapi/app/api/routers/chat.py @@ -0,0 +1,65 @@ +import logging +import os +from typing import List +from fastapi import APIRouter, Depends, HTTPException, status +from llama_index import ( + StorageContext, + load_index_from_storage, + SimpleDirectoryReader, + VectorStoreIndex, +) +from llama_index.llms.base import MessageRole +from pydantic import BaseModel + +STORAGE_DIR = "./storage" # directory to cache the generated index +DATA_DIR = "./data" # directory containing the documents to index + +chat_router = r = APIRouter() + + +class Message(BaseModel): + role: MessageRole + content: str + + +class _ChatData(BaseModel): + messages: List[Message] + + +def get_index(): + logger = logging.getLogger("uvicorn") + # check if storage already exists + if not os.path.exists(STORAGE_DIR): + logger.info("Creating new index") + # load the documents and create the index + documents = SimpleDirectoryReader(DATA_DIR).load_data() + index = VectorStoreIndex.from_documents(documents) + # store it for later + index.storage_context.persist(STORAGE_DIR) + logger.info(f"Finished creating new index. 
Stored in {STORAGE_DIR}") + else: + # load the existing index + logger.info(f"Loading index from {STORAGE_DIR}...") + storage_context = StorageContext.from_defaults(persist_dir=STORAGE_DIR) + index = load_index_from_storage(storage_context) + logger.info(f"Finished loading index from {STORAGE_DIR}") + return index + + +@r.post("/") +def chat(data: _ChatData, index: VectorStoreIndex = Depends(get_index)) -> Message: + # check preconditions + if len(data.messages) == 0: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="No messages provided", + ) + lastMessage = data.messages.pop() + if lastMessage.role != MessageRole.USER: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Last message must be from user", + ) + chat_engine = index.as_chat_engine() + response = chat_engine.chat(lastMessage.content, data.messages) + return Message(role=MessageRole.ASSISTANT, content=response.response) diff --git a/packages/create-llama/templates/simple/fastapi/data/brk-2022.pdf b/packages/create-llama/templates/simple/fastapi/data/brk-2022.pdf new file mode 100644 index 0000000000000000000000000000000000000000..876ea6a37f0fe785dffc74434e805b65a802b91f Binary files /dev/null and b/packages/create-llama/templates/simple/fastapi/data/brk-2022.pdf differ diff --git a/packages/create-llama/templates/simple/fastapi/gitignore b/packages/create-llama/templates/simple/fastapi/gitignore new file mode 100644 index 0000000000000000000000000000000000000000..069fcb4020566da83dbc398c06dce5bbe92aface --- /dev/null +++ b/packages/create-llama/templates/simple/fastapi/gitignore @@ -0,0 +1,2 @@ +__pycache__ +storage diff --git a/packages/create-llama/templates/simple/fastapi/main.py b/packages/create-llama/templates/simple/fastapi/main.py new file mode 100644 index 0000000000000000000000000000000000000000..e307354bc3b935a52d96c0e138a937969df4d4cf --- /dev/null +++ b/packages/create-llama/templates/simple/fastapi/main.py @@ -0,0 +1,23 @@ +import os 
+import uvicorn +from app.api.routers.chat import chat_router +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware + +app = FastAPI() + +origin = os.getenv("CORS_ORIGIN") +if origin: + app.add_middleware( + CORSMiddleware, + allow_origins=[origin], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) + +app.include_router(chat_router, prefix="/api/chat") + + +if __name__ == "__main__": + uvicorn.run(app="main:app", host="0.0.0.0", reload=True) diff --git a/packages/create-llama/templates/simple/fastapi/pyproject.toml b/packages/create-llama/templates/simple/fastapi/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..f069bfd947ff71d7004ac84696e32223c7eeffb2 --- /dev/null +++ b/packages/create-llama/templates/simple/fastapi/pyproject.toml @@ -0,0 +1,18 @@ +[tool.poetry] +name = "llamaindex-fastapi" +version = "0.1.0" +description = "" +authors = ["Marcus Schiesser <mail@marcusschiesser.de>"] +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.11,<3.12" +fastapi = "^0.104.1" +uvicorn = { extras = ["standard"], version = "^0.23.2" } +llama-index = "^0.8.56" +pypdf = "^3.17.0" + + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/packages/create-llama/templates/simple/fastapi/tests/__init__.py b/packages/create-llama/templates/simple/fastapi/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/packages/create-llama/templates/types.ts b/packages/create-llama/templates/types.ts index e76cd7da17718c906b6c58dcd93c1c825d0aba9f..75d4d70eedf0745e9270fa4849c5ca922e22abe9 100644 --- a/packages/create-llama/templates/types.ts +++ b/packages/create-llama/templates/types.ts @@ -1,10 +1,17 @@ import { PackageManager } from "../helpers/get-pkg-manager"; export type TemplateType = "simple" | "streaming"; -export type TemplateFramework = "nextjs" | 
"express"; +export type TemplateFramework = "nextjs" | "express" | "fastapi"; export type TemplateEngine = "simple" | "context"; export type TemplateUI = "html" | "shadcn"; +export interface InstallPythonTemplateArgs { + appName: string; + root: string; + template: TemplateType; + framework: TemplateFramework; +} + export interface InstallTemplateArgs { appName: string; root: string;