diff --git a/.changeset/popular-dryers-check.md b/.changeset/popular-dryers-check.md new file mode 100644 index 0000000000000000000000000000000000000000..ec91439a6f4c40bc74a1cc574c7bc91b0e271459 --- /dev/null +++ b/.changeset/popular-dryers-check.md @@ -0,0 +1,5 @@ +--- +"create-llama": patch +--- + +Improve DX for Python template (use one deployment instead of two) diff --git a/create-app.ts b/create-app.ts index 144fdc7e207f90b6d5d2bc289d4698eb5470196e..c90c8c5d0319f319e51a4f819742654427818667 100644 --- a/create-app.ts +++ b/create-app.ts @@ -7,7 +7,6 @@ import { getOnline } from "./helpers/is-online"; import { isWriteable } from "./helpers/is-writeable"; import { makeDir } from "./helpers/make-dir"; -import fs from "fs"; import terminalLink from "terminal-link"; import type { InstallTemplateArgs, TemplateObservability } from "./helpers"; import { installTemplate } from "./helpers"; @@ -35,7 +34,7 @@ export async function createApp({ communityProjectConfig, llamapack, vectorDb, - externalPort, + port, postInstallAction, dataSources, tools, @@ -81,7 +80,7 @@ export async function createApp({ communityProjectConfig, llamapack, vectorDb, - externalPort, + port, postInstallAction, dataSources, tools, @@ -90,28 +89,20 @@ export async function createApp({ agents, }; - if (frontend) { - // install backend - const backendRoot = path.join(root, "backend"); - await makeDir(backendRoot); - await installTemplate({ ...args, root: backendRoot, backend: true }); + // Install backend + await installTemplate({ ...args, backend: true }); + + if (frontend && framework === "fastapi") { // install frontend - const frontendRoot = path.join(root, "frontend"); + const frontendRoot = path.join(root, ".frontend"); await makeDir(frontendRoot); await installTemplate({ ...args, root: frontendRoot, framework: "nextjs", - customApiPath: `http://localhost:${externalPort ?? 8000}/api/chat`, + customApiPath: `http://localhost:${port ?? 
8000}/api/chat`, backend: false, }); - // copy readme for fullstack - await fs.promises.copyFile( - path.join(templatesDir, "README-fullstack.md"), - path.join(root, "README.md"), - ); - } else { - await installTemplate({ ...args, backend: true }); } await writeDevcontainer(root, templatesDir, framework, frontend); diff --git a/e2e/python/resolve_dependencies.spec.ts b/e2e/python/resolve_dependencies.spec.ts index f1e5ddaf4aa1baf96f794bb5089a6024654a8937..1d7e121994bf3f471fb0ecb84b38712378f7f904 100644 --- a/e2e/python/resolve_dependencies.spec.ts +++ b/e2e/python/resolve_dependencies.spec.ts @@ -63,7 +63,6 @@ if ( vectorDb, tools: "none", port: 3000, - externalPort: 8000, postInstallAction: "none", templateUI: undefined, appType: "--no-frontend", @@ -101,7 +100,6 @@ if ( vectorDb: "none", tools: tool, port: 3000, - externalPort: 8000, postInstallAction: "none", templateUI: undefined, appType: "--no-frontend", @@ -135,7 +133,6 @@ if ( vectorDb: "none", tools: "none", port: 3000, - externalPort: 8000, postInstallAction: "none", templateUI: undefined, appType: "--no-frontend", @@ -169,7 +166,6 @@ if ( vectorDb: "none", tools: "none", port: 3000, - externalPort: 8000, postInstallAction: "none", templateUI: undefined, appType: "--no-frontend", diff --git a/e2e/shared/extractor_template.spec.ts b/e2e/shared/extractor_template.spec.ts index 643fc6dde863e58998adf62c3fad2bf76f127b04..698d80527be6a67f54f80f819ea573d35358b127 100644 --- a/e2e/shared/extractor_template.spec.ts +++ b/e2e/shared/extractor_template.spec.ts @@ -20,8 +20,7 @@ if ( dataSource === "--example-file" ) { test.describe("Test extractor template", async () => { - let frontendPort: number; - let backendPort: number; + let appPort: number; let name: string; let appProcess: ChildProcess; let cwd: string; @@ -29,16 +28,14 @@ if ( // Create extractor app test.beforeAll(async () => { cwd = await createTestDir(); - frontendPort = Math.floor(Math.random() * 10000) + 10000; - backendPort = frontendPort + 1; + 
appPort = Math.floor(Math.random() * 10000) + 10000; const result = await runCreateLlama({ cwd, templateType: "extractor", templateFramework: "fastapi", dataSource: "--example-file", vectorDb: "none", - port: frontendPort, - externalPort: backendPort, + port: appPort, postInstallAction: "runApp", }); name = result.projectName; @@ -54,7 +51,7 @@ if ( expect(dirExists).toBeTruthy(); }); test("Frontend should have a title", async ({ page }) => { - await page.goto(`http://localhost:${frontendPort}`); + await page.goto(`http://localhost:${appPort}`); await expect(page.getByText("Built by LlamaIndex")).toBeVisible({ timeout: 2000 * 60, }); diff --git a/e2e/shared/multiagent_template.spec.ts b/e2e/shared/multiagent_template.spec.ts index 52721cf5c874ae49741adaed94f4ca0ee7dec1ff..ccb33539b2ebfd9a5bae841d799f9b2c353dcb7d 100644 --- a/e2e/shared/multiagent_template.spec.ts +++ b/e2e/shared/multiagent_template.spec.ts @@ -16,7 +16,7 @@ const templateFramework: TemplateFramework = process.env.FRAMEWORK const dataSource: string = "--example-file"; const templateUI: TemplateUI = "shadcn"; const templatePostInstallAction: TemplatePostInstallAction = "runApp"; -const appType: AppType = templateFramework === "nextjs" ? "" : "--frontend"; +const appType: AppType = templateFramework === "fastapi" ? "--frontend" : ""; const userMessage = "Write a blog post about physical standards for letters"; const templateAgents = ["financial_report", "blog", "form_filling"]; @@ -27,7 +27,6 @@ for (const agents of templateAgents) { "The multiagent template currently only works with files. 
We also only run on Linux to speed up tests.", ); let port: number; - let externalPort: number; let cwd: string; let name: string; let appProcess: ChildProcess; @@ -36,7 +35,6 @@ for (const agents of templateAgents) { test.beforeAll(async () => { port = Math.floor(Math.random() * 10000) + 10000; - externalPort = port + 1; cwd = await createTestDir(); const result = await runCreateLlama({ cwd, @@ -45,7 +43,6 @@ for (const agents of templateAgents) { dataSource, vectorDb, port, - externalPort, postInstallAction: templatePostInstallAction, templateUI, appType, @@ -61,6 +58,10 @@ for (const agents of templateAgents) { }); test("Frontend should have a title", async ({ page }) => { + test.skip( + templatePostInstallAction !== "runApp" || + templateFramework === "express", + ); await page.goto(`http://localhost:${port}`); await expect(page.getByText("Built by LlamaIndex")).toBeVisible(); }); @@ -69,7 +70,10 @@ for (const agents of templateAgents) { page, }) => { test.skip( - agents === "financial_report" || agents === "form_filling", + templatePostInstallAction !== "runApp" || + agents === "financial_report" || + agents === "form_filling" || + templateFramework === "express", "Skip chat tests for financial report and form filling.", ); await page.goto(`http://localhost:${port}`); diff --git a/e2e/shared/streaming_template.spec.ts b/e2e/shared/streaming_template.spec.ts index b34d4fedeeff7c3c328c83d1e012a9196953cae1..f961a2f99946cdf76ea8bbcdd7c460d89a18d011 100644 --- a/e2e/shared/streaming_template.spec.ts +++ b/e2e/shared/streaming_template.spec.ts @@ -22,7 +22,7 @@ const templatePostInstallAction: TemplatePostInstallAction = "runApp"; const llamaCloudProjectName = "create-llama"; const llamaCloudIndexName = "e2e-test"; -const appType: AppType = templateFramework === "nextjs" ? "" : "--frontend"; +const appType: AppType = templateFramework === "fastapi" ? "--frontend" : ""; const userMessage = dataSource !== "--no-files" ? 
"Physical standard for letters" : "Hello"; @@ -35,7 +35,6 @@ test.describe(`Test streaming template ${templateFramework} ${dataSource} ${temp } let port: number; - let externalPort: number; let cwd: string; let name: string; let appProcess: ChildProcess; @@ -44,7 +43,6 @@ test.describe(`Test streaming template ${templateFramework} ${dataSource} ${temp test.beforeAll(async () => { port = Math.floor(Math.random() * 10000) + 10000; - externalPort = port + 1; cwd = await createTestDir(); const result = await runCreateLlama({ cwd, @@ -53,7 +51,6 @@ test.describe(`Test streaming template ${templateFramework} ${dataSource} ${temp dataSource, vectorDb, port, - externalPort, postInstallAction: templatePostInstallAction, templateUI, appType, @@ -68,8 +65,11 @@ test.describe(`Test streaming template ${templateFramework} ${dataSource} ${temp const dirExists = fs.existsSync(path.join(cwd, name)); expect(dirExists).toBeTruthy(); }); + test("Frontend should have a title", async ({ page }) => { - test.skip(templatePostInstallAction !== "runApp"); + test.skip( + templatePostInstallAction !== "runApp" || templateFramework === "express", + ); await page.goto(`http://localhost:${port}`); await expect(page.getByText("Built by LlamaIndex")).toBeVisible(); }); @@ -77,7 +77,9 @@ test.describe(`Test streaming template ${templateFramework} ${dataSource} ${temp test("Frontend should be able to submit a message and receive a response", async ({ page, }) => { - test.skip(templatePostInstallAction !== "runApp"); + test.skip( + templatePostInstallAction !== "runApp" || templateFramework === "express", + ); await page.goto(`http://localhost:${port}`); await page.fill("form textarea", userMessage); const [response] = await Promise.all([ @@ -102,7 +104,7 @@ test.describe(`Test streaming template ${templateFramework} ${dataSource} ${temp test.skip(templatePostInstallAction !== "runApp"); test.skip(templateFramework === "nextjs"); const response = await request.post( - 
`http://localhost:${externalPort}/api/chat/request`, + `http://localhost:${port}/api/chat/request`, { data: { messages: [ diff --git a/e2e/typescript/resolve_dependencies.spec.ts b/e2e/typescript/resolve_dependencies.spec.ts index 9ae8aa7a1a673f2b424367951f13be17d5dd93b3..c8223be605bfacbf380835b644dc1ceabfbf9ffe 100644 --- a/e2e/typescript/resolve_dependencies.spec.ts +++ b/e2e/typescript/resolve_dependencies.spec.ts @@ -56,7 +56,6 @@ test.describe("Test resolve TS dependencies", () => { dataSource: dataSource, vectorDb: vectorDb, port: 3000, - externalPort: 8000, postInstallAction: "none", templateUI: undefined, appType: templateFramework === "nextjs" ? "" : "--no-frontend", diff --git a/e2e/utils.ts b/e2e/utils.ts index 799daf49ff05688f8ad23d371811ccd8860a2717..4d1dd6283de09b781bcde5ed9b99283cc405a44b 100644 --- a/e2e/utils.ts +++ b/e2e/utils.ts @@ -25,7 +25,6 @@ export type RunCreateLlamaOptions = { dataSource: string; vectorDb: TemplateVectorDB; port: number; - externalPort: number; postInstallAction: TemplatePostInstallAction; templateUI?: TemplateUI; appType?: AppType; @@ -44,7 +43,6 @@ export async function runCreateLlama({ dataSource, vectorDb, port, - externalPort, postInstallAction, templateUI, appType, @@ -93,8 +91,6 @@ export async function runCreateLlama({ "--use-pnpm", "--port", port, - "--external-port", - externalPort, "--post-install-action", postInstallAction, "--tools", @@ -142,12 +138,7 @@ export async function runCreateLlama({ // Wait for app to start if (postInstallAction === "runApp") { - await checkAppHasStarted( - appType === "--frontend", - templateFramework, - port, - externalPort, - ); + await waitPorts([port]); } else if (postInstallAction === "dependencies") { await waitForProcess(appProcess, 1000 * 60); // wait 1 min for dependencies to be resolved } else { @@ -167,19 +158,6 @@ export async function createTestDir() { return cwd; } -// eslint-disable-next-line max-params -async function checkAppHasStarted( - frontend: boolean, - 
framework: TemplateFramework, - port: number, - externalPort: number, -) { - const portsToWait = frontend - ? [port, externalPort] - : [framework === "nextjs" ? port : externalPort]; - await waitPorts(portsToWait); -} - async function waitPorts(ports: number[]): Promise<void> { const waitForPort = async (port: number): Promise<void> => { await waitPort({ diff --git a/helpers/devcontainer.ts b/helpers/devcontainer.ts index 4157411989316246dbc3772df0ecdd35f5ac5f17..153add212e482898aafb0c9e3d5f230e467abdaa 100644 --- a/helpers/devcontainer.ts +++ b/helpers/devcontainer.ts @@ -5,36 +5,21 @@ import { TemplateFramework } from "./types"; function renderDevcontainerContent( templatesDir: string, framework: TemplateFramework, - frontend: boolean, ) { const devcontainerJson: any = JSON.parse( fs.readFileSync(path.join(templatesDir, "devcontainer.json"), "utf8"), ); // Modify postCreateCommand - if (frontend) { - devcontainerJson.postCreateCommand = - framework === "fastapi" - ? "cd backend && poetry install && cd ../frontend && npm install" - : "cd backend && npm install && cd ../frontend && npm install"; - } else { - devcontainerJson.postCreateCommand = - framework === "fastapi" ? "poetry install" : "npm install"; - } + devcontainerJson.postCreateCommand = + framework === "fastapi" ? 
"poetry install" : "npm install"; // Modify containerEnv if (framework === "fastapi") { - if (frontend) { - devcontainerJson.containerEnv = { - ...devcontainerJson.containerEnv, - PYTHONPATH: "${PYTHONPATH}:${workspaceFolder}/backend", - }; - } else { - devcontainerJson.containerEnv = { - ...devcontainerJson.containerEnv, - PYTHONPATH: "${PYTHONPATH}:${workspaceFolder}", - }; - } + devcontainerJson.containerEnv = { + ...devcontainerJson.containerEnv, + PYTHONPATH: "${PYTHONPATH}:${workspaceFolder}", + }; } return JSON.stringify(devcontainerJson, null, 2); @@ -54,7 +39,6 @@ export const writeDevcontainer = async ( const devcontainerContent = renderDevcontainerContent( templatesDir, framework, - frontend, ); fs.mkdirSync(devcontainerDir); await fs.promises.writeFile( diff --git a/helpers/env-variables.ts b/helpers/env-variables.ts index 4a554ff047c8a7dd7dbac9e4dac35a75dea57771..ddac770d384eaad8d1ac2067775b91212cc360cd 100644 --- a/helpers/env-variables.ts +++ b/helpers/env-variables.ts @@ -553,7 +553,7 @@ export const createBackendEnvFile = async ( | "framework" | "dataSources" | "template" - | "externalPort" + | "port" | "tools" | "observability" >, @@ -570,7 +570,7 @@ export const createBackendEnvFile = async ( ...getModelEnvs(opts.modelConfig), ...getEngineEnvs(), ...getVectorDBEnvs(opts.vectorDb, opts.framework), - ...getFrameworkEnvs(opts.framework, opts.externalPort), + ...getFrameworkEnvs(opts.framework, opts.port), ...getToolEnvs(opts.tools), ...getTemplateEnvs(opts.template), ...getObservabilityEnvs(opts.observability), diff --git a/helpers/run-app.ts b/helpers/run-app.ts index 2ec4e762f729706c58f3b8eaff2f7f341b9f85bc..991a9790d93e995643a834f58a91f26a624d7cf3 100644 --- a/helpers/run-app.ts +++ b/helpers/run-app.ts @@ -1,40 +1,39 @@ -import { ChildProcess, SpawnOptions, spawn } from "child_process"; -import path from "path"; +import { SpawnOptions, spawn } from "child_process"; import { TemplateFramework } from "./types"; const createProcess = ( command: 
string, args: string[], options: SpawnOptions, -) => { - return spawn(command, args, { - ...options, - shell: true, - }) - .on("exit", function (code) { - if (code !== 0) { - console.log(`Child process exited with code=${code}`); - process.exit(1); - } +): Promise<void> => { + return new Promise((resolve, reject) => { + spawn(command, args, { + ...options, + shell: true, }) - .on("error", function (err) { - console.log("Error when running chill process: ", err); - process.exit(1); - }); + .on("exit", function (code) { + if (code !== 0) { + console.log(`Child process exited with code=${code}`); + reject(code); + } else { + resolve(); + } + }) + .on("error", function (err) { + console.log("Error when running child process: ", err); + reject(err); + }); + }); }; -export function runReflexApp( - appPath: string, - frontendPort?: number, - backendPort?: number, -) { - const commandArgs = ["run", "reflex", "run"]; - if (frontendPort) { - commandArgs.push("--frontend-port", frontendPort.toString()); - } - if (backendPort) { - commandArgs.push("--backend-port", backendPort.toString()); - } +export function runReflexApp(appPath: string, port: number) { + const commandArgs = [ + "run", + "reflex", + "run", + "--frontend-port", + port.toString(), + ]; return createProcess("poetry", commandArgs, { stdio: "inherit", cwd: appPath, @@ -42,11 +41,10 @@ export function runReflexApp( } export function runFastAPIApp(appPath: string, port: number) { - const commandArgs = ["run", "uvicorn", "main:app", "--port=" + port]; - - return createProcess("poetry", commandArgs, { + return createProcess("poetry", ["run", "dev"], { stdio: "inherit", cwd: appPath, + env: { ...process.env, APP_PORT: `${port}` }, }); } @@ -61,39 +59,23 @@ export function runTSApp(appPath: string, port: number) { export async function runApp( appPath: string, template: string, - frontend: boolean, framework: TemplateFramework, port?: number, - externalPort?: number, -): Promise<any> { - const processes: ChildProcess[] 
= []; +): Promise<void> { + try { + // Start the app + const defaultPort = + framework === "nextjs" || template === "extractor" ? 3000 : 8000; - // Callback to kill all sub processes if the main process is killed - process.on("exit", () => { - console.log("Killing app processes..."); - processes.forEach((p) => p.kill()); - }); - - // Default sub app paths - const backendPath = path.join(appPath, "backend"); - const frontendPath = path.join(appPath, "frontend"); - - if (template === "extractor") { - processes.push(runReflexApp(appPath, port, externalPort)); + const appRunner = + template === "extractor" + ? runReflexApp + : framework === "fastapi" + ? runFastAPIApp + : runTSApp; + await appRunner(appPath, port || defaultPort); + } catch (error) { + console.error("Failed to run app:", error); + throw error; } - if (template === "streaming" || template === "multiagent") { - if (framework === "fastapi" || framework === "express") { - const backendRunner = framework === "fastapi" ? runFastAPIApp : runTSApp; - if (frontend) { - processes.push(backendRunner(backendPath, externalPort || 8000)); - processes.push(runTSApp(frontendPath, port || 3000)); - } else { - processes.push(backendRunner(appPath, externalPort || 8000)); - } - } else if (framework === "nextjs") { - processes.push(runTSApp(appPath, port || 3000)); - } - } - - return Promise.all(processes); } diff --git a/helpers/types.ts b/helpers/types.ts index bcaf5b062af5bf8731f6a0e02c8747c4b1235972..53e1cdbabc919ea63975edd445b7abaaa43c0934 100644 --- a/helpers/types.ts +++ b/helpers/types.ts @@ -96,7 +96,7 @@ export interface InstallTemplateArgs { communityProjectConfig?: CommunityProjectConfig; llamapack?: string; vectorDb?: TemplateVectorDB; - externalPort?: number; + port?: number; postInstallAction?: TemplatePostInstallAction; tools?: Tool[]; observability?: TemplateObservability; diff --git a/helpers/typescript.ts b/helpers/typescript.ts index 
7d08ed44781982c06034123e12b99669cd8e3674..d0064bfd23c364c6ecd1cb37c3a75ee609459322 100644 --- a/helpers/typescript.ts +++ b/helpers/typescript.ts @@ -241,7 +241,10 @@ export const installTSTemplate = async ({ vectorDb, }); - if (postInstallAction === "runApp" || postInstallAction === "dependencies") { + if ( + backend && + (postInstallAction === "runApp" || postInstallAction === "dependencies") + ) { await installTSDependencies(packageJson, packageManager, isOnline); } diff --git a/index.ts b/index.ts index de7f5b6492b8d3ee39dae666e8ad760fc9db8a08..8b87b1199afcc20e60088696a5cac379de90347e 100644 --- a/index.ts +++ b/index.ts @@ -134,13 +134,6 @@ const program = new Command(packageJson.name) ` Select UI port. -`, - ) - .option( - "--external-port <external>", - ` - - Select external port. `, ) .option( @@ -333,7 +326,7 @@ async function run(): Promise<void> { ...answers, appPath: resolvedProjectPath, packageManager, - externalPort: options.externalPort, + port: options.port, }); if (answers.postInstallAction === "VSCode") { @@ -362,14 +355,7 @@ Please check ${cyan( } } else if (answers.postInstallAction === "runApp") { console.log(`Running app in ${root}...`); - await runApp( - root, - answers.template, - answers.frontend, - answers.framework, - options.port, - options.externalPort, - ); + await runApp(root, answers.template, answers.framework, options.port); } } diff --git a/questions/questions.ts b/questions/questions.ts index c152362f8350b35c0fa10260c605d5e483489055..2427b70a697c7975eea43d878c20aa4ec6b58249 100644 --- a/questions/questions.ts +++ b/questions/questions.ts @@ -1,4 +1,4 @@ -import { blue, green } from "picocolors"; +import { blue } from "picocolors"; import prompts from "prompts"; import { isCI } from "."; import { COMMUNITY_OWNER, COMMUNITY_REPO } from "../helpers/constant"; @@ -123,24 +123,17 @@ export const askProQuestions = async (program: QuestionArgs) => { } if ( - (program.framework === "express" || program.framework === "fastapi") && + 
program.framework === "fastapi" && (program.template === "streaming" || program.template === "multiagent") ) { // if a backend-only framework is selected, ask whether we should create a frontend if (program.frontend === undefined) { const styledNextJS = blue("NextJS"); - const styledBackend = green( - program.framework === "express" - ? "Express " - : program.framework === "fastapi" - ? "FastAPI (Python) " - : "", - ); const { frontend } = await prompts({ onState: onPromptState, type: "toggle", name: "frontend", - message: `Would you like to generate a ${styledNextJS} frontend for your ${styledBackend}backend?`, + message: `Would you like to generate a ${styledNextJS} frontend for your FastAPI backend?`, initial: false, active: "Yes", inactive: "No", diff --git a/questions/types.ts b/questions/types.ts index 1ea45c186521734439b55fefc751e5f8d0f6dd92..4d0acd890c181bd1e710c9818072ac8adc80b66f 100644 --- a/questions/types.ts +++ b/questions/types.ts @@ -2,7 +2,7 @@ import { InstallAppArgs } from "../create-app"; export type QuestionResults = Omit< InstallAppArgs, - "appPath" | "packageManager" | "externalPort" + "appPath" | "packageManager" >; export type PureQuestionArgs = { diff --git a/templates/.gitignore b/templates/.gitignore deleted file mode 100644 index ec6c67b630467343abb46cfeea0535ce4b339554..0000000000000000000000000000000000000000 --- a/templates/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -__pycache__ -poetry.lock -storage diff --git a/templates/README-fullstack.md b/templates/README-fullstack.md deleted file mode 100644 index 5a41b8cfc370f4fe99269331065bdee8b6aa8e8c..0000000000000000000000000000000000000000 --- a/templates/README-fullstack.md +++ /dev/null @@ -1,18 +0,0 @@ -This is a [LlamaIndex](https://www.llamaindex.ai/) project bootstrapped with [`create-llama`](https://github.com/run-llama/LlamaIndexTS/tree/main/packages/create-llama). - -## Getting Started - -First, startup the backend as described in the [backend README](./backend/README.md). 
- -Second, run the development server of the frontend as described in the [frontend README](./frontend/README.md). - -Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. - -## Learn More - -To learn more about LlamaIndex, take a look at the following resources: - -- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex (Python features). -- [LlamaIndexTS Documentation](https://ts.llamaindex.ai) - learn about LlamaIndex (Typescript features). - -You can check out [the LlamaIndexTS GitHub repository](https://github.com/run-llama/LlamaIndexTS) - your feedback and contributions are welcome! diff --git a/templates/components/agents/python/blog/README-template.md b/templates/components/agents/python/blog/README-template.md index 5d17a5ac883a01cd3d8d417e67eb3331596cbf9e..b4e04a4bac96c1c6010420e59843b76498663fa4 100644 --- a/templates/components/agents/python/blog/README-template.md +++ b/templates/components/agents/python/blog/README-template.md @@ -32,7 +32,7 @@ poetry run generate Third, run the development server: ```shell -poetry run python main.py +poetry run dev ``` Per default, the example is using the explicit workflow. You can change the example by setting the `EXAMPLE_TYPE` environment variable to `choreography` or `orchestrator`. @@ -47,12 +47,12 @@ curl --location 'localhost:8000/api/chat' \ You can start editing the API by modifying `app/api/routers/chat.py` or `app/examples/workflow.py`. The API auto-updates as you save the files. -Open [http://localhost:8000/docs](http://localhost:8000/docs) with your browser to see the Swagger UI of the API. +Open [http://localhost:8000](http://localhost:8000) with your browser to start the app. -The API allows CORS for all origins to simplify development. 
You can change this behavior by setting the `ENVIRONMENT` environment variable to `prod`: +To start the app in **production**, run: ``` -ENVIRONMENT=prod poetry run python main.py +poetry run prod ``` ## Learn More diff --git a/templates/components/agents/python/financial_report/README-template.md b/templates/components/agents/python/financial_report/README-template.md index 0f3beb238e9c2c4fa8a57565fd951aa1f2270874..7b8122eb9c448006066b3c790707a7ceed350f81 100644 --- a/templates/components/agents/python/financial_report/README-template.md +++ b/templates/components/agents/python/financial_report/README-template.md @@ -21,7 +21,7 @@ poetry run generate Third, run the development server: ```shell -poetry run python main.py +poetry run dev ``` The example provides one streaming API endpoint `/api/chat`. @@ -35,12 +35,12 @@ curl --location 'localhost:8000/api/chat' \ You can start editing the API by modifying `app/api/routers/chat.py` or `app/workflows/financial_report.py`. The API auto-updates as you save the files. -Open [http://localhost:8000/docs](http://localhost:8000/docs) with your browser to see the Swagger UI of the API. +Open [http://localhost:8000](http://localhost:8000) with your browser to start the app. -The API allows CORS for all origins to simplify development. You can change this behavior by setting the `ENVIRONMENT` environment variable to `prod`: +To start the app in **production**, run: ``` -ENVIRONMENT=prod poetry run python main.py +poetry run prod ``` ## Learn More diff --git a/templates/components/agents/python/form_filling/README-template.md b/templates/components/agents/python/form_filling/README-template.md index be6ec38e39f2cd3632f78ca2fa4627ce4ab39e1b..6c6717aaccc4bb7cb70a9199a864c9040f5e9509 100644 --- a/templates/components/agents/python/form_filling/README-template.md +++ b/templates/components/agents/python/form_filling/README-template.md @@ -16,7 +16,7 @@ Make sure you have the `OPENAI_API_KEY` set. 
Second, run the development server: ```shell -poetry run python main.py +poetry run dev ``` ## Use Case: Filling Financial CSV Template @@ -41,12 +41,12 @@ curl --location 'localhost:8000/api/chat' \ You can start editing the API by modifying `app/api/routers/chat.py` or `app/workflows/form_filling.py`. The API auto-updates as you save the files. -Open [http://localhost:8000/docs](http://localhost:8000/docs) with your browser to see the Swagger UI of the API. +Open [http://localhost:8000](http://localhost:8000) with your browser to start the app. -The API allows CORS for all origins to simplify development. You can change this behavior by setting the `ENVIRONMENT` environment variable to `prod`: +To start the app in **production**, run: ``` -ENVIRONMENT=prod poetry run python main.py +poetry run prod ``` ## Learn More diff --git a/templates/types/streaming/fastapi/README-template.md b/templates/types/streaming/fastapi/README-template.md index 7969ff0ea8dc968d023893bbd8b63b99fbce6a8a..c04ef0b3739483a46b6f84d2c7cfd57380cf47b5 100644 --- a/templates/types/streaming/fastapi/README-template.md +++ b/templates/types/streaming/fastapi/README-template.md @@ -21,12 +21,14 @@ Second, generate the embeddings of the documents in the `./data` directory (if t poetry run generate ``` -Third, run the development server: +Third, run the app: ``` -python main.py +poetry run dev ``` +Open [http://localhost:8000](http://localhost:8000) with your browser to start the app. + The example provides two different API endpoints: 1. `/api/chat` - a streaming chat endpoint @@ -50,12 +52,10 @@ curl --location 'localhost:8000/api/chat/request' \ You can start editing the API endpoints by modifying `app/api/routers/chat.py`. The endpoints auto-update as you save the file. You can delete the endpoint you're not using. -Open [http://localhost:8000/docs](http://localhost:8000/docs) with your browser to see the Swagger UI of the API. - -The API allows CORS for all origins to simplify development. 
You can change this behavior by setting the `ENVIRONMENT` environment variable to `prod`: +To start the app in **production**, run: ``` -ENVIRONMENT=prod python main.py +poetry run prod ``` ## Using Docker diff --git a/templates/types/streaming/fastapi/app/config.py b/templates/types/streaming/fastapi/app/config.py index 29fa8d9a28fa2fc5ae9502639c1452cf8ae15e4b..31daa5117e7230f86ffe14778e7068314fde8667 100644 --- a/templates/types/streaming/fastapi/app/config.py +++ b/templates/types/streaming/fastapi/app/config.py @@ -1 +1,4 @@ +import os + DATA_DIR = "data" +STATIC_DIR = os.getenv("STATIC_DIR", "static") diff --git a/templates/types/streaming/fastapi/app/middlewares/frontend.py b/templates/types/streaming/fastapi/app/middlewares/frontend.py new file mode 100644 index 0000000000000000000000000000000000000000..83321c97ceff684095ff88d73c7acc53efa9b70c --- /dev/null +++ b/templates/types/streaming/fastapi/app/middlewares/frontend.py @@ -0,0 +1,78 @@ +import logging +from typing import Set + +import httpx +from fastapi import Request +from fastapi.responses import StreamingResponse + +logger = logging.getLogger("uvicorn") + + +class FrontendProxyMiddleware: + """ + Proxy requests to the frontend development server + """ + + def __init__( + self, + app, + frontend_endpoint: str, + excluded_paths: Set[str], + ): + self.app = app + self.excluded_paths = excluded_paths + self.frontend_endpoint = frontend_endpoint + + async def _request_frontend( + self, + request: Request, + path: str, + timeout: float = 60.0, + ): + async with httpx.AsyncClient(timeout=timeout) as client: + url = f"{self.frontend_endpoint}/{path}" + if request.query_params: + url = f"{url}?{request.query_params}" + + headers = dict(request.headers) + try: + body = await request.body() if request.method != "GET" else None + + response = await client.request( + method=request.method, + url=url, + headers=headers, + content=body, + follow_redirects=True, + ) + + response_headers = dict(response.headers) + 
response_headers.pop("content-encoding", None) + response_headers.pop("content-length", None) + + return StreamingResponse( + response.iter_bytes(), + status_code=response.status_code, + headers=response_headers, + ) + except Exception as e: + logger.error(f"Proxy error: {str(e)}") + raise + + def _is_excluded_path(self, path: str) -> bool: + return any( + path.startswith(excluded_path) for excluded_path in self.excluded_paths + ) + + async def __call__(self, scope, receive, send): + if scope["type"] != "http": + return await self.app(scope, receive, send) + + request = Request(scope, receive) + path = request.url.path + + if self._is_excluded_path(path): + return await self.app(scope, receive, send) + + response = await self._request_frontend(request, path.lstrip("/")) + return await response(scope, receive, send) diff --git a/templates/types/streaming/fastapi/gitignore b/templates/types/streaming/fastapi/gitignore index ae22d348e213803cd983db0c3fbcab39e87f0608..dec47a30058f80d56a99f2f3c360318f4da864ad 100644 --- a/templates/types/streaming/fastapi/gitignore +++ b/templates/types/streaming/fastapi/gitignore @@ -2,3 +2,4 @@ __pycache__ storage .env output +static/ diff --git a/templates/types/streaming/fastapi/main.py b/templates/types/streaming/fastapi/main.py index cf1a4e8c0eeb8ab2fbdd2ed51536b8ac1cc9770b..9796e0f0a1173da11ef69a33d2389598fe56e7f4 100644 --- a/templates/types/streaming/fastapi/main.py +++ b/templates/types/streaming/fastapi/main.py @@ -1,5 +1,5 @@ # flake8: noqa: E402 -from app.config import DATA_DIR +from app.config import DATA_DIR, STATIC_DIR from dotenv import load_dotenv load_dotenv() @@ -9,10 +9,10 @@ import os import uvicorn from app.api.routers import api_router +from app.middlewares.frontend import FrontendProxyMiddleware from app.observability import init_observability from app.settings import init_settings from fastapi import FastAPI -from fastapi.middleware.cors import CORSMiddleware from fastapi.responses import RedirectResponse from 
fastapi.staticfiles import StaticFiles @@ -24,38 +24,43 @@ init_observability() environment = os.getenv("ENVIRONMENT", "dev") # Default to 'development' if not set logger = logging.getLogger("uvicorn") -if environment == "dev": - logger.warning("Running in development mode - allowing CORS for all origins") - app.add_middleware( - CORSMiddleware, - allow_origins=["*"], - allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], - ) - - # Redirect to documentation page when accessing base URL - @app.get("/") - async def redirect_to_docs(): - return RedirectResponse(url="/docs") - -def mount_static_files(directory, path): +def mount_static_files(directory, path, html=False): if os.path.exists(directory): logger.info(f"Mounting static files '{directory}' at '{path}'") app.mount( path, - StaticFiles(directory=directory, check_dir=False), + StaticFiles(directory=directory, check_dir=False, html=html), name=f"{directory}-static", ) +app.include_router(api_router, prefix="/api") + # Mount the data files to serve the file viewer mount_static_files(DATA_DIR, "/api/files/data") # Mount the output files from tools mount_static_files("output", "/api/files/output") -app.include_router(api_router, prefix="/api") +if environment == "dev": + frontend_endpoint = os.getenv("FRONTEND_ENDPOINT") + if frontend_endpoint: + app.add_middleware( + FrontendProxyMiddleware, + frontend_endpoint=frontend_endpoint, + excluded_paths=set( + route.path for route in app.routes if hasattr(route, "path") + ), + ) + else: + logger.warning("No frontend endpoint - starting API server only") + + @app.get("/") + async def redirect_to_docs(): + return RedirectResponse(url="/docs") +else: + # Mount the frontend static files (production) + mount_static_files(STATIC_DIR, "/", html=True) if __name__ == "__main__": app_host = os.getenv("APP_HOST", "0.0.0.0") diff --git a/templates/types/streaming/fastapi/pyproject.toml b/templates/types/streaming/fastapi/pyproject.toml index 
ed5433584e4df4e396b89978c85a0812960a470b..342d191cc8193b61d9025ab3de2c3f33b8ec1f98 100644 --- a/templates/types/streaming/fastapi/pyproject.toml +++ b/templates/types/streaming/fastapi/pyproject.toml @@ -7,6 +7,9 @@ readme = "README.md" [tool.poetry.scripts] generate = "app.engine.generate:generate_datasource" +dev = "run:dev" # Starts the app in dev mode +prod = "run:prod" # Starts the app in prod mode +build = "run:build" # Builds the frontend assets and copies them to the static directory [tool.poetry.dependencies] python = ">=3.11,<3.14" @@ -16,6 +19,7 @@ python-dotenv = "^1.0.0" aiostream = "^0.5.2" cachetools = "^5.3.3" llama-index = "^0.11.17" +rich = "^13.9.4" [tool.poetry.group.dev.dependencies] mypy = "^1.8.0" diff --git a/templates/types/streaming/fastapi/run.py b/templates/types/streaming/fastapi/run.py new file mode 100644 index 0000000000000000000000000000000000000000..8286c373c73b938f052fe7efe71b3a98d9e736dd --- /dev/null +++ b/templates/types/streaming/fastapi/run.py @@ -0,0 +1,275 @@ +import asyncio +import os +import shutil +import socket +from asyncio.subprocess import Process +from pathlib import Path +from shutil import which +from subprocess import CalledProcessError, run + +import dotenv +import rich + +dotenv.load_dotenv() + + +FRONTEND_DIR = Path(os.getenv("FRONTEND_DIR", ".frontend")) +DEFAULT_FRONTEND_PORT = 3000 +STATIC_DIR = Path(os.getenv("STATIC_DIR", "static")) + + +def build(): + """ + Build the frontend and copy the static files to the backend. 
+ + Raises: + SystemError: If any build step fails + """ + static_dir = Path("static") + + try: + package_manager = _get_node_package_manager() + _install_frontend_dependencies() + + rich.print("\n[bold]Building the frontend[/bold]") + run([package_manager, "run", "build"], cwd=FRONTEND_DIR, check=True) + + if static_dir.exists(): + shutil.rmtree(static_dir) + static_dir.mkdir(exist_ok=True) + + shutil.copytree(FRONTEND_DIR / "out", static_dir, dirs_exist_ok=True) + + rich.print( + "\n[bold]Built frontend successfully![/bold]" + "\n[bold]Run: 'poetry run prod' to start the app[/bold]" + "\n[bold]Don't forget to update the .env file![/bold]" + ) + except CalledProcessError as e: + raise SystemError(f"Build failed during {e.cmd}") from e + except Exception as e: + raise SystemError(f"Build failed: {str(e)}") from e + + +def dev(): + asyncio.run(start_development_servers()) + + +def prod(): + asyncio.run(start_production_server()) + + +async def start_development_servers(): + """ + Start both frontend and backend development servers. + Frontend runs with hot reloading, backend runs FastAPI server. 
+ + Raises: + SystemError: If either server fails to start + """ + rich.print("\n[bold]Starting development servers[/bold]") + + try: + processes = [] + if _is_frontend_included(): + frontend_process, frontend_port = await _run_frontend() + processes.append(frontend_process) + backend_process = await _run_backend( + envs={ + "ENVIRONMENT": "dev", + "FRONTEND_ENDPOINT": f"http://localhost:{frontend_port}", + }, + ) + processes.append(backend_process) + else: + backend_process = await _run_backend( + envs={"ENVIRONMENT": "dev"}, + ) + processes.append(backend_process) + + try: + # Wait for processes to complete + await asyncio.gather(*[process.wait() for process in processes]) + except (asyncio.CancelledError, KeyboardInterrupt): + rich.print("\n[bold yellow]Shutting down...[/bold yellow]") + finally: + # Terminate both processes + for process in processes: + process.terminate() + try: + await asyncio.wait_for(process.wait(), timeout=5) + except asyncio.TimeoutError: + process.kill() + + except Exception as e: + raise SystemError(f"Failed to start development servers: {str(e)}") from e + + +async def start_production_server(): + if _is_frontend_included(): + is_frontend_built = (FRONTEND_DIR / "out" / "index.html").exists() + is_frontend_static_dir_exists = STATIC_DIR.exists() + if not is_frontend_built or not is_frontend_static_dir_exists: + build() + + try: + process = await _run_backend( + envs={"ENVIRONMENT": "prod"}, + ) + await process.wait() + except Exception as e: + raise SystemError(f"Failed to start production server: {str(e)}") from e + finally: + process.terminate() + try: + await asyncio.wait_for(process.wait(), timeout=5) + except asyncio.TimeoutError: + process.kill() + + +async def _run_frontend( + port: int = DEFAULT_FRONTEND_PORT, + timeout: int = 5, +) -> tuple[Process, int]: + """ + Start the frontend development server and return its process and port. 
+
+    Returns:
+        tuple[Process, int]: The frontend process and the port it's running on
+    """
+    # Install dependencies
+    _install_frontend_dependencies()
+
+    port = _find_free_port(start_port=DEFAULT_FRONTEND_PORT)
+    package_manager = _get_node_package_manager()
+    frontend_process = await asyncio.create_subprocess_exec(
+        package_manager,
+        "run",
+        "dev",
+        "-p",
+        str(port),
+        cwd=FRONTEND_DIR,
+    )
+    rich.print(
+        f"\n[bold]Waiting for frontend to start, port: {port}, process id: {frontend_process.pid}[/bold]"
+    )
+    # Block until the frontend is accessible
+    for _ in range(timeout):
+        await asyncio.sleep(1)
+        # Fail fast if the frontend process has already exited; otherwise treat the
+        # port no longer being bindable as the dev server having come up.
+        if frontend_process.returncode is not None:
+            raise RuntimeError("Could not start frontend dev server")
+        if not _is_bindable_port(port):
+            rich.print(
+                f"\n[bold green]Frontend dev server is running on port {port}[/bold green]"
+            )
+            return frontend_process, port
+    raise TimeoutError(f"Frontend dev server failed to start within {timeout} seconds")
+
+
+async def _run_backend(
+    envs: dict[str, str | None] = {},
+) -> Process:
+    """
+    Start the backend development server.
+
+    Args:
+        envs: Extra environment variables merged over os.environ for the backend process
+    Returns:
+        Process: The backend process
+    """
+    # Merge environment variables
+    envs = {**os.environ, **(envs or {})}
+    rich.print("\n[bold]Starting backend FastAPI server...[/bold]")
+    poetry_executable = _get_poetry_executable()
+    return await asyncio.create_subprocess_exec(
+        poetry_executable,
+        "run",
+        "python",
+        "main.py",
+        env=envs,
+    )
+
+
+def _install_frontend_dependencies():
+    package_manager = _get_node_package_manager()
+    rich.print(
+        f"\n[bold]Installing frontend dependencies using {Path(package_manager).name}. 
It might take a while...[/bold]" + ) + run([package_manager, "install"], cwd=".frontend", check=True) + + +def _get_node_package_manager() -> str: + """ + Check for available package managers and return the preferred one. + Returns 'pnpm' if installed, falls back to 'npm'. + Raises SystemError if neither is installed. + + Returns: + str: The full path to the available package manager executable + """ + # On Windows, we need to check for .cmd extensions + pnpm_cmds = ["pnpm", "pnpm.cmd"] + npm_cmds = ["npm", "npm.cmd"] + + for cmd in pnpm_cmds: + cmd_path = which(cmd) + if cmd_path is not None: + return cmd_path + + for cmd in npm_cmds: + cmd_path = which(cmd) + if cmd_path is not None: + return cmd_path + + raise SystemError( + "Neither pnpm nor npm is installed. Please install Node.js and a package manager first." + ) + + +def _get_poetry_executable() -> str: + """ + Check for available Poetry executables and return the preferred one. + Returns 'poetry' if installed, falls back to 'poetry.cmd'. + Raises SystemError if neither is installed. + + Returns: + str: The full path to the available Poetry executable + """ + poetry_cmds = ["poetry", "poetry.cmd"] + for cmd in poetry_cmds: + cmd_path = which(cmd) + if cmd_path is not None: + return cmd_path + raise SystemError("Poetry is not installed. Please install Poetry first.") + + +def _is_bindable_port(port: int) -> bool: + """Check if a port is available by attempting to connect to it.""" + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + try: + # Try to connect to the port + s.connect(("localhost", port)) + # If we can connect, port is in use + return False + except ConnectionRefusedError: + # Connection refused means port is available + return True + except socket.error: + # Other socket errors also likely mean port is available + return True + + +def _find_free_port(start_port: int) -> int: + """ + Find a free port starting from the given port number. 
+ """ + for port in range(start_port, 65535): + if _is_bindable_port(port): + return port + raise SystemError("No free port found") + + +def _is_frontend_included() -> bool: + """Check if the app has frontend""" + return FRONTEND_DIR.exists()