diff --git a/.changeset/perfect-days-smoke.md b/.changeset/perfect-days-smoke.md
new file mode 100644
index 0000000000000000000000000000000000000000..b56490f0a9836a6f7abc4e1e01e78c654305d4c0
--- /dev/null
+++ b/.changeset/perfect-days-smoke.md
@@ -0,0 +1,5 @@
+---
+"create-llama": patch
+---
+
+Generate NEXT_PUBLIC_CHAT_API for NextJS backend to specify alternative backend
diff --git a/create-app.ts b/create-app.ts
index 8849c0fb9cda6ce274612355e2fcd0a35f59ff97..345ff36992ac4837ca40eadde8660cc91e6fcc98 100644
--- a/create-app.ts
+++ b/create-app.ts
@@ -16,7 +16,7 @@ import { configVSCode } from "./helpers/vscode";
 
 export type InstallAppArgs = Omit<
   InstallTemplateArgs,
-  "appName" | "root" | "isOnline" | "customApiPath"
+  "appName" | "root" | "isOnline" | "port"
 > & {
   appPath: string;
   frontend: boolean;
@@ -34,7 +34,6 @@ export async function createApp({
   communityProjectConfig,
   llamapack,
   vectorDb,
-  port,
   postInstallAction,
   dataSources,
   tools,
@@ -80,7 +79,6 @@ export async function createApp({
     communityProjectConfig,
     llamapack,
     vectorDb,
-    port,
     postInstallAction,
     dataSources,
     tools,
@@ -100,7 +98,6 @@ export async function createApp({
       ...args,
       root: frontendRoot,
       framework: "nextjs",
-      customApiPath: `http://localhost:${port ?? 8000}/api/chat`,
       backend: false,
     });
   }
diff --git a/helpers/env-variables.ts b/helpers/env-variables.ts
index ddac770d384eaad8d1ac2067775b91212cc360cd..37fc2d5378fef0b713c8ede881ff9b558e356077 100644
--- a/helpers/env-variables.ts
+++ b/helpers/env-variables.ts
@@ -407,6 +407,13 @@ const getFrameworkEnvs = (
       ],
     );
   }
+  if (framework === "nextjs") {
+    result.push({
+      name: "NEXT_PUBLIC_CHAT_API",
+      description:
+        "The API for the chat endpoint. Set when using a custom backend (e.g. Express). Use full URL like http://localhost:8000/api/chat",
+    });
+  }
   return result;
 };
 
@@ -585,18 +592,10 @@ export const createBackendEnvFile = async (
 export const createFrontendEnvFile = async (
   root: string,
   opts: {
-    customApiPath?: string;
     vectorDb?: TemplateVectorDB;
   },
 ) => {
   const defaultFrontendEnvs = [
-    {
-      name: "NEXT_PUBLIC_CHAT_API",
-      description: "The backend API for chat endpoint.",
-      value: opts.customApiPath
-        ? opts.customApiPath
-        : "http://localhost:8000/api/chat",
-    },
     {
       name: "NEXT_PUBLIC_USE_LLAMACLOUD",
       description: "Let's the user change indexes in LlamaCloud projects",
diff --git a/helpers/index.ts b/helpers/index.ts
index 8525455d444503432babdde672a5df644073e9cd..27bdd32e2a7b3139e347fa041179b3fea52bfc05 100644
--- a/helpers/index.ts
+++ b/helpers/index.ts
@@ -225,7 +225,6 @@ export const installTemplate = async (
   } else {
     // this is a frontend for a full-stack app, create .env file with model information
     await createFrontendEnvFile(props.root, {
-      customApiPath: props.customApiPath,
       vectorDb: props.vectorDb,
     });
   }
diff --git a/helpers/types.ts b/helpers/types.ts
index 53e1cdbabc919ea63975edd445b7abaaa43c0934..75fdc60d6d18cb7cd151f0feea3425adf6d95df7 100644
--- a/helpers/types.ts
+++ b/helpers/types.ts
@@ -89,7 +89,6 @@ export interface InstallTemplateArgs {
   framework: TemplateFramework;
   ui: TemplateUI;
   dataSources: TemplateDataSource[];
-  customApiPath?: string;
   modelConfig: ModelConfig;
   llamaCloudKey?: string;
   useLlamaParse?: boolean;
diff --git a/index.ts b/index.ts
index 8b87b1199afcc20e60088696a5cac379de90347e..1cbe50de18c925b00ef7601c45789a00c17e1d84 100644
--- a/index.ts
+++ b/index.ts
@@ -326,7 +326,6 @@ async function run(): Promise<void> {
     ...answers,
     appPath: resolvedProjectPath,
     packageManager,
-    port: options.port,
   });
 
   if (answers.postInstallAction === "VSCode") {
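
Usage sketch (assumption, not part of the patch): after this change the generated Next.js frontend no longer receives a hardcoded NEXT_PUBLIC_CHAT_API value; the variable is only documented in the generated .env and is set by the user when chat requests should go to a separate backend such as Express. A minimal, hypothetical illustration of how frontend code could consume it, assuming it falls back to the built-in Next.js /api/chat route when the variable is unset:

    // chat-config.ts (hypothetical helper, not part of this diff).
    // NEXT_PUBLIC_* variables are inlined by Next.js at build time,
    // so this also works in client components.
    export const CHAT_API: string =
      process.env.NEXT_PUBLIC_CHAT_API ?? "/api/chat";

    // Example .env entry for a separate backend, using the full URL
    // from the env description above:
    // NEXT_PUBLIC_CHAT_API=http://localhost:8000/api/chat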