From 83ebdfb1c55d3fe19ce0c6f94f1787192e18ebc5 Mon Sep 17 00:00:00 2001
From: Alex Yang <himself65@outlook.com>
Date: Mon, 1 Jul 2024 14:52:57 -0700
Subject: [PATCH] fix: next.js binding (#997)

---
 .changeset/shiny-pants-change.md              |  6 ++
 .../nextjs-node-runtime/src/actions/openai.ts | 68 +++++++++++++++++++
 .../src/app/api/openai/route.ts               | 10 +++
 packages/llamaindex/src/next.ts               | 12 ++--
 4 files changed, 90 insertions(+), 6 deletions(-)
 create mode 100644 .changeset/shiny-pants-change.md
 create mode 100644 packages/llamaindex/e2e/examples/nextjs-node-runtime/src/actions/openai.ts
 create mode 100644 packages/llamaindex/e2e/examples/nextjs-node-runtime/src/app/api/openai/route.ts

diff --git a/.changeset/shiny-pants-change.md b/.changeset/shiny-pants-change.md
new file mode 100644
index 000000000..1cd630969
--- /dev/null
+++ b/.changeset/shiny-pants-change.md
@@ -0,0 +1,6 @@
+---
+"@llamaindex/next-node-runtime-test": patch
+"llamaindex": patch
+---
+
+fix: next.js build error
diff --git a/packages/llamaindex/e2e/examples/nextjs-node-runtime/src/actions/openai.ts b/packages/llamaindex/e2e/examples/nextjs-node-runtime/src/actions/openai.ts
new file mode 100644
index 000000000..1621a0947
--- /dev/null
+++ b/packages/llamaindex/e2e/examples/nextjs-node-runtime/src/actions/openai.ts
@@ -0,0 +1,68 @@
+"use server";
+import {
+  OpenAI,
+  OpenAIAgent,
+  QueryEngineTool,
+  Settings,
+  VectorStoreIndex,
+} from "llamaindex";
+import { HuggingFaceEmbedding } from "llamaindex/embeddings/HuggingFaceEmbedding";
+import { SimpleDirectoryReader } from "llamaindex/readers/SimpleDirectoryReader";
+
+Settings.llm = new OpenAI({
+  // eslint-disable-next-line turbo/no-undeclared-env-vars
+  apiKey: process.env.NEXT_PUBLIC_OPENAI_KEY ?? "FAKE_KEY_TO_PASS_TESTS",
+  model: "gpt-4o",
+});
+Settings.embedModel = new HuggingFaceEmbedding({
+  modelType: "BAAI/bge-small-en-v1.5",
+  quantized: false,
+});
+Settings.callbackManager.on("llm-tool-call", (event) => {
+  console.log(event.detail.payload);
+});
+Settings.callbackManager.on("llm-tool-result", (event) => {
+  console.log(event.detail.payload);
+});
+
+export async function getOpenAIModelRequest(query: string) {
+  try {
+    const currentDir = __dirname;
+
+    // load our data and create a query engine
+    const reader = new SimpleDirectoryReader();
+    const documents = await reader.loadData(currentDir);
+    const index = await VectorStoreIndex.fromDocuments(documents);
+    const retriever = index.asRetriever({
+      similarityTopK: 10,
+    });
+    const queryEngine = index.asQueryEngine({
+      retriever,
+    });
+
+    // define the query engine as a tool
+    const tools = [
+      new QueryEngineTool({
+        queryEngine,
+        metadata: {
+          name: "deployment_details_per_env",
+          description: `This tool can answer detailed questions about deployments that happened in various environments.`,
+        },
+      }),
+    ];
+    // create the agent
+    const agent = new OpenAIAgent({ tools });
+
+    const { response } = await agent.chat({
+      message: query,
+    });
+    return {
+      message: response,
+    };
+  } catch (err) {
+    console.error(err);
+    return {
+      errors: "Error calling OpenAI model",
+    };
+  }
+}
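
The action above exercises the full binding inside the Next.js Node runtime: Settings wires up gpt-4o and a local HuggingFaceEmbedding (which loads models through @xenova/transformers, the same package the next.ts change below externalizes), and the exported server action indexes the files next to it and answers through an OpenAIAgent armed with a query-engine tool. A minimal sketch of invoking it, assuming a caller with access to the "@/actions/openai" alias (the harness itself is illustrative, not part of this patch):

    import { getOpenAIModelRequest } from "@/actions/openai";

    // Server actions are plain async functions when imported server-side,
    // so a test harness can call one directly.
    const result = await getOpenAIModelRequest(
      "What deployments happened in the staging environment?",
    );
    if ("errors" in result) {
      console.error(result.errors); // the action logs and swallows the underlying error
    } else {
      console.log(result.message);
    }
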
diff --git a/packages/llamaindex/e2e/examples/nextjs-node-runtime/src/app/api/openai/route.ts b/packages/llamaindex/e2e/examples/nextjs-node-runtime/src/app/api/openai/route.ts
new file mode 100644
index 000000000..9268884cc
--- /dev/null
+++ b/packages/llamaindex/e2e/examples/nextjs-node-runtime/src/app/api/openai/route.ts
@@ -0,0 +1,10 @@
+import { getOpenAIModelRequest } from "@/actions/openai";
+import { NextRequest, NextResponse } from "next/server";
+
+// POST /api/openai
+export async function POST(request: NextRequest) {
+  const body = await request.json();
+  const content = await getOpenAIModelRequest(body.query);
+
+  return NextResponse.json(content, { status: 200 });
+}
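
The route is a thin JSON wrapper over the server action. Assuming a dev server on localhost:3000, it could be exercised like this (a sketch; the port and query are illustrative):

    const res = await fetch("http://localhost:3000/api/openai", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ query: "What deployments happened in staging?" }),
    });
    // Either { message: ... } on success, or { errors: ... } from the action's catch block.
    console.log(await res.json());
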
diff --git a/packages/llamaindex/src/next.ts b/packages/llamaindex/src/next.ts
index 9858835d3..a5dac67d1 100644
--- a/packages/llamaindex/src/next.ts
+++ b/packages/llamaindex/src/next.ts
@@ -16,23 +16,23 @@
  * @module
  */
 export default function withLlamaIndex(config: any) {
+  config.experimental = config.experimental ?? {};
+  config.experimental.serverComponentsExternalPackages =
+    config.experimental.serverComponentsExternalPackages ?? [];
+  config.experimental.serverComponentsExternalPackages.push(
+    "@xenova/transformers",
+  );
   const userWebpack = config.webpack;
-  //#region hack for `@xenova/transformers`
-  // Ignore node-specific modules when bundling for the browser
-  // See https://webpack.js.org/configuration/resolve/#resolvealias
   config.webpack = function (webpackConfig: any) {
     if (userWebpack) {
       webpackConfig = userWebpack(webpackConfig);
     }
     webpackConfig.resolve.alias = {
       ...webpackConfig.resolve.alias,
-      sharp$: false,
-      "onnxruntime-node$": false,
       "@google-cloud/vertexai": false,
       "groq-sdk": false,
     };
     return webpackConfig;
   };
-  //#endregion
   return config;
 }
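
The substantive fix: rather than stubbing sharp and onnxruntime-node out with webpack aliases (the old hack for @xenova/transformers), the wrapper now declares @xenova/transformers in experimental.serverComponentsExternalPackages, so Next.js leaves the package and its Node-specific dependencies to ordinary runtime resolution instead of trying to bundle them. Because existing values are preserved with ?? before pushing, user-supplied config merges cleanly. A minimal consuming next.config.mjs sketch (the pre-existing options shown are illustrative):

    import withLlamaIndex from "llamaindex/next";

    /** @type {import('next').NextConfig} */
    const nextConfig = {
      experimental: {
        // Preserved as-is; withLlamaIndex appends "@xenova/transformers".
        serverComponentsExternalPackages: ["sharp"],
      },
    };

    export default withLlamaIndex(nextConfig);
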
-- 
GitLab