From aac1ee3af3da0bd129d5286c32bb1f5d1f54002a Mon Sep 17 00:00:00 2001
From: Alex Yang <himself65@outlook.com>
Date: Sat, 6 Apr 2024 23:57:21 -0500
Subject: [PATCH] e2e: init llamaindex e2e test (#697)

---
 .github/workflows/test.yml                |  19 +++
 package.json                              |   1 +
 packages/core/e2e/.gitignore              |   1 +
 packages/core/e2e/README.md               |  38 ++++++
 packages/core/e2e/fixtures/llm/open_ai.ts |  68 +++++++++++
 packages/core/e2e/mock-module.js          |  36 ++++++
 packages/core/e2e/mock-register.js        |   3 +
 packages/core/e2e/node/basic.e2e.ts       | 139 ++++++++++++++++++++++
 packages/core/e2e/package.json            |  16 +++
 packages/core/e2e/tsconfig.json           |  23 ++++
 packages/core/src/agent/openai/base.ts    |  10 +-
 packages/core/src/llm/types.ts            |   4 +-
 packages/core/src/llm/utils.ts            |   6 +-
 packages/core/src/types.ts                |   2 +
 pnpm-lock.yaml                            |  40 ++++++-
 pnpm-workspace.yaml                       |   1 +
 tsconfig.json                             |   3 +
 turbo.json                                |   3 +
 18 files changed, 407 insertions(+), 6 deletions(-)
 create mode 100644 packages/core/e2e/.gitignore
 create mode 100644 packages/core/e2e/README.md
 create mode 100644 packages/core/e2e/fixtures/llm/open_ai.ts
 create mode 100644 packages/core/e2e/mock-module.js
 create mode 100644 packages/core/e2e/mock-register.js
 create mode 100644 packages/core/e2e/node/basic.e2e.ts
 create mode 100644 packages/core/e2e/package.json
 create mode 100644 packages/core/e2e/tsconfig.json

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 040d55928..f8ff42934 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -2,7 +2,26 @@ name: Run Tests
 
 on: [push, pull_request]
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
 jobs:
+  e2e:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: pnpm/action-setup@v2
+      - name: Setup Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version-file: ".nvmrc"
+          cache: "pnpm"
+      - name: Install dependencies
+        run: pnpm install
+      - name: Run E2E Tests
+        run: pnpm run e2e
+
   test:
     strategy:
       fail-fast: false
diff --git a/package.json b/package.json
index e1b1a1152..2805b1d42 100644
--- a/package.json
+++ b/package.json
@@ -9,6 +9,7 @@
     "format:write": "prettier --ignore-unknown --write .",
     "lint": "turbo run lint",
     "prepare": "husky",
+    "e2e": "turbo run e2e",
     "test": "turbo run test",
     "type-check": "tsc -b --diagnostics",
     "release": "pnpm run check-minor-version && pnpm run build:release && changeset publish",
diff --git a/packages/core/e2e/.gitignore b/packages/core/e2e/.gitignore
new file mode 100644
index 000000000..98d8a5a63
--- /dev/null
+++ b/packages/core/e2e/.gitignore
@@ -0,0 +1 @@
+logs
diff --git a/packages/core/e2e/README.md b/packages/core/e2e/README.md
new file mode 100644
index 000000000..ef6096106
--- /dev/null
+++ b/packages/core/e2e/README.md
@@ -0,0 +1,38 @@
+# LlamaIndexTS Core E2E Tests
+
+## Overview
+
+We use the Node.js test runner to run the E2E tests for LlamaIndexTS Core.
+
+It supports the following features:
+
+- Parallel test runs
+- A pure Node.js environment
+- Switching between mocked and real LLM APIs
+- Customizable mock logic
+
+## Usage
+
+- Run with the mock register:
+
+```shell
+node --import tsx --import ./mock-register.js --test ./node/basic.e2e.ts
+```
+
+- Run without the mock register:
+
+```shell
+node --import tsx --test ./node/basic.e2e.ts
+```
+
+- Run a specific test (by name pattern):
+
+```shell
+node --import tsx --import ./mock-register.js --test-name-pattern=agent --test ./node/basic.e2e.ts
+```
+
+- Run with debug logs:
+
+```shell
+CONSOLA_LEVEL=5 node --import tsx --import ./mock-register.js --test-name-pattern=agent --test ./node/basic.e2e.ts
+```
diff --git a/packages/core/e2e/fixtures/llm/open_ai.ts b/packages/core/e2e/fixtures/llm/open_ai.ts
new file mode 100644
index 000000000..201af668c
--- /dev/null
+++ b/packages/core/e2e/fixtures/llm/open_ai.ts
@@ -0,0 +1,68 @@
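+/**
+ * Mock implementation of the OpenAI LLM module, loaded in place of the real
+ * one when tests run with the mock register. Responses come from faker, so
+ * no network calls are made.
+ */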
+import { faker } from "@faker-js/faker";
+import type {
+  ChatResponse,
+  ChatResponseChunk,
+  CompletionResponse,
+  LLM,
+  LLMChatParamsNonStreaming,
+  LLMChatParamsStreaming,
+  LLMCompletionParamsNonStreaming,
+  LLMCompletionParamsStreaming,
+} from "llamaindex/llm/types";
+
+export function getOpenAISession() {
+  return {};
+}
+
+export function isFunctionCallingModel() {
+  return true;
+}
+
+export class OpenAI implements LLM {
+  get metadata() {
+    return {
+      model: "mock-model",
+      temperature: 0.1,
+      topP: 1,
+      contextWindow: 2048,
+      tokenizer: undefined,
+      isFunctionCallingModel: true,
+    };
+  }
+  chat(
+    params: LLMChatParamsStreaming<Record<string, unknown>>,
+  ): Promise<AsyncIterable<ChatResponseChunk>>;
+  chat(
+    params: LLMChatParamsNonStreaming<Record<string, unknown>>,
+  ): Promise<ChatResponse>;
+  chat(
+    params:
+      | LLMChatParamsStreaming<Record<string, unknown>>
+      | LLMChatParamsNonStreaming<Record<string, unknown>>,
+  ): unknown {
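+    // Streaming: yield a single fake chunk; non-streaming: return one fake message.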
+    if (params.stream) {
+      return {
+        [Symbol.asyncIterator]: async function* () {
+          yield {
+            delta: faker.word.words(),
+          } satisfies ChatResponseChunk;
+        },
+      };
+    }
+    return {
+      message: {
+        content: faker.lorem.paragraph(),
+        role: "assistant",
+      },
+    } satisfies ChatResponse;
+  }
+  complete(
+    params: LLMCompletionParamsStreaming,
+  ): Promise<AsyncIterable<CompletionResponse>>;
+  complete(
+    params: LLMCompletionParamsNonStreaming,
+  ): Promise<CompletionResponse>;
+  async complete(params: unknown): Promise<unknown> {
+    throw new Error("Method not implemented.");
+  }
+}
diff --git a/packages/core/e2e/mock-module.js b/packages/core/e2e/mock-module.js
new file mode 100644
index 000000000..d32e8f187
--- /dev/null
+++ b/packages/core/e2e/mock-module.js
@@ -0,0 +1,36 @@
+/**
+ * Loader hook that replaces a resolved module with the corresponding fixture
+ * file under ./fixtures, when one exists.
+ */
+import { stat } from "node:fs/promises";
+import { join, relative } from "node:path";
+import { fileURLToPath, pathToFileURL } from "node:url";
+const packageDistDir = fileURLToPath(new URL("../dist", import.meta.url));
+const fixturesDir = fileURLToPath(new URL("./fixtures", import.meta.url));
+
+export async function resolve(specifier, context, nextResolve) {
+  const result = await nextResolve(specifier, context);
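+  // Never replace Node.js built-in modules.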
+  if (result.format === "builtin" || result.url.startsWith("node:")) {
+    return result;
+  }
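+  // The dist build ships compiled .js files; fixtures are authored as .ts
+  // (executed via tsx), so map the resolved path accordingly.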
+  const targetUrl = fileURLToPath(result.url).replace(/\.js$/, ".ts");
+  const relativePath = relative(packageDistDir, targetUrl);
+  if (relativePath.startsWith(".") || relativePath.startsWith("/")) {
+    return result;
+  }
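+  // Use the fixture with the same relative path, if it exists on disk.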
+  const url = pathToFileURL(join(fixturesDir, relativePath)).toString();
+  const exist = await stat(fileURLToPath(url))
+    .then((stat) => stat.isFile())
+    .catch((err) => {
+      if (err.code === "ENOENT") {
+        return false;
+      }
+      throw err;
+    });
+  if (!exist) {
+    return result;
+  }
+  return {
+    url,
+    format: "module",
+  };
+}
diff --git a/packages/core/e2e/mock-register.js b/packages/core/e2e/mock-register.js
new file mode 100644
index 000000000..36e724f24
--- /dev/null
+++ b/packages/core/e2e/mock-register.js
@@ -0,0 +1,3 @@
+import { register } from "node:module";
+
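+// Install mock-module.js as an ESM loader hook before test files are imported,
+// so that imports of llamaindex internals can be redirected to fixtures.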
+register("./mock-module.js", import.meta.url);
diff --git a/packages/core/e2e/node/basic.e2e.ts b/packages/core/e2e/node/basic.e2e.ts
new file mode 100644
index 000000000..214fe7172
--- /dev/null
+++ b/packages/core/e2e/node/basic.e2e.ts
@@ -0,0 +1,139 @@
+/* eslint-disable @typescript-eslint/no-floating-promises */
+import { consola } from "consola";
+import {
+  OpenAI,
+  OpenAIAgent,
+  Settings,
+  type LLM,
+  type LLMEndEvent,
+  type LLMStartEvent,
+} from "llamaindex";
+import { ok } from "node:assert";
+import type { WriteStream } from "node:fs";
+import { createWriteStream } from "node:fs";
+import { mkdir } from "node:fs/promises";
+import { join } from "node:path";
+import { after, before, beforeEach, describe, test } from "node:test";
+import { inspect } from "node:util";
+
+let llm: LLM;
+let fsStream: WriteStream;
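+// Each run writes test names and LLM events to a timestamped log under e2e/logs.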
+before(async () => {
+  const logUrl = new URL(
+    join(
+      "..",
+      "logs",
+      `basic.e2e.${new Date().toISOString().replace(/:/g, "-").replace(/\./g, "-")}.log`,
+    ),
+    import.meta.url,
+  );
+  await mkdir(new URL(".", logUrl), { recursive: true });
+  fsStream = createWriteStream(logUrl, {
+    encoding: "utf-8",
+  });
+});
+
+after(() => {
+  fsStream.end();
+});
+
+beforeEach((s) => {
+  fsStream.write("start: " + s.name + "\n");
+});
+
+const llmEventStartHandler = (event: LLMStartEvent) => {
+  const { payload } = event.detail;
+  fsStream.write(
+    "llmEventStart: " +
+      inspect(payload, {
+        depth: Infinity,
+      }) +
+      "\n",
+  );
+};
+
+const llmEventEndHandler = (event: LLMEndEvent) => {
+  const { payload } = event.detail;
+  fsStream.write(
+    "llmEventEnd: " +
+      inspect(payload, {
+        depth: Infinity,
+      }) +
+      "\n",
+  );
+};
+
+before(() => {
+  Settings.llm = new OpenAI({
+    model: "gpt-3.5-turbo",
+  });
+  llm = Settings.llm;
+  Settings.callbackManager.on("llm-start", llmEventStartHandler);
+  Settings.callbackManager.on("llm-end", llmEventEndHandler);
+});
+
+after(() => {
+  Settings.callbackManager.off("llm-start", llmEventStartHandler);
+  Settings.callbackManager.off("llm-end", llmEventEndHandler);
+});
+
+describe("llm", () => {
+  test("llm.chat", async () => {
+    const response = await llm.chat({
+      messages: [
+        {
+          content: "Hello",
+          role: "user",
+        },
+      ],
+    });
+    consola.debug("response:", response);
+    ok(typeof response.message.content === "string");
+  });
+
+  test("stream llm.chat", async () => {
+    const iter = await llm.chat({
+      stream: true,
+      messages: [
+        {
+          content: "hello",
+          role: "user",
+        },
+      ],
+    });
+    for await (const chunk of iter) {
+      consola.debug("chunk:", chunk);
+      ok(typeof chunk.delta === "string");
+    }
+  });
+});
+
+describe("agent", () => {
+  test("agent.chat", async () => {
+    const agent = new OpenAIAgent({
+      tools: [
+        {
+          call: async () => {
+            return "35 degrees and sunny in San Francisco";
+          },
+          metadata: {
+            name: "Weather",
+            description: "Get the weather",
+            parameters: {
+              type: "object",
+              properties: {
+                location: { type: "string" },
+              },
+              required: ["location"],
+            },
+          },
+        },
+      ],
+    });
+    const result = await agent.chat({
+      message: "What is the weather in San Francisco?",
+    });
+    consola.debug("response:", result.response);
+    ok(typeof result.response === "string");
+  });
+});
diff --git a/packages/core/e2e/package.json b/packages/core/e2e/package.json
new file mode 100644
index 000000000..7b8b456b2
--- /dev/null
+++ b/packages/core/e2e/package.json
@@ -0,0 +1,16 @@
+{
+  "name": "@llamaindex/core-e2e",
+  "private": true,
+  "version": "0.0.2",
+  "type": "module",
+  "scripts": {
+    "e2e": "node --import tsx --import ./mock-register.js --test ./node/*.e2e.ts",
+    "e2e:nomock": "node --import tsx --test ./node/*.e2e.ts"
+  },
+  "devDependencies": {
+    "@faker-js/faker": "^8.4.1",
+    "consola": "^3.2.3",
+    "llamaindex": "workspace:*",
+    "tsx": "^4.7.2"
+  }
+}
diff --git a/packages/core/e2e/tsconfig.json b/packages/core/e2e/tsconfig.json
new file mode 100644
index 000000000..409139772
--- /dev/null
+++ b/packages/core/e2e/tsconfig.json
@@ -0,0 +1,23 @@
+{
+  "extends": "../../../tsconfig.json",
+  "compilerOptions": {
+    "outDir": "./lib",
+    "module": "node16",
+    "moduleResolution": "node16",
+    "target": "ESNext"
+  },
+  "include": [
+    "./**/*.ts",
+    "./mock-module.js",
+    "./mock-register.js",
+    "./fixtures"
+  ],
+  "references": [
+    {
+      "path": "../../core/tsconfig.json"
+    },
+    {
+      "path": "../../env/tsconfig.json"
+    }
+  ]
+}
diff --git a/packages/core/src/agent/openai/base.ts b/packages/core/src/agent/openai/base.ts
index 795792158..6f57ccf87 100644
--- a/packages/core/src/agent/openai/base.ts
+++ b/packages/core/src/agent/openai/base.ts
@@ -1,3 +1,4 @@
+import { Settings } from "../../Settings.js";
 import type { ChatMessage } from "../../llm/index.js";
 import { OpenAI } from "../../llm/index.js";
 import type { ObjectRetriever } from "../../objects/base.js";
@@ -32,7 +33,14 @@ export class OpenAIAgent extends AgentRunner {
     toolRetriever,
     systemPrompt,
   }: OpenAIAgentParams) {
-    llm = llm ?? new OpenAI({ model: "gpt-3.5-turbo-0613" });
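+    // Reuse the globally configured LLM when it is an OpenAI instance;
+    // otherwise fall back to a dedicated OpenAI client.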
+    if (!llm) {
+      if (Settings.llm instanceof OpenAI) {
+        llm = Settings.llm;
+      } else {
+        console.warn("No OpenAI model provided, creating a new one");
+        llm = new OpenAI({ model: "gpt-3.5-turbo-0613" });
+      }
+    }
 
     if (systemPrompt) {
       if (prefixMessages) {
diff --git a/packages/core/src/llm/types.ts b/packages/core/src/llm/types.ts
index 8abf65480..db039f58e 100644
--- a/packages/core/src/llm/types.ts
+++ b/packages/core/src/llm/types.ts
@@ -1,5 +1,5 @@
 import type { Tokenizers } from "../GlobalsHelper.js";
-import type { BaseTool } from "../types.js";
+import type { BaseTool, UUID } from "../types.js";
 
 type LLMBaseEvent<
   Type extends string,
@@ -11,12 +11,14 @@ type LLMBaseEvent<
 export type LLMStartEvent = LLMBaseEvent<
   "llm-start",
   {
+    id: UUID;
     messages: ChatMessage[];
   }
 >;
 export type LLMEndEvent = LLMBaseEvent<
   "llm-end",
   {
+    id: UUID;
     response: ChatResponse;
   }
 >;
diff --git a/packages/core/src/llm/utils.ts b/packages/core/src/llm/utils.ts
index 2fb626708..49a671b1a 100644
--- a/packages/core/src/llm/utils.ts
+++ b/packages/core/src/llm/utils.ts
@@ -1,4 +1,4 @@
-import { AsyncLocalStorage } from "@llamaindex/env";
+import { AsyncLocalStorage, randomUUID } from "@llamaindex/env";
 import { getCallbackManager } from "../internal/settings/CallbackManager.js";
 import type {
   ChatResponse,
@@ -68,8 +68,10 @@ export function wrapLLMEvent(
     this: LLM,
     ...params: Parameters<LLMChat["chat"]>
   ): ReturnType<LLMChat["chat"]> {
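+    // Tag this call with a unique id so its llm-start and llm-end events can be paired.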
+    const id = randomUUID();
     getCallbackManager().dispatchEvent("llm-start", {
       payload: {
+        id,
         messages: params[0].messages,
       },
     });
@@ -100,6 +102,7 @@ export function wrapLLMEvent(
         snapshot(() => {
           getCallbackManager().dispatchEvent("llm-end", {
             payload: {
+              id,
               response: finalResponse,
             },
           });
@@ -108,6 +111,7 @@ export function wrapLLMEvent(
     } else {
       getCallbackManager().dispatchEvent("llm-end", {
         payload: {
+          id,
           response,
         },
       });
diff --git a/packages/core/src/types.ts b/packages/core/src/types.ts
index 694255638..6adc07544 100644
--- a/packages/core/src/types.ts
+++ b/packages/core/src/types.ts
@@ -81,3 +81,5 @@ export class QueryBundle {
     return this.queryStr;
   }
 }
+
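+/** A UUID-formatted string (five hyphen-separated groups), e.g. from randomUUID(). */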
+export type UUID = `${string}-${string}-${string}-${string}-${string}`;
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 8d1774216..4fdbc8d75 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -305,6 +305,21 @@ importers:
         specifier: ^5.3.3
         version: 5.4.3
 
+  packages/core/e2e:
+    devDependencies:
+      '@faker-js/faker':
+        specifier: ^8.4.1
+        version: 8.4.1
+      consola:
+        specifier: ^3.2.3
+        version: 3.2.3
+      llamaindex:
+        specifier: workspace:*
+        version: link:..
+      tsx:
+        specifier: ^4.7.2
+        version: 4.7.2
+
   packages/core/tests:
     devDependencies:
       llamaindex:
@@ -3308,6 +3323,11 @@ packages:
     engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
     dev: false
 
+  /@faker-js/faker@8.4.1:
+    resolution: {integrity: sha512-XQ3cU+Q8Uqmrbf2e0cIC/QN43sTBSC8KF12u29Mb47tWrt2hAgBXSgpZMj4Ao8Uk0iJcU99QsOCaIL8934obCg==}
+    engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0, npm: '>=6.14.13'}
+    dev: true
+
   /@fastify/busboy@2.1.0:
     resolution: {integrity: sha512-+KpH+QxZU7O4675t3mnkQKcZZg56u+K/Ct2K+N2AZYNVK8kyeo/bI18tI8aPm3tvNNRyTWfj6s5tnGNlcbQRsA==}
     engines: {node: '>=14'}
@@ -4660,7 +4680,7 @@ packages:
     resolution: {integrity: sha512-jGjmu/ZqS7FjSH6owMcD5qpq19+1RS9DeVRqfl1FeBMxTDQAGwlMWOcs52NDoXaNKyG3d1cYQFMs9rCrb88o9Q==}
     dependencies:
       '@types/history': 4.7.11
-      '@types/react': 18.2.66
+      '@types/react': 18.2.73
     dev: true
 
   /@types/react@18.2.65:
@@ -6419,6 +6439,11 @@ packages:
   /consola@2.15.3:
     resolution: {integrity: sha512-9vAdYbHj6x2fLKC4+oPH0kFzY/orMZyG2Aj+kNylHxKGJ/Ed4dpNyAQYwJOdqO4zdM7XpVHmyejQDcQHrnuXbw==}
 
+  /consola@3.2.3:
+    resolution: {integrity: sha512-I5qxpzLv+sJhTVEoLYNcTW+bThDCPsit0vLNKShZx6rLtpilNpmmeTPaeqJb9ZE9dV3DGaeby6Vuhrw38WjeyQ==}
+    engines: {node: ^14.18.0 || >=16.10.0}
+    dev: true
+
   /content-disposition@0.5.2:
     resolution: {integrity: sha512-kRGRZw3bLlFISDBgwTSA1TMBFN6J6GWDeubmDE3AF+3+yXL8hTWv8r5rkLbqYXY4RjPk/EzHnClI3zQf1cFmHA==}
     engines: {node: '>= 0.6'}
@@ -8705,7 +8730,6 @@ packages:
     resolution: {integrity: sha512-wuMsz4leaj5hbGgg4IvDU0bqJagpftG5l5cXIAvo8uZrqn0NJqwtfupTN00VnkQJPcIRrxYrm1Ue24btpCha2A==}
     dependencies:
       resolve-pkg-maps: 1.0.0
-    dev: false
 
   /github-from-package@0.0.0:
     resolution: {integrity: sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==}
@@ -13420,7 +13444,6 @@ packages:
 
   /resolve-pkg-maps@1.0.0:
     resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==}
-    dev: false
 
   /resolve@1.22.8:
     resolution: {integrity: sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==}
@@ -14794,6 +14817,17 @@ packages:
       typescript: 4.9.5
     dev: true
 
+  /tsx@4.7.2:
+    resolution: {integrity: sha512-BCNd4kz6fz12fyrgCTEdZHGJ9fWTGeUzXmQysh0RVocDY3h4frk05ZNCXSy4kIenF7y/QnrdiVpTsyNRn6vlAw==}
+    engines: {node: '>=18.0.0'}
+    hasBin: true
+    dependencies:
+      esbuild: 0.19.12
+      get-tsconfig: 4.7.2
+    optionalDependencies:
+      fsevents: 2.3.3
+    dev: true
+
   /tty-table@4.2.3:
     resolution: {integrity: sha512-Fs15mu0vGzCrj8fmJNP7Ynxt5J7praPXqFN0leZeZBXJwkMxv9cb2D454k1ltrtUSJbZ4yH4e0CynsHLxmUfFA==}
     engines: {node: '>=8.0.0'}
diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml
index c06441a5c..260d6689e 100644
--- a/pnpm-workspace.yaml
+++ b/pnpm-workspace.yaml
@@ -2,6 +2,7 @@ packages:
   - "apps/*"
   - "packages/*"
   - "packages/core/tests"
+  - "packages/core/e2e"
   - "packages/edge/e2e/*"
   - "examples/"
   - "examples/*"
diff --git a/tsconfig.json b/tsconfig.json
index 9027b38e4..43f6f276e 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -24,6 +24,9 @@
     {
       "path": "./packages/edge/e2e/test-edge-runtime/tsconfig.json"
     },
+    {
+      "path": "./packages/core/e2e/tsconfig.json"
+    },
     {
       "path": "./packages/core/tests/tsconfig.json"
     },
diff --git a/turbo.json b/turbo.json
index 6ef217666..8ca45295e 100644
--- a/turbo.json
+++ b/turbo.json
@@ -18,6 +18,9 @@
     "test": {
       "dependsOn": ["^build", "@llamaindex/edge#build"]
     },
+    "e2e": {
+      "dependsOn": ["^build"]
+    },
     "dev": {
       "cache": false,
       "persistent": true
-- 
GitLab