diff --git a/apps/docs/docs/examples/agent.mdx b/apps/docs/docs/examples/agent.mdx
new file mode 100644
index 0000000000000000000000000000000000000000..2ebbf2e9bad2e43c82bef62cfd36950e3b3e49ea
--- /dev/null
+++ b/apps/docs/docs/examples/agent.mdx
@@ -0,0 +1,85 @@
+# Agents
+
+A built-in agent that can make decisions and perform reasoning based on the tools provided to it.
+
+## OpenAI Agent
+
+```ts
+import { FunctionTool, OpenAIAgent } from "llamaindex";
+
+// Define a function to sum two numbers
+function sumNumbers({ a, b }: { a: number; b: number }): number {
+  return a + b;
+}
+
+// Define a function to divide two numbers
+function divideNumbers({ a, b }: { a: number; b: number }): number {
+  return a / b;
+}
+
+// Define the parameters of the sum function as a JSON schema
+const sumJSON = {
+  type: "object",
+  properties: {
+    a: {
+      type: "number",
+      description: "The first number",
+    },
+    b: {
+      type: "number",
+      description: "The second number",
+    },
+  },
+  required: ["a", "b"],
+};
+
+// Define the parameters of the divide function as a JSON schema
+const divideJSON = {
+  type: "object",
+  properties: {
+    a: {
+      type: "number",
+      description: "The dividend to divide",
+    },
+    b: {
+      type: "number",
+      description: "The divisor to divide by",
+    },
+  },
+  required: ["a", "b"],
+};
+
+async function main() {
+  // Create a function tool from the sum function
+  const sumFunctionTool = new FunctionTool(sumNumbers, {
+    name: "sumNumbers",
+    description: "Use this function to sum two numbers",
+    parameters: sumJSON,
+  });
+
+  // Create a function tool from the divide function
+  const divideFunctionTool = new FunctionTool(divideNumbers, {
+    name: "divideNumbers",
+    description: "Use this function to divide two numbers",
+    parameters: divideJSON,
+  });
+
+  // Create an OpenAIAgent with the function tools
+  const agent = new OpenAIAgent({
+    tools: [sumFunctionTool, divideFunctionTool],
+    verbose: true,
+  });
+
+  // Chat with the agent
+  const response = await agent.chat({
+    message: "How much is 5 + 5? then divide by 2",
+  });
+
+  // Print the response
+  console.log(String(response));
+}
+
+main().then(() => {
+  console.log("Done");
+});
+```
diff --git a/apps/docs/docs/modules/agent/_category_.yml b/apps/docs/docs/modules/agent/_category_.yml
new file mode 100644
index 0000000000000000000000000000000000000000..549b74c5a136746cc283d3307172ae7f5cd42c38
--- /dev/null
+++ b/apps/docs/docs/modules/agent/_category_.yml
@@ -0,0 +1 @@
+label: "Agents"
diff --git a/apps/docs/docs/modules/agent/index.md b/apps/docs/docs/modules/agent/index.md
new file mode 100644
index 0000000000000000000000000000000000000000..28d9ff27b556689e41c16f5e8d3bbc9e1b7a4b5d
--- /dev/null
+++ b/apps/docs/docs/modules/agent/index.md
@@ -0,0 +1,14 @@
+# Agents
+
+An “agent” is an automated reasoning and decision engine. It takes in a user input/query and can make internal decisions for executing that query in order to return the correct result. The key agent components can include, but are not limited to:
+
+- Breaking down a complex question into smaller ones
+- Choosing an external Tool to use + coming up with parameters for calling the Tool
+- Planning out a set of tasks
+- Storing previously completed tasks in a memory module
+
+## Getting Started
+
+LlamaIndex.TS comes with a few built-in agents, but you can also create your own. The built-in agents include:
+
+- [OpenAI Agent](./openai.mdx)
diff --git a/apps/docs/docs/modules/agent/openai.mdx b/apps/docs/docs/modules/agent/openai.mdx
new file mode 100644
index 0000000000000000000000000000000000000000..084cea1c9bcb8679c668357468f261947de23ccf
--- /dev/null
+++ b/apps/docs/docs/modules/agent/openai.mdx
@@ -0,0 +1,183 @@
+# OpenAI Agent
+
+With the OpenAI API supporting function calling, it's never been easier to build your own agent!
+
+In this notebook tutorial, we showcase how to write your own OpenAI agent.
+
+## Setup
+
+First, you need to install the `llamaindex` package. You can do this by running the following command in your terminal:
+
+```bash
+pnpm i llamaindex
+```
+
+Then we can define a function to sum two numbers and another function to divide two numbers.
+
+```ts
+function sumNumbers({ a, b }: { a: number; b: number }): number {
+  return a + b;
+}
+
+// Define a function to divide two numbers
+function divideNumbers({ a, b }: { a: number; b: number }): number {
+  return a / b;
+}
+```
+
+## Create a function tool
+
+Now we can create a function tool from the sum function and another function tool from the divide function.
+
+For the parameters of the sum function, we can define a JSON schema.
+
+### JSON Schema
+
+```ts
+const sumJSON = {
+  type: "object",
+  properties: {
+    a: {
+      type: "number",
+      description: "The first number",
+    },
+    b: {
+      type: "number",
+      description: "The second number",
+    },
+  },
+  required: ["a", "b"],
+};
+
+const divideJSON = {
+  type: "object",
+  properties: {
+    a: {
+      type: "number",
+      description: "The dividend a to divide",
+    },
+    b: {
+      type: "number",
+      description: "The divisor b to divide by",
+    },
+  },
+  required: ["a", "b"],
+};
+
+const sumFunctionTool = new FunctionTool(sumNumbers, {
+  name: "sumNumbers",
+  description: "Use this function to sum two numbers",
+  parameters: sumJSON,
+});
+
+const divideFunctionTool = new FunctionTool(divideNumbers, {
+  name: "divideNumbers",
+  description: "Use this function to divide two numbers",
+  parameters: divideJSON,
+});
+```
+
+## Create an OpenAIAgent
+
+Now we can create an OpenAIAgent with the function tools.
+
+```ts
+const worker = new OpenAIAgent({
+  tools: [sumFunctionTool, divideFunctionTool],
+  verbose: true,
+});
+```
+
+## Chat with the agent
+
+Now we can chat with the agent.
+
+```ts
+const response = await worker.chat({
+  message: "How much is 5 + 5? then divide by 2",
+});
+
+console.log(String(response));
+```
+
+## Full code
+
+```ts
+import { FunctionTool, OpenAIAgent } from "llamaindex";
+
+// Define a function to sum two numbers
+function sumNumbers({ a, b }: { a: number; b: number }): number {
+  return a + b;
+}
+
+// Define a function to divide two numbers
+function divideNumbers({ a, b }: { a: number; b: number }): number {
+  return a / b;
+}
+
+// Define the parameters of the sum function as a JSON schema
+const sumJSON = {
+  type: "object",
+  properties: {
+    a: {
+      type: "number",
+      description: "The first number",
+    },
+    b: {
+      type: "number",
+      description: "The second number",
+    },
+  },
+  required: ["a", "b"],
+};
+
+// Define the parameters of the divide function as a JSON schema
+const divideJSON = {
+  type: "object",
+  properties: {
+    a: {
+      type: "number",
+      description: "The dividend a to divide",
+    },
+    b: {
+      type: "number",
+      description: "The divisor b to divide by",
+    },
+  },
+  required: ["a", "b"],
+};
+
+async function main() {
+  // Create a function tool from the sum function
+  const sumFunctionTool = new FunctionTool(sumNumbers, {
+    name: "sumNumbers",
+    description: "Use this function to sum two numbers",
+    parameters: sumJSON,
+  });
+
+  // Create a function tool from the divide function
+  const divideFunctionTool = new FunctionTool(divideNumbers, {
+    name: "divideNumbers",
+    description: "Use this function to divide two numbers",
+    parameters: divideJSON,
+  });
+
+  // Create an OpenAIAgent with the function tools
+  const agent = new OpenAIAgent({
+    tools: [sumFunctionTool, divideFunctionTool],
+    verbose: true,
+  });
+
+  // Chat with the agent
+  const response = await agent.chat({
+    message: "How much is 5 + 5? then divide by 2",
+  });
+
+  // Print the response
+  console.log(String(response));
+}
+
+main().then(() => {
+  console.log("Done");
+});
+```
diff --git a/examples/agent/openai.ts b/examples/agent/openai.ts
new file mode 100644
index 0000000000000000000000000000000000000000..58eb11a0aa9cd85df53af8a203e7b6aff299ef28
--- /dev/null
+++ b/examples/agent/openai.ts
@@ -0,0 +1,76 @@
+import { FunctionTool, OpenAIAgent } from "llamaindex";
+
+// Define a function to sum two numbers
+function sumNumbers({ a, b }: { a: number; b: number }): number {
+  return a + b;
+}
+
+// Define a function to divide two numbers
+function divideNumbers({ a, b }: { a: number; b: number }): number {
+  return a / b;
+}
+
+// Define the parameters of the sum function as a JSON schema
+const sumJSON = {
+  type: "object",
+  properties: {
+    a: {
+      type: "number",
+      description: "The first number",
+    },
+    b: {
+      type: "number",
+      description: "The second number",
+    },
+  },
+  required: ["a", "b"],
+};
+
+const divideJSON = {
+  type: "object",
+  properties: {
+    a: {
+      type: "number",
+      description: "The dividend a to divide",
+    },
+    b: {
+      type: "number",
+      description: "The divisor b to divide by",
+    },
+  },
+  required: ["a", "b"],
+};
+
+async function main() {
+  // Create a function tool from the sum function
+  const functionTool = new FunctionTool(sumNumbers, {
+    name: "sumNumbers",
+    description: "Use this function to sum two numbers",
+    parameters: sumJSON,
+  });
+
+  // Create a function tool from the divide function
+  const functionTool2 = new FunctionTool(divideNumbers, {
+    name: "divideNumbers",
+    description: "Use this function to divide two numbers",
+    parameters: divideJSON,
+  });
+
+  // Create an OpenAIAgent with the function tools
+  const agent = new OpenAIAgent({
+    tools: [functionTool, functionTool2],
+    verbose: true,
+  });
+
+  // Chat with the agent
+  const response = await agent.chat({
+    message: "How much is 5 + 5? then divide by 2",
+  });
+
+  // Print the response
+  console.log(String(response));
+}
+
+main().then(() => {
+  console.log("Done");
+});
diff --git a/packages/core/src/OutputParser.ts b/packages/core/src/OutputParser.ts
index 2b09bb4b7823ddf52dd3162935f3da0050403b8e..de356f6a83aeca58ba91dd12ac00ffa1999cf79a 100644
--- a/packages/core/src/OutputParser.ts
+++ b/packages/core/src/OutputParser.ts
@@ -74,9 +74,6 @@ export class SubQuestionOutputParser
 {
   parse(output: string): StructuredOutput<SubQuestion[]> {
     const parsed = parseJsonMarkdown(output);
-
-    // TODO add zod validation
-
     return { rawOutput: output, parsedOutput: parsed };
   }
 
diff --git a/packages/core/src/Response.ts b/packages/core/src/Response.ts
index 6f651587611d16af3c09db4eec59f72d9f5a4b19..5ae9514b77666000b306c7617d988b12dc7db88d 100644
--- a/packages/core/src/Response.ts
+++ b/packages/core/src/Response.ts
@@ -13,7 +13,7 @@ export class Response {
     this.sourceNodes = sourceNodes || [];
   }
 
-  getFormattedSources() {
+  protected _getFormattedSources() {
     throw new Error("Not implemented yet");
   }
 
diff --git a/packages/core/src/agent/index.ts b/packages/core/src/agent/index.ts
new file mode 100644
index 0000000000000000000000000000000000000000..596146258fbfcd2782d3de29de100862ed882508
--- /dev/null
+++ b/packages/core/src/agent/index.ts
@@ -0,0 +1,2 @@
+export * from "./openai/base";
+export * from "./openai/worker";
diff --git a/packages/core/src/agent/openai/base.ts b/packages/core/src/agent/openai/base.ts
new file mode 100644
index 0000000000000000000000000000000000000000..6581f5de024efe44c209b50f2225a232d2545fe3
--- /dev/null
+++ b/packages/core/src/agent/openai/base.ts
@@ -0,0 +1,55 @@
+import { CallbackManager } from "../../callbacks/CallbackManager";
+import { ChatMessage, OpenAI } from "../../llm";
+import { ObjectRetriever } from "../../objects/base";
+import { BaseTool } from "../../types";
+import { AgentRunner } from "../runner/base";
+import { OpenAIAgentWorker } from "./worker";
+
+type OpenAIAgentParams = {
+  tools: BaseTool[];
+  llm?: OpenAI;
+  memory?: any;
+  prefixMessages?: ChatMessage[];
+  verbose?: boolean;
+  maxFunctionCalls?: number;
+  defaultToolChoice?: string;
+  callbackManager?: CallbackManager;
+  toolRetriever?: ObjectRetriever<BaseTool>;
+};
+
+/**
+ * An agent that uses OpenAI's API to generate text.
+ *
+ * @category OpenAI
+ */
+export class OpenAIAgent extends AgentRunner {
+  constructor({
+    tools,
+    llm,
+    memory,
+    prefixMessages,
+    verbose,
+    maxFunctionCalls = 5,
+    defaultToolChoice = "auto",
+    callbackManager,
+    toolRetriever,
+  }: OpenAIAgentParams) {
+    const stepEngine = new OpenAIAgentWorker({
+      tools,
+      callbackManager,
+      llm,
+      prefixMessages,
+      maxFunctionCalls,
+      toolRetriever,
+      verbose,
+    });
+
+    super({
+      agentWorker: stepEngine,
+      memory,
+      callbackManager,
+      defaultToolChoice,
+      chatHistory: prefixMessages,
+    });
+  }
+}
diff --git a/packages/core/src/agent/openai/types/chat.ts b/packages/core/src/agent/openai/types/chat.ts
new file mode 100644
index 0000000000000000000000000000000000000000..afe92219808e0503886000257197cacf0833e7df
--- /dev/null
+++ b/packages/core/src/agent/openai/types/chat.ts
@@ -0,0 +1,13 @@
+export type OpenAIToolCall = ChatCompletionMessageToolCall;
+
+export interface Function {
+  arguments: string;
+  name: string;
+  type: "function";
+}
+
+export interface ChatCompletionMessageToolCall {
+  id: string;
+  function: Function;
+  type: "function";
+}
diff --git a/packages/core/src/agent/openai/utils.ts b/packages/core/src/agent/openai/utils.ts
new file mode 100644
index 0000000000000000000000000000000000000000..fb2094b0e55e0a6db0d11c7acbaefcf2fcc4d269
--- /dev/null
+++ b/packages/core/src/agent/openai/utils.ts
@@ -0,0 +1,27 @@
+import { ToolMetadata } from "../../types";
+
+export type OpenAIFunction = {
+  type: "function";
+  function: ToolMetadata;
+};
+
+type OpenAiTool = {
+  name: string;
+  description: string;
+  parameters: ToolMetadata["parameters"];
+};
+
+export const toOpenAiTool = ({
+  name,
+  description,
+  parameters,
+}: OpenAiTool): OpenAIFunction => {
+  return {
+    type: "function",
+    function: {
+      name: name,
+      description: description,
+      parameters,
+    },
+  };
+};
diff --git a/packages/core/src/agent/openai/worker.ts b/packages/core/src/agent/openai/worker.ts
new file mode 100644
index 0000000000000000000000000000000000000000..1119ca1c9e6bda6d2686d182375d915ae8321699
--- /dev/null
+++ b/packages/core/src/agent/openai/worker.ts
@@ -0,0 +1,405 @@
+// Assuming that the necessary interfaces and classes (like BaseTool, OpenAI, ChatMessage, CallbackManager, etc.) are defined elsewhere
+
+import { CallbackManager } from "../../callbacks/CallbackManager";
+import { AgentChatResponse, ChatResponseMode } from "../../engines/chat";
+import { randomUUID } from "../../env";
+import {
+  ChatMessage,
+  ChatResponse,
+  ChatResponseChunk,
+  OpenAI,
+} from "../../llm";
+import { ChatMemoryBuffer } from "../../memory/ChatMemoryBuffer";
+import { ObjectRetriever } from "../../objects/base";
+import { ToolOutput } from "../../tools/types";
+import { callToolWithErrorHandling } from "../../tools/utils";
+import { BaseTool } from "../../types";
+import { AgentWorker, Task, TaskStep, TaskStepOutput } from "../types";
+import { addUserStepToMemory, getFunctionByName } from "../utils";
+import { OpenAIToolCall } from "./types/chat";
+import { toOpenAiTool } from "./utils";
+
+const DEFAULT_MAX_FUNCTION_CALLS = 5;
+
+/**
+ * Call function.
+ * @param tools: tools
+ * @param toolCall: tool call
+ * @param verbose: verbose
+ * @returns: void
+ */
+async function callFunction(
+  tools: BaseTool[],
+  toolCall: OpenAIToolCall,
+  verbose: boolean = false,
+): Promise<[ChatMessage, ToolOutput]> {
+  const id_ = toolCall.id;
+  const functionCall = toolCall.function;
+  const name = toolCall.function.name;
+  const argumentsStr = toolCall.function.arguments;
+
+  if (verbose) {
+    console.log("=== Calling Function ===");
+    console.log(`Calling function: ${name} with args: ${argumentsStr}`);
+  }
+
+  const tool = getFunctionByName(tools, name);
+  const argumentDict = JSON.parse(argumentsStr);
+
+  // Call tool
+  // Use default error message
+  const output = await callToolWithErrorHandling(tool, argumentDict, null);
+
+  if (verbose) {
+    console.log(`Got output ${output}`);
+    console.log("==========================");
+  }
+
+  return [
+    {
+      content: String(output),
+      role: "tool",
+      additionalKwargs: {
+        name,
+        tool_call_id: id_,
+      },
+    },
+    output,
+  ];
+}
+
+type OpenAIAgentWorkerParams = {
+  tools: BaseTool[];
+  llm?: OpenAI;
+  prefixMessages?: ChatMessage[];
+  verbose?: boolean;
+  maxFunctionCalls?: number;
+  callbackManager?: CallbackManager | undefined;
+  toolRetriever?: ObjectRetriever<BaseTool>;
+};
+
+type CallFunctionOutput = {
+  message: ChatMessage;
+  toolOutput: ToolOutput;
+};
+
+/**
+ * OpenAI agent worker.
+ * This class is responsible for running the agent.
+ */
+export class OpenAIAgentWorker implements AgentWorker {
+  private _llm: OpenAI;
+  private _verbose: boolean;
+  private _maxFunctionCalls: number;
+
+  public prefixMessages: ChatMessage[];
+  public callbackManager: CallbackManager | undefined;
+
+  private _getTools: (input: string) => BaseTool[];
+
+  /**
+   * Initialize.
+   */
+  constructor({
+    tools,
+    llm,
+    prefixMessages,
+    verbose,
+    maxFunctionCalls = DEFAULT_MAX_FUNCTION_CALLS,
+    callbackManager,
+    toolRetriever,
+  }: OpenAIAgentWorkerParams) {
+    this._llm = llm ?? new OpenAI({ model: "gpt-3.5-turbo-0613" });
+    this._verbose = verbose || false;
+    this._maxFunctionCalls = maxFunctionCalls;
+    this.prefixMessages = prefixMessages || [];
+    this.callbackManager = callbackManager || this._llm.callbackManager;
+
+    if (tools.length > 0 && toolRetriever) {
+      throw new Error("Cannot specify both tools and tool_retriever");
+    } else if (tools.length > 0) {
+      this._getTools = () => tools;
+    } else if (toolRetriever) {
+      // @ts-ignore
+      this._getTools = (message: string) => toolRetriever.retrieve(message);
+    } else {
+      this._getTools = () => [];
+    }
+  }
+
+  /**
+   * Get all messages.
+   * @param task: task
+   * @returns: messages
+   */
+  public getAllMessages(task: Task): ChatMessage[] {
+    return [
+      ...this.prefixMessages,
+      ...task.memory.get(),
+      ...task.extraState.newMemory.get(),
+    ];
+  }
+
+  /**
+   * Get latest tool calls.
+   * @param task: task
+   * @returns: tool calls
+   */
+  public getLatestToolCalls(task: Task): OpenAIToolCall[] | null {
+    const chatHistory: ChatMessage[] = task.extraState.newMemory.getAll();
+
+    if (chatHistory.length === 0) {
+      return null;
+    }
+
+    return chatHistory[chatHistory.length - 1].additionalKwargs?.toolCalls;
+  }
+
+  /**
+   *
+   * @param task
+   * @param openaiTools
+   * @param toolChoice
+   * @returns
+   */
+  private _getLlmChatKwargs(
+    task: Task,
+    openaiTools: { [key: string]: any }[],
+    toolChoice: string | { [key: string]: any } = "auto",
+  ): { [key: string]: any } {
+    const llmChatKwargs: { [key: string]: any } = {
+      messages: this.getAllMessages(task),
+    };
+
+    if (openaiTools.length > 0) {
+      llmChatKwargs.tools = openaiTools;
+      llmChatKwargs.toolChoice = toolChoice;
+    }
+
+    return llmChatKwargs;
+  }
+
+  /**
+   * Process message.
+   * @param task: task
+   * @param chatResponse: chat response
+   * @returns: agent chat response
+   */
+  private _processMessage(
+    task: Task,
+    chatResponse: ChatResponse,
+  ): AgentChatResponse | AsyncIterable<ChatResponseChunk> {
+    const aiMessage = chatResponse.message;
+    task.extraState.newMemory.put(aiMessage);
+    return new AgentChatResponse(aiMessage.content, task.extraState.sources);
+  }
+
+  /**
+   * Get agent response.
+   * @param task: task
+   * @param mode: mode
+   * @param llmChatKwargs: llm chat kwargs
+   * @returns: agent chat response
+   */
+  private async _getAgentResponse(
+    task: Task,
+    mode: ChatResponseMode,
+    llmChatKwargs: any,
+  ): Promise<AgentChatResponse> {
+    if (mode === ChatResponseMode.WAIT) {
+      const chatResponse = (await this._llm.chat({
+        stream: false,
+        ...llmChatKwargs,
+      })) as unknown as ChatResponse;
+
+      return this._processMessage(task, chatResponse) as AgentChatResponse;
+    } else {
+      throw new Error("Not implemented");
+    }
+  }
+
+  /**
+   * Call function.
+   * @param tools: tools
+   * @param toolCall: tool call
+   * @param memory: memory
+   * @param sources: sources
+   * @returns: void
+   */
+  async callFunction(
+    tools: BaseTool[],
+    toolCall: OpenAIToolCall,
+  ): Promise<CallFunctionOutput> {
+    const functionCall = toolCall.function;
+
+    if (!functionCall) {
+      throw new Error("Invalid tool_call object");
+    }
+
+    const functionMessage = await callFunction(tools, toolCall, this._verbose);
+
+    const message = functionMessage[0];
+    const toolOutput = functionMessage[1];
+
+    return {
+      message,
+      toolOutput,
+    };
+  }
+
+  /**
+   * Initialize step.
+   * @param task: task
+   * @param kwargs: kwargs
+   * @returns: task step
+   */
+  initializeStep(task: Task, kwargs?: any): TaskStep {
+    const sources: ToolOutput[] = [];
+
+    const newMemory = new ChatMemoryBuffer();
+
+    const taskState = {
+      sources,
+      nFunctionCalls: 0,
+      newMemory,
+    };
+
+    task.extraState = {
+      ...task.extraState,
+      ...taskState,
+    };
+
+    return new TaskStep(task.taskId, randomUUID(), task.input);
+  }
+
+  /**
+   * Should continue.
+   * @param toolCalls: tool calls
+   * @param nFunctionCalls: number of function calls
+   * @returns: boolean
+   */
+  private _shouldContinue(
+    toolCalls: OpenAIToolCall[] | null,
+    nFunctionCalls: number,
+  ): boolean {
+    if (nFunctionCalls > this._maxFunctionCalls) {
+      return false;
+    }
+
+    if (toolCalls?.length === 0) {
+      return false;
+    }
+
+    return true;
+  }
+
+  /**
+   * Get tools.
+   * @param input: input
+   * @returns: tools
+   */
+  getTools(input: string): BaseTool[] {
+    return this._getTools(input);
+  }
+
+  private async _runStep(
+    step: TaskStep,
+    task: Task,
+    mode: ChatResponseMode = ChatResponseMode.WAIT,
+    toolChoice: string | { [key: string]: any } = "auto",
+  ): Promise<TaskStepOutput> {
+    const tools = this.getTools(task.input);
+
+    if (step.input) {
+      addUserStepToMemory(step, task.extraState.newMemory, this._verbose);
+    }
+
+    const openaiTools = tools.map((tool) =>
+      toOpenAiTool({
+        name: tool.metadata.name,
+        description: tool.metadata.description,
+        parameters: tool.metadata.parameters,
+      }),
+    );
+
+    const llmChatKwargs = this._getLlmChatKwargs(task, openaiTools, toolChoice);
+
+    const agentChatResponse = await this._getAgentResponse(
+      task,
+      mode,
+      llmChatKwargs,
+    );
+
+    const latestToolCalls = this.getLatestToolCalls(task) || [];
+
+    let isDone: boolean;
+    let newSteps: TaskStep[] = [];
+
+    if (
+      !this._shouldContinue(latestToolCalls, task.extraState.nFunctionCalls)
+    ) {
+      isDone = true;
+      newSteps = [];
+    } else {
+      isDone = false;
+      for (const toolCall of latestToolCalls) {
+        const { message, toolOutput } = await this.callFunction(
+          tools,
+          toolCall,
+        );
+
+        task.extraState.sources.push(toolOutput);
+        task.extraState.newMemory.put(message);
+
+        task.extraState.nFunctionCalls += 1;
+      }
+
+      newSteps = [step.getNextStep(randomUUID(), undefined)];
+    }
+
+    return new TaskStepOutput(agentChatResponse, step, newSteps, isDone);
+  }
+
+  /**
+   * Run step.
+   * @param step: step
+   * @param task: task
+   * @param kwargs: kwargs
+   * @returns: task step output
+   */
+  async runStep(
+    step: TaskStep,
+    task: Task,
+    kwargs?: any,
+  ): Promise<TaskStepOutput> {
+    const toolChoice = kwargs?.toolChoice || "auto";
+    return this._runStep(step, task, ChatResponseMode.WAIT, toolChoice);
+  }
+
+  /**
+   * Stream step.
+   * @param step: step
+   * @param task: task
+   * @param kwargs: kwargs
+   * @returns: task step output
+   */
+  async streamStep(
+    step: TaskStep,
+    task: Task,
+    kwargs?: any,
+  ): Promise<TaskStepOutput> {
+    const toolChoice = kwargs?.toolChoice || "auto";
+    return this._runStep(step, task, ChatResponseMode.STREAM, toolChoice);
+  }
+
+  /**
+   * Finalize task.
+   * @param task: task
+   * @param kwargs: kwargs
+   * @returns: void
+   */
+  finalizeTask(task: Task, kwargs?: any): void {
+    task.memory.set(task.memory.get().concat(task.extraState.newMemory.get()));
+    task.extraState.newMemory.reset();
+  }
+}
diff --git a/packages/core/src/agent/runner/base.ts b/packages/core/src/agent/runner/base.ts
new file mode 100644
index 0000000000000000000000000000000000000000..39e4d6379d7372935f57934e7bc0d05277c616ca
--- /dev/null
+++ b/packages/core/src/agent/runner/base.ts
@@ -0,0 +1,343 @@
+import { randomUUID } from "crypto";
+import { CallbackManager } from "../../callbacks/CallbackManager";
+import {
+  AgentChatResponse,
+  ChatEngineAgentParams,
+  ChatResponseMode,
+} from "../../engines/chat";
+import { ChatMessage, LLM } from "../../llm";
+import { ChatMemoryBuffer } from "../../memory/ChatMemoryBuffer";
+import { BaseMemory } from "../../memory/types";
+import { AgentWorker, Task, TaskStep, TaskStepOutput } from "../types";
+import { AgentState, BaseAgentRunner, TaskState } from "./types";
+
+const validateStepFromArgs = (
+  taskId: string,
+  input: string,
+  step?: any,
+  kwargs?: any,
+): TaskStep | undefined => {
+  if (step) {
+    if (input) {
+      throw new Error("Cannot specify both `step` and `input`");
+    }
+    return step;
+  } else {
+    return new TaskStep(taskId, step, input, kwargs);
+  }
+};
+
+type AgentRunnerParams = {
+  agentWorker: AgentWorker;
+  chatHistory?: ChatMessage[];
+  state?: AgentState;
+  memory?: BaseMemory;
+  llm?: LLM;
+  callbackManager?: CallbackManager;
+  initTaskStateKwargs?: Record<string, any>;
+  deleteTaskOnFinish?: boolean;
+  defaultToolChoice?: string;
+};
+
+export class AgentRunner extends BaseAgentRunner {
+  agentWorker: AgentWorker;
+  state: AgentState;
+  memory: BaseMemory;
+  callbackManager: CallbackManager;
+  initTaskStateKwargs: Record<string, any>;
+  deleteTaskOnFinish: boolean;
+  defaultToolChoice: string;
+
+  /**
+   * Creates an AgentRunner.
+   */
+  constructor(params: AgentRunnerParams) {
+    super();
+
+    this.agentWorker = params.agentWorker;
+    this.state = params.state ?? new AgentState();
+    this.memory =
+      params.memory ??
+      new ChatMemoryBuffer({
+        chatHistory: params.chatHistory,
+      });
+    this.callbackManager = params.callbackManager ?? new CallbackManager();
+    this.initTaskStateKwargs = params.initTaskStateKwargs ?? {};
+    this.deleteTaskOnFinish = params.deleteTaskOnFinish ?? false;
+    this.defaultToolChoice = params.defaultToolChoice ?? "auto";
+  }
+
+  /**
+   * Creates a task.
+   * @param input
+   * @param kwargs
+   */
+  createTask(input: string, kwargs?: any): Task {
+    let extraState;
+
+    if (!this.initTaskStateKwargs) {
+      if (kwargs && "extraState" in kwargs) {
+        if (extraState) {
+          delete extraState["extraState"];
+        }
+      }
+    } else {
+      if (kwargs && "extraState" in kwargs) {
+        throw new Error(
+          "Cannot specify both `extraState` and `initTaskStateKwargs`",
+        );
+      } else {
+        extraState = this.initTaskStateKwargs;
+      }
+    }
+
+    const task = new Task({
+      taskId: randomUUID(),
+      input,
+      memory: this.memory,
+      extraState,
+      ...kwargs,
+    });
+
+    const initialStep = this.agentWorker.initializeStep(task);
+
+    const taskState = new TaskState({
+      task,
+      stepQueue: [initialStep],
+    });
+
+    this.state.taskDict[task.taskId] = taskState;
+
+    return task;
+  }
+
+  /**
+   * Deletes the task.
+   * @param taskId
+   */
+  deleteTask(taskId: string): void {
+    delete this.state.taskDict[taskId];
+  }
+
+  /**
+   * Returns the list of tasks.
+   */
+  listTasks(): Task[] {
+    return Object.values(this.state.taskDict).map(
+      (taskState) => taskState.task,
+    );
+  }
+
+  /**
+   * Returns the task.
+   */
+  getTask(taskId: string): Task {
+    return this.state.taskDict[taskId].task;
+  }
+
+  /**
+   * Returns the completed steps in the task.
+   * @param taskId
+   * @param kwargs
+   */
+  getCompletedSteps(taskId: string): TaskStepOutput[] {
+    return this.state.taskDict[taskId].completedSteps;
+  }
+
+  /**
+   * Returns the next steps in the task.
+   * @param taskId
+   * @param kwargs
+   */
+  getUpcomingSteps(taskId: string, kwargs: any): TaskStep[] {
+    return this.state.taskDict[taskId].stepQueue;
+  }
+
+  private async _runStep(
+    taskId: string,
+    step?: TaskStep,
+    mode: ChatResponseMode = ChatResponseMode.WAIT,
+    kwargs?: any,
+  ): Promise<TaskStepOutput> {
+    const task = this.state.getTask(taskId);
+    const curStep = step || this.state.getStepQueue(taskId).shift();
+
+    let curStepOutput;
+
+    if (!curStep) {
+      throw new Error(`No step found for task ${taskId}`);
+    }
+
+    if (mode === ChatResponseMode.WAIT) {
+      curStepOutput = await this.agentWorker.runStep(curStep, task, kwargs);
+    } else if (mode === ChatResponseMode.STREAM) {
+      curStepOutput = await this.agentWorker.streamStep(curStep, task, kwargs);
+    } else {
+      throw new Error(`Invalid mode: ${mode}`);
+    }
+
+    const nextSteps = curStepOutput.nextSteps;
+
+    this.state.addSteps(taskId, nextSteps);
+    this.state.addCompletedStep(taskId, [curStepOutput]);
+
+    return curStepOutput;
+  }
+
+  /**
+   * Runs the next step in the task.
+   * @param taskId
+   * @param kwargs
+   * @param step
+   * @returns
+   */
+  async runStep(
+    taskId: string,
+    input: string,
+    step?: TaskStep,
+    kwargs: any = {},
+  ): Promise<TaskStepOutput> {
+    const curStep = validateStepFromArgs(taskId, input, step, kwargs);
+    return this._runStep(taskId, curStep, ChatResponseMode.WAIT, kwargs);
+  }
+
+  /**
+   * Runs the step and returns the response.
+   * @param taskId
+   * @param input
+   * @param step
+   * @param kwargs
+   */
+  async streamStep(
+    taskId: string,
+    input: string,
+    step?: TaskStep,
+    kwargs?: any,
+  ): Promise<TaskStepOutput> {
+    const curStep = validateStepFromArgs(taskId, input, step, kwargs);
+    return this._runStep(taskId, curStep, ChatResponseMode.STREAM, kwargs);
+  }
+
+  /**
+   * Finalizes the response and returns it.
+   * @param taskId
+   * @param kwargs
+   * @param stepOutput
+   * @returns
+   */
+  async finalizeResponse(
+    taskId: string,
+    stepOutput: TaskStepOutput,
+    kwargs?: any,
+  ): Promise<AgentChatResponse> {
+    if (!stepOutput) {
+      stepOutput =
+        this.getCompletedSteps(taskId)[
+          this.getCompletedSteps(taskId).length - 1
+        ];
+    }
+    if (!stepOutput.isLast) {
+      throw new Error(
+        "finalizeResponse can only be called on the last step output",
+      );
+    }
+
+    if (!(stepOutput.output instanceof AgentChatResponse)) {
+      throw new Error(
+        `When \`isLast\` is True, cur_step_output.output must be AGENT_CHAT_RESPONSE_TYPE: ${stepOutput.output}`,
+      );
+    }
+
+    this.agentWorker.finalizeTask(this.getTask(taskId), kwargs);
+
+    if (this.deleteTaskOnFinish) {
+      this.deleteTask(taskId);
+    }
+
+    return stepOutput.output;
+  }
+
+  protected async _chat({
+    message,
+    toolChoice,
+  }: ChatEngineAgentParams & { mode: ChatResponseMode }) {
+    const task = this.createTask(message as string);
+
+    let resultOutput;
+
+    while (true) {
+      const curStepOutput = await this._runStep(task.taskId);
+
+      if (curStepOutput.isLast) {
+        resultOutput = curStepOutput;
+        break;
+      }
+
+      toolChoice = "auto";
+    }
+
+    return this.finalizeResponse(task.taskId, resultOutput);
+  }
+
+  /**
+   * Sends a message to the LLM and returns the response.
+   * @param message
+   * @param chatHistory
+   * @param toolChoice
+   * @returns
+   */
+  public async chat({
+    message,
+    chatHistory,
+    toolChoice,
+  }: ChatEngineAgentParams): Promise<AgentChatResponse> {
+    if (!toolChoice) {
+      toolChoice = this.defaultToolChoice;
+    }
+
+    const chatResponse = await this._chat({
+      message,
+      chatHistory,
+      toolChoice,
+      mode: ChatResponseMode.WAIT,
+    });
+
+    return chatResponse;
+  }
+
+  protected _getPromptModules(): string[] {
+    return [];
+  }
+
+  protected _getPrompts(): string[] {
+    return [];
+  }
+
+  /**
+   * Resets the agent.
+   */
+  reset(): void {
+    this.state = new AgentState();
+  }
+
+  getCompletedStep(
+    taskId: string,
+    stepId: string,
+    kwargs: any,
+  ): TaskStepOutput {
+    const completedSteps = this.getCompletedSteps(taskId);
+    for (const stepOutput of completedSteps) {
+      if (stepOutput.taskStep.stepId === stepId) {
+        return stepOutput;
+      }
+    }
+
+    throw new Error(`Step ${stepId} not found in task ${taskId}`);
+  }
+
+  /**
+   * Undoes the step.
+   * @param taskId
+   */
+  undoStep(taskId: string): void {}
+}
diff --git a/packages/core/src/agent/runner/types.ts b/packages/core/src/agent/runner/types.ts
new file mode 100644
index 0000000000000000000000000000000000000000..b4e7a2236a05e2bd486b2c2404a9aeb285138831
--- /dev/null
+++ b/packages/core/src/agent/runner/types.ts
@@ -0,0 +1,102 @@
+import { AgentChatResponse } from "../../engines/chat";
+import { BaseAgent, Task, TaskStep, TaskStepOutput } from "../types";
+
+/**
+ * Mutable per-task bookkeeping held by the agent runner: the task itself,
+ * the steps still to run, and the outputs of steps already run.
+ */
+export class TaskState {
+  // The task this state belongs to.
+  task!: Task;
+  // Steps queued for execution, in order.
+  stepQueue!: TaskStep[];
+  // Outputs of steps that have already executed.
+  completedSteps!: TaskStepOutput[];
+
+  constructor(init?: Partial<TaskState>) {
+    Object.assign(this, init);
+  }
+}
+
+/**
+ * Abstract contract for agent runners: task lifecycle management
+ * (create/get/list/delete), step execution (run/stream), and finalization.
+ */
+export abstract class BaseAgentRunner extends BaseAgent {
+  // NOTE(review): init is currently unused; kept for subclass ergonomics.
+  constructor(init?: Partial<BaseAgentRunner>) {
+    super();
+  }
+
+  // --- task lifecycle -------------------------------------------------------
+  abstract createTask(input: string, kwargs: any): Task;
+  abstract deleteTask(taskId: string): void;
+  abstract getTask(taskId: string, kwargs: any): Task;
+  abstract listTasks(kwargs: any): Task[];
+  abstract getUpcomingSteps(taskId: string, kwargs: any): TaskStep[];
+  abstract getCompletedSteps(taskId: string, kwargs: any): TaskStepOutput[];
+
+  /**
+   * Finds a completed step of a task by step id.
+   * @throws Error if no completed step with that id exists for the task.
+   */
+  getCompletedStep(
+    taskId: string,
+    stepId: string,
+    kwargs: any,
+  ): TaskStepOutput {
+    const completedSteps = this.getCompletedSteps(taskId, kwargs);
+    for (const stepOutput of completedSteps) {
+      if (stepOutput.taskStep.stepId === stepId) {
+        return stepOutput;
+      }
+    }
+
+    throw new Error(`Step ${stepId} not found in task ${taskId}`);
+  }
+
+  // --- step execution -------------------------------------------------------
+  abstract runStep(
+    taskId: string,
+    input: string,
+    step: TaskStep,
+    kwargs: any,
+  ): Promise<TaskStepOutput>;
+
+  abstract streamStep(
+    taskId: string,
+    input: string,
+    step: TaskStep,
+    kwargs?: any,
+  ): Promise<TaskStepOutput>;
+
+  // Converts the final step output into a chat response.
+  abstract finalizeResponse(
+    taskId: string,
+    stepOutput: TaskStepOutput,
+    kwargs?: any,
+  ): Promise<AgentChatResponse>;
+
+  abstract undoStep(taskId: string): void;
+}
+
+/**
+ * In-memory registry of TaskState objects, keyed by task id.
+ *
+ * NOTE(review): lookups assume the task id exists — an unknown taskId makes
+ * `this.taskDict[taskId]` undefined and the property access throws a
+ * TypeError rather than a descriptive error. Confirm callers always register
+ * the task first.
+ */
+export class AgentState {
+  taskDict!: Record<string, TaskState>;
+
+  constructor(init?: Partial<AgentState>) {
+    Object.assign(this, init);
+
+    // Ensure the registry exists even when init omits it.
+    if (!this.taskDict) {
+      this.taskDict = {};
+    }
+  }
+
+  // Returns the task for the given id.
+  getTask(taskId: string): Task {
+    return this.taskDict[taskId].task;
+  }
+
+  // Returns completed step outputs (empty list if none have run yet).
+  getCompletedSteps(taskId: string): TaskStepOutput[] {
+    return this.taskDict[taskId].completedSteps || [];
+  }
+
+  // Returns the pending step queue (empty list if none are queued).
+  getStepQueue(taskId: string): TaskStep[] {
+    return this.taskDict[taskId].stepQueue || [];
+  }
+
+  // Appends steps to the task's queue, creating it lazily.
+  addSteps(taskId: string, steps: TaskStep[]): void {
+    if (!this.taskDict[taskId].stepQueue) {
+      this.taskDict[taskId].stepQueue = [];
+    }
+
+    this.taskDict[taskId].stepQueue.push(...steps);
+  }
+
+  // Appends step outputs to the task's completed list, creating it lazily.
+  addCompletedStep(taskId: string, stepOutputs: TaskStepOutput[]): void {
+    if (!this.taskDict[taskId].completedSteps) {
+      this.taskDict[taskId].completedSteps = [];
+    }
+
+    this.taskDict[taskId].completedSteps.push(...stepOutputs);
+  }
+}
diff --git a/packages/core/src/agent/types.ts b/packages/core/src/agent/types.ts
new file mode 100644
index 0000000000000000000000000000000000000000..e862825f583acaf96e3b46036d1395ab48796b51
--- /dev/null
+++ b/packages/core/src/agent/types.ts
@@ -0,0 +1,181 @@
+import { AgentChatResponse, ChatEngineAgentParams } from "../engines/chat";
+import { QueryEngineParamsNonStreaming } from "../types";
+
+/**
+ * An AgentWorker executes individual steps of a task on behalf of a runner:
+ * it initializes the first step, runs or streams subsequent steps, and
+ * finalizes the task when a step reports it is the last one.
+ */
+export interface AgentWorker {
+  initializeStep(task: Task, kwargs?: any): TaskStep;
+  runStep(step: TaskStep, task: Task, kwargs?: any): Promise<TaskStepOutput>;
+  streamStep(step: TaskStep, task: Task, kwargs?: any): Promise<TaskStepOutput>;
+  finalizeTask(task: Task, kwargs?: any): void;
+}
+
+// Minimal chat contract an agent must satisfy (not exported).
+interface BaseChatEngine {
+  chat(params: ChatEngineAgentParams): Promise<AgentChatResponse>;
+}
+
+// Minimal query contract an agent must satisfy (not exported).
+interface BaseQueryEngine {
+  query(params: QueryEngineParamsNonStreaming): Promise<AgentChatResponse>;
+}
+
+/**
+ * BaseAgent is the base class for all agents. It satisfies both the chat and
+ * query contracts; query is implemented in terms of chat.
+ */
+export abstract class BaseAgent implements BaseChatEngine, BaseQueryEngine {
+  // Subclasses may override to expose prompts; empty by default.
+  protected _getPrompts(): string[] {
+    return [];
+  }
+
+  // Subclasses may override to expose prompt modules; empty by default.
+  protected _getPromptModules(): string[] {
+    return [];
+  }
+
+  abstract chat(params: ChatEngineAgentParams): Promise<AgentChatResponse>;
+  abstract reset(): void;
+
+  /**
+   * query is the main entrypoint for the agent. It takes a query and returns a response.
+   * Delegates to chat() with an empty chat history (each query starts fresh).
+   * @param params - Non-streaming query parameters.
+   * @returns The agent's response.
+   */
+  async query(
+    params: QueryEngineParamsNonStreaming,
+  ): Promise<AgentChatResponse> {
+    // Handle non-streaming query
+    const agentResponse = await this.chat({
+      message: params.query,
+      chatHistory: [],
+    });
+
+    return agentResponse;
+  }
+}
+
+type TaskParams = {
+  taskId: string;
+  input: string;
+  // Conversation memory carried across the task's steps.
+  memory: any;
+  // Scratch space shared by the task's steps.
+  extraState: Record<string, any>;
+};
+
+/**
+ * Task is a unit of work for the agent: the user input plus the memory and
+ * shared state its steps operate on.
+ * @param taskId - Unique identifier for the task.
+ */
+export class Task {
+  taskId: string;
+  input: string;
+
+  memory: any;
+  extraState: Record<string, any>;
+
+  constructor({ taskId, input, memory, extraState }: TaskParams) {
+    this.taskId = taskId;
+    this.input = input;
+    this.memory = memory;
+    // Default to an empty scratch object when none is provided.
+    this.extraState = extraState ?? {};
+  }
+}
+
+/**
+ * Shape of a single step in a task's execution graph: identity, optional
+ * input, per-step state, and links to neighboring steps.
+ */
+interface ITaskStep {
+  taskId: string;
+  stepId: string;
+  input?: string | null;
+  stepState: Record<string, any>;
+  // Steps that follow / precede this one, keyed by step id.
+  nextSteps: Record<string, TaskStep>;
+  prevSteps: Record<string, TaskStep>;
+  isReady: boolean;
+  getNextStep(
+    stepId: string,
+    input?: string,
+    stepState?: Record<string, any>,
+  ): TaskStep;
+  linkStep(nextStep: TaskStep): void;
+}
+
+/**
+ * TaskStep is a single unit of work within a task.
+ * @param taskId - Id of the owning task.
+ * @param stepId - Unique id of this step.
+ * @param input - Optional input text for this step.
+ * @param stepState - Per-step scratch state.
+ */
+export class TaskStep implements ITaskStep {
+  taskId: string;
+  stepId: string;
+  input?: string | null;
+  stepState: Record<string, any> = {};
+  nextSteps: Record<string, TaskStep> = {};
+  prevSteps: Record<string, TaskStep> = {};
+  isReady: boolean = true;
+
+  constructor(
+    taskId: string,
+    stepId: string,
+    input?: string | null,
+    stepState?: Record<string, any> | null,
+  ) {
+    this.taskId = taskId;
+    this.stepId = stepId;
+    this.input = input;
+    this.stepState = stepState ?? this.stepState;
+  }
+
+  /*
+   * Creates the step that follows this one within the same task.
+   *
+   * NOTE(review): when stepState is omitted, the new step shares this step's
+   * stepState object (no copy) — mutations are visible to both. Confirm this
+   * aliasing is intended.
+   *
+   * @param stepId - Id for the new step.
+   * @param input - Optional input for the new step.
+   * @param stepState - State for the new step; defaults to this step's state.
+   * @returns The newly created TaskStep.
+   */
+  getNextStep(
+    stepId: string,
+    input?: string,
+    stepState?: Record<string, unknown>,
+  ): TaskStep {
+    return new TaskStep(
+      this.taskId,
+      stepId,
+      input,
+      stepState ?? this.stepState,
+    );
+  }
+
+  /*
+   * Links this step to a successor, wiring both directions of the graph:
+   * nextStep appears in this.nextSteps and this step in nextStep.prevSteps.
+   * @param nextStep - The step to link after this one.
+   */
+  linkStep(nextStep: TaskStep): void {
+    this.nextSteps[nextStep.stepId] = nextStep;
+    nextStep.prevSteps[this.stepId] = this;
+  }
+}
+
+/**
+ * TaskStepOutput is the result of executing a single TaskStep: the produced
+ * output, the step that produced it, any follow-up steps, and whether this
+ * was the task's final step.
+ * @param output - Value produced by the step.
+ * @param taskStep - The step that ran.
+ * @param nextSteps - Steps to run next (empty when isLast).
+ * @param isLast - True when the task is complete.
+ */
+export class TaskStepOutput {
+  output: unknown;
+  taskStep: TaskStep;
+  nextSteps: TaskStep[];
+  isLast: boolean;
+
+  constructor(
+    output: unknown,
+    taskStep: TaskStep,
+    nextSteps: TaskStep[],
+    isLast: boolean = false,
+  ) {
+    this.output = output;
+    this.taskStep = taskStep;
+    this.nextSteps = nextSteps;
+    this.isLast = isLast;
+  }
+
+  // String form is just the output's string form.
+  toString(): string {
+    return String(this.output);
+  }
+}
diff --git a/packages/core/src/agent/utils.ts b/packages/core/src/agent/utils.ts
new file mode 100644
index 0000000000000000000000000000000000000000..4d6aa8098d884315030c15558f6adb2f138d34cf
--- /dev/null
+++ b/packages/core/src/agent/utils.ts
@@ -0,0 +1,51 @@
+import { ChatMessage } from "../llm";
+import { ChatMemoryBuffer } from "../memory/ChatMemoryBuffer";
+import { BaseTool } from "../types";
+import { TaskStep } from "./types";
+
+/**
+ * Adds the user's input to the memory.
+ *
+ * No-op when the step carries no input.
+ *
+ * @param step - The step whose input should be recorded.
+ * @param memory - The memory buffer to append to.
+ * @param verbose - Whether to log the appended message to the console.
+ */
+export function addUserStepToMemory(
+  step: TaskStep,
+  memory: ChatMemoryBuffer,
+  verbose: boolean = false,
+): void {
+  if (!step.input) {
+    return;
+  }
+
+  const userMessage: ChatMessage = {
+    content: step.input,
+    role: "user",
+  };
+
+  memory.put(userMessage);
+
+  if (verbose) {
+    console.log(`Added user message to memory!: ${userMessage.content}`);
+  }
+}
+
+/**
+ * Looks up a tool by its metadata name.
+ * @param tools - Candidate tools to search.
+ * @param name - The tool name to find.
+ * @returns The matching tool.
+ * @throws Error if no tool with the given name exists.
+ */
+export function getFunctionByName(tools: BaseTool[], name: string): BaseTool {
+  // Index tools by name; on duplicate names the last tool wins.
+  const nameToTool: { [key: string]: BaseTool } = {};
+  tools.forEach((tool) => {
+    nameToTool[tool.metadata.name] = tool;
+  });
+
+  if (!(name in nameToTool)) {
+    throw new Error(`Tool with name ${name} not found`);
+  }
+
+  return nameToTool[name];
+}
diff --git a/packages/core/src/engines/chat/types.ts b/packages/core/src/engines/chat/types.ts
index 1d8c18378450d764a6df3168b1ffa64ae2871edc..f11329110e411327526ad51f209f4b5217689f1a 100644
--- a/packages/core/src/engines/chat/types.ts
+++ b/packages/core/src/engines/chat/types.ts
@@ -1,9 +1,10 @@
 import { ChatHistory } from "../../ChatHistory";
-import { NodeWithScore } from "../../Node";
+import { BaseNode, NodeWithScore } from "../../Node";
 import { Response } from "../../Response";
 import { Event } from "../../callbacks/CallbackManager";
 import { ChatMessage } from "../../llm";
 import { MessageContent } from "../../llm/types";
+import { ToolOutput } from "../../tools/types";
 
 /**
  * Represents the base parameters for ChatEngine.
@@ -24,6 +25,10 @@ export interface ChatEngineParamsNonStreaming extends ChatEngineParamsBase {
   stream?: false | null;
 }
 
+/**
+ * Chat parameters for agents; adds tool selection on top of the base params.
+ */
+export interface ChatEngineAgentParams extends ChatEngineParamsBase {
+  // Tool selection strategy (e.g. "auto", a tool name, or a structured spec).
+  toolChoice?: string | Record<string, any>;
+}
+
 /**
  * A ChatEngine is used to handle back and forth chats between the application and the LLM.
  */
@@ -52,3 +57,32 @@ export interface Context {
 export interface ContextGenerator {
   generate(message: string, parentEvent?: Event): Promise<Context>;
 }
+
+// How the agent delivers its response: synchronously or as a stream.
+export enum ChatResponseMode {
+  WAIT = "wait",
+  STREAM = "stream",
+}
+
+/**
+ * The agent's answer to a chat turn: the response text plus the tool outputs
+ * and source nodes that informed it.
+ */
+export class AgentChatResponse {
+  response: string;
+  sources: ToolOutput[];
+  sourceNodes?: BaseNode[];
+
+  constructor(
+    response: string,
+    sources?: ToolOutput[],
+    sourceNodes?: BaseNode[],
+  ) {
+    this.response = response;
+    // Default both collections to empty lists when omitted.
+    this.sources = sources || [];
+    this.sourceNodes = sourceNodes || [];
+  }
+
+  // Placeholder for a human-readable rendering of sources.
+  protected _getFormattedSources() {
+    throw new Error("Not implemented yet");
+  }
+
+  toString() {
+    return this.response ?? "";
+  }
+}
diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts
index 0d02cbc6e8de89d94e3335406b8d846509b9fb17..2c965e8c2bf6ba9a99acd8273450fbcd3f752095 100644
--- a/packages/core/src/index.ts
+++ b/packages/core/src/index.ts
@@ -9,6 +9,7 @@ export * from "./Response";
 export * from "./Retriever";
 export * from "./ServiceContext";
 export * from "./TextSplitter";
+export * from "./agent";
 export * from "./callbacks/CallbackManager";
 export * from "./constants";
 export * from "./embeddings";
@@ -33,4 +34,4 @@ export * from "./readers/base";
 export * from "./selectors";
 export * from "./storage";
 export * from "./synthesizers";
-export type * from "./types";
+export * from "./tools";
diff --git a/packages/core/src/llm/LLM.ts b/packages/core/src/llm/LLM.ts
index e543164ef933e92b8c5ede08cf11f07a4e578c44..29e975003e08971430cf0280e6994ee12265f9fe 100644
--- a/packages/core/src/llm/LLM.ts
+++ b/packages/core/src/llm/LLM.ts
@@ -77,7 +77,14 @@ export class OpenAI extends BaseLLM {
   maxTokens?: number;
   additionalChatOptions?: Omit<
     Partial<OpenAILLM.Chat.ChatCompletionCreateParams>,
-    "max_tokens" | "messages" | "model" | "temperature" | "top_p" | "stream"
+    | "max_tokens"
+    | "messages"
+    | "model"
+    | "temperature"
+    | "top_p"
+    | "stream"
+    | "tools"
+    | "toolChoice"
   >;
 
   // OpenAI session params
@@ -179,7 +186,7 @@ export class OpenAI extends BaseLLM {
 
   mapMessageType(
     messageType: MessageType,
-  ): "user" | "assistant" | "system" | "function" {
+  ): "user" | "assistant" | "system" | "function" | "tool" {
     switch (messageType) {
       case "user":
         return "user";
@@ -189,11 +196,30 @@ export class OpenAI extends BaseLLM {
         return "system";
       case "function":
         return "function";
+      case "tool":
+        return "tool";
       default:
         return "user";
     }
   }
 
+  /**
+   * Maps internal ChatMessage objects to the shape the OpenAI API expects,
+   * renaming the camelCase `toolCalls` kwarg to OpenAI's snake_case
+   * `tool_calls`.
+   *
+   * BUG FIX: the previous version aliased `message.additionalKwargs` and
+   * performed the rename in place, mutating the caller's message (and any
+   * stored chat history) as a side effect. A shallow copy keeps the
+   * translation local.
+   *
+   * @param messages - Internal chat messages to translate.
+   * @returns Plain objects suitable for the OpenAI chat completions API.
+   */
+  toOpenAIMessage(messages: ChatMessage[]) {
+    return messages.map((message) => {
+      // Shallow-copy so the rename below cannot leak back into the message.
+      const additionalKwargs = { ...(message.additionalKwargs ?? {}) };
+
+      if (additionalKwargs.toolCalls) {
+        additionalKwargs.tool_calls = additionalKwargs.toolCalls;
+        delete additionalKwargs.toolCalls;
+      }
+
+      return {
+        role: this.mapMessageType(message.role),
+        content: message.content,
+        ...additionalKwargs,
+      };
+    });
+  }
+
   chat(
     params: LLMChatParamsStreaming,
   ): Promise<AsyncIterable<ChatResponseChunk>>;
@@ -201,18 +227,15 @@ export class OpenAI extends BaseLLM {
   async chat(
     params: LLMChatParamsNonStreaming | LLMChatParamsStreaming,
   ): Promise<ChatResponse | AsyncIterable<ChatResponseChunk>> {
-    const { messages, parentEvent, stream } = params;
-    const baseRequestParams: OpenAILLM.Chat.ChatCompletionCreateParams = {
+    const { messages, parentEvent, stream, tools, toolChoice } = params;
+
+    let baseRequestParams: OpenAILLM.Chat.ChatCompletionCreateParams = {
       model: this.model,
       temperature: this.temperature,
       max_tokens: this.maxTokens,
-      messages: messages.map(
-        (message) =>
-          ({
-            role: this.mapMessageType(message.role),
-            content: message.content,
-          }) as ChatCompletionMessageParam,
-      ),
+      tools: tools,
+      tool_choice: toolChoice,
+      messages: this.toOpenAIMessage(messages) as ChatCompletionMessageParam[],
       top_p: this.topP,
       ...this.additionalChatOptions,
     };
@@ -221,6 +244,7 @@ export class OpenAI extends BaseLLM {
     if (stream) {
       return this.streamChat(params);
     }
+
     // Non-streaming
     const response = await this.session.openai.chat.completions.create({
       ...baseRequestParams,
@@ -228,8 +252,19 @@ export class OpenAI extends BaseLLM {
     });
 
     const content = response.choices[0].message?.content ?? "";
+
+    const kwargsOutput: Record<string, any> = {};
+
+    if (response.choices[0].message?.tool_calls) {
+      kwargsOutput.toolCalls = response.choices[0].message.tool_calls;
+    }
+
     return {
-      message: { content, role: response.choices[0].message.role },
+      message: {
+        content,
+        role: response.choices[0].message.role,
+        additionalKwargs: kwargsOutput,
+      },
     };
   }
 
diff --git a/packages/core/src/llm/types.ts b/packages/core/src/llm/types.ts
index 97ead4a7541ed5f6aacb814f9c155fbfd6886863..305769f83d22f893fd6ea3aec411ccc0da927c96 100644
--- a/packages/core/src/llm/types.ts
+++ b/packages/core/src/llm/types.ts
@@ -39,17 +39,20 @@ export type MessageType =
   | "system"
   | "generic"
   | "function"
-  | "memory";
+  | "memory"
+  | "tool";
 
 export interface ChatMessage {
   // TODO: use MessageContent
   content: any;
   role: MessageType;
+  additionalKwargs?: Record<string, any>;
 }
 
 export interface ChatResponse {
   message: ChatMessage;
   raw?: Record<string, any>;
+  additionalKwargs?: Record<string, any>;
 }
 
 export interface ChatResponseChunk {
@@ -74,6 +77,9 @@ export interface LLMChatParamsBase {
   messages: ChatMessage[];
   parentEvent?: Event;
   extraParams?: Record<string, any>;
+  tools?: any;
+  toolChoice?: any;
+  additionalKwargs?: Record<string, any>;
 }
 
 export interface LLMChatParamsStreaming extends LLMChatParamsBase {
diff --git a/packages/core/src/memory/ChatMemoryBuffer.ts b/packages/core/src/memory/ChatMemoryBuffer.ts
new file mode 100644
index 0000000000000000000000000000000000000000..72036336b5394e98fec322adf72270b247a788da
--- /dev/null
+++ b/packages/core/src/memory/ChatMemoryBuffer.ts
@@ -0,0 +1,119 @@
+import { ChatMessage } from "../llm";
+import { SimpleChatStore } from "../storage/chatStore/SimpleChatStore";
+import { BaseChatStore } from "../storage/chatStore/types";
+import { BaseMemory } from "./types";
+
+type ChatMemoryBufferParams = {
+  tokenLimit?: number;
+  chatStore?: BaseChatStore;
+  chatStoreKey?: string;
+  chatHistory?: ChatMessage[];
+};
+
+/**
+ * Chat memory buffer.
+ *
+ * Stores chat history in a pluggable chat store and, on read, returns the
+ * longest suffix of the history that fits within a whitespace-token budget.
+ */
+export class ChatMemoryBuffer implements BaseMemory {
+  // Maximum number of whitespace-delimited tokens returned by get().
+  tokenLimit: number;
+
+  // Backing store and the key under which this buffer's history lives.
+  chatStore: BaseChatStore;
+  chatStoreKey: string;
+
+  /**
+   * Initialize.
+   * @param init.tokenLimit - Token budget (default 3000).
+   * @param init.chatStore - Backing store (default in-memory SimpleChatStore).
+   * @param init.chatStoreKey - Store key (default "chat_history").
+   * @param init.chatHistory - Optional messages to seed the store with.
+   */
+  constructor(init?: Partial<ChatMemoryBufferParams>) {
+    this.tokenLimit = init?.tokenLimit ?? 3000;
+    this.chatStore = init?.chatStore ?? new SimpleChatStore();
+    this.chatStoreKey = init?.chatStoreKey ?? "chat_history";
+
+    if (init?.chatHistory) {
+      this.chatStore.setMessages(this.chatStoreKey, init.chatHistory);
+    }
+  }
+
+  /**
+    Get chat history, truncated from the front so it fits the token limit.
+    @param initialTokenCount: number of tokens already consumed elsewhere
+    @throws Error when initialTokenCount alone exceeds the token limit
+  */
+  get(initialTokenCount: number = 0): ChatMessage[] {
+    const chatHistory = this.getAll();
+
+    if (initialTokenCount > this.tokenLimit) {
+      throw new Error("Initial token count exceeds token limit");
+    }
+
+    let messageCount = chatHistory.length;
+    let tokenCount =
+      this._tokenCountForMessageCount(messageCount) + initialTokenCount;
+
+    // Drop messages from the front until the suffix fits the budget.
+    while (tokenCount > this.tokenLimit && messageCount > 1) {
+      messageCount -= 1;
+      // BUG FIX: `chatHistory[-messageCount]` was Python-style negative
+      // indexing, which is always `undefined` in JavaScript, so this guard
+      // never fired. Index from the end of the array explicitly.
+      if (chatHistory[chatHistory.length - messageCount].role === "assistant") {
+        // we cannot have an assistant message at the start of the chat history
+        // if after removal of the first, we have an assistant message,
+        // we need to remove the assistant message too
+        messageCount -= 1;
+      }
+
+      tokenCount =
+        this._tokenCountForMessageCount(messageCount) + initialTokenCount;
+    }
+
+    // catch one message longer than token limit
+    if (tokenCount > this.tokenLimit || messageCount <= 0) {
+      return [];
+    }
+
+    return chatHistory.slice(-messageCount);
+  }
+
+  /**
+   * Get all chat history.
+   * @returns {ChatMessage[]} chat history
+   */
+  getAll(): ChatMessage[] {
+    return this.chatStore.getMessages(this.chatStoreKey);
+  }
+
+  /**
+   * Append one message to the chat history.
+   * @param message
+   */
+  put(message: ChatMessage): void {
+    this.chatStore.addMessage(this.chatStoreKey, message);
+  }
+
+  /**
+   * Replace the chat history wholesale.
+   * @param messages
+   */
+  set(messages: ChatMessage[]): void {
+    this.chatStore.setMessages(this.chatStoreKey, messages);
+  }
+
+  /**
+   * Reset (delete) the chat history.
+   */
+  reset(): void {
+    this.chatStore.deleteMessages(this.chatStoreKey);
+  }
+
+  /**
+   * Approximate token count for the last `messageCount` messages,
+   * counted as whitespace-separated words.
+   * @param messageCount
+   * @returns {number} token count
+   */
+  private _tokenCountForMessageCount(messageCount: number): number {
+    if (messageCount <= 0) {
+      return 0;
+    }
+
+    const chatHistory = this.getAll();
+    const msgStr = chatHistory
+      .slice(-messageCount)
+      .map((m) => m.content)
+      .join(" ");
+    return msgStr.split(" ").length;
+  }
+}
diff --git a/packages/core/src/memory/types.ts b/packages/core/src/memory/types.ts
new file mode 100644
index 0000000000000000000000000000000000000000..2222599abb71d73c4f2355982ab07b005762fc90
--- /dev/null
+++ b/packages/core/src/memory/types.ts
@@ -0,0 +1,24 @@
+import { ChatMessage } from "../llm";
+
+/**
+ * Contract for conversation memory backends.
+ */
+export interface BaseMemory {
+  /**
+   * Get chat history, possibly truncated to fit the backend's limits.
+   */
+  get(...args: any): ChatMessage[];
+  /**
+   * Get all chat history without truncation.
+   */
+  getAll(): ChatMessage[];
+  /**
+   * Append one message to the chat history.
+   */
+  put(message: ChatMessage): void;
+  /**
+   * Replace the chat history wholesale.
+   */
+  set(messages: ChatMessage[]): void;
+  /**
+   * Reset (clear) the chat history.
+   */
+  reset(): void;
+}
diff --git a/packages/core/src/objects/base.ts b/packages/core/src/objects/base.ts
new file mode 100644
index 0000000000000000000000000000000000000000..7adeb94902709556c8956ba4f8de43781ae79918
--- /dev/null
+++ b/packages/core/src/objects/base.ts
@@ -0,0 +1,80 @@
+import { BaseNode, TextNode } from "../Node";
+import { BaseRetriever } from "../Retriever";
+
+// The node and retriever types used below (TextNode, BaseNode, BaseRetriever)
+// are imported above; implement concrete BaseObjectNodeMapping subclasses as needed.
+
+/**
+ * Maps arbitrary objects of type OT to/from index nodes.
+ *
+ * BUG FIX: `fromObjects` previously declared its own type parameter `<OT>`,
+ * which shadowed the class's `OT` and decoupled the returned mapping's
+ * element type from the class's. The shadowing parameter is removed so the
+ * class-level OT applies throughout.
+ */
+export abstract class BaseObjectNodeMapping<OT> {
+  // Static-factory stand-in for Python's classmethod: build a mapping from objects.
+  abstract fromObjects(objs: OT[], ...args: any[]): BaseObjectNodeMapping<OT>;
+
+  // Returns the raw object-to-node mapping.
+  abstract objNodeMapping(): Record<any, any>;
+  // Converts one object to a TextNode.
+  abstract toNode(obj: OT): TextNode;
+
+  // Validation hook; default accepts everything. Subclasses may throw.
+  validateObject(obj: OT): void {}
+
+  // Validates then stores an object via the subclass hook.
+  addObj(obj: OT): void {
+    this.validateObject(obj);
+    this._addObj(obj);
+  }
+
+  // Subclass hook: actually store the object.
+  protected abstract _addObj(obj: OT): void;
+
+  // Converts many objects to nodes.
+  toNodes(objs: OT[]): TextNode[] {
+    return objs.map((obj) => this.toNode(obj));
+  }
+
+  // Subclass hook: reconstruct an object from a node.
+  protected abstract _fromNode(node: BaseNode): OT;
+
+  // Reconstructs and validates an object from a node.
+  fromNode(node: BaseNode): OT {
+    const obj = this._fromNode(node);
+    this.validateObject(obj);
+    return obj;
+  }
+
+  // Persists the mapping to disk.
+  abstract persist(persistDir: string, objNodeMappingFilename: string): void;
+}
+
+// You will need to implement specific subclasses of BaseObjectNodeMapping as per your project requirements.
+
+// Query input accepted by ObjectRetriever.retrieve.
+type QueryType = string;
+
+/**
+ * Retrieves objects of type OT by running a node retriever and mapping each
+ * returned node back to its object via a BaseObjectNodeMapping.
+ */
+export class ObjectRetriever<OT> {
+  private _retriever: BaseRetriever;
+  private _objectNodeMapping: BaseObjectNodeMapping<OT>;
+
+  constructor(
+    retriever: BaseRetriever,
+    objectNodeMapping: BaseObjectNodeMapping<OT>,
+  ) {
+    this._retriever = retriever;
+    this._objectNodeMapping = objectNodeMapping;
+  }
+
+  // The underlying node retriever.
+  get retriever(): BaseRetriever {
+    return this._retriever;
+  }
+
+  // Retrieves nodes for the query and maps each back to its object.
+  async retrieve(strOrQueryBundle: QueryType): Promise<OT[]> {
+    const nodes = await this._retriever.retrieve(strOrQueryBundle);
+    return nodes.map((node) => this._objectNodeMapping.fromNode(node.node));
+  }
+
+  // // Translating the _asQueryComponent method
+  // public asQueryComponent(kwargs: any): any {
+  //     return new ObjectRetrieverComponent(this);
+  // }
+}
diff --git a/packages/core/src/storage/chatStore/SimpleChatStore.ts b/packages/core/src/storage/chatStore/SimpleChatStore.ts
new file mode 100644
index 0000000000000000000000000000000000000000..b2b272bd7049eb34052dfad4251feb358f23e86c
--- /dev/null
+++ b/packages/core/src/storage/chatStore/SimpleChatStore.ts
@@ -0,0 +1,92 @@
+import { ChatMessage } from "../../llm";
+import { BaseChatStore } from "./types";
+
+/**
+ * Simple chat store: an in-memory map from string keys to message lists.
+ */
+export class SimpleChatStore implements BaseChatStore {
+  store: { [key: string]: ChatMessage[] } = {};
+
+  /**
+   * Replaces all messages under a key.
+   * @param key - History key.
+   * @param messages - Messages to store.
+   */
+  public setMessages(key: string, messages: ChatMessage[]): void {
+    this.store[key] = messages;
+  }
+
+  /**
+   * Returns the messages under a key (empty list when absent).
+   * @param key - History key.
+   */
+  public getMessages(key: string): ChatMessage[] {
+    return this.store[key] || [];
+  }
+
+  /**
+   * Appends one message under a key, creating the list lazily.
+   * @param key - History key.
+   * @param message - Message to append.
+   */
+  public addMessage(key: string, message: ChatMessage): void {
+    this.store[key] = this.store[key] || [];
+    this.store[key].push(message);
+  }
+
+  /**
+   * Removes and returns all messages under a key.
+   * @param key - History key.
+   * @returns The removed messages, or null when the key is absent.
+   */
+  public deleteMessages(key: string): ChatMessage[] | null {
+    if (!(key in this.store)) {
+      return null;
+    }
+    const messages = this.store[key];
+    delete this.store[key];
+    return messages;
+  }
+
+  /**
+   * Removes and returns the message at an index.
+   * @param key - History key.
+   * @param idx - Zero-based index of the message to remove.
+   * @returns The removed message, or null when key/index is out of range.
+   */
+  public deleteMessage(key: string, idx: number): ChatMessage | null {
+    if (!(key in this.store)) {
+      return null;
+    }
+    // BUG FIX: also reject negative indices — `splice(-1, 1)` would silently
+    // delete from the END of the list instead of reporting "not found".
+    if (idx < 0 || idx >= this.store[key].length) {
+      return null;
+    }
+    return this.store[key].splice(idx, 1)[0];
+  }
+
+  /**
+   * Removes and returns the most recent message under a key.
+   * @param key - History key.
+   * @returns The removed message, or null when key is absent or list empty.
+   */
+  public deleteLastMessage(key: string): ChatMessage | null {
+    if (!(key in this.store)) {
+      return null;
+    }
+
+    const lastMessage = this.store[key].pop();
+
+    return lastMessage || null;
+  }
+
+  /**
+   * Returns all history keys currently stored.
+   */
+  public getKeys(): string[] {
+    return Object.keys(this.store);
+  }
+}
diff --git a/packages/core/src/storage/chatStore/types.ts b/packages/core/src/storage/chatStore/types.ts
new file mode 100644
index 0000000000000000000000000000000000000000..a72a296d5055589dca44954bcdf932acc3c79489
--- /dev/null
+++ b/packages/core/src/storage/chatStore/types.ts
@@ -0,0 +1,11 @@
+import { ChatMessage } from "../../llm";
+
+export interface BaseChatStore {
+  setMessages(key: string, messages: ChatMessage[]): void;
+  getMessages(key: string): ChatMessage[];
+  addMessage(key: string, message: ChatMessage): void;
+  deleteMessages(key: string): ChatMessage[] | null;
+  deleteMessage(key: string, idx: number): ChatMessage | null;
+  deleteLastMessage(key: string): ChatMessage | null;
+  getKeys(): string[];
+}
diff --git a/packages/core/src/storage/index.ts b/packages/core/src/storage/index.ts
index ae372ead52586db82e40cc44bd6b5681e81ec72f..2dbc4fd1e80bd0ca9abd081bbf247c77f63de0f8 100644
--- a/packages/core/src/storage/index.ts
+++ b/packages/core/src/storage/index.ts
@@ -1,5 +1,7 @@
 export * from "./FileSystem";
 export * from "./StorageContext";
+export { SimpleChatStore } from "./chatStore/SimpleChatStore";
+export * from "./chatStore/types";
 export * from "./constants";
 export { SimpleDocumentStore } from "./docStore/SimpleDocumentStore";
 export * from "./docStore/types";
diff --git a/packages/core/src/tests/MetadataExtractors.test.ts b/packages/core/src/tests/MetadataExtractors.test.ts
index abc1b78ba6559e036c86bcf573442df4a6b4f685..e293f528d4d470ea4756663d8a0ea27728347104 100644
--- a/packages/core/src/tests/MetadataExtractors.test.ts
+++ b/packages/core/src/tests/MetadataExtractors.test.ts
@@ -50,6 +50,7 @@ describe("[MetadataExtractor]: Extractors should populate the metadata", () => {
     mockLlmGeneration({ languageModel, callbackManager });
 
     const embedModel = new OpenAIEmbedding();
+
     mockEmbeddingModel(embedModel);
 
     serviceContext = serviceContextFromDefaults({
diff --git a/packages/core/src/tests/agent/OpenAIAgent.test.ts b/packages/core/src/tests/agent/OpenAIAgent.test.ts
new file mode 100644
index 0000000000000000000000000000000000000000..071740a0a96b44ee250b58b3f6161adcdd4b58be
--- /dev/null
+++ b/packages/core/src/tests/agent/OpenAIAgent.test.ts
@@ -0,0 +1,69 @@
+import { OpenAIAgent } from "../../agent";
+import { CallbackManager } from "../../callbacks/CallbackManager";
+import { OpenAI } from "../../llm";
+import { FunctionTool } from "../../tools";
+import { mockLlmToolCallGeneration } from "../utility/mockOpenAI";
+
+// Define a function to sum two numbers
+function sumNumbers({ a, b }: { a: number; b: number }): number {
+  return a + b;
+}
+
+// JSON schema describing sumNumbers' parameters for the function tool.
+const sumJSON = {
+  type: "object",
+  properties: {
+    a: {
+      type: "number",
+      description: "The first number",
+    },
+    b: {
+      type: "number",
+      description: "The second number",
+    },
+  },
+  required: ["a", "b"],
+};
+
+// Stub the OpenAI session so no API key or network access is required.
+jest.mock("../../llm/open_ai", () => {
+  return {
+    getOpenAISession: jest.fn().mockImplementation(() => null),
+  };
+});
+
+describe("OpenAIAgent", () => {
+  let openaiAgent: OpenAIAgent;
+
+  beforeEach(() => {
+    const callbackManager = new CallbackManager({});
+
+    const languageModel = new OpenAI({
+      model: "gpt-3.5-turbo",
+      callbackManager,
+    });
+
+    // The mocked chat() always answers "The sum is 2".
+    mockLlmToolCallGeneration({
+      languageModel,
+      callbackManager,
+    });
+
+    const sumFunctionTool = new FunctionTool(sumNumbers, {
+      name: "sumNumbers",
+      description: "Use this function to sum two numbers",
+      parameters: sumJSON,
+    });
+
+    openaiAgent = new OpenAIAgent({
+      tools: [sumFunctionTool],
+      llm: languageModel,
+      verbose: false,
+    });
+  });
+
+  it("should be able to chat with agent", async () => {
+    const response = await openaiAgent.chat({
+      message: "how much is 1 + 1?",
+    });
+
+    // Matches the canned content returned by mockLlmToolCallGeneration.
+    expect(String(response)).toEqual("The sum is 2");
+  });
+});
diff --git a/packages/core/src/tests/agent/runner/AgentRunner.test.ts b/packages/core/src/tests/agent/runner/AgentRunner.test.ts
new file mode 100644
index 0000000000000000000000000000000000000000..27702ad9e687ddb030aa135e1f71b01636942a9e
--- /dev/null
+++ b/packages/core/src/tests/agent/runner/AgentRunner.test.ts
@@ -0,0 +1,99 @@
+import { OpenAIAgentWorker } from "../../../agent";
+import { AgentRunner } from "../../../agent/runner/base";
+import { CallbackManager } from "../../../callbacks/CallbackManager";
+import { OpenAI } from "../../../llm/LLM";
+
+import {
+  DEFAULT_LLM_TEXT_OUTPUT,
+  mockLlmGeneration,
+} from "../../utility/mockOpenAI";
+
+jest.mock("../../../llm/open_ai", () => {
+  return {
+    getOpenAISession: jest.fn().mockImplementation(() => null),
+  };
+});
+
+describe("Agent Runner", () => {
+  let agentRunner: AgentRunner;
+
+  beforeEach(() => {
+    const callbackManager = new CallbackManager({});
+
+    const languageModel = new OpenAI({
+      model: "gpt-3.5-turbo",
+      callbackManager,
+    });
+
+    // Canned LLM output; no network access needed.
+    mockLlmGeneration({
+      languageModel,
+      callbackManager,
+    });
+
+    agentRunner = new AgentRunner({
+      llm: languageModel,
+      agentWorker: new OpenAIAgentWorker({
+        llm: languageModel,
+        tools: [],
+        verbose: false,
+      }),
+    });
+  });
+
+  it("should be able to initialize a task", () => {
+    const task = agentRunner.createTask("hello world");
+
+    expect(task.input).toEqual("hello world");
+    expect(task.taskId in agentRunner.state.taskDict).toEqual(true);
+
+    expect(agentRunner.listTasks().length).toEqual(1);
+  });
+
+  it("should be able to run a step", async () => {
+    const task = agentRunner.createTask("hello world");
+
+    // Before any step runs, the runner reports no completed steps.
+    expect(agentRunner.getCompletedSteps(task.taskId)).toBeUndefined();
+
+    const stepOutput = await agentRunner.runStep(task.taskId, task.input);
+
+    const completedSteps = agentRunner.getCompletedSteps(task.taskId);
+
+    expect(completedSteps.length).toEqual(1);
+
+    // With no tools, a single step completes the task.
+    expect(stepOutput.isLast).toEqual(true);
+  });
+
+  // NOTE(review): this case only asserts that the single step is the last one;
+  // it never exercises finalizeResponse — consider extending it.
+  it("should be able to finalize a task", async () => {
+    const task = agentRunner.createTask("hello world");
+
+    expect(agentRunner.getCompletedSteps(task.taskId)).toBeUndefined();
+
+    const stepOutput1 = await agentRunner.runStep(task.taskId, task.input);
+
+    expect(stepOutput1.isLast).toEqual(true);
+  });
+
+  it("should be able to delete a task", () => {
+    const task = agentRunner.createTask("hello world");
+
+    expect(agentRunner.listTasks().length).toEqual(1);
+
+    agentRunner.deleteTask(task.taskId);
+
+    expect(agentRunner.listTasks().length).toEqual(0);
+  });
+
+  it("should be able to run a chat", async () => {
+    const response = await agentRunner.chat({
+      message: "hello world",
+    });
+
+    expect(agentRunner.listTasks().length).toEqual(1);
+
+    // The response mirrors the mocked LLM's fixed output with no sources.
+    expect(response).toEqual({
+      response: DEFAULT_LLM_TEXT_OUTPUT,
+      sourceNodes: [],
+      sources: [],
+    });
+  });
+});
diff --git a/packages/core/src/tests/tools/Tools.test.ts b/packages/core/src/tests/tools/Tools.test.ts
new file mode 100644
index 0000000000000000000000000000000000000000..767a8608a910b7b00e8eab2e999c5505d1d56a0c
--- /dev/null
+++ b/packages/core/src/tests/tools/Tools.test.ts
@@ -0,0 +1,46 @@
+import { FunctionTool, ToolOutput } from "../../tools";
+import { callToolWithErrorHandling } from "../../tools/utils";
+
+// Adds two numbers; the tool under test wraps this function.
+function sumNumbers({ a, b }: { a: number; b: number }): number {
+  return a + b;
+}
+
+// JSON-schema description of sumNumbers' single object argument.
+const sumJSON = {
+  type: "object",
+  properties: {
+    a: {
+      type: "number",
+      description: "The first number",
+    },
+    b: {
+      type: "number",
+      description: "The second number",
+    },
+  },
+  required: ["a", "b"],
+};
+
+describe("Tools", () => {
+  it("should be able to call a tool with a common JSON", async () => {
+    const tool = new FunctionTool(sumNumbers, {
+      name: "sumNumbers",
+      description: "Use this function to sum two numbers",
+      parameters: sumJSON,
+    });
+
+    const response = await callToolWithErrorHandling(tool, {
+      a: 1,
+      b: 2,
+    });
+
+    // Pin the computed value; echoing response.content back into the
+    // expectation would pass even if the tool returned garbage.
+    expect(response.content).toEqual(3);
+
+    expect(response).toEqual(
+      new ToolOutput(response.content, tool.metadata.name, { a: 1, b: 2 }, 3),
+    );
+  });
+});
diff --git a/packages/core/src/tests/utility/mockOpenAI.ts b/packages/core/src/tests/utility/mockOpenAI.ts
index e06053ed9384f0fffa29bebbaddc9965bc7b0bba..88961e3d52697968cdaa5e5cbb1fe0f3c2c22cf4 100644
--- a/packages/core/src/tests/utility/mockOpenAI.ts
+++ b/packages/core/src/tests/utility/mockOpenAI.ts
@@ -64,6 +64,26 @@ export function mockLlmGeneration({
     );
 }
 
+/**
+ * Stubs `languageModel.chat` to resolve with a fixed assistant reply.
+ */
+export function mockLlmToolCallGeneration({
+  languageModel,
+  callbackManager,
+}: {
+  languageModel: OpenAI;
+  callbackManager: CallbackManager;
+}) {
+  jest
+    .spyOn(languageModel, "chat")
+    .mockImplementation(async () => ({
+      message: {
+        role: "assistant",
+        content: "The sum is 2",
+      },
+    }));
+}
+
 export function mockEmbeddingModel(embedModel: OpenAIEmbedding) {
   jest.spyOn(embedModel, "getTextEmbedding").mockImplementation(async (x) => {
     return new Promise((resolve) => {
diff --git a/packages/core/src/tools/functionTool.ts b/packages/core/src/tools/functionTool.ts
new file mode 100644
index 0000000000000000000000000000000000000000..f9114612929fb903c0fc379d91871a6258ef22a8
--- /dev/null
+++ b/packages/core/src/tools/functionTool.ts
@@ -0,0 +1,41 @@
+import { BaseTool, ToolMetadata } from "../types";
+
+type Metadata = {
+  name: string;
+  description: string;
+  parameters: ToolMetadata["parameters"];
+};
+
+/**
+ * A BaseTool that wraps a plain function together with its metadata
+ * (name, description and a JSON-schema description of its parameters).
+ */
+export class FunctionTool<T = any> implements BaseTool {
+  private _fn: (...args: any[]) => any;
+  private _metadata: ToolMetadata;
+
+  constructor(fn: (...args: any[]) => any, metadata: Metadata) {
+    this._fn = fn;
+    // Metadata is structurally assignable to ToolMetadata; no cast needed.
+    this._metadata = metadata;
+  }
+
+  static fromDefaults<T = any>(
+    fn: (...args: any[]) => any,
+    metadata?: Metadata,
+  ): FunctionTool<T> {
+    // Fail fast instead of smuggling `undefined` past the checker with `!`.
+    if (!metadata) {
+      throw new Error("FunctionTool.fromDefaults requires metadata");
+    }
+    return new FunctionTool(fn, metadata);
+  }
+
+  get metadata(): ToolMetadata {
+    return this._metadata;
+  }
+
+  async call(...args: any[]): Promise<any> {
+    return this._fn(...args);
+  }
+}
diff --git a/packages/core/src/tools/index.ts b/packages/core/src/tools/index.ts
new file mode 100644
index 0000000000000000000000000000000000000000..2c87cd60ed0fcfb23760ffa446e4ee0a59ddad35
--- /dev/null
+++ b/packages/core/src/tools/index.ts
@@ -0,0 +1,3 @@
+export * from "./functionTool";
+export * from "./types";
+export * from "./utils";
diff --git a/packages/core/src/tools/types.ts b/packages/core/src/tools/types.ts
new file mode 100644
index 0000000000000000000000000000000000000000..f98f14f7275c3b2f69ef0aafc438aa3eb0167e6d
--- /dev/null
+++ b/packages/core/src/tools/types.ts
@@ -0,0 +1,17 @@
+/**
+ * The result of invoking a tool: the textual content plus the raw
+ * input/output and the name of the tool that produced it.
+ */
+export class ToolOutput {
+  constructor(
+    public content: string,
+    public toolName: string,
+    public rawInput: any,
+    public rawOutput: any,
+  ) {}
+
+  // Tool results are frequently interpolated into prompts; render as content.
+  toString(): string {
+    return this.content;
+  }
+}
diff --git a/packages/core/src/tools/utils.ts b/packages/core/src/tools/utils.ts
new file mode 100644
index 0000000000000000000000000000000000000000..8bb8e76176505cff3c63c91b6b9581282b9c8552
--- /dev/null
+++ b/packages/core/src/tools/utils.ts
@@ -0,0 +1,41 @@
+import { BaseTool } from "../types";
+import { ToolOutput } from "./types";
+
+/**
+ * Call a tool, converting any failure into an error-bearing ToolOutput
+ * instead of letting the exception escape.
+ *
+ * @param tool - the tool to invoke
+ * @param inputDict - keyword arguments forwarded to the tool
+ * @param errorMessage - overrides the default error text on failure
+ * @param raiseError - when true, rethrow instead of returning an error output
+ * @returns the tool's output, or an error-bearing output when the call fails
+ */
+export async function callToolWithErrorHandling(
+  tool: BaseTool,
+  inputDict: { [key: string]: any },
+  errorMessage: string | null = null,
+  raiseError: boolean = false,
+): Promise<ToolOutput> {
+  try {
+    // Surface a clear error (caught below unless raiseError) when the tool
+    // does not implement `call`, instead of silently returning an output
+    // whose content is `undefined`.
+    if (!tool.call) {
+      throw new Error(`Tool ${tool.metadata.name} does not implement call()`);
+    }
+    const value = await tool.call(inputDict);
+    return new ToolOutput(value, tool.metadata.name, inputDict, value);
+  } catch (e) {
+    if (raiseError) {
+      throw e;
+    }
+    const message = errorMessage ?? `Error: ${e}`;
+    return new ToolOutput(
+      message,
+      tool.metadata.name,
+      { kwargs: inputDict },
+      e,
+    );
+  }
+}
diff --git a/packages/core/src/types.ts b/packages/core/src/types.ts
index 3f8074980fbeef55fd7c964d78266529b5a7c89b..61dee2fb5cbedf34f85968b6894965bff8a25aec 100644
--- a/packages/core/src/types.ts
+++ b/packages/core/src/types.ts
@@ -1,7 +1,6 @@
 /**
  * Top level types to avoid circular dependencies
  */
-
 import { Event } from "./callbacks/CallbackManager";
 import { Response } from "./Response";
 
@@ -37,6 +36,7 @@ export interface BaseQueryEngine {
  * Simple Tool interface. Likely to change.
  */
 export interface BaseTool {
+  call?: (...args: any[]) => any;
   metadata: ToolMetadata;
 }
 
@@ -64,9 +64,17 @@ export interface StructuredOutput<T> {
   parsedOutput: T;
 }
 
+export type ToolParameters = {
+  type: string; // JSON-schema type, typically "object" (a `string | "object"` union would collapse to string anyway)
+  properties: Record<string, { type: string; description?: string }>;
+  required?: string[];
+};
+
 export interface ToolMetadata {
   description: string;
   name: string;
+  parameters?: ToolParameters;
+  argsKwargs?: Record<string, any>;
 }
 
 export type ToolMetadataOnlyDescription = Pick<ToolMetadata, "description">;
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index c05fe2b3587c7fec1112e8daaf69779a7cfb34c2..88213f2c41282c89fd5dbd61b6a5fa99da42b1d6 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -599,7 +599,7 @@ packages:
     resolution: {integrity: sha512-qrSfCYxYQB5owCmGLbl8XRpX1ytXlpueOb0N0UmQwA073KZxejgQTzAmJezxvpwQD9uGtK2shHdi55QT+MbjIw==}
     engines: {node: '>=6.9.0'}
     dependencies:
-      '@babel/types': 7.23.9
+      '@babel/types': 7.23.6
       '@jridgewell/gen-mapping': 0.3.3
       '@jridgewell/trace-mapping': 0.3.22
       jsesc: 2.5.2
@@ -608,13 +608,13 @@ packages:
     resolution: {integrity: sha512-LvBTxu8bQSQkcyKOU+a1btnNFQ1dMAd0R6PyW3arXes06F6QLWLIrd681bxRPIXlrMGR3XYnW9JyML7dP3qgxg==}
     engines: {node: '>=6.9.0'}
     dependencies:
-      '@babel/types': 7.23.9
+      '@babel/types': 7.23.6
 
   /@babel/helper-builder-binary-assignment-operator-visitor@7.22.15:
     resolution: {integrity: sha512-QkBXwGgaoC2GtGZRoma6kv7Szfv06khvhFav67ZExau2RaXzy8MpHSMO2PNoP2XtmQphJQRHFfg77Bq731Yizw==}
     engines: {node: '>=6.9.0'}
     dependencies:
-      '@babel/types': 7.23.9
+      '@babel/types': 7.23.6
 
   /@babel/helper-compilation-targets@7.23.6:
     resolution: {integrity: sha512-9JB548GZoQVmzrFgp8o7KxdgkTGm6xs9DW0o/Pim72UDjzr5ObUQ6ZzYPqA+g9OTS2bBQoctLJrky0RDCAWRgQ==}
@@ -677,25 +677,25 @@ packages:
     engines: {node: '>=6.9.0'}
     dependencies:
       '@babel/template': 7.23.9
-      '@babel/types': 7.23.9
+      '@babel/types': 7.23.6
 
   /@babel/helper-hoist-variables@7.22.5:
     resolution: {integrity: sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==}
     engines: {node: '>=6.9.0'}
     dependencies:
-      '@babel/types': 7.23.9
+      '@babel/types': 7.23.6
 
   /@babel/helper-member-expression-to-functions@7.23.0:
     resolution: {integrity: sha512-6gfrPwh7OuT6gZyJZvd6WbTfrqAo7vm4xCzAXOusKqq/vWdKXphTpj5klHKNmRUU6/QRGlBsyU9mAIPaWHlqJA==}
     engines: {node: '>=6.9.0'}
     dependencies:
-      '@babel/types': 7.23.9
+      '@babel/types': 7.23.6
 
   /@babel/helper-module-imports@7.22.15:
     resolution: {integrity: sha512-0pYVBnDKZO2fnSPCrgM/6WMc7eS20Fbok+0r88fp+YtWVLZrp4CkafFGIp+W0VKw4a22sgebPT99y+FDNMdP4w==}
     engines: {node: '>=6.9.0'}
     dependencies:
-      '@babel/types': 7.23.9
+      '@babel/types': 7.23.6
 
   /@babel/helper-module-transforms@7.23.3(@babel/core@7.23.9):
     resolution: {integrity: sha512-7bBs4ED9OmswdfDzpz4MpWgSrV7FXlc3zIagvLFjS5H+Mk7Snr21vQ6QwrsoCGMfNC4e4LQPdoULEt4ykz0SRQ==}
@@ -714,7 +714,7 @@ packages:
     resolution: {integrity: sha512-HBwaojN0xFRx4yIvpwGqxiV2tUfl7401jlok564NgB9EHS1y6QT17FmKWm4ztqjeVdXLuC4fSvHc5ePpQjoTbw==}
     engines: {node: '>=6.9.0'}
     dependencies:
-      '@babel/types': 7.23.9
+      '@babel/types': 7.23.6
 
   /@babel/helper-plugin-utils@7.22.5:
     resolution: {integrity: sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg==}
@@ -752,13 +752,13 @@ packages:
     resolution: {integrity: sha512-tK14r66JZKiC43p8Ki33yLBVJKlQDFoA8GYN67lWCDCqoL6EMMSuM9b+Iff2jHaM/RRFYl7K+iiru7hbRqNx8Q==}
     engines: {node: '>=6.9.0'}
     dependencies:
-      '@babel/types': 7.23.9
+      '@babel/types': 7.23.6
 
   /@babel/helper-split-export-declaration@7.22.6:
     resolution: {integrity: sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g==}
     engines: {node: '>=6.9.0'}
     dependencies:
-      '@babel/types': 7.23.9
+      '@babel/types': 7.23.6
 
   /@babel/helper-string-parser@7.23.4:
     resolution: {integrity: sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ==}
@@ -778,7 +778,7 @@ packages:
     dependencies:
       '@babel/helper-function-name': 7.23.0
       '@babel/template': 7.23.9
-      '@babel/types': 7.23.9
+      '@babel/types': 7.23.6
 
   /@babel/helpers@7.23.9:
     resolution: {integrity: sha512-87ICKgU5t5SzOT7sBMfCOZQ2rHjRU+Pcb9BoILMYz600W6DkVRLFBPwQ18gwUVvggqXivaUakpnxWQGbpywbBQ==}
@@ -1447,7 +1447,7 @@ packages:
       '@babel/helper-module-imports': 7.22.15
       '@babel/helper-plugin-utils': 7.22.5
       '@babel/plugin-syntax-jsx': 7.23.3(@babel/core@7.23.9)
-      '@babel/types': 7.23.9
+      '@babel/types': 7.23.6
 
   /@babel/plugin-transform-react-pure-annotations@7.23.3(@babel/core@7.23.9):
     resolution: {integrity: sha512-qMFdSS+TUhB7Q/3HVPnEdYJDQIk57jkntAwSuz9xfSE4n+3I+vHYCli3HoHawN1Z3RfCz/y1zXA/JXjG6cVImQ==}
@@ -1688,7 +1688,7 @@ packages:
     dependencies:
       '@babel/core': 7.23.9
       '@babel/helper-plugin-utils': 7.22.5
-      '@babel/types': 7.23.9
+      '@babel/types': 7.23.6
       esutils: 2.0.3
 
   /@babel/preset-react@7.23.3(@babel/core@7.23.9):
@@ -1753,7 +1753,7 @@ packages:
       '@babel/helper-hoist-variables': 7.22.5
       '@babel/helper-split-export-declaration': 7.22.6
       '@babel/parser': 7.23.9
-      '@babel/types': 7.23.9
+      '@babel/types': 7.23.6
       debug: 4.3.4
       globals: 11.12.0
     transitivePeerDependencies:
@@ -1766,7 +1766,6 @@ packages:
       '@babel/helper-string-parser': 7.23.4
       '@babel/helper-validator-identifier': 7.22.20
       to-fast-properties: 2.0.0
-    dev: true
 
   /@babel/types@7.23.9:
     resolution: {integrity: sha512-dQjSq/7HaSjRM43FFGnv5keM2HsxpmyV1PfaSVm0nzzjwwTmjOe6J4bC8e3+pTEIgHaHj+1ZlLThRJ2auc/w1Q==}
@@ -2931,11 +2930,11 @@ packages:
     engines: {node: '>=18'}
     dev: false
 
-  /@humanwhocodes/config-array@0.11.14:
-    resolution: {integrity: sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==}
+  /@humanwhocodes/config-array@0.11.13:
+    resolution: {integrity: sha512-JSBDMiDKSzQVngfRjOdFXgFfklaXI4K9nLF49Auh21lmBWRLIK3+xTErTWD4KU54pb6coM6ESE7Awz/FNU3zgQ==}
     engines: {node: '>=10.10.0'}
     dependencies:
-      '@humanwhocodes/object-schema': 2.0.2
+      '@humanwhocodes/object-schema': 2.0.1
       debug: 4.3.4
       minimatch: 3.1.2
     transitivePeerDependencies:
@@ -2945,8 +2944,8 @@ packages:
     resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==}
     engines: {node: '>=12.22'}
 
-  /@humanwhocodes/object-schema@2.0.2:
-    resolution: {integrity: sha512-6EwiSjwWYP7pTckG6I5eyFANjPhmPjUX9JRLUSfNPC7FX7zK9gyZAfUEaECL6ALTpGX5AjnBq3C9XmVWPitNpw==}
+  /@humanwhocodes/object-schema@2.0.1:
+    resolution: {integrity: sha512-dvuCeX5fC9dXgJn9t+X5atfmgQAzUOWqS1254Gh0m6i8wKd10ebXkfNKiRK+1GWi/yTvvLDHpoxLr0xxxeslWw==}
 
   /@isaacs/cliui@8.0.2:
     resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==}
@@ -3774,8 +3773,8 @@ packages:
     resolution: {integrity: sha512-TV7t8GKYaJWsn00tFDqBw8+Uqmr8A0fRU1tvTQhyZzGv0sJCGRQL3JGMI3ucuKo3XIZdUP+Lx7/gh2t3lewy7g==}
     engines: {node: '>=14.16'}
 
-  /@sinonjs/commons@3.0.1:
-    resolution: {integrity: sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==}
+  /@sinonjs/commons@3.0.0:
+    resolution: {integrity: sha512-jXBtWAF4vmdNmZgD5FoKsVLv3rPgDnLgPbU84LIJ3otV44vJlDRokVng5v8NFJdCf/da9legHcKaRuZs4L7faA==}
     dependencies:
       type-detect: 4.0.8
     dev: true
@@ -3783,7 +3782,7 @@ packages:
   /@sinonjs/fake-timers@10.3.0:
     resolution: {integrity: sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==}
     dependencies:
-      '@sinonjs/commons': 3.0.1
+      '@sinonjs/commons': 3.0.0
     dev: true
 
   /@slorber/remark-comment@1.0.0:
@@ -3927,7 +3926,7 @@ packages:
     resolution: {integrity: sha512-1hnUxxjd83EAxbL4a0JDJoD3Dao3hmjvyvyEV8PzWmLK3B9m9NPlW7GKjFyoWE8nM7HnXzPcmmSyOW8yOddSXw==}
     engines: {node: '>=10'}
     dependencies:
-      '@babel/types': 7.23.9
+      '@babel/types': 7.23.6
       entities: 4.5.0
 
   /@svgr/plugin-jsx@6.5.1(@svgr/core@6.5.1):
@@ -4181,7 +4180,7 @@ packages:
       chalk: 2.4.2
       commander: 10.0.1
       execa: 5.1.1
-      fast-glob: 3.3.2
+      fast-glob: 3.3.1
       fs-extra: 10.1.0
       gradient-string: 2.0.2
       inquirer: 8.2.6
@@ -4207,7 +4206,7 @@ packages:
     resolution: {integrity: sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==}
     dependencies:
       '@babel/parser': 7.23.9
-      '@babel/types': 7.23.9
+      '@babel/types': 7.23.6
       '@types/babel__generator': 7.6.8
       '@types/babel__template': 7.4.4
       '@types/babel__traverse': 7.20.5
@@ -4216,20 +4215,20 @@ packages:
   /@types/babel__generator@7.6.8:
     resolution: {integrity: sha512-ASsj+tpEDsEiFr1arWrlN6V3mdfjRMZt6LtK/Vp/kreFLnr5QH5+DhvD5nINYZXzwJvXeGq+05iUXcAzVrqWtw==}
     dependencies:
-      '@babel/types': 7.23.9
+      '@babel/types': 7.23.6
     dev: true
 
   /@types/babel__template@7.4.4:
     resolution: {integrity: sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==}
     dependencies:
       '@babel/parser': 7.23.9
-      '@babel/types': 7.23.9
+      '@babel/types': 7.23.6
     dev: true
 
   /@types/babel__traverse@7.20.5:
     resolution: {integrity: sha512-WXCyOcRtH37HAUkpXhUduaxdm82b4GSlyTqajXviN4EfiuPgNYR109xMCKvpl6zPIpua0DGlMEDCq+g8EdoheQ==}
     dependencies:
-      '@babel/types': 7.23.9
+      '@babel/types': 7.23.6
     dev: true
 
   /@types/body-parser@1.19.5:
@@ -5440,7 +5439,7 @@ packages:
     engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
     dependencies:
       '@babel/template': 7.23.9
-      '@babel/types': 7.23.9
+      '@babel/types': 7.23.6
       '@types/babel__core': 7.20.5
       '@types/babel__traverse': 7.20.5
     dev: true
@@ -6071,7 +6070,7 @@ packages:
     engines: {node: '>=18'}
     dependencies:
       slice-ansi: 5.0.0
-      string-width: 7.1.0
+      string-width: 7.0.0
     dev: true
 
   /cli-width@3.0.0:
@@ -6131,7 +6130,7 @@ packages:
     resolution: {integrity: sha512-jsayHP4Z1gKjXB+NsFhEKrM2dAN4XCpbHbhwzzYfFrVL/DYPw9D/ACob6EjbIiV47PSe3OcxJqX/b1V/T7XK3A==}
     engines: {node: '>=14.18.0'}
     dependencies:
-      rfdc: 1.3.1
+      rfdc: 1.3.0
     dev: false
 
   /collapse-white-space@2.1.0:
@@ -6217,10 +6216,10 @@ packages:
 
   /commander@2.20.0:
     resolution: {integrity: sha512-7j2y+40w61zy6YC2iRNpUe/NwhNyoXrYpHMrSunaMG64nRnaf96zO/KMQR4OyN/UnE5KLyEBnKHd4aG3rskjpQ==}
-    dev: true
 
   /commander@2.20.3:
     resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==}
+    dev: true
 
   /commander@5.1.0:
     resolution: {integrity: sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==}
@@ -7399,8 +7398,8 @@ packages:
       object-keys: 1.1.1
       object.assign: 4.1.5
       regexp.prototype.flags: 1.5.1
-      safe-array-concat: 1.1.0
-      safe-regex-test: 1.0.2
+      safe-array-concat: 1.0.1
+      safe-regex-test: 1.0.0
       string.prototype.trim: 1.2.8
       string.prototype.trimend: 1.0.7
       string.prototype.trimstart: 1.0.7
@@ -7427,7 +7426,7 @@ packages:
       has-symbols: 1.0.3
       internal-slot: 1.0.6
       iterator.prototype: 1.1.2
-      safe-array-concat: 1.1.0
+      safe-array-concat: 1.0.1
     dev: false
 
   /es-module-lexer@1.4.1:
@@ -7773,7 +7772,7 @@ packages:
       '@eslint-community/regexpp': 4.10.0
       '@eslint/eslintrc': 2.1.4
       '@eslint/js': 8.56.0
-      '@humanwhocodes/config-array': 0.11.14
+      '@humanwhocodes/config-array': 0.11.13
       '@humanwhocodes/module-importer': 1.0.1
       '@nodelib/fs.walk': 1.2.8
       '@ungap/structured-clone': 1.2.0
@@ -8681,7 +8680,7 @@ packages:
       '@types/glob': 7.2.0
       array-union: 2.1.0
       dir-glob: 3.0.1
-      fast-glob: 3.3.2
+      fast-glob: 3.3.1
       glob: 7.2.3
       ignore: 5.3.0
       merge2: 1.4.1
@@ -10215,7 +10214,7 @@ packages:
       '@babel/generator': 7.23.6
       '@babel/plugin-syntax-jsx': 7.23.3(@babel/core@7.23.9)
       '@babel/plugin-syntax-typescript': 7.23.3(@babel/core@7.23.9)
-      '@babel/types': 7.23.9
+      '@babel/types': 7.23.6
       '@jest/expect-utils': 29.7.0
       '@jest/transform': 29.7.0
       '@jest/types': 29.6.3
@@ -10535,7 +10534,7 @@ packages:
       colorette: 2.0.20
       eventemitter3: 5.0.1
       log-update: 6.0.0
-      rfdc: 1.3.1
+      rfdc: 1.3.0
       wrap-ansi: 9.0.0
     dev: true
 
@@ -11616,7 +11615,7 @@ packages:
     engines: {node: '>=10.13.0'}
     hasBin: true
     dependencies:
-      commander: 2.20.3
+      commander: 2.20.0
       debug: 4.3.4
       glob: 7.2.3
       requirejs: 2.3.6
@@ -14050,8 +14049,8 @@ packages:
     resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==}
     engines: {iojs: '>=1.0.0', node: '>=0.10.0'}
 
-  /rfdc@1.3.1:
-    resolution: {integrity: sha512-r5a3l5HzYlIC68TpmYKlxWjmOP6wiPJ1vWv2HeLhNsRZMrCkxeqxiHlQ21oXmQ4F3SiryXBHhAD7JZqvOJjFmg==}
+  /rfdc@1.3.0:
+    resolution: {integrity: sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==}
 
   /rimraf@3.0.2:
     resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==}
@@ -14165,8 +14164,8 @@ packages:
       tslib: 2.6.2
     dev: true
 
-  /safe-array-concat@1.1.0:
-    resolution: {integrity: sha512-ZdQ0Jeb9Ofti4hbt5lX3T2JcAamT9hfzYU1MNB+z/jaEbB6wfFfPIR/zEORmZqobkCCJhSjodobH6WHNmJ97dg==}
+  /safe-array-concat@1.0.1:
+    resolution: {integrity: sha512-6XbUAseYE2KtOuGueyeobCySj9L4+66Tn6KQMOPQJrAJEowYKW/YR/MGJZl7FdydUdaFu4LYyDZjxf4/Nmo23Q==}
     engines: {node: '>=0.4'}
     dependencies:
       call-bind: 1.0.5
@@ -14182,15 +14181,6 @@ packages:
 
   /safe-regex-test@1.0.0:
     resolution: {integrity: sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==}
-    dependencies:
-      call-bind: 1.0.2
-      get-intrinsic: 1.2.0
-      is-regex: 1.1.4
-    dev: false
-
-  /safe-regex-test@1.0.2:
-    resolution: {integrity: sha512-83S9w6eFq12BBIJYvjMux6/dkirb8+4zJRA9cxNBVb7Wq5fJBW+Xze48WqR8pxua7bDuAaaAxtVVd4Idjp1dBQ==}
-    engines: {node: '>= 0.4'}
     dependencies:
       call-bind: 1.0.5
       get-intrinsic: 1.2.2
@@ -14209,7 +14199,7 @@ packages:
     engines: {node: '>=6.0.0'}
     hasBin: true
     dependencies:
-      commander: 2.20.3
+      commander: 2.20.0
     dev: true
 
   /sax@1.3.0:
@@ -14661,14 +14651,14 @@ packages:
       spdx-license-ids: 3.0.16
     dev: true
 
-  /spdx-exceptions@2.4.0:
-    resolution: {integrity: sha512-hcjppoJ68fhxA/cjbN4T8N6uCUejN8yFw69ttpqtBeCbF3u13n7mb31NB9jKwGTTWWnt9IbRA/mf1FprYS8wfw==}
+  /spdx-exceptions@2.3.0:
+    resolution: {integrity: sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==}
     dev: true
 
   /spdx-expression-parse@3.0.1:
     resolution: {integrity: sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==}
     dependencies:
-      spdx-exceptions: 2.4.0
+      spdx-exceptions: 2.3.0
       spdx-license-ids: 3.0.16
     dev: true
 
@@ -14756,8 +14746,8 @@ packages:
     engines: {node: '>=10.0.0'}
     dev: true
 
-  /streamx@2.15.6:
-    resolution: {integrity: sha512-q+vQL4AAz+FdfT137VF69Cc/APqUbxy+MDOImRrMvchJpigHj9GksgDU2LYbO9rx7RX6osWgxJB2WxhYv4SZAw==}
+  /streamx@2.15.5:
+    resolution: {integrity: sha512-9thPGMkKC2GctCzyCUjME3yR03x2xNo0GPKGkRw2UMYN+gqWa9uqpyNWhmsNCutU5zHmkUum0LsCRQTXUgUCAg==}
     dependencies:
       fast-fifo: 1.3.2
       queue-tick: 1.0.1
@@ -14778,7 +14768,7 @@ packages:
     engines: {node: '>=14.18.0'}
     dependencies:
       codsen-utils: 1.6.3
-      rfdc: 1.3.1
+      rfdc: 1.3.0
     dev: false
 
   /string-length@4.0.2:
@@ -14823,8 +14813,8 @@ packages:
       emoji-regex: 9.2.2
       strip-ansi: 7.1.0
 
-  /string-width@7.1.0:
-    resolution: {integrity: sha512-SEIJCWiX7Kg4c129n48aDRwLbFb2LJmXXFrWBG4NGaRtMQ3myKPKbwrD1BKqQn74oCoNMBVrfDEr5M9YxCsrkw==}
+  /string-width@7.0.0:
+    resolution: {integrity: sha512-GPQHj7row82Hjo9hKZieKcHIhaAIKOJvFSIZXuCU9OASVZrMNUaZuz++SPVrBjnLsnk4k+z9f2EIypgxf2vNFw==}
     engines: {node: '>=18'}
     dependencies:
       emoji-regex: 10.3.0
@@ -15030,7 +15020,7 @@ packages:
     engines: {node: '>=6.0.0'}
     hasBin: true
     dependencies:
-      commander: 2.20.3
+      commander: 2.20.0
       debug: 4.3.4
     transitivePeerDependencies:
       - supports-color
@@ -15130,7 +15120,7 @@ packages:
     dependencies:
       b4a: 1.6.4
       fast-fifo: 1.3.2
-      streamx: 2.15.6
+      streamx: 2.15.5
     dev: false
 
   /tar@6.1.15:
@@ -15212,7 +15202,7 @@ packages:
     dependencies:
       '@jridgewell/source-map': 0.3.5
       acorn: 8.11.3
-      commander: 2.20.3
+      commander: 2.20.0
       source-map-support: 0.5.21
 
   /test-exclude@6.0.0:
@@ -15729,7 +15719,7 @@ packages:
   /unbox-primitive@1.0.2:
     resolution: {integrity: sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==}
     dependencies:
-      call-bind: 1.0.5
+      call-bind: 1.0.2
       has-bigints: 1.0.2
       has-symbols: 1.0.3
       which-boxed-primitive: 1.0.2
@@ -16437,7 +16427,7 @@ packages:
     engines: {node: '>=18'}
     dependencies:
       ansi-styles: 6.2.1
-      string-width: 7.1.0
+      string-width: 7.0.0
       strip-ansi: 7.1.0
     dev: true