diff --git a/.changeset/perfect-sloths-vanish.md b/.changeset/perfect-sloths-vanish.md
new file mode 100644
index 0000000000000000000000000000000000000000..77b6d3510b44c3153767968a0fd770290f8297a5
--- /dev/null
+++ b/.changeset/perfect-sloths-vanish.md
@@ -0,0 +1,10 @@
+---
+"llamaindex": patch
+"@llamaindex/edge": patch
+---
+
+feat: init anthropic agent
+
+Remove the `tool` and `function` types from `MessageType`; use `assistant` instead.
+These two message types were only applicable to `OpenAI`.
+Since `OpenAI` has deprecated the function type, we now support the Claude 3 tool-call format.
diff --git a/examples/anthropic/agent.ts b/examples/anthropic/agent.ts
new file mode 100644
index 0000000000000000000000000000000000000000..94c2d23f87e123a24e43357d4caa08d13a479318
--- /dev/null
+++ b/examples/anthropic/agent.ts
@@ -0,0 +1,43 @@
+import { FunctionTool, Settings, WikipediaTool } from "llamaindex";
+import { AnthropicAgent } from "llamaindex/agent/anthropic";
+
+Settings.callbackManager.on("llm-tool-call", (event) => {
+  console.log("llm-tool-call", event.detail.payload.toolCall);
+});
+
+const agent = new AnthropicAgent({
+  tools: [
+    FunctionTool.from<{ location: string }>(
+      (query) => {
+        return `The weather in ${query.location} is sunny`;
+      },
+      {
+        name: "weather",
+        description: "Get the weather",
+        parameters: {
+          type: "object",
+          properties: {
+            location: {
+              type: "string",
+              description: "The location to get the weather for",
+            },
+          },
+          required: ["location"],
+        },
+      },
+    ),
+    new WikipediaTool(),
+  ],
+});
+
+async function main() {
+  // https://docs.anthropic.com/claude/docs/tool-use#tool-use-best-practices-and-limitations
+  const { response } = await agent.chat({
+    message:
+      "What is the weather in New York? What's the history of New York from Wikipedia in 3 sentences?",
+  });
+
+  console.log(response);
+}
+
+void main();
diff --git a/examples/toolsStream.ts b/examples/toolsStream.ts
index 3b1e86619d2525152c9f5de09de922cd3ee250cf..7108ddd06802a358319c2439bdc09452b482d0d2 100644
--- a/examples/toolsStream.ts
+++ b/examples/toolsStream.ts
@@ -35,7 +35,10 @@ async function main() {
   const stream = await llm.chat({ ...args, stream: true });
   for await (const chunk of stream) {
     process.stdout.write(chunk.delta);
-    console.log(chunk.options?.toolCalls?.[0]);
+    if (chunk.options && "toolCall" in chunk.options) {
+      console.log("Tool call:");
+      console.log(chunk.options.toolCall);
+    }
   }
 }
 
diff --git a/packages/core/e2e/fixtures/llm/open_ai.ts b/packages/core/e2e/fixtures/llm/open_ai.ts
index bef6a4b40528f545b72376404aa90a3d30ea4745..2c1741b8ca0c3b32613a5f3d677210179972eda0 100644
--- a/packages/core/e2e/fixtures/llm/open_ai.ts
+++ b/packages/core/e2e/fixtures/llm/open_ai.ts
@@ -9,7 +9,7 @@ import type {
   LLMCompletionParamsStreaming,
 } from "llamaindex/llm/types";
 import { extractText } from "llamaindex/llm/utils";
-import { strictEqual } from "node:assert";
+import { deepStrictEqual, strictEqual } from "node:assert";
 import { llmCompleteMockStorage } from "../../node/utils.js";
 
 export function getOpenAISession() {
@@ -21,6 +21,7 @@ export function isFunctionCallingModel() {
 }
 
 export class OpenAI implements LLM {
+  supportToolCall = true;
   get metadata() {
     return {
       model: "mock-model",
@@ -48,7 +49,7 @@ export class OpenAI implements LLM {
       strictEqual(chatMessage.length, params.messages.length);
       for (let i = 0; i < chatMessage.length; i++) {
         strictEqual(chatMessage[i].role, params.messages[i].role);
-        strictEqual(chatMessage[i].content, params.messages[i].content);
+        deepStrictEqual(chatMessage[i].content, params.messages[i].content);
       }
 
       if (llmCompleteMockStorage.llmEventEnd.length > 0) {
diff --git a/packages/core/e2e/node/claude.e2e.ts b/packages/core/e2e/node/claude.e2e.ts
index 23325866b83cc36f946f837c8756b8ceed36bd94..c4e82a954a5c9399a539b744dabdace29eee7631 100644
--- a/packages/core/e2e/node/claude.e2e.ts
+++ b/packages/core/e2e/node/claude.e2e.ts
@@ -1,7 +1,9 @@
 import { consola } from "consola";
-import { Anthropic, Settings, type LLM } from "llamaindex";
+import { Anthropic, FunctionTool, Settings, type LLM } from "llamaindex";
+import { AnthropicAgent } from "llamaindex/agent/anthropic";
 import { ok } from "node:assert";
 import { beforeEach, test } from "node:test";
+import { sumNumbersTool } from "./fixtures/tools.js";
 import { mockLLMEvent } from "./utils.js";
 
 let llm: LLM;
@@ -20,6 +22,7 @@ await test("anthropic llm", async (t) => {
         {
           content: "Hello",
           role: "user",
+          options: {},
         },
       ],
     });
@@ -43,3 +46,84 @@ await test("anthropic llm", async (t) => {
     }
   });
 });
+
+await test("anthropic agent", async (t) => {
+  await mockLLMEvent(t, "anthropic-agent");
+  await t.test("chat", async () => {
+    const agent = new AnthropicAgent({
+      tools: [
+        {
+          call: async () => {
+            return "35 degrees and sunny in San Francisco";
+          },
+          metadata: {
+            name: "Weather",
+            description: "Get the weather",
+            parameters: {
+              type: "object",
+              properties: {
+                location: { type: "string" },
+              },
+              required: ["location"],
+            },
+          },
+        },
+      ],
+    });
+    const result = await agent.chat({
+      message: "What is the weather in San Francisco?",
+    });
+    consola.debug("response:", result.response);
+    ok(typeof result.response === "string");
+    ok(result.response.includes("35"));
+  });
+
+  await t.test("async function", async () => {
+    const uniqueId = "123456789";
+    const showUniqueId = FunctionTool.from<{
+      firstName: string;
+      lastName: string;
+    }>(
+      async ({ firstName, lastName }) => {
+        ok(typeof firstName === "string");
+        ok(typeof lastName === "string");
+        const fullName = firstName + lastName;
+        ok(fullName.toLowerCase().includes("alex"));
+        ok(fullName.toLowerCase().includes("yang"));
+        return uniqueId;
+      },
+      {
+        name: "unique_id",
+        description: "show user unique id",
+        parameters: {
+          type: "object",
+          properties: {
+            firstName: { type: "string" },
+            lastName: { type: "string" },
+          },
+          required: ["firstName", "lastName"],
+        },
+      },
+    );
+    const agent = new AnthropicAgent({
+      tools: [showUniqueId],
+    });
+    const { response } = await agent.chat({
+      message: "My name is Alex Yang. What is my unique id?",
+    });
+    consola.debug("response:", response);
+    ok(response.includes(uniqueId));
+  });
+
+  await t.test("sum numbers", async () => {
+    const openaiAgent = new AnthropicAgent({
+      tools: [sumNumbersTool],
+    });
+
+    const response = await openaiAgent.chat({
+      message: "how much is 1 + 1?",
+    });
+
+    ok(response.response.includes("2"));
+  });
+});
diff --git a/packages/core/e2e/node/fixtures/tools.ts b/packages/core/e2e/node/fixtures/tools.ts
new file mode 100644
index 0000000000000000000000000000000000000000..6d523991a9354e7884aabef2de97062fbf957b6a
--- /dev/null
+++ b/packages/core/e2e/node/fixtures/tools.ts
@@ -0,0 +1,47 @@
+import { FunctionTool } from "llamaindex";
+
+function sumNumbers({ a, b }: { a: number; b: number }) {
+  return `${a + b}`;
+}
+
+function divideNumbers({ a, b }: { a: number; b: number }) {
+  return `${a / b}`;
+}
+
+export const sumNumbersTool = FunctionTool.from(sumNumbers, {
+  name: "sumNumbers",
+  description: "Use this function to sum two numbers",
+  parameters: {
+    type: "object",
+    properties: {
+      a: {
+        type: "number",
+        description: "The first number",
+      },
+      b: {
+        type: "number",
+        description: "The second number",
+      },
+    },
+    required: ["a", "b"],
+  },
+});
+
+export const divideNumbersTool = FunctionTool.from(divideNumbers, {
+  name: "divideNumbers",
+  description: "Use this function to divide two numbers",
+  parameters: {
+    type: "object",
+    properties: {
+      a: {
+        type: "number",
+        description: "The first number",
+      },
+      b: {
+        type: "number",
+        description: "The second number",
+      },
+    },
+    required: ["a", "b"],
+  },
+});
diff --git a/packages/core/e2e/node/openai.e2e.ts b/packages/core/e2e/node/openai.e2e.ts
index 710ea5bc948ab78afa775399d0e705020bde503a..06c78a3093d54ec2b3ca61fb6e03f7140a763d22 100644
--- a/packages/core/e2e/node/openai.e2e.ts
+++ b/packages/core/e2e/node/openai.e2e.ts
@@ -10,8 +10,9 @@ import {
   VectorStoreIndex,
   type LLM,
 } from "llamaindex";
-import { ok } from "node:assert";
+import { ok, strictEqual } from "node:assert";
 import { beforeEach, test } from "node:test";
+import { divideNumbersTool, sumNumbersTool } from "./fixtures/tools.js";
 import { mockLLMEvent } from "./utils.js";
 
 let llm: LLM;
@@ -22,14 +23,6 @@ beforeEach(async () => {
   llm = Settings.llm;
 });
 
-function sumNumbers({ a, b }: { a: number; b: number }) {
-  return `${a + b}`;
-}
-
-function divideNumbers({ a, b }: { a: number; b: number }) {
-  return `${a / b}`;
-}
-
 await test("openai llm", async (t) => {
   await mockLLMEvent(t, "llm");
   await t.test("llm.chat", async () => {
@@ -166,27 +159,8 @@ await test("agent", async (t) => {
   });
 
   await t.test("sum numbers", async () => {
-    const sumFunctionTool = new FunctionTool(sumNumbers, {
-      name: "sumNumbers",
-      description: "Use this function to sum two numbers",
-      parameters: {
-        type: "object",
-        properties: {
-          a: {
-            type: "number",
-            description: "The first number",
-          },
-          b: {
-            type: "number",
-            description: "The second number",
-          },
-        },
-        required: ["a", "b"],
-      },
-    });
-
     const openaiAgent = new OpenAIAgent({
-      tools: [sumFunctionTool],
+      tools: [sumNumbersTool],
     });
 
     const response = await openaiAgent.chat({
@@ -199,51 +173,12 @@ await test("agent", async (t) => {
 
 await test("agent stream", async (t) => {
   await mockLLMEvent(t, "agent_stream");
-  await t.test("sum numbers stream", async () => {
-    const sumJSON = {
-      type: "object",
-      properties: {
-        a: {
-          type: "number",
-          description: "The first number",
-        },
-        b: {
-          type: "number",
-          description: "The second number",
-        },
-      },
-      required: ["a", "b"],
-    } as const;
-
-    const divideJSON = {
-      type: "object",
-      properties: {
-        a: {
-          type: "number",
-          description: "The dividend",
-        },
-        b: {
-          type: "number",
-          description: "The divisor",
-        },
-      },
-      required: ["a", "b"],
-    } as const;
-
-    const functionTool = FunctionTool.from(sumNumbers, {
-      name: "sumNumbers",
-      description: "Use this function to sum two numbers",
-      parameters: sumJSON,
-    });
-
-    const functionTool2 = FunctionTool.from(divideNumbers, {
-      name: "divideNumbers",
-      description: "Use this function to divide two numbers",
-      parameters: divideJSON,
-    });
+  await t.test("sum numbers stream", async (t) => {
+    const fn = t.mock.fn(() => {});
+    Settings.callbackManager.on("llm-tool-call", fn);
 
     const agent = new OpenAIAgent({
-      tools: [functionTool, functionTool2],
+      tools: [sumNumbersTool, divideNumbersTool],
     });
 
     const { response } = await agent.chat({
@@ -257,13 +192,17 @@ await test("agent stream", async (t) => {
       message += chunk.response;
     }
 
+    strictEqual(fn.mock.callCount(), 2);
     ok(message.includes("28"));
+    Settings.callbackManager.off("llm-tool-call", fn);
   });
 });
 
 await test("queryEngine", async (t) => {
   await mockLLMEvent(t, "queryEngine_subquestion");
   await t.test("subquestion", async () => {
+    const fn = t.mock.fn(() => {});
+    Settings.callbackManager.on("llm-tool-call", fn);
     const document = new Document({
       text: "Bill Gates stole from Apple.\n Steve Jobs stole from Xerox.",
     });
@@ -288,5 +227,7 @@ await test("queryEngine", async (t) => {
     });
 
     ok(response.includes("Apple"));
+    strictEqual(fn.mock.callCount(), 0);
+    Settings.callbackManager.off("llm-tool-call", fn);
   });
 });
diff --git a/packages/core/e2e/node/snapshot/agent.snap b/packages/core/e2e/node/snapshot/agent.snap
index c283affff0b45185be6d21894ce442fb712e3568..11cb01b924dfc0a8f3d740fa623c1a728debb625 100644
--- a/packages/core/e2e/node/snapshot/agent.snap
+++ b/packages/core/e2e/node/snapshot/agent.snap
@@ -20,24 +20,21 @@
           "content": "",
           "role": "assistant",
           "options": {
-            "toolCalls": [
-              {
-                "id": "HIDDEN",
-                "type": "function",
-                "function": {
-                  "name": "Weather",
-                  "arguments": "{\"location\":\"San Francisco\"}"
-                }
-              }
-            ]
+            "toolCall": {
+              "id": "HIDDEN",
+              "name": "Weather",
+              "input": "{\"location\":\"San Francisco\"}"
+            }
           }
         },
         {
           "content": "35 degrees and sunny in San Francisco",
-          "role": "tool",
+          "role": "user",
           "options": {
-            "name": "Weather",
-            "tool_call_id": "HIDDEN"
+            "toolResult": {
+              "id": "HIDDEN",
+              "isError": false
+            }
           }
         }
       ]
@@ -62,24 +59,21 @@
           "content": "",
           "role": "assistant",
           "options": {
-            "toolCalls": [
-              {
-                "id": "HIDDEN",
-                "type": "function",
-                "function": {
-                  "name": "unique_id",
-                  "arguments": "{\"firstName\":\"Alex\",\"lastName\":\"Yang\"}"
-                }
-              }
-            ]
+            "toolCall": {
+              "id": "HIDDEN",
+              "name": "unique_id",
+              "input": "{\"firstName\":\"Alex\",\"lastName\":\"Yang\"}"
+            }
           }
         },
         {
           "content": "123456789",
-          "role": "tool",
+          "role": "user",
           "options": {
-            "name": "unique_id",
-            "tool_call_id": "HIDDEN"
+            "toolResult": {
+              "id": "HIDDEN",
+              "isError": false
+            }
           }
         }
       ]
@@ -104,24 +98,21 @@
           "content": "",
           "role": "assistant",
           "options": {
-            "toolCalls": [
-              {
-                "id": "HIDDEN",
-                "type": "function",
-                "function": {
-                  "name": "sumNumbers",
-                  "arguments": "{\"a\":1,\"b\":1}"
-                }
-              }
-            ]
+            "toolCall": {
+              "id": "HIDDEN",
+              "name": "sumNumbers",
+              "input": "{\"a\":1,\"b\":1}"
+            }
           }
         },
         {
           "content": "2",
-          "role": "tool",
+          "role": "user",
           "options": {
-            "name": "sumNumbers",
-            "tool_call_id": "HIDDEN"
+            "toolResult": {
+              "id": "HIDDEN",
+              "isError": false
+            }
           }
         }
       ]
@@ -168,16 +159,11 @@
           "content": "",
           "role": "assistant",
           "options": {
-            "toolCalls": [
-              {
-                "id": "HIDDEN",
-                "type": "function",
-                "function": {
-                  "name": "Weather",
-                  "arguments": "{\"location\":\"San Francisco\"}"
-                }
-              }
-            ]
+            "toolCall": {
+              "id": "HIDDEN",
+              "name": "Weather",
+              "input": "{\"location\":\"San Francisco\"}"
+            }
           }
         }
       }
@@ -255,16 +241,11 @@
           "content": "",
           "role": "assistant",
           "options": {
-            "toolCalls": [
-              {
-                "id": "HIDDEN",
-                "type": "function",
-                "function": {
-                  "name": "unique_id",
-                  "arguments": "{\"firstName\":\"Alex\",\"lastName\":\"Yang\"}"
-                }
-              }
-            ]
+            "toolCall": {
+              "id": "HIDDEN",
+              "name": "unique_id",
+              "input": "{\"firstName\":\"Alex\",\"lastName\":\"Yang\"}"
+            }
           }
         }
       }
@@ -342,16 +323,11 @@
           "content": "",
           "role": "assistant",
           "options": {
-            "toolCalls": [
-              {
-                "id": "HIDDEN",
-                "type": "function",
-                "function": {
-                  "name": "sumNumbers",
-                  "arguments": "{\"a\":1,\"b\":1}"
-                }
-              }
-            ]
+            "toolCall": {
+              "id": "HIDDEN",
+              "name": "sumNumbers",
+              "input": "{\"a\":1,\"b\":1}"
+            }
           }
         }
       }
@@ -369,7 +345,7 @@
               "index": 0,
               "message": {
                 "role": "assistant",
-                "content": "The sum of 1 + 1 is 2."
+                "content": "1 + 1 is equal to 2."
               },
               "logprobs": null,
               "finish_reason": "stop"
@@ -377,13 +353,13 @@
           ],
           "usage": {
             "prompt_tokens": 97,
-            "completion_tokens": 13,
-            "total_tokens": 110
+            "completion_tokens": 11,
+            "total_tokens": 108
           },
           "system_fingerprint": "HIDDEN"
         },
         "message": {
-          "content": "The sum of 1 + 1 is 2.",
+          "content": "1 + 1 is equal to 2.",
           "role": "assistant",
           "options": {}
         }
diff --git a/packages/core/e2e/node/snapshot/agent_stream.snap b/packages/core/e2e/node/snapshot/agent_stream.snap
index ecd85f162825e35b498f3abdb0718f5ea27ab3c1..e9e9aa161d5834e0b097ada9ff8b86271e77f893 100644
--- a/packages/core/e2e/node/snapshot/agent_stream.snap
+++ b/packages/core/e2e/node/snapshot/agent_stream.snap
@@ -20,40 +20,72 @@
           "content": "",
           "role": "assistant",
           "options": {
-            "toolCalls": [
-              {
-                "function": {
-                  "name": "divideNumbers",
-                  "arguments": "{\"a\": 16, \"b\": 2}"
-                },
-                "id": "HIDDEN",
-                "type": "function"
-              },
-              {
-                "function": {
-                  "name": "sumNumbers",
-                  "arguments": "{\"a\": 8, \"b\": 20}"
-                },
-                "id": "HIDDEN",
-                "type": "function"
-              }
-            ]
+            "toolCall": {
+              "name": "divideNumbers",
+              "id": "HIDDEN",
+              "input": "{\"a\": 16, \"b\": 2}"
+            }
           }
         },
         {
           "content": "8",
-          "role": "tool",
+          "role": "user",
           "options": {
-            "name": "divideNumbers",
-            "tool_call_id": "HIDDEN"
+            "toolResult": {
+              "id": "HIDDEN",
+              "isError": false
+            }
+          }
+        }
+      ]
+    },
+    {
+      "id": "PRESERVE_2",
+      "messages": [
+        {
+          "content": "Divide 16 by 2 then add 20",
+          "role": "user"
+        },
+        {
+          "content": "",
+          "role": "assistant",
+          "options": {
+            "toolCall": {
+              "name": "divideNumbers",
+              "id": "HIDDEN",
+              "input": "{\"a\": 16, \"b\": 2}"
+            }
+          }
+        },
+        {
+          "content": "8",
+          "role": "user",
+          "options": {
+            "toolResult": {
+              "id": "HIDDEN",
+              "isError": false
+            }
+          }
+        },
+        {
+          "content": "",
+          "role": "assistant",
+          "options": {
+            "toolCall": {
+              "name": "sumNumbers",
+              "id": "HIDDEN",
+              "input": "{\"a\":8,\"b\":20}"
+            }
           }
         },
         {
           "content": "28",
-          "role": "tool",
+          "role": "user",
           "options": {
-            "name": "sumNumbers",
-            "tool_call_id": "HIDDEN"
+            "toolResult": {
+              "id": "HIDDEN",
+              "isError": false
+            }
           }
         }
       ]
@@ -93,24 +125,11 @@
               ]
             },
             "options": {
-              "toolCalls": [
-                {
-                  "function": {
-                    "name": "divideNumbers",
-                    "arguments": "{\"a\": 16, \"b\": 2}"
-                  },
-                  "id": "HIDDEN",
-                  "type": "function"
-                },
-                {
-                  "function": {
-                    "name": "sumNumbers",
-                    "arguments": "{\"a\": 8, \"b\": 20}"
-                  },
-                  "id": "HIDDEN",
-                  "type": "function"
-                }
-              ]
+              "toolCall": {
+                "name": "divideNumbers",
+                "id": "HIDDEN",
+                "input": "{\"a\": 16, \"b\": 2}"
+              }
             },
             "delta": ""
           },
@@ -140,24 +159,11 @@
               ]
             },
             "options": {
-              "toolCalls": [
-                {
-                  "function": {
-                    "name": "divideNumbers",
-                    "arguments": "{\"a\": 16, \"b\": 2}"
-                  },
-                  "id": "HIDDEN",
-                  "type": "function"
-                },
-                {
-                  "function": {
-                    "name": "sumNumbers",
-                    "arguments": "{\"a\": 8, \"b\": 20}"
-                  },
-                  "id": "HIDDEN",
-                  "type": "function"
-                }
-              ]
+              "toolCall": {
+                "name": "divideNumbers",
+                "id": "HIDDEN",
+                "input": "{\"a\": 16, \"b\": 2}"
+              }
             },
             "delta": ""
           },
@@ -187,24 +193,11 @@
               ]
             },
             "options": {
-              "toolCalls": [
-                {
-                  "function": {
-                    "name": "divideNumbers",
-                    "arguments": "{\"a\": 16, \"b\": 2}"
-                  },
-                  "id": "HIDDEN",
-                  "type": "function"
-                },
-                {
-                  "function": {
-                    "name": "sumNumbers",
-                    "arguments": "{\"a\": 8, \"b\": 20}"
-                  },
-                  "id": "HIDDEN",
-                  "type": "function"
-                }
-              ]
+              "toolCall": {
+                "name": "divideNumbers",
+                "id": "HIDDEN",
+                "input": "{\"a\": 16, \"b\": 2}"
+              }
             },
             "delta": ""
           },
@@ -234,24 +227,11 @@
               ]
             },
             "options": {
-              "toolCalls": [
-                {
-                  "function": {
-                    "name": "divideNumbers",
-                    "arguments": "{\"a\": 16, \"b\": 2}"
-                  },
-                  "id": "HIDDEN",
-                  "type": "function"
-                },
-                {
-                  "function": {
-                    "name": "sumNumbers",
-                    "arguments": "{\"a\": 8, \"b\": 20}"
-                  },
-                  "id": "HIDDEN",
-                  "type": "function"
-                }
-              ]
+              "toolCall": {
+                "name": "divideNumbers",
+                "id": "HIDDEN",
+                "input": "{\"a\": 16, \"b\": 2}"
+              }
             },
             "delta": ""
           },
@@ -281,24 +261,11 @@
               ]
             },
             "options": {
-              "toolCalls": [
-                {
-                  "function": {
-                    "name": "divideNumbers",
-                    "arguments": "{\"a\": 16, \"b\": 2}"
-                  },
-                  "id": "HIDDEN",
-                  "type": "function"
-                },
-                {
-                  "function": {
-                    "name": "sumNumbers",
-                    "arguments": "{\"a\": 8, \"b\": 20}"
-                  },
-                  "id": "HIDDEN",
-                  "type": "function"
-                }
-              ]
+              "toolCall": {
+                "name": "divideNumbers",
+                "id": "HIDDEN",
+                "input": "{\"a\": 16, \"b\": 2}"
+              }
             },
             "delta": ""
           },
@@ -331,24 +298,11 @@
               ]
             },
             "options": {
-              "toolCalls": [
-                {
-                  "function": {
-                    "name": "divideNumbers",
-                    "arguments": "{\"a\": 16, \"b\": 2}"
-                  },
-                  "id": "HIDDEN",
-                  "type": "function"
-                },
-                {
-                  "function": {
-                    "name": "sumNumbers",
-                    "arguments": "{\"a\": 8, \"b\": 20}"
-                  },
-                  "id": "HIDDEN",
-                  "type": "function"
-                }
-              ]
+              "toolCall": {
+                "name": "sumNumbers",
+                "id": "HIDDEN",
+                "input": "{\"a\": 8, \"b\": 20}"
+              }
             },
             "delta": ""
           },
@@ -378,24 +332,11 @@
               ]
             },
             "options": {
-              "toolCalls": [
-                {
-                  "function": {
-                    "name": "divideNumbers",
-                    "arguments": "{\"a\": 16, \"b\": 2}"
-                  },
-                  "id": "HIDDEN",
-                  "type": "function"
-                },
-                {
-                  "function": {
-                    "name": "sumNumbers",
-                    "arguments": "{\"a\": 8, \"b\": 20}"
-                  },
-                  "id": "HIDDEN",
-                  "type": "function"
-                }
-              ]
+              "toolCall": {
+                "name": "sumNumbers",
+                "id": "HIDDEN",
+                "input": "{\"a\": 8, \"b\": 20}"
+              }
             },
             "delta": ""
           },
@@ -425,24 +366,11 @@
               ]
             },
             "options": {
-              "toolCalls": [
-                {
-                  "function": {
-                    "name": "divideNumbers",
-                    "arguments": "{\"a\": 16, \"b\": 2}"
-                  },
-                  "id": "HIDDEN",
-                  "type": "function"
-                },
-                {
-                  "function": {
-                    "name": "sumNumbers",
-                    "arguments": "{\"a\": 8, \"b\": 20}"
-                  },
-                  "id": "HIDDEN",
-                  "type": "function"
-                }
-              ]
+              "toolCall": {
+                "name": "sumNumbers",
+                "id": "HIDDEN",
+                "input": "{\"a\": 8, \"b\": 20}"
+              }
             },
             "delta": ""
           },
@@ -472,24 +400,11 @@
               ]
             },
             "options": {
-              "toolCalls": [
-                {
-                  "function": {
-                    "name": "divideNumbers",
-                    "arguments": "{\"a\": 16, \"b\": 2}"
-                  },
-                  "id": "HIDDEN",
-                  "type": "function"
-                },
-                {
-                  "function": {
-                    "name": "sumNumbers",
-                    "arguments": "{\"a\": 8, \"b\": 20}"
-                  },
-                  "id": "HIDDEN",
-                  "type": "function"
-                }
-              ]
+              "toolCall": {
+                "name": "sumNumbers",
+                "id": "HIDDEN",
+                "input": "{\"a\": 8, \"b\": 20}"
+              }
             },
             "delta": ""
           },
@@ -519,24 +434,11 @@
               ]
             },
             "options": {
-              "toolCalls": [
-                {
-                  "function": {
-                    "name": "divideNumbers",
-                    "arguments": "{\"a\": 16, \"b\": 2}"
-                  },
-                  "id": "HIDDEN",
-                  "type": "function"
-                },
-                {
-                  "function": {
-                    "name": "sumNumbers",
-                    "arguments": "{\"a\": 8, \"b\": 20}"
-                  },
-                  "id": "HIDDEN",
-                  "type": "function"
-                }
-              ]
+              "toolCall": {
+                "name": "sumNumbers",
+                "id": "HIDDEN",
+                "input": "{\"a\": 8, \"b\": 20}"
+              }
             },
             "delta": ""
           }
@@ -545,24 +447,11 @@
           "content": "",
           "role": "assistant",
           "options": {
-            "toolCalls": [
-              {
-                "function": {
-                  "name": "divideNumbers",
-                  "arguments": "{\"a\": 16, \"b\": 2}"
-                },
-                "id": "HIDDEN",
-                "type": "function"
-              },
-              {
-                "function": {
-                  "name": "sumNumbers",
-                  "arguments": "{\"a\": 8, \"b\": 20}"
-                },
-                "id": "HIDDEN",
-                "type": "function"
-              }
-            ]
+            "toolCall": {
+              "name": "sumNumbers",
+              "id": "HIDDEN",
+              "input": "{\"a\": 8, \"b\": 20}"
+            }
           }
         }
       }
@@ -582,15 +471,33 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": "The"
+                    "role": "assistant",
+                    "content": null,
+                    "tool_calls": [
+                      {
+                        "index": 0,
+                        "id": "HIDDEN",
+                        "type": "function",
+                        "function": {
+                          "name": "sumNumbers",
+                          "arguments": ""
+                        }
+                      }
+                    ]
                   },
                   "logprobs": null,
                   "finish_reason": null
                 }
               ]
             },
-            "options": {},
-            "delta": "The"
+            "options": {
+              "toolCall": {
+                "name": "sumNumbers",
+                "id": "HIDDEN",
+                "input": "{\"a\":8,\"b\":20}"
+              }
+            },
+            "delta": ""
           },
           {
             "raw": {
@@ -603,15 +510,28 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": " result"
+                    "tool_calls": [
+                      {
+                        "index": 0,
+                        "function": {
+                          "arguments": "{\""
+                        }
+                      }
+                    ]
                   },
                   "logprobs": null,
                   "finish_reason": null
                 }
               ]
             },
-            "options": {},
-            "delta": " result"
+            "options": {
+              "toolCall": {
+                "name": "sumNumbers",
+                "id": "HIDDEN",
+                "input": "{\"a\":8,\"b\":20}"
+              }
+            },
+            "delta": ""
           },
           {
             "raw": {
@@ -624,15 +544,28 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": " of"
+                    "tool_calls": [
+                      {
+                        "index": 0,
+                        "function": {
+                          "arguments": "a"
+                        }
+                      }
+                    ]
                   },
                   "logprobs": null,
                   "finish_reason": null
                 }
               ]
             },
-            "options": {},
-            "delta": " of"
+            "options": {
+              "toolCall": {
+                "name": "sumNumbers",
+                "id": "HIDDEN",
+                "input": "{\"a\":8,\"b\":20}"
+              }
+            },
+            "delta": ""
           },
           {
             "raw": {
@@ -645,15 +578,28 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": " dividing"
+                    "tool_calls": [
+                      {
+                        "index": 0,
+                        "function": {
+                          "arguments": "\":"
+                        }
+                      }
+                    ]
                   },
                   "logprobs": null,
                   "finish_reason": null
                 }
               ]
             },
-            "options": {},
-            "delta": " dividing"
+            "options": {
+              "toolCall": {
+                "name": "sumNumbers",
+                "id": "HIDDEN",
+                "input": "{\"a\":8,\"b\":20}"
+              }
+            },
+            "delta": ""
           },
           {
             "raw": {
@@ -666,15 +612,28 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": " "
+                    "tool_calls": [
+                      {
+                        "index": 0,
+                        "function": {
+                          "arguments": "8"
+                        }
+                      }
+                    ]
                   },
                   "logprobs": null,
                   "finish_reason": null
                 }
               ]
             },
-            "options": {},
-            "delta": " "
+            "options": {
+              "toolCall": {
+                "name": "sumNumbers",
+                "id": "HIDDEN",
+                "input": "{\"a\":8,\"b\":20}"
+              }
+            },
+            "delta": ""
           },
           {
             "raw": {
@@ -687,15 +646,28 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": "16"
+                    "tool_calls": [
+                      {
+                        "index": 0,
+                        "function": {
+                          "arguments": ",\""
+                        }
+                      }
+                    ]
                   },
                   "logprobs": null,
                   "finish_reason": null
                 }
               ]
             },
-            "options": {},
-            "delta": "16"
+            "options": {
+              "toolCall": {
+                "name": "sumNumbers",
+                "id": "HIDDEN",
+                "input": "{\"a\":8,\"b\":20}"
+              }
+            },
+            "delta": ""
           },
           {
             "raw": {
@@ -708,15 +680,28 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": " by"
+                    "tool_calls": [
+                      {
+                        "index": 0,
+                        "function": {
+                          "arguments": "b"
+                        }
+                      }
+                    ]
                   },
                   "logprobs": null,
                   "finish_reason": null
                 }
               ]
             },
-            "options": {},
-            "delta": " by"
+            "options": {
+              "toolCall": {
+                "name": "sumNumbers",
+                "id": "HIDDEN",
+                "input": "{\"a\":8,\"b\":20}"
+              }
+            },
+            "delta": ""
           },
           {
             "raw": {
@@ -729,15 +714,28 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": " "
+                    "tool_calls": [
+                      {
+                        "index": 0,
+                        "function": {
+                          "arguments": "\":"
+                        }
+                      }
+                    ]
                   },
                   "logprobs": null,
                   "finish_reason": null
                 }
               ]
             },
-            "options": {},
-            "delta": " "
+            "options": {
+              "toolCall": {
+                "name": "sumNumbers",
+                "id": "HIDDEN",
+                "input": "{\"a\":8,\"b\":20}"
+              }
+            },
+            "delta": ""
           },
           {
             "raw": {
@@ -750,15 +748,28 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": "2"
+                    "tool_calls": [
+                      {
+                        "index": 0,
+                        "function": {
+                          "arguments": "20"
+                        }
+                      }
+                    ]
                   },
                   "logprobs": null,
                   "finish_reason": null
                 }
               ]
             },
-            "options": {},
-            "delta": "2"
+            "options": {
+              "toolCall": {
+                "name": "sumNumbers",
+                "id": "HIDDEN",
+                "input": "{\"a\":8,\"b\":20}"
+              }
+            },
+            "delta": ""
           },
           {
             "raw": {
@@ -771,16 +782,47 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": " is"
+                    "tool_calls": [
+                      {
+                        "index": 0,
+                        "function": {
+                          "arguments": "}"
+                        }
+                      }
+                    ]
                   },
                   "logprobs": null,
                   "finish_reason": null
                 }
               ]
             },
-            "options": {},
-            "delta": " is"
-          },
+            "options": {
+              "toolCall": {
+                "name": "sumNumbers",
+                "id": "HIDDEN",
+                "input": "{\"a\":8,\"b\":20}"
+              }
+            },
+            "delta": ""
+          }
+        ],
+        "message": {
+          "content": "",
+          "role": "assistant",
+          "options": {
+            "toolCall": {
+              "name": "sumNumbers",
+              "id": "HIDDEN",
+              "input": "{\"a\":8,\"b\":20}"
+            }
+          }
+        }
+      }
+    },
+    {
+      "id": "PRESERVE_2",
+      "response": {
+        "raw": [
           {
             "raw": {
               "id": "HIDDEN",
@@ -792,7 +834,7 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": " "
+                    "content": "The"
                   },
                   "logprobs": null,
                   "finish_reason": null
@@ -800,7 +842,7 @@
               ]
             },
             "options": {},
-            "delta": " "
+            "delta": "The"
           },
           {
             "raw": {
@@ -813,7 +855,7 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": "8"
+                    "content": " result"
                   },
                   "logprobs": null,
                   "finish_reason": null
@@ -821,7 +863,7 @@
               ]
             },
             "options": {},
-            "delta": "8"
+            "delta": " result"
           },
           {
             "raw": {
@@ -834,7 +876,7 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": ","
+                    "content": " of"
                   },
                   "logprobs": null,
                   "finish_reason": null
@@ -842,7 +884,7 @@
               ]
             },
             "options": {},
-            "delta": ","
+            "delta": " of"
           },
           {
             "raw": {
@@ -855,7 +897,7 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": " and"
+                    "content": " dividing"
                   },
                   "logprobs": null,
                   "finish_reason": null
@@ -863,7 +905,7 @@
               ]
             },
             "options": {},
-            "delta": " and"
+            "delta": " dividing"
           },
           {
             "raw": {
@@ -876,7 +918,7 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": " when"
+                    "content": " "
                   },
                   "logprobs": null,
                   "finish_reason": null
@@ -884,7 +926,7 @@
               ]
             },
             "options": {},
-            "delta": " when"
+            "delta": " "
           },
           {
             "raw": {
@@ -897,7 +939,7 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": " you"
+                    "content": "16"
                   },
                   "logprobs": null,
                   "finish_reason": null
@@ -905,7 +947,7 @@
               ]
             },
             "options": {},
-            "delta": " you"
+            "delta": "16"
           },
           {
             "raw": {
@@ -918,7 +960,7 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": " add"
+                    "content": " by"
                   },
                   "logprobs": null,
                   "finish_reason": null
@@ -926,7 +968,7 @@
               ]
             },
             "options": {},
-            "delta": " add"
+            "delta": " by"
           },
           {
             "raw": {
@@ -960,7 +1002,7 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": "20"
+                    "content": "2"
                   },
                   "logprobs": null,
                   "finish_reason": null
@@ -968,7 +1010,7 @@
               ]
             },
             "options": {},
-            "delta": "20"
+            "delta": "2"
           },
           {
             "raw": {
@@ -981,7 +1023,7 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": " to"
+                    "content": " and"
                   },
                   "logprobs": null,
                   "finish_reason": null
@@ -989,7 +1031,7 @@
               ]
             },
             "options": {},
-            "delta": " to"
+            "delta": " and"
           },
           {
             "raw": {
@@ -1002,7 +1044,7 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": " "
+                    "content": " then"
                   },
                   "logprobs": null,
                   "finish_reason": null
@@ -1010,7 +1052,7 @@
               ]
             },
             "options": {},
-            "delta": " "
+            "delta": " then"
           },
           {
             "raw": {
@@ -1023,7 +1065,7 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": "8"
+                    "content": " adding"
                   },
                   "logprobs": null,
                   "finish_reason": null
@@ -1031,7 +1073,7 @@
               ]
             },
             "options": {},
-            "delta": "8"
+            "delta": " adding"
           },
           {
             "raw": {
@@ -1044,7 +1086,7 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": ","
+                    "content": " "
                   },
                   "logprobs": null,
                   "finish_reason": null
@@ -1052,7 +1094,7 @@
               ]
             },
             "options": {},
-            "delta": ","
+            "delta": " "
           },
           {
             "raw": {
@@ -1065,7 +1107,7 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": " you"
+                    "content": "20"
                   },
                   "logprobs": null,
                   "finish_reason": null
@@ -1073,7 +1115,7 @@
               ]
             },
             "options": {},
-            "delta": " you"
+            "delta": "20"
           },
           {
             "raw": {
@@ -1086,7 +1128,7 @@
                 {
                   "index": 0,
                   "delta": {
-                    "content": " get"
+                    "content": " is"
                   },
                   "logprobs": null,
                   "finish_reason": null
@@ -1094,7 +1136,7 @@
               ]
             },
             "options": {},
-            "delta": " get"
+            "delta": " is"
           },
           {
             "raw": {
@@ -1161,7 +1203,7 @@
           }
         ],
         "message": {
-          "content": "The result of dividing 16 by 2 is 8, and when you add 20 to 8, you get 28.",
+          "content": "The result of dividing 16 by 2 and then adding 20 is 28.",
           "role": "assistant",
           "options": {}
         }
@@ -1200,24 +1242,11 @@
           ]
         },
         "options": {
-          "toolCalls": [
-            {
-              "function": {
-                "name": "divideNumbers",
-                "arguments": "{\"a\": 16, \"b\": 2}"
-              },
-              "id": "HIDDEN",
-              "type": "function"
-            },
-            {
-              "function": {
-                "name": "sumNumbers",
-                "arguments": "{\"a\": 8, \"b\": 20}"
-              },
-              "id": "HIDDEN",
-              "type": "function"
-            }
-          ]
+          "toolCall": {
+            "name": "divideNumbers",
+            "id": "HIDDEN",
+            "input": "{\"a\": 16, \"b\": 2}"
+          }
         },
         "delta": ""
       }
@@ -1250,24 +1279,11 @@
           ]
         },
         "options": {
-          "toolCalls": [
-            {
-              "function": {
-                "name": "divideNumbers",
-                "arguments": "{\"a\": 16, \"b\": 2}"
-              },
-              "id": "HIDDEN",
-              "type": "function"
-            },
-            {
-              "function": {
-                "name": "sumNumbers",
-                "arguments": "{\"a\": 8, \"b\": 20}"
-              },
-              "id": "HIDDEN",
-              "type": "function"
-            }
-          ]
+          "toolCall": {
+            "name": "divideNumbers",
+            "id": "HIDDEN",
+            "input": "{\"a\": 16, \"b\": 2}"
+          }
         },
         "delta": ""
       }
@@ -1300,24 +1316,11 @@
           ]
         },
         "options": {
-          "toolCalls": [
-            {
-              "function": {
-                "name": "divideNumbers",
-                "arguments": "{\"a\": 16, \"b\": 2}"
-              },
-              "id": "HIDDEN",
-              "type": "function"
-            },
-            {
-              "function": {
-                "name": "sumNumbers",
-                "arguments": "{\"a\": 8, \"b\": 20}"
-              },
-              "id": "HIDDEN",
-              "type": "function"
-            }
-          ]
+          "toolCall": {
+            "name": "divideNumbers",
+            "id": "HIDDEN",
+            "input": "{\"a\": 16, \"b\": 2}"
+          }
         },
         "delta": ""
       }
@@ -1350,24 +1353,11 @@
           ]
         },
         "options": {
-          "toolCalls": [
-            {
-              "function": {
-                "name": "divideNumbers",
-                "arguments": "{\"a\": 16, \"b\": 2}"
-              },
-              "id": "HIDDEN",
-              "type": "function"
-            },
-            {
-              "function": {
-                "name": "sumNumbers",
-                "arguments": "{\"a\": 8, \"b\": 20}"
-              },
-              "id": "HIDDEN",
-              "type": "function"
-            }
-          ]
+          "toolCall": {
+            "name": "divideNumbers",
+            "id": "HIDDEN",
+            "input": "{\"a\": 16, \"b\": 2}"
+          }
         },
         "delta": ""
       }
@@ -1400,24 +1390,11 @@
           ]
         },
         "options": {
-          "toolCalls": [
-            {
-              "function": {
-                "name": "divideNumbers",
-                "arguments": "{\"a\": 16, \"b\": 2}"
-              },
-              "id": "HIDDEN",
-              "type": "function"
-            },
-            {
-              "function": {
-                "name": "sumNumbers",
-                "arguments": "{\"a\": 8, \"b\": 20}"
-              },
-              "id": "HIDDEN",
-              "type": "function"
-            }
-          ]
+          "toolCall": {
+            "name": "divideNumbers",
+            "id": "HIDDEN",
+            "input": "{\"a\": 16, \"b\": 2}"
+          }
         },
         "delta": ""
       }
@@ -1453,24 +1430,11 @@
           ]
         },
         "options": {
-          "toolCalls": [
-            {
-              "function": {
-                "name": "divideNumbers",
-                "arguments": "{\"a\": 16, \"b\": 2}"
-              },
-              "id": "HIDDEN",
-              "type": "function"
-            },
-            {
-              "function": {
-                "name": "sumNumbers",
-                "arguments": "{\"a\": 8, \"b\": 20}"
-              },
-              "id": "HIDDEN",
-              "type": "function"
-            }
-          ]
+          "toolCall": {
+            "name": "sumNumbers",
+            "id": "HIDDEN",
+            "input": "{\"a\": 8, \"b\": 20}"
+          }
         },
         "delta": ""
       }
@@ -1503,24 +1467,11 @@
           ]
         },
         "options": {
-          "toolCalls": [
-            {
-              "function": {
-                "name": "divideNumbers",
-                "arguments": "{\"a\": 16, \"b\": 2}"
-              },
-              "id": "HIDDEN",
-              "type": "function"
-            },
-            {
-              "function": {
-                "name": "sumNumbers",
-                "arguments": "{\"a\": 8, \"b\": 20}"
-              },
-              "id": "HIDDEN",
-              "type": "function"
-            }
-          ]
+          "toolCall": {
+            "name": "sumNumbers",
+            "id": "HIDDEN",
+            "input": "{\"a\": 8, \"b\": 20}"
+          }
         },
         "delta": ""
       }
@@ -1553,24 +1504,11 @@
           ]
         },
         "options": {
-          "toolCalls": [
-            {
-              "function": {
-                "name": "divideNumbers",
-                "arguments": "{\"a\": 16, \"b\": 2}"
-              },
-              "id": "HIDDEN",
-              "type": "function"
-            },
-            {
-              "function": {
-                "name": "sumNumbers",
-                "arguments": "{\"a\": 8, \"b\": 20}"
-              },
-              "id": "HIDDEN",
-              "type": "function"
-            }
-          ]
+          "toolCall": {
+            "name": "sumNumbers",
+            "id": "HIDDEN",
+            "input": "{\"a\": 8, \"b\": 20}"
+          }
         },
         "delta": ""
       }
@@ -1603,24 +1541,11 @@
           ]
         },
         "options": {
-          "toolCalls": [
-            {
-              "function": {
-                "name": "divideNumbers",
-                "arguments": "{\"a\": 16, \"b\": 2}"
-              },
-              "id": "HIDDEN",
-              "type": "function"
-            },
-            {
-              "function": {
-                "name": "sumNumbers",
-                "arguments": "{\"a\": 8, \"b\": 20}"
-              },
-              "id": "HIDDEN",
-              "type": "function"
-            }
-          ]
+          "toolCall": {
+            "name": "sumNumbers",
+            "id": "HIDDEN",
+            "input": "{\"a\": 8, \"b\": 20}"
+          }
         },
         "delta": ""
       }
@@ -1653,24 +1578,11 @@
           ]
         },
         "options": {
-          "toolCalls": [
-            {
-              "function": {
-                "name": "divideNumbers",
-                "arguments": "{\"a\": 16, \"b\": 2}"
-              },
-              "id": "HIDDEN",
-              "type": "function"
-            },
-            {
-              "function": {
-                "name": "sumNumbers",
-                "arguments": "{\"a\": 8, \"b\": 20}"
-              },
-              "id": "HIDDEN",
-              "type": "function"
-            }
-          ]
+          "toolCall": {
+            "name": "sumNumbers",
+            "id": "HIDDEN",
+            "input": "{\"a\": 8, \"b\": 20}"
+          }
         },
         "delta": ""
       }
@@ -1688,15 +1600,33 @@
             {
               "index": 0,
               "delta": {
-                "content": "The"
+                "role": "assistant",
+                "content": null,
+                "tool_calls": [
+                  {
+                    "index": 0,
+                    "id": "HIDDEN",
+                    "type": "function",
+                    "function": {
+                      "name": "sumNumbers",
+                      "arguments": ""
+                    }
+                  }
+                ]
               },
               "logprobs": null,
               "finish_reason": null
             }
           ]
         },
-        "options": {},
-        "delta": "The"
+        "options": {
+          "toolCall": {
+            "name": "sumNumbers",
+            "id": "HIDDEN",
+            "input": "{\"a\":8,\"b\":20}"
+          }
+        },
+        "delta": ""
       }
     },
     {
@@ -1712,15 +1642,28 @@
             {
               "index": 0,
               "delta": {
-                "content": " result"
+                "tool_calls": [
+                  {
+                    "index": 0,
+                    "function": {
+                      "arguments": "{\""
+                    }
+                  }
+                ]
               },
               "logprobs": null,
               "finish_reason": null
             }
           ]
         },
-        "options": {},
-        "delta": " result"
+        "options": {
+          "toolCall": {
+            "name": "sumNumbers",
+            "id": "HIDDEN",
+            "input": "{\"a\":8,\"b\":20}"
+          }
+        },
+        "delta": ""
       }
     },
     {
@@ -1736,15 +1679,28 @@
             {
               "index": 0,
               "delta": {
-                "content": " of"
+                "tool_calls": [
+                  {
+                    "index": 0,
+                    "function": {
+                      "arguments": "a"
+                    }
+                  }
+                ]
               },
               "logprobs": null,
               "finish_reason": null
             }
           ]
         },
-        "options": {},
-        "delta": " of"
+        "options": {
+          "toolCall": {
+            "name": "sumNumbers",
+            "id": "HIDDEN",
+            "input": "{\"a\":8,\"b\":20}"
+          }
+        },
+        "delta": ""
       }
     },
     {
@@ -1760,15 +1716,28 @@
             {
               "index": 0,
               "delta": {
-                "content": " dividing"
+                "tool_calls": [
+                  {
+                    "index": 0,
+                    "function": {
+                      "arguments": "\":"
+                    }
+                  }
+                ]
               },
               "logprobs": null,
               "finish_reason": null
             }
           ]
         },
-        "options": {},
-        "delta": " dividing"
+        "options": {
+          "toolCall": {
+            "name": "sumNumbers",
+            "id": "HIDDEN",
+            "input": "{\"a\":8,\"b\":20}"
+          }
+        },
+        "delta": ""
       }
     },
     {
@@ -1784,15 +1753,28 @@
             {
               "index": 0,
               "delta": {
-                "content": " "
+                "tool_calls": [
+                  {
+                    "index": 0,
+                    "function": {
+                      "arguments": "8"
+                    }
+                  }
+                ]
               },
               "logprobs": null,
               "finish_reason": null
             }
           ]
         },
-        "options": {},
-        "delta": " "
+        "options": {
+          "toolCall": {
+            "name": "sumNumbers",
+            "id": "HIDDEN",
+            "input": "{\"a\":8,\"b\":20}"
+          }
+        },
+        "delta": ""
       }
     },
     {
@@ -1808,15 +1790,28 @@
             {
               "index": 0,
               "delta": {
-                "content": "16"
+                "tool_calls": [
+                  {
+                    "index": 0,
+                    "function": {
+                      "arguments": ",\""
+                    }
+                  }
+                ]
               },
               "logprobs": null,
               "finish_reason": null
             }
           ]
         },
-        "options": {},
-        "delta": "16"
+        "options": {
+          "toolCall": {
+            "name": "sumNumbers",
+            "id": "HIDDEN",
+            "input": "{\"a\":8,\"b\":20}"
+          }
+        },
+        "delta": ""
       }
     },
     {
@@ -1832,15 +1827,28 @@
             {
               "index": 0,
               "delta": {
-                "content": " by"
+                "tool_calls": [
+                  {
+                    "index": 0,
+                    "function": {
+                      "arguments": "b"
+                    }
+                  }
+                ]
               },
               "logprobs": null,
               "finish_reason": null
             }
           ]
         },
-        "options": {},
-        "delta": " by"
+        "options": {
+          "toolCall": {
+            "name": "sumNumbers",
+            "id": "HIDDEN",
+            "input": "{\"a\":8,\"b\":20}"
+          }
+        },
+        "delta": ""
       }
     },
     {
@@ -1856,15 +1864,28 @@
             {
               "index": 0,
               "delta": {
-                "content": " "
+                "tool_calls": [
+                  {
+                    "index": 0,
+                    "function": {
+                      "arguments": "\":"
+                    }
+                  }
+                ]
               },
               "logprobs": null,
               "finish_reason": null
             }
           ]
         },
-        "options": {},
-        "delta": " "
+        "options": {
+          "toolCall": {
+            "name": "sumNumbers",
+            "id": "HIDDEN",
+            "input": "{\"a\":8,\"b\":20}"
+          }
+        },
+        "delta": ""
       }
     },
     {
@@ -1880,15 +1901,28 @@
             {
               "index": 0,
               "delta": {
-                "content": "2"
+                "tool_calls": [
+                  {
+                    "index": 0,
+                    "function": {
+                      "arguments": "20"
+                    }
+                  }
+                ]
               },
               "logprobs": null,
               "finish_reason": null
             }
           ]
         },
-        "options": {},
-        "delta": "2"
+        "options": {
+          "toolCall": {
+            "name": "sumNumbers",
+            "id": "HIDDEN",
+            "input": "{\"a\":8,\"b\":20}"
+          }
+        },
+        "delta": ""
       }
     },
     {
@@ -1904,19 +1938,32 @@
             {
               "index": 0,
               "delta": {
-                "content": " is"
+                "tool_calls": [
+                  {
+                    "index": 0,
+                    "function": {
+                      "arguments": "}"
+                    }
+                  }
+                ]
               },
               "logprobs": null,
               "finish_reason": null
             }
           ]
         },
-        "options": {},
-        "delta": " is"
+        "options": {
+          "toolCall": {
+            "name": "sumNumbers",
+            "id": "HIDDEN",
+            "input": "{\"a\":8,\"b\":20}"
+          }
+        },
+        "delta": ""
       }
     },
     {
-      "id": "PRESERVE_1",
+      "id": "PRESERVE_2",
       "chunk": {
         "raw": {
           "id": "HIDDEN",
@@ -1928,7 +1975,7 @@
             {
               "index": 0,
               "delta": {
-                "content": " "
+                "content": "The"
               },
               "logprobs": null,
               "finish_reason": null
@@ -1936,11 +1983,11 @@
           ]
         },
         "options": {},
-        "delta": " "
+        "delta": "The"
       }
     },
     {
-      "id": "PRESERVE_1",
+      "id": "PRESERVE_2",
       "chunk": {
         "raw": {
           "id": "HIDDEN",
@@ -1952,7 +1999,7 @@
             {
               "index": 0,
               "delta": {
-                "content": "8"
+                "content": " result"
               },
               "logprobs": null,
               "finish_reason": null
@@ -1960,11 +2007,11 @@
           ]
         },
         "options": {},
-        "delta": "8"
+        "delta": " result"
       }
     },
     {
-      "id": "PRESERVE_1",
+      "id": "PRESERVE_2",
       "chunk": {
         "raw": {
           "id": "HIDDEN",
@@ -1976,7 +2023,7 @@
             {
               "index": 0,
               "delta": {
-                "content": ","
+                "content": " of"
               },
               "logprobs": null,
               "finish_reason": null
@@ -1984,11 +2031,11 @@
           ]
         },
         "options": {},
-        "delta": ","
+        "delta": " of"
       }
     },
     {
-      "id": "PRESERVE_1",
+      "id": "PRESERVE_2",
       "chunk": {
         "raw": {
           "id": "HIDDEN",
@@ -2000,7 +2047,7 @@
             {
               "index": 0,
               "delta": {
-                "content": " and"
+                "content": " dividing"
               },
               "logprobs": null,
               "finish_reason": null
@@ -2008,11 +2055,11 @@
           ]
         },
         "options": {},
-        "delta": " and"
+        "delta": " dividing"
       }
     },
     {
-      "id": "PRESERVE_1",
+      "id": "PRESERVE_2",
       "chunk": {
         "raw": {
           "id": "HIDDEN",
@@ -2024,7 +2071,7 @@
             {
               "index": 0,
               "delta": {
-                "content": " when"
+                "content": " "
               },
               "logprobs": null,
               "finish_reason": null
@@ -2032,11 +2079,11 @@
           ]
         },
         "options": {},
-        "delta": " when"
+        "delta": " "
       }
     },
     {
-      "id": "PRESERVE_1",
+      "id": "PRESERVE_2",
       "chunk": {
         "raw": {
           "id": "HIDDEN",
@@ -2048,7 +2095,7 @@
             {
               "index": 0,
               "delta": {
-                "content": " you"
+                "content": "16"
               },
               "logprobs": null,
               "finish_reason": null
@@ -2056,11 +2103,11 @@
           ]
         },
         "options": {},
-        "delta": " you"
+        "delta": "16"
       }
     },
     {
-      "id": "PRESERVE_1",
+      "id": "PRESERVE_2",
       "chunk": {
         "raw": {
           "id": "HIDDEN",
@@ -2072,7 +2119,7 @@
             {
               "index": 0,
               "delta": {
-                "content": " add"
+                "content": " by"
               },
               "logprobs": null,
               "finish_reason": null
@@ -2080,11 +2127,11 @@
           ]
         },
         "options": {},
-        "delta": " add"
+        "delta": " by"
       }
     },
     {
-      "id": "PRESERVE_1",
+      "id": "PRESERVE_2",
       "chunk": {
         "raw": {
           "id": "HIDDEN",
@@ -2108,7 +2155,7 @@
       }
     },
     {
-      "id": "PRESERVE_1",
+      "id": "PRESERVE_2",
       "chunk": {
         "raw": {
           "id": "HIDDEN",
@@ -2120,7 +2167,7 @@
             {
               "index": 0,
               "delta": {
-                "content": "20"
+                "content": "2"
               },
               "logprobs": null,
               "finish_reason": null
@@ -2128,11 +2175,11 @@
           ]
         },
         "options": {},
-        "delta": "20"
+        "delta": "2"
       }
     },
     {
-      "id": "PRESERVE_1",
+      "id": "PRESERVE_2",
       "chunk": {
         "raw": {
           "id": "HIDDEN",
@@ -2144,7 +2191,7 @@
             {
               "index": 0,
               "delta": {
-                "content": " to"
+                "content": " and"
               },
               "logprobs": null,
               "finish_reason": null
@@ -2152,11 +2199,11 @@
           ]
         },
         "options": {},
-        "delta": " to"
+        "delta": " and"
       }
     },
     {
-      "id": "PRESERVE_1",
+      "id": "PRESERVE_2",
       "chunk": {
         "raw": {
           "id": "HIDDEN",
@@ -2168,7 +2215,7 @@
             {
               "index": 0,
               "delta": {
-                "content": " "
+                "content": " then"
               },
               "logprobs": null,
               "finish_reason": null
@@ -2176,11 +2223,11 @@
           ]
         },
         "options": {},
-        "delta": " "
+        "delta": " then"
       }
     },
     {
-      "id": "PRESERVE_1",
+      "id": "PRESERVE_2",
       "chunk": {
         "raw": {
           "id": "HIDDEN",
@@ -2192,7 +2239,7 @@
             {
               "index": 0,
               "delta": {
-                "content": "8"
+                "content": " adding"
               },
               "logprobs": null,
               "finish_reason": null
@@ -2200,11 +2247,11 @@
           ]
         },
         "options": {},
-        "delta": "8"
+        "delta": " adding"
       }
     },
     {
-      "id": "PRESERVE_1",
+      "id": "PRESERVE_2",
       "chunk": {
         "raw": {
           "id": "HIDDEN",
@@ -2216,7 +2263,7 @@
             {
               "index": 0,
               "delta": {
-                "content": ","
+                "content": " "
               },
               "logprobs": null,
               "finish_reason": null
@@ -2224,11 +2271,11 @@
           ]
         },
         "options": {},
-        "delta": ","
+        "delta": " "
       }
     },
     {
-      "id": "PRESERVE_1",
+      "id": "PRESERVE_2",
       "chunk": {
         "raw": {
           "id": "HIDDEN",
@@ -2240,7 +2287,7 @@
             {
               "index": 0,
               "delta": {
-                "content": " you"
+                "content": "20"
               },
               "logprobs": null,
               "finish_reason": null
@@ -2248,11 +2295,11 @@
           ]
         },
         "options": {},
-        "delta": " you"
+        "delta": "20"
       }
     },
     {
-      "id": "PRESERVE_1",
+      "id": "PRESERVE_2",
       "chunk": {
         "raw": {
           "id": "HIDDEN",
@@ -2264,7 +2311,7 @@
             {
               "index": 0,
               "delta": {
-                "content": " get"
+                "content": " is"
               },
               "logprobs": null,
               "finish_reason": null
@@ -2272,11 +2319,11 @@
           ]
         },
         "options": {},
-        "delta": " get"
+        "delta": " is"
       }
     },
     {
-      "id": "PRESERVE_1",
+      "id": "PRESERVE_2",
       "chunk": {
         "raw": {
           "id": "HIDDEN",
@@ -2300,7 +2347,7 @@
       }
     },
     {
-      "id": "PRESERVE_1",
+      "id": "PRESERVE_2",
       "chunk": {
         "raw": {
           "id": "HIDDEN",
@@ -2324,7 +2371,7 @@
       }
     },
     {
-      "id": "PRESERVE_1",
+      "id": "PRESERVE_2",
       "chunk": {
         "raw": {
           "id": "HIDDEN",
diff --git a/packages/core/e2e/node/snapshot/anthropic-agent.snap b/packages/core/e2e/node/snapshot/anthropic-agent.snap
new file mode 100644
index 0000000000000000000000000000000000000000..8d57bad7e7245a175563b1c066f85ca582ac8890
--- /dev/null
+++ b/packages/core/e2e/node/snapshot/anthropic-agent.snap
@@ -0,0 +1,403 @@
+{
+  "llmEventStart": [
+    {
+      "id": "PRESERVE_0",
+      "messages": [
+        {
+          "role": "user",
+          "content": "What is the weather in San Francisco?",
+          "options": {}
+        }
+      ]
+    },
+    {
+      "id": "PRESERVE_1",
+      "messages": [
+        {
+          "role": "user",
+          "content": "What is the weather in San Francisco?",
+          "options": {}
+        },
+        {
+          "content": [
+            {
+              "type": "text",
+              "text": "<thinking>\nThe user is asking for the weather in a specific location, San Francisco. The Weather function is the relevant tool to answer this request, as it returns weather information for a given location.\n\nThe Weather function has one required parameter:\n- location (string): The user has directly provided the location of \"San Francisco\"\n\nSince the required location parameter has been provided by the user, we have all the necessary information to call the Weather function.\n</thinking>"
+            }
+          ],
+          "role": "assistant",
+          "options": {
+            "toolCall": {
+              "id": "HIDDEN",
+              "name": "Weather",
+              "input": {
+                "location": "San Francisco"
+              }
+            }
+          }
+        },
+        {
+          "content": "35 degrees and sunny in San Francisco",
+          "role": "user",
+          "options": {
+            "toolResult": {
+              "isError": false,
+              "id": "HIDDEN"
+            }
+          }
+        }
+      ]
+    },
+    {
+      "id": "PRESERVE_2",
+      "messages": [
+        {
+          "role": "user",
+          "content": "My name is Alex Yang. What is my unique id?",
+          "options": {}
+        }
+      ]
+    },
+    {
+      "id": "PRESERVE_3",
+      "messages": [
+        {
+          "role": "user",
+          "content": "My name is Alex Yang. What is my unique id?",
+          "options": {}
+        },
+        {
+          "content": [
+            {
+              "type": "text",
+              "text": "<thinking>\nThe unique_id function is the relevant tool to answer the user's request for their unique ID. It requires two parameters:\nfirstName: The user provided their first name, which is \"Alex\"\nlastName: The user also provided their last name, \"Yang\"\nSince the user has provided all the required parameters, we can proceed with calling the unique_id function.\n</thinking>"
+            }
+          ],
+          "role": "assistant",
+          "options": {
+            "toolCall": {
+              "id": "HIDDEN",
+              "name": "unique_id",
+              "input": {
+                "firstName": "Alex",
+                "lastName": "Yang"
+              }
+            }
+          }
+        },
+        {
+          "content": "123456789",
+          "role": "user",
+          "options": {
+            "toolResult": {
+              "isError": false,
+              "id": "HIDDEN"
+            }
+          }
+        }
+      ]
+    },
+    {
+      "id": "PRESERVE_4",
+      "messages": [
+        {
+          "role": "user",
+          "content": "how much is 1 + 1?",
+          "options": {}
+        }
+      ]
+    },
+    {
+      "id": "PRESERVE_5",
+      "messages": [
+        {
+          "role": "user",
+          "content": "how much is 1 + 1?",
+          "options": {}
+        },
+        {
+          "content": [
+            {
+              "type": "text",
+              "text": "<thinking>\nThe user is asking to sum the numbers 1 and 1. The relevant tool to use is the sumNumbers function, which takes two number parameters a and b.\nThe user has directly provided the values for the parameters:\na = 1 \nb = 1\nSince all the required parameters have been provided, we can proceed with calling the function.\n</thinking>"
+            }
+          ],
+          "role": "assistant",
+          "options": {
+            "toolCall": {
+              "id": "HIDDEN",
+              "name": "sumNumbers",
+              "input": {
+                "a": 1,
+                "b": 1
+              }
+            }
+          }
+        },
+        {
+          "content": "2",
+          "role": "user",
+          "options": {
+            "toolResult": {
+              "isError": false,
+              "id": "HIDDEN"
+            }
+          }
+        }
+      ]
+    }
+  ],
+  "llmEventEnd": [
+    {
+      "id": "PRESERVE_0",
+      "response": {
+        "raw": {
+          "id": "HIDDEN",
+          "type": "message",
+          "role": "assistant",
+          "model": "claude-3-opus-20240229",
+          "stop_sequence": null,
+          "usage": {
+            "input_tokens": 462,
+            "output_tokens": 147
+          },
+          "content": [
+            {
+              "type": "text",
+              "text": "<thinking>\nThe user is asking for the weather in a specific location, San Francisco. The Weather function is the relevant tool to answer this request, as it returns weather information for a given location.\n\nThe Weather function has one required parameter:\n- location (string): The user has directly provided the location of \"San Francisco\"\n\nSince the required location parameter has been provided by the user, we have all the necessary information to call the Weather function.\n</thinking>"
+            },
+            {
+              "type": "tool_use",
+              "id": "HIDDEN",
+              "name": "Weather",
+              "input": {
+                "location": "San Francisco"
+              }
+            }
+          ],
+          "stop_reason": "tool_use"
+        },
+        "message": {
+          "content": [
+            {
+              "type": "text",
+              "text": "<thinking>\nThe user is asking for the weather in a specific location, San Francisco. The Weather function is the relevant tool to answer this request, as it returns weather information for a given location.\n\nThe Weather function has one required parameter:\n- location (string): The user has directly provided the location of \"San Francisco\"\n\nSince the required location parameter has been provided by the user, we have all the necessary information to call the Weather function.\n</thinking>"
+            }
+          ],
+          "role": "assistant",
+          "options": {
+            "toolCall": {
+              "id": "HIDDEN",
+              "name": "Weather",
+              "input": {
+                "location": "San Francisco"
+              }
+            }
+          }
+        }
+      }
+    },
+    {
+      "id": "PRESERVE_1",
+      "response": {
+        "raw": {
+          "id": "HIDDEN",
+          "type": "message",
+          "role": "assistant",
+          "model": "claude-3-opus-20240229",
+          "stop_sequence": null,
+          "usage": {
+            "input_tokens": 628,
+            "output_tokens": 18
+          },
+          "content": [
+            {
+              "type": "text",
+              "text": "The current weather in San Francisco is 35 degrees and sunny."
+            }
+          ],
+          "stop_reason": "end_turn"
+        },
+        "message": {
+          "content": [
+            {
+              "type": "text",
+              "text": "The current weather in San Francisco is 35 degrees and sunny."
+            }
+          ],
+          "role": "assistant",
+          "options": {}
+        }
+      }
+    },
+    {
+      "id": "PRESERVE_2",
+      "response": {
+        "raw": {
+          "id": "HIDDEN",
+          "type": "message",
+          "role": "assistant",
+          "model": "claude-3-opus-20240229",
+          "stop_sequence": null,
+          "usage": {
+            "input_tokens": 482,
+            "output_tokens": 152
+          },
+          "content": [
+            {
+              "type": "text",
+              "text": "<thinking>\nThe unique_id function is the relevant tool to answer the user's request for their unique ID. It requires two parameters:\nfirstName: The user provided their first name, which is \"Alex\"\nlastName: The user also provided their last name, \"Yang\"\nSince the user has provided all the required parameters, we can proceed with calling the unique_id function.\n</thinking>"
+            },
+            {
+              "type": "tool_use",
+              "id": "HIDDEN",
+              "name": "unique_id",
+              "input": {
+                "firstName": "Alex",
+                "lastName": "Yang"
+              }
+            }
+          ],
+          "stop_reason": "tool_use"
+        },
+        "message": {
+          "content": [
+            {
+              "type": "text",
+              "text": "<thinking>\nThe unique_id function is the relevant tool to answer the user's request for their unique ID. It requires two parameters:\nfirstName: The user provided their first name, which is \"Alex\"\nlastName: The user also provided their last name, \"Yang\"\nSince the user has provided all the required parameters, we can proceed with calling the unique_id function.\n</thinking>"
+            }
+          ],
+          "role": "assistant",
+          "options": {
+            "toolCall": {
+              "id": "HIDDEN",
+              "name": "unique_id",
+              "input": {
+                "firstName": "Alex",
+                "lastName": "Yang"
+              }
+            }
+          }
+        }
+      }
+    },
+    {
+      "id": "PRESERVE_3",
+      "response": {
+        "raw": {
+          "id": "HIDDEN",
+          "type": "message",
+          "role": "assistant",
+          "model": "claude-3-opus-20240229",
+          "stop_sequence": null,
+          "usage": {
+            "input_tokens": 648,
+            "output_tokens": 13
+          },
+          "content": [
+            {
+              "type": "text",
+              "text": "Your unique ID is 123456789."
+            }
+          ],
+          "stop_reason": "end_turn"
+        },
+        "message": {
+          "content": [
+            {
+              "type": "text",
+              "text": "Your unique ID is 123456789."
+            }
+          ],
+          "role": "assistant",
+          "options": {}
+        }
+      }
+    },
+    {
+      "id": "PRESERVE_4",
+      "response": {
+        "raw": {
+          "id": "HIDDEN",
+          "type": "message",
+          "role": "assistant",
+          "model": "claude-3-opus-20240229",
+          "stop_sequence": null,
+          "usage": {
+            "input_tokens": 498,
+            "output_tokens": 151
+          },
+          "content": [
+            {
+              "type": "text",
+              "text": "<thinking>\nThe user is asking to sum the numbers 1 and 1. The relevant tool to use is the sumNumbers function, which takes two number parameters a and b.\nThe user has directly provided the values for the parameters:\na = 1 \nb = 1\nSince all the required parameters have been provided, we can proceed with calling the function.\n</thinking>"
+            },
+            {
+              "type": "tool_use",
+              "id": "HIDDEN",
+              "name": "sumNumbers",
+              "input": {
+                "a": 1,
+                "b": 1
+              }
+            }
+          ],
+          "stop_reason": "tool_use"
+        },
+        "message": {
+          "content": [
+            {
+              "type": "text",
+              "text": "<thinking>\nThe user is asking to sum the numbers 1 and 1. The relevant tool to use is the sumNumbers function, which takes two number parameters a and b.\nThe user has directly provided the values for the parameters:\na = 1 \nb = 1\nSince all the required parameters have been provided, we can proceed with calling the function.\n</thinking>"
+            }
+          ],
+          "role": "assistant",
+          "options": {
+            "toolCall": {
+              "id": "HIDDEN",
+              "name": "sumNumbers",
+              "input": {
+                "a": 1,
+                "b": 1
+              }
+            }
+          }
+        }
+      }
+    },
+    {
+      "id": "PRESERVE_5",
+      "response": {
+        "raw": {
+          "id": "HIDDEN",
+          "type": "message",
+          "role": "assistant",
+          "model": "claude-3-opus-20240229",
+          "stop_sequence": null,
+          "usage": {
+            "input_tokens": 661,
+            "output_tokens": 16
+          },
+          "content": [
+            {
+              "type": "text",
+              "text": "So 1 + 1 = 2."
+            }
+          ],
+          "stop_reason": "end_turn"
+        },
+        "message": {
+          "content": [
+            {
+              "type": "text",
+              "text": "So 1 + 1 = 2."
+            }
+          ],
+          "role": "assistant",
+          "options": {}
+        }
+      }
+    }
+  ],
+  "llmEventStream": []
+}
\ No newline at end of file
diff --git a/packages/core/e2e/node/snapshot/gpt-4-turbo.snap b/packages/core/e2e/node/snapshot/gpt-4-turbo.snap
index de400bac63ccbd17308f615b0d2ced8309d9c84b..2e5658921dacae3066292c2cb5dc6f5a5248ef60 100644
--- a/packages/core/e2e/node/snapshot/gpt-4-turbo.snap
+++ b/packages/core/e2e/node/snapshot/gpt-4-turbo.snap
@@ -20,24 +20,21 @@
           "content": "",
           "role": "assistant",
           "options": {
-            "toolCalls": [
-              {
-                "id": "HIDDEN",
-                "type": "function",
-                "function": {
-                  "name": "Weather",
-                  "arguments": "{\"location\":\"San Jose\"}"
-                }
-              }
-            ]
+            "toolCall": {
+              "id": "HIDDEN",
+              "name": "Weather",
+              "input": "{\"location\":\"San Jose\"}"
+            }
           }
         },
         {
           "content": "45 degrees and sunny in San Jose",
-          "role": "tool",
+          "role": "user",
           "options": {
-            "name": "Weather",
-            "tool_call_id": "HIDDEN"
+            "toolResult": {
+              "id": "HIDDEN",
+              "isError": false
+            }
           }
         }
       ]
@@ -84,16 +81,11 @@
           "content": "",
           "role": "assistant",
           "options": {
-            "toolCalls": [
-              {
-                "id": "HIDDEN",
-                "type": "function",
-                "function": {
-                  "name": "Weather",
-                  "arguments": "{\"location\":\"San Jose\"}"
-                }
-              }
-            ]
+            "toolCall": {
+              "id": "HIDDEN",
+              "name": "Weather",
+              "input": "{\"location\":\"San Jose\"}"
+            }
           }
         }
       }
diff --git a/packages/core/e2e/node/snapshot/llm-anthropic.snap b/packages/core/e2e/node/snapshot/llm-anthropic.snap
index 6b18dccbdbcf7e232336009ab847b67893fa0c12..9c489206307416b8049245ef24867f3042121850 100644
--- a/packages/core/e2e/node/snapshot/llm-anthropic.snap
+++ b/packages/core/e2e/node/snapshot/llm-anthropic.snap
@@ -5,7 +5,8 @@
       "messages": [
         {
           "content": "Hello",
-          "role": "user"
+          "role": "user",
+          "options": {}
         }
       ]
     },
@@ -43,7 +44,8 @@
         },
         "message": {
           "content": "Hello! How can I assist you today?",
-          "role": "assistant"
+          "role": "assistant",
+          "options": {}
         }
       }
     },
@@ -60,7 +62,8 @@
                 "text": "Hello"
               }
             },
-            "delta": "Hello"
+            "delta": "Hello",
+            "options": {}
           },
           {
             "raw": {
@@ -71,7 +74,8 @@
                 "text": "!"
               }
             },
-            "delta": "!"
+            "delta": "!",
+            "options": {}
           },
           {
             "raw": {
@@ -82,7 +86,8 @@
                 "text": " How"
               }
             },
-            "delta": " How"
+            "delta": " How",
+            "options": {}
           },
           {
             "raw": {
@@ -93,7 +98,8 @@
                 "text": " can"
               }
             },
-            "delta": " can"
+            "delta": " can",
+            "options": {}
           },
           {
             "raw": {
@@ -104,7 +110,8 @@
                 "text": " I"
               }
             },
-            "delta": " I"
+            "delta": " I",
+            "options": {}
           },
           {
             "raw": {
@@ -115,7 +122,8 @@
                 "text": " assist"
               }
             },
-            "delta": " assist"
+            "delta": " assist",
+            "options": {}
           },
           {
             "raw": {
@@ -126,7 +134,8 @@
                 "text": " you"
               }
             },
-            "delta": " you"
+            "delta": " you",
+            "options": {}
           },
           {
             "raw": {
@@ -137,7 +146,8 @@
                 "text": " today"
               }
             },
-            "delta": " today"
+            "delta": " today",
+            "options": {}
           },
           {
             "raw": {
@@ -148,7 +158,8 @@
                 "text": "?"
               }
             },
-            "delta": "?"
+            "delta": "?",
+            "options": {}
           }
         ],
         "message": {
@@ -171,7 +182,8 @@
             "text": "Hello"
           }
         },
-        "delta": "Hello"
+        "delta": "Hello",
+        "options": {}
       }
     },
     {
@@ -185,7 +197,8 @@
             "text": "!"
           }
         },
-        "delta": "!"
+        "delta": "!",
+        "options": {}
       }
     },
     {
@@ -199,7 +212,8 @@
             "text": " How"
           }
         },
-        "delta": " How"
+        "delta": " How",
+        "options": {}
       }
     },
     {
@@ -213,7 +227,8 @@
             "text": " can"
           }
         },
-        "delta": " can"
+        "delta": " can",
+        "options": {}
       }
     },
     {
@@ -227,7 +242,8 @@
             "text": " I"
           }
         },
-        "delta": " I"
+        "delta": " I",
+        "options": {}
       }
     },
     {
@@ -241,7 +257,8 @@
             "text": " assist"
           }
         },
-        "delta": " assist"
+        "delta": " assist",
+        "options": {}
       }
     },
     {
@@ -255,7 +272,8 @@
             "text": " you"
           }
         },
-        "delta": " you"
+        "delta": " you",
+        "options": {}
       }
     },
     {
@@ -269,7 +287,8 @@
             "text": " today"
           }
         },
-        "delta": " today"
+        "delta": " today",
+        "options": {}
       }
     },
     {
@@ -283,7 +302,8 @@
             "text": "?"
           }
         },
-        "delta": "?"
+        "delta": "?",
+        "options": {}
       }
     }
   ]
diff --git a/packages/core/src/ChatHistory.ts b/packages/core/src/ChatHistory.ts
index 1b2a04a957aa01e87d2d549a875980514b4d650c..470ad65079615e735eb2cb7ef01ec3e03557dfbe 100644
--- a/packages/core/src/ChatHistory.ts
+++ b/packages/core/src/ChatHistory.ts
@@ -8,20 +8,22 @@ import { extractText } from "./llm/utils.js";
 /**
  * A ChatHistory is used to keep the state of back and forth chat messages
  */
-export abstract class ChatHistory {
-  abstract get messages(): ChatMessage[];
+export abstract class ChatHistory<
+  AdditionalMessageOptions extends object = object,
+> {
+  abstract get messages(): ChatMessage<AdditionalMessageOptions>[];
   /**
    * Adds a message to the chat history.
    * @param message
    */
-  abstract addMessage(message: ChatMessage): void;
+  abstract addMessage(message: ChatMessage<AdditionalMessageOptions>): void;
 
   /**
    * Returns the messages that should be used as input to the LLM.
    */
   abstract requestMessages(
-    transientMessages?: ChatMessage[],
-  ): Promise<ChatMessage[]>;
+    transientMessages?: ChatMessage<AdditionalMessageOptions>[],
+  ): Promise<ChatMessage<AdditionalMessageOptions>[]>;
 
   /**
    * Resets the chat history so that it's empty.
@@ -31,7 +33,7 @@ export abstract class ChatHistory {
   /**
    * Returns the new messages since the last call to this function (or since calling the constructor)
    */
-  abstract newMessages(): ChatMessage[];
+  abstract newMessages(): ChatMessage<AdditionalMessageOptions>[];
 }
 
 export class SimpleChatHistory extends ChatHistory {
@@ -108,6 +110,7 @@ export class SummaryChatHistory extends ChatHistory {
             context: messagesToHistoryStr(messagesToSummarize),
           }),
           role: "user" as MessageType,
+          options: {},
         },
       ];
       // remove oldest message until the chat history is short enough for the context window
@@ -116,7 +119,9 @@ export class SummaryChatHistory extends ChatHistory {
       this.tokenizer(promptMessages[0].content).length > this.tokensToSummarize
     );
 
-    const response = await this.llm.chat({ messages: promptMessages });
+    const response = await this.llm.chat({
+      messages: promptMessages,
+    });
     return { content: response.message.content, role: "memory" };
   }
 
diff --git a/packages/core/src/agent/anthropic.ts b/packages/core/src/agent/anthropic.ts
new file mode 100644
index 0000000000000000000000000000000000000000..3934834923ad4b37ccdc0826fff9487739936490
--- /dev/null
+++ b/packages/core/src/agent/anthropic.ts
@@ -0,0 +1,170 @@
+import { Settings } from "../Settings.js";
+import {
+  AgentChatResponse,
+  type ChatEngineParamsNonStreaming,
+} from "../engines/chat/index.js";
+import { wrapEventCaller } from "../internal/context/EventCaller.js";
+import { getCallbackManager } from "../internal/settings/CallbackManager.js";
+import { prettifyError } from "../internal/utils.js";
+import { Anthropic } from "../llm/anthropic.js";
+import type {
+  ChatMessage,
+  ChatResponse,
+  ToolCallLLMMessageOptions,
+} from "../llm/index.js";
+import { extractText } from "../llm/utils.js";
+import { ObjectRetriever } from "../objects/index.js";
+import type { BaseToolWithCall } from "../types.js";
+
+const MAX_TOOL_CALLS = 10;
+
+type AnthropicParamsBase = {
+  llm?: Anthropic;
+  chatHistory?: ChatMessage<ToolCallLLMMessageOptions>[];
+};
+
+type AnthropicParamsWithTools = AnthropicParamsBase & {
+  tools: BaseToolWithCall[];
+};
+
+type AnthropicParamsWithToolRetriever = AnthropicParamsBase & {
+  toolRetriever: ObjectRetriever<BaseToolWithCall>;
+};
+
+export type AnthropicAgentParams =
+  | AnthropicParamsWithTools
+  | AnthropicParamsWithToolRetriever;
+
+type AgentContext = {
+  toolCalls: number;
+  llm: Anthropic;
+  tools: BaseToolWithCall[];
+  messages: ChatMessage<ToolCallLLMMessageOptions>[];
+  shouldContinue: (context: AgentContext) => boolean;
+};
+
+type TaskResult = {
+  response: ChatResponse<ToolCallLLMMessageOptions>;
+  chatHistory: ChatMessage<ToolCallLLMMessageOptions>[];
+};
+
+async function task(
+  context: AgentContext,
+  input: ChatMessage<ToolCallLLMMessageOptions>,
+): Promise<TaskResult> {
+  const { llm, tools, messages } = context;
+  const nextMessages: ChatMessage<ToolCallLLMMessageOptions>[] = [
+    ...messages,
+    input,
+  ];
+  const response = await llm.chat({
+    stream: false,
+    tools,
+    messages: nextMessages,
+  });
+  const options = response.message.options ?? {};
+  if ("toolCall" in options) {
+    const { toolCall } = options;
+    const { input, name, id } = toolCall;
+    const targetTool = tools.find((tool) => tool.metadata.name === name);
+    let output: string;
+    let isError = true;
+    if (!context.shouldContinue(context)) {
+      output = "Error: Tool call limit reached";
+    } else if (!targetTool) {
+      output = `Error: Tool ${name} not found`;
+    } else {
+      try {
+        getCallbackManager().dispatchEvent("llm-tool-call", {
+          payload: {
+            toolCall: {
+              name,
+              input,
+            },
+          },
+        });
+        output = await targetTool.call(input);
+        isError = false;
+      } catch (error: unknown) {
+        output = prettifyError(error);
+      }
+    }
+    return task(
+      {
+        ...context,
+        toolCalls: context.toolCalls + 1,
+        messages: [...nextMessages, response.message],
+      },
+      {
+        content: output,
+        role: "user",
+        options: {
+          toolResult: {
+            isError,
+            id,
+          },
+        },
+      },
+    );
+  } else {
+    return { response, chatHistory: [...nextMessages, response.message] };
+  }
+}
+
+export class AnthropicAgent {
+  readonly #llm: Anthropic;
+  readonly #tools:
+    | BaseToolWithCall[]
+    | ((query: string) => Promise<BaseToolWithCall[]>) = [];
+  #chatHistory: ChatMessage<ToolCallLLMMessageOptions>[] = [];
+
+  constructor(params: AnthropicAgentParams) {
+    this.#llm =
+      params.llm ??
+      (Settings.llm instanceof Anthropic
+        ? (Settings.llm as Anthropic) : new Anthropic());
+    if ("tools" in params) {
+      this.#tools = params.tools;
+    } else if ("toolRetriever" in params) {
+      this.#tools = params.toolRetriever.retrieve.bind(params.toolRetriever);
+    }
+    if (Array.isArray(params.chatHistory)) {
+      this.#chatHistory = params.chatHistory;
+    }
+  }
+
+  static shouldContinue(context: AgentContext): boolean {
+    return context.toolCalls < MAX_TOOL_CALLS;
+  }
+
+  public reset(): void {
+    this.#chatHistory = [];
+  }
+
+  getTools(query: string): Promise<BaseToolWithCall[]> | BaseToolWithCall[] {
+    return typeof this.#tools === "function" ? this.#tools(query) : this.#tools;
+  }
+
+  @wrapEventCaller
+  async chat(
+    params: ChatEngineParamsNonStreaming,
+  ): Promise<AgentChatResponse> {
+    const { chatHistory, response } = await task(
+      {
+        llm: this.#llm,
+        tools: await this.getTools(extractText(params.message)),
+        toolCalls: 0,
+        messages: [...this.#chatHistory],
+        // do we need this?
+        shouldContinue: AnthropicAgent.shouldContinue,
+      },
+      {
+        role: "user",
+        content: params.message,
+        options: {},
+      },
+    );
+    this.#chatHistory = [...chatHistory];
+    return new AgentChatResponse(extractText(response.message.content));
+  }
+}
diff --git a/packages/core/src/agent/index.ts b/packages/core/src/agent/index.ts
index bbf0a4481c4ae9aff71e0eafec80f15a0b40cb11..652403e03a9f1af0a2d79d758167bfd8af7430f0 100644
--- a/packages/core/src/agent/index.ts
+++ b/packages/core/src/agent/index.ts
@@ -1,3 +1,5 @@
+// Not exporting the AnthropicAgent because it is not ready to ship yet.
+// export { AnthropicAgent, type AnthropicAgentParams } from "./anthropic.js";
 export * from "./openai/base.js";
 export * from "./openai/worker.js";
 export * from "./react/base.js";
diff --git a/packages/core/src/agent/openai/base.ts b/packages/core/src/agent/openai/base.ts
index eb89d231715248ef6e6143a97ccbd47f78b8603f..6a9af20c7b56fba332af52044ed8d58fa9552259 100644
--- a/packages/core/src/agent/openai/base.ts
+++ b/packages/core/src/agent/openai/base.ts
@@ -14,7 +14,7 @@ type OpenAIAgentParams = {
   prefixMessages?: ChatMessage[];
   maxFunctionCalls?: number;
   defaultToolChoice?: string;
-  toolRetriever?: ObjectRetriever;
+  toolRetriever?: ObjectRetriever<BaseTool>;
   systemPrompt?: string;
 };
 
@@ -56,7 +56,7 @@ export class OpenAIAgent extends AgentRunner {
       ];
     }
 
-    if (!llm?.metadata.isFunctionCallingModel) {
+    if (!llm?.supportToolCall) {
       throw new Error("LLM model must be a function-calling model");
     }
 
@@ -73,6 +73,7 @@ export class OpenAIAgent extends AgentRunner {
       llm,
       memory,
       defaultToolChoice,
+      // @ts-expect-error 2322
       chatHistory: prefixMessages,
     });
   }
diff --git a/packages/core/src/agent/openai/worker.ts b/packages/core/src/agent/openai/worker.ts
index d7939cd136dd9b84114c6029fd49ebbe5b44ce98..38e4d72ed5f011b28cea9a21fe49fa521c277f5f 100644
--- a/packages/core/src/agent/openai/worker.ts
+++ b/packages/core/src/agent/openai/worker.ts
@@ -14,7 +14,8 @@ import {
   type ChatResponseChunk,
   type LLMChatParamsBase,
   type OpenAIAdditionalChatOptions,
-  type OpenAIAdditionalMessageOptions,
+  type ToolCallLLMMessageOptions,
+  type ToolCallOptions,
 } from "../../llm/index.js";
 import { extractText } from "../../llm/utils.js";
 import { ChatMemoryBuffer } from "../../memory/ChatMemoryBuffer.js";
@@ -25,28 +26,25 @@ import type { BaseTool } from "../../types.js";
 import type { AgentWorker, Task } from "../types.js";
 import { TaskStep, TaskStepOutput } from "../types.js";
 import { addUserStepToMemory, getFunctionByName } from "../utils.js";
-import type { OpenAIToolCall } from "./types/chat.js";
 
 async function callFunction(
   tools: BaseTool[],
-  toolCall: OpenAIToolCall,
-): Promise<[ChatMessage, ToolOutput]> {
-  const id_ = toolCall.id;
-  const functionCall = toolCall.function;
-  const name = toolCall.function.name;
-  const argumentsStr = toolCall.function.arguments;
+  toolCall: ToolCallOptions["toolCall"],
+): Promise<[ChatMessage<ToolCallLLMMessageOptions>, ToolOutput]> {
+  const id = toolCall.id;
+  const name = toolCall.name;
+  const input = toolCall.input;
 
   if (Settings.debug) {
     console.log("=== Calling Function ===");
-    console.log(`Calling function: ${name} with args: ${argumentsStr}`);
+    console.log(`Calling function: ${name} with args: ${input}`);
   }
 
   const tool = getFunctionByName(tools, name);
-  const argumentDict = JSON.parse(argumentsStr);
 
   // Call tool
   // Use default error message
-  const output = await callToolWithErrorHandling(tool, argumentDict);
+  const output = await callToolWithErrorHandling(tool, input);
 
   if (Settings.debug) {
     console.log(`Got output ${output}`);
@@ -56,10 +54,12 @@ async function callFunction(
   return [
     {
       content: `${output}`,
-      role: "tool",
+      role: "user",
       options: {
-        name,
-        tool_call_id: id_,
+        toolResult: {
+          id,
+          isError: false,
+        },
       },
     },
     output,
@@ -71,7 +71,7 @@ type OpenAIAgentWorkerParams = {
   llm?: OpenAI;
   prefixMessages?: ChatMessage[];
   maxFunctionCalls?: number;
-  toolRetriever?: ObjectRetriever;
+  toolRetriever?: ObjectRetriever<BaseTool>;
 };
 
 type CallFunctionOutput = {
@@ -120,7 +120,7 @@ export class OpenAIAgentWorker
     }
   }
 
-  public getAllMessages(task: Task): ChatMessage[] {
+  public getAllMessages(task: Task): ChatMessage<ToolCallLLMMessageOptions>[] {
     return [
       ...this.prefixMessages,
       ...task.memory.get(),
@@ -128,30 +128,33 @@ export class OpenAIAgentWorker
     ];
   }
 
-  public getLatestToolCalls(task: Task): OpenAIToolCall[] | null {
+  public getLatestToolCall(task: Task): ToolCallOptions["toolCall"] | null {
     const chatHistory: ChatMessage[] = task.extraState.newMemory.getAll();
 
     if (chatHistory.length === 0) {
       return null;
     }
 
-    // fixme
-    return chatHistory[chatHistory.length - 1].options?.toolCalls as any;
+    // @ts-expect-error fixme
+    return chatHistory[chatHistory.length - 1].options?.toolCall;
   }
 
   private _getLlmChatParams(
     task: Task,
-    openaiTools: BaseTool[],
+    tools: BaseTool[],
     toolChoice: ChatCompletionToolChoiceOption = "auto",
-  ): LLMChatParamsBase<OpenAIAdditionalChatOptions> {
+  ): LLMChatParamsBase<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions> {
     const llmChatParams = {
       messages: this.getAllMessages(task),
       tools: undefined as BaseTool[] | undefined,
       additionalChatOptions: {} as OpenAIAdditionalChatOptions,
-    } satisfies LLMChatParamsBase<OpenAIAdditionalChatOptions>;
+    } satisfies LLMChatParamsBase<
+      OpenAIAdditionalChatOptions,
+      ToolCallLLMMessageOptions
+    >;
 
-    if (openaiTools.length > 0) {
-      llmChatParams.tools = openaiTools;
+    if (tools.length > 0) {
+      llmChatParams.tools = tools;
       llmChatParams.additionalChatOptions.tool_choice = toolChoice;
     }
 
@@ -172,7 +175,10 @@ export class OpenAIAgentWorker
 
   private async _getStreamAiResponse(
     task: Task,
-    llmChatParams: LLMChatParamsBase<OpenAIAdditionalChatOptions>,
+    llmChatParams: LLMChatParamsBase<
+      OpenAIAdditionalChatOptions,
+      ToolCallLLMMessageOptions
+    >,
   ): Promise<StreamingAgentChatResponse | AgentChatResponse> {
     const stream = await this.llm.chat({
       stream: true,
@@ -180,7 +186,7 @@ export class OpenAIAgentWorker
     });
 
     const responseChunkStream = new ReadableStream<
-      ChatResponseChunk<OpenAIAdditionalMessageOptions>
+      ChatResponseChunk<ToolCallLLMMessageOptions>
     >({
       async start(controller) {
         for await (const chunk of stream) {
@@ -198,11 +204,11 @@ export class OpenAIAgentWorker
     }
     // check if first chunk has tool calls, if so, this is a function call
     // otherwise, it's a regular message
-    const hasToolCalls: boolean =
-      !!value.options?.toolCalls?.length &&
-      value.options?.toolCalls?.length > 0;
+    const hasToolCall: boolean = !!(
+      value.options && "toolCall" in value.options
+    );
 
-    if (hasToolCalls) {
+    if (hasToolCall) {
       return this._processMessage(task, {
         content: await pipeline(finalStream, async (iterator) => {
           let content = "";
@@ -247,7 +253,10 @@ export class OpenAIAgentWorker
   private async _getAgentResponse(
     task: Task,
     mode: ChatResponseMode,
-    llmChatParams: LLMChatParamsBase<OpenAIAdditionalChatOptions>,
+    llmChatParams: LLMChatParamsBase<
+      OpenAIAdditionalChatOptions,
+      ToolCallLLMMessageOptions
+    >,
   ): Promise<AgentChatResponse | StreamingAgentChatResponse> {
     if (mode === ChatResponseMode.WAIT) {
       const chatResponse = await this.llm.chat({
@@ -268,14 +277,8 @@ export class OpenAIAgentWorker
 
   async callFunction(
     tools: BaseTool[],
-    toolCall: OpenAIToolCall,
+    toolCall: ToolCallOptions["toolCall"],
   ): Promise<CallFunctionOutput> {
-    const functionCall = toolCall.function;
-
-    if (!functionCall) {
-      throw new Error("Invalid tool_call object");
-    }
-
     const functionMessage = await callFunction(tools, toolCall);
 
     const message = functionMessage[0];
@@ -309,18 +312,14 @@ export class OpenAIAgentWorker
   }
 
   private _shouldContinue(
-    toolCalls: OpenAIToolCall[] | null,
+    toolCall: ToolCallOptions["toolCall"] | null,
     nFunctionCalls: number,
-  ): boolean {
+  ): toolCall is ToolCallOptions["toolCall"] {
     if (nFunctionCalls > this.maxFunctionCalls) {
       return false;
     }
 
-    if (toolCalls?.length === 0) {
-      return false;
-    }
-
-    return true;
+    return !!toolCall;
   }
 
   async getTools(input: string): Promise<BaseTool[]> {
@@ -347,29 +346,25 @@ export class OpenAIAgentWorker
       llmChatParams,
     );
 
-    const latestToolCalls = this.getLatestToolCalls(task) || [];
+    const latestToolCall = this.getLatestToolCall(task) ?? null;
 
     let isDone: boolean;
-    let newSteps: TaskStep[] = [];
+    let newSteps: TaskStep[];
 
-    if (
-      !this._shouldContinue(latestToolCalls, task.extraState.nFunctionCalls)
-    ) {
+    if (!this._shouldContinue(latestToolCall, task.extraState.nFunctionCalls)) {
       isDone = true;
       newSteps = [];
     } else {
       isDone = false;
-      for (const toolCall of latestToolCalls) {
-        const { message, toolOutput } = await this.callFunction(
-          tools,
-          toolCall,
-        );
+      const { message, toolOutput } = await this.callFunction(
+        tools,
+        latestToolCall,
+      );
 
-        task.extraState.sources.push(toolOutput);
-        task.extraState.newMemory.put(message);
+      task.extraState.sources.push(toolOutput);
+      task.extraState.newMemory.put(message);
 
-        task.extraState.nFunctionCalls += 1;
-      }
+      task.extraState.nFunctionCalls += 1;
 
       newSteps = [step.getNextStep(randomUUID(), undefined)];
     }
diff --git a/packages/core/src/agent/react/base.ts b/packages/core/src/agent/react/base.ts
index 5f7f1b06abc1aaa1e49c4d77af10e53876355b6a..763e6f4874684dc75f9116f7265456dc96da8e0e 100644
--- a/packages/core/src/agent/react/base.ts
+++ b/packages/core/src/agent/react/base.ts
@@ -12,7 +12,7 @@ type ReActAgentParams = {
   prefixMessages?: ChatMessage[];
   maxInteractions?: number;
   defaultToolChoice?: string;
-  toolRetriever?: ObjectRetriever;
+  toolRetriever?: ObjectRetriever<BaseTool>;
 };
 
 /**
@@ -41,6 +41,7 @@ export class ReActAgent extends AgentRunner {
       agentWorker: stepEngine,
       memory,
       defaultToolChoice,
+      // @ts-expect-error 2322
       chatHistory: prefixMessages,
     });
   }
diff --git a/packages/core/src/agent/react/worker.ts b/packages/core/src/agent/react/worker.ts
index 7987d33c087d390dc4325296c0005ab3fc6e326d..3fba7b04cc0eb945f6bd6d8d8de7303f0755f129 100644
--- a/packages/core/src/agent/react/worker.ts
+++ b/packages/core/src/agent/react/worker.ts
@@ -2,6 +2,7 @@ import { randomUUID } from "@llamaindex/env";
 import type { ChatMessage } from "cohere-ai/api";
 import { Settings } from "../../Settings.js";
 import { AgentChatResponse } from "../../engines/chat/index.js";
+import { getCallbackManager } from "../../internal/settings/CallbackManager.js";
 import { type ChatResponse, type LLM } from "../../llm/index.js";
 import { extractText } from "../../llm/utils.js";
 import { ChatMemoryBuffer } from "../../memory/ChatMemoryBuffer.js";
@@ -25,7 +26,7 @@ type ReActAgentWorkerParams = {
   maxInteractions?: number;
   reactChatFormatter?: ReActChatFormatter | undefined;
   outputParser?: ReActOutputParser | undefined;
-  toolRetriever?: ObjectRetriever | undefined;
+  toolRetriever?: ObjectRetriever<BaseTool> | undefined;
 };
 
 function addUserStepToReasoning(
@@ -194,6 +195,14 @@ export class ReActAgentWorker implements AgentWorker<ChatParams> {
 
     const tool = toolsDict[actionReasoningStep.action];
 
+    getCallbackManager().dispatchEvent("llm-tool-call", {
+      payload: {
+        toolCall: {
+          name: tool.metadata.name,
+          input: JSON.stringify(actionReasoningStep.actionInput),
+        },
+      },
+    });
     const toolOutput = await tool.call!(actionReasoningStep.actionInput);
 
     task.extraState.sources.push(
diff --git a/packages/core/src/agent/runner/base.ts b/packages/core/src/agent/runner/base.ts
index 361fe9aa5f3bfd6c4f316d72e573b88a488ae0d8..915f996b3a0c831e861f32f135cd41aa41485408 100644
--- a/packages/core/src/agent/runner/base.ts
+++ b/packages/core/src/agent/runner/base.ts
@@ -1,11 +1,12 @@
 import { randomUUID } from "@llamaindex/env";
+import type { ChatHistory } from "../../ChatHistory.js";
 import type { ChatEngineAgentParams } from "../../engines/chat/index.js";
 import {
   AgentChatResponse,
   ChatResponseMode,
   StreamingAgentChatResponse,
 } from "../../engines/chat/index.js";
-import type { ChatMessage, LLM } from "../../llm/index.js";
+import type { LLM } from "../../llm/index.js";
 import { ChatMemoryBuffer } from "../../memory/ChatMemoryBuffer.js";
 import type { BaseMemory } from "../../memory/types.js";
 import type { AgentWorker, TaskStepOutput } from "../types.js";
@@ -30,7 +31,7 @@ const validateStepFromArgs = (
 
 type AgentRunnerParams = {
   agentWorker: AgentWorker;
-  chatHistory?: ChatMessage[];
+  chatHistory?: ChatHistory;
   state?: AgentState;
   memory?: BaseMemory;
   llm?: LLM;
diff --git a/packages/core/src/callbacks/CallbackManager.ts b/packages/core/src/callbacks/CallbackManager.ts
index d596615acb39ab54831885d33a00627fa65ea272..22c534c18ba1a0654d7d4c21c14e106b9a4dfbec 100644
--- a/packages/core/src/callbacks/CallbackManager.ts
+++ b/packages/core/src/callbacks/CallbackManager.ts
@@ -9,6 +9,7 @@ import type {
   LLMEndEvent,
   LLMStartEvent,
   LLMStreamEvent,
+  LLMToolCallEvent,
 } from "../llm/types.js";
 
 export class LlamaIndexCustomEvent<T = any> extends CustomEvent<T> {
@@ -48,6 +49,7 @@ export interface LlamaIndexEventMaps {
   stream: CustomEvent<StreamCallbackResponse>;
   "llm-start": LLMStartEvent;
   "llm-end": LLMEndEvent;
+  "llm-tool-call": LLMToolCallEvent;
   "llm-stream": LLMStreamEvent;
 }
 
@@ -203,8 +205,10 @@ export class CallbackManager implements CallbackManagerMethods {
     if (!handlers) {
       return;
     }
-    handlers.forEach((handler) =>
-      handler(LlamaIndexCustomEvent.fromEvent(event, detail)),
-    );
+    queueMicrotask(() => {
+      handlers.forEach((handler) =>
+        handler(LlamaIndexCustomEvent.fromEvent(event, detail)),
+      );
+    });
   }
 }
diff --git a/packages/core/src/internal/utils.ts b/packages/core/src/internal/utils.ts
index c04db0b393e497af319d3853fac26014539e3b7a..63133c61a8c54eb49604b06f65c64cfed6e3c0ad 100644
--- a/packages/core/src/internal/utils.ts
+++ b/packages/core/src/internal/utils.ts
@@ -5,3 +5,14 @@ export const isAsyncGenerator = (obj: unknown): obj is AsyncGenerator => {
 export const isGenerator = (obj: unknown): obj is Generator => {
   return obj != null && typeof obj === "object" && Symbol.iterator in obj;
 };
+
+/**
+ * Prettify an error for AI to read
+ */
+export function prettifyError(error: unknown): string {
+  if (error instanceof Error) {
+    return `Error(${error.name}): ${error.message}`;
+  } else {
+    return `${error}`;
+  }
+}
diff --git a/packages/core/src/llm/anthropic.ts b/packages/core/src/llm/anthropic.ts
index c6dfd54732408bc0287d6e7c15ce1732ce687c0d..a708d9b0a29b91b88d3d399e2a121c06ed41d557 100644
--- a/packages/core/src/llm/anthropic.ts
+++ b/packages/core/src/llm/anthropic.ts
@@ -2,19 +2,28 @@ import type { ClientOptions } from "@anthropic-ai/sdk";
 import { Anthropic as SDKAnthropic } from "@anthropic-ai/sdk";
 import type {
   Tool,
+  ToolResultBlockParam,
   ToolUseBlock,
+  ToolUseBlockParam,
+  ToolsBetaContentBlock,
+  ToolsBetaMessageParam,
 } from "@anthropic-ai/sdk/resources/beta/tools/messages";
-import type { TextBlock } from "@anthropic-ai/sdk/resources/index";
+import type {
+  TextBlock,
+  TextBlockParam,
+} from "@anthropic-ai/sdk/resources/index";
+import type { MessageParam } from "@anthropic-ai/sdk/resources/messages";
 import { getEnv } from "@llamaindex/env";
 import _ from "lodash";
 import type { BaseTool } from "../types.js";
-import { BaseLLM } from "./base.js";
+import { ToolCallLLM } from "./base.js";
 import type {
   ChatMessage,
   ChatResponse,
   ChatResponseChunk,
   LLMChatParamsNonStreaming,
   LLMChatParamsStreaming,
+  ToolCallLLMMessageOptions,
 } from "./types.js";
 import { extractText, wrapLLMEvent } from "./utils.js";
 
@@ -89,19 +98,7 @@ const AVAILABLE_ANTHROPIC_MODELS_WITHOUT_DATE: { [key: string]: string } = {
 
 export type AnthropicAdditionalChatOptions = {};
 
-export type AnthropicAdditionalMessageOptions =
-  | {
-      toolCall: string;
-    }
-  | {
-      toolUse: ToolUseBlock;
-    }
-  | {};
-
-export class Anthropic extends BaseLLM<
-  AnthropicAdditionalChatOptions,
-  AnthropicAdditionalMessageOptions
-> {
+export class Anthropic extends ToolCallLLM<AnthropicAdditionalChatOptions> {
   // Per completion Anthropic params
   model: keyof typeof ALL_AVAILABLE_ANTHROPIC_MODELS;
   temperature: number;
@@ -133,6 +130,10 @@ export class Anthropic extends BaseLLM<
       });
   }
 
+  get supportToolCall() {
+    return this.model.startsWith("claude-3");
+  }
+
   get metadata() {
     return {
       model: this.model,
@@ -151,27 +152,90 @@ export class Anthropic extends BaseLLM<
     return model;
   };
 
-  formatMessages(messages: ChatMessage[]) {
-    return messages.map((message) => {
+  formatMessages<Beta = false>(
+    messages: ChatMessage<ToolCallLLMMessageOptions>[],
+  ): Beta extends true ? ToolsBetaMessageParam[] : MessageParam[] {
+    return messages.map<any>((message) => {
       if (message.role !== "user" && message.role !== "assistant") {
         throw new Error("Unsupported Anthropic role");
       }
+      const options = message.options ?? {};
+      if ("toolResult" in options) {
+        const { id, isError } = options.toolResult;
+        return {
+          role: "user",
+          content: [
+            {
+              type: "tool_result",
+              is_error: isError,
+              content: [
+                {
+                  type: "text",
+                  text: extractText(message.content),
+                },
+              ],
+              tool_use_id: id,
+            },
+          ] satisfies ToolResultBlockParam[],
+        } satisfies ToolsBetaMessageParam;
+      } else if ("toolCall" in options) {
+        const aiThinkingText = extractText(message.content);
+        return {
+          role: "assistant",
+          content: [
+            // this could be empty when you call two tools in one query
+            ...(aiThinkingText.trim()
+              ? [
+                  {
+                    type: "text",
+                    text: aiThinkingText,
+                  } satisfies TextBlockParam,
+                ]
+              : []),
+            {
+              type: "tool_use",
+              id: options.toolCall.id,
+              name: options.toolCall.name,
+              input: options.toolCall.input,
+            } satisfies ToolUseBlockParam,
+          ] satisfies ToolsBetaContentBlock[],
+        } satisfies ToolsBetaMessageParam;
+      }
 
       return {
         content: extractText(message.content),
         role: message.role,
-      };
+      } satisfies MessageParam;
     });
   }
 
   chat(
-    params: LLMChatParamsStreaming,
-  ): Promise<AsyncIterable<ChatResponseChunk>>;
-  chat(params: LLMChatParamsNonStreaming): Promise<ChatResponse>;
+    params: LLMChatParamsStreaming<
+      AnthropicAdditionalChatOptions,
+      ToolCallLLMMessageOptions
+    >,
+  ): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>>;
+  chat(
+    params: LLMChatParamsNonStreaming<
+      AnthropicAdditionalChatOptions,
+      ToolCallLLMMessageOptions
+    >,
+  ): Promise<ChatResponse<ToolCallLLMMessageOptions>>;
   @wrapLLMEvent
   async chat(
-    params: LLMChatParamsNonStreaming | LLMChatParamsStreaming,
-  ): Promise<ChatResponse | AsyncIterable<ChatResponseChunk>> {
+    params:
+      | LLMChatParamsNonStreaming<
+          AnthropicAdditionalChatOptions,
+          ToolCallLLMMessageOptions
+        >
+      | LLMChatParamsStreaming<
+          AnthropicAdditionalChatOptions,
+          ToolCallLLMMessageOptions
+        >,
+  ): Promise<
+    | ChatResponse<ToolCallLLMMessageOptions>
+    | AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
+  > {
     let { messages } = params;
 
     const { stream, tools } = params;
@@ -201,7 +265,7 @@ export class Anthropic extends BaseLLM<
 
     if (tools) {
       const response = await anthropic.beta.tools.messages.create({
-        messages: this.formatMessages(messages),
+        messages: this.formatMessages<true>(messages),
         tools: tools.map(Anthropic.toTool),
         model: this.getModelName(this.model),
         temperature: this.temperature,
@@ -226,7 +290,11 @@ export class Anthropic extends BaseLLM<
           role: "assistant",
           options: toolUseBlock
             ? {
-                toolUse: toolUseBlock,
+                toolCall: {
+                  id: toolUseBlock.id,
+                  name: toolUseBlock.name,
+                  input: toolUseBlock.input,
+                },
               }
             : {},
         },
@@ -243,18 +311,22 @@ export class Anthropic extends BaseLLM<
 
       return {
         raw: response,
-        message: { content: response.content[0].text, role: "assistant" },
+        message: {
+          content: response.content[0].text,
+          role: "assistant",
+          options: {},
+        },
       };
     }
   }
 
   protected async *streamChat(
-    messages: ChatMessage[],
+    messages: ChatMessage<ToolCallLLMMessageOptions>[],
     systemPrompt?: string | null,
-  ): AsyncIterable<ChatResponseChunk> {
+  ): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>> {
     const stream = await this.session.anthropic.messages.create({
       model: this.getModelName(this.model),
-      messages: this.formatMessages(messages),
+      messages: this.formatMessages<false>(messages),
       max_tokens: this.maxTokens ?? 4096,
       temperature: this.temperature,
       top_p: this.topP,
@@ -273,6 +345,7 @@ export class Anthropic extends BaseLLM<
       yield {
         raw: part,
         delta: content,
+        options: {},
       };
     }
     return;
diff --git a/packages/core/src/llm/base.ts b/packages/core/src/llm/base.ts
index d0f434909c55b8746b238146a3c82dd7f6eec9fd..8db28bc9b1ac1281b3b9603bd653fc3fb23fd115 100644
--- a/packages/core/src/llm/base.ts
+++ b/packages/core/src/llm/base.ts
@@ -8,18 +8,13 @@ import type {
   LLMCompletionParamsNonStreaming,
   LLMCompletionParamsStreaming,
   LLMMetadata,
+  ToolCallLLMMessageOptions,
 } from "./types.js";
 import { extractText, streamConverter } from "./utils.js";
 
 export abstract class BaseLLM<
-  AdditionalChatOptions extends Record<string, unknown> = Record<
-    string,
-    unknown
-  >,
-  AdditionalMessageOptions extends Record<string, unknown> = Record<
-    string,
-    unknown
-  >,
+  AdditionalChatOptions extends object = object,
+  AdditionalMessageOptions extends object = object,
 > implements LLM<AdditionalChatOptions>
 {
   abstract metadata: LLMMetadata;
@@ -56,9 +51,21 @@ export abstract class BaseLLM<
   }
 
   abstract chat(
-    params: LLMChatParamsStreaming<AdditionalChatOptions>,
+    params: LLMChatParamsStreaming<
+      AdditionalChatOptions,
+      AdditionalMessageOptions
+    >,
   ): Promise<AsyncIterable<ChatResponseChunk>>;
   abstract chat(
-    params: LLMChatParamsNonStreaming<AdditionalChatOptions>,
+    params: LLMChatParamsNonStreaming<
+      AdditionalChatOptions,
+      AdditionalMessageOptions
+    >,
   ): Promise<ChatResponse<AdditionalMessageOptions>>;
 }
+
+export abstract class ToolCallLLM<
+  AdditionalChatOptions extends object = object,
+> extends BaseLLM<AdditionalChatOptions, ToolCallLLMMessageOptions> {
+  abstract supportToolCall: boolean;
+}
diff --git a/packages/core/src/llm/open_ai.ts b/packages/core/src/llm/open_ai.ts
index 6279f924ec33757bf513a5d6e77e0348d144a3ad..78590ec89b7770451ded4d791fe1e5b0c67d4276 100644
--- a/packages/core/src/llm/open_ai.ts
+++ b/packages/core/src/llm/open_ai.ts
@@ -9,11 +9,11 @@ import { OpenAI as OrigOpenAI } from "openai";
 
 import type {
   ChatCompletionAssistantMessageParam,
-  ChatCompletionFunctionMessageParam,
   ChatCompletionMessageToolCall,
   ChatCompletionRole,
   ChatCompletionSystemMessageParam,
   ChatCompletionTool,
+  ChatCompletionToolMessageParam,
   ChatCompletionUserMessageParam,
 } from "openai/resources/chat/completions";
 import type { ChatCompletionMessageParam } from "openai/resources/index.js";
@@ -28,7 +28,7 @@ import {
   getAzureModel,
   shouldUseAzure,
 } from "./azure.js";
-import { BaseLLM } from "./base.js";
+import { ToolCallLLM } from "./base.js";
 import type {
   ChatMessage,
   ChatResponse,
@@ -37,8 +37,9 @@ import type {
   LLMChatParamsNonStreaming,
   LLMChatParamsStreaming,
   LLMMetadata,
-  MessageToolCall,
   MessageType,
+  ToolCallLLMMessageOptions,
+  ToolCallOptions,
 } from "./types.js";
 import { extractText, wrapLLMEvent } from "./utils.js";
 
@@ -143,14 +144,7 @@ export function isFunctionCallingModel(llm: LLM): llm is OpenAI {
   return isChatModel && !isOld;
 }
 
-export type OpenAIAdditionalMetadata = {
-  isFunctionCallingModel: boolean;
-};
-
-export type OpenAIAdditionalMessageOptions = {
-  functionName?: string;
-  toolCalls?: ChatCompletionMessageToolCall[];
-};
+export type OpenAIAdditionalMetadata = {};
 
 export type OpenAIAdditionalChatOptions = Omit<
   Partial<OpenAILLM.Chat.ChatCompletionCreateParams>,
@@ -164,10 +158,7 @@ export type OpenAIAdditionalChatOptions = Omit<
   | "toolChoice"
 >;
 
-export class OpenAI extends BaseLLM<
-  OpenAIAdditionalChatOptions,
-  OpenAIAdditionalMessageOptions
-> {
+export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> {
   // Per completion OpenAI params
   model: keyof typeof ALL_AVAILABLE_OPENAI_MODELS | string;
   temperature: number;
@@ -238,6 +229,10 @@ export class OpenAI extends BaseLLM<
     }
   }
 
+  get supportToolCall() {
+    return isFunctionCallingModel(this);
+  }
+
   get metadata(): LLMMetadata & OpenAIAdditionalMetadata {
     const contextWindow =
       ALL_AVAILABLE_OPENAI_MODELS[
@@ -250,7 +245,6 @@ export class OpenAI extends BaseLLM<
       maxTokens: this.maxTokens,
       contextWindow,
       tokenizer: Tokenizers.CL100K_BASE,
-      isFunctionCallingModel: isFunctionCallingModel(this),
     };
   }
 
@@ -262,46 +256,45 @@ export class OpenAI extends BaseLLM<
         return "assistant";
       case "system":
         return "system";
-      case "function":
-        return "function";
-      case "tool":
-        return "tool";
       default:
         return "user";
     }
   }
 
   static toOpenAIMessage(
-    messages: ChatMessage<OpenAIAdditionalMessageOptions>[],
+    messages: ChatMessage<ToolCallLLMMessageOptions>[],
   ): ChatCompletionMessageParam[] {
     return messages.map((message) => {
-      const options: OpenAIAdditionalMessageOptions = message.options ?? {};
-      if (message.role === "user") {
-        return {
-          role: "user",
-          content: message.content,
-        } satisfies ChatCompletionUserMessageParam;
-      }
-      if (typeof message.content !== "string") {
-        console.warn("Message content is not a string");
-      }
-      if (message.role === "function") {
-        if (!options.functionName) {
-          console.warn("Function message does not have a name");
-        }
+      const options = message.options ?? {};
+      if ("toolResult" in options) {
         return {
-          role: "function",
-          name: options.functionName ?? "UNKNOWN",
+          tool_call_id: options.toolResult.id,
+          role: "tool",
           content: extractText(message.content),
-          // todo: remove this since this is deprecated in the OpenAI API
-        } satisfies ChatCompletionFunctionMessageParam;
-      }
-      if (message.role === "assistant") {
+        } satisfies ChatCompletionToolMessageParam;
+      } else if ("toolCall" in options) {
         return {
           role: "assistant",
           content: extractText(message.content),
-          tool_calls: options.toolCalls,
+          tool_calls: [
+            {
+              id: options.toolCall.id,
+              type: "function",
+              function: {
+                name: options.toolCall.name,
+                arguments:
+                  typeof options.toolCall.input === "string"
+                    ? options.toolCall.input
+                    : JSON.stringify(options.toolCall.input),
+              },
+            },
+          ],
         } satisfies ChatCompletionAssistantMessageParam;
+      } else if (message.role === "user") {
+        return {
+          role: "user",
+          content: message.content,
+        } satisfies ChatCompletionUserMessageParam;
       }
 
       const response:
@@ -312,27 +305,38 @@ export class OpenAI extends BaseLLM<
         role: OpenAI.toOpenAIRole(message.role) as never,
         // fixme: should not extract text, but assert content is string
         content: extractText(message.content),
-        ...options,
       };
       return response;
     });
   }
 
   chat(
-    params: LLMChatParamsStreaming<OpenAIAdditionalChatOptions>,
-  ): Promise<AsyncIterable<ChatResponseChunk<OpenAIAdditionalMessageOptions>>>;
+    params: LLMChatParamsStreaming<
+      OpenAIAdditionalChatOptions,
+      ToolCallLLMMessageOptions
+    >,
+  ): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>>;
   chat(
-    params: LLMChatParamsNonStreaming<OpenAIAdditionalChatOptions>,
-  ): Promise<ChatResponse<OpenAIAdditionalMessageOptions>>;
+    params: LLMChatParamsNonStreaming<
+      OpenAIAdditionalChatOptions,
+      ToolCallLLMMessageOptions
+    >,
+  ): Promise<ChatResponse<ToolCallLLMMessageOptions>>;
   @wrapEventCaller
   @wrapLLMEvent
   async chat(
     params:
-      | LLMChatParamsNonStreaming<OpenAIAdditionalChatOptions>
-      | LLMChatParamsStreaming<OpenAIAdditionalChatOptions>,
+      | LLMChatParamsNonStreaming<
+          OpenAIAdditionalChatOptions,
+          ToolCallLLMMessageOptions
+        >
+      | LLMChatParamsStreaming<
+          OpenAIAdditionalChatOptions,
+          ToolCallLLMMessageOptions
+        >,
   ): Promise<
-    | ChatResponse<OpenAIAdditionalMessageOptions>
-    | AsyncIterable<ChatResponseChunk<OpenAIAdditionalMessageOptions>>
+    | ChatResponse<ToolCallLLMMessageOptions>
+    | AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
   > {
     const { messages, stream, tools, additionalChatOptions } = params;
     const baseRequestParams: OpenAILLM.Chat.ChatCompletionCreateParams = {
@@ -358,18 +362,21 @@ export class OpenAI extends BaseLLM<
 
     const content = response.choices[0].message?.content ?? "";
 
-    const options: OpenAIAdditionalMessageOptions = {};
-
-    if (response.choices[0].message?.tool_calls) {
-      options.toolCalls = response.choices[0].message.tool_calls;
-    }
-
     return {
       raw: response,
       message: {
         content,
         role: response.choices[0].message.role,
-        options,
+        options: response.choices[0].message?.tool_calls
+          ? {
+              toolCall: {
+                id: response.choices[0].message.tool_calls[0].id,
+                name: response.choices[0].message.tool_calls[0].function.name,
+                input:
+                  response.choices[0].message.tool_calls[0].function.arguments,
+              },
+            }
+          : {},
       },
     };
   }
@@ -377,7 +384,7 @@ export class OpenAI extends BaseLLM<
   @wrapEventCaller
   protected async *streamChat(
     baseRequestParams: OpenAILLM.Chat.ChatCompletionCreateParams,
-  ): AsyncIterable<ChatResponseChunk<OpenAIAdditionalMessageOptions>> {
+  ): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>> {
     const stream: AsyncIterable<OpenAILLM.Chat.ChatCompletionChunk> =
       await this.session.openai.chat.completions.create({
         ...baseRequestParams,
@@ -387,13 +394,26 @@ export class OpenAI extends BaseLLM<
     // TODO: add callback to streamConverter and use streamConverter here
     //Indices
     let idxCounter: number = 0;
-    const toolCalls: MessageToolCall[] = [];
+    let toolCallOptions: ToolCallOptions | null = null;
     for await (const part of stream) {
       if (!part.choices.length) continue;
       const choice = part.choices[0];
       // skip parts that don't have any content
       if (!(choice.delta.content || choice.delta.tool_calls)) continue;
-      updateToolCalls(toolCalls, choice.delta.tool_calls);
+      if (choice.delta.tool_calls?.[0].id) {
+        toolCallOptions = {
+          toolCall: {
+            name: choice.delta.tool_calls[0].function!.name!,
+            id: choice.delta.tool_calls[0].id,
+            input: choice.delta.tool_calls[0].function!.arguments!,
+          },
+        };
+      } else {
+        if (choice.delta.tool_calls?.[0].function?.arguments) {
+          toolCallOptions!.toolCall.input +=
+            choice.delta.tool_calls[0].function.arguments;
+        }
+      }
 
       const isDone: boolean = choice.finish_reason !== null;
 
@@ -405,8 +425,7 @@ export class OpenAI extends BaseLLM<
 
       yield {
         raw: part,
-        // add tool calls to final chunk
-        options: toolCalls.length > 0 ? { toolCalls: toolCalls } : {},
+        options: toolCallOptions ? toolCallOptions : {},
         delta: choice.delta.content ?? "",
       };
     }
@@ -424,34 +443,3 @@ export class OpenAI extends BaseLLM<
     };
   }
 }
-
-function updateToolCalls(
-  toolCalls: MessageToolCall[],
-  toolCallDeltas?: OpenAILLM.Chat.Completions.ChatCompletionChunk.Choice.Delta.ToolCall[],
-) {
-  function augmentToolCall(
-    toolCall?: MessageToolCall,
-    toolCallDelta?: OpenAILLM.Chat.Completions.ChatCompletionChunk.Choice.Delta.ToolCall,
-  ) {
-    toolCall =
-      toolCall ??
-      ({ function: { name: "", arguments: "" } } as MessageToolCall);
-    toolCall.id = toolCall.id ?? toolCallDelta?.id;
-    toolCall.type = toolCall.type ?? toolCallDelta?.type;
-    if (toolCallDelta?.function?.arguments) {
-      toolCall.function.arguments += toolCallDelta.function.arguments;
-    }
-    if (toolCallDelta?.function?.name) {
-      toolCall.function.name += toolCallDelta.function.name;
-    }
-    return toolCall;
-  }
-  if (toolCallDeltas) {
-    toolCallDeltas?.forEach((toolCall) => {
-      toolCalls[toolCall.index] = augmentToolCall(
-        toolCalls[toolCall.index],
-        toolCall,
-      );
-    });
-  }
-}
diff --git a/packages/core/src/llm/types.ts b/packages/core/src/llm/types.ts
index bbe8aa8e061b222e0b88e071a11367d7d11cb641..26da7b4ee6e4f37694d7ef98c1da8e3fa37ca774 100644
--- a/packages/core/src/llm/types.ts
+++ b/packages/core/src/llm/types.ts
@@ -1,54 +1,42 @@
 import type { Tokenizers } from "../GlobalsHelper.js";
 import type { BaseTool, UUID } from "../types.js";
 
-type LLMBaseEvent<
-  Type extends string,
-  Payload extends Record<string, unknown>,
-> = CustomEvent<{
+type LLMBaseEvent<Payload extends Record<string, unknown>> = CustomEvent<{
   payload: Payload;
 }>;
 
-export type LLMStartEvent = LLMBaseEvent<
-  "llm-start",
-  {
-    id: UUID;
-    messages: ChatMessage[];
-  }
->;
-export type LLMEndEvent = LLMBaseEvent<
-  "llm-end",
-  {
-    id: UUID;
-    response: ChatResponse;
-  }
->;
-export type LLMStreamEvent = LLMBaseEvent<
-  "llm-stream",
-  {
-    id: UUID;
-    chunk: ChatResponseChunk;
-  }
->;
+export type LLMStartEvent = LLMBaseEvent<{
+  id: UUID;
+  messages: ChatMessage[];
+}>;
+export type LLMToolCallEvent = LLMBaseEvent<{
+  // FIXME: the event id (UUID) is not available at the dispatch site yet
+  // id: UUID;
+  toolCall: Omit<ToolCallOptions["toolCall"], "id">;
+}>;
+export type LLMEndEvent = LLMBaseEvent<{
+  id: UUID;
+  response: ChatResponse;
+}>;
+export type LLMStreamEvent = LLMBaseEvent<{
+  id: UUID;
+  chunk: ChatResponseChunk;
+}>;
 
 /**
  * @internal
  */
 export interface LLMChat<
-  AdditionalChatOptions extends Record<string, unknown> = Record<
-    string,
-    unknown
-  >,
-  AdditionalMessageOptions extends Record<string, unknown> = Record<
-    string,
-    unknown
-  >,
+  AdditionalChatOptions extends object = object,
+  AdditionalMessageOptions extends object = object,
 > {
   chat(
     params:
       | LLMChatParamsStreaming<AdditionalChatOptions>
       | LLMChatParamsNonStreaming<AdditionalChatOptions>,
   ): Promise<
-    ChatResponse<AdditionalMessageOptions> | AsyncIterable<ChatResponseChunk>
+    | ChatResponse<AdditionalMessageOptions>
+    | AsyncIterable<ChatResponseChunk<AdditionalMessageOptions>>
   >;
 }
 
@@ -56,24 +44,24 @@ export interface LLMChat<
  * Unified language model interface
  */
 export interface LLM<
-  AdditionalChatOptions extends Record<string, unknown> = Record<
-    string,
-    unknown
-  >,
-  AdditionalMessageOptions extends Record<string, unknown> = Record<
-    string,
-    unknown
-  >,
+  AdditionalChatOptions extends object = object,
+  AdditionalMessageOptions extends object = object,
 > extends LLMChat<AdditionalChatOptions> {
   metadata: LLMMetadata;
   /**
    * Get a chat response from the LLM
    */
   chat(
-    params: LLMChatParamsStreaming<AdditionalChatOptions>,
+    params: LLMChatParamsStreaming<
+      AdditionalChatOptions,
+      AdditionalMessageOptions
+    >,
   ): Promise<AsyncIterable<ChatResponseChunk>>;
   chat(
-    params: LLMChatParamsNonStreaming<AdditionalChatOptions>,
+    params: LLMChatParamsNonStreaming<
+      AdditionalChatOptions,
+      AdditionalMessageOptions
+    >,
   ): Promise<ChatResponse<AdditionalMessageOptions>>;
 
   /**
@@ -87,48 +75,16 @@ export interface LLM<
   ): Promise<CompletionResponse>;
 }
 
-// todo: remove "generic", "function", "memory";
-export type MessageType =
-  | "user"
-  | "assistant"
-  | "system"
-  /**
-   * @deprecated
-   */
-  | "generic"
-  /**
-   * @deprecated
-   */
-  | "function"
-  /**
-   * @deprecated
-   */
-  | "memory"
-  | "tool";
-
-export type ChatMessage<
-  AdditionalMessageOptions extends Record<string, unknown> = Record<
-    string,
-    unknown
-  >,
-> =
-  AdditionalMessageOptions extends Record<string, unknown>
-    ? {
-        content: MessageContent;
-        role: MessageType;
-        options?: AdditionalMessageOptions;
-      }
-    : {
-        content: MessageContent;
-        role: MessageType;
-        options: AdditionalMessageOptions;
-      };
+export type MessageType = "user" | "assistant" | "system" | "memory";
+
+export type ChatMessage<AdditionalMessageOptions extends object = object> = {
+  content: MessageContent;
+  role: MessageType;
+  options?: undefined | AdditionalMessageOptions;
+};
 
 export interface ChatResponse<
-  AdditionalMessageOptions extends Record<string, unknown> = Record<
-    string,
-    unknown
-  >,
+  AdditionalMessageOptions extends object = object,
 > {
   message: ChatMessage<AdditionalMessageOptions>;
   /**
@@ -140,22 +96,12 @@ export interface ChatResponse<
 }
 
 export type ChatResponseChunk<
-  AdditionalMessageOptions extends Record<string, unknown> = Record<
-    string,
-    unknown
-  >,
-> =
-  AdditionalMessageOptions extends Record<string, unknown>
-    ? {
-        raw: object | null;
-        delta: string;
-        options?: AdditionalMessageOptions;
-      }
-    : {
-        raw: object | null;
-        delta: string;
-        options: AdditionalMessageOptions;
-      };
+  AdditionalMessageOptions extends object = object,
+> = {
+  raw: object | null;
+  delta: string;
+  options?: undefined | AdditionalMessageOptions;
+};
 
 export interface CompletionResponse {
   text: string;
@@ -177,36 +123,25 @@ export type LLMMetadata = {
 };
 
 export interface LLMChatParamsBase<
-  AdditionalChatOptions extends Record<string, unknown> = Record<
-    string,
-    unknown
-  >,
-  AdditionalMessageOptions extends Record<string, unknown> = Record<
-    string,
-    unknown
-  >,
+  AdditionalChatOptions extends object = object,
+  AdditionalMessageOptions extends object = object,
 > {
   messages: ChatMessage<AdditionalMessageOptions>[];
   additionalChatOptions?: AdditionalChatOptions;
   tools?: BaseTool[];
-  additionalKwargs?: Record<string, unknown>;
 }
 
 export interface LLMChatParamsStreaming<
-  AdditionalChatOptions extends Record<string, unknown> = Record<
-    string,
-    unknown
-  >,
-> extends LLMChatParamsBase<AdditionalChatOptions> {
+  AdditionalChatOptions extends object = object,
+  AdditionalMessageOptions extends object = object,
+> extends LLMChatParamsBase<AdditionalChatOptions, AdditionalMessageOptions> {
   stream: true;
 }
 
 export interface LLMChatParamsNonStreaming<
-  AdditionalChatOptions extends Record<string, unknown> = Record<
-    string,
-    unknown
-  >,
-> extends LLMChatParamsBase<AdditionalChatOptions> {
+  AdditionalChatOptions extends object = object,
+  AdditionalMessageOptions extends object = object,
+> extends LLMChatParamsBase<AdditionalChatOptions, AdditionalMessageOptions> {
   stream?: false;
 }
 
@@ -242,13 +177,29 @@ export type MessageContentDetail =
  */
 export type MessageContent = string | MessageContentDetail[];
 
-interface Function {
-  arguments: string;
+export type ToolCall = {
   name: string;
-}
+  // For now, claude-3-opus returns an object while gpt-3.5/4 returns a JSON string.
+  // TODO: unify this so `input` is always a parsed object.
+  input: unknown;
+  id: string;
+};
 
-export interface MessageToolCall {
+export type ToolResult = {
   id: string;
-  function: Function;
-  type: "function";
-}
+  result: string;
+  isError: boolean;
+};
+
+export type ToolCallOptions = {
+  toolCall: ToolCall;
+};
+
+export type ToolResultOptions = {
+  toolResult: ToolResult;
+};
+
+export type ToolCallLLMMessageOptions =
+  | ToolResultOptions
+  | ToolCallOptions
+  | {};
diff --git a/packages/core/src/llm/utils.ts b/packages/core/src/llm/utils.ts
index 44f256cbc6bfa84a5ab2714b539f7c73b8fc565a..4d90a66b0a2b506b1cf9d424aa2e4760c4bfa3c2 100644
--- a/packages/core/src/llm/utils.ts
+++ b/packages/core/src/llm/utils.ts
@@ -61,14 +61,24 @@ export function extractText(message: MessageContent): string {
 /**
  * @internal
  */
-export function wrapLLMEvent(
-  originalMethod: LLMChat["chat"],
+export function wrapLLMEvent<
+  AdditionalChatOptions extends object = object,
+  AdditionalMessageOptions extends object = object,
+>(
+  originalMethod: LLMChat<
+    AdditionalChatOptions,
+    AdditionalMessageOptions
+  >["chat"],
   _context: ClassMethodDecoratorContext,
 ) {
   return async function withLLMEvent(
-    this: LLM,
-    ...params: Parameters<LLMChat["chat"]>
-  ): ReturnType<LLMChat["chat"]> {
+    this: LLM<AdditionalChatOptions, AdditionalMessageOptions>,
+    ...params: Parameters<
+      LLMChat<AdditionalChatOptions, AdditionalMessageOptions>["chat"]
+    >
+  ): ReturnType<
+    LLMChat<AdditionalChatOptions, AdditionalMessageOptions>["chat"]
+  > {
     const id = randomUUID();
     getCallbackManager().dispatchEvent("llm-start", {
       payload: {
diff --git a/packages/core/src/memory/ChatMemoryBuffer.ts b/packages/core/src/memory/ChatMemoryBuffer.ts
index 54367a84aa384fc2f209b0ea31a2997f2bcac50b..eda6bbb313c308ec914505b45f4a1a6a3835d7bf 100644
--- a/packages/core/src/memory/ChatMemoryBuffer.ts
+++ b/packages/core/src/memory/ChatMemoryBuffer.ts
@@ -1,3 +1,4 @@
+import type { ChatHistory } from "../ChatHistory.js";
 import type { ChatMessage, LLM } from "../llm/index.js";
 import { SimpleChatStore } from "../storage/chatStore/SimpleChatStore.js";
 import type { BaseChatStore } from "../storage/chatStore/types.js";
@@ -6,25 +7,17 @@ import type { BaseMemory } from "./types.js";
 const DEFAULT_TOKEN_LIMIT_RATIO = 0.75;
 const DEFAULT_TOKEN_LIMIT = 3000;
 
-type ChatMemoryBufferParams<
-  AdditionalMessageOptions extends Record<string, unknown> = Record<
-    string,
-    unknown
-  >,
-> = {
-  tokenLimit?: number;
-  chatStore?: BaseChatStore<AdditionalMessageOptions>;
-  chatStoreKey?: string;
-  chatHistory?: ChatMessage<AdditionalMessageOptions>[];
-  llm?: LLM<Record<string, unknown>, AdditionalMessageOptions>;
-};
-
-export class ChatMemoryBuffer<
-  AdditionalMessageOptions extends Record<string, unknown> = Record<
-    string,
-    unknown
-  >,
-> implements BaseMemory<AdditionalMessageOptions>
+type ChatMemoryBufferParams<AdditionalMessageOptions extends object = object> =
+  {
+    tokenLimit?: number;
+    chatStore?: BaseChatStore<AdditionalMessageOptions>;
+    chatStoreKey?: string;
+    chatHistory?: ChatHistory<AdditionalMessageOptions>;
+    llm?: LLM<object, AdditionalMessageOptions>;
+  };
+
+export class ChatMemoryBuffer<AdditionalMessageOptions extends object = object>
+  implements BaseMemory<AdditionalMessageOptions>
 {
   tokenLimit: number;
 
@@ -47,7 +40,7 @@ export class ChatMemoryBuffer<
     }
 
     if (init?.chatHistory) {
-      this.chatStore.setMessages(this.chatStoreKey, init.chatHistory);
+      this.chatStore.setMessages(this.chatStoreKey, init.chatHistory.messages);
     }
   }
 
diff --git a/packages/core/src/memory/types.ts b/packages/core/src/memory/types.ts
index 5a19c431bb51c1d0aa43a429e5c7b48639923f35..8af16e5a1de748f8e3ac45080849ce1546947d33 100644
--- a/packages/core/src/memory/types.ts
+++ b/packages/core/src/memory/types.ts
@@ -1,11 +1,6 @@
 import type { ChatMessage } from "../llm/index.js";
 
-export interface BaseMemory<
-  AdditionalMessageOptions extends Record<string, unknown> = Record<
-    string,
-    unknown
-  >,
-> {
+export interface BaseMemory<AdditionalMessageOptions extends object = object> {
   tokenLimit: number;
   get(...args: unknown[]): ChatMessage<AdditionalMessageOptions>[];
   getAll(): ChatMessage<AdditionalMessageOptions>[];
diff --git a/packages/core/src/objects/base.ts b/packages/core/src/objects/base.ts
index b7bd963996a3ddadd8ecacf0aeea65d2f18b85de..3ef6832d4824afad698efa7981053c692c554de1 100644
--- a/packages/core/src/objects/base.ts
+++ b/packages/core/src/objects/base.ts
@@ -50,7 +50,7 @@ export abstract class BaseObjectNodeMapping {
 
 type QueryType = string;
 
-export class ObjectRetriever {
+export class ObjectRetriever<T = unknown> {
   _retriever: BaseRetriever;
   _objectNodeMapping: BaseObjectNodeMapping;
 
@@ -68,7 +68,7 @@ export class ObjectRetriever {
   }
 
   // Translating the retrieve method
-  async retrieve(strOrQueryBundle: QueryType): Promise<any> {
+  async retrieve(strOrQueryBundle: QueryType): Promise<T[]> {
     const nodes = await this.retriever.retrieve({ query: strOrQueryBundle });
     const objs = nodes.map((n) => this._objectNodeMapping.fromNode(n.node));
     return objs;
@@ -180,7 +180,7 @@ export class ObjectIndex {
     return this._objectNodeMapping.objNodeMapping();
   }
 
-  async asRetriever(kwargs: any): Promise<ObjectRetriever> {
+  async asRetriever(kwargs: any): Promise<ObjectRetriever<any>> {
     return new ObjectRetriever(
       this._index.asRetriever(kwargs),
       this._objectNodeMapping,
diff --git a/packages/core/src/storage/chatStore/SimpleChatStore.ts b/packages/core/src/storage/chatStore/SimpleChatStore.ts
index 4c09f2550e6e35498ba89665a59052426bdcc72a..4f6df4718115704a6f69fb4b00ccb8f562f285f2 100644
--- a/packages/core/src/storage/chatStore/SimpleChatStore.ts
+++ b/packages/core/src/storage/chatStore/SimpleChatStore.ts
@@ -6,10 +6,7 @@ import type { BaseChatStore } from "./types.js";
  *  This could lead to memory leaks if the messages are not properly cleaned up.
  */
 export class SimpleChatStore<
-  AdditionalMessageOptions extends Record<string, unknown> = Record<
-    string,
-    unknown
-  >,
+  AdditionalMessageOptions extends object = Record<string, unknown>,
 > implements BaseChatStore<AdditionalMessageOptions>
 {
   store: { [key: string]: ChatMessage<AdditionalMessageOptions>[] } = {};
diff --git a/packages/core/src/storage/chatStore/types.ts b/packages/core/src/storage/chatStore/types.ts
index 7b3a2acda0d23e8e51a17345c4adb06e1c42367e..c809f91becf2312c03fb03b2e13b602a73b55139 100644
--- a/packages/core/src/storage/chatStore/types.ts
+++ b/packages/core/src/storage/chatStore/types.ts
@@ -1,10 +1,7 @@
 import type { ChatMessage } from "../../llm/index.js";
 
 export interface BaseChatStore<
-  AdditionalMessageOptions extends Record<string, unknown> = Record<
-    string,
-    unknown
-  >,
+  AdditionalMessageOptions extends object = object,
 > {
   setMessages(
     key: string,
diff --git a/packages/core/src/tools/utils.ts b/packages/core/src/tools/utils.ts
index 3c19c20a4dd19102c4925dd1f2e73ee05c8caf4a..afc5280aa7224e634a43b6cb724d477fa9699d8a 100644
--- a/packages/core/src/tools/utils.ts
+++ b/packages/core/src/tools/utils.ts
@@ -1,27 +1,33 @@
+import { getCallbackManager } from "../internal/settings/CallbackManager.js";
 import type { BaseTool } from "../types.js";
 import { ToolOutput } from "./types.js";
 
 export async function callToolWithErrorHandling(
   tool: BaseTool,
-  inputDict: { [key: string]: any },
+  input: unknown,
 ): Promise<ToolOutput> {
   if (!tool.call) {
     return new ToolOutput(
       "Error: Tool does not have a call function.",
       tool.metadata.name,
-      { kwargs: inputDict },
+      input,
       null,
     );
   }
   try {
-    const value = await tool.call(inputDict);
-    return new ToolOutput(value, tool.metadata.name, inputDict, value);
-  } catch (e) {
-    return new ToolOutput(
-      `Error: ${e}`,
-      tool.metadata.name,
-      { kwargs: inputDict },
-      e,
+    getCallbackManager().dispatchEvent("llm-tool-call", {
+      payload: {
+        toolCall: {
+          name: tool.metadata.name,
+          input,
+        },
+      },
+    });
+    const value = await tool.call(
+      typeof input === "string" ? JSON.parse(input) : input,
     );
+    return new ToolOutput(value, tool.metadata.name, input, value);
+  } catch (e) {
+    return new ToolOutput(`Error: ${e}`, tool.metadata.name, input, e);
   }
 }
diff --git a/packages/core/src/types.ts b/packages/core/src/types.ts
index f1b0073e3ad32eeb73de3427b98d7fea47afd44b..a51b1182ec543d25c90eeb416a9b6634aa6c942f 100644
--- a/packages/core/src/types.ts
+++ b/packages/core/src/types.ts
@@ -67,7 +67,7 @@ export interface BaseTool<Input = any> {
   Input extends Known ? ToolMetadata<JSONSchemaType<Input>> : ToolMetadata;
 }
 
-export type ToolWithCall<Input = unknown> = Omit<BaseTool<Input>, "call"> & {
+export type BaseToolWithCall<Input = any> = Omit<BaseTool<Input>, "call"> & {
   call: NonNullable<Pick<BaseTool<Input>, "call">["call"]>;
 };
 
diff --git a/packages/core/tests/type.test.ts b/packages/core/tests/type.test.ts
new file mode 100644
index 0000000000000000000000000000000000000000..3612f0c00d7cd8ed6ff693024cf8c18d934ef582
--- /dev/null
+++ b/packages/core/tests/type.test.ts
@@ -0,0 +1,56 @@
+import type { ChatMessage, MessageContent, MessageType } from "llamaindex";
+import { expectTypeOf, test } from "vitest";
+import type { ChatResponse } from "../src/index.js";
+
+test("chat message type", () => {
+  // if generic is not provided, `options` is not required
+  expectTypeOf<ChatMessage>().toMatchTypeOf<{
+    content: MessageContent;
+    role: MessageType;
+  }>();
+  expectTypeOf<ChatMessage>().toMatchTypeOf<{
+    content: MessageContent;
+    role: MessageType;
+    options?: object;
+  }>();
+  expectTypeOf<ChatMessage>().not.toMatchTypeOf<{
+    content: MessageContent;
+    role: MessageType;
+    options: Record<string, unknown>;
+  }>();
+  type Options = {
+    a: string;
+    b: number;
+  };
+  expectTypeOf<ChatMessage<Options>>().toMatchTypeOf<{
+    content: MessageContent;
+    role: MessageType;
+    options?: Options;
+  }>();
+});
+
+test("chat response type", () => {
+  // if generic is not provided, `options` is not required
+  expectTypeOf<ChatResponse>().toMatchTypeOf<{
+    message: ChatMessage;
+    raw: object | null;
+  }>();
+  expectTypeOf<ChatResponse>().toMatchTypeOf<{
+    message: ChatMessage;
+    raw: object | null;
+    options?: Record<string, unknown>;
+  }>();
+  expectTypeOf<ChatResponse>().not.toMatchTypeOf<{
+    message: ChatMessage;
+    raw: object | null;
+    options: Record<string, unknown>;
+  }>();
+  type Options = {
+    a: string;
+    b: number;
+  };
+  expectTypeOf<ChatResponse<Options>>().toMatchTypeOf<{
+    message: ChatMessage<Options>;
+    raw: object | null;
+  }>();
+});