diff --git a/.changeset/clever-monkeys-switch.md b/.changeset/clever-monkeys-switch.md
new file mode 100644
index 0000000000000000000000000000000000000000..3fa003bbef7ce22db3b59cefca9827d59f7f6cda
--- /dev/null
+++ b/.changeset/clever-monkeys-switch.md
@@ -0,0 +1,7 @@
+---
+"@llamaindex/vercel": patch
+"@llamaindex/doc": patch
+"@llamaindex/examples": patch
+---
+
+Add Vercel AI SDK tool adapter that exposes a LlamaIndex index as a query engine tool
diff --git a/apps/next/src/content/docs/llamaindex/integration/meta.json b/apps/next/src/content/docs/llamaindex/integration/meta.json
index 2d671d4fa2e4ebc05331538a9c09aba6254171d8..606fc4d320ca13b989e867c6e5ef673469178cc5 100644
--- a/apps/next/src/content/docs/llamaindex/integration/meta.json
+++ b/apps/next/src/content/docs/llamaindex/integration/meta.json
@@ -1,5 +1,5 @@
 {
   "title": "Integration",
   "description": "See our integrations",
-  "pages": ["open-llm-metry", "lang-trace"]
+  "pages": ["open-llm-metry", "lang-trace", "vercel"]
 }
diff --git a/apps/next/src/content/docs/llamaindex/integration/vercel.mdx b/apps/next/src/content/docs/llamaindex/integration/vercel.mdx
new file mode 100644
index 0000000000000000000000000000000000000000..1824677a5042aed02ca7a8a0ed685ccc41636d3d
--- /dev/null
+++ b/apps/next/src/content/docs/llamaindex/integration/vercel.mdx
@@ -0,0 +1,97 @@
+---
+title: Vercel
+description: Integrate LlamaIndex with Vercel's AI SDK
+---
+
+LlamaIndex integrates with Vercel's AI SDK, allowing you to build search and retrieval applications over your documents. Below are examples of how to use LlamaIndex with `streamText` from the Vercel AI SDK.
+
+## Setup
+
+First, install the required dependencies:
+
+```bash
+npm install llamaindex @llamaindex/vercel @ai-sdk/openai ai
+```
+
+## Using Local Vector Store
+
+Here's how to create a simple vector store index and query it using Vercel's AI SDK:
+
+```typescript
+import { openai } from "@ai-sdk/openai";
+import { llamaindex } from "@llamaindex/vercel";
+import { streamText } from "ai";
+import { Document, VectorStoreIndex } from "llamaindex";
+
+// Create an index from your documents (yourText is your raw text content)
+const document = new Document({ text: yourText, id_: "unique-id" });
+const index = await VectorStoreIndex.fromDocuments([document]);
+
+// Create a query tool
+const queryTool = llamaindex({
+  index,
+  description: "Search through the documents", // optional
+});
+
+// Use the tool with Vercel's AI SDK
+streamText({
+  tools: { queryTool },
+  prompt: "Your question here",
+  model: openai("gpt-4"),
+  onFinish({ response }) {
+    console.log("Response:", response.messages); // log the response
+  },
+}).toDataStream();
+```
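+
+You can also consume the output as a text stream directly, as the runnable examples in this repository do. A minimal sketch, assuming the `streamText` call above is assigned to a `result` variable:
+
+```typescript
+for await (const textPart of result.textStream) {
+  process.stdout.write(textPart);
+}
+```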
+
+## Using LlamaCloud
+
+For production deployments, you can use LlamaCloud to store and manage your documents:
+
+```typescript
+import { openai } from "@ai-sdk/openai";
+import { llamaindex } from "@llamaindex/vercel";
+import { streamText } from "ai";
+import { Document, LlamaCloudIndex } from "llamaindex";
+
+// Create a LlamaCloud index (create `document` as in the previous example)
+const index = await LlamaCloudIndex.fromDocuments({
+  documents: [document],
+  name: "your-index-name",
+  projectName: "your-project",
+  apiKey: process.env.LLAMA_CLOUD_API_KEY,
+});
+
+// Use it the same way as VectorStoreIndex
+const queryTool = llamaindex({
+  index,
+  description: "Search through the documents",
+});
+
+// Use the tool with Vercel's AI SDK
+streamText({
+  tools: { queryTool },
+  prompt: "Your question here",
+  model: openai("gpt-4"),
+}).toDataStream();
+```
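+
+The example passes the LlamaCloud API key via the `LLAMA_CLOUD_API_KEY` environment variable:
+
+```bash
+export LLAMA_CLOUD_API_KEY=your_api_key_here
+```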
+
+## Next Steps
+
+1. Explore [LlamaCloud](https://cloud.llamaindex.ai/) for managed document storage and retrieval
+2. Join our [Discord community](https://discord.gg/llamaindex) for support and discussions
+
diff --git a/examples/package.json b/examples/package.json
index 31c1ba06978653eccd2774c6cb7a474b8417753d..6b11abdc7107bc933618b989d9dff9105e36d944 100644
--- a/examples/package.json
+++ b/examples/package.json
@@ -3,6 +3,7 @@
   "private": true,
   "version": "0.0.16",
   "dependencies": {
+    "@ai-sdk/openai": "^1.0.5",
     "@aws-crypto/sha256-js": "^5.2.0",
     "@azure/cosmos": "^4.1.1",
     "@azure/identity": "^4.4.1",
@@ -10,10 +11,12 @@
     "@llamaindex/core": "^0.4.10",
     "@llamaindex/readers": "^1.0.11",
     "@llamaindex/workflow": "^0.0.6",
+    "@llamaindex/vercel": "^0.0.1",
     "@notionhq/client": "^2.2.15",
     "@pinecone-database/pinecone": "^4.0.0",
     "@vercel/postgres": "^0.10.0",
     "@zilliz/milvus2-sdk-node": "^2.4.6",
+    "ai": "^4.0.0",
     "chromadb": "^1.8.1",
     "commander": "^12.1.0",
     "dotenv": "^16.4.5",
diff --git a/examples/vercel/README.md b/examples/vercel/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..edb1f11f24d528b93af04d71e56db71b7f624725
--- /dev/null
+++ b/examples/vercel/README.md
@@ -0,0 +1,52 @@
+# Vercel Examples
+
+These examples demonstrate how to integrate LlamaIndexTS with Vercel's AI SDK, using LlamaIndex for search and retrieval with both a local vector store and LlamaCloud.
+
+## Setup
+
+To run these examples, first install the required dependencies from the parent folder `examples`:
+
+```bash
+npm i
+```
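+
+Both examples call OpenAI via `@ai-sdk/openai`, so make sure `OPENAI_API_KEY` is set in your environment.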
+
+## Running the Examples
+
+Run the examples from the parent folder `examples`. The following examples are available:
+
+### Vector Store Example
+
+Run the local vector store example with:
+
+```bash
+npx tsx vercel/vector-store.ts
+```
+
+This example demonstrates:
+
+- Creating a vector store index from one document
+- Using Vercel's AI SDK with LlamaIndex for streaming responses
+
+### LlamaCloud Example
+
+To run the LlamaCloud example:
+
+```bash
+npx tsx vercel/llamacloud.ts
+```
+
+This example requires a LlamaCloud API key and an embedding model, both set as environment variables:
+
+```bash
+export LLAMA_CLOUD_API_KEY=your_api_key_here
+export EMBEDDING_MODEL="text-embedding-3-small"
+```
+
+The example demonstrates:
+
+- Creating a LlamaCloud index from one document
+- Streaming responses using Vercel's AI SDK
+
+For more detailed information about the Vercel integration, check out [the documentation](https://ts.llamaindex.ai/docs/llamaindex/integration/vercel).
diff --git a/examples/vercel/llamacloud.ts b/examples/vercel/llamacloud.ts
new file mode 100644
index 0000000000000000000000000000000000000000..2fac09863d703bb69425d6f13b2178b59acd91b1
--- /dev/null
+++ b/examples/vercel/llamacloud.ts
@@ -0,0 +1,39 @@
+import { openai } from "@ai-sdk/openai";
+import { llamaindex } from "@llamaindex/vercel";
+import { streamText } from "ai";
+import { Document, LlamaCloudIndex } from "llamaindex";
+import fs from "node:fs/promises";
+
+async function main() {
+  const path = "node_modules/llamaindex/examples/abramov.txt";
+  const essay = await fs.readFile(path, "utf-8");
+  const document = new Document({ text: essay, id_: path });
+
+  const index = await LlamaCloudIndex.fromDocuments({
+    documents: [document],
+    name: "test-pipeline",
+    projectName: "Default",
+    apiKey: process.env.LLAMA_CLOUD_API_KEY,
+  });
+  console.log("Successfully created index");
+
+  const result = streamText({
+    model: openai("gpt-4o"),
+    prompt: "Cost of moving cat from Russia to UK?",
+    tools: {
+      queryTool: llamaindex({
+        index,
+        description:
+          "get information from your knowledge base to answer questions.", // optional description
+      }),
+    },
+    maxSteps: 5,
+  });
+
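+  // Print the streamed answer as text parts arrive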
+  for await (const textPart of result.textStream) {
+    process.stdout.write(textPart);
+  }
+}
+
+main().catch(console.error);
diff --git a/examples/vercel/vector-store.ts b/examples/vercel/vector-store.ts
new file mode 100644
index 0000000000000000000000000000000000000000..c61291ba385c445d224260ed4a03322c3eb5ca3f
--- /dev/null
+++ b/examples/vercel/vector-store.ts
@@ -0,0 +1,35 @@
+import { openai } from "@ai-sdk/openai";
+import { llamaindex } from "@llamaindex/vercel";
+import { streamText } from "ai";
+import { Document, VectorStoreIndex } from "llamaindex";
+
+import fs from "node:fs/promises";
+
+async function main() {
+  const path = "node_modules/llamaindex/examples/abramov.txt";
+  const essay = await fs.readFile(path, "utf-8");
+  const document = new Document({ text: essay, id_: path });
+
+  const index = await VectorStoreIndex.fromDocuments([document]);
+  console.log("Successfully created index");
+
+  const result = streamText({
+    model: openai("gpt-4o"),
+    prompt: "Cost of moving cat from Russia to UK?",
+    tools: {
+      queryTool: llamaindex({
+        index,
+        description:
+          "get information from your knowledge base to answer questions.", // optional description
+      }),
+    },
+    maxSteps: 5,
+  });
+
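+  // Print the streamed answer as text parts arrive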
+  for await (const textPart of result.textStream) {
+    process.stdout.write(textPart);
+  }
+}
+
+main().catch(console.error);
diff --git a/packages/providers/vercel/package.json b/packages/providers/vercel/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..484b16b632f6a70e9d7d1bbafc7af059e8e63e9a
--- /dev/null
+++ b/packages/providers/vercel/package.json
@@ -0,0 +1,50 @@
+{
+  "name": "@llamaindex/vercel",
+  "description": "Vercel Adapter for LlamaIndex",
+  "version": "0.0.1",
+  "type": "module",
+  "main": "./dist/index.cjs",
+  "module": "./dist/index.js",
+  "exports": {
+    ".": {
+      "edge-light": {
+        "types": "./dist/index.edge-light.d.ts",
+        "default": "./dist/index.edge-light.js"
+      },
+      "workerd": {
+        "types": "./dist/index.edge-light.d.ts",
+        "default": "./dist/index.edge-light.js"
+      },
+      "require": {
+        "types": "./dist/index.d.cts",
+        "default": "./dist/index.cjs"
+      },
+      "import": {
+        "types": "./dist/index.d.ts",
+        "default": "./dist/index.js"
+      }
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/run-llama/LlamaIndexTS.git",
+    "directory": "packages/providers/vercel"
+  },
+  "scripts": {
+    "build": "bunchee",
+    "dev": "bunchee --watch"
+  },
+  "devDependencies": {
+    "bunchee": "5.6.1"
+  },
+  "dependencies": {
+    "@llamaindex/core": "workspace:*",
+    "zod": "^3.23.8"
+  },
+  "peerDependencies": {
+    "ai": "^4.0.0"
+  }
+}
diff --git a/packages/providers/vercel/src/index.ts b/packages/providers/vercel/src/index.ts
new file mode 100644
index 0000000000000000000000000000000000000000..5484738837c573b9ddb66701752ebf2ea275ac52
--- /dev/null
+++ b/packages/providers/vercel/src/index.ts
@@ -0,0 +1 @@
+export { llamaindex } from "./tool";
diff --git a/packages/providers/vercel/src/tool.ts b/packages/providers/vercel/src/tool.ts
new file mode 100644
index 0000000000000000000000000000000000000000..a0720d94fc229fd1c45326ce18243c5d18e4f52b
--- /dev/null
+++ b/packages/providers/vercel/src/tool.ts
@@ -0,0 +1,34 @@
+import type { BaseQueryEngine } from "@llamaindex/core/query-engine";
+import { type CoreTool, tool } from "ai";
+import { z } from "zod";
+
+interface DatasourceIndex {
+  asQueryEngine: () => BaseQueryEngine;
+}
+
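+/**
+ * Wraps a LlamaIndex index (anything exposing `asQueryEngine`) as a
+ * Vercel AI SDK `CoreTool`, so it can be passed to functions like
+ * `streamText` via the `tools` option.
+ */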
+export function llamaindex({
+  index,
+  description,
+}: {
+  index: DatasourceIndex;
+  description?: string;
+}): CoreTool {
+  const queryEngine = index.asQueryEngine();
+  return tool({
+    description: description ?? "Get information about your documents.",
+    parameters: z.object({
+      query: z
+        .string()
+        .describe("The query to get information about your documents."),
+    }),
+    execute: async ({ query }) => {
+      const result = await queryEngine.query({ query });
+      return result.message.content ?? "No result found in documents.";
+    },
+  });
+}
diff --git a/packages/providers/vercel/tsconfig.json b/packages/providers/vercel/tsconfig.json
new file mode 100644
index 0000000000000000000000000000000000000000..3fad47fb71026680ff194da502936fedd967972a
--- /dev/null
+++ b/packages/providers/vercel/tsconfig.json
@@ -0,0 +1,19 @@
+{
+  "extends": "../../../tsconfig.json",
+  "compilerOptions": {
+    "target": "ESNext",
+    "module": "ESNext",
+    "moduleResolution": "bundler",
+    "outDir": "./lib",
+    "tsBuildInfoFile": "./lib/.tsbuildinfo"
+  },
+  "include": ["./src"],
+  "references": [
+    {
+      "path": "../../core/tsconfig.json"
+    },
+    {
+      "path": "../../env/tsconfig.json"
+    }
+  ]
+}
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index f8cf25d7e01e54a7096a914d542d52ca961527e8..0a3cfc25fc28d953df3450147d83112d4732ade3 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -587,6 +587,9 @@ importers:
 
   examples:
     dependencies:
+      '@ai-sdk/openai':
+        specifier: ^1.0.5
+        version: 1.0.5(zod@3.23.8)
       '@aws-crypto/sha256-js':
         specifier: ^5.2.0
         version: 5.2.0
@@ -605,6 +608,9 @@ importers:
       '@llamaindex/readers':
         specifier: ^1.0.11
         version: link:../packages/readers
+      '@llamaindex/vercel':
+        specifier: ^0.0.1
+        version: link:../packages/providers/vercel
       '@llamaindex/workflow':
         specifier: ^0.0.6
         version: link:../packages/workflow
@@ -620,6 +626,9 @@ importers:
       '@zilliz/milvus2-sdk-node':
         specifier: ^2.4.6
         version: 2.4.9
+      ai:
+        specifier: ^4.0.0
+        version: 4.0.6(react@19.0.0-rc-bf7e210c-20241017)(zod@3.23.8)
       chromadb:
         specifier: ^1.8.1
         version: 1.9.2(@google/generative-ai@0.21.0)(cohere-ai@7.14.0(@aws-sdk/client-sso-oidc@3.693.0(@aws-sdk/client-sts@3.693.0))(encoding@0.1.13))(encoding@0.1.13)(openai@4.72.0(encoding@0.1.13)(zod@3.23.8))
@@ -1366,6 +1375,22 @@ importers:
         specifier: 5.6.1
         version: 5.6.1(typescript@5.6.3)
 
+  packages/providers/vercel:
+    dependencies:
+      '@llamaindex/core':
+        specifier: workspace:*
+        version: link:../../core
+      ai:
+        specifier: ^4.0.0
+        version: 4.0.6(react@19.0.0-rc-bf7e210c-20241017)(zod@3.23.8)
+      zod:
+        specifier: ^3.23.8
+        version: 3.23.8
+    devDependencies:
+      bunchee:
+        specifier: 5.6.1
+        version: 5.6.1(typescript@5.6.3)
+
   packages/providers/vllm:
     dependencies:
       '@llamaindex/openai':
@@ -1525,6 +1550,12 @@ importers:
 
 packages:
 
+  '@ai-sdk/openai@1.0.5':
+    resolution: {integrity: sha512-JDCPBJQx9o3LgboBPaA55v+9EZ7Vm/ozy0+J5DIr2jJF8WETjeCnigdxixyzEy/Od4wX871jOTSuGffwNIi0kA==}
+    engines: {node: '>=18'}
+    peerDependencies:
+      zod: ^3.0.0
+
   '@ai-sdk/provider-utils@1.0.22':
     resolution: {integrity: sha512-YHK2rpj++wnLVc9vPGzGFP3Pjeld2MwhKinetA0zKXOoHAT/Jit5O8kZsxcSlJPu9wvcGT1UGZEjZrtO7PfFOQ==}
     engines: {node: '>=18'}
@@ -1543,6 +1574,15 @@ packages:
       zod:
         optional: true
 
+  '@ai-sdk/provider-utils@2.0.2':
+    resolution: {integrity: sha512-IAvhKhdlXqiSmvx/D4uNlFYCl8dWT+M9K+IuEcSgnE2Aj27GWu8sDIpAf4r4Voc+wOUkOECVKQhFo8g9pozdjA==}
+    engines: {node: '>=18'}
+    peerDependencies:
+      zod: ^3.0.0
+    peerDependenciesMeta:
+      zod:
+        optional: true
+
   '@ai-sdk/provider@0.0.26':
     resolution: {integrity: sha512-dQkfBDs2lTYpKM8389oopPdQgIU007GQyCbuPPrV+K6MtSII3HBfE0stUIMXUb44L+LK1t6GXPP7wjSzjO6uKg==}
     engines: {node: '>=18'}
@@ -1551,6 +1591,10 @@ packages:
     resolution: {integrity: sha512-Sj29AzooJ7SYvhPd+AAWt/E7j63E9+AzRnoMHUaJPRYzOd/WDrVNxxv85prF9gDcQ7XPVlSk9j6oAZV9/DXYpA==}
     engines: {node: '>=18'}
 
+  '@ai-sdk/provider@1.0.1':
+    resolution: {integrity: sha512-mV+3iNDkzUsZ0pR2jG0sVzU6xtQY5DtSCBy3JFycLp6PwjyLw/iodfL3MwdmMCRJWgs3dadcHejRnMvF9nGTBg==}
+    engines: {node: '>=18'}
+
   '@ai-sdk/react@0.0.70':
     resolution: {integrity: sha512-GnwbtjW4/4z7MleLiW+TOZC2M29eCg1tOUpuEiYFMmFNZK8mkrqM0PFZMo6UsYeUYMWqEOOcPOU9OQVJMJh7IQ==}
     engines: {node: '>=18'}
@@ -1575,6 +1619,18 @@ packages:
       zod:
         optional: true
 
+  '@ai-sdk/react@1.0.3':
+    resolution: {integrity: sha512-Mak7qIRlbgtP4I7EFoNKRIQTlABJHhgwrN8SV2WKKdmsfWK2RwcubQWz1hp88cQ0bpF6KxxjSY1UUnS/S9oR5g==}
+    engines: {node: '>=18'}
+    peerDependencies:
+      react: ^18 || ^19 || ^19.0.0-rc
+      zod: ^3.0.0
+    peerDependenciesMeta:
+      react:
+        optional: true
+      zod:
+        optional: true
+
   '@ai-sdk/solid@0.0.54':
     resolution: {integrity: sha512-96KWTVK+opdFeRubqrgaJXoNiDP89gNxFRWUp0PJOotZW816AbhUf4EnDjBjXTLjXL1n0h8tGSE9sZsRkj9wQQ==}
     engines: {node: '>=18'}
@@ -1611,6 +1667,15 @@ packages:
       zod:
         optional: true
 
+  '@ai-sdk/ui-utils@1.0.2':
+    resolution: {integrity: sha512-hHrUdeThGHu/rsGZBWQ9PjrAU9Htxgbo9MFyR5B/aWoNbBeXn1HLMY1+uMEnXL5pRPlmyVRjgIavWg7UgeNDOw==}
+    engines: {node: '>=18'}
+    peerDependencies:
+      zod: ^3.0.0
+    peerDependenciesMeta:
+      zod:
+        optional: true
+
   '@ai-sdk/vue@0.0.59':
     resolution: {integrity: sha512-+ofYlnqdc8c4F6tM0IKF0+7NagZRAiqBJpGDJ+6EYhDW8FHLUP/JFBgu32SjxSxC6IKFZxEnl68ZoP/Z38EMlw==}
     engines: {node: '>=18'}
@@ -5918,6 +5983,18 @@ packages:
       zod:
         optional: true
 
+  ai@4.0.6:
+    resolution: {integrity: sha512-TD7fH0LymjIYWmdQViB5SoBb1iuuDPOZ7RMU3W9r4SeUf68RzWyixz118QHQTENNqPiGA6vs5NDVAmZOnhzqYA==}
+    engines: {node: '>=18'}
+    peerDependencies:
+      react: ^18 || ^19 || ^19.0.0-rc
+      zod: ^3.0.0
+    peerDependenciesMeta:
+      react:
+        optional: true
+      zod:
+        optional: true
+
   ajv-formats@2.1.1:
     resolution: {integrity: sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==}
     peerDependencies:
@@ -13286,6 +13363,12 @@ packages:
 
 snapshots:
 
+  '@ai-sdk/openai@1.0.5(zod@3.23.8)':
+    dependencies:
+      '@ai-sdk/provider': 1.0.1
+      '@ai-sdk/provider-utils': 2.0.2(zod@3.23.8)
+      zod: 3.23.8
+
   '@ai-sdk/provider-utils@1.0.22(zod@3.23.8)':
     dependencies:
       '@ai-sdk/provider': 0.0.26
@@ -13304,6 +13387,15 @@ snapshots:
     optionalDependencies:
       zod: 3.23.8
 
+  '@ai-sdk/provider-utils@2.0.2(zod@3.23.8)':
+    dependencies:
+      '@ai-sdk/provider': 1.0.1
+      eventsource-parser: 3.0.0
+      nanoid: 3.3.7
+      secure-json-parse: 2.7.0
+    optionalDependencies:
+      zod: 3.23.8
+
   '@ai-sdk/provider@0.0.26':
     dependencies:
       json-schema: 0.4.0
@@ -13312,6 +13404,10 @@ snapshots:
     dependencies:
       json-schema: 0.4.0
 
+  '@ai-sdk/provider@1.0.1':
+    dependencies:
+      json-schema: 0.4.0
+
   '@ai-sdk/react@0.0.70(react@18.3.1)(zod@3.23.8)':
     dependencies:
       '@ai-sdk/provider-utils': 1.0.22(zod@3.23.8)
@@ -13332,6 +13428,16 @@ snapshots:
       react: 18.3.1
       zod: 3.23.8
 
+  '@ai-sdk/react@1.0.3(react@19.0.0-rc-bf7e210c-20241017)(zod@3.23.8)':
+    dependencies:
+      '@ai-sdk/provider-utils': 2.0.2(zod@3.23.8)
+      '@ai-sdk/ui-utils': 1.0.2(zod@3.23.8)
+      swr: 2.2.5(react@19.0.0-rc-bf7e210c-20241017)
+      throttleit: 2.1.0
+    optionalDependencies:
+      react: 19.0.0-rc-bf7e210c-20241017
+      zod: 3.23.8
+
   '@ai-sdk/solid@0.0.54(zod@3.23.8)':
     dependencies:
       '@ai-sdk/provider-utils': 1.0.22(zod@3.23.8)
@@ -13367,6 +13473,14 @@ snapshots:
     optionalDependencies:
       zod: 3.23.8
 
+  '@ai-sdk/ui-utils@1.0.2(zod@3.23.8)':
+    dependencies:
+      '@ai-sdk/provider': 1.0.1
+      '@ai-sdk/provider-utils': 2.0.2(zod@3.23.8)
+      zod-to-json-schema: 3.23.5(zod@3.23.8)
+    optionalDependencies:
+      zod: 3.23.8
+
   '@ai-sdk/vue@0.0.59(vue@3.5.12(typescript@5.6.3))(zod@3.23.8)':
     dependencies:
       '@ai-sdk/provider-utils': 1.0.22(zod@3.23.8)
@@ -19107,6 +19221,19 @@ snapshots:
       react: 18.3.1
       zod: 3.23.8
 
+  ai@4.0.6(react@19.0.0-rc-bf7e210c-20241017)(zod@3.23.8):
+    dependencies:
+      '@ai-sdk/provider': 1.0.1
+      '@ai-sdk/provider-utils': 2.0.2(zod@3.23.8)
+      '@ai-sdk/react': 1.0.3(react@19.0.0-rc-bf7e210c-20241017)(zod@3.23.8)
+      '@ai-sdk/ui-utils': 1.0.2(zod@3.23.8)
+      '@opentelemetry/api': 1.9.0
+      jsondiffpatch: 0.6.0
+      zod-to-json-schema: 3.23.5(zod@3.23.8)
+    optionalDependencies:
+      react: 19.0.0-rc-bf7e210c-20241017
+      zod: 3.23.8
+
   ajv-formats@2.1.1(ajv@8.17.1):
     optionalDependencies:
       ajv: 8.17.1
@@ -26905,6 +27032,12 @@ snapshots:
       react: 18.3.1
       use-sync-external-store: 1.2.2(react@18.3.1)
 
+  swr@2.2.5(react@19.0.0-rc-bf7e210c-20241017):
+    dependencies:
+      client-only: 0.0.1
+      react: 19.0.0-rc-bf7e210c-20241017
+      use-sync-external-store: 1.2.2(react@19.0.0-rc-bf7e210c-20241017)
+
   swrev@4.0.0: {}
 
   swrv@1.0.4(vue@3.5.12(typescript@5.6.3)):
@@ -27549,6 +27682,10 @@ snapshots:
     dependencies:
       react: 18.3.1
 
+  use-sync-external-store@1.2.2(react@19.0.0-rc-bf7e210c-20241017):
+    dependencies:
+      react: 19.0.0-rc-bf7e210c-20241017
+
   util-deprecate@1.0.2: {}
 
   utila@0.4.0: {}
diff --git a/tsconfig.json b/tsconfig.json
index f8ea549d3ce48e876b6a3eafae1cbdbb4267cd7c..ee6c508cabf104ce4e9eb5e026faeb83875d3fd8 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -56,6 +56,9 @@
     {
       "path": "./packages/providers/vllm/tsconfig.json"
     },
+    {
+      "path": "./packages/providers/vercel/tsconfig.json"
+    },
     {
       "path": "./packages/cloud/tsconfig.json"
     },