diff --git a/.changeset/pink-dolls-destroy.md b/.changeset/pink-dolls-destroy.md
new file mode 100644
index 0000000000000000000000000000000000000000..33f1e17d8e04f7f55065038d30e400eae9f464eb
--- /dev/null
+++ b/.changeset/pink-dolls-destroy.md
@@ -0,0 +1,6 @@
+---
+"llamaindex": patch
+"@llamaindex/vllm": patch
+---
+
+feat: add vLLM support
diff --git a/examples/vllm.ts b/examples/vllm.ts
new file mode 100644
index 0000000000000000000000000000000000000000..f65cd375ea7adbd6710be3ad868def9e60a0c258
--- /dev/null
+++ b/examples/vllm.ts
@@ -0,0 +1,16 @@
+import { VLLM } from "llamaindex";
+
+const llm = new VLLM({
+  model: "NousResearch/Meta-Llama-3-8B-Instruct",
+});
+
+const response = await llm.chat({
+  messages: [
+    {
+      role: "user",
+      content: "Hello?",
+    },
+  ],
+});
+
+console.log(response.message.content);
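
For longer generations the same call also supports streaming. A minimal sketch, assuming the standard LlamaIndexTS `chat({ ..., stream: true })` overload (not part of this diff) and a vLLM server already listening on the default endpoint:

  import { VLLM } from "llamaindex";

  const llm = new VLLM({
    model: "NousResearch/Meta-Llama-3-8B-Instruct",
  });

  // With stream: true, chat() resolves to an async iterable of chunks.
  const stream = await llm.chat({
    messages: [{ role: "user", content: "Hello?" }],
    stream: true,
  });

  for await (const chunk of stream) {
    process.stdout.write(chunk.delta);
  }
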
diff --git a/packages/llamaindex/package.json b/packages/llamaindex/package.json
index 124edc354fc087863363233e083ec1207e09a0be..d65a4474a5e07fbba2d33d872a8f6f324d191956 100644
--- a/packages/llamaindex/package.json
+++ b/packages/llamaindex/package.json
@@ -44,6 +44,7 @@
     "@llamaindex/portkey-ai": "workspace:*",
     "@llamaindex/readers": "workspace:*",
     "@llamaindex/replicate": "workspace:*",
+    "@llamaindex/vllm": "workspace:*",
     "@mistralai/mistralai": "^1.0.4",
     "@mixedbread-ai/sdk": "^2.2.11",
     "@pinecone-database/pinecone": "^3.0.2",
@@ -85,10 +86,10 @@
     }
   },
   "devDependencies": {
+    "@huggingface/transformers": "^3.0.2",
     "@swc/cli": "^0.5.0",
     "@swc/core": "^1.7.22",
     "@vercel/postgres": "^0.10.0",
-    "@huggingface/transformers": "^3.0.2",
     "concurrently": "^9.1.0",
     "glob": "^11.0.0",
     "pg": "^8.12.0",
diff --git a/packages/llamaindex/src/llm/index.ts b/packages/llamaindex/src/llm/index.ts
index 5596440a0ce24099adeb6d692f4299e67c5855e8..83e4b7f08a0a186c3a192df1cb02959229dd0e29 100644
--- a/packages/llamaindex/src/llm/index.ts
+++ b/packages/llamaindex/src/llm/index.ts
@@ -1,3 +1,4 @@
+export { VLLM, type VLLMParams } from "@llamaindex/vllm";
 export {
   ALL_AVAILABLE_ANTHROPIC_LEGACY_MODELS,
   ALL_AVAILABLE_ANTHROPIC_MODELS,
@@ -6,7 +7,6 @@ export {
 } from "./anthropic.js";
 export { FireworksLLM } from "./fireworks.js";
 export { Gemini, GeminiSession } from "./gemini/base.js";
-
 export {
   GEMINI_MODEL,
   type GoogleGeminiSessionOptions,
diff --git a/packages/providers/vllm/package.json b/packages/providers/vllm/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..b72a2001dc1f347a41b9d5251f0b8c6bb01539d2
--- /dev/null
+++ b/packages/providers/vllm/package.json
@@ -0,0 +1,38 @@
+{
+  "name": "@llamaindex/vllm",
+  "description": "vLLM Adapter for LlamaIndex",
+  "version": "0.0.1",
+  "type": "module",
+  "main": "./dist/index.cjs",
+  "module": "./dist/index.js",
+  "exports": {
+    ".": {
+      "require": {
+        "types": "./dist/index.d.cts",
+        "default": "./dist/index.cjs"
+      },
+      "import": {
+        "types": "./dist/index.d.ts",
+        "default": "./dist/index.js"
+      }
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/run-llama/LlamaIndexTS.git",
+    "directory": "packages/providers/vllm"
+  },
+  "scripts": {
+    "build": "bunchee",
+    "dev": "bunchee --watch"
+  },
+  "devDependencies": {
+    "bunchee": "5.6.1"
+  },
+  "dependencies": {
+    "@llamaindex/openai": "workspace:*"
+  }
+}
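
The exports map above gives the built package entry points for both module systems. A quick illustration, assuming dist/ has been produced by bunchee:

  // ESM: resolved through the "import" condition to ./dist/index.js
  import { VLLM } from "@llamaindex/vllm";

  // CJS: resolved through the "require" condition to ./dist/index.cjs
  // const { VLLM } = require("@llamaindex/vllm");
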
diff --git a/packages/providers/vllm/src/index.ts b/packages/providers/vllm/src/index.ts
new file mode 100644
index 0000000000000000000000000000000000000000..2c5bc5792a867319085a279f3db206a7650926d7
--- /dev/null
+++ b/packages/providers/vllm/src/index.ts
@@ -0,0 +1 @@
+export { VLLM, type VLLMParams } from "./llm";
diff --git a/packages/providers/vllm/src/llm.ts b/packages/providers/vllm/src/llm.ts
new file mode 100644
index 0000000000000000000000000000000000000000..28b56afc04fdb412d5bdb5b76248406d3f6e25dd
--- /dev/null
+++ b/packages/providers/vllm/src/llm.ts
@@ -0,0 +1,27 @@
+/**
+ * vLLM
+ *
+ * Adapter for vLLM's OpenAI-compatible server, built on the OpenAI client.
+ *
+ * https://docs.vllm.ai/en/latest/index.html
+ *
+ * @module
+ */
+import { OpenAI } from "@llamaindex/openai";
+
+export type VLLMParams = {
+  model: string;
+  baseURL?: string; // defaults to http://localhost:8000/v1
+};
+
+export class VLLM extends OpenAI {
+  constructor(params: VLLMParams) {
+    super({
+      additionalSessionOptions: {
+        baseURL: params.baseURL ?? "http://localhost:8000/v1",
+      },
+      model: params.model,
+      apiKey: "token-abc123", // dummy key; vLLM only checks it when served with --api-key
+    });
+  }
+}
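
With params.baseURL now honored by the constructor above, the adapter can target a vLLM server on another host. A hedged usage sketch; the host and port are placeholders, and the server is assumed to run without --api-key so the dummy key passes:

  import { VLLM } from "@llamaindex/vllm";

  const remote = new VLLM({
    model: "NousResearch/Meta-Llama-3-8B-Instruct",
    // Hypothetical host; omit baseURL to fall back to http://localhost:8000/v1.
    baseURL: "http://gpu-box.internal:8000/v1",
  });

  const response = await remote.chat({
    messages: [{ role: "user", content: "ping" }],
  });
  console.log(response.message.content);
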
diff --git a/packages/providers/vllm/tsconfig.json b/packages/providers/vllm/tsconfig.json
new file mode 100644
index 0000000000000000000000000000000000000000..7af4eb3149e08316df0584aa46d7e09cd1a94e9c
--- /dev/null
+++ b/packages/providers/vllm/tsconfig.json
@@ -0,0 +1,16 @@
+{
+  "extends": "../../../tsconfig.json",
+  "compilerOptions": {
+    "target": "ESNext",
+    "module": "ESNext",
+    "moduleResolution": "bundler",
+    "outDir": "./lib",
+    "tsBuildInfoFile": "./lib/.tsbuildinfo"
+  },
+  "include": ["./src", "package.json"],
+  "references": [
+    {
+      "path": "../openai/tsconfig.json"
+    }
+  ]
+}
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 3d650bd2c1feb5a75de7927e5f118b709fa8c343..c1e41071ebf4e6be0708bdb2ff14b38b4eaef364 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -1026,6 +1026,9 @@ importers:
       '@llamaindex/replicate':
         specifier: workspace:*
         version: link:../providers/replicate
+      '@llamaindex/vllm':
+        specifier: workspace:*
+        version: link:../providers/vllm
       '@mistralai/mistralai':
         specifier: ^1.0.4
         version: 1.1.0(zod@3.23.8)
@@ -1350,6 +1353,16 @@ importers:
         specifier: 5.6.1
         version: 5.6.1(typescript@5.6.3)
 
+  packages/providers/vllm:
+    dependencies:
+      '@llamaindex/openai':
+        specifier: workspace:*
+        version: link:../openai
+    devDependencies:
+      bunchee:
+        specifier: 5.6.1
+        version: 5.6.1(typescript@5.6.3)
+
   packages/readers:
     dependencies:
       '@azure/cosmos':
diff --git a/tsconfig.json b/tsconfig.json
index 0d73a1e452e82f08918868dfab63e91298a92f94..3be6d6b8d5047b4b78fbb1248cb04d453bfff2bc 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -53,6 +53,9 @@
     {
       "path": "./packages/providers/ollama/tsconfig.json"
     },
+    {
+      "path": "./packages/providers/vllm/tsconfig.json"
+    },
     {
       "path": "./packages/cloud/tsconfig.json"
     },