diff --git a/packages/create-llama/templates/index.ts b/packages/create-llama/templates/index.ts
index 5bf8a7d3b2e1fbea897c702c32db32ac2fad7737..6f338b635f4b69588cfa630c33004568cf5282f8 100644
--- a/packages/create-llama/templates/index.ts
+++ b/packages/create-llama/templates/index.ts
@@ -72,7 +72,7 @@ export const installTemplate = async ({
     const routeFile = path.join(
       root,
       relativeEngineDestPath,
-      framework === "nextjs" ? "route.ts" : "llm.controller.ts",
+      framework === "nextjs" ? "route.ts" : "chat.controller.ts",
     );
     const routeFileContent = await fs.readFile(routeFile, "utf8");
     const newContent = routeFileContent.replace(
diff --git a/packages/create-llama/templates/simple/express/README-template.md b/packages/create-llama/templates/simple/express/README-template.md
index 2b4d1153161e4a68fbb7bcd1e7c2f3d79b77a808..2da2865d5df6d375677892968d9195f3bdaaa7f2 100644
--- a/packages/create-llama/templates/simple/express/README-template.md
+++ b/packages/create-llama/templates/simple/express/README-template.md
@@ -14,18 +14,24 @@ Second, run the development server:
 npm run dev
 ```
 
-Then call the express API endpoint `/api/llm` to see the result:
+Then call the Express API endpoint `/api/chat` to see the result:
 
 ```
-curl --location 'localhost:3000/api/llm' \
+curl --location 'localhost:3000/api/chat' \
 --header 'Content-Type: application/json' \
---data '{
-    "message": "Hello",
-    "chatHistory": []
-}'
+--data '{ "messages": [{ "role": "user", "content": "Hello" }] }'
 ```
 
-You can start editing the API by modifying `src/controllers/llm.controller.ts`. The endpoint auto-updates as you save the file.
+You can start editing the API by modifying `src/controllers/chat.controller.ts`. The endpoint auto-updates as you save the file.
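+
+For example, assuming you keep the default OpenAI setup, you could swap the model used by the chat engine:
+
+```
+// src/controllers/chat.controller.ts
+const llm = new OpenAI({
+  model: "gpt-4", // the template default is "gpt-3.5-turbo"
+});
+```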
 
 ## Learn More
 
diff --git a/packages/create-llama/templates/simple/express/index.ts b/packages/create-llama/templates/simple/express/index.ts
index 157555572d8ea79b314bf6e9215750d425fedbce..b73784295150b719c9bf3e42a2bb36ceddfc0622 100644
--- a/packages/create-llama/templates/simple/express/index.ts
+++ b/packages/create-llama/templates/simple/express/index.ts
@@ -1,5 +1,5 @@
 import express, { Express, Request, Response } from "express";
-import llmRouter from "./src/routes/llm.route";
+import chatRouter from "./src/routes/chat.route";
 
 const app: Express = express();
 const port = 3000;
@@ -10,7 +10,7 @@ app.get("/", (req: Request, res: Response) => {
   res.send("LlamaIndex Express Server");
 });
 
-app.use("/api/llm", llmRouter);
+app.use("/api/chat", chatRouter);
 
 app.listen(port, () => {
   console.log(`⚡️[server]: Server is running at http://localhost:${port}`);
diff --git a/packages/create-llama/templates/simple/express/package.json b/packages/create-llama/templates/simple/express/package.json
index 3b57d610eddedb61121642a7681797b37490f103..1e20573d87f21dce3667575c97ab550063126103 100644
--- a/packages/create-llama/templates/simple/express/package.json
+++ b/packages/create-llama/templates/simple/express/package.json
@@ -1,11 +1,12 @@
 {
   "name": "llama-index-express",
   "version": "1.0.0",
-  "main": "index.js",
+  "main": "dist/index.js",
+  "type": "module",
   "scripts": {
-    "build": "tsc",
+    "build": "tsup index.ts --format esm --dts",
     "start": "node dist/index.js",
-    "dev": "concurrently \"tsc --watch\" \"nodemon -q dist/index.js\""
+    "dev": "concurrently \"tsup index.ts --format esm --dts --watch\" \"nodemon -q dist/index.js\""
   },
   "dependencies": {
     "express": "^4",
@@ -15,8 +16,9 @@
     "@types/express": "^4",
     "@types/node": "^20",
     "concurrently": "^8",
+    "eslint": "^8",
     "nodemon": "^3",
-    "typescript": "^5",
-    "eslint": "^8"
+    "tsup": "^7",
+    "typescript": "^5"
   }
 }
\ No newline at end of file
diff --git a/packages/create-llama/templates/simple/express/src/controllers/llm.controller.ts b/packages/create-llama/templates/simple/express/src/controllers/chat.controller.ts
similarity index 67%
rename from packages/create-llama/templates/simple/express/src/controllers/llm.controller.ts
rename to packages/create-llama/templates/simple/express/src/controllers/chat.controller.ts
index 7647b649baa7c24a72f80823779b9f7f6a9f8542..b11e1050299fb5359306b5973e4eeb310083287a 100644
--- a/packages/create-llama/templates/simple/express/src/controllers/llm.controller.ts
+++ b/packages/create-llama/templates/simple/express/src/controllers/chat.controller.ts
@@ -4,16 +4,12 @@ import { createChatEngine } from "../../../../engines/context";
 
 export const chat = async (req: Request, res: Response, next: NextFunction) => {
   try {
-    const {
-      message,
-      chatHistory,
-    }: {
-      message: string;
-      chatHistory: ChatMessage[];
-    } = req.body;
-    if (!message || !chatHistory) {
+    const { messages }: { messages: ChatMessage[] } = req.body;
+    const lastMessage = Array.isArray(messages) ? messages.pop() : undefined;
+    if (!lastMessage || lastMessage.role !== "user") {
       return res.status(400).json({
-        error: "message, chatHistory are required in the request body",
+        error:
+          "messages are required in the request body and the last message must be from the user",
       });
     }
 
@@ -23,7 +19,7 @@ export const chat = async (req: Request, res: Response, next: NextFunction) => {
 
     const chatEngine = await createChatEngine(llm);
 
-    const response = await chatEngine.chat(message, chatHistory);
+    const response = await chatEngine.chat(lastMessage.content, messages);
     const result: ChatMessage = {
       role: "assistant",
       content: response.response,
diff --git a/packages/create-llama/templates/simple/express/src/routes/llm.route.ts b/packages/create-llama/templates/simple/express/src/routes/chat.route.ts
similarity index 70%
rename from packages/create-llama/templates/simple/express/src/routes/llm.route.ts
rename to packages/create-llama/templates/simple/express/src/routes/chat.route.ts
index 3711c71b95997be6cd201b3059224d8807983fa0..bdfeb08534b9a2c987c4d23a4bb5c6df50075908 100644
--- a/packages/create-llama/templates/simple/express/src/routes/llm.route.ts
+++ b/packages/create-llama/templates/simple/express/src/routes/chat.route.ts
@@ -1,8 +1,8 @@
 import express from "express";
-import { chat } from "../controllers/llm.controller";
+import { chat } from "../controllers/chat.controller";
 
-const llmRouter = express.Router();
+const chatRouter = express.Router();
 
-llmRouter.route("/").post(chat);
+chatRouter.route("/").post(chat);
 
-export default llmRouter;
+export default chatRouter;
diff --git a/packages/create-llama/templates/simple/express/tsconfig.json b/packages/create-llama/templates/simple/express/tsconfig.json
index fd70902d6d230b81198dfd1a21bb7c7288b033df..57c4a62815ab2017702f080ca64871369eb5caaf 100644
--- a/packages/create-llama/templates/simple/express/tsconfig.json
+++ b/packages/create-llama/templates/simple/express/tsconfig.json
@@ -1,8 +1,6 @@
 {
   "compilerOptions": {
     "target": "es2016",
-    "module": "commonjs",
-    "outDir": "./dist",
     "esModuleInterop": true,
     "forceConsistentCasingInFileNames": true,
     "strict": true,
diff --git a/packages/create-llama/templates/streaming/express/README-template.md b/packages/create-llama/templates/streaming/express/README-template.md
new file mode 100644
index 0000000000000000000000000000000000000000..2da2865d5df6d375677892968d9195f3bdaaa7f2
--- /dev/null
+++ b/packages/create-llama/templates/streaming/express/README-template.md
@@ -0,0 +1,42 @@
+This is a [LlamaIndex](https://www.llamaindex.ai/) project using [Express](https://expressjs.com/) bootstrapped with [`create-llama`](https://github.com/run-llama/LlamaIndexTS/tree/main/packages/create-llama).
+
+## Getting Started
+
+First, install the dependencies:
+
+```
+npm install
+```
+
+Second, run the development server:
+
+```
+npm run dev
+```
+
+Then call the Express API endpoint `/api/chat` to see the result:
+
+```
+curl --location 'localhost:3000/api/chat' \
+--header 'Content-Type: application/json' \
+--data '{ "messages": [{ "role": "user", "content": "Hello" }] }'
+```
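+
+Since this endpoint streams its response, you can add curl's `-N`/`--no-buffer` flag to print tokens as they arrive:
+
+```
+curl -N --location 'localhost:3000/api/chat' \
+--header 'Content-Type: application/json' \
+--data '{ "messages": [{ "role": "user", "content": "Hello" }] }'
+```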
+
+You can start editing the API by modifying `src/controllers/chat.controller.ts`. The endpoint auto-updates as you save the file.
+
+## Learn More
+
+To learn more about LlamaIndex, take a look at the following resources:
+
+- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex (Python features).
+- [LlamaIndexTS Documentation](https://ts.llamaindex.ai) - learn about LlamaIndex (TypeScript features).
+
+You can check out [the LlamaIndexTS GitHub repository](https://github.com/run-llama/LlamaIndexTS) - your feedback and contributions are welcome!
diff --git a/packages/create-llama/templates/streaming/express/eslintrc.json b/packages/create-llama/templates/streaming/express/eslintrc.json
new file mode 100644
index 0000000000000000000000000000000000000000..c19581799d886ce5be17def8ad268c2f4b175109
--- /dev/null
+++ b/packages/create-llama/templates/streaming/express/eslintrc.json
@@ -0,0 +1,3 @@
+{
+  "extends": "eslint:recommended"
+}
\ No newline at end of file
diff --git a/packages/create-llama/templates/streaming/express/index.ts b/packages/create-llama/templates/streaming/express/index.ts
new file mode 100644
index 0000000000000000000000000000000000000000..b73784295150b719c9bf3e42a2bb36ceddfc0622
--- /dev/null
+++ b/packages/create-llama/templates/streaming/express/index.ts
@@ -0,0 +1,17 @@
+import express, { Express, Request, Response } from "express";
+import chatRouter from "./src/routes/chat.route";
+
+const app: Express = express();
+const port = 3000;
+
+app.use(express.json());
+
+app.get("/", (req: Request, res: Response) => {
+  res.send("LlamaIndex Express Server");
+});
+
+app.use("/api/chat", chatRouter);
+
+app.listen(port, () => {
+  console.log(`⚡️[server]: Server is running at http://localhost:${port}`);
+});
diff --git a/packages/create-llama/templates/streaming/express/package.json b/packages/create-llama/templates/streaming/express/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..ac90ee8a6fce4773d79b27cc27fdbcc8b75b9143
--- /dev/null
+++ b/packages/create-llama/templates/streaming/express/package.json
@@ -0,0 +1,25 @@
+{
+  "name": "llama-index-express-streaming",
+  "version": "1.0.0",
+  "main": "dist/index.js",
+  "type": "module",
+  "scripts": {
+    "build": "tsup index.ts --format esm --dts",
+    "start": "node dist/index.js",
+    "dev": "concurrently \"tsup index.ts --format esm --dts --watch\" \"nodemon -q dist/index.js\""
+  },
+  "dependencies": {
+    "ai": "^2",
+    "express": "^4",
+    "llamaindex": "0.0.31"
+  },
+  "devDependencies": {
+    "@types/express": "^4",
+    "@types/node": "^20",
+    "concurrently": "^8",
+    "eslint": "^8",
+    "nodemon": "^3",
+    "tsup": "^7",
+    "typescript": "^5"
+  }
+}
\ No newline at end of file
diff --git a/packages/create-llama/templates/streaming/express/src/controllers/chat.controller.ts b/packages/create-llama/templates/streaming/express/src/controllers/chat.controller.ts
new file mode 100644
index 0000000000000000000000000000000000000000..58f96b035f2a974b1caea8ccfc4429dbd5192ce4
--- /dev/null
+++ b/packages/create-llama/templates/streaming/express/src/controllers/chat.controller.ts
@@ -0,0 +1,38 @@
+import { streamToResponse } from "ai";
+import { NextFunction, Request, Response } from "express";
+import { ChatMessage, OpenAI } from "llamaindex";
+import { createChatEngine } from "../../../../engines/context";
+import { LlamaIndexStream } from "./llamaindex-stream";
+
+export const chat = async (req: Request, res: Response, next: NextFunction) => {
+  try {
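+    // Read the chat history from the request body; the last entry is the new user message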
+    const { messages }: { messages: ChatMessage[] } = req.body;
+    const lastMessage = Array.isArray(messages) ? messages.pop() : undefined;
+    if (!lastMessage || lastMessage.role !== "user") {
+      return res.status(400).json({
+        error:
+          "messages are required in the request body and the last message must be from the user",
+      });
+    }
+
+    const llm = new OpenAI({
+      model: "gpt-3.5-turbo",
+    });
+
+    const chatEngine = await createChatEngine(llm);
+
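+    // The third argument asks the chat engine for a streaming response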
+    const response = await chatEngine.chat(lastMessage.content, messages, true);
+
+    // Transform the response into a readable stream
+    const stream = LlamaIndexStream(response);
+
+    streamToResponse(stream, res);
+  } catch (error) {
+    console.error("[LlamaIndex]", error);
+    return res.status(500).json({
+      error: (error as Error).message,
+    });
+  }
+};
diff --git a/packages/create-llama/templates/streaming/express/src/controllers/llamaindex-stream.ts b/packages/create-llama/templates/streaming/express/src/controllers/llamaindex-stream.ts
new file mode 100644
index 0000000000000000000000000000000000000000..12328de875d8a59a5501db7fbf2aa0d763a3dbaa
--- /dev/null
+++ b/packages/create-llama/templates/streaming/express/src/controllers/llamaindex-stream.ts
@@ -0,0 +1,37 @@
+import {
+  createCallbacksTransformer,
+  createStreamDataTransformer,
+  trimStartOfStreamHelper,
+  type AIStreamCallbacksAndOptions,
+} from "ai";
+
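+// Wrap the chat engine's async generator in a web-standard ReadableStream of text chunks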
+function createParser(res: AsyncGenerator<any>) {
+  const trimStartOfStream = trimStartOfStreamHelper();
+  return new ReadableStream<string>({
+    async pull(controller): Promise<void> {
+      const { value, done } = await res.next();
+      if (done) {
+        controller.close();
+        return;
+      }
+
+      const text = trimStartOfStream(value ?? "");
+      if (text) {
+        controller.enqueue(text);
+      }
+    },
+  });
+}
+
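+// Adapt a LlamaIndex streaming response to the stream format used by the Vercel AI SDK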
+export function LlamaIndexStream(
+  res: AsyncGenerator<any>,
+  callbacks?: AIStreamCallbacksAndOptions,
+): ReadableStream {
+  return createParser(res)
+    .pipeThrough(createCallbacksTransformer(callbacks))
+    .pipeThrough(
+      createStreamDataTransformer(callbacks?.experimental_streamData),
+    );
+}
diff --git a/packages/create-llama/templates/streaming/express/src/routes/chat.route.ts b/packages/create-llama/templates/streaming/express/src/routes/chat.route.ts
new file mode 100644
index 0000000000000000000000000000000000000000..bdfeb08534b9a2c987c4d23a4bb5c6df50075908
--- /dev/null
+++ b/packages/create-llama/templates/streaming/express/src/routes/chat.route.ts
@@ -0,0 +1,8 @@
+import express from "express";
+import { chat } from "../controllers/chat.controller";
+
+const chatRouter = express.Router();
+
+chatRouter.route("/").post(chat);
+
+export default chatRouter;
diff --git a/packages/create-llama/templates/streaming/express/tsconfig.json b/packages/create-llama/templates/streaming/express/tsconfig.json
new file mode 100644
index 0000000000000000000000000000000000000000..bf2e3ec6eee097c359767b81c8bc8b11c5684531
--- /dev/null
+++ b/packages/create-llama/templates/streaming/express/tsconfig.json
@@ -0,0 +1,9 @@
+{
+  "compilerOptions": {
+    "target": "es2016",
+    "esModuleInterop": true,
+    "forceConsistentCasingInFileNames": true,
+    "strict": true,
+    "skipLibCheck": true
+  }
+}
\ No newline at end of file