diff --git a/templates/index.ts b/templates/index.ts
index 7e23f793493a7034bfdca3956ea5443b2cc30fe4..f1b5b2ef561d0bc00db2a941d1b8733b1b59e9e9 100644
--- a/templates/index.ts
+++ b/templates/index.ts
@@ -71,17 +71,6 @@ export const installTemplate = async ({
       parents: true,
       cwd: enginePath,
     });
-    const routeFile = path.join(
-      root,
-      relativeEngineDestPath,
-      framework === "nextjs" ? "route.ts" : "chat.controller.ts",
-    );
-    const routeFileContent = await fs.readFile(routeFile, "utf8");
-    const newContent = routeFileContent.replace(
-      /^import { createChatEngine }.*$/m,
-      'import { createChatEngine } from "./engine"\n',
-    );
-    await fs.writeFile(routeFile, newContent);
   }
 
   /**
@@ -125,8 +114,10 @@ export const installTemplate = async ({
       customApiPath,
       "\n",
     );
+    // Remove the default `app/api` folder, since the custom API path replaces it
     const apiPath = path.join(root, "app", "api");
     await fs.rmdir(apiPath, { recursive: true });
+    // Point the `dev` script at the custom API path via NEXT_PUBLIC_CHAT_API
     packageJson.scripts = {
       ...packageJson.scripts,
       dev: `NEXT_PUBLIC_CHAT_API=${customApiPath} next dev`,
diff --git a/templates/types/simple/express/src/controllers/chat.controller.ts b/templates/types/simple/express/src/controllers/chat.controller.ts
index b11e1050299fb5359306b5973e4eeb310083287a..476c0c35d51b7c8259c59035e9cac82f66f37ac1 100644
--- a/templates/types/simple/express/src/controllers/chat.controller.ts
+++ b/templates/types/simple/express/src/controllers/chat.controller.ts
@@ -1,6 +1,6 @@
 import { NextFunction, Request, Response } from "express";
 import { ChatMessage, OpenAI } from "llamaindex";
-import { createChatEngine } from "../../../../engines/context";
+import { createChatEngine } from "./engine";
 
 export const chat = async (req: Request, res: Response, next: NextFunction) => {
   try {
diff --git a/templates/components/engines/simple/index.ts b/templates/types/simple/express/src/controllers/engine/index.ts
similarity index 100%
rename from templates/components/engines/simple/index.ts
rename to templates/types/simple/express/src/controllers/engine/index.ts
diff --git a/templates/types/simple/express/tsconfig.json b/templates/types/simple/express/tsconfig.json
index 57c4a62815ab2017702f080ca64871369eb5caaf..e886da1ef324ab02d14102f3a4a36c8f90f70eef 100644
--- a/templates/types/simple/express/tsconfig.json
+++ b/templates/types/simple/express/tsconfig.json
@@ -4,6 +4,7 @@
     "esModuleInterop": true,
     "forceConsistentCasingInFileNames": true,
     "strict": true,
-    "skipLibCheck": true
+    "skipLibCheck": true,
+    "moduleResolution": "node"
   }
-}
+}
\ No newline at end of file
diff --git a/templates/types/simple/nextjs/app/api/chat/engine/index.ts b/templates/types/simple/nextjs/app/api/chat/engine/index.ts
new file mode 100644
index 0000000000000000000000000000000000000000..abb02e90cd2ce91096791bf10c4665afcbe11d38
--- /dev/null
+++ b/templates/types/simple/nextjs/app/api/chat/engine/index.ts
@@ -0,0 +1,7 @@
+import { LLM, SimpleChatEngine } from "llamaindex";
+
+export async function createChatEngine(llm: LLM) {
+  return new SimpleChatEngine({
+    llm,
+  });
+}
diff --git a/templates/types/simple/nextjs/app/api/chat/route.ts b/templates/types/simple/nextjs/app/api/chat/route.ts
index 651a020f2f7a83581aa94f8354b64083e7eb2abc..097341ab43922058b9d06b53c215feb16a2a7e23 100644
--- a/templates/types/simple/nextjs/app/api/chat/route.ts
+++ b/templates/types/simple/nextjs/app/api/chat/route.ts
@@ -1,6 +1,6 @@
 import { ChatMessage, OpenAI } from "llamaindex";
 import { NextRequest, NextResponse } from "next/server";
-import { createChatEngine } from "../../../../../../engines/context";
+import { createChatEngine } from "./engine";
 
 export const runtime = "nodejs";
 export const dynamic = "force-dynamic";
diff --git a/templates/types/streaming/express/src/controllers/chat.controller.ts b/templates/types/streaming/express/src/controllers/chat.controller.ts
index 58f96b035f2a974b1caea8ccfc4429dbd5192ce4..162b5db74da1802749fe9168860bf0f5ff094a8c 100644
--- a/templates/types/streaming/express/src/controllers/chat.controller.ts
+++ b/templates/types/streaming/express/src/controllers/chat.controller.ts
@@ -1,7 +1,7 @@
 import { streamToResponse } from "ai";
 import { NextFunction, Request, Response } from "express";
 import { ChatMessage, OpenAI } from "llamaindex";
-import { createChatEngine } from "../../../../engines/context";
+import { createChatEngine } from "./engine";
 import { LlamaIndexStream } from "./llamaindex-stream";
 
 export const chat = async (req: Request, res: Response, next: NextFunction) => {
diff --git a/templates/types/streaming/express/src/controllers/engine/index.ts b/templates/types/streaming/express/src/controllers/engine/index.ts
new file mode 100644
index 0000000000000000000000000000000000000000..abb02e90cd2ce91096791bf10c4665afcbe11d38
--- /dev/null
+++ b/templates/types/streaming/express/src/controllers/engine/index.ts
@@ -0,0 +1,7 @@
+import { LLM, SimpleChatEngine } from "llamaindex";
+
+export async function createChatEngine(llm: LLM) {
+  return new SimpleChatEngine({
+    llm,
+  });
+}
diff --git a/templates/types/streaming/express/tsconfig.json b/templates/types/streaming/express/tsconfig.json
index bf2e3ec6eee097c359767b81c8bc8b11c5684531..e886da1ef324ab02d14102f3a4a36c8f90f70eef 100644
--- a/templates/types/streaming/express/tsconfig.json
+++ b/templates/types/streaming/express/tsconfig.json
@@ -4,6 +4,7 @@
     "esModuleInterop": true,
     "forceConsistentCasingInFileNames": true,
     "strict": true,
-    "skipLibCheck": true
+    "skipLibCheck": true,
+    "moduleResolution": "node"
   }
 }
\ No newline at end of file
diff --git a/templates/types/streaming/nextjs/app/api/chat/engine/index.ts b/templates/types/streaming/nextjs/app/api/chat/engine/index.ts
new file mode 100644
index 0000000000000000000000000000000000000000..abb02e90cd2ce91096791bf10c4665afcbe11d38
--- /dev/null
+++ b/templates/types/streaming/nextjs/app/api/chat/engine/index.ts
@@ -0,0 +1,7 @@
+import { LLM, SimpleChatEngine } from "llamaindex";
+
+export async function createChatEngine(llm: LLM) {
+  return new SimpleChatEngine({
+    llm,
+  });
+}
diff --git a/templates/types/streaming/nextjs/app/api/chat/route.ts b/templates/types/streaming/nextjs/app/api/chat/route.ts
index e87ef01db16d488d5639d9023cb6cfcff049fe45..989a5fec484fae3b34060ed20b4c1d5f1e42773f 100644
--- a/templates/types/streaming/nextjs/app/api/chat/route.ts
+++ b/templates/types/streaming/nextjs/app/api/chat/route.ts
@@ -1,7 +1,7 @@
 import { Message, StreamingTextResponse } from "ai";
 import { OpenAI } from "llamaindex";
 import { NextRequest, NextResponse } from "next/server";
-import { createChatEngine } from "../../../../../../engines/context";
+import { createChatEngine } from "./engine";
 import { LlamaIndexStream } from "./llamaindex-stream";
 
 export const runtime = "nodejs";