diff --git a/examples/llm_stream.ts b/examples/llmStream.ts
similarity index 78%
rename from examples/llm_stream.ts
rename to examples/llmStream.ts
index 89bc1aac3dd31c899eb80efea09fe89e2b6d76e0..ffaa495b6ef1e8633880eb5b2be16ac04b41fa5a 100644
--- a/examples/llm_stream.ts
+++ b/examples/llmStream.ts
@@ -1,5 +1,6 @@
 import * as tiktoken from "tiktoken-node";
 import { ChatMessage, OpenAI } from "../packages/core/src/llm/LLM";
+import { SimpleChatEngine } from "../packages/core/src/ChatEngine";
 
 async function main() {
   const query: string = `
@@ -38,6 +39,17 @@ Where is Istanbul?
   console.log(
     `Output token total using tokenizer on accumulated output: ${correct_total_tokens}`,
   );
+
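+  // Re-run the query through SimpleChatEngine with streaming enabled, accumulating the chunks.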
+  accumulated_result = "";
+  const chatEngine: SimpleChatEngine = new SimpleChatEngine();
+  const chatStream = await chatEngine.chat(query, undefined, true);
+  for await (const part of chatStream) {
+    console.log(part);
+    accumulated_result += part;
+  }
+
+  console.log(accumulated_result);
 }
 
 main();