From 30add7a76560f61b22fef83269ea1ddd30bce466 Mon Sep 17 00:00:00 2001
From: Elliot Kang <kkang2097@gmail.com>
Date: Fri, 29 Sep 2023 12:00:39 -0700
Subject: [PATCH] add chatEngine example

---
 examples/{llm_stream.ts => llmStream.ts} | 12 ++++++++++++
 1 file changed, 12 insertions(+)
 rename examples/{llm_stream.ts => llmStream.ts} (78%)

diff --git a/examples/llm_stream.ts b/examples/llmStream.ts
similarity index 78%
rename from examples/llm_stream.ts
rename to examples/llmStream.ts
index 89bc1aac3..ffaa495b6 100644
--- a/examples/llm_stream.ts
+++ b/examples/llmStream.ts
@@ -1,5 +1,6 @@
 import * as tiktoken from "tiktoken-node";
 import { ChatMessage, OpenAI } from "../packages/core/src/llm/LLM";
+import { SimpleChatEngine } from "../packages/core/src/ChatEngine";
 
 async function main() {
   const query: string = `
@@ -38,6 +39,17 @@ Where is Istanbul?
   console.log(
     `Output token total using tokenizer on accumulated output: ${correct_total_tokens}`,
   );
+
+  // Stream the same query through a SimpleChatEngine and accumulate the output.
+  accumulated_result = "";
+  const chatEngine = new SimpleChatEngine();
+  const chatStream = await chatEngine.chat(query, undefined, true);
+  for await (const part of chatStream) {
+    console.log(part);
+    accumulated_result += part;
+  }
+
+  console.log(accumulated_result);
 }
 
 main();
-- 
GitLab