From 5e12f568bd74a627f1c612953037418c1f21c10d Mon Sep 17 00:00:00 2001
From: Elliot Kang <kkang2097@gmail.com>
Date: Sat, 30 Sep 2023 14:10:55 -0700
Subject: [PATCH] formatting; use Anthropic in llmStream demo

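Run prettier-style formatting over llmStream.ts, CallbackManager.ts, and
LLM.ts; point the llmStream demo at the local Anthropic class instead of
OpenAI; and mark the AnthropicStreamToken fields stop and log_id as
optional.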
---
 apps/simple/llmStream.ts                       | 14 +++++++-------
 packages/core/src/callbacks/CallbackManager.ts | 13 ++++++-------
 packages/core/src/llm/LLM.ts                   |  8 ++++++--
 3 files changed, 19 insertions(+), 16 deletions(-)
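
Reviewer notes (text between the diffstat and the first "diff --git" is
not applied by git am):

The reshaped AnthropicStreamToken appears to mirror Anthropic's legacy
completion stream events; with stop and log_id now optional, a minimal
token can look like this (field values are illustrative, not captured
from the API):

  const token: AnthropicStreamToken = {
    completion: " Istanbul is in Turkey.",
    model: "claude-2",
    stop_reason: undefined,
  };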

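A minimal usage sketch of the streaming flag on Anthropic.complete()
(assumes the streaming overload resolves to the AsyncGenerator returned
by streamComplete() and the non-streaming one to a chat response with
message.content; illustrative, not tested):

  const llm = new Anthropic();

  // streaming: true -> resolves to the streamComplete() AsyncGenerator
  const stream = await llm.complete("Where is Istanbul?", undefined, true);
  for await (const token of stream) {
    process.stdout.write(token);
  }

  // flag omitted -> streaming defaults to false; a single chat response
  const { message } = await llm.complete("Where is Istanbul?");
  console.log(message.content);
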
diff --git a/apps/simple/llmStream.ts b/apps/simple/llmStream.ts
index a4c3c80e3..e9ebce444 100644
--- a/apps/simple/llmStream.ts
+++ b/apps/simple/llmStream.ts
@@ -1,5 +1,5 @@
-import {OpenAI, Anthropic, ChatMessage, SimpleChatEngine } from "llamaindex";
-// import {Anthropic} from '@anthropic-ai/sdk';
+import { ChatMessage, OpenAI, SimpleChatEngine } from "llamaindex";
+import { Anthropic } from "../../packages/core/src/llm/LLM";
 import { stdin as input, stdout as output } from "node:process";
 import readline from "node:readline/promises";
 
@@ -9,12 +9,9 @@ Where is Istanbul?
   `;
 
   // const llm = new OpenAI({ model: "gpt-3.5-turbo", temperature: 0.1 });
-  const llm = new OpenAI();
+  const llm = new Anthropic();
   const message: ChatMessage = { content: query, role: "user" };
 
-  // var accumulated_result: string = "";
-  // var total_tokens: number = 0;
-
   //TODO: Add callbacks later
 
   //Stream Complete
@@ -22,7 +19,10 @@ Where is Istanbul?
   //either an AsyncGenerator or a Response.
   // Omitting the streaming flag automatically sets streaming to false
 
-  const chatEngine: SimpleChatEngine = new SimpleChatEngine({chatHistory: undefined, llm: llm});
+  const chatEngine: SimpleChatEngine = new SimpleChatEngine({
+    chatHistory: undefined,
+    llm,
+  });
 
   const rl = readline.createInterface({ input, output });
   while (true) {
diff --git a/packages/core/src/callbacks/CallbackManager.ts b/packages/core/src/callbacks/CallbackManager.ts
index fb7eff8c9..266058261 100644
--- a/packages/core/src/callbacks/CallbackManager.ts
+++ b/packages/core/src/callbacks/CallbackManager.ts
@@ -39,13 +39,12 @@ export interface DefaultStreamToken {
 //OpenAI stream token schema is the default.
 //Note: Anthropic and Replicate also use similar token schemas.
 export type OpenAIStreamToken = DefaultStreamToken;
-export type AnthropicStreamToken = 
-{
-  completion: string,
-  stop_reason: string | undefined,
-  model: string,
-  stop: boolean | undefined,
-  log_id: string
+export type AnthropicStreamToken = {
+  completion: string;
+  model: string;
+  stop_reason: string | undefined;
+  stop?: boolean;
+  log_id?: string;
 };
 
 //
diff --git a/packages/core/src/llm/LLM.ts b/packages/core/src/llm/LLM.ts
index 6f6c8c4e5..dd17eb49d 100644
--- a/packages/core/src/llm/LLM.ts
+++ b/packages/core/src/llm/LLM.ts
@@ -663,10 +663,14 @@ export class Anthropic implements LLM {
     parentEvent?: Event | undefined,
     streaming?: T,
   ): Promise<R> {
-    if(streaming){
+    if (streaming) {
       return this.streamComplete(prompt, parentEvent) as R;
     }
-    return this.chat([{ content: prompt, role: "user" }], parentEvent, streaming) as R;
+    return this.chat(
+      [{ content: prompt, role: "user" }],
+      parentEvent,
+      streaming,
+    ) as R;
   }
 
   protected streamComplete(
-- 
GitLab