From d57657599becadf6272027aa91ca92e93065e466 Mon Sep 17 00:00:00 2001
From: yisding <yi.s.ding@gmail.com>
Date: Mon, 6 Nov 2023 10:50:22 -0800
Subject: [PATCH] Add new OpenAI models from Dev Day

---
 apps/simple/gptllama.ts      |  2 +-
 apps/simple/openai.ts        |  2 +-
 apps/simple/vision.ts        | 15 +++++++++++++++
 packages/core/src/llm/LLM.ts |  4 +++-
 4 files changed, 20 insertions(+), 3 deletions(-)
 create mode 100644 apps/simple/vision.ts

diff --git a/apps/simple/gptllama.ts b/apps/simple/gptllama.ts
index 1ecb2a1b0..5497b5e49 100644
--- a/apps/simple/gptllama.ts
+++ b/apps/simple/gptllama.ts
@@ -6,7 +6,7 @@ import readline from "node:readline/promises";
 import { ChatMessage, LlamaDeuce, OpenAI } from "llamaindex";
 
 (async () => {
-  const gpt4 = new OpenAI({ model: "gpt-4", temperature: 0.9 });
+  const gpt4 = new OpenAI({ model: "gpt-4-vision-preview", temperature: 0.9 });
   const l2 = new LlamaDeuce({
     model: "Llama-2-70b-chat-4bit",
     temperature: 0.9,
diff --git a/apps/simple/openai.ts b/apps/simple/openai.ts
index 4c7856be0..00a992abe 100644
--- a/apps/simple/openai.ts
+++ b/apps/simple/openai.ts
@@ -1,7 +1,7 @@
 import { OpenAI } from "llamaindex";
 
 (async () => {
-  const llm = new OpenAI({ model: "gpt-3.5-turbo", temperature: 0.0 });
+  const llm = new OpenAI({ model: "gpt-4-1106-preview", temperature: 0.1 });
 
   // complete api
   const response1 = await llm.complete("How are you?");
diff --git a/apps/simple/vision.ts b/apps/simple/vision.ts
new file mode 100644
index 000000000..27797d00d
--- /dev/null
+++ b/apps/simple/vision.ts
@@ -0,0 +1,15 @@
+import { OpenAI } from "llamaindex";
+
+(async () => {
+  const llm = new OpenAI({ model: "gpt-4-vision-preview", temperature: 0.1 });
+
+  // complete api
+  const response1 = await llm.complete("How are you?");
+  console.log(response1.message.content);
+
+  // chat api
+  const response2 = await llm.chat([
+    { content: "Tell me a joke!", role: "user" },
+  ]);
+  console.log(response2.message.content);
+})();
diff --git a/packages/core/src/llm/LLM.ts b/packages/core/src/llm/LLM.ts
index 9d69743f5..0d659abe1 100644
--- a/packages/core/src/llm/LLM.ts
+++ b/packages/core/src/llm/LLM.ts
@@ -101,6 +101,8 @@ export interface LLM {
 
 export const GPT4_MODELS = {
   "gpt-4": { contextWindow: 8192 },
+  "gpt-4-1106-preview": { contextWindow: 128000 },
+  "gpt-4-vision-preview": { contextWindow: 8192 },
   "gpt-4-32k": { contextWindow: 32768 },
 };
 
@@ -648,7 +650,7 @@ export class Anthropic implements LLM {
 
     this.callbackManager = init?.callbackManager;
   }
-  
+
   tokens(messages: ChatMessage[]): number {
     throw new Error("Method not implemented.");
   }
-- 
GitLab
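
Note: the new apps/simple/vision.ts example above targets gpt-4-vision-preview but only sends text. As a minimal sketch (not part of the patch), the snippet below shows how an image could be passed to that model using the official `openai` Node SDK directly; the image URL and max_tokens value are placeholders, and whether LlamaIndex.TS accepts multimodal message content at this point is an assumption.

// sketch: sending an image to gpt-4-vision-preview with the `openai` Node SDK
import OpenAI from "openai";

(async () => {
  const openai = new OpenAI(); // reads OPENAI_API_KEY from the environment

  const response = await openai.chat.completions.create({
    model: "gpt-4-vision-preview",
    // gpt-4-vision-preview defaults to a small completion limit, so set one explicitly
    max_tokens: 300,
    messages: [
      {
        role: "user",
        content: [
          { type: "text", text: "What is in this image?" },
          {
            type: "image_url",
            // hypothetical image URL, for illustration only
            image_url: { url: "https://example.com/photo.jpg" },
          },
        ],
      },
    ],
  });

  console.log(response.choices[0].message.content);
})();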