diff --git a/apps/simple/gptllama.ts b/apps/simple/gptllama.ts index 1ecb2a1b01d63d6d854559c95bb118fecccf3b6e..5497b5e492c3c93a37a454085fe1d211435c07d4 100644 --- a/apps/simple/gptllama.ts +++ b/apps/simple/gptllama.ts @@ -6,7 +6,7 @@ import readline from "node:readline/promises"; import { ChatMessage, LlamaDeuce, OpenAI } from "llamaindex"; (async () => { - const gpt4 = new OpenAI({ model: "gpt-4", temperature: 0.9 }); + const gpt4 = new OpenAI({ model: "gpt-4-vision-preview", temperature: 0.9 }); const l2 = new LlamaDeuce({ model: "Llama-2-70b-chat-4bit", temperature: 0.9, diff --git a/apps/simple/openai.ts b/apps/simple/openai.ts index 4c7856be0ab9e5912cac3ca119416c6157c750ad..00a992abe38527aad98e2a8b4fa08630d5893893 100644 --- a/apps/simple/openai.ts +++ b/apps/simple/openai.ts @@ -1,7 +1,7 @@ import { OpenAI } from "llamaindex"; (async () => { - const llm = new OpenAI({ model: "gpt-3.5-turbo", temperature: 0.0 }); + const llm = new OpenAI({ model: "gpt-4-1106-preview", temperature: 0.1 }); // complete api const response1 = await llm.complete("How are you?"); diff --git a/apps/simple/vision.ts b/apps/simple/vision.ts new file mode 100644 index 0000000000000000000000000000000000000000..27797d00dbc1e40ebc9e696747ab5d92fa58e31d --- /dev/null +++ b/apps/simple/vision.ts @@ -0,0 +1,15 @@ +import { OpenAI } from "llamaindex"; + +(async () => { + const llm = new OpenAI({ model: "gpt-4-vision-preview", temperature: 0.1 }); + + // complete api + const response1 = await llm.complete("How are you?"); + console.log(response1.message.content); + + // chat api + const response2 = await llm.chat([ + { content: "Tell me a joke!", role: "user" }, + ]); + console.log(response2.message.content); +})(); diff --git a/packages/core/src/llm/LLM.ts b/packages/core/src/llm/LLM.ts index 9d69743f577141b5aff793dc07faf9497bc7f0d8..0d659abe1a60f4f1c095247b57b39417a682d239 100644 --- a/packages/core/src/llm/LLM.ts +++ b/packages/core/src/llm/LLM.ts @@ -101,6 +101,8 @@ export interface LLM { 
export const GPT4_MODELS = { "gpt-4": { contextWindow: 8192 }, + "gpt-4-1106-preview": { contextWindow: 128000 }, + "gpt-4-vision-preview": { contextWindow: 128000 }, "gpt-4-32k": { contextWindow: 32768 }, }; @@ -648,7 +650,7 @@ export class Anthropic implements LLM { this.callbackManager = init?.callbackManager; } - + tokens(messages: ChatMessage[]): number { throw new Error("Method not implemented."); }