Commit 90418fa4 authored by Yi Ding

upgrade packages

parent 8983e9b0
@@ -2,22 +2,22 @@
   "name": "llamaindex",
   "version": "0.0.18",
   "dependencies": {
-    "@anthropic-ai/sdk": "^0.5.9",
+    "@anthropic-ai/sdk": "^0.6.0",
     "lodash": "^4.17.21",
-    "openai": "4.0.0-beta.8",
+    "openai": "^4.0.0",
     "pdf-parse": "^1.1.1",
-    "replicate": "^0.14.1",
+    "replicate": "^0.16.1",
     "tiktoken-node": "^0.0.6",
     "uuid": "^9.0.0",
     "wink-nlp": "^1.14.3"
   },
   "devDependencies": {
-    "@types/lodash": "^4.14.196",
-    "@types/node": "^18.17.1",
+    "@types/lodash": "^4.14.197",
+    "@types/node": "^18.17.5",
     "@types/pdf-parse": "^1.1.1",
     "@types/uuid": "^9.0.2",
     "node-stdlib-browser": "^1.2.0",
-    "tsup": "^7.1.0"
+    "tsup": "^7.2.0"
   },
   "engines": {
     "node": ">=18.0.0"
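The most consequential bump above is openai, which moves from the 4.0.0-beta.8 prerelease to the stable ^4.0.0 release. As a hedged illustration only (the model name and prompt below are placeholders, not code from this commit), this is the v4 streaming call shape that produces the Stream<ChatCompletionChunk> the handler in the next file now accepts directly:

import OpenAI from "openai";

// Hypothetical example; reads OPENAI_API_KEY from the environment.
const openai = new OpenAI();

export async function startChatStream() {
  // With openai ^4.0.0 the awaited result of a streaming create() call is a
  // Stream<ChatCompletionChunk> directly, no longer wrapped in APIResponse.
  const stream = await openai.chat.completions.create({
    model: "gpt-3.5-turbo",
    messages: [{ role: "user", content: "Say hello." }],
    stream: true,
  });
  return stream;
}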
 import { ChatCompletionChunk } from "openai/resources/chat";
-import { globalsHelper } from "../../GlobalsHelper";
-import { StreamCallbackResponse, Event } from "../CallbackManager";
-import { APIResponse } from "openai/core";
 import { Stream } from "openai/streaming";
+import { globalsHelper } from "../../GlobalsHelper";
 import { MessageType } from "../../llm/LLM";
+import { Event, StreamCallbackResponse } from "../CallbackManager";
 /**
  * Handles the OpenAI streaming interface and pipes it to the callback function
@@ -17,7 +16,7 @@ export async function handleOpenAIStream({
   onLLMStream,
   parentEvent,
 }: {
-  response: APIResponse<Stream<ChatCompletionChunk>>;
+  response: Stream<ChatCompletionChunk>;
   onLLMStream: (data: StreamCallbackResponse) => void;
   parentEvent?: Event;
 }): Promise<{ message: string; role: MessageType }> {
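For context, a minimal sketch of how a handler like this might consume the now-unwrapped Stream<ChatCompletionChunk> under openai ^4.0.0. The accumulation loop, the helper name collectStream, and the hard-coded "assistant" role are assumptions for illustration, not code from this commit; the real handleOpenAIStream also forwards StreamCallbackResponse events through onLLMStream.

import { ChatCompletionChunk } from "openai/resources/chat";
import { Stream } from "openai/streaming";

// Hypothetical stand-in for the accumulation logic: iterate the async-iterable
// stream and concatenate the incremental deltas into one message.
async function collectStream(
  response: Stream<ChatCompletionChunk>
): Promise<{ message: string; role: "assistant" }> {
  let message = "";
  for await (const chunk of response) {
    // delta.content may be null/undefined on role-only or final chunks
    message += chunk.choices[0]?.delta?.content ?? "";
  }
  return { message, role: "assistant" };
}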