Commit a9e794bd authored by noble-varghese

feat: Portkey integration with LLamaIndexTs

parent 68d9cfb5
import { Portkey } from "llamaindex";

(async () => {
  const portkey = new Portkey({
    mode: "single",
    llms: [
      {
        provider: "anyscale",
        virtual_key: "anyscale-3b3c04",
        model: "meta-llama/Llama-2-13b-chat-hf",
        max_tokens: 2000,
      },
    ],
  });
  // Stream the chat response and print it token by token.
  const result = portkey.stream_chat([
    { role: "system", content: "You are a helpful assistant." },
    { role: "user", content: "Tell me a joke." },
  ]);
  for await (const res of result) {
    process.stdout.write(res);
  }
})();
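The example above pins a single provider with mode: "single". The Portkey gateway can also route across several configured LLMs; the sketch below shows what a multi-target configuration could look like. The "fallback" mode name and the second virtual key are assumptions for illustration, not part of this commit.

// Hedged sketch: try Anyscale first, fall back to a second target.
// "fallback" mode and the openai virtual key below are assumed, not from this commit.
const portkeyWithFallback = new Portkey({
  mode: "fallback",
  llms: [
    {
      provider: "anyscale",
      virtual_key: "anyscale-3b3c04",
      model: "meta-llama/Llama-2-13b-chat-hf",
      max_tokens: 2000,
    },
    {
      provider: "openai",
      virtual_key: "openai-xxxxxx", // hypothetical placeholder key
      model: "gpt-3.5-turbo",
      max_tokens: 2000,
    },
  ],
});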
@@ -16,7 +16,8 @@
     "replicate": "^0.18.1",
     "tiktoken": "^1.0.10",
     "uuid": "^9.0.1",
-    "wink-nlp": "^1.14.3"
+    "wink-nlp": "^1.14.3",
+    "portkey-ai": "^0.1.11"
   },
   "devDependencies": {
     "@types/lodash": "^4.14.199",
@@ -7,6 +7,7 @@ import {
  StreamCallbackResponse,
} from "../callbacks/CallbackManager";
import { LLMOptions } from "portkey-ai";
import {
  AnthropicSession,
  ANTHROPIC_AI_PROMPT,
@@ -21,6 +22,7 @@ import {
  shouldUseAzure,
} from "./azure";
import { getOpenAISession, OpenAISession } from "./openai";
import { PortkeySession, getPortkeySession } from "./portkey";
import { ReplicateSession } from "./replicate";

export type MessageType =
@@ -592,3 +594,104 @@ export class Anthropic implements LLM {
    return this.chat([{ content: prompt, role: "user" }], parentEvent);
  }
}

export class Portkey implements LLM {
  apiKey?: string = undefined;
  baseUrl?: string = undefined;
  mode?: string = undefined;
  llms?: [LLMOptions] | null = undefined;
  session: PortkeySession;
  callbackManager?: CallbackManager;

  constructor(init?: Partial<Portkey>) {
    this.apiKey = init?.apiKey;
    this.baseUrl = init?.baseUrl;
    this.mode = init?.mode;
    this.llms = init?.llms;
    this.session = getPortkeySession({
      apiKey: this.apiKey,
      baseUrl: this.baseUrl,
      llms: this.llms,
      mode: this.mode,
    });
    this.callbackManager = init?.callbackManager;
  }

  async chat(
    messages: ChatMessage[],
    parentEvent?: Event,
    params?: Record<string, any>,
  ): Promise<ChatResponse> {
    const resolvedParams = params ?? {};
    const response = await this.session.portkey.chatCompletions.create({
      messages,
      ...resolvedParams,
    });
    const content = response.choices[0].message?.content ?? "";
    const role = response.choices[0].message?.role ?? "assistant";
    return { message: { content, role: role as MessageType } };
  }

  async complete(
    prompt: string,
    parentEvent?: Event,
  ): Promise<CompletionResponse> {
    return this.chat([{ content: prompt, role: "user" }], parentEvent);
  }

  async *stream_chat(
    messages: ChatMessage[],
    parentEvent?: Event,
    params?: Record<string, any>,
  ): AsyncGenerator<string, void, unknown> {
    // Forward each chunk to the onLLMStream callback, if one is registered.
    const onLLMStream = this.callbackManager?.onLLMStream ?? (() => {});
    const chunkStream = await this.session.portkey.chatCompletions.create({
      messages,
      ...params,
      stream: true,
    });
    const event: Event = parentEvent ?? {
      id: "unspecified",
      type: "llmPredict" as EventType,
    };
    // Index of the current chunk, so callbacks can order tokens.
    let idxCounter = 0;
    for await (const part of chunkStream) {
      part.choices[0].index = idxCounter;
      const isDone = part.choices[0].finish_reason === "stop";
      const streamCallback: StreamCallbackResponse = {
        event,
        index: idxCounter,
        isDone,
        // token: part,
      };
      onLLMStream(streamCallback);
      idxCounter++;
      yield part.choices[0].delta?.content ?? "";
    }
    return;
  }

  stream_complete(
    query: string,
    parentEvent?: Event,
  ): AsyncGenerator<string, void, unknown> {
    return this.stream_chat([{ content: query, role: "user" }], parentEvent);
  }
}
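For non-streaming calls, complete() wraps the prompt in a single user message and delegates to chat(). A minimal usage sketch, assuming a portkey instance constructed as in the example at the top of this commit:

// Hedged sketch: one-shot completion through the new Portkey class.
const completion = await portkey.complete("Tell me a joke.");
console.log(completion.message.content);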
import _ from "lodash";
import { LLMOptions, Portkey } from "portkey-ai";

export const readEnv = (
  env: string,
  default_val?: string,
): string | undefined => {
  if (typeof process !== "undefined") {
    return process.env?.[env] ?? default_val;
  }
  return default_val;
};

interface PortkeyOptions {
  apiKey?: string;
  baseUrl?: string;
  mode?: string;
  llms?: [LLMOptions] | null;
}

export class PortkeySession {
  portkey: Portkey;

  constructor(options: PortkeyOptions = {}) {
    // Fall back to environment configuration without mutating the caller's
    // options object, so getPortkeySession can cache sessions by deep equality.
    const apiKey = options.apiKey ?? readEnv("PORTKEY_API_KEY");
    const baseUrl =
      options.baseUrl ?? readEnv("PORTKEY_BASE_URL", "https://api.portkey.ai");
    if (!apiKey) {
      throw new Error("Set Portkey ApiKey in PORTKEY_API_KEY env variable");
    }
    this.portkey = new Portkey({ ...options, apiKey, baseUrl });
  }
}
let defaultPortkeySession: {
  session: PortkeySession;
  options: PortkeyOptions;
}[] = [];

/**
 * Get a session for the Portkey API. If one already exists with the same options,
 * it will be returned. Otherwise, a new session will be created.
 * @param options
 * @returns
 */
export function getPortkeySession(options: PortkeyOptions = {}) {
  let session = defaultPortkeySession.find((session) =>
    _.isEqual(session.options, options),
  )?.session;
  if (!session) {
    session = new PortkeySession(options);
    defaultPortkeySession.push({ session, options });
  }
  return session;
}
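getPortkeySession caches sessions by deep equality of their options (lodash isEqual), so callers that pass the same options share one underlying Portkey client. A minimal sketch of that reuse, assuming PORTKEY_API_KEY is set in the environment:

// Hedged sketch: identical options resolve to the same cached session.
const a = getPortkeySession({ mode: "single" });
const b = getPortkeySession({ mode: "single" });
console.log(a === b); // true — the second call reuses the cached session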
@@ -149,6 +149,9 @@ importers:
      pdf-parse:
        specifier: ^1.1.1
        version: 1.1.1
      portkey-ai:
        specifier: ^0.1.11
        version: 0.1.11
      rake-modified:
        specifier: ^1.0.8
        version: 1.0.8
@@ -11239,6 +11242,12 @@ packages:
      find-up: 3.0.0
    dev: false

  /portkey-ai@0.1.11:
    resolution: {integrity: sha512-KRZLB7zdvJ40P6WuRAb6VE2gin7SXhbRs7ESqnxDeO8C+ECtKDrrS5g0+l3pD9+HZO4iCvvIROaRyd1NOyLmWw==}
    dependencies:
      agentkeepalive: 4.5.0
    dev: false

  /postcss-calc@8.2.4(postcss@8.4.28):
    resolution: {integrity: sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q==}
    peerDependencies: