diff --git a/apps/simple/portkey.ts b/apps/simple/portkey.ts
new file mode 100644
index 0000000000000000000000000000000000000000..4221b4b16650ea0ca1cf122c84fcbc20f23fba70
--- /dev/null
+++ b/apps/simple/portkey.ts
@@ -0,0 +1,20 @@
+import { Portkey } from "llamaindex";
+
+(async () => {
+  const portkey = new Portkey({
+    mode: "single",
+    llms: [{
+      provider: "anyscale",
+      virtual_key: "anyscale-3b3c04",
+      model: "meta-llama/Llama-2-13b-chat-hf",
+      max_tokens: 2000
+    }]
+  });
+  const result = portkey.stream_chat([
+    { role: "system", content: "You are a helpful assistant." },
+    { role: "user", content: "Tell me a joke." }
+  ]);
+  for await (const res of result) {
+    process.stdout.write(res);
+  }
+})();
diff --git a/examples/portkey.ts b/examples/portkey.ts
new file mode 100644
index 0000000000000000000000000000000000000000..4221b4b16650ea0ca1cf122c84fcbc20f23fba70
--- /dev/null
+++ b/examples/portkey.ts
@@ -0,0 +1,20 @@
+import { Portkey } from "llamaindex";
+
+(async () => {
+  const portkey = new Portkey({
+    mode: "single",
+    llms: [{
+      provider: "anyscale",
+      virtual_key: "anyscale-3b3c04",
+      model: "meta-llama/Llama-2-13b-chat-hf",
+      max_tokens: 2000
+    }]
+  });
+  const result = portkey.stream_chat([
+    { role: "system", content: "You are a helpful assistant." },
+    { role: "user", content: "Tell me a joke." }
+  ]);
+  for await (const res of result) {
+    process.stdout.write(res);
+  }
+})();
diff --git a/packages/core/package.json b/packages/core/package.json
index faa208eb1155df584cfb98b6dab872b4d10aee41..e7d5324705446b7e3890cd2d9105bf086f43e64b 100644
--- a/packages/core/package.json
+++ b/packages/core/package.json
@@ -16,7 +16,8 @@
+    "portkey-ai": "^0.1.11",
     "replicate": "^0.18.1",
     "tiktoken": "^1.0.10",
     "uuid": "^9.0.1",
     "wink-nlp": "^1.14.3"
   },
   "devDependencies": {
     "@types/lodash": "^4.14.199",
diff --git a/packages/core/src/llm/LLM.ts b/packages/core/src/llm/LLM.ts
index 777a22367775b34b65117ed1d0711c2858158dc2..26bb0554859d28b4e5715df9604b8d2c1fbdd793 100644
--- a/packages/core/src/llm/LLM.ts
+++ b/packages/core/src/llm/LLM.ts
@@ -8,6 +8,7 @@ import {
   StreamCallbackResponse,
 } from "../callbacks/CallbackManager";
 
+import { LLMOptions } from "portkey-ai";
 import {
   AnthropicSession,
   ANTHROPIC_AI_PROMPT,
@@ -22,6 +23,7 @@ import {
   shouldUseAzure,
 } from "./azure";
 import { getOpenAISession, OpenAISession } from "./openai";
+import { PortkeySession, getPortkeySession } from "./portkey";
 import { ReplicateSession } from "./replicate";
 
 export type MessageType =
@@ -679,3 +681,98 @@ export class Anthropic implements LLM {
     return this.streamChat([{ content: prompt, role: "user" }], parentEvent);
   }
 }
+
+/**
+ * Portkey LLM implementation. Routes chat and completion calls through the
+ * Portkey AI gateway using the configured provider list (`llms`).
+ */
+export class Portkey implements LLM {
+  apiKey?: string = undefined;
+  baseURL?: string = undefined;
+  mode?: string = undefined;
+  llms?: [LLMOptions] | null = undefined;
+  session: PortkeySession;
+  callbackManager?: CallbackManager;
+
+  constructor(init?: Partial<Portkey>) {
+    this.apiKey = init?.apiKey;
+    this.baseURL = init?.baseURL;
+    this.mode = init?.mode;
+    this.llms = init?.llms;
+    this.session = getPortkeySession({
+      apiKey: this.apiKey,
+      baseURL: this.baseURL,
+      llms: this.llms,
+      mode: this.mode,
+    });
+    this.callbackManager = init?.callbackManager;
+  }
+
+  async chat(
+    messages: ChatMessage[],
+    parentEvent?: Event | undefined,
+    params?: Record<string, any>,
+  ): Promise<ChatResponse> {
+    const response = await this.session.portkey.chatCompletions.create({
+      messages,
+      ...(params ?? {}),
+    });
+
+    const content = response.choices[0].message?.content ?? "";
+    const role = response.choices[0].message?.role || "assistant";
+    return { message: { content, role: role as MessageType } };
+  }
+
+  async complete(
+    prompt: string,
+    parentEvent?: Event | undefined,
+  ): Promise<CompletionResponse> {
+    return this.chat([{ content: prompt, role: "user" }], parentEvent);
+  }
+
+  async *stream_chat(
+    messages: ChatMessage[],
+    parentEvent?: Event,
+    params?: Record<string, any>,
+  ): AsyncGenerator<string, void, unknown> {
+    // Report stream progress through the callback manager when one is set.
+    const onLLMStream = this.callbackManager?.onLLMStream ?? (() => {});
+
+    const chunkStream = await this.session.portkey.chatCompletions.create({
+      messages,
+      ...params,
+      stream: true,
+    });
+
+    const event: Event = parentEvent ?? {
+      id: "unspecified",
+      type: "llmPredict" as EventType,
+    };
+
+    // Re-index chunks sequentially; the upstream index is not relied upon.
+    let idxCounter = 0;
+    for await (const part of chunkStream) {
+      part.choices[0].index = idxCounter;
+      const isDone = part.choices[0].finish_reason === "stop";
+
+      const streamCallback: StreamCallbackResponse = {
+        event,
+        index: idxCounter,
+        isDone,
+        // token: part,
+      };
+      onLLMStream(streamCallback);
+
+      idxCounter++;
+
+      yield part.choices[0].delta?.content ?? "";
+    }
+  }
+
+  stream_complete(
+    query: string,
+    parentEvent?: Event,
+  ): AsyncGenerator<string, void, unknown> {
+    return this.stream_chat([{ content: query, role: "user" }], parentEvent);
+  }
+}
diff --git a/packages/core/src/llm/portkey.ts b/packages/core/src/llm/portkey.ts
new file mode 100644
index 0000000000000000000000000000000000000000..b4a5a326a2eda760572a22324b55818f8b8a5b55
--- /dev/null
+++ b/packages/core/src/llm/portkey.ts
@@ -0,0 +1,62 @@
+import _ from "lodash";
+import { LLMOptions, Portkey } from "portkey-ai";
+
+/**
+ * Read an environment variable, falling back to a default when it is unset
+ * or when no `process` global exists (e.g. when running in a browser).
+ */
+export const readEnv = (env: string, defaultVal?: string): string | undefined => {
+  if (typeof process !== "undefined") {
+    return process.env?.[env] ?? defaultVal;
+  }
+  return defaultVal;
+};
+
+interface PortkeyOptions {
+  apiKey?: string;
+  baseURL?: string;
+  mode?: string;
+  llms?: [LLMOptions] | null;
+}
+
+export class PortkeySession {
+  portkey: Portkey;
+
+  constructor(options: PortkeyOptions = {}) {
+    if (!options.apiKey) {
+      options.apiKey = readEnv("PORTKEY_API_KEY");
+    }
+    if (!options.baseURL) {
+      options.baseURL = readEnv("PORTKEY_BASE_URL", "https://api.portkey.ai");
+    }
+    // Fail fast before constructing the client.
+    if (!options.apiKey) {
+      throw new Error("Set Portkey ApiKey in PORTKEY_API_KEY env variable");
+    }
+
+    this.portkey = new Portkey(options);
+  }
+}
+
+let defaultPortkeySession: {
+  session: PortkeySession;
+  options: PortkeyOptions;
+}[] = [];
+
+/**
+ * Get a session for the Portkey API. If one already exists with the same options,
+ * it will be returned. Otherwise, a new session will be created.
+ * @param options
+ * @returns
+ */
+export function getPortkeySession(options: PortkeyOptions = {}) {
+  let session = defaultPortkeySession.find((session) => {
+    return _.isEqual(session.options, options);
+  })?.session;
+
+  if (!session) {
+    session = new PortkeySession(options);
+    defaultPortkeySession.push({ session, options });
+  }
+  return session;
+}
+
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 2729e5190c8f27cbd8c5732c726a4b5b1f6fe10b..73f4ec55342b0fac1d842085aebcb4551c6db836 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -149,6 +149,9 @@ importers:
       pdf-parse:
         specifier: ^1.1.1
         version: 1.1.1
+      portkey-ai:
+        specifier: ^0.1.11
+        version: 0.1.11
       rake-modified:
         specifier: ^1.0.8
         version: 1.0.8
@@ -11239,6 +11242,12 @@ packages:
       find-up: 3.0.0
     dev: false
 
+  /portkey-ai@0.1.11:
+    resolution: {integrity: sha512-KRZLB7zdvJ40P6WuRAb6VE2gin7SXhbRs7ESqnxDeO8C+ECtKDrrS5g0+l3pD9+HZO4iCvvIROaRyd1NOyLmWw==}
+    dependencies:
+      agentkeepalive: 4.5.0
+    dev: false
+
   /postcss-calc@8.2.4(postcss@8.4.28):
     resolution: {integrity: sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q==}
     peerDependencies: