From bc7a11cdbeb84670a45f5b5a2fa4088c8e5b65c6 Mon Sep 17 00:00:00 2001
From: Alex Yang <himself65@outlook.com>
Date: Fri, 3 May 2024 18:03:23 -0500
Subject: [PATCH] fix: inline ollama build (#807)

---
 .changeset/angry-lizards-compete.md           |   5 +
 packages/core/package.json                    |   1 -
 packages/core/src/internal/deps/ollama.d.ts   | 264 ++++++++++
 packages/core/src/internal/deps/ollama.js     | 462 ++++++++++++++++++
 .../core/src/internal/deps/ollama.license     |  21 +
 packages/core/src/llm/ollama.ts               |   4 +-
 pnpm-lock.yaml                                |  13 -
 7 files changed, 754 insertions(+), 16 deletions(-)
 create mode 100644 .changeset/angry-lizards-compete.md
 create mode 100644 packages/core/src/internal/deps/ollama.d.ts
 create mode 100644 packages/core/src/internal/deps/ollama.js
 create mode 100644 packages/core/src/internal/deps/ollama.license

diff --git a/.changeset/angry-lizards-compete.md b/.changeset/angry-lizards-compete.md
new file mode 100644
index 000000000..c36d9199b
--- /dev/null
+++ b/.changeset/angry-lizards-compete.md
@@ -0,0 +1,5 @@
+---
+"llamaindex": patch
+---
+
+fix: inline ollama build
diff --git a/packages/core/package.json b/packages/core/package.json
index bfc8d8532..4cecfdf3b 100644
--- a/packages/core/package.json
+++ b/packages/core/package.json
@@ -32,7 +32,6 @@
     "md-utils-ts": "^2.0.0",
     "mongodb": "^6.5.0",
     "notion-md-crawler": "^1.0.0",
-    "ollama": "^0.5.0",
     "openai": "^4.38.0",
     "papaparse": "^5.4.1",
     "pathe": "^1.1.2",
diff --git a/packages/core/src/internal/deps/ollama.d.ts b/packages/core/src/internal/deps/ollama.d.ts
new file mode 100644
index 000000000..39e6218f7
--- /dev/null
+++ b/packages/core/src/internal/deps/ollama.d.ts
@@ -0,0 +1,264 @@
+type Fetch = typeof fetch;
+interface Config {
+  host: string;
+  fetch?: Fetch;
+  proxy?: boolean;
+}
+interface Options {
+  numa: boolean;
+  num_ctx: number;
+  num_batch: number;
+  main_gpu: number;
+  low_vram: boolean;
+  f16_kv: boolean;
+  logits_all: boolean;
+  vocab_only: boolean;
+  use_mmap: boolean;
+  use_mlock: boolean;
+  embedding_only: boolean;
+  num_thread: number;
+  num_keep: number;
+  seed: number;
+  num_predict: number;
+  top_k: number;
+  top_p: number;
+  tfs_z: number;
+  typical_p: number;
+  repeat_last_n: number;
+  temperature: number;
+  repeat_penalty: number;
+  presence_penalty: number;
+  frequency_penalty: number;
+  mirostat: number;
+  mirostat_tau: number;
+  mirostat_eta: number;
+  penalize_newline: boolean;
+  stop: string[];
+}
+interface GenerateRequest {
+  model: string;
+  prompt: string;
+  system?: string;
+  template?: string;
+  context?: number[];
+  stream?: boolean;
+  raw?: boolean;
+  format?: string;
+  images?: Uint8Array[] | string[];
+  keep_alive?: string | number;
+  options?: Partial<Options>;
+}
+interface Message {
+  role: string;
+  content: string;
+  images?: Uint8Array[] | string[];
+}
+interface ChatRequest {
+  model: string;
+  messages?: Message[];
+  stream?: boolean;
+  format?: string;
+  keep_alive?: string | number;
+  options?: Partial<Options>;
+}
+interface PullRequest {
+  model: string;
+  insecure?: boolean;
+  stream?: boolean;
+}
+interface PushRequest {
+  model: string;
+  insecure?: boolean;
+  stream?: boolean;
+}
+interface CreateRequest {
+  model: string;
+  path?: string;
+  modelfile?: string;
+  stream?: boolean;
+}
+interface DeleteRequest {
+  model: string;
+}
+interface CopyRequest {
+  source: string;
+  destination: string;
+}
+interface ShowRequest {
+  model: string;
+  system?: string;
+  template?: string;
+  options?: Partial<Options>;
+}
+interface EmbeddingsRequest {
+  model: string;
+  prompt: string;
+  keep_alive?: string | number;
+  options?: Partial<Options>;
+}
+interface GenerateResponse {
+  model: string;
+  created_at: Date;
+  response: string;
+  done: boolean;
+  context: number[];
+  total_duration: number;
+  load_duration: number;
+  prompt_eval_count: number;
+  prompt_eval_duration: number;
+  eval_count: number;
+  eval_duration: number;
+}
+interface ChatResponse {
+  model: string;
+  created_at: Date;
+  message: Message;
+  done: boolean;
+  total_duration: number;
+  load_duration: number;
+  prompt_eval_count: number;
+  prompt_eval_duration: number;
+  eval_count: number;
+  eval_duration: number;
+}
+interface EmbeddingsResponse {
+  embedding: number[];
+}
+interface ProgressResponse {
+  status: string;
+  digest: string;
+  total: number;
+  completed: number;
+}
+interface ModelResponse {
+  name: string;
+  modified_at: Date;
+  size: number;
+  digest: string;
+  details: ModelDetails;
+}
+interface ModelDetails {
+  parent_model: string;
+  format: string;
+  family: string;
+  families: string[];
+  parameter_size: string;
+  quantization_level: string;
+}
+interface ShowResponse {
+  license: string;
+  modelfile: string;
+  parameters: string;
+  template: string;
+  system: string;
+  details: ModelDetails;
+  messages: Message[];
+}
+interface ListResponse {
+  models: ModelResponse[];
+}
+interface ErrorResponse {
+  error: string;
+}
+interface StatusResponse {
+  status: string;
+}
+
+declare class Ollama {
+  protected readonly config: Config;
+  protected readonly fetch: Fetch;
+  private abortController;
+  constructor(config?: Partial<Config>);
+  abort(): void;
+  protected processStreamableRequest<T extends object>(
+    endpoint: string,
+    request: {
+      stream?: boolean;
+    } & Record<string, any>,
+  ): Promise<T | AsyncGenerator<T>>;
+  encodeImage(image: Uint8Array | string): Promise<string>;
+  generate(
+    request: GenerateRequest & {
+      stream: true;
+    },
+  ): Promise<AsyncGenerator<GenerateResponse>>;
+  generate(
+    request: GenerateRequest & {
+      stream?: false;
+    },
+  ): Promise<GenerateResponse>;
+  chat(
+    request: ChatRequest & {
+      stream: true;
+    },
+  ): Promise<AsyncGenerator<ChatResponse>>;
+  chat(
+    request: ChatRequest & {
+      stream?: false;
+    },
+  ): Promise<ChatResponse>;
+  create(
+    request: CreateRequest & {
+      stream: true;
+    },
+  ): Promise<AsyncGenerator<ProgressResponse>>;
+  create(
+    request: CreateRequest & {
+      stream?: false;
+    },
+  ): Promise<ProgressResponse>;
+  pull(
+    request: PullRequest & {
+      stream: true;
+    },
+  ): Promise<AsyncGenerator<ProgressResponse>>;
+  pull(
+    request: PullRequest & {
+      stream?: false;
+    },
+  ): Promise<ProgressResponse>;
+  push(
+    request: PushRequest & {
+      stream: true;
+    },
+  ): Promise<AsyncGenerator<ProgressResponse>>;
+  push(
+    request: PushRequest & {
+      stream?: false;
+    },
+  ): Promise<ProgressResponse>;
+  delete(request: DeleteRequest): Promise<StatusResponse>;
+  copy(request: CopyRequest): Promise<StatusResponse>;
+  list(): Promise<ListResponse>;
+  show(request: ShowRequest): Promise<ShowResponse>;
+  embeddings(request: EmbeddingsRequest): Promise<EmbeddingsResponse>;
+}
+declare const _default: Ollama;
+
+export {
+  Ollama,
+  _default as default,
+  type ChatRequest,
+  type ChatResponse,
+  type Config,
+  type CopyRequest,
+  type CreateRequest,
+  type DeleteRequest,
+  type EmbeddingsRequest,
+  type EmbeddingsResponse,
+  type ErrorResponse,
+  type Fetch,
+  type GenerateRequest,
+  type GenerateResponse,
+  type ListResponse,
+  type Message,
+  type ModelDetails,
+  type ModelResponse,
+  type Options,
+  type ProgressResponse,
+  type PullRequest,
+  type PushRequest,
+  type ShowRequest,
+  type ShowResponse,
+  type StatusResponse,
+};
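
The `generate`, `chat`, `create`, `pull`, and `push` signatures above are overloaded on the literal type of `stream`, so the declaration file alone determines whether a call resolves to one response object or to an async generator. A minimal sketch of how the two `chat` overloads resolve (the model name and prompt are illustrative, not from the patch):

```ts
import { Ollama, type ChatResponse } from "./ollama.js";

const client = new Ollama(); // defaults to http://127.0.0.1:11434

// `stream: true` selects the AsyncGenerator<ChatResponse> overload.
async function streamed(): Promise<void> {
  const chunks = await client.chat({
    model: "llama3", // illustrative model name
    messages: [{ role: "user", content: "hello" }],
    stream: true,
  });
  for await (const chunk of chunks) {
    process.stdout.write(chunk.message.content);
  }
}

// Omitting `stream` (or passing `false`) selects the single-response overload.
async function single(): Promise<ChatResponse> {
  return client.chat({
    model: "llama3",
    messages: [{ role: "user", content: "hello" }],
  });
}
```
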
diff --git a/packages/core/src/internal/deps/ollama.js b/packages/core/src/internal/deps/ollama.js
new file mode 100644
index 000000000..db189e10a
--- /dev/null
+++ b/packages/core/src/internal/deps/ollama.js
@@ -0,0 +1,462 @@
+// generated from "tsup ./src/browser.js --format esm --dts"
+var __defProp = Object.defineProperty;
+var __getOwnPropSymbols = Object.getOwnPropertySymbols;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __propIsEnum = Object.prototype.propertyIsEnumerable;
+var __knownSymbol = (name, symbol) => {
+  return (symbol = Symbol[name]) ? symbol : Symbol.for("Symbol." + name);
+};
+var __defNormalProp = (obj, key, value) =>
+  key in obj
+    ? __defProp(obj, key, {
+        enumerable: true,
+        configurable: true,
+        writable: true,
+        value,
+      })
+    : (obj[key] = value);
+var __spreadValues = (a, b) => {
+  for (var prop in b || (b = {}))
+    if (__hasOwnProp.call(b, prop)) __defNormalProp(a, prop, b[prop]);
+  if (__getOwnPropSymbols)
+    for (var prop of __getOwnPropSymbols(b)) {
+      if (__propIsEnum.call(b, prop)) __defNormalProp(a, prop, b[prop]);
+    }
+  return a;
+};
+var __async = (__this, __arguments, generator) => {
+  return new Promise((resolve, reject) => {
+    var fulfilled = (value) => {
+      try {
+        step(generator.next(value));
+      } catch (e) {
+        reject(e);
+      }
+    };
+    var rejected = (value) => {
+      try {
+        step(generator.throw(value));
+      } catch (e) {
+        reject(e);
+      }
+    };
+    var step = (x) =>
+      x.done
+        ? resolve(x.value)
+        : Promise.resolve(x.value).then(fulfilled, rejected);
+    step((generator = generator.apply(__this, __arguments)).next());
+  });
+};
+var __await = function (promise, isYieldStar) {
+  this[0] = promise;
+  this[1] = isYieldStar;
+};
+var __asyncGenerator = (__this, __arguments, generator) => {
+  var resume = (k, v, yes, no) => {
+    try {
+      var x = generator[k](v),
+        isAwait = (v = x.value) instanceof __await,
+        done = x.done;
+      Promise.resolve(isAwait ? v[0] : v)
+        .then((y) =>
+          isAwait
+            ? resume(
+                k === "return" ? k : "next",
+                v[1] ? { done: y.done, value: y.value } : y,
+                yes,
+                no,
+              )
+            : yes({ value: y, done }),
+        )
+        .catch((e) => resume("throw", e, yes, no));
+    } catch (e) {
+      no(e);
+    }
+  };
+  var method = (k) =>
+    (it[k] = (x) => new Promise((yes, no) => resume(k, x, yes, no)));
+  var it = {};
+  return (
+    (generator = generator.apply(__this, __arguments)),
+    (it[__knownSymbol("asyncIterator")] = () => it),
+    method("next"),
+    method("throw"),
+    method("return"),
+    it
+  );
+};
+var __forAwait = (obj, it, method) =>
+  (it = obj[__knownSymbol("asyncIterator")])
+    ? it.call(obj)
+    : ((obj = obj[__knownSymbol("iterator")]()),
+      (it = {}),
+      (method = (key, fn) =>
+        (fn = obj[key]) &&
+        (it[key] = (arg) =>
+          new Promise(
+            (yes, no, done) => (
+              (arg = fn.call(obj, arg)),
+              (done = arg.done),
+              Promise.resolve(arg.value).then(
+                (value) => yes({ value, done }),
+                no,
+              )
+            ),
+          ))),
+      method("next"),
+      method("return"),
+      it);
+
+// src/version.ts
+var version = "0.0.0";
+
+// src/utils.ts
+var ResponseError = class _ResponseError extends Error {
+  constructor(error, status_code) {
+    super(error);
+    this.error = error;
+    this.status_code = status_code;
+    this.name = "ResponseError";
+    if (Error.captureStackTrace) {
+      Error.captureStackTrace(this, _ResponseError);
+    }
+  }
+};
+var checkOk = (response) =>
+  __async(void 0, null, function* () {
+    var _a;
+    if (!response.ok) {
+      let message = `Error ${response.status}: ${response.statusText}`;
+      let errorData = null;
+      if (
+        (_a = response.headers.get("content-type")) == null
+          ? void 0
+          : _a.includes("application/json")
+      ) {
+        try {
+          errorData = yield response.json();
+          message = errorData.error || message;
+        } catch (error) {
+          console.log("Failed to parse error response as JSON");
+        }
+      } else {
+        try {
+          console.log("Getting text from response");
+          const textResponse = yield response.text();
+          message = textResponse || message;
+        } catch (error) {
+          console.log("Failed to get text from error response");
+        }
+      }
+      throw new ResponseError(message, response.status);
+    }
+  });
+function getPlatform() {
+  if (typeof window !== "undefined" && window.navigator) {
+    return `${window.navigator.platform.toLowerCase()} Browser/${navigator.userAgent};`;
+  } else if (typeof process !== "undefined") {
+    return `${process.arch} ${process.platform} Node.js/${process.version}`;
+  }
+  return "";
+}
+var fetchWithHeaders = (_0, _1, ..._2) =>
+  __async(void 0, [_0, _1, ..._2], function* (fetch2, url, options = {}) {
+    const defaultHeaders = {
+      "Content-Type": "application/json",
+      Accept: "application/json",
+      "User-Agent": `ollama-js/${version} (${getPlatform()})`,
+    };
+    if (!options.headers) {
+      options.headers = {};
+    }
+    options.headers = __spreadValues(
+      __spreadValues({}, defaultHeaders),
+      options.headers,
+    );
+    return fetch2(url, options);
+  });
+var get = (fetch2, host) =>
+  __async(void 0, null, function* () {
+    const response = yield fetchWithHeaders(fetch2, host);
+    yield checkOk(response);
+    return response;
+  });
+var post = (fetch2, host, data, options) =>
+  __async(void 0, null, function* () {
+    const isRecord = (input) => {
+      return (
+        input !== null && typeof input === "object" && !Array.isArray(input)
+      );
+    };
+    const formattedData = isRecord(data) ? JSON.stringify(data) : data;
+    const response = yield fetchWithHeaders(fetch2, host, {
+      method: "POST",
+      body: formattedData,
+      signal: options == null ? void 0 : options.signal,
+    });
+    yield checkOk(response);
+    return response;
+  });
+var del = (fetch2, host, data) =>
+  __async(void 0, null, function* () {
+    const response = yield fetchWithHeaders(fetch2, host, {
+      method: "DELETE",
+      body: JSON.stringify(data),
+    });
+    yield checkOk(response);
+    return response;
+  });
+var parseJSON = function (itr) {
+  return __asyncGenerator(this, null, function* () {
+    var _a;
+    const decoder = new TextDecoder("utf-8");
+    let buffer = "";
+    const reader = itr.getReader();
+    while (true) {
+      const { done, value: chunk } = yield new __await(reader.read());
+      if (done) {
+        break;
+      }
+      buffer += decoder.decode(chunk);
+      const parts = buffer.split("\n");
+      buffer = (_a = parts.pop()) != null ? _a : "";
+      for (const part of parts) {
+        try {
+          yield JSON.parse(part);
+        } catch (error) {
+          console.warn("invalid json: ", part);
+        }
+      }
+    }
+    for (const part of buffer.split("\n").filter((p) => p !== "")) {
+      try {
+        yield JSON.parse(part);
+      } catch (error) {
+        console.warn("invalid json: ", part);
+      }
+    }
+  });
+};
+var formatHost = (host) => {
+  if (!host) {
+    return "http://127.0.0.1:11434";
+  }
+  let isExplicitProtocol = host.includes("://");
+  if (host.startsWith(":")) {
+    host = `http://127.0.0.1${host}`;
+    isExplicitProtocol = false;
+  }
+  if (!isExplicitProtocol) {
+    host = `http://${host}`;
+  }
+  const url = new URL(host);
+  let port = url.port;
+  if (!port) {
+    if (!isExplicitProtocol) {
+      port = "11434";
+    } else {
+      port = url.protocol === "https:" ? "443" : "80";
+    }
+  }
+  let formattedHost = `${url.protocol}//${url.hostname}:${port}${url.pathname}`;
+  if (formattedHost.endsWith("/")) {
+    formattedHost = formattedHost.slice(0, -1);
+  }
+  return formattedHost;
+};
+
+// src/browser.ts
+// import "whatwg-fetch";
+var Ollama = class {
+  constructor(config) {
+    var _a;
+    this.config = {
+      host: "",
+    };
+    if (!(config == null ? void 0 : config.proxy)) {
+      this.config.host = formatHost(
+        (_a = config == null ? void 0 : config.host) != null
+          ? _a
+          : "http://127.0.0.1:11434",
+      );
+    }
+    this.fetch = fetch;
+    if ((config == null ? void 0 : config.fetch) != null) {
+      this.fetch = config.fetch;
+    }
+    this.abortController = new AbortController();
+  }
+  // Abort any ongoing requests to Ollama
+  abort() {
+    this.abortController.abort();
+    this.abortController = new AbortController();
+  }
+  processStreamableRequest(endpoint, request) {
+    return __async(this, null, function* () {
+      var _a;
+      request.stream = (_a = request.stream) != null ? _a : false;
+      const response = yield post(
+        this.fetch,
+        `${this.config.host}/api/${endpoint}`,
+        __spreadValues({}, request),
+        { signal: this.abortController.signal },
+      );
+      if (!response.body) {
+        throw new Error("Missing body");
+      }
+      const itr = parseJSON(response.body);
+      if (request.stream) {
+        return (function () {
+          return __asyncGenerator(this, null, function* () {
+            try {
+              for (
+                var iter = __forAwait(itr), more, temp, error;
+                (more = !(temp = yield new __await(iter.next())).done);
+                more = false
+              ) {
+                const message = temp.value;
+                if ("error" in message) {
+                  throw new Error(message.error);
+                }
+                yield message;
+                if (message.done || message.status === "success") {
+                  return;
+                }
+              }
+            } catch (temp) {
+              error = [temp];
+            } finally {
+              try {
+                more &&
+                  (temp = iter.return) &&
+                  (yield new __await(temp.call(iter)));
+              } finally {
+                if (error) throw error[0];
+              }
+            }
+            throw new Error(
+              "Did not receive done or success response in stream.",
+            );
+          });
+        })();
+      } else {
+        const message = yield itr.next();
+        if (!message.value.done && message.value.status !== "success") {
+          throw new Error("Expected a completed response.");
+        }
+        return message.value;
+      }
+    });
+  }
+  encodeImage(image) {
+    return __async(this, null, function* () {
+      if (typeof image !== "string") {
+        const uint8Array = new Uint8Array(image);
+        const numberArray = Array.from(uint8Array);
+        const base64String = btoa(String.fromCharCode.apply(null, numberArray));
+        return base64String;
+      }
+      return image;
+    });
+  }
+  generate(request) {
+    return __async(this, null, function* () {
+      if (request.images) {
+        request.images = yield Promise.all(
+          request.images.map(this.encodeImage.bind(this)),
+        );
+      }
+      return this.processStreamableRequest("generate", request);
+    });
+  }
+  chat(request) {
+    return __async(this, null, function* () {
+      if (request.messages) {
+        for (const message of request.messages) {
+          if (message.images) {
+            message.images = yield Promise.all(
+              message.images.map(this.encodeImage.bind(this)),
+            );
+          }
+        }
+      }
+      return this.processStreamableRequest("chat", request);
+    });
+  }
+  create(request) {
+    return __async(this, null, function* () {
+      return this.processStreamableRequest("create", {
+        name: request.model,
+        stream: request.stream,
+        modelfile: request.modelfile,
+      });
+    });
+  }
+  pull(request) {
+    return __async(this, null, function* () {
+      return this.processStreamableRequest("pull", {
+        name: request.model,
+        stream: request.stream,
+        insecure: request.insecure,
+      });
+    });
+  }
+  push(request) {
+    return __async(this, null, function* () {
+      return this.processStreamableRequest("push", {
+        name: request.model,
+        stream: request.stream,
+        insecure: request.insecure,
+      });
+    });
+  }
+  delete(request) {
+    return __async(this, null, function* () {
+      yield del(this.fetch, `${this.config.host}/api/delete`, {
+        name: request.model,
+      });
+      return { status: "success" };
+    });
+  }
+  copy(request) {
+    return __async(this, null, function* () {
+      yield post(
+        this.fetch,
+        `${this.config.host}/api/copy`,
+        __spreadValues({}, request),
+      );
+      return { status: "success" };
+    });
+  }
+  list() {
+    return __async(this, null, function* () {
+      const response = yield get(this.fetch, `${this.config.host}/api/tags`);
+      const listResponse = yield response.json();
+      return listResponse;
+    });
+  }
+  show(request) {
+    return __async(this, null, function* () {
+      const response = yield post(
+        this.fetch,
+        `${this.config.host}/api/show`,
+        __spreadValues({}, request),
+      );
+      const showResponse = yield response.json();
+      return showResponse;
+    });
+  }
+  embeddings(request) {
+    return __async(this, null, function* () {
+      const response = yield post(
+        this.fetch,
+        `${this.config.host}/api/embeddings`,
+        __spreadValues({}, request),
+      );
+      const embeddingsResponse = yield response.json();
+      return embeddingsResponse;
+    });
+  }
+};
+var browser_default = new Ollama();
+export { Ollama, browser_default as default };
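
Because the module ends by exporting a ready-made client bound to the local daemon, the vendored build can be smoke-tested directly against a running Ollama instance. A hedged sketch, assuming the default daemon address and an already-pulled model (the model name is illustrative):

```ts
import ollama from "./ollama.js";

// Assumes an Ollama daemon on the default http://127.0.0.1:11434.
async function smokeTest(): Promise<void> {
  // GET /api/tags via the vendored `get` helper.
  const { models } = await ollama.list();
  console.log(models.map((m) => m.name));

  // Non-streaming POST /api/generate; resolves to a single GenerateResponse.
  const res = await ollama.generate({
    model: "llama3", // illustrative; any locally pulled model works
    prompt: "Say hi in five words.",
  });
  console.log(res.response);
}

smokeTest().catch(console.error);
```
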
diff --git a/packages/core/src/internal/deps/ollama.license b/packages/core/src/internal/deps/ollama.license
new file mode 100644
index 000000000..49bd8b185
--- /dev/null
+++ b/packages/core/src/internal/deps/ollama.license
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2023 Saul
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/packages/core/src/llm/ollama.ts b/packages/core/src/llm/ollama.ts
index 195b5ef05..3dce01178 100644
--- a/packages/core/src/llm/ollama.ts
+++ b/packages/core/src/llm/ollama.ts
@@ -1,3 +1,4 @@
+import { BaseEmbedding } from "../embeddings/types.js";
 import ollama, {
   type CreateRequest,
   type ChatResponse as OllamaChatResponse,
@@ -5,8 +6,7 @@ import ollama, {
   type Options,
   type ProgressResponse,
   type ShowRequest,
-} from "ollama/browser";
-import { BaseEmbedding } from "../embeddings/types.js";
+} from "../internal/deps/ollama.js";
 import type {
   ChatResponse,
   ChatResponseChunk,
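
The swap from the `ollama/browser` deep import to the vendored path also covers the embeddings route that the `BaseEmbedding` side of this file builds on. A hedged sketch of that call against the vendored client (the embedding model name is illustrative):

```ts
import ollama from "../internal/deps/ollama.js";

// POST /api/embeddings through the vendored client; the response shape
// matches the EmbeddingsResponse declaration above.
const { embedding } = await ollama.embeddings({
  model: "nomic-embed-text", // illustrative model name
  prompt: "hello from the inlined build",
});
console.log(embedding.length);
```
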
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 679666bd4..f40758c7b 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -271,9 +271,6 @@ importers:
       notion-md-crawler:
         specifier: ^1.0.0
         version: 1.0.0(encoding@0.1.13)
-      ollama:
-        specifier: ^0.5.0
-        version: 0.5.0
       openai:
         specifier: ^4.38.0
         version: 4.38.1(encoding@0.1.13)
@@ -348,16 +345,6 @@ importers:
         specifier: ^4.7.2
         version: 4.7.2
 
-  packages/core/e2e/examples/chroma-db:
-    dependencies:
-      llamaindex:
-        specifier: workspace:*
-        version: link:../../..
-    devDependencies:
-      tsx:
-        specifier: ^4.7.2
-        version: 4.7.2
-
   packages/core/e2e/examples/cloudflare-worker-agent:
     dependencies:
       llamaindex:
-- 
GitLab