Skip to content
Snippets Groups Projects
Commit f5da6623 authored by Marcus Schiesser's avatar Marcus Schiesser
Browse files

fix: update llamaindex, use 127.0.0.1 for ollama as default

parent 0950cb90
No related branches found
No related tags found
No related merge requests found
...@@ -224,7 +224,7 @@ const getModelEnvs = (modelConfig: ModelConfig): EnvVar[] => { ...@@ -224,7 +224,7 @@ const getModelEnvs = (modelConfig: ModelConfig): EnvVar[] => {
{ {
name: "OLLAMA_BASE_URL", name: "OLLAMA_BASE_URL",
description: description:
"The base URL for the Ollama API. Eg: http://localhost:11434", "The base URL for the Ollama API. Eg: http://127.0.0.1:11434",
}, },
] ]
: []), : []),
......
...@@ -4,7 +4,5 @@ export async function initializeGlobalAgent() { ...@@ -4,7 +4,5 @@ export async function initializeGlobalAgent() {
/* Dynamically import global-agent/bootstrap */ /* Dynamically import global-agent/bootstrap */
await import("global-agent/bootstrap"); await import("global-agent/bootstrap");
console.log("Proxy enabled via global-agent."); console.log("Proxy enabled via global-agent.");
} else {
console.log("No proxy configuration found. Continuing without proxy.");
} }
} }
...@@ -14,7 +14,7 @@ ...@@ -14,7 +14,7 @@
"cors": "^2.8.5", "cors": "^2.8.5",
"dotenv": "^16.3.1", "dotenv": "^16.3.1",
"express": "^4.18.2", "express": "^4.18.2",
"llamaindex": "0.3.9", "llamaindex": "0.3.13",
"pdf2json": "3.0.5", "pdf2json": "3.0.5",
"ajv": "^8.12.0" "ajv": "^8.12.0"
}, },
......
...@@ -57,7 +57,7 @@ function initOpenAI() { ...@@ -57,7 +57,7 @@ function initOpenAI() {
function initOllama() { function initOllama() {
const config = { const config = {
host: process.env.OLLAMA_BASE_URL ?? "http://localhost:11434", host: process.env.OLLAMA_BASE_URL ?? "http://127.0.0.1:11434",
}; };
Settings.llm = new Ollama({ Settings.llm = new Ollama({
......
...@@ -23,7 +23,7 @@ def init_ollama(): ...@@ -23,7 +23,7 @@ def init_ollama():
from llama_index.llms.ollama import Ollama from llama_index.llms.ollama import Ollama
from llama_index.embeddings.ollama import OllamaEmbedding from llama_index.embeddings.ollama import OllamaEmbedding
base_url = os.getenv("OLLAMA_BASE_URL") or "http://localhost:11434" base_url = os.getenv("OLLAMA_BASE_URL") or "http://127.0.0.1:11434"
Settings.embed_model = OllamaEmbedding( Settings.embed_model = OllamaEmbedding(
base_url=base_url, base_url=base_url,
model_name=os.getenv("EMBEDDING_MODEL"), model_name=os.getenv("EMBEDDING_MODEL"),
......
...@@ -57,7 +57,7 @@ function initOpenAI() { ...@@ -57,7 +57,7 @@ function initOpenAI() {
function initOllama() { function initOllama() {
const config = { const config = {
host: process.env.OLLAMA_BASE_URL ?? "http://localhost:11434", host: process.env.OLLAMA_BASE_URL ?? "http://127.0.0.1:11434",
}; };
Settings.llm = new Ollama({ Settings.llm = new Ollama({
model: process.env.MODEL ?? "", model: process.env.MODEL ?? "",
......
...@@ -18,7 +18,7 @@ ...@@ -18,7 +18,7 @@
"class-variance-authority": "^0.7.0", "class-variance-authority": "^0.7.0",
"clsx": "^2.1.1", "clsx": "^2.1.1",
"dotenv": "^16.3.1", "dotenv": "^16.3.1",
"llamaindex": "0.3.8", "llamaindex": "0.3.13",
"lucide-react": "^0.294.0", "lucide-react": "^0.294.0",
"next": "^14.0.3", "next": "^14.0.3",
"pdf2json": "3.0.5", "pdf2json": "3.0.5",
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment