Unverified Commit 01cf2fed authored by Timothy Carambat, committed by GitHub

Make native embedder the fallback for all LLMs (#1427)

parent 7e0b638a
Showing 29 additions and 74 deletions
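The same change repeats across every provider class below. As a minimal sketch of the pattern (the class name here is a placeholder; the import path and constructor shape are taken from the diff): constructors that previously threw or warned when no embedder was supplied now fall back to the built-in native embedder.

const { NativeEmbedder } = require("../../EmbeddingEngines/native");

class ExampleLLM {
  // Placeholder class; each real provider (AnthropicLLM, GeminiLLM, ...) follows this shape.
  constructor(embedder = null, modelPreference = null) {
    // Before: some providers threw ("INVALID ... SETUP") or warned here.
    // After: `??` supplies the native engine when `embedder` is null or undefined.
    this.embedder = embedder ?? new NativeEmbedder();
    this.defaultTemp = 0.7;
  }
}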
@@ -3,6 +3,7 @@ const {
   writeResponseChunk,
   clientAbortedHandler,
 } = require("../../helpers/chat/responses");
+const { NativeEmbedder } = require("../../EmbeddingEngines/native");

 class AnthropicLLM {
   constructor(embedder = null, modelPreference = null) {
@@ -23,11 +24,7 @@ class AnthropicLLM {
       user: this.promptWindowLimit() * 0.7,
     };

-    if (!embedder)
-      throw new Error(
-        "INVALID ANTHROPIC SETUP. No embedding engine has been set. Go to instance settings and set up an embedding interface to use Anthropic as your LLM."
-      );
-    this.embedder = embedder;
+    this.embedder = embedder ?? new NativeEmbedder();
     this.defaultTemp = 0.7;
   }
...
-const { AzureOpenAiEmbedder } = require("../../EmbeddingEngines/azureOpenAi");
+const { NativeEmbedder } = require("../../EmbeddingEngines/native");
 const {
   writeResponseChunk,
   clientAbortedHandler,
@@ -23,11 +23,7 @@ class AzureOpenAiLLM {
       user: this.promptWindowLimit() * 0.7,
     };

-    if (!embedder)
-      console.warn(
-        "No embedding provider defined for AzureOpenAiLLM - falling back to AzureOpenAiEmbedder for embedding!"
-      );
-    this.embedder = !embedder ? new AzureOpenAiEmbedder() : embedder;
+    this.embedder = embedder ?? new NativeEmbedder();
     this.defaultTemp = 0.7;
   }
...
@@ -19,7 +19,8 @@ class CohereLLM {
       system: this.promptWindowLimit() * 0.15,
       user: this.promptWindowLimit() * 0.7,
     };
-    this.embedder = !!embedder ? embedder : new NativeEmbedder();
+    this.embedder = embedder ?? new NativeEmbedder();
   }

   #appendContext(contextTexts = []) {
...
+const { NativeEmbedder } = require("../../EmbeddingEngines/native");
 const {
   writeResponseChunk,
   clientAbortedHandler,
@@ -26,11 +27,7 @@ class GeminiLLM {
       user: this.promptWindowLimit() * 0.7,
     };

-    if (!embedder)
-      throw new Error(
-        "INVALID GEMINI LLM SETUP. No embedding engine has been set. Go to instance settings and set up an embedding interface to use Gemini as your LLM."
-      );
-    this.embedder = embedder;
+    this.embedder = embedder ?? new NativeEmbedder();
     this.defaultTemp = 0.7; // not used for Gemini
   }
...
@@ -27,11 +27,7 @@ class GenericOpenAiLLM {
       user: this.promptWindowLimit() * 0.7,
     };

-    if (!embedder)
-      console.warn(
-        "No embedding provider defined for GenericOpenAiLLM - falling back to NativeEmbedder for embedding!"
-      );
-    this.embedder = !embedder ? new NativeEmbedder() : embedder;
+    this.embedder = embedder ?? new NativeEmbedder();
     this.defaultTemp = 0.7;
     this.log(`Inference API: ${this.basePath} Model: ${this.model}`);
   }
...
@@ -20,7 +20,7 @@ class GroqLLM {
       user: this.promptWindowLimit() * 0.7,
     };
-    this.embedder = !embedder ? new NativeEmbedder() : embedder;
+    this.embedder = embedder ?? new NativeEmbedder();
     this.defaultTemp = 0.7;
   }
...
 const { NativeEmbedder } = require("../../EmbeddingEngines/native");
-const { OpenAiEmbedder } = require("../../EmbeddingEngines/openAi");
 const {
   handleDefaultStreamResponseV2,
 } = require("../../helpers/chat/responses");
@@ -26,11 +25,7 @@ class HuggingFaceLLM {
       user: this.promptWindowLimit() * 0.7,
     };

-    if (!embedder)
-      console.warn(
-        "No embedding provider defined for HuggingFaceLLM - falling back to Native for embedding!"
-      );
-    this.embedder = !embedder ? new OpenAiEmbedder() : new NativeEmbedder();
+    this.embedder = embedder ?? new NativeEmbedder();
     this.defaultTemp = 0.2;
   }
...
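The removed HuggingFace line is worth a second look: it was buggy independent of this refactor. Neither ternary branch used the caller's embedder, and the fallback branch contradicted the warning's claim of falling back to Native:

// Old: a supplied embedder was always discarded, and a missing one
// produced an OpenAiEmbedder despite the "falling back to Native" warning.
this.embedder = !embedder ? new OpenAiEmbedder() : new NativeEmbedder();
// New: a supplied embedder is kept; NativeEmbedder is only the fallback.
this.embedder = embedder ?? new NativeEmbedder();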
@@ -26,11 +26,7 @@ class KoboldCPPLLM {
       user: this.promptWindowLimit() * 0.7,
     };

-    if (!embedder)
-      console.warn(
-        "No embedding provider defined for KoboldCPPLLM - falling back to NativeEmbedder for embedding!"
-      );
-    this.embedder = !embedder ? new NativeEmbedder() : embedder;
+    this.embedder = embedder ?? new NativeEmbedder();
     this.defaultTemp = 0.7;
     this.log(`Inference API: ${this.basePath} Model: ${this.model}`);
   }
...
@@ -26,11 +26,7 @@ class LiteLLM {
       user: this.promptWindowLimit() * 0.7,
     };

-    if (!embedder)
-      console.warn(
-        "No embedding provider defined for LiteLLM - falling back to NativeEmbedder for embedding!"
-      );
-    this.embedder = !embedder ? new NativeEmbedder() : embedder;
+    this.embedder = embedder ?? new NativeEmbedder();
     this.defaultTemp = 0.7;
     this.log(`Inference API: ${this.basePath} Model: ${this.model}`);
   }
...
+const { NativeEmbedder } = require("../../EmbeddingEngines/native");
 const {
   handleDefaultStreamResponseV2,
 } = require("../../helpers/chat/responses");
@@ -27,11 +28,7 @@ class LMStudioLLM {
       user: this.promptWindowLimit() * 0.7,
     };

-    if (!embedder)
-      throw new Error(
-        "INVALID LM STUDIO SETUP. No embedding engine has been set. Go to instance settings and set up an embedding interface to use LMStudio as your LLM."
-      );
-    this.embedder = embedder;
+    this.embedder = embedder ?? new NativeEmbedder();
     this.defaultTemp = 0.7;
   }
...
+const { NativeEmbedder } = require("../../EmbeddingEngines/native");
 const {
   handleDefaultStreamResponseV2,
 } = require("../../helpers/chat/responses");
@@ -19,11 +20,7 @@ class LocalAiLLM {
       user: this.promptWindowLimit() * 0.7,
     };

-    if (!embedder)
-      throw new Error(
-        "INVALID LOCAL AI SETUP. No embedding engine has been set. Go to instance settings and set up an embedding interface to use LocalAI as your LLM."
-      );
-    this.embedder = embedder;
+    this.embedder = embedder ?? new NativeEmbedder();
     this.defaultTemp = 0.7;
   }
...
+const { NativeEmbedder } = require("../../EmbeddingEngines/native");
 const {
   handleDefaultStreamResponseV2,
 } = require("../../helpers/chat/responses");
@@ -20,11 +21,7 @@ class MistralLLM {
       user: this.promptWindowLimit() * 0.7,
     };

-    if (!embedder)
-      console.warn(
-        "No embedding provider defined for MistralLLM - falling back to OpenAiEmbedder for embedding!"
-      );
-    this.embedder = embedder;
+    this.embedder = embedder ?? new NativeEmbedder();
     this.defaultTemp = 0.0;
   }
...
@@ -23,7 +23,7 @@ class NativeLLM {
       system: this.promptWindowLimit() * 0.15,
       user: this.promptWindowLimit() * 0.7,
     };
-    this.embedder = embedder || new NativeEmbedder();
+    this.embedder = embedder ?? new NativeEmbedder();
     this.cacheDir = path.resolve(
       process.env.STORAGE_DIR
         ? path.resolve(process.env.STORAGE_DIR, "models", "downloaded")
...
@@ -3,6 +3,7 @@ const {
   writeResponseChunk,
   clientAbortedHandler,
 } = require("../../helpers/chat/responses");
+const { NativeEmbedder } = require("../../EmbeddingEngines/native");

 // Docs: https://github.com/jmorganca/ollama/blob/main/docs/api.md
 class OllamaAILLM {
@@ -18,11 +19,7 @@ class OllamaAILLM {
       user: this.promptWindowLimit() * 0.7,
     };

-    if (!embedder)
-      throw new Error(
-        "INVALID OLLAMA SETUP. No embedding engine has been set. Go to instance settings and set up an embedding interface to use Ollama as your LLM."
-      );
-    this.embedder = embedder;
+    this.embedder = embedder ?? new NativeEmbedder();
     this.defaultTemp = 0.7;
   }
...
-const { OpenAiEmbedder } = require("../../EmbeddingEngines/openAi");
+const { NativeEmbedder } = require("../../EmbeddingEngines/native");
 const {
   handleDefaultStreamResponseV2,
 } = require("../../helpers/chat/responses");
@@ -18,11 +18,7 @@ class OpenAiLLM {
       user: this.promptWindowLimit() * 0.7,
     };

-    if (!embedder)
-      console.warn(
-        "No embedding provider defined for OpenAiLLM - falling back to OpenAiEmbedder for embedding!"
-      );
-    this.embedder = !embedder ? new OpenAiEmbedder() : embedder;
+    this.embedder = embedder ?? new NativeEmbedder();
     this.defaultTemp = 0.7;
   }
...
@@ -36,7 +36,7 @@ class OpenRouterLLM {
       user: this.promptWindowLimit() * 0.7,
     };
-    this.embedder = !embedder ? new NativeEmbedder() : embedder;
+    this.embedder = embedder ?? new NativeEmbedder();
     this.defaultTemp = 0.7;

     if (!fs.existsSync(cacheFolder))
...
@@ -28,7 +28,7 @@ class PerplexityLLM {
       user: this.promptWindowLimit() * 0.7,
     };
-    this.embedder = !embedder ? new NativeEmbedder() : embedder;
+    this.embedder = embedder ?? new NativeEmbedder();
     this.defaultTemp = 0.7;
   }
...
@@ -23,7 +23,7 @@ class TextGenWebUILLM {
       user: this.promptWindowLimit() * 0.7,
     };
-    this.embedder = !embedder ? new NativeEmbedder() : embedder;
+    this.embedder = embedder ?? new NativeEmbedder();
     this.defaultTemp = 0.7;
     this.log(`Inference API: ${this.basePath} Model: ${this.model}`);
   }
...
+const { NativeEmbedder } = require("../../EmbeddingEngines/native");
 const {
   handleDefaultStreamResponseV2,
 } = require("../../helpers/chat/responses");
@@ -23,11 +24,7 @@ class TogetherAiLLM {
       user: this.promptWindowLimit() * 0.7,
     };

-    if (!embedder)
-      throw new Error(
-        "INVALID TOGETHER AI SETUP. No embedding engine has been set. Go to instance settings and set up an embedding interface to use Together AI as your LLM."
-      );
-    this.embedder = embedder;
+    this.embedder = !embedder ? new NativeEmbedder() : embedder;
     this.defaultTemp = 0.7;
   }
...
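Note that the Together AI replacement keeps the ternary spelling rather than `??`; since `embedder` is an object or null here, the behavior is identical. The net effect of the commit, as a usage sketch (the require path and constructor arguments are assumptions for illustration, not part of the diff):

// Hypothetical: construct a provider without configuring an embedder.
const { TogetherAiLLM } = require("./utils/AiProviders/togetherAi");
const llm = new TogetherAiLLM(null, "some-model");
// Before this commit: threw "INVALID TOGETHER AI SETUP. ..."
// After: llm.embedder is a NativeEmbedder instance.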