Unverified commit 7342839e, authored by Timothy Carambat, committed by GitHub

Passthrough agentModel for LMStudio (#2499)

parent ab6f03ce
@@ -5,7 +5,7 @@ const {
 // hybrid of openAi LLM chat completion for LMStudio
 class LMStudioLLM {
-  constructor(embedder = null, _modelPreference = null) {
+  constructor(embedder = null, modelPreference = null) {
     if (!process.env.LMSTUDIO_BASE_PATH)
       throw new Error("No LMStudio API Base Path was set.");

@@ -21,7 +21,10 @@ class LMStudioLLM {
     // and any other value will crash inferencing. So until this is patched we will
     // try to fetch the `/models` and have the user set it, or just fallback to "Loaded from Chat UI"
     // which will not impact users with <v0.2.17 and should work as well once the bug is fixed.
-    this.model = process.env.LMSTUDIO_MODEL_PREF || "Loaded from Chat UI";
+    this.model =
+      modelPreference ||
+      process.env.LMSTUDIO_MODEL_PREF ||
+      "Loaded from Chat UI";
     this.limits = {
       history: this.promptWindowLimit() * 0.15,
       system: this.promptWindowLimit() * 0.15,
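For readers skimming the diff: the constructor's new fallback chain can be read as a standalone sketch. This is illustrative only; `resolveModel` is a hypothetical helper name, and the only assumed environment variable is `LMSTUDIO_MODEL_PREF` from the diff above.

// Sketch of the model resolution order introduced in this commit.
function resolveModel(modelPreference = null) {
  return (
    modelPreference || // 1. explicit preference passed by the caller (the agent model)
    process.env.LMSTUDIO_MODEL_PREF || // 2. instance-wide default from the environment
    "Loaded from Chat UI" // 3. sentinel that defers to whatever model LMStudio has loaded
  );
}

// resolveModel("my-agent-model") -> "my-agent-model"
// resolveModel()                 -> value of LMSTUDIO_MODEL_PREF, if set
// resolveModel()                 -> "Loaded from Chat UI" otherwise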
@@ -756,7 +756,7 @@ ${this.getHistory({ to: route.to })
       case "anthropic":
         return new Providers.AnthropicProvider({ model: config.model });
       case "lmstudio":
-        return new Providers.LMStudioProvider({});
+        return new Providers.LMStudioProvider({ model: config.model });
       case "ollama":
         return new Providers.OllamaProvider({ model: config.model });
       case "groq":
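The surrounding switch (only partially shown) builds a provider from the agent's config; before this change, the `lmstudio` case was the one branch that discarded `config.model`. A reduced sketch of the pattern follows, with a stubbed `Providers` registry; the names and shapes here are illustrative, not the real module.

// Stub registry standing in for the real Providers module.
const Providers = {
  LMStudioProvider: class {
    constructor({ model } = {}) {
      this.model =
        model || process.env.LMSTUDIO_MODEL_PREF || "Loaded from Chat UI";
    }
  },
};

// Hypothetical stand-in for the factory function containing the switch.
function providerFromConfig(config) {
  switch (config.provider) {
    case "lmstudio":
      // The fix: forward config.model instead of passing {} so the
      // agent's model selection actually reaches the provider.
      return new Providers.LMStudioProvider({ model: config.model });
    default:
      throw new Error(`Unsupported provider: ${config.provider}`);
  }
}

// providerFromConfig({ provider: "lmstudio", model: "qwen2.5-7b-instruct" })
//   .model === "qwen2.5-7b-instruct" (previously: env default or sentinel)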
@@ -9,9 +9,14 @@ const UnTooled = require("./helpers/untooled.js");
 class LMStudioProvider extends InheritMultiple([Provider, UnTooled]) {
   model;
-  constructor(_config = {}) {
+  /**
+   *
+   * @param {{model?: string}} config
+   */
+  constructor(config = {}) {
     super();
-    const model = process.env.LMSTUDIO_MODEL_PREF || "Loaded from Chat UI";
+    const model =
+      config?.model || process.env.LMSTUDIO_MODEL_PREF || "Loaded from Chat UI";
     const client = new OpenAI({
       baseURL: process.env.LMSTUDIO_BASE_PATH?.replace(/\/+$/, ""), // here is the URL to your LMStudio instance
       apiKey: null,
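Taken together, the provider can now be pinned per-agent at construction time. A usage sketch, assuming `LMStudioProvider` is in scope (its module path is not shown in this diff) and that the base path and model name below are example values:

// LMStudio exposes an OpenAI-compatible endpoint; the URL is an example.
process.env.LMSTUDIO_BASE_PATH = "http://localhost:1234/v1";

// With an explicit model (e.g. the workspace's agent model), config.model wins:
const scoped = new LMStudioProvider({ model: "llama-3.1-8b-instruct" });

// With no model given, the prior behavior is preserved:
// LMSTUDIO_MODEL_PREF, then the "Loaded from Chat UI" sentinel.
const fallback = new LMStudioProvider({});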