Timothy Carambat authored

* feat: add new model provider: Novita AI
* feat: finished novita AI
* fix: code lint
* remove unneeded logging
* add back log for novita stream not self closing
* Clarify ENV vars for LLM/embedder separation for future
  Patch ENV check for workspace/agent provider

---------

Co-authored-by: Jason <ggbbddjm@gmail.com>
Co-authored-by: shatfield4 <seanhatfield5@gmail.com>
index.js 11.62 KiB
const { NativeEmbedder } = require("../../EmbeddingEngines/native");
const { v4: uuidv4 } = require("uuid");
const {
writeResponseChunk,
clientAbortedHandler,
} = require("../../helpers/chat/responses");
const fs = require("fs");
const path = require("path");
const { safeJsonParse } = require("../../http");
// Where the cached list of Novita models is stored on disk.
const cacheFolder = path.resolve(
  process.env.STORAGE_DIR
    ? path.resolve(process.env.STORAGE_DIR, "models", "novita")
    : path.resolve(__dirname, `../../../storage/models/novita`)
);
class NovitaLLM {
constructor(embedder = null, modelPreference = null) {
if (!process.env.NOVITA_LLM_API_KEY)
throw new Error("No Novita API key was set.");
const { OpenAI: OpenAIApi } = require("openai");
this.basePath = "https://api.novita.ai/v3/openai";
this.openai = new OpenAIApi({
baseURL: this.basePath,
apiKey: process.env.NOVITA_LLM_API_KEY ?? null,
defaultHeaders: {
"HTTP-Referer": "https://anythingllm.com",
"X-Novita-Source": "anythingllm",
},
});
this.model =
modelPreference ||
process.env.NOVITA_LLM_MODEL_PREF ||
"gryphe/mythomax-l2-13b";
// Reserve ~15% of the context window for history, ~15% for the system
// prompt, and ~70% for the user prompt.
this.limits = {
  history: this.promptWindowLimit() * 0.15,
  system: this.promptWindowLimit() * 0.15,
  user: this.promptWindowLimit() * 0.7,
};
this.embedder = embedder ?? new NativeEmbedder();
this.defaultTemp = 0.7;
this.timeout = this.#parseTimeout();
if (!fs.existsSync(cacheFolder))
fs.mkdirSync(cacheFolder, { recursive: true });
this.cacheModelPath = path.resolve(cacheFolder, "models.json");
this.cacheAtPath = path.resolve(cacheFolder, ".cached_at");
this.log(`Loaded with model: ${this.model}`);
}
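/*
 * Usage sketch (editorial addition, not part of the original file): with
 * NOVITA_LLM_API_KEY exported, the provider is constructed directly; the
 * model string below is the same default documented above.
 *
 *   const llm = new NovitaLLM(null, "gryphe/mythomax-l2-13b");
 *   // llm.openai now targets https://api.novita.ai/v3/openai
 */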
log(text, ...args) {
console.log(`\x1b[36m[${this.constructor.name}]\x1b[0m ${text}`, ...args);
}
/**
 * Novita hosts several models that never return a `finish_reason` and thus
 * leave the stream open, which breaks subsequent messages. This timeout
 * forces the stream closed after the given number of milliseconds and is
 * configurable via the NOVITA_LLM_TIMEOUT_MS environment variable.
 * @returns {number} The timeout in milliseconds (default and minimum: 500)
 */
#parseTimeout() {
if (isNaN(Number(process.env.NOVITA_LLM_TIMEOUT_MS))) return 500;
const setValue = Number(process.env.NOVITA_LLM_TIMEOUT_MS);
if (setValue < 500) return 500;
return setValue;
}
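/**
 * Illustrative sketch (editorial addition, not part of the original file):
 * one way `this.timeout` can act as a stall watchdog while consuming a
 * stream. `stream` (an async iterable of chunks) and `onDone` (a callback
 * that closes the client response) are hypothetical names used only for
 * illustration, not the file's actual implementation.
 */
async #exampleWatchdogConsume(stream, onDone) {
  let lastChunkTime = Number(new Date());
  let closed = false;
  const close = () => {
    if (closed) return;
    closed = true;
    clearInterval(watchdog);
    onDone();
  };
  // If no chunk has arrived within `this.timeout` ms, assume the stream
  // will never send a finish_reason and force it closed.
  const watchdog = setInterval(() => {
    if (Number(new Date()) - lastChunkTime >= this.timeout) close();
  }, 500);
  for await (const _chunk of stream) {
    lastChunkTime = Number(new Date()); // reset the stall clock on activity
  }
  close();
}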
// This checks whether the .cached_at file's timestamp is more than one week old (in milliseconds).
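// A minimal sketch of that check (editorial reconstruction — the method
// name `#cacheIsStale` and the exact constant are assumptions, not verified
// against the original file):
#cacheIsStale() {
  const MAX_STALE = 6.048e8; // one week in milliseconds
  if (!fs.existsSync(this.cacheAtPath)) return true; // never cached
  const cachedAtMs = Number(fs.readFileSync(this.cacheAtPath));
  return Number(new Date()) - cachedAtMs > MAX_STALE;
}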