Skip to content
Snippets Groups Projects
Unverified Commit 49988431 authored by Alex Yang's avatar Alex Yang Committed by GitHub
Browse files

refactor: move settings.llm into core package (#1165)

parent 72d65dd5
No related branches found
No related tags found
No related merge requests found
import type { Tokenizer } from "@llamaindex/env";
import type { LLM } from "../llms";
import {
type CallbackManager,
getCallbackManager,
......@@ -10,6 +11,7 @@ import {
setChunkSize,
withChunkSize,
} from "./settings/chunk-size";
import { getLLM, setLLM, withLLM } from "./settings/llm";
import {
getTokenizer,
setTokenizer,
......@@ -17,6 +19,15 @@ import {
} from "./settings/tokenizer";
export const Settings = {
get llm() {
return getLLM();
},
set llm(llm) {
setLLM(llm);
},
withLLM<Result>(llm: LLM, fn: () => Result): Result {
return withLLM(llm, fn);
},
get tokenizer() {
return getTokenizer();
},
......
import { AsyncLocalStorage } from "@llamaindex/env";
import type { LLM } from "../../llms";
// Module-scoped storage for the active LLM:
// - `llmAsyncLocalStorage` holds an LLM scoped to a call tree via `withLLM`.
// - `globalLLM` is the process-wide default installed via `setLLM`
//   (i.e. `Settings.llm = ...`).
const llmAsyncLocalStorage = new AsyncLocalStorage<LLM>();
let globalLLM: LLM | undefined;

/**
 * Returns the currently active LLM.
 *
 * A scoped LLM (installed by `withLLM`) takes precedence over the global
 * default set via `setLLM`. The previous operand order
 * (`globalLLM ?? llmAsyncLocalStorage.getStore()`) meant `withLLM` was
 * silently ignored once a global LLM had been configured; the scoped store
 * must be consulted first, consistent with the sibling settings modules
 * (chunk-size, tokenizer).
 *
 * @throws Error when neither a scoped nor a global LLM is configured.
 */
export function getLLM(): LLM {
  // Check the async-scoped value first so `withLLM` overrides the global.
  const currentLLM = llmAsyncLocalStorage.getStore() ?? globalLLM;
  if (!currentLLM) {
    throw new Error(
      "Cannot find LLM, please set `Settings.llm = ...` on the top of your code",
    );
  }
  return currentLLM;
}

/**
 * Sets the process-wide default LLM (backs the `Settings.llm` setter).
 */
export function setLLM(llm: LLM): void {
  globalLLM = llm;
}

/**
 * Runs `fn` with `llm` as the active LLM for the duration of the call,
 * including its async continuations, without touching the global default.
 */
export function withLLM<Result>(llm: LLM, fn: () => Result): Result {
  return llmAsyncLocalStorage.run(llm, fn);
}
......@@ -27,7 +27,6 @@ export type PromptConfig = {
export interface Config {
prompt: PromptConfig;
llm: LLM | null;
promptHelper: PromptHelper | null;
embedModel: BaseEmbedding | null;
nodeParser: NodeParser | null;
......@@ -41,12 +40,10 @@ export interface Config {
*/
class GlobalSettings implements Config {
#prompt: PromptConfig = {};
#llm: LLM | null = null;
#promptHelper: PromptHelper | null = null;
#nodeParser: NodeParser | null = null;
#chunkOverlap?: number;
#llmAsyncLocalStorage = new AsyncLocalStorage<LLM>();
#promptHelperAsyncLocalStorage = new AsyncLocalStorage<PromptHelper>();
#nodeParserAsyncLocalStorage = new AsyncLocalStorage<NodeParser>();
#chunkOverlapAsyncLocalStorage = new AsyncLocalStorage<number>();
......@@ -62,19 +59,19 @@ class GlobalSettings implements Config {
}
get llm(): LLM {
if (this.#llm === null) {
this.#llm = new OpenAI();
if (CoreSettings.llm === null) {
CoreSettings.llm = new OpenAI();
}
return this.#llmAsyncLocalStorage.getStore() ?? this.#llm;
return CoreSettings.llm;
}
set llm(llm: LLM) {
this.#llm = llm;
CoreSettings.llm = llm;
}
withLLM<Result>(llm: LLM, fn: () => Result): Result {
return this.#llmAsyncLocalStorage.run(llm, fn);
return CoreSettings.withLLM(llm, fn);
}
get promptHelper(): PromptHelper {
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment