Skip to content
Snippets Groups Projects
Unverified Commit a8c0637d authored by Stefan Edberg's avatar Stefan Edberg Committed by GitHub
Browse files

feat: make it possible to provide base URL to OpenAI (#1740)

parent 387a1928
No related branches found
No related tags found
No related merge requests found
---
"@llamaindex/openai": patch
"@llamaindex/doc": patch
---
feat: make it possible to provide a base URL to OpenAI
......@@ -34,6 +34,18 @@ You can setup the apiKey on the environment variables, like:
export OPENAI_API_KEY="<YOUR_API_KEY>"
```
You can optionally set a custom base URL, like:
```bash
export OPENAI_BASE_URL="https://api.scaleway.ai/v1"
```
or
```ts
Settings.llm = new OpenAI({ model: "gpt-3.5-turbo", temperature: 0, apiKey: "<YOUR_API_KEY>", baseURL: "https://api.scaleway.ai/v1" });
```
## Load and index documents
For this example, we will use a single document. In a real-world scenario, you would have multiple documents to index.
......
......@@ -36,7 +36,10 @@ export const ALL_OPENAI_EMBEDDING_MODELS = {
type ModelKeys = keyof typeof ALL_OPENAI_EMBEDDING_MODELS;
type LLMInstance = Pick<AzureOpenAILLM | OpenAILLM, "embeddings" | "apiKey">;
type LLMInstance = Pick<
AzureOpenAILLM | OpenAILLM,
"embeddings" | "apiKey" | "baseURL"
>;
export class OpenAIEmbedding extends BaseEmbedding {
  /** embedding model. defaults to "text-embedding-ada-002" */
......@@ -48,6 +51,8 @@ export class OpenAIEmbedding extends BaseEmbedding {
/** api key */
apiKey?: string | undefined = undefined;
/** base url */
baseURL?: string | undefined = undefined;
/** maximum number of retries, default 10 */
maxRetries: number;
/** timeout in ms, default 60 seconds */
......@@ -104,6 +109,10 @@ export class OpenAIEmbedding extends BaseEmbedding {
};
this.apiKey =
init?.session?.apiKey ?? azureConfig.apiKey ?? getEnv("OPENAI_API_KEY");
this.baseURL =
init?.session?.baseURL ??
azureConfig.baseURL ??
getEnv("OPENAI_BASE_URL");
this.lazySession = async () =>
import("openai").then(async ({ AzureOpenAI }) => {
AzureOpenAI = AzureOpenAIWithUserAgent(AzureOpenAI);
......@@ -121,12 +130,15 @@ export class OpenAIEmbedding extends BaseEmbedding {
} else {
this.apiKey =
init?.session?.apiKey ?? init?.apiKey ?? getEnv("OPENAI_API_KEY");
this.baseURL =
init?.session?.baseURL ?? init?.baseURL ?? getEnv("OPENAI_BASE_URL");
this.lazySession = async () =>
import("openai").then(({ OpenAI }) => {
return (
init?.session ??
new OpenAI({
apiKey: this.apiKey,
baseURL: this.baseURL,
maxRetries: this.maxRetries,
timeout: this.timeout!,
...this.additionalSessionOptions,
......
......@@ -182,7 +182,10 @@ export type OpenAIAdditionalChatOptions = Omit<
| "toolChoice"
>;
type LLMInstance = Pick<AzureOpenAILLM | OpenAILLM, "chat" | "apiKey">;
type LLMInstance = Pick<
AzureOpenAILLM | OpenAILLM,
"chat" | "apiKey" | "baseURL"
>;
export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> {
model:
......@@ -197,6 +200,7 @@ export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> {
// OpenAI session params
apiKey?: string | undefined = undefined;
baseURL?: string | undefined = undefined;
maxRetries: number;
timeout?: number;
additionalSessionOptions?:
......@@ -234,6 +238,8 @@ export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> {
this.additionalSessionOptions = init?.additionalSessionOptions;
this.apiKey =
init?.session?.apiKey ?? init?.apiKey ?? getEnv("OPENAI_API_KEY");
this.baseURL =
init?.session?.baseURL ?? init?.baseURL ?? getEnv("OPENAI_BASE_URL");
if (init?.azure || shouldUseAzure()) {
const azureConfig = {
......@@ -261,6 +267,7 @@ export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> {
import("openai").then(({ OpenAI }) => {
return new OpenAI({
apiKey: this.apiKey,
baseURL: this.baseURL,
maxRetries: this.maxRetries,
timeout: this.timeout!,
...this.additionalSessionOptions,
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment