diff --git a/apps/docs/docs/modules/llms/available_llms/azure.md b/apps/docs/docs/modules/llms/available_llms/azure.md
index f6ca3ef6a8c7c264f277c0288ac590a315eb3842..d13f94317f1d53e3010b41decd8cbaecc533c0f3 100644
--- a/apps/docs/docs/modules/llms/available_llms/azure.md
+++ b/apps/docs/docs/modules/llms/available_llms/azure.md
@@ -50,7 +50,7 @@ const results = await queryEngine.query({
 
 ```ts
 import {
-  Anthropic,
+  OpenAI,
   Document,
   VectorStoreIndex,
   serviceContextFromDefaults,
@@ -70,6 +70,9 @@ async function main() {
     serviceContext,
   });
 
+  // get retriever
+  const retriever = index.asRetriever();
+
   // Create a query engine
   const queryEngine = index.asQueryEngine({
     retriever,
diff --git a/apps/docs/docs/modules/llms/available_llms/llama2.md b/apps/docs/docs/modules/llms/available_llms/llama2.md
index 65c26d6279c53f9870f7d20b2645c9dd12bded8b..ffcac13ac32ec0b59fc9a0ceb69f4aa6d13d2752 100644
--- a/apps/docs/docs/modules/llms/available_llms/llama2.md
+++ b/apps/docs/docs/modules/llms/available_llms/llama2.md
@@ -59,7 +59,7 @@ const results = await queryEngine.query({
 
 ```ts
 import {
-  Anthropic,
+  LlamaDeuce,
   Document,
   VectorStoreIndex,
   serviceContextFromDefaults,
@@ -79,6 +79,9 @@ async function main() {
     serviceContext,
   });
 
+  // get retriever
+  const retriever = index.asRetriever();
+
   // Create a query engine
   const queryEngine = index.asQueryEngine({
     retriever,
diff --git a/apps/docs/docs/modules/llms/available_llms/mistral.md b/apps/docs/docs/modules/llms/available_llms/mistral.md
index a928d75488fff761afe9a65bcca0244ee78f1f3b..81d17510acaa29a366ad566ac8864a32b47fdbdf 100644
--- a/apps/docs/docs/modules/llms/available_llms/mistral.md
+++ b/apps/docs/docs/modules/llms/available_llms/mistral.md
@@ -41,7 +41,7 @@ const results = await queryEngine.query({
 
 ```ts
 import {
-  Anthropic,
+  MistralAI,
   Document,
   VectorStoreIndex,
   serviceContextFromDefaults,
@@ -61,6 +61,9 @@ async function main() {
     serviceContext,
   });
 
+  // get retriever
+  const retriever = index.asRetriever();
+
   // Create a query engine
   const queryEngine = index.asQueryEngine({
     retriever,
diff --git a/apps/docs/docs/modules/llms/available_llms/ollama.md b/apps/docs/docs/modules/llms/available_llms/ollama.md
index d92e2841fe9fd980545786f9ac93963791e1a043..9d690bc9853400d23faa6711c0105eb55c648e1f 100644
--- a/apps/docs/docs/modules/llms/available_llms/ollama.md
+++ b/apps/docs/docs/modules/llms/available_llms/ollama.md
@@ -7,7 +7,10 @@ import { Ollama, serviceContextFromDefaults } from "llamaindex";
 
 const ollamaLLM = new Ollama({ model: "llama2", temperature: 0.75 });
 
-const serviceContext = serviceContextFromDefaults({ llm: ollamaLLM });
+const serviceContext = serviceContextFromDefaults({
+  llm: ollamaLLM,
+  embedModel: ollamaLLM,
+});
 ```
 
 ## Load and index documents
@@ -38,18 +41,25 @@ const results = await queryEngine.query({
 
 ```ts
 import {
-  Anthropic,
+  Ollama,
   Document,
   VectorStoreIndex,
   serviceContextFromDefaults,
 } from "llamaindex";
 
+import fs from "fs/promises";
+
 async function main() {
   // Create an instance of the LLM
   const ollamaLLM = new Ollama({ model: "llama2", temperature: 0.75 });
 
+  const essay = await fs.readFile("./paul_graham_essay.txt", "utf-8");
+
   // Create a service context
-  const serviceContext = serviceContextFromDefaults({ llm: ollamaLLM });
+  const serviceContext = serviceContextFromDefaults({
+    embedModel: ollamaLLM, // prevent 'Set OpenAI Key in OPENAI_API_KEY env variable' error
+    llm: ollamaLLM,
+  });
 
   const document = new Document({ text: essay, id_: "essay" });
 
@@ -58,6 +68,9 @@ async function main() {
     serviceContext,
   });
 
+  // get retriever
+  const retriever = index.asRetriever();
+
   // Create a query engine
   const queryEngine = index.asQueryEngine({
     retriever,
diff --git a/apps/docs/docs/modules/llms/available_llms/openai.md b/apps/docs/docs/modules/llms/available_llms/openai.md
index 284e7d5a143021dc5e99d3e6ef039de1971bb2bb..67b4612053ed6776b8efd366325afbcbb041de8b 100644
--- a/apps/docs/docs/modules/llms/available_llms/openai.md
+++ b/apps/docs/docs/modules/llms/available_llms/openai.md
@@ -42,7 +42,7 @@ const results = await queryEngine.query({
 
 ```ts
 import {
-  Anthropic,
+  OpenAI,
   Document,
   VectorStoreIndex,
   serviceContextFromDefaults,
@@ -62,6 +62,9 @@ async function main() {
     serviceContext,
   });
 
+  // get retriever
+  const retriever = index.asRetriever();
+
   // Create a query engine
   const queryEngine = index.asQueryEngine({
     retriever,
diff --git a/apps/docs/docs/modules/llms/available_llms/portkey.md b/apps/docs/docs/modules/llms/available_llms/portkey.md
index 7c7720f9f64a603e442fb1b8c6d50d2fe5939587..1b3faed8f451c685250f5301467bea674cd91f79 100644
--- a/apps/docs/docs/modules/llms/available_llms/portkey.md
+++ b/apps/docs/docs/modules/llms/available_llms/portkey.md
@@ -40,7 +40,7 @@ const results = await queryEngine.query({
 
 ```ts
 import {
-  Anthropic,
+  Portkey,
   Document,
   VectorStoreIndex,
   serviceContextFromDefaults,
@@ -62,6 +62,9 @@ async function main() {
     serviceContext,
   });
 
+  // get retriever
+  const retriever = index.asRetriever();
+
   // Create a query engine
   const queryEngine = index.asQueryEngine({
     retriever,
diff --git a/apps/docs/docs/modules/llms/available_llms/together.md b/apps/docs/docs/modules/llms/available_llms/together.md
index 620adf8a9dd6a28d2dea4f0cd173032e2797c0cc..b8bc507ba7b5832ffe9e4bcd2748e703ce1505d5 100644
--- a/apps/docs/docs/modules/llms/available_llms/together.md
+++ b/apps/docs/docs/modules/llms/available_llms/together.md
@@ -40,7 +40,7 @@ const results = await queryEngine.query({
 
 ```ts
 import {
-  Anthropic,
+  TogetherLLM,
   Document,
   VectorStoreIndex,
   serviceContextFromDefaults,
@@ -62,6 +62,9 @@ async function main() {
     serviceContext,
   });
 
+  // get retriever
+  const retriever = index.asRetriever();
+
   // Create a query engine
   const queryEngine = index.asQueryEngine({
     retriever,
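Taken together, the fixes above converge on one pattern across the "available LLMs" pages: import the LLM class the page actually documents (rather than `Anthropic`), build a service context around it, and explicitly create the `retriever` that the full example later passes to `asQueryEngine`. For reference, here is a minimal end-to-end sketch of that pattern based on the Ollama page, assuming the LlamaIndex.TS API as it appears in this diff; the example query string and the final `toString()` logging call are illustrative additions, not part of the documented examples.

```ts
import { Document, Ollama, VectorStoreIndex, serviceContextFromDefaults } from "llamaindex";

import fs from "fs/promises";

async function main() {
  // Use Ollama as both the LLM and the embedding model so no OPENAI_API_KEY is required
  const ollamaLLM = new Ollama({ model: "llama2", temperature: 0.75 });

  const serviceContext = serviceContextFromDefaults({
    llm: ollamaLLM,
    embedModel: ollamaLLM,
  });

  // Load the source text and wrap it in a Document
  const essay = await fs.readFile("./paul_graham_essay.txt", "utf-8");
  const document = new Document({ text: essay, id_: "essay" });

  // Build the index, then derive a retriever and a query engine from it
  const index = await VectorStoreIndex.fromDocuments([document], { serviceContext });
  const retriever = index.asRetriever();
  const queryEngine = index.asQueryEngine({ retriever });

  const results = await queryEngine.query({
    query: "What did the author do growing up?", // illustrative query
  });
  console.log(results.toString());
}

main().catch(console.error);
```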