diff --git a/packages/create-llama/templates/components/engines/python/agent/__init__.py b/packages/create-llama/templates/components/engines/python/agent/__init__.py
index f1b62b873d64d73a5e44183e71cd4f5ffc23fc4f..86e777edbd3f21c3e62b7b20b4cdff59c4d545b6 100644
--- a/packages/create-llama/templates/components/engines/python/agent/__init__.py
+++ b/packages/create-llama/templates/components/engines/python/agent/__init__.py
@@ -36,7 +36,7 @@ def get_chat_engine():
     # Add query tool
     index = get_index()
     llm = index.service_context.llm
-    query_engine = index.as_query_engine(similarity_top_k=5)
+    query_engine = index.as_query_engine(similarity_top_k=3)
     query_engine_tool = QueryEngineTool.from_defaults(query_engine=query_engine)
     tools.append(query_engine_tool)
 
diff --git a/packages/create-llama/templates/components/engines/python/chat/__init__.py b/packages/create-llama/templates/components/engines/python/chat/__init__.py
index 18a6039b55e30e687d525e7c91083d97c0ad29ee..95de61cd825b2ac299caf9faafe98c529c4fe5a7 100644
--- a/packages/create-llama/templates/components/engines/python/chat/__init__.py
+++ b/packages/create-llama/templates/components/engines/python/chat/__init__.py
@@ -3,5 +3,5 @@ from app.engine.index import get_index
 
 def get_chat_engine():
     return get_index().as_chat_engine(
-        similarity_top_k=5, chat_mode="condense_plus_context"
+        similarity_top_k=3, chat_mode="condense_plus_context"
     )
diff --git a/packages/create-llama/templates/components/vectordbs/typescript/mongo/index.ts b/packages/create-llama/templates/components/vectordbs/typescript/mongo/index.ts
index 7aceaff06f8b1df4bf67d46ad589218e4e8c6fea..844789c60d538c68c4e364127728544592ebbc47 100644
--- a/packages/create-llama/templates/components/vectordbs/typescript/mongo/index.ts
+++ b/packages/create-llama/templates/components/vectordbs/typescript/mongo/index.ts
@@ -29,7 +29,7 @@ async function getDataSource(llm: LLM) {
 
 export async function createChatEngine(llm: LLM) {
   const index = await getDataSource(llm);
-  const retriever = index.asRetriever({ similarityTopK: 5 });
+  const retriever = index.asRetriever({ similarityTopK: 3 });
   return new ContextChatEngine({
     chatModel: llm,
     retriever,
diff --git a/packages/create-llama/templates/components/vectordbs/typescript/none/index.ts b/packages/create-llama/templates/components/vectordbs/typescript/none/index.ts
index cdd93809dcd7d9939aba8798fdac3639151e4ac5..c3b65ce2366f26ad74c4a66f9ce53ab9347fafdd 100644
--- a/packages/create-llama/templates/components/vectordbs/typescript/none/index.ts
+++ b/packages/create-llama/templates/components/vectordbs/typescript/none/index.ts
@@ -35,7 +35,7 @@ async function getDataSource(llm: LLM) {
 export async function createChatEngine(llm: LLM) {
   const index = await getDataSource(llm);
   const retriever = index.asRetriever();
-  retriever.similarityTopK = 5;
+  retriever.similarityTopK = 3;
 
   return new ContextChatEngine({
     chatModel: llm,
diff --git a/packages/create-llama/templates/components/vectordbs/typescript/pg/index.ts b/packages/create-llama/templates/components/vectordbs/typescript/pg/index.ts
index 96a98085a04398d1393d1bf0bc507aa0973773a4..7de66a2e30a1aa5fb0f5b7b92bc0c6f3b24bfe7f 100644
--- a/packages/create-llama/templates/components/vectordbs/typescript/pg/index.ts
+++ b/packages/create-llama/templates/components/vectordbs/typescript/pg/index.ts
@@ -31,7 +31,7 @@ async function getDataSource(llm: LLM) {
 
 export async function createChatEngine(llm: LLM) {
   const index = await getDataSource(llm);
-  const retriever = index.asRetriever({ similarityTopK: 5 });
+  const retriever = index.asRetriever({ similarityTopK: 3 });
   return new ContextChatEngine({
     chatModel: llm,
     retriever,