diff --git a/frontend/src/pages/GeneralSettings/PrivacyAndData/index.jsx b/frontend/src/pages/GeneralSettings/PrivacyAndData/index.jsx
index 4075c89a6acaf1df255b86f70df1a263cd0b8823..2834708972512d4089652880a84032f88ba2a43f 100644
--- a/frontend/src/pages/GeneralSettings/PrivacyAndData/index.jsx
+++ b/frontend/src/pages/GeneralSettings/PrivacyAndData/index.jsx
@@ -64,7 +64,7 @@ export default function PrivacyAndDataHandling() {
 function ThirdParty({ settings }) {
   const llmChoice = settings?.LLMProvider || "openai";
   const embeddingEngine = settings?.EmbeddingEngine || "openai";
-  const vectorDb = settings?.VectorDB || "pinecone";
+  const vectorDb = settings?.VectorDB || "lancedb";
 
   return (
     <div className="py-8 w-full flex items-start justify-center flex-col gap-y-6 border-b-2 border-white/10">
diff --git a/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx b/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx
index 3c9eecd629b092f30b23fd2625e6459a1ba06c01..5c6b3798cf0e3472cb4a5d3cc41317bfe6196e3b 100644
--- a/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx
+++ b/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx
@@ -299,7 +299,7 @@ export default function DataHandling({ setHeader, setForwardBtn, setBackBtn }) {
     async function fetchKeys() {
       const _settings = await System.keys();
       setLLMChoice(_settings?.LLMProvider || "openai");
-      setVectorDb(_settings?.VectorDB || "pinecone");
+      setVectorDb(_settings?.VectorDB || "lancedb");
       setEmbeddingEngine(_settings?.EmbeddingEngine || "openai");
 
       setLoading(false);
diff --git a/server/endpoints/api/workspace/index.js b/server/endpoints/api/workspace/index.js
index fbfdde8882d40ab254b635f2b9d3a80a0b84243a..9dd3fce70ae8a1a205e7ec7356706e4b1c91ebe0 100644
--- a/server/endpoints/api/workspace/index.js
+++ b/server/endpoints/api/workspace/index.js
@@ -72,7 +72,7 @@ function apiWorkspaceEndpoints(app) {
         multiUserMode: multiUserMode(response),
         LLMSelection: process.env.LLM_PROVIDER || "openai",
         Embedder: process.env.EMBEDDING_ENGINE || "inherit",
-        VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+        VectorDbSelection: process.env.VECTOR_DB || "lancedb",
       });
       await EventLogs.logEvent("api_workspace_created", {
         workspaceName: workspace?.name || "Unknown Workspace",
@@ -525,7 +525,7 @@ function apiWorkspaceEndpoints(app) {
       await Telemetry.sendTelemetry("sent_chat", {
         LLMSelection: process.env.LLM_PROVIDER || "openai",
         Embedder: process.env.EMBEDDING_ENGINE || "inherit",
-        VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+        VectorDbSelection: process.env.VECTOR_DB || "lancedb",
       });
       await EventLogs.logEvent("api_sent_chat", {
         workspaceName: workspace?.name,
@@ -647,7 +647,7 @@ function apiWorkspaceEndpoints(app) {
       await Telemetry.sendTelemetry("sent_chat", {
         LLMSelection: process.env.LLM_PROVIDER || "openai",
         Embedder: process.env.EMBEDDING_ENGINE || "inherit",
-        VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+        VectorDbSelection: process.env.VECTOR_DB || "lancedb",
       });
       await EventLogs.logEvent("api_sent_chat", {
         workspaceName: workspace?.name,
diff --git a/server/endpoints/chat.js b/server/endpoints/chat.js
index a08b36e24358550d2eb6d6d7f81c7695d3046286..7445c21345916ce3330ae416b500384f58045329 100644
--- a/server/endpoints/chat.js
+++ b/server/endpoints/chat.js
@@ -92,7 +92,7 @@ function chatEndpoints(app) {
         multiUserMode: multiUserMode(response),
         LLMSelection: process.env.LLM_PROVIDER || "openai",
         Embedder: process.env.EMBEDDING_ENGINE || "inherit",
-        VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+        VectorDbSelection: process.env.VECTOR_DB || "lancedb",
       });
 
       await EventLogs.logEvent(
@@ -200,7 +200,7 @@ function chatEndpoints(app) {
         multiUserMode: multiUserMode(response),
         LLMSelection: process.env.LLM_PROVIDER || "openai",
         Embedder: process.env.EMBEDDING_ENGINE || "inherit",
-        VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+        VectorDbSelection: process.env.VECTOR_DB || "lancedb",
       });
 
       await EventLogs.logEvent(
diff --git a/server/endpoints/embed/index.js b/server/endpoints/embed/index.js
index 0631a655d33dbf3d4e5c7662da18119278acd449..532b361870aa2eba2ae490d27a6d81f884a4e44e 100644
--- a/server/endpoints/embed/index.js
+++ b/server/endpoints/embed/index.js
@@ -46,7 +46,7 @@ function embeddedEndpoints(app) {
         multiUserMode: multiUserMode(response),
         LLMSelection: process.env.LLM_PROVIDER || "openai",
         Embedder: process.env.EMBEDDING_ENGINE || "inherit",
-        VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+        VectorDbSelection: process.env.VECTOR_DB || "lancedb",
       });
       response.end();
     } catch (e) {
diff --git a/server/endpoints/workspaceThreads.js b/server/endpoints/workspaceThreads.js
index 05b584af52845eeaf5dba62151fa1917ca60868e..e2aead974d4d46b01b66e4eee4f5b9a5b55bb867 100644
--- a/server/endpoints/workspaceThreads.js
+++ b/server/endpoints/workspaceThreads.js
@@ -34,7 +34,7 @@ function workspaceThreadEndpoints(app) {
           multiUserMode: multiUserMode(response),
           LLMSelection: process.env.LLM_PROVIDER || "openai",
           Embedder: process.env.EMBEDDING_ENGINE || "inherit",
-          VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+          VectorDbSelection: process.env.VECTOR_DB || "lancedb",
         },
         user?.id
       );
diff --git a/server/endpoints/workspaces.js b/server/endpoints/workspaces.js
index f85c213fc28714340da936b8509e62dc8324bd06..c22c679a0c834080218bb6b2813e883a3f888490 100644
--- a/server/endpoints/workspaces.js
+++ b/server/endpoints/workspaces.js
@@ -45,7 +45,7 @@ function workspaceEndpoints(app) {
           multiUserMode: multiUserMode(response),
           LLMSelection: process.env.LLM_PROVIDER || "openai",
           Embedder: process.env.EMBEDDING_ENGINE || "inherit",
-          VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+          VectorDbSelection: process.env.VECTOR_DB || "lancedb",
         },
         user?.id
       );
diff --git a/server/models/documents.js b/server/models/documents.js
index 1c2d1711381cc809f56bb44b0df7ce5d78155ded..6c09651c9805eb315eb11a4cafca83fd06fb5790 100644
--- a/server/models/documents.js
+++ b/server/models/documents.js
@@ -114,7 +114,7 @@ const Document = {
     await Telemetry.sendTelemetry("documents_embedded_in_workspace", {
       LLMSelection: process.env.LLM_PROVIDER || "openai",
       Embedder: process.env.EMBEDDING_ENGINE || "inherit",
-      VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+      VectorDbSelection: process.env.VECTOR_DB || "lancedb",
     });
     await EventLogs.logEvent(
       "workspace_documents_added",
@@ -157,7 +157,7 @@ const Document = {
     await Telemetry.sendTelemetry("documents_removed_in_workspace", {
       LLMSelection: process.env.LLM_PROVIDER || "openai",
       Embedder: process.env.EMBEDDING_ENGINE || "inherit",
-      VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+      VectorDbSelection: process.env.VECTOR_DB || "lancedb",
     });
     await EventLogs.logEvent(
       "workspace_documents_removed",
diff --git a/server/utils/helpers/index.js b/server/utils/helpers/index.js
index 514d32042596fc76ee63d9f7b073dea5b8ed051b..72fbfc6e3938c809988eff9cc78f3ba8774a3301 100644
--- a/server/utils/helpers/index.js
+++ b/server/utils/helpers/index.js
@@ -1,5 +1,5 @@
 function getVectorDbClass() {
-  const vectorSelection = process.env.VECTOR_DB || "pinecone";
+  const vectorSelection = process.env.VECTOR_DB || "lancedb";
   switch (vectorSelection) {
     case "pinecone":
       const { Pinecone } = require("../vectorDbProviders/pinecone");