diff --git a/frontend/.eslintrc.cjs b/frontend/.eslintrc.cjs
index ec601b2ceaac715bcf72c007beadeca66512ea07..1bf2b322691edd32ad741627ee141d1c2fe7c762 100644
--- a/frontend/.eslintrc.cjs
+++ b/frontend/.eslintrc.cjs
@@ -1,15 +1,20 @@
 module.exports = {
-  env: { browser: true, es2020: true },
-  extends: [
-    'eslint:recommended',
-    'plugin:react/recommended',
-    'plugin:react/jsx-runtime',
-    'plugin:react-hooks/recommended',
+  "env": { "browser": true, "es2020": true },
+  "extends": [
+    "eslint:recommended",
+    "plugin:react/recommended",
+    "plugin:react/jsx-runtime",
+    "plugin:react-hooks/recommended"
   ],
-  parserOptions: { ecmaVersion: 'latest', sourceType: 'module' },
-  settings: { react: { version: '18.2' } },
-  plugins: ['react-refresh'],
-  rules: {
-    'react-refresh/only-export-components': 'warn',
-  },
+  // `files` and `linterOptions` are flat-config-only options; eslintrc supports this top-level flag instead.
+  "reportUnusedDisableDirectives": true,
+  "parserOptions": { "ecmaVersion": "latest", "sourceType": "module", "ecmaFeatures": { "jsx": true } },
+  "settings": { "react": { "version": "18.2" } },
+  "plugins": [
+    "react-refresh",
+    "react-hooks"
+  ],
+  "rules": {
+    "react-refresh/only-export-components": "warn"
+  }
 }
diff --git a/frontend/src/components/Modals/MangeWorkspace/Settings/index.jsx b/frontend/src/components/Modals/MangeWorkspace/Settings/index.jsx
index 1ff13e772e3ae627e51cd4331868637821356bd1..7dfc2ccc1bf60460eebd63c42c94fb33300bc33b 100644
--- a/frontend/src/components/Modals/MangeWorkspace/Settings/index.jsx
+++ b/frontend/src/components/Modals/MangeWorkspace/Settings/index.jsx
@@ -1,6 +1,7 @@
 import React, { useState, useRef, useEffect } from "react";
 import Workspace from "../../../../models/workspace";
 import paths from "../../../../utils/paths";
+import { chatPrompt } from "../../../../utils/chat";
 
 export default function WorkspaceSettings({ workspace }) {
   const formEl = useRef(null);
@@ -141,6 +142,36 @@ export default function WorkspaceSettings({ workspace }) {
             />
           </div>
 
+          <div>
+            <div className="flex flex-col gap-y-1 mb-4">
+              <label
+                htmlFor="openAiPrompt"
+                className="block text-sm font-medium text-gray-900 dark:text-white"
+              >
+                Prompt
+              </label>
+              <p className="text-xs text-gray-600 dark:text-stone-400">
+                The prompt that will be used for this workspace. Define the
+                context and instructions for the AI to generate a response.
+                You should provide a carefully crafted prompt so the AI can
+                generate a relevant and accurate response.
+              </p>
+            </div>
+            <textarea
+              id="openAiPrompt"
+              name="openAiPrompt"
+              maxLength={500}
+              rows={5}
+              defaultValue={chatPrompt(workspace)}
+              className="bg-gray-50 border border-gray-300 text-gray-900 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5 dark:bg-stone-600 dark:border-stone-600 dark:placeholder-gray-400 dark:text-white dark:focus:ring-blue-500 dark:focus:border-blue-500"
+              placeholder="Given the following conversation, relevant context, and a follow-up question, reply with an answer to the current question the user is asking. Return only your response to the question given the above information following the user's instructions as needed."
+              required={true}
+              wrap="soft"
+              autoComplete="off"
+              onChange={() => setHasChanges(true)}
+            />
+          </div>
+
           <div>
             <div className="flex flex-col gap-y-1 mb-4">
               <label
diff --git a/frontend/src/components/Modals/Settings/ExportImport/index.jsx b/frontend/src/components/Modals/Settings/ExportImport/index.jsx
index 169feb89c885666fc99d1249d1318ad4bfc36b27..4099e8c061e86a915b4fde6dc867906aa12fd430 100644
--- a/frontend/src/components/Modals/Settings/ExportImport/index.jsx
+++ b/frontend/src/components/Modals/Settings/ExportImport/index.jsx
@@ -181,7 +181,9 @@ function ImportData() {
           Import was completed successfully
         </p>
       </div>
-      <p className="text-green-800 text-xs italic">please reload the page to see the results of the import.</p>
+      <p className="text-green-800 text-xs italic">
+        Please reload the page to see the results of the import.
+      </p>
     </div>
   );
 }
diff --git a/frontend/src/components/Sidebar/index.jsx b/frontend/src/components/Sidebar/index.jsx
index d7cdabdbb33b02d89f4f37731097f0e0bc4b2f58..880e54fde66810843c34f6328d0146cddb912025 100644
--- a/frontend/src/components/Sidebar/index.jsx
+++ b/frontend/src/components/Sidebar/index.jsx
@@ -185,10 +185,11 @@ export function SidebarMobileHeader() {
         className={`z-99 fixed top-0 left-0 transition-all duration-500 w-[100vw] h-[100vh]`}
       >
         <div
-          className={`${showBgOverlay
+          className={`${
+            showBgOverlay
             ? "transition-all opacity-1"
             : "transition-none opacity-0"
-            } duration-500 fixed top-0 left-0 bg-black-900 bg-opacity-75 w-screen h-screen`}
+          } duration-500 fixed top-0 left-0 bg-black-900 bg-opacity-75 w-screen h-screen`}
           onClick={() => setShowSidebar(false)}
         />
         <div
diff --git a/frontend/src/utils/chat/index.js b/frontend/src/utils/chat/index.js
index 539e22c7ebe75c1274fdfa04beafbdf23f53d506..35a911d0d8d0ee4488c7eed336bb1322bba11184 100644
--- a/frontend/src/utils/chat/index.js
+++ b/frontend/src/utils/chat/index.js
@@ -56,3 +56,11 @@
     });
   }
 }
+
+// Returns the workspace's custom prompt, or the default system prompt when none is set.
+export function chatPrompt(workspace) {
+  return (
+    workspace?.openAiPrompt ??
+    "Given the following conversation, relevant context, and a follow-up question, reply with an answer to the current question the user is asking. Return only your response to the question given the above information following the user's instructions as needed."
+  );
+}
diff --git a/server/models/workspace.js b/server/models/workspace.js
index 1f4ca84afc5c260d46d48f51900a18ca81400c7e..fed92434b681d857d4e380565b0326dea2686f33 100644
--- a/server/models/workspace.js
+++ b/server/models/workspace.js
@@ -12,6 +12,7 @@ const Workspace = {
     "openAiTemp",
     "openAiHistory",
     "lastUpdatedAt",
+    "openAiPrompt",
   ],
   colsInit: `
   id INTEGER PRIMARY KEY AUTOINCREMENT,
@@ -21,7 +22,8 @@ const Workspace = {
   createdAt TEXT DEFAULT CURRENT_TIMESTAMP,
   openAiTemp REAL DEFAULT NULL,
   openAiHistory INTEGER DEFAULT 20,
-  lastUpdatedAt TEXT DEFAULT CURRENT_TIMESTAMP
+  lastUpdatedAt TEXT DEFAULT CURRENT_TIMESTAMP,
+  openAiPrompt TEXT DEFAULT NULL
   `,
   migrateTable: async function () {
     console.log(`\x1b[34m[MIGRATING]\x1b[0m Checking for Workspace migrations`);
@@ -35,6 +37,11 @@
         execCmd: `ALTER TABLE ${this.tablename} ADD COLUMN openAiTemp REAL DEFAULT NULL`,
         doif: false,
       },
+      {
+        colName: "openAiPrompt",
+        execCmd: `ALTER TABLE ${this.tablename} ADD COLUMN openAiPrompt TEXT DEFAULT NULL`,
+        doif: false,
+      },
       {
         colName: "id",
         execCmd: `CREATE TRIGGER IF NOT EXISTS Trg_LastUpdated AFTER UPDATE ON ${this.tablename}
diff --git a/server/utils/chats/index.js b/server/utils/chats/index.js
index cd4c2942e971fb3e21c5e6194adc99b0988b57f8..800003e21189aa6f8fc1f3746929b5abe1b3bb9e 100644
--- a/server/utils/chats/index.js
+++ b/server/utils/chats/index.js
@@ -148,7 +148,17 @@ async function chatWithWorkspace(workspace, message, chatMode = "chat") {
     };
   }
 }
+
+// Returns the workspace's custom prompt, or the default system prompt when none is set.
+function chatPrompt(workspace) {
+  return (
+    workspace?.openAiPrompt ??
+    "Given the following conversation, relevant context, and a follow-up question, reply with an answer to the current question the user is asking. Return only your response to the question given the above information following the user's instructions as needed."
+  );
+}
+
 module.exports = {
   convertToChatHistory,
   chatWithWorkspace,
+  chatPrompt,
 };
diff --git a/server/utils/vectorDbProviders/chroma/index.js b/server/utils/vectorDbProviders/chroma/index.js
index 1fb324aa623f82039ea69ec6c9ca5a579066ba22..ddeb3afe7613ae06870ea76c7d13149db09cbc31 100644
--- a/server/utils/vectorDbProviders/chroma/index.js
+++ b/server/utils/vectorDbProviders/chroma/index.js
@@ -8,6 +8,7 @@ const { storeVectorResult, cachedVectorInformation } = require("../../files");
 const { Configuration, OpenAIApi } = require("openai");
 const { v4: uuidv4 } = require("uuid");
 const { toChunks, curateSources } = require("../../helpers");
+const { chatPrompt } = require("../../chats");
 
 const Chroma = {
   name: "Chroma",
@@ -303,7 +304,7 @@
       { collectionName: namespace, url: process.env.CHROMA_ENDPOINT }
     );
     const model = this.llm({
-      temperature: workspace?.openAiTemp,
+      temperature: workspace?.openAiTemp ?? 0.7,
     });
 
     const chain = VectorDBQAChain.fromLLM(model, vectorStore, {
@@ -347,7 +348,7 @@
     );
     const prompt = {
       role: "system",
-      content: `Given the following conversation, relevant context, and a follow up question, reply with an answer to the current question the user is asking. Return only your response to the question given the above information following the users instructions as needed.
+      content: `${chatPrompt(workspace)}
 Context:
 ${contextTexts
   .map((text, i) => {
diff --git a/server/utils/vectorDbProviders/lance/index.js b/server/utils/vectorDbProviders/lance/index.js
index 293e835a18b326ccfbb4be02866c0b74a303c4e2..21d962ffac2914f5df252f48df35ec0817be9225 100644
--- a/server/utils/vectorDbProviders/lance/index.js
+++ b/server/utils/vectorDbProviders/lance/index.js
@@ -5,6 +5,7 @@ const { RecursiveCharacterTextSplitter } = require("langchain/text_splitter");
 const { storeVectorResult, cachedVectorInformation } = require("../../files");
 const { Configuration, OpenAIApi } = require("openai");
 const { v4: uuidv4 } = require("uuid");
+const { chatPrompt } = require("../../chats");
 
 // Since we roll our own results for prompting we
 // have to manually curate sources as well.
@@ -260,7 +261,7 @@ const LanceDb = {
     );
     const prompt = {
       role: "system",
-      content: `Given the following conversation, relevant context, and a follow up question, reply with an answer to the current question the user is asking. Return only your response to the question given the above information following the users instructions as needed.
+      content: `${chatPrompt(workspace)}
 Context:
 ${contextTexts
   .map((text, i) => {
@@ -309,7 +310,7 @@ const LanceDb = {
     );
     const prompt = {
       role: "system",
-      content: `Given the following conversation, relevant context, and a follow up question, reply with an answer to the current question the user is asking. Return only your response to the question given the above information following the users instructions as needed.
+      content: `${chatPrompt(workspace)}
 Context:
 ${contextTexts
   .map((text, i) => {
diff --git a/server/utils/vectorDbProviders/pinecone/index.js b/server/utils/vectorDbProviders/pinecone/index.js
index dc984f7faea62ea25cb57ec5fc0d0654ad2c64c6..67e4d1efb253d74705ea51a44ae7d6aa429e0a54 100644
--- a/server/utils/vectorDbProviders/pinecone/index.js
+++ b/server/utils/vectorDbProviders/pinecone/index.js
@@ -10,6 +10,7 @@ const { storeVectorResult, cachedVectorInformation } = require("../../files");
 const { Configuration, OpenAIApi } = require("openai");
 const { v4: uuidv4 } = require("uuid");
 const { toChunks, curateSources } = require("../../helpers");
+const { chatPrompt } = require("../../chats");
 
 const Pinecone = {
   name: "Pinecone",
@@ -278,7 +279,7 @@ const Pinecone = {
     });
 
     const model = this.llm({
-      temperature: workspace?.openAiTemp,
+      temperature: workspace?.openAiTemp ?? 0.7,
     });
     const chain = VectorDBQAChain.fromLLM(model, vectorStore, {
       k: 5,
@@ -318,14 +319,15 @@ const Pinecone = {
     );
     const prompt = {
       role: "system",
-      content: `Given the following conversation, relevant context, and a follow up question, reply with an answer to the current question the user is asking. Return only your response to the question given the above information following the users instructions as needed.
+      content: `${chatPrompt(workspace)}
 Context:
 ${contextTexts
   .map((text, i) => {
     return `[CONTEXT ${i}]:\n${text}\n[END CONTEXT ${i}]\n\n`;
   })
   .join("")}`,
     };
+
     const memory = [prompt, ...chatHistory, { role: "user", content: input }];
     const responseText = await this.getChatCompletion(this.openai(), memory, {
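A note on the fallback semantics introduced above (illustrated by a standalone sketch, not part of the patch; the workspace values shown are hypothetical): `??` only falls back when the left-hand side is null or undefined, so a workspace whose openAiPrompt column is still NULL gets the default system prompt, while any saved string, including an empty one, is used verbatim. The same reasoning applies to `workspace?.openAiTemp ?? 0.7`.

// Standalone sketch (hypothetical data) of the nullish-coalescing fallback used in this patch.
const DEFAULT_PROMPT =
  "Given the following conversation, relevant context, and a follow-up question, reply with an answer to the current question the user is asking. Return only your response to the question given the above information following the user's instructions as needed.";

function chatPrompt(workspace) {
  // NULL column or missing workspace falls through to the default.
  return workspace?.openAiPrompt ?? DEFAULT_PROMPT;
}

console.log(chatPrompt({ openAiPrompt: null }));          // default prompt (column is NULL)
console.log(chatPrompt(undefined));                       // default prompt (no workspace at all)
console.log(chatPrompt({ openAiPrompt: "Be terse." }));   // "Be terse."
console.log(chatPrompt({ openAiPrompt: "" }));            // "" (unlike ||, ?? keeps empty strings)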