Unverified commit 5a7d8add authored by Timothy Carambat, committed by GitHub

[Fork] Additions on franzbischoff resolution on #122 (#152)


* Related to Issue #122: implemented custom prompt in workspace settings.

* run linter

* Remove code duplication for chat prompt injection

---------

Co-authored-by: Francisco Bischoff <franzbischoff@gmail.com>
parent fdce3e99
 module.exports = {
-  env: { browser: true, es2020: true },
-  extends: [
-    'eslint:recommended',
-    'plugin:react/recommended',
-    'plugin:react/jsx-runtime',
-    'plugin:react-hooks/recommended',
+  "env": { "browser": true, "es2020": true },
+  "extends": [
+    "eslint:recommended",
+    "plugin:react/recommended",
+    "plugin:react/jsx-runtime",
+    "plugin:react-hooks/recommended"
   ],
-  parserOptions: { ecmaVersion: 'latest', sourceType: 'module' },
-  settings: { react: { version: '18.2' } },
-  plugins: ['react-refresh'],
-  rules: {
-    'react-refresh/only-export-components': 'warn',
-  },
+  "files": ["**/*.js", "**/*.jsx"],
+  "linterOptions": { "reportUnusedDisableDirectives": true },
+  "parserOptions": { "ecmaVersion": "latest", "sourceType": "module", "ecmaFeatures": { "jsx": true } },
+  "settings": { "react": { "version": '18.2' } },
+  "plugins": [
+    "react-refresh",
+    "react-hooks"
+  ],
+  "rules": {
+    "react-refresh/only-export-components": "warn"
+  }
 }
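
Two of the keys introduced here, "files" and "linterOptions", come from ESLint's flat-config format. As a small illustration of what reportUnusedDisableDirectives buys (a hypothetical snippet, not part of this commit): once a disable comment stops suppressing anything, ESLint reports the stale directive itself.

// Hypothetical file linted under the new config:
// eslint-disable-next-line no-unused-vars
export const answer = 42; // `answer` is exported, so no-unused-vars never fires,
                          // and the directive above is reported as unused.
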
@@ -1,6 +1,7 @@
 import React, { useState, useRef, useEffect } from "react";
 import Workspace from "../../../../models/workspace";
 import paths from "../../../../utils/paths";
+import { chatPrompt } from "../../../../utils/chat";

 export default function WorkspaceSettings({ workspace }) {
   const formEl = useRef(null);
@@ -141,6 +142,35 @@ export default function WorkspaceSettings({ workspace }) {
             />
           </div>
+          <div>
+            <div className="flex flex-col gap-y-1 mb-4">
+              <label
+                htmlFor="name"
+                className="block text-sm font-medium text-gray-900 dark:text-white"
+              >
+                Prompt
+              </label>
+              <p className="text-xs text-gray-600 dark:text-stone-400">
+                The prompt that will be used on this workspace. Define the
+                context and instructions for the AI to generate a response.
+                You should provide a carefully crafted prompt so the AI can
+                generate a relevant and accurate response.
+              </p>
+            </div>
+            <textarea
+              name="openAiPrompt"
+              maxLength={500}
+              rows={5}
+              defaultValue={chatPrompt(workspace)}
+              className="bg-gray-50 border border-gray-300 text-gray-900 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5 dark:bg-stone-600 dark:border-stone-600 dark:placeholder-gray-400 dark:text-white dark:focus:ring-blue-500 dark:focus:border-blue-500"
+              placeholder="Given the following conversation, relevant context, and a follow up question, reply with an answer to the current question the user is asking. Return only your response to the question given the above information following the users instructions as needed."
+              required={true}
+              wrap="soft"
+              autoComplete="off"
+              onChange={() => setHasChanges(true)}
+            />
+          </div>
           <div>
             <div className="flex flex-col gap-y-1 mb-4">
               <label
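
Note the new textarea is uncontrolled: defaultValue seeds it from chatPrompt(workspace), and onChange only flips the dirty flag. A minimal sketch of how a submit handler could read the value back through the formEl ref shown above (the real handler lives outside this hunk, so everything beyond formEl and setHasChanges is an assumption):

// Hedged sketch: collect the prompt from the uncontrolled form on save.
const handleUpdate = async (e) => {
  e.preventDefault();
  const data = new FormData(formEl.current);
  const openAiPrompt = data.get("openAiPrompt"); // matches name="openAiPrompt"
  // ...persist it with the rest of the workspace settings, then:
  setHasChanges(false);
};
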
@@ -181,7 +181,9 @@ function ImportData() {
           Import was completed successfully
         </p>
       </div>
-      <p className="text-green-800 text-xs italic">please reload the page to see the results of the import.</p>
+      <p className="text-green-800 text-xs italic">
+        please reload the page to see the results of the import.
+      </p>
     </div>
   );
 }
@@ -185,10 +185,11 @@ export function SidebarMobileHeader() {
         className={`z-99 fixed top-0 left-0 transition-all duration-500 w-[100vw] h-[100vh]`}
       >
         <div
-          className={`${showBgOverlay
-            ? "transition-all opacity-1"
-            : "transition-none opacity-0"
-          } duration-500 fixed top-0 left-0 bg-black-900 bg-opacity-75 w-screen h-screen`}
+          className={`${
+            showBgOverlay
+              ? "transition-all opacity-1"
+              : "transition-none opacity-0"
+          } duration-500 fixed top-0 left-0 bg-black-900 bg-opacity-75 w-screen h-screen`}
           onClick={() => setShowSidebar(false)}
         />
         <div
@@ -56,3 +56,10 @@ export default function handleChat(
     });
   }
 }
+
+export function chatPrompt(workspace) {
+  return (
+    workspace?.openAiPrompt ??
+    "Given the following conversation, relevant context, and a follow up question, reply with an answer to the current question the user is asking. Return only your response to the question given the above information following the users instructions as needed."
+  );
+}
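
A quick usage sketch of the new helper, with hypothetical workspace objects. One subtlety: ?? falls back only on null or undefined, so a prompt saved as an empty string is returned as-is.

chatPrompt({ openAiPrompt: "Answer like a pirate." }); // -> the custom prompt
chatPrompt({ openAiPrompt: null });                    // -> stock default prompt
chatPrompt(undefined);                                 // -> stock default (?. guards the access)
chatPrompt({ openAiPrompt: "" });                      // -> "" (?? does not treat "" as missing)
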
@@ -12,6 +12,7 @@ const Workspace = {
     "openAiTemp",
     "openAiHistory",
     "lastUpdatedAt",
+    "openAiPrompt",
   ],
   colsInit: `
   id INTEGER PRIMARY KEY AUTOINCREMENT,
@@ -21,7 +22,8 @@ const Workspace = {
   createdAt TEXT DEFAULT CURRENT_TIMESTAMP,
   openAiTemp REAL DEFAULT NULL,
   openAiHistory INTEGER DEFAULT 20,
-  lastUpdatedAt TEXT DEFAULT CURRENT_TIMESTAMP
+  lastUpdatedAt TEXT DEFAULT CURRENT_TIMESTAMP,
+  openAiPrompt TEXT DEFAULT NULL
   `,
   migrateTable: async function () {
     console.log(`\x1b[34m[MIGRATING]\x1b[0m Checking for Workspace migrations`);
@@ -35,6 +37,11 @@ const Workspace = {
         execCmd: `ALTER TABLE ${this.tablename} ADD COLUMN openAiTemp REAL DEFAULT NULL`,
         doif: false,
       },
+      {
+        colName: "openAiPrompt",
+        execCmd: `ALTER TABLE ${this.tablename} ADD COLUMN openAiPrompt TEXT DEFAULT NULL`,
+        doif: false,
+      },
       {
         colName: "id",
         execCmd: `CREATE TRIGGER IF NOT EXISTS Trg_LastUpdated AFTER UPDATE ON ${this.tablename}
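
For databases created before this commit, the new migration entry boils down to a single statement; a sketch assuming this.tablename resolves to a table named workspaces (an assumption here; fresh installs get the column from colsInit instead):

// Runs once via migrateTable() on pre-existing databases (table name assumed):
//   ALTER TABLE workspaces ADD COLUMN openAiPrompt TEXT DEFAULT NULL
// To verify after upgrading, inspect the schema from the sqlite3 shell:
//   PRAGMA table_info(workspaces);  -- should now list an openAiPrompt TEXT column
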
@@ -148,7 +148,16 @@ async function chatWithWorkspace(workspace, message, chatMode = "chat") {
       };
     }
   }
+
+function chatPrompt(workspace) {
+  return (
+    workspace?.openAiPrompt ??
+    "Given the following conversation, relevant context, and a follow up question, reply with an answer to the current question the user is asking. Return only your response to the question given the above information following the users instructions as needed."
+  );
+}
 module.exports = {
   convertToChatHistory,
   chatWithWorkspace,
+  chatPrompt,
 };
@@ -8,6 +8,7 @@ const { storeVectorResult, cachedVectorInformation } = require("../../files");
 const { Configuration, OpenAIApi } = require("openai");
 const { v4: uuidv4 } = require("uuid");
 const { toChunks, curateSources } = require("../../helpers");
+const { chatPrompt } = require("../../chats");

 const Chroma = {
   name: "Chroma",
@@ -303,7 +304,7 @@ const Chroma = {
       { collectionName: namespace, url: process.env.CHROMA_ENDPOINT }
     );
     const model = this.llm({
-      temperature: workspace?.openAiTemp,
+      temperature: workspace?.openAiTemp ?? 0.7,
     });
     const chain = VectorDBQAChain.fromLLM(model, vectorStore, {
@@ -347,7 +348,7 @@ const Chroma = {
     );
     const prompt = {
       role: "system",
-      content: `Given the following conversation, relevant context, and a follow up question, reply with an answer to the current question the user is asking. Return only your response to the question given the above information following the users instructions as needed.
+      content: `${chatPrompt(workspace)}
   Context:
   ${contextTexts
     .map((text, i) => {
@@ -5,6 +5,7 @@ const { RecursiveCharacterTextSplitter } = require("langchain/text_splitter");
 const { storeVectorResult, cachedVectorInformation } = require("../../files");
 const { Configuration, OpenAIApi } = require("openai");
 const { v4: uuidv4 } = require("uuid");
+const { chatPrompt } = require("../../chats");

 // Since we roll our own results for prompting we
 // have to manually curate sources as well.
@@ -260,7 +261,7 @@ const LanceDb = {
     );
     const prompt = {
       role: "system",
-      content: `Given the following conversation, relevant context, and a follow up question, reply with an answer to the current question the user is asking. Return only your response to the question given the above information following the users instructions as needed.
+      content: `${chatPrompt(workspace)}
   Context:
   ${contextTexts
     .map((text, i) => {
@@ -309,7 +310,7 @@ const LanceDb = {
     );
     const prompt = {
       role: "system",
-      content: `Given the following conversation, relevant context, and a follow up question, reply with an answer to the current question the user is asking. Return only your response to the question given the above information following the users instructions as needed.
+      content: `${chatPrompt(workspace)}
   Context:
   ${contextTexts
     .map((text, i) => {
@@ -10,6 +10,7 @@ const { storeVectorResult, cachedVectorInformation } = require("../../files");
 const { Configuration, OpenAIApi } = require("openai");
 const { v4: uuidv4 } = require("uuid");
 const { toChunks, curateSources } = require("../../helpers");
+const { chatPrompt } = require("../../chats");

 const Pinecone = {
   name: "Pinecone",
@@ -278,7 +279,7 @@ const Pinecone = {
     });
     const model = this.llm({
-      temperature: workspace?.openAiTemp,
+      temperature: workspace?.openAiTemp ?? 0.7,
     });
     const chain = VectorDBQAChain.fromLLM(model, vectorStore, {
       k: 5,
@@ -318,14 +319,15 @@ const Pinecone = {
     );
     const prompt = {
       role: "system",
-      content: `Given the following conversation, relevant context, and a follow up question, reply with an answer to the current question the user is asking. Return only your response to the question given the above information following the users instructions as needed.
+      content: `${chatPrompt(workspace)}
   Context:
   ${contextTexts
     .map((text, i) => {
       return `[CONTEXT ${i}]:\n${text}\n[END CONTEXT ${i}]\n\n`;
     })
     .join("")}`,
   };

   const memory = [prompt, ...chatHistory, { role: "user", content: input }];
   const responseText = await this.getChatCompletion(this.openai(), memory, {
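
One detail shared by the Chroma and Pinecone hunks: the model temperature now defaults to 0.7 when unset, and because the fallback uses ?? rather than ||, an intentional temperature of 0 is preserved. A tiny illustration of the difference:

const resolveTemp = (t) => t ?? 0.7; // same shape as workspace?.openAiTemp ?? 0.7

resolveTemp(undefined); // 0.7 -- temperature never set on the workspace
resolveTemp(null);      // 0.7 -- the column's default value
resolveTemp(0);         // 0   -- kept; (t || 0.7) would wrongly yield 0.7
resolveTemp(1.2);       // 1.2
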