Skip to content
Snippets Groups Projects
Unverified Commit 93d7ce6d authored by Timothy Carambat's avatar Timothy Carambat Committed by GitHub
Browse files

Handle Bedrock models that cannot use `system` prompts (#2489)

parent 3dc0f3f4
No related branches found
No related tags found
No related merge requests found
...@@ -5,21 +5,30 @@ import paths from "@/utils/paths"; ...@@ -5,21 +5,30 @@ import paths from "@/utils/paths";
import { useTranslation } from "react-i18next"; import { useTranslation } from "react-i18next";
import { Link, useParams } from "react-router-dom"; import { Link, useParams } from "react-router-dom";
/**
 * Models that can NOT be used for agents, either because they do not
 * support function calling or because they reject `system` prompts.
 * Kept in a Set for O(1) membership checks.
 */
const AGENT_UNSUPPORTED_OPENAI_MODELS = new Set([
  "gpt-3.5-turbo-0301",
  "gpt-4-turbo-2024-04-09",
  "gpt-4-turbo",
  "o1-preview",
  "o1-preview-2024-09-12",
  "o1-mini",
  "o1-mini-2024-09-12",
]);

/**
 * Determines whether a provider/model pair can be used for agents.
 * Only OpenAI models are filtered; every other provider is assumed supported.
 * @param {string} provider - The AI provider identifier (e.g. "openai").
 * @param {string} [model=""] - The model name to check.
 * @returns {boolean} Whether the model is supported for agents.
 */
function supportedModel(provider, model = "") {
  if (provider !== "openai") return true;
  return !AGENT_UNSUPPORTED_OPENAI_MODELS.has(model);
}
export default function AgentModelSelection({ export default function AgentModelSelection({
......
...@@ -7,6 +7,20 @@ const { NativeEmbedder } = require("../../EmbeddingEngines/native"); ...@@ -7,6 +7,20 @@ const { NativeEmbedder } = require("../../EmbeddingEngines/native");
// Docs: https://js.langchain.com/v0.2/docs/integrations/chat/bedrock_converse // Docs: https://js.langchain.com/v0.2/docs/integrations/chat/bedrock_converse
class AWSBedrockLLM { class AWSBedrockLLM {
/**
 * Bedrock model IDs that do not support `system` prompts.
 * This is not explicitly documented by AWS, but it is observed that these
 * models ignore the system prompt in their responses and will crash when
 * one is provided; for them the system prompt is simulated as a Human/AI
 * message pair instead.
 * Extend this list as such models are discovered or new models are added.
 * NOTE(review): may want a user-config escape hatch for custom Bedrock
 * models — TODO confirm.
 * @type {string[]}
 */
noSystemPromptModels = [
  "amazon.titan-text-express-v1",
  "amazon.titan-text-lite-v1",
  "cohere.command-text-v14",
  "cohere.command-light-text-v14",
];
constructor(embedder = null, modelPreference = null) { constructor(embedder = null, modelPreference = null) {
if (!process.env.AWS_BEDROCK_LLM_ACCESS_KEY_ID) if (!process.env.AWS_BEDROCK_LLM_ACCESS_KEY_ID)
throw new Error("No AWS Bedrock LLM profile id was set."); throw new Error("No AWS Bedrock LLM profile id was set.");
...@@ -59,6 +73,22 @@ class AWSBedrockLLM { ...@@ -59,6 +73,22 @@ class AWSBedrockLLM {
for (const chat of chats) { for (const chat of chats) {
if (!roleToMessageMap.hasOwnProperty(chat.role)) continue; if (!roleToMessageMap.hasOwnProperty(chat.role)) continue;
// When a model does not support system prompts, we need to handle it.
// We will add a new message that simulates the system prompt via a user message and AI response.
// This will allow the model to respond without crashing but we can still inject context.
if (
this.noSystemPromptModels.includes(this.model) &&
chat.role === "system"
) {
this.#log(
`Model does not support system prompts! Simulating system prompt via Human/AI message pairs.`
);
langchainChats.push(new HumanMessage({ content: chat.content }));
langchainChats.push(new AIMessage({ content: "Okay." }));
continue;
}
const MessageClass = roleToMessageMap[chat.role]; const MessageClass = roleToMessageMap[chat.role];
langchainChats.push(new MessageClass({ content: chat.content })); langchainChats.push(new MessageClass({ content: chat.content }));
} }
...@@ -78,6 +108,10 @@ class AWSBedrockLLM { ...@@ -78,6 +108,10 @@ class AWSBedrockLLM {
); );
} }
#log(text, ...args) {
console.log(`\x1b[32m[AWSBedrock]\x1b[0m ${text}`, ...args);
}
streamingEnabled() { streamingEnabled() {
return "streamGetChatCompletion" in this; return "streamGetChatCompletion" in this;
} }
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment