diff --git a/.changeset/silly-keys-wonder.md b/.changeset/silly-keys-wonder.md
new file mode 100644
index 0000000000000000000000000000000000000000..fb3056be5d122c8ff013443e6c76fb94297023c8
--- /dev/null
+++ b/.changeset/silly-keys-wonder.md
@@ -0,0 +1,5 @@
+---
+"create-llama": patch
+---
+
+Add system prompt env variable for TS
diff --git a/helpers/env-variables.ts b/helpers/env-variables.ts
index 3ee3c55cb6413e02d25f0e00d24476f1833a963c..b5b36dd7bdebe404d60b03d495d165d81d21614a 100644
--- a/helpers/env-variables.ts
+++ b/helpers/env-variables.ts
@@ -249,13 +249,6 @@ const getFrameworkEnvs = (
       description: "The port to start the backend app.",
       value: port?.toString() || "8000",
     },
-    // TODO: Once LlamaIndexTS supports string templates, move this to `getEngineEnvs`
-    {
-      name: "SYSTEM_PROMPT",
-      description: `Custom system prompt.
-Example:
-SYSTEM_PROMPT="You are a helpful assistant who helps users with their questions."`,
-    },
   ];
 };
 
@@ -267,6 +260,12 @@ const getEngineEnvs = (): EnvVar[] => {
         "The number of similar embeddings to return when retrieving documents.",
       value: "3",
     },
+    {
+      name: "SYSTEM_PROMPT",
+      description: `Custom system prompt.
+Example:
+SYSTEM_PROMPT="You are a helpful assistant who helps users with their questions."`,
+    },
   ];
 };
 
diff --git a/templates/components/engines/typescript/agent/chat.ts b/templates/components/engines/typescript/agent/chat.ts
index 9d16c8bda40e5fc4a9ec71c18054c599c17f145b..856d36ee5029c0d808fcdb47c470ef1c798b9828 100644
--- a/templates/components/engines/typescript/agent/chat.ts
+++ b/templates/components/engines/typescript/agent/chat.ts
@@ -41,5 +41,6 @@ export async function createChatEngine() {
 
   return new OpenAIAgent({
     tools,
+    systemPrompt: process.env.SYSTEM_PROMPT,
   });
 }
diff --git a/templates/components/engines/typescript/chat/chat.ts b/templates/components/engines/typescript/chat/chat.ts
index 5b47fe5819b21536d94a378e09790b9980acfe90..ef1dd5b63adda6d9a7d46fc2e96523c0c219628d 100644
--- a/templates/components/engines/typescript/chat/chat.ts
+++ b/templates/components/engines/typescript/chat/chat.ts
@@ -16,5 +16,6 @@ export async function createChatEngine() {
   return new ContextChatEngine({
     chatModel: Settings.llm,
     retriever,
+    systemPrompt: process.env.SYSTEM_PROMPT,
   });
 }