Unverified commit 4fa2b76f authored by Thuc Pham, committed by GitHub

feat: implement citation for TS (#257)

parent 4ead8e14
---
"create-llama": patch
---
feat: implement citation for TS
@@ -454,12 +454,7 @@ const getSystemPromptEnv = (
     },
   ];
-  // Citation only works with FastAPI along with the chat engine and data source provided for now.
-  if (
-    framework === "fastapi" &&
-    tools?.length == 0 &&
-    (dataSources?.length ?? 0 > 0)
-  ) {
+  if (tools?.length == 0 && (dataSources?.length ?? 0 > 0)) {
     const citationPrompt = `'You have provided information from a knowledge base that has been passed to you in nodes of information.
 Each node has useful metadata such as node ID, file name, page, etc.
 Please add the citation to the data node for each sentence or paragraph that you reference in the provided information.
...
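For context, a minimal sketch of what this hunk changes: with the FastAPI-only guard removed, any generated project that has data sources and no tools gets a SYSTEM_CITATION_PROMPT entry alongside SYSTEM_PROMPT. The EnvVar shape and the buildPromptEnv name below are assumptions for illustration, not code from this commit.

// Illustrative sketch only; EnvVar and buildPromptEnv are assumed names.
type EnvVar = { name: string; value?: string };

function buildPromptEnv(
  tools: unknown[] = [],
  dataSources: unknown[] = [],
): EnvVar[] {
  const envs: EnvVar[] = [
    { name: "SYSTEM_PROMPT", value: "You are a helpful assistant." },
  ];
  // Mirrors the condition above: the citation prompt is now emitted for any
  // framework, as long as there is at least one data source and no tools.
  if (tools.length === 0 && dataSources.length > 0) {
    envs.push({
      name: "SYSTEM_CITATION_PROMPT",
      value:
        "Add a citation to the data node for each sentence or paragraph you reference.",
    });
  }
  return envs;
}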
 import { ContextChatEngine, Settings } from "llamaindex";
 import { getDataSource } from "./index";
+import { nodeCitationProcessor } from "./nodePostprocessors";
 import { generateFilters } from "./queryFilter";
 
 export async function createChatEngine(documentIds?: string[], params?: any) {
@@ -14,9 +15,18 @@ export async function createChatEngine(documentIds?: string[], params?: any) {
     filters: generateFilters(documentIds || []),
   });
 
+  const systemPrompt = process.env.SYSTEM_PROMPT;
+  const citationPrompt = process.env.SYSTEM_CITATION_PROMPT;
+  const prompt =
+    [systemPrompt, citationPrompt].filter((p) => p).join("\n") || undefined;
+
+  const nodePostprocessors = citationPrompt
+    ? [nodeCitationProcessor]
+    : undefined;
   return new ContextChatEngine({
     chatModel: Settings.llm,
     retriever,
-    systemPrompt: process.env.SYSTEM_PROMPT,
+    systemPrompt: prompt,
+    nodePostprocessors,
   });
 }
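As a quick illustration of the prompt assembly above (a sketch, not part of the commit): SYSTEM_PROMPT and SYSTEM_CITATION_PROMPT are joined with a newline when both are set, and the result degrades gracefully when either is missing. The combinePrompts name is assumed for illustration.

// Sketch of the joining logic above, pulled out into a standalone helper.
const combinePrompts = (systemPrompt?: string, citationPrompt?: string) =>
  [systemPrompt, citationPrompt].filter((p) => p).join("\n") || undefined;

combinePrompts("You are a helpful assistant.", undefined);
// -> "You are a helpful assistant."
combinePrompts(undefined, undefined);
// -> undefined, so ContextChatEngine presumably falls back to its default prompt
combinePrompts("You are a helpful assistant.", "Cite the node_id for each claim.");
// -> both prompts, separated by a newline

The nodeCitationProcessor imported above comes from the new module below.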
import {
BaseNodePostprocessor,
MessageContent,
NodeWithScore,
} from "llamaindex";
class NodeCitationProcessor implements BaseNodePostprocessor {
/**
* Append node_id into metadata for citation purpose.
* Config SYSTEM_CITATION_PROMPT in your runtime environment variable to enable this feature.
*/
async postprocessNodes(
nodes: NodeWithScore[],
query?: MessageContent,
): Promise<NodeWithScore[]> {
for (const nodeScore of nodes) {
if (!nodeScore.node || !nodeScore.node.metadata) {
continue; // Skip nodes with missing properties
}
nodeScore.node.metadata["node_id"] = nodeScore.node.id_;
}
return nodes;
}
}
export const nodeCitationProcessor = new NodeCitationProcessor();
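A short usage sketch of the new postprocessor (illustrative only; the sample node text and score are made up): postprocessNodes copies each node's id_ into metadata.node_id so the model can reference it when citing.

// Usage sketch, not part of the commit.
import { TextNode } from "llamaindex";
import { nodeCitationProcessor } from "./nodePostprocessors";

async function demo() {
  const node = new TextNode({ text: "Vector databases store embeddings." });
  const [processed] = await nodeCitationProcessor.postprocessNodes([
    { node, score: 0.87 },
  ]);
  console.log(processed.node.metadata["node_id"]); // same value as node.id_
}

void demo();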