From 8386510d86711f5b37a29b4862ebd7dd9c2b4c9a Mon Sep 17 00:00:00 2001
From: Jingyi Zhao <zhao.elton@gmail.com>
Date: Wed, 4 Dec 2024 20:36:00 -0500
Subject: [PATCH] chore: add e2e working example for ingestion (#1543)

---
 .../basicIngestion.ts}                        | 26 ++++++++++++-------
 1 file changed, 17 insertions(+), 9 deletions(-)
 rename examples/{pipeline/ingestion.ts => ingestion/basicIngestion.ts} (57%)

diff --git a/examples/pipeline/ingestion.ts b/examples/ingestion/basicIngestion.ts
similarity index 57%
rename from examples/pipeline/ingestion.ts
rename to examples/ingestion/basicIngestion.ts
index d3a851843..9af5711a1 100644
--- a/examples/pipeline/ingestion.ts
+++ b/examples/ingestion/basicIngestion.ts
@@ -1,16 +1,15 @@
-import fs from "node:fs/promises";
-
 import {
   Document,
   IngestionPipeline,
-  MetadataMode,
   OpenAIEmbedding,
   SentenceSplitter,
+  VectorStoreIndex,
 } from "llamaindex";
+import fs from "node:fs/promises";
 
 async function main() {
   // Load essay from abramov.txt in Node
-  const path = "node_modules/llamaindex/examples/abramov.txt";
+  const path = "../node_modules/llamaindex/examples/abramov.txt";
 
   const essay = await fs.readFile(path, "utf-8");
 
@@ -22,14 +21,23 @@ async function main() {
       new OpenAIEmbedding(),
     ],
   });
+  console.time("Pipeline Run Time");
 
-  // run the pipeline
   const nodes = await pipeline.run({ documents: [document] });
 
-  // print out the result of the pipeline run
-  for (const node of nodes) {
-    console.log(node.getContent(MetadataMode.NONE));
-  }
+  console.timeEnd("Pipeline Run Time");
+
+  // initialize the VectorStoreIndex from nodes
+  const index = await VectorStoreIndex.init({ nodes });
+
+  // Query the index
+  const queryEngine = index.asQueryEngine();
+
+  const { message } = await queryEngine.query({
+    query: "summarize the article in three sentence",
+  });
+
+  console.log(message);
 }
 
 main().catch(console.error);
-- 
GitLab
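
Note: for readers who want to try the example without applying the patch by hand, the resulting examples/ingestion/basicIngestion.ts would look roughly like the sketch below. The diff does not show the lines between the two hunks, so the Document and IngestionPipeline construction (including the SentenceSplitter configuration and the variable names `document` and `pipeline`) is an assumption based on the surrounding context lines, not something confirmed by this patch.

```ts
import {
  Document,
  IngestionPipeline,
  OpenAIEmbedding,
  SentenceSplitter,
  VectorStoreIndex,
} from "llamaindex";
import fs from "node:fs/promises";

async function main() {
  // Load essay from abramov.txt in Node
  const path = "../node_modules/llamaindex/examples/abramov.txt";

  const essay = await fs.readFile(path, "utf-8");

  // NOTE: the Document/pipeline construction below is not visible in the
  // diff; it is assumed from the context lines around the second hunk.
  const document = new Document({ text: essay, id_: path });

  const pipeline = new IngestionPipeline({
    transformations: [new SentenceSplitter(), new OpenAIEmbedding()],
  });
  console.time("Pipeline Run Time");

  // run the pipeline to split and embed the document
  const nodes = await pipeline.run({ documents: [document] });

  console.timeEnd("Pipeline Run Time");

  // initialize the VectorStoreIndex from nodes
  const index = await VectorStoreIndex.init({ nodes });

  // Query the index
  const queryEngine = index.asQueryEngine();

  const { message } = await queryEngine.query({
    query: "summarize the article in three sentence",
  });

  console.log(message);
}

main().catch(console.error);
```

Because of the relative path `../node_modules/llamaindex/examples/abramov.txt` and the use of OpenAIEmbedding, the script presumably needs to be run from inside the examples/ directory with OPENAI_API_KEY set, e.g. with a TypeScript runner such as tsx.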