index.ts
import {
  ContextChatEngine,
  LLM,
  serviceContextFromDefaults,
  SimpleDocumentStore,
  storageContextFromDefaults,
  VectorStoreIndex,
} from "llamaindex";
import { CHUNK_OVERLAP, CHUNK_SIZE, STORAGE_CACHE_DIR } from "./constants.mjs";

// Load the persisted vector index from the storage cache directory.
async function getDataSource(llm: LLM) {
  const serviceContext = serviceContextFromDefaults({
    llm,
    chunkSize: CHUNK_SIZE,
    chunkOverlap: CHUNK_OVERLAP,
  });
  const storageContext = await storageContextFromDefaults({
    persistDir: STORAGE_CACHE_DIR,
  });

  // Fail fast if the document store has not been populated yet.
  const numberOfDocs = Object.keys(
    (storageContext.docStore as SimpleDocumentStore).toDict(),
  ).length;
  if (numberOfDocs === 0) {
    throw new Error(
      `StorageContext is empty - call 'npm run generate' to generate the storage first`,
    );
  }
  return await VectorStoreIndex.init({
    storageContext,
    serviceContext,
  });
}

// Create a chat engine that answers queries using context retrieved
// from the persisted vector index.
export async function createChatEngine(llm: LLM) {
  const index = await getDataSource(llm);
  const retriever = index.asRetriever();
  // Retrieve the 5 most similar chunks for each query.
  retriever.similarityTopK = 5;

  return new ContextChatEngine({
    chatModel: llm,
    retriever,
  });
}
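
For reference, a minimal usage sketch of `createChatEngine`. This is an assumption-laden illustration, not part of the snippet: the `OpenAI` wrapper, the model name, and the string-based `chat()` call match the llamaindex version this file appears to target, but verify against the version you have installed.

// Usage sketch (illustrative, not from the original snippet).
// Assumes the llamaindex OpenAI wrapper and the string-argument
// chat() signature from the same library version as index.ts.
import { OpenAI } from "llamaindex";
import { createChatEngine } from "./index";

async function main() {
  // Any LLM implementation from llamaindex works here; OpenAI is
  // just an example, and the model name is an assumption.
  const llm = new OpenAI({ model: "gpt-3.5-turbo" });
  const chatEngine = await createChatEngine(llm);
  const response = await chatEngine.chat("What do the documents say?");
  console.log(response.toString());
}

main().catch(console.error);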