Skip to content
Snippets Groups Projects
Commit 7626ca77 authored by thucpn's avatar thucpn
Browse files

refactor: import from llamaindex edge for typescript templates and vectordbs

parent dd92b911
No related tags found
No related merge requests found
Showing
with 47 additions and 57 deletions
import * as LlamaIndex from "@llamaindex/edge";
import * as traceloop from "@traceloop/node-server-sdk";
import * as LlamaIndex from "llamaindex";
export const initObservability = () => {
traceloop.initialize({
......
/* eslint-disable turbo/no-undeclared-env-vars */
import { VectorStoreIndex } from "@llamaindex/edge";
import { SimpleDirectoryReader } from "@llamaindex/edge/readers/SimpleDirectoryReader";
import { storageContextFromDefaults } from "@llamaindex/edge/storage/StorageContext";
import { MilvusVectorStore } from "@llamaindex/edge/storage/vectorStore/MilvusVectorStore";
import * as dotenv from "dotenv";
import {
MilvusVectorStore,
SimpleDirectoryReader,
VectorStoreIndex,
storageContextFromDefaults,
} from "llamaindex";
import {
STORAGE_DIR,
checkRequiredEnvVars,
......
import {
ContextChatEngine,
LLM,
MilvusVectorStore,
serviceContextFromDefaults,
VectorStoreIndex,
} from "llamaindex";
serviceContextFromDefaults,
} from "@llamaindex/edge";
import { MilvusVectorStore } from "@llamaindex/edge/storage/vectorStore/MilvusVectorStore";
import {
checkRequiredEnvVars,
CHUNK_OVERLAP,
CHUNK_SIZE,
checkRequiredEnvVars,
getMilvusClient,
} from "./shared.mjs";
......
/* eslint-disable turbo/no-undeclared-env-vars */
import { VectorStoreIndex } from "@llamaindex/edge";
import { SimpleDirectoryReader } from "@llamaindex/edge/readers/SimpleDirectoryReader";
import { storageContextFromDefaults } from "@llamaindex/edge/storage/StorageContext";
import { MongoDBAtlasVectorSearch } from "@llamaindex/edge/storage/vectorStore/MongoDBAtlasVectorSearch";
import * as dotenv from "dotenv";
import {
MongoDBAtlasVectorSearch,
SimpleDirectoryReader,
VectorStoreIndex,
storageContextFromDefaults,
} from "llamaindex";
import { MongoClient } from "mongodb";
import { STORAGE_DIR, checkRequiredEnvVars } from "./shared.mjs";
......
......@@ -2,12 +2,12 @@
import {
ContextChatEngine,
LLM,
MongoDBAtlasVectorSearch,
serviceContextFromDefaults,
VectorStoreIndex,
} from "llamaindex";
serviceContextFromDefaults,
} from "@llamaindex/edge";
import { MongoDBAtlasVectorSearch } from "@llamaindex/edge/storage/vectorStore/MongoDBAtlasVectorSearch";
import { MongoClient } from "mongodb";
import { checkRequiredEnvVars, CHUNK_OVERLAP, CHUNK_SIZE } from "./shared.mjs";
import { CHUNK_OVERLAP, CHUNK_SIZE, checkRequiredEnvVars } from "./shared.mjs";
async function getDataSource(llm: LLM) {
checkRequiredEnvVars();
......
import {
serviceContextFromDefaults,
SimpleDirectoryReader,
storageContextFromDefaults,
VectorStoreIndex,
} from "llamaindex";
import { VectorStoreIndex, serviceContextFromDefaults } from "@llamaindex/edge";
import { SimpleDirectoryReader } from "@llamaindex/edge/readers/SimpleDirectoryReader";
import { storageContextFromDefaults } from "@llamaindex/edge/storage/StorageContext";
import * as dotenv from "dotenv";
......
import {
ContextChatEngine,
LLM,
serviceContextFromDefaults,
SimpleDocumentStore,
storageContextFromDefaults,
VectorStoreIndex,
} from "llamaindex";
serviceContextFromDefaults,
} from "@llamaindex/edge";
import { storageContextFromDefaults } from "@llamaindex/edge/storage/StorageContext";
import { SimpleDocumentStore } from "@llamaindex/edge/storage/docStore/SimpleDocumentStore";
import { CHUNK_OVERLAP, CHUNK_SIZE, STORAGE_CACHE_DIR } from "./constants.mjs";
async function getDataSource(llm: LLM) {
......
/* eslint-disable turbo/no-undeclared-env-vars */
import { VectorStoreIndex } from "@llamaindex/edge";
import { SimpleDirectoryReader } from "@llamaindex/edge/readers/SimpleDirectoryReader";
import { storageContextFromDefaults } from "@llamaindex/edge/storage/StorageContext";
import { PGVectorStore } from "@llamaindex/edge/storage/vectorStore/PGVectorStore";
import * as dotenv from "dotenv";
import {
PGVectorStore,
SimpleDirectoryReader,
VectorStoreIndex,
storageContextFromDefaults,
} from "llamaindex";
import {
PGVECTOR_SCHEMA,
PGVECTOR_TABLE,
......
......@@ -2,10 +2,10 @@
import {
ContextChatEngine,
LLM,
PGVectorStore,
VectorStoreIndex,
serviceContextFromDefaults,
} from "llamaindex";
} from "@llamaindex/edge";
import { PGVectorStore } from "@llamaindex/edge/storage/vectorStore/PGVectorStore";
import {
CHUNK_OVERLAP,
CHUNK_SIZE,
......
/* eslint-disable turbo/no-undeclared-env-vars */
import { VectorStoreIndex } from "@llamaindex/edge";
import { SimpleDirectoryReader } from "@llamaindex/edge/readers/SimpleDirectoryReader";
import { storageContextFromDefaults } from "@llamaindex/edge/storage/StorageContext";
import { PineconeVectorStore } from "@llamaindex/edge/storage/vectorStore/PineconeVectorStore";
import * as dotenv from "dotenv";
import {
PineconeVectorStore,
SimpleDirectoryReader,
VectorStoreIndex,
storageContextFromDefaults,
} from "llamaindex";
import { STORAGE_DIR, checkRequiredEnvVars } from "./shared.mjs";
dotenv.config();
......
......@@ -2,10 +2,10 @@
import {
ContextChatEngine,
LLM,
PineconeVectorStore,
VectorStoreIndex,
serviceContextFromDefaults,
} from "llamaindex";
} from "@llamaindex/edge";
import { PineconeVectorStore } from "@llamaindex/edge/storage/vectorStore/PineconeVectorStore";
import { CHUNK_OVERLAP, CHUNK_SIZE, checkRequiredEnvVars } from "./shared.mjs";
async function getDataSource(llm: LLM) {
......
......@@ -12,7 +12,7 @@
"cors": "^2.8.5",
"dotenv": "^16.3.1",
"express": "^4.18.2",
"llamaindex": "latest"
"@llamaindex/edge": "^0.2.1"
},
"devDependencies": {
"@types/cors": "^2.8.17",
......
import { ChatMessage, MessageContent, OpenAI } from "@llamaindex/edge";
import { Request, Response } from "express";
import { ChatMessage, MessageContent, OpenAI } from "llamaindex";
import { createChatEngine } from "./engine";
const convertMessageContent = (
......
import { LLM, SimpleChatEngine } from "llamaindex";
import { LLM, SimpleChatEngine } from "@llamaindex/edge";
export async function createChatEngine(llm: LLM) {
return new SimpleChatEngine({
......
import { ChatMessage, MessageContent, OpenAI } from "@llamaindex/edge";
import { streamToResponse } from "ai";
import { Request, Response } from "express";
import { ChatMessage, MessageContent, OpenAI } from "llamaindex";
import { createChatEngine } from "./engine";
import { LlamaIndexStream } from "./llamaindex-stream";
......
import { LLM, SimpleChatEngine } from "llamaindex";
import { LLM, SimpleChatEngine } from "@llamaindex/edge";
export async function createChatEngine(llm: LLM) {
return new SimpleChatEngine({
......
import { Response } from "@llamaindex/edge";
import {
JSONValue,
createCallbacksTransformer,
......@@ -6,7 +7,6 @@ import {
trimStartOfStreamHelper,
type AIStreamCallbacksAndOptions,
} from "ai";
import { Response } from "llamaindex";
type ParserOptions = {
image_url?: string;
......
import { LLM, SimpleChatEngine } from "llamaindex";
import { LLM, SimpleChatEngine } from "@llamaindex/edge";
export async function createChatEngine(llm: LLM) {
return new SimpleChatEngine({
......
import { Response } from "@llamaindex/edge";
import {
JSONValue,
createCallbacksTransformer,
......@@ -6,7 +7,6 @@ import {
trimStartOfStreamHelper,
type AIStreamCallbacksAndOptions,
} from "ai";
import { Response } from "llamaindex";
type ParserOptions = {
image_url?: string;
......
import { initObservability } from "@/app/observability";
import { ChatMessage, MessageContent, OpenAI } from "@llamaindex/edge";
import { StreamingTextResponse } from "ai";
import { ChatMessage, MessageContent, OpenAI } from "llamaindex";
import { NextRequest, NextResponse } from "next/server";
import { createChatEngine } from "./engine";
import { LlamaIndexStream } from "./llamaindex-stream";
initObservability();
export const runtime = "nodejs";
export const runtime = "edge";
export const dynamic = "force-dynamic";
const convertMessageContent = (
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment