diff --git a/apps/simple/package.json b/apps/simple/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..6d41fe69cb0fb608517d4b833360f1f87c0e31b3
--- /dev/null
+++ b/apps/simple/package.json
@@ -0,0 +1,3 @@
+{
+  "name": "simple"
+}
diff --git a/packages/core/QueryEngine.ts b/packages/core/QueryEngine.ts
index 9a8cbdbc0ecaf56d915cea3ecfc98a6637a9cfcb..fb3acbf3bb65390e5911e2426fd056802ff01f26 100644
--- a/packages/core/QueryEngine.ts
+++ b/packages/core/QueryEngine.ts
@@ -1,13 +1,7 @@
 import { Response } from "./Response";
 
 export class BaseQueryEngine {
-  query(q: string): Response {
+  async aquery(q: string): Promise<Response> {
     return new Response();
   }
-
-  aquery(q: string): Promise<Response> {
-    return new Promise<Response>((resolve, reject) => {
-      resolve(new Response());
-    });
-  }
 }
diff --git a/packages/core/Reader.ts b/packages/core/Reader.ts
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..90a25a44eb362a3be7dcf2d5465156af5979f0e4 100644
--- a/packages/core/Reader.ts
+++ b/packages/core/Reader.ts
@@ -0,0 +1,17 @@
+import { Document } from "./Document";
+
+export interface BaseReader {
+  loadData(...args: any[]): Promise<Document>;
+}
+
+export class SimpleDirectoryReader implements BaseReader {
+  async loadData(options) {
+    return new Document("1", "");
+  }
+}
+
+export class PDFReader implements BaseReader {
+  async loadData(options) {
+    return new Document("1", "");
+  }
+}
diff --git a/packages/core/ServiceContext.ts b/packages/core/ServiceContext.ts
index ab5d9a76052c52f031d99e795c9f5481c09f8651..10fcb25b958bc80d5f6d87318656f7f0572a2034 100644
--- a/packages/core/ServiceContext.ts
+++ b/packages/core/ServiceContext.ts
@@ -1,6 +1,6 @@
 interface ServiceContext {
   llmPredictor?: any;
-  promptHelper: any;
+  // promptHelper: any;
   embedModel: any;
   nodeParser: any;
   // llamaLogger: any;
diff --git a/packages/core/openai.ts b/packages/core/openai.ts
index 794267fc4f607f7df7878ce99285c9d57a469389..d22971cf6124f0852aae4018a300225628ae24ac 100644
--- a/packages/core/openai.ts
+++ b/packages/core/openai.ts
@@ -33,9 +33,14 @@ interface FunctionMessage {
 
 export type Message = ChatMessage | FunctionMessage;
 
+interface Function {
+  name: string;
+}
+
 export const getChatCompletions = async (
   messages: Message[],
-  model = "gpt-3.5-turbo"
+  model = "gpt-3.5-turbo",
+  functions: Function[] | null = null
 ) => {
   return await fetch(OPENAI_CHAT_COMPLETIONS_URL, {
     method: "POST",
diff --git a/packages/core/package.json b/packages/core/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..bf9c32aca6f497311ae3988d33fd8aac53cdb763
--- /dev/null
+++ b/packages/core/package.json
@@ -0,0 +1,3 @@
+{
+  "name": "core"
+}
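
Note for reviewers: below is a minimal TypeScript usage sketch of the APIs touched by this patch. It is illustrative only and not part of the diff; the import paths, the shape of ChatMessage, and the loadData options object are assumptions, since those definitions live outside the changed hunks.

// Illustrative sketch only -- not part of the patch. Import paths and the
// ChatMessage/options shapes below are assumptions.
import { BaseQueryEngine } from "./packages/core/QueryEngine"; // hypothetical path
import { SimpleDirectoryReader } from "./packages/core/Reader"; // hypothetical path
import { getChatCompletions, Message } from "./packages/core/openai"; // hypothetical path

async function demo() {
  // BaseQueryEngine now exposes only the async aquery(); the sync query() is removed.
  const engine = new BaseQueryEngine();
  const response = await engine.aquery("What does this change do?");

  // Readers resolve to a Document via the async loadData().
  const reader = new SimpleDirectoryReader();
  const doc = await reader.loadData({ directoryPath: "./data" }); // options shape assumed

  // getChatCompletions gains an optional third argument for function definitions.
  const messages = [{ role: "user", content: "Hello" }] as Message[]; // ChatMessage shape assumed
  const completion = await getChatCompletions(messages, "gpt-3.5-turbo", [
    { name: "getWeather" }, // matches the new Function interface ({ name: string })
  ]);
}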