diff --git a/README.md b/README.md
index 11b97a378370fe6452c24be8eff31bda55f02c11..9e6d712e712401354c747f1a667c09394b72b228 100644
--- a/README.md
+++ b/README.md
@@ -9,3 +9,56 @@ Right now there are two packages of importance:
 packages/core which is the main NPM library @llamaindex/core
 
 apps/simple is where the demo code lives
+
+### Turborepo docs
+
+You can check out how Turborepo works using the included [README-turborepo.md](README-turborepo.md).
+
+## Getting Started
+
+Install Node.js, preferably v18, using nvm or n.
+
+Inside the llamascript directory:
+
+```
+npm i -g pnpm ts-node
+pnpm install
+```
+
+Note: we use pnpm in this repo. It has largely the same functionality and CLI options as npm, but it handles some things better in a monorepo, such as centralizing dependencies and caching.
+
+pnpm has documentation on its [workspace feature](https://pnpm.io/workspaces), and Turborepo has some [useful documentation](https://turbo.build/repo/docs/core-concepts/monorepos/running-tasks) as well.
+
+### Running TypeScript
+
+When we publish to NPM we will ship a tsc-compiled JS version of the library. For now, the easiest way to run the code is with ts-node.
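+
+For example, assuming a demo entry point at apps/simple/index.ts (adjust the path to the actual file), you can run it directly with:
+
+```
+ts-node apps/simple/index.ts
+```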
+
+### Test cases
+
+To run the test suite, run
+
+```
+pnpm run test
+```
+
+To add new test cases, put them in packages/core/src/tests.
+
+We use [Jest](https://jestjs.io/) to write our test cases. Jest comes with a number of built-in assertions via the [expect](https://jestjs.io/docs/expect) function.
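+
+A minimal test file using expect might look like this (the file name below is just an example, and it assumes the existing Jest setup picks up *.test.ts files under packages/core/src/tests):
+
+```
+// packages/core/src/tests/example.test.ts
+describe("example", () => {
+  test("adds numbers", () => {
+    expect(1 + 1).toBe(2);
+  });
+});
+```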
+
+### Demo applications
+
+You can create new demo applications in the apps folder. After creating a new folder, run pnpm init inside it to give it its own package.json.
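+
+For example (my-demo is just a placeholder name):
+
+```
+mkdir apps/my-demo
+cd apps/my-demo
+pnpm init
+```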
+
+### Installing packages
+
+To install a dependency for a specific package or demo application, run
+
+```
+pnpm add [NPM Package] --filter [package or application, e.g. core or simple]
+```
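+
+For example, to add a dependency to the core package (lodash here is just an illustrative package name):
+
+```
+pnpm add lodash --filter core
+```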
+
+To install a dependency at the workspace root, run
+
+```
+pnpm add -w [NPM Package]
+```
diff --git a/packages/core/src/LanguageModel.ts b/packages/core/src/LanguageModel.ts
new file mode 100644
index 0000000000000000000000000000000000000000..bd56cbd4e163e29ba85732ab623c6d00c4d973cf
--- /dev/null
+++ b/packages/core/src/LanguageModel.ts
@@ -0,0 +1,35 @@
+// Base interface for language models; currently an empty placeholder.
+interface BaseLanguageModel {}
+
+type MessageType = "human" | "ai" | "system" | "chat";
+
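+// A chat message: the text content plus the role (MessageType) it came from.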
+interface BaseMessage {
+  content: string;
+  type: MessageType;
+}
+
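+// A single generated output and any provider-specific metadata.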
+interface Generation {
+  text: string;
+  generationInfo?: { [key: string]: any };
+}
+
+interface LLMResult {
+  generations: Generation[][]; // Each input can have more than one generation
+}
+
+class BaseChatModel implements BaseLanguageModel {}
+
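+// Settings for an OpenAI chat model (defaults to gpt-3.5-turbo); the completion call itself is still a stub.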
+class ChatOpenAI extends BaseChatModel {
+  model: string = "gpt-3.5-turbo";
+  temperature: number = 0.7;
+  openAIKey: string | null = null;
+  requestTimeout: number | null = null;
+  maxRetries: number = 6;
+  n: number = 1;
+  maxTokens?: number;
+
+  async agenerate(messages: BaseMessage[], stop: string[] | null = null) {
+    // Stub: the actual chat completion request is not implemented yet.
+    return;
+  }
+}
diff --git a/packages/core/src/Prompt.ts b/packages/core/src/Prompt.ts
index 94d329d34b0efa2817d76f6035edecbf5ea3448b..7fed433c586b8708587ddfab745b069745b0f2cf 100644
--- a/packages/core/src/Prompt.ts
+++ b/packages/core/src/Prompt.ts
@@ -1,8 +1,8 @@
 /**
- * A prompt is a function that takes a dictionary of inputs and returns a string.
+ * A SimplePrompt is a function that takes a dictionary of inputs and returns a string.
  * NOTE this is a different interface compared to LlamaIndex Python
  */
-export type Prompt = (input: { [key: string]: string }) => string;
+export type SimplePrompt = (input: { [key: string]: string }) => string;
 
 /*
 DEFAULT_TEXT_QA_PROMPT_TMPL = (
@@ -15,9 +15,7 @@ DEFAULT_TEXT_QA_PROMPT_TMPL = (
 )
 */
 
-export const defaultTextQaPrompt: Prompt = (input: {
-  [key: string]: string;
-}) => {
+export const defaultTextQaPrompt: SimplePrompt = (input) => {
   const { context, query } = input;
 
   return `Context information is below.
@@ -42,9 +40,7 @@ DEFAULT_SUMMARY_PROMPT_TMPL = (
 )
 */
 
-export const defaultSummaryPrompt: Prompt = (input: {
-  [key: string]: string;
-}) => {
+export const defaultSummaryPrompt: SimplePrompt = (input) => {
   const { context } = input;
 
   return `Write a summary of the following. Try to use only the information provided. Try to include as many key details as possible.
diff --git a/packages/core/src/openai.ts b/packages/core/src/openai.ts
index b300580cb16bc72251bc935931020dd14f7d1879..5527bd1ed3e14ee503effa177299be1523a851ea 100644
--- a/packages/core/src/openai.ts
+++ b/packages/core/src/openai.ts
@@ -4,6 +4,12 @@ import {
   CreateCompletionResponse,
   CreateChatCompletionRequest,
   CreateChatCompletionResponse,
+  CreateEmbeddingRequest,
+  CreateEmbeddingResponse,
+  CreateModerationRequest,
+  CreateModerationResponse,
+  CreateEditRequest,
+  CreateEditResponse,
 } from "openai";
 import { AxiosRequestConfig, AxiosResponse } from "axios";
 import fetchAdapter from "./fetchAdapter";
@@ -28,6 +34,36 @@ export class OpenAIWrapper extends OpenAIApi {
       ...options,
     });
   }
+
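+  // Like the overrides above, these methods inject the fetch-based axios adapter into every request.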
+  createEmbedding(
+    createEmbeddingRequest: CreateEmbeddingRequest,
+    options?: AxiosRequestConfig<any> | undefined
+  ): Promise<AxiosResponse<CreateEmbeddingResponse, any>> {
+    return super.createEmbedding(createEmbeddingRequest, {
+      adapter: fetchAdapter,
+      ...options,
+    });
+  }
+
+  createModeration(
+    createModerationRequest: CreateModerationRequest,
+    options?: AxiosRequestConfig<any> | undefined
+  ): Promise<AxiosResponse<CreateModerationResponse, any>> {
+    return super.createModeration(createModerationRequest, {
+      adapter: fetchAdapter,
+      ...options,
+    });
+  }
+
+  createEdit(
+    createEditRequest: CreateEditRequest,
+    options?: AxiosRequestConfig<any> | undefined
+  ): Promise<AxiosResponse<CreateEditResponse, any>> {
+    return super.createEdit(createEditRequest, {
+      adapter: fetchAdapter,
+      ...options,
+    });
+  }
 }
 
 export * from "openai";
diff --git a/packages/core/tsconfig.json b/packages/core/tsconfig.json
index 67f916782db8c4561e9a48a07f8491e97138a2a9..7ca71178d53835722a040fe55cb186962981aec6 100644
--- a/packages/core/tsconfig.json
+++ b/packages/core/tsconfig.json
@@ -7,7 +7,8 @@
     "preserveWatchOutput": true,
     "skipLibCheck": true,
     "noEmit": true,
-    "strict": true
+    "strict": true,
+    "lib": ["es2015", "dom"]
   },
   "exclude": ["node_modules"]
 }