diff --git a/README.md b/README.md index 5a8f83adb2c5bfbca3c24ad0b1cc72f1c4adbc42..b83d08daa59e51bb50c166915acb02fcd986f471 100644 --- a/README.md +++ b/README.md @@ -25,7 +25,7 @@ LlamaIndex.TS aims to be a lightweight, easy to use set of libraries to help you LlamaIndex.TS supports multiple JS environments, including: -- Node.js (18, 20, 22) ✅ +- Node.js >= 20 ✅ - Deno ✅ - Bun ✅ - Nitro ✅ @@ -57,213 +57,9 @@ pnpm install llamaindex yarn add llamaindex ``` -### Setup TypeScript +### Setup in Node.js, Deno, Bun, TypeScript...? -```json5 -{ - compilerOptions: { - // â¬‡ï¸ add this line to your tsconfig.json - moduleResolution: "bundler", // or "node16" - }, -} -``` - -<details> - <summary>Why?</summary> - We are shipping both ESM and CJS module, and compatible with Vercel Edge, Cloudflare Workers, and other serverless platforms. - -So we are using [conditional exports](https://nodejs.org/api/packages.html#conditional-exports) to support all environments. - -This is a kind of modern way of shipping packages, but might cause TypeScript type check to fail because of legacy module resolution. - -Imaging you put output file into `/dist/openai.js` but you are importing `llamaindex/openai` in your code, and set `package.json` like this: - -```json -{ - "exports": { - "./openai": "./dist/openai.js" - } -} -``` - -In old module resolution, TypeScript will not be able to find the module because it is not follow the file structure, even you run `node index.js` successfully. (on Node.js >=16) - -See more about [moduleResolution](https://www.typescriptlang.org/docs/handbook/modules/theory.html#module-resolution) or -[TypeScript 5.0 blog](https://devblogs.microsoft.com/typescript/announcing-typescript-5-0/#--moduleresolution-bundler7). 
- -</details> - -### Node.js - -```ts -import fs from "node:fs/promises"; -import { Document, VectorStoreIndex } from "llamaindex"; - -async function main() { - // Load essay from abramov.txt in Node - const essay = await fs.readFile( - "node_modules/llamaindex/examples/abramov.txt", - "utf-8", - ); - - // Create Document object with essay - const document = new Document({ text: essay }); - - // Split text and create embeddings. Store them in a VectorStoreIndex - const index = await VectorStoreIndex.fromDocuments([document]); - - // Query the index - const queryEngine = index.asQueryEngine(); - const response = await queryEngine.query({ - query: "What did the author do in college?", - }); - - // Output response - console.log(response.toString()); -} - -main(); -``` - -```bash -# `pnpm install tsx` before running the script -node --import tsx ./main.ts -``` - -### Next.js - -You will need to add a llamaindex plugin to your Next.js project. - -```js -// next.config.js -const withLlamaIndex = require("llamaindex/next"); - -module.exports = withLlamaIndex({ - // your next.js config -}); -``` - -### React Server Actions - -You can combine `ai` with `llamaindex` in Next.js, Waku or Redwood.js with RSC (React Server Components). 
- -```tsx -"use client"; -import { chatWithAgent } from "@/actions"; -import type { JSX } from "react"; -import { useActionState } from "react"; - -export default function Home() { - const [ui, action] = useActionState<JSX.Element | null>(async () => { - return chatWithAgent("hello!", []); - }, null); - return ( - <main> - {ui} - <form action={action}> - <button>Chat</button> - </form> - </main> - ); -} -``` - -```tsx -// src/actions/index.ts -"use server"; -import { createStreamableUI } from "ai/rsc"; -import { OpenAIAgent } from "llamaindex"; -import type { ChatMessage } from "llamaindex/llm/types"; - -export async function chatWithAgent( - question: string, - prevMessages: ChatMessage[] = [], -) { - const agent = new OpenAIAgent({ - tools: [ - // ... adding your tools here - ], - }); - const responseStream = await agent.chat( - { - message: question, - chatHistory: prevMessages, - }, - true, - ); - const uiStream = createStreamableUI(<div>loading...</div>); - responseStream - .pipeTo( - new WritableStream({ - start: () => { - uiStream.update("response:"); - }, - write: async (message) => { - uiStream.append(message.response.delta); - }, - }), - ) - .catch(console.error); - return uiStream.value; -} -``` - -### Cloudflare Workers - -> [!TIP] -> Some modules are not supported in Cloudflare Workers which require Node.js APIs. 
- -```ts -// add `OPENAI_API_KEY` to the `.dev.vars` file -interface Env { - OPENAI_API_KEY: string; -} - -export default { - async fetch( - request: Request, - env: Env, - ctx: ExecutionContext, - ): Promise<Response> { - const { OpenAIAgent, OpenAI } = await import("@llamaindex/openai"); - const text = await request.text(); - const agent = new OpenAIAgent({ - llm: new OpenAI({ - apiKey: env.OPENAI_API_KEY, - }), - tools: [], - }); - const responseStream = await agent.chat({ - stream: true, - message: text, - }); - const textEncoder = new TextEncoder(); - const response = responseStream.pipeThrough<Uint8Array>( - new TransformStream({ - transform: (chunk, controller) => { - controller.enqueue(textEncoder.encode(chunk.delta)); - }, - }), - ); - return new Response(response); - }, -}; -``` - -### Vite - -We have some wasm dependencies for better performance. You can use `vite-plugin-wasm` to load them. - -```ts -import wasm from "vite-plugin-wasm"; - -export default { - plugins: [wasm()], - ssr: { - external: ["tiktoken"], - }, -}; -``` +See our official document: <https://ts.llamaindex.ai/docs/llamaindex/setup/getting-started> ### Tips when using in non-Node.js environments diff --git a/apps/next/src/app/layout.tsx b/apps/next/src/app/layout.tsx index 4a975ed23f6f7980b2c253b0ce2f35a80c7091f5..0af6d71b5f86a09067ea27f6c7235a0a31d05e79 100644 --- a/apps/next/src/app/layout.tsx +++ b/apps/next/src/app/layout.tsx @@ -1,5 +1,6 @@ import { AIProvider } from "@/actions"; import { TooltipProvider } from "@/components/ui/tooltip"; +import { Banner } from "fumadocs-ui/components/banner"; import { RootProvider } from "fumadocs-ui/provider"; import { Inter } from "next/font/google"; import type { ReactNode } from "react"; @@ -35,7 +36,14 @@ export default function Layout({ children }: { children: ReactNode }) { <body className="flex flex-col min-h-screen"> <TooltipProvider> <AIProvider> - <RootProvider>{children}</RootProvider> + <RootProvider> + <Banner variant="rainbow" 
Some content is still in progress; you are welcome to help +               contribute to the documentation.
4.771c-4.748 1.148-10.303 1.721-16.668 1.721-10.851 0-21.597-1.903-32.24-5.71-10.642-3.806-20.502-9.516-29.579-17.13zm-84.159-123.342h64.22v-41.082h-179v41.082h63.906v182.918h50.874z" - fill="#fff" fill-rule="evenodd" /> -</svg> \ No newline at end of file