diff --git a/README.md b/README.md
index 1b35822b78ce9808ab38fc3ee615ba9d831301af..0e6bd55d2950459412bf01103efe48dff1cc2dda 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,11 @@
 # LlamaIndex.TS
 
+LlamaIndex is a data framework for your LLM application.
+
 Use your own data with large language models (LLMs, OpenAI ChatGPT and others) in Typescript and Javascript.
 
+Documentation: https://ts.llamaindex.ai/
+
 ## What is LlamaIndex.TS?
 
 LlamaIndex.TS aims to be a lightweight, easy to use set of libraries to help you integrate large language models into your applications with your own data.
diff --git a/apps/docs/docs/introduction.md b/apps/docs/docs/introduction.md
index 74d1db5b4bfc7fdf383ef9940c0186a95db3f378..9911345a1a31db357f539579f8235375cbed1de5 100644
--- a/apps/docs/docs/introduction.md
+++ b/apps/docs/docs/introduction.md
@@ -37,15 +37,14 @@ For more complex applications, our lower-level APIs allow advanced users to cust
 
 Our documentation includes [Installation Instructions](./installation.md) and a [Starter Tutorial](./starter.md) to build your first application.
 
-Once you're up and running, [High-Level Concepts](./concepts.md) has an overview of LlamaIndex's modular architecture. For more hands-on practical examples, look through our [End-to-End Tutorials](LINK TO EXAMPLES FOLDER).
+Once you're up and running, [High-Level Concepts](./concepts.md) has an overview of LlamaIndex's modular architecture. For more hands-on practical examples, look through our [End-to-End Tutorials](./end_to_end.md).
 
 ## 🗺️ Ecosystem
 
 To download or contribute, find LlamaIndex on:
 
-- Github: https://github.com/jerryjliu/llama_index
-- LlamaIndex (NPM): LINK TO NPM PACKAGE
-- LlamaIndex (Python): https://pypi.org/project/llama-index/.
+- Github: https://github.com/run-llama/LlamaIndexTS
+- NPM: https://www.npmjs.com/package/llamaindex
 
 ## Community
 
diff --git a/apps/docs/docs/modules/index.md b/apps/docs/docs/modules/index.md
index 9293c7e880d86c4d377b041724982c68472eaf12..a5f91feb510b4577570773da1291cb8f13026a64 100644
--- a/apps/docs/docs/modules/index.md
+++ b/apps/docs/docs/modules/index.md
@@ -12,7 +12,7 @@ LlamaIndex.TS offers several core modules, seperated into high-level modules for
 
 - [**Indexes**](./high_level/data_index.md): indexes store the Nodes and the embeddings of those nodes.
 
--[**QueryEngine**](./high_level/query_engine.md): Query engines are what generate the query you put in and give you back the result. Query engines generally combine a pre-built prompt with selected nodes from your Index to give the LLM the context it needs to answer your query.
+- [**QueryEngine**](./high_level/query_engine.md): Query engines are what generate the query you put in and give you back the result. Query engines generally combine a pre-built prompt with selected nodes from your Index to give the LLM the context it needs to answer your query.
 
 - [**ChatEngine**](./high_level/chat_engine.md): A ChatEngine helps you build a chatbot that will interact with your Indexes.
 
diff --git a/apps/docs/docs/starter.md b/apps/docs/docs/starter.md
index 18099487e88a89f76485e4a37743cc76c4af999c..8c36ef537eadd98d2af3ace85150981845e72ed8 100644
--- a/apps/docs/docs/starter.md
+++ b/apps/docs/docs/starter.md
@@ -11,7 +11,7 @@ In a new folder:
 ```bash npm2yarn
 npm install typescript
 npm install @types/node
-npx tsc –-init # if needed
+npx tsc --init # if needed
 ```
 
 Create the file `example.ts`. This code will load some example data, create a document, index it (which creates embeddings using OpenAI), and then creates query engine to answer questions about the data.
diff --git a/apps/docs/src/pages/index.tsx b/apps/docs/src/pages/index.tsx
index 6613236da5b1f02810cab35006cd86f007cf7c0f..42e43fa4e8ba3b800a947df3a1f81e0dab310ea5 100644
--- a/apps/docs/src/pages/index.tsx
+++ b/apps/docs/src/pages/index.tsx
@@ -24,8 +24,8 @@ export default function Home(): JSX.Element {
   const { siteConfig } = useDocusaurusContext();
   return (
     <Layout
-      title={`Hello from ${siteConfig.title}`}
-      description="Description will go into a meta tag in <head />"
+      title={`${siteConfig.title}`}
+      description="LlamaIndex is a data framework for your LLM application. Use your own data with large language models (LLMs, OpenAI ChatGPT and others) in Typescript and Javascript."
     >
       <HomepageHeader />
       <main>
diff --git a/apps/simple/README.md b/apps/simple/README.md
index 61658cbfeb159d90073be258aea114b612475542..b7f57b3dc67949ad8e3b9e89e6cba423fe97a471 100644
--- a/apps/simple/README.md
+++ b/apps/simple/README.md
@@ -1,5 +1,9 @@
 # Simple Examples
 
-Due to packaging, you will need to run `pnpm --filter llamaindex build` before running these examples.
+Due to packaging, you will need to run these commands to get started.
+```bash
+pnpm --filter llamaindex build
+pnpm install
+```
 
-Run them with ts-node, for example `npx ts-node vectorIndex.ts`
+Then run the examples with `ts-node`, for example `npx ts-node vectorIndex.ts`
diff --git a/apps/simple/llamadeuce.ts b/apps/simple/llamadeuce.ts
index 9edae4437449c727343bda314e6a09728e619bd2..d7b309481369318566507a0adf61ae90e2b5de46 100644
--- a/apps/simple/llamadeuce.ts
+++ b/apps/simple/llamadeuce.ts
@@ -1,4 +1,4 @@
-import { LlamaDeuce } from "llamaindex/src/llm/LLM";
+import { LlamaDeuce } from "llamaindex";
 
 (async () => {
   const deuce = new LlamaDeuce();
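For reference, `apps/docs/docs/starter.md` above describes an `example.ts` that loads data, builds an index (creating embeddings with OpenAI), and answers a question about the data. A minimal sketch of such a file follows; it is an illustration rather than the exact tutorial code: the data path and the question are placeholders, it assumes `OPENAI_API_KEY` is set, and it assumes the `llamaindex` package's `Document`, `VectorStoreIndex.fromDocuments`, and `asQueryEngine` APIs (the exact `query` signature may differ between versions).

```typescript
import fs from "node:fs/promises";
import { Document, VectorStoreIndex } from "llamaindex";

async function main() {
  // Load some example data from disk (placeholder path).
  const text = await fs.readFile("./data/example.txt", "utf-8");

  // Wrap the raw text in a Document.
  const document = new Document({ text });

  // Index the document; this step creates embeddings via OpenAI,
  // so OPENAI_API_KEY must be set in the environment.
  const index = await VectorStoreIndex.fromDocuments([document]);

  // Build a query engine over the index and ask a question about the data.
  const queryEngine = index.asQueryEngine();
  const response = await queryEngine.query("What is this document about?");

  console.log(response.toString());
}

main().catch(console.error);
```

It can be run the same way as the simple examples, e.g. `npx ts-node example.ts`.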