Skip to content
Snippets Groups Projects
Unverified Commit c3747d09 authored by Alex Yang's avatar Alex Yang Committed by GitHub
Browse files

feat: add nextjs plugin for `llamaindex` (#824)

parent 24a39aef
No related branches found
No related tags found
No related merge requests found
Showing
with 122 additions and 347 deletions
---
"llamaindex": patch
"@llamaindex/core-e2e": patch
"@llamaindex/next-agent-test": patch
"@llamaindex/nextjs-edge-runtime-test": patch
---
fix: import `@xenova/transformers`
For now, if you use LlamaIndex in a Next.js project, you need to add the plugin from `llamaindex/next` to ensure that module resolution works correctly.
......@@ -78,6 +78,17 @@ node --import tsx ./main.ts
### Next.js
First, you will need to add the LlamaIndex plugin to your Next.js project.
```js
// next.config.js
const withLlamaIndex = require("llamaindex/next");
module.exports = withLlamaIndex({
// your next.js config
});
```
You can combine `ai` with `llamaindex` in Next.js with RSC (React Server Components).
```tsx
......
/** @type {import('next').NextConfig} */
const nextConfig = {};

import withLlamaIndex from "llamaindex/next";

// Wrap the base config so the llamaindex plugin can patch webpack module
// resolution (aliases for `sharp` / `onnxruntime-node`) — see `llamaindex/next`.
export default withLlamaIndex(nextConfig);
/* Styles for the edge-runtime test app's home page.
   Layout follows the stock create-next-app template. */

/* Full-height column layout for the page's <main> element. */
.main {
display: flex;
flex-direction: column;
justify-content: space-between;
align-items: center;
padding: 6rem;
min-height: 100vh;
}
/* Top callout strip ("Get started by editing ..."). */
.description {
display: inherit;
justify-content: inherit;
align-items: inherit;
font-size: 0.85rem;
max-width: var(--max-width);
width: 100%;
z-index: 2;
font-family: var(--font-mono);
}
.description a {
display: flex;
justify-content: center;
align-items: center;
gap: 0.5rem;
}
.description p {
position: relative;
margin: 0;
padding: 1rem;
background-color: rgba(var(--callout-rgb), 0.5);
border: 1px solid rgba(var(--callout-border-rgb), 0.3);
border-radius: var(--border-radius);
}
/* Inline code snippets inside the callout. */
.code {
font-weight: 700;
font-family: var(--font-mono);
}
/* Four-column card grid at the bottom of the page. */
.grid {
display: grid;
grid-template-columns: repeat(4, minmax(25%, auto));
max-width: 100%;
width: var(--max-width);
}
/* Individual link card; background/border fade in on hover (see media query). */
.card {
padding: 1rem 1.2rem;
border-radius: var(--border-radius);
background: rgba(var(--card-rgb), 0);
border: 1px solid rgba(var(--card-border-rgb), 0);
transition:
background 200ms,
border 200ms;
}
/* The "->" arrow; slides right on hover. */
.card span {
display: inline-block;
transition: transform 200ms;
}
.card h2 {
font-weight: 600;
margin-bottom: 0.7rem;
}
.card p {
margin: 0;
opacity: 0.6;
font-size: 0.9rem;
line-height: 1.5;
max-width: 30ch;
text-wrap: balance;
}
/* Center hero area holding the Next.js logo with glow pseudo-elements. */
.center {
display: flex;
justify-content: center;
align-items: center;
position: relative;
padding: 4rem 0;
}
.center::before {
background: var(--secondary-glow);
border-radius: 50%;
width: 480px;
height: 360px;
margin-left: -400px;
}
.center::after {
background: var(--primary-glow);
width: 240px;
height: 180px;
z-index: -1;
}
/* Shared glow setup for both pseudo-elements. */
.center::before,
.center::after {
content: "";
left: 50%;
position: absolute;
filter: blur(45px);
transform: translateZ(0);
}
.logo {
position: relative;
}
/* Enable hover only on non-touch devices */
@media (hover: hover) and (pointer: fine) {
.card:hover {
background: rgba(var(--card-rgb), 0.1);
border: 1px solid rgba(var(--card-border-rgb), 0.15);
}
.card:hover span {
transform: translateX(4px);
}
}
/* Respect reduced-motion preference: no arrow slide. */
@media (prefers-reduced-motion) {
.card:hover span {
transform: none;
}
}
/* Mobile */
@media (max-width: 700px) {
.content {
padding: 4rem;
}
/* Single-column cards on small screens. */
.grid {
grid-template-columns: 1fr;
margin-bottom: 120px;
max-width: 320px;
text-align: center;
}
.card {
padding: 1rem 2.5rem;
}
.card h2 {
margin-bottom: 0.5rem;
}
.center {
padding: 8rem 0 6rem;
}
.center::before {
transform: none;
height: 300px;
}
.description {
font-size: 0.8rem;
}
.description a {
padding: 1rem;
}
/* Pin the callout to the top and the footer div to the bottom. */
.description p,
.description div {
display: flex;
justify-content: center;
position: fixed;
width: 100%;
}
.description p {
align-items: center;
inset: 0 0 auto;
padding: 2rem 1rem 1.4rem;
border-radius: 0;
border: none;
border-bottom: 1px solid rgba(var(--callout-border-rgb), 0.25);
background: linear-gradient(
to bottom,
rgba(var(--background-start-rgb), 1),
rgba(var(--callout-rgb), 0.5)
);
background-clip: padding-box;
backdrop-filter: blur(24px);
}
.description div {
align-items: flex-end;
pointer-events: none;
inset: auto 0 0;
padding: 2rem;
height: 200px;
background: linear-gradient(
to bottom,
transparent 0%,
rgb(var(--background-end-rgb)) 40%
);
z-index: 1;
}
}
/* Tablet and Smaller Desktop */
@media (min-width: 701px) and (max-width: 1120px) {
.grid {
grid-template-columns: repeat(2, 50%);
}
}
/* Invert logos for dark color scheme. */
@media (prefers-color-scheme: dark) {
.vercelLogo {
filter: invert(1);
}
.logo {
filter: invert(1) drop-shadow(0 0 0.3rem #ffffff70);
}
}
/* Spinner animation (referenced elsewhere; not used by rules above). */
@keyframes rotate {
from {
transform: rotate(360deg);
}
to {
transform: rotate(0deg);
}
}
import Image from "next/image";
import "../utils/llm";
import styles from "./page.module.css";
import { tokenizerResultPromise } from "@/utils/llm";
import { use } from "react";
export const runtime = "edge";
/**
 * Home page of the edge-runtime test app.
 *
 * Suspends (via React `use`) until `tokenizerResultPromise` settles, then
 * renders the resulting token ids — demonstrating that `@xenova/transformers`
 * tokenization works inside the edge runtime.
 */
export default function Home() {
// Suspends rendering until the tokenizer promise from utils/llm resolves.
const result = use(tokenizerResultPromise);
return (
<main className={styles.main}>
<div className={styles.description}>
<p>
Get started by editing&nbsp;
<code className={styles.code}>src/app/page.tsx</code>
</p>
<main>
<div>
<h1>Next.js Edge Runtime</h1>
<div>
<a
href="https://vercel.com?utm_source=create-next-app&utm_medium=appdir-template&utm_campaign=create-next-app"
target="_blank"
rel="noopener noreferrer"
>
By{" "}
<Image
src="/vercel.svg"
alt="Vercel Logo"
className={styles.vercelLogo}
width={100}
height={24}
priority
/>
</a>
{result.map((value, index) => (
<span key={index}>{value}</span>
))}
</div>
</div>
<div className={styles.center}>
<Image
className={styles.logo}
src="/next.svg"
alt="Next.js Logo"
width={180}
height={37}
priority
/>
</div>
<div className={styles.grid}>
<a
href="https://nextjs.org/docs?utm_source=create-next-app&utm_medium=appdir-template&utm_campaign=create-next-app"
className={styles.card}
target="_blank"
rel="noopener noreferrer"
>
<h2>
Docs <span>-&gt;</span>
</h2>
<p>Find in-depth information about Next.js features and API.</p>
</a>
<a
href="https://nextjs.org/learn?utm_source=create-next-app&utm_medium=appdir-template&utm_campaign=create-next-app"
className={styles.card}
target="_blank"
rel="noopener noreferrer"
>
<h2>
Learn <span>-&gt;</span>
</h2>
<p>Learn about Next.js in an interactive course with&nbsp;quizzes!</p>
</a>
<a
href="https://vercel.com/templates?framework=next.js&utm_source=create-next-app&utm_medium=appdir-template&utm_campaign=create-next-app"
className={styles.card}
target="_blank"
rel="noopener noreferrer"
>
<h2>
Templates <span>-&gt;</span>
</h2>
<p>Explore starter templates for Next.js.</p>
</a>
<a
href="https://vercel.com/new?utm_source=create-next-app&utm_medium=appdir-template&utm_campaign=create-next-app"
className={styles.card}
target="_blank"
rel="noopener noreferrer"
>
<h2>
Deploy <span>-&gt;</span>
</h2>
<p>
Instantly deploy your Next.js site to a shareable URL with Vercel.
</p>
</a>
</div>
</main>
{/* NOTE(review): the original markup never closed `.description` or the
outer <main>, which is invalid JSX — the two closers below fix that while
preserving all original content. Confirm against the intended layout. */}
</div>
</main>
);
}
"use server";
// test runtime
import "llamaindex";
import { ClipEmbedding } from "llamaindex/embeddings/ClipEmbedding";
import "llamaindex/readers/SimpleDirectoryReader";
// Fail fast if this module is evaluated outside an edge runtime — the whole
// point of this test module is to exercise llamaindex under EdgeRuntime.
// `EdgeRuntime` is a global (a string) present only in the edge runtime,
// hence the @ts-expect-error on the untyped global below.
// @ts-expect-error
if (typeof EdgeRuntime !== "string") {
throw new Error("Expected run in EdgeRuntime");
}
export const tokenizerResultPromise = new Promise<number[]>(
(resolve, reject) => {
const embedding = new ClipEmbedding();
//#region make sure @xenova/transformers is working in edge runtime
embedding
.getTokenizer()
.then((tokenizer) => {
resolve(tokenizer.encode("hello world"));
})
.catch(reject);
//#endregion
},
);
{
"compilerOptions": {
"target": "ESNext",
"lib": ["dom", "dom.iterable", "esnext"],
"outDir": "./dist",
"allowJs": true,
......
import _ from "lodash";
import type { ImageType } from "../Node.js";
import { lazyLoadTransformers } from "../internal/deps/transformers.js";
import { MultiModalEmbedding } from "./MultiModalEmbedding.js";
// only import type, to avoid bundling error
import type {
CLIPTextModelWithProjection,
CLIPVisionModelWithProjection,
PreTrainedTokenizer,
Processor,
} from "@xenova/transformers";
async function readImage(input: ImageType) {
const { RawImage } = await import(
/* webpackIgnore: true */
"@xenova/transformers"
);
const { RawImage } = await lazyLoadTransformers();
if (input instanceof Blob) {
return await RawImage.fromBlob(input);
} else if (_.isString(input) || input instanceof URL) {
......@@ -25,39 +30,30 @@ export class ClipEmbedding extends MultiModalEmbedding {
modelType: ClipEmbeddingModelType =
ClipEmbeddingModelType.XENOVA_CLIP_VIT_BASE_PATCH16;
private tokenizer: any;
private processor: any;
private visionModel: any;
private textModel: any;
private tokenizer: PreTrainedTokenizer | null = null;
private processor: Processor | null = null;
private visionModel: CLIPVisionModelWithProjection | null = null;
private textModel: CLIPTextModelWithProjection | null = null;
async getTokenizer() {
const { AutoTokenizer } = await lazyLoadTransformers();
if (!this.tokenizer) {
const { AutoTokenizer } = await import(
/* webpackIgnore: true */
"@xenova/transformers"
);
this.tokenizer = await AutoTokenizer.from_pretrained(this.modelType);
}
return this.tokenizer;
}
async getProcessor() {
const { AutoProcessor } = await lazyLoadTransformers();
if (!this.processor) {
const { AutoProcessor } = await import(
/* webpackIgnore: true */
"@xenova/transformers"
);
this.processor = await AutoProcessor.from_pretrained(this.modelType);
}
return this.processor;
}
async getVisionModel() {
const { CLIPVisionModelWithProjection } = await lazyLoadTransformers();
if (!this.visionModel) {
const { CLIPVisionModelWithProjection } = await import(
/* webpackIgnore: true */
"@xenova/transformers"
);
this.visionModel = await CLIPVisionModelWithProjection.from_pretrained(
this.modelType,
);
......@@ -67,11 +63,8 @@ export class ClipEmbedding extends MultiModalEmbedding {
}
async getTextModel() {
const { CLIPTextModelWithProjection } = await lazyLoadTransformers();
if (!this.textModel) {
const { CLIPTextModelWithProjection } = await import(
/* webpackIgnore: true */
"@xenova/transformers"
);
this.textModel = await CLIPTextModelWithProjection.from_pretrained(
this.modelType,
);
......
import { lazyLoadTransformers } from "../internal/deps/transformers.js";
import { BaseEmbedding } from "./types.js";
export enum HuggingFaceEmbeddingModelType {
......@@ -31,7 +32,7 @@ export class HuggingFaceEmbedding extends BaseEmbedding {
async getExtractor() {
if (!this.extractor) {
const { pipeline } = await import("@xenova/transformers");
const { pipeline } = await lazyLoadTransformers();
this.extractor = await pipeline("feature-extraction", this.modelType, {
quantized: this.quantized,
});
......
// Cached module instance so the dynamic import happens at most once.
let transformer: typeof import("@xenova/transformers") | null = null;

/**
 * Import `@xenova/transformers` on first use and memoize the module.
 * On every call, re-applies environment tweaks for the current runtime.
 */
export async function lazyLoadTransformers() {
  transformer ??= await import("@xenova/transformers");
  // @ts-expect-error
  const runningOnEdge = typeof EdgeRuntime === "string";
  if (runningOnEdge) {
    // there is no local file system in the edge runtime
    transformer.env.allowLocalModels = false;
  }
  // fixme: handle cloudflare workers case here?
  return transformer;
}
/**
 * This is a Next.js configuration file that is used to customize the build process.
 *
 * @example
 * ```js
 * // next.config.js
 * const withLlamaIndex = require("llamaindex/next")
 *
 * module.exports = withLlamaIndex({
 *   // Your Next.js configuration
 * })
 * ```
 *
 * This is only for Next.js projects, do not export this function on top-level.
 *
 * @module
 */
export default function withLlamaIndex(config: any) {
  const userWebpack = config.webpack;
  //#region hack for `@xenova/transformers`
  // Ignore node-specific modules when bundling for the browser
  // See https://webpack.js.org/configuration/resolve/#resolvealias
  config.webpack = function (webpackConfig: any, options: any) {
    if (userWebpack) {
      // Forward the full (config, options) pair — Next.js passes a second
      // argument ({ isServer, dev, ... }) that user hooks rely on; the
      // previous version dropped it.
      webpackConfig = userWebpack(webpackConfig, options);
    }
    // Alias node-only packages to `false` so webpack treats them as empty
    // modules, while preserving any aliases the user already configured.
    webpackConfig.resolve.alias = {
      ...webpackConfig.resolve.alias,
      sharp$: false,
      "onnxruntime-node$": false,
    };
    return webpackConfig;
  };
  //#endregion
  return config;
}
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment