diff --git a/README.md b/README.md
index 0553b35866c80d77f9376912e4ae93b5a1107738..163cc2cdb7a176d8bca552d818d5ef7383f43cfe 100644
--- a/README.md
+++ b/README.md
@@ -76,7 +76,7 @@ main();
 node --import tsx ./main.ts
 ```
 
-### Next.js
+### React Server Components (Next.js, Waku, RedwoodJS, ...)
 
 First, you will need to add a llamaindex plugin to your Next.js project.
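 
 A minimal sketch of that wiring, assuming the plugin is the `withLlamaIndex` helper exported from `llamaindex/next` (check the exact import against your installed version):
 
 ```js
 // next.config.js
 const withLlamaIndex = require("llamaindex/next");
 
 // withLlamaIndex wraps your existing Next.js config so llamaindex's
 // server-side dependencies are bundled and resolved correctly.
 module.exports = withLlamaIndex({
   // your existing Next.js config options go here
 });
 ```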
 
@@ -154,40 +154,6 @@ export async function chatWithAgent(
 }
 ```
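 
 As a usage sketch only (the `Chat` component below and the exact `chatWithAgent` signature are assumptions, not part of this README), the server action can then be invoked from a client component:
 
 ```tsx
 // app/chat.tsx (hypothetical); assumes chatWithAgent(message: string)
 // resolves to the agent's reply as a plain string.
 "use client";
 
 import { useState } from "react";
 import { chatWithAgent } from "./actions";
 
 export function Chat() {
   const [reply, setReply] = useState("");
   return (
     <div>
       <button
         onClick={async () => {
           // Server actions are called like async functions from the client.
           setReply(await chatWithAgent("What is the weather today?"));
         }}
       >
         Ask the agent
       </button>
       <p>{reply}</p>
     </div>
   );
 }
 ```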
 
-### Cloudflare Workers
-
-```ts
-// src/index.ts
-export default {
-  async fetch(
-    request: Request,
-    env: Env,
-    ctx: ExecutionContext,
-  ): Promise<Response> {
-    const { setEnvs } = await import("@llamaindex/env");
-    // set environment variables so that the OpenAIAgent can use them
-    setEnvs(env);
-    const { OpenAIAgent } = await import("llamaindex");
-    const agent = new OpenAIAgent({
-      tools: [],
-    });
-    const responseStream = await agent.chat({
-      stream: true,
-      message: "Hello? What is the weather today?",
-    });
-    const textEncoder = new TextEncoder();
-    const response = responseStream.pipeThrough(
-      new TransformStream({
-        transform: (chunk, controller) => {
-          controller.enqueue(textEncoder.encode(chunk.response.delta));
-        },
-      }),
-    );
-    return new Response(response);
-  },
-};
-```
-
 ## Playground
 
 Check out our Next.js playground at https://llama-playground.vercel.app/. The source is available at https://github.com/run-llama/ts-playground.