diff --git a/packages/create-llama/templates/types/simple/fastapi_auth/README-template.md b/packages/create-llama/templates/types/simple/fastapi_auth/README-template.md
deleted file mode 100644
index f0b92bdfce648a374bd2723fee4ceec69605db69..0000000000000000000000000000000000000000
--- a/packages/create-llama/templates/types/simple/fastapi_auth/README-template.md
+++ /dev/null
@@ -1,73 +0,0 @@
-This is a [LlamaIndex](https://www.llamaindex.ai/) project using [FastAPI](https://fastapi.tiangolo.com/) bootstrapped with [`create-llama`](https://github.com/run-llama/LlamaIndexTS/tree/main/packages/create-llama).
-
-## Getting Started
-
-First, set up the environment:
-
-```
-poetry install
-poetry shell
-```
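-
-The app loads environment variables from a `.env` file (via `python-dotenv`) and builds its index with an OpenAI model, so an API key is required. A minimal `.env` (a sketch; add any other variables your deployment needs):
-
-```
-OPENAI_API_KEY=<your-openai-api-key>
-```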
-
-Second, run the development server:
-
-```
-python main.py
-```
-
-Then call the API endpoint `/api/chat` to see the result:
-
-```
-curl --location 'localhost:8000/api/chat' \
---header 'Content-Type: application/json' \
---data '{ "messages": [{ "role": "user", "content": "Hello" }] }'
-```
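-
-Given the `_Result` model in `app/api/routers/chat.py`, the response takes this shape (the assistant text will vary):
-
-```
-{ "result": { "role": "assistant", "content": "..." } }
-```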
-
-You can start editing the API by modifying `app/api/routers/chat.py`. The endpoint auto-updates as you save the file.
-
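-For example, you could add another endpoint next to the chat route (a minimal sketch; the `/ping` handler is a hypothetical addition, served at `/api/chat/ping` because of the router prefix):
-
-```
-@r.get("/ping")
-async def ping():
-    return {"status": "ok"}
-```
-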
-Open [http://localhost:8000/docs](http://localhost:8000/docs) with your browser to see the Swagger UI of the API.
-
-The API allows CORS for all origins to simplify development. You can change this behavior by setting the `ENVIRONMENT` environment variable to `prod`:
-
-```
-ENVIRONMENT=prod uvicorn main:app
-```
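-
-In `prod` mode no CORS middleware is registered at all, so cross-origin browser clients are blocked. If you need them, you could register the middleware with an explicit origin list (a sketch; replace the origin with your frontend's URL):
-
-```
-app.add_middleware(
-    CORSMiddleware,
-    allow_origins=["https://your-frontend.example.com"],
-    allow_methods=["*"],
-    allow_headers=["*"],
-)
-```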
-
-## Learn More
-
-To learn more about LlamaIndex, take a look at the following resources:
-
-- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex.
-
-You can check out [the LlamaIndex GitHub repository](https://github.com/run-llama/llama_index) - your feedback and contributions are welcome!
diff --git a/packages/create-llama/templates/types/simple/fastapi_auth/app/__init__.py b/packages/create-llama/templates/types/simple/fastapi_auth/app/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/packages/create-llama/templates/types/simple/fastapi_auth/app/api/__init__.py b/packages/create-llama/templates/types/simple/fastapi_auth/app/api/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/packages/create-llama/templates/types/simple/fastapi_auth/app/api/routers/__init__.py b/packages/create-llama/templates/types/simple/fastapi_auth/app/api/routers/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/packages/create-llama/templates/types/simple/fastapi_auth/app/api/routers/chat.py b/packages/create-llama/templates/types/simple/fastapi_auth/app/api/routers/chat.py
deleted file mode 100644
index 81f602edbeae66c5850b30a6183c009ab4b1e014..0000000000000000000000000000000000000000
--- a/packages/create-llama/templates/types/simple/fastapi_auth/app/api/routers/chat.py
+++ /dev/null
@@ -1,56 +0,0 @@
-from typing import List
-
-from app.utils.index import get_index
-from fastapi import APIRouter, Depends, HTTPException, status
-from llama_index import VectorStoreIndex
-from llama_index.llms.base import MessageRole, ChatMessage
-from pydantic import BaseModel
-
-chat_router = r = APIRouter()
-
-
-class _Message(BaseModel):
-    role: MessageRole
-    content: str
-
-
-class _ChatData(BaseModel):
-    messages: List[_Message]
-
-
-class _Result(BaseModel):
-    result: _Message
-
-
-@r.post("")
-async def chat(
-    data: _ChatData,
-    index: VectorStoreIndex = Depends(get_index),
-) -> _Result:
-    # check preconditions and get last message
-    if len(data.messages) == 0:
-        raise HTTPException(
-            status_code=status.HTTP_400_BAD_REQUEST,
-            detail="No messages provided",
-        )
-    last_message = data.messages.pop()
-    if last_message.role != MessageRole.USER:
-        raise HTTPException(
-            status_code=status.HTTP_400_BAD_REQUEST,
-            detail="Last message must be from user",
-        )
-    # convert messages coming from the request to type ChatMessage
-    messages = [
-        ChatMessage(
-            role=m.role,
-            content=m.content,
-        )
-        for m in data.messages
-    ]
-
-    # query chat engine
-    chat_engine = index.as_chat_engine()
-    response = chat_engine.chat(last_message.content, messages)
-    return _Result(
-        result=_Message(role=MessageRole.ASSISTANT, content=response.response)
-    )
diff --git a/packages/create-llama/templates/types/simple/fastapi_auth/app/utils/__init__.py b/packages/create-llama/templates/types/simple/fastapi_auth/app/utils/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/packages/create-llama/templates/types/simple/fastapi_auth/app/utils/index.py b/packages/create-llama/templates/types/simple/fastapi_auth/app/utils/index.py
deleted file mode 100644
index 43472724877e48853c6bd5df0c19ca9a0aa9371d..0000000000000000000000000000000000000000
--- a/packages/create-llama/templates/types/simple/fastapi_auth/app/utils/index.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import logging
-import os
-
-from llama_index import (
-    SimpleDirectoryReader,
-    ServiceContext,
-    StorageContext,
-    VectorStoreIndex,
-    load_index_from_storage,
-)
-from llama_index.llms import OpenAI
-
-
-STORAGE_DIR = "./storage"  # directory to cache the generated index
-DATA_DIR = "./data"  # directory containing the documents to index
-
-service_context = ServiceContext.from_defaults(
-    llm=OpenAI(model="gpt-3.5-turbo")
-)
-
-
-def get_index():
-    logger = logging.getLogger("uvicorn")
-    # check if storage already exists
-    if not os.path.exists(STORAGE_DIR):
-        logger.info("Creating new index")
-        # load the documents and create the index
-        documents = SimpleDirectoryReader(DATA_DIR).load_data()
-        index = VectorStoreIndex.from_documents(documents, service_context=service_context)
-        # store it for later
-        index.storage_context.persist(STORAGE_DIR)
-        logger.info(f"Finished creating new index. Stored in {STORAGE_DIR}")
-    else:
-        # load the existing index
-        logger.info(f"Loading index from {STORAGE_DIR}...")
-        storage_context = StorageContext.from_defaults(persist_dir=STORAGE_DIR)
-        index = load_index_from_storage(storage_context, service_context=service_context)
-        logger.info(f"Finished loading index from {STORAGE_DIR}")
-    return index
diff --git a/packages/create-llama/templates/types/simple/fastapi_auth/data/101.pdf b/packages/create-llama/templates/types/simple/fastapi_auth/data/101.pdf
deleted file mode 100644
index ae5acffd5398b7c59e2df9e6dead2d99128b719c..0000000000000000000000000000000000000000
Binary files a/packages/create-llama/templates/types/simple/fastapi_auth/data/101.pdf and /dev/null differ
diff --git a/packages/create-llama/templates/types/simple/fastapi_auth/gitignore b/packages/create-llama/templates/types/simple/fastapi_auth/gitignore
deleted file mode 100644
index 069fcb4020566da83dbc398c06dce5bbe92aface..0000000000000000000000000000000000000000
--- a/packages/create-llama/templates/types/simple/fastapi_auth/gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-__pycache__
-storage
diff --git a/packages/create-llama/templates/types/simple/fastapi_auth/main.py b/packages/create-llama/templates/types/simple/fastapi_auth/main.py
deleted file mode 100644
index 9dc1a0afb6634f319538e8545bdd647693dffbfb..0000000000000000000000000000000000000000
--- a/packages/create-llama/templates/types/simple/fastapi_auth/main.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import logging
-import os
-import uvicorn
-from app.api.routers.chat import chat_router
-from fastapi import FastAPI
-from fastapi.middleware.cors import CORSMiddleware
-from dotenv import load_dotenv
-
-load_dotenv()
-
-app = FastAPI()
-
-environment = os.getenv("ENVIRONMENT", "dev")  # Default to 'dev' if not set
-
-
-if environment == "dev":
-    logger = logging.getLogger("uvicorn")
-    logger.warning("Running in development mode - allowing CORS for all origins")
-    app.add_middleware(
-        CORSMiddleware,
-        allow_origins=["*"],
-        allow_credentials=True,
-        allow_methods=["*"],
-        allow_headers=["*"],
-    )
-
-app.include_router(chat_router, prefix="/api/chat")
-
-
-if __name__ == "__main__":
-    uvicorn.run(app="main:app", host="0.0.0.0", reload=True)
diff --git a/packages/create-llama/templates/types/simple/fastapi_auth/pyproject.toml b/packages/create-llama/templates/types/simple/fastapi_auth/pyproject.toml
deleted file mode 100644
index 59d182bbb47a8d0ee06de3550f2c7fbf954a3901..0000000000000000000000000000000000000000
--- a/packages/create-llama/templates/types/simple/fastapi_auth/pyproject.toml
+++ /dev/null
@@ -1,19 +0,0 @@
-[tool.poetry]
-name = "llamaindex-fastapi"
-version = "0.1.0"
-description = ""
-authors = ["Marcus Schiesser <mail@marcusschiesser.de>"]
-readme = "README.md"
-
-[tool.poetry.dependencies]
-python = "^3.11,<3.12"
-fastapi = "^0.104.1"
-uvicorn = { extras = ["standard"], version = "^0.23.2" }
-llama-index = "^0.8.56"
-pypdf = "^3.17.0"
-python-dotenv = "^1.0.0"
-
-
-[build-system]
-requires = ["poetry-core"]
-build-backend = "poetry.core.masonry.api"
diff --git a/packages/create-llama/templates/types/simple/fastapi_auth/tests/__init__.py b/packages/create-llama/templates/types/simple/fastapi_auth/tests/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000