diff --git a/llama-index-cli/llama_index/cli/rag/base.py b/llama-index-cli/llama_index/cli/rag/base.py
index 105362a9e2c8709c29b51c1f1c3e567ef6a6ddc8..204c61cfcecea7cd754bb12bd37bba08071bcd68 100644
--- a/llama-index-cli/llama_index/cli/rag/base.py
+++ b/llama-index-cli/llama_index/cli/rag/base.py
@@ -1,5 +1,6 @@
 import asyncio
 import os
+import shlex
 import shutil
 from argparse import ArgumentParser
 from glob import iglob
@@ -14,8 +15,8 @@ from llama_index.core import (
 from llama_index.core.base.embeddings.base import BaseEmbedding
 from llama_index.core.base.response.schema import (
     RESPONSE_TYPE,
-    StreamingResponse,
     Response,
+    StreamingResponse,
 )
 from llama_index.core.bridge.pydantic import BaseModel, Field, field_validator
 from llama_index.core.chat_engine import CondenseQuestionChatEngine
@@ -159,7 +160,7 @@ class RagCLI(BaseModel):
         if chat_engine is not None:
             return chat_engine
 
-        if values.get("query_pipeline", None) is None:
+        if values.get("query_pipeline") is None:
             values["query_pipeline"] = cls.query_pipeline_from_ingestion_pipeline(
                 query_pipeline=None, values=values
             )
@@ -231,7 +232,8 @@ class RagCLI(BaseModel):
 
             # Append the `--files` argument to the history file
             with open(f"{self.persist_dir}/{RAG_HISTORY_FILE_NAME}", "a") as f:
-                f.write(str(files) + "\n")
+                for file in files:
+                    f.write(str(file) + "\n")
 
         if create_llama:
             if shutil.which("npx") is None:
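Note on the hunk above: the old code wrote the Python repr of the whole list (e.g. "['a.txt', 'b.txt']") as a single line, while the new loop writes one path per line, so the history file can be consumed line by line. A minimal sketch of the difference, assuming a stand-in file name rather than the real RAG_HISTORY_FILE_NAME:

    # Illustrative only; "rag_history.txt" is a placeholder file name.
    files = ["docs/a.txt", "docs/b.txt"]

    with open("rag_history.txt", "a") as f:
        for file in files:
            f.write(str(file) + "\n")   # one path per line instead of str(files)

    with open("rag_history.txt") as f:
        history = [line.strip() for line in f]   # ["docs/a.txt", "docs/b.txt"]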
@@ -289,7 +291,7 @@ class RagCLI(BaseModel):
                                 "none",
                                 "--engine",
                                 "context",
-                                f"--files {path}",
+                                f"--files {shlex.quote(path)}",
                             ]
                             os.system(" ".join(command_args))
 
diff --git a/llama-index-cli/pyproject.toml b/llama-index-cli/pyproject.toml
index 812b8379f013c57a10b3f14ce3abc357e688085b..0723406f04afb9b734d219024bc0d1662744c4b2 100644
--- a/llama-index-cli/pyproject.toml
+++ b/llama-index-cli/pyproject.toml
@@ -14,7 +14,7 @@ disallow_untyped_defs = true
 # Remove venv skip when integrated with pre-commit
 exclude = ["_static", "build", "examples", "notebooks", "venv"]
 ignore_missing_imports = true
-python_version = "3.8"
+python_version = "3.9"
 
 [tool.poetry]
 authors = ["llamaindex"]
@@ -32,7 +32,7 @@ maintainers = [
 name = "llama-index-cli"
 packages = [{include = "llama_index/"}]
 readme = "README.md"
-version = "0.4.0"
+version = "0.4.1"
 
 [tool.poetry.dependencies]
 python = ">=3.9,<4.0"