Unverified commit b57e7673, authored by Massimiliano Pippi, committed by GitHub

fix: escape user input before shelling out command (#17953)

parent 2c00a1df
 import asyncio
 import os
+import shlex
 import shutil
 from argparse import ArgumentParser
 from glob import iglob
@@ -14,8 +15,8 @@ from llama_index.core import (
 from llama_index.core.base.embeddings.base import BaseEmbedding
 from llama_index.core.base.response.schema import (
     RESPONSE_TYPE,
-    StreamingResponse,
     Response,
+    StreamingResponse,
 )
 from llama_index.core.bridge.pydantic import BaseModel, Field, field_validator
 from llama_index.core.chat_engine import CondenseQuestionChatEngine
@@ -159,7 +160,7 @@ class RagCLI(BaseModel):
         if chat_engine is not None:
             return chat_engine
-        if values.get("query_pipeline", None) is None:
+        if values.get("query_pipeline") is None:
             values["query_pipeline"] = cls.query_pipeline_from_ingestion_pipeline(
                 query_pipeline=None, values=values
             )
@@ -231,7 +232,8 @@ class RagCLI(BaseModel):
         # Append the `--files` argument to the history file
         with open(f"{self.persist_dir}/{RAG_HISTORY_FILE_NAME}", "a") as f:
-            f.write(str(files) + "\n")
+            for file in files:
+                f.write(str(file) + "\n")

         if create_llama:
             if shutil.which("npx") is None:
@@ -289,7 +291,7 @@ class RagCLI(BaseModel):
                 "none",
                 "--engine",
                 "context",
-                f"--files {path}",
+                f"--files {shlex.quote(path)}",
             ]

             os.system(" ".join(command_args))
@@ -14,7 +14,7 @@ disallow_untyped_defs = true
 # Remove venv skip when integrated with pre-commit
 exclude = ["_static", "build", "examples", "notebooks", "venv"]
 ignore_missing_imports = true
-python_version = "3.8"
+python_version = "3.9"

 [tool.poetry]
 authors = ["llamaindex"]
@@ -32,7 +32,7 @@ maintainers = [
 name = "llama-index-cli"
 packages = [{include = "llama_index/"}]
 readme = "README.md"
-version = "0.4.0"
+version = "0.4.1"

 [tool.poetry.dependencies]
 python = ">=3.9,<4.0"
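As background for the escaping fix above, here is a minimal sketch (not part of this patch; the command and path value are invented for illustration) of how shlex.quote() keeps user-supplied input from being interpreted as extra shell commands when a string is passed to os.system():

import os
import shlex

# Hypothetical user-supplied value; unquoted, the semicolon would start a
# second shell command.
path = "docs; touch pwned"

# Unsafe: the shell would run `ls docs` and then `touch pwned`.
# os.system(f"ls {path}")

# Safe: shlex.quote() wraps the value in single quotes, so the shell sees one
# literal argument and `touch pwned` is never executed.
os.system(f"ls {shlex.quote(path)}")

When shell features are not needed, passing an argument list to subprocess.run() avoids the shell entirely and sidesteps quoting altogether; here the command line is built as a single string, so quoting each interpolated value is the applicable fix.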