diff --git a/llama-index-integrations/agent/llama-index-agent-openai/llama_index/agent/openai/openai_assistant_agent.py b/llama-index-integrations/agent/llama-index-agent-openai/llama_index/agent/openai/openai_assistant_agent.py
index 4f03d796a2ecff9934e74dfd650daf16ff872a3f..3ed3148654edbe61a0b04943a18d1ebb027f29f2 100644
--- a/llama-index-integrations/agent/llama-index-agent-openai/llama_index/agent/openai/openai_assistant_agent.py
+++ b/llama-index-integrations/agent/llama-index-agent-openai/llama_index/agent/openai/openai_assistant_agent.py
@@ -1,4 +1,5 @@
 """OpenAI Assistant Agent."""
+
 import asyncio
 import json
 import logging
@@ -134,6 +135,12 @@ def _process_files(client: Any, files: List[str]) -> Dict[str, str]:
     return file_dict
 
 
+def format_attachments(file_ids: Optional[List[str]] = None) -> List[Dict[str, str]]:
+    """Create attachments from file_ids."""
+    file_ids = file_ids or []
+    return [{"file_id": file_id} for file_id in file_ids]
+
+
 class OpenAIAssistantAgent(BaseAgent):
     """OpenAIAssistant agent.
 
@@ -223,7 +230,6 @@ class OpenAIAssistantAgent(BaseAgent):
         file_ids = file_ids or []
 
         file_dict = _process_files(client, files)
-        all_file_ids = list(file_dict.keys()) + file_ids
 
         # TODO: openai's typing is a bit sus
         all_openai_tools = cast(List[Any], all_openai_tools)
@@ -232,7 +238,6 @@ class OpenAIAssistantAgent(BaseAgent):
             instructions=instructions,
             tools=cast(List[Any], all_openai_tools),
             model=model,
-            file_ids=all_file_ids,
         )
         return cls(
             client,
@@ -335,12 +340,12 @@ class OpenAIAssistantAgent(BaseAgent):
 
     def add_message(self, message: str, file_ids: Optional[List[str]] = None) -> Any:
         """Add message to assistant."""
-        file_ids = file_ids or []
+        attachments = format_attachments(file_ids=file_ids)
         return self._client.beta.threads.messages.create(
             thread_id=self._thread_id,
             role="user",
             content=message,
-            file_ids=file_ids,
+            attachments=attachments,
         )
 
     def _run_function_calling(self, run: Any) -> List[ToolOutput]:
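For context, a minimal sketch of how the new helper builds the per-message `attachments` payload that replaces `file_ids` in the v2 Assistants threads API. The `client` (an `openai.OpenAI()` instance), `thread_id`, and file ids below are hypothetical placeholders, not part of this patch:

    from llama_index.agent.openai.openai_assistant_agent import format_attachments

    attachments = format_attachments(file_ids=["file-abc123", "file-def456"])
    # -> [{"file_id": "file-abc123"}, {"file_id": "file-def456"}]

    # The v2 threads API accepts attachments on the message instead of file_ids.
    client.beta.threads.messages.create(
        thread_id=thread_id,
        role="user",
        content="Please summarize the attached files.",
        attachments=attachments,
    )
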
diff --git a/llama-index-integrations/agent/llama-index-agent-openai/tests/test_openai_assistant_agent.py b/llama-index-integrations/agent/llama-index-agent-openai/tests/test_openai_assistant_agent.py
index d142640e05ee4db387d5b7e6aff84dd7962efcf4..fca0fb59b11bce29fa577ab006c8f53a86dd2252 100644
--- a/llama-index-integrations/agent/llama-index-agent-openai/tests/test_openai_assistant_agent.py
+++ b/llama-index-integrations/agent/llama-index-agent-openai/tests/test_openai_assistant_agent.py
@@ -34,6 +34,48 @@ def test_from_existing_no_tools() -> None:
     assert isinstance(agent, OpenAIAssistantAgent)
 
 
+def test_from_new() -> None:
+    name = "Math Tutor"
+    instructions = (
+        "You are a personal math tutor. Write and run code to answer math questions."
+    )
+    openai_tools = [{"type": "code_interpreter"}]
+    instructions_prefix = (
+        "Please address the user as Jane Doe. The user has a premium account."
+    )
+    run_retrieve_sleep_time = 0.5
+    verbose = True
+    api_key = "test-api-key"
+
+    mock_assistant = MagicMock()
+    with patch.object(openai, "OpenAI") as mock_openai:
+        mock_openai.return_value.beta.assistants.create.return_value = mock_assistant
+        agent = OpenAIAssistantAgent.from_new(
+            name=name,
+            instructions=instructions,
+            openai_tools=openai_tools,
+            instructions_prefix=instructions_prefix,
+            run_retrieve_sleep_time=run_retrieve_sleep_time,
+            verbose=verbose,
+            api_key=api_key,
+        )
+
+    assert isinstance(agent, OpenAIAssistantAgent)
+    assert agent.assistant == mock_assistant
+    assert agent.client == mock_openai.return_value
+    assert agent._instructions_prefix == instructions_prefix
+    assert agent._run_retrieve_sleep_time == run_retrieve_sleep_time
+    assert agent._verbose == verbose
+
+    mock_openai.assert_called_once_with(api_key=api_key)
+    mock_openai.return_value.beta.assistants.create.assert_called_once_with(
+        model="gpt-4-1106-preview",
+        name=name,
+        instructions=instructions,
+        tools=openai_tools,
+    )
+
+
 @pytest.fixture()
 def add_tool() -> FunctionTool:
     def add(a: int, b: int) -> int: