diff --git a/llama-index-core/llama_index/core/agent/react/step.py b/llama-index-core/llama_index/core/agent/react/step.py
index 7253d19b53a4d17fdfefb4a54f331e14562b2b26..272ea226ded76962ea2955b1a614759717b830ab 100644
--- a/llama-index-core/llama_index/core/agent/react/step.py
+++ b/llama-index-core/llama_index/core/agent/react/step.py
@@ -615,6 +615,8 @@ class ReActAgentWorker(BaseAgentWorker):
             )
         )
         # wait until response writing is done
+        agent_response._ensure_async_setup()
+
         await agent_response._is_function_false_event.wait()
 
         return self._get_task_step_response(agent_response, step, is_done)
diff --git a/llama-index-core/llama_index/core/chat_engine/types.py b/llama-index-core/llama_index/core/chat_engine/types.py
index a4a0e09030399e29dd1f9170d8bfda0708359d60..9291565269e504a36e1b4f55132d7b9ca329ea82 100644
--- a/llama-index-core/llama_index/core/chat_engine/types.py
+++ b/llama-index-core/llama_index/core/chat_engine/types.py
@@ -113,7 +113,6 @@ class StreamingAgentChatResponse:
         self._is_function_not_none_thread_event.set()
 
     def aput_in_queue(self, delta: Optional[str]) -> None:
-        self._ensure_async_setup()
         self._aqueue.put_nowait(delta)
         self._new_item_event.set()
 
@@ -167,6 +166,8 @@ class StreamingAgentChatResponse:
         memory: BaseMemory,
         on_stream_end_fn: Optional[callable] = None,
     ) -> None:
+        self._ensure_async_setup()
+
         if self.achat_stream is None:
             raise ValueError(
                 "achat_stream is None. Cannot asynchronously write to "
diff --git a/llama-index-integrations/agent/llama-index-agent-openai-legacy/llama_index/agent/openai_legacy/openai_agent.py b/llama-index-integrations/agent/llama-index-agent-openai-legacy/llama_index/agent/openai_legacy/openai_agent.py
index 22cda7e5e9bb81bb01950685ecd4f6f360963187..a807f8b0491a580d332c0109672b5c4f3470dc52 100644
--- a/llama-index-integrations/agent/llama-index-agent-openai-legacy/llama_index/agent/openai_legacy/openai_agent.py
+++ b/llama-index-integrations/agent/llama-index-agent-openai-legacy/llama_index/agent/openai_legacy/openai_agent.py
@@ -248,7 +248,9 @@ class BaseOpenAIAgent(BaseAgent):
             chat_stream_response.awrite_response_to_history(self.memory)
         )
         # wait until openAI functions stop executing
+        chat_stream_response._ensure_async_setup()
         await chat_stream_response._is_function_false_event.wait()
+
         # return response stream
         return chat_stream_response
 
diff --git a/llama-index-integrations/agent/llama-index-agent-openai-legacy/pyproject.toml b/llama-index-integrations/agent/llama-index-agent-openai-legacy/pyproject.toml
index 3ab4916648290fdbf136bed73dc1bd8633491396..f53dadfc617bd6934fe5232cc76cca44951b5b16 100644
--- a/llama-index-integrations/agent/llama-index-agent-openai-legacy/pyproject.toml
+++ b/llama-index-integrations/agent/llama-index-agent-openai-legacy/pyproject.toml
@@ -28,7 +28,7 @@ exclude = ["**/BUILD"]
 license = "MIT"
 name = "llama-index-agent-openai-legacy"
 readme = "README.md"
-version = "0.1.2"
+version = "0.1.3"
 
 [tool.poetry.dependencies]
 python = ">=3.8.1,<4.0"
diff --git a/llama-index-integrations/agent/llama-index-agent-openai/llama_index/agent/openai/step.py b/llama-index-integrations/agent/llama-index-agent-openai/llama_index/agent/openai/step.py
index 3a3f7b22ea33e3ee4ddbc1c4c0cf8cf4a6e0bfbf..c9892bd47cd1390b441324d580050d986e1fb93c 100644
--- a/llama-index-integrations/agent/llama-index-agent-openai/llama_index/agent/openai/step.py
+++ b/llama-index-integrations/agent/llama-index-agent-openai/llama_index/agent/openai/step.py
@@ -312,8 +312,11 @@ class OpenAIAgentWorker(BaseAgentWorker):
                 on_stream_end_fn=partial(self.finalize_task, task),
             )
         )
+        chat_stream_response._ensure_async_setup()
+
         # wait until openAI functions stop executing
         await chat_stream_response._is_function_false_event.wait()
+
         # return response stream
         return chat_stream_response
 
diff --git a/llama-index-integrations/agent/llama-index-agent-openai/pyproject.toml b/llama-index-integrations/agent/llama-index-agent-openai/pyproject.toml
index a11750db87cae518efc462c0863e6498856619b6..a62b0b71a5a3113a5fabcfec206420e7a7e9e8d6 100644
--- a/llama-index-integrations/agent/llama-index-agent-openai/pyproject.toml
+++ b/llama-index-integrations/agent/llama-index-agent-openai/pyproject.toml
@@ -28,7 +28,7 @@ exclude = ["**/BUILD"]
 license = "MIT"
 name = "llama-index-agent-openai"
 readme = "README.md"
-version = "0.1.6"
+version = "0.1.7"
 
 [tool.poetry.dependencies]
 python = ">=3.8.1,<4.0"
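
For context on the pattern: the diff moves `_ensure_async_setup()` out of the producer (`aput_in_queue`) and into `awrite_response_to_history` plus each awaiting caller, so the asyncio primitives are created from inside the running event loop before anything awaits them. Below is a minimal sketch of that pattern. The body of `_ensure_async_setup()` is not part of this diff, so the lazy-allocation logic shown here is an assumption inferred from the call sites, not the actual `StreamingAgentChatResponse` implementation.

```python
import asyncio
from typing import Optional


class LazyAsyncResponse:
    """Sketch of lazy asyncio setup mirroring the diff's call pattern.

    NOT the real StreamingAgentChatResponse: _ensure_async_setup()'s
    body is assumed, inferred from how the diff's callers use it.
    """

    def __init__(self) -> None:
        # Deferred: creating these in __init__ could bind them to
        # whatever loop (if any) is current at construction time.
        self._aqueue: Optional[asyncio.Queue] = None
        self._new_item_event: Optional[asyncio.Event] = None
        self._is_function_false_event: Optional[asyncio.Event] = None

    def _ensure_async_setup(self) -> None:
        # Idempotent: only the first call, inside a running loop, allocates.
        if self._aqueue is None:
            self._aqueue = asyncio.Queue()
            self._new_item_event = asyncio.Event()
            self._is_function_false_event = asyncio.Event()

    def aput_in_queue(self, delta: Optional[str]) -> None:
        # Per the diff, callers now guarantee _ensure_async_setup()
        # has already run before this producer is invoked.
        self._aqueue.put_nowait(delta)
        self._new_item_event.set()


async def main() -> None:
    response = LazyAsyncResponse()
    response._ensure_async_setup()           # set up inside the loop
    response.aput_in_queue("hello")
    response._is_function_false_event.set()  # simulate the writer finishing
    await response._is_function_false_event.wait()


asyncio.run(main())
```

This ordering likely matters most on the Python 3.8/3.9 interpreters these packages still support (`python = ">=3.8.1,<4.0"`), where `asyncio` primitives created outside a running loop can bind to the wrong loop and cause the later `wait()` to hang or raise.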