
Commit f9e6c85

Fixed async generators on Python 3.12, 3.11, and 3.10
1 parent cf99845 commit f9e6c85

13 files changed: +14 -14 lines
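Every hunk below makes the same one-line change: the return annotations gain the second type argument of AsyncGenerator. On Python 3.10-3.12, the typing.AsyncGenerator alias requires exactly two type arguments (yield type and send type), so a one-argument annotation like AsyncGenerator[StreamingChatMessage] raises a TypeError as soon as the function is defined; Python 3.13 gives the send type a default of None, which is why only the older interpreters broke. A minimal sketch of the fixed pattern, assuming the annotations come from the typing module; the function below is illustrative and not part of ToolAgents:

import asyncio
from typing import AsyncGenerator


async def count_stream(limit: int) -> AsyncGenerator[int, None]:
    # Yield type is int; callers never send values in, so the send type is
    # None. On Python 3.10-3.12, annotating this as AsyncGenerator[int]
    # raises a TypeError at definition time because the typing alias
    # expects two type arguments.
    for value in range(limit):
        yield value


async def main() -> None:
    # Async generators are consumed with `async for`.
    async for value in count_stream(3):
        print(value)


if __name__ == "__main__":
    asyncio.run(main())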

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

 [project]
 name = "ToolAgents"
-version = "0.2.6"
+version = "0.2.7"
 description = "ToolAgents is a lightweight and flexible framework for creating function-calling agents with various language models and APIs."

 readme = "ReadMe.md"

src/ToolAgents/agents/base_llm_agent.py

Lines changed: 1 addition & 1 deletion
@@ -244,7 +244,7 @@ async def get_streaming_response(
 tool_registry: ToolRegistry = None,
 settings: Optional[Any] = None,
 reset_last_messages_buffer: bool = True,
-) -> AsyncGenerator[ChatResponseChunk]:
+) -> AsyncGenerator[ChatResponseChunk, None]:
 """
 Gets a streaming response from the chat API, handling any tool calls.

src/ToolAgents/agents/chat_tool_agent.py

Lines changed: 2 additions & 2 deletions
@@ -282,7 +282,7 @@ async def stream_step(
 tool_registry: ToolRegistry = None,
 settings: Optional[Any] = None,
 reset_last_messages_buffer: bool = True,
-) -> AsyncGenerator[StreamingChatMessage]:
+) -> AsyncGenerator[StreamingChatMessage, None]:
 """
 Performs a single streaming step of interaction with the chat API,
 yielding chunks and whether they contain tool calls.
@@ -353,7 +353,7 @@ async def get_streaming_response(
 tool_registry: ToolRegistry = None,
 settings: Optional[Any] = None,
 reset_last_messages_buffer: bool = True,
-) -> AsyncGenerator[ChatResponseChunk]:
+) -> AsyncGenerator[ChatResponseChunk, None]:
 """
 Gets a streaming response from the chat API, handling any tool calls.

src/ToolAgents/provider/chat_api_provider/anthropic.py

Lines changed: 1 addition & 1 deletion
@@ -76,7 +76,7 @@ async def get_response(self, messages: List[ChatMessage], settings: ProviderSett

 async def get_streaming_response(self, messages: List[ChatMessage], settings: ProviderSettings=None,
 tools: Optional[List[FunctionTool]] = None) -> AsyncGenerator[
-StreamingChatMessage]:
+StreamingChatMessage, None]:
 request_kwargs = self._prepare_request(messages, settings, tools)
 request_kwargs["stream"] = True
 response = self.client.messages.create(**request_kwargs)

src/ToolAgents/provider/chat_api_provider/groq.py

Lines changed: 1 addition & 1 deletion
@@ -76,7 +76,7 @@ async def get_response(self, messages: List[ChatMessage], settings=None,

 async def get_streaming_response(self, messages: List[ChatMessage], settings=None,
 tools: Optional[List[FunctionTool]] = None) -> AsyncGenerator[
-StreamingChatMessage]:
+StreamingChatMessage, None]:
 request_kwargs = self._prepare_request(messages, settings, tools)

 request_kwargs["stream"] = True

src/ToolAgents/provider/chat_api_provider/mistral.py

Lines changed: 1 addition & 1 deletion
@@ -71,7 +71,7 @@ async def get_response(self, messages: List[ChatMessage], settings: ProviderSett

 async def get_streaming_response(self, messages: List[ChatMessage], settings: ProviderSettings = None,
 tools: Optional[List[FunctionTool]] = None) -> AsyncGenerator[
-StreamingChatMessage]:
+StreamingChatMessage, None]:
 request_kwargs = self._prepare_request(messages, settings, tools)

 stream = self.client.chat.stream_async(**request_kwargs)

src/ToolAgents/provider/chat_api_provider/open_ai.py

Lines changed: 1 addition & 1 deletion
@@ -83,7 +83,7 @@ async def get_response(self, messages: List[ChatMessage], settings=None,

 async def get_streaming_response(self, messages: List[ChatMessage], settings=None,
 tools: Optional[List[FunctionTool]] = None) -> AsyncGenerator[
-StreamingChatMessage]:
+StreamingChatMessage, None]:
 request_kwargs = self._prepare_request(messages, settings, tools)

 request_kwargs["stream"] = True

src/ToolAgents/provider/llm_provider.py

Lines changed: 1 addition & 1 deletion
@@ -249,7 +249,7 @@ async def get_response(self, messages: List[ChatMessage], settings: ProviderSett
 @abc.abstractmethod
 async def get_streaming_response(self, messages: List[ChatMessage], settings: ProviderSettings=None,
 tools: Optional[List[FunctionTool]] = None) -> AsyncGenerator[
-StreamingChatMessage]:
+StreamingChatMessage, None]:
 pass

 @abc.abstractmethod

src/ToolAgents/provider/message_converter/anthropic_message_converter.py

Lines changed: 1 addition & 1 deletion
@@ -183,7 +183,7 @@ def yield_from_provider(self, stream_generator: Any) -> Generator[StreamingChatM
 created_at=datetime.datetime.now(),
 updated_at=datetime.datetime.now()))

-async def async_yield_from_provider(self, stream_generator: Any) -> AsyncGenerator[StreamingChatMessage]:
+async def async_yield_from_provider(self, stream_generator: Any) -> AsyncGenerator[StreamingChatMessage, None]:
 current_tool_call = None
 content = None
 has_tool_call = False

src/ToolAgents/provider/message_converter/groq_message_converter.py

Lines changed: 1 addition & 1 deletion
@@ -206,7 +206,7 @@ def yield_from_provider(self, stream_generator: Any) -> Generator[StreamingChatM
 finished_chat_message=finished_message
 )

-async def async_yield_from_provider(self, stream_generator: Any) -> AsyncGenerator[StreamingChatMessage]:
+async def async_yield_from_provider(self, stream_generator: Any) -> AsyncGenerator[StreamingChatMessage, None]:
 current_content = ""
 current_tool_calls = []
 alt_index = 0

src/ToolAgents/provider/message_converter/message_converter.py

Lines changed: 1 addition & 1 deletion
@@ -42,5 +42,5 @@ def yield_from_provider(self, stream_generator: Any) -> Generator[StreamingChatM
 pass

 @abstractmethod
-async def async_yield_from_provider(self, stream_generator: Any) -> AsyncGenerator[StreamingChatMessage]:
+async def async_yield_from_provider(self, stream_generator: Any) -> AsyncGenerator[StreamingChatMessage, None]:
 pass

src/ToolAgents/provider/message_converter/mistral_message_converter.py

Lines changed: 1 addition & 1 deletion
@@ -187,7 +187,7 @@ def yield_from_provider(self, stream_generator: Any) -> Generator[StreamingChatM
 finished=True,
 finished_chat_message=finished_message
 )
-async def async_yield_from_provider(self, stream_generator: Any) -> AsyncGenerator[StreamingChatMessage]:
+async def async_yield_from_provider(self, stream_generator: Any) -> AsyncGenerator[StreamingChatMessage, None]:
 current_content = ""
 current_tool_calls = []
 alt_index = 0

src/ToolAgents/provider/message_converter/open_ai_message_converter.py

Lines changed: 1 addition & 1 deletion
@@ -201,7 +201,7 @@ def yield_from_provider(self, stream_generator: Any) -> Generator[StreamingChatM
 finished=True,
 finished_chat_message=finished_message
 )
-async def async_yield_from_provider(self, stream_generator: Any) -> AsyncGenerator[StreamingChatMessage]:
+async def async_yield_from_provider(self, stream_generator: Any) -> AsyncGenerator[StreamingChatMessage, None]:
 current_content = ""
 current_tool_calls = []
 alt_index = 0
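
Since all of the changed signatures now share the shape AsyncGenerator[StreamingChatMessage, None], downstream code can be written generically against that annotation. A hedged sketch; collect is a hypothetical helper, not part of the ToolAgents API:

from typing import AsyncGenerator, List, TypeVar

T = TypeVar("T")


async def collect(stream: AsyncGenerator[T, None]) -> List[T]:
    # Drain any AsyncGenerator[T, None] (for example, one of the streaming
    # message generators annotated above) into a list; illustrative only.
    return [item async for item in stream]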
