From 6e0bc147b52cb1e52d7fb3f8dd01d1f33dae201e Mon Sep 17 00:00:00 2001
From: Heiner Lohaus
Date: Fri, 3 Jan 2025 20:35:46 +0100
Subject: Support continue messages in Airforce
 Add auth caching for OpenAI ChatGPT
 Some provider improvements

---
 g4f/tools/run_tools.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

(limited to 'g4f/tools/run_tools.py')

diff --git a/g4f/tools/run_tools.py b/g4f/tools/run_tools.py
index 21e8c7be..f52623e9 100644
--- a/g4f/tools/run_tools.py
+++ b/g4f/tools/run_tools.py
@@ -7,7 +7,7 @@ from typing import Optional, Callable, AsyncIterator
 
 from ..typing import Messages
 from ..providers.helper import filter_none
-from ..client.helper import to_async_iterator
+from ..providers.asyncio import to_async_iterator
 from .web_search import do_search, get_search_message
 from .files import read_bucket, get_bucket_dir
 from .. import debug
@@ -55,9 +55,7 @@ async def async_iter_run_tools(async_iter_callback, model, messages, tool_calls:
         if has_bucket and isinstance(messages[-1]["content"], str):
             messages[-1]["content"] += BUCKET_INSTRUCTIONS
 
-    response = async_iter_callback(model=model, messages=messages, **kwargs)
-    if not hasattr(response, "__aiter__"):
-        response = to_async_iterator(response)
+    response = to_async_iterator(async_iter_callback(model=model, messages=messages, **kwargs))
     async for chunk in response:
         yield chunk
 
-- 
cgit v1.2.3
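
Note on the second hunk: the change only stays behaviour-preserving if to_async_iterator (now imported from ..providers.asyncio) accepts both asynchronous and plain synchronous iterators, which is what makes the previous hasattr(response, "__aiter__") guard redundant at the call site. The sketch below illustrates that assumed behaviour; it is not the actual implementation in g4f/providers/asyncio.py.

    # Hypothetical sketch of a normalizing helper, assuming it passes async
    # iterators through and wraps plain iterables into an async stream.
    from typing import AsyncIterator, Iterable, Union

    async def to_async_iterator(iterator: Union[AsyncIterator, Iterable]) -> AsyncIterator:
        if hasattr(iterator, "__aiter__"):
            # Already async: re-yield each chunk unchanged.
            async for chunk in iterator:
                yield chunk
        else:
            # Plain iterable (list, generator): drain it synchronously and
            # re-emit the chunks as an async stream.
            for chunk in iterator:
                yield chunk

With a helper like this, wrapping the callback result unconditionally is safe, and moving the type check into the helper keeps every call site to a single line, which appears to be the point of the simplification.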