Diffstat (limited to 'g4f/Provider')
-rw-r--r--   g4f/Provider/DeepInfra.py              | 2 ++
-rw-r--r--   g4f/Provider/needs_auth/OpenaiChat.py  | 4 ++--
2 files changed, 4 insertions(+), 2 deletions(-)
diff --git a/g4f/Provider/DeepInfra.py b/g4f/Provider/DeepInfra.py
index 183f00ea..f44679ff 100644
--- a/g4f/Provider/DeepInfra.py
+++ b/g4f/Provider/DeepInfra.py
@@ -61,6 +61,8 @@ class DeepInfra(AsyncGeneratorProvider, ProviderModelMixin):
             'model' : cls.get_model(model),
             'messages': messages,
             'temperature': kwargs.get("temperature", 0.7),
+            'max_tokens': kwargs.get("max_tokens", 512),
+            'stop': kwargs.get("stop", []),
             'stream' : True
         }
         async with session.post('https://api.deepinfra.com/v1/openai/chat/completions',
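
The two added keys are read straight from the caller's keyword arguments, so they can be tuned per request, with 512 tokens and an empty stop list as defaults. A minimal usage sketch, assuming the public g4f.ChatCompletion.create entry point forwards extra keyword arguments to the selected provider; the model name is illustrative only:

import g4f
from g4f.Provider import DeepInfra

response = g4f.ChatCompletion.create(
    model="meta-llama/Llama-2-70b-chat-hf",   # illustrative; any model DeepInfra serves
    messages=[{"role": "user", "content": "Give a one-line summary of unified diffs."}],
    provider=DeepInfra,
    max_tokens=256,     # picked up by kwargs.get("max_tokens", 512)
    stop=["\n\n"],      # picked up by kwargs.get("stop", [])
)
print(response)
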
diff --git a/g4f/Provider/needs_auth/OpenaiChat.py b/g4f/Provider/needs_auth/OpenaiChat.py
index 0fa433a4..e507404b 100644
--- a/g4f/Provider/needs_auth/OpenaiChat.py
+++ b/g4f/Provider/needs_auth/OpenaiChat.py
@@ -288,7 +288,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
json={"is_visible": False},
headers=headers
) as response:
- response.raise_for_status()
+ ...
@classmethod
async def create_async_generator(
@@ -434,7 +434,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
action = "continue"
await asyncio.sleep(5)
if history_disabled and auto_continue:
- await cls.delete_conversation(session, cls._headers, conversation_id)
+ await cls.delete_conversation(session, cls._headers, fields.conversation_id)
@staticmethod
async def iter_messages_ws(ws: ClientWebSocketResponse) -> AsyncIterator: