summaryrefslogtreecommitdiffstats
path: root/g4f/Provider/ChatBase.py
diff options
context:
space:
mode:
authorTekky <98614666+xtekky@users.noreply.github.com>2023-10-10 00:18:17 +0200
committerGitHub <noreply@github.com>2023-10-10 00:18:17 +0200
commit8a5c23f693c1ad92b5d5259201bb99807f76ea2a (patch)
tree26a25d7b5bee6ba1e2ac484e1ad124a8e52b0b43 /g4f/Provider/ChatBase.py
parentMerge branch 'main' of https://github.com/xtekky/gpt4free (diff)
parent Add Proxy Support and Create Provider to Readme (diff)
downloadgpt4free-8a5c23f693c1ad92b5d5259201bb99807f76ea2a.tar
gpt4free-8a5c23f693c1ad92b5d5259201bb99807f76ea2a.tar.gz
gpt4free-8a5c23f693c1ad92b5d5259201bb99807f76ea2a.tar.bz2
gpt4free-8a5c23f693c1ad92b5d5259201bb99807f76ea2a.tar.lz
gpt4free-8a5c23f693c1ad92b5d5259201bb99807f76ea2a.tar.xz
gpt4free-8a5c23f693c1ad92b5d5259201bb99807f76ea2a.tar.zst
gpt4free-8a5c23f693c1ad92b5d5259201bb99807f76ea2a.zip
Diffstat (limited to '')
-rw-r--r--g4f/Provider/ChatBase.py9
1 file changed, 5 insertions, 4 deletions
diff --git a/g4f/Provider/ChatBase.py b/g4f/Provider/ChatBase.py
index b98fe565..ce5160d8 100644
--- a/g4f/Provider/ChatBase.py
+++ b/g4f/Provider/ChatBase.py
@@ -2,7 +2,7 @@ from __future__ import annotations
from aiohttp import ClientSession
-from ..typing import AsyncGenerator
+from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider
@@ -16,9 +16,10 @@ class ChatBase(AsyncGeneratorProvider):
async def create_async_generator(
cls,
model: str,
- messages: list[dict[str, str]],
+ messages: Messages,
+ proxy: str = None,
**kwargs
- ) -> AsyncGenerator:
+ ) -> AsyncResult:
if model == "gpt-4":
chat_id = "quran---tafseer-saadi-pdf-wbgknt7zn"
elif model == "gpt-3.5-turbo" or not model:
@@ -44,7 +45,7 @@ class ChatBase(AsyncGeneratorProvider):
"chatId": chat_id,
"conversationId": f"kcXpqEnqUie3dnJlsRi_O-{chat_id}"
}
- async with session.post("https://www.chatbase.co/api/fe/chat", json=data) as response:
+ async with session.post("https://www.chatbase.co/api/fe/chat", json=data, proxy=proxy) as response:
response.raise_for_status()
async for stream in response.content.iter_any():
yield stream.decode()