-rw-r--r-- g4f/Provider/ChatBase.py | 5 +++++
-rw-r--r-- g4f/Provider/GPTalk.py   | 5 ++++-
2 files changed, 9 insertions(+), 1 deletion(-)
diff --git a/g4f/Provider/ChatBase.py b/g4f/Provider/ChatBase.py
index 9f11e1ac..ccc20244 100644
--- a/g4f/Provider/ChatBase.py
+++ b/g4f/Provider/ChatBase.py
@@ -12,6 +12,8 @@ class ChatBase(AsyncGeneratorProvider):
     supports_message_history = True
     working = True
     jailbreak = True
+    list_incorrect_responses = ["support@chatbase",
+                                "about Chatbase"]
 
     @classmethod
     async def create_async_generator(
@@ -53,6 +55,9 @@ class ChatBase(AsyncGeneratorProvider):
                 response_data = ""
                 async for stream in response.content.iter_any():
                     response_data += stream.decode()
+                    for incorrect_response in cls.list_incorrect_responses:
+                        if incorrect_response in response_data:
+                            raise RuntimeError("Incorrect response")
                     yield stream.decode()
 
     @classmethod
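The ChatBase hunks accumulate the streamed chunks into response_data and abort as soon as the buffer contains one of the known canned phrases. A minimal standalone sketch of that blocklist check, using an illustrative chunk list rather than the real API stream:

# Sketch of the blocklist check added to ChatBase; the sample chunks are made up.
list_incorrect_responses = ["support@chatbase", "about Chatbase"]

def filter_stream(chunks):
    # Accumulate the stream and fail fast once a blocked phrase appears anywhere in it.
    response_data = ""
    for chunk in chunks:
        response_data += chunk
        for incorrect_response in list_incorrect_responses:
            if incorrect_response in response_data:
                raise RuntimeError("Incorrect response")
        yield chunk

try:
    for piece in filter_stream(["Please contact ", "support@chatbase for help"]):
        print(piece)
except RuntimeError as error:
    print(error)  # the second chunk completes a blocked phrase, so this prints "Incorrect response"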
diff --git a/g4f/Provider/GPTalk.py b/g4f/Provider/GPTalk.py
index b5881e5d..5749ff2e 100644
--- a/g4f/Provider/GPTalk.py
+++ b/g4f/Provider/GPTalk.py
@@ -13,6 +13,7 @@ class GPTalk(AsyncGeneratorProvider):
     working = True
     supports_gpt_35_turbo = True
     _auth = None
+    used_times = 0
 
     @classmethod
     async def create_async_generator(
@@ -44,7 +45,7 @@ class GPTalk(AsyncGeneratorProvider):
             'x-auth-timestamp': f"{timestamp}",
         }
         async with ClientSession(headers=headers) as session:
-            if not cls._auth or cls._auth["expires_at"] < timestamp:
+            if not cls._auth or cls._auth["expires_at"] < timestamp or cls.used_times == 5:
                 data = {
                     "fingerprint": secrets.token_hex(16).zfill(32),
                     "platform": "fingerprint"
@@ -52,6 +53,7 @@ class GPTalk(AsyncGeneratorProvider):
                 async with session.post(f"{cls.url}/api/chatgpt/user/login", json=data, proxy=proxy) as response:
                     response.raise_for_status()
                     cls._auth = (await response.json())["data"]
+                cls.used_times = 0
             data = {
                 "content": format_prompt(messages),
                 "accept": "stream",
@@ -72,6 +74,7 @@ class GPTalk(AsyncGeneratorProvider):
             async with session.post(f"{cls.url}/api/chatgpt/chatapi/text", json=data, headers=headers, proxy=proxy) as response:
                 response.raise_for_status()
                 token = (await response.json())["data"]["token"]
+            cls.used_times += 1
             last_message = ""
             async with session.get(f"{cls.url}/api/chatgpt/chatapi/stream", params={"token": token}, proxy=proxy) as response:
                 response.raise_for_status()
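The GPTalk hunks make the provider re-authenticate not only when the cached token is missing or expired, but also after five requests: used_times is reset to 0 on login and incremented once per text request. A rough sketch of that rotation logic, with a stubbed login in place of the real /api/chatgpt/user/login call:

# Sketch of the re-login counter added to GPTalk; _login is a stub, not the real endpoint.
import time

class TokenRotator:
    _auth = None
    used_times = 0

    @classmethod
    def _login(cls):
        # Stand-in for the login request that returns {"token": ..., "expires_at": ...}.
        cls._auth = {"token": f"token-{time.time()}", "expires_at": int(time.time()) + 3600}
        cls.used_times = 0

    @classmethod
    def request(cls):
        timestamp = int(time.time())
        # Same condition as the diff: no cached auth, expired auth, or five uses since the last login.
        if not cls._auth or cls._auth["expires_at"] < timestamp or cls.used_times == 5:
            cls._login()
        cls.used_times += 1
        return cls._auth["token"]

for i in range(7):
    token = TokenRotator.request()
    print(i, TokenRotator.used_times, token)
# used_times climbs to 5, then the sixth call triggers a fresh login and the counter restarts at 1.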