Diffstat (limited to 'g4f/Provider/deprecated')
-rw-r--r--  g4f/Provider/deprecated/AiChatOnline.py   59
-rw-r--r--  g4f/Provider/deprecated/Yqcloud.py          2
-rw-r--r--  g4f/Provider/deprecated/__init__.py         2
3 files changed, 2 insertions(+), 61 deletions(-)
diff --git a/g4f/Provider/deprecated/AiChatOnline.py b/g4f/Provider/deprecated/AiChatOnline.py
deleted file mode 100644
index e690f28e..00000000
--- a/g4f/Provider/deprecated/AiChatOnline.py
+++ /dev/null
@@ -1,59 +0,0 @@
-from __future__ import annotations
-
-import json
-from aiohttp import ClientSession
-
-from ...typing import AsyncResult, Messages
-from ..base_provider import AsyncGeneratorProvider
-from ..helper import get_random_string
-
-class AiChatOnline(AsyncGeneratorProvider):
- url = "https://aichatonline.org"
- working = False
- supports_gpt_35_turbo = True
- supports_message_history = False
-
- @classmethod
- async def create_async_generator(
- cls,
- model: str,
- messages: Messages,
- proxy: str = None,
- **kwargs
- ) -> AsyncResult:
- headers = {
- "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/119.0",
- "Accept": "text/event-stream",
- "Accept-Language": "de,en-US;q=0.7,en;q=0.3",
- "Accept-Encoding": "gzip, deflate, br",
- "Referer": f"{cls.url}/chatgpt/chat/",
- "Content-Type": "application/json",
- "Origin": cls.url,
- "Alt-Used": "aichatonline.org",
- "Connection": "keep-alive",
- "Sec-Fetch-Dest": "empty",
- "Sec-Fetch-Mode": "cors",
- "Sec-Fetch-Site": "same-origin",
- "TE": "trailers"
- }
- async with ClientSession(headers=headers) as session:
- data = {
- "botId": "default",
- "customId": None,
- "session": get_random_string(16),
- "chatId": get_random_string(),
- "contextId": 7,
- "messages": messages,
- "newMessage": messages[-1]["content"],
- "newImageId": None,
- "stream": True
- }
- async with session.post(f"{cls.url}/chatgpt/wp-json/mwai-ui/v1/chats/submit", json=data, proxy=proxy) as response:
- response.raise_for_status()
- async for chunk in response.content:
- if chunk.startswith(b"data: "):
- data = json.loads(chunk[6:])
- if data["type"] == "live":
- yield data["data"]
- elif data["type"] == "end":
- break \ No newline at end of file
diff --git a/g4f/Provider/deprecated/Yqcloud.py b/g4f/Provider/deprecated/Yqcloud.py
index 2ec6931a..227f8995 100644
--- a/g4f/Provider/deprecated/Yqcloud.py
+++ b/g4f/Provider/deprecated/Yqcloud.py
@@ -9,7 +9,7 @@ from ..base_provider import AsyncGeneratorProvider, format_prompt
 
 class Yqcloud(AsyncGeneratorProvider):
     url = "https://chat9.yqcloud.top/"
-    working = True
+    working = False
     supports_gpt_35_turbo = True
 
     @staticmethod
diff --git a/g4f/Provider/deprecated/__init__.py b/g4f/Provider/deprecated/__init__.py
index 408f3913..bf923f2a 100644
--- a/g4f/Provider/deprecated/__init__.py
+++ b/g4f/Provider/deprecated/__init__.py
@@ -25,7 +25,7 @@ from .Aichat import Aichat
 from .Berlin import Berlin
 from .Phind import Phind
 from .AiAsk import AiAsk
-from .AiChatOnline import AiChatOnline
+from ..AiChatOnline import AiChatOnline
 from .ChatAnywhere import ChatAnywhere
 from .FakeGpt import FakeGpt
 from .GeekGpt import GeekGpt