diff options
author | Tekky <98614666+xtekky@users.noreply.github.com> | 2023-08-28 22:08:23 +0200 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-08-28 22:08:23 +0200 |
commit | 7e687b3d178c00a27d7e5ae2613fe88ee7844639 (patch) | |
tree | 4034e8ae9fc7ca9af295f04358bb00516b464e0b /g4f/Provider/Yqcloud.py | |
parent | Merge pull request #851 from Luneye/patch-1 (diff) | |
parent | Merge branch 'main' into hugging (diff) | |
download | gpt4free-0.0.2.6.tar gpt4free-0.0.2.6.tar.gz gpt4free-0.0.2.6.tar.bz2 gpt4free-0.0.2.6.tar.lz gpt4free-0.0.2.6.tar.xz gpt4free-0.0.2.6.tar.zst gpt4free-0.0.2.6.zip |
Diffstat (limited to 'g4f/Provider/Yqcloud.py')
-rw-r--r-- | g4f/Provider/Yqcloud.py | 52 |
1 file changed, 23 insertions(+), 29 deletions(-)
diff --git a/g4f/Provider/Yqcloud.py b/g4f/Provider/Yqcloud.py index 44173525..7c1d06ba 100644 --- a/g4f/Provider/Yqcloud.py +++ b/g4f/Provider/Yqcloud.py @@ -1,29 +1,27 @@ -import requests +from aiohttp import ClientSession -from ..typing import Any, CreateResult -from .base_provider import BaseProvider +from .base_provider import AsyncProvider, format_prompt -class Yqcloud(BaseProvider): - url = "https://chat9.yqcloud.top/" - working = True - supports_gpt_35_turbo = True +class Yqcloud(AsyncProvider): + url = "https://chat9.yqcloud.top/" + working = True + supports_gpt_35_turbo = True @staticmethod - def create_completion( + async def create_async( model: str, messages: list[dict[str, str]], - stream: bool, **kwargs: Any) -> CreateResult: - - headers = _create_header() - payload = _create_payload(messages) - - response = requests.post("https://api.aichatos.cloud/api/generateStream", - headers=headers, json=payload) - - response.raise_for_status() - response.encoding = 'utf-8' - yield response.text + proxy: str = None, + **kwargs, + ) -> str: + async with ClientSession( + headers=_create_header() + ) as session: + payload = _create_payload(messages) + async with session.post("https://api.aichatos.cloud/api/generateStream", proxy=proxy, json=payload) as response: + response.raise_for_status() + return await response.text() def _create_header(): @@ -35,15 +33,11 @@ def _create_header(): def _create_payload(messages: list[dict[str, str]]): - prompt = "" - for message in messages: - prompt += "%s: %s\n" % (message["role"], message["content"]) - prompt += "assistant:" - return { - "prompt" : prompt, - "network" : True, - "system" : "", + "prompt": format_prompt(messages), + "network": True, + "system": "", "withoutContext": False, - "stream" : False, - }
\ No newline at end of file + "stream": False, + "userId": "#/chat/1693025544336" + } |