author | Debaditya Banerji <77636021+devAdityaa@users.noreply.github.com> | 2023-12-23 20:26:59 +0100 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-12-23 20:26:59 +0100 |
commit | bcd1cdf4e8f35aae05fba287645ed37cfa59dc68 (patch) | |
tree | f2371813cc5e9f7140651c131a98d7d91d5706cc /g4f/Provider/Aura.py | |
parent | ~ | g4f v-0.1.9.5 (diff) | |
download | gpt4free-bcd1cdf4e8f35aae05fba287645ed37cfa59dc68.tar gpt4free-bcd1cdf4e8f35aae05fba287645ed37cfa59dc68.tar.gz gpt4free-bcd1cdf4e8f35aae05fba287645ed37cfa59dc68.tar.bz2 gpt4free-bcd1cdf4e8f35aae05fba287645ed37cfa59dc68.tar.lz gpt4free-bcd1cdf4e8f35aae05fba287645ed37cfa59dc68.tar.xz gpt4free-bcd1cdf4e8f35aae05fba287645ed37cfa59dc68.tar.zst gpt4free-bcd1cdf4e8f35aae05fba287645ed37cfa59dc68.zip |
Diffstat (limited to 'g4f/Provider/Aura.py')
-rw-r--r-- | g4f/Provider/Aura.py | 56 |
1 file changed, 56 insertions, 0 deletions
diff --git a/g4f/Provider/Aura.py b/g4f/Provider/Aura.py
new file mode 100644
index 00000000..fa6025cd
--- /dev/null
+++ b/g4f/Provider/Aura.py
@@ -0,0 +1,56 @@
+from __future__ import annotations
+
+from aiohttp import ClientSession
+from ..typing import AsyncResult, Messages
+from .base_provider import AsyncGeneratorProvider
+
+class Aura(AsyncGeneratorProvider):
+    url = "https://openchat.team"
+    working = True
+    supports_gpt_35_turbo = True
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        proxy: str = None,
+        **kwargs
+    ) -> AsyncResult:
+        headers = {
+            "Accept": "*/*",
+            "Accept-Encoding": "gzip, deflate, br",
+            "Accept-Language": "en-GB,en-US;q=0.9,en;q=0.8",
+            "Content-Type": "application/json",
+            "Origin": f"{cls.url}",
+            "Referer": f"{cls.url}/",
+            "Sec-Ch-Ua": '"Not_A Brand";v="8", "Chromium";v="120", "Google Chrome";v="120"',
+            "Sec-Ch-Ua-Mobile": "?0",
+            "Sec-Ch-Ua-Platform": '"Linux"',
+            "Sec-Fetch-Dest": "empty",
+            "Sec-Fetch-Mode": "cors",
+            "Sec-Fetch-Site": "same-origin",
+            "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
+        }
+        async with ClientSession(headers=headers) as session:
+            system_prompt=""
+            system_message=[]
+            for message in messages:
+                if message["role"]=="system":
+                    system_prompt +=message["content"]
+                else:
+                    system_message.append(message)
+            data = {
+                "model": {
+                    "id": "openchat_v3.2_mistral",
+                    "name": "OpenChat Aura",
+                    "maxLength": 24576,
+                    "tokenLimit": 8192
+                },
+                "messages": system_message,
+                "key": "",
+                "prompt": f"{system_prompt}",
+                "temperature": 0.5
+            }
+            async with session.post(f"{cls.url}/api/chat",json=data,proxy=proxy) as response:
+                yield await response.text()
\ No newline at end of file
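For context, here is a minimal sketch (not part of this commit) of how the new provider can be driven directly. `Aura.create_async_generator` is the only entry point the file defines; the `model` argument is ignored because the request payload hard-codes `openchat_v3.2_mistral`, system messages are merged into the `prompt` field, and the generator yields the raw body of the `POST {url}/api/chat` response. The import path and the example messages are assumptions for illustration; wiring the provider into `g4f/Provider/__init__.py` or `g4f.ChatCompletion` is outside this diff.

```python
import asyncio

# Assumed import path: this commit only adds g4f/Provider/Aura.py,
# so the class is imported from that module directly.
from g4f.Provider.Aura import Aura


async def main() -> None:
    messages = [
        # The system message is folded into the "prompt" field by the provider.
        {"role": "system", "content": "You are a concise assistant."},
        # Non-system messages are forwarded as the "messages" list.
        {"role": "user", "content": "Say hello in one sentence."},
    ]
    # The model argument is unused by Aura; the payload pins openchat_v3.2_mistral.
    async for chunk in Aura.create_async_generator(model="", messages=messages):
        print(chunk, end="")


asyncio.run(main())
```

Because the provider yields `await response.text()` in one shot, the generator produces a single chunk containing the full reply rather than a token-by-token stream.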