author     Debaditya Banerji <77636021+devAdityaa@users.noreply.github.com>  2023-12-23 20:26:59 +0100
committer  GitHub <noreply@github.com>  2023-12-23 20:26:59 +0100
commit     bcd1cdf4e8f35aae05fba287645ed37cfa59dc68 (patch)
tree       f2371813cc5e9f7140651c131a98d7d91d5706cc
parent     ~ | g4f v-0.1.9.5 (diff)
-rw-r--r--  g4f/Provider/Aura.py      56
-rw-r--r--  g4f/Provider/__init__.py   2
2 files changed, 57 insertions(+), 1 deletion(-)
diff --git a/g4f/Provider/Aura.py b/g4f/Provider/Aura.py
new file mode 100644
index 00000000..fa6025cd
--- /dev/null
+++ b/g4f/Provider/Aura.py
@@ -0,0 +1,56 @@
+from __future__ import annotations
+from aiohttp import ClientSession
+from ..typing import AsyncResult, Messages
+from .base_provider import AsyncGeneratorProvider
+
+class Aura(AsyncGeneratorProvider):
+    url = "https://openchat.team"
+    working = True
+    supports_gpt_35_turbo = True
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        proxy: str = None,
+        **kwargs
+    ) -> AsyncResult:
+        headers = {
+            "Accept": "*/*",
+            "Accept-Encoding": "gzip, deflate, br",
+            "Accept-Language": "en-GB,en-US;q=0.9,en;q=0.8",
+            "Content-Type": "application/json",
+            "Origin": cls.url,
+            "Referer": f"{cls.url}/",
+            "Sec-Ch-Ua": '"Not_A Brand";v="8", "Chromium";v="120", "Google Chrome";v="120"',
+            "Sec-Ch-Ua-Mobile": "?0",
+            "Sec-Ch-Ua-Platform": '"Linux"',
+            "Sec-Fetch-Dest": "empty",
+            "Sec-Fetch-Mode": "cors",
+            "Sec-Fetch-Site": "same-origin",
+            "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
+        }
+        async with ClientSession(headers=headers) as session:
+            system_prompt = ""
+            system_message = []
+            for message in messages:
+                if message["role"] == "system":
+                    system_prompt += message["content"]
+                else:
+                    system_message.append(message)
+
+            data = {
+                "model": {
+                    "id": "openchat_v3.2_mistral",
+                    "name": "OpenChat Aura",
+                    "maxLength": 24576,
+                    "tokenLimit": 8192
+                },
+                "messages": system_message,
+                "key": "",
+                "prompt": system_prompt,
+                "temperature": 0.5
+            }
+            async with session.post(f"{cls.url}/api/chat", json=data, proxy=proxy) as response:
+                yield await response.text()
\ No newline at end of file
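
Rough usage sketch (not part of the commit): assuming the g4f.ChatCompletion.create entry point of this release and the Aura export added in __init__.py below, the new provider could be exercised as follows. The model argument is only a placeholder here, since Aura pins its own backend model id ("openchat_v3.2_mistral") in the request payload.

import g4f
from g4f.Provider import Aura

# Route a request through the newly added Aura provider (openchat.team backend).
# System messages are concatenated into the "prompt" field; the rest go to "messages".
response = g4f.ChatCompletion.create(
    model="gpt-3.5-turbo",
    provider=Aura,
    messages=[
        {"role": "system", "content": "You are a concise assistant."},
        {"role": "user", "content": "Say hello."},
    ],
)
print(response)
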
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index bfc02590..ba62a199 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -6,7 +6,7 @@ from .deprecated import *
from .needs_auth import *
from .unfinished import *
from .selenium import *
-
+from .Aura import Aura
from .AiAsk import AiAsk
from .Aichat import Aichat
from .AiChatOnline import AiChatOnline