summaryrefslogtreecommitdiffstats
path: root/g4f/Provider/not_working/Aura.py
diff options
context:
space:
mode:
author    kqlio67 <kqlio67@users.noreply.github.com>    2024-11-06 16:25:09 +0100
committer kqlio67 <kqlio67@users.noreply.github.com>    2024-11-06 16:25:09 +0100
commit    e98793d0a7af43878cf023fb045dd945a82507cf (patch)
tree      205f2318755db4c7ad41a6d13e735c5d48e1450b /g4f/Provider/not_working/Aura.py
parent    Update (g4f/Provider/DeepInfra.py g4f/Provider/__init__.py g4f/Provider/needs_auth/) (diff)
downloadgpt4free-e98793d0a7af43878cf023fb045dd945a82507cf.tar
gpt4free-e98793d0a7af43878cf023fb045dd945a82507cf.tar.gz
gpt4free-e98793d0a7af43878cf023fb045dd945a82507cf.tar.bz2
gpt4free-e98793d0a7af43878cf023fb045dd945a82507cf.tar.lz
gpt4free-e98793d0a7af43878cf023fb045dd945a82507cf.tar.xz
gpt4free-e98793d0a7af43878cf023fb045dd945a82507cf.tar.zst
gpt4free-e98793d0a7af43878cf023fb045dd945a82507cf.zip
Diffstat (limited to 'g4f/Provider/not_working/Aura.py')
-rw-r--r--  g4f/Provider/not_working/Aura.py  49
1 files changed, 49 insertions, 0 deletions
diff --git a/g4f/Provider/not_working/Aura.py b/g4f/Provider/not_working/Aura.py
new file mode 100644
index 00000000..e841d909
--- /dev/null
+++ b/g4f/Provider/not_working/Aura.py
@@ -0,0 +1,49 @@
+from __future__ import annotations
+
+from aiohttp import ClientSession
+
+from ...typing import AsyncResult, Messages
+from ..base_provider import AsyncGeneratorProvider
+from ...requests import get_args_from_browser
+from ...webdriver import WebDriver
+
class Aura(AsyncGeneratorProvider):
    """Streaming chat provider backed by the openchat.team web API.

    Marked ``working = False`` (the upstream endpoint is not reachable);
    kept for reference under ``not_working``.
    """
    url = "https://openchat.team"
    working = False

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        temperature: float = 0.5,
        max_tokens: int = 8192,
        webdriver: WebDriver = None,
        **kwargs
    ) -> AsyncResult:
        """Yield response text chunks from the openchat.team chat endpoint.

        Args:
            model: Requested model name (ignored; the request pins "openchat_3.6").
            messages: Conversation history; "system" messages are extracted and
                sent joined together as the API's "prompt" field.
            proxy: Optional proxy URL forwarded to both the browser-args helper
                and the POST request.
            temperature: Sampling temperature forwarded to the API.
            max_tokens: Sent as the model's "tokenLimit".
            webdriver: Browser session used by get_args_from_browser to collect
                cookies/headers for the ClientSession.

        Yields:
            Decoded text chunks as they arrive from the streaming response.

        Raises:
            aiohttp.ClientResponseError: If the endpoint returns an error status.
        """
        # Reuse the browser's cookies/headers so the request looks like the web UI.
        args = get_args_from_browser(cls.url, webdriver, proxy)
        async with ClientSession(**args) as session:
            # The API takes system text as a single "prompt" string, separate
            # from the regular message list — split the history accordingly.
            new_messages = []
            system_message = []
            for message in messages:
                if message["role"] == "system":
                    system_message.append(message["content"])
                else:
                    new_messages.append(message)
            data = {
                "model": {
                    "id": "openchat_3.6",
                    "name": "OpenChat 3.6 (latest)",
                    "maxLength": 24576,
                    "tokenLimit": max_tokens
                },
                "messages": new_messages,
                "key": "",
                "prompt": "\n".join(system_message),
                "temperature": temperature
            }
            async with session.post(f"{cls.url}/api/chat", json=data, proxy=proxy) as response:
                response.raise_for_status()
                async for chunk in response.content.iter_any():
                    # Fix: bytes.decode takes the keyword "errors", not "error";
                    # the original call raised TypeError on the first chunk.
                    yield chunk.decode(errors="ignore")