path: root/g4f/Provider/needs_auth/ThebApi.py
author    Tekky <98614666+xtekky@users.noreply.github.com>    2023-11-18 03:40:09 +0100
committer GitHub <noreply@github.com>    2023-11-18 03:40:09 +0100
commit    ca3eaaffee6464adc7f8a6963461a9433be9e7bf (patch)
tree      2f86c8aa7a9db061441712bf60f956ef91a660e9 /g4f/Provider/needs_auth/ThebApi.py
parent    Merge pull request #1262 from hlohaus/any (diff)
parent    Improve providers (diff)
Diffstat (limited to 'g4f/Provider/needs_auth/ThebApi.py')
-rw-r--r--  g4f/Provider/needs_auth/ThebApi.py  77
1 file changed, 77 insertions, 0 deletions
diff --git a/g4f/Provider/needs_auth/ThebApi.py b/g4f/Provider/needs_auth/ThebApi.py
new file mode 100644
index 00000000..0441f352
--- /dev/null
+++ b/g4f/Provider/needs_auth/ThebApi.py
@@ -0,0 +1,77 @@
+from __future__ import annotations
+
+import requests
+
+from ...typing import Any, CreateResult, Messages
+from ..base_provider import BaseProvider
+
+models = {
+    "theb-ai": "TheB.AI",
+    "gpt-3.5-turbo": "GPT-3.5",
+    "gpt-3.5-turbo-16k": "GPT-3.5-16K",
+    "gpt-4-turbo": "GPT-4 Turbo",
+    "gpt-4": "GPT-4",
+    "gpt-4-32k": "GPT-4 32K",
+    "claude-2": "Claude 2",
+    "claude-1": "Claude",
+    "claude-1-100k": "Claude 100K",
+    "claude-instant-1": "Claude Instant",
+    "claude-instant-1-100k": "Claude Instant 100K",
+    "palm-2": "PaLM 2",
+    "palm-2-codey": "Codey",
+    "vicuna-13b-v1.5": "Vicuna v1.5 13B",
+    "llama-2-7b-chat": "Llama 2 7B",
+    "llama-2-13b-chat": "Llama 2 13B",
+    "llama-2-70b-chat": "Llama 2 70B",
+    "code-llama-7b": "Code Llama 7B",
+    "code-llama-13b": "Code Llama 13B",
+    "code-llama-34b": "Code Llama 34B",
+    "qwen-7b-chat": "Qwen 7B"
+}
+
+class ThebApi(BaseProvider):
+    url = "https://theb.ai"
+    working = True
+    needs_auth = True
+
+    @staticmethod
+    def create_completion(
+        model: str,
+        messages: Messages,
+        stream: bool,
+        auth: str,
+        proxy: str = None,
+        **kwargs
+    ) -> CreateResult:
+        if model and model not in models:
+            raise ValueError(f"Model is not supported: {model}")
+        headers = {
+            'accept': 'application/json',
+            'authorization': f'Bearer {auth}',
+            'content-type': 'application/json',
+        }
+        # response = requests.get("https://api.baizhi.ai/v1/models", headers=headers).json()["data"]
+        # models = dict([(m["id"], m["name"]) for m in response])
+        # print(json.dumps(models, indent=4))
+        data: dict[str, Any] = {
+            "model": model if model else "gpt-3.5-turbo",
+            "messages": messages,
+            "stream": False,
+            "model_params": {
+                "system_prompt": kwargs.get("system_message", "You are ChatGPT, a large language model trained by OpenAI, based on the GPT-3.5 architecture."),
+                "temperature": 1,
+                "top_p": 1,
+                **kwargs
+            }
+        }
+        response = requests.post(
+            "https://api.theb.ai/v1/chat/completions",
+            headers=headers,
+            json=data,
+            proxies={"https": proxy}
+        )
+        try:
+            response.raise_for_status()
+            yield response.json()["choices"][0]["message"]["content"]
+        except Exception as e:
+            raise RuntimeError(f"Response: {next(response.iter_lines()).decode()}") from e
\ No newline at end of file