summaryrefslogtreecommitdiffstats
path: root/g4f/Provider/needs_auth/ThebApi.py
blob: 8ec7bda84e84a03c29a016c8faee494e3e1dcbc6 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
from __future__ import annotations

import requests

from ...typing import Any, CreateResult, Messages
from ..base_provider import AbstractProvider

# Maps the model identifiers accepted by this provider to the human-readable
# names reported by the TheB.AI API (see the commented-out snippet in
# ThebApi.create_completion that was used to generate this table).
models = {
    "theb-ai": "TheB.AI",
    "gpt-3.5-turbo": "GPT-3.5",
    "gpt-3.5-turbo-16k": "GPT-3.5-16K",
    "gpt-4-turbo": "GPT-4 Turbo",
    "gpt-4": "GPT-4",
    "gpt-4-32k": "GPT-4 32K",
    "claude-2": "Claude 2",
    "claude-1": "Claude",
    "claude-1-100k": "Claude 100K",
    "claude-instant-1": "Claude Instant",
    "claude-instant-1-100k": "Claude Instant 100K",
    "palm-2": "PaLM 2",
    "palm-2-codey": "Codey",
    "vicuna-13b-v1.5": "Vicuna v1.5 13B",
    "llama-2-7b-chat": "Llama 2 7B",
    "llama-2-13b-chat": "Llama 2 13B",
    "llama-2-70b-chat": "Llama 2 70B",
    "code-llama-7b": "Code Llama 7B",
    "code-llama-13b": "Code Llama 13B",
    "code-llama-34b": "Code Llama 34B",
    "qwen-7b-chat": "Qwen 7B"
}

class ThebApi(AbstractProvider):
    """Provider for the TheB.AI chat-completion API (requires an API key)."""
    url = "https://theb.ai"
    working = True
    needs_auth = True

    @staticmethod
    def create_completion(
        model: str,
        messages: Messages,
        stream: bool,
        auth: str,
        proxy: str = None,
        **kwargs
    ) -> CreateResult:
        """Yield the completion text for *messages* from the TheB.AI API.

        Args:
            model: Key of the module-level ``models`` mapping; falls back to
                ``"gpt-3.5-turbo"`` when falsy.
            messages: Chat history in OpenAI message format.
            stream: Ignored; the request is always sent non-streaming.
            auth: Bearer token used for the ``Authorization`` header.
            proxy: Optional HTTPS proxy URL.
            **kwargs: ``system_message`` overrides the default system prompt;
                any remaining keys are forwarded inside ``model_params``.

        Raises:
            ValueError: If *model* is given but not in ``models``.
            RuntimeError: If the API responds with an error or an
                unexpected payload.
        """
        if model and model not in models:
            raise ValueError(f"Model is not supported: {model}")
        headers = {
            'accept': 'application/json',
            'authorization': f'Bearer {auth}',
            'content-type': 'application/json',
        }
        # NOTE: snippet used to regenerate the `models` table above:
        # response = requests.get("https://api.baizhi.ai/v1/models", headers=headers).json()["data"]
        # models = dict([(m["id"], m["name"]) for m in response])
        # print(json.dumps(models, indent=4))

        # Pop system_message so it is not also spread into model_params via
        # **kwargs under the wrong key, duplicating the derived system_prompt.
        system_prompt = kwargs.pop(
            "system_message",
            "You are ChatGPT, a large language model trained by OpenAI, based on the GPT-3.5 architecture."
        )
        data: dict[str, Any] = {
            "model": model if model else "gpt-3.5-turbo",
            "messages": messages,
            "stream": False,
            "model_params": {
                "system_prompt": system_prompt,
                "temperature": 1,
                "top_p": 1,
                **kwargs
            }
        }
        response = requests.post(
            "https://api.theb.ai/v1/chat/completions",
            headers=headers,
            json=data,
            proxies={"https": proxy}
        )
        try:
            response.raise_for_status()
            content = response.json()["choices"][0]["message"]["content"]
        except Exception as e:
            # `except Exception` (not bare `except:`) so GeneratorExit and
            # KeyboardInterrupt propagate; `response.text` is safe even for an
            # empty body, unlike next(response.iter_lines()) which would raise
            # StopIteration inside this generator.
            raise RuntimeError(f"Response: {response.text}") from e
        yield content