path: root/g4f/Provider/Wuguokai.py
from __future__ import annotations

import random

import requests

from ..typing import Any, CreateResult
from .base_provider import BaseProvider


class Wuguokai(BaseProvider):
    url = 'https://chat.wuguokai.xyz'
    supports_gpt_35_turbo = True
    working = True

    @staticmethod
    def create_completion(
        model: str,
        messages: list[dict[str, str]],
        stream: bool,
        **kwargs: Any,
    ) -> CreateResult:
        # Flatten the chat history into a single plain-text prompt.
        base = ''
        for message in messages:
            base += '%s: %s\n' % (message['role'], message['content'])
        base += 'assistant:'

        headers = {
            'authority': 'ai-api.wuguokai.xyz',
            'accept': 'application/json, text/plain, */*',
            'accept-language': 'id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7',
            'content-type': 'application/json',
            'origin': 'https://chat.wuguokai.xyz',
            'referer': 'https://chat.wuguokai.xyz/',
            'sec-ch-ua': '"Not.A/Brand";v="8", "Chromium";v="114", "Google Chrome";v="114"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"Windows"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'same-site',
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36'
        }
        data = {
            "prompt": base,
            "options": {},
            "userId": f"#/chat/{random.randint(1, 99999999)}",
            "usingContext": True
        }
        response = requests.post(
            "https://ai-api20.wuguokai.xyz/api/chat-process",
            headers=headers,
            timeout=3,
            json=data,
            proxies=kwargs.get('proxy', {})
        )
        if response.status_code != 200:
            raise Exception(f"Error: {response.status_code} {response.reason}")
        # The response echoes the prompt followed by a Chinese notice ("if the answer
        # fails, retry or refresh the page a few times"); the answer comes after it.
        # The provider does not stream: the full reply is yielded as a single chunk.
        _split = response.text.split("> 若回答失败请重试或多刷新几次界面后重试")
        if len(_split) > 1:
            yield _split[1].strip()
        else:
            yield _split[0].strip()

    @classmethod
    @property
    def params(cls):
        params = [
            ("model", "str"),
            ("messages", "list[dict[str, str]]"),
            ("stream", "bool")
        ]
        param = ", ".join([": ".join(p) for p in params])
        return f"g4f.provider.{cls.__name__} supports: ({param})"