from __future__ import annotations

import uuid, time, random, json
from aiohttp import ClientSession

from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider
from .helper import format_prompt, get_random_string


class FakeGpt(AsyncGeneratorProvider):
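    """Provider backed by the shared ChatGPT proxy at chat-shared2.zhile.io.

    It borrows a random shared token from /api/loads, logs in with a throwaway
    session password, and streams completions from /api/conversation.
    """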
    url                   = "https://chat-shared2.zhile.io"
    supports_gpt_35_turbo = True
    working               = True
    _access_token         = None
    _cookie_jar           = None

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
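        """Yield the assistant's reply as incremental text chunks."""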
        headers = {
            "Accept-Language": "en-US",
            "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36",
            "Referer": "https://chat-shared2.zhile.io/?v=2",
            "sec-ch-ua": '"Google Chrome";v="117", "Not;A=Brand";v="8", "Chromium";v="117"',
            "sec-ch-ua-platform": '"Linux"',
            "sec-ch-ua-mobile": "?0",
        }
        async with ClientSession(headers=headers, cookie_jar=cls._cookie_jar) as session:
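            # Log in once with a randomly chosen shared token and cache the result across calls.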
            if not cls._access_token:
                async with session.get(f"{cls.url}/api/loads", params={"t": int(time.time())}, proxy=proxy) as response:
                    response.raise_for_status()
                    loads = (await response.json())["loads"]
                    token_ids = [t["token_id"] for t in loads]
                data = {
                    "token_key": random.choice(token_ids),
                    "session_password": get_random_string()
                }
                async with session.post(f"{cls.url}/auth/login", data=data, proxy=proxy) as response:
                    response.raise_for_status()
                async with session.get(f"{cls.url}/api/auth/session", proxy=proxy) as response:
                    response.raise_for_status()
                    cls._access_token = (await response.json())["accessToken"]
                    cls._cookie_jar = session.cookie_jar
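            # Conversation requests authenticate with the cached access token via the X-Authorization header.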
            headers = {
                "Content-Type": "application/json",
                "Accept": "text/event-stream",
                "X-Authorization": f"Bearer {cls._access_token}",
            }
            prompt = format_prompt(messages)
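            # Build a ChatGPT-style payload that sends the formatted conversation as a single user message.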
            data = {
                "action": "next",
                "messages": [
                    {
                        "id": str(uuid.uuid4()),
                        "author": {"role": "user"},
                        "content": {"content_type": "text", "parts": [prompt]},
                        "metadata": {},
                    }
                ],
                "parent_message_id": str(uuid.uuid4()),
                "model": "text-davinci-002-render-sha",
                "plugin_ids": [],
                "timezone_offset_min": -120,
                "suggestions": [],
                "history_and_training_disabled": True,
                "arkose_token": "",
                "force_paragen": False,
            }
            last_message = ""
            async with session.post(f"{cls.url}/api/conversation", json=data, headers=headers, proxy=proxy) as response:
                async for line in response.content:
                    if line.startswith(b"data: "):
                        line = line[6:]
                        if line == b"[DONE]":
                            break
                        try:
                            event = json.loads(line)
                            if event["message"]["metadata"]["message_type"] == "next":
                                new_message = event["message"]["content"]["parts"][0]
                                yield new_message[len(last_message):]
                                last_message = new_message
                        except Exception:
                            # Skip keep-alive frames and payloads that carry no text delta.
                            continue
            if not last_message:
                raise RuntimeError("No valid response")