from __future__ import annotations
import json
from aiohttp import ClientSession
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin


class FlowGpt(AsyncGeneratorProvider, ProviderModelMixin):
    """Async streaming provider for flowgpt.com's anonymous chat endpoint."""
    url = "https://flowgpt.com/chat"
    working = True
    supports_gpt_35_turbo = True
    supports_gpt_4 = True
    supports_message_history = True
    default_model = "gpt-3.5-turbo"
    models = [
        "gpt-4",
        "gpt-3.5-turbo",
        "gpt-3.5-long",
        "google-gemini",
        "claude-v2",
        "llama2-13b"
    ]
    model_aliases = {
        "gemini": "google-gemini",
        "gemini-pro": "google-gemini"
    }

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        model = cls.get_model(model)
        headers = {
            "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:122.0) Gecko/20100101 Firefox/122.0",
            "Accept": "*/*",
            "Accept-Language": "en-US;q=0.7,en;q=0.3",
            "Accept-Encoding": "gzip, deflate, br",
            "Referer": "https://flowgpt.com/",
            "Content-Type": "application/json",
            "Authorization": "Bearer null",
            "Origin": "https://flowgpt.com",
            "Connection": "keep-alive",
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "same-site",
            "TE": "trailers"
        }
        async with ClientSession(headers=headers) as session:
            # The last message is sent as the question; any earlier messages
            # are passed as history after a fixed assistant greeting.
            data = {
                "model": model,
                "nsfw": False,
                "question": messages[-1]["content"],
                "history": [{"role": "assistant", "content": "Hello, how can I help you today?"}, *messages[:-1]],
                "system": kwargs.get("system_message", "You are helpful assistant. Follow the user's instructions carefully."),
                "temperature": kwargs.get("temperature", 0.7),
                "promptId": f"model-{model}",
                "documentIds": [],
                "chatFileDocumentIds": [],
                "generateImage": False,
                "generateAudio": False
            }
            async with session.post("https://backend-k8s.flowgpt.com/v2/chat-anonymous", json=data, proxy=proxy) as response:
                response.raise_for_status()
                # The endpoint streams newline-delimited JSON events; only
                # "text" events carry response tokens.
                async for chunk in response.content:
                    if chunk.strip():
                        message = json.loads(chunk)
                        if "event" not in message:
                            continue
                        if message["event"] == "text":
                            yield message["data"]
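# Usage sketch (illustrative, not part of the provider): assuming this module
# lives in a g4f-style provider package, the stream can be consumed roughly as
# below. It is commented out because the relative imports above mean the file
# is not intended to be executed directly.
#
#   import asyncio
#
#   async def _demo():
#       async for token in FlowGpt.create_async_generator(
#           model="gpt-3.5-turbo",
#           messages=[{"role": "user", "content": "Hello!"}],
#       ):
#           print(token, end="", flush=True)
#
#   asyncio.run(_demo())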