summary | refs | log | tree | commit | diff | stats
path: root/g4f/Provider/Liaobots.py
diff options
context:
space:
mode:
author: zukixa <56563509+zukixa@users.noreply.github.com> 2024-08-29 08:03:32 +0200
committer: zukixa <56563509+zukixa@users.noreply.github.com> 2024-08-29 08:03:32 +0200
commitbda2d679275ebd6bc2a3aa84f8fbc93967bead63 (patch)
tree8a36ace98ab138e1eff134a5ed8891fd3c817b5b /g4f/Provider/Liaobots.py
parent: 8 providers improved (diff)
downloadgpt4free-bda2d679275ebd6bc2a3aa84f8fbc93967bead63.tar
gpt4free-bda2d679275ebd6bc2a3aa84f8fbc93967bead63.tar.gz
gpt4free-bda2d679275ebd6bc2a3aa84f8fbc93967bead63.tar.bz2
gpt4free-bda2d679275ebd6bc2a3aa84f8fbc93967bead63.tar.lz
gpt4free-bda2d679275ebd6bc2a3aa84f8fbc93967bead63.tar.xz
gpt4free-bda2d679275ebd6bc2a3aa84f8fbc93967bead63.tar.zst
gpt4free-bda2d679275ebd6bc2a3aa84f8fbc93967bead63.zip
Diffstat (limited to 'g4f/Provider/Liaobots.py')
-rw-r--r--  g4f/Provider/Liaobots.py  164
1 file changed, 67 insertions, 97 deletions
diff --git a/g4f/Provider/Liaobots.py b/g4f/Provider/Liaobots.py
index af90860d..2034c34a 100644
--- a/g4f/Provider/Liaobots.py
+++ b/g4f/Provider/Liaobots.py
@@ -1,7 +1,7 @@
from __future__ import annotations
import uuid
-
+import requests
from aiohttp import ClientSession, BaseConnector
from ..typing import AsyncResult, Messages
@@ -9,74 +9,6 @@ from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from .helper import get_connector
from ..requests import raise_for_status
-models = {
- "gpt-4o-mini-free": {
- "id": "gpt-4o-mini-free",
- "name": "GPT-4o-Mini-Free",
- "model": "ChatGPT",
- "provider": "OpenAI",
- "maxLength": 31200,
- "tokenLimit": 7800,
- "context": "8K",
- },
- "gpt-4o-mini": {
- "id": "gpt-4o-mini",
- "name": "GPT-4o-Mini",
- "model": "ChatGPT",
- "provider": "OpenAI",
- "maxLength": 260000,
- "tokenLimit": 126000,
- "context": "128K",
- },
- "gpt-4o-free": {
- "context": "8K",
- "id": "gpt-4o-free",
- "maxLength": 31200,
- "model": "ChatGPT",
- "name": "GPT-4o-free",
- "provider": "OpenAI",
- "tokenLimit": 7800,
- },
- "gpt-4-turbo-2024-04-09": {
- "id": "gpt-4-turbo-2024-04-09",
- "name": "GPT-4-Turbo",
- "model": "ChatGPT",
- "provider": "OpenAI",
- "maxLength": 260000,
- "tokenLimit": 126000,
- "context": "128K",
- },
- "gpt-4o": {
- "context": "128K",
- "id": "gpt-4o",
- "maxLength": 124000,
- "model": "ChatGPT",
- "name": "GPT-4o",
- "provider": "OpenAI",
- "tokenLimit": 62000,
- },
- "gpt-4-0613": {
- "id": "gpt-4-0613",
- "name": "GPT-4",
- "model": "ChatGPT",
- "provider": "OpenAI",
- "maxLength": 260000,
- "tokenLimit": 126000,
- "context": "128K",
- },
- "gpt-4-turbo": {
- "id": "gpt-4-turbo",
- "name": "GPT-4-Turbo",
- "model": "ChatGPT",
- "provider": "OpenAI",
- "maxLength": 260000,
- "tokenLimit": 126000,
- "context": "128K",
- },
-}
-
-
-
class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://liaobots.site"
working = True
@@ -85,26 +17,67 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
supports_gpt_35_turbo = True
supports_gpt_4 = True
default_model = "gpt-4o"
- models = list(models.keys())
+ models = None
model_aliases = {
"gpt-4o-mini": "gpt-4o-mini-free",
"gpt-4o": "gpt-4o-free",
"gpt-4-turbo": "gpt-4-turbo-2024-04-09",
- "gpt-4-": "gpt-4-0613",
+ "gpt-4o": "gpt-4o-2024-08-06",
+ "gpt-4": "gpt-4-0613",
+
"claude-3-opus": "claude-3-opus-20240229",
"claude-3-opus": "claude-3-opus-20240229-aws",
"claude-3-opus": "claude-3-opus-20240229-gcp",
"claude-3-sonnet": "claude-3-sonnet-20240229",
"claude-3-5-sonnet": "claude-3-5-sonnet-20240620",
"claude-3-haiku": "claude-3-haiku-20240307",
- "gemini-pro": "gemini-1.5-pro-latest",
+ "claude-2.1": "claude-2.1",
+
"gemini-pro": "gemini-1.0-pro-latest",
"gemini-flash": "gemini-1.5-flash-latest",
+ "gemini-pro": "gemini-1.5-pro-latest",
}
_auth_code = ""
_cookie_jar = None
@classmethod
+ def get_models(cls):
+ if cls.models is None:
+ url = 'https://liaobots.work/api/models'
+ headers = {
+ 'accept': '/',
+ 'accept-language': 'en-US,en;q=0.9',
+ 'content-type': 'application/json',
+ 'cookie': 'gkp2=ehnhUPJtkCgMmod8Sbxn',
+ 'origin': 'https://liaobots.work',
+ 'priority': 'u=1, i',
+ 'referer': 'https://liaobots.work/',
+ 'sec-ch-ua': '"Chromium";v="127", "Not)A;Brand";v="99"',
+ 'sec-ch-ua-mobile': '?0',
+ 'sec-ch-ua-platform': '"Linux"',
+ 'sec-fetch-dest': 'empty',
+ 'sec-fetch-mode': 'cors',
+ 'sec-fetch-site': 'same-origin',
+ 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36'
+ }
+ data = {'key': ''}
+
+ response = requests.post(url, headers=headers, json=data)
+
+ if response.status_code == 200:
+ try:
+ models_data = response.json()
+ cls.models = {model['id']: model for model in models_data}
+ except (ValueError, KeyError) as e:
+ print(f"Error processing JSON response: {e}")
+ cls.models = {}
+ else:
+ print(f"Request failed with status code: {response.status_code}")
+ cls.models = {}
+
+ return cls.models
+
+ @classmethod
async def create_async_generator(
cls,
model: str,
@@ -126,9 +99,10 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
cookie_jar=cls._cookie_jar,
connector=get_connector(connector, proxy, True)
) as session:
+ models = cls.get_models()
data = {
"conversationId": str(uuid.uuid4()),
- "model": models[model],
+ "model": models[cls.get_model(model)],
"messages": messages,
"key": "",
"prompt": kwargs.get("system_message", "You are a helpful assistant."),
@@ -141,20 +115,11 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
) as response:
await raise_for_status(response)
try:
- async with session.post(
- "https://liaobots.work/api/user",
- json={"authcode": cls._auth_code},
- verify_ssl=False
- ) as response:
- await raise_for_status(response)
- cls._auth_code = (await response.json(content_type=None))["authCode"]
- if not cls._auth_code:
- raise RuntimeError("Empty auth code")
- cls._cookie_jar = session.cookie_jar
+ await cls.ensure_auth_code(session)
async with session.post(
"https://liaobots.work/api/chat",
json=data,
- headers={"x-auth-code": cls._auth_code},
+ headers={"x-auth-code": cls._auth_code},
verify_ssl=False
) as response:
await raise_for_status(response)
@@ -164,16 +129,7 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
if chunk:
yield chunk.decode(errors="ignore")
except:
- async with session.post(
- "https://liaobots.work/api/user",
- json={"authcode": "pTIQr4FTnVRfr"},
- verify_ssl=False
- ) as response:
- await raise_for_status(response)
- cls._auth_code = (await response.json(content_type=None))["authCode"]
- if not cls._auth_code:
- raise RuntimeError("Empty auth code")
- cls._cookie_jar = session.cookie_jar
+ await cls.initialize_auth_code(session)
async with session.post(
"https://liaobots.work/api/chat",
json=data,
@@ -186,7 +142,6 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
raise RuntimeError("Invalid session")
if chunk:
yield chunk.decode(errors="ignore")
-
@classmethod
def get_model(cls, model: str) -> str:
"""
@@ -194,15 +149,16 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
"""
if model in cls.model_aliases:
model = cls.model_aliases[model]
+ models = cls.get_models()
if model not in models:
raise ValueError(f"Model '{model}' is not supported.")
return model
-
@classmethod
def is_supported(cls, model: str) -> bool:
"""
Check if the given model is supported.
"""
+ models = cls.get_models()
return model in models or model in cls.model_aliases
@classmethod
@@ -220,7 +176,6 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
if not cls._auth_code:
raise RuntimeError("Empty auth code")
cls._cookie_jar = session.cookie_jar
-
@classmethod
async def ensure_auth_code(cls, session: ClientSession) -> None:
"""
@@ -228,3 +183,18 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
"""
if not cls._auth_code:
await cls.initialize_auth_code(session)
+
+ @classmethod
+ async def refresh_auth_code(cls, session: ClientSession) -> None:
+ """
+ Refresh the auth code by making a new request.
+ """
+ await cls.initialize_auth_code(session)
+
+ @classmethod
+ async def get_auth_code(cls, session: ClientSession) -> str:
+ """
+ Get the current auth code, initializing it if necessary.
+ """
+ await cls.ensure_auth_code(session)
+ return cls._auth_code \ No newline at end of file