From 7294abc890c377d75c6c8c932620c2e2c8b3f0f9 Mon Sep 17 00:00:00 2001
From: Heiner Lohaus
Date: Mon, 28 Aug 2023 01:43:45 +0200
Subject: Add async support for H2o

Add format_prompt helper
Fix create_completion in AsyncGeneratorProvider
Move get_cookies from constructor to function
Add new HuggingChat implementation
Remove needs_auth from Liaobots
Add static cache for access_token in OpenaiChat
Add OpenAssistant provider
Support stream and async in You
Support async and add userId in Yqcloud
Add log_time module
---
 g4f/Provider/Liaobots.py | 113 +++++++++++++++++++++++++++--------------------
 1 file changed, 66 insertions(+), 47 deletions(-)

(limited to 'g4f/Provider/Liaobots.py')

diff --git a/g4f/Provider/Liaobots.py b/g4f/Provider/Liaobots.py
index a8bb83e8..e69a565e 100644
--- a/g4f/Provider/Liaobots.py
+++ b/g4f/Provider/Liaobots.py
@@ -1,59 +1,77 @@
-import uuid, requests
+import uuid
+import json
+from aiohttp import ClientSession
 
-from ..typing import Any, CreateResult
-from .base_provider import BaseProvider
+from ..typing import AsyncGenerator
+from .base_provider import AsyncGeneratorProvider
+models = {
+    "gpt-4": {
+        "id": "gpt-4",
+        "name": "GPT-4",
+        "maxLength": 24000,
+        "tokenLimit": 8000,
+    },
+    "gpt-3.5-turbo": {
+        "id": "gpt-3.5-turbo",
+        "name": "GPT-3.5",
+        "maxLength": 12000,
+        "tokenLimit": 4000,
+    },
+    "gpt-3.5-turbo-16k": {
+        "id": "gpt-3.5-turbo-16k",
+        "name": "GPT-3.5-16k",
+        "maxLength": 48000,
+        "tokenLimit": 16000,
+    },
+}
 
 
-class Liaobots(BaseProvider):
-    url: str = "https://liaobots.com"
-    supports_stream = True
-    needs_auth = True
-    supports_gpt_35_turbo = True
-    supports_gpt_4 = True
+class Liaobots(AsyncGeneratorProvider):
+    url = "https://liaobots.com"
+    supports_stream = True
+    supports_gpt_35_turbo = True
+    supports_gpt_4 = True
+    _auth_code = None
 
-    @staticmethod
-    def create_completion(
+    @classmethod
+    async def create_async_generator(
+        cls,
         model: str,
         messages: list[dict[str, str]],
-        stream: bool, **kwargs: Any) -> CreateResult:
-
+        auth: str = None,
+        proxy: str = None,
+        **kwargs
+    ) -> AsyncGenerator:
+        if proxy and "://" not in proxy:
+            proxy = f"http://{proxy}"
         headers = {
-            "authority" : "liaobots.com",
-            "content-type" : "application/json",
-            "origin" : "https://liaobots.com",
-            "referer" : "https://liaobots.com/",
-            "user-agent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36",
-            "x-auth-code" : str(kwargs.get("auth")),
-        }
-
-        models = {
-            "gpt-4": {
-                "id": "gpt-4",
-                "name": "GPT-4",
-                "maxLength": 24000,
-                "tokenLimit": 8000,
-            },
-            "gpt-3.5-turbo": {
-                "id": "gpt-3.5-turbo",
-                "name": "GPT-3.5",
-                "maxLength": 12000,
-                "tokenLimit": 4000,
-            },
-        }
-        json_data = {
-            "conversationId": str(uuid.uuid4()),
-            "model" : models[model],
-            "messages" : messages,
-            "key" : "",
-            "prompt" : "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.",
+            "authority": "liaobots.com",
+            "content-type": "application/json",
+            "origin": "https://liaobots.com",
+            "referer": "https://liaobots.com/",
+            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36",
         }
+        async with ClientSession(
+            headers=headers
+        ) as session:
+            model = model if model in models else "gpt-3.5-turbo"
+            auth_code = auth if isinstance(auth, str) else cls._auth_code
+            if not auth_code:
+                async with session.post("https://liaobots.com/api/user", proxy=proxy, json={"authcode": ""}) as response:
+                    response.raise_for_status()
+                    auth_code = cls._auth_code = json.loads((await response.text()))["authCode"]
+            data = {
+                "conversationId": str(uuid.uuid4()),
+                "model": models[model],
+                "messages": messages,
+                "key": "",
+                "prompt": "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully.",
+            }
+            async with session.post("https://liaobots.com/api/chat", proxy=proxy, json=data, headers={"x-auth-code": auth_code}) as response:
+                response.raise_for_status()
+                async for line in response.content:
+                    yield line.decode("utf-8")
 
-        response = requests.post("https://liaobots.com/api/chat",
-            headers=headers, json=json_data, stream=True)
-
-        response.raise_for_status()
-        for token in response.iter_content(chunk_size=2046):
-            yield token.decode("utf-8")
 
     @classmethod
     @property
@@ -62,6 +80,7 @@ class Liaobots(BaseProvider):
             ("model", "str"),
             ("messages", "list[dict[str, str]]"),
             ("stream", "bool"),
+            ("proxy", "str"),
             ("auth", "str"),
         ]
         param = ", ".join([": ".join(p) for p in params])
--
cgit v1.2.3
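
For context (not part of the patch): a minimal sketch of how the reworked provider could be driven after this change. It assumes the package still re-exports Liaobots from g4f.Provider and that messages use the usual role/content dict format; when no auth argument is passed, create_async_generator fetches an auth code from /api/user once and caches it in the class-level _auth_code, as shown in the diff above.

import asyncio

from g4f.Provider import Liaobots  # assumed import path; adjust to your checkout


async def main() -> None:
    # Stream tokens from the async generator added in this commit.
    # Without an explicit `auth` value the provider requests and caches
    # an auth code on first use (see cls._auth_code in the diff).
    async for token in Liaobots.create_async_generator(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "Say hello"}],
    ):
        print(token, end="", flush=True)


if __name__ == "__main__":
    asyncio.run(main())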