From 6a61cf811655fa87dbcb196025cc0b6040502293 Mon Sep 17 00:00:00 2001
From: Heiner Lohaus
Date: Wed, 4 Oct 2023 07:20:51 +0200
Subject: Sort providers in new dirs

---
 g4f/Provider/needs_auth/HuggingChat.py | 74 ++++++++++++++++++++++++++++++++++
 1 file changed, 74 insertions(+)
 create mode 100644 g4f/Provider/needs_auth/HuggingChat.py

(limited to 'g4f/Provider/needs_auth/HuggingChat.py')

diff --git a/g4f/Provider/needs_auth/HuggingChat.py b/g4f/Provider/needs_auth/HuggingChat.py
new file mode 100644
index 00000000..1d500338
--- /dev/null
+++ b/g4f/Provider/needs_auth/HuggingChat.py
@@ -0,0 +1,74 @@
+from __future__ import annotations
+
+import json, uuid
+
+from aiohttp import ClientSession
+
+from ...typing import AsyncGenerator
+from ..base_provider import AsyncGeneratorProvider, format_prompt, get_cookies
+
+
+class HuggingChat(AsyncGeneratorProvider):
+    url = "https://huggingface.co/chat"
+    needs_auth = True
+    working = True
+    model = "meta-llama/Llama-2-70b-chat-hf"
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: list[dict[str, str]],
+        stream: bool = True,
+        proxy: str = None,
+        cookies: dict = None,
+        **kwargs
+    ) -> AsyncGenerator:
+        model = model if model else cls.model
+        if proxy and "://" not in proxy:
+            proxy = f"http://{proxy}"
+        if not cookies:
+            cookies = get_cookies(".huggingface.co")
+
+        headers = {
+            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36',
+        }
+        async with ClientSession(
+            cookies=cookies,
+            headers=headers
+        ) as session:
+            async with session.post(f"{cls.url}/conversation", json={"model": model}, proxy=proxy) as response:
+                conversation_id = (await response.json())["conversationId"]
+
+            send = {
+                "id": str(uuid.uuid4()),
+                "inputs": format_prompt(messages),
+                "is_retry": False,
+                "response_id": str(uuid.uuid4()),
+                "web_search": False
+            }
+            async with session.post(f"{cls.url}/conversation/{conversation_id}", json=send, proxy=proxy) as response:
+                async for line in response.content:
+                    line = json.loads(line[:-1])
+                    if "type" not in line:
+                        raise RuntimeError(f"Response: {line}")
+                    elif line["type"] == "stream":
+                        yield line["token"]
+                    elif line["type"] == "finalAnswer":
+                        break
+
+            async with session.delete(f"{cls.url}/conversation/{conversation_id}", proxy=proxy) as response:
+                response.raise_for_status()
+
+
+    @classmethod
+    @property
+    def params(cls):
+        params = [
+            ("model", "str"),
+            ("messages", "list[dict[str, str]]"),
+            ("stream", "bool"),
+            ("proxy", "str"),
+        ]
+        param = ", ".join([": ".join(p) for p in params])
+        return f"g4f.provider.{cls.__name__} supports: ({param})"
--
cgit v1.2.3
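
Usage note (not part of the patch): a minimal sketch of driving the new provider directly. It assumes the needs_auth directory added here is importable as g4f.Provider.needs_auth.HuggingChat, and that get_cookies(".huggingface.co") can pick up a logged-in huggingface.co session from a local browser; without valid cookies the conversation POST will fail, since the class sets needs_auth = True.

import asyncio

# Hypothetical import path, assuming the new needs_auth directory is a package
# exposed next to the existing providers.
from g4f.Provider.needs_auth.HuggingChat import HuggingChat

async def main():
    messages = [{"role": "user", "content": "Hello, who are you?"}]
    # An empty model falls back to cls.model (meta-llama/Llama-2-70b-chat-hf);
    # cookies default to whatever get_cookies(".huggingface.co") finds.
    async for token in HuggingChat.create_async_generator(model="", messages=messages):
        print(token, end="", flush=True)

asyncio.run(main())

Streaming stops at the "finalAnswer" event, after which the provider deletes the conversation it created on huggingface.co/chat.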