From 926ddfd5438ca06840bdff9b9bd21ddcd8863aa9 Mon Sep 17 00:00:00 2001
From: Heiner Lohaus
Date: Mon, 8 Apr 2024 07:24:00 +0200
Subject: Add WhiteRabbitNeo Provider, Many tiny improvements in the gui

---
 g4f/Provider/HuggingFace.py    |  6 ++++-
 g4f/Provider/WhiteRabbitNeo.py | 57 ++++++++++++++++++++++++++++++++++++++++++
 g4f/Provider/__init__.py       |  1 +
 3 files changed, 63 insertions(+), 1 deletion(-)
 create mode 100644 g4f/Provider/WhiteRabbitNeo.py

(limited to 'g4f/Provider')

diff --git a/g4f/Provider/HuggingFace.py b/g4f/Provider/HuggingFace.py
index 647780fd..6a05c26e 100644
--- a/g4f/Provider/HuggingFace.py
+++ b/g4f/Provider/HuggingFace.py
@@ -13,6 +13,10 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
     url = "https://huggingface.co/chat"
     working = True
     supports_message_history = True
+    models = [
+        "mistralai/Mixtral-8x7B-Instruct-v0.1",
+        "mistralai/Mistral-7B-Instruct-v0.2"
+    ]
     default_model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
 
     @classmethod
@@ -29,7 +33,7 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
         temperature: float = 0.7,
         **kwargs
     ) -> AsyncResult:
-        model = cls.get_model(model)
+        model = cls.get_model(model) if not model else model
         headers = {}
         if api_key is not None:
             headers["Authorization"] = f"Bearer {api_key}"
diff --git a/g4f/Provider/WhiteRabbitNeo.py b/g4f/Provider/WhiteRabbitNeo.py
new file mode 100644
index 00000000..339434e6
--- /dev/null
+++ b/g4f/Provider/WhiteRabbitNeo.py
@@ -0,0 +1,57 @@
+from __future__ import annotations
+
+from aiohttp import ClientSession, BaseConnector
+
+from ..typing import AsyncResult, Messages, Cookies
+from ..requests.raise_for_status import raise_for_status
+from .base_provider import AsyncGeneratorProvider
+from .helper import get_cookies, get_connector, get_random_string
+
+class WhiteRabbitNeo(AsyncGeneratorProvider):
+    url = "https://www.whiterabbitneo.com"
+    working = True
+    supports_message_history = True
+    needs_auth = True
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        cookies: Cookies = None,
+        connector: BaseConnector = None,
+        proxy: str = None,
+        **kwargs
+    ) -> AsyncResult:
+        if cookies is None:
+            cookies = get_cookies("www.whiterabbitneo.com")
+        headers = {
+            "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:123.0) Gecko/20100101 Firefox/123.0",
+            "Accept": "*/*",
+            "Accept-Language": "de,en-US;q=0.7,en;q=0.3",
+            "Accept-Encoding": "gzip, deflate, br",
+            "Referer": f"{cls.url}/",
+            "Content-Type": "text/plain;charset=UTF-8",
+            "Origin": cls.url,
+            "Connection": "keep-alive",
+            "Sec-Fetch-Dest": "empty",
+            "Sec-Fetch-Mode": "cors",
+            "Sec-Fetch-Site": "same-origin",
+            "TE": "trailers"
+        }
+        async with ClientSession(
+            headers=headers,
+            cookies=cookies,
+            connector=get_connector(connector, proxy)
+        ) as session:
+            data = {
+                "messages": messages,
+                "id": get_random_string(6),
+                "enhancePrompt": False,
+                "useFunctions": False
+            }
+            async with session.post(f"{cls.url}/api/chat", json=data, proxy=proxy) as response:
+                await raise_for_status(response)
+                async for chunk in response.content.iter_any():
+                    if chunk:
+                        yield chunk.decode(errors="ignore")
\ No newline at end of file
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index 1db29e19..2c6512f0 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -37,6 +37,7 @@ from .Local import Local
 from .PerplexityLabs import PerplexityLabs
 from .Pi import Pi
 from .Vercel import Vercel
+from .WhiteRabbitNeo import WhiteRabbitNeo
 from .You import You
 
 import sys
--
cgit v1.2.3
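Usage note (not part of the patch): a minimal sketch of how the new WhiteRabbitNeo provider might be driven directly through the create_async_generator classmethod it defines. The call shape below is an assumption built only from the signature in this diff; the provider is marked needs_auth, so logged-in browser cookies for www.whiterabbitneo.com must be available for get_cookies() to pick up.

    import asyncio
    from g4f.Provider import WhiteRabbitNeo

    async def main():
        # model is not used by this provider; messages follow the usual role/content shape
        async for chunk in WhiteRabbitNeo.create_async_generator(
            model="",
            messages=[{"role": "user", "content": "Hello"}],
        ):
            print(chunk, end="", flush=True)

    asyncio.run(main())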
From 24345bc07bf77bdfa4bb799de62233ffdb1c5d73 Mon Sep 17 00:00:00 2001
From: Heiner Lohaus
Date: Mon, 8 Apr 2024 23:27:54 +0200
Subject: Add DuckDuckGo Provider, Add SpeechRecognition to gui

---
 g4f/Provider/DuckDuckGo.py | 64 ++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 64 insertions(+)
 create mode 100644 g4f/Provider/DuckDuckGo.py

(limited to 'g4f/Provider')

diff --git a/g4f/Provider/DuckDuckGo.py b/g4f/Provider/DuckDuckGo.py
new file mode 100644
index 00000000..5269ced3
--- /dev/null
+++ b/g4f/Provider/DuckDuckGo.py
@@ -0,0 +1,64 @@
+from __future__ import annotations
+
+import json
+import aiohttp
+
+from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
+from ..typing import AsyncResult, Messages
+from ..requests.raise_for_status import raise_for_status
+
+class DuckDuckGo(AsyncGeneratorProvider, ProviderModelMixin):
+    url = "https://duckduckgo.com/duckchat"
+    working = True
+    supports_gpt_35_turbo = True
+    supports_message_history = True
+
+    default_model = "gpt-3.5-turbo-0125"
+    models = ["gpt-3.5-turbo-0125", "claude-instant-1.2"]
+    model_aliases = {"gpt-3.5-turbo": "gpt-3.5-turbo-0125"}
+
+    status_url = "https://duckduckgo.com/duckchat/v1/status"
+    chat_url = "https://duckduckgo.com/duckchat/v1/chat"
+    user_agent = 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:123.0) Gecko/20100101 Firefox/123.0'
+    headers = {
+        'User-Agent': user_agent,
+        'Accept': 'text/event-stream',
+        'Accept-Language': 'de,en-US;q=0.7,en;q=0.3',
+        'Accept-Encoding': 'gzip, deflate, br',
+        'Referer': 'https://duckduckgo.com/',
+        'Content-Type': 'application/json',
+        'Origin': 'https://duckduckgo.com',
+        'Connection': 'keep-alive',
+        'Cookie': 'dcm=1',
+        'Sec-Fetch-Dest': 'empty',
+        'Sec-Fetch-Mode': 'cors',
+        'Sec-Fetch-Site': 'same-origin',
+        'Pragma': 'no-cache',
+        'TE': 'trailers'
+    }
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        **kwargs
+    ) -> AsyncResult:
+        async with aiohttp.ClientSession(headers=cls.headers) as session:
+            async with session.get(cls.status_url, headers={"x-vqd-accept": "1"}) as response:
+                await raise_for_status(response)
+                vqd_4 = response.headers.get("x-vqd-4")
+            payload = {
+                'model': cls.get_model(model),
+                'messages': messages
+            }
+            async with session.post(cls.chat_url, json=payload, headers={"x-vqd-4": vqd_4}) as response:
+                await raise_for_status(response)
+                async for line in response.content:
+                    if line.startswith(b"data: "):
+                        chunk = line[6:]
+                        if chunk.startswith(b"[DONE]"):
+                            break
+                        data = json.loads(chunk)
+                        if "message" in data:
+                            yield data["message"]
\ No newline at end of file
--
cgit v1.2.3

From d4a92bb8df56009a7e5520dca0a3ed8f79e92d06 Mon Sep 17 00:00:00 2001
From: Heiner Lohaus
Date: Tue, 9 Apr 2024 05:54:47 +0200
Subject: Add text to speech module

---
 g4f/Provider/__init__.py | 1 +
 1 file changed, 1 insertion(+)

(limited to 'g4f/Provider')

diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index 2c6512f0..b567305c 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -21,6 +21,7 @@ from .ChatgptFree import ChatgptFree
 from .ChatgptNext import ChatgptNext
 from .ChatgptX import ChatgptX
 from .DeepInfra import DeepInfra
+from .DuckDuckGo import DuckDuckGo
 from .FlowGpt import FlowGpt
 from .FreeChatgpt import FreeChatgpt
 from .FreeGpt import FreeGpt
--
cgit v1.2.3
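Usage note (not part of the patch): a minimal sketch of how the DuckDuckGo provider added above might be exercised now that it is exported from g4f.Provider. It calls the create_async_generator classmethod from the new module directly; the streaming print loop and the prompt text are illustrative only.

    import asyncio
    from g4f.Provider import DuckDuckGo

    async def main():
        # "gpt-3.5-turbo" maps to "gpt-3.5-turbo-0125" via the provider's model_aliases
        async for chunk in DuckDuckGo.create_async_generator(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Say hello"}],
        ):
            print(chunk, end="", flush=True)

    asyncio.run(main())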
From 90715e702bbebcf2c3cfd39628c931bbadda28b0 Mon Sep 17 00:00:00 2001
From: Heiner Lohaus
Date: Tue, 9 Apr 2024 19:19:33 +0200
Subject: Add project files

---
 g4f/Provider/Bing.py | 3 +++
 1 file changed, 3 insertions(+)

(limited to 'g4f/Provider')

diff --git a/g4f/Provider/Bing.py b/g4f/Provider/Bing.py
index aa1b37b0..1e462084 100644
--- a/g4f/Provider/Bing.py
+++ b/g4f/Provider/Bing.py
@@ -46,6 +46,7 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
         messages: Messages,
         proxy: str = None,
         timeout: int = 900,
+        api_key: str = None,
         cookies: Cookies = None,
         connector: BaseConnector = None,
         tone: str = None,
@@ -68,6 +69,8 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
         :return: An asynchronous result object.
         """
         prompt = messages[-1]["content"]
+        if api_key is not None:
+            cookies["_U"] = api_key
         if context is None:
             context = create_context(messages[:-1]) if len(messages) > 1 else None
         if tone is None:
--
cgit v1.2.3
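Usage note (not part of the patch): a minimal sketch of how the new api_key parameter on Bing.create_async_generator might be used to supply the "_U" authentication cookie. An explicit cookies dict is passed because the hunk above writes api_key into cookies without a visible None guard; the empty model string and the placeholder cookie value are assumptions for illustration only.

    import asyncio
    from g4f.Provider import Bing

    async def main():
        async for chunk in Bing.create_async_generator(
            model="",                       # left empty; provider defaults assumed to apply
            messages=[{"role": "user", "content": "Hello Bing"}],
            api_key="YOUR_U_COOKIE_VALUE",  # placeholder for the real _U cookie value
            cookies={},                     # the new code stores api_key in this dict as "_U"
        ):
            print(chunk, end="", flush=True)

    asyncio.run(main())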