Diffstat (limited to 'g4f/Provider')
-rw-r--r--  g4f/Provider/Bing.py              |  2
-rw-r--r--  g4f/Provider/Chatgpt4Online.py    |  4
-rw-r--r--  g4f/Provider/DuckDuckGo.py        | 30
-rw-r--r--  g4f/Provider/FlowGpt.py           | 28
-rw-r--r--  g4f/Provider/PerplexityLabs.py    | 17
-rw-r--r--  g4f/Provider/bing/conversation.py |  4
-rw-r--r--  g4f/Provider/needs_auth/Gemini.py |  5
7 files changed, 68 insertions, 22 deletions
diff --git a/g4f/Provider/Bing.py b/g4f/Provider/Bing.py
index 92f79cca..fd8cac8e 100644
--- a/g4f/Provider/Bing.py
+++ b/g4f/Provider/Bing.py
@@ -107,7 +107,7 @@ def get_default_cookies():
         'PPLState'      : '1',
         'KievRPSSecAuth': '',
         'SUID'          : '',
-        'SRCHUSR'       : '',
+        'SRCHUSR'       : f'DOB={date.today().strftime("%Y%m%d")}&T={int(time.time())}',
         'SRCHHPGUSR'    : f'HV={int(time.time())}',
         'BCP'           : 'AD=1&AL=1&SM=1',
         '_Rwho'         : f'u=d&ts={date.today().isoformat()}',
diff --git a/g4f/Provider/Chatgpt4Online.py b/g4f/Provider/Chatgpt4Online.py
index 169c936d..ff9a2c8f 100644
--- a/g4f/Provider/Chatgpt4Online.py
+++ b/g4f/Provider/Chatgpt4Online.py
@@ -6,6 +6,7 @@ from aiohttp import ClientSession
 
 from ..typing import Messages, AsyncResult
 from ..requests import get_args_from_browser
+from ..webdriver import WebDriver
 from .base_provider import AsyncGeneratorProvider
 from .helper import get_random_string
 
@@ -23,9 +24,10 @@ class Chatgpt4Online(AsyncGeneratorProvider):
         model: str,
         messages: Messages,
         proxy: str = None,
+        webdriver: WebDriver = None,
         **kwargs
     ) -> AsyncResult:
-        args = get_args_from_browser(f"{cls.url}/chat/", proxy=proxy)
+        args = get_args_from_browser(f"{cls.url}/chat/", webdriver, proxy=proxy)
         async with ClientSession(**args) as session:
             if not cls._wpnonce:
                 async with session.get(f"{cls.url}/chat/", proxy=proxy) as response:
diff --git a/g4f/Provider/DuckDuckGo.py b/g4f/Provider/DuckDuckGo.py
index 5269ced3..2fa0612a 100644
--- a/g4f/Provider/DuckDuckGo.py
+++ b/g4f/Provider/DuckDuckGo.py
@@ -4,8 +4,10 @@ import json
 import aiohttp
 
 from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
+from .helper import get_connector
 from ..typing import AsyncResult, Messages
 from ..requests.raise_for_status import raise_for_status
+from ..providers.conversation import BaseConversation
 
 class DuckDuckGo(AsyncGeneratorProvider, ProviderModelMixin):
     url = "https://duckduckgo.com/duckchat"
@@ -42,23 +44,39 @@ class DuckDuckGo(AsyncGeneratorProvider, ProviderModelMixin):
         cls,
         model: str,
         messages: Messages,
+        proxy: str = None,
+        connector: aiohttp.BaseConnector = None,
+        conversation: Conversation = None,
+        return_conversation: bool = False,
         **kwargs
     ) -> AsyncResult:
-        async with aiohttp.ClientSession(headers=cls.headers) as session:
-            async with session.get(cls.status_url, headers={"x-vqd-accept": "1"}) as response:
-                await raise_for_status(response)
-                vqd_4 = response.headers.get("x-vqd-4")
+        async with aiohttp.ClientSession(headers=cls.headers, connector=get_connector(connector, proxy)) as session:
+            if conversation is not None and len(messages) > 1:
+                vqd_4 = conversation.vqd_4
+                messages = [*conversation.messages, messages[-2], messages[-1]]
+            else:
+                async with session.get(cls.status_url, headers={"x-vqd-accept": "1"}) as response:
+                    await raise_for_status(response)
+                    vqd_4 = response.headers.get("x-vqd-4")
+                messages = [messages[-1]]
             payload = {
                 'model': cls.get_model(model),
                 'messages': messages
             }
             async with session.post(cls.chat_url, json=payload, headers={"x-vqd-4": vqd_4}) as response:
                 await raise_for_status(response)
+                if return_conversation:
+                    yield Conversation(response.headers.get("x-vqd-4"), messages)
                 async for line in response.content:
                     if line.startswith(b"data: "):
                         chunk = line[6:]
                         if chunk.startswith(b"[DONE]"):
                             break
                         data = json.loads(chunk)
-                        if "message" in data:
-                            yield data["message"]
\ No newline at end of file
+                        if "message" in data and data["message"]:
+                            yield data["message"]
+
+class Conversation(BaseConversation):
+    def __init__(self, vqd_4: str, messages: Messages) -> None:
+        self.vqd_4 = vqd_4
+        self.messages = messages
\ No newline at end of file
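Note (illustrative, not part of the patch): the new conversation/return_conversation parameters let a caller reuse the x-vqd-4 token and message history DuckDuckGo hands out, instead of hitting the status endpoint on every request. A minimal usage sketch, assuming the provider is driven directly through its create_async_generator classmethod and that "gpt-3.5-turbo-0125" is an accepted model id:

import asyncio
from g4f.Provider.DuckDuckGo import DuckDuckGo, Conversation

async def main():
    conversation = None
    messages = [{"role": "user", "content": "Hello"}]
    # First turn: return_conversation=True makes the provider yield a
    # Conversation object (vqd_4 token + sent messages) before the text chunks.
    async for chunk in DuckDuckGo.create_async_generator(
        "gpt-3.5-turbo-0125", messages, return_conversation=True
    ):
        if isinstance(chunk, Conversation):
            conversation = chunk
        else:
            print(chunk, end="")
    # Next turn: pass the stored Conversation so the status request is skipped
    # and only the last assistant/user pair is appended to the stored history.
    messages += [{"role": "assistant", "content": "Hi!"}, {"role": "user", "content": "Tell me more."}]
    async for chunk in DuckDuckGo.create_async_generator(
        "gpt-3.5-turbo-0125", messages, conversation=conversation
    ):
        if not isinstance(chunk, Conversation):
            print(chunk, end="")

asyncio.run(main())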
diff --git a/g4f/Provider/FlowGpt.py b/g4f/Provider/FlowGpt.py
index dc5c6d8d..7edd6f19 100644
--- a/g4f/Provider/FlowGpt.py
+++ b/g4f/Provider/FlowGpt.py
@@ -1,10 +1,13 @@
 from __future__ import annotations
 
 import json
+import time
+import hashlib
 from aiohttp import ClientSession
 
 from ..typing import AsyncResult, Messages
 from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
+from .helper import get_random_hex, get_random_string
 from ..requests.raise_for_status import raise_for_status
 
 class FlowGpt(AsyncGeneratorProvider, ProviderModelMixin):
@@ -17,9 +20,17 @@
     models = [
         "gpt-3.5-turbo",
         "gpt-3.5-long",
+        "gpt-4-turbo",
         "google-gemini",
+        "claude-instant",
+        "claude-v1",
         "claude-v2",
-        "llama2-13b"
+        "llama2-13b",
+        "mythalion-13b",
+        "pygmalion-13b",
+        "chronos-hermes-13b",
+        "Mixtral-8x7B",
+        "Dolphin-2.6-8x7B"
     ]
     model_aliases = {
         "gemini": "google-gemini",
@@ -36,6 +47,12 @@
         **kwargs
     ) -> AsyncResult:
         model = cls.get_model(model)
+        timestamp = str(int(time.time()))
+        auth = "Bearer null"
+        nonce = get_random_hex()
+        data = f"{timestamp}-{nonce}-{auth}"
+        signature = hashlib.md5(data.encode()).hexdigest()
+
         headers = {
             "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:122.0) Gecko/20100101 Firefox/122.0",
             "Accept": "*/*",
@@ -49,7 +66,12 @@
             "Sec-Fetch-Dest": "empty",
             "Sec-Fetch-Mode": "cors",
             "Sec-Fetch-Site": "same-site",
-            "TE": "trailers"
+            "TE": "trailers",
+            "Authorization": auth,
+            "x-flow-device-id": f"f-{get_random_string(19)}",
+            "x-nonce": nonce,
+            "x-signature": signature,
+            "x-timestamp": timestamp
         }
         async with ClientSession(headers=headers) as session:
             history = [message for message in messages[:-1] if message["role"] != "system"]
@@ -69,7 +91,7 @@
                 "generateImage": False,
                 "generateAudio": False
             }
-            async with session.post("https://backend-k8s.flowgpt.com/v2/chat-anonymous", json=data, proxy=proxy) as response:
+            async with session.post("https://backend-k8s.flowgpt.com/v2/chat-anonymous-encrypted", json=data, proxy=proxy) as response:
                 await raise_for_status(response)
                 async for chunk in response.content:
                     if chunk.strip():
diff --git a/g4f/Provider/PerplexityLabs.py b/g4f/Provider/PerplexityLabs.py
index ba956100..ab36d284 100644
--- a/g4f/Provider/PerplexityLabs.py
+++ b/g4f/Provider/PerplexityLabs.py
@@ -2,14 +2,13 @@ from __future__ import annotations
 
 import random
 import json
-from aiohttp import ClientSession, BaseConnector
 
 from ..typing import AsyncResult, Messages
+from ..requests import StreamSession, raise_for_status
 from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
-from .helper import get_connector
 
-API_URL = "https://labs-api.perplexity.ai/socket.io/"
-WS_URL = "wss://labs-api.perplexity.ai/socket.io/"
+API_URL = "https://www.perplexity.ai/socket.io/"
+WS_URL = "wss://www.perplexity.ai/socket.io/"
 
 class PerplexityLabs(AsyncGeneratorProvider, ProviderModelMixin):
     url = "https://labs.perplexity.ai"
@@ -35,7 +34,6 @@ class PerplexityLabs(AsyncGeneratorProvider, ProviderModelMixin):
         model: str,
         messages: Messages,
         proxy: str = None,
-        connector: BaseConnector = None,
         **kwargs
     ) -> AsyncResult:
         headers = {
@@ -51,21 +49,22 @@
             "Sec-Fetch-Site": "same-site",
             "TE": "trailers",
         }
-        async with ClientSession(headers=headers, connector=get_connector(connector, proxy)) as session:
+        async with StreamSession(headers=headers, proxies={"all": proxy}) as session:
             t = format(random.getrandbits(32), "08x")
             async with session.get(
                 f"{API_URL}?EIO=4&transport=polling&t={t}"
             ) as response:
+                await raise_for_status(response)
                 text = await response.text()
-
+            assert text.startswith("0")
             sid = json.loads(text[1:])["sid"]
             post_data = '40{"jwt":"anonymous-ask-user"}'
             async with session.post(
                 f"{API_URL}?EIO=4&transport=polling&t={t}&sid={sid}",
                 data=post_data
             ) as response:
-                assert await response.text() == "OK"
-
+                await raise_for_status(response)
+                assert await response.text() == "OK"
             async with session.ws_connect(f"{WS_URL}?EIO=4&transport=websocket&sid={sid}", autoping=False) as ws:
                 await ws.send_str("2probe")
                 assert(await ws.receive_str() == "3probe")
diff --git a/g4f/Provider/bing/conversation.py b/g4f/Provider/bing/conversation.py
index ef48cd91..85292079 100644
--- a/g4f/Provider/bing/conversation.py
+++ b/g4f/Provider/bing/conversation.py
@@ -34,9 +34,9 @@ async def create_conversation(session: ClientSession, headers: dict, tone: str)
         Conversation: An instance representing the created conversation.
     """
     if tone == "Copilot":
-        url = "https://copilot.microsoft.com/turing/conversation/create?bundleVersion=1.1686.0"
+        url = "https://copilot.microsoft.com/turing/conversation/create?bundleVersion=1.1690.0"
     else:
-        url = "https://www.bing.com/turing/conversation/create?bundleVersion=1.1686.0"
+        url = "https://www.bing.com/turing/conversation/create?bundleVersion=1.1690.0"
     async with session.get(url, headers=headers) as response:
         if response.status == 404:
             raise RateLimitError("Response 404: Do less requests and reuse conversations")
diff --git a/g4f/Provider/needs_auth/Gemini.py b/g4f/Provider/needs_auth/Gemini.py
index fc9d9575..ebf5f413 100644
--- a/g4f/Provider/needs_auth/Gemini.py
+++ b/g4f/Provider/needs_auth/Gemini.py
@@ -60,6 +60,7 @@ class Gemini(AsyncGeneratorProvider):
         model: str,
         messages: Messages,
         proxy: str = None,
+        api_key: str = None,
         cookies: Cookies = None,
         connector: BaseConnector = None,
         image: ImageType = None,
@@ -67,6 +68,10 @@
         **kwargs
     ) -> AsyncResult:
         prompt = format_prompt(messages)
+        if api_key is not None:
+            if cookies is None:
+                cookies = {}
+            cookies["__Secure-1PSID"] = api_key
         cookies = cookies if cookies else get_cookies(".google.com", False, True)
         base_connector = get_connector(connector, proxy)
         async with ClientSession(
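Note (illustrative, not part of the patch): FlowGpt now posts to /v2/chat-anonymous-encrypted and signs every request with an MD5 digest over "{timestamp}-{nonce}-{auth}", exactly as added above. A self-contained sketch of the header construction; secrets/random stand in for g4f's get_random_hex and get_random_string helpers, and the nonce and device-id lengths are assumptions:

import time
import hashlib
import random
import secrets
import string

def flowgpt_auth_headers() -> dict:
    timestamp = str(int(time.time()))
    auth = "Bearer null"                      # anonymous access, no real token
    nonce = secrets.token_hex(8)              # random hex nonce (length assumed)
    signature = hashlib.md5(f"{timestamp}-{nonce}-{auth}".encode()).hexdigest()
    device_id = "f-" + "".join(random.choices(string.ascii_lowercase + string.digits, k=19))
    return {
        "Authorization": auth,
        "x-flow-device-id": device_id,
        "x-nonce": nonce,
        "x-signature": signature,
        "x-timestamp": timestamp,
    }

print(flowgpt_auth_headers())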
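Note (illustrative, not part of the patch): for the Gemini provider, api_key is now simply a shortcut for the __Secure-1PSID cookie. Roughly, assuming the provider is called directly via create_async_generator and that "gemini" is an accepted model name (the cookie value below is a placeholder):

from g4f.Provider.needs_auth.Gemini import Gemini

messages = [{"role": "user", "content": "Hi"}]
psid = "g.a000..."  # placeholder __Secure-1PSID value, not a real cookie

# Passing the cookie value as api_key ...
gen_a = Gemini.create_async_generator("gemini", messages, api_key=psid)
# ... is now equivalent to supplying it in the cookies dict.
gen_b = Gemini.create_async_generator("gemini", messages, cookies={"__Secure-1PSID": psid})
# Both calls only build async generators; the request runs when they are iterated.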