diff options
Diffstat (limited to 'g4f')
-rw-r--r-- | g4f/Provider/DuckDuckGo.py | 85 | ||||
-rw-r--r-- | g4f/Provider/__init__.py | 1 | ||||
-rw-r--r-- | g4f/models.py | 4 |
3 files changed, 1 insertion, 89 deletions
diff --git a/g4f/Provider/DuckDuckGo.py b/g4f/Provider/DuckDuckGo.py deleted file mode 100644 index 9379660b..00000000 --- a/g4f/Provider/DuckDuckGo.py +++ /dev/null @@ -1,85 +0,0 @@ -from __future__ import annotations - -import json -import aiohttp - -from .base_provider import AsyncGeneratorProvider, ProviderModelMixin -from .helper import get_connector -from ..typing import AsyncResult, Messages -from ..requests.raise_for_status import raise_for_status -from ..providers.conversation import BaseConversation - -class DuckDuckGo(AsyncGeneratorProvider, ProviderModelMixin): - url = "https://duckduckgo.com/duckchat" - working = True - supports_gpt_35_turbo = True - supports_message_history = True - - default_model = "gpt-3.5-turbo-0125" - models = ["gpt-3.5-turbo-0125", "claude-3-haiku-20240307"] - model_aliases = { - "gpt-3.5-turbo": "gpt-3.5-turbo-0125", - "claude-3-haiku": "claude-3-haiku-20240307" - } - - status_url = "https://duckduckgo.com/duckchat/v1/status" - chat_url = "https://duckduckgo.com/duckchat/v1/chat" - user_agent = 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:123.0) Gecko/20100101 Firefox/123.0' - headers = { - 'User-Agent': user_agent, - 'Accept': 'text/event-stream', - 'Accept-Language': 'de,en-US;q=0.7,en;q=0.3', - 'Accept-Encoding': 'gzip, deflate, br', - 'Referer': 'https://duckduckgo.com/', - 'Content-Type': 'application/json', - 'Origin': 'https://duckduckgo.com', - 'Connection': 'keep-alive', - 'Cookie': 'dcm=1', - 'Sec-Fetch-Dest': 'empty', - 'Sec-Fetch-Mode': 'cors', - 'Sec-Fetch-Site': 'same-origin', - 'Pragma': 'no-cache', - 'TE': 'trailers' - } - - @classmethod - async def create_async_generator( - cls, - model: str, - messages: Messages, - proxy: str = None, - connector: aiohttp.BaseConnector = None, - conversation: Conversation = None, - return_conversation: bool = False, - **kwargs - ) -> AsyncResult: - async with aiohttp.ClientSession(headers=cls.headers, connector=get_connector(connector, proxy)) as session: - if conversation is 
not None and len(messages) > 1: - vqd_4 = conversation.vqd_4 - messages = [*conversation.messages, messages[-2], messages[-1]] - else: - async with session.get(cls.status_url, headers={"x-vqd-accept": "1"}) as response: - await raise_for_status(response) - vqd_4 = response.headers.get("x-vqd-4") - messages = [messages[-1]] - payload = { - 'model': cls.get_model(model), - 'messages': messages - } - async with session.post(cls.chat_url, json=payload, headers={"x-vqd-4": vqd_4}) as response: - await raise_for_status(response) - if return_conversation: - yield Conversation(response.headers.get("x-vqd-4"), messages) - async for line in response.content: - if line.startswith(b"data: "): - chunk = line[6:] - if chunk.startswith(b"[DONE]"): - break - data = json.loads(chunk) - if "message" in data and data["message"]: - yield data["message"] - -class Conversation(BaseConversation): - def __init__(self, vqd_4: str, messages: Messages) -> None: - self.vqd_4 = vqd_4 - self.messages = messages
\ No newline at end of file diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py index 3aeabaaf..1c387087 100644 --- a/g4f/Provider/__init__.py +++ b/g4f/Provider/__init__.py @@ -25,7 +25,6 @@ from .Cnote import Cnote from .Cohere import Cohere from .DeepInfra import DeepInfra from .DeepInfraImage import DeepInfraImage -from .DuckDuckGo import DuckDuckGo from .Feedough import Feedough from .FlowGpt import FlowGpt from .FreeChatgpt import FreeChatgpt diff --git a/g4f/models.py b/g4f/models.py index d031797e..78e1d74e 100644 --- a/g4f/models.py +++ b/g4f/models.py @@ -11,7 +11,6 @@ from .Provider import ( ChatgptNext, Cnote, DeepInfra, - DuckDuckGo, Feedough, FreeGpt, Gemini, @@ -71,7 +70,6 @@ gpt_35_long = Model( ChatgptNext, OpenaiChat, Koala, - DuckDuckGo, ]) ) @@ -196,7 +194,7 @@ claude_3_sonnet = Model( claude_3_haiku = Model( name = 'claude-3-haiku', base_provider = 'anthropic', - best_provider = DuckDuckGo + best_provider = None ) gpt_35_turbo_16k = Model( |