author     abc <98614666+xtekky@users.noreply.github.com>  2024-05-31 17:17:34 +0200
committer  abc <98614666+xtekky@users.noreply.github.com>  2024-05-31 17:17:34 +0200
commit     dd409d3f1ae3300146d43c7d2e870915cb79c9ef (patch)
tree       7944b367ac8406a65e38d49dd923f242daed4b52 /g4f
parent     Update README.md (diff)
Diffstat (limited to 'g4f')
-rw-r--r--  g4f/Provider/Ecosia.py    47
-rw-r--r--  g4f/Provider/__init__.py   1
-rw-r--r--  g4f/models.py              3
3 files changed, 0 insertions, 51 deletions
diff --git a/g4f/Provider/Ecosia.py b/g4f/Provider/Ecosia.py
deleted file mode 100644
index 231412aa..00000000
--- a/g4f/Provider/Ecosia.py
+++ /dev/null
@@ -1,47 +0,0 @@
-
-from __future__ import annotations
-
-import base64
-import json
-from aiohttp import ClientSession, BaseConnector
-
-from ..typing import AsyncResult, Messages
-from ..requests.raise_for_status import raise_for_status
-from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
-from .helper import get_connector
-
-class Ecosia(AsyncGeneratorProvider, ProviderModelMixin):
-    url = "https://www.ecosia.org"
-    working = True
-    supports_gpt_35_turbo = True
-    default_model = "gpt-3.5-turbo-0125"
-    models = [default_model, "green"]
-    model_aliases = {"gpt-3.5-turbo": default_model}
-
-    @classmethod
-    async def create_async_generator(
-        cls,
-        model: str,
-        messages: Messages,
-        connector: BaseConnector = None,
-        proxy: str = None,
-        **kwargs
-    ) -> AsyncResult:
-        model = cls.get_model(model)
-        headers = {
-            "authority": "api.ecosia.org",
-            "accept": "*/*",
-            "origin": cls.url,
-            "referer": f"{cls.url}/",
-            "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36",
-        }
-        async with ClientSession(headers=headers, connector=get_connector(connector, proxy)) as session:
-            data = {
-                "messages": base64.b64encode(json.dumps(messages).encode()).decode()
-            }
-            api_url = f"https://api.ecosia.org/v2/chat/?sp={'eco' if model == 'green' else 'productivity'}"
-            async with session.post(api_url, json=data) as response:
-                await raise_for_status(response)
-                async for chunk in response.content.iter_any():
-                    if chunk:
-                        yield chunk.decode(errors="ignore")
\ No newline at end of file
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index e60e1310..3aeabaaf 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -26,7 +26,6 @@ from .Cohere import Cohere
 from .DeepInfra import DeepInfra
 from .DeepInfraImage import DeepInfraImage
 from .DuckDuckGo import DuckDuckGo
-from .Ecosia import Ecosia
 from .Feedough import Feedough
 from .FlowGpt import FlowGpt
 from .FreeChatgpt import FreeChatgpt
diff --git a/g4f/models.py b/g4f/models.py
index 40de22ba..d031797e 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -12,7 +12,6 @@ from .Provider import (
     Cnote,
     DeepInfra,
     DuckDuckGo,
-    Ecosia,
     Feedough,
     FreeGpt,
     Gemini,
@@ -59,7 +58,6 @@ default = Model(
         ChatgptAi,
         You,
         OpenaiChat,
-        Ecosia,
     ])
 )
@@ -73,7 +71,6 @@ gpt_35_long = Model(
         ChatgptNext,
         OpenaiChat,
         Koala,
-        Ecosia,
         DuckDuckGo,
     ])
 )