summary refs log tree commit diff stats
path: root/g4f/Provider/Ecosia.py
diff options
context:
space:
mode:
author: Heiner Lohaus <hlohaus@users.noreply.github.com> 2024-04-20 10:43:53 +0200
committer: Heiner Lohaus <hlohaus@users.noreply.github.com> 2024-04-20 10:43:53 +0200
commit: 932d3a3ca092756cc3025e427fabf9ab674350fc (patch)
tree: c26601230c54c3c1a932fb9a249cd49751193237 /g4f/Provider/Ecosia.py
parent: Merge pull request #1862 from hlohaus/nem (diff)
downloadgpt4free-932d3a3ca092756cc3025e427fabf9ab674350fc.tar
gpt4free-932d3a3ca092756cc3025e427fabf9ab674350fc.tar.gz
gpt4free-932d3a3ca092756cc3025e427fabf9ab674350fc.tar.bz2
gpt4free-932d3a3ca092756cc3025e427fabf9ab674350fc.tar.lz
gpt4free-932d3a3ca092756cc3025e427fabf9ab674350fc.tar.xz
gpt4free-932d3a3ca092756cc3025e427fabf9ab674350fc.tar.zst
gpt4free-932d3a3ca092756cc3025e427fabf9ab674350fc.zip
Diffstat (limited to 'g4f/Provider/Ecosia.py')
-rw-r--r-- g4f/Provider/Ecosia.py | 8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/g4f/Provider/Ecosia.py b/g4f/Provider/Ecosia.py
index 1cae3560..231412aa 100644
--- a/g4f/Provider/Ecosia.py
+++ b/g4f/Provider/Ecosia.py
@@ -15,7 +15,8 @@ class Ecosia(AsyncGeneratorProvider, ProviderModelMixin):
working = True
supports_gpt_35_turbo = True
default_model = "gpt-3.5-turbo-0125"
- model_aliases = {"gpt-3.5-turbo": "gpt-3.5-turbo-0125"}
+ models = [default_model, "green"]
+ model_aliases = {"gpt-3.5-turbo": default_model}
@classmethod
async def create_async_generator(
@@ -23,11 +24,10 @@ class Ecosia(AsyncGeneratorProvider, ProviderModelMixin):
model: str,
messages: Messages,
connector: BaseConnector = None,
- green: bool = False,
proxy: str = None,
**kwargs
) -> AsyncResult:
- cls.get_model(model)
+ model = cls.get_model(model)
headers = {
"authority": "api.ecosia.org",
"accept": "*/*",
@@ -39,7 +39,7 @@ class Ecosia(AsyncGeneratorProvider, ProviderModelMixin):
data = {
"messages": base64.b64encode(json.dumps(messages).encode()).decode()
}
- api_url = f"https://api.ecosia.org/v2/chat/?sp={'eco' if green else 'productivity'}"
+ api_url = f"https://api.ecosia.org/v2/chat/?sp={'eco' if model == 'green' else 'productivity'}"
async with session.post(api_url, json=data) as response:
await raise_for_status(response)
async for chunk in response.content.iter_any():