author | H Lohaus <hlohaus@users.noreply.github.com> | 2024-11-20 09:52:38 +0100 |
---|---|---|
committer | GitHub <noreply@github.com> | 2024-11-20 09:52:38 +0100 |
commit | dba41cda5647dc912e581d4bf81c09bb25257aab (patch) | |
tree | dd81fba3f02f268db8129c6105d952550560436b /g4f/Provider/PollinationsAI.py | |
parent | New RobocodersAPI provider with providers enhancement and client updates (#2388) (diff) | |
download | gpt4free-0.3.5.2.tar gpt4free-0.3.5.2.tar.gz gpt4free-0.3.5.2.tar.bz2 gpt4free-0.3.5.2.tar.lz gpt4free-0.3.5.2.tar.xz gpt4free-0.3.5.2.tar.zst gpt4free-0.3.5.2.zip |
Diffstat (limited to 'g4f/Provider/PollinationsAI.py')
-rw-r--r-- | g4f/Provider/PollinationsAI.py | 69 |
1 file changed, 69 insertions, 0 deletions
diff --git a/g4f/Provider/PollinationsAI.py b/g4f/Provider/PollinationsAI.py
new file mode 100644
index 00000000..57597bf1
--- /dev/null
+++ b/g4f/Provider/PollinationsAI.py
@@ -0,0 +1,69 @@
+from __future__ import annotations
+
+from urllib.parse import quote
+import random
+import requests
+from sys import maxsize
+from aiohttp import ClientSession
+
+from ..typing import AsyncResult, Messages
+from ..image import ImageResponse
+from ..requests.raise_for_status import raise_for_status
+from ..requests.aiohttp import get_connector
+from .needs_auth.OpenaiAPI import OpenaiAPI
+from .helper import format_prompt
+
+class PollinationsAI(OpenaiAPI):
+    label = "Pollinations.AI"
+    url = "https://pollinations.ai"
+    working = True
+    supports_stream = True
+    default_model = "openai"
+
+    @classmethod
+    def get_models(cls):
+        if not cls.image_models:
+            url = "https://image.pollinations.ai/models"
+            response = requests.get(url)
+            raise_for_status(response)
+            cls.image_models = response.json()
+        if not cls.models:
+            url = "https://text.pollinations.ai/models"
+            response = requests.get(url)
+            raise_for_status(response)
+            cls.models = [model.get("name") for model in response.json()]
+            cls.models.extend(cls.image_models)
+        return cls.models
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        api_base: str = "https://text.pollinations.ai/openai",
+        api_key: str = None,
+        proxy: str = None,
+        seed: str = None,
+        **kwargs
+    ) -> AsyncResult:
+        if model:
+            model = cls.get_model(model)
+        if model in cls.image_models:
+            prompt = messages[-1]["content"]
+            if seed is None:
+                seed = random.randint(0, maxsize)
+            image = f"https://image.pollinations.ai/prompt/{quote(prompt)}?width=1024&height=1024&seed={int(seed)}&nofeed=true&nologo=true&model={quote(model)}"
+            yield ImageResponse(image, prompt)
+            return
+        if api_key is None:
+            async with ClientSession(connector=get_connector(proxy=proxy)) as session:
+                prompt = format_prompt(messages)
+                async with session.get(f"https://text.pollinations.ai/{quote(prompt)}?model={quote(model)}") as response:
+                    await raise_for_status(response)
+                    async for line in response.content.iter_any():
+                        yield line.decode(errors="ignore")
+        else:
+            async for chunk in super().create_async_generator(
+                model, messages, api_base=api_base, proxy=proxy, **kwargs
+            ):
+                yield chunk
\ No newline at end of file
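
Usage sketch (not part of the commit): the new provider can be driven directly through its create_async_generator class method. The snippet below is a minimal illustration, assuming the g4f package is installed so the relative imports in g4f/Provider/PollinationsAI.py resolve, and that messages follow the usual role/content dictionaries expected by format_prompt; exact entry points may differ by release.

import asyncio

from g4f.Provider.PollinationsAI import PollinationsAI

async def main():
    messages = [{"role": "user", "content": "Say hello in one sentence."}]
    # With no api_key, the provider streams plain text chunks from text.pollinations.ai;
    # for a model listed under image.pollinations.ai/models it instead yields a single
    # ImageResponse containing the generated image URL.
    async for chunk in PollinationsAI.create_async_generator("openai", messages):
        print(chunk, end="")

asyncio.run(main())

Passing an api_key instead routes the request through the OpenAI-compatible endpoint at https://text.pollinations.ai/openai via the inherited OpenaiAPI logic.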