diff options
author | kqlio67 <kqlio67@users.noreply.github.com> | 2024-11-06 17:39:14 +0100 |
---|---|---|
committer | kqlio67 <kqlio67@users.noreply.github.com> | 2024-11-06 17:39:14 +0100 |
commit | 6dd378d2aca256f45ff7b2fd23c59497aad82045 (patch) | |
tree | c07a20b0e2e3c1643065cef96910ae8acc84a5eb /g4f/Provider/needs_auth | |
parent | Update (g4f/Provider/not_working/) (diff) | |
download | gpt4free-6dd378d2aca256f45ff7b2fd23c59497aad82045.tar gpt4free-6dd378d2aca256f45ff7b2fd23c59497aad82045.tar.gz gpt4free-6dd378d2aca256f45ff7b2fd23c59497aad82045.tar.bz2 gpt4free-6dd378d2aca256f45ff7b2fd23c59497aad82045.tar.lz gpt4free-6dd378d2aca256f45ff7b2fd23c59497aad82045.tar.xz gpt4free-6dd378d2aca256f45ff7b2fd23c59497aad82045.tar.zst gpt4free-6dd378d2aca256f45ff7b2fd23c59497aad82045.zip |
Diffstat (limited to 'g4f/Provider/needs_auth')
-rw-r--r-- | g4f/Provider/needs_auth/BingCreateImages.py | 54 | ||||
-rw-r--r-- | g4f/Provider/needs_auth/GeminiPro.py | 107 | ||||
-rw-r--r-- | g4f/Provider/needs_auth/OpenaiChat.py | 1 | ||||
-rw-r--r-- | g4f/Provider/needs_auth/WhiteRabbitNeo.py | 57 | ||||
-rw-r--r-- | g4f/Provider/needs_auth/__init__.py | 32 |
5 files changed, 237 insertions, 14 deletions
from __future__ import annotations

from ...cookies import get_cookies
from ...image import ImageResponse
from ...errors import MissingAuthError
from ...typing import AsyncResult, Messages, Cookies
from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
from ..bing.create_images import create_images, create_session

class BingCreateImages(AsyncGeneratorProvider, ProviderModelMixin):
    """Image generation provider backed by Microsoft Designer in Bing.

    Authentication is via Bing's "_U" cookie, supplied either directly in
    ``cookies``, as ``api_key`` (stored under the "_U" key), or read from the
    local browser cookie jar for ".bing.com" as a fallback.
    """
    label = "Microsoft Designer in Bing"
    parent = "Bing"
    url = "https://www.bing.com/images/create"
    working = True
    needs_auth = True
    image_models = ["dall-e"]

    def __init__(self, cookies: Cookies = None, proxy: str = None, api_key: str = None) -> None:
        """Store session settings; an api_key is treated as the "_U" cookie value."""
        if api_key is not None:
            if cookies is None:
                cookies = {}
            # The "_U" cookie is Bing's auth token; api_key is just a convenience alias for it.
            cookies["_U"] = api_key
        self.cookies = cookies
        self.proxy = proxy

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        api_key: str = None,
        cookies: Cookies = None,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        """Yield a single ImageResponse generated from the last message's content.

        Args:
            model: Ignored; this provider only serves its "dall-e" image model.
            messages: Chat history; only the final message's content is used as prompt.
            api_key: Optional "_U" cookie value (see __init__).
            cookies: Optional cookie dict for bing.com.
            proxy: Optional proxy URL.
        """
        # Fix: instantiate via cls, not the hard-coded class name, so subclasses
        # of this provider are constructed correctly by the classmethod factory.
        session = cls(cookies, proxy, api_key)
        yield await session.generate(messages[-1]["content"])

    async def generate(self, prompt: str) -> ImageResponse:
        """
        Asynchronously creates a markdown formatted string with images based on the prompt.

        Args:
            prompt (str): Prompt to generate images.

        Returns:
            ImageResponse: Response object wrapping the generated image URLs
            (the original docstring incorrectly said ``str``).

        Raises:
            MissingAuthError: If no "_U" cookie is available from any source.
        """
        # Fall back to cookies harvested from the local browser profile.
        cookies = self.cookies or get_cookies(".bing.com", False)
        if cookies is None or "_U" not in cookies:
            raise MissingAuthError('Missing "_U" cookie')
        async with create_session(cookies, self.proxy) as session:
            images = await create_images(session, prompt)
            # Attach a thumbnail preview template only when several images were returned.
            return ImageResponse(images, prompt, {"preview": "{image}?w=200&h=200"} if len(images) > 1 else {})
\ No newline at end of file diff --git a/g4f/Provider/needs_auth/GeminiPro.py b/g4f/Provider/needs_auth/GeminiPro.py new file mode 100644 index 00000000..5c170ae5 --- /dev/null +++ b/g4f/Provider/needs_auth/GeminiPro.py @@ -0,0 +1,107 @@ +from __future__ import annotations + +import base64 +import json +from aiohttp import ClientSession, BaseConnector + +from ...typing import AsyncResult, Messages, ImageType +from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin +from ...image import to_bytes, is_accepted_format +from ...errors import MissingAuthError +from ..helper import get_connector + +class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin): + label = "Gemini API" + url = "https://ai.google.dev" + working = True + supports_message_history = True + needs_auth = True + default_model = "gemini-1.5-pro-latest" + default_vision_model = default_model + models = [default_model, "gemini-pro", "gemini-pro-vision", "gemini-1.5-flash"] + + @classmethod + async def create_async_generator( + cls, + model: str, + messages: Messages, + stream: bool = False, + proxy: str = None, + api_key: str = None, + api_base: str = "https://generativelanguage.googleapis.com/v1beta", + use_auth_header: bool = False, + image: ImageType = None, + connector: BaseConnector = None, + **kwargs + ) -> AsyncResult: + model = cls.get_model(model) + + if not api_key: + raise MissingAuthError('Add a "api_key"') + + headers = params = None + if use_auth_header: + headers = {"Authorization": f"Bearer {api_key}"} + else: + params = {"key": api_key} + + method = "streamGenerateContent" if stream else "generateContent" + url = f"{api_base.rstrip('/')}/models/{model}:{method}" + async with ClientSession(headers=headers, connector=get_connector(connector, proxy)) as session: + contents = [ + { + "role": "model" if message["role"] == "assistant" else "user", + "parts": [{"text": message["content"]}] + } + for message in messages + if message["role"] != "system" + ] + if image is not None: 
+ image = to_bytes(image) + contents[-1]["parts"].append({ + "inline_data": { + "mime_type": is_accepted_format(image), + "data": base64.b64encode(image).decode() + } + }) + data = { + "contents": contents, + "generationConfig": { + "stopSequences": kwargs.get("stop"), + "temperature": kwargs.get("temperature"), + "maxOutputTokens": kwargs.get("max_tokens"), + "topP": kwargs.get("top_p"), + "topK": kwargs.get("top_k"), + } + } + system_prompt = "\n".join( + message["content"] + for message in messages + if message["role"] == "system" + ) + if system_prompt: + data["system_instruction"] = {"parts": {"text": system_prompt}} + async with session.post(url, params=params, json=data) as response: + if not response.ok: + data = await response.json() + data = data[0] if isinstance(data, list) else data + raise RuntimeError(f"Response {response.status}: {data['error']['message']}") + if stream: + lines = [] + async for chunk in response.content: + if chunk == b"[{\n": + lines = [b"{\n"] + elif chunk == b",\r\n" or chunk == b"]": + try: + data = b"".join(lines) + data = json.loads(data) + yield data["candidates"][0]["content"]["parts"][0]["text"] + except: + data = data.decode(errors="ignore") if isinstance(data, bytes) else data + raise RuntimeError(f"Read chunk failed: {data}") + lines = [] + else: + lines.append(chunk) + else: + data = await response.json() + yield data["candidates"][0]["content"]["parts"][0]["text"] diff --git a/g4f/Provider/needs_auth/OpenaiChat.py b/g4f/Provider/needs_auth/OpenaiChat.py index f02121e3..3a0d6b29 100644 --- a/g4f/Provider/needs_auth/OpenaiChat.py +++ b/g4f/Provider/needs_auth/OpenaiChat.py @@ -55,6 +55,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin): label = "OpenAI ChatGPT" url = "https://chatgpt.com" working = True + needs_auth = True supports_gpt_4 = True supports_message_history = True supports_system_message = True diff --git a/g4f/Provider/needs_auth/WhiteRabbitNeo.py b/g4f/Provider/needs_auth/WhiteRabbitNeo.py 
new file mode 100644 index 00000000..82275c1c --- /dev/null +++ b/g4f/Provider/needs_auth/WhiteRabbitNeo.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +from aiohttp import ClientSession, BaseConnector + +from ...typing import AsyncResult, Messages, Cookies +from ...requests.raise_for_status import raise_for_status +from ..base_provider import AsyncGeneratorProvider +from ..helper import get_cookies, get_connector, get_random_string + +class WhiteRabbitNeo(AsyncGeneratorProvider): + url = "https://www.whiterabbitneo.com" + working = True + supports_message_history = True + needs_auth = True + + @classmethod + async def create_async_generator( + cls, + model: str, + messages: Messages, + cookies: Cookies = None, + connector: BaseConnector = None, + proxy: str = None, + **kwargs + ) -> AsyncResult: + if cookies is None: + cookies = get_cookies("www.whiterabbitneo.com") + headers = { + "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:123.0) Gecko/20100101 Firefox/123.0", + "Accept": "*/*", + "Accept-Language": "de,en-US;q=0.7,en;q=0.3", + "Accept-Encoding": "gzip, deflate, br", + "Referer": f"{cls.url}/", + "Content-Type": "text/plain;charset=UTF-8", + "Origin": cls.url, + "Connection": "keep-alive", + "Sec-Fetch-Dest": "empty", + "Sec-Fetch-Mode": "cors", + "Sec-Fetch-Site": "same-origin", + "TE": "trailers" + } + async with ClientSession( + headers=headers, + cookies=cookies, + connector=get_connector(connector, proxy) + ) as session: + data = { + "messages": messages, + "id": get_random_string(6), + "enhancePrompt": False, + "useFunctions": False + } + async with session.post(f"{cls.url}/api/chat", json=data, proxy=proxy) as response: + await raise_for_status(response) + async for chunk in response.content.iter_any(): + if chunk: + yield chunk.decode(errors="ignore") diff --git a/g4f/Provider/needs_auth/__init__.py b/g4f/Provider/needs_auth/__init__.py index 0626a837..e979f86d 100644 --- a/g4f/Provider/needs_auth/__init__.py +++ 
b/g4f/Provider/needs_auth/__init__.py @@ -1,18 +1,22 @@ from .gigachat import * -from .DeepInfra import DeepInfra -from .DeepInfraImage import DeepInfraImage -from .Gemini import Gemini -from .Raycast import Raycast -from .Theb import Theb -from .ThebApi import ThebApi -from .OpenaiChat import OpenaiChat -from .Poe import Poe -from .Openai import Openai -from .Groq import Groq -#from .OpenaiAccount import OpenaiAccount -from .PerplexityApi import PerplexityApi -from .Replicate import Replicate -from .MetaAI import MetaAI #from .MetaAIAccount import MetaAIAccount +#from .OpenaiAccount import OpenaiAccount + +from .BingCreateImages import BingCreateImages +from .DeepInfra import DeepInfra +from .DeepInfraImage import DeepInfraImage +from .Gemini import Gemini +from .GeminiPro import GeminiPro +from .Groq import Groq from .HuggingFace import HuggingFace +from .MetaAI import MetaAI +from .Openai import Openai +from .OpenaiChat import OpenaiChat +from .PerplexityApi import PerplexityApi +from .Poe import Poe +from .Raycast import Raycast +from .Replicate import Replicate +from .Theb import Theb +from .ThebApi import ThebApi +from .WhiteRabbitNeo import WhiteRabbitNeo |