author     H Lohaus <hlohaus@users.noreply.github.com>  2024-02-09 14:24:15 +0100
committer  GitHub <noreply@github.com>  2024-02-09 14:24:15 +0100
commit     5c75972c50ac936cb6dc8e01cef9cdb08daa8ed7
tree       739dc1c020fcaff53d3f80630036b9aa2ac94f54 /g4f
parent     Merge pull request #1565 from hlohaus/gemini
Diffstat

| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | g4f/Provider/CreateImagesBing.py | 94 |
| -rw-r--r-- | g4f/Provider/GptChatly.py | 1 |
| -rw-r--r-- | g4f/Provider/Hashnode.py | 2 |
| -rw-r--r-- | g4f/Provider/Phind.py | 2 |
| -rw-r--r-- | g4f/Provider/__init__.py | 2 |
| -rw-r--r-- | g4f/Provider/bing/create_images.py | 90 |
| -rw-r--r-- | g4f/Provider/helper.py | 99 |
| -rw-r--r-- | g4f/Provider/needs_auth/ThebApi.py | 2 |
| -rw-r--r-- | g4f/__init__.py | 2 |
| -rw-r--r-- | g4f/cookies.py | 97 |
| -rw-r--r-- | g4f/errors.py | 3 |
| -rw-r--r-- | g4f/models.py | 18 |
| -rw-r--r-- | g4f/webdriver.py | 3 |
13 files changed, 215 insertions, 200 deletions
diff --git a/g4f/Provider/CreateImagesBing.py b/g4f/Provider/CreateImagesBing.py
new file mode 100644
index 00000000..4d045188
--- /dev/null
+++ b/g4f/Provider/CreateImagesBing.py
@@ -0,0 +1,94 @@
+from __future__ import annotations
+
+import asyncio
+import time
+import os
+from typing import Generator
+
+from ..cookies import get_cookies
+from ..webdriver import WebDriver, get_driver_cookies, get_browser
+from ..image import ImageResponse
+from ..errors import MissingRequirementsError, MissingAuthError
+from .bing.create_images import BING_URL, create_images, create_session
+
+BING_URL = "https://www.bing.com"
+TIMEOUT_LOGIN = 1200
+
+def wait_for_login(driver: WebDriver, timeout: int = TIMEOUT_LOGIN) -> None:
+    """
+    Waits for the user to log in within a given timeout period.
+
+    Args:
+        driver (WebDriver): Webdriver for browser automation.
+        timeout (int): Maximum waiting time in seconds.
+
+    Raises:
+        RuntimeError: If the login process exceeds the timeout.
+    """
+    driver.get(f"{BING_URL}/")
+    start_time = time.time()
+    while not driver.get_cookie("_U"):
+        if time.time() - start_time > timeout:
+            raise RuntimeError("Timeout error")
+        time.sleep(0.5)
+
+def get_cookies_from_browser(proxy: str = None) -> dict[str, str]:
+    """
+    Retrieves cookies from the browser using webdriver.
+
+    Args:
+        proxy (str, optional): Proxy configuration.
+
+    Returns:
+        dict[str, str]: Retrieved cookies.
+    """
+    with get_browser(proxy=proxy) as driver:
+        wait_for_login(driver)
+        time.sleep(1)
+        return get_driver_cookies(driver)
+
+class CreateImagesBing:
+    """A class for creating images using Bing."""
+
+    def __init__(self, cookies: dict[str, str] = {}, proxy: str = None) -> None:
+        self.cookies = cookies
+        self.proxy = proxy
+
+    def create_completion(self, prompt: str) -> Generator[ImageResponse, None, None]:
+        """
+        Generator for creating imagecompletion based on a prompt.
+
+        Args:
+            prompt (str): Prompt to generate images.
+
+        Yields:
+            Generator[str, None, None]: The final output as markdown formatted string with images.
+        """
+        cookies = self.cookies or get_cookies(".bing.com", False)
+        if "_U" not in cookies:
+            login_url = os.environ.get("G4F_LOGIN_URL")
+            if login_url:
+                yield f"Please login: [Bing]({login_url})\n\n"
+            try:
+                self.cookies = get_cookies_from_browser(self.proxy)
+            except MissingRequirementsError as e:
+                raise MissingAuthError(f'Missing "_U" cookie. {e}')
+        yield asyncio.run(self.create_async(prompt))
+
+    async def create_async(self, prompt: str) -> ImageResponse:
+        """
+        Asynchronously creates a markdown formatted string with images based on the prompt.
+
+        Args:
+            prompt (str): Prompt to generate images.
+
+        Returns:
+            str: Markdown formatted string with images.
+        """
+        cookies = self.cookies or get_cookies(".bing.com", False)
+        if "_U" not in cookies:
+            raise MissingAuthError('Missing "_U" cookie')
+        proxy = os.environ.get("G4F_PROXY")
+        async with create_session(cookies, proxy) as session:
+            images = await create_images(session, prompt, self.proxy)
+            return ImageResponse(images, prompt, {"preview": "{image}?w=200&h=200"})
\ No newline at end of file
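A rough usage sketch of the relocated `CreateImagesBing` class follows (not part of the patch; the cookie value and prompt are placeholders, and a valid Bing `_U` cookie is assumed):

```python
# Illustrative only: drive the new g4f.Provider.CreateImagesBing class directly.
from g4f.Provider.CreateImagesBing import CreateImagesBing

service = CreateImagesBing(cookies={"_U": "<your Bing _U cookie>"})  # placeholder value
for chunk in service.create_completion("a watercolor painting of a lighthouse"):
    # Yields either a login hint string or an ImageResponse carrying markdown image links.
    print(chunk)
```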
diff --git a/g4f/Provider/GptChatly.py b/g4f/Provider/GptChatly.py
index d98c2af4..9fb739a8 100644
--- a/g4f/Provider/GptChatly.py
+++ b/g4f/Provider/GptChatly.py
@@ -10,7 +10,6 @@ class GptChatly(AsyncProvider):
     working = True
     supports_message_history = True
     supports_gpt_35_turbo = True
-    supports_gpt_4 = True
 
     @classmethod
     async def create_async(
diff --git a/g4f/Provider/Hashnode.py b/g4f/Provider/Hashnode.py
index a287fa7c..7a0c2903 100644
--- a/g4f/Provider/Hashnode.py
+++ b/g4f/Provider/Hashnode.py
@@ -13,7 +13,7 @@ class SearchTypes():
 
 class Hashnode(AsyncGeneratorProvider):
     url = "https://hashnode.com"
-    working = True
+    working = False
     supports_message_history = True
     supports_gpt_35_turbo = True
     _sources = []
diff --git a/g4f/Provider/Phind.py b/g4f/Provider/Phind.py
index a7fdbeca..746dcbcc 100644
--- a/g4f/Provider/Phind.py
+++ b/g4f/Provider/Phind.py
@@ -67,7 +67,7 @@ class Phind(AsyncGeneratorProvider):
                 if chunk.startswith(b'<PHIND_DONE/>'):
                     break
                 if chunk.startswith(b'<PHIND_BACKEND_ERROR>'):
-                    raise RuntimeError(f"Response: {chunk}")
+                    raise RuntimeError(f"Response: {chunk.decode()}")
                 if chunk.startswith(b'<PHIND_WEBRESULTS>') or chunk.startswith(b'<PHIND_FOLLOWUP>'):
                     pass
                 elif chunk.startswith(b"<PHIND_METADATA>") or chunk.startswith(b"<PHIND_INDICATOR>"):
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index 1b45b00d..7dbc1504 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -53,6 +53,8 @@ from .Vercel import Vercel
 from .Ylokh import Ylokh
 from .You import You
 
+from .CreateImagesBing import CreateImagesBing
+
 import sys
 
 __modules__: list = [
diff --git a/g4f/Provider/bing/create_images.py b/g4f/Provider/bing/create_images.py
index b31e9408..de4fd476 100644
--- a/g4f/Provider/bing/create_images.py
+++ b/g4f/Provider/bing/create_images.py
@@ -7,10 +7,9 @@ from __future__ import annotations
 import asyncio
 import time
 import json
-import os
 from aiohttp import ClientSession, BaseConnector
 from urllib.parse import quote
-from typing import Generator, List, Dict
+from typing import List, Dict
 
 try:
     from bs4 import BeautifulSoup
@@ -19,14 +18,11 @@ except ImportError:
     has_requirements = False
 
 from ..create_images import CreateImagesProvider
-from ..helper import get_cookies, get_connector
-from ...webdriver import WebDriver, get_driver_cookies, get_browser
+from ..helper import get_connector
 from ...base_provider import ProviderType
-from ...image import ImageResponse
-from ...errors import MissingRequirementsError, MissingAuthError
+from ...errors import MissingRequirementsError
 
 BING_URL = "https://www.bing.com"
-TIMEOUT_LOGIN = 1200
 TIMEOUT_IMAGE_CREATION = 300
 ERRORS = [
     "this prompt is being reviewed",
@@ -39,24 +35,6 @@ BAD_IMAGES = [
     "https://r.bing.com/rp/TX9QuO3WzcCJz1uaaSwQAz39Kb0.jpg",
 ]
 
-def wait_for_login(driver: WebDriver, timeout: int = TIMEOUT_LOGIN) -> None:
-    """
-    Waits for the user to log in within a given timeout period.
-
-    Args:
-        driver (WebDriver): Webdriver for browser automation.
-        timeout (int): Maximum waiting time in seconds.
-
-    Raises:
-        RuntimeError: If the login process exceeds the timeout.
-    """
-    driver.get(f"{BING_URL}/")
-    start_time = time.time()
-    while not driver.get_cookie("_U"):
-        if time.time() - start_time > timeout:
-            raise RuntimeError("Timeout error")
-        time.sleep(0.5)
-
 def create_session(cookies: Dict[str, str], proxy: str = None, connector: BaseConnector = None) -> ClientSession:
     """
     Creates a new client session with specified cookies and headers.
@@ -170,67 +148,6 @@ def read_images(html_content: str) -> List[str]:
         raise RuntimeError("No images found")
     return images
 
-def get_cookies_from_browser(proxy: str = None) -> dict[str, str]:
-    """
-    Retrieves cookies from the browser using webdriver.
-
-    Args:
-        proxy (str, optional): Proxy configuration.
-
-    Returns:
-        dict[str, str]: Retrieved cookies.
-    """
-    with get_browser(proxy=proxy) as driver:
-        wait_for_login(driver)
-        time.sleep(1)
-        return get_driver_cookies(driver)
-
-class CreateImagesBing:
-    """A class for creating images using Bing."""
-
-    def __init__(self, cookies: dict[str, str] = {}, proxy: str = None) -> None:
-        self.cookies = cookies
-        self.proxy = proxy
-
-    def create_completion(self, prompt: str) -> Generator[ImageResponse, None, None]:
-        """
-        Generator for creating imagecompletion based on a prompt.
-
-        Args:
-            prompt (str): Prompt to generate images.
-
-        Yields:
-            Generator[str, None, None]: The final output as markdown formatted string with images.
-        """
-        cookies = self.cookies or get_cookies(".bing.com", False)
-        if "_U" not in cookies:
-            login_url = os.environ.get("G4F_LOGIN_URL")
-            if login_url:
-                yield f"Please login: [Bing]({login_url})\n\n"
-            try:
-                self.cookies = get_cookies_from_browser(self.proxy)
-            except MissingRequirementsError as e:
-                raise MissingAuthError(f'Missing "_U" cookie. {e}')
-        yield asyncio.run(self.create_async(prompt))
-
-    async def create_async(self, prompt: str) -> ImageResponse:
-        """
-        Asynchronously creates a markdown formatted string with images based on the prompt.
-
-        Args:
-            prompt (str): Prompt to generate images.
-
-        Returns:
-            str: Markdown formatted string with images.
-        """
-        cookies = self.cookies or get_cookies(".bing.com", False)
-        if "_U" not in cookies:
-            raise MissingAuthError('Missing "_U" cookie')
-        proxy = os.environ.get("G4F_PROXY")
-        async with create_session(cookies, proxy) as session:
-            images = await create_images(session, prompt, self.proxy)
-            return ImageResponse(images, prompt, {"preview": "{image}?w=200&h=200"})
-
 def patch_provider(provider: ProviderType) -> CreateImagesProvider:
     """
     Patches a provider to include image creation capabilities.
@@ -241,6 +158,7 @@ def patch_provider(provider: ProviderType) -> CreateImagesProvider:
     Returns:
         CreateImagesProvider: The patched provider with image creation capabilities.
     """
+    from ..CreateImagesBing import CreateImagesBing
     service = CreateImagesBing()
     return CreateImagesProvider(
         provider,
diff --git a/g4f/Provider/helper.py b/g4f/Provider/helper.py
index 2cab5e6f..35480255 100644
--- a/g4f/Provider/helper.py
+++ b/g4f/Provider/helper.py
@@ -1,104 +1,13 @@
 from __future__ import annotations
 
-import os
 import random
 import secrets
 import string
 from aiohttp import BaseConnector
 
-try:
-    from platformdirs import user_config_dir
-    has_platformdirs = True
-except ImportError:
-    has_platformdirs = False
-try:
-    from browser_cookie3 import (
-        chrome, chromium, opera, opera_gx,
-        brave, edge, vivaldi, firefox,
-        _LinuxPasswordManager, BrowserCookieError
-    )
-    has_browser_cookie3 = True
-except ImportError:
-    has_browser_cookie3 = False
-
-from ..typing import Dict, Messages, Cookies, Optional
-from ..errors import MissingAiohttpSocksError, MissingRequirementsError
-from .. import debug
-
-# Global variable to store cookies
-_cookies: Dict[str, Cookies] = {}
-
-if has_browser_cookie3 and os.environ.get('DBUS_SESSION_BUS_ADDRESS') == "/dev/null":
-    _LinuxPasswordManager.get_password = lambda a, b: b"secret"
-
-def get_cookies(domain_name: str = '', raise_requirements_error: bool = True) -> Dict[str, str]:
-    """
-    Load cookies for a given domain from all supported browsers and cache the results.
-
-    Args:
-        domain_name (str): The domain for which to load cookies.
-
-    Returns:
-        Dict[str, str]: A dictionary of cookie names and values.
-    """
-    if domain_name in _cookies:
-        return _cookies[domain_name]
-
-    cookies = load_cookies_from_browsers(domain_name, raise_requirements_error)
-    _cookies[domain_name] = cookies
-    return cookies
-
-def set_cookies(domain_name: str, cookies: Cookies = None) -> None:
-    if cookies:
-        _cookies[domain_name] = cookies
-    elif domain_name in _cookies:
-        _cookies.pop(domain_name)
-
-def load_cookies_from_browsers(domain_name: str, raise_requirements_error: bool = True) -> Cookies:
-    """
-    Helper function to load cookies from various browsers.
-
-    Args:
-        domain_name (str): The domain for which to load cookies.
-
-    Returns:
-        Dict[str, str]: A dictionary of cookie names and values.
-    """
-    if not has_browser_cookie3:
-        if raise_requirements_error:
-            raise MissingRequirementsError('Install "browser_cookie3" package')
-        return {}
-    cookies = {}
-    for cookie_fn in [_g4f, chrome, chromium, opera, opera_gx, brave, edge, vivaldi, firefox]:
-        try:
-            cookie_jar = cookie_fn(domain_name=domain_name)
-            if len(cookie_jar) and debug.logging:
-                print(f"Read cookies from {cookie_fn.__name__} for {domain_name}")
-            for cookie in cookie_jar:
-                if cookie.name not in cookies:
-                    cookies[cookie.name] = cookie.value
-        except BrowserCookieError:
-            pass
-        except Exception as e:
-            if debug.logging:
-                print(f"Error reading cookies from {cookie_fn.__name__} for {domain_name}: {e}")
-    return cookies
-
-def _g4f(domain_name: str) -> list:
-    """
-    Load cookies from the 'g4f' browser (if exists).
-
-    Args:
-        domain_name (str): The domain for which to load cookies.
-
-    Returns:
-        list: List of cookies.
-    """
-    if not has_platformdirs:
-        return []
-    user_data_dir = user_config_dir("g4f")
-    cookie_file = os.path.join(user_data_dir, "Default", "Cookies")
-    return [] if not os.path.exists(cookie_file) else chrome(cookie_file, domain_name)
+from ..typing import Messages, Optional
+from ..errors import MissingRequirementsError
+from ..cookies import get_cookies
 
 def format_prompt(messages: Messages, add_special_tokens=False) -> str:
     """
@@ -149,5 +58,5 @@ def get_connector(connector: BaseConnector = None, proxy: str = None) -> Optiona
             from aiohttp_socks import ProxyConnector
             connector = ProxyConnector.from_url(proxy)
         except ImportError:
-            raise MissingAiohttpSocksError('Install "aiohttp_socks" package for proxy support')
+            raise MissingRequirementsError('Install "aiohttp_socks" package for proxy support')
     return connector
\ No newline at end of file
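With `MissingAiohttpSocksError` folded into `MissingRequirementsError`, callers of `get_connector` would catch the broader error instead; a minimal sketch under that assumption (the proxy URL is a placeholder):

```python
# Illustrative only: a SOCKS proxy without aiohttp_socks installed now raises
# MissingRequirementsError rather than the removed MissingAiohttpSocksError.
from g4f.Provider.helper import get_connector
from g4f.errors import MissingRequirementsError

try:
    connector = get_connector(proxy="socks5://127.0.0.1:9050")  # placeholder proxy URL
except MissingRequirementsError as error:
    print(f"Proxy support requires an extra package: {error}")
```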
diff --git a/g4f/Provider/needs_auth/ThebApi.py b/g4f/Provider/needs_auth/ThebApi.py
index ea633243..1c7baf8d 100644
--- a/g4f/Provider/needs_auth/ThebApi.py
+++ b/g4f/Provider/needs_auth/ThebApi.py
@@ -43,7 +43,7 @@ class ThebApi(AbstractProvider, ProviderModelMixin):
         model: str,
         messages: Messages,
         stream: bool,
-        auth: str,
+        auth: str = None,
         proxy: str = None,
         **kwargs
     ) -> CreateResult:
diff --git a/g4f/__init__.py b/g4f/__init__.py
index 34c8aa19..93e4aa86 100644
--- a/g4f/__init__.py
+++ b/g4f/__init__.py
@@ -6,9 +6,9 @@ from .errors import *
 from .models import Model, ModelUtils
 from .Provider import AsyncGeneratorProvider, ProviderUtils
 from .typing import Messages, CreateResult, AsyncResult, Union
+from .cookies import get_cookies, set_cookies
 from . import debug, version
 from .base_provider import BaseRetryProvider, ProviderType
-from .Provider.helper import get_cookies, set_cookies
 from .Provider.base_provider import ProviderModelMixin
 
 def get_model_and_provider(model : Union[Model, str],
diff --git a/g4f/cookies.py b/g4f/cookies.py
new file mode 100644
index 00000000..b5c869e7
--- /dev/null
+++ b/g4f/cookies.py
@@ -0,0 +1,97 @@
+from __future__ import annotations
+
+import os
+
+try:
+    from platformdirs import user_config_dir
+    has_platformdirs = True
+except ImportError:
+    has_platformdirs = False
+try:
+    from browser_cookie3 import (
+        chrome, chromium, opera, opera_gx,
+        brave, edge, vivaldi, firefox,
+        _LinuxPasswordManager, BrowserCookieError
+    )
+    has_browser_cookie3 = True
+except ImportError:
+    has_browser_cookie3 = False
+
+from .typing import Dict, Cookies
+from .errors import MissingRequirementsError
+from . import debug
+
+# Global variable to store cookies
+_cookies: Dict[str, Cookies] = {}
+
+if has_browser_cookie3 and os.environ.get('DBUS_SESSION_BUS_ADDRESS') == "/dev/null":
+    _LinuxPasswordManager.get_password = lambda a, b: b"secret"
+
+def get_cookies(domain_name: str = '', raise_requirements_error: bool = True) -> Dict[str, str]:
+    """
+    Load cookies for a given domain from all supported browsers and cache the results.
+
+    Args:
+        domain_name (str): The domain for which to load cookies.
+
+    Returns:
+        Dict[str, str]: A dictionary of cookie names and values.
+    """
+    if domain_name in _cookies:
+        return _cookies[domain_name]
+
+    cookies = load_cookies_from_browsers(domain_name, raise_requirements_error)
+    _cookies[domain_name] = cookies
+    return cookies
+
+def set_cookies(domain_name: str, cookies: Cookies = None) -> None:
+    if cookies:
+        _cookies[domain_name] = cookies
+    elif domain_name in _cookies:
+        _cookies.pop(domain_name)
+
+def load_cookies_from_browsers(domain_name: str, raise_requirements_error: bool = True) -> Cookies:
+    """
+    Helper function to load cookies from various browsers.
+
+    Args:
+        domain_name (str): The domain for which to load cookies.
+
+    Returns:
+        Dict[str, str]: A dictionary of cookie names and values.
+ """ + if not has_browser_cookie3: + if raise_requirements_error: + raise MissingRequirementsError('Install "browser_cookie3" package') + return {} + cookies = {} + for cookie_fn in [_g4f, chrome, chromium, opera, opera_gx, brave, edge, vivaldi, firefox]: + try: + cookie_jar = cookie_fn(domain_name=domain_name) + if len(cookie_jar) and debug.logging: + print(f"Read cookies from {cookie_fn.__name__} for {domain_name}") + for cookie in cookie_jar: + if cookie.name not in cookies: + cookies[cookie.name] = cookie.value + except BrowserCookieError: + pass + except Exception as e: + if debug.logging: + print(f"Error reading cookies from {cookie_fn.__name__} for {domain_name}: {e}") + return cookies + +def _g4f(domain_name: str) -> list: + """ + Load cookies from the 'g4f' browser (if exists). + + Args: + domain_name (str): The domain for which to load cookies. + + Returns: + list: List of cookies. + """ + if not has_platformdirs: + return [] + user_data_dir = user_config_dir("g4f") + cookie_file = os.path.join(user_data_dir, "Default", "Cookies") + return [] if not os.path.exists(cookie_file) else chrome(cookie_file, domain_name)
diff --git a/g4f/errors.py b/g4f/errors.py
index ff28de3e..48171a6e 100644
--- a/g4f/errors.py
+++ b/g4f/errors.py
@@ -31,8 +31,5 @@ class ModelNotSupportedError(Exception):
 class MissingRequirementsError(Exception):
     pass
 
-class MissingAiohttpSocksError(MissingRequirementsError):
-    pass
-
 class MissingAuthError(Exception):
     pass
\ No newline at end of file
diff --git a/g4f/models.py b/g4f/models.py
index ed86024e..3b4ca468 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -15,15 +15,12 @@ from .Provider import (
     DeepInfra,
     ChatBase,
     Liaobots,
-    GeekGpt,
-    FakeGpt,
     FreeGpt,
     Llama2,
-    Vercel,
-    Phind,
+    Vercel,
+    Gemini,
     GptGo,
     Gpt6,
-    Bard,
     Bing,
     You,
     Pi,
@@ -53,7 +50,7 @@ default = Model(
     base_provider = "",
     best_provider = RetryProvider([
         Bing,
-        ChatgptAi, GptGo, GeekGpt,
+        ChatgptAi, GptGo,
         You,
         Chatgpt4Online
     ])
@@ -65,7 +62,6 @@ gpt_35_long = Model(
     base_provider = 'openai',
     best_provider = RetryProvider([
         FreeGpt, You,
-        GeekGpt, FakeGpt,
         Chatgpt4Online,
         ChatgptDemoAi,
         ChatgptNext,
@@ -174,10 +170,10 @@ openchat_35 = Model(
 )
 
 # Bard
-bard = palm = Model(
-    name = 'palm',
+gemini = bard = palm = Model(
+    name = 'gemini',
     base_provider = 'google',
-    best_provider = Bard
+    best_provider = Gemini
 )
 
 claude_v2 = Model(
@@ -271,8 +267,8 @@ class ModelUtils:
         'airoboros-70b': airoboros_70b,
         'airoboros-l2-70b': airoboros_l2_70b,
         'openchat_3.5': openchat_35,
+        'gemini': gemini,
         'gemini-pro': gemini_pro,
-        'bard': bard,
         'claude-v2': claude_v2,
         'pi': pi
     }
diff --git a/g4f/webdriver.py b/g4f/webdriver.py
index d28cd97b..b54fae15 100644
--- a/g4f/webdriver.py
+++ b/g4f/webdriver.py
@@ -9,6 +9,7 @@ try:
     from selenium.webdriver.support.ui import WebDriverWait
     from selenium.webdriver.support import expected_conditions as EC
     from selenium.webdriver.common.keys import Keys
+    from selenium.common.exceptions import NoSuchElementException
     has_requirements = True
 except ImportError:
     from typing import Type as WebDriver
@@ -120,6 +121,8 @@ def bypass_cloudflare(driver: WebDriver, url: str, timeout: int) -> None:
             WebDriverWait(driver, 5).until(
                 EC.presence_of_element_located((By.CSS_SELECTOR, "#challenge-stage input"))
             ).click()
+        except NoSuchElementException:
+            ...
         except Exception as e:
             if debug.logging:
                 print(f"Error bypassing Cloudflare: {e}")
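A hedged sketch of the model rename in `g4f/models.py`: `bard`/`palm` are now aliases of `gemini`, served by the `Gemini` provider (assumes the standard `g4f.ChatCompletion.create` entry point and working Google authentication):

```python
# Illustrative only: request the renamed 'gemini' model by name.
import g4f

response = g4f.ChatCompletion.create(
    model="gemini",  # resolved through the ModelUtils mapping added in this commit
    messages=[{"role": "user", "content": "Say hello"}],
)
print(response)
```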