From 5c75972c50ac936cb6dc8e01cef9cdb08daa8ed7 Mon Sep 17 00:00:00 2001 From: H Lohaus Date: Fri, 9 Feb 2024 14:24:15 +0100 Subject: Update provider and model list (#1568) Move bing.create_images and cookies helper Disable some providers --- README.md | 118 +++++++++++++++++++------------------ etc/testing/_providers.py | 1 + etc/tool/readme_table.py | 70 +++++++++------------- g4f/Provider/CreateImagesBing.py | 94 +++++++++++++++++++++++++++++ g4f/Provider/GptChatly.py | 1 - g4f/Provider/Hashnode.py | 2 +- g4f/Provider/Phind.py | 2 +- g4f/Provider/__init__.py | 2 + g4f/Provider/bing/create_images.py | 90 ++-------------------------- g4f/Provider/helper.py | 99 ++----------------------------- g4f/Provider/needs_auth/ThebApi.py | 2 +- g4f/__init__.py | 2 +- g4f/cookies.py | 97 ++++++++++++++++++++++++++++++ g4f/errors.py | 3 - g4f/models.py | 18 +++--- g4f/webdriver.py | 3 + 16 files changed, 305 insertions(+), 299 deletions(-) create mode 100644 g4f/Provider/CreateImagesBing.py create mode 100644 g4f/cookies.py diff --git a/README.md b/README.md index 797ce304..66311905 100644 --- a/README.md +++ b/README.md @@ -552,47 +552,54 @@ for choice in json_response: | Website | Provider | GPT-3.5 | GPT-4 | Stream | Status | Auth | | ------ | ------- | ------- | ----- | ------ | ------ | ---- | | [bing.com](https://bing.com/chat) | `g4f.Provider.Bing` | ❌ | ✔️ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ | -| [chat.geekgpt.org](https://chat.geekgpt.org) | `g4f.Provider.GeekGpt` | ✔️ | ✔️ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | -| [gptchatly.com](https://gptchatly.com) | `g4f.Provider.GptChatly` | ✔️ | ✔️ | ❌ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | -| [liaobots.site](https://liaobots.site) | `g4f.Provider.Liaobots` | ✔️ | ✔️ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | +| [free.chatgpt.org.uk](https://free.chatgpt.org.uk) | `g4f.Provider.FreeChatgpt` | ✔️ | ✔️ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | +| [liaobots.site](https://liaobots.site) | `g4f.Provider.Liaobots` | ✔️ | ✔️ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ | +| [chat.openai.com](https://chat.openai.com) | `g4f.Provider.OpenaiChat` | ✔️ | ✔️ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ✔️ | | [raycast.com](https://raycast.com) | `g4f.Provider.Raycast` | ✔️ | ✔️ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ✔️ | +| [beta.theb.ai](https://beta.theb.ai) | `g4f.Provider.Theb` | ✔️ | ✔️ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | +| [chat.geekgpt.org](https://chat.geekgpt.org) | `g4f.Provider.GeekGpt` | ✔️ | ✔️ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | ### GPT-3.5 | Website | Provider | GPT-3.5 | GPT-4 | Stream | Status | Auth | | ------ | ------- | ------- | ----- | ------ | ------ | ---- | -| [www.aitianhu.com](https://www.aitianhu.com) | `g4f.Provider.AItianhu` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | | [chat3.aiyunos.top](https://chat3.aiyunos.top/) | `g4f.Provider.AItianhuSpace` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | -| [e.aiask.me](https://e.aiask.me) | `g4f.Provider.AiAsk` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | -| [chat-gpt.org](https://chat-gpt.org/chat) | `g4f.Provider.Aichat` | ✔️ | ❌ | ❌ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | -| 
[www.chatbase.co](https://www.chatbase.co) | `g4f.Provider.ChatBase` | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ | +| [aichatonline.org](https://aichatonline.org) | `g4f.Provider.AiChatOnline` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | +| [openchat.team](https://openchat.team) | `g4f.Provider.Aura` | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ | +| [chatbase.co](https://www.chatbase.co) | `g4f.Provider.ChatBase` | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ | | [chatforai.store](https://chatforai.store) | `g4f.Provider.ChatForAi` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | -| [chatgpt.ai](https://chatgpt.ai) | `g4f.Provider.ChatgptAi` | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ | -| [chatgptx.de](https://chatgptx.de) | `g4f.Provider.ChatgptX` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | -| [chat-shared2.zhile.io](https://chat-shared2.zhile.io) | `g4f.Provider.FakeGpt` | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | -| [freegpts1.aifree.site](https://freegpts1.aifree.site/) | `g4f.Provider.FreeGpt` | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ | -| [gptalk.net](https://gptalk.net) | `g4f.Provider.GPTalk` | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ | -| [ai18.gptforlove.com](https://ai18.gptforlove.com) | `g4f.Provider.GptForLove` | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ | -| [gptgo.ai](https://gptgo.ai) | `g4f.Provider.GptGo` | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ | -| [hashnode.com](https://hashnode.com) | `g4f.Provider.Hashnode` | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ | +| [chatgpt.ai](https://chatgpt.ai) | `g4f.Provider.ChatgptAi` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | +| [chat.chatgptdemo.net](https://chat.chatgptdemo.net) | `g4f.Provider.ChatgptDemo` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | +| [chatgpt-free.cc](https://www.chatgpt-free.cc) | `g4f.Provider.ChatgptNext` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | +| [chat.3211000.xyz](https://chat.3211000.xyz) | `g4f.Provider.Chatxyz` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | +| [gptalk.net](https://gptalk.net) | `g4f.Provider.GPTalk` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | +| [geminiprochat.com](https://geminiprochat.com) | `g4f.Provider.GeminiProChat` | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ | +| [gpt6.ai](https://gpt6.ai) | `g4f.Provider.Gpt6` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | +| [gptchatly.com](https://gptchatly.com) | `g4f.Provider.GptChatly` | ✔️ | ❌ | ❌ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | +| [ai18.gptforlove.com](https://ai18.gptforlove.com) | `g4f.Provider.GptForLove` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | +| [gptgo.ai](https://gptgo.ai) | `g4f.Provider.GptGo` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | +| [gpttalk.ru](https://gpttalk.ru) | `g4f.Provider.GptTalkRu` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | +| 
[koala.sh](https://koala.sh) | `g4f.Provider.Koala` | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ | | [app.myshell.ai](https://app.myshell.ai/chat) | `g4f.Provider.MyShell` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | -| [noowai.com](https://noowai.com) | `g4f.Provider.NoowAi` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | -| [chat.openai.com](https://chat.openai.com) | `g4f.Provider.OpenaiChat` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ✔️ | -| [theb.ai](https://theb.ai) | `g4f.Provider.Theb` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ✔️ | -| [sdk.vercel.ai](https://sdk.vercel.ai) | `g4f.Provider.Vercel` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | +| [onlinegpt.org](https://onlinegpt.org) | `g4f.Provider.OnlineGpt` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | +| [perplexity.ai](https://www.perplexity.ai) | `g4f.Provider.PerplexityAi` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | +| [poe.com](https://poe.com) | `g4f.Provider.Poe` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ✔️ | +| [talkai.info](https://talkai.info) | `g4f.Provider.TalkAi` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | | [you.com](https://you.com) | `g4f.Provider.You` | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ | -| [chat9.yqcloud.top](https://chat9.yqcloud.top/) | `g4f.Provider.Yqcloud` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | -| [chat.acytoo.com](https://chat.acytoo.com) | `g4f.Provider.Acytoo` | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | -| [aibn.cc](https://aibn.cc) | `g4f.Provider.Aibn` | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | -| [ai.ls](https://ai.ls) | `g4f.Provider.Ails` | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | +| [aitianhu.com](https://www.aitianhu.com) | `g4f.Provider.AItianhu` | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | +| [e.aiask.me](https://e.aiask.me) | `g4f.Provider.AiAsk` | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | +| [chatgpt.bestim.org](https://chatgpt.bestim.org) | `g4f.Provider.Bestim` | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | +| [chatanywhere.cn](https://chatanywhere.cn) | `g4f.Provider.ChatAnywhere` | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | | [chatgpt4online.org](https://chatgpt4online.org) | `g4f.Provider.Chatgpt4Online` | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | -| [chat.chatgptdemo.net](https://chat.chatgptdemo.net) | `g4f.Provider.ChatgptDemo` | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | -| [chatgptduo.com](https://chatgptduo.com) | `g4f.Provider.ChatgptDuo` | ✔️ | ❌ | ❌ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | +| [chat.chatgptdemo.ai](https://chat.chatgptdemo.ai) | `g4f.Provider.ChatgptDemoAi` | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | | [chatgptfree.ai](https://chatgptfree.ai) | `g4f.Provider.ChatgptFree` | ✔️ | ❌ | ❌ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | | [chatgptlogin.ai](https://chatgptlogin.ai) | `g4f.Provider.ChatgptLogin` | ✔️ | ❌ | ✔️ | 
![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | -| [cromicle.top](https://cromicle.top) | `g4f.Provider.Cromicle` | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | +| [chatgptx.de](https://chatgptx.de) | `g4f.Provider.ChatgptX` | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | +| [chat-shared2.zhile.io](https://chat-shared2.zhile.io) | `g4f.Provider.FakeGpt` | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | +| [freegpts1.aifree.site](https://freegpts1.aifree.site/) | `g4f.Provider.FreeGpt` | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | | [gptgod.site](https://gptgod.site) | `g4f.Provider.GptGod` | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | -| [opchatgpts.net](https://opchatgpts.net) | `g4f.Provider.Opchatgpts` | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | +| [hashnode.com](https://hashnode.com) | `g4f.Provider.Hashnode` | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | +| [sdk.vercel.ai](https://sdk.vercel.ai) | `g4f.Provider.Vercel` | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | | [chat.ylokh.xyz](https://chat.ylokh.xyz) | `g4f.Provider.Ylokh` | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ | ### Other @@ -601,40 +608,37 @@ for choice in json_response: | ------ | ------- | ------- | ----- | ------ | ------ | ---- | | [bard.google.com](https://bard.google.com) | `g4f.Provider.Bard` | ❌ | ❌ | ❌ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ✔️ | | [deepinfra.com](https://deepinfra.com) | `g4f.Provider.DeepInfra` | ❌ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ | -| [huggingface.co](https://huggingface.co/chat) | `g4f.Provider.HuggingChat` | ❌ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ✔️ | -| [www.llama2.ai](https://www.llama2.ai) | `g4f.Provider.Llama2` | ❌ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | +| [gemini.google.com](https://gemini.google.com) | `g4f.Provider.Gemini` | ❌ | ❌ | ❌ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ✔️ | +| [huggingface.co](https://huggingface.co/chat) | `g4f.Provider.HuggingChat` | ❌ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ | +| [llama2.ai](https://www.llama2.ai) | `g4f.Provider.Llama2` | ❌ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ | +| [labs.perplexity.ai](https://labs.perplexity.ai) | `g4f.Provider.PerplexityLabs` | ❌ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ | +| [phind.com](https://www.phind.com) | `g4f.Provider.Phind` | ❌ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | +| [pi.ai](https://pi.ai/talk) | `g4f.Provider.Pi` | ❌ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ | +| [theb.ai](https://theb.ai) | `g4f.Provider.ThebApi` | ❌ | ❌ | ❌ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ✔️ | | [open-assistant.io](https://open-assistant.io/chat) | `g4f.Provider.OpenAssistant` | ❌ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ✔️ | ### Models -| Model | Base Provider | Provider | Website | -| --------------------------------------- | ------------- | ------------------- | ------------------------------------------- | -| palm | Google | g4f.Provider.Bard | [bard.google.com](https://bard.google.com/) | -| 
h2ogpt-gm-oasst1-en-2048-falcon-7b-v3 | Hugging Face | g4f.Provider.H2o | [www.h2o.ai](https://www.h2o.ai/) | -| h2ogpt-gm-oasst1-en-2048-falcon-40b-v1 | Hugging Face | g4f.Provider.H2o | [www.h2o.ai](https://www.h2o.ai/) | -| h2ogpt-gm-oasst1-en-2048-open-llama-13b | Hugging Face | g4f.Provider.H2o | [www.h2o.ai](https://www.h2o.ai/) | -| claude-instant-v1 | Anthropic | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | -| claude-v1 | Anthropic | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | -| claude-v2 | Anthropic | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | -| command-light-nightly | Cohere | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | -| command-nightly | Cohere | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | -| gpt-neox-20b | Hugging Face | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | -| oasst-sft-1-pythia-12b | Hugging Face | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | -| oasst-sft-4-pythia-12b-epoch-3.5 | Hugging Face | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | -| santacoder | Hugging Face | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | -| bloom | Hugging Face | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | -| flan-t5-xxl | Hugging Face | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | -| code-davinci-002 | OpenAI | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | -| gpt-3.5-turbo-16k | OpenAI | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | -| gpt-3.5-turbo-16k-0613 | OpenAI | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | -| gpt-4-0613 | OpenAI | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | -| text-ada-001 | OpenAI | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | -| text-babbage-001 | OpenAI | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | -| text-curie-001 | OpenAI | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | -| text-davinci-002 | OpenAI | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | -| text-davinci-003 | OpenAI | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | -| llama13b-v2-chat | Replicate | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | -| llama7b-v2-chat | Replicate | g4f.Provider.Vercel | [sdk.vercel.ai](https://sdk.vercel.ai/) | +| Model | Base Provider | Provider | Website | +| ----- | ------------- | -------- | ------- | +| gpt-3.5-turbo | OpenAI | 5+ Providers | [openai.com](https://openai.com/) | +| gpt-4 | OpenAI | 2+ Providers | [openai.com](https://openai.com/) | +| gpt-4-turbo | OpenAI | g4f.Provider.Bing | [openai.com](https://openai.com/) | +| Llama-2-7b-chat-hf | Huggingface | 2+ Providers | [huggingface.co](https://huggingface.co/) | +| Llama-2-13b-chat-hf | Huggingface | 2+ Providers | [huggingface.co](https://huggingface.co/) | +| Llama-2-70b-chat-hf | Huggingface | 4+ Providers | [huggingface.co](https://huggingface.co/) | +| CodeLlama-34b-Instruct-hf | Huggingface | 3+ Providers | [huggingface.co](https://huggingface.co/) | +| Mixtral-8x7B-Instruct-v0.1 | Huggingface | 3+ Providers | [huggingface.co](https://huggingface.co/) | +| Mistral-7B-Instruct-v0.1 | Huggingface | 3+ Providers | [huggingface.co](https://huggingface.co/) | +| dolphin-2.6-mixtral-8x7b | Huggingface | g4f.Provider.DeepInfra | [huggingface.co](https://huggingface.co/) | +| lzlv_70b_fp16_hf | Huggingface | 
g4f.Provider.DeepInfra | [huggingface.co](https://huggingface.co/) | +| airoboros-70b | Huggingface | g4f.Provider.DeepInfra | [huggingface.co](https://huggingface.co/) | +| airoboros-l2-70b-gpt4-1.4.1 | Huggingface | g4f.Provider.DeepInfra | [huggingface.co](https://huggingface.co/) | +| openchat_3.5 | Huggingface | 2+ Providers | [huggingface.co](https://huggingface.co/) | +| gemini | Google | g4f.Provider.Gemini | [gemini.google.com](https://gemini.google.com/) | +| gemini-pro | Google | 2+ Providers | [gemini.google.com](https://gemini.google.com/) | +| claude-v2 | Anthropic | 2+ Providers | [anthropic.com](https://www.anthropic.com/) | +| pi | Inflection | g4f.Provider.Pi | [inflection.ai](https://inflection.ai/) | ## 🔗 Related GPT4Free Projects diff --git a/etc/testing/_providers.py b/etc/testing/_providers.py index 947fe7a0..e2ef0cbe 100644 --- a/etc/testing/_providers.py +++ b/etc/testing/_providers.py @@ -36,6 +36,7 @@ def get_providers() -> list[ProviderType]: for provider in __providers__ if provider.__name__ not in dir(Provider.deprecated) and provider.__name__ not in dir(Provider.unfinished) + and provider.url is not None ] def create_response(provider: ProviderType) -> str: diff --git a/etc/tool/readme_table.py b/etc/tool/readme_table.py index 0ce9d0b9..d5a409ec 100644 --- a/etc/tool/readme_table.py +++ b/etc/tool/readme_table.py @@ -1,22 +1,15 @@ import re -import sys -from pathlib import Path from urllib.parse import urlparse - -sys.path.append(str(Path(__file__).parent.parent.parent)) - import asyncio -from g4f import models -from g4f import ChatCompletion -from g4f.Provider.base_provider import BaseProvider -from etc.testing._providers import get_providers +from g4f import models, ChatCompletion +from g4f.base_provider import BaseProvider, BaseRetryProvider, ProviderType +from etc.testing._providers import get_providers from g4f import debug debug.logging = True - -async def test_async(provider: type[BaseProvider]): +async def test_async(provider: ProviderType): if not provider.working: return False messages = [{"role": "user", "content": "Hello Assistant!"}] @@ -32,19 +25,17 @@ async def test_async(provider: type[BaseProvider]): print(f"{provider.__name__}: {e.__class__.__name__}: {e}") return False - -async def test_async_list(providers: list[type[BaseProvider]]): +def test_async_list(providers: list[ProviderType]): responses: list = [ - test_async(_provider) + asyncio.run(test_async(_provider)) for _provider in providers ] - return await asyncio.gather(*responses) - + return responses def print_providers(): providers = get_providers() - responses = asyncio.run(test_async_list(providers)) + responses = test_async_list(providers) for type in ("GPT-4", "GPT-3.5", "Other"): lines = [ @@ -67,7 +58,7 @@ def print_providers(): do_continue = True if not do_continue: continue - netloc = urlparse(_provider.url).netloc + netloc = urlparse(_provider.url).netloc.replace("www.", "") website = f"[{netloc}]({_provider.url})" provider_name = f"`g4f.Provider.{_provider.__name__}`" @@ -92,48 +83,43 @@ def print_providers(): def print_models(): base_provider_names = { - "cohere": "Cohere", "google": "Google", "openai": "OpenAI", - "anthropic": "Anthropic", - "replicate": "Replicate", "huggingface": "Huggingface", + "anthropic": "Anthropic", + "inflection": "Inflection" } provider_urls = { - "Bard": "https://bard.google.com/", - "H2o": "https://www.h2o.ai/", - "Vercel": "https://sdk.vercel.ai/", + "google": "https://gemini.google.com/", + "openai": "https://openai.com/", + "huggingface": 
"https://huggingface.co/", + "anthropic": "https://www.anthropic.com/", + "inflection": "https://inflection.ai/", } lines = [ "| Model | Base Provider | Provider | Website |", "| ----- | ------------- | -------- | ------- |", ] - - _models = get_models() - for model in _models: - if not model.best_provider or model.best_provider.__name__ not in provider_urls: - continue - + for name, model in models.ModelUtils.convert.items(): + if name.startswith("gpt-3.5") or name.startswith("gpt-4"): + if name not in ("gpt-3.5-turbo", "gpt-4", "gpt-4-turbo"): + continue name = re.split(r":|/", model.name)[-1] base_provider = base_provider_names[model.base_provider] - provider_name = f"g4f.provider.{model.best_provider.__name__}" - provider_url = provider_urls[model.best_provider.__name__] - netloc = urlparse(provider_url).netloc + if not isinstance(model.best_provider, BaseRetryProvider): + provider_name = f"g4f.Provider.{model.best_provider.__name__}" + else: + provider_name = f"{len(model.best_provider.providers)}+ Providers" + provider_url = provider_urls[model.base_provider] + netloc = urlparse(provider_url).netloc.replace("www.", "") website = f"[{netloc}]({provider_url})" lines.append(f"| {name} | {base_provider} | {provider_name} | {website} |") print("\n".join(lines)) - -def get_models(): - _models = [item[1] for item in models.__dict__.items()] - _models = [model for model in _models if type(model) is models.Model] - return [model for model in _models if model.name not in ["gpt-3.5-turbo", "gpt-4"]] - - if __name__ == "__main__": - print_providers() - print("\n", "-" * 50, "\n") + #print_providers() + #print("\n", "-" * 50, "\n") print_models() \ No newline at end of file diff --git a/g4f/Provider/CreateImagesBing.py b/g4f/Provider/CreateImagesBing.py new file mode 100644 index 00000000..4d045188 --- /dev/null +++ b/g4f/Provider/CreateImagesBing.py @@ -0,0 +1,94 @@ +from __future__ import annotations + +import asyncio +import time +import os +from typing import Generator + +from ..cookies import get_cookies +from ..webdriver import WebDriver, get_driver_cookies, get_browser +from ..image import ImageResponse +from ..errors import MissingRequirementsError, MissingAuthError +from .bing.create_images import BING_URL, create_images, create_session + +BING_URL = "https://www.bing.com" +TIMEOUT_LOGIN = 1200 + +def wait_for_login(driver: WebDriver, timeout: int = TIMEOUT_LOGIN) -> None: + """ + Waits for the user to log in within a given timeout period. + + Args: + driver (WebDriver): Webdriver for browser automation. + timeout (int): Maximum waiting time in seconds. + + Raises: + RuntimeError: If the login process exceeds the timeout. + """ + driver.get(f"{BING_URL}/") + start_time = time.time() + while not driver.get_cookie("_U"): + if time.time() - start_time > timeout: + raise RuntimeError("Timeout error") + time.sleep(0.5) + +def get_cookies_from_browser(proxy: str = None) -> dict[str, str]: + """ + Retrieves cookies from the browser using webdriver. + + Args: + proxy (str, optional): Proxy configuration. + + Returns: + dict[str, str]: Retrieved cookies. 
+ """ + with get_browser(proxy=proxy) as driver: + wait_for_login(driver) + time.sleep(1) + return get_driver_cookies(driver) + +class CreateImagesBing: + """A class for creating images using Bing.""" + + def __init__(self, cookies: dict[str, str] = {}, proxy: str = None) -> None: + self.cookies = cookies + self.proxy = proxy + + def create_completion(self, prompt: str) -> Generator[ImageResponse, None, None]: + """ + Generator for creating imagecompletion based on a prompt. + + Args: + prompt (str): Prompt to generate images. + + Yields: + Generator[str, None, None]: The final output as markdown formatted string with images. + """ + cookies = self.cookies or get_cookies(".bing.com", False) + if "_U" not in cookies: + login_url = os.environ.get("G4F_LOGIN_URL") + if login_url: + yield f"Please login: [Bing]({login_url})\n\n" + try: + self.cookies = get_cookies_from_browser(self.proxy) + except MissingRequirementsError as e: + raise MissingAuthError(f'Missing "_U" cookie. {e}') + yield asyncio.run(self.create_async(prompt)) + + async def create_async(self, prompt: str) -> ImageResponse: + """ + Asynchronously creates a markdown formatted string with images based on the prompt. + + Args: + prompt (str): Prompt to generate images. + + Returns: + str: Markdown formatted string with images. + """ + cookies = self.cookies or get_cookies(".bing.com", False) + if "_U" not in cookies: + raise MissingAuthError('Missing "_U" cookie') + proxy = os.environ.get("G4F_PROXY") + async with create_session(cookies, proxy) as session: + images = await create_images(session, prompt, self.proxy) + return ImageResponse(images, prompt, {"preview": "{image}?w=200&h=200"}) \ No newline at end of file diff --git a/g4f/Provider/GptChatly.py b/g4f/Provider/GptChatly.py index d98c2af4..9fb739a8 100644 --- a/g4f/Provider/GptChatly.py +++ b/g4f/Provider/GptChatly.py @@ -10,7 +10,6 @@ class GptChatly(AsyncProvider): working = True supports_message_history = True supports_gpt_35_turbo = True - supports_gpt_4 = True @classmethod async def create_async( diff --git a/g4f/Provider/Hashnode.py b/g4f/Provider/Hashnode.py index a287fa7c..7a0c2903 100644 --- a/g4f/Provider/Hashnode.py +++ b/g4f/Provider/Hashnode.py @@ -13,7 +13,7 @@ class SearchTypes(): class Hashnode(AsyncGeneratorProvider): url = "https://hashnode.com" - working = True + working = False supports_message_history = True supports_gpt_35_turbo = True _sources = [] diff --git a/g4f/Provider/Phind.py b/g4f/Provider/Phind.py index a7fdbeca..746dcbcc 100644 --- a/g4f/Provider/Phind.py +++ b/g4f/Provider/Phind.py @@ -67,7 +67,7 @@ class Phind(AsyncGeneratorProvider): if chunk.startswith(b''): break if chunk.startswith(b''): - raise RuntimeError(f"Response: {chunk}") + raise RuntimeError(f"Response: {chunk.decode()}") if chunk.startswith(b'') or chunk.startswith(b''): pass elif chunk.startswith(b"") or chunk.startswith(b""): diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py index 1b45b00d..7dbc1504 100644 --- a/g4f/Provider/__init__.py +++ b/g4f/Provider/__init__.py @@ -53,6 +53,8 @@ from .Vercel import Vercel from .Ylokh import Ylokh from .You import You +from .CreateImagesBing import CreateImagesBing + import sys __modules__: list = [ diff --git a/g4f/Provider/bing/create_images.py b/g4f/Provider/bing/create_images.py index b31e9408..de4fd476 100644 --- a/g4f/Provider/bing/create_images.py +++ b/g4f/Provider/bing/create_images.py @@ -7,10 +7,9 @@ from __future__ import annotations import asyncio import time import json -import os from aiohttp import 
ClientSession, BaseConnector from urllib.parse import quote -from typing import Generator, List, Dict +from typing import List, Dict try: from bs4 import BeautifulSoup @@ -19,14 +18,11 @@ except ImportError: has_requirements = False from ..create_images import CreateImagesProvider -from ..helper import get_cookies, get_connector -from ...webdriver import WebDriver, get_driver_cookies, get_browser +from ..helper import get_connector from ...base_provider import ProviderType -from ...image import ImageResponse -from ...errors import MissingRequirementsError, MissingAuthError +from ...errors import MissingRequirementsError BING_URL = "https://www.bing.com" -TIMEOUT_LOGIN = 1200 TIMEOUT_IMAGE_CREATION = 300 ERRORS = [ "this prompt is being reviewed", @@ -39,24 +35,6 @@ BAD_IMAGES = [ "https://r.bing.com/rp/TX9QuO3WzcCJz1uaaSwQAz39Kb0.jpg", ] -def wait_for_login(driver: WebDriver, timeout: int = TIMEOUT_LOGIN) -> None: - """ - Waits for the user to log in within a given timeout period. - - Args: - driver (WebDriver): Webdriver for browser automation. - timeout (int): Maximum waiting time in seconds. - - Raises: - RuntimeError: If the login process exceeds the timeout. - """ - driver.get(f"{BING_URL}/") - start_time = time.time() - while not driver.get_cookie("_U"): - if time.time() - start_time > timeout: - raise RuntimeError("Timeout error") - time.sleep(0.5) - def create_session(cookies: Dict[str, str], proxy: str = None, connector: BaseConnector = None) -> ClientSession: """ Creates a new client session with specified cookies and headers. @@ -170,67 +148,6 @@ def read_images(html_content: str) -> List[str]: raise RuntimeError("No images found") return images -def get_cookies_from_browser(proxy: str = None) -> dict[str, str]: - """ - Retrieves cookies from the browser using webdriver. - - Args: - proxy (str, optional): Proxy configuration. - - Returns: - dict[str, str]: Retrieved cookies. - """ - with get_browser(proxy=proxy) as driver: - wait_for_login(driver) - time.sleep(1) - return get_driver_cookies(driver) - -class CreateImagesBing: - """A class for creating images using Bing.""" - - def __init__(self, cookies: dict[str, str] = {}, proxy: str = None) -> None: - self.cookies = cookies - self.proxy = proxy - - def create_completion(self, prompt: str) -> Generator[ImageResponse, None, None]: - """ - Generator for creating imagecompletion based on a prompt. - - Args: - prompt (str): Prompt to generate images. - - Yields: - Generator[str, None, None]: The final output as markdown formatted string with images. - """ - cookies = self.cookies or get_cookies(".bing.com", False) - if "_U" not in cookies: - login_url = os.environ.get("G4F_LOGIN_URL") - if login_url: - yield f"Please login: [Bing]({login_url})\n\n" - try: - self.cookies = get_cookies_from_browser(self.proxy) - except MissingRequirementsError as e: - raise MissingAuthError(f'Missing "_U" cookie. {e}') - yield asyncio.run(self.create_async(prompt)) - - async def create_async(self, prompt: str) -> ImageResponse: - """ - Asynchronously creates a markdown formatted string with images based on the prompt. - - Args: - prompt (str): Prompt to generate images. - - Returns: - str: Markdown formatted string with images. 
- """ - cookies = self.cookies or get_cookies(".bing.com", False) - if "_U" not in cookies: - raise MissingAuthError('Missing "_U" cookie') - proxy = os.environ.get("G4F_PROXY") - async with create_session(cookies, proxy) as session: - images = await create_images(session, prompt, self.proxy) - return ImageResponse(images, prompt, {"preview": "{image}?w=200&h=200"}) - def patch_provider(provider: ProviderType) -> CreateImagesProvider: """ Patches a provider to include image creation capabilities. @@ -241,6 +158,7 @@ def patch_provider(provider: ProviderType) -> CreateImagesProvider: Returns: CreateImagesProvider: The patched provider with image creation capabilities. """ + from ..CreateImagesBing import CreateImagesBing service = CreateImagesBing() return CreateImagesProvider( provider, diff --git a/g4f/Provider/helper.py b/g4f/Provider/helper.py index 2cab5e6f..35480255 100644 --- a/g4f/Provider/helper.py +++ b/g4f/Provider/helper.py @@ -1,104 +1,13 @@ from __future__ import annotations -import os import random import secrets import string from aiohttp import BaseConnector -try: - from platformdirs import user_config_dir - has_platformdirs = True -except ImportError: - has_platformdirs = False -try: - from browser_cookie3 import ( - chrome, chromium, opera, opera_gx, - brave, edge, vivaldi, firefox, - _LinuxPasswordManager, BrowserCookieError - ) - has_browser_cookie3 = True -except ImportError: - has_browser_cookie3 = False - -from ..typing import Dict, Messages, Cookies, Optional -from ..errors import MissingAiohttpSocksError, MissingRequirementsError -from .. import debug - -# Global variable to store cookies -_cookies: Dict[str, Cookies] = {} - -if has_browser_cookie3 and os.environ.get('DBUS_SESSION_BUS_ADDRESS') == "/dev/null": - _LinuxPasswordManager.get_password = lambda a, b: b"secret" - -def get_cookies(domain_name: str = '', raise_requirements_error: bool = True) -> Dict[str, str]: - """ - Load cookies for a given domain from all supported browsers and cache the results. - - Args: - domain_name (str): The domain for which to load cookies. - - Returns: - Dict[str, str]: A dictionary of cookie names and values. - """ - if domain_name in _cookies: - return _cookies[domain_name] - - cookies = load_cookies_from_browsers(domain_name, raise_requirements_error) - _cookies[domain_name] = cookies - return cookies - -def set_cookies(domain_name: str, cookies: Cookies = None) -> None: - if cookies: - _cookies[domain_name] = cookies - elif domain_name in _cookies: - _cookies.pop(domain_name) - -def load_cookies_from_browsers(domain_name: str, raise_requirements_error: bool = True) -> Cookies: - """ - Helper function to load cookies from various browsers. - - Args: - domain_name (str): The domain for which to load cookies. - - Returns: - Dict[str, str]: A dictionary of cookie names and values. 
- """ - if not has_browser_cookie3: - if raise_requirements_error: - raise MissingRequirementsError('Install "browser_cookie3" package') - return {} - cookies = {} - for cookie_fn in [_g4f, chrome, chromium, opera, opera_gx, brave, edge, vivaldi, firefox]: - try: - cookie_jar = cookie_fn(domain_name=domain_name) - if len(cookie_jar) and debug.logging: - print(f"Read cookies from {cookie_fn.__name__} for {domain_name}") - for cookie in cookie_jar: - if cookie.name not in cookies: - cookies[cookie.name] = cookie.value - except BrowserCookieError: - pass - except Exception as e: - if debug.logging: - print(f"Error reading cookies from {cookie_fn.__name__} for {domain_name}: {e}") - return cookies - -def _g4f(domain_name: str) -> list: - """ - Load cookies from the 'g4f' browser (if exists). - - Args: - domain_name (str): The domain for which to load cookies. - - Returns: - list: List of cookies. - """ - if not has_platformdirs: - return [] - user_data_dir = user_config_dir("g4f") - cookie_file = os.path.join(user_data_dir, "Default", "Cookies") - return [] if not os.path.exists(cookie_file) else chrome(cookie_file, domain_name) +from ..typing import Messages, Optional +from ..errors import MissingRequirementsError +from ..cookies import get_cookies def format_prompt(messages: Messages, add_special_tokens=False) -> str: """ @@ -149,5 +58,5 @@ def get_connector(connector: BaseConnector = None, proxy: str = None) -> Optiona from aiohttp_socks import ProxyConnector connector = ProxyConnector.from_url(proxy) except ImportError: - raise MissingAiohttpSocksError('Install "aiohttp_socks" package for proxy support') + raise MissingRequirementsError('Install "aiohttp_socks" package for proxy support') return connector \ No newline at end of file diff --git a/g4f/Provider/needs_auth/ThebApi.py b/g4f/Provider/needs_auth/ThebApi.py index ea633243..1c7baf8d 100644 --- a/g4f/Provider/needs_auth/ThebApi.py +++ b/g4f/Provider/needs_auth/ThebApi.py @@ -43,7 +43,7 @@ class ThebApi(AbstractProvider, ProviderModelMixin): model: str, messages: Messages, stream: bool, - auth: str, + auth: str = None, proxy: str = None, **kwargs ) -> CreateResult: diff --git a/g4f/__init__.py b/g4f/__init__.py index 34c8aa19..93e4aa86 100644 --- a/g4f/__init__.py +++ b/g4f/__init__.py @@ -6,9 +6,9 @@ from .errors import * from .models import Model, ModelUtils from .Provider import AsyncGeneratorProvider, ProviderUtils from .typing import Messages, CreateResult, AsyncResult, Union +from .cookies import get_cookies, set_cookies from . import debug, version from .base_provider import BaseRetryProvider, ProviderType -from .Provider.helper import get_cookies, set_cookies from .Provider.base_provider import ProviderModelMixin def get_model_and_provider(model : Union[Model, str], diff --git a/g4f/cookies.py b/g4f/cookies.py new file mode 100644 index 00000000..b5c869e7 --- /dev/null +++ b/g4f/cookies.py @@ -0,0 +1,97 @@ +from __future__ import annotations + +import os + +try: + from platformdirs import user_config_dir + has_platformdirs = True +except ImportError: + has_platformdirs = False +try: + from browser_cookie3 import ( + chrome, chromium, opera, opera_gx, + brave, edge, vivaldi, firefox, + _LinuxPasswordManager, BrowserCookieError + ) + has_browser_cookie3 = True +except ImportError: + has_browser_cookie3 = False + +from .typing import Dict, Cookies +from .errors import MissingRequirementsError +from . 
import debug + +# Global variable to store cookies +_cookies: Dict[str, Cookies] = {} + +if has_browser_cookie3 and os.environ.get('DBUS_SESSION_BUS_ADDRESS') == "/dev/null": + _LinuxPasswordManager.get_password = lambda a, b: b"secret" + +def get_cookies(domain_name: str = '', raise_requirements_error: bool = True) -> Dict[str, str]: + """ + Load cookies for a given domain from all supported browsers and cache the results. + + Args: + domain_name (str): The domain for which to load cookies. + + Returns: + Dict[str, str]: A dictionary of cookie names and values. + """ + if domain_name in _cookies: + return _cookies[domain_name] + + cookies = load_cookies_from_browsers(domain_name, raise_requirements_error) + _cookies[domain_name] = cookies + return cookies + +def set_cookies(domain_name: str, cookies: Cookies = None) -> None: + if cookies: + _cookies[domain_name] = cookies + elif domain_name in _cookies: + _cookies.pop(domain_name) + +def load_cookies_from_browsers(domain_name: str, raise_requirements_error: bool = True) -> Cookies: + """ + Helper function to load cookies from various browsers. + + Args: + domain_name (str): The domain for which to load cookies. + + Returns: + Dict[str, str]: A dictionary of cookie names and values. + """ + if not has_browser_cookie3: + if raise_requirements_error: + raise MissingRequirementsError('Install "browser_cookie3" package') + return {} + cookies = {} + for cookie_fn in [_g4f, chrome, chromium, opera, opera_gx, brave, edge, vivaldi, firefox]: + try: + cookie_jar = cookie_fn(domain_name=domain_name) + if len(cookie_jar) and debug.logging: + print(f"Read cookies from {cookie_fn.__name__} for {domain_name}") + for cookie in cookie_jar: + if cookie.name not in cookies: + cookies[cookie.name] = cookie.value + except BrowserCookieError: + pass + except Exception as e: + if debug.logging: + print(f"Error reading cookies from {cookie_fn.__name__} for {domain_name}: {e}") + return cookies + +def _g4f(domain_name: str) -> list: + """ + Load cookies from the 'g4f' browser (if exists). + + Args: + domain_name (str): The domain for which to load cookies. + + Returns: + list: List of cookies. 
+ """ + if not has_platformdirs: + return [] + user_data_dir = user_config_dir("g4f") + cookie_file = os.path.join(user_data_dir, "Default", "Cookies") + return [] if not os.path.exists(cookie_file) else chrome(cookie_file, domain_name) \ No newline at end of file diff --git a/g4f/errors.py b/g4f/errors.py index ff28de3e..48171a6e 100644 --- a/g4f/errors.py +++ b/g4f/errors.py @@ -31,8 +31,5 @@ class ModelNotSupportedError(Exception): class MissingRequirementsError(Exception): pass -class MissingAiohttpSocksError(MissingRequirementsError): - pass - class MissingAuthError(Exception): pass \ No newline at end of file diff --git a/g4f/models.py b/g4f/models.py index ed86024e..3b4ca468 100644 --- a/g4f/models.py +++ b/g4f/models.py @@ -15,15 +15,12 @@ from .Provider import ( DeepInfra, ChatBase, Liaobots, - GeekGpt, - FakeGpt, FreeGpt, Llama2, - Vercel, - Phind, + Vercel, + Gemini, GptGo, Gpt6, - Bard, Bing, You, Pi, @@ -53,7 +50,7 @@ default = Model( base_provider = "", best_provider = RetryProvider([ Bing, - ChatgptAi, GptGo, GeekGpt, + ChatgptAi, GptGo, You, Chatgpt4Online ]) @@ -65,7 +62,6 @@ gpt_35_long = Model( base_provider = 'openai', best_provider = RetryProvider([ FreeGpt, You, - GeekGpt, FakeGpt, Chatgpt4Online, ChatgptDemoAi, ChatgptNext, @@ -174,10 +170,10 @@ openchat_35 = Model( ) # Bard -bard = palm = Model( - name = 'palm', +gemini = bard = palm = Model( + name = 'gemini', base_provider = 'google', - best_provider = Bard + best_provider = Gemini ) claude_v2 = Model( @@ -271,8 +267,8 @@ class ModelUtils: 'airoboros-70b': airoboros_70b, 'airoboros-l2-70b': airoboros_l2_70b, 'openchat_3.5': openchat_35, + 'gemini': gemini, 'gemini-pro': gemini_pro, - 'bard': bard, 'claude-v2': claude_v2, 'pi': pi } diff --git a/g4f/webdriver.py b/g4f/webdriver.py index d28cd97b..b54fae15 100644 --- a/g4f/webdriver.py +++ b/g4f/webdriver.py @@ -9,6 +9,7 @@ try: from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.common.keys import Keys + from selenium.common.exceptions import NoSuchElementException has_requirements = True except ImportError: from typing import Type as WebDriver @@ -120,6 +121,8 @@ def bypass_cloudflare(driver: WebDriver, url: str, timeout: int) -> None: WebDriverWait(driver, 5).until( EC.presence_of_element_located((By.CSS_SELECTOR, "#challenge-stage input")) ).click() + except NoSuchElementException: + ... except Exception as e: if debug.logging: print(f"Error bypassing Cloudflare: {e}") -- cgit v1.2.3
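Usage sketch (assumptions noted below): with this change the cookie helpers move from g4f.Provider.helper to the new g4f.cookies module, and the Bing image generation class moves from g4f/Provider/bing/create_images.py to g4f/Provider/CreateImagesBing.py. The following minimal sketch is based only on the signatures added in this patch; the domain name, the "_U" cookie value, and the prompt are illustrative placeholders, not working credentials.

# Minimal sketch of the relocated helpers, assuming the module layout and
# signatures introduced in this patch. Domain names, cookie values and the
# prompt below are placeholders for illustration.
import asyncio

from g4f.cookies import get_cookies, set_cookies
from g4f.Provider import CreateImagesBing

# Cookies are now cached per domain in g4f.cookies. With
# raise_requirements_error=False an empty dict is returned instead of raising
# MissingRequirementsError when browser_cookie3 is not installed.
cookies = get_cookies(".bing.com", raise_requirements_error=False)

# Override the cached cookies for a domain, or clear the cached entry by
# calling set_cookies with no cookie dict.
set_cookies(".bing.com", {"_U": "<placeholder _U cookie>"})

# Bing image generation now lives in its own provider module; create_async
# raises MissingAuthError when no "_U" cookie is available.
service = CreateImagesBing(cookies={"_U": "<placeholder _U cookie>"})
image_response = asyncio.run(service.create_async("a watercolor fox"))
# image_response is an ImageResponse wrapping the generated image URLs.

Note that create_async raises MissingAuthError when the "_U" cookie is absent, while create_completion first yields a login link (if G4F_LOGIN_URL is set) and then tries to read the cookie from a browser session via webdriver, as shown in the CreateImagesBing.py hunk above.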