path: root/g4f
author     Tekky <98614666+xtekky@users.noreply.github.com>  2024-11-15 11:58:58 +0100
committer  Tekky <98614666+xtekky@users.noreply.github.com>  2024-11-15 11:58:58 +0100
commit     6ac545445a6abd80b5e664165442f3eccef8dd24 (patch)
tree       4742a90026cb797a5f862c099523935e3659e83b /g4f
parent     Ai4Chat not working (diff)
Diffstat (limited to '')
-rw-r--r--  g4f/Provider/Pi.py                                                           1
-rw-r--r--  g4f/Provider/__init__.py                                                     1
-rw-r--r--  g4f/Provider/needs_auth/Gemini.py                                            1
-rw-r--r--  g4f/Provider/needs_auth/Poe.py                                               1
-rw-r--r--  g4f/Provider/needs_auth/Raycast.py                                           8
-rw-r--r--  g4f/Provider/needs_auth/Theb.py                                              1
-rw-r--r--  g4f/Provider/not_working/Allyfy.py (renamed from g4f/Provider/Allyfy.py)     8
-rw-r--r--  g4f/models.py                                                              346
8 files changed, 188 insertions, 179 deletions
diff --git a/g4f/Provider/Pi.py b/g4f/Provider/Pi.py
index 266647ba..68a7357f 100644
--- a/g4f/Provider/Pi.py
+++ b/g4f/Provider/Pi.py
@@ -12,6 +12,7 @@ class Pi(AbstractProvider):
supports_stream = True
_session = None
default_model = "pi"
+ models = [default_model]
@classmethod
def create_completion(
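
Note: the new `models = [default_model]` attribute lets the Pi provider advertise the single model it serves. A minimal inspection sketch, assuming Pi is re-exported from g4f.Provider the same way the other providers in __init__.py below are (illustrative only):

    # Sketch: reading the class attributes shown in this hunk.
    from g4f.Provider import Pi  # assumption: Pi is re-exported by g4f.Provider

    print(Pi.default_model)  # "pi"
    print(Pi.models)         # ["pi"] after this change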
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index dcf9c352..da0eacfe 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -12,7 +12,6 @@ from .not_working import *
from .local import *
from .AIUncensored import AIUncensored
-from .Allyfy import Allyfy
from .Airforce import Airforce
from .Bing import Bing
from .Blackbox import Blackbox
diff --git a/g4f/Provider/needs_auth/Gemini.py b/g4f/Provider/needs_auth/Gemini.py
index 8d741476..dad54c84 100644
--- a/g4f/Provider/needs_auth/Gemini.py
+++ b/g4f/Provider/needs_auth/Gemini.py
@@ -57,6 +57,7 @@ class Gemini(AsyncGeneratorProvider):
default_model = 'gemini'
image_models = ["gemini"]
default_vision_model = "gemini"
+ models = ["gemini", "gemini-1.5-flash", "gemini-1.5-pro"]
_cookies: Cookies = None
_snlm0e: str = None
_sid: str = None
diff --git a/g4f/Provider/needs_auth/Poe.py b/g4f/Provider/needs_auth/Poe.py
index 0c969d27..65fdbef9 100644
--- a/g4f/Provider/needs_auth/Poe.py
+++ b/g4f/Provider/needs_auth/Poe.py
@@ -26,6 +26,7 @@ class Poe(AbstractProvider):
needs_auth = True
supports_gpt_35_turbo = True
supports_stream = True
+ models = models.keys()
@classmethod
def create_completion(
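
Note: Poe (and Theb, below) now derive their model list from the keys of the module-level `models` mapping. A hedged sketch of the pattern, with a hypothetical mapping standing in for the real one defined elsewhere in the module:

    # Illustrative pattern only; this `models` dict is a hypothetical stand-in for
    # the module-level mapping the provider actually defines.
    models = {"gpt-3.5-turbo": "model-id-1", "gpt-4": "model-id-2"}

    class Poe:
        # in the class body, the right-hand `models` resolves to the module-level dict
        models = models.keys()

    print(list(Poe.models))  # ["gpt-3.5-turbo", "gpt-4"]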
diff --git a/g4f/Provider/needs_auth/Raycast.py b/g4f/Provider/needs_auth/Raycast.py
index 07abeda3..b8ec5a97 100644
--- a/g4f/Provider/needs_auth/Raycast.py
+++ b/g4f/Provider/needs_auth/Raycast.py
@@ -16,6 +16,11 @@ class Raycast(AbstractProvider):
needs_auth = True
working = True
+ models = [
+ "gpt-3.5-turbo",
+ "gpt-4"
+ ]
+
@staticmethod
def create_completion(
model: str,
@@ -25,6 +30,9 @@ class Raycast(AbstractProvider):
**kwargs,
) -> CreateResult:
auth = kwargs.get('auth')
+ if not auth:
+ raise ValueError("Raycast needs an auth token, pass it with the `auth` parameter")
+
headers = {
'Accept': 'application/json',
'Accept-Language': 'en-US,en;q=0.9',
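
Note: the added guard fails fast with a ValueError when no token is supplied. A hedged usage sketch, assuming the token is forwarded to the provider through g4f.ChatCompletion.create's extra keyword arguments (the client call itself is not part of this diff):

    # Sketch only: supplying the required token via the `auth` keyword argument.
    import g4f
    from g4f.Provider import Raycast  # assumption: Raycast is re-exported by g4f.Provider

    response = g4f.ChatCompletion.create(
        model="gpt-4",
        messages=[{"role": "user", "content": "Hello"}],
        provider=Raycast,
        auth="rc_xxx",  # hypothetical token; omitting it now raises ValueError
    )
    print(response)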
diff --git a/g4f/Provider/needs_auth/Theb.py b/g4f/Provider/needs_auth/Theb.py
index af690063..c7d7d58e 100644
--- a/g4f/Provider/needs_auth/Theb.py
+++ b/g4f/Provider/needs_auth/Theb.py
@@ -38,6 +38,7 @@ class Theb(AbstractProvider):
supports_gpt_35_turbo = True
supports_gpt_4 = True
supports_stream = True
+ models = models.keys()
@classmethod
def create_completion(
diff --git a/g4f/Provider/Allyfy.py b/g4f/Provider/not_working/Allyfy.py
index 51f44963..a1c73499 100644
--- a/g4f/Provider/Allyfy.py
+++ b/g4f/Provider/not_working/Allyfy.py
@@ -4,15 +4,15 @@ import asyncio
import json
import uuid
from aiohttp import ClientSession
-from ..typing import AsyncResult, Messages
-from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
-from .helper import format_prompt
+from ...typing import AsyncResult, Messages
+from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
+from ..helper import format_prompt
class Allyfy(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://allyfy.chat"
api_endpoint = "https://chatbot.allyfy.chat/api/v1/message/stream/super/chat"
- working = True
+ working = False
supports_stream = True
supports_system_message = True
supports_message_history = True
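
Note: moving Allyfy.py one package level deeper (into not_working/) is why each relative import gains a leading dot, and the provider is now flagged as not working. A hedged check, assuming the not_working package re-exports the class:

    # Sketch: g4f/Provider/Allyfy.py used `..typing` (up one level to g4f/), while
    # g4f/Provider/not_working/Allyfy.py needs `...typing` (up two levels to g4f/).
    from g4f.Provider.not_working import Allyfy  # assumption: re-exported by the package

    print(Allyfy.working)  # False after this change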
diff --git a/g4f/models.py b/g4f/models.py
index 9c6178be..0a7eed35 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -9,7 +9,6 @@ from .Provider import (
AIChatFree,
Airforce,
AIUncensored,
- Allyfy,
Bing,
Blackbox,
ChatGpt,
@@ -599,203 +598,202 @@ class ModelUtils:
"""
convert: dict[str, Model] = {
-############
-### Text ###
-############
-
-### OpenAI ###
-# gpt-3.5
-'gpt-3.5-turbo': gpt_35_turbo,
+ ############
+ ### Text ###
+ ############
+
+ ### OpenAI ###
+ # gpt-3.5
+ 'gpt-3.5-turbo': gpt_35_turbo,
-# gpt-4
-'gpt-4o': gpt_4o,
-'gpt-4o-mini': gpt_4o_mini,
-'gpt-4': gpt_4,
-'gpt-4-turbo': gpt_4_turbo,
+ # gpt-4
+ 'gpt-4o': gpt_4o,
+ 'gpt-4o-mini': gpt_4o_mini,
+ 'gpt-4': gpt_4,
+ 'gpt-4-turbo': gpt_4_turbo,
-# o1
-'o1': o1,
-'o1-mini': o1_mini,
-
-
-### Meta ###
-"meta-ai": meta,
-
-# llama-2
-'llama-2-7b': llama_2_7b,
-
-# llama-3
-'llama-3-8b': llama_3_8b,
-
-# llama-3.1
-'llama-3.1-8b': llama_3_1_8b,
-'llama-3.1-70b': llama_3_1_70b,
-'llama-3.1-405b': llama_3_1_405b,
-
-# llama-3.2
-'llama-3.2-1b': llama_3_2_1b,
-'llama-3.2-11b': llama_3_2_11b,
-
-
-### Mistral ###
-'mistral-7b': mistral_7b,
-'mixtral-8x7b': mixtral_8x7b,
-'mistral-nemo': mistral_nemo,
-
-
-### NousResearch ###
-'hermes-2-pro': hermes_2_pro,
-'hermes-2-dpo': hermes_2_dpo,
-'hermes-3': hermes_3,
+ # o1
+ 'o1': o1,
+ 'o1-mini': o1_mini,
+
+
+ ### Meta ###
+ "meta-ai": meta,
+ # llama-2
+ 'llama-2-7b': llama_2_7b,
+
+ # llama-3
+ 'llama-3-8b': llama_3_8b,
-### Microsoft ###
-'phi-2': phi_2,
-'phi-3.5-mini': phi_3_5_mini,
+ # llama-3.1
+ 'llama-3.1-8b': llama_3_1_8b,
+ 'llama-3.1-70b': llama_3_1_70b,
+ 'llama-3.1-405b': llama_3_1_405b,
+
+ # llama-3.2
+ 'llama-3.2-1b': llama_3_2_1b,
+ 'llama-3.2-11b': llama_3_2_11b,
+
+ ### Mistral ###
+ 'mistral-7b': mistral_7b,
+ 'mixtral-8x7b': mixtral_8x7b,
+ 'mistral-nemo': mistral_nemo,
+
+
+ ### NousResearch ###
+ 'hermes-2-pro': hermes_2_pro,
+ 'hermes-2-dpo': hermes_2_dpo,
+ 'hermes-3': hermes_3,
+
+
+ ### Microsoft ###
+ 'phi-2': phi_2,
+ 'phi-3.5-mini': phi_3_5_mini,
+
+
+ ### Google ###
+ # gemini
+ 'gemini': gemini,
+ 'gemini-pro': gemini_pro,
+ 'gemini-flash': gemini_flash,
+
+ # gemma
+ 'gemma-2b': gemma_2b,
-### Google ###
-# gemini
-'gemini': gemini,
-'gemini-pro': gemini_pro,
-'gemini-flash': gemini_flash,
-
-# gemma
-'gemma-2b': gemma_2b,
+ ### Anthropic ###
+ 'claude-2.1': claude_2_1,
+ # claude 3
+ 'claude-3-opus': claude_3_opus,
+ 'claude-3-sonnet': claude_3_sonnet,
+ 'claude-3-haiku': claude_3_haiku,
-### Anthropic ###
-'claude-2.1': claude_2_1,
+ # claude 3.5
+ 'claude-3.5-sonnet': claude_3_5_sonnet,
+
+
+ ### Reka AI ###
+ 'reka-core': reka_core,
+
+
+ ### Blackbox AI ###
+ 'blackboxai': blackboxai,
+ 'blackboxai-pro': blackboxai_pro,
+
+
+ ### CohereForAI ###
+ 'command-r+': command_r_plus,
+
-# claude 3
-'claude-3-opus': claude_3_opus,
-'claude-3-sonnet': claude_3_sonnet,
-'claude-3-haiku': claude_3_haiku,
+ ### GigaChat ###
+ 'gigachat': gigachat,
+
+
+
+ ### Qwen ###
+ # qwen 1.5
+ 'qwen-1.5-7b': qwen_1_5_7b,
-# claude 3.5
-'claude-3.5-sonnet': claude_3_5_sonnet,
-
-
-### Reka AI ###
-'reka-core': reka_core,
-
-
-### Blackbox AI ###
-'blackboxai': blackboxai,
-'blackboxai-pro': blackboxai_pro,
-
-
-### CohereForAI ###
-'command-r+': command_r_plus,
-
+ # qwen 2
+ 'qwen-2-72b': qwen_2_72b,
-### GigaChat ###
-'gigachat': gigachat,
-
-
-
-### Qwen ###
-# qwen 1.5
-'qwen-1.5-7b': qwen_1_5_7b,
+ # qwen 2.5
+ 'qwen-2.5-coder-32b': qwen_2_5_coder_32b,
+
+
+ ### Upstage ###
+ 'solar-mini': solar_mini,
+ 'solar-pro': solar_pro,
-# qwen 2
-'qwen-2-72b': qwen_2_72b,
-# qwen 2.5
-'qwen-2.5-coder-32b': qwen_2_5_coder_32b,
-
-
-### Upstage ###
-'solar-mini': solar_mini,
-'solar-pro': solar_pro,
+ ### Inflection ###
+ 'pi': pi,
-### Inflection ###
-'pi': pi,
+ ### DeepSeek ###
+ 'deepseek-coder': deepseek_coder,
+
+
+ ### Yorickvp ###
+ 'llava-13b': llava_13b,
-### DeepSeek ###
-'deepseek-coder': deepseek_coder,
-
-
-### Yorickvp ###
-'llava-13b': llava_13b,
+ ### WizardLM ###
+ 'wizardlm-2-8x22b': wizardlm_2_8x22b,
+
+
+ ### OpenChat ###
+ 'openchat-3.5': openchat_3_5,
+
+
+ ### x.ai ###
+ 'grok-2': grok_2,
+ 'grok-2-mini': grok_2_mini,
+ 'grok-beta': grok_beta,
+
+
+ ### Perplexity AI ###
+ 'sonar-online': sonar_online,
+ 'sonar-chat': sonar_chat,
+
+
+ ### TheBloke ###
+ 'german-7b': german_7b,
+
+
+ ### Nvidia ###
+ 'nemotron-70b': nemotron_70b,
-### WizardLM ###
-'wizardlm-2-8x22b': wizardlm_2_8x22b,
+ ### Teknium ###
+ 'openhermes-2.5': openhermes_2_5,
-
-### OpenChat ###
-'openchat-3.5': openchat_3_5,
-
-
-### x.ai ###
-'grok-2': grok_2,
-'grok-2-mini': grok_2_mini,
-'grok-beta': grok_beta,
-
-
-### Perplexity AI ###
-'sonar-online': sonar_online,
-'sonar-chat': sonar_chat,
-
-
-### TheBloke ###
-'german-7b': german_7b,
-
-
-### Nvidia ###
-'nemotron-70b': nemotron_70b,
-
-
-### Teknium ###
-'openhermes-2.5': openhermes_2_5,
-
-
-### Liquid ###
-'lfm-40b': lfm_40b,
-
-
-### DiscoResearch ###
-'german-7b': german_7b,
-
-
-### HuggingFaceH4 ###
-'zephyr-7b': zephyr_7b,
-
-
-### Inferless ###
-'neural-7b': neural_7b,
-
-
-
-#############
-### Image ###
-#############
-
-### Stability AI ###
-'sdxl': sdxl,
-'sd-3': sd_3,
-
-
-### Playground ###
-'playground-v2.5': playground_v2_5,
+ ### Liquid ###
+ 'lfm-40b': lfm_40b,
+
+
+ ### DiscoResearch ###
+ 'german-7b': german_7b,
-### Flux AI ###
-'flux': flux,
-'flux-pro': flux_pro,
-'flux-realism': flux_realism,
-'flux-anime': flux_anime,
-'flux-3d': flux_3d,
-'flux-disney': flux_disney,
-'flux-pixel': flux_pixel,
-'flux-4o': flux_4o,
+ ### HuggingFaceH4 ###
+ 'zephyr-7b': zephyr_7b,
-### Other ###
-'any-dark': any_dark,
+
+ ### Inferless ###
+ 'neural-7b': neural_7b,
+
+
+
+ #############
+ ### Image ###
+ #############
+
+ ### Stability AI ###
+ 'sdxl': sdxl,
+ 'sd-3': sd_3,
+
+
+ ### Playground ###
+ 'playground-v2.5': playground_v2_5,
+
+
+ ### Flux AI ###
+ 'flux': flux,
+ 'flux-pro': flux_pro,
+ 'flux-realism': flux_realism,
+ 'flux-anime': flux_anime,
+ 'flux-3d': flux_3d,
+ 'flux-disney': flux_disney,
+ 'flux-pixel': flux_pixel,
+ 'flux-4o': flux_4o,
+
+
+ ### Other ###
+ 'any-dark': any_dark,
}
_all_models = list(ModelUtils.convert.keys())
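
Note: ModelUtils.convert maps the public model name strings to Model instances, and _all_models is built from its keys. A minimal lookup sketch using the names shown in this hunk:

    # Sketch: resolving a model alias to its Model object.
    from g4f.models import ModelUtils, _all_models

    model = ModelUtils.convert['gpt-4o']
    print(model.name, model.base_provider)  # assumption: Model exposes these fields
    print('llama-3.1-70b' in _all_models)   # True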