diff options
author | kqlio67 <kqlio67@users.noreply.github.com> | 2024-09-12 17:06:52 +0200 |
---|---|---|
committer | kqlio67 <kqlio67@users.noreply.github.com> | 2024-09-12 17:06:52 +0200 |
commit | 11f702e0acc506c13569e4f38ad1390fa87b53f0 (patch) | |
tree | da1aee4abea0a224552aa42d6d5489863f3d4007 /g4f | |
parent | refactor(g4f/Provider/HuggingFace.py, g4f/Provider/HuggingChat.py): update provider models and aliases (diff) | |
download | gpt4free-11f702e0acc506c13569e4f38ad1390fa87b53f0.tar gpt4free-11f702e0acc506c13569e4f38ad1390fa87b53f0.tar.gz gpt4free-11f702e0acc506c13569e4f38ad1390fa87b53f0.tar.bz2 gpt4free-11f702e0acc506c13569e4f38ad1390fa87b53f0.tar.lz gpt4free-11f702e0acc506c13569e4f38ad1390fa87b53f0.tar.xz gpt4free-11f702e0acc506c13569e4f38ad1390fa87b53f0.tar.zst gpt4free-11f702e0acc506c13569e4f38ad1390fa87b53f0.zip |
Diffstat (limited to 'g4f')
-rw-r--r-- | g4f/Provider/HuggingChat.py | 4 | ||||
-rw-r--r-- | g4f/Provider/HuggingFace.py | 4 | ||||
-rw-r--r-- | g4f/models.py | 11 |
3 files changed, 2 insertions, 17 deletions
diff --git a/g4f/Provider/HuggingChat.py b/g4f/Provider/HuggingChat.py
index 28e58768..fad44957 100644
--- a/g4f/Provider/HuggingChat.py
+++ b/g4f/Provider/HuggingChat.py
@@ -13,11 +13,10 @@ class HuggingChat(AbstractProvider, ProviderModelMixin):
     supports_stream = True
     default_model = "meta-llama/Meta-Llama-3.1-70B-Instruct"
     models = [
-        'meta-llama/Meta-Llama-3.1-70B-Instruct',
+        default_model,
         'CohereForAI/c4ai-command-r-plus-08-2024',
         'mistralai/Mixtral-8x7B-Instruct-v0.1',
         'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
-        '01-ai/Yi-1.5-34B-Chat',
         'mistralai/Mistral-7B-Instruct-v0.3',
         'microsoft/Phi-3-mini-4k-instruct',
     ]
@@ -27,7 +26,6 @@ class HuggingChat(AbstractProvider, ProviderModelMixin):
         "command-r-plus": "CohereForAI/c4ai-command-r-plus-08-2024",
         "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1",
         "mixtral-8x7b": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
-        "yi-1.5-34b": "01-ai/Yi-1.5-34B-Chat",
         "mistral-7b": "mistralai/Mistral-7B-Instruct-v0.3",
         "phi-3-mini-4k": "microsoft/Phi-3-mini-4k-instruct",
     }
diff --git a/g4f/Provider/HuggingFace.py b/g4f/Provider/HuggingFace.py
index 46cfcca3..4fe02739 100644
--- a/g4f/Provider/HuggingFace.py
+++ b/g4f/Provider/HuggingFace.py
@@ -16,11 +16,10 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
     supports_message_history = True
     default_model = "meta-llama/Meta-Llama-3.1-70B-Instruct"
     models = [
-        'meta-llama/Meta-Llama-3.1-70B-Instruct',
+        default_model,
         'CohereForAI/c4ai-command-r-plus-08-2024',
         'mistralai/Mixtral-8x7B-Instruct-v0.1',
         'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
-        '01-ai/Yi-1.5-34B-Chat',
         'mistralai/Mistral-7B-Instruct-v0.3',
         'microsoft/Phi-3-mini-4k-instruct',
     ]
@@ -30,7 +29,6 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
         "command-r-plus": "CohereForAI/c4ai-command-r-plus-08-2024",
         "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1",
         "mixtral-8x7b": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
-        "yi-1.5-34b": "01-ai/Yi-1.5-34B-Chat",
         "mistral-7b": "mistralai/Mistral-7B-Instruct-v0.3",
         "phi-3-mini-4k": "microsoft/Phi-3-mini-4k-instruct",
     }
diff --git a/g4f/models.py b/g4f/models.py
index 2414719b..44142094 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -202,13 +202,6 @@ mistral_7b = Model(
     best_provider = IterListProvider([HuggingChat, HuggingFace, DeepInfra])
 )
 
-### 01-ai ###
-yi_1_5_34b = Model(
-    name = "yi-1.5-34b",
-    base_provider = "01-ai",
-    best_provider = IterListProvider([HuggingChat, HuggingFace])
-)
-
 
 ### Microsoft ###
 phi_3_mini_4k = Model(
@@ -548,10 +541,6 @@ class ModelUtils:
 
         ### Mistral ###
         'mixtral-8x7b': mixtral_8x7b,
         'mistral-7b': mistral_7b,
-
-
-        ### 01-ai ###
-        'yi-1.5-34b': yi_1_5_34b,
 
         ### Microsoft ###