From abefd7f36fb5b03e612e5b3e96ba1079c4d2f303 Mon Sep 17 00:00:00 2001
From: kqlio67
Date: Thu, 12 Sep 2024 17:55:39 +0300
Subject: refactor(g4f/Provider/HuggingFace.py, g4f/Provider/HuggingChat.py): update provider models and aliases

---
 g4f/Provider/HuggingFace.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

(limited to 'g4f/Provider/HuggingFace.py')

diff --git a/g4f/Provider/HuggingFace.py b/g4f/Provider/HuggingFace.py
index 74957862..46cfcca3 100644
--- a/g4f/Provider/HuggingFace.py
+++ b/g4f/Provider/HuggingFace.py
@@ -17,8 +17,7 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
     default_model = "meta-llama/Meta-Llama-3.1-70B-Instruct"
     models = [
         'meta-llama/Meta-Llama-3.1-70B-Instruct',
-        'meta-llama/Meta-Llama-3.1-405B-Instruct-FP8',
-        'CohereForAI/c4ai-command-r-plus',
+        'CohereForAI/c4ai-command-r-plus-08-2024',
         'mistralai/Mixtral-8x7B-Instruct-v0.1',
         'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
         '01-ai/Yi-1.5-34B-Chat',
@@ -28,8 +27,7 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
 
     model_aliases = {
         "llama-3.1-70b": "meta-llama/Meta-Llama-3.1-70B-Instruct",
-        "llama-3.1-405b": "meta-llama/Meta-Llama-3.1-405B-Instruct-FP8",
-        "command-r-plus": "CohereForAI/c4ai-command-r-plus",
+        "command-r-plus": "CohereForAI/c4ai-command-r-plus-08-2024",
         "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1",
         "mixtral-8x7b": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
         "yi-1.5-34b": "01-ai/Yi-1.5-34B-Chat",
--
cgit v1.2.3

From 11f702e0acc506c13569e4f38ad1390fa87b53f0 Mon Sep 17 00:00:00 2001
From: kqlio67
Date: Thu, 12 Sep 2024 18:06:52 +0300
Subject: refactor(models): update HuggingChat, HuggingFace provider models and aliases

---
 g4f/Provider/HuggingFace.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

(limited to 'g4f/Provider/HuggingFace.py')

diff --git a/g4f/Provider/HuggingFace.py b/g4f/Provider/HuggingFace.py
index 46cfcca3..4fe02739 100644
--- a/g4f/Provider/HuggingFace.py
+++ b/g4f/Provider/HuggingFace.py
@@ -16,11 +16,10 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
     supports_message_history = True
     default_model = "meta-llama/Meta-Llama-3.1-70B-Instruct"
     models = [
-        'meta-llama/Meta-Llama-3.1-70B-Instruct',
+        default_model,
         'CohereForAI/c4ai-command-r-plus-08-2024',
         'mistralai/Mixtral-8x7B-Instruct-v0.1',
         'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
-        '01-ai/Yi-1.5-34B-Chat',
         'mistralai/Mistral-7B-Instruct-v0.3',
         'microsoft/Phi-3-mini-4k-instruct',
     ]
@@ -30,7 +29,6 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
         "command-r-plus": "CohereForAI/c4ai-command-r-plus-08-2024",
         "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1",
         "mixtral-8x7b": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
-        "yi-1.5-34b": "01-ai/Yi-1.5-34B-Chat",
         "mistral-7b": "mistralai/Mistral-7B-Instruct-v0.3",
         "phi-3-mini-4k": "microsoft/Phi-3-mini-4k-instruct",
     }
--
cgit v1.2.3

From abea4ddbcca40c1c1da51507363867fb3664228f Mon Sep 17 00:00:00 2001
From: kqlio67
Date: Thu, 12 Sep 2024 21:35:31 +0300
Subject: Bug fixes and improvements to HuggingChat and HuggingFace providers

---
 g4f/Provider/HuggingFace.py | 23 +++++------------------
 1 file changed, 5 insertions(+), 18 deletions(-)

(limited to 'g4f/Provider/HuggingFace.py')

diff --git a/g4f/Provider/HuggingFace.py b/g4f/Provider/HuggingFace.py
index 4fe02739..586e5f5f 100644
--- a/g4f/Provider/HuggingFace.py
+++ b/g4f/Provider/HuggingFace.py
@@ -9,29 +9,16 @@ from .helper import get_connector
 from ..errors import RateLimitError, ModelNotFoundError
 from ..requests.raise_for_status import raise_for_status
 
+from .HuggingChat import HuggingChat
+
 class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
     url = "https://huggingface.co/chat"
     working = True
     needs_auth = True
     supports_message_history = True
-    default_model = "meta-llama/Meta-Llama-3.1-70B-Instruct"
-    models = [
-        default_model,
-        'CohereForAI/c4ai-command-r-plus-08-2024',
-        'mistralai/Mixtral-8x7B-Instruct-v0.1',
-        'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
-        'mistralai/Mistral-7B-Instruct-v0.3',
-        'microsoft/Phi-3-mini-4k-instruct',
-    ]
-
-    model_aliases = {
-        "llama-3.1-70b": "meta-llama/Meta-Llama-3.1-70B-Instruct",
-        "command-r-plus": "CohereForAI/c4ai-command-r-plus-08-2024",
-        "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1",
-        "mixtral-8x7b": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
-        "mistral-7b": "mistralai/Mistral-7B-Instruct-v0.3",
-        "phi-3-mini-4k": "microsoft/Phi-3-mini-4k-instruct",
-    }
+    default_model = HuggingChat.default_model
+    models = HuggingChat.models
+    model_aliases = HuggingChat.model_aliases
 
     @classmethod
     def get_model(cls, model: str) -> str:
--
cgit v1.2.3
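Taken together, the three patches above first prune the hard-coded model list and alias table in g4f/Provider/HuggingFace.py and then drop them entirely in favour of reusing HuggingChat's definitions, so both providers share one source of truth. What follows is a minimal sketch of the resulting class, rebuilt from the context and '+' lines of the last patch; the base_provider import is an assumption about the surrounding module, and the get_model body is not visible in the diff, so it is omitted.

# Sketch only: g4f/Provider/HuggingFace.py after commit abea4ddb, reconstructed
# from the patch above. The base_provider import is assumed; the get_model
# classmethod body does not appear in the diff and is left out.
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin  # assumed location
from .HuggingChat import HuggingChat

class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
    url = "https://huggingface.co/chat"
    working = True
    needs_auth = True
    supports_message_history = True

    # Model metadata now comes from HuggingChat instead of a duplicated
    # literal. This also removes the earlier model_aliases dict, in which the
    # key "mixtral-8x7b" appeared twice, so the Nous-Hermes entry silently
    # overwrote the Mixtral one.
    default_model = HuggingChat.default_model
    models = HuggingChat.models
    model_aliases = HuggingChat.model_aliases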