summaryrefslogtreecommitdiffstats
path: root/g4f/Provider/HuggingFace.py
diff options
context:
space:
mode:
Diffstat (limited to '')
-rw-r--r--  g4f/Provider/HuggingFace.py | 27
1 file changed, 5 insertions(+), 22 deletions(-)
diff --git a/g4f/Provider/HuggingFace.py b/g4f/Provider/HuggingFace.py
index 74957862..586e5f5f 100644
--- a/g4f/Provider/HuggingFace.py
+++ b/g4f/Provider/HuggingFace.py
@@ -9,33 +9,16 @@ from .helper import get_connector
from ..errors import RateLimitError, ModelNotFoundError
from ..requests.raise_for_status import raise_for_status
+from .HuggingChat import HuggingChat
+
class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://huggingface.co/chat"
working = True
needs_auth = True
supports_message_history = True
- default_model = "meta-llama/Meta-Llama-3.1-70B-Instruct"
- models = [
- 'meta-llama/Meta-Llama-3.1-70B-Instruct',
- 'meta-llama/Meta-Llama-3.1-405B-Instruct-FP8',
- 'CohereForAI/c4ai-command-r-plus',
- 'mistralai/Mixtral-8x7B-Instruct-v0.1',
- 'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
- '01-ai/Yi-1.5-34B-Chat',
- 'mistralai/Mistral-7B-Instruct-v0.3',
- 'microsoft/Phi-3-mini-4k-instruct',
- ]
-
- model_aliases = {
- "llama-3.1-70b": "meta-llama/Meta-Llama-3.1-70B-Instruct",
- "llama-3.1-405b": "meta-llama/Meta-Llama-3.1-405B-Instruct-FP8",
- "command-r-plus": "CohereForAI/c4ai-command-r-plus",
- "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1",
- "mixtral-8x7b": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
- "yi-1.5-34b": "01-ai/Yi-1.5-34B-Chat",
- "mistral-7b": "mistralai/Mistral-7B-Instruct-v0.3",
- "phi-3-mini-4k": "microsoft/Phi-3-mini-4k-instruct",
- }
+ default_model = HuggingChat.default_model
+ models = HuggingChat.models
+ model_aliases = HuggingChat.model_aliases
@classmethod
def get_model(cls, model: str) -> str: