From 2fad27b2c5ce6b05591d2921c7bafefa2de7c9b5 Mon Sep 17 00:00:00 2001
From: abc <98614666+xtekky@users.noreply.github.com>
Date: Sat, 13 Apr 2024 03:09:11 +0100
Subject: new gpt-4 beating opensource models

---
 g4f/Provider/HuggingChat.py    | 14 +++++++-------
 g4f/Provider/PerplexityLabs.py |  5 +++--
 2 files changed, 10 insertions(+), 9 deletions(-)

(limited to 'g4f/Provider')

diff --git a/g4f/Provider/HuggingChat.py b/g4f/Provider/HuggingChat.py
index b80795fe..882edb78 100644
--- a/g4f/Provider/HuggingChat.py
+++ b/g4f/Provider/HuggingChat.py
@@ -14,13 +14,12 @@ class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
     working = True
     default_model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
     models = [
-        "mistralai/Mixtral-8x7B-Instruct-v0.1",
-        "google/gemma-7b-it",
-        "meta-llama/Llama-2-70b-chat-hf",
-        "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
-        "codellama/CodeLlama-34b-Instruct-hf",
-        "mistralai/Mistral-7B-Instruct-v0.2",
-        "openchat/openchat-3.5-0106",
+        "HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1",
+        'CohereForAI/c4ai-command-r-plus',
+        'mistralai/Mixtral-8x7B-Instruct-v0.1',
+        'google/gemma-1.1-7b-it',
+        'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
+        'mistralai/Mistral-7B-Instruct-v0.2'
     ]
     model_aliases = {
         "openchat/openchat_3.5": "openchat/openchat-3.5-0106",
@@ -48,6 +47,7 @@ class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
         **kwargs
     ) -> AsyncResult:
         options = {"model": cls.get_model(model)}
+
         system_prompt = "\n".join([message["content"] for message in messages if message["role"] == "system"])
         if system_prompt:
             options["preprompt"] = system_prompt
diff --git a/g4f/Provider/PerplexityLabs.py b/g4f/Provider/PerplexityLabs.py
index 6c80efee..ba956100 100644
--- a/g4f/Provider/PerplexityLabs.py
+++ b/g4f/Provider/PerplexityLabs.py
@@ -19,13 +19,14 @@ class PerplexityLabs(AsyncGeneratorProvider, ProviderModelMixin):
         "sonar-small-online", "sonar-medium-online", "sonar-small-chat", "sonar-medium-chat", "mistral-7b-instruct", 
         "codellama-70b-instruct", "llava-v1.5-7b-wrapper", "llava-v1.6-34b", "mixtral-8x7b-instruct",
-        "gemma-2b-it", "gemma-7b-it"
-        "mistral-medium", "related"
+        "gemma-2b-it", "gemma-7b-it",
+        "mistral-medium", "related", "dbrx-instruct"
     ]
     model_aliases = {
         "mistralai/Mistral-7B-Instruct-v0.1": "mistral-7b-instruct", 
         "mistralai/Mixtral-8x7B-Instruct-v0.1": "mixtral-8x7b-instruct",
         "codellama/CodeLlama-70b-Instruct-hf": "codellama-70b-instruct",
-        "llava-v1.5-7b": "llava-v1.5-7b-wrapper"
+        "llava-v1.5-7b": "llava-v1.5-7b-wrapper",
+        'databricks/dbrx-instruct': "dbrx-instruct"
     }
 
     @classmethod
-- 
cgit v1.2.3