author | Heiner Lohaus <hlohaus@users.noreply.github.com> | 2024-03-11 02:41:59 +0100
committer | Heiner Lohaus <hlohaus@users.noreply.github.com> | 2024-03-11 02:41:59 +0100
commit | ec51e9c76433b9e9498ade1dbe5de2268ad84218
tree | 2876e373144f025949477601e89b8d478a49fafc /g4f/models.py
parent | Add word count from iG8R
Diffstat (limited to 'g4f/models.py')
-rw-r--r-- | g4f/models.py | 27
1 file changed, 21 insertions, 6 deletions
diff --git a/g4f/models.py b/g4f/models.py
index bf58ff25..b3e0d0ee 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -10,6 +10,7 @@ from .Provider import (
     GeminiProChat,
     ChatgptNext,
     HuggingChat,
+    HuggingFace,
     ChatgptDemo,
     FreeChatgpt,
     GptForLove,
@@ -112,32 +113,32 @@ llama2_13b = Model(
 llama2_70b = Model(
     name = "meta-llama/Llama-2-70b-chat-hf",
     base_provider = "meta",
-    best_provider = RetryProvider([Llama2, DeepInfra, HuggingChat, PerplexityLabs])
+    best_provider = RetryProvider([Llama2, DeepInfra, HuggingChat])
 )
 
 codellama_34b_instruct = Model(
     name = "codellama/CodeLlama-34b-Instruct-hf",
     base_provider = "meta",
-    best_provider = RetryProvider([HuggingChat, PerplexityLabs, DeepInfra])
+    best_provider = RetryProvider([HuggingChat, DeepInfra])
 )
 
 codellama_70b_instruct = Model(
     name = "codellama/CodeLlama-70b-Instruct-hf",
     base_provider = "meta",
-    best_provider = DeepInfra
+    best_provider = RetryProvider([DeepInfra, PerplexityLabs])
 )
 
 # Mistral
 mixtral_8x7b = Model(
     name = "mistralai/Mixtral-8x7B-Instruct-v0.1",
     base_provider = "huggingface",
-    best_provider = RetryProvider([DeepInfra, HuggingChat, PerplexityLabs])
+    best_provider = RetryProvider([DeepInfra, HuggingChat, HuggingFace, PerplexityLabs])
 )
 
 mistral_7b = Model(
     name = "mistralai/Mistral-7B-Instruct-v0.1",
     base_provider = "huggingface",
-    best_provider = RetryProvider([DeepInfra, HuggingChat, PerplexityLabs])
+    best_provider = RetryProvider([DeepInfra, HuggingChat, HuggingFace, PerplexityLabs])
 )
 
 # Misc models
@@ -184,6 +185,18 @@ claude_v2 = Model(
     best_provider = RetryProvider([FreeChatgpt, Vercel])
 )
 
+claude_3_opus = Model(
+    name = 'claude-3-opus',
+    base_provider = 'anthropic',
+    best_provider = You
+)
+
+claude_3_sonnet = Model(
+    name = 'claude-3-sonnet',
+    base_provider = 'anthropic',
+    best_provider = You
+)
+
 gpt_35_turbo_16k = Model(
     name = 'gpt-3.5-turbo-16k',
     base_provider = 'openai',
@@ -223,7 +236,7 @@ gpt_4_32k_0613 = Model(
 gemini_pro = Model(
     name = 'gemini-pro',
     base_provider = 'google',
-    best_provider = RetryProvider([FreeChatgpt, GeminiProChat])
+    best_provider = RetryProvider([FreeChatgpt, GeminiProChat, You])
 )
 
 pi = Model(
@@ -272,6 +285,8 @@ class ModelUtils:
         'gemini': gemini,
         'gemini-pro': gemini_pro,
         'claude-v2': claude_v2,
+        'claude-3-opus': claude_3_opus,
+        'claude-3-sonnet': claude_3_sonnet,
         'pi': pi
     }
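
For context, registering `claude-3-opus` and `claude-3-sonnet` in `ModelUtils.convert` is what makes them selectable by name. The snippet below is a minimal sketch (not part of this commit) of how such a key is typically consumed; it assumes the package's standard `g4f.ChatCompletion.create` interface and a working backing provider (`You` serves both new entries here), and the prompt text is illustrative only.

```python
# Minimal sketch: selecting one of the newly registered models by the key
# added to ModelUtils.convert in this commit. Assumes g4f is installed and
# the backing provider (You) is reachable.
import g4f

response = g4f.ChatCompletion.create(
    model="claude-3-opus",  # resolved through ModelUtils.convert to claude_3_opus
    messages=[{"role": "user", "content": "Say hello in one sentence."}],  # illustrative prompt
)
print(response)
```

A `RetryProvider`, as used for `mixtral_8x7b` and `mistral_7b` above, falls back across its provider list when one fails, so adding `HuggingFace` to those lists broadens the fallback chain rather than changing the default behavior.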