author     kqlio67 <kqlio67@users.noreply.github.com>  2024-10-11 08:30:06 +0200
committer  kqlio67 <kqlio67@users.noreply.github.com>  2024-10-11 08:30:06 +0200
commit     bc2be5a5b48768a6e66da89e5945d62bccca64c9 (patch)
tree       593acdc6b7169a13f60678cb5fd0fca0e4e73ddb /g4f/models.py
parent     chore(docs/requirements.md): remove curl_cffi installation instruction (diff)
Diffstat (limited to 'g4f/models.py')
-rw-r--r--   g4f/models.py   316
1 file changed, 262 insertions(+), 54 deletions(-)
diff --git a/g4f/models.py b/g4f/models.py
index f1d50922..7a87f225 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -7,6 +7,7 @@ from .Provider import (
     AIChatFree,
     Airforce,
     Allyfy,
+    AmigoChat,
     Bing,
     Binjie,
     Blackbox,
@@ -15,6 +16,9 @@ from .Provider import (
     ChatGptEs,
     ChatgptFree,
     ChatHub,
+    ChatifyAI,
+    Cloudflare,
+    DarkAI,
     DDG,
     DeepInfra,
     DeepInfraChat,
@@ -34,7 +38,27 @@ from .Provider import (
     LiteIcoding,
     MagickPen,
     MetaAI,
-    Nexra,
+    NexraAnimagineXL,
+    NexraBing,
+    NexraBlackbox,
+    NexraChatGPT,
+    NexraChatGPT4o,
+    NexraChatGptV2,
+    NexraChatGptWeb,
+    NexraDallE,
+    NexraDallE2,
+    NexraDalleMini,
+    NexraEmi,
+    NexraFluxPro,
+    NexraGeminiPro,
+    NexraLLaMA31,
+    NexraMidjourney,
+    NexraProdiaAI,
+    NexraQwen,
+    NexraSD15,
+    NexraSD21,
+    NexraSDLora,
+    NexraSDTurbo,
     OpenaiChat,
     PerplexityLabs,
     Pi,
@@ -85,9 +109,9 @@ default = Model(
         LiteIcoding,
         Airforce,
         ChatHub,
-        Nexra,
         ChatGptEs,
         ChatHub,
+        AmigoChat,
     ])
 )
 
@@ -100,7 +124,7 @@ gpt_3 = Model(
     name = 'gpt-3',
     base_provider = 'OpenAI',
-    best_provider = Nexra
+    best_provider = NexraChatGPT
 )
 
 # gpt-3.5
@@ -108,7 +132,7 @@ gpt_35_turbo = Model(
     name = 'gpt-3.5-turbo',
     base_provider = 'OpenAI',
     best_provider = IterListProvider([
-        Allyfy, Nexra, Airforce, Liaobots,
+        Allyfy, NexraChatGPT, Airforce, DarkAI, Liaobots,
     ])
 )
 
@@ -117,7 +141,7 @@ gpt_4o = Model(
     name = 'gpt-4o',
     base_provider = 'OpenAI',
     best_provider = IterListProvider([
-        Liaobots, Nexra, ChatGptEs, Airforce,
+        NexraChatGPT4o, ChatGptEs, AmigoChat, DarkAI, Liaobots, Airforce,
         OpenaiChat
     ])
 )
 
@@ -126,7 +150,7 @@ gpt_4o_mini = Model(
     name = 'gpt-4o-mini',
     base_provider = 'OpenAI',
     best_provider = IterListProvider([
-        DDG, ChatGptEs, You, FreeNetfly, Pizzagpt, LiteIcoding, MagickPen, Liaobots, Airforce, ChatgptFree, Koala,
+        DDG, ChatGptEs, You, FreeNetfly, Pizzagpt, LiteIcoding, MagickPen, AmigoChat, Liaobots, Airforce, ChatgptFree, Koala,
         OpenaiChat, ChatGpt
     ])
 )
 
@@ -135,7 +159,7 @@ gpt_4_turbo = Model(
     name = 'gpt-4-turbo',
     base_provider = 'OpenAI',
     best_provider = IterListProvider([
-        Nexra, Liaobots, Airforce, Bing
+        Liaobots, Airforce, Bing
     ])
 )
 
@@ -143,11 +167,24 @@ gpt_4 = Model(
     name = 'gpt-4',
     base_provider = 'OpenAI',
     best_provider = IterListProvider([
-        Nexra, Binjie, Airforce, Chatgpt4Online, Bing, OpenaiChat,
+        NexraBing, NexraChatGPT, NexraChatGptV2, NexraChatGptWeb, Binjie, Airforce, Chatgpt4Online, Bing, OpenaiChat,
         gpt_4_turbo.best_provider, gpt_4o.best_provider, gpt_4o_mini.best_provider
     ])
 )
 
+# o1
+o1 = Model(
+    name = 'o1',
+    base_provider = 'OpenAI',
+    best_provider = IterListProvider([AmigoChat])
+)
+
+o1_mini = Model(
+    name = 'o1-mini',
+    base_provider = 'OpenAI',
+    best_provider = IterListProvider([AmigoChat])
+)
+
 ### GigaChat ###
 gigachat = Model(
@@ -165,6 +202,12 @@ meta = Model(
 )
 
 # llama 2
+llama_2_7b = Model(
+    name = "llama-2-7b",
+    base_provider = "Meta Llama",
+    best_provider = Cloudflare
+)
+
 llama_2_13b = Model(
     name = "llama-2-13b",
     base_provider = "Meta Llama",
@@ -175,7 +218,7 @@ llama_3_8b = Model(
     name = "llama-3-8b",
     base_provider = "Meta Llama",
-    best_provider = IterListProvider([Airforce, DeepInfra, Replicate])
+    best_provider = IterListProvider([Cloudflare, Airforce, DeepInfra, Replicate])
 )
 
 llama_3_70b = Model(
@@ -194,40 +237,57 @@ llama_3_1_8b = Model(
     name = "llama-3.1-8b",
     base_provider = "Meta Llama",
-    best_provider = IterListProvider([Blackbox, DeepInfraChat, ChatHub, Airforce, PerplexityLabs])
+    best_provider = IterListProvider([Blackbox, DeepInfraChat, ChatHub, Cloudflare, Airforce, PerplexityLabs])
 )
 
 llama_3_1_70b = Model(
     name = "llama-3.1-70b",
     base_provider = "Meta Llama",
-    best_provider = IterListProvider([DDG, HuggingChat, Blackbox, FreeGpt, TeachAnything, Free2GPT, DeepInfraChat, Airforce, HuggingFace, PerplexityLabs])
+    best_provider = IterListProvider([DDG, HuggingChat, Blackbox, FreeGpt, TeachAnything, Free2GPT, DeepInfraChat, DarkAI, Airforce, HuggingFace, PerplexityLabs])
 )
 
 llama_3_1_405b = Model(
     name = "llama-3.1-405b",
     base_provider = "Meta Llama",
-    best_provider = IterListProvider([DeepInfraChat, Blackbox, Airforce])
+    best_provider = IterListProvider([DeepInfraChat, Blackbox, AmigoChat, DarkAI, Airforce])
 )
 
 llama_3_1 = Model(
     name = "llama-3.1",
     base_provider = "Meta Llama",
-    best_provider = IterListProvider([Nexra, llama_3_1_8b.best_provider, llama_3_1_70b.best_provider, llama_3_1_405b.best_provider,])
+    best_provider = IterListProvider([NexraLLaMA31, ChatifyAI, llama_3_1_8b.best_provider, llama_3_1_70b.best_provider, llama_3_1_405b.best_provider,])
 )
 
 # llama 3.2
+llama_3_2_1b = Model(
+    name = "llama-3.2-1b",
+    base_provider = "Meta Llama",
+    best_provider = IterListProvider([Cloudflare])
+)
+
+llama_3_2_3b = Model(
+    name = "llama-3.2-3b",
+    base_provider = "Meta Llama",
+    best_provider = IterListProvider([Cloudflare])
+)
+
 llama_3_2_11b = Model(
     name = "llama-3.2-11b",
     base_provider = "Meta Llama",
-    best_provider = IterListProvider([HuggingChat, HuggingFace])
+    best_provider = IterListProvider([Cloudflare, HuggingChat, HuggingFace])
 )
 
 llama_3_2_90b = Model(
     name = "llama-3.2-90b",
     base_provider = "Meta Llama",
-    best_provider = IterListProvider([Airforce])
+    best_provider = IterListProvider([AmigoChat, Airforce])
 )
 
+llama_3_2 = Model(
+    name = "llama-3.2",
+    base_provider = "Meta Llama",
+    best_provider = IterListProvider([llama_3_2_1b.best_provider, llama_3_2_3b.best_provider, llama_3_2_11b.best_provider, llama_3_2_90b.best_provider])
+)
+
 # llamaguard
 llamaguard_7b = Model(
     name = "llamaguard-7b",
@@ -246,7 +306,7 @@ llamaguard_2_8b = Model(
 mistral_7b = Model(
     name = "mistral-7b",
     base_provider = "Mistral",
-    best_provider = IterListProvider([DeepInfraChat, Airforce, HuggingFace, DeepInfra])
+    best_provider = IterListProvider([DeepInfraChat, Cloudflare, Airforce, DeepInfra])
 )
 
 mixtral_8x7b = Model(
@@ -289,6 +349,12 @@ hermes_3 = Model(
 
 ### Microsoft ###
+phi_2 = Model(
+    name = "phi-2",
+    base_provider = "Microsoft",
+    best_provider = Cloudflare
+)
+
 phi_3_medium_4k = Model(
     name = "phi-3-medium-4k",
     base_provider = "Microsoft",
@@ -306,7 +372,7 @@ phi_3_5_mini = Model(
 gemini_pro = Model(
     name = 'gemini-pro',
     base_provider = 'Google DeepMind',
-    best_provider = IterListProvider([GeminiPro, LiteIcoding, Blackbox, AIChatFree, GPROChat, Nexra, Liaobots, Airforce])
+    best_provider = IterListProvider([GeminiPro, LiteIcoding, Blackbox, AIChatFree, GPROChat, NexraGeminiPro, AmigoChat, Liaobots, Airforce])
 )
 
 gemini_flash = Model(
@@ -343,6 +409,12 @@ gemma_2b = Model(
     ])
 )
 
+gemma_7b = Model(
+    name = 'gemma-7b',
+    base_provider = 'Google',
+    best_provider = IterListProvider([Cloudflare])
+)
+
 # gemma 2
 gemma_2_27b = Model(
     name = 'gemma-2-27b',
@@ -407,7 +479,7 @@ claude_3 = Model(
 claude_3_5_sonnet = Model(
     name = 'claude-3.5-sonnet',
     base_provider = 'Anthropic',
-    best_provider = IterListProvider([Blackbox, Airforce, Liaobots])
+    best_provider = IterListProvider([Blackbox, Airforce, AmigoChat, Liaobots])
 )
 
 claude_3_5 = Model(
@@ -430,10 +502,16 @@ reka_core = Model(
 
 
 ### Blackbox AI ###
-blackbox = Model(
-    name = 'blackbox',
+blackboxai = Model(
+    name = 'blackboxai',
     base_provider = 'Blackbox AI',
-    best_provider = Blackbox
+    best_provider = IterListProvider([Blackbox, NexraBlackbox])
+)
+
+blackboxai_pro = Model(
+    name = 'blackboxai-pro',
+    base_provider = 'Blackbox AI',
+    best_provider = IterListProvider([Blackbox])
 )
 
@@ -463,16 +541,22 @@ sparkdesk_v1_1 = Model(
 
 ### Qwen ###
 # qwen 1
+qwen_1_5_0_5b = Model(
+    name = 'qwen-1.5-0.5b',
+    base_provider = 'Qwen',
+    best_provider = Cloudflare
+)
+
 qwen_1_5_7b = Model(
     name = 'qwen-1.5-7b',
     base_provider = 'Qwen',
-    best_provider = Airforce
+    best_provider = IterListProvider([Cloudflare, Airforce])
 )
 
 qwen_1_5_14b = Model(
     name = 'qwen-1.5-14b',
     base_provider = 'Qwen',
-    best_provider = IterListProvider([FreeChatgpt, Airforce])
+    best_provider = IterListProvider([FreeChatgpt, Cloudflare, Airforce])
 )
 
 qwen_1_5_72b = Model(
@@ -487,6 +571,12 @@ qwen_1_5_110b = Model(
     best_provider = Airforce
 )
 
+qwen_1_5_1_8b = Model(
+    name = 'qwen-1.5-1.8b',
+    base_provider = 'Qwen',
+    best_provider = Airforce
+)
+
 # qwen 2
 qwen_2_72b = Model(
     name = 'qwen-2-72b',
@@ -497,7 +587,7 @@ qwen_2_72b = Model(
 qwen = Model(
     name = 'qwen',
     base_provider = 'Qwen',
-    best_provider = IterListProvider([Nexra, qwen_1_5_14b.best_provider, qwen_1_5_72b.best_provider, qwen_1_5_110b.best_provider, qwen_2_72b.best_provider])
+    best_provider = IterListProvider([NexraQwen, qwen_1_5_0_5b.best_provider, qwen_1_5_14b.best_provider, qwen_1_5_72b.best_provider, qwen_1_5_110b.best_provider, qwen_1_5_1_8b.best_provider, qwen_2_72b.best_provider])
 )
 
@@ -514,14 +604,6 @@ glm_4_9b = Model(
     best_provider = FreeChatgpt
 )
 
-glm_4 = Model(
-    name = 'glm-4',
-    base_provider = 'Zhipu AI',
-    best_provider = IterListProvider([
-        glm_3_6b.best_provider, glm_4_9b.best_provider
-    ])
-)
-
 ### 01-ai ###
 yi_1_5_9b = Model(
@@ -602,6 +684,12 @@ lzlv_70b = Model(
 
 ### OpenChat ###
+openchat_3_5 = Model(
+    name = 'openchat-3.5',
+    base_provider = 'OpenChat',
+    best_provider = Cloudflare
+)
+
 openchat_3_6_8b = Model(
     name = 'openchat-3.6-8b',
     base_provider = 'OpenChat',
@@ -669,16 +757,71 @@ cosmosrp = Model(
 )
 
 
+### TheBloke ###
+german_7b = Model(
+    name = 'german-7b',
+    base_provider = 'TheBloke',
+    best_provider = IterListProvider([Cloudflare])
+)
+
+
+### Tinyllama ###
+tinyllama_1_1b = Model(
+    name = 'tinyllama-1.1b',
+    base_provider = 'Tinyllama',
+    best_provider = IterListProvider([Cloudflare])
+)
+
+
+### Fblgit ###
+cybertron_7b = Model(
+    name = 'cybertron-7b',
+    base_provider = 'Fblgit',
+    best_provider = IterListProvider([Cloudflare])
+)
+
+
 #############
 ### Image ###
 #############
 
 ### Stability AI ###
+sdxl_lora = Model(
+    name = 'sdxl-lora',
+    base_provider = 'Stability AI',
+    best_provider = IterListProvider([NexraSDLora])
+
+)
+
+sdxl_turbo = Model(
+    name = 'sdxl-turbo',
+    base_provider = 'Stability AI',
+    best_provider = IterListProvider([NexraSDTurbo])
+
+)
+
 sdxl = Model(
     name = 'sdxl',
     base_provider = 'Stability AI',
-    best_provider = IterListProvider([ReplicateHome, Nexra, DeepInfraImage])
+    best_provider = IterListProvider([
+        ReplicateHome, NexraSD21, DeepInfraImage,
+        sdxl_lora.best_provider, sdxl_turbo.best_provider,
+    ])
+
 )
 
+sd_1_5 = Model(
+    name = 'sd-1.5',
+    base_provider = 'Stability AI',
+    best_provider = IterListProvider([NexraSD15])
+
+)
+
+sd_2_1 = Model(
+    name = 'sd-2.1',
+    base_provider = 'Stability AI',
+    best_provider = IterListProvider([NexraSD21])
+
+)
+
@@ -689,6 +832,13 @@ sd_3 = Model(
 )
 
+sd = Model(
+    name = 'sd',
+    base_provider = 'Stability AI',
+    best_provider = IterListProvider([sd_1_5.best_provider, sd_2_1.best_provider, sd_3.best_provider])
+
+)
+
 ### Playground ###
 playground_v2_5 = Model(
@@ -707,10 +857,17 @@ flux = Model(
 )
 
+flux_pro = Model(
+    name = 'flux-pro',
+    base_provider = 'Flux AI',
+    best_provider = IterListProvider([NexraFluxPro, AmigoChat])
+
+)
+
 flux_realism = Model(
     name = 'flux-realism',
     base_provider = 'Flux AI',
-    best_provider = IterListProvider([Airforce])
+    best_provider = IterListProvider([Airforce, AmigoChat])
 )
 
@@ -757,31 +914,48 @@ flux_schnell = Model(
 )
 
-### ###
+### OpenAI ###
 dalle_2 = Model(
     name = 'dalle-2',
-    base_provider = '',
-    best_provider = IterListProvider([Nexra])
+    base_provider = 'OpenAI',
+    best_provider = IterListProvider([NexraDallE2])
 )
 
 dalle_3 = Model(
     name = 'dalle-3',
-    base_provider = '',
+    base_provider = 'OpenAI',
     best_provider = IterListProvider([Airforce])
 )
 
 dalle = Model(
     name = 'dalle',
-    base_provider = '',
-    best_provider = IterListProvider([Nexra, dalle_2.best_provider, dalle_3.best_provider])
+    base_provider = 'OpenAI',
+    best_provider = IterListProvider([NexraDallE, dalle_2.best_provider, dalle_3.best_provider])
 )
 
 dalle_mini = Model(
     name = 'dalle-mini',
-    base_provider = '',
-    best_provider = IterListProvider([Nexra])
+    base_provider = 'OpenAI',
+    best_provider = IterListProvider([NexraDalleMini])
+
+)
+
+
+### Cagliostro Research Lab ###
+animagine_xl = Model(
+    name = 'animagine-xl',
+    base_provider = 'Cagliostro Research Lab',
+    best_provider = IterListProvider([NexraAnimagineXL])
+
+)
+
+### Midjourney ###
+midjourney = Model(
+    name = 'midjourney',
+    base_provider = 'Midjourney',
+    best_provider = IterListProvider([NexraMidjourney])
 )
 
@@ -789,7 +963,7 @@ dalle_mini = Model(
 emi = Model(
     name = 'emi',
     base_provider = '',
-    best_provider = IterListProvider([Nexra])
+    best_provider = IterListProvider([NexraEmi])
 )
 
@@ -800,13 +974,6 @@ any_dark = Model(
 )
 
-prodia = Model(
-    name = 'prodia',
-    base_provider = '',
-    best_provider = IterListProvider([Nexra])
-
-)
-
 class ModelUtils:
     """
     Utility class for mapping string identifiers to Model instances.
@@ -832,12 +999,17 @@ class ModelUtils:
         'gpt-4o-mini': gpt_4o_mini,
         'gpt-4': gpt_4,
         'gpt-4-turbo': gpt_4_turbo,
+
+        # o1
+        'o1': o1,
+        'o1-mini': o1_mini,
 
         ### Meta ###
         "meta-ai": meta,
 
         # llama-2
+        'llama-2-7b': llama_2_7b,
         'llama-2-13b': llama_2_13b,
 
         # llama-3
@@ -852,6 +1024,9 @@
         'llama-3.1-405b': llama_3_1_405b,
 
         # llama-3.2
+        'llama-3.2': llama_3_2,
+        'llama-3.2-1b': llama_3_2_1b,
+        'llama-3.2-3b': llama_3_2_3b,
         'llama-3.2-11b': llama_3_2_11b,
         'llama-3.2-90b': llama_3_2_90b,
 
@@ -875,6 +1050,7 @@
 
         ### Microsoft ###
+        'phi-2': phi_2,
         'phi_3_medium-4k': phi_3_medium_4k,
         'phi-3.5-mini': phi_3_5_mini,
 
@@ -888,6 +1064,7 @@
         'gemma-2b': gemma_2b,
         'gemma-2b-9b': gemma_2b_9b,
         'gemma-2b-27b': gemma_2b_27b,
+        'gemma-7b': gemma_7b,
 
         # gemma-2
         'gemma-2': gemma_2,
 
@@ -914,7 +1091,8 @@
 
         ### Blackbox AI ###
-        'blackbox': blackbox,
+        'blackboxai': blackboxai,
+        'blackboxai-pro': blackboxai_pro,
 
 
         ### CohereForAI ###
@@ -935,17 +1113,18 @@
         ### Qwen ###
         'qwen': qwen,
+        'qwen-1.5-0.5b': qwen_1_5_0_5b,
         'qwen-1.5-7b': qwen_1_5_7b,
         'qwen-1.5-14b': qwen_1_5_14b,
         'qwen-1.5-72b': qwen_1_5_72b,
         'qwen-1.5-110b': qwen_1_5_110b,
+        'qwen-1.5-1.8b': qwen_1_5_1_8b,
         'qwen-2-72b': qwen_2_72b,
 
 
         ### Zhipu AI ###
         'glm-3-6b': glm_3_6b,
         'glm-4-9b': glm_4_9b,
-        'glm-4': glm_4,
 
 
         ### 01-ai ###
@@ -983,6 +1162,7 @@
 
         ### OpenChat ###
+        'openchat-3.5': openchat_3_5,
         'openchat-3.6-8b': openchat_3_6_8b,
 
@@ -1012,6 +1192,18 @@
         'cosmosrp': cosmosrp,
 
 
+        ### TheBloke ###
+        'german-7b': german_7b,
+
+
+        ### Tinyllama ###
+        'tinyllama-1.1b': tinyllama_1_1b,
+
+
+        ### Fblgit ###
+        'cybertron-7b': cybertron_7b,
+
+
 
         #############
         ### Image ###
@@ -1019,6 +1211,11 @@
 
         ### Stability AI ###
         'sdxl': sdxl,
+        'sdxl-lora': sdxl_lora,
+        'sdxl-turbo': sdxl_turbo,
+        'sd': sd,
+        'sd-1.5': sd_1_5,
+        'sd-2.1': sd_2_1,
         'sd-3': sd_3,
 
@@ -1028,6 +1225,7 @@
 
         ### Flux AI ###
         'flux': flux,
+        'flux-pro': flux_pro,
         'flux-realism': flux_realism,
         'flux-anime': flux_anime,
         'flux-3d': flux_3d,
@@ -1037,14 +1235,24 @@
         'flux-schnell': flux_schnell,
 
 
-        ### ###
+        ### OpenAI ###
         'dalle': dalle,
         'dalle-2': dalle_2,
         'dalle-3': dalle_3,
         'dalle-mini': dalle_mini,
+
+
+        ### Cagliostro Research Lab ###
+        'animagine-xl': animagine_xl,
+
+
+        ### Midjourney ###
+        'midjourney': midjourney,
+
+
+        ### Other ###
         'emi': emi,
         'any-dark': any_dark,
-        'prodia': prodia,
     }
 
 _all_models = list(ModelUtils.convert.keys())
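Usage note (not part of the commit): the block below is a minimal sketch of how the registry edited above is typically consumed, assuming the g4f package from this tree is importable. ModelUtils.convert maps each string identifier registered in this diff to a Model instance, and _all_models is rebuilt from the dict keys, so the new aliases become resolvable as soon as the mapping entries land.

# Hypothetical sketch, not part of the diff: resolve an alias added in this
# commit and inspect how it is wired to its provider chain.
from g4f.models import ModelUtils, _all_models

model = ModelUtils.convert['o1-mini']      # alias registered by this commit
print(model.name)                          # 'o1-mini'
print(model.base_provider)                 # 'OpenAI'
print(model.best_provider)                 # IterListProvider over [AmigoChat]

# Every alias added here (e.g. 'llama-3.2-1b', 'sdxl-turbo', 'midjourney')
# also shows up in _all_models, which is derived from the dict keys.
print('midjourney' in _all_models)         # True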