Diffstat (limited to 'g4f/models.py'):
-rw-r--r--  g4f/models.py  625
1 file changed, 345 insertions(+), 280 deletions(-)
diff --git a/g4f/models.py b/g4f/models.py
index b6c5fd14..ddbeeddf 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -4,33 +4,35 @@ from dataclasses import dataclass
from .Provider import IterListProvider, ProviderType
from .Provider import (
- AI365VIP,
+ AiChatOnline,
Allyfy,
Bing,
+ Binjie,
+ Bixin123,
Blackbox,
ChatGot,
- Chatgpt4o,
Chatgpt4Online,
+ Chatgpt4o,
ChatgptFree,
+ CodeNews,
DDG,
DeepInfra,
DeepInfraImage,
+ FluxAirforce,
+ Free2GPT,
FreeChatgpt,
FreeGpt,
FreeNetfly,
Gemini,
GeminiPro,
- GeminiProChat,
GigaChat,
HuggingChat,
HuggingFace,
Koala,
Liaobots,
- LiteIcoding,
- MagickPenAsk,
- MagickPenChat,
- Marsyoo,
+ MagickPen,
MetaAI,
+ Nexra,
OpenaiChat,
PerplexityLabs,
Pi,
@@ -38,7 +40,10 @@ from .Provider import (
Reka,
Replicate,
ReplicateHome,
+ Snova,
TeachAnything,
+ TwitterBio,
+ Upstage,
You,
)
@@ -66,33 +71,18 @@ default = Model(
name = "",
base_provider = "",
best_provider = IterListProvider([
- Bing,
- You,
- OpenaiChat,
- FreeChatgpt,
- AI365VIP,
- Chatgpt4o,
DDG,
- ChatgptFree,
- Koala,
- Pizzagpt,
- ])
-)
-
-# GPT-3.5 too, but all providers supports long requests and responses
-gpt_35_long = Model(
- name = 'gpt-3.5-turbo',
- base_provider = 'openai',
- best_provider = IterListProvider([
- FreeGpt,
- You,
- Koala,
- ChatgptFree,
FreeChatgpt,
- DDG,
- AI365VIP,
+ HuggingChat,
Pizzagpt,
- Allyfy,
+ ChatgptFree,
+ ReplicateHome,
+ Upstage,
+ Blackbox,
+ Bixin123,
+ Binjie,
+ Free2GPT,
+ MagickPen,
])
)
@@ -101,56 +91,59 @@ gpt_35_long = Model(
############
### OpenAI ###
-### GPT-3.5 / GPT-4 ###
+# gpt-3
+gpt_3 = Model(
+ name = 'gpt-3',
+ base_provider = 'OpenAI',
+ best_provider = IterListProvider([
+ Nexra,
+ ])
+)
+
# gpt-3.5
gpt_35_turbo = Model(
name = 'gpt-3.5-turbo',
- base_provider = 'openai',
+ base_provider = 'OpenAI',
best_provider = IterListProvider([
- FreeGpt,
- You,
- Koala,
- ChatgptFree,
- FreeChatgpt,
- AI365VIP,
- Pizzagpt,
- Allyfy,
+ Allyfy, TwitterBio, Nexra, Bixin123, CodeNews,
])
)
# gpt-4
-gpt_4 = Model(
- name = 'gpt-4',
- base_provider = 'openai',
+gpt_4o = Model(
+ name = 'gpt-4o',
+ base_provider = 'OpenAI',
best_provider = IterListProvider([
- Bing, Chatgpt4Online
+ Liaobots, Chatgpt4o, OpenaiChat,
])
)
-gpt_4_turbo = Model(
- name = 'gpt-4-turbo',
- base_provider = 'openai',
+gpt_4o_mini = Model(
+ name = 'gpt-4o-mini',
+ base_provider = 'OpenAI',
best_provider = IterListProvider([
- Bing, Liaobots, LiteIcoding
+ DDG, Liaobots, You, FreeNetfly, Pizzagpt, ChatgptFree, AiChatOnline, CodeNews,
+ MagickPen, OpenaiChat, Koala,
])
)
-gpt_4o = Model(
- name = 'gpt-4o',
- base_provider = 'openai',
+
+gpt_4_turbo = Model(
+ name = 'gpt-4-turbo',
+ base_provider = 'OpenAI',
best_provider = IterListProvider([
- You, Liaobots, Chatgpt4o, AI365VIP, OpenaiChat, Marsyoo, LiteIcoding, MagickPenAsk,
+ Nexra, Bixin123, Liaobots, Bing
])
)
-gpt_4o_mini = Model(
- name = 'gpt-4o-mini',
- base_provider = 'openai',
+gpt_4 = Model(
+ name = 'gpt-4',
+ base_provider = 'OpenAI',
best_provider = IterListProvider([
- DDG, Liaobots, OpenaiChat, You, FreeNetfly, MagickPenChat,
+ Chatgpt4Online, Nexra, Binjie, Bing,
+ gpt_4_turbo.best_provider, gpt_4o.best_provider, gpt_4o_mini.best_provider
])
)
-
### GigaChat ###
gigachat = Model(
name = 'GigaChat:latest',
@@ -161,81 +154,65 @@ gigachat = Model(
### Meta ###
meta = Model(
- name = "meta",
- base_provider = "meta",
+ name = "meta-ai",
+ base_provider = "Meta",
best_provider = MetaAI
)
-llama_3_8b_instruct = Model(
- name = "meta-llama/Meta-Llama-3-8B-Instruct",
- base_provider = "meta",
- best_provider = IterListProvider([DeepInfra, PerplexityLabs, Replicate])
+llama_3_8b = Model(
+ name = "llama-3-8b",
+ base_provider = "Meta",
+ best_provider = IterListProvider([DeepInfra, Replicate])
)
-llama_3_70b_instruct = Model(
- name = "meta-llama/Meta-Llama-3-70B-Instruct",
- base_provider = "meta",
- best_provider = IterListProvider([DeepInfra, PerplexityLabs, Replicate])
+llama_3_70b = Model(
+ name = "llama-3-70b",
+ base_provider = "Meta",
+ best_provider = IterListProvider([ReplicateHome, DeepInfra, PerplexityLabs, Replicate])
)
-llama_3_70b_instruct = Model(
- name = "meta/meta-llama-3-70b-instruct",
- base_provider = "meta",
- best_provider = IterListProvider([ReplicateHome, TeachAnything])
+llama_3_1_8b = Model(
+ name = "llama-3.1-8b",
+ base_provider = "Meta",
+ best_provider = IterListProvider([Blackbox])
)
-llama_3_70b_chat_hf = Model(
- name = "meta-llama/Llama-3-70b-chat-hf",
- base_provider = "meta",
- best_provider = IterListProvider([DDG])
+llama_3_1_70b = Model(
+ name = "llama-3.1-70b",
+ base_provider = "Meta",
+ best_provider = IterListProvider([DDG, HuggingChat, FreeGpt, Blackbox, TeachAnything, Free2GPT, HuggingFace])
)
-llama_3_1_70b_instruct = Model(
- name = "meta-llama/Meta-Llama-3.1-70B-Instruct",
- base_provider = "meta",
- best_provider = IterListProvider([HuggingChat, HuggingFace])
+llama_3_1_405b = Model(
+ name = "llama-3.1-405b",
+ base_provider = "Meta",
+ best_provider = IterListProvider([HuggingChat, Blackbox, HuggingFace])
)
-llama_3_1_405b_instruct_FP8 = Model(
- name = "meta-llama/Meta-Llama-3.1-405B-Instruct-FP8",
- base_provider = "meta",
- best_provider = IterListProvider([HuggingChat, HuggingFace])
-)
-
-
### Mistral ###
mixtral_8x7b = Model(
- name = "mistralai/Mixtral-8x7B-Instruct-v0.1",
- base_provider = "huggingface",
- best_provider = IterListProvider([DeepInfra, HuggingFace, PerplexityLabs, HuggingChat, DDG, ReplicateHome])
-)
-
-mistral_7b_v02 = Model(
- name = "mistralai/Mistral-7B-Instruct-v0.2",
- base_provider = "huggingface",
- best_provider = IterListProvider([DeepInfra, HuggingFace, HuggingChat])
+ name = "mixtral-8x7b",
+ base_provider = "Mistral",
+ best_provider = IterListProvider([HuggingChat, DDG, ReplicateHome, TwitterBio, DeepInfra, HuggingFace,])
)
-
-### NousResearch ###
-Nous_Hermes_2_Mixtral_8x7B_DPO = Model(
- name = "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
- base_provider = "NousResearch",
- best_provider = IterListProvider([HuggingFace, HuggingChat])
+mistral_7b = Model(
+ name = "mistral-7b",
+ base_provider = "Mistral",
+ best_provider = IterListProvider([HuggingChat, HuggingFace, DeepInfra])
)
-
### 01-ai ###
-Yi_1_5_34B_chat = Model(
- name = "01-ai/Yi-1.5-34B-Chat",
+yi_1_5_34b = Model(
+ name = "yi-1.5-34b",
base_provider = "01-ai",
- best_provider = IterListProvider([HuggingFace, HuggingChat])
+ best_provider = IterListProvider([HuggingChat, HuggingFace])
)
### Microsoft ###
-Phi_3_mini_4k_instruct = Model(
- name = "microsoft/Phi-3-mini-4k-instruct",
+phi_3_mini_4k = Model(
+ name = "phi-3-mini-4k",
base_provider = "Microsoft",
best_provider = IterListProvider([HuggingFace, HuggingChat])
)
@@ -252,41 +229,22 @@ gemini = Model(
gemini_pro = Model(
name = 'gemini-pro',
base_provider = 'Google',
- best_provider = IterListProvider([GeminiPro, You, ChatGot, GeminiProChat, Liaobots, LiteIcoding])
+ best_provider = IterListProvider([GeminiPro, ChatGot, Liaobots])
)
gemini_flash = Model(
name = 'gemini-flash',
base_provider = 'Google',
- best_provider = IterListProvider([Liaobots])
-)
-
-gemini_1_5 = Model(
- name = 'gemini-1.5',
- base_provider = 'Google',
- best_provider = IterListProvider([LiteIcoding])
+ best_provider = IterListProvider([Liaobots, Blackbox])
)
# gemma
-gemma_2b_it = Model(
- name = 'gemma-2b-it',
+gemma_2b = Model(
+ name = 'gemma-2b',
base_provider = 'Google',
best_provider = IterListProvider([ReplicateHome])
)
-gemma_2_9b_it = Model(
- name = 'gemma-2-9b-it',
- base_provider = 'Google',
- best_provider = IterListProvider([PerplexityLabs])
-)
-
-gemma_2_27b_it = Model(
- name = 'gemma-2-27b-it',
- base_provider = 'Google',
- best_provider = IterListProvider([PerplexityLabs])
-)
-
-
### Anthropic ###
claude_2 = Model(
name = 'claude-2',
@@ -309,13 +267,13 @@ claude_2_1 = Model(
claude_3_opus = Model(
name = 'claude-3-opus',
base_provider = 'Anthropic',
- best_provider = IterListProvider([You, Liaobots])
+ best_provider = IterListProvider([Liaobots])
)
claude_3_sonnet = Model(
name = 'claude-3-sonnet',
base_provider = 'Anthropic',
- best_provider = IterListProvider([You, Liaobots])
+ best_provider = IterListProvider([Liaobots])
)
claude_3_5_sonnet = Model(
@@ -327,19 +285,7 @@ claude_3_5_sonnet = Model(
claude_3_haiku = Model(
name = 'claude-3-haiku',
base_provider = 'Anthropic',
- best_provider = IterListProvider([DDG, AI365VIP, Liaobots])
-)
-
-claude_3 = Model(
- name = 'claude-3',
- base_provider = 'Anthropic',
- best_provider = IterListProvider([LiteIcoding])
-)
-
-claude_3_5 = Model(
- name = 'claude-3.5',
- base_provider = 'Anthropic',
- best_provider = IterListProvider([LiteIcoding])
+ best_provider = IterListProvider([DDG, Liaobots])
)
@@ -351,14 +297,6 @@ reka_core = Model(
)
-### NVIDIA ###
-nemotron_4_340b_instruct = Model(
- name = 'nemotron-4-340b-instruct',
- base_provider = 'NVIDIA',
- best_provider = IterListProvider([PerplexityLabs])
-)
-
-
### Blackbox ###
blackbox = Model(
name = 'blackbox',
@@ -369,7 +307,7 @@ blackbox = Model(
### Databricks ###
dbrx_instruct = Model(
- name = 'databricks/dbrx-instruct',
+ name = 'dbrx-instruct',
base_provider = 'Databricks',
best_provider = IterListProvider([DeepInfra])
)
@@ -377,71 +315,110 @@ dbrx_instruct = Model(
### CohereForAI ###
command_r_plus = Model(
- name = 'CohereForAI/c4ai-command-r-plus',
+ name = 'command-r-plus',
base_provider = 'CohereForAI',
best_provider = IterListProvider([HuggingChat])
)
### iFlytek ###
-SparkDesk_v1_1 = Model(
- name = 'SparkDesk-v1.1',
+sparkdesk_v1_1 = Model(
+ name = 'sparkdesk-v1.1',
base_provider = 'iFlytek',
best_provider = IterListProvider([FreeChatgpt])
)
-
-### DeepSeek ###
-deepseek_coder = Model(
- name = 'deepseek-coder',
- base_provider = 'DeepSeek',
- best_provider = IterListProvider([FreeChatgpt])
-)
-
-deepseek_chat = Model(
- name = 'deepseek-chat',
- base_provider = 'DeepSeek',
+### Qwen ###
+qwen_1_5_14b = Model(
+ name = 'qwen-1.5-14b',
+ base_provider = 'Qwen',
best_provider = IterListProvider([FreeChatgpt])
)
-
-### Qwen ###
-Qwen2_7B_instruct = Model(
- name = 'Qwen2-7B-Instruct',
+qwen_turbo = Model(
+ name = 'qwen-turbo',
base_provider = 'Qwen',
- best_provider = IterListProvider([FreeChatgpt])
+ best_provider = IterListProvider([Bixin123])
)
### Zhipu AI ###
-glm4_9B_chat = Model(
- name = 'glm4-9B-chat',
+glm_3_6b = Model(
+ name = 'glm-3-6b',
base_provider = 'Zhipu AI',
best_provider = IterListProvider([FreeChatgpt])
)
-chatglm3_6B = Model(
- name = 'chatglm3-6B',
+glm_4_9b = Model(
+ name = 'glm-4-9B',
base_provider = 'Zhipu AI',
best_provider = IterListProvider([FreeChatgpt])
)
+glm_4 = Model(
+ name = 'glm-4',
+ base_provider = 'Zhipu AI',
+ best_provider = IterListProvider([CodeNews, glm_4_9b.best_provider,])
+)
### 01-ai ###
-Yi_1_5_9B_chat = Model(
- name = 'Yi-1.5-9B-Chat',
+yi_1_5_9b = Model(
+ name = 'yi-1.5-9b',
base_provider = '01-ai',
best_provider = IterListProvider([FreeChatgpt])
)
-### Other ###
+### Upstage ###
+solar_1_mini = Model(
+ name = 'solar-1-mini',
+ base_provider = 'Upstage',
+ best_provider = IterListProvider([Upstage])
+)
+
+### Pi ###
pi = Model(
name = 'pi',
base_provider = 'inflection',
best_provider = Pi
)
+### SambaNova ###
+samba_coe_v0_1 = Model(
+ name = 'samba-coe-v0.1',
+ base_provider = 'SambaNova',
+ best_provider = Snova
+)
+
+### Trong-Hieu Nguyen-Mau ###
+v1olet_merged_7b = Model(
+ name = 'v1olet-merged-7b',
+ base_provider = 'Trong-Hieu Nguyen-Mau',
+ best_provider = Snova
+)
+
+### Macadeliccc ###
+westlake_7b_v2 = Model(
+ name = 'westlake-7b-v2',
+ base_provider = 'Macadeliccc',
+ best_provider = Snova
+)
+
+### CookinAI ###
+donutlm_v1 = Model(
+ name = 'donutlm-v1',
+ base_provider = 'CookinAI',
+ best_provider = Snova
+)
+
+### DeepSeek ###
+deepseek = Model(
+ name = 'deepseek',
+ base_provider = 'DeepSeek',
+ best_provider = CodeNews
+)
+
+
#############
### Image ###
@@ -449,30 +426,82 @@ pi = Model(
### Stability AI ###
sdxl = Model(
- name = 'stability-ai/sdxl',
+ name = 'sdxl',
base_provider = 'Stability AI',
- best_provider = IterListProvider([DeepInfraImage])
+ best_provider = IterListProvider([ReplicateHome, DeepInfraImage])
)
-stable_diffusion_3 = Model(
- name = 'stability-ai/stable-diffusion-3',
+sd_3 = Model(
+ name = 'sd-3',
base_provider = 'Stability AI',
best_provider = IterListProvider([ReplicateHome])
)
-sdxl_lightning_4step = Model(
- name = 'bytedance/sdxl-lightning-4step',
+### Playground ###
+playground_v2_5 = Model(
+ name = 'playground-v2.5',
base_provider = 'Stability AI',
best_provider = IterListProvider([ReplicateHome])
)
-playground_v2_5_1024px_aesthetic = Model(
- name = 'playgroundai/playground-v2.5-1024px-aesthetic',
- base_provider = 'Stability AI',
- best_provider = IterListProvider([ReplicateHome])
+### Flux AI ###
+flux = Model(
+ name = 'flux',
+ base_provider = 'Flux AI',
+ best_provider = IterListProvider([FluxAirforce])
+
+)
+
+flux_realism = Model(
+ name = 'flux-realism',
+ base_provider = 'Flux AI',
+ best_provider = IterListProvider([FluxAirforce])
+
+)
+
+flux_anime = Model(
+ name = 'flux-anime',
+ base_provider = 'Flux AI',
+ best_provider = IterListProvider([FluxAirforce])
+
+)
+
+flux_3d = Model(
+ name = 'flux-3d',
+ base_provider = 'Flux AI',
+ best_provider = IterListProvider([FluxAirforce])
+
+)
+
+flux_disney = Model(
+ name = 'flux-disney',
+ base_provider = 'Flux AI',
+ best_provider = IterListProvider([FluxAirforce])
+
+)
+
+### ###
+dalle = Model(
+ name = 'dalle',
+ base_provider = '',
+ best_provider = IterListProvider([Nexra])
+
+)
+
+dalle_mini = Model(
+ name = 'dalle-mini',
+ base_provider = '',
+ best_provider = IterListProvider([Nexra])
+
+)
+
+emi = Model(
+ name = 'emi',
+ base_provider = '',
+ best_provider = IterListProvider([Nexra])
)
@@ -485,127 +514,163 @@ class ModelUtils:
"""
convert: dict[str, Model] = {
- ############
- ### Text ###
- ############
+############
+### Text ###
+############
- ### OpenAI ###
- ### GPT-3.5 / GPT-4 ###
- # gpt-3.5
- 'gpt-3.5-turbo': gpt_35_turbo,
- 'gpt-3.5-long': gpt_35_long,
+### OpenAI ###
+# gpt-3
+'gpt-3': gpt_3,
+
+# gpt-3.5
+'gpt-3.5-turbo': gpt_35_turbo,
- # gpt-4
- 'gpt-4o' : gpt_4o,
- 'gpt-4o-mini' : gpt_4o_mini,
- 'gpt-4' : gpt_4,
- 'gpt-4-turbo' : gpt_4_turbo,
+# gpt-4
+'gpt-4o' : gpt_4o,
+'gpt-4o-mini' : gpt_4o_mini,
+'gpt-4' : gpt_4,
+'gpt-4-turbo' : gpt_4_turbo,
- ### Meta ###
- "meta-ai": meta,
- 'llama-3-8b-instruct': llama_3_8b_instruct,
- 'llama-3-70b-instruct': llama_3_70b_instruct,
- 'llama-3-70b-chat': llama_3_70b_chat_hf,
- 'llama-3-70b-instruct': llama_3_70b_instruct,
+### Meta ###
+"meta-ai": meta,
+
+# llama-3
+'llama-3-8b': llama_3_8b,
+'llama-3-70b': llama_3_70b,
- 'llama-3.1-70b': llama_3_1_70b_instruct,
- 'llama-3.1-405b': llama_3_1_405b_instruct_FP8,
- 'llama-3.1-70b-instruct': llama_3_1_70b_instruct,
- 'llama-3.1-405b-instruct': llama_3_1_405b_instruct_FP8,
+# llama-3.1
+'llama-3.1-8b': llama_3_1_8b,
+'llama-3.1-70b': llama_3_1_70b,
+'llama-3.1-405b': llama_3_1_405b,
- ### Mistral (Opensource) ###
- 'mixtral-8x7b': mixtral_8x7b,
- 'mistral-7b-v02': mistral_7b_v02,
- ### NousResearch ###
- 'Nous-Hermes-2-Mixtral-8x7B-DPO': Nous_Hermes_2_Mixtral_8x7B_DPO,
+### Mistral ###
+'mixtral-8x7b': mixtral_8x7b,
+'mistral-7b': mistral_7b,
+
- ### 01-ai ###
- 'Yi-1.5-34b-chat': Yi_1_5_34B_chat,
+### 01-ai ###
+'yi-1.5-34b': yi_1_5_34b,
+
- ### Microsoft ###
- 'Phi-3-mini-4k-instruct': Phi_3_mini_4k_instruct,
+### Microsoft ###
+'phi-3-mini-4k': phi_3_mini_4k,
- ### Google ###
- # gemini
- 'gemini': gemini,
- 'gemini-pro': gemini_pro,
- 'gemini-pro': gemini_1_5,
- 'gemini-flash': gemini_flash,
+
+### Google ###
+# gemini
+'gemini': gemini,
+'gemini-pro': gemini_pro,
+'gemini-flash': gemini_flash,
- # gemma
- 'gemma-2b': gemma_2b_it,
- 'gemma-2-9b': gemma_2_9b_it,
- 'gemma-2-27b': gemma_2_27b_it,
+# gemma
+'gemma-2b': gemma_2b,
+
- ### Anthropic ###
- 'claude-2': claude_2,
- 'claude-2.0': claude_2_0,
- 'claude-2.1': claude_2_1,
+### Anthropic ###
+'claude-2': claude_2,
+'claude-2.0': claude_2_0,
+'claude-2.1': claude_2_1,
- 'claude-3-opus': claude_3_opus,
- 'claude-3-sonnet': claude_3_sonnet,
- 'claude-3-5-sonnet': claude_3_5_sonnet,
- 'claude-3-haiku': claude_3_haiku,
+'claude-3-opus': claude_3_opus,
+'claude-3-sonnet': claude_3_sonnet,
+'claude-3-5-sonnet': claude_3_5_sonnet,
+'claude-3-haiku': claude_3_haiku,
- 'claude-3-opus': claude_3,
- 'claude-3-5-sonnet': claude_3_5,
+### Reka AI ###
+'reka-core': reka_core,
+
+
+### Blackbox ###
+'blackbox': blackbox,
+
+### CohereForAI ###
+'command-r+': command_r_plus,
+
+
+### Databricks ###
+'dbrx-instruct': dbrx_instruct,
- ### Reka AI ###
- 'reka': reka_core,
- ### NVIDIA ###
- 'nemotron-4-340b-instruct': nemotron_4_340b_instruct,
+### GigaChat ###
+'gigachat': gigachat,
- ### Blackbox ###
- 'blackbox': blackbox,
- ### CohereForAI ###
- 'command-r+': command_r_plus,
+### iFlytek ###
+'sparkdesk-v1.1': sparkdesk_v1_1,
- ### Databricks ###
- 'dbrx-instruct': dbrx_instruct,
-
- ### GigaChat ###
- 'gigachat': gigachat,
- ### iFlytek ###
- 'SparkDesk-v1.1': SparkDesk_v1_1,
+### Qwen ###
+'qwen-1.5-14b': qwen_1_5_14b,
+'qwen-turbo': qwen_turbo,
- ### DeepSeek ###
- 'deepseek-coder': deepseek_coder,
- 'deepseek-chat': deepseek_chat,
- ### Qwen ###
- 'Qwen2-7b-instruct': Qwen2_7B_instruct,
+### Zhipu AI ###
+'glm-3-6b': glm_3_6b,
+'glm-4-9b': glm_4_9b,
+'glm-4': glm_4,
- ### Zhipu AI ###
- 'glm4-9b-chat': glm4_9B_chat,
- 'chatglm3-6b': chatglm3_6B,
- ### 01-ai ###
- 'Yi-1.5-9b-chat': Yi_1_5_9B_chat,
+### 01-ai ###
+'yi-1.5-9b': yi_1_5_9b,
- # Other
- 'pi': pi,
- #############
- ### Image ###
- #############
+### Upstage ###
+'solar-1-mini': solar_1_mini,
+
+
+### Pi ###
+'pi': pi,
+
+
+### SambaNova ###
+'samba-coe-v0.1': samba_coe_v0_1,
+
+
+### Trong-Hieu Nguyen-Mau ###
+'v1olet-merged-7b': v1olet_merged_7b,
+
+
+### Macadeliccc ###
+'westlake-7b-v2': westlake_7b_v2,
+
+
+### CookinAI ###
+'donutlm-v1': donutlm_v1,
+
+### DeepSeek ###
+'deepseek': deepseek,
+
- ### Stability AI ###
- 'sdxl': sdxl,
- 'stable-diffusion-3': stable_diffusion_3,
- ### ByteDance ###
- 'sdxl-lightning': sdxl_lightning_4step,
+#############
+### Image ###
+#############
- ### Playground ###
- 'playground-v2.5': playground_v2_5_1024px_aesthetic,
+### Stability AI ###
+'sdxl': sdxl,
+'sd-3': sd_3,
+
+
+### Playground ###
+'playground-v2.5': playground_v2_5,
+
+
+### Flux AI ###
+'flux': flux,
+'flux-realism': flux_realism,
+'flux-anime': flux_anime,
+'flux-3d': flux_3d,
+'flux-disney': flux_disney,
+
+### ###
+'dalle': dalle,
+'dalle-mini': dalle_mini,
+'emi': emi,
}
_all_models = list(ModelUtils.convert.keys())
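
For context, a minimal usage sketch of the registry this diff reworks. It relies only on names visible in the diff (Model, ModelUtils.convert, _all_models, gpt_4o_mini); whether the import succeeds depends on the installed g4f version and its provider dependencies, so treat it as an assumption rather than a guaranteed API.

# Hypothetical usage sketch: names are taken from the diff above and are not
# verified against any particular g4f release.
from g4f.models import ModelUtils, _all_models, gpt_4o_mini

# ModelUtils.convert maps string aliases to Model dataclass instances.
model = ModelUtils.convert['gpt-4o-mini']
print(model.name, model.base_provider)   # expected: gpt-4o-mini OpenAI
print(model is gpt_4o_mini)              # expected: True

# _all_models is simply the list of registered aliases.
print(len(_all_models), 'model aliases registered')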