summary refs log tree commit diff stats
path: root/g4f/models.py
diff options
context:
space:
mode:
author     xtekky <98614666+xtekky@users.noreply.github.com>  2023-08-07 13:20:27 +0200
committer  GitHub <noreply@github.com>  2023-08-07 13:20:27 +0200
commit  8d024192df662d8445265c9a4f5c7bea908fc0a6 (patch)
tree    4749eb44df477c790442d912a40b86313e23d39d  /g4f/models.py
parent  update image (diff)
parent  refactor: refactor models (diff)
download  gpt4free-8d024192df662d8445265c9a4f5c7bea908fc0a6.tar
          gpt4free-8d024192df662d8445265c9a4f5c7bea908fc0a6.tar.gz
          gpt4free-8d024192df662d8445265c9a4f5c7bea908fc0a6.tar.bz2
          gpt4free-8d024192df662d8445265c9a4f5c7bea908fc0a6.tar.lz
          gpt4free-8d024192df662d8445265c9a4f5c7bea908fc0a6.tar.xz
          gpt4free-8d024192df662d8445265c9a4f5c7bea908fc0a6.tar.zst
          gpt4free-8d024192df662d8445265c9a4f5c7bea908fc0a6.zip
Diffstat (limited to '')
-rw-r--r--  g4f/models.py  468
1 files changed, 240 insertions, 228 deletions
diff --git a/g4f/models.py b/g4f/models.py
index 95be4849..3a049614 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -1,232 +1,244 @@
-from g4f import Provider
+from types import ModuleType
+from . import Provider
+from dataclasses import dataclass
+@dataclass
class Model:
- class model:
- name: str
- base_provider: str
- best_provider: str
-
- class gpt_35_turbo:
- name: str = 'gpt-3.5-turbo'
- base_provider: str = 'openai'
- best_provider: Provider.Provider = Provider.Forefront
-
- class gpt_4:
- name: str = 'gpt-4'
- base_provider: str = 'openai'
- best_provider: Provider.Provider = Provider.Bing
- best_providers: list = [Provider.Bing, Provider.Lockchat]
-
- class claude_instant_v1_100k:
- name: str = 'claude-instant-v1-100k'
- base_provider: str = 'anthropic'
- best_provider: Provider.Provider = Provider.Vercel
-
- class claude_instant_v1:
- name: str = 'claude-instant-v1'
- base_provider: str = 'anthropic'
- best_provider: Provider.Provider = Provider.Vercel
-
- class claude_v1_100k:
- name: str = 'claude-v1-100k'
- base_provider: str = 'anthropic'
- best_provider: Provider.Provider = Provider.Vercel
-
- class claude_v1:
- name: str = 'claude-v1'
- base_provider: str = 'anthropic'
- best_provider: Provider.Provider = Provider.Vercel
-
- class alpaca_7b:
- name: str = 'alpaca-7b'
- base_provider: str = 'replicate'
- best_provider: Provider.Provider = Provider.Vercel
-
- class stablelm_tuned_alpha_7b:
- name: str = 'stablelm-tuned-alpha-7b'
- base_provider: str = 'replicate'
- best_provider: Provider.Provider = Provider.Vercel
-
- class bloom:
- name: str = 'bloom'
- base_provider: str = 'huggingface'
- best_provider: Provider.Provider = Provider.Vercel
-
- class bloomz:
- name: str = 'bloomz'
- base_provider: str = 'huggingface'
- best_provider: Provider.Provider = Provider.Vercel
-
- class flan_t5_xxl:
- name: str = 'flan-t5-xxl'
- base_provider: str = 'huggingface'
- best_provider: Provider.Provider = Provider.Vercel
-
- class flan_ul2:
- name: str = 'flan-ul2'
- base_provider: str = 'huggingface'
- best_provider: Provider.Provider = Provider.Vercel
-
- class gpt_neox_20b:
- name: str = 'gpt-neox-20b'
- base_provider: str = 'huggingface'
- best_provider: Provider.Provider = Provider.Vercel
-
- class oasst_sft_4_pythia_12b_epoch_35:
- name: str = 'oasst-sft-4-pythia-12b-epoch-3.5'
- base_provider: str = 'huggingface'
- best_provider: Provider.Provider = Provider.Vercel
-
- class santacoder:
- name: str = 'santacoder'
- base_provider: str = 'huggingface'
- best_provider: Provider.Provider = Provider.Vercel
-
- class command_medium_nightly:
- name: str = 'command-medium-nightly'
- base_provider: str = 'cohere'
- best_provider: Provider.Provider = Provider.Vercel
-
- class command_xlarge_nightly:
- name: str = 'command-xlarge-nightly'
- base_provider: str = 'cohere'
- best_provider: Provider.Provider = Provider.Vercel
-
- class code_cushman_001:
- name: str = 'code-cushman-001'
- base_provider: str = 'openai'
- best_provider: Provider.Provider = Provider.Vercel
-
- class code_davinci_002:
- name: str = 'code-davinci-002'
- base_provider: str = 'openai'
- best_provider: Provider.Provider = Provider.Vercel
-
- class text_ada_001:
- name: str = 'text-ada-001'
- base_provider: str = 'openai'
- best_provider: Provider.Provider = Provider.Vercel
-
- class text_babbage_001:
- name: str = 'text-babbage-001'
- base_provider: str = 'openai'
- best_provider: Provider.Provider = Provider.Vercel
-
- class text_curie_001:
- name: str = 'text-curie-001'
- base_provider: str = 'openai'
- best_provider: Provider.Provider = Provider.Vercel
-
- class text_davinci_002:
- name: str = 'text-davinci-002'
- base_provider: str = 'openai'
- best_provider: Provider.Provider = Provider.Vercel
-
- class text_davinci_003:
- name: str = 'text-davinci-003'
- base_provider: str = 'openai'
- best_provider: Provider.Provider = Provider.Vercel
-
- class palm:
- name: str = 'palm'
- base_provider: str = 'google'
- best_provider: Provider.Provider = Provider.Bard
-
-
- """ 'falcon-40b': Model.falcon_40b,
- 'falcon-7b': Model.falcon_7b,
- 'llama-13b': Model.llama_13b,"""
-
- class falcon_40b:
- name: str = 'falcon-40b'
- base_provider: str = 'huggingface'
- best_provider: Provider.Provider = Provider.H2o
-
- class falcon_7b:
- name: str = 'falcon-7b'
- base_provider: str = 'huggingface'
- best_provider: Provider.Provider = Provider.H2o
-
- class llama_13b:
- name: str = 'llama-13b'
- base_provider: str = 'huggingface'
- best_provider: Provider.Provider = Provider.H2o
-
- class gpt_35_turbo_16k:
- name: str = 'gpt-3.5-turbo-16k'
- base_provider: str = 'openai'
- best_provider: Provider.Provider = Provider.EasyChat
-
- class gpt_35_turbo_0613:
- name: str = 'gpt-3.5-turbo-0613'
- base_provider: str = 'openai'
- best_provider: Provider.Provider = Provider.EasyChat
-
- class gpt_35_turbo_16k_0613:
- name: str = 'gpt-3.5-turbo-16k-0613'
- base_provider: str = 'openai'
- best_provider: Provider.Provider = Provider.EasyChat
-
- class gpt_4_32k:
- name: str = 'gpt-4-32k'
- base_provider: str = 'openai'
- best_provider = None
-
- class gpt_4_0613:
- name: str = 'gpt-4-0613'
- base_provider: str = 'openai'
- best_provider = None
-
+ name: str
+ base_provider: str
+ best_provider: ModuleType | None
+
+
+gpt_35_turbo = Model(
+ name="gpt-3.5-turbo",
+ base_provider="openai",
+ best_provider=Provider.Forefront,
+)
+
+gpt_4 = Model(
+ name="gpt-4",
+ base_provider="openai",
+ best_provider=Provider.Bing,
+)
+
+claude_instant_v1_100k = Model(
+ name="claude-instant-v1-100k",
+ base_provider="anthropic",
+ best_provider=Provider.Vercel,
+)
+
+claude_instant_v1 = Model(
+ name="claude-instant-v1",
+ base_provider="anthropic",
+ best_provider=Provider.Vercel,
+)
+
+claude_v1_100k = Model(
+ name="claude-v1-100k",
+ base_provider="anthropic",
+ best_provider=Provider.Vercel,
+)
+
+claude_v1 = Model(
+ name="claude-v1",
+ base_provider="anthropic",
+ best_provider=Provider.Vercel,
+)
+
+alpaca_7b = Model(
+ name="alpaca-7b",
+ base_provider="replicate",
+ best_provider=Provider.Vercel,
+)
+
+stablelm_tuned_alpha_7b = Model(
+ name="stablelm-tuned-alpha-7b",
+ base_provider="replicate",
+ best_provider=Provider.Vercel,
+)
+
+bloom = Model(
+ name="bloom",
+ base_provider="huggingface",
+ best_provider=Provider.Vercel,
+)
+
+bloomz = Model(
+ name="bloomz",
+ base_provider="huggingface",
+ best_provider=Provider.Vercel,
+)
+
+flan_t5_xxl = Model(
+ name="flan-t5-xxl",
+ base_provider="huggingface",
+ best_provider=Provider.Vercel,
+)
+
+flan_ul2 = Model(
+ name="flan-ul2",
+ base_provider="huggingface",
+ best_provider=Provider.Vercel,
+)
+
+gpt_neox_20b = Model(
+ name="gpt-neox-20b",
+ base_provider="huggingface",
+ best_provider=Provider.Vercel,
+)
+
+oasst_sft_4_pythia_12b_epoch_35 = Model(
+ name="oasst-sft-4-pythia-12b-epoch-3.5",
+ base_provider="huggingface",
+ best_provider=Provider.Vercel,
+)
+
+santacoder = Model(
+ name="santacoder",
+ base_provider="huggingface",
+ best_provider=Provider.Vercel,
+)
+
+command_medium_nightly = Model(
+ name="command-medium-nightly",
+ base_provider="cohere",
+ best_provider=Provider.Vercel,
+)
+
+command_xlarge_nightly = Model(
+ name="command-xlarge-nightly",
+ base_provider="cohere",
+ best_provider=Provider.Vercel,
+)
+
+code_cushman_001 = Model(
+ name="code-cushman-001",
+ base_provider="openai",
+ best_provider=Provider.Vercel,
+)
+
+code_davinci_002 = Model(
+ name="code-davinci-002",
+ base_provider="openai",
+ best_provider=Provider.Vercel,
+)
+
+text_ada_001 = Model(
+ name="text-ada-001",
+ base_provider="openai",
+ best_provider=Provider.Vercel,
+)
+
+text_babbage_001 = Model(
+ name="text-babbage-001",
+ base_provider="openai",
+ best_provider=Provider.Vercel,
+)
+
+text_curie_001 = Model(
+ name="text-curie-001",
+ base_provider="openai",
+ best_provider=Provider.Vercel,
+)
+
+text_davinci_002 = Model(
+ name="text-davinci-002",
+ base_provider="openai",
+ best_provider=Provider.Vercel,
+)
+
+text_davinci_003 = Model(
+ name="text-davinci-003",
+ base_provider="openai",
+ best_provider=Provider.Vercel,
+)
+
+palm = Model(
+ name="palm",
+ base_provider="google",
+ best_provider=Provider.Bard,
+)
+
+falcon_40b = Model(
+ name="falcon-40b",
+ base_provider="huggingface",
+ best_provider=Provider.H2o,
+)
+
+falcon_7b = Model(
+ name="falcon-7b",
+ base_provider="huggingface",
+ best_provider=Provider.H2o,
+)
+
+llama_13b = Model(
+ name="llama-13b",
+ base_provider="huggingface",
+ best_provider=Provider.H2o,
+)
+
+gpt_35_turbo_16k = Model(
+ name="gpt-3.5-turbo-16k",
+ base_provider="openai",
+ best_provider=Provider.EasyChat,
+)
+
+gpt_35_turbo_0613 = Model(
+ name="gpt-3.5-turbo-0613",
+ base_provider="openai",
+ best_provider=Provider.EasyChat,
+)
+
+gpt_35_turbo_16k_0613 = Model(
+ name="gpt-3.5-turbo-16k-0613",
+ base_provider="openai",
+ best_provider=Provider.EasyChat,
+)
+
+gpt_4_32k = Model(name="gpt-4-32k", base_provider="openai", best_provider=None)
+
+gpt_4_0613 = Model(name="gpt-4-0613", base_provider="openai", best_provider=None)
+
+
class ModelUtils:
- convert: dict = {
- 'gpt-3.5-turbo': Model.gpt_35_turbo,
- 'gpt-3.6-turbo-16k': Model.gpt_35_turbo_16k,
- 'gpt-3.5-turbo-0613': Model.gpt_35_turbo_0613,
- 'gpt-3.5-turbo-16k-0613': Model.gpt_35_turbo_16k_0613,
-
- 'gpt-4': Model.gpt_4,
- 'gpt-4-32k': Model.gpt_4_32k,
- 'gpt-4-0613': Model.gpt_4_0613,
-
- 'claude-instant-v1-100k': Model.claude_instant_v1_100k,
- 'claude-v1-100k': Model.claude_v1_100k,
- 'claude-instant-v1': Model.claude_instant_v1,
- 'claude-v1': Model.claude_v1,
-
- 'alpaca-7b': Model.alpaca_7b,
- 'stablelm-tuned-alpha-7b': Model.stablelm_tuned_alpha_7b,
-
- 'bloom': Model.bloom,
- 'bloomz': Model.bloomz,
-
- 'flan-t5-xxl': Model.flan_t5_xxl,
- 'flan-ul2': Model.flan_ul2,
-
- 'gpt-neox-20b': Model.gpt_neox_20b,
- 'oasst-sft-4-pythia-12b-epoch-3.5': Model.oasst_sft_4_pythia_12b_epoch_35,
- 'santacoder': Model.santacoder,
-
- 'command-medium-nightly': Model.command_medium_nightly,
- 'command-xlarge-nightly': Model.command_xlarge_nightly,
-
- 'code-cushman-001': Model.code_cushman_001,
- 'code-davinci-002': Model.code_davinci_002,
-
- 'text-ada-001': Model.text_ada_001,
- 'text-babbage-001': Model.text_babbage_001,
- 'text-curie-001': Model.text_curie_001,
- 'text-davinci-002': Model.text_davinci_002,
- 'text-davinci-003': Model.text_davinci_003,
-
- 'palm2': Model.palm,
- 'palm': Model.palm,
- 'google': Model.palm,
- 'google-bard': Model.palm,
- 'google-palm': Model.palm,
- 'bard': Model.palm,
-
- 'falcon-40b': Model.falcon_40b,
- 'falcon-7b': Model.falcon_7b,
- 'llama-13b': Model.llama_13b,
- } \ No newline at end of file
+ convert: dict[str, Model] = {
+ "gpt-3.5-turbo": gpt_35_turbo,
+ "gpt-3.5-turbo-16k": gpt_35_turbo_16k,
+ "gpt-3.5-turbo-0613": gpt_35_turbo_0613,
+ "gpt-3.5-turbo-16k-0613": gpt_35_turbo_16k_0613,
+ "gpt-4": gpt_4,
+ "gpt-4-32k": gpt_4_32k,
+ "gpt-4-0613": gpt_4_0613,
+ "claude-instant-v1-100k": claude_instant_v1_100k,
+ "claude-v1-100k": claude_v1_100k,
+ "claude-instant-v1": claude_instant_v1,
+ "claude-v1": claude_v1,
+ "alpaca-7b": alpaca_7b,
+ "stablelm-tuned-alpha-7b": stablelm_tuned_alpha_7b,
+ "bloom": bloom,
+ "bloomz": bloomz,
+ "flan-t5-xxl": flan_t5_xxl,
+ "flan-ul2": flan_ul2,
+ "gpt-neox-20b": gpt_neox_20b,
+ "oasst-sft-4-pythia-12b-epoch-3.5": oasst_sft_4_pythia_12b_epoch_35,
+ "santacoder": santacoder,
+ "command-medium-nightly": command_medium_nightly,
+ "command-xlarge-nightly": command_xlarge_nightly,
+ "code-cushman-001": code_cushman_001,
+ "code-davinci-002": code_davinci_002,
+ "text-ada-001": text_ada_001,
+ "text-babbage-001": text_babbage_001,
+ "text-curie-001": text_curie_001,
+ "text-davinci-002": text_davinci_002,
+ "text-davinci-003": text_davinci_003,
+ "palm2": palm,
+ "palm": palm,
+ "google": palm,
+ "google-bard": palm,
+ "google-palm": palm,
+ "bard": palm,
+ "falcon-40b": falcon_40b,
+ "falcon-7b": falcon_7b,
+ "llama-13b": llama_13b,
+ }