From bda2d679275ebd6bc2a3aa84f8fbc93967bead63 Mon Sep 17 00:00:00 2001
From: zukixa <56563509+zukixa@users.noreply.github.com>
Date: Wed, 28 Aug 2024 23:03:32 -0700
Subject: fix for 500 Internal Server Error #2199 [Request] Blackbox provider
 now support Gemini and LLaMa 3.1 models #2198 with some stuff from #2196

---
 g4f/Provider/Blackbox.py | 20 +++++++++++++++++---
 1 file changed, 17 insertions(+), 3 deletions(-)

(limited to 'g4f/Provider/Blackbox.py')

diff --git a/g4f/Provider/Blackbox.py b/g4f/Provider/Blackbox.py
index 3ba7abf4..fd84875e 100644
--- a/g4f/Provider/Blackbox.py
+++ b/g4f/Provider/Blackbox.py
@@ -14,7 +14,13 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
     url = "https://www.blackbox.ai"
     working = True
     default_model = 'blackbox'
-
+    models = [
+        default_model,
+        "gemini-1.5-flash",
+        "llama-3.1-8b",
+        'llama-3.1-70b',
+        'llama-3.1-405b',
+    ]
     @classmethod
     async def create_async_generator(
         cls,
@@ -28,7 +34,8 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
         if image is not None:
             messages[-1]["data"] = {
                 "fileText": image_name,
-                "imageBase64": to_data_uri(image)
+                "imageBase64": to_data_uri(image),
+                "title": str(uuid.uuid4())
             }

         headers = {
@@ -48,7 +55,13 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
         async with ClientSession(headers=headers) as session:
             random_id = secrets.token_hex(16)
             random_user_id = str(uuid.uuid4())
-
+            model_id_map = {
+                "blackbox": {},
+                "gemini-1.5-flash": {'mode': True, 'id': 'Gemini'},
+                "llama-3.1-8b": {'mode': True, 'id': "llama-3.1-8b"},
+                'llama-3.1-70b': {'mode': True, 'id': "llama-3.1-70b"},
+                'llama-3.1-405b': {'mode': True, 'id': "llama-3.1-405b"}
+            }
             data = {
                 "messages": messages,
                 "id": random_id,
@@ -62,6 +75,7 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
                 "webSearchMode": False,
                 "userSystemPrompt": "",
                 "githubToken": None,
+                "trendingAgentModel": model_id_map[model], # if you actually test this on the site, just ask each model "yo", weird behavior imo
                 "maxTokens": None
             }

--
cgit v1.2.3
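
For reference, a minimal usage sketch (not part of the patch) showing how the new
model ids could be exercised. It assumes the standard g4f AsyncGeneratorProvider
call shape, create_async_generator(model, messages), and the import path
g4f.Provider.Blackbox; adjust both if your checkout differs.

    import asyncio

    from g4f.Provider.Blackbox import Blackbox

    async def main():
        # "llama-3.1-70b" is one of the ids added to Blackbox.models by this patch;
        # it is routed through model_id_map into the "trendingAgentModel" field.
        messages = [{"role": "user", "content": "yo"}]
        async for chunk in Blackbox.create_async_generator(model="llama-3.1-70b", messages=messages):
            print(chunk, end="")

    asyncio.run(main())

Selecting "blackbox" instead should send an empty trendingAgentModel mapping, i.e.
keep the provider's default behavior.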