From bda2d679275ebd6bc2a3aa84f8fbc93967bead63 Mon Sep 17 00:00:00 2001 From: zukixa <56563509+zukixa@users.noreply.github.com> Date: Wed, 28 Aug 2024 23:03:32 -0700 Subject: fix for 500 Internal Server Error #2199 [Request] Blackbox provider now supports Gemini and LLaMa 3.1 models #2198 with some stuff from #2196 --- g4f/Provider/MagickPenChat.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) (limited to 'g4f/Provider/MagickPenChat.py') diff --git a/g4f/Provider/MagickPenChat.py b/g4f/Provider/MagickPenChat.py index 40ef32c1..ade85c4b 100644 --- a/g4f/Provider/MagickPenChat.py +++ b/g4f/Provider/MagickPenChat.py @@ -8,12 +8,11 @@ from .helper import format_prompt class MagickPenChat(AsyncGeneratorProvider, ProviderModelMixin): - url = "https://api.magickpen.com" - api_endpoint = "/chat/free" + url = "https://magickpen.com/chat" + api_endpoint = "https://api.magickpen.com/chat/free" working = True supports_gpt_4 = True default_model = "gpt-4o-mini" - @classmethod async def create_async_generator( cls, @@ -44,7 +43,7 @@ class MagickPenChat(AsyncGeneratorProvider, ProviderModelMixin): data = { "history": [{"role": "user", "content": format_prompt(messages)}] } - async with session.post(f"{cls.url}{cls.api_endpoint}", json=data, proxy=proxy) as response: + async with session.post(f"{cls.api_endpoint}", json=data, proxy=proxy) as response: response.raise_for_status() async for chunk in response.content: if chunk: -- cgit v1.2.3