From 3e7bee6741dc8b6ee8013a4aec3606fc315976b9 Mon Sep 17 00:00:00 2001
From: kqlio67
Date: Tue, 22 Oct 2024 13:25:34 +0300
Subject: Updated (g4f/models.py)

---
 g4f/Provider/nexra/NexraMidjourney.py | 72 +++++++++++++++++------------------
 g4f/models.py                         | 12 ++++++
 2 files changed, 46 insertions(+), 38 deletions(-)

diff --git a/g4f/Provider/nexra/NexraMidjourney.py b/g4f/Provider/nexra/NexraMidjourney.py
index e43cb164..2eb57e29 100644
--- a/g4f/Provider/nexra/NexraMidjourney.py
+++ b/g4f/Provider/nexra/NexraMidjourney.py
@@ -1,66 +1,62 @@
 from __future__ import annotations
 
-from aiohttp import ClientSession
 import json
-
-from ...typing import AsyncResult, Messages
-from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
+import requests
+from ...typing import CreateResult, Messages
+from ..base_provider import ProviderModelMixin, AbstractProvider
 from ...image import ImageResponse
 
-
-class NexraMidjourney(AsyncGeneratorProvider, ProviderModelMixin):
+class NexraMidjourney(AbstractProvider, ProviderModelMixin):
     label = "Nexra Midjourney"
     url = "https://nexra.aryahcr.cc/documentation/midjourney/en"
     api_endpoint = "https://nexra.aryahcr.cc/api/image/complements"
-    working = False
-
-    default_model = 'midjourney'
+    working = True
+
+    default_model = "midjourney"
     models = [default_model]
 
     @classmethod
     def get_model(cls, model: str) -> str:
         return cls.default_model
-
+
     @classmethod
-    async def create_async_generator(
+    def create_completion(
         cls,
         model: str,
         messages: Messages,
-        proxy: str = None,
         response: str = "url", # base64 or url
         **kwargs
-    ) -> AsyncResult:
-        # Retrieve the correct model to use
+    ) -> CreateResult:
        model = cls.get_model(model)
 
-        # Format the prompt from the messages
-        prompt = messages[0]['content']
-
         headers = {
-            "Content-Type": "application/json"
+            'Content-Type': 'application/json'
         }
-        payload = {
-            "prompt": prompt,
+
+        data = {
+            "prompt": messages[-1]["content"],
             "model": model,
             "response": response
         }
+
+        response = requests.post(cls.api_endpoint, headers=headers, json=data)
 
-        async with ClientSession(headers=headers) as session:
-            async with session.post(cls.api_endpoint, json=payload, proxy=proxy) as response:
-                response.raise_for_status()
-                text_data = await response.text()
+        result = cls.process_response(response)
+        yield result
 
-                try:
-                    # Parse the JSON response
-                    json_start = text_data.find('{')
-                    json_data = text_data[json_start:]
-                    data = json.loads(json_data)
-
-                    # Check if the response contains images
-                    if 'images' in data and len(data['images']) > 0:
-                        image_url = data['images'][0]
-                        yield ImageResponse(image_url, prompt)
-                    else:
-                        yield ImageResponse("No images found in the response.", prompt)
-                except json.JSONDecodeError:
-                    yield ImageResponse("Failed to parse JSON. Response might not be in JSON format.", prompt)
+    @classmethod
+    def process_response(cls, response):
+        if response.status_code == 200:
+            try:
+                content = response.text.strip()
+                content = content.lstrip('_')
+                data = json.loads(content)
+                if data.get('status') and data.get('images'):
+                    image_url = data['images'][0]
+                    return ImageResponse(images=[image_url], alt="Generated Image")
+                else:
+                    return "Error: No image URL found in the response"
+            except json.JSONDecodeError as e:
+                return f"Error: Unable to decode JSON response. Details: {str(e)}"
+        else:
+            return f"Error: {response.status_code}, Response: {response.text}"
diff --git a/g4f/models.py b/g4f/models.py
index 8aece1ec..6fa2fca1 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -50,6 +50,7 @@ from .Provider import (
     NexraEmi,
     NexraFluxPro,
     NexraGeminiPro,
+    NexraMidjourney,
     NexraQwen,
     OpenaiChat,
     PerplexityLabs,
@@ -835,6 +836,14 @@ dalle = Model(
 )
 
 
+### Midjourney ###
+midjourney = Model(
+    name = 'midjourney',
+    base_provider = 'Midjourney',
+    best_provider = NexraMidjourney
+
+)
+
 ### Other ###
 emi = Model(
     name = 'emi',
@@ -1109,6 +1118,9 @@ class ModelUtils:
         'dalle': dalle,
         'dalle-2': dalle_2,
 
+        ### Midjourney ###
+        'midjourney': midjourney,
+
         ### Other ###
         'emi': emi,
 
-- 
cgit v1.2.3