Diffstat (limited to 'g4f/Provider/airforce')
-rw-r--r--  g4f/Provider/airforce/AirforceChat.py  | 10
-rw-r--r--  g4f/Provider/airforce/AirforceImage.py | 55
2 files changed, 29 insertions(+), 36 deletions(-)
diff --git a/g4f/Provider/airforce/AirforceChat.py b/g4f/Provider/airforce/AirforceChat.py
index b4b1eca3..8affbe5c 100644
--- a/g4f/Provider/airforce/AirforceChat.py
+++ b/g4f/Provider/airforce/AirforceChat.py
@@ -46,7 +46,7 @@ def split_messages(messages: Messages, chunk_size: int = 995) -> Messages:
class AirforceChat(AsyncGeneratorProvider, ProviderModelMixin):
label = "AirForce Chat"
- api_endpoint_completions = "https://api.airforce/chat/completions" # Replace with the real endpoint
+ api_endpoint = "https://api.airforce/chat/completions"
supports_stream = True
supports_system_message = True
supports_message_history = True
@@ -118,8 +118,8 @@ class AirforceChat(AsyncGeneratorProvider, ProviderModelMixin):
'openchat-3.5-0106',
# qwen
- #'Qwen1.5-72B-Chat', Empty answer
- #'Qwen1.5-110B-Chat', Empty answer
+ #'Qwen1.5-72B-Chat', # Empty answer
+ #'Qwen1.5-110B-Chat', # Empty answer
'Qwen2-72B-Instruct',
'Qwen2.5-7B-Instruct-Turbo',
'Qwen2.5-72B-Instruct-Turbo',
@@ -350,7 +350,7 @@ class AirforceChat(AsyncGeneratorProvider, ProviderModelMixin):
}
async with ClientSession(headers=headers) as session:
- async with session.post(cls.api_endpoint_completions, json=data, proxy=proxy) as response:
+ async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
response.raise_for_status()
text = ""
if stream:
@@ -362,7 +362,7 @@ class AirforceChat(AsyncGeneratorProvider, ProviderModelMixin):
chunk = json.loads(json_str)
if 'choices' in chunk and chunk['choices']:
content = chunk['choices'][0].get('delta', {}).get('content', '')
- text += content # Collect the deltas
+ text += content
except json.JSONDecodeError as e:
print(f"Error decoding JSON: {json_str}, Error: {e}")
elif line.strip() == "[DONE]":
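
The streaming branch above simply accumulates the per-chunk deltas into `text`. A minimal standalone sketch of that parsing step, assuming the usual `data: {...}` line framing for the stream (the prefix handling itself lies outside this hunk); the sample lines are illustrative, not captured API output:

    import json

    def accumulate_deltas(lines: list[str]) -> str:
        # Collect delta content from streamed chunks until the "[DONE]" sentinel.
        text = ""
        for line in lines:
            if not line.startswith("data:"):
                continue
            json_str = line[len("data:"):].strip()
            if json_str == "[DONE]":
                break
            try:
                chunk = json.loads(json_str)
            except json.JSONDecodeError as e:
                print(f"Error decoding JSON: {json_str}, Error: {e}")
                continue
            if chunk.get("choices"):
                text += chunk["choices"][0].get("delta", {}).get("content", "")
        return text

    # Illustrative input only:
    print(accumulate_deltas([
        'data: {"choices": [{"delta": {"content": "Hel"}}]}',
        'data: {"choices": [{"delta": {"content": "lo"}}]}',
        'data: [DONE]',
    ]))  # -> Hello
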
diff --git a/g4f/Provider/airforce/AirforceImage.py b/g4f/Provider/airforce/AirforceImage.py
index 010d1a94..443c0f6b 100644
--- a/g4f/Provider/airforce/AirforceImage.py
+++ b/g4f/Provider/airforce/AirforceImage.py
@@ -1,46 +1,47 @@
from __future__ import annotations
from aiohttp import ClientSession
+from urllib.parse import urlencode
import random
from ...typing import AsyncResult, Messages
-from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
from ...image import ImageResponse
+from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
class AirforceImage(AsyncGeneratorProvider, ProviderModelMixin):
label = "Airforce Image"
#url = "https://api.airforce"
- api_endpoint_imagine2 = "https://api.airforce/imagine2"
+ api_endpoint = "https://api.airforce/imagine2"
#working = True
default_model = 'flux'
image_models = [
'flux',
- 'flux-realism',
+ 'flux-realism',
'flux-anime',
'flux-3d',
'flux-disney',
'flux-pixel',
'flux-4o',
'any-dark',
+ 'any-uncensored',
'stable-diffusion-xl-base',
'stable-diffusion-xl-lightning',
+ 'Flux-1.1-Pro',
]
models = [*image_models]
model_aliases = {
"sdxl": "stable-diffusion-xl-base",
- "sdxl": "stable-diffusion-xl-lightning",
+ "sdxl": "stable-diffusion-xl-lightning",
+ "flux-pro": "Flux-1.1-Pro",
}
-
-
+
@classmethod
def get_model(cls, model: str) -> str:
if model in cls.models:
return model
- elif model in cls.model_aliases:
- return cls.model_aliases[model]
else:
return cls.default_model
@@ -49,49 +50,41 @@ class AirforceImage(AsyncGeneratorProvider, ProviderModelMixin):
cls,
model: str,
messages: Messages,
- size: str = '1:1',
+ size: str = '1:1', # "1:1", "16:9", "9:16", "21:9", "9:21", "1:2", "2:1"
proxy: str = None,
**kwargs
) -> AsyncResult:
model = cls.get_model(model)
headers = {
- 'accept': '*/*',
+ 'accept': 'image/avif,image/webp,image/apng,image/svg+xml,image/*,*/*;q=0.8',
'accept-language': 'en-US,en;q=0.9',
- 'authorization': 'Bearer missing api key',
'cache-control': 'no-cache',
- 'origin': 'https://llmplayground.net',
+ 'dnt': '1',
'pragma': 'no-cache',
'priority': 'u=1, i',
'referer': 'https://llmplayground.net/',
'sec-ch-ua': '"Not?A_Brand";v="99", "Chromium";v="130"',
'sec-ch-ua-mobile': '?0',
'sec-ch-ua-platform': '"Linux"',
- 'sec-fetch-dest': 'empty',
- 'sec-fetch-mode': 'cors',
+ 'sec-fetch-dest': 'image',
+ 'sec-fetch-mode': 'no-cors',
'sec-fetch-site': 'cross-site',
'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36'
}
-
+
async with ClientSession(headers=headers) as session:
- prompt = messages[-1]['content']
- seed = random.randint(0, 4294967295)
+ seed = random.randint(0, 58463)
params = {
'model': model,
- 'prompt': prompt,
+ 'prompt': messages[-1]["content"],
'size': size,
- 'seed': str(seed)
+ 'seed': seed
}
- async with session.get(cls.api_endpoint_imagine2, params=params, proxy=proxy) as response:
- response.raise_for_status()
- if response.status == 200:
- content_type = response.headers.get('Content-Type', '')
- if 'image' in content_type:
- image_url = str(response.url)
- yield ImageResponse(image_url, alt="Airforce generated image")
- else:
- content = await response.text()
- yield f"Unexpected content type: {content_type}\nResponse content: {content}"
+ full_url = f"{cls.api_endpoint}?{urlencode(params)}"
+
+ async with session.get(full_url, headers=headers, proxy=proxy) as response:
+ if response.status == 200 and response.headers.get('content-type', '').startswith('image'):
+ yield ImageResponse(images=[full_url], alt="Generated Image")
else:
- error_content = await response.text()
- yield f"Error: {error_content}"
+ raise Exception(f"Error: status {response.status}, content type {response.headers.get('content-type')}")
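
Taken together, the rewritten AirforceImage flow builds the imagine2 URL with urlencode and yields that URL when the response is an image. A rough usage sketch under the same assumptions (endpoint, parameter names, and seed range taken from the diff; the prompt here is a placeholder):

    import asyncio
    import random
    from urllib.parse import urlencode

    from aiohttp import ClientSession

    API_ENDPOINT = "https://api.airforce/imagine2"  # endpoint from the diff above

    async def fetch_image_url(prompt: str, model: str = "flux", size: str = "1:1") -> str:
        # Build the query string the same way the provider does and return the URL
        # itself if the server answers with an image.
        params = {
            "model": model,
            "prompt": prompt,
            "size": size,
            "seed": random.randint(0, 58463),
        }
        full_url = f"{API_ENDPOINT}?{urlencode(params)}"
        async with ClientSession() as session:
            async with session.get(full_url) as response:
                content_type = response.headers.get("content-type", "")
                if response.status == 200 and content_type.startswith("image"):
                    return full_url
                raise Exception(f"Error: status {response.status}, content type {content_type}")

    # Example with a placeholder prompt:
    # asyncio.run(fetch_image_url("a watercolor lighthouse"))
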