author | kqlio67 <kqlio67@users.noreply.github.com> | 2024-10-11 08:33:30 +0200
---|---|---
committer | kqlio67 <kqlio67@users.noreply.github.com> | 2024-10-11 08:33:30 +0200
commit | a9bc67362f2be529fe9165ebb13347195ba1ddcf (patch)
tree | 1a91836eaa94f14c18ad5d55f687ff8a2118c357 /g4f/Provider/nexra/NexraLlama.py
parent | feat(g4f/Provider/__init__.py): add new providers and update imports (diff)
Diffstat (limited to 'g4f/Provider/nexra/NexraLlama.py')
-rw-r--r-- | g4f/Provider/nexra/NexraLlama.py | 52 |
1 file changed, 0 insertions, 52 deletions
diff --git a/g4f/Provider/nexra/NexraLlama.py b/g4f/Provider/nexra/NexraLlama.py
deleted file mode 100644
index 9ed892e8..00000000
--- a/g4f/Provider/nexra/NexraLlama.py
+++ /dev/null
@@ -1,52 +0,0 @@
-from __future__ import annotations
-
-import json
-from aiohttp import ClientSession
-
-from ...typing import AsyncResult, Messages
-from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
-from ..helper import format_prompt
-
-
-class NexraLlama(AsyncGeneratorProvider, ProviderModelMixin):
-    label = "Nexra LLaMA 3.1"
-    api_endpoint = "https://nexra.aryahcr.cc/api/chat/complements"
-    models = ['llama-3.1']
-
-    @classmethod
-    async def create_async_generator(
-        cls,
-        model: str,
-        messages: Messages,
-        proxy: str = None,
-        **kwargs
-    ) -> AsyncResult:
-        headers = {
-            "Content-Type": "application/json"
-        }
-        async with ClientSession(headers=headers) as session:
-            data = {
-                "messages": [
-                    {'role': 'assistant', 'content': ''},
-                    {'role': 'user', 'content': format_prompt(messages)}
-                ],
-                "markdown": False,
-                "stream": True,
-                "model": model
-            }
-            async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
-                response.raise_for_status()
-                full_response = ''
-                async for line in response.content:
-                    if line:
-                        messages = line.decode('utf-8').split('\x1e')
-                        for message_str in messages:
-                            try:
-                                message = json.loads(message_str)
-                                if message.get('message'):
-                                    full_response = message['message']
-                                if message.get('finish'):
-                                    yield full_response.strip()
-                                    return
-                            except json.JSONDecodeError:
-                                pass
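For reference, the removed provider wrapped a single HTTP call: it POSTed the conversation to the Nexra completions endpoint and read back a stream of JSON records separated by the ASCII record separator (`\x1e`), keeping the latest `message` field and yielding it once a record carried `finish`. Below is a minimal standalone sketch of that request/parse loop, derived only from the endpoint, payload shape, and separator visible in the deleted file; whether the service still accepts this payload is an assumption, and the single-prompt signature is a simplification of the original `format_prompt(messages)` handling.

```python
from __future__ import annotations

import asyncio
import json

from aiohttp import ClientSession

# Endpoint and payload shape taken from the deleted NexraLlama provider.
API_ENDPOINT = "https://nexra.aryahcr.cc/api/chat/complements"


async def complete(prompt: str, model: str = "llama-3.1", proxy: str | None = None) -> str:
    """Send one prompt and return the final streamed message (sketch only)."""
    payload = {
        "messages": [
            {"role": "assistant", "content": ""},
            # The original provider flattened the whole conversation with
            # format_prompt(); a single user prompt is used here for brevity.
            {"role": "user", "content": prompt},
        ],
        "markdown": False,
        "stream": True,
        "model": model,
    }
    async with ClientSession(headers={"Content-Type": "application/json"}) as session:
        async with session.post(API_ENDPOINT, json=payload, proxy=proxy) as response:
            response.raise_for_status()
            full_response = ""
            # The stream is a sequence of JSON records separated by "\x1e";
            # each record holds the message so far, and the last one sets "finish".
            async for line in response.content:
                for record in line.decode("utf-8").split("\x1e"):
                    try:
                        message = json.loads(record)
                    except json.JSONDecodeError:
                        continue  # partial or empty record between separators
                    if message.get("message"):
                        full_response = message["message"]
                    if message.get("finish"):
                        return full_response.strip()
            return full_response.strip()


if __name__ == "__main__":
    print(asyncio.run(complete("Hello!")))
```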