author | H Lohaus <hlohaus@users.noreply.github.com> | 2024-03-15 13:44:15 +0100
---|---|---
committer | GitHub <noreply@github.com> | 2024-03-15 13:44:15 +0100
commit | 7df7971d202af6e88010f17e83740d65386699ca (patch) |
tree | da8ffc82319214511fb34e404b5b4851c1fef02d /g4f |
parent | Error checking for Vercel (#1704) (diff) |
parent | GigaChat models add (diff) |
Diffstat
-rw-r--r-- | g4f/Provider/GigaChat.py | 97
-rw-r--r-- | g4f/Provider/__init__.py | 1
-rw-r--r-- | g4f/Provider/gigachat_crt/russian_trusted_root_ca_pem.crt | 33
-rw-r--r-- | g4f/models.py | 28
4 files changed, 157 insertions, 2 deletions
diff --git a/g4f/Provider/GigaChat.py b/g4f/Provider/GigaChat.py
new file mode 100644
index 00000000..699353b1
--- /dev/null
+++ b/g4f/Provider/GigaChat.py
@@ -0,0 +1,97 @@
+from __future__ import annotations
+
+import base64
+import os
+import ssl
+import time
+import uuid
+
+import json
+from aiohttp import ClientSession, BaseConnector, TCPConnector
+from g4f.requests import raise_for_status
+
+from ..typing import AsyncResult, Messages, ImageType
+from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
+from ..image import to_bytes, is_accepted_format
+from ..errors import MissingAuthError
+from .helper import get_connector
+
+access_token = ''
+token_expires_at = 0
+
+ssl_ctx = ssl.create_default_context(
+    cafile=os.path.dirname(__file__) + '/gigachat_crt/russian_trusted_root_ca_pem.crt')
+
+
+class GigaChat(AsyncGeneratorProvider, ProviderModelMixin):
+    url = "https://developers.sber.ru/gigachat"
+    working = True
+    supports_message_history = True
+    supports_system_message = True
+    supports_stream = True
+
+    needs_auth = True
+    default_model = "GigaChat:latest"
+    models = ["GigaChat:latest", "GigaChat-Plus", "GigaChat-Pro"]
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        stream: bool = True,
+        proxy: str = None,
+        api_key: str = None,
+        scope: str = "GIGACHAT_API_PERS",
+        update_interval: float = 0,
+        **kwargs
+    ) -> AsyncResult:
+        global access_token, token_expires_at
+        model = cls.get_model(model)
+
+        if not api_key:
+            raise MissingAuthError('Missing "api_key"')
+
+        connector = TCPConnector(ssl_context=ssl_ctx)
+
+        async with ClientSession(connector=get_connector(connector, proxy)) as session:
+            if token_expires_at - int(time.time() * 1000) < 60000:
+                async with session.post(url="https://ngw.devices.sberbank.ru:9443/api/v2/oauth",
+                                        headers={"Authorization": f"Bearer {api_key}",
+                                                 "RqUID": str(uuid.uuid4()),
+                                                 "Content-Type": "application/x-www-form-urlencoded"},
+                                        data={"scope": scope}) as response:
+                    await raise_for_status(response)
+                    data = await response.json()
+                    access_token = data['access_token']
+                    token_expires_at = data['expires_at']
+
+            async with session.post(url="https://gigachat.devices.sberbank.ru/api/v1/chat/completions",
+                                    headers={"Authorization": f"Bearer {access_token}"},
+                                    json={
+                                        "model": model,
+                                        "messages": messages,
+                                        "stream": stream,
+                                        "update_interval": update_interval,
+                                        **kwargs
+                                    }) as response:
+                await raise_for_status(response)
+
+                async for line in response.content:
+                    if not stream:
+                        yield json.loads(line.decode("utf-8"))['choices'][0]['message']['content']
+                        return
+
+                    if line and line.startswith(b"data:"):
+                        line = line[6:-1]  # remove "data: " prefix and "\n" suffix
+                        if line.strip() == b"[DONE]":
+                            return
+                        else:
+                            msg = json.loads(line.decode("utf-8"))['choices'][0]
+                            content = msg['delta']['content']
+
+                            if content:
+                                yield content
+
+                            if 'finish_reason' in msg:
+                                return
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index 8db3c0d4..50a5da31 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -24,6 +24,7 @@ from .DeepInfra import DeepInfra
 from .FlowGpt import FlowGpt
 from .FreeChatgpt import FreeChatgpt
 from .FreeGpt import FreeGpt
+from .GigaChat import GigaChat
 from .GeminiPro import GeminiPro
 from .GeminiProChat import GeminiProChat
 from .GptTalkRu import GptTalkRu
diff --git a/g4f/Provider/gigachat_crt/russian_trusted_root_ca_pem.crt b/g4f/Provider/gigachat_crt/russian_trusted_root_ca_pem.crt
new file mode 100644
index 00000000..4c143a21
--- /dev/null
+++ b/g4f/Provider/gigachat_crt/russian_trusted_root_ca_pem.crt
@@ -0,0 +1,33 @@
+-----BEGIN CERTIFICATE-----
+MIIFwjCCA6qgAwIBAgICEAAwDQYJKoZIhvcNAQELBQAwcDELMAkGA1UEBhMCUlUx
+PzA9BgNVBAoMNlRoZSBNaW5pc3RyeSBvZiBEaWdpdGFsIERldmVsb3BtZW50IGFu
+ZCBDb21tdW5pY2F0aW9uczEgMB4GA1UEAwwXUnVzc2lhbiBUcnVzdGVkIFJvb3Qg
+Q0EwHhcNMjIwMzAxMjEwNDE1WhcNMzIwMjI3MjEwNDE1WjBwMQswCQYDVQQGEwJS
+VTE/MD0GA1UECgw2VGhlIE1pbmlzdHJ5IG9mIERpZ2l0YWwgRGV2ZWxvcG1lbnQg
+YW5kIENvbW11bmljYXRpb25zMSAwHgYDVQQDDBdSdXNzaWFuIFRydXN0ZWQgUm9v
+dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMfFOZ8pUAL3+r2n
+qqE0Zp52selXsKGFYoG0GM5bwz1bSFtCt+AZQMhkWQheI3poZAToYJu69pHLKS6Q
+XBiwBC1cvzYmUYKMYZC7jE5YhEU2bSL0mX7NaMxMDmH2/NwuOVRj8OImVa5s1F4U
+zn4Kv3PFlDBjjSjXKVY9kmjUBsXQrIHeaqmUIsPIlNWUnimXS0I0abExqkbdrXbX
+YwCOXhOO2pDUx3ckmJlCMUGacUTnylyQW2VsJIyIGA8V0xzdaeUXg0VZ6ZmNUr5Y
+Ber/EAOLPb8NYpsAhJe2mXjMB/J9HNsoFMBFJ0lLOT/+dQvjbdRZoOT8eqJpWnVD
+U+QL/qEZnz57N88OWM3rabJkRNdU/Z7x5SFIM9FrqtN8xewsiBWBI0K6XFuOBOTD
+4V08o4TzJ8+Ccq5XlCUW2L48pZNCYuBDfBh7FxkB7qDgGDiaftEkZZfApRg2E+M9
+G8wkNKTPLDc4wH0FDTijhgxR3Y4PiS1HL2Zhw7bD3CbslmEGgfnnZojNkJtcLeBH
+BLa52/dSwNU4WWLubaYSiAmA9IUMX1/RpfpxOxd4Ykmhz97oFbUaDJFipIggx5sX
+ePAlkTdWnv+RWBxlJwMQ25oEHmRguNYf4Zr/Rxr9cS93Y+mdXIZaBEE0KS2iLRqa
+OiWBki9IMQU4phqPOBAaG7A+eP8PAgMBAAGjZjBkMB0GA1UdDgQWBBTh0YHlzlpf
+BKrS6badZrHF+qwshzAfBgNVHSMEGDAWgBTh0YHlzlpfBKrS6badZrHF+qwshzAS
+BgNVHRMBAf8ECDAGAQH/AgEEMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsF
+AAOCAgEAALIY1wkilt/urfEVM5vKzr6utOeDWCUczmWX/RX4ljpRdgF+5fAIS4vH
+tmXkqpSCOVeWUrJV9QvZn6L227ZwuE15cWi8DCDal3Ue90WgAJJZMfTshN4OI8cq
+W9E4EG9wglbEtMnObHlms8F3CHmrw3k6KmUkWGoa+/ENmcVl68u/cMRl1JbW2bM+
+/3A+SAg2c6iPDlehczKx2oa95QW0SkPPWGuNA/CE8CpyANIhu9XFrj3RQ3EqeRcS
+AQQod1RNuHpfETLU/A2gMmvn/w/sx7TB3W5BPs6rprOA37tutPq9u6FTZOcG1Oqj
+C/B7yTqgI7rbyvox7DEXoX7rIiEqyNNUguTk/u3SZ4VXE2kmxdmSh3TQvybfbnXV
+4JbCZVaqiZraqc7oZMnRoWrXRG3ztbnbes/9qhRGI7PqXqeKJBztxRTEVj8ONs1d
+WN5szTwaPIvhkhO3CO5ErU2rVdUr89wKpNXbBODFKRtgxUT70YpmJ46VVaqdAhOZ
+D9EUUn4YaeLaS8AjSF/h7UkjOibNc4qVDiPP+rkehFWM66PVnP1Msh93tc+taIfC
+EYVMxjh8zNbFuoc7fzvvrFILLe7ifvEIUqSVIC/AzplM/Jxw7buXFeGP1qVCBEHq
+391d/9RAfaZ12zkwFsl+IKwE/OZxW8AHa9i1p4GO0YSNuczzEm4=
+-----END CERTIFICATE-----
\ No newline at end of file
diff --git a/g4f/models.py b/g4f/models.py
index ae2ef54b..6fcfdd14 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -2,7 +2,7 @@ from __future__ import annotations
 
 from dataclasses import dataclass
 
-from .Provider import RetryProvider, ProviderType
+from .Provider import RetryProvider, ProviderType
 from .Provider import (
     Chatgpt4Online,
     PerplexityLabs,
@@ -15,6 +15,7 @@ from .Provider import (
     ChatgptAi,
     DeepInfra,
     ChatBase,
+    GigaChat,
     Liaobots,
     FreeGpt,
     Llama2,
@@ -95,6 +96,24 @@ gpt_4_turbo = Model(
     best_provider = Bing
 )
 
+gigachat = Model(
+    name = 'GigaChat:latest',
+    base_provider = 'gigachat',
+    best_provider = GigaChat
+)
+
+gigachat_plus = Model(
+    name = 'GigaChat-Plus',
+    base_provider = 'gigachat',
+    best_provider = GigaChat
+)
+
+gigachat_pro = Model(
+    name = 'GigaChat-Pro',
+    base_provider = 'gigachat',
+    best_provider = GigaChat
+)
+
 llama2_7b = Model(
     name = "meta-llama/Llama-2-7b-chat-hf",
     base_provider = 'meta',
@@ -271,7 +290,12 @@ class ModelUtils:
         'llama2-70b': llama2_70b,
         'codellama-34b-instruct': codellama_34b_instruct,
         'codellama-70b-instruct': codellama_70b_instruct,
-
+
+        # GigaChat
+        'gigachat' : gigachat,
+        'gigachat_plus': gigachat_plus,
+        'gigachat_pro' : gigachat_pro,
+
         'mixtral-8x7b': mixtral_8x7b,
         'mistral-7b': mistral_7b,
         'dolphin-mixtral-8x7b': dolphin_mixtral_8x7b,
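
For context, a minimal sketch of how the new provider might be driven once this commit is merged. Only `GigaChat.create_async_generator`, its parameters, and the model names come from the diff above; reading the Sber authorisation key from a `GIGACHAT_API_KEY` environment variable is an illustrative assumption, not part of the commit.

```python
import asyncio
import os

from g4f.Provider import GigaChat

async def main() -> None:
    # Hypothetical: the authorisation data for the Sber OAuth endpoint is taken
    # from an environment variable here purely for illustration; the provider
    # only requires that some api_key string is passed in.
    api_key = os.environ["GIGACHAT_API_KEY"]

    # create_async_generator contains `yield`, so calling it returns an async
    # generator; with stream=True it yields text chunks parsed from the SSE
    # "data:" lines as they arrive.
    async for chunk in GigaChat.create_async_generator(
        model="GigaChat:latest",  # or "GigaChat-Plus" / "GigaChat-Pro"
        messages=[{"role": "user", "content": "Hello!"}],
        stream=True,
        api_key=api_key,
    ):
        print(chunk, end="", flush=True)

if __name__ == "__main__":
    asyncio.run(main())
```

The aliases added to `ModelUtils.convert` ('gigachat', 'gigachat_plus', 'gigachat_pro') should also let these models be selected by name through g4f's higher-level entry points.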