author | Luneye <73485421+Luneye@users.noreply.github.com> | 2023-08-28 16:55:36 +0200 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-08-28 16:55:36 +0200 |
commit | 01294db6995511de37e9078e03ce32e54dbdad52 (patch) | |
tree | d6c4c14f4e6a3a81660ddb75272bc5da81cacecc /g4f/Provider/Opchatgpts.py | |
parent | Update Bing.py (diff) | |
parent | ~ | code styling (diff) | |
Diffstat (limited to 'g4f/Provider/Opchatgpts.py')
-rw-r--r-- | g4f/Provider/Opchatgpts.py | 70 |
1 file changed, 33 insertions, 37 deletions
diff --git a/g4f/Provider/Opchatgpts.py b/g4f/Provider/Opchatgpts.py
index 9daa0ed9..a47b0099 100644
--- a/g4f/Provider/Opchatgpts.py
+++ b/g4f/Provider/Opchatgpts.py
@@ -1,37 +1,34 @@
 import requests
 
-from ..typing import Any, CreateResult
+from ..typing       import Any, CreateResult
 from .base_provider import BaseProvider
 
 
 class Opchatgpts(BaseProvider):
-    url = "https://opchatgpts.net"
-    working = True
+    url                   = "https://opchatgpts.net"
+    working               = True
     supports_gpt_35_turbo = True
 
     @staticmethod
     def create_completion(
         model: str,
         messages: list[dict[str, str]],
-        stream: bool,
-        **kwargs: Any,
-    ) -> CreateResult:
-        temperature = kwargs.get("temperature", 0.8)
-        max_tokens = kwargs.get("max_tokens", 1024)
+        stream: bool, **kwargs: Any) -> CreateResult:
+
+        temperature   = kwargs.get("temperature", 0.8)
+        max_tokens    = kwargs.get("max_tokens", 1024)
         system_prompt = kwargs.get(
             "system_prompt",
-            "Converse as if you were an AI assistant. Be friendly, creative.",
-        )
+            "Converse as if you were an AI assistant. Be friendly, creative.")
+
         payload = _create_payload(
-            messages=messages,
-            temperature=temperature,
-            max_tokens=max_tokens,
-            system_prompt=system_prompt,
-        )
+            messages      = messages,
+            temperature   = temperature,
+            max_tokens    = max_tokens,
+            system_prompt = system_prompt)
 
-        response = requests.post(
-            "https://opchatgpts.net/wp-json/ai-chatbot/v1/chat", json=payload
-        )
+        response = requests.post("https://opchatgpts.net/wp-json/ai-chatbot/v1/chat", json=payload)
+
         response.raise_for_status()
         yield response.json()["reply"]
 
@@ -39,24 +36,23 @@ class Opchatgpts(BaseProvider):
 def _create_payload(
     messages: list[dict[str, str]],
     temperature: float,
-    max_tokens: int,
-    system_prompt: str,
-):
+    max_tokens: int, system_prompt: str) -> dict:
+
     return {
-        "env": "chatbot",
-        "session": "N/A",
-        "prompt": "\n",
-        "context": system_prompt,
-        "messages": messages,
-        "newMessage": messages[::-1][0]["content"],
-        "userName": '<div class="mwai-name-text">User:</div>',
-        "aiName": '<div class="mwai-name-text">AI:</div>',
-        "model": "gpt-3.5-turbo",
-        "temperature": temperature,
-        "maxTokens": max_tokens,
-        "maxResults": 1,
-        "apiKey": "",
-        "service": "openai",
-        "embeddingsIndex": "",
-        "stop": "",
+        "env"             : "chatbot",
+        "session"         : "N/A",
+        "prompt"          : "\n",
+        "context"         : system_prompt,
+        "messages"        : messages,
+        "newMessage"      : messages[::-1][0]["content"],
+        "userName"        : '<div class="mwai-name-text">User:</div>',
+        "aiName"          : '<div class="mwai-name-text">AI:</div>',
+        "model"           : "gpt-3.5-turbo",
+        "temperature"     : temperature,
+        "maxTokens"       : max_tokens,
+        "maxResults"      : 1,
+        "apiKey"          : "",
+        "service"         : "openai",
+        "embeddingsIndex" : "",
+        "stop"            : "",
     }
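For context, the restyled create_completion above is a generator: it builds the payload with _create_payload, posts it to the opchatgpts.net WordPress ai-chatbot endpoint, and yields the "reply" field of the JSON response. A minimal usage sketch, assuming the g4f package from this repository is importable and the endpoint is still reachable (the message text and parameter values below are illustrative only):

# Minimal usage sketch; not part of the commit above.
from g4f.Provider.Opchatgpts import Opchatgpts

# Conversation history in the list-of-dicts shape the provider expects.
messages = [
    {"role": "user", "content": "Write a haiku about the sea."},
]

# create_completion is a staticmethod generator; temperature, max_tokens and
# system_prompt are optional keyword arguments with the defaults shown in the diff.
for chunk in Opchatgpts.create_completion(
    model="gpt-3.5-turbo",
    messages=messages,
    stream=False,
    temperature=0.8,
    max_tokens=1024,
):
    print(chunk)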