| author | xtekky <98614666+xtekky@users.noreply.github.com> | 2023-08-07 13:21:30 +0200 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2023-08-07 13:21:30 +0200 |
| commit | 388a54357e020a8edd8d87d5efb7fa524122fd8c (patch) | |
| tree | e90e4f0d5fbf4f0f877e1ac87be8f7a51d8f4179 | |
| parent | Merge pull request #777 from nonk123/close-inactive-issues-automatically (diff) | |
| parent | Update README.md (diff) | |
Diffstat
| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | README.md | 1 |
| -rw-r--r-- | g4f/Provider/Providers/opchatgpts.py | 42 |
| -rw-r--r-- | g4f/Provider/__init__.py | 1 |
3 files changed, 44 insertions, 0 deletions
```diff
diff --git a/README.md b/README.md
@@ -187,6 +187,7 @@ for token in chat_completion:
 | [b.ai-huan.xyz](https://b.ai-huan.xyz) | `g4f.Provider.BingHuan` | ✔️ | ✔️ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ |
 | [wewordle.org](https://wewordle.org/gptapi/v1/android/turbo) | `g4f.Provider.Wewordle` | ✔️ | ❌ | ❌ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ |
 | [chatgpt.ai](https://chatgpt.ai/gpt-4/) | `g4f.Provider.ChatgptAi` | ❌ | ✔️ | ❌ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
+| [opchatgpts.net](https://opchatgpts.net) | `g4f.Provider.opchatgpts` | ✔️ | ❌ | ❌ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
 
 ### Other Models
diff --git a/g4f/Provider/Providers/opchatgpts.py b/g4f/Provider/Providers/opchatgpts.py
new file mode 100644
index 00000000..0ff652fb
--- /dev/null
+++ b/g4f/Provider/Providers/opchatgpts.py
@@ -0,0 +1,42 @@
+import os
+import requests
+from ...typing import sha256, Dict, get_type_hints
+
+url = 'https://opchatgpts.net'
+model = ['gpt-3.5-turbo']
+supports_stream = False
+needs_auth = False
+working = True
+
+def _create_completion(model: str, messages: list, stream: bool = False, temperature: float = 0.8, max_tokens: int = 1024, system_prompt: str = "Converse as if you were an AI assistant. Be friendly, creative.", **kwargs):
+
+    data = {
+        'env': 'chatbot',
+        'session': 'N/A',
+        'prompt': "\n",
+        'context': system_prompt,
+        'messages': messages,
+        'newMessage': messages[::-1][0]["content"],
+        'userName': '<div class="mwai-name-text">User:</div>',
+        'aiName': '<div class="mwai-name-text">AI:</div>',
+        'model': 'gpt-3.5-turbo',
+        'temperature': temperature,
+        'maxTokens': max_tokens,
+        'maxResults': 1,
+        'apiKey': '',
+        'service': 'openai',
+        'embeddingsIndex': '',
+        'stop': ''
+    }
+
+    response = requests.post('https://opchatgpts.net/wp-json/ai-chatbot/v1/chat', json=data).json()
+
+    if response["success"]:
+
+        return response["reply"] # `yield (response["reply"])` doesn't work
+
+    raise Exception("Request failed: " + response)
+
+params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + \
+    '(%s)' % ', '.join(
+        [f"{name}: {get_type_hints(_create_completion)[name].__name__}" for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index b64e44f5..ee434400 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -23,6 +23,7 @@ from .Providers import (
     BingHuan,
     Wewordle,
     ChatgptAi,
+    opchatgpts,
 )
 
 Palm = Bard
```
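
For orientation, a minimal sketch of how the provider registered by this commit could be exercised through g4f's high-level interface. It assumes the `g4f.ChatCompletion.create` entry point shown in the repository README (the README hunk above sits next to its `for token in chat_completion:` example); the prompt text is illustrative.

```python
# Sketch only: calls the opchatgpts provider added in this commit through
# g4f's high-level API as documented in the repository README.
import g4f
from g4f.Provider import opchatgpts

# Inspect the parameters the provider advertises (the `params` string
# built at the bottom of opchatgpts.py).
print(opchatgpts.params)

response = g4f.ChatCompletion.create(
    model='gpt-3.5-turbo',    # the only model the provider lists
    provider=opchatgpts,      # route the request through opchatgpts.net
    messages=[{"role": "user", "content": "Say hello in one sentence."}],  # illustrative prompt
    stream=False,             # the provider sets supports_stream = False
)
print(response)
```

Because `_create_completion` returns the full reply as a single string (its inline comment notes that `yield` did not work here) and the module sets `supports_stream = False`, the non-streaming call above matches how the provider is meant to be used.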