author | Heiner Lohaus <heiner@lohaus.eu> | 2023-10-27 22:59:14 +0200
committer | Heiner Lohaus <heiner@lohaus.eu> | 2023-10-27 22:59:14 +0200
commit | 79cf039a888eef117ef514a9ad30a6cb4dbf81b9 (patch)
tree | 93895bb5109837cced6e5f3eaa7a98395aeafafa /g4f/Provider/Geekgpt.py
parent | ~ | g4f `v-0.1.7.8` (diff)
Diffstat (limited to 'g4f/Provider/Geekgpt.py')
-rw-r--r-- | g4f/Provider/Geekgpt.py | 84
1 file changed, 0 insertions, 84 deletions
```diff
diff --git a/g4f/Provider/Geekgpt.py b/g4f/Provider/Geekgpt.py
deleted file mode 100644
index 3c577cf8..00000000
--- a/g4f/Provider/Geekgpt.py
+++ /dev/null
@@ -1,84 +0,0 @@
-from __future__ import annotations
-import requests, json
-
-from .base_provider import BaseProvider
-from ..typing import CreateResult, Messages
-from json import dumps
-
-
-class GeekGpt(BaseProvider):
-    url = 'https://chat.geekgpt.org'
-    supports_stream = True
-    working = True
-    supports_gpt_35_turbo = True
-    supports_gpt_4 = True
-
-    @classmethod
-    def create_completion(
-        cls,
-        model: str,
-        messages: Messages,
-        stream: bool,
-        **kwargs
-    ) -> CreateResult:
-        if not model:
-            model = "gpt-3.5-turbo"
-        json_data = {
-            'messages': messages,
-            'model': model,
-            'temperature': kwargs.get('temperature', 0.9),
-            'presence_penalty': kwargs.get('presence_penalty', 0),
-            'top_p': kwargs.get('top_p', 1),
-            'frequency_penalty': kwargs.get('frequency_penalty', 0),
-            'stream': True
-        }
-
-        data = dumps(json_data, separators=(',', ':'))
-
-        headers = {
-            'authority': 'ai.fakeopen.com',
-            'accept': '*/*',
-            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
-            'authorization': 'Bearer pk-this-is-a-real-free-pool-token-for-everyone',
-            'content-type': 'application/json',
-            'origin': 'https://chat.geekgpt.org',
-            'referer': 'https://chat.geekgpt.org/',
-            'sec-ch-ua': '"Chromium";v="118", "Google Chrome";v="118", "Not=A?Brand";v="99"',
-            'sec-ch-ua-mobile': '?0',
-            'sec-ch-ua-platform': '"macOS"',
-            'sec-fetch-dest': 'empty',
-            'sec-fetch-mode': 'cors',
-            'sec-fetch-site': 'cross-site',
-            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36',
-        }
-
-        response = requests.post("https://ai.fakeopen.com/v1/chat/completions",
-                                 headers=headers, data=data, stream=True)
-        response.raise_for_status()
-
-        for chunk in response.iter_lines():
-            if b'content' in chunk:
-                json_data = chunk.decode().replace("data: ", "")
-
-                if json_data == "[DONE]":
-                    break
-
-                try:
-                    content = json.loads(json_data)["choices"][0]["delta"].get("content")
-                except Exception as e:
-                    raise RuntimeError(f'error | {e} :', json_data)
-
-                if content:
-                    yield content
-
-    @classmethod
-    @property
-    def params(cls):
-        params = [
-            ('model', 'str'),
-            ('messages', 'list[dict[str, str]]'),
-            ('stream', 'bool'),
-            ('temperature', 'float'),
-        ]
-        param = ', '.join([': '.join(p) for p in params])
-        return f'g4f.provider.{cls.__name__} supports: ({param})'
```
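For context, the snippet below is a minimal, hypothetical sketch of how a provider such as the removed `GeekGpt` class would have been selected through the g4f client; the `g4f.ChatCompletion.create` call with `provider=` and `stream=True` is assumed from the v0.1.x interface and is not part of this commit.

```python
# Hypothetical usage sketch (not part of this commit): selecting the GeekGpt
# provider explicitly through the g4f client, assuming the v0.1.x API.
import g4f

# With stream=True, create() is assumed to return a generator of text chunks,
# mirroring the provider's create_completion() yielding content deltas.
response = g4f.ChatCompletion.create(
    model="gpt-3.5-turbo",
    provider=g4f.Provider.GeekGpt,
    messages=[{"role": "user", "content": "Hello"}],
    stream=True,
)

for chunk in response:
    print(chunk, end="", flush=True)
```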