field | value | date |
---|---|---|
author | Heiner Lohaus <hlohaus@users.noreply.github.com> | 2023-12-01 23:56:12 +0100 |
committer | Heiner Lohaus <hlohaus@users.noreply.github.com> | 2023-12-01 23:56:12 +0100 |
commit | e74af803e7add7790b366545f1fe6c80934de798 | |
tree | 4cdd527acfd2e5514a3ca638c136a0c5d59faf04 | |
parent | Added new provider PI (Hacky way to use) (#1291) | |
mode | file | lines changed |
---|---|---|
-rw-r--r-- | g4f/Provider/PI.py | 111 |
-rw-r--r-- | g4f/Provider/Pi.py | 93 |
-rw-r--r-- | g4f/Provider/__init__.py | 6 |
-rw-r--r-- | g4f/models.py | 5 |
-rw-r--r-- | piexample.py | 25 |
5 files changed, 98 insertions, 142 deletions
diff --git a/g4f/Provider/PI.py b/g4f/Provider/PI.py
deleted file mode 100644
index 1e2edde8..00000000
--- a/g4f/Provider/PI.py
+++ /dev/null
@@ -1,111 +0,0 @@
-from __future__ import annotations
-
-from ..typing import AsyncResult, Messages
-from .base_provider import AsyncGeneratorProvider
-
-import json
-import cloudscraper
-
-class PI(AsyncGeneratorProvider):
-    url = "https://chat-gpt.com"
-    working = True
-
-    @classmethod
-    async def create_async_generator(
-        cls,
-        model: str,
-        messages: Messages,
-        proxy: str = None,
-        **kwargs
-    ) -> AsyncResult:
-        Conversation = kwargs['conversation']
-        UserPrompt = messages[-1]
-        if UserPrompt['role'] == 'user':
-            UserPrompt = UserPrompt['content']
-        else:
-            UserPrompt = messages[-2]['content']
-        if Conversation == None:
-            Conversation = PI.Start_Conversation()
-        Answer = Ask_PI(UserPrompt,Conversation['sid'],Conversation['cookies'])
-
-        yield Answer[0]['text']
-
-    def Start_Conversation():
-        scraper.headers = {
-            'accept-type': 'application/json'
-        }
-        response = scraper.post('https://pi.ai/api/chat/start', data="{}",headers={'x-api-version': '3'})
-        cookies = response.cookies
-
-        if 'Just a moment' in response.text:
-            return {
-                'error': 'cloudflare detected',
-                'sid': None,
-                'cookies': None,
-            }
-        return {
-            'sid': response.json()['conversations'][0]['sid'],
-            'cookies': cookies
-        }
-
-    def GetConversationTitle(Conversation):
-        response = scraper.post('https://pi.ai/api/chat/start', data="{}",headers={'x-api-version': '3'}, cookies=Conversation['cookies'])
-        if 'Just a moment' in response.text:
-            return {
-                'error': 'cloudflare detected',
-                'title': 'Couldnt get the title',
-            }
-        return {
-            'title': response.json()['conversations'][0]['title']
-        }
-
-    def GetChatHistory(Conversation):
-        params = {
-            'conversation': Conversation['sid'],
-        }
-        response = scraper.get('https://pi.ai/api/chat/history', params=params, cookies=Conversation['cookies'])
-        if 'Just a moment' in response.text:
-            return {
-                'error': 'cloudflare detected',
-                'traceback': 'Couldnt get the chat history'
-            }
-        return response.json()
-
-session = cloudscraper.session()
-
-scraper = cloudscraper.create_scraper(
-    browser={
-        'browser': 'chrome',
-        'platform': 'windows',
-        'desktop': True
-    },
-    sess=session
-)
-
-scraper.headers = {
-    'Accept': '*/*',
-    'Accept-Encoding': 'deflate,gzip,br',
-}
-
-def Ask_PI(message,sid,cookies):
-    json_data = {
-        'text': message,
-        'conversation': sid,
-        'mode': 'BASE',
-    }
-    response = scraper.post('https://pi.ai/api/chat', json=json_data, cookies=cookies)
-
-    if 'Just a moment' in response.text:
-        return [{
-            'error': 'cloudflare detected',
-            'text': 'Couldnt generate the answer because we got detected by cloudflare please try again later'
-        }
-        ]
-    result = []
-    for line in response.iter_lines(chunk_size=1024, decode_unicode=True):
-        if line.startswith('data: {"text":'):
-            result.append(json.loads(line.split('data: ')[1].encode('utf-8')))
-        if line.startswith('data: {"title":'):
-            result.append(json.loads(line.split('data: ')[1].encode('utf-8')))
-
-    return result
diff --git a/g4f/Provider/Pi.py b/g4f/Provider/Pi.py
new file mode 100644
index 00000000..9ecebafb
--- /dev/null
+++ b/g4f/Provider/Pi.py
@@ -0,0 +1,93 @@
+from __future__ import annotations
+
+from ..typing import CreateResult, Messages
+from .base_provider import BaseProvider, format_prompt
+
+import json
+from cloudscraper import CloudScraper, session, create_scraper
+
+class Pi(BaseProvider):
+    url = "https://chat-gpt.com"
+    working = True
+    supports_stream = True
+
+    @classmethod
+    def create_completion(
+        cls,
+        model: str,
+        messages: Messages,
+        stream: bool,
+        proxy: str = None,
+        scraper: CloudScraper = None,
+        conversation: dict = None,
+        **kwargs
+    ) -> CreateResult:
+        if not scraper:
+            scraper = cls.get_scraper()
+        if not conversation:
+            conversation = cls.start_conversation(scraper)
+        answer = cls.ask(scraper, messages, conversation)
+
+        last_answer = 0
+        for line in answer:
+            if "text" in line:
+                yield line["text"][last_answer:]
+                last_answer = len(line["text"])
+
+    def get_scraper():
+        scraper = create_scraper(
+            browser={
+                'browser': 'chrome',
+                'platform': 'windows',
+                'desktop': True
+            },
+            sess=session()
+        )
+        scraper.headers = {
+            'Accept': '*/*',
+            'Accept-Encoding': 'deflate,gzip,br',
+        }
+        return scraper
+
+    def start_conversation(scraper: CloudScraper):
+        response = scraper.post('https://pi.ai/api/chat/start', data="{}", headers={
+            'accept': 'application/json',
+            'x-api-version': '3'
+        })
+        if 'Just a moment' in response.text:
+            raise RuntimeError('Error: Cloudflare detected')
+        return Conversation(
+            response.json()['conversations'][0]['sid'],
+            response.cookies
+        )
+
+    def get_chat_history(scraper: CloudScraper, conversation: Conversation):
+        params = {
+            'conversation': conversation.sid,
+        }
+        response = scraper.get('https://pi.ai/api/chat/history', params=params, cookies=conversation.cookies)
+        if 'Just a moment' in response.text:
+            raise RuntimeError('Error: Cloudflare detected')
+        return response.json()
+
+    def ask(scraper: CloudScraper, messages: Messages, conversation: Conversation):
+        json_data = {
+            'text': format_prompt(messages),
+            'conversation': conversation.sid,
+            'mode': 'BASE',
+        }
+        response = scraper.post('https://pi.ai/api/chat', json=json_data, cookies=conversation.cookies, stream=True)
+
+        for line in response.iter_lines(chunk_size=1024, decode_unicode=True):
+            if 'Just a moment' in line:
+                raise RuntimeError('Error: Cloudflare detected')
+            if line.startswith('data: {"text":'):
+                yield json.loads(line.split('data: ')[1])
+            if line.startswith('data: {"title":'):
+                yield json.loads(line.split('data: ')[1])
+
+class Conversation():
+    def __init__(self, sid: str, cookies):
+        self.sid = sid
+        self.cookies = cookies
+
\ No newline at end of file
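The new `Pi` provider above is synchronous and streaming: `get_scraper` builds a cloudscraper session, `start_conversation` opens a chat via `https://pi.ai/api/chat/start`, and `ask` posts to `https://pi.ai/api/chat` and parses the `data: {"text": ...}` lines, while `create_completion` yields only the new text of each update. Below is a minimal sketch of driving the class directly; it is not part of the commit and assumes this version of `g4f.Provider.Pi` is installed and pi.ai is reachable.

```python
from g4f.Provider import Pi

# Reuse one scraper/conversation pair across calls; create_completion accepts
# both as keyword arguments and only builds them itself when they are missing.
scraper = Pi.get_scraper()
conversation = Pi.start_conversation(scraper)

messages = [{"role": "user", "content": "Hello, who are you?"}]

# create_completion is a generator that yields incremental text deltas.
for chunk in Pi.create_completion(
    model="pi",
    messages=messages,
    stream=True,
    scraper=scraper,
    conversation=conversation,
):
    print(chunk, end="", flush=True)

# The same conversation object can be passed to the history helper.
history = Pi.get_chat_history(scraper, conversation)
```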
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index 199b4f27..efc94613 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -44,6 +44,7 @@ from .OnlineGpt import OnlineGpt
 from .Opchatgpts import Opchatgpts
 from .PerplexityAi import PerplexityAi
 from .Phind import Phind
+from .Pi import Pi
 from .TalkAi import TalkAi
 from .Vercel import Vercel
 from .Ylokh import Ylokh
@@ -65,10 +66,9 @@ __providers__: list[type[BaseProvider]] = [
 __all__: list[str] = [
     provider.__name__ for provider in __providers__
 ]
-__map__: dict[str, BaseProvider] = dict([
+__map__: dict[str, type[BaseProvider]] = dict([
     (provider.__name__, provider) for provider in __providers__
 ])
 
 class ProviderUtils:
-    convert: dict[str, BaseProvider] = __map__
-from .PI import PI
\ No newline at end of file
+    convert: dict[str, type[BaseProvider]] = __map__
\ No newline at end of file
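The `__init__.py` change registers `Pi` in place of the old trailing `from .PI import PI` import and tightens `__map__` and `ProviderUtils.convert` to `dict[str, type[BaseProvider]]`, since the map holds provider classes rather than instances. An illustrative lookup, assuming the package layout shown above:

```python
from g4f.Provider import Pi, ProviderUtils

provider_cls = ProviderUtils.convert["Pi"]  # a provider class, not an instance
assert provider_cls is Pi
print(provider_cls.url, provider_cls.supports_stream)
```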
diff --git a/g4f/models.py b/g4f/models.py
index e4b20ae5..2f86891d 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -11,7 +11,6 @@ from .Provider import (
     ChatgptAi,
     DeepInfra,
     OnlineGpt,
-    ChatgptX,
     ChatBase,
     Liaobots,
     GeekGpt,
@@ -27,7 +26,7 @@ from .Provider import (
     Bing,
     You,
     H2o,
-    PI,
+    Pi,
 )
 
 @dataclass(unsafe_hash=True)
@@ -264,7 +263,7 @@ llama70b_v2_chat = Model(
 pi = Model(
     name = 'pi',
     base_provider = 'inflection',
-    best_provider=PI
+    best_provider=Pi
 )
 
 class ModelUtils:
diff --git a/piexample.py b/piexample.py
deleted file mode 100644
index 23280d2b..00000000
--- a/piexample.py
+++ /dev/null
@@ -1,25 +0,0 @@
-from g4f import Provider
-
-import g4f
-
-Conversation = Provider.PI.Start_Conversation()
-
-Chat_History = Provider.PI.GetChatHistory(Conversation)
-
-response = g4f.ChatCompletion.create(
-    model="pi",
-    provider=g4f.Provider.PI,
-    messages=[
-        {
-            "role": "user",
-            "content": 'Hello who are you?'
-        }
-    ],
-    stream=False,
-    conversation=Conversation
-)
-
-for message in response:
-    print(message, flush=True, end='')
-
-Chat_Title = Provider.PI.GetConversationTitle(Conversation)
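With `piexample.py` removed and the old `Start_Conversation` and `GetChatHistory` helpers gone, the equivalent high-level call would presumably go through `g4f.ChatCompletion.create` with the renamed provider and streaming enabled. A hedged sketch follows, assuming `ChatCompletion.create` streams the provider's generator output when `stream=True`, as the deleted example's iteration over the response implies:

```python
import g4f
from g4f.Provider import Pi

# The new provider yields text deltas, so streaming is its natural mode.
response = g4f.ChatCompletion.create(
    model="pi",
    provider=Pi,
    messages=[{"role": "user", "content": "Hello, who are you?"}],
    stream=True,
)

for chunk in response:
    print(chunk, end="", flush=True)
```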