path: root/openai_rev/theb/__init__.py
author    noptuno <repollo.marrero@gmail.com>  2023-04-28 02:40:47 +0200
committer noptuno <repollo.marrero@gmail.com>  2023-04-28 02:40:47 +0200
commit    6f6a73987201c9c303047c61389b82ad98b15597 (patch)
tree      bf67eb590d49979d6740bc1e94b4018df48bce98 /openai_rev/theb/__init__.py
parent    Resolved merge conflicts and merged pr_218 into STREAMLIT_CHAT_IMPLEMENTATION (diff)
parent    Merging PR_218 openai_rev package with new streamlit chat app (diff)
Diffstat (limited to 'openai_rev/theb/__init__.py')
-rw-r--r--  openai_rev/theb/__init__.py | 52
1 file changed, 52 insertions(+), 0 deletions(-)
diff --git a/openai_rev/theb/__init__.py b/openai_rev/theb/__init__.py
new file mode 100644
index 00000000..fa79fdd9
--- /dev/null
+++ b/openai_rev/theb/__init__.py
@@ -0,0 +1,52 @@
+from json import loads
+from queue import Queue, Empty
+from re import findall
+from threading import Thread
+
+from curl_cffi import requests
+
+
+class Completion:
+ # experimental
+ part1 = '{"role":"assistant","id":"chatcmpl'
+ part2 = '"},"index":0,"finish_reason":null}]}}'
+ regex = rf'{part1}(.*){part2}'
+
+ timer = None
+ message_queue = Queue()
+ stream_completed = False
+
+ @classmethod
+ def request(cls, prompt: str):
+ headers = {
+ 'authority': 'chatbot.theb.ai',
+ 'content-type': 'application/json',
+ 'origin': 'https://chatbot.theb.ai',
+ 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
+ }
+
+ requests.post(
+ 'https://chatbot.theb.ai/api/chat-process',
+ headers=headers,
+ content_callback=Completion.handle_stream_response,
+ json={'prompt': prompt, 'options': {}},
+ )
+
+ Completion.stream_completed = True
+
+ @staticmethod
+ def create(prompt: str):
+ Thread(target=Completion.request, args=[prompt]).start()
+
+ while not Completion.stream_completed or not Completion.message_queue.empty():
+ try:
+ message = Completion.message_queue.get(timeout=0.01)
+ for message in findall(Completion.regex, message):
+ yield loads(Completion.part1 + message + Completion.part2)['delta']
+
+ except Empty:
+ pass
+
+ @staticmethod
+ def handle_stream_response(response):
+ Completion.message_queue.put(response.decode())
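Usage note (not part of the commit): a minimal sketch of how the streaming generator added in this file might be consumed. It assumes the openai_rev package from this tree is importable, and that each value yielded by Completion.create() is one decoded 'delta' chunk; the exact shape of that chunk depends on the chatbot.theb.ai response format targeted by the '# experimental' regex above.

    # Minimal usage sketch (assumption: openai_rev from this commit is on the import path).
    from openai_rev.theb import Completion

    # Completion.create() starts the HTTP request in a background thread and yields
    # parsed 'delta' chunks from the internal message queue as they arrive.
    for chunk in Completion.create(prompt='Summarize what a streaming completion is.'):
        print(chunk, end='', flush=True)
    print()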