author     Valerii <81074936+valerii-chirkov@users.noreply.github.com>  2023-05-05 00:14:29 +0200
committer  GitHub <noreply@github.com>  2023-05-05 00:14:29 +0200
commit     b3754facf9ada5bbf018a3b38151a2bd384afd52 (patch)
tree       9f7b519081274c8f1c735aa2145b935acf335755 /gui
parent     add(quora/tests): Added a module with quora tests. (diff)
parent     Merge pull request #443 from TheTrueShell/main (diff)
Diffstat (limited to 'gui')
-rw-r--r--  gui/README.md                         4
-rw-r--r--  gui/pywebio-gui/README.md            24
-rw-r--r--  gui/pywebio-gui/pywebio-usesless.py  59
-rw-r--r--  gui/streamlit_chat_app.py            66
4 files changed, 138 insertions, 15 deletions
diff --git a/gui/README.md b/gui/README.md
index c638c4dc..707fd36d 100644
--- a/gui/README.md
+++ b/gui/README.md
@@ -2,6 +2,8 @@
This code provides a Graphical User Interface (GUI) for gpt4free. Users can ask questions and get answers from GPT-4 APIs, utilizing multiple API implementations. The project contains two different Streamlit applications: `streamlit_app.py` and `streamlit_chat_app.py`.
+In addition, a new GUI script implemented with PyWebIO has been added in the pywebio-gui folder. If you run into errors with the Streamlit version, you can try the PyWebIO version instead.
+
Installation
------------
@@ -69,4 +71,4 @@ There is a bug in `streamlit_chat_app.py` right now that I haven't pinpointed ye
License
-------
-This project is licensed under the MIT License.
\ No newline at end of file
+This project is licensed under the MIT License.
diff --git a/gui/pywebio-gui/README.md b/gui/pywebio-gui/README.md
new file mode 100644
index 00000000..2b99c075
--- /dev/null
+++ b/gui/pywebio-gui/README.md
@@ -0,0 +1,24 @@
+# GUI with PyWebIO
+Simple, fast, and with fewer errors.
+It only requires:
+```bash
+pip install gpt4free
+pip install pywebio
+```
+Double-clicking pywebio-usesless.py will run it.
+
+PS: Currently only 'usesless' is implemented, and this GUI is expected to be updated infrequently, with a focus on stability.
+
+↓ The original zh-Hans-CN introduction follows, translated to English.
+
+# A minimal GUI implemented with PyWebIO
+Simple, fast, and with few errors.
+It only requires:
+```bash
+pip install gpt4free
+pip install pywebio
+```
+
+Double-click pywebio-usesless.py to run it.
+
+PS: Currently only 'usesless' is implemented; this GUI will probably be updated infrequently, with stability as the goal.
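Before launching the GUI, the install can be sanity-checked from a plain Python shell. This is a minimal sketch (not part of the diff), assuming the same `usesless.Completion.create` call that `pywebio-usesless.py` in the next diff relies on:

```python
# Minimal sanity check, assuming `pip install gpt4free pywebio` has been run.
from gpt4free import usesless

req = usesless.Completion.create(prompt="hello", parentMessageId="")
print(req["text"])  # the reply text, the same field the GUI prints
```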
diff --git a/gui/pywebio-gui/pywebio-usesless.py b/gui/pywebio-gui/pywebio-usesless.py
new file mode 100644
index 00000000..c0843be6
--- /dev/null
+++ b/gui/pywebio-gui/pywebio-usesless.py
@@ -0,0 +1,59 @@
+from gpt4free import usesless
+import time
+from pywebio import start_server, config
+from pywebio.input import *
+from pywebio.output import *
+from pywebio.session import local
+
+message_id = ""
+
+
+def status():
+    # Quick connectivity check: send a "hello" prompt before the chat starts.
+    try:
+        req = usesless.Completion.create(prompt="hello", parentMessageId=message_id)
+        print(f"Answer: {req['text']}")
+        put_success(f"Answer: {req['text']}", scope="body")
+    except Exception:
+        put_error("Program Error", scope="body")
+
+
+def ask(prompt):
+    # Send the prompt and remember the reply id so the conversation keeps its context.
+    req = usesless.Completion.create(prompt=prompt, parentMessageId=local.message_id)
+    rp = req['text']
+    local.message_id = req["id"]
+    print("AI:\n" + rp)
+    local.conversation.extend([
+        {"role": "user", "content": prompt},
+        {"role": "assistant", "content": rp},
+    ])
+    print(local.conversation)
+    return rp
+
+
+def msg():
+    # Main loop: read a question, echo it, query the model, render the answer.
+    while True:
+        text = input_group("You:", [textarea('You:', name='text', rows=3, placeholder='Please enter your question')])
+        if not text:
+            break
+        if not text["text"]:
+            continue
+        time.sleep(0.5)
+        put_code("You:" + text["text"], scope="body")
+        print("Question:" + text["text"])
+        with use_scope('foot'):
+            put_loading(color="info")
+            rp = ask(text["text"])
+        clear(scope="foot")
+        time.sleep(0.5)
+        put_markdown("Bot:\n" + rp, scope="body")
+        time.sleep(0.7)
+
+
+@config(title="AIchat", theme="dark")
+def main():
+    put_scope("heads")
+    with use_scope('heads'):
+        put_html("<h1><center>AI Chat</center></h1>")
+    put_scope("body")
+    put_scope("foot")
+    status()
+    local.conversation = []
+    local.message_id = ""
+    msg()
+
+
+print("Click link to chat page")
+start_server(main, port=8099, allowed_origins="*", auto_open_webbrowser=True, debug=True)
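The script threads the conversation by passing each reply's `id` back as the next request's `parentMessageId`, which is what `ask()` does above. A stripped-down console sketch of the same pattern, using only the `text` and `id` response fields the GUI already relies on:

```python
# Console sketch of the conversation threading used by ask() above.
from gpt4free import usesless

message_id = ""  # an empty id starts a fresh conversation
while True:
    prompt = input("You: ")
    if not prompt:
        break
    req = usesless.Completion.create(prompt=prompt, parentMessageId=message_id)
    message_id = req["id"]  # thread the next turn onto this reply
    print("Bot:", req["text"])
```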
diff --git a/gui/streamlit_chat_app.py b/gui/streamlit_chat_app.py
index 6abc9caf..af3969e6 100644
--- a/gui/streamlit_chat_app.py
+++ b/gui/streamlit_chat_app.py
@@ -1,4 +1,5 @@
import atexit
+import Levenshtein
import os
import sys
@@ -37,6 +38,17 @@ def save_conversations(conversations, current_conversation):
    os.replace(temp_conversations_file, conversations_file)
+
+def delete_conversation(conversations, current_conversation):
+    # Remove the conversation and persist the updated list the same way
+    # save_conversations does: write to a temp file, then swap it into place.
+    conversations.remove(current_conversation)
+
+    temp_conversations_file = "temp_" + conversations_file
+    with open(temp_conversations_file, "wb") as f:
+        pickle.dump(conversations, f)
+
+    os.replace(temp_conversations_file, conversations_file)
+
def exit_handler():
    print("Exiting, saving data...")
@@ -64,26 +76,29 @@ if 'input_field_key' not in st.session_state:
if 'query_method' not in st.session_state:
    st.session_state['query_method'] = query
+if 'search_query' not in st.session_state:
+    st.session_state['search_query'] = ''
+
# Initialize new conversation
if 'current_conversation' not in st.session_state or st.session_state['current_conversation'] is None:
    st.session_state['current_conversation'] = {'user_inputs': [], 'generated_responses': []}
input_placeholder = st.empty()
user_input = input_placeholder.text_input(
-    'You:', value=st.session_state['input_text'], key=f'input_text_{st.session_state["input_field_key"]}'
+    'You:', value=st.session_state['input_text'], key='input_text_-1'  # was: f'input_text_{st.session_state["input_field_key"]}'
)
submit_button = st.button("Submit")
-
if (user_input and user_input != st.session_state['input_text']) or submit_button:
    output = query(user_input, st.session_state['query_method'])
    escaped_output = output.encode('utf-8').decode('unicode-escape')
-    st.session_state.current_conversation['user_inputs'].append(user_input)
+    st.session_state['current_conversation']['user_inputs'].append(user_input)
    st.session_state.current_conversation['generated_responses'].append(escaped_output)
    save_conversations(st.session_state.conversations, st.session_state.current_conversation)
    st.session_state['input_text'] = ''
+    st.session_state['input_field_key'] += 1  # Increment key value for new widget
    user_input = input_placeholder.text_input(
        'You:', value=st.session_state['input_text'], key=f'input_text_{st.session_state["input_field_key"]}'
    )  # Clear the input field
@@ -92,27 +107,50 @@ if (user_input and user_input != st.session_state['input_text']) or submit_butto
if st.sidebar.button("New Conversation"):
    st.session_state['selected_conversation'] = None
    st.session_state['current_conversation'] = {'user_inputs': [], 'generated_responses': []}
-    st.session_state['input_field_key'] += 1
-
-st.session_state['query_method'] = st.sidebar.selectbox("Select API:", options=avail_query_methods, index=0)
+    st.session_state['input_field_key'] += 1  # Increment key value for new widget
+    st.session_state['query_method'] = st.sidebar.selectbox("Select API:", options=avail_query_methods, index=0)

# Proxy
st.session_state['proxy'] = st.sidebar.text_input("Proxy: ")

-# Sidebar
-st.sidebar.header("Conversation History")
+# Searchbar
+search_query = st.sidebar.text_input("Search Conversations:", value=st.session_state.get('search_query', ''), key='search')

-for idx, conversation in enumerate(st.session_state.conversations):
-    if st.sidebar.button(f"Conversation {idx + 1}: {conversation['user_inputs'][0]}", key=f"sidebar_btn_{idx}"):
-        st.session_state['selected_conversation'] = idx
-        st.session_state['current_conversation'] = st.session_state.conversations[idx]
+if search_query:
+    # Keep only conversations whose first user input contains the query,
+    # then rank them by edit distance to the query.
+    filtered_conversations = [
+        conversation
+        for conversation in st.session_state.conversations
+        if search_query in conversation['user_inputs'][0]
+    ]
+    conversations = sorted(
+        filtered_conversations,
+        key=lambda conversation: Levenshtein.distance(search_query, conversation['user_inputs'][0]),
+    )
+    sidebar_header = f"Search Results ({len(conversations)})"
+else:
+    conversations = st.session_state.conversations
+    sidebar_header = "Conversation History"
+
+# Sidebar
+st.sidebar.header(sidebar_header)
+sidebar_col1, sidebar_col2 = st.sidebar.columns([5, 1])
+for idx, conversation in enumerate(conversations):
+    if sidebar_col1.button(f"Conversation {idx + 1}: {conversation['user_inputs'][0]}", key=f"sidebar_btn_{idx}"):
+        st.session_state['selected_conversation'] = idx
+        st.session_state['current_conversation'] = conversation
+    if sidebar_col2.button('🗑️', key=f"sidebar_btn_delete_{idx}"):
+        if st.session_state['selected_conversation'] == idx:
+            st.session_state['selected_conversation'] = None
+            st.session_state['current_conversation'] = {'user_inputs': [], 'generated_responses': []}
+        delete_conversation(conversations, conversation)
+        st.experimental_rerun()

if st.session_state['selected_conversation'] is not None:
-    conversation_to_display = st.session_state.conversations[st.session_state['selected_conversation']]
+    conversation_to_display = conversations[st.session_state['selected_conversation']]
else:
    conversation_to_display = st.session_state.current_conversation

if conversation_to_display['generated_responses']:
    for i in range(len(conversation_to_display['generated_responses']) - 1, -1, -1):
        message(conversation_to_display["generated_responses"][i], key=f"display_generated_{i}")
-        message(conversation_to_display['user_inputs'][i], is_user=True, key=f"display_user_{i}")
+        message(conversation_to_display['user_inputs'][i], is_user=True, key=f"display_user_{i}")
\ No newline at end of file
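The sidebar search above filters conversations whose first user input contains the query and then ranks them by Levenshtein (edit) distance. A small self-contained sketch of that ranking, assuming the `Levenshtein` package imported at the top of `streamlit_chat_app.py` (the sample data below is made up for illustration):

```python
import Levenshtein

# Hypothetical sample data standing in for st.session_state.conversations.
conversations = [
    {"user_inputs": ["how do I install streamlit?"]},
    {"user_inputs": ["install pywebio on windows"]},
    {"user_inputs": ["what is gpt4free?"]},
]
search_query = "install"

matches = [c for c in conversations if search_query in c["user_inputs"][0]]
ranked = sorted(matches, key=lambda c: Levenshtein.distance(search_query, c["user_inputs"][0]))
for c in ranked:
    print(c["user_inputs"][0])  # the closest match to the query comes first
```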