author     t.me/xtekky <98614666+xtekky@users.noreply.github.com>  2023-05-01 10:18:45 +0200
committer  GitHub <noreply@github.com>  2023-05-01 10:18:45 +0200
commit     2ae26bfa7e10bd57f9b0d2e90e4c599c816359a4 (patch)
tree       428b2acca885c7671c451eff4976f1efa2430089 /gui/streamlit_chat_app.py
parent     Merge pull request #328 from editor-syntax/patch-3 (diff)
parent     add searchbar (diff)
Diffstat (limited to 'gui/streamlit_chat_app.py')
-rw-r--r--  gui/streamlit_chat_app.py  33
1 file changed, 26 insertions, 7 deletions
diff --git a/gui/streamlit_chat_app.py b/gui/streamlit_chat_app.py
index bca4b954..7eb5a656 100644
--- a/gui/streamlit_chat_app.py
+++ b/gui/streamlit_chat_app.py
@@ -1,4 +1,5 @@
 import atexit
+import Levenshtein
 import os
 import sys
 
@@ -64,13 +65,16 @@ if 'input_field_key' not in st.session_state:
 if 'query_method' not in st.session_state:
     st.session_state['query_method'] = query
 
+if 'search_query' not in st.session_state:
+    st.session_state['search_query'] = ''
+
 # Initialize new conversation
 if 'current_conversation' not in st.session_state or st.session_state['current_conversation'] is None:
     st.session_state['current_conversation'] = {'user_inputs': [], 'generated_responses': []}
 
 input_placeholder = st.empty()
 user_input = input_placeholder.text_input(
-    'You:', value=st.session_state['input_text'], key=f'input_text_{st.session_state["input_field_key"]}'
+    'You:', value=st.session_state['input_text'], key=f'input_text_-1'#{st.session_state["input_field_key"]}
 )
 
 submit_button = st.button("Submit")
@@ -79,7 +83,7 @@ if (user_input and user_input != st.session_state['input_text']) or submit_butto
 
     escaped_output = output.encode('utf-8').decode('unicode-escape')
 
-    st.session_state.current_conversation['user_inputs'].append(user_input)
+    st.session_state['current_conversation']['user_inputs'].append(user_input)
     st.session_state.current_conversation['generated_responses'].append(escaped_output)
     save_conversations(st.session_state.conversations, st.session_state.current_conversation)
     st.session_state['input_text'] = ''
@@ -98,20 +102,35 @@ if st.sidebar.button("New Conversation"):
 # Proxy
 st.session_state['proxy'] = st.sidebar.text_input("Proxy: ")
 
+# Searchbar
+search_query = st.sidebar.text_input("Search Conversations:", value=st.session_state.get('search_query', ''), key='search')
+
+if search_query:
+    filtered_conversations = []
+    for conversation in st.session_state.conversations:
+        if search_query in conversation['user_inputs'][0]:
+            filtered_conversations.append(conversation)
+
+    conversations = sorted(filtered_conversations, key=lambda c: Levenshtein.distance(search_query, c['user_inputs'][0]))
+    sidebar_header = f"Search Results ({len(conversations)})"
+else:
+    conversations = st.session_state.conversations
+    sidebar_header = "Conversation History"
+
 # Sidebar
-st.sidebar.header("Conversation History")
+st.sidebar.header(sidebar_header)
 
-for idx, conversation in enumerate(st.session_state.conversations):
+for idx, conversation in enumerate(conversations):
     if st.sidebar.button(f"Conversation {idx + 1}: {conversation['user_inputs'][0]}", key=f"sidebar_btn_{idx}"):
         st.session_state['selected_conversation'] = idx
-        st.session_state['current_conversation'] = st.session_state.conversations[idx]
+        st.session_state['current_conversation'] = conversation
 
 if st.session_state['selected_conversation'] is not None:
-    conversation_to_display = st.session_state.conversations[st.session_state['selected_conversation']]
+    conversation_to_display = conversations[st.session_state['selected_conversation']]
 else:
     conversation_to_display = st.session_state.current_conversation
 
 if conversation_to_display['generated_responses']:
     for i in range(len(conversation_to_display['generated_responses']) - 1, -1, -1):
         message(conversation_to_display["generated_responses"][i], key=f"display_generated_{i}")
-        message(conversation_to_display['user_inputs'][i], is_user=True, key=f"display_user_{i}")
+        message(conversation_to_display['user_inputs'][i], is_user=True, key=f"display_user_{i}")
\ No newline at end of file
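
The change above adds a sidebar search: conversations whose first user message contains the query string are kept, then ordered by Levenshtein edit distance to the query so the closest match is listed first. Below is a minimal sketch of that filter-and-rank logic in isolation (no Streamlit); the sample conversation data is hypothetical, and only the python-Levenshtein dependency already imported in the diff is assumed.

import Levenshtein

# Hypothetical sample data, shaped like the app's conversation dicts.
conversations = [
    {'user_inputs': ['how do I install python?'], 'generated_responses': ['...']},
    {'user_inputs': ['install python on windows'], 'generated_responses': ['...']},
    {'user_inputs': ['what is streamlit?'], 'generated_responses': ['...']},
]

search_query = 'install python'

# Keep conversations whose first user input contains the query
# (a case-sensitive substring check, as in the diff).
filtered = [c for c in conversations if search_query in c['user_inputs'][0]]

# Rank the matches by edit distance between the query and that first input,
# so the closest match would appear at the top of the sidebar.
ranked = sorted(filtered, key=lambda c: Levenshtein.distance(search_query, c['user_inputs'][0]))

for c in ranked:
    print(Levenshtein.distance(search_query, c['user_inputs'][0]), c['user_inputs'][0])
# 10 how do I install python?
# 11 install python on windows

Note that the filter only inspects the first user message of each conversation and is case-sensitive, so 'Install Python' would not match 'install python'; this mirrors the behaviour of the committed code.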