summary refs log tree commit diff stats
path: root/g4f/gui/client/static/js/chat.v1.js
diff options
context:
space:
mode:
Diffstat (limited to 'g4f/gui/client/static/js/chat.v1.js')
-rw-r--r-- g4f/gui/client/static/js/chat.v1.js | 12
1 file changed, 7 insertions, 5 deletions
diff --git a/g4f/gui/client/static/js/chat.v1.js b/g4f/gui/client/static/js/chat.v1.js
index 46f7e808..5eac4e77 100644
--- a/g4f/gui/client/static/js/chat.v1.js
+++ b/g4f/gui/client/static/js/chat.v1.js
@@ -1249,8 +1249,7 @@ function save_storage() {
}
}
-const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
-if (SpeechRecognition) {
+if (window.SpeechRecognition) {
const mircoIcon = microLabel.querySelector("i");
mircoIcon.classList.add("fa-microphone");
mircoIcon.classList.remove("fa-microphone-slash");
@@ -1272,12 +1271,15 @@ if (SpeechRecognition) {
recognition.onstart = function() {
microLabel.classList.add("recognition");
startValue = messageInput.value;
+ messageInput.placeholder = "";
lastDebounceTranscript = "";
timeoutHandle = window.setTimeout(may_stop, 10000);
};
recognition.onend = function() {
microLabel.classList.remove("recognition");
- messageInput.focus();
+ messageInput.value = messageInput.placeholder;
+ messageInput.placeholder = "Ask a question";
+ //messageInput.focus();
};
recognition.onresult = function(event) {
if (!event.results) {
@@ -1295,9 +1297,9 @@ if (SpeechRecognition) {
lastDebounceTranscript = transcript;
}
if (transcript) {
- messageInput.value = `${startValue ? startValue+"\n" : ""}${transcript.trim()}`;
+ messageInput.placeholder = `${startValue ? startValue+"\n" : ""}${transcript.trim()}`;
if (isFinal) {
- startValue = messageInput.value;
+ startValue = messageInput.placeholder;
}
messageInput.style.height = messageInput.scrollHeight + "px";
messageInput.scrollTop = messageInput.scrollHeight;