author     H Lohaus <hlohaus@users.noreply.github.com>   2024-02-29 16:26:47 +0100
committer  GitHub <noreply@github.com>                   2024-02-29 16:26:47 +0100
commit     b10516a5121567aaef2f5d5c32e5c005d36ba82b (patch)
tree       552533c44ebdd63263395b492bf78246ae56e1c5 /g4f/gui/client/js/chat.v1.js
parent     Update OpenaiChat.py (diff)
parent     Ignore empty auth header in api (diff)
Diffstat (limited to 'g4f/gui/client/js/chat.v1.js')
-rw-r--r--   g4f/gui/client/js/chat.v1.js   91
1 file changed, 62 insertions, 29 deletions
diff --git a/g4f/gui/client/js/chat.v1.js b/g4f/gui/client/js/chat.v1.js
index 8b065be2..ff486c6e 100644
--- a/g4f/gui/client/js/chat.v1.js
+++ b/g4f/gui/client/js/chat.v1.js
@@ -5,10 +5,12 @@
 const message_input = document.getElementById(`message-input`);
 const box_conversations = document.querySelector(`.top`);
 const stop_generating = document.querySelector(`.stop_generating`);
 const regenerate = document.querySelector(`.regenerate`);
-const send_button = document.querySelector(`#send-button`);
-const imageInput = document.querySelector('#image');
-const cameraInput = document.querySelector('#camera');
-const fileInput = document.querySelector('#file');
+const send_button = document.getElementById("send-button");
+const imageInput = document.getElementById("image");
+const cameraInput = document.getElementById("camera");
+const fileInput = document.getElementById("file");
+const inputCount = document.getElementById("input-count")
+const modelSelect = document.getElementById("model");
 
 let prompt_lock = false;
@@ -75,6 +77,7 @@ const handle_ask = async () => {
     if (message.length > 0) {
         message_input.value = '';
         prompt_lock = true;
+        count_input()
         await add_conversation(window.conversation_id, message);
         if ("text" in fileInput.dataset) {
             message += '\n```' + fileInput.dataset.type + '\n';
@@ -89,6 +92,7 @@
     if (input.files.length > 0) imageInput.dataset.src = URL.createObjectURL(input.files[0]);
     else delete imageInput.dataset.src
+    model = modelSelect.options[modelSelect.selectedIndex].value
 
     message_box.innerHTML += `
             <div class="message" data-index="${message_index}">
                 <div class="user">
@@ -97,11 +101,14 @@
                     <i class="fa-regular fa-phone-arrow-up-right"></i>
                 </div>
                 <div class="content" id="user_${token}">
+                    <div class="content_inner">
                     ${markdown_render(message)}
                     ${imageInput.dataset.src ?
                         '<img src="' + imageInput.dataset.src + '" alt="Image upload">'
                         : ''
                     }
+                    </div>
+                    <div class="count">${count_words_and_tokens(message, model)}</div>
                 </div>
             </div>
         `;
@@ -120,19 +127,25 @@ const remove_cancel_button = async () => {
     }, 300);
 };
 
-const filter_messages = (messages) => {
+const filter_messages = (messages, filter_last_message = true) => {
     // Removes none user messages at end
-    let last_message;
-    while (last_message = messages.pop()) {
-        if (last_message["role"] == "user") {
-            messages.push(last_message);
-            break;
+    if (filter_last_message) {
+        let last_message;
+        while (last_message = messages.pop()) {
+            if (last_message["role"] == "user") {
+                messages.push(last_message);
+                break;
+            }
         }
     }
 
     // Remove history, if it is selected
     if (document.getElementById('history')?.checked) {
-        messages = [messages[messages.length-1]];
+        if (filter_last_message) {
+            messages = [messages.pop()];
+        } else {
+            messages = [messages.pop(), messages.pop()];
+        }
     }
 
     let new_messages = [];
@@ -165,7 +178,6 @@ const ask_gpt = async () => {
     jailbreak = document.getElementById("jailbreak");
     provider = document.getElementById("provider");
-    model = document.getElementById("model");
 
     window.text = '';
 
     stop_generating.classList.remove(`stop_generating-hidden`);
@@ -188,11 +200,13 @@
             <div class="content" id="gpt_${window.token}">
                 <div class="provider"></div>
                 <div class="content_inner"><span id="cursor"></span></div>
+                <div class="count"></div>
             </div>
         </div>
     `;
 
     content = document.getElementById(`gpt_${window.token}`);
     content_inner = content.querySelector('.content_inner');
+    content_count = content.querySelector('.count');
     message_box.scrollTop = message_box.scrollHeight;
     window.scrollTo(0, 0);
@@ -200,7 +214,7 @@
     let body = JSON.stringify({
         id: window.token,
         conversation_id: window.conversation_id,
-        model: model.options[model.selectedIndex].value,
+        model: modelSelect.options[modelSelect.selectedIndex].value,
         jailbreak: jailbreak.options[jailbreak.selectedIndex].value,
         web_search: document.getElementById(`switch`).checked,
         provider: provider.options[provider.selectedIndex].value,
@@ -270,6 +284,7 @@
                 html = html.substring(0, lastIndex) + '<span id="cursor"></span>' + lastElement;
             }
             content_inner.innerHTML = html;
+            content_count.innerText = count_words_and_tokens(text, provider?.model);
             highlight(content_inner);
         }
@@ -397,13 +412,13 @@ const load_conversation = async (conversation_id) => {
     let messages = await get_messages(conversation_id);
 
     let elements = "";
+    let last_model = null;
     for (i in messages) {
         let item = messages[i];
+        last_model = item?.provider?.model;
         let next_i = parseInt(i) + 1;
         let next_provider = item.provider ? item.provider : (messages.length > next_i ? messages[next_i].provider : null);
-        let tokens_count = next_provider?.model ? count_tokens(next_provider.model, item.content) : "";
-        let append_count = tokens_count ? `, ${tokens_count} tokens` : "";
-        let words_count = `(${count_words(item.content)} words${append_count})`
+
         let provider_link = item?.provider?.name ? `<a href="${item?.provider?.url}" target="_blank">${item.provider.name}</a>` : "";
         let provider = provider_link ? `
             <div class="provider">
@@ -424,17 +439,18 @@ const load_conversation = async (conversation_id) => {
                 <div class="content">
                     ${provider}
                     <div class="content_inner">${markdown_render(item.content)}</div>
-                    <div class="count">${words_count}</div>
+                    <div class="count">${count_words_and_tokens(item.content, next_provider?.model)}</div>
                 </div>
             </div>
         `;
     }
 
-    const filtered = filter_messages(messages);
+    const filtered = filter_messages(messages, false);
     if (filtered.length > 0) {
-        let count_total = GPTTokenizer_cl100k_base?.encodeChat(filtered, "gpt-3.5-turbo").length
+        last_model = last_model?.startsWith("gpt-4") ? "gpt-4" : "gpt-3.5-turbo"
+        let count_total = GPTTokenizer_cl100k_base?.encodeChat(filtered, last_model).length
         if (count_total > 0) {
-            elements += `<div class="count_total">(${count_total} tokens used)</div>`;
+            elements += `<div class="count_total">(${count_total} tokens total)</div>`;
         }
     }
@@ -467,6 +483,11 @@ function count_tokens(model, text) {
     }
 }
 
+function count_words_and_tokens(text, model) {
+    const tokens_count = model ? `, ${count_tokens(model, text)} tokens` : "";
+    return `(${count_words(text)} words${tokens_count})`
+}
+
 const get_conversation = async (conversation_id) => {
     let conversation = await JSON.parse(
         localStorage.getItem(`conversation:${conversation_id}`)
@@ -703,6 +724,16 @@ colorThemes.forEach((themeOption) => {
     });
 });
 
+const count_input = async () => {
+    if (message_input.value) {
+        model = modelSelect.options[modelSelect.selectedIndex].value;
+        inputCount.innerText = count_words_and_tokens(message_input.value, model);
+    } else {
+        inputCount.innerHTML = " "
+    }
+};
+message_input.addEventListener("keyup", count_input);
+
 window.onload = async () => {
     setTheme();
 
@@ -713,18 +744,21 @@ window.onload = async () => {
         }
     }
 
-    await setTimeout(() => {
-        load_conversations();
-    }, 1);
+    count_input();
 
     if (/\/chat\/.+/.test(window.location.href)) {
-        await load_conversation(window.conversation_id);
+        load_conversation(window.conversation_id);
     } else {
-        await say_hello()
+        say_hello()
     }
-
-    message_input.addEventListener(`keydown`, async (evt) => {
+
+    setTimeout(() => {
+        load_conversations();
+    }, 1);
+
+    message_input.addEventListener("keydown", async (evt) => {
         if (prompt_lock) return;
+
        if (evt.keyCode === 13 && !evt.shiftKey) {
            evt.preventDefault();
            console.log("pressed enter");
@@ -768,12 +802,11 @@ observer.observe(message_input, { attributes: true });
 (async () => {
     response = await fetch('/backend-api/v2/models')
     models = await response.json()
-    let select = document.getElementById('model');
 
     for (model of models) {
         let option = document.createElement('option');
         option.value = option.text = model;
-        select.appendChild(option);
+        modelSelect.appendChild(option);
     }
 
     response = await fetch('/backend-api/v2/providers')
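Note on the helpers introduced above: count_words_and_tokens() simply combines the file's existing count_words() and count_tokens() utilities into one display string, and the new count_input() handler writes that string into the #input-count element on every keyup. The sketch below is a minimal, self-contained approximation for trying the counting logic outside the GUI; the whitespace-based count_words() and the model-agnostic count_tokens() are assumptions for illustration, not the project's exact implementations (the project's count_tokens(model, text) presumably selects a tokenizer based on the model name).

    // Sketch only: approximates the helpers touched by this commit.
    // count_words() here is a plain whitespace split (assumption); the GUI's
    // own implementation may differ. count_tokens() ignores the model and
    // falls back to 0 when the gpt-tokenizer bundle is not loaded.
    function count_words(text) {
        return text.trim().split(/\s+/).filter(Boolean).length;
    }

    function count_tokens(model, text) {
        if (typeof GPTTokenizer_cl100k_base !== "undefined") {
            // Same global the diff relies on for encodeChat().
            return GPTTokenizer_cl100k_base.encode(text).length;
        }
        return 0;
    }

    // Mirrors the helper added in the diff: the token part is appended
    // only when a model is known for the message.
    function count_words_and_tokens(text, model) {
        const tokens_count = model ? `, ${count_tokens(model, text)} tokens` : "";
        return `(${count_words(text)} words${tokens_count})`;
    }

    // Example: count_words_and_tokens("Hello world", "gpt-3.5-turbo")
    // -> "(2 words, 2 tokens)" with the cl100k_base tokenizer loaded,
    //    "(2 words)" when no model is passed.

In the diff itself this string ends up in three places: under each rendered message, under the streaming response (via content_count), and under the input box while typing.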