author | H Lohaus <hlohaus@users.noreply.github.com> | 2024-11-19 11:04:01 +0100
---|---|---
committer | GitHub <noreply@github.com> | 2024-11-19 11:04:01 +0100
commit | 23b3458216c86dbc8815e42379d32a8887ccd94c (patch) |
tree | 1dd2ea30923cb356c1e57d1a3f7816cc93060c90 /g4f |
parent | Merge pull request #2372 from hlohaus/info (diff) |
parent | Add image upload to Copilot provider (diff) |
Diffstat (limited to 'g4f')
-rw-r--r-- | g4f/Provider/Copilot.py | 41
-rw-r--r-- | g4f/Provider/airforce/AirforceChat.py | 11
-rw-r--r-- | g4f/Provider/airforce/AirforceImage.py | 32
-rw-r--r-- | g4f/Provider/needs_auth/CopilotAccount.py | 3
-rw-r--r-- | g4f/Provider/needs_auth/OpenaiChat.py | 2
-rw-r--r-- | g4f/gui/client/index.html | 9
-rw-r--r-- | g4f/gui/client/static/css/style.css | 32
-rw-r--r-- | g4f/gui/client/static/js/chat.v1.js | 234
8 files changed, 199 insertions, 165 deletions
diff --git a/g4f/Provider/Copilot.py b/g4f/Provider/Copilot.py
index f10202bf..6e64c714 100644
--- a/g4f/Provider/Copilot.py
+++ b/g4f/Provider/Copilot.py
@@ -23,9 +23,10 @@ except ImportError:
 
 from .base_provider import AbstractProvider, BaseConversation
 from .helper import format_prompt
-from ..typing import CreateResult, Messages
+from ..typing import CreateResult, Messages, ImageType
 from ..errors import MissingRequirementsError
 from ..requests.raise_for_status import raise_for_status
+from ..image import to_bytes, is_accepted_format
 from .. import debug
 
 class Conversation(BaseConversation):
@@ -43,6 +44,7 @@ class Copilot(AbstractProvider):
     url = "https://copilot.microsoft.com"
     working = True
     supports_stream = True
+    default_model = "Copilot"
 
     websocket_url = "wss://copilot.microsoft.com/c/api/chat?api-version=2"
     conversation_url = f"{url}/c/api/conversations"
@@ -55,6 +57,7 @@ class Copilot(AbstractProvider):
         stream: bool = False,
         proxy: str = None,
         timeout: int = 900,
+        image: ImageType = None,
         conversation: Conversation = None,
         return_conversation: bool = False,
         **kwargs
@@ -66,7 +69,7 @@ class Copilot(AbstractProvider):
         access_token = None
         headers = None
         cookies = conversation.cookie_jar if conversation is not None else None
-        if cls.needs_auth:
+        if cls.needs_auth or image is not None:
             if conversation is None or conversation.access_token is None:
                 access_token, cookies = asyncio.run(cls.get_access_token_and_cookies(proxy))
             else:
@@ -98,34 +101,48 @@ class Copilot(AbstractProvider):
                 if debug.logging:
                     print(f"Copilot: Use conversation: {conversation_id}")
 
+            images = []
+            if image is not None:
+                data = to_bytes(image)
+                response = session.post(
+                    "https://copilot.microsoft.com/c/api/attachments",
+                    headers={"content-type": is_accepted_format(data)},
+                    data=data
+                )
+                raise_for_status(response)
+                images.append({"type":"image", "url": response.json().get("url")})
+
             wss = session.ws_connect(cls.websocket_url)
             wss.send(json.dumps({
                 "event": "send",
                 "conversationId": conversation_id,
-                "content": [{
+                "content": [*images, {
                     "type": "text",
                     "text": prompt,
                 }],
                 "mode": "chat"
             }).encode(), CurlWsFlag.TEXT)
+
+            is_started = False
+            msg = None
             while True:
                 try:
-                    msg = json.loads(wss.recv()[0])
+                    msg = wss.recv()[0]
+                    msg = json.loads(msg)
                 except:
                     break
                 if msg.get("event") == "appendText":
                     yield msg.get("text")
                 elif msg.get("event") in ["done", "partCompleted"]:
                     break
+            if not is_started:
+                raise RuntimeError(f"Last message: {msg}")
 
     @classmethod
     async def get_access_token_and_cookies(cls, proxy: str = None):
         if not has_nodriver:
             raise MissingRequirementsError('Install "nodriver" package | pip install -U nodriver')
-        if has_platformdirs:
-            user_data_dir = user_config_dir("g4f-nodriver")
-        else:
-            user_data_dir = None
+        user_data_dir = user_config_dir("g4f-nodriver") if has_platformdirs else None
         if debug.logging:
             print(f"Copilot: Open nodriver with user_dir: {user_data_dir}")
         browser = await nodriver.start(
@@ -133,7 +150,8 @@ class Copilot(AbstractProvider):
             browser_args=None if proxy is None else [f"--proxy-server={proxy}"],
         )
         page = await browser.get(cls.url)
-        while True:
+        access_token = None
+        while access_token is None:
             access_token = await page.evaluate("""
                 (() => {
                     for (var i = 0; i < localStorage.length; i++) {
@@ -146,9 +164,8 @@ class Copilot(AbstractProvider):
                     }
                 })()
             """)
-            if access_token:
-                break
-            asyncio.sleep(1)
+            if access_token is None:
+                asyncio.sleep(1)
         cookies = {}
         for c in await page.send(nodriver.cdp.network.get_cookies([cls.url])):
             cookies[c.name] = c.value
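The hunk above adds an `image` parameter to `Copilot.create_completion`: the image is converted with `to_bytes`, posted to the attachments endpoint, and prepended to the websocket message content. A minimal, hypothetical usage sketch (not part of this commit; the file path and prompt are made up, and it assumes `Copilot` is exported from `g4f.Provider` as in this tree):

```python
# Hypothetical sketch of the new image upload path in the Copilot provider.
from g4f.Provider import Copilot

with open("screenshot.png", "rb") as f:  # hypothetical local image file
    image_data = f.read()

# Passing image forces the authenticated path (needs_auth or image is not None),
# uploads the attachment, then streams text chunks from the websocket.
for chunk in Copilot.create_completion(
    model="Copilot",
    messages=[{"role": "user", "content": "Describe this image."}],
    image=image_data,
):
    if isinstance(chunk, str):
        print(chunk, end="")
```

Raw bytes are the safest input here since the diff converts via `to_bytes`; other `ImageType` values (a path or PIL image) should behave the same way, but that is an assumption, not something this commit demonstrates.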
diff --git a/g4f/Provider/airforce/AirforceChat.py b/g4f/Provider/airforce/AirforceChat.py
index e94dd0a8..1efe0026 100644
--- a/g4f/Provider/airforce/AirforceChat.py
+++ b/g4f/Provider/airforce/AirforceChat.py
@@ -4,6 +4,7 @@ import json
 import requests
 from aiohttp import ClientSession
 from typing import List
+import logging
 
 from ...typing import AsyncResult, Messages
 from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
@@ -54,9 +55,13 @@ class AirforceChat(AsyncGeneratorProvider, ProviderModelMixin):
     @classmethod
     def get_models(cls) -> list:
         if not cls.models:
-            response = requests.get('https://api.airforce/models')
-            data = response.json()
-            cls.models = [model['id'] for model in data['data']]
+            try:
+                response = requests.get('https://api.airforce/models', verify=False)
+                data = response.json()
+                cls.models = [model['id'] for model in data['data']]
+            except Exception as e:
+                logging.exception(e)
+                cls.models = [cls.default_model]
 
     model_aliases = {
         # openchat
diff --git a/g4f/Provider/airforce/AirforceImage.py b/g4f/Provider/airforce/AirforceImage.py
index b74bc364..a5bd113f 100644
--- a/g4f/Provider/airforce/AirforceImage.py
+++ b/g4f/Provider/airforce/AirforceImage.py
@@ -4,39 +4,37 @@ from aiohttp import ClientSession
 from urllib.parse import urlencode
 import random
 import requests
+import logging
 
 from ...typing import AsyncResult, Messages
 from ...image import ImageResponse
 from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
 
-
 class AirforceImage(AsyncGeneratorProvider, ProviderModelMixin):
     label = "Airforce Image"
-    #url = "https://api.airforce"
+    url = "https://api.airforce"
     api_endpoint = "https://api.airforce/imagine2"
-    #working = True
+    working = False
 
     default_model = 'flux'
-
-    response = requests.get('https://api.airforce/imagine/models')
-    data = response.json()
-
-    image_models = data
-
-    models = [*image_models, "stable-diffusion-xl-base", "stable-diffusion-xl-lightning", "Flux-1.1-Pro"]
-
+    additional_models = ["stable-diffusion-xl-base", "stable-diffusion-xl-lightning", "Flux-1.1-Pro"]
     model_aliases = {
         "sdxl": "stable-diffusion-xl-base",
         "sdxl": "stable-diffusion-xl-lightning",
        "flux-pro": "Flux-1.1-Pro",
     }
-    
+
     @classmethod
-    def get_model(cls, model: str) -> str:
-        if model in cls.models:
-            return model
-        else:
-            return cls.default_model
+    def get_models(cls) -> list:
+        if not cls.models:
+            try:
+                response = requests.get('https://api.airforce/imagine/models', verify=False)
+                response.raise_for_status()
+                cls.models = [*response.json(), *cls.additional_models]
+            except Exception as e:
+                logging.exception(e)
+                cls.models = [cls.default_model]
+        return cls.models
 
     @classmethod
     async def create_async_generator(
diff --git a/g4f/Provider/needs_auth/CopilotAccount.py b/g4f/Provider/needs_auth/CopilotAccount.py
index fa43867e..76e51278 100644
--- a/g4f/Provider/needs_auth/CopilotAccount.py
+++ b/g4f/Provider/needs_auth/CopilotAccount.py
@@ -5,4 +5,5 @@ from ..Copilot import Copilot
 class CopilotAccount(Copilot):
     needs_auth = True
     parent = "Copilot"
-    default_model = ""
\ No newline at end of file
+    default_model = "Copilot"
+    default_vision_model = default_model
\ No newline at end of file
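The Airforce hunks above move the remote model lookup out of import time and into `get_models()`, with a logged fallback to the default model when `api.airforce` is unreachable. A rough usage sketch (module path taken from the diff header; the returned list depends on the remote endpoint being reachable):

```python
# Rough sketch: the image model list is now fetched lazily and cached on the class.
from g4f.Provider.airforce.AirforceImage import AirforceImage

# First call requests https://api.airforce/imagine/models (verify=False); on any
# error it logs the exception and falls back to [AirforceImage.default_model].
models = AirforceImage.get_models()
print(models)  # remote models plus the hard-coded additional_models on success
```

Note that this commit also sets `working = False` on `AirforceImage`, so the provider itself is disabled, but `get_models()` remains callable.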
diff --git a/g4f/Provider/needs_auth/OpenaiChat.py b/g4f/Provider/needs_auth/OpenaiChat.py
index 13e15f1d..22264dd9 100644
--- a/g4f/Provider/needs_auth/OpenaiChat.py
+++ b/g4f/Provider/needs_auth/OpenaiChat.py
@@ -396,7 +396,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
                 f"{cls.url}/backend-anon/sentinel/chat-requirements"
                 if cls._api_key is None
                 else f"{cls.url}/backend-api/sentinel/chat-requirements",
-                json={"p": get_requirements_token(RequestConfig.proof_token)},
+                json={"p": get_requirements_token(RequestConfig.proof_token) if RequestConfig.proof_token else None},
                 headers=cls._headers
             ) as response:
                 cls._update_request_args(session)
diff --git a/g4f/gui/client/index.html b/g4f/gui/client/index.html
index 63e47b3f..b256b0be 100644
--- a/g4f/gui/client/index.html
+++ b/g4f/gui/client/index.html
@@ -72,12 +72,12 @@
                 </button>
                 <div class="info">
                     <i class="fa-brands fa-discord"></i>
-                    <span class="convo-title">discord ~ <a href="https://discord.gghttps://discord.gg/6yrm7H4B">discord.gg/6yrm7H4B</a>
+                    <span class="convo-title">discord ~ <a href="https://discord.gg/6yrm7H4B" target="_blank">discord.gg/6yrm7H4B</a>
                     </span>
                 </div>
                 <div class="info">
                     <i class="fa-brands fa-github"></i>
-                    <span class="convo-title">github ~ <a href="https://github.com/xtekky/gpt4free">@xtekky/gpt4free</a>
+                    <span class="convo-title">github ~ <a href="https://github.com/xtekky/gpt4free" target="_blank">@xtekky/gpt4free</a>
                     </span>
                 </div>
                 <div class="info">
@@ -87,7 +87,6 @@
                     </div>
                 </div>
                 <div class="images hidden">
-
                 </div>
                 <div class="settings hidden">
                     <div class="paper">
@@ -192,7 +191,7 @@
                         <i class="fa-regular fa-stop"></i>
                     </button>
                 </div>
-                <div class="regenerate regenerate-hidden">
+                <div class="regenerate">
                     <button id="regenerateButton">
                         <span>Regenerate</span>
                         <i class="fa-solid fa-rotate"></i>
@@ -263,4 +262,4 @@
         <i class="fa-solid fa-bars"></i>
     </div>
 </body>
-</html>
+</html>
\ No newline at end of file
diff --git a/g4f/gui/client/static/css/style.css b/g4f/gui/client/static/css/style.css
index 441e2042..3ee033ea 100644
--- a/g4f/gui/client/static/css/style.css
+++ b/g4f/gui/client/static/css/style.css
@@ -472,6 +472,11 @@ body {
     padding: 0 4px;
 }
 
+.stop_generating-hidden, .regenerate-hidden {
+    animation: hide_popup 0.4s;
+    display: none;
+}
+
 .stop_generating, .toolbar .regenerate {
     position: absolute;
     z-index: 1000000;
@@ -479,14 +484,6 @@ body {
     right: 0;
 }
 
-@media only screen and (min-width: 40em) {
-    .stop_generating, .toolbar .regenerate {
-        left: 50%;
-        transform: translateX(-50%);
-        right: auto;
-    }
-}
-
 .stop_generating button, .toolbar .regenerate button{
     backdrop-filter: blur(20px);
     -webkit-backdrop-filter: blur(20px);
@@ -503,6 +500,17 @@ body {
     animation: show_popup 0.4s;
 }
 
+@media only screen and (min-width: 40em) {
+    .stop_generating {
+        left: 50%;
+        transform: translateX(-50%);
+        right: auto;
+    }
+    .toolbar .regenerate {
+        right: 5px;
+    }
+}
+
 .toolbar .hide-input {
     background: transparent;
     border: none;
@@ -524,11 +532,6 @@ body {
     }
 }
 
-.stop_generating-hidden #cancelButton, .regenerate-hidden #regenerateButton {
-    animation: hide_popup 0.4s;
-    display: none;
-}
-
 .typing {
     position: absolute;
     top: -25px;
@@ -779,8 +782,7 @@ select {
     overflow: auto;
 }
 
-
-#cursor {
+.cursor {
     line-height: 17px;
     margin-left: 3px;
     -webkit-animation: blink 0.8s infinite;
diff --git a/g4f/gui/client/static/js/chat.v1.js b/g4f/gui/client/static/js/chat.v1.js
index 580cbf77..11306cdc 100644
--- a/g4f/gui/client/static/js/chat.v1.js
+++ b/g4f/gui/client/static/js/chat.v1.js
@@ -20,12 +20,13 @@ const settings = document.querySelector(".settings");
 const chat = document.querySelector(".conversation");
 const album = document.querySelector(".images");
 
-let prompt_lock = false;
-
-let content, content_inner, content_count = null;
-
 const optionElements = document.querySelectorAll(".settings input, .settings textarea, #model, #model2, #provider")
 
+let provider_storage = {};
+let message_storage = {};
+let controller_storage = {};
+let content_storage = {}
+
 messageInput.addEventListener("blur", () => {
     window.scrollTo(0, 0);
 });
@@ -98,13 +99,10 @@ const register_message_buttons = async () => {
         if (!("click" in el.dataset)) {
             el.dataset.click = "true";
             el.addEventListener("click", async () => {
-                if (prompt_lock) {
-                    return;
-                }
                 const message_el = el.parentElement.parentElement;
                 await remove_message(window.conversation_id, message_el.dataset.index);
-                await load_conversation(window.conversation_id, false);
-            })
+                await safe_load_conversation(window.conversation_id, false);
+            });
         }
     });
@@ -114,18 +112,16 @@ const register_message_buttons = async () => {
             el.addEventListener("click", async () => {
                 const message_el = el.parentElement.parentElement.parentElement;
                 const copyText = await get_message(window.conversation_id, message_el.dataset.index);
-
-            try {
-                if (!navigator.clipboard) {
-                    throw new Error("navigator.clipboard: Clipboard API unavailable.");
+                try {
+                    if (!navigator.clipboard) {
+                        throw new Error("navigator.clipboard: Clipboard API unavailable.");
+                    }
+                    await navigator.clipboard.writeText(copyText);
+                } catch (e) {
+                    console.error(e);
+                    console.error("Clipboard API writeText() failed! Fallback to document.exec(\"copy\")...");
+                    fallback_clipboard(copyText);
                 }
-                await navigator.clipboard.writeText(copyText);
-            } catch (e) {
-                console.error(e);
-                console.error("Clipboard API writeText() failed! Fallback to document.exec(\"copy\")...");
-                fallback_clipboard(copyText);
-            }
-
                 el.classList.add("clicked");
                 setTimeout(() => el.classList.remove("clicked"), 1000);
             })
@@ -217,10 +213,8 @@ const register_message_buttons = async () => {
                 const message_el = el.parentElement.parentElement.parentElement;
                 el.classList.add("clicked");
                 setTimeout(() => el.classList.remove("clicked"), 1000);
-                prompt_lock = true;
                 await hide_message(window.conversation_id, message_el.dataset.index);
-                window.token = message_id();
-                await ask_gpt(message_el.dataset.index);
+                await ask_gpt(message_el.dataset.index, get_message_id());
             })
         }
     });
@@ -264,12 +258,11 @@ const handle_ask = async () => {
     messageInput.focus();
     window.scrollTo(0, 0);
 
-    message = messageInput.value
+    let message = messageInput.value;
     if (message.length <= 0) {
         return;
     }
     messageInput.value = "";
-    prompt_lock = true;
     count_input()
 
     await add_conversation(window.conversation_id, message);
@@ -279,7 +272,7 @@ const handle_ask = async () => {
         message += '\n```'
     }
     let message_index = await add_message(window.conversation_id, "user", message);
-    window.token = message_id();
+    let message_id = get_message_id();
 
     if (imageInput.dataset.src) URL.revokeObjectURL(imageInput.dataset.src);
     const input = imageInput && imageInput.files.length > 0 ? imageInput : cameraInput
@@ -293,7 +286,7 @@ const handle_ask = async () => {
                 <i class="fa-solid fa-xmark"></i>
                 <i class="fa-regular fa-phone-arrow-up-right"></i>
             </div>
-            <div class="content" id="user_${token}">
+            <div class="content" id="user_${message_id}">
                 <div class="content_inner">
                     ${markdown_render(message)}
                     ${imageInput.dataset.src
@@ -312,17 +305,38 @@ const handle_ask = async () => {
         </div>
     `;
     highlight(message_box);
-    await ask_gpt();
+    stop_generating.classList.remove("stop_generating-hidden");
+    await ask_gpt(-1, message_id);
 };
 
-const remove_cancel_button = async () => {
-    stop_generating.classList.add(`stop_generating-hiding`);
+async function remove_cancel_button() {
+    stop_generating.classList.add("stop_generating-hidden");
+}
+
+regenerate.addEventListener("click", async () => {
+    regenerate.classList.add("regenerate-hidden");
+    stop_generating.classList.remove("stop_generating-hidden");
+    await hide_message(window.conversation_id);
+    await ask_gpt(-1, get_message_id());
+});
 
-    setTimeout(() => {
-        stop_generating.classList.remove(`stop_generating-hiding`);
-        stop_generating.classList.add(`stop_generating-hidden`);
-    }, 300);
-};
+stop_generating.addEventListener("click", async () => {
+    stop_generating.classList.add("stop_generating-hidden");
+    regenerate.classList.remove("regenerate-hidden");
+    let key;
+    for (key in controller_storage) {
+        if (!controller_storage[key].signal.aborted) {
+            controller_storage[key].abort();
+            let message = message_storage[key];
+            if (message) {
+                content_storage[key].inner.innerHTML += " [aborted]";
+                message_storage[key] += " [aborted]";
+                console.log(`aborted ${window.conversation_id} #${key}`);
+            }
+        }
+    }
+    await load_conversation(window.conversation_id);
+});
 
 const prepare_messages = (messages, message_index = -1) => {
     // Removes none user messages at end
@@ -367,13 +381,13 @@ const prepare_messages = (messages, message_index = -1) => {
     return new_messages;
 }
 
-
-async function add_message_chunk(message) {
+async function add_message_chunk(message, message_index) {
+    content_map = content_storage[message_index];
     if (message.type == "conversation") {
         console.info("Conversation used:", message.conversation)
     } else if (message.type == "provider") {
-        window.provider_result = message.provider;
-        content.querySelector('.provider').innerHTML = `
+        provider_storage[message_index] = message.provider;
+        content_map.content.querySelector('.provider').innerHTML = `
             <a href="${message.provider.url}" target="_blank">
                 ${message.provider.label ? message.provider.label : message.provider.name}
             </a>
@@ -384,12 +398,12 @@ async function add_message_chunk(message) {
     } else if (message.type == "error") {
         window.error = message.error
         console.error(message.error);
-        content_inner.innerHTML += `<p><strong>An error occured:</strong> ${message.error}</p>`;
+        content_map.inner.innerHTML += `<p><strong>An error occured:</strong> ${message.error}</p>`;
     } else if (message.type == "preview") {
-        content_inner.innerHTML = markdown_render(message.preview);
+        content_map.inner.innerHTML = markdown_render(message.preview);
     } else if (message.type == "content") {
-        window.text += message.content;
-        html = markdown_render(window.text);
+        message_storage[message_index] += message.content;
+        html = markdown_render(message_storage[message_index]);
         let lastElement, lastIndex = null;
         for (element of ['</p>', '</code></pre>', '</p>\n</li>\n</ol>', '</li>\n</ol>', '</li>\n</ul>']) {
             const index = html.lastIndexOf(element)
@@ -399,11 +413,11 @@ async function add_message_chunk(message) {
             }
         }
         if (lastIndex) {
-            html = html.substring(0, lastIndex) + '<span id="cursor"></span>' + lastElement;
+            html = html.substring(0, lastIndex) + '<span class="cursor"></span>' + lastElement;
         }
-        content_inner.innerHTML = html;
-        content_count.innerText = count_words_and_tokens(text, window.provider_result?.model);
-        highlight(content_inner);
+        content_map.inner.innerHTML = html;
+        content_map.count.innerText = count_words_and_tokens(message_storage[message_index], provider_storage[message_index]?.model);
+        highlight(content_map.inner);
     }
     window.scrollTo(0, 0);
     if (message_box.scrollTop >= message_box.scrollHeight - message_box.clientHeight - 100) {
@@ -411,13 +425,6 @@ async function add_message_chunk(message) {
     }
 }
 
-// fileInput?.addEventListener("click", (e) => {
-//     if (window?.pywebview) {
-//         e.preventDefault();
-//         pywebview.api.choose_file();
-//     }
-// });
-
 cameraInput?.addEventListener("click", (e) => {
     if (window?.pywebview) {
         e.preventDefault();
@@ -432,44 +439,44 @@ imageInput?.addEventListener("click", (e) => {
     }
 });
 
-const ask_gpt = async (message_index = -1) => {
-    regenerate.classList.add(`regenerate-hidden`);
-    messages = await get_messages(window.conversation_id);
-    total_messages = messages.length;
+const ask_gpt = async (message_index = -1, message_id) => {
+    let messages = await get_messages(window.conversation_id);
+    let total_messages = messages.length;
     messages = prepare_messages(messages, message_index);
-
-    stop_generating.classList.remove(`stop_generating-hidden`);
+    message_index = total_messages
+    message_storage[message_index] = "";
+    stop_generating.classList.remove(".stop_generating-hidden");
 
     message_box.scrollTop = message_box.scrollHeight;
     window.scrollTo(0, 0);
 
-    el = message_box.querySelector('.count_total');
-    el ? el.parentElement.removeChild(el) : null;
+    let count_total = message_box.querySelector('.count_total');
+    count_total ? count_total.parentElement.removeChild(count_total) : null;
 
     message_box.innerHTML += `
-        <div class="message" data-index="${total_messages}">
+        <div class="message" data-index="${message_index}">
             <div class="assistant">
                 ${gpt_image}
                 <i class="fa-solid fa-xmark"></i>
                 <i class="fa-regular fa-phone-arrow-down-left"></i>
             </div>
-            <div class="content" id="gpt_${window.token}">
+            <div class="content" id="gpt_${message_id}">
                 <div class="provider"></div>
-                <div class="content_inner"><span id="cursor"></span></div>
+                <div class="content_inner"><span class="cursor"></span></div>
                 <div class="count"></div>
             </div>
         </div>
     `;
 
-    window.controller = new AbortController();
-    window.text = "";
-    window.error = null;
-    window.abort = false;
-    window.provider_result = null;
+    controller_storage[message_index] = new AbortController();
+    let error = false;
 
-    content = document.getElementById(`gpt_${window.token}`);
-    content_inner = content.querySelector('.content_inner');
-    content_count = content.querySelector('.count');
+    let content_el = document.getElementById(`gpt_${message_id}`)
+    let content_map = content_storage[message_index] = {
+        content: content_el,
+        inner: content_el.querySelector('.content_inner'),
+        count: content_el.querySelector('.count'),
+    }
 
     message_box.scrollTop = message_box.scrollHeight;
     window.scrollTo(0, 0);
@@ -478,8 +485,6 @@ const ask_gpt = async (message_index = -1) => {
         const file = input && input.files.length > 0 ? input.files[0] : null;
         const provider = providerSelect.options[providerSelect.selectedIndex].value;
         const auto_continue = document.getElementById("auto_continue")?.checked;
-        if (file && !provider)
-            provider = "Bing";
         let api_key = null;
         if (provider) {
             api_key = document.getElementById(`${provider}-api_key`)?.value || null;
@@ -487,7 +492,7 @@ const ask_gpt = async (message_index = -1) => {
                 api_key = document.querySelector(`.${provider}-api_key`)?.value || null;
         }
         await api("conversation", {
-            id: window.token,
+            id: message_id,
            conversation_id: window.conversation_id,
            model: get_selected_model(),
            web_search: document.getElementById("switch").checked,
@@ -495,11 +500,11 @@ const ask_gpt = async (message_index = -1) => {
            messages: messages,
            auto_continue: auto_continue,
            api_key: api_key
-        }, file);
+        }, file, message_index);
        if (!error) {
-            html = markdown_render(text);
-            content_inner.innerHTML = html;
-            highlight(content_inner);
+            html = markdown_render(message_storage[message_index]);
+            content_map.inner.innerHTML = html;
+            highlight(content_map.inner);
 
            if (imageInput) imageInput.value = "";
            if (cameraInput) cameraInput.value = "";
@@ -509,23 +514,23 @@ const ask_gpt = async (message_index = -1) => {
        console.error(e);
        if (e.name != "AbortError") {
            error = true;
-            content_inner.innerHTML += `<p><strong>An error occured:</strong> ${e}</p>`;
+            content_map.inner.innerHTML += `<p><strong>An error occured:</strong> ${e}</p>`;
        }
    }
-    if (!error && text) {
-        await add_message(window.conversation_id, "assistant", text, provider_result);
-        await load_conversation(window.conversation_id);
+    delete controller_storage[message_index];
+    if (!error && message_storage[message_index]) {
+        const message_provider = message_index in provider_storage ? provider_storage[message_index] : null;
+        await add_message(window.conversation_id, "assistant", message_storage[message_index], message_provider);
+        await safe_load_conversation(window.conversation_id);
    } else {
-        let cursorDiv = document.getElementById("cursor");
+        let cursorDiv = message_box.querySelector(".cursor");
        if (cursorDiv) cursorDiv.parentNode.removeChild(cursorDiv);
    }
    window.scrollTo(0, 0);
    message_box.scrollTop = message_box.scrollHeight;
    await remove_cancel_button();
    await register_message_buttons();
-    prompt_lock = false;
    await load_conversations();
-    regenerate.classList.remove("regenerate-hidden");
 };
 
 const clear_conversations = async () => {
@@ -645,6 +650,10 @@ const load_conversation = async (conversation_id, scroll=true) => {
     let conversation = await get_conversation(conversation_id);
     let messages = conversation?.items || [];
 
+    if (!conversation) {
+        return;
+    }
+
     if (systemPrompt) {
         systemPrompt.value = conversation.system || "";
     }
@@ -714,6 +723,19 @@ const load_conversation = async (conversation_id, scroll=true) => {
     }
 };
 
+async function safe_load_conversation(conversation_id, scroll=true) {
+    let is_running = false
+    for (const key in controller_storage) {
+        if (!controller_storage[key].signal.aborted) {
+            is_running = true;
+            break
+        }
+    }
+    if (!is_running) {
+        load_conversation(conversation_id, scroll);
+    }
+}
+
 async function get_conversation(conversation_id) {
     let conversation = await JSON.parse(
         appStorage.getItem(`conversation:${conversation_id}`)
@@ -759,6 +781,7 @@ async function save_system_message() {
 }
 
 const hide_message = async (conversation_id, message_index =- 1) => {
     const conversation = await get_conversation(conversation_id)
+    if (!conversation) return;
     message_index = message_index == -1 ? conversation.items.length - 1 : message_index
     const last_message = message_index in conversation.items ? conversation.items[message_index] : null;
     if (last_message !== null) {
@@ -795,6 +818,7 @@ const get_message = async (conversation_id, index) => {
 
 const add_message = async (conversation_id, role, content, provider) => {
     const conversation = await get_conversation(conversation_id);
+    if (!conversation) return;
     conversation.items.push({
         role: role,
         content: content,
@@ -852,23 +876,6 @@ const load_conversations = async () => {
     box_conversations.innerHTML += html;
 };
 
-document.getElementById("cancelButton").addEventListener("click", async () => {
-    window.controller.abort();
-    if (!window.abort) {
-        window.abort = true;
-        content_inner.innerHTML += " [aborted]";
-        if (window.text) window.text += " [aborted]";
-    }
-    console.log(`aborted ${window.conversation_id}`);
-});
-
-document.getElementById("regenerateButton").addEventListener("click", async () => {
-    prompt_lock = true;
-    await hide_message(window.conversation_id);
-    window.token = message_id();
-    await ask_gpt();
-});
-
 const hide_input = document.querySelector(".toolbar .hide-input");
 hide_input.addEventListener("click", async (e) => {
     const icon = hide_input.querySelector("i");
@@ -891,7 +898,7 @@ const uuid = () => {
     );
 };
 
-const message_id = () => {
+function get_message_id() {
     random_bytes = (Math.floor(Math.random() * 1338377565) + 2956589730).toString(
         2
     );
@@ -1124,6 +1131,7 @@ async function on_load() {
 }
 
 async function on_api() {
+    let prompt_lock = false;
     messageInput.addEventListener("keydown", async (evt) => {
         if (prompt_lock) return;
@@ -1132,6 +1140,8 @@ async function on_api() {
         if (evt.keyCode === 13 && !evt.shiftKey) {
             evt.preventDefault();
             console.log("pressed enter");
+            prompt_lock = true;
+            setTimeout(()=>prompt_lock=false, 3);
             await handle_ask();
         } else {
             messageInput.style.removeProperty("height");
@@ -1141,6 +1151,8 @@ async function on_api() {
     sendButton.addEventListener(`click`, async () => {
         console.log("clicked send");
         if (prompt_lock) return;
+        prompt_lock = true;
+        setTimeout(()=>prompt_lock=false, 3);
         await handle_ask();
     });
     messageInput.focus();
@@ -1289,7 +1301,7 @@ function get_selected_model() {
     }
 }
 
-async function api(ressource, args=null, file=null) {
+async function api(ressource, args=null, file=null, message_index=null) {
     if (window?.pywebview) {
         if (args !== null) {
             if (ressource == "models") {
@@ -1318,17 +1330,17 @@ async function api(ressource, args=null, file=null) {
         }
         response = await fetch(url, {
             method: 'POST',
-            signal: window.controller.signal,
+            signal: controller_storage[message_index].signal,
             headers: headers,
             body: body
         });
-        return read_response(response);
+        return read_response(response, message_index);
     }
     response = await fetch(url);
     return await response.json();
 }
 
-async function read_response(response) {
+async function read_response(response, message_index) {
     const reader = response.body.pipeThrough(new TextDecoderStream()).getReader();
     let buffer = ""
     while (true) {
@@ -1341,7 +1353,7 @@ async function read_response(response) {
             continue;
         }
         try {
-            add_message_chunk(JSON.parse(buffer + line))
+            add_message_chunk(JSON.parse(buffer + line), message_index);
             buffer = "";
         } catch {
             buffer += line
@@ -1382,7 +1394,7 @@ providerSelect.addEventListener("change", () => load_provider_models());
 function save_storage() {
     let filename = `chat ${new Date().toLocaleString()}.json`.replaceAll(":", "-");
     let data = {"options": {"g4f": ""}};
-    for (let i = 0; i < appStorage.length; i++){
+    for (let i = 0; i < appStorage.length; i++) {
         let key = appStorage.key(i);
         let item = appStorage.getItem(key);
         if (key.startsWith("conversation:")) {