From d3f09ac5832ca228a6e3bcf7ea193fbf3fcad5d2 Mon Sep 17 00:00:00 2001
From: dail45
Date: Sat, 28 Oct 2023 16:32:11 +0500
Subject: [PATCH 1/2] increase backend conversation speed

---
 config.json       |  3 +++
 requirements.txt  |  3 ++-
 server/backend.py | 63 +++++++++++++++++++++++++++++++++++++++--------
 3 files changed, 58 insertions(+), 11 deletions(-)

diff --git a/config.json b/config.json
index 87580ad..528b576 100644
--- a/config.json
+++ b/config.json
@@ -4,6 +4,9 @@
     "port" : 1338,
     "debug": false
   },
+  "backend_config": {
+    "join_chunks": true
+  },
   "openai_key": "sk-...",
   "openai_api_base": "https://api.openai.com",
 
diff --git a/requirements.txt b/requirements.txt
index 5eaf725..1cb2bda 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,2 +1,3 @@
 flask
-requests
\ No newline at end of file
+requests
+orjson
\ No newline at end of file
diff --git a/server/backend.py b/server/backend.py
index 18c7f23..06b3685 100644
--- a/server/backend.py
+++ b/server/backend.py
@@ -1,11 +1,13 @@
 from json import dumps
-from time import time
+from time import time, sleep
 from flask import request
 from hashlib import sha256
 from datetime import datetime
 from requests import get
-from requests import post
-from json import loads
+from requests import post
+from orjson import loads
+import threading
+import weakref
 import os
 
 from server.config import special_instructions
@@ -17,6 +19,8 @@ def __init__(self, app, config: dict) -> None:
         self.openai_key = os.getenv("OPENAI_API_KEY") or config['openai_key']
         self.openai_api_base = os.getenv("OPENAI_API_BASE") or config['openai_api_base']
         self.proxy = config['proxy']
+        backend_config = config["backend_config"]
+        self.join_chunks = backend_config["join_chunks"]
         self.routes = {
             '/backend-api/v2/conversation': {
                 'function': self._conversation,
@@ -79,7 +83,7 @@ def _conversation(self):
             )
 
             if gpt_resp.status_code >= 400:
-                error_data =gpt_resp.json().get('error', {})
+                error_data = gpt_resp.json().get('error', {})
                 error_code = error_data.get('code', None)
                 error_message = error_data.get('message', "An error occurred")
                 return {
@@ -92,12 +96,12 @@ def _conversation(self):
             def stream():
                 for chunk in gpt_resp.iter_lines():
                     try:
-                        decoded_line = loads(chunk.decode("utf-8").split("data: ")[1])
-                        token = decoded_line["choices"][0]['delta'].get('content')
+                        if chunk:
+                            token = loads(chunk[6:])["choices"][0]["delta"].get("content")
+
+                            if token is not None:
+                                yield token
 
-                        if token != None:
-                            yield token
-
                     except GeneratorExit:
                         break
 
@@ -105,7 +109,46 @@ def stream():
                         print(e)
                        print(e.__traceback__.tb_next)
                         continue
-
+
+            if self.join_chunks:
+                storage, lock = [], threading.Lock()
+
+                def storage_reader(storage: list, lock: threading.Lock):
+                    try:
+                        while True:
+                            if len(storage) > 0:
+                                lock.acquire()
+                                joined_chunk = "".join(storage)
+                                storage.clear()
+                                lock.release()
+                                yield joined_chunk
+                            else:
+                                sleep(0.05)
+                    except TypeError:
+                        lock.release()
+                        lock.acquire()
+                        joined_chunk = "".join(storage[:-1])
+                        storage.clear()
+                        lock.release()
+                        yield joined_chunk
+
+                def storage_writer(storage: list, lock: threading.Lock, weakref_storage_reader, read_from_generator):
+                    for chunk in read_from_generator:
+                        if weakref_storage_reader():
+                            lock.acquire()
+                            storage.append(chunk)
+                            lock.release()
+                        else:
+                            break
+                    lock.acquire()
+                    storage.append(1)
+                    lock.release()
+
+                result_stream = storage_reader(storage, lock)
+                threading.Thread(target=storage_writer,
+                                 args=(storage, lock, weakref.ref(result_stream), stream())).start()
+                return self.app.response_class(result_stream,
+                                                mimetype='text/event-stream')
             return self.app.response_class(stream(), mimetype='text/event-stream')
 
         except Exception as e:

From 8c9e8099a9bc25b298dbe05cf5518eb68b0eb040 Mon Sep 17 00:00:00 2001
From: dail45
Date: Tue, 31 Oct 2023 06:59:46 +0500
Subject: [PATCH 2/2] add context menu for messages (copy, regenerate (for gpt only), delete), fix table display

---
 client/css/style.css |  53 ++++++-
 client/js/chat.js    | 370 ++++++++++++++++++++++++++++++++++++++-----
 2 files changed, 386 insertions(+), 37 deletions(-)

diff --git a/client/css/style.css b/client/css/style.css
index a1f6908..3db6262 100644
--- a/client/css/style.css
+++ b/client/css/style.css
@@ -190,13 +190,13 @@ body {
 }
 
 .message {
-    width: 100%;
     overflow-wrap: break-word;
     display: flex;
     gap: var(--section-gap);
     padding: var(--section-gap);
     padding-bottom: 0;
+    padding-top: 5px;
 }
 
 .message:last-child {
@@ -713,6 +713,57 @@ a:-webkit-any-link {
     width: 1px;
 }
 
+.content table {
+    border-collapse: collapse;
+}
+
+.content th, .content td {
+    border: 1px solid var(--colour-3);
+    padding: 2px;
+}
+
+.copy-button, .regenerate-button, .delete-button {
+    background-color: var(--colour-1);
+    border-radius: var(--border-radius-1);
+    backdrop-filter: blur(20px);
+}
+
+.copy-button svg, .regenerate-button svg, .delete-button svg {
+    width: 24px;
+    height: 24px;
+    fill: var(--colour-3);
+}
+
+.dropdown-icon {
+    width: 32px;
+    height: 10px;
+    color: var(--colour-3);
+}
+
+.dropdown-icon text {
+    fill: var(--colour-3);
+    font-size: 24px;
+    width: 32px;
+    height: 32px;
+}
+
+.dropdown-message-func-buttons {
+    float: right;
+    display: block;
+    padding-right: var(--section-gap);
+    padding-top: var(--section-gap);
+}
+
+.message-func-buttons {
+    display: none;
+    position: absolute;
+}
+
+.dropdown-message-func-buttons:hover .message-func-buttons {
+    display: block;
+    z-index: 2;
+}
+
 .color-picker>fieldset {
     border: 0;
diff --git a/client/js/chat.js b/client/js/chat.js
index cff0be5..e861609 100644
--- a/client/js/chat.js
+++ b/client/js/chat.js
@@ -77,17 +77,52 @@ const ask_gpt = async (message) => {
     stop_generating.classList.remove(`stop_generating-hidden`);
 
     message_box.innerHTML += `
-
-
- ${user_image} - +
+ -
- ${format(message)} +
+
+ ${user_image} + +
+
+ ${format(message)} +
`; - /* .replace(/(?:\r\n|\r|\n)/g, '
') */ message_box.scrollTop = message_box.scrollHeight; @@ -96,12 +131,62 @@ const ask_gpt = async (message) => { window.scrollTo(0, 0); message_box.innerHTML += ` -
-
- ${gpt_image} +
+ -
-
+
+
+ ${gpt_image} +
+
+
+
`; @@ -126,7 +211,7 @@ const ask_gpt = async (message) => { meta: { id: window.token, content: { - conversation: await get_conversation(window.conversation_id), + conversation: (await get_conversation(window.conversation_id)).map(({cmid, role, content}) => ({role, content})), internet_access: document.getElementById("switch").checked, content_type: "text", parts: [ @@ -139,7 +224,6 @@ const ask_gpt = async (message) => { }, }), }); - const reader = response.body.getReader(); while (true) { @@ -187,8 +271,8 @@ const ask_gpt = async (message) => { "An error occured, please reload / refresh cache and try again."; } - add_message(window.conversation_id, "user", message); - add_message(window.conversation_id, "assistant", text); + add_message(window.conversation_id, "user", message, "user_" + window.token); + add_message(window.conversation_id, "assistant", text, "gpt_" + window.token); message_box.scrollTop = message_box.scrollHeight; await remove_cancel_button(); @@ -197,7 +281,7 @@ const ask_gpt = async (message) => { await load_conversations(20, 0); window.scrollTo(0, 0); } catch (e) { - add_message(window.conversation_id, "user", message); + add_message(window.conversation_id, "user", message, "user_" + window.token); message_box.scrollTop = message_box.scrollHeight; await remove_cancel_button(); @@ -214,16 +298,120 @@ const ask_gpt = async (message) => { let error_message = `oops ! something went wrong, please try again / reload. [stacktrace in console]`; document.getElementById(`gpt_${window.token}`).innerHTML = error_message; - add_message(window.conversation_id, "assistant", error_message); + add_message(window.conversation_id, "assistant", error_message, "gpt_" + window.token); } else { document.getElementById(`gpt_${window.token}`).innerHTML += ` [aborted]`; - add_message(window.conversation_id, "assistant", text + ` [aborted]`); + add_message(window.conversation_id, "assistant", text + ` [aborted]`, "gpt_" + window.token); } window.scrollTo(0, 0); } }; +const ask_gpt_regenerate = async (contentElement) => { + let conversation = await JSON.parse( + localStorage.getItem(`conversation:${conversation_id}`) + ); + let itemIndex = conversation.items.findIndex((el) => { return el.cmid === contentElement.id; }); + let contents = conversation.items.slice(0, itemIndex); + contents = contents.map(({cmid, role, content}) => ({role, content})); + contentElement.innerHTML = `
`; + try { + window.controller = new AbortController(); + + jailbreak = document.getElementById("jailbreak"); + model = document.getElementById("model"); + prompt_lock = true; + window.text = ``; + + stop_generating.classList.remove(`stop_generating-hidden`); + const response = await fetch(`/backend-api/v2/conversation`, { + method: `POST`, + signal: window.controller.signal, + headers: { + "content-type": `application/json`, + accept: `text/event-stream`, + }, + body: JSON.stringify({ + conversation_id: window.conversation_id, + action: `_ask`, + model: model.options[model.selectedIndex].value, + jailbreak: jailbreak.options[jailbreak.selectedIndex].value, + meta: { + id: contentElement.id.replace(/^gpt_|^user_/i, ''), + content: { + conversation: contents, + internet_access: document.getElementById("switch").checked, + content_type: "text", + parts: [ + { + content: "", + role: "user", + }, + ], + }, + }, + }), + }); + const reader = response.body.getReader(); + while (true) { + const {value, done} = await reader.read(); + if (done) break; + + chunk = new TextDecoder().decode(value); + + if ( + chunk.includes( + `
{ const elements = box_conversations.childNodes; let index = elements.length; @@ -256,7 +444,7 @@ const show_option = async (conversation_id) => { conv.style.display = "none"; yes.style.display = "block"; - not.style.display = "block"; + not.style.display = "block"; } const hide_option = async (conversation_id) => { @@ -266,9 +454,66 @@ const hide_option = async (conversation_id) => { conv.style.display = "block"; yes.style.display = "none"; - not.style.display = "none"; + not.style.display = "none"; +} + +const getMsgWrapperByButton = (button) => { + let msgFuncBtnsDiv = button.parentElement; + let dropdown = msgFuncBtnsDiv.parentElement; + return dropdown.parentElement; } +const getMsgContentByButton = (senderBtn) => { + let msgWrapper = getMsgWrapperByButton(senderBtn); + return msgWrapper.querySelector(".message").querySelector(".content"); +}; + +const isInGenerating = () => { return !stop_generating.classList.contains("stop_generating-hidden"); }; + +const copyFunction = async (button) => { + let contentElement = getMsgContentByButton(button); + let conversation = await JSON.parse( + localStorage.getItem(`conversation:${conversation_id}`) + ); + let convItems = conversation.items; + let itemIndex = convItems.findIndex((el) => { return el.cmid === contentElement.id; }); + let text = convItems[itemIndex].content; + + await navigator.clipboard.writeText(text); +}; + +const regenerateFunction = async (button) => { + console.log(stop_generating.classList); + console.log(isInGenerating()); + if (isInGenerating()) { + return; + } + let msgWrapper = getMsgWrapperByButton(button); + let contentElement = msgWrapper.querySelector(".message").querySelector(".content"); + let conversation = await JSON.parse( + localStorage.getItem(`conversation:${conversation_id}`) + ); + await ask_gpt_regenerate(contentElement); +}; + +const deleteFunction = async (button) => { + if (isInGenerating()) { + return; + } + let msgWrapper = getMsgWrapperByButton(button); + let contentElement = getMsgContentByButton(button); + let conversation = await JSON.parse( + localStorage.getItem(`conversation:${conversation_id}`) + ); + let convItems = conversation.items; + let itemIndex = convItems.findIndex((el) => { return el.cmid === contentElement.id; }); + if (itemIndex !== -1) { + convItems.splice(itemIndex, 1); + localStorage.setItem(`conversation:${conversation_id}`, JSON.stringify(conversation)); + msgWrapper.remove(); + } +}; + const delete_conversation = async (conversation_id) => { localStorage.removeItem(`conversation:${conversation_id}`); @@ -307,21 +552,73 @@ const load_conversation = async (conversation_id) => { for (item of conversation.items) { message_box.innerHTML += ` -
-
- ${item.role == "assistant" ? gpt_image : user_image} - ${ - item.role == "assistant" - ? `` - : `` - } +
+ -
- ${ - item.role == "assistant" - ? markdown.render(item.content) - : item.content - } +
+
+ ${item.role == "assistant" ? gpt_image : user_image} + ${ + item.role == "assistant" + ? `` + : `` + } +
+
+ ${ + item.role == "assistant" + ? markdown.render(item.content) + : item.content + } +
`; @@ -358,12 +655,13 @@ const add_conversation = async (conversation_id, title) => { } }; -const add_message = async (conversation_id, role, content) => { +const add_message = async (conversation_id, role, content, conversation_message_id) => { before_adding = JSON.parse( localStorage.getItem(`conversation:${conversation_id}`) ); before_adding.items.push({ + cmid: conversation_message_id, role: role, content: content, });
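
Background on the first patch: with "join_chunks" enabled, a writer thread drains the upstream token generator into a shared list while the generator handed to Flask joins and flushes whatever has accumulated, so the response is written as fewer, larger chunks instead of one write per token (the weakref lets the writer stop once the response generator is gone, and the integer sentinel ends the reader). Below is a minimal standalone sketch of that producer/consumer idea, not the patch's code; the names joined_stream and fake_tokens and the Event-based shutdown are illustrative assumptions.

    # Illustrative sketch of the chunk-joining idea (not part of the patch).
    import threading
    import time


    def joined_stream(token_source, poll_interval=0.05):
        """Yield everything buffered since the last read as one joined string."""
        storage, lock, done = [], threading.Lock(), threading.Event()

        def writer():
            # Producer: pull tokens as they arrive and park them in the buffer.
            for token in token_source:
                with lock:
                    storage.append(token)
            done.set()

        threading.Thread(target=writer, daemon=True).start()

        while True:
            with lock:
                joined, storage[:] = "".join(storage), []
            if joined:
                yield joined                 # one write may carry several tokens
            elif done.is_set():
                return                       # producer finished, buffer drained
            else:
                time.sleep(poll_interval)    # same 0.05 s polling cadence as the patch


    if __name__ == "__main__":
        def fake_tokens():
            for word in "tokens arrive one at a time but are flushed in batches".split():
                time.sleep(0.01)
                yield word + " "

        for chunk in joined_stream(fake_tokens()):
            print(repr(chunk))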
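
The client-facing contract is unchanged: the browser still POSTs to /backend-api/v2/conversation and reads the streamed body. A hedged example of exercising the endpoint from Python follows; the host and port come from config.json, while the model, jailbreak, id, and prompt values are placeholders rather than values required by the patch.

    # Illustrative client for /backend-api/v2/conversation (values are placeholders).
    import uuid

    import requests

    payload = {
        "conversation_id": str(uuid.uuid4()),
        "action": "_ask",
        "model": "gpt-3.5-turbo",      # placeholder model name
        "jailbreak": "default",        # placeholder jailbreak option
        "meta": {
            "id": str(uuid.uuid4()),   # placeholder message token
            "content": {
                "conversation": [],
                "internet_access": False,
                "content_type": "text",
                "parts": [{"content": "Hello!", "role": "user"}],
            },
        },
    }

    with requests.post("http://127.0.0.1:1338/backend-api/v2/conversation",
                       json=payload, stream=True,
                       headers={"accept": "text/event-stream"}) as resp:
        # With "join_chunks": true the server coalesces buffered tokens, so each
        # iterated chunk tends to carry several tokens rather than exactly one.
        for chunk in resp.iter_content(chunk_size=None):
            print(chunk.decode("utf-8", errors="replace"), end="", flush=True)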