author     Heiner Lohaus <hlohaus@users.noreply.github.com>  2024-11-21 07:14:36 +0100
committer  Heiner Lohaus <hlohaus@users.noreply.github.com>  2024-11-21 07:14:36 +0100
commit     6f2b6cccbda6b3cd7ec5e0d5ba8ebf23d7c53419 (patch)
tree       2c5a647a67c8f0b208a21e1bde4f4943980e1c36 /g4f
parent     Support synthesize in Openai generator (#2394) (diff)
Diffstat (limited to 'g4f')
-rw-r--r--  g4f/Provider/needs_auth/OpenaiChat.py    4
-rw-r--r--  g4f/Provider/openai/har_file.py          8
-rw-r--r--  g4f/gui/client/index.html                2
-rw-r--r--  g4f/gui/client/static/js/chat.v1.js     33
-rw-r--r--  g4f/gui/server/backend.py               27
5 files changed, 47 insertions, 27 deletions
diff --git a/g4f/Provider/needs_auth/OpenaiChat.py b/g4f/Provider/needs_auth/OpenaiChat.py
index 797455fe..29e3d4b4 100644
--- a/g4f/Provider/needs_auth/OpenaiChat.py
+++ b/g4f/Provider/needs_auth/OpenaiChat.py
@@ -418,6 +418,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
[debug.log(text) for text in (
f"Arkose: {'False' if not need_arkose else RequestConfig.arkose_token[:12]+'...'}",
f"Proofofwork: {'False' if proofofwork is None else proofofwork[:12]+'...'}",
+ f"AccessToken: {'False' if cls._api_key is None else cls._api_key[:12]+'...'}",
)]
data = {
"action": action,
@@ -438,7 +439,8 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
messages = messages if conversation_id is None else [messages[-1]]
data["messages"] = cls.create_messages(messages, image_request)
headers = {
- "accept": "text/event-stream",
+ "Accept": "text/event-stream",
+ "Content-Type": "application/json",
"Openai-Sentinel-Chat-Requirements-Token": chat_token,
**cls._headers
}
diff --git a/g4f/Provider/openai/har_file.py b/g4f/Provider/openai/har_file.py
index 4569e1b7..d731bf73 100644
--- a/g4f/Provider/openai/har_file.py
+++ b/g4f/Provider/openai/har_file.py
@@ -81,10 +81,10 @@ def readHAR():
RequestConfig.access_token = match.group(1)
except KeyError:
continue
- RequestConfig.cookies = {c['name']: c['value'] for c in v['request']['cookies'] if c['name'] != "oai-did"}
+ RequestConfig.cookies = {c['name']: c['value'] for c in v['request']['cookies']}
RequestConfig.headers = v_headers
- if RequestConfig.access_token is None:
- raise NoValidHarFileError("No accessToken found in .har files")
+ if RequestConfig.proof_token is None:
+ raise NoValidHarFileError("No proof_token found in .har files")
def get_headers(entry) -> dict:
return {h['name'].lower(): h['value'] for h in entry['request']['headers'] if h['name'].lower() not in ['content-length', 'cookie'] and not h['name'].startswith(':')}
@@ -149,7 +149,7 @@ def getN() -> str:
return base64.b64encode(timestamp.encode()).decode()
async def get_request_config(proxy: str) -> RequestConfig:
- if RequestConfig.access_token is None:
+ if RequestConfig.proof_token is None:
readHAR()
if RequestConfig.arkose_request is not None:
RequestConfig.arkose_token = await sendRequest(genArkReq(RequestConfig.arkose_request), proxy)
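Note: readHAR() now keeps every cookie from the HAR entries (it no longer drops "oai-did") and only raises NoValidHarFileError when no proof token is found. A minimal sketch, not part of the commit, of collecting cookies from a HAR file in the same way; the file name is an assumption:

# Minimal sketch (not part of the commit): gather cookies from a HAR file the way
# the updated readHAR() does, i.e. without filtering out "oai-did".
# The file name "chatgpt.com.har" is an assumption.
import json

with open("chatgpt.com.har") as f:
    har = json.load(f)

cookies = {}
for entry in har["log"]["entries"]:      # standard HAR layout: log.entries[].request.cookies[]
    for c in entry["request"]["cookies"]:
        cookies[c["name"]] = c["value"]
print(f"collected {len(cookies)} cookies")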
diff --git a/g4f/gui/client/index.html b/g4f/gui/client/index.html
index 6c2ad8b6..116509d8 100644
--- a/g4f/gui/client/index.html
+++ b/g4f/gui/client/index.html
@@ -224,7 +224,7 @@
<i class="fa-solid fa-camera"></i>
</label>
<label class="file-label" for="file">
- <input type="file" id="file" name="file" accept="text/plain, text/html, text/xml, application/json, text/javascript, .sh, .py, .php, .css, .yaml, .sql, .log, .csv, .twig, .md" required/>
+ <input type="file" id="file" name="file" accept="text/plain, text/html, text/xml, application/json, text/javascript, .har, .sh, .py, .php, .css, .yaml, .sql, .log, .csv, .twig, .md" required/>
<i class="fa-solid fa-paperclip"></i>
</label>
<label class="micro-label" for="micro">
diff --git a/g4f/gui/client/static/js/chat.v1.js b/g4f/gui/client/static/js/chat.v1.js
index 73c0de0f..8127fb9d 100644
--- a/g4f/gui/client/static/js/chat.v1.js
+++ b/g4f/gui/client/static/js/chat.v1.js
@@ -1338,17 +1338,25 @@ fileInput.addEventListener('click', async (event) => {
delete fileInput.dataset.text;
});
+async function upload_cookies() {
+ const file = fileInput.files[0];
+ const formData = new FormData();
+ formData.append('file', file);
+ response = await fetch("/backend-api/v2/upload_cookies", {
+ method: 'POST',
+ body: formData,
+ });
+ if (response.status == 200) {
+ inputCount.innerText = `${file.name} was uploaded successfully`;
+ }
+ fileInput.value = "";
+}
+
fileInput.addEventListener('change', async (event) => {
if (fileInput.files.length) {
- type = fileInput.files[0].type;
- if (type && type.indexOf('/')) {
- type = type.split('/').pop().replace('x-', '')
- type = type.replace('plain', 'plaintext')
- .replace('shellscript', 'sh')
- .replace('svg+xml', 'svg')
- .replace('vnd.trolltech.linguist', 'ts')
- } else {
- type = fileInput.files[0].name.split('.').pop()
+ type = fileInput.files[0].name.split('.').pop()
+ if (type == "har") {
+ return await upload_cookies();
}
fileInput.dataset.type = type
const reader = new FileReader();
@@ -1357,14 +1365,19 @@ fileInput.addEventListener('change', async (event) => {
if (type == "json") {
const data = JSON.parse(fileInput.dataset.text);
if ("g4f" in data.options) {
+ let count = 0;
Object.keys(data).forEach(key => {
if (key != "options" && !localStorage.getItem(key)) {
appStorage.setItem(key, JSON.stringify(data[key]));
- }
+ count += 1;
+ }
});
delete fileInput.dataset.text;
await load_conversations();
fileInput.value = "";
+ inputCount.innerText = `${count} Conversations were imported successfully`;
+ } else {
+ await upload_cookies();
}
}
});
diff --git a/g4f/gui/server/backend.py b/g4f/gui/server/backend.py
index 87da49e1..fe020bb8 100644
--- a/g4f/gui/server/backend.py
+++ b/g4f/gui/server/backend.py
@@ -1,12 +1,15 @@
import json
import asyncio
import flask
+import os
from flask import request, Flask
from typing import AsyncGenerator, Generator
+from werkzeug.utils import secure_filename
from g4f.image import is_allowed_extension, to_image
from g4f.client.service import convert_to_provider
from g4f.errors import ProviderNotFoundError
+from g4f.cookies import get_cookies_dir
from .api import Api
def safe_iter_generator(generator: Generator) -> Generator:
@@ -79,8 +82,8 @@ class Backend_Api(Api):
'function': self.handle_synthesize,
'methods': ['GET']
},
- '/backend-api/v2/error': {
- 'function': self.handle_error,
+ '/backend-api/v2/upload_cookies': {
+ 'function': self.upload_cookies,
'methods': ['POST']
},
'/images/<path:name>': {
@@ -89,15 +92,17 @@ class Backend_Api(Api):
}
}
- def handle_error(self):
- """
- Initialize the backend API with the given Flask application.
-
- Args:
- app (Flask): Flask application instance to attach routes to.
- """
- print(request.json)
- return 'ok', 200
+ def upload_cookies(self):
+ file = None
+ if "file" in request.files:
+ file = request.files['file']
+ if file.filename == '':
+ return 'No selected file', 400
+ if file and file.filename.endswith(".json") or file.filename.endswith(".har"):
+ filename = secure_filename(file.filename)
+ file.save(os.path.join(get_cookies_dir(), filename))
+ return "File saved", 200
+ return 'Not supported file', 400
def handle_conversation(self):
"""