author    H Lohaus <hlohaus@users.noreply.github.com>  2024-02-26 11:30:17 +0100
committer GitHub <noreply@github.com>                  2024-02-26 11:30:17 +0100
commit    36e7665613c4a8ff373d90401ae804f7a334feab (patch)
tree      1c5bd93c13f845b8554da3290e124116e251663f
parent    Merge pull request #1630 from hlohaus/flow (diff)
parent    Add proxy connector to GeminiPro (diff)
-rw-r--r--  g4f/Provider/Bing.py            |  2
-rw-r--r--  g4f/Provider/GeminiPro.py       | 24
-rw-r--r--  g4f/Provider/Liaobots.py        |  2
-rw-r--r--  g4f/gui/client/html/index.html  |  1
-rw-r--r--  g4f/gui/client/js/chat.v1.js    | 19
-rw-r--r--  g4f/providers/helper.py         |  7
6 files changed, 36 insertions(+), 19 deletions(-)
diff --git a/g4f/Provider/Bing.py b/g4f/Provider/Bing.py
index 1e29c4f1..5bc89479 100644
--- a/g4f/Provider/Bing.py
+++ b/g4f/Provider/Bing.py
@@ -70,7 +70,7 @@ class Bing(AsyncGeneratorProvider):
        gpt4_turbo = True if model.startswith("gpt-4-turbo") else False
-        return stream_generate(prompt, tone, image, context, cookies, get_connector(connector, proxy), web_search, gpt4_turbo, timeout)
+        return stream_generate(prompt, tone, image, context, cookies, get_connector(connector, proxy, True), web_search, gpt4_turbo, timeout)
def create_context(messages: Messages) -> str:
    """
diff --git a/g4f/Provider/GeminiPro.py b/g4f/Provider/GeminiPro.py
index 87ded3ac..a2e3538d 100644
--- a/g4f/Provider/GeminiPro.py
+++ b/g4f/Provider/GeminiPro.py
@@ -2,12 +2,13 @@ from __future__ import annotations
import base64
import json
-from aiohttp import ClientSession
+from aiohttp import ClientSession, BaseConnector
from ..typing import AsyncResult, Messages, ImageType
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from ..image import to_bytes, is_accepted_format
from ..errors import MissingAuthError
+from .helper import get_connector
class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://ai.google.dev"
@@ -27,6 +28,7 @@ class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
        api_key: str = None,
        api_base: str = None,
        image: ImageType = None,
+        connector: BaseConnector = None,
        **kwargs
    ) -> AsyncResult:
        model = "gemini-pro-vision" if not model and image else model
@@ -34,18 +36,19 @@ class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
        if not api_key:
            raise MissingAuthError('Missing "api_key"')
+
+        headers = params = None
+        if api_base:
+            headers = {"Authorization": f"Bearer {api_key}"}
+        else:
+            params = {"key": api_key}
+
        if not api_base:
            api_base = f"https://generativelanguage.googleapis.com/v1beta"
        method = "streamGenerateContent" if stream else "generateContent"
        url = f"{api_base.rstrip('/')}/models/{model}:{method}"
-        headers = None
-        if api_base:
-            headers = {f"Authorization": "Bearer {api_key}"}
-        else:
-            url += f"?key={api_key}"
-
-        async with ClientSession(headers=headers) as session:
+        async with ClientSession(headers=headers, connector=get_connector(connector, proxy)) as session:
            contents = [
                {
                    "role": "model" if message["role"] == "assistant" else message["role"],
@@ -71,10 +74,11 @@ class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
"topK": kwargs.get("top_k"),
}
}
- async with session.post(url, json=data, proxy=proxy) as response:
+ async with session.post(url, params=params, json=data) as response:
if not response.ok:
data = await response.json()
- raise RuntimeError(data[0]["error"]["message"])
+ data = data[0] if isinstance(data, list) else data
+ raise RuntimeError(data["error"]["message"])
if stream:
lines = []
async for chunk in response.content:
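A minimal sketch (not from the commit) of the request shape the updated GeminiPro provider builds: the official Google endpoint is authenticated via a "key" query parameter, while a custom api_base gets a Bearer authorization header instead. The model name and prompt below are illustrative.

import asyncio
import aiohttp

async def gemini_generate(api_key: str, prompt: str, api_base: str = None) -> dict:
    # Custom api_base: Bearer header; official endpoint: "key" query parameter.
    if api_base:
        headers, params = {"Authorization": f"Bearer {api_key}"}, None
    else:
        headers, params = None, {"key": api_key}
        api_base = "https://generativelanguage.googleapis.com/v1beta"
    url = f"{api_base.rstrip('/')}/models/gemini-pro:generateContent"
    data = {"contents": [{"role": "user", "parts": [{"text": prompt}]}]}
    async with aiohttp.ClientSession(headers=headers) as session:
        async with session.post(url, params=params, json=data) as response:
            body = await response.json()
            if not response.ok:
                body = body[0] if isinstance(body, list) else body
                raise RuntimeError(body["error"]["message"])
            return body

# Example usage: asyncio.run(gemini_generate("YOUR_API_KEY", "Hello"))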
diff --git a/g4f/Provider/Liaobots.py b/g4f/Provider/Liaobots.py
index 54bf7f2e..159c8488 100644
--- a/g4f/Provider/Liaobots.py
+++ b/g4f/Provider/Liaobots.py
@@ -105,7 +105,7 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
        async with ClientSession(
            headers=headers,
            cookie_jar=cls._cookie_jar,
-            connector=get_connector(connector, proxy)
+            connector=get_connector(connector, proxy, True)
        ) as session:
            cls._auth_code = auth if isinstance(auth, str) else cls._auth_code
            if not cls._auth_code:
diff --git a/g4f/gui/client/html/index.html b/g4f/gui/client/html/index.html
index 102a762e..9ef8a820 100644
--- a/g4f/gui/client/html/index.html
+++ b/g4f/gui/client/html/index.html
@@ -173,7 +173,6 @@
<option value="">Provider: Auto</option>
<option value="Bing">Bing</option>
<option value="OpenaiChat">OpenaiChat</option>
- <option value="HuggingChat">HuggingChat</option>
<option value="Gemini">Gemini</option>
<option value="Liaobots">Liaobots</option>
<option value="Phind">Phind</option>
diff --git a/g4f/gui/client/js/chat.v1.js b/g4f/gui/client/js/chat.v1.js
index c727dbf9..edd27689 100644
--- a/g4f/gui/client/js/chat.v1.js
+++ b/g4f/gui/client/js/chat.v1.js
@@ -121,6 +121,20 @@ const remove_cancel_button = async () => {
};
const filter_messages = (messages) => {
+    // Removes none user messages at end
+    let last_message;
+    while (last_message = new_messages.pop()) {
+        if (last_message["role"] == "user") {
+            new_messages.push(last_message);
+            break;
+        }
+    }
+
+    // Remove history, if it is selected
+    if (document.getElementById('history')?.checked) {
+        messages = [messages[messages.length-1]];
+    }
+
    let new_messages = [];
    for (i in messages) {
        new_message = messages[i];
@@ -135,6 +149,7 @@ const filter_messages = (messages) => {
            new_messages.push(new_message)
        }
    }
+
    return new_messages;
}
@@ -143,10 +158,6 @@ const ask_gpt = async () => {
    messages = await get_messages(window.conversation_id);
    total_messages = messages.length;
-    // Remove history, if it is selected
-    if (document.getElementById('history')?.checked) {
-        messages = [messages[messages.length-1]];
-    }
    messages = filter_messages(messages);
    window.scrollTo(0, 0);
diff --git a/g4f/providers/helper.py b/g4f/providers/helper.py
index 49d033d1..c027216b 100644
--- a/g4f/providers/helper.py
+++ b/g4f/providers/helper.py
@@ -51,11 +51,14 @@ def get_random_hex() -> str:
"""
return secrets.token_hex(16).zfill(32)
-def get_connector(connector: BaseConnector = None, proxy: str = None) -> Optional[BaseConnector]:
+def get_connector(connector: BaseConnector = None, proxy: str = None, rdns: bool = False) -> Optional[BaseConnector]:
if proxy and not connector:
try:
from aiohttp_socks import ProxyConnector
- connector = ProxyConnector.from_url(proxy)
+ if proxy.startswith("socks5h://"):
+ proxy = proxy.replace("socks5h://", "socks5://")
+ rdns = True
+ connector = ProxyConnector.from_url(proxy, rdns=rdns)
except ImportError:
raise MissingRequirementsError('Install "aiohttp_socks" package for proxy support')
return connector \ No newline at end of file
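A hedged usage sketch of the updated helper: aiohttp_socks has no separate "socks5h" scheme, so get_connector now rewrites a socks5h:// proxy URL to socks5:// and enables remote DNS resolution (rdns) instead. The proxy address below is illustrative.

from g4f.providers.helper import get_connector

# socks5h:// conventionally means "resolve hostnames on the proxy"; the helper
# maps it to a socks5:// ProxyConnector created with rdns=True.
connector = get_connector(proxy="socks5h://127.0.0.1:9050")

# Roughly equivalent to (requires the aiohttp_socks package):
# from aiohttp_socks import ProxyConnector
# connector = ProxyConnector.from_url("socks5://127.0.0.1:9050", rdns=True)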