Diffstat (limited to 'g4f/Provider')
-rw-r--r--  g4f/Provider/AItianhuSpace.py        | 185
-rw-r--r--  g4f/Provider/MyShell.py              |   8
-rw-r--r--  g4f/Provider/helper.py               |  10
-rw-r--r--  g4f/Provider/needs_auth/Poe.py       | 129
-rw-r--r--  g4f/Provider/needs_auth/__init__.py  |   3
5 files changed, 249 insertions(+), 86 deletions(-)
diff --git a/g4f/Provider/AItianhuSpace.py b/g4f/Provider/AItianhuSpace.py
index d316fc6f..a9a824cf 100644
--- a/g4f/Provider/AItianhuSpace.py
+++ b/g4f/Provider/AItianhuSpace.py
@@ -1,95 +1,128 @@
from __future__ import annotations
-import random, json
-from .. import debug
-from ..typing import AsyncResult, Messages
-from ..requests import StreamSession
-from .base_provider import AsyncGeneratorProvider, format_prompt, get_cookies
+import time
+import random
-domains = {
- "gpt-3.5-turbo": "aitianhu.space",
- "gpt-4": "aitianhu.website",
-}
+from ..typing import CreateResult, Messages
+from .base_provider import BaseProvider
+from .helper import WebDriver, format_prompt, get_browser
+from .. import debug
-class AItianhuSpace(AsyncGeneratorProvider):
+class AItianhuSpace(BaseProvider):
url = "https://chat3.aiyunos.top/"
working = True
supports_gpt_35_turbo = True
+ _domains = ["aitianhu.com", "aitianhu1.top"]
@classmethod
- async def create_async_generator(cls,
- model: str,
- messages: Messages,
- proxy: str = None,
- domain: str = None,
- cookies: dict = None,
- timeout: int = 10, **kwargs) -> AsyncResult:
-
+ def create_completion(
+ cls,
+ model: str,
+ messages: Messages,
+ stream: bool,
+ domain: str = None,
+ proxy: str = None,
+ timeout: int = 120,
+ browser: WebDriver = None,
+ hidden_display: bool = True,
+ **kwargs
+ ) -> CreateResult:
if not model:
model = "gpt-3.5-turbo"
-
- elif model not in domains:
- raise ValueError(f"Model are not supported: {model}")
-
if not domain:
chars = 'abcdefghijklmnopqrstuvwxyz0123456789'
rand = ''.join(random.choice(chars) for _ in range(6))
- domain = f"{rand}.{domains[model]}"
-
+ domain = random.choice(cls._domains)
+ domain = f"{rand}.{domain}"
if debug.logging:
print(f"AItianhuSpace | using domain: {domain}")
+ url = f"https://{domain}"
+ prompt = format_prompt(messages)
+ if browser:
+ driver = browser
+ else:
+ if hidden_display:
+ driver, display = get_browser("", True, proxy)
+ else:
+ driver = get_browser("", False, proxy)
- if not cookies:
- cookies = get_cookies('.aitianhu.space')
- if not cookies:
- raise RuntimeError(f"g4f.provider.{cls.__name__} requires cookies [refresh https://{domain} on chrome]")
+ from selenium.webdriver.common.by import By
+ from selenium.webdriver.support.ui import WebDriverWait
+ from selenium.webdriver.support import expected_conditions as EC
- url = f'https://{domain}'
- async with StreamSession(proxies={"https": proxy},
- cookies=cookies, timeout=timeout, impersonate="chrome110", verify=False) as session:
-
- data = {
- "prompt": format_prompt(messages),
- "options": {},
- "systemMessage": "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully.",
- "temperature": 0.8,
- "top_p": 1,
- **kwargs
- }
- headers = {
- "Authority": url,
- "Accept": "application/json, text/plain, */*",
- "Origin": url,
- "Referer": f"{url}/"
- }
- async with session.post(f"{url}/api/chat-process", json=data, headers=headers) as response:
- response.raise_for_status()
- async for line in response.iter_lines():
- if line == b"<script>":
- raise RuntimeError("Solve challenge and pass cookies and a fixed domain")
- if b"platform's risk control" in line:
- raise RuntimeError("Platform's Risk Control")
- line = json.loads(line)
- if "detail" in line:
- if content := line["detail"]["choices"][0]["delta"].get(
- "content"
- ):
- yield content
- elif "message" in line and "AI-4接口非常昂贵" in line["message"]:
- raise RuntimeError("Rate limit for GPT 4 reached")
- else:
- raise RuntimeError(f"Response: {line}")
-
+ wait = WebDriverWait(driver, timeout)
- @classmethod
- @property
- def params(cls):
- params = [
- ("model", "str"),
- ("messages", "list[dict[str, str]]"),
- ("stream", "bool"),
- ("temperature", "float"),
- ("top_p", "int"),
- ]
- param = ", ".join([": ".join(p) for p in params])
- return f"g4f.provider.{cls.__name__} supports: ({param})"
+ # Bypass devtools detection
+ driver.get("https://blank.page/")
+ wait.until(EC.visibility_of_element_located((By.ID, "sheet")))
+ driver.execute_script(f"""
+document.getElementById('sheet').addEventListener('click', () => {{
+ window.open('{url}', '_blank');
+}});
+""")
+ driver.find_element(By.ID, "sheet").click()
+ time.sleep(10)
+
+ original_window = driver.current_window_handle
+ for window_handle in driver.window_handles:
+ if window_handle != original_window:
+ driver.switch_to.window(window_handle)
+ break
+
+ wait.until(EC.visibility_of_element_located((By.CSS_SELECTOR, "textarea.n-input__textarea-el")))
+
+ try:
+ # Add hook in XMLHttpRequest
+ script = """
+const _http_request_open = XMLHttpRequest.prototype.open;
+window._last_message = window._message = "";
+window._loadend = false;
+XMLHttpRequest.prototype.open = function(method, url) {
+ if (url == "/api/chat-process") {
+ this.addEventListener("progress", (event) => {
+ const lines = this.responseText.split("\\n");
+ try {
+ window._message = JSON.parse(lines[lines.length-1])["text"];
+ } catch(e) { }
+ });
+ this.addEventListener("loadend", (event) => {
+ window._loadend = true;
+ });
+ }
+ return _http_request_open.call(this, method, url);
+}
+"""
+ driver.execute_script(script)
+
+ # Input and submit prompt
+ driver.find_element(By.CSS_SELECTOR, "textarea.n-input__textarea-el").send_keys(prompt)
+ driver.find_element(By.CSS_SELECTOR, "button.n-button.n-button--primary-type.n-button--medium-type").click()
+
+ # Yield response
+ while True:
+ chunk = driver.execute_script("""
+if (window._message && window._message != window._last_message) {
+ try {
+ return window._message.substring(window._last_message.length);
+ } finally {
+ window._last_message = window._message;
+ }
+}
+if (window._loadend) {
+ return null;
+}
+return "";
+""")
+ if chunk:
+ yield chunk
+ elif chunk != "":
+ break
+ else:
+ time.sleep(0.1)
+ finally:
+ driver.close()
+ if not browser:
+ time.sleep(0.1)
+ driver.quit()
+ if hidden_display:
+                        display.stop()
\ No newline at end of file
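
A minimal usage sketch for the reworked AItianhuSpace provider, assuming the g4f client API used elsewhere in this repository (g4f.ChatCompletion.create with a provider argument); the model name and prompt are illustrative. Because create_completion now drives a real Chrome instance through Selenium, a local browser (and, when hidden_display is used, a virtual display) has to be available:

    import g4f
    from g4f.Provider import AItianhuSpace

    # Chunks are yielded as the injected XMLHttpRequest hook observes them,
    # so streaming mirrors what the browser page receives.
    for chunk in g4f.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "Hello"}],
        provider=AItianhuSpace,
        stream=True,
    ):
        print(chunk, end="", flush=True)
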
diff --git a/g4f/Provider/MyShell.py b/g4f/Provider/MyShell.py
index 70fd3509..0efeb0e8 100644
--- a/g4f/Provider/MyShell.py
+++ b/g4f/Provider/MyShell.py
@@ -38,11 +38,11 @@ class MyShell(BaseProvider):
driver.get(cls.url)
try:
- # Wait for page load
+ # Wait for page load and cloudflare validation
WebDriverWait(driver, timeout).until(
EC.presence_of_element_located((By.CSS_SELECTOR, "body:not(.no-js)"))
)
- # Send message
+ # Send request with message
script = """
response = await fetch("https://api.myshell.ai/v1/bot/chat/send_message", {
"headers": {
@@ -66,7 +66,7 @@ window.reader = response.body.getReader();
script = """
chunk = await window.reader.read();
if (chunk['done']) return null;
-text = (new TextDecoder ()).decode(chunk['value']);
+text = (new TextDecoder()).decode(chunk['value']);
content = '';
text.split('\\n').forEach((line, index) => {
if (line.startsWith('data: ')) {
@@ -81,7 +81,7 @@ text.split('\\n').forEach((line, index) => {
return content;
"""
while True:
- chunk = driver.execute_script(script):
+ chunk = driver.execute_script(script)
if chunk:
yield chunk
elif chunk != "":
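
For context, the loop above (repeated in AItianhuSpace and Poe) relies on a small contract with the injected script: it returns a non-empty string when new text has arrived, an empty string while the response is still in flight, and null once the stream has finished. Condensed into a sketch, with the JavaScript elided and a Selenium driver assumed:

    import time

    def iter_chunks(driver, script):
        # "" means still pending, None means the reader reported done.
        while True:
            chunk = driver.execute_script(script)
            if chunk:
                yield chunk          # new text since the last poll
            elif chunk != "":
                break                # script returned null: stream finished
            else:
                time.sleep(0.1)      # nothing new yet, poll again
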
diff --git a/g4f/Provider/helper.py b/g4f/Provider/helper.py
index cad32f05..b10c912a 100644
--- a/g4f/Provider/helper.py
+++ b/g4f/Provider/helper.py
@@ -18,10 +18,10 @@ from browser_cookie3 import (
BrowserCookieError
)
try:
- from selenium.webdriver.remote.webdriver import WebDriver
- except ImportError:
- class WebDriver():
- pass
+ from selenium.webdriver.remote.webdriver import WebDriver
+except ImportError:
+ class WebDriver():
+ pass
try:
from undetected_chromedriver import Chrome, ChromeOptions
except ImportError:
@@ -153,7 +153,7 @@ def get_browser(
if proxy:
if not options:
options = ChromeOptions()
- options.add_argument(f'--proxy-server={proxy}')
+ options.add_argument(f'--proxy-server={proxy}')
browser = Chrome(user_data_dir=user_data_dir, options=options)
if hidden_display:
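
A hedged sketch of how the patched get_browser helper is exercised by the providers in this change; the positional arguments (user_data_dir, hidden_display, proxy) and the (driver, display) pair returned when a hidden display is requested are inferred from the call sites in AItianhuSpace.py and Poe.py rather than from a documented API:

    from g4f.Provider.helper import get_browser

    # With hidden_display=True the helper also starts a virtual display,
    # which the caller stops once the driver has been shut down.
    driver, display = get_browser("", True, "http://127.0.0.1:8080")
    try:
        driver.get("https://example.com")
    finally:
        driver.quit()
        display.stop()
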
diff --git a/g4f/Provider/needs_auth/Poe.py b/g4f/Provider/needs_auth/Poe.py
new file mode 100644
index 00000000..6fbf7fd4
--- /dev/null
+++ b/g4f/Provider/needs_auth/Poe.py
@@ -0,0 +1,129 @@
+from __future__ import annotations
+
+import time
+
+from ...typing import CreateResult, Messages
+from ..base_provider import BaseProvider
+from ..helper import WebDriver, format_prompt, get_browser
+
+models = {
+ "meta-llama/Llama-2-7b-chat-hf": {"name": "Llama-2-7b"},
+ "meta-llama/Llama-2-13b-chat-hf": {"name": "Llama-2-13b"},
+ "meta-llama/Llama-2-70b-chat-hf": {"name": "Llama-2-70b"},
+ "codellama/CodeLlama-7b-Instruct-hf": {"name": "Code-Llama-7b"},
+ "codellama/CodeLlama-13b-Instruct-hf": {"name": "Code-Llama-13b"},
+ "codellama/CodeLlama-34b-Instruct-hf": {"name": "Code-Llama-34b"},
+ "gpt-3.5-turbo": {"name": "GPT-3.5-Turbo"},
+ "gpt-3.5-turbo-instruct": {"name": "GPT-3.5-Turbo-Instruct"},
+ "gpt-4": {"name": "GPT-4"},
+ "palm": {"name": "Google-PaLM"},
+}
+
+class Poe(BaseProvider):
+ url = "https://poe.com"
+ working = True
+ supports_gpt_35_turbo = True
+ supports_stream = True
+
+ @classmethod
+ def create_completion(
+ cls,
+ model: str,
+ messages: Messages,
+ stream: bool,
+ proxy: str = None,
+ browser: WebDriver = None,
+ hidden_display: bool = True,
+ **kwargs
+ ) -> CreateResult:
+ if not model:
+ model = "gpt-3.5-turbo"
+ elif model not in models:
+            raise ValueError(f"Model is not supported: {model}")
+ prompt = format_prompt(messages)
+ if browser:
+ driver = browser
+ else:
+ if hidden_display:
+ driver, display = get_browser(None, True, proxy)
+ else:
+ driver = get_browser(None, False, proxy)
+
+ script = """
+window._message = window._last_message = "";
+window._message_finished = false;
+class ProxiedWebSocket extends WebSocket {
+ constructor(url, options) {
+ super(url, options);
+ this.addEventListener("message", (e) => {
+ const data = JSON.parse(JSON.parse(e.data)["messages"][0])["payload"]["data"];
+ if ("messageAdded" in data) {
+ if (data["messageAdded"]["author"] != "human") {
+ window._message = data["messageAdded"]["text"];
+ if (data["messageAdded"]["state"] == "complete") {
+ window._message_finished = true;
+ }
+ }
+ }
+ });
+ }
+}
+window.WebSocket = ProxiedWebSocket;
+"""
+ driver.execute_cdp_cmd("Page.addScriptToEvaluateOnNewDocument", {
+ "source": script
+ })
+
+ from selenium.webdriver.common.by import By
+ from selenium.webdriver.support.ui import WebDriverWait
+ from selenium.webdriver.support import expected_conditions as EC
+
+ try:
+ driver.get(f"{cls.url}/{models[model]['name']}")
+ wait = WebDriverWait(driver, 10 if hidden_display else 240)
+ wait.until(EC.visibility_of_element_located((By.CSS_SELECTOR, "textarea[class^='GrowingTextArea']")))
+ except:
+ # Reopen browser for login
+ if not browser:
+ driver.quit()
+ if hidden_display:
+ display.stop()
+ driver = get_browser(None, False, proxy)
+ driver.get(f"{cls.url}/{models[model]['name']}")
+ wait = WebDriverWait(driver, 240)
+ wait.until(EC.visibility_of_element_located((By.CSS_SELECTOR, "textarea[class^='GrowingTextArea']")))
+ else:
+ raise RuntimeError("Prompt textarea not found. You may not be logged in.")
+
+ driver.find_element(By.CSS_SELECTOR, "footer textarea[class^='GrowingTextArea']").send_keys(prompt)
+ driver.find_element(By.CSS_SELECTOR, "footer button[class*='ChatMessageSendButton']").click()
+
+ try:
+ script = """
+if(window._message && window._message != window._last_message) {
+ try {
+ return window._message.substring(window._last_message.length);
+ } finally {
+ window._last_message = window._message;
+ }
+} else if(window._message_finished) {
+ return null;
+} else {
+ return '';
+}
+"""
+ while True:
+ chunk = driver.execute_script(script)
+ if chunk:
+ yield chunk
+ elif chunk != "":
+ break
+ else:
+ time.sleep(0.1)
+ finally:
+ driver.close()
+ if not browser:
+ time.sleep(0.1)
+ driver.quit()
+ if hidden_display:
+                    display.stop()
\ No newline at end of file
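
A minimal usage sketch for the new Poe provider, assuming the same g4f client API as above. Poe requires an authenticated poe.com session: on a first run the provider reopens a visible browser so you can log in, or an already logged-in Selenium driver can be handed in through the browser keyword of create_completion:

    import g4f
    from g4f.Provider.needs_auth import Poe

    for chunk in g4f.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "Write a haiku about autumn."}],
        provider=Poe,
        stream=True,
    ):
        print(chunk, end="", flush=True)
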
diff --git a/g4f/Provider/needs_auth/__init__.py b/g4f/Provider/needs_auth/__init__.py
index 815194c4..4230253e 100644
--- a/g4f/Provider/needs_auth/__init__.py
+++ b/g4f/Provider/needs_auth/__init__.py
@@ -3,4 +3,5 @@ from .Raycast import Raycast
from .Theb import Theb
from .HuggingChat import HuggingChat
from .OpenaiChat import OpenaiChat
-from .OpenAssistant import OpenAssistant
\ No newline at end of file
+from .OpenAssistant import OpenAssistant
+from .Poe import Poe
\ No newline at end of file