from __future__ import annotations

import time

from ...typing import CreateResult, Messages
from ..base_provider import AbstractProvider
from ..helper import format_prompt
from ...webdriver import WebDriver, WebDriverSession, element_send_text
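
# Map g4f model identifiers to the bot names used in poe.com chat URLs.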
models = {
"meta-llama/Llama-2-7b-chat-hf": {"name": "Llama-2-7b"},
"meta-llama/Llama-2-13b-chat-hf": {"name": "Llama-2-13b"},
"meta-llama/Llama-2-70b-chat-hf": {"name": "Llama-2-70b"},
"codellama/CodeLlama-7b-Instruct-hf": {"name": "Code-Llama-7b"},
"codellama/CodeLlama-13b-Instruct-hf": {"name": "Code-Llama-13b"},
"codellama/CodeLlama-34b-Instruct-hf": {"name": "Code-Llama-34b"},
"gpt-3.5-turbo": {"name": "GPT-3.5-Turbo"},
"gpt-3.5-turbo-instruct": {"name": "GPT-3.5-Turbo-Instruct"},
"gpt-4": {"name": "GPT-4"},
"palm": {"name": "Google-PaLM"},
}

class Poe(AbstractProvider):
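    """Provider that automates the poe.com chat UI through a Selenium webdriver (login required)."""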
url = "https://poe.com"
working = True
needs_auth = True
supports_gpt_35_turbo = True
supports_stream = True
models = models.keys()

    @classmethod
def create_completion(
cls,
model: str,
messages: Messages,
stream: bool,
proxy: str = None,
webdriver: WebDriver = None,
user_data_dir: str = None,
headless: bool = True,
**kwargs
) -> CreateResult:
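        # Default to GPT-3.5-Turbo; reject anything not listed in the model map.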
if not model:
model = "gpt-3.5-turbo"
elif model not in models:
            raise ValueError(f"Model is not supported: {model}")
prompt = format_prompt(messages)
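        # Reuse the caller's webdriver if given, otherwise start a fresh (optionally headless) session.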
session = WebDriverSession(webdriver, user_data_dir, headless, proxy=proxy)
with session as driver:
            from selenium.webdriver.common.by import By
            from selenium.webdriver.support.ui import WebDriverWait
            from selenium.webdriver.support import expected_conditions as EC
            from selenium.common.exceptions import TimeoutException
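            # Patch window.WebSocket before the page loads so incoming Poe messages
            # are mirrored into window._message / window._message_finished.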
driver.execute_cdp_cmd("Page.addScriptToEvaluateOnNewDocument", {
"source": """
window._message = window._last_message = "";
window._message_finished = false;
class ProxiedWebSocket extends WebSocket {
constructor(url, options) {
super(url, options);
this.addEventListener("message", (e) => {
const data = JSON.parse(JSON.parse(e.data)["messages"][0])["payload"]["data"];
if ("messageAdded" in data) {
if (data["messageAdded"]["author"] != "human") {
window._message = data["messageAdded"]["text"];
if (data["messageAdded"]["state"] == "complete") {
window._message_finished = true;
}
}
}
});
}
}
window.WebSocket = ProxiedWebSocket;
"""
})
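            # Open the bot's chat page and wait for the prompt textarea to appear.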
try:
driver.get(f"{cls.url}/{models[model]['name']}")
wait = WebDriverWait(driver, 10 if headless else 240)
wait.until(EC.visibility_of_element_located((By.CSS_SELECTOR, "textarea[class^='GrowingTextArea']")))
            except TimeoutException:
# Reopen browser for login
if not webdriver:
driver = session.reopen()
driver.get(f"{cls.url}/{models[model]['name']}")
wait = WebDriverWait(driver, 240)
wait.until(EC.visibility_of_element_located((By.CSS_SELECTOR, "textarea[class^='GrowingTextArea']")))
else:
raise RuntimeError("Prompt textarea not found. You may not be logged in.")
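            # Type the prompt into the chat box and submit it.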
element_send_text(driver.find_element(By.CSS_SELECTOR, "footer textarea[class^='GrowingTextArea']"), prompt)
driver.find_element(By.CSS_SELECTOR, "footer button[class*='ChatMessageSendButton']").click()
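            # Polled from Python: returns the text received since the last poll,
            # null once the reply is complete, or '' while still waiting.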
script = """
if(window._message && window._message != window._last_message) {
try {
return window._message.substring(window._last_message.length);
} finally {
window._last_message = window._message;
}
} else if(window._message_finished) {
return null;
} else {
return '';
}
"""
while True:
chunk = driver.execute_script(script)
if chunk:
yield chunk
elif chunk != "":
break
else:
time.sleep(0.1)