author    Raju Komati <komatiraju032@gmail.com>  2023-04-29 11:25:24 +0200
committer Raju Komati <komatiraju032@gmail.com>  2023-04-29 11:25:24 +0200
commit    54b4c789a75fec5c3a92a03dfd2d93d051309651 (patch)
tree      813a7dd223e9f7df3b1708fc65a58ccc87684ba6 /quora/__init__.py
parent    Update README.md (diff)
Diffstat (limited to 'quora/__init__.py')
-rw-r--r--  gpt4free/quora/__init__.py (renamed from quora/__init__.py)  152
1 file changed, 64 insertions, 88 deletions
diff --git a/quora/__init__.py b/gpt4free/quora/__init__.py
index cd5ec8f9..31526e0c 100644
--- a/quora/__init__.py
+++ b/gpt4free/quora/__init__.py
@@ -6,11 +6,12 @@ from pathlib import Path
from random import choice, choices, randint
from re import search, findall
from string import ascii_letters, digits
-from typing import Optional, Union
+from typing import Optional, Union, List, Any, Generator
from urllib.parse import unquote
import selenium.webdriver.support.expected_conditions as EC
from fake_useragent import UserAgent
+from pydantic import BaseModel
from pypasser import reCaptchaV3
from requests import Session
from selenium.webdriver import Firefox, Chrome, FirefoxOptions, ChromeOptions
@@ -18,8 +19,8 @@ from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
from tls_client import Session as TLS
-from quora.api import Client as PoeClient
-from quora.mail import Emailnator
+from .api import Client as PoeClient
+from .mail import Emailnator
SELENIUM_WEB_DRIVER_ERROR_MSG = b'''The error message you are receiving is due to the `geckodriver` executable not
being found in your system\'s PATH. To resolve this issue, you need to download the geckodriver and add its location
@@ -67,42 +68,27 @@ def extract_formkey(html):
return formkey
-class PoeResponse:
- class Completion:
- class Choices:
- def __init__(self, choice: dict) -> None:
- self.text = choice['text']
- self.content = self.text.encode()
- self.index = choice['index']
- self.logprobs = choice['logprobs']
- self.finish_reason = choice['finish_reason']
+class Choice(BaseModel):
+ text: str
+ index: int
+ logprobs: Any
+ finish_reason: str
- def __repr__(self) -> str:
- return f'''<__main__.APIResponse.Completion.Choices(\n text = {self.text.encode()},\n index = {self.index},\n logprobs = {self.logprobs},\n finish_reason = {self.finish_reason})object at 0x1337>'''
- def __init__(self, choices: dict) -> None:
- self.choices = [self.Choices(choice) for choice in choices]
+class Usage(BaseModel):
+ prompt_tokens: int
+ completion_tokens: int
+ total_tokens: int
- class Usage:
- def __init__(self, usage_dict: dict) -> None:
- self.prompt_tokens = usage_dict['prompt_tokens']
- self.completion_tokens = usage_dict['completion_tokens']
- self.total_tokens = usage_dict['total_tokens']
- def __repr__(self):
- return f'''<__main__.APIResponse.Usage(\n prompt_tokens = {self.prompt_tokens},\n completion_tokens = {self.completion_tokens},\n total_tokens = {self.total_tokens})object at 0x1337>'''
-
- def __init__(self, response_dict: dict) -> None:
- self.response_dict = response_dict
- self.id = response_dict['id']
- self.object = response_dict['object']
- self.created = response_dict['created']
- self.model = response_dict['model']
- self.completion = self.Completion(response_dict['choices'])
- self.usage = self.Usage(response_dict['usage'])
-
- def json(self) -> dict:
- return self.response_dict
+class PoeResponse(BaseModel):
+ id: int
+ object: str
+ created: int
+ model: str
+ choices: List[Choice]
+ usage: Usage
+ text: str
class ModelResponse:
@@ -116,18 +102,12 @@ class ModelResponse:
class Model:
@staticmethod
def create(
- token: str,
- model: str = 'gpt-3.5-turbo', # claude-instant
- system_prompt: str = 'You are ChatGPT a large language model developed by Openai. Answer as consisely as possible',
- description: str = 'gpt-3.5 language model from openai, skidded by poe.com',
- handle: str = None,
+ token: str,
+ model: str = 'gpt-3.5-turbo', # claude-instant
+ system_prompt: str = 'You are ChatGPT a large language model developed by Openai. Answer as consisely as possible',
+ description: str = 'gpt-3.5 language model from openai, skidded by poe.com',
+ handle: str = None,
) -> ModelResponse:
- models = {
- 'gpt-3.5-turbo': 'chinchilla',
- 'claude-instant-v1.0': 'a2',
- 'gpt-4': 'beaver',
- }
-
if not handle:
handle = f'gptx{randint(1111111, 9999999)}'
@@ -162,7 +142,7 @@ class Model:
obj={
'queryName': 'CreateBotMain_poeBotCreate_Mutation',
'variables': {
- 'model': models[model],
+ 'model': MODELS[model],
'handle': handle,
'prompt': system_prompt,
'isPromptPublic': True,
@@ -202,9 +182,9 @@ class Model:
class Account:
@staticmethod
def create(
- proxy: Optional[str] = None,
- logging: bool = False,
- enable_bot_creation: bool = False,
+ proxy: Optional[str] = None,
+ logging: bool = False,
+ enable_bot_creation: bool = False,
):
client = TLS(client_identifier='chrome110')
client.proxies = {'http': f'http://{proxy}', 'https': f'http://{proxy}'} if proxy else None
@@ -309,22 +289,23 @@ class Account:
class StreamingCompletion:
@staticmethod
def create(
- model: str = 'gpt-4',
- custom_model: bool = None,
- prompt: str = 'hello world',
- token: str = '',
- ):
+ model: str = 'gpt-4',
+ custom_model: bool = None,
+ prompt: str = 'hello world',
+ token: str = '',
+ ) -> Generator[PoeResponse, None, None]:
_model = MODELS[model] if not custom_model else custom_model
client = PoeClient(token)
for chunk in client.send_message(_model, prompt):
yield PoeResponse(
- {
+ **{
'id': chunk['messageId'],
'object': 'text_completion',
'created': chunk['creationTime'],
'model': _model,
+ 'text': chunk['text_new'],
'choices': [
{
'text': chunk['text_new'],
@@ -343,33 +324,28 @@ class StreamingCompletion:
class Completion:
+ @staticmethod
def create(
- model: str = 'gpt-4',
- custom_model: str = None,
- prompt: str = 'hello world',
- token: str = '',
- ):
- models = {
- 'sage': 'capybara',
- 'gpt-4': 'beaver',
- 'claude-v1.2': 'a2_2',
- 'claude-instant-v1.0': 'a2',
- 'gpt-3.5-turbo': 'chinchilla',
- }
-
- _model = models[model] if not custom_model else custom_model
+ model: str = 'gpt-4',
+ custom_model: str = None,
+ prompt: str = 'hello world',
+ token: str = '',
+ ) -> PoeResponse:
+ _model = MODELS[model] if not custom_model else custom_model
client = PoeClient(token)
- for chunk in client.send_message(_model, prompt):
- pass
+ chunk = None
+ for response in client.send_message(_model, prompt):
+ chunk = response
return PoeResponse(
- {
+ **{
'id': chunk['messageId'],
'object': 'text_completion',
'created': chunk['creationTime'],
'model': _model,
+ 'text': chunk['text'],
'choices': [
{
'text': chunk['text'],
@@ -389,12 +365,12 @@ class Completion:
class Poe:
def __init__(
- self,
- model: str = 'ChatGPT',
- driver: str = 'firefox',
- download_driver: bool = False,
- driver_path: Optional[str] = None,
- cookie_path: str = './quora/cookie.json',
+ self,
+ model: str = 'ChatGPT',
+ driver: str = 'firefox',
+ download_driver: bool = False,
+ driver_path: Optional[str] = None,
+ cookie_path: str = './quora/cookie.json',
):
# validating the model
if model and model not in MODELS:
@@ -451,8 +427,8 @@ class Poe:
driver.close()
return cookie
- @classmethod
- def __resolve_driver(cls, driver: str, driver_path: Optional[str] = None) -> Union[Firefox, Chrome]:
+ @staticmethod
+ def __resolve_driver(driver: str, driver_path: Optional[str] = None) -> Union[Firefox, Chrome]:
options = FirefoxOptions() if driver == 'firefox' else ChromeOptions()
options.add_argument('-headless')
@@ -473,12 +449,12 @@ class Poe:
return response
def create_bot(
- self,
- name: str,
- /,
- prompt: str = '',
- base_model: str = 'ChatGPT',
- description: str = '',
+ self,
+ name: str,
+ /,
+ prompt: str = '',
+ base_model: str = 'ChatGPT',
+ description: str = '',
) -> None:
if base_model not in MODELS:
raise RuntimeError('Sorry, the base_model you provided does not exist. Please check and try again.')
@@ -492,4 +468,4 @@ class Poe:
print(f'Successfully created bot with name: {response["bot"]["displayName"]}')
def list_bots(self) -> list:
- return list(self.client.bot_names.values())
+ return list(self.client.bot_names.values())
\ No newline at end of file
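
For reference, a minimal usage sketch of the interface after this refactor. The import path assumes the post-rename layout (gpt4free/quora/__init__.py importable as a package) and the token value is a hypothetical placeholder; this sketch is illustrative and not part of the commit.

# Usage sketch (assumptions: the package imports as `gpt4free.quora` after the
# rename, and `token` is a valid Poe token supplied by the caller).
from gpt4free.quora import Completion, StreamingCompletion

token = 'POE_TOKEN_PLACEHOLDER'  # hypothetical placeholder, not a real token

# Blocking call: Completion.create now returns a single pydantic PoeResponse,
# whose `text` field holds the full reply taken from the last streamed chunk.
response = Completion.create(model='gpt-4', prompt='hello world', token=token)
print(response.text)

# Streaming call: StreamingCompletion.create is typed as a Generator and yields
# one PoeResponse per chunk, with `text` carrying only the newly generated text.
for chunk in StreamingCompletion.create(model='gpt-4', prompt='hello world', token=token):
    print(chunk.text, end='', flush=True)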