author     Commenter123321 <36051603+Commenter123321@users.noreply.github.com>  2023-10-09 18:02:06 +0200
committer  Commenter123321 <36051603+Commenter123321@users.noreply.github.com>  2023-10-09 18:02:06 +0200
commit     119817c96349807efaf87ee432ce46446542b66a (patch)
tree       1dbdf4d4dbf4f6c8a8247274ef500a2f1de765d1 /g4f/Provider/Phind.py
parent     aivvm's no life creator keeps patching it, but I'm just better 😉 (diff)
parent     Merge branch 'main' of https://github.com/xtekky/gpt4free (diff)
Diffstat (limited to 'g4f/Provider/Phind.py')
-rw-r--r--  g4f/Provider/Phind.py  76
1 file changed, 76 insertions, 0 deletions
diff --git a/g4f/Provider/Phind.py b/g4f/Provider/Phind.py
new file mode 100644
index 00000000..0db4e3c2
--- /dev/null
+++ b/g4f/Provider/Phind.py
@@ -0,0 +1,76 @@
+from __future__ import annotations
+
+import random
+from datetime import datetime
+
+from ..typing import AsyncGenerator
+from ..requests import StreamSession
+from .base_provider import AsyncGeneratorProvider, format_prompt
+
+
+class Phind(AsyncGeneratorProvider):
+    url = "https://www.phind.com"
+    working = True
+    supports_gpt_4 = True
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: list[dict[str, str]],
+        proxy: str = None,
+        **kwargs
+    ) -> AsyncGenerator:
+        chars = 'abcdefghijklmnopqrstuvwxyz0123456789'
+        user_id = ''.join(random.choice(chars) for _ in range(24))
+        data = {
+            "question": format_prompt(messages),
+            "webResults": [],
+            "options": {
+                "date": datetime.now().strftime("%d.%m.%Y"),
+                "language": "en",
+                "detailed": True,
+                "anonUserId": user_id,
+                "answerModel": "GPT-4",
+                "creativeMode": False,
+                "customLinks": []
+            },
+            "context": ""
+        }
+        headers = {
+            "Authority": cls.url,
+            "Accept": "application/json, text/plain, */*",
+            "Origin": cls.url,
+            "Referer": f"{cls.url}/"
+        }
+        async with StreamSession(headers=headers, timeout=(5, 180), proxies={"https": proxy}, impersonate="chrome107") as session:
+            async with session.post(f"{cls.url}/api/infer/answer", json=data) as response:
+                response.raise_for_status()
+                new_lines = 0
+                async for line in response.iter_lines():
+                    if not line:
+                        continue
+                    if line.startswith(b"data: "):
+                        line = line[6:]
+                        if line.startswith(b"<PHIND_METADATA>"):
+                            continue
+                        if line:
+                            if new_lines:
+                                yield "".join(["\n" for _ in range(int(new_lines / 2))])
+                                new_lines = 0
+                            yield line.decode()
+                        else:
+                            new_lines += 1
+
+
+    @classmethod
+    @property
+    def params(cls):
+        params = [
+            ("model", "str"),
+            ("messages", "list[dict[str, str]]"),
+            ("stream", "bool"),
+            ("proxy", "str"),
+        ]
+        param = ", ".join([": ".join(p) for p in params])
+        return f"g4f.provider.{cls.__name__} supports: ({param})"
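
For reference, a minimal usage sketch of the new provider (not part of the commit above). It assumes that Phind is re-exported from g4f.Provider's __init__, which this diff does not show, and that the "gpt-4" model string is acceptable since the provider hard-codes "GPT-4" as its answerModel anyway:

import asyncio
from g4f.Provider import Phind

async def main():
    messages = [{"role": "user", "content": "Hello"}]
    # create_async_generator is an async generator; it yields decoded
    # text chunks as they stream in from /api/infer/answer.
    async for chunk in Phind.create_async_generator(model="gpt-4", messages=messages):
        print(chunk, end="", flush=True)

asyncio.run(main())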