author | Heiner Lohaus <heiner@lohaus.eu> | 2023-10-26 21:32:49 +0200
committer | Heiner Lohaus <heiner@lohaus.eu> | 2023-10-26 21:43:20 +0200
commit | 0d1ae405cc24f3daafb783d758c019ef6731d3aa (patch)
tree | 38b81047c788b60458341b4a2cbda8fb7fd936aa /g4f/Provider/DeepInfra.py
parent | Update README.md (diff)
Diffstat (limited to 'g4f/Provider/DeepInfra.py')
-rw-r--r-- | g4f/Provider/DeepInfra.py | 63 |
1 file changed, 63 insertions, 0 deletions
```diff
diff --git a/g4f/Provider/DeepInfra.py b/g4f/Provider/DeepInfra.py
new file mode 100644
index 00000000..70045cae
--- /dev/null
+++ b/g4f/Provider/DeepInfra.py
@@ -0,0 +1,63 @@
+from __future__ import annotations
+
+import json
+from aiohttp import ClientSession
+
+from ..typing import AsyncResult, Messages
+from .base_provider import AsyncGeneratorProvider
+
+
+class DeepInfra(AsyncGeneratorProvider):
+    url = "https://deepinfra.com"
+    working = True
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        proxy: str = None,
+        **kwargs
+    ) -> AsyncResult:
+        if not model:
+            model = "meta-llama/Llama-2-70b-chat-hf"
+        headers = {
+            "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/118.0",
+            "Accept": "text/event-stream",
+            "Accept-Language": "de,en-US;q=0.7,en;q=0.3",
+            "Accept-Encoding": "gzip, deflate, br",
+            "Referer": f"{cls.url}/",
+            "Content-Type": "application/json",
+            "X-Deepinfra-Source": "web-page",
+            "Origin": cls.url,
+            "Connection": "keep-alive",
+            "Sec-Fetch-Dest": "empty",
+            "Sec-Fetch-Mode": "cors",
+            "Sec-Fetch-Site": "same-site",
+            "Pragma": "no-cache",
+            "Cache-Control": "no-cache",
+        }
+        async with ClientSession(headers=headers) as session:
+            data = {
+                "model": model,
+                "messages": messages,
+                "stream": True,
+            }
+            async with session.post(
+                "https://api.deepinfra.com/v1/openai/chat/completions",
+                json=data,
+                proxy=proxy
+            ) as response:
+                response.raise_for_status()
+                first = True
+                async for line in response.content:
+                    if line.startswith(b"data: [DONE]"):
+                        break
+                    elif line.startswith(b"data: "):
+                        chunk = json.loads(line[6:])["choices"][0]["delta"].get("content")
+                        if chunk:
+                            if first:
+                                chunk = chunk.lstrip()
+                            if chunk:
+                                first = False
+                                yield chunk
\ No newline at end of file
```
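
For reference, a minimal sketch of how the provider added by this commit could be exercised directly. The message payload and the empty `model` argument (which falls back to the `meta-llama/Llama-2-70b-chat-hf` default in the diff) are illustrative assumptions, not part of the commit:

```python
import asyncio

# Import the class added by this commit; the path follows the package layout
# shown in the diff (g4f/Provider/DeepInfra.py).
from g4f.Provider.DeepInfra import DeepInfra


async def main():
    # OpenAI-style message list; an empty model string makes the provider
    # fall back to its default "meta-llama/Llama-2-70b-chat-hf".
    messages = [{"role": "user", "content": "Say hello in one sentence."}]

    # create_async_generator is an async generator: it yields text chunks as
    # they arrive from DeepInfra's server-sent-event stream.
    async for chunk in DeepInfra.create_async_generator(model="", messages=messages):
        print(chunk, end="", flush=True)
    print()


if __name__ == "__main__":
    asyncio.run(main())
```

Because the provider strips leading whitespace only from the first non-empty chunk, the streamed chunks can be concatenated as-is to reassemble the full response.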