author    H Lohaus <hlohaus@users.noreply.github.com>  2023-12-06 12:02:41 +0100
committer GitHub <noreply@github.com>                  2023-12-06 12:02:41 +0100
commit    f962993b8dac225d85f9fbb59be188e263af1c0b (patch)
tree      ae153b027ad5a2679b4f1fde881d61cc0e1276b4 /g4f/Provider/needs_auth/HuggingChat.py
parent    ~ | g4f `v-0.1.9.1` (diff)
parent    Improve docker image (diff)
Diffstat (limited to 'g4f/Provider/needs_auth/HuggingChat.py')
-rw-r--r--  g4f/Provider/needs_auth/HuggingChat.py | 6
1 file changed, 2 insertions(+), 4 deletions(-)
diff --git a/g4f/Provider/needs_auth/HuggingChat.py b/g4f/Provider/needs_auth/HuggingChat.py
index 59e2da73..530069c0 100644
--- a/g4f/Provider/needs_auth/HuggingChat.py
+++ b/g4f/Provider/needs_auth/HuggingChat.py
@@ -11,7 +11,6 @@ from ..helper import format_prompt, get_cookies
 class HuggingChat(AsyncGeneratorProvider):
     url = "https://huggingface.co/chat"
-    needs_auth = True
     working = True
     model = "meta-llama/Llama-2-70b-chat-hf"
@@ -22,12 +21,11 @@ class HuggingChat(AsyncGeneratorProvider):
         messages: Messages,
         stream: bool = True,
         proxy: str = None,
+        web_search: bool = False,
         cookies: dict = None,
         **kwargs
     ) -> AsyncResult:
         model = model if model else cls.model
-        if proxy and "://" not in proxy:
-            proxy = f"http://{proxy}"
         if not cookies:
             cookies = get_cookies(".huggingface.co")
@@ -46,7 +44,7 @@ class HuggingChat(AsyncGeneratorProvider):
                 "inputs": format_prompt(messages),
                 "is_retry": False,
                 "response_id": str(uuid.uuid4()),
-                "web_search": False
+                "web_search": web_search
             }
             async with session.post(f"{cls.url}/conversation/{conversation_id}", json=send, proxy=proxy) as response:
                 async for line in response.content:
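
For reference, a minimal usage sketch of the new web_search keyword introduced by this commit. It assumes the patched method is the provider's create_async_generator entry point (consistent with AsyncGeneratorProvider), that the class is importable from the file location shown above, and that get_cookies(".huggingface.co") can find a logged-in Hugging Face session; none of these details appear in the diff itself.

import asyncio

# Import path assumed from the file location g4f/Provider/needs_auth/HuggingChat.py.
from g4f.Provider.needs_auth.HuggingChat import HuggingChat

async def main():
    # Sketch only: forward the new web_search flag; per the last hunk it is
    # placed into the request payload as "web_search": web_search.
    async for chunk in HuggingChat.create_async_generator(
        model="meta-llama/Llama-2-70b-chat-hf",  # default from the class attribute
        messages=[{"role": "user", "content": "Summarize today's AI news"}],
        web_search=True,  # keyword argument added by this commit
    ):
        print(chunk, end="", flush=True)

asyncio.run(main())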