From 86e36efe6bbae10286767b44c6a79913e5199de1 Mon Sep 17 00:00:00 2001
From: H Lohaus
Date: Sat, 28 Dec 2024 16:50:08 +0100
Subject: Add Path and PathLike support when uploading images (#2514)

* Add Path and PathLike support when uploading images

Improve raise_for_status in special cases
Move ImageResponse to providers.response module
Improve OpenaiChat and OpenaiAccount providers
Add Sources for web_search in OpenaiChat
Add JsonConversation for import and export conversations to js
Add RequestLogin response type
Add TitleGeneration support in OpenaiChat and gui

* Improve Docker Container Guide in README.md

* Add tool calls api support, add search tool support
---
 g4f/Provider/needs_auth/HuggingFace.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

(limited to 'g4f/Provider/needs_auth/HuggingFace.py')

diff --git a/g4f/Provider/needs_auth/HuggingFace.py b/g4f/Provider/needs_auth/HuggingFace.py
index 6887ac4d..19f33fd0 100644
--- a/g4f/Provider/needs_auth/HuggingFace.py
+++ b/g4f/Provider/needs_auth/HuggingFace.py
@@ -48,6 +48,7 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
         max_new_tokens: int = 1024,
         temperature: float = 0.7,
         prompt: str = None,
+        extra_data: dict = {},
         **kwargs
     ) -> AsyncResult:
         try:
@@ -73,16 +74,16 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
         if api_key is not None:
             headers["Authorization"] = f"Bearer {api_key}"
         payload = None
-        if model in cls.image_models:
+        if cls.get_models() and model in cls.image_models:
             stream = False
             prompt = messages[-1]["content"] if prompt is None else prompt
-            payload = {"inputs": prompt, "parameters": {"seed": random.randint(0, 2**32)}}
+            payload = {"inputs": prompt, "parameters": {"seed": random.randint(0, 2**32), **extra_data}}
         else:
             params = {
                 "return_full_text": False,
                 "max_new_tokens": max_new_tokens,
                 "temperature": temperature,
-                **kwargs
+                **extra_data
             }
         async with StreamSession(
             headers=headers,
--
cgit v1.2.3
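
Usage note (not part of the patch): a minimal sketch of how the new extra_data
argument introduced above could be passed when calling the provider's async
generator directly. The model id, the api_key placeholder, and the extra
generation keys are illustrative assumptions, not values taken from this commit.

import asyncio
from g4f.Provider.needs_auth import HuggingFace

async def main():
    # extra_data is merged into the Hugging Face inference "parameters" payload
    # by the patched create_async_generator; keys like top_p are assumptions.
    async for chunk in HuggingFace.create_async_generator(
        model="meta-llama/Meta-Llama-3-8B-Instruct",  # hypothetical model id
        messages=[{"role": "user", "content": "Hello"}],
        api_key="hf_xxx",  # placeholder token
        extra_data={"top_p": 0.9, "repetition_penalty": 1.1},
    ):
        # Text models stream string chunks; image models yield a response object.
        print(chunk, end="")

asyncio.run(main())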