From f554018da26e32cd10c752162fbe2db0c210aadc Mon Sep 17 00:00:00 2001
From: nullstreak <139914347+nullstreak@users.noreply.github.com>
Date: Fri, 15 Dec 2023 23:58:13 +0100
Subject: HuggingChat: Strip leading whitespace from the first token in the
 stream

For some reason, the first token in the HuggingChat stream always starts
with whitespace. Strip the leading whitespace from the first token in the
stream to fix this.
---
 g4f/Provider/needs_auth/HuggingChat.py | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/g4f/Provider/needs_auth/HuggingChat.py b/g4f/Provider/needs_auth/HuggingChat.py
index 530069c0..41c938b4 100644
--- a/g4f/Provider/needs_auth/HuggingChat.py
+++ b/g4f/Provider/needs_auth/HuggingChat.py
@@ -47,14 +47,19 @@ class HuggingChat(AsyncGeneratorProvider):
                 "web_search": web_search
             }
             async with session.post(f"{cls.url}/conversation/{conversation_id}", json=send, proxy=proxy) as response:
+                first_token = True
                 async for line in response.content:
                     line = json.loads(line[:-1])
                     if "type" not in line:
                         raise RuntimeError(f"Response: {line}")
                     elif line["type"] == "stream":
-                        yield line["token"]
+                        token = line["token"]
+                        if first_token:
+                            token = token.lstrip()
+                            first_token = False
+                        yield token
                     elif line["type"] == "finalAnswer":
                         break
 
             async with session.delete(f"{cls.url}/conversation/{conversation_id}", proxy=proxy) as response:
-                response.raise_for_status()
\ No newline at end of file
+                response.raise_for_status()
-- 
cgit v1.2.3
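
Note (not part of the patch): a minimal, self-contained sketch of the same
first-token-lstrip pattern applied to a generic async token stream. The
fake_stream helper and its token values are hypothetical and only illustrate
the control flow used in the diff above.

import asyncio

async def fake_stream():
    # Hypothetical stand-in for response.content: the first token carries
    # the stray leading whitespace observed with HuggingChat.
    for token in [" Hello", ",", " world", "!"]:
        yield token

async def stream_tokens(source):
    # Strip leading whitespace from the first token only, then pass every
    # later token through unchanged, preserving intra-message spacing.
    first_token = True
    async for token in source:
        if first_token:
            token = token.lstrip()
            first_token = False
        yield token

async def main():
    async for token in stream_tokens(fake_stream()):
        print(repr(token))

asyncio.run(main())

Using a boolean flag rather than calling lstrip() on every token keeps the
spaces between words intact; only the spurious whitespace at the very start
of the reply is removed.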