From e2e2f281e8b64fadadcb3d36fac5faa1a4af20f2 Mon Sep 17 00:00:00 2001
From: Luneye <73485421+Luneye@users.noreply.github.com>
Date: Sun, 27 Aug 2023 18:58:36 +0200
Subject: Update Bing.py

Removed unnecessary check that could prematurely stop the AI's response while answering
---
 g4f/Provider/Bing.py | 122 ++++++++++++++++++++++++++++++++-------------------
 1 file changed, 76 insertions(+), 46 deletions(-)

(limited to 'g4f')

diff --git a/g4f/Provider/Bing.py b/g4f/Provider/Bing.py
index 2c2e60ad..4056916e 100644
--- a/g4f/Provider/Bing.py
+++ b/g4f/Provider/Bing.py
@@ -8,33 +8,65 @@ import asyncio
 from aiohttp import ClientSession
 
 from ..typing import Any, AsyncGenerator, CreateResult, Union
-from .base_provider import AsyncGeneratorProvider, get_cookies
+from .base_provider import BaseProvider
 
 
-class Bing(AsyncGeneratorProvider):
+class Bing(BaseProvider):
     url = "https://bing.com/chat"
-    needs_auth = True
-    working = True
     supports_gpt_4 = True
+    working=True
+    supports_stream=True
-    
+
     @staticmethod
-    def create_async_generator(
-        model: str,
-        messages: list[dict[str, str]],
-        cookies: dict = get_cookies(".bing.com"),
-        **kwargs
-    ) -> AsyncGenerator:
-        if len(messages) < 2:
-            prompt = messages[0]["content"]
-            context = None
-
-        else:
-            prompt = messages[-1]["content"]
-            context = create_context(messages[:-1])
-
-        return stream_generate(prompt, context, cookies)
-
-def create_context(messages: list[dict[str, str]]):
+    def create_completion(
+        model: str,
+        messages: list[dict[str, str]],
+        stream: bool,
+        **kwargs: Any
+    ) -> CreateResult:
+        yield from run(create(messages, **kwargs))
+
+def create(
+    messages: list[dict[str, str]],
+    cookies: dict = {}
+):
+    if len(messages) < 2:
+        prompt = messages[0]["content"]
+        context = None
+
+    else:
+        prompt = messages[-1]["content"]
+        context = convert(messages[:-1])
+
+    if not cookies:
+        cookies = {
+            'MUID': '',
+            'BCP': '',
+            'MUIDB': '',
+            'USRLOC': '',
+            'SRCHD': 'AF=hpcodx',
+            'MMCASM': '',
+            '_UR': '',
+            'ANON': '',
+            'NAP': '',
+            'ABDEF': '',
+            'PPLState': '1',
+            'KievRPSSecAuth': '',
+            '_U': '',
+            'SUID': '',
+            '_EDGE_S': '',
+            'WLS': '',
+            '_HPVN': '',
+            '_SS': '',
+            '_clck': '',
+            'SRCHUSR': '',
+            '_RwBf': '',
+            'SRCHHPGUSR': '',
+            'ipv6': '',
+        }
+
+    return stream_generate(prompt, context, cookies)
+
+def convert(messages: list[dict[str, str]]):
     context = ""
 
     for message in messages:
@@ -159,32 +191,34 @@ class Defaults:
         'x-forwarded-for': ip_address,
     }
 
-    optionsSets = [
-        'saharasugg',
-        'enablenewsfc',
-        'clgalileo',
-        'gencontentv3',
-        "nlu_direct_response_filter",
-        "deepleo",
-        "disable_emoji_spoken_text",
-        "responsible_ai_policy_235",
-        "enablemm",
-        "h3precise"
-        "dtappid",
-        "cricinfo",
-        "cricinfov2",
-        "dv3sugg",
-        "nojbfedge"
-    ]
+    optionsSets = {
+        "optionsSets": [
+            'saharasugg',
+            'enablenewsfc',
+            'clgalileo',
+            'gencontentv3',
+            "nlu_direct_response_filter",
+            "deepleo",
+            "disable_emoji_spoken_text",
+            "responsible_ai_policy_235",
+            "enablemm",
+            "h3precise"
+            "dtappid",
+            "cricinfo",
+            "cricinfov2",
+            "dv3sugg",
+            "nojbfedge"
+        ]
+    }
 
-def format_message(message: dict) -> str:
-    return json.dumps(message, ensure_ascii=False) + Defaults.delimiter
+def format_message(msg: dict) -> str:
+    return json.dumps(msg, ensure_ascii=False) + Defaults.delimiter
 
 def create_message(conversation: Conversation, prompt: str, context: str=None) -> str:
     struct = {
         'arguments': [
             {
-                'optionsSets': Defaults.optionsSets,
+                **Defaults.optionsSets,
                 'source': 'cib',
                 'allowedMessageTypes': Defaults.allowedMessageTypes,
                 'sliceIds': Defaults.sliceIds,
@@ -264,10 +298,6 @@ async def stream_generate(
                         response_txt += inline_txt + '\n'
                         result_text += inline_txt + '\n'
 
-                if returned_text.endswith(' '):
-                    final = True
-                    break
-
                 if response_txt.startswith(returned_text):
                     new = response_txt[len(returned_text):]
                     if new != "\n":
--
cgit v1.2.3
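
The last hunk is the change named in the subject line: the streaming loop no longer treats a response that currently ends in whitespace as final, so generation is not cut off mid-answer. Below is a minimal, self-contained sketch of that delta-streaming pattern, for illustration only; the helper name yield_deltas and the sample snapshots are assumptions, not part of g4f's Bing provider.

from typing import Iterable, Iterator


def yield_deltas(snapshots: Iterable[str]) -> Iterator[str]:
    """Yield only the text added since the previous cumulative snapshot."""
    returned_text = ""
    for response_txt in snapshots:
        # The provider compares the cumulative text against what it has already
        # yielded and emits only the new suffix. The removed
        # returned_text.endswith(' ') check would have ended this loop as soon
        # as the yielded text happened to end in a space, even mid-answer.
        if response_txt.startswith(returned_text):
            new = response_txt[len(returned_text):]
            if new and new != "\n":
                yield new
                returned_text = response_txt


if __name__ == "__main__":
    # Hypothetical cumulative snapshots, as a Bing-style stream might deliver them.
    snapshots = ["Hello", "Hello, wor", "Hello, world! ", "Hello, world! More text."]
    for delta in yield_deltas(snapshots):
        print(repr(delta))

With the old check in place, the third snapshot above (which ends in a space) would have been treated as the final message; without it, streaming continues until the service itself marks the message as final.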