summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorCommenter123321 <36051603+Commenter123321@users.noreply.github.com>2023-10-10 14:22:13 +0200
committerCommenter123321 <36051603+Commenter123321@users.noreply.github.com>2023-10-10 14:22:13 +0200
commit3de672d0239e48375d029db2b224eff4515fc097 (patch)
tree12226f2b5a8f5b6017a90be4c7e61cd1145df1f0
parentMerge remote-tracking branch 'origin/main' (diff)
downloadgpt4free-3de672d0239e48375d029db2b224eff4515fc097.tar
gpt4free-3de672d0239e48375d029db2b224eff4515fc097.tar.gz
gpt4free-3de672d0239e48375d029db2b224eff4515fc097.tar.bz2
gpt4free-3de672d0239e48375d029db2b224eff4515fc097.tar.lz
gpt4free-3de672d0239e48375d029db2b224eff4515fc097.tar.xz
gpt4free-3de672d0239e48375d029db2b224eff4515fc097.tar.zst
gpt4free-3de672d0239e48375d029db2b224eff4515fc097.zip
-rw-r--r--g4f/Provider/Aivvm.py9
1 file changed, 6 insertions, 3 deletions
diff --git a/g4f/Provider/Aivvm.py b/g4f/Provider/Aivvm.py
index ac15ac16..05f12320 100644
--- a/g4f/Provider/Aivvm.py
+++ b/g4f/Provider/Aivvm.py
@@ -44,11 +44,13 @@ class Aivvm(BaseProvider):
"temperature" : kwargs.get("temperature", 0.7)
}
+ data = dumps(json_data)
+
headers = {
"accept" : "text/event-stream",
"accept-language" : "en-US,en;q=0.9",
"content-type" : "application/json",
- "content-length" : str(len(dumps(json_data))),
+ "content-length" : str(len(data)),
"sec-ch-ua" : "\"Chrome\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Chromium\";v=\"117\"",
"sec-ch-ua-mobile" : "?0",
"sec-ch-ua-platform": "\"Windows\"",
@@ -56,10 +58,11 @@ class Aivvm(BaseProvider):
"sec-fetch-mode" : "cors",
"sec-fetch-site" : "same-origin",
"sec-gpc" : "1",
- "referrer" : "https://chat.aivvm.com/"
+ "referrer" : "https://chat.aivvm.com/",
+ "user-agent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36"
}
- response = requests.post("https://chat.aivvm.com/api/chat", headers=headers, json=json_data, stream=True)
+ response = requests.post("https://chat.aivvm.com/api/chat", headers=headers, data=data, stream=True)
response.raise_for_status()
for chunk in response.iter_content():