author    H Lohaus <hlohaus@users.noreply.github.com>  2024-04-06 23:37:41 +0200
committer GitHub <noreply@github.com>                  2024-04-06 23:37:41 +0200
commit    ff13f4f5008cbf56a33ca3248b58cc223b45f4a9 (patch)
tree      3ec420254af4c8ccc56606e9050488d6d760c3ce
parent    Update FlowGpt.py (diff)
-rw-r--r--  g4f/Provider/Aura.py  8
1 file changed, 5 insertions(+), 3 deletions(-)
diff --git a/g4f/Provider/Aura.py b/g4f/Provider/Aura.py
index 877b7fef..4501df2c 100644
--- a/g4f/Provider/Aura.py
+++ b/g4f/Provider/Aura.py
@@ -17,6 +17,8 @@ class Aura(AsyncGeneratorProvider):
         model: str,
         messages: Messages,
         proxy: str = None,
+        temperature: float = 0.5,
+        max_tokens: int = 8192,
         webdriver: WebDriver = None,
         **kwargs
     ) -> AsyncResult:
@@ -34,14 +36,14 @@ class Aura(AsyncGeneratorProvider):
"id": "openchat_v3.2_mistral",
"name": "OpenChat Aura",
"maxLength": 24576,
- "tokenLimit": 8192
+ "tokenLimit": max_tokens
},
"messages": new_messages,
"key": "",
"prompt": "\n".join(system_message),
- "temperature": 0.5
+ "temperature": temperature
}
async with session.post(f"{cls.url}/api/chat", json=data, proxy=proxy) as response:
response.raise_for_status()
async for chunk in response.content.iter_any():
-                    yield chunk.decode()
\ No newline at end of file
+                    yield chunk.decode(errors="ignore")
\ No newline at end of file
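
For context, a minimal sketch of how a caller might exercise the newly exposed temperature and max_tokens parameters, assuming the async generator API shown in the diff and a working browser/webdriver environment as the provider requires; the prompt text and the values 0.7 and 4096 are illustrative, not taken from the commit:

    import asyncio
    from g4f.Provider import Aura

    async def main():
        # Stream a completion from the Aura provider; temperature and
        # max_tokens fall back to 0.5 and 8192 when not supplied.
        async for chunk in Aura.create_async_generator(
            model="openchat_v3.2_mistral",
            messages=[{"role": "user", "content": "Hello"}],
            temperature=0.7,   # illustrative value
            max_tokens=4096,   # illustrative value
        ):
            print(chunk, end="")

    asyncio.run(main())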