path: root/g4f/Provider/FlowGpt.py
author     Heiner Lohaus <hlohaus@users.noreply.github.com>  2024-02-27 11:55:40 +0100
committer  Heiner Lohaus <hlohaus@users.noreply.github.com>  2024-02-27 11:55:40 +0100
commit     84812b9632cae2dc4811222a2f31d42cb807a221 (patch)
tree       f7dda40a968be33ee87e78187a2bf41bb78cf973 /g4f/Provider/FlowGpt.py
parent     Add support for message history and system message in OpenaiChat (diff)
Diffstat
-rw-r--r--  g4f/Provider/FlowGpt.py  8
1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/g4f/Provider/FlowGpt.py b/g4f/Provider/FlowGpt.py
index 39192bf9..b466a2e6 100644
--- a/g4f/Provider/FlowGpt.py
+++ b/g4f/Provider/FlowGpt.py
@@ -51,12 +51,16 @@ class FlowGpt(AsyncGeneratorProvider, ProviderModelMixin):
"TE": "trailers"
}
async with ClientSession(headers=headers) as session:
+ history = [message for message in messages[:-1] if message["role"] != "system"]
+ system_message = "\n".join([message["content"] for message in messages if message["role"] == "system"])
+ if not system_message:
+ system_message = "You are helpful assistant. Follow the user's instructions carefully."
data = {
"model": model,
"nsfw": False,
"question": messages[-1]["content"],
- "history": [{"role": "assistant", "content": "Hello, how can I help you today?"}, *messages[:-1]],
- "system": kwargs.get("system_message", "You are helpful assistant. Follow the user's instructions carefully."),
+ "history": [{"role": "assistant", "content": "Hello, how can I help you today?"}, *history],
+ "system": system_message,
"temperature": kwargs.get("temperature", 0.7),
"promptId": f"model-{model}",
"documentIds": [],