From 963e0104ad83c6ca0e961a966bd20e1284bdb337 Mon Sep 17 00:00:00 2001
From: hlohaus <983577+hlohaus@users.noreply.github.com>
Date: Fri, 28 Feb 2025 09:36:15 +0100
Subject: Add gpt-4.5 model

---
 g4f/Provider/openai/models.py  |  2 +-
 g4f/providers/base_provider.py |  2 +-
 g4f/tools/langchain.py         | 41 -----------------------------------------
 3 files changed, 2 insertions(+), 43 deletions(-)
 delete mode 100644 g4f/tools/langchain.py

diff --git a/g4f/Provider/openai/models.py b/g4f/Provider/openai/models.py
index 5207280a..2548205a 100644
--- a/g4f/Provider/openai/models.py
+++ b/g4f/Provider/openai/models.py
@@ -1,6 +1,6 @@
 default_model = "auto"
 default_image_model = "dall-e-3"
 image_models = [default_image_model]
-text_models = [default_model, "gpt-4", "gpt-4o", "gpt-4o-mini", "o1", "o1-preview", "o1-mini", "o3-mini", "o3-mini-high"]
+text_models = [default_model, "gpt-4", "gpt-4.5", "gpt-4o", "gpt-4o-mini", "o1", "o1-preview", "o1-mini", "o3-mini", "o3-mini-high"]
 vision_models = text_models
 models = text_models + image_models
\ No newline at end of file

diff --git a/g4f/providers/base_provider.py b/g4f/providers/base_provider.py
index f16dcc7b..7ae5b44a 100644
--- a/g4f/providers/base_provider.py
+++ b/g4f/providers/base_provider.py
@@ -377,7 +377,7 @@ class RaiseErrorMixin():
                 raise ResponseError(data["error"])
             elif "code" in data["error"]:
                 raise ResponseError("\n".join(
-                    [e for e in [f'Error {data["error"]["code"]}:{data["error"]["message"]}', data["error"].get("failed_generation")] if e is not None]
+                    [e for e in [f'Error {data["error"]["code"]}: {data["error"]["message"]}', data["error"].get("failed_generation")] if e is not None]
                 ))
             elif "message" in data["error"]:
                 raise ResponseError(data["error"]["message"])

diff --git a/g4f/tools/langchain.py b/g4f/tools/langchain.py
deleted file mode 100644
index ecfd7d2f..00000000
--- a/g4f/tools/langchain.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from __future__ import annotations
-
-from typing import Any, Dict
-from langchain_community.chat_models import openai
-from langchain_community.chat_models.openai import ChatOpenAI, BaseMessage, convert_message_to_dict
-from pydantic import Field
-from g4f.client import AsyncClient, Client
-from g4f.client.stubs import ChatCompletionMessage
-
-def new_convert_message_to_dict(message: BaseMessage) -> dict:
-    message_dict: Dict[str, Any]
-    if isinstance(message, ChatCompletionMessage):
-        message_dict = {"role": message.role, "content": message.content}
-        if message.tool_calls is not None:
-            message_dict["tool_calls"] = [{
-                "id": tool_call.id,
-                "type": tool_call.type,
-                "function": tool_call.function
-            } for tool_call in message.tool_calls]
-            if message_dict["content"] == "":
-                message_dict["content"] = None
-    else:
-        message_dict = convert_message_to_dict(message)
-    return message_dict
-
-openai.convert_message_to_dict = new_convert_message_to_dict
-
-class ChatAI(ChatOpenAI):
-    model_name: str = Field(default="gpt-4o", alias="model")
-
-    @classmethod
-    def validate_environment(cls, values: dict) -> dict:
-        client_params = {
-            "api_key": values["g4f_api_key"] if "g4f_api_key" in values else None,
-            "provider": values["g4f_provider"] if "g4f_provider" in values else None,
-        }
-        values["client"] = Client(**client_params).chat.completions
-        values["async_client"] = AsyncClient(
-            **client_params
-        ).chat.completions
-        return values
\ No newline at end of file
--
cgit v1.2.3
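
Note: the patch only registers the "gpt-4.5" id in the OpenAI provider's text_models (and, via the alias, vision_models) list; it does not change how a model is requested. A minimal sketch of exercising the new id through g4f's public client interface is shown below, assuming at least one configured provider actually serves "gpt-4.5":

    from g4f.client import Client

    # Minimal sketch: request the newly listed "gpt-4.5" id through the
    # standard g4f chat completions client. Assumes a provider offering
    # this model id is available in the current setup.
    client = Client()
    response = client.chat.completions.create(
        model="gpt-4.5",
        messages=[{"role": "user", "content": "Say hello in one sentence."}],
    )
    print(response.choices[0].message.content)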