From bc6262cc79a5c98cf627650f64d6f9d8729addee Mon Sep 17 00:00:00 2001
From: hlohaus <983577+hlohaus@users.noreply.github.com>
Date: Fri, 21 Feb 2025 08:39:04 +0100
Subject: Add Documentation for PydanticAI support

---
 docs/pydantic_ai.md                    | 95 ++++++++++++++++++++++++++++++++++
 g4f/Provider/needs_auth/DeepSeekAPI.py | 49 ++++----------
 g4f/gui/server/backend_api.py          |  2 +-
 3 files changed, 106 insertions(+), 40 deletions(-)
 create mode 100644 docs/pydantic_ai.md

diff --git a/docs/pydantic_ai.md b/docs/pydantic_ai.md
new file mode 100644
index 00000000..2fc5f577
--- /dev/null
+++ b/docs/pydantic_ai.md
@@ -0,0 +1,95 @@
+# PydanticAI Integration with G4F Client
+
+This README provides an overview of how to integrate PydanticAI with the G4F client to create an agent that interacts with a language model. With this setup, you'll be able to apply patches to use PydanticAI models, enable debugging, and run simple agent-based interactions synchronously. However, please note that tool calls within AI requests are currently **not fully supported** in this environment.
+
+## Requirements
+
+Before starting, make sure you have the following Python dependencies installed:
+
+- `g4f`: A client that interfaces with various LLMs.
+- `pydantic_ai`: The PydanticAI agent framework, built on top of Pydantic.
+
+### Installation
+
+To install these dependencies, you can use `pip`:
+
+```bash
+pip install g4f pydantic_ai
+```
+
+## Step-by-Step Setup
+
+### 1. Patch G4F to Use PydanticAI Models
+
+To use PydanticAI models with G4F, you need to apply the necessary patch to the client. This is done by importing `apply_patch` from `g4f.tools.pydantic_ai`. The `api_key` parameter is optional: if you have one, you can provide it; if not, the system will proceed without it.
+
+```python
+from g4f.tools.pydantic_ai import apply_patch
+
+apply_patch(api_key="your_api_key_here")  # Optional
+```
+
+If you don't have an API key, simply omit the `api_key` argument.
+
+### 2. Enable Debug Logging
+
+For troubleshooting and monitoring purposes, you may want to enable debug logging. This can be achieved by setting `g4f.debug.logging` to `True`.
+
+```python
+import g4f.debug
+
+g4f.debug.logging = True
+```
+
+This will log detailed information about the internal processes and interactions.
+
+### 3. Create a Simple Agent
+
+Now you are ready to create a simple agent that can interact with the LLM. The agent is initialized with a model, and you can also define a system prompt. Here's an example where a basic agent is created with the model `g4f:Gemini:Gemini` and a simple system prompt:
+
+```python
+from pydantic_ai import Agent
+
+# Define the agent
+agent = Agent(
+    'g4f:Gemini:Gemini',
+    system_prompt='Be concise, reply with one sentence.',
+)
+```
+
+### 4. Run the Agent Synchronously
+
+Once the agent is set up, you can run it synchronously to interact with the LLM. The `run_sync` method sends a query to the LLM and returns the result.
+
+```python
+# Run the agent synchronously with a user query
+result = agent.run_sync('Where does "hello world" come from?')
+
+# Output the response
+print(result.data)
+```
+
+In this example, the agent will send the system prompt along with the user query (`"Where does 'hello world' come from?"`) to the LLM. The LLM will process the request and return a concise answer.
+
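+`run_sync` is PydanticAI's blocking convenience wrapper; if you are already inside async code, the same call can be made with the asynchronous `Agent.run` method. The following is a minimal sketch, assuming the patch from step 1 has been applied and the `agent` from step 3 is defined:
+
+```python
+import asyncio
+
+async def main():
+    # Same query as above, awaited instead of blocking
+    result = await agent.run('Where does "hello world" come from?')
+    print(result.data)
+
+asyncio.run(main())
+```
+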
+### Example Output
+
+```bash
+The phrase "hello world" is commonly used in programming tutorials to demonstrate basic syntax and the concept of outputting text to the screen.
+```
+
+## Tool Calls and Limitations
+
+**Important**: Tool calls (such as applying external functions or calling APIs within the AI request itself) are **currently not fully supported**. If your system relies on invoking specific external tools or functions during the conversation with the model, you will need to implement this functionality outside the agent's context, or handle it before or after the agent's request.
+
+For example, you can process your query or interact with external systems before passing the data to the agent.
+
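+A minimal sketch of this pattern is shown below, reusing the `agent` from step 3; `fetch_context` is a hypothetical placeholder for whatever external lookup or tool your application performs:
+
+```python
+def fetch_context(query: str) -> str:
+    # Hypothetical external lookup (database, web API, file, ...) done outside the agent
+    return "Background: 'hello world' is traditionally traced to 1970s programming tutorials."
+
+# Enrich the prompt before handing it to the agent, instead of relying on tool calls
+query = 'Where does "hello world" come from?'
+result = agent.run_sync(f"{fetch_context(query)}\n\n{query}")
+print(result.data)
+```
+
+This keeps external calls fully under your control: the agent only ever sees the enriched prompt.
+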
+## Conclusion
+
+By following these steps, you have successfully integrated PydanticAI models into the G4F client, created an agent, and enabled debugging. This allows you to conduct conversations with the language model, pass system prompts, and retrieve responses synchronously.
+
+### Notes:
+- The `api_key` parameter when calling `apply_patch` is optional. If you don’t provide it, the system will still work without an API key.
+- Modify the agent’s `system_prompt` to suit the nature of the conversation you wish to have.
+- **Tool calls within AI requests are not fully supported** at the moment. Use the agent's basic functionality for generating responses and handle external calls separately.
+
+For further customization and advanced use cases, refer to the G4F and PydanticAI documentation.
\ No newline at end of file
diff --git a/g4f/Provider/needs_auth/DeepSeekAPI.py b/g4f/Provider/needs_auth/DeepSeekAPI.py
index 0c120266..8e007048 100644
--- a/g4f/Provider/needs_auth/DeepSeekAPI.py
+++ b/g4f/Provider/needs_auth/DeepSeekAPI.py
@@ -6,58 +6,27 @@
 import time
 from typing import AsyncIterator
 import asyncio

-from ..base_provider import AsyncAuthedProvider
+from ..base_provider import AsyncAuthedProvider, ProviderModelMixin
 from ...providers.helper import get_last_user_message
-from ... import requests
-from ...errors import MissingAuthError
 from ...requests import get_args_from_nodriver, get_nodriver
 from ...providers.response import AuthResult, RequestLogin, Reasoning, JsonConversation, FinishReason
 from ...typing import AsyncResult, Messages

 try:
-    from curl_cffi import requests
-    from dsk.api import DeepSeekAPI, AuthenticationError, DeepSeekPOW
-
-    class DeepSeekAPIArgs(DeepSeekAPI):
-        def __init__(self, args: dict):
-            self.auth_token = args.pop("api_key")
-            if not self.auth_token or not isinstance(self.auth_token, str):
-                raise AuthenticationError("Invalid auth token provided")
-            self.args = args
-            self.pow_solver = DeepSeekPOW()
-
-        def _make_request(self, method: str, endpoint: str, json_data: dict, pow_required: bool = False, **kwargs):
-            url = f"{self.BASE_URL}{endpoint}"
-            headers = self._get_headers()
-            if pow_required:
-                challenge = self._get_pow_challenge()
-                pow_response = self.pow_solver.solve_challenge(challenge)
-                headers = self._get_headers(pow_response)
-
-            response = requests.request(
-                method=method,
-                url=url,
-                json=json_data, **{
-                    **self.args,
-                    "headers": {**headers, **self.args["headers"]},
-                    "timeout":None,
-                },
-                **kwargs
-            )
-            if response.status_code == 403:
-                raise MissingAuthError()
-            response.raise_for_status()
-            return response.json()
+    from dsk.api import DeepSeekAPI as DskAPI
     has_dsk = True
 except ImportError:
     has_dsk = False

-class DeepSeekAPI(AsyncAuthedProvider):
+class DeepSeekAPI(AsyncAuthedProvider, ProviderModelMixin):
     url = "https://chat.deepseek.com"
     working = has_dsk
     needs_auth = True
     use_nodriver = True
     _access_token = None
+    default_model = "deepseek-v3"
+    models = ["deepseek-v3", "deepseek-r1"]
+
     @classmethod
     async def on_auth_async(cls, proxy: str = None, **kwargs) -> AsyncIterator:
         if not hasattr(cls, "browser"):
@@ -82,10 +51,11 @@ class DeepSeekAPI(AsyncAuthedProvider):
         messages: Messages,
         auth_result: AuthResult,
         conversation: JsonConversation = None,
+        web_search: bool = False,
         **kwargs
     ) -> AsyncResult:
         # Initialize with your auth token
-        api = DeepSeekAPIArgs(auth_result.get_dict())
+        api = DskAPI(auth_result.get_dict())

         # Create a new chat session
         if conversation is None:
@@ -97,7 +67,8 @@ class DeepSeekAPI(AsyncAuthedProvider):
         for chunk in api.chat_completion(
             conversation.chat_id,
             get_last_user_message(messages),
-            thinking_enabled=True
+            thinking_enabled="deepseek-r1" in model,
+            search_enabled=web_search
         ):
             if chunk['type'] == 'thinking':
                 if not is_thinking:
diff --git a/g4f/gui/server/backend_api.py b/g4f/gui/server/backend_api.py
index 533a4d2f..05cdb726 100644
--- a/g4f/gui/server/backend_api.py
+++ b/g4f/gui/server/backend_api.py
@@ -371,7 +371,7 @@ class Backend_Api(Api):
                 return jsonify({"error": {"message": f"Error uploading file: {str(e)}"}}), 500

         @app.route('/backend-api/v2/upload_cookies', methods=['POST'])
-        def upload_cookies(self):
+        def upload_cookies():
             file = None
             if "file" in request.files:
                 file = request.files['file']
-- 
cgit v1.2.3