author    | abc <98614666+xtekky@users.noreply.github.com> | 2024-03-11 19:31:13 +0100
committer | abc <98614666+xtekky@users.noreply.github.com> | 2024-03-11 19:31:13 +0100
commit    | 479ef89f4c3bc284cf50395481f2ef807c2f90ee (patch)
tree      | 45bda032789e1544ce47a6deb40680cfdc13b035 /g4f/local/__init__.py
parent    | ~ | support local llm inference (diff)
Diffstat (limited to 'g4f/local/__init__.py')
-rw-r--r-- | g4f/local/__init__.py | 73
1 file changed, 5 insertions, 68 deletions
diff --git a/g4f/local/__init__.py b/g4f/local/__init__.py
index 626643fc..cc678dc6 100644
--- a/g4f/local/__init__.py
+++ b/g4f/local/__init__.py
@@ -1,75 +1,13 @@
-import random, string, time, re
-
 from ..typing import Union, Iterator, Messages
 from ..stubs import ChatCompletion, ChatCompletionChunk
 from .core.engine import LocalProvider
 from .core.models import models
-
-IterResponse = Iterator[Union[ChatCompletion, ChatCompletionChunk]]
-
-def read_json(text: str) -> dict:
-    match = re.search(r"```(json|)\n(?P<code>[\S\s]+?)\n```", text)
-    if match:
-        return match.group("code")
-    return text
-
-def iter_response(
-    response: Iterator[str],
-    stream: bool,
-    response_format: dict = None,
-    max_tokens: int = None,
-    stop: list = None
-) -> IterResponse:
-
-    content = ""
-    finish_reason = None
-    completion_id = ''.join(random.choices(string.ascii_letters + string.digits, k=28))
-    for idx, chunk in enumerate(response):
-        content += str(chunk)
-        if max_tokens is not None and idx + 1 >= max_tokens:
-            finish_reason = "length"
-        first = -1
-        word = None
-        if stop is not None:
-            for word in list(stop):
-                first = content.find(word)
-                if first != -1:
-                    content = content[:first]
-                    break
-            if stream and first != -1:
-                first = chunk.find(word)
-                if first != -1:
-                    chunk = chunk[:first]
-                else:
-                    first = 0
-        if first != -1:
-            finish_reason = "stop"
-        if stream:
-            yield ChatCompletionChunk(chunk, None, completion_id, int(time.time()))
-        if finish_reason is not None:
-            break
-    finish_reason = "stop" if finish_reason is None else finish_reason
-    if stream:
-        yield ChatCompletionChunk(None, finish_reason, completion_id, int(time.time()))
-    else:
-        if response_format is not None and "type" in response_format:
-            if response_format["type"] == "json_object":
-                content = read_json(content)
-        yield ChatCompletion(content, finish_reason, completion_id, int(time.time()))
-
-def filter_none(**kwargs):
-    for key in list(kwargs.keys()):
-        if kwargs[key] is None:
-            del kwargs[key]
-    return kwargs
+from ..client import iter_response, filter_none, IterResponse
 
 class LocalClient():
-    def __init__(
-        self,
-        **kwargs
-    ) -> None:
+    def __init__(self, **kwargs) -> None:
         self.chat: Chat = Chat(self)
-
+
     @staticmethod
     def list_models():
         return list(models.keys())
@@ -100,10 +38,9 @@ class Completions():
         )
         response = iter_response(response, stream, response_format, max_tokens, stop)
         return response if stream else next(response)
-
+
 class Chat():
     completions: Completions
 
     def __init__(self, client: LocalClient):
-        self.completions = Completions(client)
-
\ No newline at end of file
+        self.completions = Completions(client)
\ No newline at end of file
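For orientation, here is a minimal usage sketch of the client this file exposes after the refactor. Only the tail of Completions.create is visible in this diff, so the keyword arguments below (model, messages, stream) are assumptions modelled on the OpenAI-style g4f client interface, and the model name is a placeholder.

# Minimal sketch, not part of the commit. Assumes Completions.create accepts
# OpenAI-style keyword arguments (model, messages, stream, ...); only the end
# of that method appears in this diff, so the exact signature is unverified.
from g4f.local import LocalClient

client = LocalClient()

# list_models() is a @staticmethod returning the keys of core.models.models.
print(LocalClient.list_models())

# Non-streaming: iter_response(...) is consumed with next(), so a single
# ChatCompletion object is returned.
completion = client.chat.completions.create(
    model="orca-mini-3b",  # placeholder name for a locally available model
    messages=[{"role": "user", "content": "Hello"}],
)
print(completion)

# Streaming: the unconsumed generator yields ChatCompletionChunk objects
# until a finish_reason is produced.
for chunk in client.chat.completions.create(
    model="orca-mini-3b",  # placeholder
    messages=[{"role": "user", "content": "Hello"}],
    stream=True,
):
    print(chunk)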