Diffstat
-rw-r--r--  g4f/Provider/GigaChat.py                                        | 2
-rw-r--r--  g4f/local/__init__.py                                           | 6
-rw-r--r--  g4f/local/_engine.py (renamed from g4f/local/core/engine.py)    | 8
-rw-r--r--  g4f/local/_models.py (renamed from g4f/local/core/models.py)    | 0
4 files changed, 8 insertions, 8 deletions
diff --git a/g4f/Provider/GigaChat.py b/g4f/Provider/GigaChat.py
index c1ec7f5e..8ba07b43 100644
--- a/g4f/Provider/GigaChat.py
+++ b/g4f/Provider/GigaChat.py
@@ -35,7 +35,7 @@ class GigaChat(AsyncGeneratorProvider, ProviderModelMixin):
         stream: bool = True,
         proxy: str = None,
         api_key: str = None,
-        coonector: BaseConnector = None,
+        connector: BaseConnector = None,
         scope: str = "GIGACHAT_API_PERS",
         update_interval: float = 0,
         **kwargs
diff --git a/g4f/local/__init__.py b/g4f/local/__init__.py
index cc678dc6..c9d3d74a 100644
--- a/g4f/local/__init__.py
+++ b/g4f/local/__init__.py
@@ -1,8 +1,8 @@
 from ..typing import Union, Iterator, Messages
 from ..stubs import ChatCompletion, ChatCompletionChunk
-from .core.engine import LocalProvider
-from .core.models import models
-from ..client import iter_response, filter_none, IterResponse
+from ._engine import LocalProvider
+from ._models import models
+from ..client import iter_response, filter_none, IterResponse
 
 class LocalClient():
     def __init__(self, **kwargs) -> None:
diff --git a/g4f/local/core/engine.py b/g4f/local/_engine.py
index 920ed9b4..917de16c 100644
--- a/g4f/local/core/engine.py
+++ b/g4f/local/_engine.py
@@ -1,7 +1,7 @@
 import os
 
-from gpt4all import GPT4All
-from .models import models
+from gpt4all import GPT4All
+from ._models import models
 
 class LocalProvider:
     @staticmethod
@@ -10,7 +10,7 @@ class LocalProvider:
             raise ValueError(f"Model '{model}' not found / not yet implemented")
 
         model = models[model]
-        model_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../models/')
+        model_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'models/')
         full_model_path = os.path.join(model_dir, model['path'])
 
         if not os.path.isfile(full_model_path):
@@ -23,7 +23,7 @@ class LocalProvider:
             raise ValueError(f"Model '{model['path']}' not found.")
 
         model = GPT4All(model_name=model['path'],
-                        n_threads=8,
+                        #n_threads=8,
                         verbose=False,
                         allow_download=False,
                         model_path=model_dir)
diff --git a/g4f/local/core/models.py b/g4f/local/_models.py
index ec36fe41..ec36fe41 100644
--- a/g4f/local/core/models.py
+++ b/g4f/local/_models.py
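For context, the renames only move internal module paths (g4f/local/core/* becomes g4f/local/_engine.py and g4f/local/_models.py); the public entry point remains the LocalClient class in g4f/local/__init__.py. A minimal usage sketch, assuming LocalClient mirrors the OpenAI-style chat.completions.create() interface used elsewhere in g4f and that the corresponding GPT4All weights already exist under g4f/local/models/ (the model key below is illustrative, not taken from this commit):

# Minimal sketch, not part of this commit. Assumes LocalClient exposes an
# OpenAI-style chat.completions.create() wrapper and that the referenced
# GPT4All model file is already present under g4f/local/models/.
from g4f.local import LocalClient

client = LocalClient()
response = client.chat.completions.create(
    messages=[{"role": "user", "content": "Say hello."}],
    model="mistral-7b",  # illustrative key; must exist in _models.models
    stream=False,
)
print(response.choices[0].message.content)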