 .github/workflows/publish-workflow.yaml                      | 5 +++++
 README.md                                                    | 3 +++
 docker/Dockerfile                                            | 6 +++---
 g4f/Provider/GigaChat.py                                     | 2 +-
 g4f/local/__init__.py                                        | 6 +++---
 g4f/local/_engine.py (renamed from g4f/local/core/engine.py) | 8 ++++----
 g4f/local/_models.py (renamed from g4f/local/core/models.py) | 0
 setup.py                                                     | 2 +-
 8 files changed, 20 insertions(+), 12 deletions(-)
diff --git a/.github/workflows/publish-workflow.yaml b/.github/workflows/publish-workflow.yaml
index 634a5b31..bfc0b735 100644
--- a/.github/workflows/publish-workflow.yaml
+++ b/.github/workflows/publish-workflow.yaml
@@ -11,6 +11,10 @@ jobs:
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
       - name: Get metadata for Docker
         id: metadata
@@ -38,6 +42,7 @@ jobs:
         with:
           context: .
           file: docker/Dockerfile
+          platforms: linux/amd64,linux/arm64
           push: true
           tags: ${{ steps.metadata.outputs.tags }}
           labels: ${{ steps.metadata.outputs.labels }}
diff --git a/README.md b/README.md
--- a/README.md
+++ b/README.md
@@ -1,4 +1,7 @@
 ![248433934-7886223b-c1d1-4260-82aa-da5741f303bb](https://github.com/xtekky/gpt4free/assets/98614666/ea012c87-76e0-496a-8ac4-e2de090cc6c9)
+
+<a href="https://trendshift.io/repositories/1692" target="_blank"><img src="https://trendshift.io/api/badge/repositories/1692" alt="xtekky%2Fgpt4free | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
+
 Written by [@xtekky](https://github.com/hlohaus) & maintained by [@hlohaus](https://github.com/hlohaus)
 
 <div id="top"></div>
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 88e21b18..8b2d5b7b 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -1,4 +1,4 @@
-FROM selenium/node-chrome
+FROM seleniarm/node-chromium
 
 ARG G4F_VERSION
 ARG G4F_USER=g4f
@@ -81,10 +81,10 @@ WORKDIR $G4F_DIR
 COPY requirements.txt $G4F_DIR
 
 # Upgrade pip for the latest features and install the project's Python dependencies.
-RUN pip install --upgrade pip && pip install -r requirements.txt
+RUN pip install --break-system-packages --upgrade pip && pip install --break-system-packages -r requirements.txt
 
 # Copy the entire package into the container.
 ADD --chown=$G4F_USER:$G4F_USER g4f $G4F_DIR/g4f
 
 # Expose ports
-EXPOSE 8080 1337
\ No newline at end of file
+EXPOSE 8080 1337
diff --git a/g4f/Provider/GigaChat.py b/g4f/Provider/GigaChat.py
index c1ec7f5e..8ba07b43 100644
--- a/g4f/Provider/GigaChat.py
+++ b/g4f/Provider/GigaChat.py
@@ -35,7 +35,7 @@ class GigaChat(AsyncGeneratorProvider, ProviderModelMixin):
         stream: bool = True,
         proxy: str = None,
         api_key: str = None,
-        coonector: BaseConnector = None,
+        connector: BaseConnector = None,
         scope: str = "GIGACHAT_API_PERS",
         update_interval: float = 0,
         **kwargs
diff --git a/g4f/local/__init__.py b/g4f/local/__init__.py
index cc678dc6..c9d3d74a 100644
--- a/g4f/local/__init__.py
+++ b/g4f/local/__init__.py
@@ -1,8 +1,8 @@
 from ..typing import Union, Iterator, Messages
 from ..stubs import ChatCompletion, ChatCompletionChunk
-from .core.engine import LocalProvider
-from .core.models import models
-from ..client import iter_response, filter_none, IterResponse
+from ._engine import LocalProvider
+from ._models import models
+from ..client import iter_response, filter_none, IterResponse
 
 class LocalClient():
     def __init__(self, **kwargs) -> None:
diff --git a/g4f/local/core/engine.py b/g4f/local/_engine.py
index 920ed9b4..917de16c 100644
--- a/g4f/local/core/engine.py
+++ b/g4f/local/_engine.py
@@ -1,7 +1,7 @@
 import os
 
-from gpt4all import GPT4All
-from .models import models
+from gpt4all import GPT4All
+from ._models import models
 
 class LocalProvider:
     @staticmethod
@@ -10,7 +10,7 @@ class LocalProvider:
             raise ValueError(f"Model '{model}' not found / not yet implemented")
 
         model = models[model]
-        model_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../models/')
+        model_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'models/')
         full_model_path = os.path.join(model_dir, model['path'])
 
         if not os.path.isfile(full_model_path):
@@ -23,7 +23,7 @@ class LocalProvider:
             raise ValueError(f"Model '{model['path']}' not found.")
 
         model = GPT4All(model_name=model['path'],
-                        n_threads=8,
+                        #n_threads=8,
                         verbose=False,
                         allow_download=False,
                         model_path=model_dir)
diff --git a/g4f/local/core/models.py b/g4f/local/_models.py
index ec36fe41..ec36fe41 100644
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -82,7 +82,7 @@ setup(
     long_description=long_description,
     packages=find_packages(),
     package_data={
-        'g4f': ['g4f/interference/*', 'g4f/gui/client/*', 'g4f/gui/server/*', 'g4f/Provider/npm/*']
+        'g4f': ['g4f/interference/*', 'g4f/gui/client/*', 'g4f/gui/server/*', 'g4f/Provider/npm/*', 'g4f/local/models/*']
     },
     include_package_data=True,
     install_requires=INSTALL_REQUIRE,
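
Note: the coonector -> connector rename in g4f/Provider/GigaChat.py is caller-visible. Under the old misspelling, a connector passed as connector= fell into **kwargs and was silently ignored. A minimal sketch of the fixed call, assuming the usual g4f create_async_generator entry point; the model id and credentials are placeholders:

import asyncio
from aiohttp import TCPConnector
from g4f.Provider import GigaChat

async def main() -> None:
    # "connector" (fixed spelling) now actually reaches the provider
    # instead of being swallowed by **kwargs.
    async for chunk in GigaChat.create_async_generator(
        model="GigaChat:latest",            # assumed model id
        messages=[{"role": "user", "content": "Hello"}],
        api_key="<GIGACHAT_API_KEY>",       # placeholder credentials
        connector=TCPConnector(ssl=False),  # e.g. a custom TLS/proxy setup
    ):
        print(chunk, end="")

asyncio.run(main())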
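With core/engine.py and core/models.py flattened into the private modules _engine.py and _models.py, the public surface of g4f.local is unchanged. A hedged usage sketch, assuming LocalClient exposes the OpenAI-style chat.completions interface that its ChatCompletion/iter_response imports suggest; the model key is illustrative:

from g4f.local import LocalClient

client = LocalClient()
response = client.chat.completions.create(  # assumed OpenAI-style surface
    model="orca-mini-3b",                   # illustrative key from _models.py
    messages=[{"role": "user", "content": "Say hi in one word."}],
    stream=False,
)
print(response.choices[0].message.content)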
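Inside _engine.py, weights are now resolved from a models/ directory next to the module, so the lookup still targets g4f/local/models/ after the move out of core/ (matching the 'g4f/local/models/*' entry added to package_data in setup.py), and the hard-coded n_threads=8 is commented out so gpt4all picks its own default. A sketch of the equivalent direct gpt4all call; the GGUF filename is illustrative:

import os
from gpt4all import GPT4All

# Same resolution the patched LocalProvider uses: models/ beside this file.
model_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "models/")

model = GPT4All(
    model_name="mistral-7b-instruct-v0.1.Q4_0.gguf",  # illustrative filename
    model_path=model_dir,
    allow_download=False,  # the weights must already exist on disk
    verbose=False,
    # n_threads=8,         # left to gpt4all's default, as in the patch
)
print(model.generate("Hello", max_tokens=32))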