| author    | Tekky <98614666+xtekky@users.noreply.github.com> | 2023-10-12 15:32:50 +0200 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2023-10-12 15:32:50 +0200 |
| commit    | 86248b44bcb4261c627335e24f5713e242793ece (patch) | |
| tree      | 59671d1777b76bade80b4a0c5ce8a42121878a9e /g4f/interference | |
| parent    | ~ | Merge pull request #1053 from Lin-jun-xiang/fix_GptGo (diff) |
| parent    | change "Models" to "Providers" (diff) | |
Diffstat (limited to 'g4f/interference')
-rw-r--r-- | g4f/interference/__init__.py | 94 |
-rw-r--r-- | g4f/interference/run.py | 4 |
2 files changed, 0 insertions, 98 deletions
```diff
diff --git a/g4f/interference/__init__.py b/g4f/interference/__init__.py
deleted file mode 100644
index d756faa7..00000000
--- a/g4f/interference/__init__.py
+++ /dev/null
@@ -1,94 +0,0 @@
-import json
-import time
-import random
-import string
-
-from typing import Any
-from flask import Flask, request
-from flask_cors import CORS
-from g4f import ChatCompletion
-
-app = Flask(__name__)
-CORS(app)
-
-@app.route('/')
-def index():
-    return 'interference api, url: http://127.0.0.1:1337'
-
-@app.route('/chat/completions', methods=['POST'])
-def chat_completions():
-    model = request.get_json().get('model', 'gpt-3.5-turbo')
-    stream = request.get_json().get('stream', False)
-    messages = request.get_json().get('messages')
-
-    response = ChatCompletion.create(model = model,
-                                     stream = stream, messages = messages)
-
-    completion_id = ''.join(random.choices(string.ascii_letters + string.digits, k=28))
-    completion_timestamp = int(time.time())
-
-    if not stream:
-        return {
-            'id': f'chatcmpl-{completion_id}',
-            'object': 'chat.completion',
-            'created': completion_timestamp,
-            'model': model,
-            'choices': [
-                {
-                    'index': 0,
-                    'message': {
-                        'role': 'assistant',
-                        'content': response,
-                    },
-                    'finish_reason': 'stop',
-                }
-            ],
-            'usage': {
-                'prompt_tokens': None,
-                'completion_tokens': None,
-                'total_tokens': None,
-            },
-        }
-
-    def streaming():
-        for chunk in response:
-            completion_data = {
-                'id': f'chatcmpl-{completion_id}',
-                'object': 'chat.completion.chunk',
-                'created': completion_timestamp,
-                'model': model,
-                'choices': [
-                    {
-                        'index': 0,
-                        'delta': {
-                            'content': chunk,
-                        },
-                        'finish_reason': None,
-                    }
-                ],
-            }
-
-            content = json.dumps(completion_data, separators=(',', ':'))
-            yield f'data: {content}\n\n'
-            time.sleep(0.1)
-
-        end_completion_data: dict[str, Any] = {
-            'id': f'chatcmpl-{completion_id}',
-            'object': 'chat.completion.chunk',
-            'created': completion_timestamp,
-            'model': model,
-            'choices': [
-                {
-                    'index': 0,
-                    'delta': {},
-                    'finish_reason': 'stop',
-                }
-            ],
-        }
-        content = json.dumps(end_completion_data, separators=(',', ':'))
-        yield f'data: {content}\n\n'
-
-    return app.response_class(streaming(), mimetype='text/event-stream')
-
-def run_interference():
-    app.run(host='0.0.0.0', port=1337, debug=True)
\ No newline at end of file
```
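The deleted `__init__.py` exposed an OpenAI-compatible `/chat/completions` endpoint on port 1337 via Flask. For reference, a minimal client sketch against that endpoint could look like the following; it assumes the server from this deleted module is still running locally (e.g. from an older checkout) and uses the `requests` library, which is not part of the deleted code.

```python
# Hypothetical client for the interference API removed in this commit.
# Assumes the Flask server from the deleted g4f/interference/__init__.py is
# running locally; endpoint, defaults and response shape mirror the deleted
# chat_completions() handler.
import requests

API_URL = "http://127.0.0.1:1337/chat/completions"  # from the deleted index() route

payload = {
    "model": "gpt-3.5-turbo",   # handler default when omitted
    "stream": False,
    "messages": [{"role": "user", "content": "Hello"}],
}

resp = requests.post(API_URL, json=payload, timeout=120)
resp.raise_for_status()
# Non-streaming responses carry a single chat.completion object.
print(resp.json()["choices"][0]["message"]["content"])
```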
```diff
diff --git a/g4f/interference/run.py b/g4f/interference/run.py
deleted file mode 100644
index e527ce11..00000000
--- a/g4f/interference/run.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from g4f.interference import run_interference
-
-if __name__ == '__main__':
-    run_interference()
\ No newline at end of file
```
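The deleted `run.py` was only a thin entry point that called `run_interference()`. The streaming branch of the removed handler emitted SSE-style `data: {...}` chunks with mimetype `text/event-stream`, ending with an empty delta and `finish_reason: "stop"` rather than a `[DONE]` sentinel. A sketch of consuming that stream, again assuming a locally running server and the `requests` library, could look like this:

```python
# Hypothetical client for the streaming branch of the deleted handler, which
# yielded SSE-style "data: {...}\n\n" chunks (mimetype text/event-stream) and
# signalled the end with an empty delta and finish_reason "stop".
import json

import requests

API_URL = "http://127.0.0.1:1337/chat/completions"

payload = {
    "model": "gpt-3.5-turbo",
    "stream": True,
    "messages": [{"role": "user", "content": "Hello"}],
}

with requests.post(API_URL, json=payload, stream=True, timeout=120) as resp:
    resp.raise_for_status()
    for line in resp.iter_lines(decode_unicode=True):
        if not line or not line.startswith("data: "):
            continue
        chunk = json.loads(line[len("data: "):])
        choice = chunk["choices"][0]
        if choice["finish_reason"] == "stop":
            break
        print(choice["delta"].get("content", ""), end="", flush=True)
print()
```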