Diffstat (limited to 'testing')
-rw-r--r--  testing/interference_test.py      16
-rw-r--r--  testing/readme_table.py           71
-rw-r--r--  testing/test.py                   12
-rw-r--r--  testing/test_chat_completion.py   17
-rw-r--r--  testing/test_interference.py      27
-rw-r--r--  testing/test_providers.py         69
6 files changed, 113 insertions, 99 deletions
diff --git a/testing/interference_test.py b/testing/interference_test.py
deleted file mode 100644
index 410a0ed6..00000000
--- a/testing/interference_test.py
+++ /dev/null
@@ -1,16 +0,0 @@
-import openai
-
-openai.api_key = ''
-openai.api_base = 'http://localhost:1337'
-
-chat_completion = openai.ChatCompletion.create(stream=True,
- model='gpt-3.5-turbo', messages=[{'role': 'user', 'content': 'write a poem about a tree'}])
-
-#print(chat_completion.choices[0].message.content)
-
-for token in chat_completion:
-
- content = token['choices'][0]['delta'].get('content')
- if content != None:
- print(content)
-
\ No newline at end of file
diff --git a/testing/readme_table.py b/testing/readme_table.py
deleted file mode 100644
index 4d078034..00000000
--- a/testing/readme_table.py
+++ /dev/null
@@ -1,71 +0,0 @@
-from g4f.Provider import (
- Ails,
- You,
- Bing,
- Yqcloud,
- Theb,
- Aichat,
- Bard,
- Vercel,
- Forefront,
- Lockchat,
- Liaobots,
- H2o,
- ChatgptLogin,
- DeepAi,
- GetGpt,
- AItianhu,
- EasyChat,
- Acytoo,
- DfeHub,
- AiService,
- BingHuan,
- Wewordle,
- ChatgptAi,
-)
-
-from urllib.parse import urlparse
-
-providers = [
- Ails,
- You,
- Bing,
- Yqcloud,
- Theb,
- Aichat,
- Bard,
- Vercel,
- Forefront,
- Lockchat,
- Liaobots,
- H2o,
- ChatgptLogin,
- DeepAi,
- GetGpt,
- AItianhu,
- EasyChat,
- Acytoo,
- DfeHub,
- AiService,
- BingHuan,
- Wewordle,
- ChatgptAi,
-]
-
-# | Website| Provider| gpt-3.5-turbo | gpt-4 | Supports Stream | Status | Needs Auth |
-print('| Website| Provider| gpt-3.5 | gpt-4 | Streaming | Status | Auth |')
-print('| --- | --- | --- | --- | --- | --- | --- |')
-
-for provider in providers:
-
- parsed_url = urlparse(provider.url)
- name = f"`g4f.Provider.{provider.__name__.split('.')[-1]}`"
- url = f'[{parsed_url.netloc}]({provider.url})'
- has_gpt4 = '✔️' if 'gpt-4' in provider.model else '❌'
- has_gpt3_5 = '✔️' if 'gpt-3.5-turbo' in provider.model else '❌'
- streaming = '✔️' if provider.supports_stream else '❌'
- needs_auth = '✔️' if provider.needs_auth else '❌'
-
- working = '![Active](https://img.shields.io/badge/Active-brightgreen)' if provider.working else '![Inactive](https://img.shields.io/badge/Inactive-red)'
-
-    print(f'| {url} | {name} | {has_gpt3_5} | {has_gpt4} | {streaming} | {working} | {needs_auth} |')
\ No newline at end of file
diff --git a/testing/test.py b/testing/test.py
deleted file mode 100644
index ebb2b16d..00000000
--- a/testing/test.py
+++ /dev/null
@@ -1,12 +0,0 @@
-import g4f
-
-# Set with provider
-stream = False
-response = g4f.ChatCompletion.create(model='gpt-3.5-turbo', provider=g4f.Provider.Yqcloud, messages=[
- {"role": "user", "content": "hello"}], stream=stream)
-
-if stream:
- for message in response:
- print(message)
-else:
-    print(response)
\ No newline at end of file
diff --git a/testing/test_chat_completion.py b/testing/test_chat_completion.py
new file mode 100644
index 00000000..cc705f07
--- /dev/null
+++ b/testing/test_chat_completion.py
@@ -0,0 +1,17 @@
+import sys
+from pathlib import Path
+
+sys.path.append(str(Path(__file__).parent.parent))
+
+import g4f
+
+stream = False
+response = g4f.ChatCompletion.create(
+ model="gpt-3.5-turbo",
+ provider=g4f.provider.Ails,
+ messages=[{"role": "user", "content": "hello"}],
+ stream=stream,
+ active_server=5,
+)
+
+print(response)
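
The streaming path that the removed testing/test.py exercised can still be reproduced with the same API. A minimal sketch, following the pattern of the file added above; the choice of the Ails provider is illustrative only, and the `active_server` keyword is omitted here:

import g4f

# Hedged sketch: streaming variant of the call added above, mirroring the
# removed testing/test.py. With stream=True the call yields message chunks
# instead of returning a single string.
response = g4f.ChatCompletion.create(
    model="gpt-3.5-turbo",
    provider=g4f.provider.Ails,
    messages=[{"role": "user", "content": "hello"}],
    stream=True,
)

for message in response:
    print(message, end="", flush=True)
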
diff --git a/testing/test_interference.py b/testing/test_interference.py
new file mode 100644
index 00000000..31717ea7
--- /dev/null
+++ b/testing/test_interference.py
@@ -0,0 +1,27 @@
+# type: ignore
+import openai
+
+openai.api_key = ""
+openai.api_base = "http://localhost:1337"
+
+
+def main():
+ chat_completion = openai.ChatCompletion.create(
+ model="gpt-3.5-turbo",
+ messages=[{"role": "user", "content": "write a poem about a tree"}],
+ stream=True,
+ )
+
+ if isinstance(chat_completion, dict):
+ # not stream
+ print(chat_completion.choices[0].message.content)
+ else:
+ # stream
+ for token in chat_completion:
+ content = token["choices"][0]["delta"].get("content")
+ if content != None:
+ print(content, end="", flush=True)
+
+
+if __name__ == "__main__":
+ main()
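
test_interference.py targets the local interference server (an OpenAI-compatible endpoint) that must already be listening on http://localhost:1337. A minimal non-streaming sketch against the same endpoint, using the legacy openai 0.x client the test imports; nothing here is specific to g4f beyond the base URL:

import openai

# Hedged sketch: non-streaming request against the same local endpoint as
# test_interference.py; assumes the interference server is already running.
openai.api_key = ""
openai.api_base = "http://localhost:1337"

completion = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "write a poem about a tree"}],
    stream=False,
)
print(completion.choices[0].message.content)
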
diff --git a/testing/test_providers.py b/testing/test_providers.py
new file mode 100644
index 00000000..a5c6f87b
--- /dev/null
+++ b/testing/test_providers.py
@@ -0,0 +1,69 @@
+import sys
+from pathlib import Path
+
+sys.path.append(str(Path(__file__).parent.parent))
+
+from g4f import BaseProvider, models, provider
+
+
+def main():
+ providers = get_providers()
+ results: list[list[str | bool]] = []
+
+ for _provider in providers:
+ print("start", _provider.__name__)
+ actual_working = judge(_provider)
+ expected_working = _provider.working
+ match = actual_working == expected_working
+
+ results.append([_provider.__name__, expected_working, actual_working, match])
+
+ print("failed provider list")
+ for result in results:
+ if not result[3]:
+ print(result)
+
+
+def get_providers() -> list[type[BaseProvider]]:
+ provider_names = dir(provider)
+ ignore_names = [
+ "base_provider",
+ "BaseProvider",
+ ]
+ provider_names = [
+ provider_name
+ for provider_name in provider_names
+ if not provider_name.startswith("__") and provider_name not in ignore_names
+ ]
+ return [getattr(provider, provider_name) for provider_name in provider_names]
+
+
+def create_response(_provider: type[BaseProvider]) -> str:
+ model = (
+ models.gpt_35_turbo.name
+ if _provider is not provider.H2o
+ else models.falcon_7b.name
+ )
+ response = _provider.create_completion(
+ model=model,
+ messages=[{"role": "user", "content": "Hello world!, plz yourself"}],
+ stream=False,
+ )
+ return "".join(response)
+
+
+def judge(_provider: type[BaseProvider]) -> bool:
+ if _provider.needs_auth:
+ return _provider.working
+
+ try:
+ response = create_response(_provider)
+ assert type(response) is str
+ return len(response) > 1
+ except Exception as e:
+ print(e)
+ return False
+
+
+if __name__ == "__main__":
+ main()
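
test_providers.py iterates over every exported provider, calls create_completion directly, and compares the observed result against the provider's declared working flag. A minimal sketch of checking a single provider by hand with the same call pattern as create_response(); the choice of Ails is illustrative, not prescriptive:

import sys
from pathlib import Path

sys.path.append(str(Path(__file__).parent.parent))

from g4f import models, provider

# Hedged sketch: one-off provider check using the same call pattern as
# create_response() in test_providers.py above.
chunks = provider.Ails.create_completion(
    model=models.gpt_35_turbo.name,
    messages=[{"role": "user", "content": "Hello world!"}],
    stream=False,
)
print("".join(chunks))
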