from time import sleep

from gpt4free import quora

# Create a Poe (quora) account and print the returned auth token.
token = quora.Account.create(proxy=None, logging=True)
print('token', token)

# Give the freshly created account a moment before sending the first request.
sleep(2)

# Stream the completion and print each chunk of text as it arrives.
for response in quora.StreamingCompletion.create(model='ChatGPT', prompt='hello world', token=token):
    print(response.text, flush=True)