import quora
from time import sleep

# Create a Poe account and get its access token (no proxy, logging enabled)
token = quora.Account.create(proxy=None, logging=True)
print('token', token)
sleep(2)

# Stream a completion for the prompt and print each chunk as it arrives
for response in quora.StreamingCompletion.create(model='gpt-3.5-turbo',
                                                 prompt='hello world',
                                                 token=token):
    print(response.completion.choices[0].text, end="", flush=True)