path: root/g4f/api/_tokenizer.py
Diffstat (limited to 'g4f/api/_tokenizer.py')
-rw-r--r--  g4f/api/_tokenizer.py  |  14
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/g4f/api/_tokenizer.py b/g4f/api/_tokenizer.py
index fd8f9d5a..de5877c4 100644
--- a/g4f/api/_tokenizer.py
+++ b/g4f/api/_tokenizer.py
@@ -1,9 +1,9 @@
-import tiktoken
-from typing import Union
+# import tiktoken
+# from typing import Union
 
-def tokenize(text: str, model: str = 'gpt-3.5-turbo') -> Union[int, str]:
-    encoding = tiktoken.encoding_for_model(model)
-    encoded = encoding.encode(text)
-    num_tokens = len(encoded)
+# def tokenize(text: str, model: str = 'gpt-3.5-turbo') -> Union[int, str]:
+#     encoding = tiktoken.encoding_for_model(model)
+#     encoded = encoding.encode(text)
+#     num_tokens = len(encoded)
 
-    return num_tokens, encoded
\ No newline at end of file
+#     return num_tokens, encoded
\ No newline at end of file
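
For context, the helper being commented out here is a thin wrapper around tiktoken's model-specific encoder. Below is a minimal, runnable sketch of the same idea, assuming the tiktoken package is installed; note the original Union[int, str] annotation does not match the (count, tokens) value the function actually returns, so the sketch annotates it as a tuple instead.

    import tiktoken

    def tokenize(text: str, model: str = 'gpt-3.5-turbo') -> tuple[int, list[int]]:
        # Look up the encoding registered for the model, encode the text,
        # and return both the token count and the token ids.
        encoding = tiktoken.encoding_for_model(model)
        encoded = encoding.encode(text)
        return len(encoded), encoded

    # Example usage (exact ids and count depend on the model's encoding):
    num_tokens, token_ids = tokenize("Hello, world!")
    print(num_tokens, token_ids)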