diff options
author | abc <98614666+xtekky@users.noreply.github.com> | 2023-11-19 23:59:18 +0100 |
---|---|---|
committer | abc <98614666+xtekky@users.noreply.github.com> | 2023-11-19 23:59:18 +0100 |
commit | 2345588d383a9115e8e36098caefc7b5bae077f3 (patch) | |
tree | 03700c66f78410977e0dca9ce3c3cf0cbb7b4d6d /g4f/api/_tokenizer.py | |
parent | ~ | g4f v-0.1.8.6 (diff) | |
download | gpt4free-2345588d383a9115e8e36098caefc7b5bae077f3.tar gpt4free-2345588d383a9115e8e36098caefc7b5bae077f3.tar.gz gpt4free-2345588d383a9115e8e36098caefc7b5bae077f3.tar.bz2 gpt4free-2345588d383a9115e8e36098caefc7b5bae077f3.tar.lz gpt4free-2345588d383a9115e8e36098caefc7b5bae077f3.tar.xz gpt4free-2345588d383a9115e8e36098caefc7b5bae077f3.tar.zst gpt4free-2345588d383a9115e8e36098caefc7b5bae077f3.zip |
Diffstat (limited to 'g4f/api/_tokenizer.py')
-rw-r--r-- | g4f/api/_tokenizer.py | 14 |
1 file changed, 7 insertions, 7 deletions
diff --git a/g4f/api/_tokenizer.py b/g4f/api/_tokenizer.py
index fd8f9d5a..de5877c4 100644
--- a/g4f/api/_tokenizer.py
+++ b/g4f/api/_tokenizer.py
@@ -1,9 +1,9 @@
-import tiktoken
-from typing import Union
+# import tiktoken
+# from typing import Union
 
-def tokenize(text: str, model: str = 'gpt-3.5-turbo') -> Union[int, str]:
-    encoding = tiktoken.encoding_for_model(model)
-    encoded = encoding.encode(text)
-    num_tokens = len(encoded)
+# def tokenize(text: str, model: str = 'gpt-3.5-turbo') -> Union[int, str]:
+#     encoding = tiktoken.encoding_for_model(model)
+#     encoded = encoding.encode(text)
+#     num_tokens = len(encoded)
 
-    return num_tokens, encoded
\ No newline at end of file
+#     return num_tokens, encoded
\ No newline at end of file