remove unnecessary cache in simple tokenizer

This commit is contained in:
AN Long 2023-07-10 22:19:56 +08:00
parent a1d071733d
commit 77c7ef03fd
1 changed file with 0 additions and 1 deletions

View File

@@ -7,7 +7,6 @@ import ftfy
 import regex as re
-@lru_cache()
 def default_bpe():
     return os.path.join(os.path.dirname(os.path.abspath(__file__)), "bpe_simple_vocab_16e6.txt.gz")