import packaging to be compatible with setuptools==70.0.0 (#449)

* import packaging to be compatible with setuptools==70.0.0

* importing the version module

---------

Co-authored-by: Jamie <Jamie@Alexandras-MacBook-Pro.local>
Co-authored-by: Jong Wook Kim <jongwook@nyu.edu>
This commit is contained in:
tminer 2024-06-04 20:47:22 +01:00 committed by GitHub
parent a1d071733d
commit dcba3cb2e2
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 5 additions and 4 deletions

View File

@@ -2,8 +2,8 @@ import hashlib
 import os
 import urllib
 import warnings
-from typing import Any, Union, List
-from pkg_resources import packaging
+from packaging import version
+from typing import Union, List

 import torch
 from PIL import Image
@@ -20,7 +20,7 @@ except ImportError:
     BICUBIC = Image.BICUBIC

-if packaging.version.parse(torch.__version__) < packaging.version.parse("1.7.1"):
+if version.parse(torch.__version__) < version.parse("1.7.1"):
     warnings.warn("PyTorch version 1.7.1 or higher is recommended")
@@ -228,7 +228,7 @@ def tokenize(texts: Union[str, List[str]], context_length: int = 77, truncate: bool = False):
     sot_token = _tokenizer.encoder["<|startoftext|>"]
     eot_token = _tokenizer.encoder["<|endoftext|>"]
     all_tokens = [[sot_token] + _tokenizer.encode(text) + [eot_token] for text in texts]
-    if packaging.version.parse(torch.__version__) < packaging.version.parse("1.8.0"):
+    if version.parse(torch.__version__) < version.parse("1.8.0"):
         result = torch.zeros(len(all_tokens), context_length, dtype=torch.long)
     else:
         result = torch.zeros(len(all_tokens), context_length, dtype=torch.int)

View File

@@ -1,4 +1,5 @@
 ftfy
+packaging
 regex
 tqdm
 torch