def process_images(batch_size=64, pattern='./**/*.jpg'):
    """Encode every JPEG matching *pattern* with CLIP ViT-B/32 and save the
    L2-normalized feature vector next to each image as a ``.npy`` file.

    Args:
        batch_size: number of images encoded per forward pass (default 64,
            matching the original hard-coded value).
        pattern: recursive glob pattern selecting the input images.
    """
    import clip
    import glob
    # These were used but never imported anywhere in the file — import them
    # locally, consistent with the existing local imports above.
    import numpy as np
    import torch
    from PIL import Image
    from tqdm import tqdm

    clip_model, preprocess = clip.load("ViT-B/32")
    clip_model.eval()
    files = glob.glob(pattern, recursive=True)
    with torch.inference_mode():
        # Bug fix: the loop stride was hard-coded to 64 instead of using
        # batch_size, so changing batch_size would skip or re-read files.
        for start in tqdm(range(0, len(files), batch_size)):
            batch_files = files[start:start + batch_size]
            batch = []
            for path in batch_files:
                # Close each image promptly instead of leaking file handles
                # (PIL keeps the file open until the Image is closed).
                with Image.open(path) as img:
                    batch.append(preprocess(img))
            images = torch.stack(batch, dim=0).cuda()
            features = clip_model.encode_image(images)
            features /= features.norm(dim=-1, keepdim=True)
            # Under inference_mode there is no autograd graph, so detach()
            # is a no-op, and .cpu() already produces a fresh host copy —
            # the original detach().cpu().clone() was redundant.
            features = features.cpu().numpy()
            for feature, path in zip(features, batch_files):
                np.save(path.replace('.jpg', '.npy'), feature)
    # Drop the GPU-resident references, then release the caching allocator's
    # now-unused blocks back to the driver. Note the CUDA context itself
    # (several hundred MB) stays allocated until the process exits — that
    # residual is expected and cannot be freed from Python.
    del clip_model
    torch.cuda.empty_cache()
if __name__ == "__main__":
    # torch is never imported at module level in this file; bring it into
    # scope here for the cache cleanup below.
    import torch

    process_images()
    # empty_cache() only returns *unused* blocks from PyTorch's caching
    # allocator to the driver. Memory still referenced by live tensors is
    # untouched, and the CUDA context itself is not released until the
    # process exits — so nvidia-smi showing residual usage here is normal.
    # By this point process_images() has returned and its locals (model,
    # activations) are garbage, so their cached blocks can be reclaimed.
    if torch.cuda.is_available():
        torch.cuda.empty_cache()
I process images like this, but torch still holds GPU memory after execution, even though I expect it to be freed. How should I fix this?