sourxbhh committed on
Commit
5edec12
·
1 Parent(s): 005d0a4

Fix: Make CLIP weight conversion optional for CPU compatibility

Browse files
Files changed (1) hide show
  1. models/ACMDM.py +4 -2
models/ACMDM.py CHANGED
@@ -109,8 +109,10 @@ class ACMDM(nn.Module):
109
 
110
  def load_and_freeze_clip(self, clip_version):
111
  clip_model, clip_preprocess = clip.load(clip_version, device='cpu', jit=False)
112
- assert torch.cuda.is_available()
113
- clip.model.convert_weights(clip_model)
 
 
114
 
115
  clip_model.eval()
116
  for p in clip_model.parameters():
 
109
 
110
  def load_and_freeze_clip(self, clip_version):
111
  clip_model, clip_preprocess = clip.load(clip_version, device='cpu', jit=False)
112
+ # Convert weights to half precision if CUDA is available (for efficiency)
113
+ # If CUDA is not available, skip conversion (works on CPU with float32)
114
+ if torch.cuda.is_available():
115
+ clip.model.convert_weights(clip_model)
116
 
117
  clip_model.eval()
118
  for p in clip_model.parameters():