mmeendez committed on
Commit
0911344
1 Parent(s): 16227b7

Use device instead of cuda

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -55,7 +55,7 @@ def get_attention_mask(img_tensor):
55
 
56
  # Average the attention at each layer over all heads
57
  attention_probs = torch.mean(attention_probs, dim=1)
58
- residual = torch.eye(attention_probs.size(-1)).to("cuda")
59
  attention_probs = 0.5 * attention_probs + 0.5 * residual
60
 
61
  # normalize by layer
 
55
 
56
  # Average the attention at each layer over all heads
57
  attention_probs = torch.mean(attention_probs, dim=1)
58
+ residual = torch.eye(attention_probs.size(-1)).to(device)
59
  attention_probs = 0.5 * attention_probs + 0.5 * residual
60
 
61
  # normalize by layer