nyanko7 committed on
Commit 350b26b
1 Parent(s): 373549b

Update app.py

Files changed (1):
  app.py +1 -1
app.py CHANGED
@@ -216,7 +216,7 @@ def attention(q: Tensor, k: Tensor, v: Tensor, pe: Tensor) -> Tensor:
 
     x = torch.nn.functional.scaled_dot_product_attention(q, k, v)
     # x = rearrange(x, "B H L D -> B L (H D)")
-    x = x.permute(0, 2, 1, 3).contiguous().reshape(x.size(0), x.size(2), -1)
+    x = x.permute(0, 2, 1, 3).reshape(x.size(0), x.size(2), -1)
 
     return x
 
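
Not part of the commit, just context on why this one-line change is safe: a minimal PyTorch sketch (with hypothetical shapes) showing that permute returns a non-contiguous view, and that reshape copies on its own when the strides do not permit a view, so the explicit contiguous() call was redundant; view(), by contrast, would raise an error here.

import torch

# Hypothetical shapes: batch B, heads H, sequence length L, head dim D.
B, H, L, D = 2, 4, 8, 16
x = torch.randn(B, H, L, D)

# permute only swaps strides, so the result is a non-contiguous view of x.
y = x.permute(0, 2, 1, 3)          # shape (B, L, H, D)
assert not y.is_contiguous()

# reshape copies when the strides do not allow a view, so it matches the
# older .contiguous().reshape(...) form; .view() would raise RuntimeError here.
merged = y.reshape(y.size(0), y.size(1), -1)                  # (B, L, H * D)
reference = y.contiguous().reshape(y.size(0), y.size(1), -1)
assert torch.equal(merged, reference)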