lgcharpe committed on
Commit
6ce0e52
·
verified ·
1 Parent(s): 7decc80

Fixing position bucket size

Browse files
Files changed (1) hide show
  1. modeling_norbert.py +1 -1
modeling_norbert.py CHANGED
@@ -162,7 +162,7 @@ class Attention(nn.Module):
162
  if self.position_indices.size(0) < query_len:
163
  position_indices = torch.arange(query_len, dtype=torch.long).unsqueeze(1) \
164
  - torch.arange(query_len, dtype=torch.long).unsqueeze(0)
165
- position_indices = self.make_log_bucket_position(position_indices, self.position_bucket_size, 512)
166
  position_indices = self.position_bucket_size - 1 + position_indices
167
  self.position_indices = position_indices.to(hidden_states.device)
168
 
 
162
  if self.position_indices.size(0) < query_len:
163
  position_indices = torch.arange(query_len, dtype=torch.long).unsqueeze(1) \
164
  - torch.arange(query_len, dtype=torch.long).unsqueeze(0)
165
+ position_indices = self.make_log_bucket_position(position_indices, self.config.position_bucket_size, 512)
166
  position_indices = self.position_bucket_size - 1 + position_indices
167
  self.position_indices = position_indices.to(hidden_states.device)
168