minlik committed on
Commit
9cf90ca
1 Parent(s): d4639ea

update config

Browse files
Files changed (1) hide show
  1. modeling_llama.py +3 -4
modeling_llama.py CHANGED
@@ -299,10 +299,9 @@ class LlamaAttention(nn.Module):
299
  self.rope_theta = config.rope_theta
300
  self.is_causal = True
301
 
302
- # fixme: config needs to be updated to include these parameters
303
- self._lambda_ts = 1
304
- self._lambda_st = 1
305
- self._lambda_ss = 1
306
 
307
  if (self.head_dim * self.num_heads) != self.hidden_size:
308
  raise ValueError(
 
299
  self.rope_theta = config.rope_theta
300
  self.is_causal = True
301
 
302
+ self._lambda_ts = config.lambda_ts
303
+ self._lambda_st = config.lambda_st
304
+ self._lambda_ss = config.lambda_ss
 
305
 
306
  if (self.head_dim * self.num_heads) != self.hidden_size:
307
  raise ValueError(