Text Generation
Transformers
Safetensors
English
custom_code
qingsonglv committed on
Commit
7201d1c
1 Parent(s): f1ed53e

Update modeling_cogagent.py

Browse files
Files changed (1) hide show
  1. modeling_cogagent.py +0 -2
modeling_cogagent.py CHANGED
@@ -284,8 +284,6 @@ class CrossAttention(nn.Module):
284
  self.cross_head_dim = self.cross_compute_hidden_size // self.num_heads
285
  self.max_position_embeddings = config.max_position_embeddings
286
 
287
- # self.rotary_emb = RotaryEmbedding(self.hidden_size // self.num_heads)
288
- self.rotary_emb = FastRotaryEmbedding(dim=self.head_dim, pos_idx_in_fp32=False)
289
  self.query = nn.Linear(self.hidden_size, self.cross_compute_hidden_size, bias=False)
290
  self.key_value = nn.Linear(self.cross_hidden_size, self.cross_compute_hidden_size * 2, bias=False)
291
  self.dense = nn.Linear(self.cross_compute_hidden_size, self.hidden_size, bias=False)
 
284
  self.cross_head_dim = self.cross_compute_hidden_size // self.num_heads
285
  self.max_position_embeddings = config.max_position_embeddings
286
 
 
 
287
  self.query = nn.Linear(self.hidden_size, self.cross_compute_hidden_size, bias=False)
288
  self.key_value = nn.Linear(self.cross_hidden_size, self.cross_compute_hidden_size * 2, bias=False)
289
  self.dense = nn.Linear(self.cross_compute_hidden_size, self.hidden_size, bias=False)