if001 committed
Commit 8cf200f · 1 Parent(s): 7a518b4
Files changed (1)
  1. modeling_llama.py +1 -1
modeling_llama.py CHANGED
@@ -193,7 +193,7 @@ class LoRAMoeLayer(torch.nn.Module):
         weights, selected_experts = torch.topk(
             gate_logits, self.num_experts_per_tok
         )  # b,s,n
-        if getattr(self.config, "show_debug", False) and self._layer_idx == 0 or self._layer_idx == 16 or self._layer_idx == 31:
+        if getattr(self.config, "show_debug", False) and (self._layer_idx == 0 or self._layer_idx == 16 or self._layer_idx == 31):
             print(f"{self._name}_{self._layer_idx}: {selected_experts}")
             print("-"*10)
         weights = F.softmax(weights, dim=2, dtype=torch.float).to(
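
Why the added parentheses matter: in Python, `and` binds more tightly than `or`, so the old condition parsed as `(show_debug and self._layer_idx == 0) or self._layer_idx == 16 or self._layer_idx == 31`, meaning layers 16 and 31 printed debug output even when `show_debug` was off. The new grouping gates all three layers behind the flag. A minimal standalone sketch (not part of the repo's code) demonstrating the difference:

# Precedence sketch: `and` binds tighter than `or` in Python.
show_debug = False

for layer_idx in (0, 16, 31):
    # Old condition: parses as (show_debug and layer_idx == 0) or ...,
    # so layers 16 and 31 evaluate True even with show_debug off.
    old = show_debug and layer_idx == 0 or layer_idx == 16 or layer_idx == 31
    # New condition: the flag gates all three layer checks.
    new = show_debug and (layer_idx == 0 or layer_idx == 16 or layer_idx == 31)
    print(f"layer {layer_idx}: old={old} new={new}")
# Prints old=True for layers 16 and 31, new=False for every layer.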