winglian Nanobit committed on
Commit
2daa683
·
unverified ·
1 Parent(s): ad0ea6a

Update src/axolotl/monkeypatch/llama_attn_hijack_xformers.py

Browse files
src/axolotl/monkeypatch/llama_attn_hijack_xformers.py CHANGED
@@ -25,7 +25,6 @@ def hijack_llama_sdp_attention():
25
  transformers.models.llama.modeling_llama.LlamaAttention.forward = (
26
  sdp_attention_forward
27
  )
28
- logging.info("Replaced attention with sdp_attention")
29
 
30
 
31
  def xformers_forward(
 
25
  transformers.models.llama.modeling_llama.LlamaAttention.forward = (
26
  sdp_attention_forward
27
  )
 
28
 
29
 
30
  def xformers_forward(