winglian Nanobit committed on
Commit
1076bcb
·
unverified ·
1 Parent(s): 2daa683

Update src/axolotl/monkeypatch/llama_attn_hijack_xformers.py

Browse files
src/axolotl/monkeypatch/llama_attn_hijack_xformers.py CHANGED
@@ -18,7 +18,6 @@ except ImportError:
18
 
19
  def hijack_llama_attention():
20
  transformers.models.llama.modeling_llama.LlamaAttention.forward = xformers_forward
21
- logging.info("Replaced attention with xformers_attention")
22
 
23
 
24
  def hijack_llama_sdp_attention():
 
18
 
19
def hijack_llama_attention():
    """Monkeypatch LLaMA attention to use the xformers implementation.

    Replaces ``transformers.models.llama.modeling_llama.LlamaAttention.forward``
    with ``xformers_forward`` (defined elsewhere in this module), so every
    LlamaAttention instance created afterwards uses the memory-efficient path.

    Side effects:
        Mutates the ``transformers`` module globally; affects all LLaMA models
        constructed after this call. No return value.
    """
    transformers.models.llama.modeling_llama.LlamaAttention.forward = xformers_forward
 
21
 
22
 
23
  def hijack_llama_sdp_attention():