emozilla committed
Commit
1d7b42f
1 parent: 4699e73

update LlamaPreTrainedModel for gradient checkpointing refactor

Files changed (1):
  modeling_llama_yarn.py  +0 -4
modeling_llama_yarn.py CHANGED
@@ -860,10 +860,6 @@ class LlamaPreTrainedModel(PreTrainedModel):
             if module.padding_idx is not None:
                 module.weight.data[module.padding_idx].zero_()
 
-    def _set_gradient_checkpointing(self, module, value=False):
-        if isinstance(module, LlamaModel):
-            module.gradient_checkpointing = value
-
 
 LLAMA_INPUTS_DOCSTRING = r"""
     Args:
 
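Context for the removal: newer transformers versions refactored gradient checkpointing so that PreTrainedModel toggles it itself, and per-model _set_gradient_checkpointing(module, value) overrides with the old signature are no longer called (and can clash with the new base-class hook). A minimal sketch of enabling checkpointing after the refactor, assuming transformers >= 4.35 and an illustrative checkpoint id:

# Minimal sketch, not part of this commit; assumes transformers >= 4.35
# (post gradient-checkpointing refactor) and an assumed repo id.
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained(
    "NousResearch/Yarn-Llama-2-7b-64k",  # assumed checkpoint that ships modeling_llama_yarn.py
    trust_remote_code=True,              # required to load the custom modeling code
)

# Post-refactor, gradient_checkpointing_enable() on the PreTrainedModel base
# class handles the submodules itself, so the removed override above is obsolete.
model.gradient_checkpointing_enable(
    gradient_checkpointing_kwargs={"use_reentrant": False}
)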