ccdv committed on
Commit 2042dbf
1 Parent(s): 682cd96

fix for transformers >= 4.35.2

Files changed (2)
  1. README.md +1 -1
  2. modeling_lsg_pegasus.py +3 -3
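Context for the fix, inferred from the diff below rather than stated in the commit message: transformers 4.35 moved the private attention-mask helpers out of the per-model modules into transformers.modeling_attn_mask_utils, so the old import in the custom modeling file now fails:

    # On transformers >= 4.35 this raises ImportError, because _expand_mask
    # was removed from the per-model modules in the attention-mask refactor:
    from transformers.models.pegasus.modeling_pegasus import _expand_mask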
README.md CHANGED
@@ -9,7 +9,7 @@ pipeline_tag: fill-mask
 ---
 
 # LSG model
-**Transformers >= 4.23.1**\
+**Transformers >= 4.35.2**\
 **This model relies on a custom modeling file, you need to add trust_remote_code=True**\
 **See [\#13467](https://github.com/huggingface/transformers/pull/13467)**
 
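The README requirement translates into the following loading code; a minimal sketch, assuming a hypothetical checkpoint id ccdv/lsg-pegasus-large-4096 (substitute the actual repo id of this model):

    # trust_remote_code=True is required because the model class is defined by
    # the custom modeling_lsg_pegasus.py shipped with the checkpoint.
    from transformers import AutoModel, AutoTokenizer

    model_id = "ccdv/lsg-pegasus-large-4096"  # hypothetical id, for illustration only
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    model = AutoModel.from_pretrained(model_id, trust_remote_code=True)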
modeling_lsg_pegasus.py CHANGED
@@ -1,7 +1,7 @@
 from logging import warn
 import torch
 from transformers.models.pegasus.modeling_pegasus import *
-from transformers.models.pegasus.modeling_pegasus import _expand_mask
+from transformers.modeling_attn_mask_utils import _prepare_4d_attention_mask, _prepare_4d_causal_attention_mask
 import torch.nn as nn
 import sys
 
@@ -895,7 +895,7 @@ class LSGPegasusEncoder(LSGPegasusPreTrainedModel, PegasusEncoder):
         # expand attention_mask
         if attention_mask is not None:
             # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
-            attention_mask = _expand_mask(attention_mask, inputs_embeds.dtype)
+            attention_mask = _prepare_4d_attention_mask(attention_mask, inputs_embeds.dtype)
 
         encoder_states = () if output_hidden_states else None
         all_attentions = () if output_attentions else None
@@ -1122,4 +1122,4 @@ try:
         str_to_class(value.split(".")[-1]).register_for_auto_class(key)
 except:
     warn("AutoRegister isn't available, you'll have to manually copy modeling.py after .save_pretrained(...).")
-    warn("Update to transformers >= 4.23.1 to fix.")
+    warn("Update to transformers >= 4.35.2 to fix.")