toilaluan committed on
Commit
f9aadc9
1 Parent(s): 4e4be00

Update modeling_navit_siglip.py

Browse files
Files changed (1) hide show
  1. modeling_navit_siglip.py +0 -5
modeling_navit_siglip.py CHANGED
@@ -142,11 +142,6 @@ SIGLIP_PRETRAINED_MODEL_ARCHIVE_LIST = [
142
  # See all SigLIP models at https://huggingface.co/models?filter=siglip
143
  ]
144
 
145
- if is_flash_attn_2_available():
146
- from flash_attn import flash_attn_func, flash_attn_varlen_func
147
- from flash_attn.bert_padding import index_first_axis, pad_input, unpad_input # noqa
148
-
149
-
150
  # Copied from transformers.models.llama.modeling_llama._get_unpad_data
151
  def _get_unpad_data(attention_mask):
152
  seqlens_in_batch = attention_mask.sum(dim=-1, dtype=torch.int32)
 
142
  # See all SigLIP models at https://huggingface.co/models?filter=siglip
143
  ]
144
 
 
 
 
 
 
145
  # Copied from transformers.models.llama.modeling_llama._get_unpad_data
146
  def _get_unpad_data(attention_mask):
147
  seqlens_in_batch = attention_mask.sum(dim=-1, dtype=torch.int32)