ydshieh committed
Commit 5a95e3a
1 Parent(s): 7d29388

Change check to crossattention

Files changed (1):
  1. vit_gpt2/modeling_flax_gpt2.py +1 -1
vit_gpt2/modeling_flax_gpt2.py CHANGED
@@ -346,7 +346,7 @@ class FlaxGPT2Block(nn.Module):
         if encoder_hidden_states is not None:
 
             # add one self-attention block for cross-attention
-            if not hasattr(self, "cross_attn"):
+            if not hasattr(self, "crossattention"):
                 raise ValueError(
                     f"If `encoder_hidden_states` are passed, {self} has to be instantiated with "
                     "cross-attention layers by setting `config.add_cross_attention=True`"