ceyda committed
Commit e438e9d
1 parent: 5ffe5df

Update model/model.py

Files changed (1):
  1. model/model.py  +1 -1
model/model.py CHANGED
@@ -157,7 +157,7 @@ class FlaxHybridCLIP(FlaxPreTrainedModel):
                 3,
             ),
         )
-
+        kwargs.pop('_do_init', None)  # temp fix possibly related: https://github.com/huggingface/transformers/issues/15766
         module = self.module_class(config=config, dtype=dtype, **kwargs)
         super().__init__(
             config, module, input_shape=input_shape, seed=seed, dtype=dtype
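
For context, a minimal sketch of how the patched __init__ plausibly reads after this commit. Only the kwargs.pop('_do_init', None) line is verbatim from the diff; the method signature, the input_shape default, and the module_class attribute are assumptions reconstructed from the seven-line context window the hunk shows:

    from typing import Optional, Tuple

    import jax.numpy as jnp
    from transformers import FlaxPreTrainedModel


    class FlaxHybridCLIP(FlaxPreTrainedModel):
        # Assumed: elsewhere in model/model.py this is set to the Flax
        # nn.Module subclass implementing the hybrid CLIP forward pass.
        module_class = None

        def __init__(
            self,
            config,
            input_shape: Optional[Tuple] = None,
            seed: int = 0,
            dtype: jnp.dtype = jnp.float32,
            **kwargs,
        ):
            if input_shape is None:
                # (text input shape, image input shape); the trailing 3 is the
                # channel dimension visible in the diff's context lines.
                input_shape = (
                    (1, 1),
                    (
                        1,
                        config.vision_config.image_size,
                        config.vision_config.image_size,
                        3,
                    ),
                )
            # The one line this commit adds: recent transformers releases pass
            # a private `_do_init` kwarg down through from_pretrained, and the
            # Flax module constructor does not accept it, so drop it before
            # forwarding **kwargs.
            # temp fix possibly related: https://github.com/huggingface/transformers/issues/15766
            kwargs.pop('_do_init', None)
            module = self.module_class(config=config, dtype=dtype, **kwargs)
            super().__init__(
                config, module, input_shape=input_shape, seed=seed, dtype=dtype
            )

Popping with a None default keeps the patch a no-op on older transformers versions that never pass _do_init.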