robinzixuan committed
Commit: 17dff97
Parent: e9790b8

Update configuration_opt.py

Files changed (1):
  configuration_opt.py (+0 -2)
configuration_opt.py CHANGED
@@ -111,7 +111,6 @@ class OPTConfig(PretrainedConfig):
         eos_token_id=2,
         enable_bias=True,
         layer_norm_elementwise_affine=True,
-        attn_implementation='eager',
         **kwargs,
     ):
         super().__init__(
@@ -142,4 +141,3 @@ class OPTConfig(PretrainedConfig):
         # with checkpoints that have been fine-tuned before transformers v4.20.1
         # see https://github.com/facebookresearch/metaseq/pull/164
         self._remove_final_layer_norm = _remove_final_layer_norm
-        self.attn_implementation = attn_implementation
 
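The commit drops attn_implementation as an explicit OPTConfig constructor argument and no longer stores it on the config instance. Below is a minimal sketch, assuming a recent transformers release where from_pretrained accepts attn_implementation, of how the attention backend would then be chosen at load time instead; the checkpoint name is only illustrative and is not taken from this repository.

    # Hypothetical usage, not part of this commit: pick the attention backend
    # when loading the model rather than hard-coding it in the config class.
    from transformers import AutoModelForCausalLM

    model = AutoModelForCausalLM.from_pretrained(
        "facebook/opt-125m",          # illustrative OPT checkpoint
        attn_implementation="eager",  # or "sdpa" / "flash_attention_2" where supported
    )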