shijie-wu committed on
Commit
ed8698c
1 Parent(s): c9c1510

Remove unused `activation_dropout`

Browse files

https://github.com/huggingface/transformers/pull/18842

Files changed (1) hide show
  1. config.json +0 -1
config.json CHANGED
@@ -1,7 +1,6 @@
1
  {
2
  "_name_or_path": "facebook/opt-2.7b",
3
  "_remove_final_layer_norm": false,
4
- "activation_dropout": 0.0,
5
  "activation_function": "relu",
6
  "architectures": [
7
  "OPTForCausalLM"
 
1
  {
2
  "_name_or_path": "facebook/opt-2.7b",
3
  "_remove_final_layer_norm": false,
 
4
  "activation_function": "relu",
5
  "architectures": [
6
  "OPTForCausalLM"