Remove unused `activation_dropout`

#8
by shijie-wu - opened
Files changed (1)
  1. config.json +0 -1
config.json CHANGED
```diff
@@ -1,6 +1,5 @@
 {
   "_remove_final_layer_norm": false,
-  "activation_dropout": 0.0,
   "activation_function": "relu",
   "architectures": [
     "OPTForCausalLM"
```