system (HF staff) committed on
Commit 0b5aa12
1 Parent(s): 1ca1d53

Update config.json

Files changed (1): config.json (+3, -3)
config.json CHANGED
@@ -2,7 +2,7 @@
  "activation_dropout": 0.0,
  "activation_function": "gelu",
  "add_bias_logits": false,
- "add_final_layer_norm": false,
+ "add_final_layer_norm": true,
  "architectures": [
  "BlenderbotForConditionalGeneration"
  ],
@@ -42,13 +42,13 @@
  "model_type": "blenderbot",
  "no_repeat_ngram_size": 3,
  "normalize_before": true,
- "normalize_embedding": true,
+ "normalize_embedding": false,
  "num_beams": 10,
  "num_hidden_layers": 2,
  "pad_token_id": 0,
  "scale_embedding": true,
  "static_position_embeddings": false,
  "unk_token_id": 3,
- "variant": "prelayernorm",
+ "layernorm_variant": "prelayernorm",
  "vocab_size": 8008
  }
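
For anyone pulling this revision, a minimal sanity check (a sketch, not part of the commit): it assumes config.json has already been downloaded locally from this repo, uses only Python's standard json module, and confirms the three fields changed here.

# Sanity check for the fields touched by commit 0b5aa12.
# Assumes config.json from this revision is present in the working directory
# (the local path is an assumption, not part of the commit).
import json

with open("config.json") as f:
    config = json.load(f)

# After this commit the config reflects the pre-layer-norm variant:
# a final layer norm is enabled, embedding normalization is disabled,
# and the key "variant" is renamed to "layernorm_variant".
assert config["add_final_layer_norm"] is True
assert config["normalize_embedding"] is False
assert config["layernorm_variant"] == "prelayernorm"
assert "variant" not in config

print("config.json matches revision 0b5aa12")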