system (HF staff) committed
Commit 4d22b79
Parent: 87bd642

Update config.json

Files changed (1):
  1. config.json  +22 -1
config.json CHANGED
@@ -1,28 +1,49 @@
 {
   "attention_probs_dropout_prob": 0.1,
+  "bos_token_id": 0,
   "directionality": "bidi",
+  "do_sample": false,
+  "eos_token_ids": 0,
   "finetuning_task": "dutch-squad2",
+  "model_type": "bert",
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
-  "model_type": "bert",
+  "id2label": {
+    "0": "LABEL_0",
+    "1": "LABEL_1"
+  },
   "initializer_range": 0.02,
   "intermediate_size": 3072,
   "is_decoder": false,
+  "label2id": {
+    "LABEL_0": 0,
+    "LABEL_1": 1
+  },
   "layer_norm_eps": 1e-12,
+  "length_penalty": 1.0,
+  "max_length": 20,
   "max_position_embeddings": 512,
+  "model_type": "bert",
   "num_attention_heads": 12,
+  "num_beams": 1,
   "num_hidden_layers": 12,
   "num_labels": 2,
+  "num_return_sequences": 1,
   "output_attentions": false,
   "output_hidden_states": false,
   "output_past": true,
+  "pad_token_id": 0,
   "pooler_fc_size": 768,
   "pooler_num_attention_heads": 12,
   "pooler_num_fc_layers": 3,
   "pooler_size_per_head": 128,
   "pooler_type": "first_token_transform",
   "pruned_heads": {},
+  "repetition_penalty": 1.0,
+  "temperature": 1.0,
+  "top_k": 50,
+  "top_p": 1.0,
   "torchscript": false,
   "type_vocab_size": 2,
   "use_bfloat16": false,