system HF staff committed on
Commit
7058867
1 Parent(s): 4cdc642

Update config.json

Files changed (1)
  1. config.json +0 -36
config.json CHANGED
@@ -1,64 +1,28 @@
  {
- "_num_labels": 2,
- "architectures": [
-   "AlbertForQuestionAnswering"
- ],
  "attention_probs_dropout_prob": 0,
- "bad_words_ids": null,
  "bos_token_id": 2,
  "classifier_dropout_prob": 0.1,
- "decoder_start_token_id": null,
- "do_sample": false,
  "down_scale_factor": 1,
- "early_stopping": false,
  "embedding_size": 128,
  "eos_token_id": 3,
- "finetuning_task": null,
  "gap_size": 0,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0,
  "hidden_size": 4096,
- "id2label": {
-   "0": "LABEL_0",
-   "1": "LABEL_1"
- },
  "initializer_range": 0.02,
  "inner_group_num": 1,
  "intermediate_size": 16384,
- "is_decoder": false,
- "is_encoder_decoder": false,
- "label2id": {
-   "LABEL_0": 0,
-   "LABEL_1": 1
- },
  "layer_norm_eps": 1e-12,
  "layers_to_keep": [],
- "length_penalty": 1.0,
- "max_length": 20,
  "max_position_embeddings": 512,
- "min_length": 0,
  "model_type": "albert",
  "net_structure_type": 0,
- "no_repeat_ngram_size": 0,
  "num_attention_heads": 64,
- "num_beams": 1,
  "num_hidden_groups": 1,
  "num_hidden_layers": 12,
  "num_memory_blocks": 0,
- "num_return_sequences": 1,
- "output_attentions": false,
- "output_hidden_states": false,
  "output_past": true,
  "pad_token_id": 0,
- "prefix": null,
- "pruned_heads": {},
- "repetition_penalty": 1.0,
- "task_specific_params": null,
- "temperature": 1.0,
- "top_k": 50,
- "top_p": 1.0,
- "torchscript": false,
  "type_vocab_size": 2,
- "use_bfloat16": false,
  "vocab_size": 30000
  }
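
All 36 removed keys are generic PretrainedConfig attributes (generation defaults such as num_beams and temperature, task-head fields such as id2label) rather than ALBERT architecture parameters, so transformers fills them back in with the same default values when the config is loaded and the model behaves identically. A minimal sketch of verifying this, assuming the transformers library and a local copy of the trimmed config.json (the file path is hypothetical):

    from transformers import AlbertConfig

    # Load the trimmed config.json from this commit (local path is hypothetical).
    config = AlbertConfig.from_json_file("config.json")

    # Architecture fields are still read from the file ...
    print(config.hidden_size)          # 4096
    print(config.num_attention_heads)  # 64

    # ... while keys deleted in this commit come back as library defaults,
    # since PretrainedConfig defines them with exactly the removed values.
    print(config.num_beams)    # 1
    print(config.temperature)  # 1.0
    print(config.id2label)     # {0: 'LABEL_0', 1: 'LABEL_1'}

The only information the file no longer pins is task-specific metadata such as the AlbertForQuestionAnswering architectures entry, which the loading code can supply instead.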