File size: 1,469 Bytes
{
  "_num_labels": 2,
  "architectures": [
    "AlbertForQuestionAnswering"
  ],
  "attention_probs_dropout_prob": 0,
  "bad_words_ids": null,
  "bos_token_id": 2,
  "classifier_dropout_prob": 0.1,
  "decoder_start_token_id": null,
  "do_sample": false,
  "down_scale_factor": 1,
  "early_stopping": false,
  "embedding_size": 128,
  "eos_token_id": 3,
  "finetuning_task": null,
  "gap_size": 0,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0,
  "hidden_size": 4096,
  "id2label": {
    "0": "LABEL_0",
    "1": "LABEL_1"
  },
  "initializer_range": 0.02,
  "inner_group_num": 1,
  "intermediate_size": 16384,
  "is_decoder": false,
  "is_encoder_decoder": false,
  "label2id": {
    "LABEL_0": 0,
    "LABEL_1": 1
  },
  "layer_norm_eps": 1e-12,
  "layers_to_keep": [],
  "length_penalty": 1.0,
  "max_length": 20,
  "max_position_embeddings": 512,
  "min_length": 0,
  "model_type": "albert",
  "net_structure_type": 0,
  "no_repeat_ngram_size": 0,
  "num_attention_heads": 64,
  "num_beams": 1,
  "num_hidden_groups": 1,
  "num_hidden_layers": 12,
  "num_memory_blocks": 0,
  "num_return_sequences": 1,
  "output_attentions": false,
  "output_hidden_states": false,
  "output_past": true,
  "pad_token_id": 0,
  "prefix": null,
  "pruned_heads": {},
  "repetition_penalty": 1.0,
  "task_specific_params": null,
  "temperature": 1.0,
  "top_k": 50,
  "top_p": 1.0,
  "torchscript": false,
  "type_vocab_size": 2,
  "use_bfloat16": false,
  "vocab_size": 30000
}
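
A minimal sketch of using this configuration with the Hugging Face transformers library, assuming the JSON above is saved locally as config.json (the local path and the print check are illustrative assumptions; the model is built with randomly initialized weights since no checkpoint is loaded here):

from transformers import AlbertConfig, AlbertForQuestionAnswering

# Load the configuration shown above from an assumed local copy.
config = AlbertConfig.from_json_file("config.json")

# Instantiate the architecture named in "architectures".
# Weights are randomly initialized; use from_pretrained(...) to load a trained checkpoint.
model = AlbertForQuestionAnswering(config)

# Quick sanity check against the values in the file.
print(config.hidden_size, config.num_attention_heads, config.num_hidden_layers)  # 4096 64 12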