Update config.json (commit d7e1a23)
{
  "_num_labels": 13,
  "architectures": [
    "BertForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.2,
  "bad_words_ids": null,
  "bos_token_id": null,
  "decoder_start_token_id": null,
  "do_sample": false,
  "early_stopping": false,
  "eos_token_id": null,
  "finetuning_task": null,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.3,
  "hidden_size": 768,
  "id2label": {
    "0": "B-eve",
    "1": "B-loc",
    "2": "B-misc",
    "3": "B-org",
    "4": "B-per",
    "5": "B-pro",
    "6": "I-eve",
    "7": "I-loc",
    "8": "I-misc",
    "9": "I-org",
    "10": "I-per",
    "11": "I-pro",
    "12": "O"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "is_decoder": false,
  "is_encoder_decoder": false,
  "label2id": {
    "B-eve": 0,
    "B-loc": 1,
    "B-misc": 2,
    "B-org": 3,
    "B-per": 4,
    "B-pro": 5,
    "I-eve": 6,
    "I-loc": 7,
    "I-misc": 8,
    "I-org": 9,
    "I-per": 10,
    "I-pro": 11,
    "O": 12
  },
  "layer_norm_eps": 1e-12,
  "length_penalty": 1.0,
  "max_length": 20,
  "max_position_embeddings": 512,
  "min_length": 0,
  "model_type": "bert",
  "no_repeat_ngram_size": 0,
  "num_attention_heads": 12,
  "num_beams": 1,
  "num_hidden_layers": 12,
  "num_return_sequences": 1,
  "output_attentions": false,
  "output_hidden_states": false,
  "output_past": true,
  "pad_token_id": 0,
  "prefix": null,
  "pruned_heads": {},
  "repetition_penalty": 1.0,
  "task_specific_params": null,
  "temperature": 1.0,
  "top_k": 50,
  "top_p": 1.0,
  "torchscript": false,
  "type_vocab_size": 2,
  "use_bfloat16": false,
  "vocab_size": 30000
}
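This configuration describes a BERT-base encoder (12 layers, 12 heads, hidden size 768) fine-tuned for token classification over 13 IOB2 NER tags (eve, loc, misc, org, per, pro, plus O), with the id-to-tag mapping carried in `id2label`/`label2id`. The generation-related fields (do_sample, num_beams, top_k, and so on) are default values serialized by the base config class and are not used for token classification. As a minimal sketch of how such a config is consumed, the snippet below loads the model with the Hugging Face transformers library; the repository id "your-username/your-ner-model" is a placeholder, not the actual model name.

```python
# Minimal sketch, assuming a Hub repo that ships this config.json alongside its weights.
# "your-username/your-ner-model" is a placeholder repo id, not the real model name.
from transformers import (
    AutoConfig,
    AutoModelForTokenClassification,
    AutoTokenizer,
    pipeline,
)

model_id = "your-username/your-ner-model"  # placeholder

# The config exposes the label mapping defined above; integer keys are parsed from the JSON.
config = AutoConfig.from_pretrained(model_id)
print(config.num_labels)    # 13
print(config.id2label[4])   # "B-per"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForTokenClassification.from_pretrained(model_id)

# The token-classification pipeline maps predicted label ids back to IOB2 tags via id2label
# and can merge B-/I- pieces into entity spans.
ner = pipeline("ner", model=model, tokenizer=tokenizer, aggregation_strategy="simple")
print(ner("Example sentence mentioning Amsterdam."))
```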