system: Update config.json (9a35bdf)
{
  "_num_labels": 13,
  "architectures": [
    "BertForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.2,
  "bad_words_ids": null,
  "bos_token_id": null,
  "decoder_start_token_id": null,
  "directionality": "bidi",
  "do_sample": false,
  "early_stopping": false,
  "eos_token_id": null,
  "finetuning_task": null,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.3,
  "hidden_size": 768,
  "id2label": {
    "0": "B-eve",
    "1": "B-loc",
    "2": "B-misc",
    "3": "B-org",
    "4": "B-per",
    "5": "B-pro",
    "6": "I-eve",
    "7": "I-loc",
    "8": "I-misc",
    "9": "I-org",
    "10": "I-per",
    "11": "I-pro",
    "12": "O"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "is_decoder": false,
  "is_encoder_decoder": false,
  "label2id": {
    "B-eve": 0,
    "B-loc": 1,
    "B-misc": 2,
    "B-org": 3,
    "B-per": 4,
    "B-pro": 5,
    "I-eve": 6,
    "I-loc": 7,
    "I-misc": 8,
    "I-org": 9,
    "I-per": 10,
    "I-pro": 11,
    "O": 12
  },
  "layer_norm_eps": 1e-12,
  "length_penalty": 1.0,
  "max_length": 20,
  "max_position_embeddings": 512,
  "min_length": 0,
  "model_type": "bert",
  "no_repeat_ngram_size": 0,
  "num_attention_heads": 12,
  "num_beams": 1,
  "num_hidden_layers": 12,
  "num_return_sequences": 1,
  "output_attentions": false,
  "output_hidden_states": false,
  "output_past": true,
  "pad_token_id": 0,
  "pooler_fc_size": 768,
  "pooler_num_attention_heads": 12,
  "pooler_num_fc_layers": 3,
  "pooler_size_per_head": 128,
  "pooler_type": "first_token_transform",
  "prefix": null,
  "pruned_heads": {},
  "repetition_penalty": 1.0,
  "task_specific_params": null,
  "temperature": 1.0,
  "top_k": 50,
  "top_p": 1.0,
  "torchscript": false,
  "type_vocab_size": 2,
  "use_bfloat16": false,
  "vocab_size": 119547
}
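
The config describes a BertForTokenClassification head with 13 BIO-style NER labels (six entity types: eve, loc, misc, org, per, pro, plus O); the vocab_size of 119547 matches bert-base-multilingual-cased, suggesting the model was fine-tuned from that checkpoint. A minimal sketch of consuming this file with the transformers library, assuming it is saved locally as config.json and that PyTorch is installed (the repo id in the comment is a placeholder, not the actual model name):

```python
# Minimal sketch: parse the config shown above and build the
# architecture it names. Assumes `transformers` and `torch` are
# installed and config.json is in the working directory.
from transformers import BertConfig, BertForTokenClassification

# Parse the JSON config shown above.
config = BertConfig.from_json_file("config.json")

print(config.num_labels)    # 13 (derived from id2label)
print(config.id2label[12])  # "O"

# Instantiate the architecture listed in "architectures".
# Note: this builds a *randomly initialized* model; the fine-tuned
# weights require the accompanying checkpoint, e.g.
# BertForTokenClassification.from_pretrained("<repo_id>").
model = BertForTokenClassification(config)
```

The id2label / label2id maps are what let downstream pipelines turn per-token logits back into tag strings, so their ordering must stay consistent with the label indices used during fine-tuning.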