wannaphong committed
Commit 4bb51b4
1 Parent(s): e2b4f65

Upload config.json

Files changed (1)
  1. config.json +89 -0
config.json ADDED
@@ -0,0 +1,89 @@
+{
+  "architectures": [
+    "RobertaForTokenClassification"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "bos_token_id": 0,
+  "eos_token_id": 2,
+  "gradient_checkpointing": false,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "id2label": {
+    "0": "O",
+    "1": "B_BRN",
+    "2": "B_DES",
+    "3": "B_DTM",
+    "4": "B_LOC",
+    "5": "B_MEA",
+    "6": "B_NUM",
+    "7": "B_ORG",
+    "8": "B_PER",
+    "9": "B_TRM",
+    "10": "B_TTL",
+    "11": "I_BRN",
+    "12": "I_DES",
+    "13": "I_DTM",
+    "14": "I_LOC",
+    "15": "I_MEA",
+    "16": "I_NUM",
+    "17": "I_ORG",
+    "18": "I_PER",
+    "19": "I_TRM",
+    "20": "I_TTL",
+    "21": "E_BRN",
+    "22": "E_DES",
+    "23": "E_DTM",
+    "24": "E_LOC",
+    "25": "E_MEA",
+    "26": "E_NUM",
+    "27": "E_ORG",
+    "28": "E_PER",
+    "29": "E_TRM",
+    "30": "E_TTL"
+  },
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "label2id": {
+    "O": 0,
+    "B_BRN": 1,
+    "B_DES": 2,
+    "B_DTM": 3,
+    "B_LOC": 4,
+    "B_MEA": 5,
+    "B_NUM": 6,
+    "B_ORG": 7,
+    "B_PER": 8,
+    "B_TRM": 9,
+    "B_TTL": 10,
+    "I_BRN": 11,
+    "I_DES": 12,
+    "I_DTM": 13,
+    "I_LOC": 14,
+    "I_MEA": 15,
+    "I_NUM": 16,
+    "I_ORG": 17,
+    "I_PER": 18,
+    "I_TRM": 19,
+    "I_TTL": 20,
+    "E_BRN": 21,
+    "E_DES": 22,
+    "E_DTM": 23,
+    "E_LOC": 24,
+    "E_MEA": 25,
+    "E_NUM": 26,
+    "E_ORG": 27,
+    "E_PER": 28,
+    "E_TRM": 29,
+    "E_TTL": 30
+  },
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 512,
+  "model_type": "camembert",
+  "num_attention_head": 12,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pad_token_id": 1,
+  "type_vocab_size": 1,
+  "vocab_size": 25005
+}
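For reference, this config.json is what the transformers library reads when the model is loaded for token classification: "model_type": "camembert" selects the CamemBERT architecture class, and id2label/label2id carry the 31 NER tags ("O" plus B_/I_/E_ variants). The sketch below shows how a config like this is typically consumed; the repository id and input text are placeholders I am assuming for illustration, not details taken from this commit.

```python
# A minimal usage sketch, assuming the config above belongs to a
# token-classification model hosted on the Hugging Face Hub.
# "your-namespace/your-thai-ner-model" is a placeholder repo id.
import torch
from transformers import AutoConfig, AutoModelForTokenClassification, AutoTokenizer

repo_id = "your-namespace/your-thai-ner-model"  # placeholder, not the real repo

# The config carries the label inventory defined in id2label/label2id.
config = AutoConfig.from_pretrained(repo_id)
print(config.model_type)   # -> "camembert"
print(config.num_labels)   # -> 31

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForTokenClassification.from_pretrained(repo_id)

text = "..."  # replace with an input sentence to tag
inputs = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits

# id2label maps each predicted class index back to its tag string.
pred_ids = logits.argmax(dim=-1)[0].tolist()
tags = [model.config.id2label[i] for i in pred_ids]
tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
print(list(zip(tokens, tags)))
```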