{
"_name_or_path": "/content/tmp/test-richard_bert_base_uncased",
"architectures": [
"BertForTokenClassification"
],
"attention_probs_dropout_prob": 0.1,
"directionality": "bidi",
"finetuning_task": "ner",
"gradient_checkpointing": false,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 1024,
"id2label": {
"0": "M:N_CCV",
"1": "M:N_CIN",
"2": "M:N_CLA",
"3": "M:N_CLAdv",
"4": "M:N_CLN",
"5": "M:N_CLP",
"6": "M:N_CLQ",
"7": "M:N_CLV",
"8": "M:N_CMA1",
"9": "M:N_CMAdv",
"10": "M:N_CMN1",
"11": "M:N_CMN2",
"12": "M:N_CMN3",
"13": "M:N_CMN4",
"14": "M:N_CMP",
"15": "M:N_CMP2",
"16": "M:N_CMV1",
"17": "M:N_CMV2",
"18": "M:N_CMV3",
"19": "M:N_COMBINATORY",
"20": "M:N_CPA",
"21": "M:N_ESAdvP",
"22": "M:N_ESCCV",
"23": "M:N_ESCM",
"24": "M:N_ESMA",
"25": "M:N_ESMAdvP",
"26": "M:N_ESMI",
"27": "M:N_ESMN",
"28": "M:N_ESMP",
"29": "M:N_ESMV",
"30": "M:N_HELP",
"31": "M:N_SPECIAL",
"32": "M:N_SSCCV",
"33": "M:N_SSCM",
"34": "M:N_SSMA",
"35": "M:N_SSMAdvP",
"36": "M:N_SSMI",
"37": "M:N_SSMN",
"38": "M:N_SSMP",
"39": "M:N_SSMV",
"40": "M:N_STQ",
"41": "M:N_V",
"42": "M:N_nan",
"43": "M:Y_CCV",
"44": "M:Y_CIN",
"45": "M:Y_CLA",
"46": "M:Y_CLAdv",
"47": "M:Y_CLN",
"48": "M:Y_CLP",
"49": "M:Y_CLQ",
"50": "M:Y_CLV",
"51": "M:Y_CMA1",
"52": "M:Y_CMAdv",
"53": "M:Y_CMN1",
"54": "M:Y_CMN2",
"55": "M:Y_CMN4",
"56": "M:Y_CMP",
"57": "M:Y_CMP2",
"58": "M:Y_CMV1",
"59": "M:Y_CMV2",
"60": "M:Y_CMV3",
"61": "M:Y_COMBINATORY",
"62": "M:Y_CPA",
"63": "M:Y_ESAdvP",
"64": "M:Y_ESCCV",
"65": "M:Y_ESCM",
"66": "M:Y_ESMA",
"67": "M:Y_ESMAdvP",
"68": "M:Y_ESMI",
"69": "M:Y_ESMN",
"70": "M:Y_ESMP",
"71": "M:Y_ESMV",
"72": "M:Y_HELP",
"73": "M:Y_SPECIAL",
"74": "M:Y_SSCCV",
"75": "M:Y_SSCM",
"76": "M:Y_SSMA",
"77": "M:Y_SSMAdvP",
"78": "M:Y_SSMI",
"79": "M:Y_SSMN",
"80": "M:Y_SSMP",
"81": "M:Y_SSMV",
"82": "M:Y_STQ"
},
"initializer_range": 0.02,
"intermediate_size": 4096,
"label2id": {
"M:N_CCV": 0,
"M:N_CIN": 1,
"M:N_CLA": 2,
"M:N_CLAdv": 3,
"M:N_CLN": 4,
"M:N_CLP": 5,
"M:N_CLQ": 6,
"M:N_CLV": 7,
"M:N_CMA1": 8,
"M:N_CMAdv": 9,
"M:N_CMN1": 10,
"M:N_CMN2": 11,
"M:N_CMN3": 12,
"M:N_CMN4": 13,
"M:N_CMP": 14,
"M:N_CMP2": 15,
"M:N_CMV1": 16,
"M:N_CMV2": 17,
"M:N_CMV3": 18,
"M:N_COMBINATORY": 19,
"M:N_CPA": 20,
"M:N_ESAdvP": 21,
"M:N_ESCCV": 22,
"M:N_ESCM": 23,
"M:N_ESMA": 24,
"M:N_ESMAdvP": 25,
"M:N_ESMI": 26,
"M:N_ESMN": 27,
"M:N_ESMP": 28,
"M:N_ESMV": 29,
"M:N_HELP": 30,
"M:N_SPECIAL": 31,
"M:N_SSCCV": 32,
"M:N_SSCM": 33,
"M:N_SSMA": 34,
"M:N_SSMAdvP": 35,
"M:N_SSMI": 36,
"M:N_SSMN": 37,
"M:N_SSMP": 38,
"M:N_SSMV": 39,
"M:N_STQ": 40,
"M:N_V": 41,
"M:N_nan": 42,
"M:Y_CCV": 43,
"M:Y_CIN": 44,
"M:Y_CLA": 45,
"M:Y_CLAdv": 46,
"M:Y_CLN": 47,
"M:Y_CLP": 48,
"M:Y_CLQ": 49,
"M:Y_CLV": 50,
"M:Y_CMA1": 51,
"M:Y_CMAdv": 52,
"M:Y_CMN1": 53,
"M:Y_CMN2": 54,
"M:Y_CMN4": 55,
"M:Y_CMP": 56,
"M:Y_CMP2": 57,
"M:Y_CMV1": 58,
"M:Y_CMV2": 59,
"M:Y_CMV3": 60,
"M:Y_COMBINATORY": 61,
"M:Y_CPA": 62,
"M:Y_ESAdvP": 63,
"M:Y_ESCCV": 64,
"M:Y_ESCM": 65,
"M:Y_ESMA": 66,
"M:Y_ESMAdvP": 67,
"M:Y_ESMI": 68,
"M:Y_ESMN": 69,
"M:Y_ESMP": 70,
"M:Y_ESMV": 71,
"M:Y_HELP": 72,
"M:Y_SPECIAL": 73,
"M:Y_SSCCV": 74,
"M:Y_SSCM": 75,
"M:Y_SSMA": 76,
"M:Y_SSMAdvP": 77,
"M:Y_SSMI": 78,
"M:Y_SSMN": 79,
"M:Y_SSMP": 80,
"M:Y_SSMV": 81,
"M:Y_STQ": 82
},
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "bert",
"num_attention_heads": 16,
"num_hidden_layers": 24,
"pad_token_id": 0,
"pooler_fc_size": 768,
"pooler_num_attention_heads": 12,
"pooler_num_fc_layers": 3,
"pooler_size_per_head": 128,
"pooler_type": "first_token_transform",
"position_embedding_type": "absolute",
"torch_dtype": "float32",
"transformers_version": "4.9.2",
"type_vocab_size": 2,
"use_cache": true,
"vocab_size": 28996
}