{
"architectures": [
"RobertaForTokenClassification"
],
"attention_probs_dropout_prob": 0.1,
"bos_token_id": 2,
"classifier_dropout": null,
"custom_pipelines": {
"universal-dependencies": {
"impl": "ud.UniversalDependenciesPipeline"
}
},
"eos_token_id": 3,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 1024,
"id2label": {
"0": "-|_|dep",
"1": "ADJ|_|acl",
"2": "ADJ|_|advcl",
"3": "ADJ|_|amod",
"4": "ADJ|_|ccomp",
"5": "ADJ|_|csubj",
"6": "ADJ|_|dep",
"7": "ADJ|_|dislocated",
"8": "ADJ|_|nmod",
"9": "ADJ|_|nsubj",
"10": "ADJ|_|obj",
"11": "ADJ|_|obl",
"12": "ADJ|_|root",
"13": "ADP|_|case",
"14": "ADP|_|fixed",
"15": "ADV|_|advcl",
"16": "ADV|_|advmod",
"17": "ADV|_|dep",
"18": "ADV|_|obj",
"19": "ADV|_|root",
"20": "AUX|Polarity=Neg|aux",
"21": "AUX|_|aux",
"22": "AUX|_|cop",
"23": "AUX|_|fixed",
"24": "AUX|_|root",
"25": "CCONJ|_|cc",
"26": "DET|_|det",
"27": "INTJ|_|discourse",
"28": "INTJ|_|root",
"29": "NOUN|Polarity=Neg|obl",
"30": "NOUN|Polarity=Neg|root",
"31": "NOUN|_|acl",
"32": "NOUN|_|advcl",
"33": "NOUN|_|ccomp",
"34": "NOUN|_|compound",
"35": "NOUN|_|csubj",
"36": "NOUN|_|dislocated",
"37": "NOUN|_|nmod",
"38": "NOUN|_|nsubj",
"39": "NOUN|_|obj",
"40": "NOUN|_|obl",
"41": "NOUN|_|root",
"42": "NUM|_|advcl",
"43": "NUM|_|compound",
"44": "NUM|_|dislocated",
"45": "NUM|_|nmod",
"46": "NUM|_|nsubj",
"47": "NUM|_|nummod",
"48": "NUM|_|obj",
"49": "NUM|_|obl",
"50": "NUM|_|root",
"51": "PART|_|mark",
"52": "PRON|_|acl",
"53": "PRON|_|advcl",
"54": "PRON|_|dislocated",
"55": "PRON|_|nmod",
"56": "PRON|_|nsubj",
"57": "PRON|_|obj",
"58": "PRON|_|obl",
"59": "PRON|_|root",
"60": "PROPN|_|acl",
"61": "PROPN|_|advcl",
"62": "PROPN|_|compound",
"63": "PROPN|_|dislocated",
"64": "PROPN|_|nmod",
"65": "PROPN|_|nsubj",
"66": "PROPN|_|obj",
"67": "PROPN|_|obl",
"68": "PROPN|_|root",
"69": "PUNCT|_|punct",
"70": "SCONJ|_|mark",
"71": "SYM|_|compound",
"72": "SYM|_|dep",
"73": "SYM|_|nmod",
"74": "SYM|_|obl",
"75": "VERB|_|acl",
"76": "VERB|_|advcl",
"77": "VERB|_|ccomp",
"78": "VERB|_|compound",
"79": "VERB|_|csubj",
"80": "VERB|_|dislocated",
"81": "VERB|_|nmod",
"82": "VERB|_|obj",
"83": "VERB|_|obl",
"84": "VERB|_|root",
"85": "X|_|dep",
"86": "X|_|goeswith",
"87": "X|_|nmod"
},
"initializer_range": 0.02,
"intermediate_size": 4096,
"label2id": {
"-|_|dep": 0,
"ADJ|_|acl": 1,
"ADJ|_|advcl": 2,
"ADJ|_|amod": 3,
"ADJ|_|ccomp": 4,
"ADJ|_|csubj": 5,
"ADJ|_|dep": 6,
"ADJ|_|dislocated": 7,
"ADJ|_|nmod": 8,
"ADJ|_|nsubj": 9,
"ADJ|_|obj": 10,
"ADJ|_|obl": 11,
"ADJ|_|root": 12,
"ADP|_|case": 13,
"ADP|_|fixed": 14,
"ADV|_|advcl": 15,
"ADV|_|advmod": 16,
"ADV|_|dep": 17,
"ADV|_|obj": 18,
"ADV|_|root": 19,
"AUX|Polarity=Neg|aux": 20,
"AUX|_|aux": 21,
"AUX|_|cop": 22,
"AUX|_|fixed": 23,
"AUX|_|root": 24,
"CCONJ|_|cc": 25,
"DET|_|det": 26,
"INTJ|_|discourse": 27,
"INTJ|_|root": 28,
"NOUN|Polarity=Neg|obl": 29,
"NOUN|Polarity=Neg|root": 30,
"NOUN|_|acl": 31,
"NOUN|_|advcl": 32,
"NOUN|_|ccomp": 33,
"NOUN|_|compound": 34,
"NOUN|_|csubj": 35,
"NOUN|_|dislocated": 36,
"NOUN|_|nmod": 37,
"NOUN|_|nsubj": 38,
"NOUN|_|obj": 39,
"NOUN|_|obl": 40,
"NOUN|_|root": 41,
"NUM|_|advcl": 42,
"NUM|_|compound": 43,
"NUM|_|dislocated": 44,
"NUM|_|nmod": 45,
"NUM|_|nsubj": 46,
"NUM|_|nummod": 47,
"NUM|_|obj": 48,
"NUM|_|obl": 49,
"NUM|_|root": 50,
"PART|_|mark": 51,
"PRON|_|acl": 52,
"PRON|_|advcl": 53,
"PRON|_|dislocated": 54,
"PRON|_|nmod": 55,
"PRON|_|nsubj": 56,
"PRON|_|obj": 57,
"PRON|_|obl": 58,
"PRON|_|root": 59,
"PROPN|_|acl": 60,
"PROPN|_|advcl": 61,
"PROPN|_|compound": 62,
"PROPN|_|dislocated": 63,
"PROPN|_|nmod": 64,
"PROPN|_|nsubj": 65,
"PROPN|_|obj": 66,
"PROPN|_|obl": 67,
"PROPN|_|root": 68,
"PUNCT|_|punct": 69,
"SCONJ|_|mark": 70,
"SYM|_|compound": 71,
"SYM|_|dep": 72,
"SYM|_|nmod": 73,
"SYM|_|obl": 74,
"VERB|_|acl": 75,
"VERB|_|advcl": 76,
"VERB|_|ccomp": 77,
"VERB|_|compound": 78,
"VERB|_|csubj": 79,
"VERB|_|dislocated": 80,
"VERB|_|nmod": 81,
"VERB|_|obj": 82,
"VERB|_|obl": 83,
"VERB|_|root": 84,
"X|_|dep": 85,
"X|_|goeswith": 86,
"X|_|nmod": 87
},
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "roberta",
"num_attention_heads": 16,
"num_hidden_layers": 24,
"pad_token_id": 0,
"position_embedding_type": "absolute",
"tokenizer_class": "RemBertTokenizerFast",
"torch_dtype": "float32",
"transformers_version": "4.22.1",
"type_vocab_size": 2,
"use_cache": true,
"vocab_size": 250315
}
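The id2label / label2id maps above pair each of the 88 class ids with a composite tag of the form UPOS|FEATS|DEPREL (for example, "AUX|Polarity=Neg|aux" is the UPOS tag AUX with the feature Polarity=Neg and the dependency relation aux; "_" marks an empty feature field), and "custom_pipelines" registers a "universal-dependencies" task backed by ud.UniversalDependenciesPipeline. A minimal usage sketch follows, assuming this config.json sits in a Hugging Face model repository that also ships the ud.py module defining that pipeline class; the repository id below is a placeholder, and the exact output schema is whatever the custom pipeline code (not shown here) produces.

# Minimal sketch, under the assumptions stated above.
from transformers import pipeline, AutoTokenizer, AutoModelForTokenClassification

MODEL_ID = "your-namespace/your-ud-model"  # placeholder: replace with the actual repo id

# Load the custom "universal-dependencies" pipeline registered in this config.
# trust_remote_code=True is required so that ud.UniversalDependenciesPipeline
# can be downloaded and executed from the model repository.
nlp = pipeline("universal-dependencies", model=MODEL_ID, trust_remote_code=True)
print(nlp("Example sentence goes here."))  # output schema is defined by the custom pipeline

# The checkpoint can also be loaded as a plain 88-class token classifier;
# AutoTokenizer should resolve to RemBertTokenizerFast per "tokenizer_class" above.
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForTokenClassification.from_pretrained(MODEL_ID)

The pipeline route is presumably the intended one: the token-classification head on its own only predicts the composite UPOS|FEATS|DEPREL tags per token, while the custom pipeline is what would assemble them into a full Universal Dependencies analysis.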