|
{
  "_name_or_path": "models/CNEC2_0_nested_slavicbert",
  "architectures": [
    "BertForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.2,
  "classifier_dropout": null,
  "directionality": "bidi",
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.2,
  "hidden_size": 768,
  "id2label": {
    "0": "O",
    "1": "B-P",
    "2": "I-P",
    "3": "B-T",
    "4": "I-T",
    "5": "B-A",
    "6": "I-A",
    "7": "B-C",
    "8": "I-C",
    "9": "B-ah",
    "10": "I-ah",
    "11": "B-at",
    "12": "I-at",
    "13": "B-az",
    "14": "I-az",
    "15": "B-g_",
    "16": "I-g_",
    "17": "B-gc",
    "18": "I-gc",
    "19": "B-gh",
    "20": "I-gh",
    "21": "B-gl",
    "22": "I-gl",
    "23": "B-gq",
    "24": "I-gq",
    "25": "B-gr",
    "26": "I-gr",
    "27": "B-gs",
    "28": "I-gs",
    "29": "B-gt",
    "30": "I-gt",
    "31": "B-gu",
    "32": "I-gu",
    "33": "B-i_",
    "34": "I-i_",
    "35": "B-ia",
    "36": "I-ia",
    "37": "B-ic",
    "38": "I-ic",
    "39": "B-if",
    "40": "I-if",
    "41": "B-io",
    "42": "I-io",
    "43": "B-me",
    "44": "I-me",
    "45": "B-mi",
    "46": "I-mi",
    "47": "B-mn",
    "48": "I-mn",
    "49": "B-ms",
    "50": "I-ms",
    "51": "B-n_",
    "52": "I-n_",
    "53": "B-na",
    "54": "I-na",
    "55": "B-nb",
    "56": "I-nb",
    "57": "B-nc",
    "58": "I-nc",
    "59": "B-ni",
    "60": "I-ni",
    "61": "B-no",
    "62": "I-no",
    "63": "B-ns",
    "64": "I-ns",
    "65": "B-o_",
    "66": "I-o_",
    "67": "B-oa",
    "68": "I-oa",
    "69": "B-oe",
    "70": "I-oe",
    "71": "B-om",
    "72": "I-om",
    "73": "B-op",
    "74": "I-op",
    "75": "B-or",
    "76": "I-or",
    "77": "B-p_",
    "78": "I-p_",
    "79": "B-pc",
    "80": "I-pc",
    "81": "B-pd",
    "82": "I-pd",
    "83": "B-pf",
    "84": "I-pf",
    "85": "B-pm",
    "86": "I-pm",
    "87": "B-pp",
    "88": "I-pp",
    "89": "B-ps",
    "90": "I-ps",
    "91": "B-td",
    "92": "I-td",
    "93": "B-tf",
    "94": "I-tf",
    "95": "B-th",
    "96": "I-th",
    "97": "B-tm",
    "98": "I-tm",
    "99": "B-ty",
    "100": "I-ty"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "B-A": 5,
    "B-C": 7,
    "B-P": 1,
    "B-T": 3,
    "B-ah": 9,
    "B-at": 11,
    "B-az": 13,
    "B-g_": 15,
    "B-gc": 17,
    "B-gh": 19,
    "B-gl": 21,
    "B-gq": 23,
    "B-gr": 25,
    "B-gs": 27,
    "B-gt": 29,
    "B-gu": 31,
    "B-i_": 33,
    "B-ia": 35,
    "B-ic": 37,
    "B-if": 39,
    "B-io": 41,
    "B-me": 43,
    "B-mi": 45,
    "B-mn": 47,
    "B-ms": 49,
    "B-n_": 51,
    "B-na": 53,
    "B-nb": 55,
    "B-nc": 57,
    "B-ni": 59,
    "B-no": 61,
    "B-ns": 63,
    "B-o_": 65,
    "B-oa": 67,
    "B-oe": 69,
    "B-om": 71,
    "B-op": 73,
    "B-or": 75,
    "B-p_": 77,
    "B-pc": 79,
    "B-pd": 81,
    "B-pf": 83,
    "B-pm": 85,
    "B-pp": 87,
    "B-ps": 89,
    "B-td": 91,
    "B-tf": 93,
    "B-th": 95,
    "B-tm": 97,
    "B-ty": 99,
    "I-A": 6,
    "I-C": 8,
    "I-P": 2,
    "I-T": 4,
    "I-ah": 10,
    "I-at": 12,
    "I-az": 14,
    "I-g_": 16,
    "I-gc": 18,
    "I-gh": 20,
    "I-gl": 22,
    "I-gq": 24,
    "I-gr": 26,
    "I-gs": 28,
    "I-gt": 30,
    "I-gu": 32,
    "I-i_": 34,
    "I-ia": 36,
    "I-ic": 38,
    "I-if": 40,
    "I-io": 42,
    "I-me": 44,
    "I-mi": 46,
    "I-mn": 48,
    "I-ms": 50,
    "I-n_": 52,
    "I-na": 54,
    "I-nb": 56,
    "I-nc": 58,
    "I-ni": 60,
    "I-no": 62,
    "I-ns": 64,
    "I-o_": 66,
    "I-oa": 68,
    "I-oe": 70,
    "I-om": 72,
    "I-op": 74,
    "I-or": 76,
    "I-p_": 78,
    "I-pc": 80,
    "I-pd": 82,
    "I-pf": 84,
    "I-pm": 86,
    "I-pp": 88,
    "I-ps": 90,
    "I-td": 92,
    "I-tf": 94,
    "I-th": 96,
    "I-tm": 98,
    "I-ty": 100,
    "O": 0
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "output_past": true,
  "pad_token_id": 0,
  "pooler_fc_size": 768,
  "pooler_num_attention_heads": 12,
  "pooler_num_fc_layers": 3,
  "pooler_size_per_head": 128,
  "pooler_type": "first_token_transform",
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.36.2",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 119547
}
|
|