{
  "architectures": [
    "AlbertForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0,
  "bos_token_id": 101,
  "classifier_dropout_prob": 0.1,
  "down_scale_factor": 1,
  "embedding_size": 128,
  "eos_token_id": 102,
  "gap_size": 0,
  "hidden_act": "relu",
  "hidden_dropout_prob": 0,
  "hidden_size": 768,
  "id2label": {
    "0": "A",
    "1": "Caa",
    "2": "Cab",
    "3": "Cba",
    "4": "Cbb",
    "5": "D",
    "6": "Da",
    "7": "Dfa",
    "8": "Dfb",
    "9": "Di",
    "10": "Dk",
    "11": "DM",
    "12": "I",
    "13": "Na",
    "14": "Nb",
    "15": "Nc",
    "16": "Ncd",
    "17": "Nd",
    "18": "Nep",
    "19": "Neqa",
    "20": "Neqb",
    "21": "Nes",
    "22": "Neu",
    "23": "Nf",
    "24": "Ng",
    "25": "Nh",
    "26": "Nv",
    "27": "P",
    "28": "T",
    "29": "VA",
    "30": "VAC",
    "31": "VB",
    "32": "VC",
    "33": "VCL",
    "34": "VD",
    "35": "VF",
    "36": "VE",
    "37": "VG",
    "38": "VH",
    "39": "VHC",
    "40": "VI",
    "41": "VJ",
    "42": "VK",
    "43": "VL",
    "44": "V_2",
    "45": "DE",
    "46": "SHI",
    "47": "FW",
    "48": "COLONCATEGORY",
    "49": "COMMACATEGORY",
    "50": "DASHCATEGORY",
    "51": "DOTCATEGORY",
    "52": "ETCCATEGORY",
    "53": "EXCLAMATIONCATEGORY",
    "54": "PARENTHESISCATEGORY",
    "55": "PAUSECATEGORY",
    "56": "PERIODCATEGORY",
    "57": "QUESTIONCATEGORY",
    "58": "SEMICOLONCATEGORY",
    "59": "SPCHANGECATEGORY"
  },
  "initializer_range": 0.02,
  "inner_group_num": 1,
  "intermediate_size": 3072,
  "label2id": {
    "A": 0,
    "COLONCATEGORY": 48,
    "COMMACATEGORY": 49,
    "Caa": 1,
    "Cab": 2,
    "Cba": 3,
    "Cbb": 4,
    "D": 5,
    "DASHCATEGORY": 50,
    "DE": 45,
    "DM": 11,
    "DOTCATEGORY": 51,
    "Da": 6,
    "Dfa": 7,
    "Dfb": 8,
    "Di": 9,
    "Dk": 10,
    "ETCCATEGORY": 52,
    "EXCLAMATIONCATEGORY": 53,
    "FW": 47,
    "I": 12,
    "Na": 13,
    "Nb": 14,
    "Nc": 15,
    "Ncd": 16,
    "Nd": 17,
    "Nep": 18,
    "Neqa": 19,
    "Neqb": 20,
    "Nes": 21,
    "Neu": 22,
    "Nf": 23,
    "Ng": 24,
    "Nh": 25,
    "Nv": 26,
    "P": 27,
    "PARENTHESISCATEGORY": 54,
    "PAUSECATEGORY": 55,
    "PERIODCATEGORY": 56,
    "QUESTIONCATEGORY": 57,
    "SEMICOLONCATEGORY": 58,
    "SHI": 46,
    "SPCHANGECATEGORY": 59,
    "T": 28,
    "VA": 29,
    "VAC": 30,
    "VB": 31,
    "VC": 32,
    "VCL": 33,
    "VD": 34,
    "VE": 36,
    "VF": 35,
    "VG": 37,
    "VH": 38,
    "VHC": 39,
    "VI": 40,
    "VJ": 41,
    "VK": 42,
    "VL": 43,
    "V_2": 44
  },
  "layer_norm_eps": 1e-12,
  "layers_to_keep": [],
  "max_position_embeddings": 512,
  "model_type": "albert",
  "net_structure_type": 0,
  "num_attention_heads": 12,
  "num_hidden_groups": 1,
  "num_hidden_layers": 12,
  "num_memory_blocks": 0,
  "pad_token_id": 0,
  "tokenizer_class": "BertTokenizerFast",
  "type_vocab_size": 2,
  "vocab_size": 21128
}
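
A minimal sketch of loading a checkpoint that ships this config with Hugging Face transformers. The repository id "org/model-name" is a placeholder (the repo is not named in this file). The "tokenizer_class" field above is what lets AutoTokenizer resolve a BertTokenizerFast even though the model architecture is ALBERT, and the "id2label"/"label2id" maps let the token-classification pipeline return the 60 tags listed above instead of generic LABEL_* names.

# Minimal sketch; "org/model-name" is a placeholder repository id.
from transformers import AutoTokenizer, AutoModelForTokenClassification, pipeline

repo = "org/model-name"  # placeholder: substitute the actual model repository

tokenizer = AutoTokenizer.from_pretrained(repo)                # BertTokenizerFast, per "tokenizer_class"
model = AutoModelForTokenClassification.from_pretrained(repo)  # AlbertForTokenClassification, per "architectures"

tagger = pipeline("token-classification", model=model, tokenizer=tokenizer)
print(tagger("placeholder input text"))  # each token's tag comes from "id2label" above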