autonlp-txc-17923129 / config.json
Commit From AutoNLP (86b6fc2) by abhishek (HF staff)
{
  "_name_or_path": "AutoNLP",
  "_num_labels": 24,
  "architectures": [
    "BertForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "directionality": "bidi",
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1024,
  "id2label": {
    "0": "1.0",
    "1": "10.0",
    "2": "11.0",
    "3": "12.0",
    "4": "13.0",
    "5": "14.0",
    "6": "15.0",
    "7": "16.0",
    "8": "17.0",
    "9": "18.0",
    "10": "19.0",
    "11": "2.0",
    "12": "20.0",
    "13": "21.0",
    "14": "22.0",
    "15": "23.0",
    "16": "24.0",
    "17": "3.0",
    "18": "4.0",
    "19": "5.0",
    "20": "6.0",
    "21": "7.0",
    "22": "8.0",
    "23": "9.0"
  },
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "label2id": {
    "1.0": 0,
    "10.0": 1,
    "11.0": 2,
    "12.0": 3,
    "13.0": 4,
    "14.0": 5,
    "15.0": 6,
    "16.0": 7,
    "17.0": 8,
    "18.0": 9,
    "19.0": 10,
    "2.0": 11,
    "20.0": 12,
    "21.0": 13,
    "22.0": 14,
    "23.0": 15,
    "24.0": 16,
    "3.0": 17,
    "4.0": 18,
    "5.0": 19,
    "6.0": 20,
    "7.0": 21,
    "8.0": 22,
    "9.0": 23
  },
  "layer_norm_eps": 1e-12,
  "max_length": 192,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "pad_token_id": 0,
  "padding": "max_length",
  "pooler_fc_size": 768,
  "pooler_num_attention_heads": 12,
  "pooler_num_fc_layers": 3,
  "pooler_size_per_head": 128,
  "pooler_type": "first_token_transform",
  "position_embedding_type": "absolute",
  "problem_type": "single_label_classification",
  "transformers_version": "4.8.0",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 28996
}
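
This config describes a 24-layer, 1024-hidden, 16-head BERT (a BERT-large cased architecture, vocab_size 28996) fine-tuned by AutoNLP for single-label classification over 24 classes whose label names are the strings "1.0" through "24.0" (note the lexicographic ordering inside id2label). The sketch below is not part of the repository; it is a minimal, hedged example of loading the model with the transformers library and mapping a prediction back to its label via id2label. The repo id "abhishek/autonlp-txc-17923129" is assumed from the page title, and the input text is hypothetical; the truncation length of 192 mirrors the "max_length" entry in the config.

# Minimal usage sketch (assumptions: repo id and input text as noted above).
import torch
from transformers import AutoConfig, AutoModelForSequenceClassification, AutoTokenizer

model_id = "abhishek/autonlp-txc-17923129"  # assumed repo id, taken from the page title

config = AutoConfig.from_pretrained(model_id)
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForSequenceClassification.from_pretrained(model_id)
model.eval()

text = "Example input text"  # hypothetical input
# Tokenize with the same padding/truncation settings recorded in the config.
inputs = tokenizer(text, truncation=True, padding="max_length", max_length=192, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits

# id2label maps the argmax index back to the original class name ("1.0" ... "24.0").
predicted_id = logits.argmax(dim=-1).item()
print(config.id2label[predicted_id])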