abhishek (HF staff): Commit From AutoTrain (3f55b78)
{
  "_name_or_path": "AutoTrain",
  "_num_labels": 57,
  "architectures": [
    "BertForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "classifier_dropout": null,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "129.0",
    "1": "131.0",
    "2": "142.0",
    "3": "149.0",
    "4": "151.0",
    "5": "159.0",
    "6": "191.0",
    "7": "192.0",
    "8": "193.0",
    "9": "199.0",
    "10": "212.0",
    "11": "222.0",
    "12": "231.0",
    "13": "232.0",
    "14": "234.0",
    "15": "234.1",
    "16": "234.2",
    "17": "234.3",
    "18": "234.4",
    "19": "235.0",
    "20": "236.0",
    "21": "239.0",
    "22": "240.0",
    "23": "251.0",
    "24": "252.0",
    "25": "262.0",
    "26": "313.0",
    "27": "314.0",
    "28": "319.0",
    "29": "321.0",
    "30": "325.0",
    "31": "330.0",
    "32": "342.0",
    "33": "350.0",
    "34": "361.0",
    "35": "362.0",
    "36": "370.0",
    "37": "380.0",
    "38": "390.0",
    "39": "410.0",
    "40": "422.0",
    "41": "423.0",
    "42": "424.0",
    "43": "429.0",
    "44": "449.0",
    "45": "490.0",
    "46": "511.0",
    "47": "512.0",
    "48": "513.0",
    "49": "519.0",
    "50": "521.0",
    "51": "523.0",
    "52": "526.0",
    "53": "529.0",
    "54": "539.0",
    "55": "611.0",
    "56": "690.0"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "129.0": 0,
    "131.0": 1,
    "142.0": 2,
    "149.0": 3,
    "151.0": 4,
    "159.0": 5,
    "191.0": 6,
    "192.0": 7,
    "193.0": 8,
    "199.0": 9,
    "212.0": 10,
    "222.0": 11,
    "231.0": 12,
    "232.0": 13,
    "234.0": 14,
    "234.1": 15,
    "234.2": 16,
    "234.3": 17,
    "234.4": 18,
    "235.0": 19,
    "236.0": 20,
    "239.0": 21,
    "240.0": 22,
    "251.0": 23,
    "252.0": 24,
    "262.0": 25,
    "313.0": 26,
    "314.0": 27,
    "319.0": 28,
    "321.0": 29,
    "325.0": 30,
    "330.0": 31,
    "342.0": 32,
    "350.0": 33,
    "361.0": 34,
    "362.0": 35,
    "370.0": 36,
    "380.0": 37,
    "390.0": 38,
    "410.0": 39,
    "422.0": 40,
    "423.0": 41,
    "424.0": 42,
    "429.0": 43,
    "449.0": 44,
    "490.0": 45,
    "511.0": 46,
    "512.0": 47,
    "513.0": 48,
    "519.0": 49,
    "521.0": 50,
    "523.0": 51,
    "526.0": 52,
    "529.0": 53,
    "539.0": 54,
    "611.0": 55,
    "690.0": 56
  },
  "layer_norm_eps": 1e-12,
  "max_length": 192,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "output_past": true,
  "pad_token_id": 1,
  "padding": "max_length",
  "position_embedding_type": "absolute",
  "problem_type": "single_label_classification",
  "torch_dtype": "float32",
  "transformers_version": "4.15.0",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 31002
}
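
For context, this config describes a BERT encoder (12 layers, hidden size 768, vocab size 31002) with a 57-way single-label classification head; prediction indices are mapped back to label strings through id2label. Below is a minimal usage sketch with the transformers library. The repo id is a hypothetical placeholder, since this commit does not name the model repository, and the tokenization arguments simply mirror the config's max_length and padding values.

# Minimal usage sketch; the repo id is a placeholder, not taken from this commit.
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

repo_id = "username/autotrain-model"  # hypothetical: replace with the actual AutoTrain repo

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForSequenceClassification.from_pretrained(repo_id)
model.eval()

# Mirror the config's inference-time settings: pad/truncate to 192 tokens.
inputs = tokenizer(
    "example input text",
    padding="max_length",
    truncation=True,
    max_length=192,
    return_tensors="pt",
)

with torch.no_grad():
    logits = model(**inputs).logits  # shape (1, 57): one score per class

pred_id = int(logits.argmax(dim=-1))
print(model.config.id2label[pred_id])  # prints one of the label strings, e.g. "234.1"

Because problem_type is single_label_classification, the head scores all 57 classes jointly and a simple argmax over the logits selects the single predicted label.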