{ "_name_or_path": "AutoTrain", "_num_labels": 23, "architectures": [ "BertForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "classifier_dropout": null, "directionality": "bidi", "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "B-overspeed_governor_amts_max", "1": "B-overspeed_governor_amts_mini", "2": "B-overspeed_governor_dmts_max", "3": "B-overspeed_governor_dmts_min", "4": "B-tec_expiry_date", "5": "B-tec_first_issue_date", "6": "B-tec_issue_date", "7": "B-tec_model_no", "8": "B-tec_rated_speed_max", "9": "B-tec_rated_speed_min", "10": "B-tec_standard", "11": "I-overspeed_governor_amts_max", "12": "I-overspeed_governor_amts_mini", "13": "I-overspeed_governor_dmts_max", "14": "I-overspeed_governor_dmts_min", "15": "I-tec_expiry_date", "16": "I-tec_first_issue_date", "17": "I-tec_issue_date", "18": "I-tec_model_no", "19": "I-tec_rated_speed_max", "20": "I-tec_rated_speed_min", "21": "I-tec_standard", "22": "O" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "B-overspeed_governor_amts_max": 0, "B-overspeed_governor_amts_mini": 1, "B-overspeed_governor_dmts_max": 2, "B-overspeed_governor_dmts_min": 3, "B-tec_expiry_date": 4, "B-tec_first_issue_date": 5, "B-tec_issue_date": 6, "B-tec_model_no": 7, "B-tec_rated_speed_max": 8, "B-tec_rated_speed_min": 9, "B-tec_standard": 10, "I-overspeed_governor_amts_max": 11, "I-overspeed_governor_amts_mini": 12, "I-overspeed_governor_dmts_max": 13, "I-overspeed_governor_dmts_min": 14, "I-tec_expiry_date": 15, "I-tec_first_issue_date": 16, "I-tec_issue_date": 17, "I-tec_model_no": 18, "I-tec_rated_speed_max": 19, "I-tec_rated_speed_min": 20, "I-tec_standard": 21, "O": 22 }, "layer_norm_eps": 1e-12, "max_length": 512, "max_position_embeddings": 512, "model_type": "bert", "num_attention_heads": 12, "num_hidden_layers": 12, "pad_token_id": 0, "padding": "max_length", "pooler_fc_size": 768, "pooler_num_attention_heads": 12, "pooler_num_fc_layers": 3, "pooler_size_per_head": 128, "pooler_type": "first_token_transform", "position_embedding_type": "absolute", "torch_dtype": "float32", "transformers_version": "4.22.1", "type_vocab_size": 2, "use_cache": true, "vocab_size": 105879 }