{
  "_name_or_path": "giacomomiolo/electramed_small_scivocab",
  "architectures": [
    "ElectraForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "classifier_dropout": null,
  "embedding_size": 128,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 256,
  "id2label": {
    "0": "B-ZIP",
    "1": "B-FAX",
    "10": "B-CITY",
    "11": "I-DEVICE",
    "12": "I-MEDICALRECORD",
    "13": "I-STREET",
    "14": "B-MEDICALRECORD",
    "15": "B-EMAIL",
    "16": "I-AGE",
    "17": "B-HOSPITAL",
    "18": "B-ORGANIZATION",
    "19": "I-ZIP",
    "2": "B-DEVICE",
    "20": "B-COUNTRY",
    "21": "B-BIOID",
    "22": "B-URL",
    "23": "B-DATE",
    "24": "I-ORGANIZATION",
    "25": "B-STATE",
    "26": "B-PATIENT",
    "27": "I-STATE",
    "28": "I-LOCATION_OTHER",
    "29": "B-STREET",
    "3": "I-CITY",
    "30": "B-USERNAME",
    "31": "B-PROFESSION",
    "32": "I-IDNUM",
    "33": "I-HOSPITAL",
    "34": "B-IDNUM",
    "35": "B-DOCTOR",
    "36": "B-PHONE",
    "37": "I-URL",
    "38": "B-HEALTHPLAN",
    "39": "B-AGE",
    "4": "I-FAX",
    "40": "I-DOCTOR",
    "41": "O",
    "42": "I-PROFESSION",
    "43": "I-PATIENT",
    "5": "I-DATE",
    "6": "B-LOCATION_OTHER",
    "7": "I-PHONE",
    "8": "I-HEALTHPLAN",
    "9": "I-COUNTRY"
  },
  "initializer_range": 0.02,
  "intermediate_size": 1024,
  "label2id": {
    "B-AGE": "39",
    "B-BIOID": "21",
    "B-CITY": "10",
    "B-COUNTRY": "20",
    "B-DATE": "23",
    "B-DEVICE": "2",
    "B-DOCTOR": "35",
    "B-EMAIL": "15",
    "B-FAX": "1",
    "B-HEALTHPLAN": "38",
    "B-HOSPITAL": "17",
    "B-IDNUM": "34",
    "B-LOCATION_OTHER": "6",
    "B-MEDICALRECORD": "14",
    "B-ORGANIZATION": "18",
    "B-PATIENT": "26",
    "B-PHONE": "36",
    "B-PROFESSION": "31",
    "B-STATE": "25",
    "B-STREET": "29",
    "B-URL": "22",
    "B-USERNAME": "30",
    "B-ZIP": "0",
    "I-AGE": "16",
    "I-CITY": "3",
    "I-COUNTRY": "9",
    "I-DATE": "5",
    "I-DEVICE": "11",
    "I-DOCTOR": "40",
    "I-FAX": "4",
    "I-HEALTHPLAN": "8",
    "I-HOSPITAL": "33",
    "I-IDNUM": "32",
    "I-LOCATION_OTHER": "28",
    "I-MEDICALRECORD": "12",
    "I-ORGANIZATION": "24",
    "I-PATIENT": "43",
    "I-PHONE": "7",
    "I-PROFESSION": "42",
    "I-STATE": "27",
    "I-STREET": "13",
    "I-URL": "37",
    "I-ZIP": "19",
    "O": "41"
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "electra",
  "num_attention_heads": 4,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "summary_activation": "gelu",
  "summary_last_dropout": 0.1,
  "summary_type": "first",
  "summary_use_proj": true,
  "torch_dtype": "float32",
  "transformers_version": "4.22.1",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 30522
}
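
This config.json describes a small biomedical ELECTRA encoder (giacomomiolo/electramed_small_scivocab) fine-tuned for token classification over 44 BIO-tagged de-identification labels (PATIENT, DOCTOR, HOSPITAL, DATE, and so on). Below is a minimal sketch of loading such a checkpoint with transformers; the checkpoint-500 directory name and the example sentence are assumptions for illustration, not part of this repository.

```python
from transformers import AutoModelForTokenClassification, AutoTokenizer, pipeline

# Hypothetical path: substitute the actual Hub repo id or local checkpoint directory.
model_dir = "checkpoint-500"

tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModelForTokenClassification.from_pretrained(model_dir)

# The config's id2label map converts class indices into tags such as
# B-PATIENT or I-HOSPITAL; "simple" aggregation merges B-/I- pieces into spans.
ner = pipeline(
    "token-classification",
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="simple",
)

print(ner("Dr. Smith saw the patient at Mercy Hospital on 01/02/2020."))
```

With aggregation_strategy="simple", the pipeline merges B-/I- subword predictions into contiguous entity spans, so the output reports whole mentions such as "Mercy Hospital" rather than per-token tags.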