{
"_name_or_path": "distilbert-base-uncased",
"activation": "gelu",
"architectures": [
"DistilBertForSequenceClassification"
],
"attention_dropout": 0.1,
"dim": 768,
"dropout": 0.1,
"hidden_dim": 3072,
"id2label": {
"0": "Cardiology",
"1": "Dentistry",
"2": "Dermatology",
"3": "Diets and nutritions",
"4": "Endocrinology",
"5": "Ent",
"6": "Gastroenterology",
"7": "Gynecology",
"8": "Hematology - oncology",
"9": "Hospice care",
"10": "Nephrology",
"11": "Neurology",
"12": "Opd",
"13": "Ophthalmology",
"14": "Orthopedic",
"15": "Orthopedists",
"16": "Pediatrics",
"17": "Psychiatry",
"18": "Radiology",
"19": "Urology"
},
"initializer_range": 0.02,
"label2id": {
"Cardiology": 0,
"Dentistry": 1,
"Dermatology": 2,
"Diets and nutritions": 3,
"Endocrinology": 4,
"Ent": 5,
"Gastroenterology": 6,
"Gynecology": 7,
"Hematology - oncology": 8,
"Hospice care": 9,
"Nephrology": 10,
"Neurology": 11,
"Opd": 12,
"Ophthalmology": 13,
"Orthopedic": 14,
"Orthopedists": 15,
"Pediatrics": 16,
"Psychiatry": 17,
"Radiology": 18,
"Urology": 19
},
"max_position_embeddings": 512,
"model_type": "distilbert",
"n_heads": 12,
"n_layers": 6,
"pad_token_id": 0,
"qa_dropout": 0.1,
"seq_classif_dropout": 0.2,
"sinusoidal_pos_embds": false,
"tie_weights_": true,
"transformers_version": "4.29.2",
"vocab_size": 30522
}