{ "_name_or_path": "dmis-lab/biobert-v1.1", "architectures": [ "BertForSequenceClassification" ], "attention_probs_dropout_prob": 0.1, "classifier_dropout": null, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "Bawl fi’l-Firāsh (Nocturnal Enuresis)", "1": "Bawāsīr Dāmiya (Bleeding Piles)", "2": "Bawāsīr ‘Amiyā (Non-Bleeding Piles)", "3": "Hasāt-i Kuliya (Renal Calculi)", "4": "Hurqat-i Bawl (Burning Micturition)", "5": "Ishāl (Purgation)", "6": "Jarayān (Spermatorrhea)", "7": "Kasrat-i Ihtilām (Excessive Nocturnal Emission)", "8": "Nawāsīr-i Lissa (Pyorrhea)", "9": "Nazla (Catarrh)", "10": "Qabz (Constipation)", "11": "Qillat-i Manī (Oligospermia)", "12": "Qulā‘ (Stomatitis)", "13": "Rīh al-Bawāsīr (Hemorrhoids)", "14": "Sayalān-i Rahim (Leucorrhea)", "15": "Sha‘īra (Stye/Hordeolum)", "16": "Sulāq/Bāminī (Blepharitis)", "17": "Sur‘at-i Inzāl (Premature Ejaculation)", "18": "Su‘āl (Cough)", "19": "Waja‘-i Asnān (Toothache)", "20": "Waram-i Lawzatayn (Tonsillitis)", "21": "Waram-i Lissa (Gingivitis)", "22": "Zahīr (Dysentery)", "23": "Zukām (Coryza)" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "Bawl fi’l-Firāsh (Nocturnal Enuresis)": 0, "Bawāsīr Dāmiya (Bleeding Piles)": 1, "Qabz (Constipation)": 10, "Qillat-i Manī (Oligospermia)": 11, "Qulā‘ (Stomatitis)": 12, "Rīh al-Bawāsīr (Hemorrhoids)": 13, "Sayalān-i Rahim (Leucorrhea)": 14, "Sha‘īra (Stye/Hordeolum)": 15, "Sulāq/Bāminī (Blepharitis)": 16, "Sur‘at-i Inzāl (Premature Ejaculation)": 17, "Su‘āl (Cough)": 18, "Waja‘-i Asnān (Toothache)": 19, "Bawāsīr ‘Amiyā (Non-Bleeding Piles)": 2, "Waram-i Lawzatayn (Tonsillitis)": 20, "Waram-i Lissa (Gingivitis)": 21, "Zahīr (Dysentery)": 22, "Zukām (Coryza)": 23, "Hasāt-i Kuliya (Renal Calculi)": 3, "Hurqat-i Bawl (Burning Micturition)": 4, "Ishāl (Purgation)": 5, "Jarayān (Spermatorrhea)": 6, "Kasrat-i Ihtilām (Excessive Nocturnal Emission)": 7, "Nawāsīr-i Lissa (Pyorrhea)": 8, "Nazla (Catarrh)": 9 }, "layer_norm_eps": 1e-12, "max_position_embeddings": 512, "model_type": "bert", "num_attention_heads": 12, "num_hidden_layers": 12, "pad_token_id": 0, "position_embedding_type": "absolute", "problem_type": "multi_label_classification", "torch_dtype": "float32", "transformers_version": "4.28.1", "type_vocab_size": 2, "use_cache": true, "vocab_size": 28996 }