{
  "_name_or_path": "../workdir/UE_Transformer/yikuan8/Clinical-Longformer/777/",
  "architectures": [
    "LongformerForSequenceClassification"
  ],
  "attention_mode": "longformer",
  "attention_probs_dropout_prob": 0.1,
  "attention_window": [
    512,
    512,
    512,
    512,
    512,
    512,
    512,
    512,
    512,
    512,
    512,
    512
  ],
  "bos_token_id": 0,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "E785",
    "1": "I10",
    "2": "Z87891",
    "3": "K219",
    "4": "F329",
    "5": "I2510",
    "6": "N179",
    "7": "F419",
    "8": "Z7901",
    "9": "Z794",
    "10": "E039",
    "11": "E119",
    "12": "G4733",
    "13": "D649",
    "14": "E669",
    "15": "I4891",
    "16": "F17210",
    "17": "Y929",
    "18": "Z66",
    "19": "J45909",
    "20": "Z7902",
    "21": "J449",
    "22": "D62",
    "23": "02HV33Z",
    "24": "N390",
    "25": "I129",
    "26": "E1122",
    "27": "E871",
    "28": "I252",
    "29": "N189",
    "30": "E872",
    "31": "Z8673",
    "32": "Z955",
    "33": "Z86718",
    "34": "G8929",
    "35": "I110",
    "36": "K5900",
    "37": "N400",
    "38": "N183",
    "39": "I480",
    "40": "I130",
    "41": "G4700",
    "42": "D696",
    "43": "Z951",
    "44": "M109",
    "45": "Y92239",
    "46": "J9601",
    "47": "J189",
    "48": "Z23",
    "49": "Y92230"
  },
  "ignore_attention_mask": false,
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "02HV33Z": "23",
    "D62": "22",
    "D649": "13",
    "D696": "42",
    "E039": "10",
    "E1122": "26",
    "E119": "11",
    "E669": "14",
    "E785": "0",
    "E871": "27",
    "E872": "30",
    "F17210": "16",
    "F329": "4",
    "F419": "7",
    "G4700": "41",
    "G4733": "12",
    "G8929": "34",
    "I10": "1",
    "I110": "35",
    "I129": "25",
    "I130": "40",
    "I2510": "5",
    "I252": "28",
    "I480": "39",
    "I4891": "15",
    "J189": "47",
    "J449": "21",
    "J45909": "19",
    "J9601": "46",
    "K219": "3",
    "K5900": "36",
    "M109": "44",
    "N179": "6",
    "N183": "38",
    "N189": "29",
    "N390": "24",
    "N400": "37",
    "Y92230": "49",
    "Y92239": "45",
    "Y929": "17",
    "Z23": "48",
    "Z66": "18",
    "Z7901": "8",
    "Z7902": "20",
    "Z794": "9",
    "Z86718": "33",
    "Z8673": "31",
    "Z87891": "2",
    "Z951": "43",
    "Z955": "32"
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 4098,
  "model_type": "longformer",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "onnx_export": false,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "problem_type": "multi_label_classification",
  "sep_token_id": 2,
  "torch_dtype": "float32",
  "transformers_version": "4.36.2",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50265
}