BioNExt-Extractor / config.json
{
  "_name_or_path": "michiyasunaga/BioLinkBERT-large",
  "arch_type": "mha",
  "architectures": [
    "BioNExtExtractorModel"
  ],
  "attention_probs_dropout_prob": 0.1,
  "auto_map": {
    "AutoConfig": "configuration_bionextextractor.BioNExtExtractorConfig",
    "AutoModel": "modeling_bionextextractor.BioNExtExtractorModel"
  },
  "classifier_dropout": null,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1024,
  "id2label": {
    "0": "Association",
    "1": "Positive_Correlation",
    "2": "Negative_Correlation",
    "3": "Cotreatment",
    "4": "Bind",
    "5": "Comparison",
    "6": "Conversion",
    "7": "Drug_Interaction",
    "8": "Negative_Class"
  },
  "index_type": "both",
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "label2id": {
    "Association": 0,
    "Bind": 4,
    "Comparison": 5,
    "Conversion": 6,
    "Cotreatment": 3,
    "Drug_Interaction": 7,
    "Negative_Class": 8,
    "Negative_Correlation": 2,
    "Positive_Correlation": 1
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "relation-novelty-extractor",
  "novel": true,
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "num_lstm_layers": 1,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "resize_embeddings": true,
  "tokenizer_special_tokens": [
    "[s1]",
    "[e1]",
    "[s2]",
    "[e2]"
  ],
  "torch_dtype": "float32",
  "transformers_version": "4.37.2",
  "type_vocab_size": 2,
  "update_vocab": 28899,
  "use_cache": true,
  "version": "0.1.0",
  "vocab_size": 28899
}
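
Because auto_map resolves to the custom configuration_bionextextractor / modeling_bionextextractor modules shipped with the repository, this checkpoint has to be loaded with trust_remote_code=True. The following is a minimal loading sketch in Python; the repository id is an assumption inferred from the page title, and the entity-marker input is only an illustrative example, not taken from the model's documentation.

from transformers import AutoConfig, AutoModel, AutoTokenizer

# Assumed Hub repository id (inferred from the page title, not confirmed by the config).
repo_id = "T-Almeida/BioNExt-Extractor"

# trust_remote_code is required because auto_map points at the custom
# BioNExtExtractorConfig / BioNExtExtractorModel classes in the repo.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModel.from_pretrained(repo_id, trust_remote_code=True)
tokenizer = AutoTokenizer.from_pretrained(repo_id)

# The config defines a 9-way relation label space (see id2label above) and a
# novelty setting, matching the "relation-novelty-extractor" model_type.
print(config.id2label)

# The four marker tokens ([s1], [e1], [s2], [e2]) listed under
# tokenizer_special_tokens bracket the two entity spans in the input text.
# Hypothetical example; the exact formatting expected by the model may differ.
text = "[s1] aspirin [e1] reduces the risk of [s2] myocardial infarction [e2] ."
inputs = tokenizer(text, return_tensors="pt")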