bpfold / config.json
{
"architectures": [
"BpfoldModel"
],
"attention_head_size": 32,
"bos_token_id": 1,
"dtype": "float32",
"eos_token_id": 2,
"hidden_dropout": 0.1,
"hidden_size": 256,
"id2label": null,
"intermediate_size": 768,
"label2id": null,
"mask_token_id": 4,
"max_length": 600,
"model_type": "bpfold",
"motif_radius": 3,
"null_token_id": 5,
"num_hidden_layers": 12,
"num_labels": 1,
"num_members": 6,
"num_pairwise_convolutions": 3,
"pad_token_id": 0,
"pairwise_kernel_size": 3,
"pos_weight": 300.0,
"positional_embedding": "dyn",
"postprocess_iterations": 100,
"postprocess_lr_max": 0.1,
"postprocess_lr_min": 0.01,
"postprocess_nc_rho": 0.5,
"postprocess_nc_s": 0.5,
"postprocess_rho": 1.6,
"postprocess_s": 1.5,
"postprocess_with_l1": true,
"separate_outer_inner_energy": true,
"threshold": 0.5,
"tie_word_embeddings": true,
"transformers_version": "5.4.0",
"unk_token_id": 3,
"use_base_pair_energy": true,
"use_base_pair_probability": false,
"use_postprocessing": false,
"use_squeeze_excitation": true,
"vocab_size": 11
}
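
For reference, a minimal sketch of loading and inspecting this file with only the Python standard library. Nothing below is specific to the bpfold implementation; the derived head count simply follows from hidden_size / attention_head_size as in standard transformer configs, which is an assumption about how this model interprets those fields.

    import json

    # Load the raw configuration
    with open("config.json") as f:
        cfg = json.load(f)

    # Assumed derived quantity: attention heads per layer
    # hidden_size / attention_head_size = 256 / 32 = 8
    num_heads = cfg["hidden_size"] // cfg["attention_head_size"]

    print(num_heads)                  # 8
    print(cfg["num_hidden_layers"])   # 12
    print(cfg["vocab_size"])          # 11

If the "bpfold" model type is registered with transformers (for example via an accompanying package; this file alone does not confirm that), the same config can also be loaded through transformers.AutoConfig.from_pretrained on the repository id.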