esmfold_v1 / config.json
Latest commit: Set fp16_esm to false in config.json (53e5f6d)
{
  "architectures": [
    "EsmForProteinFolding"
  ],
  "attention_probs_dropout_prob": 0.0,
  "classifier_dropout": null,
  "emb_layer_norm_before": false,
  "esmfold_config": {
    "bypass_lm": false,
    "embed_aa": true,
    "esm_ablate_pairwise": false,
    "esm_ablate_sequence": false,
    "esm_input_dropout": 0,
    "esm_type": "esm2_3B",
    "fp16_esm": false,
    "lddt_head_hid_dim": 128,
    "trunk": {
      "chunk_size": null,
      "cpu_grad_checkpoint": false,
      "dropout": 0,
      "layer_drop": 0,
      "max_recycles": 4,
      "num_blocks": 48,
      "pairwise_head_width": 32,
      "pairwise_state_dim": 128,
      "position_bins": 32,
      "sequence_head_width": 32,
      "sequence_state_dim": 1024,
      "structure_module": {
        "dropout_rate": 0.1,
        "epsilon": 1e-08,
        "inf": 100000.0,
        "ipa_dim": 16,
        "num_angles": 7,
        "num_blocks": 8,
        "num_heads_ipa": 12,
        "num_qk_points": 4,
        "num_resnet_blocks": 2,
        "num_transition_layers": 1,
        "num_v_points": 8,
        "pairwise_dim": 128,
        "resnet_dim": 128,
        "sequence_dim": 384,
        "trans_scale_factor": 10
      }
    },
    "use_esm_attn_map": false
  },
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.0,
  "hidden_size": 2560,
  "initializer_range": 0.02,
  "intermediate_size": 10240,
  "is_folding_model": true,
  "layer_norm_eps": 1e-05,
  "mask_token_id": 32,
  "max_position_embeddings": 1026,
  "model_type": "esm",
  "num_attention_heads": 40,
  "num_hidden_layers": 36,
  "pad_token_id": 1,
  "position_embedding_type": "rotary",
  "token_dropout": true,
  "torch_dtype": "float32",
  "transformers_version": "4.25.0.dev0",
  "use_cache": true,
  "vocab_list": [
    "<cls>",
    "<pad>",
    "<eos>",
    "<unk>",
    "L",
    "A",
    "G",
    "V",
    "S",
    "E",
    "R",
    "T",
    "I",
    "D",
    "P",
    "K",
    "Q",
    "N",
    "F",
    "Y",
    "M",
    "H",
    "W",
    "C",
    "X",
    "B",
    "U",
    "Z",
    "O",
    ".",
    "-",
    "<null_1>",
    "<mask>"
  ],
  "vocab_size": 33
}
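
For reference, a minimal sketch of inspecting these settings programmatically with transformers, assuming the usual Hub id `facebook/esmfold_v1` for this checkpoint (the id is not stated in the file itself):

```python
from transformers import AutoConfig

config = AutoConfig.from_pretrained("facebook/esmfold_v1")

# Top-level language-model settings map directly to the JSON keys above.
print(config.hidden_size)        # 2560
print(config.num_hidden_layers)  # 36
print(config.is_folding_model)   # True

# The nested folding options round-trip through to_dict(), matching
# the "esmfold_config" block, including the fp16_esm flag this file disables.
esmfold_cfg = config.to_dict()["esmfold_config"]
print(esmfold_cfg["fp16_esm"])             # False
print(esmfold_cfg["trunk"]["num_blocks"])  # 48
```

Because `to_dict()` serializes the nested folding config, the printed values should match the `esmfold_config` block above, including the `fp16_esm: false` set by this commit.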
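
And a hedged end-to-end sketch of loading the `EsmForProteinFolding` architecture this config declares and folding one sequence. The sequence below is an arbitrary illustration, not taken from the file, and `facebook/esmfold_v1` is again assumed as the Hub id:

```python
import torch
from transformers import AutoTokenizer, EsmForProteinFolding

tokenizer = AutoTokenizer.from_pretrained("facebook/esmfold_v1")
model = EsmForProteinFolding.from_pretrained("facebook/esmfold_v1")
model.eval()

sequence = "MKTAYIAKQRQISFVKSHFSRQLEERLGLIEVQAPILSRVGDGTQDNLSGAEKAVQ"
# ESMFold consumes raw residues without BOS/EOS, hence add_special_tokens=False.
inputs = tokenizer([sequence], return_tensors="pt", add_special_tokens=False)

with torch.no_grad():
    outputs = model(**inputs)

# outputs.positions holds predicted atom coordinates;
# outputs.plddt holds per-residue confidence.
print(outputs.positions.shape)
```

With `fp16_esm` set to false, as in this commit, the ESM-2 stem is not cast to half precision and runs in the model's default dtype (`torch_dtype: float32` here).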