{
"decoder_config": {
"folding_trunk": {
"chunk_size": null,
"cpu_grad_checkpoint": false,
"dropout": 0.1,
"layer_drop": 0,
"max_recycles": 4,
"num_blocks": 32,
"pairwise_head_width": 32,
"pairwise_state_dim": 128,
"position_bins": 32,
"sequence_head_width": 32,
"sequence_state_dim": 768,
"structure_module": {
"c_ipa": 16,
"c_resnet": 128,
"c_s": 384,
"c_z": 128,
"dropout_rate": 0.1,
"epsilon": 1e-08,
"inf": 100000.0,
"no_angles": 7,
"no_blocks": 8,
"no_heads_ipa": 12,
"no_qk_points": 4,
"no_resnet_blocks": 2,
"no_transition_layers": 1,
"no_v_points": 8,
"trans_scale_factor": 10
}
},
"lddt_head_hidden_dim": 128,
"quantize_dim": 384
},
"encoder_config": {
"edge_emb": {
"bessels": {
"bessel_const": 40.0,
"d": 128
},
"d": 128,
"edge_type_emb": {
"d": 128,
"dropout": 0.1,
"max_res_offset": 32,
"max_sym_offset": 2
},
"k_for_knn": 30
},
"eqnet": {
"attn_dropout": 0,
"d": 128,
"d_mult": 3,
"dropout": 0.1,
"n_head": 4
},
"n_eqnet": 12,
"node_emb": {
"d": 128,
"dropout": 0.1,
"mask_node_type": true,
"n_chain_type": 100,
"n_node_type": 200
},
"quantize": {
"decay": 0.99,
"dim": 384,
"eps": 1e-05,
"n_embed": 512,
"normalize": true,
"restart": 100,
"usage_threshold": 1e-09
}
},
"frozen_codebook": false
}