{
  "architectures": [
    "FlashPPIModel"
  ],
  "auto_map": {
    "AutoConfig": "configuration_flashppi.FlashPPIConfig",
    "AutoModel": "modeling_flashppi.FlashPPIModel"
  },
  "clip_embed_dim": 1024,
  "contact_embed_dim": 1280,
  "contact_num_heads": 8,
  "contact_transformer_depth": 2,
  "dtype": "float32",
  "max_position_embeddings": 512,
  "model_type": "flashppi",
  "plm_depth": 33,
  "plm_dim": 1280,
  "plm_ffn_dim_multiplier": null,
  "plm_heads": 20,
  "plm_norm_eps": 1e-05,
  "plm_swiglu_multiple_of": 256,
  "plm_vocab_size": 37,
  "transformers_version": "4.57.1",
  "use_flash_attention": true
}