{
  "activation_dropout": 0.1,
  "activation_fn": "gelu",
  "apply_graphormer_init": true,
  "architectures": [
    "GraphormerForGraphClassification"
  ],
  "attention_dropout": 0.1,
  "bias": true,
  "bos_token_id": 1,
  "dropout": 0.0,
  "edge_type": "multi_hop",
  "embed_scale": null,
  "embedding_dim": 768,
  "encoder_normalize_before": true,
  "eos_token_id": 2,
  "export": false,
  "ffn_embedding_dim": 768,
  "freeze_embeddings": false,
  "hidden_size": 768,
  "id2label": {
    "0": "LABEL_0"
  },
  "init_fn": null,
  "kdim": null,
  "label2id": {
    "LABEL_0": 0
  },
  "layerdrop": 0.0,
  "max_nodes": 512,
  "model_type": "graphormer",
  "multi_hop_max_dist": 5,
  "no_token_positional_embeddings": false,
  "num_atoms": 4608,
  "num_attention_heads": 32,
  "num_edge_dis": 128,
  "num_edges": 1536,
  "num_in_degree": 512,
  "num_layers": 12,
  "num_out_degree": 512,
  "num_spatial": 512,
  "num_trans_layers_to_freeze": 0,
  "pad_token_id": 0,
  "pre_layernorm": false,
  "q_noise": 0.0,
  "qn_block_size": 8,
  "self_attention": true,
  "share_input_output_embed": false,
  "spatial_pos_max": 1024,
  "torch_dtype": "float32",
  "traceable": false,
  "transformers_version": "4.26.0.dev0",
  "vdim": null
}
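For reference, this configuration can be consumed with the transformers classes it names (GraphormerConfig and GraphormerForGraphClassification, present in the library since the 4.26 release this file was written against). A minimal sketch, assuming the file above is saved locally as config.json; the path is illustrative, not part of the original:

    from transformers import GraphormerConfig, GraphormerForGraphClassification

    # Parse this config.json into a configuration object (local path assumed).
    config = GraphormerConfig.from_json_file("config.json")

    # Instantiate the architecture the "architectures" field names. This
    # builds randomly initialized weights; to load trained weights instead,
    # call from_pretrained() with the checkpoint's directory or hub repo id.
    model = GraphormerForGraphClassification(config)

    # A few values carried over from the file above.
    print(config.num_layers, config.num_attention_heads, config.hidden_size)
    # -> 12 32 768

Note that with a single entry in id2label/label2id, this head has one output dimension, as is typical for binary classification or single-target regression checkpoints.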