Commit 2dd3255: previously trained best checkpoint
{
  "_name_or_path": "/workspace/storage/subgraphs_reranking_results/t5-large-ssm/results/clefourrier/graphormer-base-pcqm4mv2_mse/checkpoint-62000",
  "activation_dropout": 0.1,
  "activation_fn": "gelu",
  "apply_graphormer_init": true,
  "architectures": [
    "GraphormerForGraphClassification"
  ],
  "attention_dropout": 0.1,
  "bias": true,
  "bos_token_id": 1,
  "dropout": 0.0,
  "edge_type": "multi_hop",
  "embed_scale": null,
  "embedding_dim": 768,
  "encoder_normalize_before": true,
  "eos_token_id": 2,
  "export": false,
  "ffn_embedding_dim": 768,
  "freeze_embeddings": false,
  "hidden_size": 768,
  "id2label": {
    "0": "LABEL_0"
  },
  "init_fn": null,
  "kdim": null,
  "label2id": {
    "LABEL_0": 0
  },
  "layerdrop": 0.0,
  "max_nodes": 512,
  "model_type": "graphormer",
  "multi_hop_max_dist": 5,
  "no_token_positional_embeddings": false,
  "num_atoms": 4608,
  "num_attention_heads": 32,
  "num_classes": 1,
  "num_edge_dis": 128,
  "num_edges": 1536,
  "num_hidden_layers": 12,
  "num_in_degree": 512,
  "num_layers": 12,
  "num_out_degree": 512,
  "num_spatial": 512,
  "num_trans_layers_to_freeze": 0,
  "pad_token_id": 0,
  "pre_layernorm": false,
  "q_noise": 0.0,
  "qn_block_size": 8,
  "self_attention": true,
  "share_input_output_embed": false,
  "spatial_pos_max": 1024,
  "torch_dtype": "float32",
  "traceable": false,
  "transformers_version": "4.31.0",
  "vdim": null
}
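
For context, a minimal sketch of rebuilding the model skeleton from this config, assuming transformers==4.31.0 (the version recorded in "transformers_version" above) and that the JSON is saved locally as config.json; the checkpoint path in "_name_or_path" is specific to the author's environment and is not needed here.

# Minimal sketch: instantiate the Graphormer model from this config.
# Assumes transformers==4.31.0, where GraphormerConfig and
# GraphormerForGraphClassification are available, and that the JSON
# above has been saved locally as config.json.
from transformers import GraphormerConfig, GraphormerForGraphClassification

config = GraphormerConfig.from_json_file("config.json")

# num_classes = 1 gives a single-output classification head, so the
# model can be trained as a graph-level regressor (the "_mse" suffix
# in the checkpoint path suggests an MSE objective was used).
model = GraphormerForGraphClassification(config)
print(model.config.num_hidden_layers)  # 12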