{ "_name_or_path": "/n/fs/nlp-mengzhou/space2/out/CoLA/CoFi-test-v3/CoLA_sparsity0.95_reglr0.01_ce0.5_layerdistillv3", "architectures": [ "CoFiBertForSequenceClassification" ], "attention_probs_dropout_prob": 0.1, "classifier_dropout": null, "do_layer_distill": true, "finetuning_task": "cola", "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "0": 0, "1": 1 }, "layer_norm_eps": 1e-12, "max_position_embeddings": 512, "model_type": "bert", "num_attention_heads": 12, "num_hidden_layers": 12, "output_attentions": true, "output_hidden_states": true, "pad_token_id": 0, "position_embedding_type": "absolute", "pruned_heads": { "0": [ 0, 1, 2, 4, 5, 6, 7, 8, 9, 11 ], "1": [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 ], "2": [ 0, 2, 3, 4, 5, 6, 7, 8, 10, 11 ], "3": [ 0, 1, 2, 3, 4, 6, 7, 8, 10, 11 ], "4": [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 ], "5": [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 ], "6": [ 0, 1, 3, 4, 5, 6, 7, 8, 9, 11 ], "7": [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 ], "8": [ 0, 1, 2, 3, 5, 6, 7, 9, 10, 11 ], "9": [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 ], "10": [ 0, 1, 2, 3, 5, 6, 8, 9, 10, 11 ], "11": [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 ] }, "torch_dtype": "float32", "transformers_version": "4.17.0.dev0", "type_vocab_size": 2, "use_cache": true, "vocab_size": 30522 }