{ "_name_or_path": "/cluster/home/tialiu/project_tianyu/projects/generative-coref/t0_3b", "architectures": [ "T5Coref" ], "asp_activation": "relu", "asp_dropout_rate": 0.3, "asp_feature_emb_size": 20, "asp_hidden_dim": 4096, "asp_init_std": 0.02, "d_ff": 5120, "d_kv": 64, "d_model": 2048, "decoder_start_token_id": 0, "dense_act_fn": "gelu_new", "dropout_rate": 0.1, "eos_token_id": 1, "feed_forward_proj": "gated-gelu", "gradient_checkpointing": false, "initializer_factor": 1.0, "is_encoder_decoder": true, "is_gated_act": true, "layer_norm_epsilon": 1e-06, "mention_end_id": 32103, "mention_start_id": 32102, "model_type": "t5", "num_decoder_layers": 24, "num_heads": 32, "num_layers": 24, "output_past": true, "pad_token_id": 0, "pretrained_name_or_path": "/cluster/home/tialiu/tianyu/huggingface_models//T0_3B", "relative_attention_max_distance": 128, "relative_attention_num_buckets": 32, "tie_word_embeddings": false, "torch_dtype": "float32", "transformers_version": "4.23.1", "use_cache": true, "vocab_size": 32104 }