{
  "prismer_base": {
    "roberta_model": {
      "attention_probs_dropout_prob": 0.1,
      "bos_token_id": 0,
      "eos_token_id": 2,
      "hidden_act": "gelu",
      "hidden_dropout_prob": 0.1,
      "hidden_size": 768,
      "vision_hidden_size": 768,
      "initializer_range": 0.02,
      "intermediate_size": 3072,
      "layer_norm_eps": 1e-05,
      "max_position_embeddings": 514,
      "model_name": "roberta-base",
      "num_attention_heads": 12,
      "num_hidden_layers": 12,
      "pad_token_id": 1,
      "type_vocab_size": 1,
      "vocab_size": 50265,
      "num_decoder_layers": 4,
      "is_decoder": true
    },
    "vit_model": "ViT-B/16"
  },
  "prismer_large": {
    "roberta_model": {
      "attention_probs_dropout_prob": 0.1,
      "bos_token_id": 0,
      "eos_token_id": 2,
      "hidden_act": "gelu",
      "hidden_dropout_prob": 0.1,
      "hidden_size": 1024,
      "vision_hidden_size": 1024,
      "initializer_range": 0.02,
      "intermediate_size": 4096,
      "layer_norm_eps": 1e-05,
      "max_position_embeddings": 514,
      "model_name": "roberta-large",
      "num_attention_heads": 16,
      "num_hidden_layers": 24,
      "pad_token_id": 1,
      "type_vocab_size": 1,
      "vocab_size": 50265,
      "num_decoder_layers": 4,
      "is_decoder": true
    },
    "vit_model": "ViT-L/14@336px"
  },
  "prismer_huge": {
    "roberta_model": {
      "attention_probs_dropout_prob": 0.1,
      "bos_token_id": 0,
      "eos_token_id": 2,
      "hidden_act": "gelu",
      "hidden_dropout_prob": 0.1,
      "hidden_size": 1024,
      "vision_hidden_size": 1280,
      "initializer_range": 0.02,
      "intermediate_size": 4096,
      "layer_norm_eps": 1e-05,
      "max_position_embeddings": 514,
      "model_name": "roberta-large",
      "num_attention_heads": 16,
      "num_hidden_layers": 24,
      "pad_token_id": 1,
      "type_vocab_size": 1,
      "vocab_size": 50265,
      "num_decoder_layers": 4,
      "is_decoder": true
    },
    "vit_model": "ViT-H/14"
  }
}
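For reference, a minimal sketch of how one of these variants might be selected and read in Python. The file name config.json and the helper load_prismer_config are illustrative assumptions, not part of the original repository.

import json

def load_prismer_config(path: str, variant: str) -> dict:
    # Illustrative helper (not from the Prismer codebase): read the JSON above
    # and return the settings for a single variant, e.g. "prismer_base".
    with open(path) as f:
        configs = json.load(f)
    if variant not in configs:
        raise KeyError(f"Unknown variant {variant!r}; choose from {sorted(configs)}")
    return configs[variant]

# Example usage, assuming the JSON above is saved as config.json:
cfg = load_prismer_config("config.json", "prismer_base")
roberta_cfg = cfg["roberta_model"]  # language-decoder settings (RoBERTa-based)
print(cfg["vit_model"], roberta_cfg["hidden_size"], roberta_cfg["num_decoder_layers"])
# expected output for prismer_base: ViT-B/16 768 4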