multilingual-e5-small / config.json
{
  "name": "intfloat/multilingual-e5-small",
  "version": "1.0.0",
  "description": "This is the intfloat/multilingual-e5-small model",
  "model_task_type": "TEXT_EMBEDDING",
  "model_format": "ONNX",
  "model_content_size_in_bytes": 293161335,
  "model_content_hash_value": "ce9ab9336fc32c8173d0eed955c800f85643928f991bb51f6f2de6b31c8a2cf9",
  "model_config": {
    "model_type": "bert",
    "embedding_dimension": 384,
    "framework_type": "sentence_transformers",
    "all_config": "{ \"_name_or_path\": \"intfloat\/multilingual-e5-small\", \"architectures\": [ \"BertModel\" ], \"attention_probs_dropout_prob\": 0.1, \"classifier_dropout\": null, \"hidden_act\": \"gelu\", \"hidden_dropout_prob\": 0.1, \"hidden_size\": 384, \"initializer_range\": 0.02, \"intermediate_size\": 1536, \"layer_norm_eps\": 1e-12, \"max_position_embeddings\": 512, \"model_type\": \"bert\", \"num_attention_heads\": 12, \"num_hidden_layers\": 12, \"pad_token_id\": 0, \"position_embedding_type\": \"absolute\", \"tokenizer_class\": \"XLMRobertaTokenizer\", \"transformers_version\": \"4.30.2\", \"type_vocab_size\": 2, \"use_cache\": true, \"vocab_size\": 250037 }"
  },
  "created_time": 1676073973126
}
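
For reference, the model_content_size_in_bytes and model_content_hash_value fields can be checked against a downloaded copy of the model artifact. Below is a minimal Python sketch, assuming the hash value is a SHA-256 digest of the ONNX model archive; the local file name used here is hypothetical.

import hashlib
from pathlib import Path

# Hypothetical local path to the downloaded ONNX model archive.
MODEL_PATH = Path("multilingual-e5-small.zip")

# Values copied from config.json above.
EXPECTED_SIZE = 293161335
EXPECTED_SHA256 = "ce9ab9336fc32c8173d0eed955c800f85643928f991bb51f6f2de6b31c8a2cf9"

def verify_model_artifact(path: Path) -> None:
    """Compare the artifact's byte size and SHA-256 digest with the config values."""
    actual_size = path.stat().st_size
    if actual_size != EXPECTED_SIZE:
        raise ValueError(f"size mismatch: {actual_size} != {EXPECTED_SIZE}")

    digest = hashlib.sha256()
    with path.open("rb") as f:
        # Hash in 1 MiB chunks to avoid loading the whole archive into memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    if digest.hexdigest() != EXPECTED_SHA256:
        raise ValueError("SHA-256 digest does not match model_content_hash_value")

if __name__ == "__main__":
    verify_model_artifact(MODEL_PATH)
    print("model artifact matches config.json")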