File size: 1,193 Bytes
{
  "_name_or_path": "ai4bharat/indic-bert",
  "activation_dropout": 0.0,
  "activation_function": "gelu",
  "architectures": [
    "MBartForConditionalGeneration"
  ],
  "attention_dropout": 0.0,
  "attention_probs_dropout_prob": 0,
  "bos_token_id": 0,
  "classifier_dropout": 0.0,
  "d_model": 768,
  "decoder_attention_heads": 16,
  "decoder_ffn_dim": 4096,
  "decoder_layerdrop": 0.0,
  "decoder_layers": 12,
  "down_scale_factor": 1,
  "dropout": 0.1,
  "embedding_size": 128,
  "encoder_attention_heads": 12,
  "encoder_ffn_dim": 4096,
  "encoder_layerdrop": 0.0,
  "encoder_layers": 12,
  "eos_token_id": 2,
  "forced_eos_token_id": 2,
  "gap_size": 0,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0,
  "init_std": 0.02,
  "initializer_range": 0.02,
  "inner_group_num": 1,
  "intermediate_size": 3072,
  "is_encoder_decoder": true,
  "max_position_embeddings": 512,
  "model_type": "mbart",
  "net_structure_type": 0,
  "num_hidden_groups": 1,
  "num_hidden_layers": 12,
  "num_memory_blocks": 0,
  "pad_token_id": 1,
  "scale_embedding": false,
  "torch_dtype": "float32",
  "transformers_version": "4.30.2",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 200000
}
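
The file above is a standard Hugging Face `transformers` configuration for an mBART-style encoder-decoder (12 encoder and 12 decoder layers, `d_model` 768, vocabulary of 200,000 tokens). Below is a minimal sketch of loading it locally; the filename `config.json` and the random-weight instantiation are illustrative assumptions, and in practice you would point `from_pretrained` at the model repository that ships this file rather than building an untrained model.

```python
# Minimal sketch: load the config.json shown above and instantiate the
# corresponding architecture. Assumes the file is saved as "config.json"
# in the working directory.
from transformers import MBartConfig, MBartForConditionalGeneration

# Parse the JSON file into an MBartConfig object.
config = MBartConfig.from_json_file("config.json")
print(config.model_type)   # "mbart"
print(config.vocab_size)   # 200000
print(config.d_model)      # 768

# Build a randomly initialised model with this architecture
# (no pretrained weights are loaded here).
model = MBartForConditionalGeneration(config)
print(f"parameters: {sum(p.numel() for p in model.parameters()):,}")
```

Note that `_name_or_path` is simply the path recorded when the config was last saved; the `architectures` field (`MBartForConditionalGeneration`) is what determines which model class `transformers` selects when loading via `AutoModel` classes.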