moussaKam committed on
Commit
4d6acef
1 Parent(s): d77cc21
Files changed (1)
  1. config.json +4 -3
config.json CHANGED
@@ -4,7 +4,7 @@
   "add_bias_logits": false,
   "add_final_layer_norm": true,
   "architectures": [
-    "BarthezForConditionalGeneration"
+    "BartForConditionalGeneration"
   ],
   "attention_dropout": 0.1,
   "bos_token_id": 0,
@@ -39,7 +39,7 @@
     "LABEL_2": 2
   },
   "max_position_embeddings": 1024,
-  "model_type": "barthez",
+  "model_type": "bart",
   "no_repeat_ngram_size": 3,
   "normalize_before": true,
   "normalize_embedding": true,
@@ -48,5 +48,6 @@
   "pad_token_id": 1,
   "scale_embedding": true,
   "static_position_embeddings": false,
-  "vocab_size": 101122
+  "vocab_size": 101122,
+  "tokenizer_class": "BarthezTokenizer"
 }
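
With these changes the checkpoint can be loaded through the generic BART auto classes while still resolving the BARThez tokenizer. A minimal sketch of how the updated fields are picked up, assuming the repository id is "moussaKam/barthez" (the id is not shown in this commit) and that transformers and sentencepiece are installed:

from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

repo_id = "moussaKam/barthez"  # assumed repository id; not confirmed by this diff

# "model_type": "bart" maps the config to BartConfig, so the auto class
# instantiates BartForConditionalGeneration for this checkpoint.
model = AutoModelForSeq2SeqLM.from_pretrained(repo_id)

# "tokenizer_class": "BarthezTokenizer" tells AutoTokenizer to build the
# SentencePiece-based BarthezTokenizer rather than the default BART tokenizer.
tokenizer = AutoTokenizer.from_pretrained(repo_id)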