valhalla committed on
Commit 64fe9ab
1 Parent(s): fdbc5a9

add flax model

Files changed (3)
  1. .gitattributes +1 -0
  2. config.json +5 -0
  3. flax_model.msgpack +3 -0
.gitattributes CHANGED
@@ -6,3 +6,4 @@
  *.tar.gz filter=lfs diff=lfs merge=lfs -text
  *.ot filter=lfs diff=lfs merge=lfs -text
  *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
config.json CHANGED
@@ -10,6 +10,7 @@
  "attention_dropout": 0.0,
  "bos_token_id": 0,
  "classif_dropout": 0.0,
+ "classifier_dropout": 0.0,
  "d_model": 1024,
  "decoder_attention_heads": 16,
  "decoder_ffn_dim": 4096,
@@ -28,6 +29,8 @@
  ],
  "extra_pos_embeddings": 2,
  "force_bos_token_to_be_generated": false,
+ "forced_eos_token_id": 2,
+ "gradient_checkpointing": false,
  "id2label": {
  "0": "LABEL_0",
  "1": "LABEL_1",
@@ -55,5 +58,7 @@
  "scale_embedding": false,
  "static_position_embeddings": false,
  "task_specific_params": {},
+ "transformers_version": "4.7.0.dev0",
+ "use_cache": true,
  "vocab_size": 50264
  }
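For reference, the new keys follow the transformers 4.7-era BART-style config: "forced_eos_token_id": 2 makes generation always end on token id 2 (BART's </s>), and "use_cache": true enables decoder key/value caching. A minimal sketch of inspecting these fields with AutoConfig; the repo id below is a placeholder, since this diff does not name the repository:

    from transformers import AutoConfig

    # Placeholder repo id -- substitute the actual model repository.
    config = AutoConfig.from_pretrained("valhalla/some-bart-model")
    print(config.forced_eos_token_id)  # 2: generation is forced to end on this token
    print(config.use_cache)            # True: decoder key/value states are reused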
flax_model.msgpack ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0ad0fde4f95dc746e469b647fbe4a14376c2c968612b3973098ff00ea0d68257
+ size 1222255172
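The added msgpack file is the Flax weight checkpoint (about 1.2 GB, per the LFS pointer above). A minimal loading sketch, assuming a seq2seq BART-style checkpoint, a transformers install with Flax support, and a placeholder repo id, since the repository name is not part of this diff:

    from transformers import AutoTokenizer, FlaxAutoModelForSeq2SeqLM

    repo_id = "valhalla/some-bart-model"  # placeholder; use the actual repository id
    tokenizer = AutoTokenizer.from_pretrained(repo_id)
    model = FlaxAutoModelForSeq2SeqLM.from_pretrained(repo_id)  # loads flax_model.msgpack

    inputs = tokenizer("Hello world", return_tensors="np")
    output = model.generate(inputs["input_ids"])
    print(tokenizer.batch_decode(output.sequences, skip_special_tokens=True))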