valhalla committed
Commit d266f51
1 Parent(s): 8cd0aa8

add flax model

Files changed (3)
  1. .gitattributes +1 -0
  2. config.json +8 -2
  3. flax_model.msgpack +3 -0
.gitattributes CHANGED
@@ -6,3 +6,4 @@
 *.tar.gz filter=lfs diff=lfs merge=lfs -text
 *.ot filter=lfs diff=lfs merge=lfs -text
 *.onnx filter=lfs diff=lfs merge=lfs -text
+*.msgpack filter=lfs diff=lfs merge=lfs -text
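The new rule routes *.msgpack files through Git LFS, which is what running git lfs track "*.msgpack" would normally write into .gitattributes. As a quick sanity check (a minimal sketch, not part of this commit; it assumes a local clone with git and the LFS extension available), the attribute can be queried from Python:

import subprocess

# Ask git which filter applies to the new weights file; with the rule above
# in place, the output should report the lfs filter.
result = subprocess.run(
    ["git", "check-attr", "filter", "--", "flax_model.msgpack"],
    capture_output=True, text=True, check=True,
)
print(result.stdout.strip())  # expected: "flax_model.msgpack: filter: lfs"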
config.json CHANGED
@@ -10,6 +10,7 @@
   "attention_dropout": 0.0,
   "bos_token_id": 0,
   "classif_dropout": 0.0,
+  "classifier_dropout": 0.0,
   "d_model": 1024,
   "decoder_attention_heads": 16,
   "decoder_ffn_dim": 4096,
@@ -23,6 +24,10 @@
   "encoder_layerdrop": 0.0,
   "encoder_layers": 12,
   "eos_token_id": 2,
+  "force_bos_token_to_be_generated": true,
+  "forced_bos_token_id": 0,
+  "forced_eos_token_id": 2,
+  "gradient_checkpointing": false,
   "id2label": {
     "0": "LABEL_0",
     "1": "LABEL_1",
@@ -60,6 +65,7 @@
       "num_beams": 4
     }
   },
-  "vocab_size": 50264,
-  "force_bos_token_to_be_generated": true
+  "transformers_version": "4.7.0.dev0",
+  "use_cache": true,
+  "vocab_size": 50264
 }
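The added keys are standard generation and bookkeeping settings recognized by recent transformers releases: forced_bos_token_id and forced_eos_token_id pin the first and final generated tokens, use_cache enables key/value caching while decoding, and transformers_version records the library version that wrote the file. A minimal sketch of reading them back (assuming the transformers library is installed; <repo-id> is a placeholder for this model repository, which the commit does not name):

from transformers import AutoConfig

config = AutoConfig.from_pretrained("<repo-id>")  # placeholder repository id
print(config.forced_bos_token_id)  # 0    - token forced at the start of generation
print(config.forced_eos_token_id)  # 2    - token forced when max_length is reached
print(config.use_cache)            # True - reuse past key/values while decoding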
flax_model.msgpack ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3b5f386d30bb3cebd906a20274fa3054d37ab25e80dfd1417eb8b7d2501c9486
+size 1222255172
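Only this three-line Git LFS pointer is stored in the repository; the actual Flax weights (about 1.2 GB, per the size field) are fetched by Git LFS or by the Hugging Face Hub at download time. A minimal loading sketch, assuming jax and flax are installed, that this is a BART-style seq2seq checkpoint (suggested by the config fields above), and with <repo-id> again standing in for this repository:

from transformers import FlaxAutoModelForSeq2SeqLM

# Downloads flax_model.msgpack (the resolved weights, not the pointer) and builds the model.
model = FlaxAutoModelForSeq2SeqLM.from_pretrained("<repo-id>")  # placeholder repository id
print(model.config.d_model)  # 1024, matching the config above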