valhalla committed
Commit
df7df4d
1 Parent(s): 6a35c49

add flax model

Files changed (3)
  1. .gitattributes +1 -0
  2. config.json +9 -0
  3. flax_model.msgpack +3 -0
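
This commit adds Flax/JAX weights (flax_model.msgpack) alongside the existing checkpoint, so the model becomes loadable with the Flax classes in transformers. A minimal loading sketch, not part of the commit; the repo id facebook/bart-large-mnli below is an assumption made for illustration and should be replaced with the actual model id hosting this commit:

from transformers import AutoTokenizer, FlaxBartForSequenceClassification

repo_id = "facebook/bart-large-mnli"  # assumed repo id, for illustration only

tokenizer = AutoTokenizer.from_pretrained(repo_id)
# Flax model classes pick up flax_model.msgpack by default
model = FlaxBartForSequenceClassification.from_pretrained(repo_id)

premise = "A soccer game with multiple males playing."
hypothesis = "Some men are playing a sport."

inputs = tokenizer(premise, hypothesis, return_tensors="jax")
logits = model(**inputs).logits  # one score per label in the config's id2label
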
.gitattributes CHANGED
@@ -6,3 +6,4 @@
 *.tar.gz filter=lfs diff=lfs merge=lfs -text
 *.ot filter=lfs diff=lfs merge=lfs -text
 *.onnx filter=lfs diff=lfs merge=lfs -text
+*.msgpack filter=lfs diff=lfs merge=lfs -text
config.json CHANGED
@@ -3,20 +3,27 @@
   "activation_dropout": 0.0,
   "activation_function": "gelu",
   "add_final_layer_norm": false,
+  "architectures": [
+    "BartForSequenceClassification"
+  ],
   "attention_dropout": 0.0,
   "bos_token_id": 0,
   "classif_dropout": 0.0,
+  "classifier_dropout": 0.0,
   "d_model": 1024,
   "decoder_attention_heads": 16,
   "decoder_ffn_dim": 4096,
   "decoder_layerdrop": 0.0,
   "decoder_layers": 12,
+  "decoder_start_token_id": 2,
   "dropout": 0.1,
   "encoder_attention_heads": 16,
   "encoder_ffn_dim": 4096,
   "encoder_layerdrop": 0.0,
   "encoder_layers": 12,
   "eos_token_id": 2,
+  "forced_eos_token_id": 2,
+  "gradient_checkpointing": false,
   "id2label": {
     "0": "contradiction",
     "1": "neutral",
@@ -36,5 +43,7 @@
   "output_past": false,
   "pad_token_id": 1,
   "scale_embedding": false,
+  "transformers_version": "4.7.0.dev0",
+  "use_cache": true,
   "vocab_size": 50265
 }
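
The updated config records the model class (architectures), generation-related token ids (decoder_start_token_id, forced_eos_token_id), and the transformers version used for the export. A hedged sketch of the equivalent BartConfig built locally from the fields visible in this diff; the label map is left out because the hunk above is truncated:

from transformers import BartConfig

config = BartConfig(
    d_model=1024,
    encoder_layers=12,
    decoder_layers=12,
    encoder_attention_heads=16,
    decoder_attention_heads=16,
    encoder_ffn_dim=4096,
    decoder_ffn_dim=4096,
    dropout=0.1,
    vocab_size=50265,
    bos_token_id=0,
    pad_token_id=1,
    eos_token_id=2,
    decoder_start_token_id=2,  # added in this commit
    forced_eos_token_id=2,     # added in this commit
    classifier_dropout=0.0,    # added in this commit
)
config.architectures = ["BartForSequenceClassification"]  # also added in this commit
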
flax_model.msgpack ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5e07d1ae73ae1c1267fd174a3b21c73b0d77bad288f8ed17fb685f79c419a897
+size 1629394629
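
The file committed to the repository is a Git LFS pointer rather than the weights themselves: it records the pointer spec version, the SHA-256 of the payload, and its size in bytes (1629394629, roughly 1.6 GB). A small illustrative sketch of how such a pointer can be parsed; the helper name is hypothetical:

def parse_lfs_pointer(text: str) -> dict:
    # Each pointer line is "key value"; split once per line.
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {
        "version": fields["version"],
        "sha256": fields["oid"].removeprefix("sha256:"),
        "size_bytes": int(fields["size"]),
    }

pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:5e07d1ae73ae1c1267fd174a3b21c73b0d77bad288f8ed17fb685f79c419a897\n"
    "size 1629394629\n"
)
info = parse_lfs_pointer(pointer)
print(info["size_bytes"] / 1e9)  # ~1.63 GB of Flax weights
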