Commit 0c6fe5c by Xenova
Parent: d4c5526

Upload folder using huggingface_hub
config.json CHANGED
@@ -30,7 +30,7 @@
   "num_hidden_layers": 12,
   "pad_token_id": 1,
   "scale_embedding": true,
-  "transformers_version": "4.32.0.dev0",
+  "transformers_version": "4.33.0.dev0",
   "use_cache": true,
   "vocab_size": 128112
 }
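The only change here is the save-time metadata stamp. A minimal sketch, assuming the `transformers` library (the repo id below is an example M2M100 checkpoint, not necessarily this one), of where that field surfaces after loading:

from transformers import AutoConfig

# `transformers_version` is written by save_pretrained() and round-tripped by
# from_pretrained(), so a re-export with a newer dev build touches only this field.
config = AutoConfig.from_pretrained("facebook/m2m100_418M")  # example repo id
print(getattr(config, "transformers_version", None))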
generation_config.json CHANGED
@@ -7,5 +7,5 @@
   "max_length": 200,
   "num_beams": 5,
   "pad_token_id": 1,
-  "transformers_version": "4.32.0.dev0"
+  "transformers_version": "4.33.0.dev0"
 }
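Same version-stamp bump; the surrounding keys show the pinned decoding defaults. A minimal sketch, assuming `transformers`, that reconstructs this generation config in code:

from transformers import GenerationConfig

# Mirrors generation_config.json above: beam search with 5 beams, outputs
# capped at 200 tokens, padding id 1.
gen_config = GenerationConfig(max_length=200, num_beams=5, pad_token_id=1)
print(gen_config.to_json_string())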
onnx/decoder_model.onnx CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c46b4b22a8784d1dfcf5a4fb03023c86447aa5000a4ded9628a187537ab2c45a
+oid sha256:4be865411d76ad8f525d9ece3a87c2c06b0341932fc9e119cb01a8afeb941ed8
 size 1860343075
onnx/decoder_model_merged.onnx CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9a161e2bee0787bcb8df55a3f176f490efe50e8835fc7b423b82063d403a36d6
+oid sha256:668cb0d35bb6fd44519d05ad05e953666dc5703d574574153882111027ec9f88
 size 1865017920
onnx/decoder_model_merged_quantized.onnx CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:fe603e536c2590223d4fec6e18a94809273cb9998f79db2e623780492891b2d1
+oid sha256:1f707db716fe11a2dcf2d8e084bb2343e4450f966a1da00f2b0d6a59d66c3d0c
 size 475956446
onnx/decoder_model_quantized.onnx CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c21aad0d0fe5553c79190831a1be55427806f67d96a046019157ffd89f9ab77c
+oid sha256:8a9106731d4c4b0deb21d03b2d5d109c167b3fa1382ab9f822fad58a06902fe4
 size 471009755
onnx/decoder_with_past_model.onnx CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:64a5bbc8d54b0f27f94bc71b899264b30e629adf08c8fcff1cbd9e40827c731f
+oid sha256:480ee872f48888fd4e96d3e1e1c29bd8207671873e8d718a32df652e51784876
 size 1759501391
onnx/decoder_with_past_model_quantized.onnx CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d450287dbd54564a1d3260f4dd583619dd2b60943838d8b1d59e27f81bc5b26a
+oid sha256:323716580265b50aa823d76fb55b69a308726d0e56d6547a74aa6f336e3ea971
 size 445490297
onnx/encoder_model.onnx CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:26898af1244d73c2b25481534c44cb5bdc566d04e093525b11ee57eb085a360b
+oid sha256:2aaf1612c2738dec7de0a12d3f5ebb1168a38b735a33db5be7719dd669750544
 size 1133698852
onnx/encoder_model_quantized.onnx CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:dabfae22dc44c7c5cfbdcd2d85fd7daebf7dc415255645a3454c8554e8fdd137
+oid sha256:13a94e354a9140764eb81102d77d3ec6952d796e6f113c651eeb3c3443da0386
 size 287856370
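Each .onnx entry above is a Git LFS pointer: only the `oid sha256:` line changed while every size is identical, i.e. the re-exported graphs are byte-different but equally large. A minimal sketch (the path is a placeholder) for checking a downloaded file against its pointer:

import hashlib

def sha256_of(path, chunk_size=1 << 20):
    # Stream the file so multi-GB models don't need to fit in memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# Compare against the "+oid sha256:..." line in the pointer above.
print(sha256_of("onnx/encoder_model.onnx"))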
quantize_config.json CHANGED
@@ -2,132 +2,132 @@
   "per_channel": true,
   "reduce_range": true,
   "per_model_config": {
-    "encoder_model": {
+    "decoder_model": {
       "op_types": [
+        "Equal",
+        "Softmax",
+        "Less",
+        "MatMul",
         "Shape",
-        "Pow",
+        "Mul",
+        "Constant",
+        "Squeeze",
+        "Transpose",
+        "Range",
+        "Identity",
+        "ReduceMean",
+        "Add",
+        "Relu",
+        "Reshape",
         "CumSum",
-        "ConstantOfShape",
+        "Sub",
         "Expand",
+        "Gather",
+        "Where",
+        "Slice",
+        "Not",
+        "Sqrt",
+        "Pow",
+        "Concat",
+        "Unsqueeze",
         "Div",
+        "Cast",
+        "ConstantOfShape"
+      ],
+      "weight_type": "QInt8"
+    },
+    "encoder_model": {
+      "op_types": [
+        "Equal",
+        "Softmax",
         "MatMul",
-        "Sqrt",
+        "Shape",
         "Mul",
-        "Sub",
+        "Constant",
+        "Transpose",
+        "Identity",
         "ReduceMean",
-        "Equal",
+        "Add",
+        "Relu",
+        "Reshape",
+        "CumSum",
+        "Sub",
+        "Expand",
         "Gather",
-        "Softmax",
+        "Where",
         "Not",
-        "Add",
-        "Constant",
-        "Identity",
-        "Cast",
+        "Sqrt",
+        "Pow",
         "Concat",
         "Unsqueeze",
-        "Transpose",
-        "Relu",
-        "Reshape",
-        "Where"
+        "Div",
+        "Cast",
+        "ConstantOfShape"
       ],
       "weight_type": "QInt8"
     },
     "decoder_model_merged": {
       "op_types": [
-        "Shape",
-        "Pow",
-        "ConstantOfShape",
-        "Expand",
-        "CumSum",
-        "Div",
+        "Equal",
+        "Softmax",
+        "Less",
         "MatMul",
-        "Sqrt",
+        "If",
+        "Shape",
         "Mul",
-        "Less",
-        "Sub",
+        "Constant",
+        "Squeeze",
+        "Transpose",
+        "Range",
+        "Identity",
         "ReduceMean",
-        "If",
-        "Equal",
+        "Add",
+        "Relu",
+        "Reshape",
+        "CumSum",
+        "Sub",
+        "Expand",
         "Gather",
-        "Softmax",
+        "Where",
         "Slice",
-        "Range",
         "Not",
-        "Add",
-        "Constant",
-        "Identity",
-        "Cast",
+        "Sqrt",
+        "Pow",
         "Concat",
         "Unsqueeze",
-        "Transpose",
-        "Relu",
-        "Squeeze",
-        "Reshape",
-        "Where"
+        "Div",
+        "Cast",
+        "ConstantOfShape"
       ],
       "weight_type": "QInt8"
     },
     "decoder_with_past_model": {
       "op_types": [
-        "Shape",
-        "Pow",
-        "ConstantOfShape",
-        "Expand",
-        "CumSum",
-        "Div",
-        "MatMul",
-        "Sqrt",
-        "Mul",
-        "Sub",
-        "ReduceMean",
         "Equal",
-        "Gather",
         "Softmax",
-        "Not",
-        "Add",
+        "MatMul",
+        "Shape",
+        "Mul",
         "Constant",
-        "Identity",
-        "Cast",
-        "Concat",
-        "Unsqueeze",
         "Transpose",
+        "Identity",
+        "ReduceMean",
+        "Add",
         "Relu",
         "Reshape",
-        "Where"
-      ],
-      "weight_type": "QInt8"
-    },
-    "decoder_model": {
-      "op_types": [
-        "Shape",
-        "Pow",
-        "ConstantOfShape",
-        "Expand",
         "CumSum",
-        "Div",
-        "MatMul",
-        "Sqrt",
-        "Mul",
-        "Less",
         "Sub",
-        "ReduceMean",
-        "Equal",
+        "Expand",
         "Gather",
-        "Softmax",
-        "Slice",
-        "Range",
+        "Where",
         "Not",
-        "Add",
-        "Constant",
-        "Identity",
-        "Cast",
+        "Sqrt",
+        "Pow",
         "Concat",
         "Unsqueeze",
-        "Transpose",
-        "Relu",
-        "Squeeze",
-        "Reshape",
-        "Where"
+        "Div",
+        "Cast",
+        "ConstantOfShape"
       ],
       "weight_type": "QInt8"
     }
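Note that the per-model `op_types` sets are unchanged by this commit; only their order (and the order of the model entries) differs, consistent with a re-run of the same export and quantization pipeline. A minimal sketch of how such a config maps onto ONNX Runtime dynamic quantization (the file names are placeholders; Optimum normally drives this API, and argument names can vary across onnxruntime versions):

from onnxruntime.quantization import QuantType, quantize_dynamic

# Mirrors quantize_config.json: per-channel, reduced-range, signed 8-bit weights.
quantize_dynamic(
    model_input="onnx/encoder_model.onnx",
    model_output="onnx/encoder_model_quantized.onnx",
    per_channel=True,             # "per_channel": true
    reduce_range=True,            # "reduce_range": true
    weight_type=QuantType.QInt8,  # "weight_type": "QInt8"
)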
tokenizer_config.json CHANGED
@@ -113,5 +113,6 @@
   "src_lang": null,
   "tgt_lang": null,
   "tokenizer_class": "M2M100Tokenizer",
+  "tokenizer_file": null,
   "unk_token": "<unk>"
 }
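The added `"tokenizer_file": null` key is serialization metadata; the class in use stays M2M100Tokenizer. A minimal sketch, assuming `transformers` with sentencepiece installed (the repo id is an example M2M100 checkpoint, and the language pair is illustrative):

from transformers import M2M100Tokenizer

# src_lang/tgt_lang are null in tokenizer_config.json above, so they are
# typically supplied at load time or before encoding.
tokenizer = M2M100Tokenizer.from_pretrained(
    "facebook/m2m100_418M",  # example repo id
    src_lang="en",
    tgt_lang="fr",
)
print(tokenizer.unk_token)  # "<unk>"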