Commit ea2b037 by Xenova (HF staff)
Parent(s): efb7392

Upload folder using huggingface_hub

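The commit message points at the standard `huggingface_hub` upload path. A minimal sketch of how such a commit is typically produced, assuming a local export folder and a hypothetical target repo id (neither is stated in this commit):

```python
# Minimal sketch: pushing an exported folder with huggingface_hub.
# The folder path and repo id below are placeholders, not values taken from this commit.
from huggingface_hub import upload_folder

upload_folder(
    folder_path="./esm2_t6_8M_UR50D_rna_binding_site_predictor",   # local ONNX export (assumed)
    repo_id="Xenova/esm2_t6_8M_UR50D_rna_binding_site_predictor",  # hypothetical target repo
    commit_message="Upload folder using huggingface_hub",
)
```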
config.json ADDED
@@ -0,0 +1,29 @@
+ {
+   "_name_or_path": "AmelieSchreiber/esm2_t6_8M_UR50D_rna_binding_site_predictor",
+   "architectures": [
+     "EsmForTokenClassification"
+   ],
+   "attention_probs_dropout_prob": 0.0,
+   "classifier_dropout": null,
+   "emb_layer_norm_before": false,
+   "esmfold_config": null,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.0,
+   "hidden_size": 320,
+   "initializer_range": 0.02,
+   "intermediate_size": 1280,
+   "is_folding_model": false,
+   "layer_norm_eps": 1e-05,
+   "mask_token_id": 32,
+   "max_position_embeddings": 1026,
+   "model_type": "esm",
+   "num_attention_heads": 20,
+   "num_hidden_layers": 6,
+   "pad_token_id": 1,
+   "position_embedding_type": "rotary",
+   "token_dropout": true,
+   "transformers_version": "4.36.0.dev0",
+   "use_cache": true,
+   "vocab_list": null,
+   "vocab_size": 33
+ }
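The config declares an `EsmForTokenClassification` head on the 6-layer, 320-dimensional ESM-2 (8M) backbone: 20 attention heads, rotary position embeddings, a 33-symbol vocabulary, and sequences up to 1026 positions. A quick way to sanity-check these values, assuming the source checkpoint named in `_name_or_path` is reachable on the Hub (otherwise point `from_pretrained` at a local clone of this repo):

```python
# Sketch: reading the same architecture hyperparameters via 🤗 Transformers.
from transformers import AutoConfig

config = AutoConfig.from_pretrained(
    "AmelieSchreiber/esm2_t6_8M_UR50D_rna_binding_site_predictor"  # value of "_name_or_path"
)
print(config.model_type)               # esm
print(config.num_hidden_layers)        # 6
print(config.hidden_size)              # 320
print(config.num_attention_heads)      # 20
print(config.max_position_embeddings)  # 1026
```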
onnx/model.onnx ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d2cc8fb7b7e648b73a22912811adfacd5c2522cef82a5e5ae88ea7a27bb4f682
+ size 29885719
onnx/model_quantized.onnx ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:14606ae2355d99c8df32d67484cbc93e7a46c69b2a97a7c68a87771c798625ad
+ size 7950761
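Both `.onnx` files are stored as Git LFS pointers; after `git lfs pull`, the full-precision graph is roughly 29.9 MB and the quantized one roughly 8.0 MB. A sketch of running the exported graph directly with `onnxruntime`, assuming the standard `input_ids`/`attention_mask` input names of an Optimum-style token-classification export and the `tokenizer.json` shipped further down:

```python
# Sketch: per-residue inference with onnxruntime on a local clone of this repo.
import numpy as np
import onnxruntime as ort
from tokenizers import Tokenizer

tokenizer = Tokenizer.from_file("tokenizer.json")
enc = tokenizer.encode("MKTAYIAKQRQISFVKSHFSRQLEERLGLIEVQ")  # example protein sequence

session = ort.InferenceSession("onnx/model.onnx")  # or "onnx/model_quantized.onnx"
outputs = session.run(None, {
    "input_ids": np.array([enc.ids], dtype=np.int64),
    "attention_mask": np.array([enc.attention_mask], dtype=np.int64),
})
logits = outputs[0]              # shape: (1, sequence_length, num_labels)
predictions = logits.argmax(-1)  # predicted binding-site label per token
```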
quantize_config.json ADDED
@@ -0,0 +1,39 @@
+ {
+   "per_channel": true,
+   "reduce_range": true,
+   "per_model_config": {
+     "model": {
+       "op_types": [
+         "Shape",
+         "Slice",
+         "Einsum",
+         "Range",
+         "MatMul",
+         "ReduceMean",
+         "Erf",
+         "Neg",
+         "Sqrt",
+         "Constant",
+         "Cos",
+         "Reshape",
+         "Softmax",
+         "Div",
+         "Sin",
+         "ReduceSum",
+         "Concat",
+         "Add",
+         "Where",
+         "Unsqueeze",
+         "Pow",
+         "Identity",
+         "Cast",
+         "Sub",
+         "Gather",
+         "Mul",
+         "Transpose",
+         "Equal"
+       ],
+       "weight_type": "QInt8"
+     }
+   }
+ }
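quantize_config.json records the settings used for the weight-quantized export: per-channel quantization with reduced range and QInt8 weights, applied over the listed ONNX op types. A comparable step can be reproduced with `onnxruntime`'s dynamic quantizer; this is a sketch of equivalent settings, not necessarily the exact conversion script used for this repo:

```python
# Sketch: dynamic weight quantization matching the recorded settings.
from onnxruntime.quantization import QuantType, quantize_dynamic

quantize_dynamic(
    model_input="onnx/model.onnx",
    model_output="onnx/model_quantized.onnx",
    per_channel=True,             # "per_channel": true
    reduce_range=True,            # "reduce_range": true
    weight_type=QuantType.QInt8,  # "weight_type": "QInt8"
)
```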
tokenizer.json ADDED
@@ -0,0 +1,407 @@
+ {
+   "version": "1.0",
+   "truncation": null,
+   "padding": null,
+   "added_tokens": [
+     {
+       "id": 0,
+       "content": "<cls>",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": false,
+       "special": true
+     },
+     {
+       "id": 1,
+       "content": "<pad>",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": false,
+       "special": true
+     },
+     {
+       "id": 2,
+       "content": "<eos>",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": false,
+       "special": true
+     },
+     {
+       "id": 3,
+       "content": "<unk>",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": false,
+       "special": true
+     },
+     {
+       "id": 4,
+       "content": "L",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 5,
+       "content": "A",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 6,
+       "content": "G",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 7,
+       "content": "V",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 8,
+       "content": "S",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 9,
+       "content": "E",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 10,
+       "content": "R",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 11,
+       "content": "T",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 12,
+       "content": "I",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 13,
+       "content": "D",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 14,
+       "content": "P",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 15,
+       "content": "K",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 16,
+       "content": "Q",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 17,
+       "content": "N",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 18,
+       "content": "F",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 19,
+       "content": "Y",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 20,
+       "content": "M",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 21,
+       "content": "H",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 22,
+       "content": "W",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 23,
+       "content": "C",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 24,
+       "content": "X",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 25,
+       "content": "B",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 26,
+       "content": "U",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 27,
+       "content": "Z",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 28,
+       "content": "O",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 29,
+       "content": ".",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 30,
+       "content": "-",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 31,
+       "content": "<null_1>",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     },
+     {
+       "id": 32,
+       "content": "<mask>",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": true,
+       "special": false
+     }
+   ],
+   "normalizer": null,
+   "pre_tokenizer": {
+     "type": "WhitespaceSplit"
+   },
+   "post_processor": {
+     "type": "TemplateProcessing",
+     "single": [
+       {
+         "SpecialToken": {
+           "id": "<cls>",
+           "type_id": 0
+         }
+       },
+       {
+         "Sequence": {
+           "id": "A",
+           "type_id": 0
+         }
+       },
+       {
+         "SpecialToken": {
+           "id": "<eos>",
+           "type_id": 0
+         }
+       }
+     ],
+     "pair": [
+       {
+         "Sequence": {
+           "id": "A",
+           "type_id": 0
+         }
+       },
+       {
+         "Sequence": {
+           "id": "B",
+           "type_id": 1
+         }
+       }
+     ],
+     "special_tokens": {
+       "<cls>": {
+         "id": "<cls>",
+         "ids": [
+           0
+         ],
+         "tokens": [
+           "<cls>"
+         ]
+       },
+       "<eos>": {
+         "id": "<eos>",
+         "ids": [
+           2
+         ],
+         "tokens": [
+           "<eos>"
+         ]
+       }
+     }
+   },
+   "decoder": null,
+   "model": {
+     "type": "WordPiece",
+     "unk_token": "<unk>",
+     "continuing_subword_prefix": "",
+     "max_input_chars_per_word": 10000000000,
+     "vocab": {
+       "<cls>": 0,
+       "<pad>": 1,
+       "<eos>": 2,
+       "<unk>": 3,
+       "L": 4,
+       "A": 5,
+       "G": 6,
+       "V": 7,
+       "S": 8,
+       "E": 9,
+       "R": 10,
+       "T": 11,
+       "I": 12,
+       "D": 13,
+       "P": 14,
+       "K": 15,
+       "Q": 16,
+       "N": 17,
+       "F": 18,
+       "Y": 19,
+       "M": 20,
+       "H": 21,
+       "W": 22,
+       "C": 23,
+       "X": 24,
+       "B": 25,
+       "U": 26,
+       "Z": 27,
+       "O": 28,
+       ".": 29,
+       "-": 30,
+       "<null_1>": 31,
+       "<mask>": 32
+     }
+   }
+ }
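The tokenizer is a whitespace-split WordPiece over the 33-symbol ESM vocabulary (the 20 standard amino acids plus ambiguity codes, gap characters, and special tokens), with a template post-processor that wraps every sequence in `<cls>` ... `<eos>`. A sketch of loading it with the `tokenizers` library to confirm that behaviour:

```python
# Sketch: encoding a short amino-acid sequence with the shipped tokenizer.json.
from tokenizers import Tokenizer

tokenizer = Tokenizer.from_file("tokenizer.json")
encoding = tokenizer.encode("MKTAYIAK")
print(encoding.tokens)  # ['<cls>', 'M', 'K', 'T', 'A', 'Y', 'I', 'A', 'K', '<eos>']
print(encoding.ids)     # [0, 20, 15, 11, 5, 19, 12, 5, 15, 2]
```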