maximedb committed on
Commit
2f341ce
1 Parent(s): 030391b

Add new SentenceTransformer model.

.gitattributes CHANGED
@@ -25,3 +25,6 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zstandard filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ pytorch_model.bin filter=lfs diff=lfs merge=lfs -text
+ .git/lfs/objects/32/65/32656706e38c4ffb554cd11524a935b5ed31b6685a661db4d802d062a2e34c3c filter=lfs diff=lfs merge=lfs -text
1_Pooling/config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "word_embedding_dimension": 768,
+   "pooling_mode_cls_token": false,
+   "pooling_mode_mean_tokens": true,
+   "pooling_mode_max_tokens": false,
+   "pooling_mode_mean_sqrt_len_tokens": false
+ }
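This configuration selects mean pooling: token embeddings are averaged into one 768-dimensional sentence vector. A minimal sketch of the module it describes, using the sentence-transformers 2.0.0 API pinned later in this commit:

```python
from sentence_transformers import models

# Mirrors 1_Pooling/config.json: mean pooling only; CLS, max,
# and sqrt-length pooling modes are all disabled.
pooling = models.Pooling(
    word_embedding_dimension=768,
    pooling_mode_cls_token=False,
    pooling_mode_mean_tokens=True,
    pooling_mode_max_tokens=False,
    pooling_mode_mean_sqrt_len_tokens=False,
)
print(pooling.get_sentence_embedding_dimension())  # 768
```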
README.md ADDED
@@ -0,0 +1,93 @@
+ ---
+ pipeline_tag: sentence-similarity
+ tags:
+ - sentence-transformers
+ - feature-extraction
+ - sentence-similarity
+ - transformers
+ ---
+
+ # clips/mfaq
+
+ This is a [sentence-transformers](https://www.SBERT.net) model: it maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for tasks like clustering or semantic search.
+
+ <!--- Describe your model here -->
+
+ ## Usage (Sentence-Transformers)
+
+ Using this model is straightforward once you have [sentence-transformers](https://www.SBERT.net) installed:
+
+ ```
+ pip install -U sentence-transformers
+ ```
+
+ Then you can use the model like this:
+
+ ```python
+ from sentence_transformers import SentenceTransformer
+ sentences = ["This is an example sentence", "Each sentence is converted"]
+
+ model = SentenceTransformer('clips/mfaq')
+ embeddings = model.encode(sentences)
+ print(embeddings)
+ ```
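As a follow-up sketch (not spelled out in the card itself, assuming the same `clips/mfaq` checkpoint), the resulting embeddings can be scored with cosine similarity via `sentence_transformers.util`:

```python
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer('clips/mfaq')
embeddings = model.encode(
    ["This is an example sentence", "Each sentence is converted"],
    convert_to_tensor=True,
)

# Cosine similarity between the two sentence embeddings (a 1x1 tensor).
print(util.pytorch_cos_sim(embeddings[0], embeddings[1]))
```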
+
+
+ ## Usage (HuggingFace Transformers)
+ Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: first, pass your input through the transformer model, then apply the right pooling operation on top of the contextualized word embeddings.
+
+ ```python
+ from transformers import AutoTokenizer, AutoModel
+ import torch
+
+
+ # Mean pooling: take the attention mask into account for correct averaging
+ def mean_pooling(model_output, attention_mask):
+     token_embeddings = model_output[0]  # First element of model_output contains all token embeddings
+     input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
+     return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
+
+
+ # Sentences we want sentence embeddings for
+ sentences = ['This is an example sentence', 'Each sentence is converted']
+
+ # Load model from HuggingFace Hub
+ tokenizer = AutoTokenizer.from_pretrained('clips/mfaq')
+ model = AutoModel.from_pretrained('clips/mfaq')
+
+ # Tokenize sentences
+ encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
+
+ # Compute token embeddings
+ with torch.no_grad():
+     model_output = model(**encoded_input)
+
+ # Perform pooling. In this case, mean pooling.
+ sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
+
+ print("Sentence embeddings:")
+ print(sentence_embeddings)
+ ```
+
+
+ ## Evaluation Results
+
+ <!--- Describe how your model was evaluated -->
+
+ For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=clips/mfaq)
+
+
+ ## Full Model Architecture
+ ```
+ SentenceTransformer(
+   (0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: XLMRobertaModel
+   (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
+ )
+ ```
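A sketch of rebuilding this two-module pipeline by hand with `sentence_transformers.models`, assuming the published `clips/mfaq` weights:

```python
from sentence_transformers import SentenceTransformer, models

# Module 0: the XLM-R encoder, truncating inputs at 128 tokens.
word_embedding_model = models.Transformer('clips/mfaq', max_seq_length=128)

# Module 1: mean pooling over token embeddings (see 1_Pooling/config.json).
pooling_model = models.Pooling(
    word_embedding_model.get_word_embedding_dimension(),  # 768
    pooling_mode_mean_tokens=True,
)

model = SentenceTransformer(modules=[word_embedding_model, pooling_model])
```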
+
+ ## Citing & Authors
+
+ <!--- Describe where people can find more information -->
added_tokens.json ADDED
@@ -0,0 +1 @@
+ {"<Q>": 250002, "<link>": 250004, "<A>": 250003}
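These three tokens extend the base XLM-R vocabulary, which is why `config.json` below reports `vocab_size: 250005`, and the checkpoint now loads as a bare `XLMRobertaModel` encoder rather than the old sequence-classification wrapper. A quick sanity check, assuming the published `clips/mfaq` checkpoint:

```python
from transformers import AutoConfig, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained('clips/mfaq')
config = AutoConfig.from_pretrained('clips/mfaq')

# The added tokens should resolve to the ids listed in added_tokens.json.
print(tokenizer.convert_tokens_to_ids(['<Q>', '<A>', '<link>']))  # [250002, 250003, 250004]
print(config.architectures)  # ['XLMRobertaModel']
print(config.vocab_size)     # 250005
```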
config.json CHANGED
@@ -1,1045 +1,18 @@
  {
-   "_name_or_path": "xlm-roberta-base",
+   "_name_or_path": "output/1792/checkpoint-3000",
    "architectures": [
-     "XLMRobertaForSequenceClassification"
+     "XLMRobertaModel"
    ],
    "attention_probs_dropout_prob": 0.1,
    "bos_token_id": 0,
+   "classifier_dropout": null,
    "eos_token_id": 2,
    "gradient_checkpointing": true,
    "hidden_act": "gelu",
    "hidden_dropout_prob": 0.25,
    "hidden_size": 768,
-   "id2label": {
-     "0": "LABEL_0",
-     "1": "LABEL_1",
-     ... (510 more auto-generated entries, through "511": "LABEL_511") ...
-   },
    "initializer_range": 0.02,
    "intermediate_size": 3072,
-   "label2id": {
-     "LABEL_0": 0,
-     "LABEL_1": 1,
-     ... (510 more auto-generated entries, the inverse of id2label above) ...
-   },
    "layer_norm_eps": 1e-05,
    "max_position_embeddings": 514,
    "model_type": "xlm-roberta",
@@ -1048,7 +21,9 @@
    "output_past": true,
    "pad_token_id": 1,
    "position_embedding_type": "absolute",
-   "transformers_version": "4.7.0",
+   "tokenizer_class": "XLMRobertaTokenizerFast",
+   "torch_dtype": "float32",
+   "transformers_version": "4.10.2",
    "type_vocab_size": 1,
    "use_cache": true,
    "vocab_size": 250005
config_sentence_transformers.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "__version__": {
+     "sentence_transformers": "2.0.0",
+     "transformers": "4.10.2",
+     "pytorch": "1.9.0"
+   }
+ }
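These pins record the environment the model was exported from; newer releases generally load the checkpoint fine, but a quick check of the local environment against the pins might look like:

```python
import sentence_transformers
import torch
import transformers

# Exported with sentence-transformers 2.0.0, transformers 4.10.2,
# pytorch 1.9.0 (see config_sentence_transformers.json above).
print(sentence_transformers.__version__, transformers.__version__, torch.__version__)
```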
modules.json ADDED
@@ -0,0 +1,14 @@
+ [
+   {
+     "idx": 0,
+     "name": "0",
+     "path": "",
+     "type": "sentence_transformers.models.Transformer"
+   },
+   {
+     "idx": 1,
+     "name": "1",
+     "path": "1_Pooling",
+     "type": "sentence_transformers.models.Pooling"
+   }
+ ]
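`modules.json` is how `SentenceTransformer('clips/mfaq')` reassembles the pipeline: entry 0 loads the Transformer from the repository root, entry 1 loads the pooling layer from `1_Pooling/`. Printing the loaded model should reproduce the architecture shown in the README:

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer('clips/mfaq')
print(model)
# Expected (matches "Full Model Architecture" in the README):
# SentenceTransformer(
#   (0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: XLMRobertaModel
#   (1): Pooling({'word_embedding_dimension': 768, ...})
# )
```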
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:32656706e38c4ffb554cd11524a935b5ed31b6685a661db4d802d062a2e34c3c
- size 1113849353
+ oid sha256:06d1de8984836f80f4d67ad569562fe9836cfbbd185be6fd908a4a43d545e001
+ size 1112262449
sentence_bert_config.json ADDED
@@ -0,0 +1,4 @@
+ {
+   "max_seq_length": 128,
+   "do_lower_case": false
+ }
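`max_seq_length: 128` is the limit sentence-transformers actually applies at encoding time (the tokenizer's own `model_max_length` of 512, set below, is not the binding limit). It is exposed on the loaded model:

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer('clips/mfaq')
print(model.max_seq_length)  # 128; longer inputs are truncated
```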
sentencepiece.bpe.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cfc8146abe2a0488e9e2a0c56de7952f7c11ab059eca145a0a727afce0db2865
+ size 5069051
special_tokens_map.json CHANGED
@@ -1 +1 @@
- {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false}, "additional_special_tokens": ["<question>", "<answer>", "<link>"]}
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false}, "additional_special_tokens": ["<Q>", "<A>", "<link>"]}
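The question/answer markers are renamed from `<question>`/`<answer>` to `<Q>`/`<A>`, matching `added_tokens.json`. As an illustrative FAQ-retrieval usage (an assumption suggested by the tokens, not documented in this commit's README), questions and candidate answers would be prefixed with these markers before encoding:

```python
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer('clips/mfaq')

# Hypothetical example inputs, prefixed with the special markers.
question = "<Q>How many models can I host on HuggingFace?"
answers = [
    "<A>All plans come with unlimited private models and datasets.",
    "<A>Paid plans start at $9 per month.",
]

q_emb = model.encode(question, convert_to_tensor=True)
a_emb = model.encode(answers, convert_to_tensor=True)

# Higher cosine score = better matching answer.
print(util.pytorch_cos_sim(q_emb, a_emb))
```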
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1 +1 @@
- {"bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "model_max_length": 128, "fast": true, "additional_special_tokens": ["<question>", "<answer>", "<link>"], "special_tokens_map_file": null, "name_or_path": "xlm-roberta-base"}
+ {"bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "additional_special_tokens": ["<Q>", "<A>", "<link>"], "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "output/1792/checkpoint-3000", "tokenizer_class": "XLMRobertaTokenizer"}