Commit 0785fcb
1 Parent(s): 418f427

Add new SentenceTransformer model.

1_Pooling/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "word_embedding_dimension": 768,
+   "pooling_mode_cls_token": false,
+   "pooling_mode_mean_tokens": true,
+   "pooling_mode_max_tokens": false,
+   "pooling_mode_mean_sqrt_len_tokens": false,
+   "pooling_mode_weightedmean_tokens": false,
+   "pooling_mode_lasttoken": false,
+   "include_prompt": true
+ }
README.md ADDED
@@ -0,0 +1,842 @@
+ ---
+ language: []
+ library_name: sentence-transformers
+ tags:
+ - sentence-transformers
+ - sentence-similarity
+ - feature-extraction
+ - generated_from_trainer
+ - dataset_size:557850
+ - loss:MatryoshkaLoss
+ - loss:MultipleNegativesRankingLoss
+ base_model: tomaarsen/mpnet-base-all-nli-triplet
+ datasets: []
+ metrics:
+ - pearson_cosine
+ - spearman_cosine
+ - pearson_manhattan
+ - spearman_manhattan
+ - pearson_euclidean
+ - spearman_euclidean
+ - pearson_dot
+ - spearman_dot
+ - pearson_max
+ - spearman_max
+ widget:
+ - source_sentence: ذكر متوازن بعناية يقف على قدم واحدة بالقرب من منطقة شاطئ المحيط
+     النظيفة
+   sentences:
+   - رجل يقدم عرضاً
+   - هناك رجل بالخارج قرب الشاطئ
+   - رجل يجلس على أريكه
+ - source_sentence: رجل يقفز إلى سريره القذر
+   sentences:
+   - السرير قذر.
+   - رجل يضحك أثناء غسيل الملابس
+   - الرجل على القمر
+ - source_sentence: الفتيات بالخارج
+   sentences:
+   - امرأة تلف الخيط إلى كرات بجانب كومة من الكرات
+   - فتيان يركبان في جولة متعة
+   - ثلاث فتيات يقفون سوية في غرفة واحدة تستمع وواحدة تكتب على الحائط والثالثة تتحدث
+     إليهن
+ - source_sentence: الرجل يرتدي قميصاً أزرق.
+   sentences:
+   - رجل يرتدي قميصاً أزرق يميل إلى الجدار بجانب الطريق مع شاحنة زرقاء وسيارة حمراء
+     مع الماء في الخلفية.
+   - كتاب القصص مفتوح
+   - رجل يرتدي قميص أسود يعزف على الجيتار.
+ - source_sentence: يجلس شاب ذو شعر أشقر على الحائط يقرأ جريدة بينما تمر امرأة وفتاة
+     شابة.
+   sentences:
+   - ذكر شاب ينظر إلى جريدة بينما تمر إمرأتان بجانبه
+   - رجل يستلقي على وجهه على مقعد في الحديقة.
+   - الشاب نائم بينما الأم تقود ابنتها إلى الحديقة
+ pipeline_tag: sentence-similarity
+ model-index:
+ - name: SentenceTransformer based on tomaarsen/mpnet-base-all-nli-triplet
+   results:
+   - task:
+       type: semantic-similarity
+       name: Semantic Similarity
+     dataset:
+       name: sts test 768
+       type: sts-test-768
+     metrics:
+     - type: pearson_cosine
+       value: 0.66986244175229
+       name: Pearson Cosine
+     - type: spearman_cosine
+       value: 0.675651628513557
+       name: Spearman Cosine
+     - type: pearson_manhattan
+       value: 0.6943200977280434
+       name: Pearson Manhattan
+     - type: spearman_manhattan
+       value: 0.6839707658313092
+       name: Spearman Manhattan
+     - type: pearson_euclidean
+       value: 0.6973190148612566
+       name: Pearson Euclidean
+     - type: spearman_euclidean
+       value: 0.6872926092972673
+       name: Spearman Euclidean
+     - type: pearson_dot
+       value: 0.5534197296097646
+       name: Pearson Dot
+     - type: spearman_dot
+       value: 0.5421965591416092
+       name: Spearman Dot
+     - type: pearson_max
+       value: 0.6973190148612566
+       name: Pearson Max
+     - type: spearman_max
+       value: 0.6872926092972673
+       name: Spearman Max
+   - task:
+       type: semantic-similarity
+       name: Semantic Similarity
+     dataset:
+       name: sts test 512
+       type: sts-test-512
+     metrics:
+     - type: pearson_cosine
+       value: 0.6628171358537143
+       name: Pearson Cosine
+     - type: spearman_cosine
+       value: 0.670314701212355
+       name: Spearman Cosine
+     - type: pearson_manhattan
+       value: 0.6916567677127377
+       name: Pearson Manhattan
+     - type: spearman_manhattan
+       value: 0.6815748132707206
+       name: Spearman Manhattan
+     - type: pearson_euclidean
+       value: 0.6948756461188812
+       name: Pearson Euclidean
+     - type: spearman_euclidean
+       value: 0.685329042213794
+       name: Spearman Euclidean
+     - type: pearson_dot
+       value: 0.5229142840207227
+       name: Pearson Dot
+     - type: spearman_dot
+       value: 0.5113740757424073
+       name: Spearman Dot
+     - type: pearson_max
+       value: 0.6948756461188812
+       name: Pearson Max
+     - type: spearman_max
+       value: 0.685329042213794
+       name: Spearman Max
+   - task:
+       type: semantic-similarity
+       name: Semantic Similarity
+     dataset:
+       name: sts test 256
+       type: sts-test-256
+     metrics:
+     - type: pearson_cosine
+       value: 0.6368313837029833
+       name: Pearson Cosine
+     - type: spearman_cosine
+       value: 0.6512526280069127
+       name: Spearman Cosine
+     - type: pearson_manhattan
+       value: 0.6832129716443456
+       name: Pearson Manhattan
+     - type: spearman_manhattan
+       value: 0.674638334774044
+       name: Spearman Manhattan
+     - type: pearson_euclidean
+       value: 0.6843664039671002
+       name: Pearson Euclidean
+     - type: spearman_euclidean
+       value: 0.6760040651639672
+       name: Spearman Euclidean
+     - type: pearson_dot
+       value: 0.4266095536126992
+       name: Pearson Dot
+     - type: spearman_dot
+       value: 0.4179376458107888
+       name: Spearman Dot
+     - type: pearson_max
+       value: 0.6843664039671002
+       name: Pearson Max
+     - type: spearman_max
+       value: 0.6760040651639672
+       name: Spearman Max
+   - task:
+       type: semantic-similarity
+       name: Semantic Similarity
+     dataset:
+       name: sts test 128
+       type: sts-test-128
+     metrics:
+     - type: pearson_cosine
+       value: 0.6147896744901056
+       name: Pearson Cosine
+     - type: spearman_cosine
+       value: 0.6354730852658397
+       name: Spearman Cosine
+     - type: pearson_manhattan
+       value: 0.6730782159165468
+       name: Pearson Manhattan
+     - type: spearman_manhattan
+       value: 0.6652649799789521
+       name: Spearman Manhattan
+     - type: pearson_euclidean
+       value: 0.676407799774529
+       name: Pearson Euclidean
+     - type: spearman_euclidean
+       value: 0.6691409653459247
+       name: Spearman Euclidean
+     - type: pearson_dot
+       value: 0.35130869784942953
+       name: Pearson Dot
+     - type: spearman_dot
+       value: 0.3445374275232203
+       name: Spearman Dot
+     - type: pearson_max
+       value: 0.676407799774529
+       name: Pearson Max
+     - type: spearman_max
+       value: 0.6691409653459247
+       name: Spearman Max
+   - task:
+       type: semantic-similarity
+       name: Semantic Similarity
+     dataset:
+       name: sts test 64
+       type: sts-test-64
+     metrics:
+     - type: pearson_cosine
+       value: 0.5789158725954748
+       name: Pearson Cosine
+     - type: spearman_cosine
+       value: 0.6081197115891086
+       name: Spearman Cosine
+     - type: pearson_manhattan
+       value: 0.6578631744829946
+       name: Pearson Manhattan
+     - type: spearman_manhattan
+       value: 0.6518503436513217
+       name: Spearman Manhattan
+     - type: pearson_euclidean
+       value: 0.6629734628760299
+       name: Pearson Euclidean
+     - type: spearman_euclidean
+       value: 0.6570510967281272
+       name: Spearman Euclidean
+     - type: pearson_dot
+       value: 0.24034366392620327
+       name: Pearson Dot
+     - type: spearman_dot
+       value: 0.2331392769925126
+       name: Spearman Dot
+     - type: pearson_max
+       value: 0.6629734628760299
+       name: Pearson Max
+     - type: spearman_max
+       value: 0.6570510967281272
+       name: Spearman Max
+ ---
+
+ # SentenceTransformer based on tomaarsen/mpnet-base-all-nli-triplet
+
+ This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [tomaarsen/mpnet-base-all-nli-triplet](https://huggingface.co/tomaarsen/mpnet-base-all-nli-triplet) on the Omartificial-Intelligence-Space/arabic-n_li-triplet dataset. It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
+
+ ## Model Details
+
+ ### Model Description
+ - **Model Type:** Sentence Transformer
+ - **Base model:** [tomaarsen/mpnet-base-all-nli-triplet](https://huggingface.co/tomaarsen/mpnet-base-all-nli-triplet) <!-- at revision e88732e5620f3592bf6566604be9a6a5cad814ec -->
+ - **Maximum Sequence Length:** 512 tokens
+ - **Output Dimensionality:** 768 dimensions
+ - **Similarity Function:** Cosine Similarity
+ - **Training Dataset:**
+     - Omartificial-Intelligence-Space/arabic-n_li-triplet
+ <!-- - **Language:** Unknown -->
+ <!-- - **License:** Unknown -->
+
+ ### Model Sources
+
+ - **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
+ - **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
+ - **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)
+
+ ### Full Model Architecture
+
+ ```
+ SentenceTransformer(
+   (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: MPNetModel
+   (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
+ )
+ ```
+
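+ The `Pooling` module above averages MPNet token embeddings (`pooling_mode_mean_tokens: True`) into a single 768-dimensional sentence vector. Below is a minimal, illustrative sketch of that computation using plain `transformers`; it is an editorial example rather than part of the exported files, and it assumes the checkpoint loads with `AutoModel`/`AutoTokenizer`.
+
+ ```python
+ # Illustrative sketch: masked mean pooling over MPNet token embeddings.
+ import torch
+ from transformers import AutoModel, AutoTokenizer
+
+ repo = "Omartificial-Intelligence-Space/mpnet-base-all-nli-triplet-Arabic-mpnet_base"
+ tokenizer = AutoTokenizer.from_pretrained(repo)
+ backbone = AutoModel.from_pretrained(repo)
+
+ batch = tokenizer(["رجل يقدم عرضاً"], padding=True, truncation=True, max_length=512, return_tensors="pt")
+ with torch.no_grad():
+     token_embeddings = backbone(**batch).last_hidden_state  # (batch, seq_len, 768)
+ mask = batch["attention_mask"].unsqueeze(-1).float()        # (batch, seq_len, 1)
+ # Mean over non-padding tokens, as the Pooling module above produces.
+ sentence_embeddings = (token_embeddings * mask).sum(1) / mask.sum(1)  # (batch, 768)
+ ```
+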
+ ## Usage
+
+ ### Direct Usage (Sentence Transformers)
+
+ First install the Sentence Transformers library:
+
+ ```bash
+ pip install -U sentence-transformers
+ ```
+
+ Then you can load this model and run inference.
+ ```python
+ from sentence_transformers import SentenceTransformer
+
+ # Download from the 🤗 Hub
+ model = SentenceTransformer("Omartificial-Intelligence-Space/mpnet-base-all-nli-triplet-Arabic-mpnet_base")
+ # Run inference
+ sentences = [
+     'يجلس شاب ذو شعر أشقر على الحائط يقرأ جريدة بينما تمر امرأة وفتاة شابة.',
+     'ذكر شاب ينظر إلى جريدة بينما تمر إمرأتان بجانبه',
+     'الشاب نائم بينما الأم تقود ابنتها إلى الحديقة',
+ ]
+ embeddings = model.encode(sentences)
+ print(embeddings.shape)
+ # [3, 768]
+
+ # Get the similarity scores for the embeddings
+ similarities = model.similarity(embeddings, embeddings)
+ print(similarities.shape)
+ # [3, 3]
+ ```
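+
+ Because the model was trained with `MatryoshkaLoss` over the dimensions 768, 512, 256, 128 and 64, embeddings can also be truncated to a smaller prefix before comparison. The following is a hedged, editorial sketch (not from the original card) using plain NumPy for the truncation:
+
+ ```python
+ # Sketch: truncate Matryoshka embeddings and re-normalize before cosine similarity.
+ import numpy as np
+ from sentence_transformers import SentenceTransformer
+
+ model = SentenceTransformer("Omartificial-Intelligence-Space/mpnet-base-all-nli-triplet-Arabic-mpnet_base")
+ embeddings = model.encode([
+     'رجل يقدم عرضاً',
+     'هناك رجل بالخارج قرب الشاطئ',
+ ])                                  # shape: (2, 768)
+
+ dim = 256                           # any of the trained dimensions: 768, 512, 256, 128, 64
+ truncated = embeddings[:, :dim]
+ truncated = truncated / np.linalg.norm(truncated, axis=1, keepdims=True)
+ print(truncated @ truncated.T)      # cosine similarities at the truncated dimension
+ ```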
+
+ <!--
+ ### Direct Usage (Transformers)
+
+ <details><summary>Click to see the direct usage in Transformers</summary>
+
+ </details>
+ -->
+
+ <!--
+ ### Downstream Usage (Sentence Transformers)
+
+ You can finetune this model on your own dataset.
+
+ <details><summary>Click to expand</summary>
+
+ </details>
+ -->
+
+ <!--
+ ### Out-of-Scope Use
+
+ *List how the model may foreseeably be misused and address what users ought not to do with the model.*
+ -->
+
+ ## Evaluation
+
+ ### Metrics
+
+ #### Semantic Similarity
+ * Dataset: `sts-test-768`
+ * Evaluated with [<code>EmbeddingSimilarityEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.EmbeddingSimilarityEvaluator)
+
+ | Metric              | Value      |
+ |:--------------------|:-----------|
+ | pearson_cosine      | 0.6699     |
+ | **spearman_cosine** | **0.6757** |
+ | pearson_manhattan   | 0.6943     |
+ | spearman_manhattan  | 0.684      |
+ | pearson_euclidean   | 0.6973     |
+ | spearman_euclidean  | 0.6873     |
+ | pearson_dot         | 0.5534     |
+ | spearman_dot        | 0.5422     |
+ | pearson_max         | 0.6973     |
+ | spearman_max        | 0.6873     |
+
+ #### Semantic Similarity
+ * Dataset: `sts-test-512`
+ * Evaluated with [<code>EmbeddingSimilarityEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.EmbeddingSimilarityEvaluator)
+
+ | Metric              | Value      |
+ |:--------------------|:-----------|
+ | pearson_cosine      | 0.6628     |
+ | **spearman_cosine** | **0.6703** |
+ | pearson_manhattan   | 0.6917     |
+ | spearman_manhattan  | 0.6816     |
+ | pearson_euclidean   | 0.6949     |
+ | spearman_euclidean  | 0.6853     |
+ | pearson_dot         | 0.5229     |
+ | spearman_dot        | 0.5114     |
+ | pearson_max         | 0.6949     |
+ | spearman_max        | 0.6853     |
+
+ #### Semantic Similarity
+ * Dataset: `sts-test-256`
+ * Evaluated with [<code>EmbeddingSimilarityEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.EmbeddingSimilarityEvaluator)
+
+ | Metric              | Value      |
+ |:--------------------|:-----------|
+ | pearson_cosine      | 0.6368     |
+ | **spearman_cosine** | **0.6513** |
+ | pearson_manhattan   | 0.6832     |
+ | spearman_manhattan  | 0.6746     |
+ | pearson_euclidean   | 0.6844     |
+ | spearman_euclidean  | 0.676      |
+ | pearson_dot         | 0.4266     |
+ | spearman_dot        | 0.4179     |
+ | pearson_max         | 0.6844     |
+ | spearman_max        | 0.676      |
+
+ #### Semantic Similarity
+ * Dataset: `sts-test-128`
+ * Evaluated with [<code>EmbeddingSimilarityEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.EmbeddingSimilarityEvaluator)
+
+ | Metric              | Value      |
+ |:--------------------|:-----------|
+ | pearson_cosine      | 0.6148     |
+ | **spearman_cosine** | **0.6355** |
+ | pearson_manhattan   | 0.6731     |
+ | spearman_manhattan  | 0.6653     |
+ | pearson_euclidean   | 0.6764     |
+ | spearman_euclidean  | 0.6691     |
+ | pearson_dot         | 0.3513     |
+ | spearman_dot        | 0.3445     |
+ | pearson_max         | 0.6764     |
+ | spearman_max        | 0.6691     |
+
+ #### Semantic Similarity
+ * Dataset: `sts-test-64`
+ * Evaluated with [<code>EmbeddingSimilarityEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.EmbeddingSimilarityEvaluator)
+
+ | Metric              | Value      |
+ |:--------------------|:-----------|
+ | pearson_cosine      | 0.5789     |
+ | **spearman_cosine** | **0.6081** |
+ | pearson_manhattan   | 0.6579     |
+ | spearman_manhattan  | 0.6519     |
+ | pearson_euclidean   | 0.663      |
+ | spearman_euclidean  | 0.6571     |
+ | pearson_dot         | 0.2403     |
+ | spearman_dot        | 0.2331     |
+ | pearson_max         | 0.663      |
+ | spearman_max        | 0.6571     |
+
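+ The tables above come from `EmbeddingSimilarityEvaluator` runs at each Matryoshka dimension. A hedged sketch of how such an evaluation can be reproduced; the sentence pairs and gold scores below are placeholders, since the original STS test data is not included here:
+
+ ```python
+ # Sketch: score the model with EmbeddingSimilarityEvaluator on sentence pairs with gold similarities.
+ from sentence_transformers import SentenceTransformer
+ from sentence_transformers.evaluation import EmbeddingSimilarityEvaluator, SimilarityFunction
+
+ model = SentenceTransformer("Omartificial-Intelligence-Space/mpnet-base-all-nli-triplet-Arabic-mpnet_base")
+
+ # Placeholder data: pairs of sentences with gold similarity scores in [0, 1].
+ sentences1 = ["رجل يقدم عرضاً", "الرجل على القمر"]
+ sentences2 = ["هناك رجل بالخارج قرب الشاطئ", "رجل يجلس على أريكه"]
+ gold_scores = [0.7, 0.1]
+
+ evaluator = EmbeddingSimilarityEvaluator(
+     sentences1, sentences2, gold_scores,
+     main_similarity=SimilarityFunction.COSINE,
+     name="sts-test-768",
+ )
+ print(evaluator(model))  # Pearson/Spearman correlations for cosine, Manhattan, Euclidean and dot
+ ```
+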
+ <!--
+ ## Bias, Risks and Limitations
+
+ *What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.*
+ -->
+
+ <!--
+ ### Recommendations
+
+ *What are recommendations with respect to the foreseeable issues? For example, filtering explicit content.*
+ -->
+
+ ## Training Details
+
+ ### Training Dataset
+
+ #### Omartificial-Intelligence-Space/arabic-n_li-triplet
+
+ * Dataset: Omartificial-Intelligence-Space/arabic-n_li-triplet
+ * Size: 557,850 training samples
+ * Columns: <code>anchor</code>, <code>positive</code>, and <code>negative</code>
+ * Approximate statistics based on the first 1000 samples:
+   |         | anchor | positive | negative |
+   |:--------|:-------|:---------|:---------|
+   | type    | string | string   | string   |
+   | details | <ul><li>min: 12 tokens</li><li>mean: 23.93 tokens</li><li>max: 155 tokens</li></ul> | <ul><li>min: 9 tokens</li><li>mean: 29.62 tokens</li><li>max: 117 tokens</li></ul> | <ul><li>min: 13 tokens</li><li>mean: 33.95 tokens</li><li>max: 149 tokens</li></ul> |
+ * Samples:
+   | anchor | positive | negative |
+   |:-------|:---------|:---------|
+   | <code>شخص على حصان يقفز فوق طائرة معطلة</code> | <code>شخص في الهواء الطلق، على حصان.</code> | <code>شخص في مطعم، يطلب عجة.</code> |
+   | <code>أطفال يبتسمون و يلوحون للكاميرا</code> | <code>هناك أطفال حاضرون</code> | <code>الاطفال يتجهمون</code> |
+   | <code>صبي يقفز على لوح التزلج في منتصف الجسر الأحمر.</code> | <code>الفتى يقوم بخدعة التزلج</code> | <code>الصبي يتزلج على الرصيف</code> |
+ * Loss: [<code>MatryoshkaLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#matryoshkaloss) with these parameters:
+   ```json
+   {
+       "loss": "MultipleNegativesRankingLoss",
+       "matryoshka_dims": [
+           768,
+           512,
+           256,
+           128,
+           64
+       ],
+       "matryoshka_weights": [
+           1,
+           1,
+           1,
+           1,
+           1
+       ],
+       "n_dims_per_step": -1
+   }
+   ```
+
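+ For reference, a minimal sketch (editorial, not taken from the original training script) of how a `MatryoshkaLoss` with the parameters above is typically constructed in Sentence Transformers:
+
+ ```python
+ # Sketch: MatryoshkaLoss wrapping MultipleNegativesRankingLoss, matching the parameters listed above.
+ from sentence_transformers import SentenceTransformer
+ from sentence_transformers.losses import MatryoshkaLoss, MultipleNegativesRankingLoss
+
+ model = SentenceTransformer("tomaarsen/mpnet-base-all-nli-triplet")  # the base model being finetuned
+ inner_loss = MultipleNegativesRankingLoss(model)
+ train_loss = MatryoshkaLoss(
+     model,
+     inner_loss,
+     matryoshka_dims=[768, 512, 256, 128, 64],
+     matryoshka_weights=[1, 1, 1, 1, 1],
+     n_dims_per_step=-1,
+ )
+ ```
+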
+ ### Evaluation Dataset
+
+ #### Omartificial-Intelligence-Space/arabic-n_li-triplet
+
+ * Dataset: Omartificial-Intelligence-Space/arabic-n_li-triplet
+ * Size: 6,584 evaluation samples
+ * Columns: <code>anchor</code>, <code>positive</code>, and <code>negative</code>
+ * Approximate statistics based on the first 1000 samples:
+   |         | anchor | positive | negative |
+   |:--------|:-------|:---------|:---------|
+   | type    | string | string   | string   |
+   | details | <ul><li>min: 12 tokens</li><li>mean: 49.5 tokens</li><li>max: 246 tokens</li></ul> | <ul><li>min: 8 tokens</li><li>mean: 23.66 tokens</li><li>max: 103 tokens</li></ul> | <ul><li>min: 9 tokens</li><li>mean: 25.33 tokens</li><li>max: 82 tokens</li></ul> |
+ * Samples:
+   | anchor | positive | negative |
+   |:-------|:---------|:---------|
+   | <code>امرأتان يتعانقان بينما يحملان حزمة</code> | <code>إمرأتان يحملان حزمة</code> | <code>الرجال يتشاجرون خارج مطعم</code> |
+   | <code>طفلين صغيرين يرتديان قميصاً أزرق، أحدهما يرتدي الرقم 9 والآخر يرتدي الرقم 2 يقفان على خطوات خشبية في الحمام ويغسلان أيديهما في المغسلة.</code> | <code>طفلين يرتديان قميصاً مرقماً يغسلون أيديهم</code> | <code>طفلين يرتديان سترة يذهبان إلى المدرسة</code> |
+   | <code>رجل يبيع الدونات لعميل خلال معرض عالمي أقيم في مدينة أنجليس</code> | <code>رجل يبيع الدونات لعميل</code> | <code>امرأة تشرب قهوتها في مقهى صغير</code> |
+ * Loss: [<code>MatryoshkaLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#matryoshkaloss) with these parameters:
+   ```json
+   {
+       "loss": "MultipleNegativesRankingLoss",
+       "matryoshka_dims": [
+           768,
+           512,
+           256,
+           128,
+           64
+       ],
+       "matryoshka_weights": [
+           1,
+           1,
+           1,
+           1,
+           1
+       ],
+       "n_dims_per_step": -1
+   }
+   ```
+
+ ### Training Hyperparameters
+ #### Non-Default Hyperparameters
+
+ - `per_device_train_batch_size`: 64
+ - `per_device_eval_batch_size`: 64
+ - `warmup_ratio`: 0.1
+ - `fp16`: True
+ - `batch_sampler`: no_duplicates
+
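+ A hedged sketch (assumption, not the original training script) of expressing these non-default values with `SentenceTransformerTrainingArguments`; `output_dir` is a placeholder:
+
+ ```python
+ # Sketch: the non-default hyperparameters above as SentenceTransformerTrainingArguments.
+ from sentence_transformers import SentenceTransformerTrainingArguments
+ from sentence_transformers.training_args import BatchSamplers
+
+ args = SentenceTransformerTrainingArguments(
+     output_dir="output",                        # placeholder
+     per_device_train_batch_size=64,
+     per_device_eval_batch_size=64,
+     warmup_ratio=0.1,
+     fp16=True,
+     batch_sampler=BatchSamplers.NO_DUPLICATES,  # no duplicate samples within a batch
+ )
+ ```
+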
+ #### All Hyperparameters
+ <details><summary>Click to expand</summary>
+
+ - `overwrite_output_dir`: False
+ - `do_predict`: False
+ - `prediction_loss_only`: True
+ - `per_device_train_batch_size`: 64
+ - `per_device_eval_batch_size`: 64
+ - `per_gpu_train_batch_size`: None
+ - `per_gpu_eval_batch_size`: None
+ - `gradient_accumulation_steps`: 1
+ - `eval_accumulation_steps`: None
+ - `learning_rate`: 5e-05
+ - `weight_decay`: 0.0
+ - `adam_beta1`: 0.9
+ - `adam_beta2`: 0.999
+ - `adam_epsilon`: 1e-08
+ - `max_grad_norm`: 1.0
+ - `num_train_epochs`: 3
+ - `max_steps`: -1
+ - `lr_scheduler_type`: linear
+ - `lr_scheduler_kwargs`: {}
+ - `warmup_ratio`: 0.1
+ - `warmup_steps`: 0
+ - `log_level`: passive
+ - `log_level_replica`: warning
+ - `log_on_each_node`: True
+ - `logging_nan_inf_filter`: True
+ - `save_safetensors`: True
+ - `save_on_each_node`: False
+ - `save_only_model`: False
+ - `no_cuda`: False
+ - `use_cpu`: False
+ - `use_mps_device`: False
+ - `seed`: 42
+ - `data_seed`: None
+ - `jit_mode_eval`: False
+ - `use_ipex`: False
+ - `bf16`: False
+ - `fp16`: True
+ - `fp16_opt_level`: O1
+ - `half_precision_backend`: auto
+ - `bf16_full_eval`: False
+ - `fp16_full_eval`: False
+ - `tf32`: None
+ - `local_rank`: 0
+ - `ddp_backend`: None
+ - `tpu_num_cores`: None
+ - `tpu_metrics_debug`: False
+ - `debug`: []
+ - `dataloader_drop_last`: False
+ - `dataloader_num_workers`: 0
+ - `dataloader_prefetch_factor`: None
+ - `past_index`: -1
+ - `disable_tqdm`: False
+ - `remove_unused_columns`: True
+ - `label_names`: None
+ - `load_best_model_at_end`: False
+ - `ignore_data_skip`: False
+ - `fsdp`: []
+ - `fsdp_min_num_params`: 0
+ - `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
+ - `fsdp_transformer_layer_cls_to_wrap`: None
+ - `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'gradient_accumulation_kwargs': None}
+ - `deepspeed`: None
+ - `label_smoothing_factor`: 0.0
+ - `optim`: adamw_torch
+ - `optim_args`: None
+ - `adafactor`: False
+ - `group_by_length`: False
+ - `length_column_name`: length
+ - `ddp_find_unused_parameters`: None
+ - `ddp_bucket_cap_mb`: None
+ - `ddp_broadcast_buffers`: False
+ - `dataloader_pin_memory`: True
+ - `dataloader_persistent_workers`: False
+ - `skip_memory_metrics`: True
+ - `use_legacy_prediction_loop`: False
+ - `push_to_hub`: False
+ - `resume_from_checkpoint`: None
+ - `hub_model_id`: None
+ - `hub_strategy`: every_save
+ - `hub_private_repo`: False
+ - `hub_always_push`: False
+ - `gradient_checkpointing`: False
+ - `gradient_checkpointing_kwargs`: None
+ - `include_inputs_for_metrics`: False
+ - `eval_do_concat_batches`: True
+ - `fp16_backend`: auto
+ - `push_to_hub_model_id`: None
+ - `push_to_hub_organization`: None
+ - `mp_parameters`:
+ - `auto_find_batch_size`: False
+ - `full_determinism`: False
+ - `torchdynamo`: None
+ - `ray_scope`: last
+ - `ddp_timeout`: 1800
+ - `torch_compile`: False
+ - `torch_compile_backend`: None
+ - `torch_compile_mode`: None
+ - `dispatch_batches`: None
+ - `split_batches`: None
+ - `include_tokens_per_second`: False
+ - `include_num_input_tokens_seen`: False
+ - `neftune_noise_alpha`: None
+ - `optim_target_modules`: None
+ - `batch_sampler`: no_duplicates
+ - `multi_dataset_batch_sampler`: proportional
+
+ </details>
+
+ ### Training Logs
+ <details><summary>Click to expand</summary>
+
+ | Epoch  | Step  | Training Loss | sts-test-128_spearman_cosine | sts-test-256_spearman_cosine | sts-test-512_spearman_cosine | sts-test-64_spearman_cosine | sts-test-768_spearman_cosine |
+ |:------:|:-----:|:-------------:|:----------------------------:|:----------------------------:|:----------------------------:|:---------------------------:|:----------------------------:|
+ | 0.0229 | 200   | 21.5318       | -      | -      | -      | -      | -      |
+ | 0.0459 | 400   | 17.2344       | -      | -      | -      | -      | -      |
+ | 0.0688 | 600   | 15.393        | -      | -      | -      | -      | -      |
+ | 0.0918 | 800   | 13.7897       | -      | -      | -      | -      | -      |
+ | 0.1147 | 1000  | 13.534        | -      | -      | -      | -      | -      |
+ | 0.1377 | 1200  | 12.2683       | -      | -      | -      | -      | -      |
+ | 0.1606 | 1400  | 10.9271       | -      | -      | -      | -      | -      |
+ | 0.1835 | 1600  | 11.071        | -      | -      | -      | -      | -      |
+ | 0.2065 | 1800  | 10.0153       | -      | -      | -      | -      | -      |
+ | 0.2294 | 2000  | 9.8463        | -      | -      | -      | -      | -      |
+ | 0.2524 | 2200  | 10.0194       | -      | -      | -      | -      | -      |
+ | 0.2753 | 2400  | 9.8371        | -      | -      | -      | -      | -      |
+ | 0.2983 | 2600  | 9.6315        | -      | -      | -      | -      | -      |
+ | 0.3212 | 2800  | 8.9858        | -      | -      | -      | -      | -      |
+ | 0.3442 | 3000  | 9.1876        | -      | -      | -      | -      | -      |
+ | 0.3671 | 3200  | 8.8028        | -      | -      | -      | -      | -      |
+ | 0.3900 | 3400  | 8.6075        | -      | -      | -      | -      | -      |
+ | 0.4130 | 3600  | 8.4285        | -      | -      | -      | -      | -      |
+ | 0.4359 | 3800  | 8.1258        | -      | -      | -      | -      | -      |
+ | 0.4589 | 4000  | 8.2508        | -      | -      | -      | -      | -      |
+ | 0.4818 | 4200  | 7.8037        | -      | -      | -      | -      | -      |
+ | 0.5048 | 4400  | 7.7133        | -      | -      | -      | -      | -      |
+ | 0.5277 | 4600  | 7.5006        | -      | -      | -      | -      | -      |
+ | 0.5506 | 4800  | 7.7025        | -      | -      | -      | -      | -      |
+ | 0.5736 | 5000  | 7.7593        | -      | -      | -      | -      | -      |
+ | 0.5965 | 5200  | 7.6305        | -      | -      | -      | -      | -      |
+ | 0.6195 | 5400  | 7.7502        | -      | -      | -      | -      | -      |
+ | 0.6424 | 5600  | 7.5624        | -      | -      | -      | -      | -      |
+ | 0.6654 | 5800  | 7.5287        | -      | -      | -      | -      | -      |
+ | 0.6883 | 6000  | 7.4261        | -      | -      | -      | -      | -      |
+ | 0.7113 | 6200  | 7.239         | -      | -      | -      | -      | -      |
+ | 0.7342 | 6400  | 7.1631        | -      | -      | -      | -      | -      |
+ | 0.7571 | 6600  | 7.6865        | -      | -      | -      | -      | -      |
+ | 0.7801 | 6800  | 7.6124        | -      | -      | -      | -      | -      |
+ | 0.8030 | 7000  | 6.9936        | -      | -      | -      | -      | -      |
+ | 0.8260 | 7200  | 6.7331        | -      | -      | -      | -      | -      |
+ | 0.8489 | 7400  | 6.4542        | -      | -      | -      | -      | -      |
+ | 0.8719 | 7600  | 6.1994        | -      | -      | -      | -      | -      |
+ | 0.8948 | 7800  | 5.9798        | -      | -      | -      | -      | -      |
+ | 0.9177 | 8000  | 5.7808        | -      | -      | -      | -      | -      |
+ | 0.9407 | 8200  | 5.6952        | -      | -      | -      | -      | -      |
+ | 0.9636 | 8400  | 5.5082        | -      | -      | -      | -      | -      |
+ | 0.9866 | 8600  | 5.4421        | -      | -      | -      | -      | -      |
+ | 1.0095 | 8800  | 3.0309        | -      | -      | -      | -      | -      |
+ | 1.0026 | 9000  | 1.1835        | -      | -      | -      | -      | -      |
+ | 1.0256 | 9200  | 8.1196        | -      | -      | -      | -      | -      |
+ | 1.0485 | 9400  | 8.0326        | -      | -      | -      | -      | -      |
+ | 1.0715 | 9600  | 8.5028        | -      | -      | -      | -      | -      |
+ | 1.0944 | 9800  | 7.6923        | -      | -      | -      | -      | -      |
+ | 1.1174 | 10000 | 8.029         | -      | -      | -      | -      | -      |
+ | 1.1403 | 10200 | 7.5052        | -      | -      | -      | -      | -      |
+ | 1.1632 | 10400 | 7.1177        | -      | -      | -      | -      | -      |
+ | 1.1862 | 10600 | 6.9594        | -      | -      | -      | -      | -      |
+ | 1.2091 | 10800 | 6.6662        | -      | -      | -      | -      | -      |
+ | 1.2321 | 11000 | 6.6903        | -      | -      | -      | -      | -      |
+ | 1.2550 | 11200 | 6.9523        | -      | -      | -      | -      | -      |
+ | 1.2780 | 11400 | 6.676         | -      | -      | -      | -      | -      |
+ | 1.3009 | 11600 | 6.7141        | -      | -      | -      | -      | -      |
+ | 1.3238 | 11800 | 6.568         | -      | -      | -      | -      | -      |
+ | 1.3468 | 12000 | 6.8938        | -      | -      | -      | -      | -      |
+ | 1.3697 | 12200 | 6.3745        | -      | -      | -      | -      | -      |
+ | 1.3927 | 12400 | 6.2513        | -      | -      | -      | -      | -      |
+ | 1.4156 | 12600 | 6.2589        | -      | -      | -      | -      | -      |
+ | 1.4386 | 12800 | 6.1388        | -      | -      | -      | -      | -      |
+ | 1.4615 | 13000 | 6.1835        | -      | -      | -      | -      | -      |
+ | 1.4845 | 13200 | 5.9004        | -      | -      | -      | -      | -      |
+ | 1.5074 | 13400 | 5.7891        | -      | -      | -      | -      | -      |
+ | 1.5303 | 13600 | 5.6184        | -      | -      | -      | -      | -      |
+ | 1.5533 | 13800 | 5.9762        | -      | -      | -      | -      | -      |
+ | 1.5762 | 14000 | 5.9737        | -      | -      | -      | -      | -      |
+ | 1.5992 | 14200 | 5.8563        | -      | -      | -      | -      | -      |
+ | 1.6221 | 14400 | 5.8904        | -      | -      | -      | -      | -      |
+ | 1.6451 | 14600 | 5.8484        | -      | -      | -      | -      | -      |
+ | 1.6680 | 14800 | 5.8906        | -      | -      | -      | -      | -      |
+ | 1.6909 | 15000 | 5.7613        | -      | -      | -      | -      | -      |
+ | 1.7139 | 15200 | 5.5744        | -      | -      | -      | -      | -      |
+ | 1.7368 | 15400 | 5.6569        | -      | -      | -      | -      | -      |
+ | 1.7598 | 15600 | 5.7439        | -      | -      | -      | -      | -      |
+ | 1.7827 | 15800 | 5.5593        | -      | -      | -      | -      | -      |
+ | 1.8057 | 16000 | 5.2935        | -      | -      | -      | -      | -      |
+ | 1.8286 | 16200 | 5.088         | -      | -      | -      | -      | -      |
+ | 1.8516 | 16400 | 5.0167        | -      | -      | -      | -      | -      |
+ | 1.8745 | 16600 | 4.84          | -      | -      | -      | -      | -      |
+ | 1.8974 | 16800 | 4.6731        | -      | -      | -      | -      | -      |
+ | 1.9204 | 17000 | 4.6404        | -      | -      | -      | -      | -      |
+ | 1.9433 | 17200 | 4.6413        | -      | -      | -      | -      | -      |
+ | 1.9663 | 17400 | 4.4495        | -      | -      | -      | -      | -      |
+ | 1.9892 | 17600 | 4.4262        | -      | -      | -      | -      | -      |
+ | 2.0122 | 17800 | 2.01          | -      | -      | -      | -      | -      |
+ | 2.0053 | 18000 | 1.8418        | -      | -      | -      | -      | -      |
+ | 2.0282 | 18200 | 6.2714        | -      | -      | -      | -      | -      |
+ | 2.0512 | 18400 | 6.1742        | -      | -      | -      | -      | -      |
+ | 2.0741 | 18600 | 6.5996        | -      | -      | -      | -      | -      |
+ | 2.0971 | 18800 | 6.0907        | -      | -      | -      | -      | -      |
+ | 2.1200 | 19000 | 6.2418        | -      | -      | -      | -      | -      |
+ | 2.1429 | 19200 | 5.7817        | -      | -      | -      | -      | -      |
+ | 2.1659 | 19400 | 5.7073        | -      | -      | -      | -      | -      |
+ | 2.1888 | 19600 | 5.2645        | -      | -      | -      | -      | -      |
+ | 2.2118 | 19800 | 5.3451        | -      | -      | -      | -      | -      |
+ | 2.2347 | 20000 | 5.2453        | -      | -      | -      | -      | -      |
+ | 2.2577 | 20200 | 5.6161        | -      | -      | -      | -      | -      |
+ | 2.2806 | 20400 | 5.2289        | -      | -      | -      | -      | -      |
+ | 2.3035 | 20600 | 5.3888        | -      | -      | -      | -      | -      |
+ | 2.3265 | 20800 | 5.2483        | -      | -      | -      | -      | -      |
+ | 2.3494 | 21000 | 5.5791        | -      | -      | -      | -      | -      |
+ | 2.3724 | 21200 | 5.1643        | -      | -      | -      | -      | -      |
+ | 2.3953 | 21400 | 5.1231        | -      | -      | -      | -      | -      |
+ | 2.4183 | 21600 | 5.1055        | -      | -      | -      | -      | -      |
+ | 2.4412 | 21800 | 5.1778        | -      | -      | -      | -      | -      |
+ | 2.4642 | 22000 | 5.0466        | -      | -      | -      | -      | -      |
+ | 2.4871 | 22200 | 4.8321        | -      | -      | -      | -      | -      |
+ | 2.5100 | 22400 | 4.7056        | -      | -      | -      | -      | -      |
+ | 2.5330 | 22600 | 4.6858        | -      | -      | -      | -      | -      |
+ | 2.5559 | 22800 | 4.9189        | -      | -      | -      | -      | -      |
+ | 2.5789 | 23000 | 4.912         | -      | -      | -      | -      | -      |
+ | 2.6018 | 23200 | 4.8289        | -      | -      | -      | -      | -      |
+ | 2.6248 | 23400 | 4.8959        | -      | -      | -      | -      | -      |
+ | 2.6477 | 23600 | 4.9441        | -      | -      | -      | -      | -      |
+ | 2.6706 | 23800 | 4.9334        | -      | -      | -      | -      | -      |
+ | 2.6936 | 24000 | 4.8328        | -      | -      | -      | -      | -      |
+ | 2.7165 | 24200 | 4.601         | -      | -      | -      | -      | -      |
+ | 2.7395 | 24400 | 4.834         | -      | -      | -      | -      | -      |
+ | 2.7624 | 24600 | 5.152         | -      | -      | -      | -      | -      |
+ | 2.7854 | 24800 | 4.9232        | -      | -      | -      | -      | -      |
+ | 2.8083 | 25000 | 4.6556        | -      | -      | -      | -      | -      |
+ | 2.8312 | 25200 | 4.6229        | -      | -      | -      | -      | -      |
+ | 2.8542 | 25400 | 4.5768        | -      | -      | -      | -      | -      |
+ | 2.8771 | 25600 | 4.3619        | -      | -      | -      | -      | -      |
+ | 2.9001 | 25800 | 4.3608        | -      | -      | -      | -      | -      |
+ | 2.9230 | 26000 | 4.2834        | -      | -      | -      | -      | -      |
+ | 2.9403 | 26151 | -             | 0.6355 | 0.6513 | 0.6703 | 0.6081 | 0.6757 |
+
+ </details>
+
+ ### Framework Versions
+ - Python: 3.9.18
+ - Sentence Transformers: 3.0.1
+ - Transformers: 4.40.0
+ - PyTorch: 2.2.2+cu121
+ - Accelerate: 0.26.1
+ - Datasets: 2.19.0
+ - Tokenizers: 0.19.1
+
+ ## Citation
+
+ ### BibTeX
+
+ #### Sentence Transformers
+ ```bibtex
+ @inproceedings{reimers-2019-sentence-bert,
+     title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
+     author = "Reimers, Nils and Gurevych, Iryna",
+     booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
+     month = "11",
+     year = "2019",
+     publisher = "Association for Computational Linguistics",
+     url = "https://arxiv.org/abs/1908.10084",
+ }
+ ```
+
+ #### MatryoshkaLoss
+ ```bibtex
+ @misc{kusupati2024matryoshka,
+     title={Matryoshka Representation Learning},
+     author={Aditya Kusupati and Gantavya Bhatt and Aniket Rege and Matthew Wallingford and Aditya Sinha and Vivek Ramanujan and William Howard-Snyder and Kaifeng Chen and Sham Kakade and Prateek Jain and Ali Farhadi},
+     year={2024},
+     eprint={2205.13147},
+     archivePrefix={arXiv},
+     primaryClass={cs.LG}
+ }
+ ```
+
+ #### MultipleNegativesRankingLoss
+ ```bibtex
+ @misc{henderson2017efficient,
+     title={Efficient Natural Language Response Suggestion for Smart Reply},
+     author={Matthew Henderson and Rami Al-Rfou and Brian Strope and Yun-hsuan Sung and Laszlo Lukacs and Ruiqi Guo and Sanjiv Kumar and Balint Miklos and Ray Kurzweil},
+     year={2017},
+     eprint={1705.00652},
+     archivePrefix={arXiv},
+     primaryClass={cs.CL}
+ }
+ ```
+
+ <!--
+ ## Glossary
+
+ *Clearly define terms in order to be accessible across audiences.*
+ -->
+
+ <!--
+ ## Model Card Authors
+
+ *Lists the people who create the model card, providing recognition and accountability for the detailed work that goes into its construction.*
+ -->
+
+ <!--
+ ## Model Card Contact
+
+ *Provides a way for people who have updates to the Model Card, suggestions, or questions, to contact the Model Card authors.*
+ -->
config.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "_name_or_path": "tomaarsen/mpnet-base-all-nli-triplet",
+   "architectures": [
+     "MPNetModel"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "bos_token_id": 0,
+   "eos_token_id": 2,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 768,
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "layer_norm_eps": 1e-05,
+   "max_position_embeddings": 514,
+   "model_type": "mpnet",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "pad_token_id": 1,
+   "relative_attention_num_buckets": 32,
+   "torch_dtype": "float32",
+   "transformers_version": "4.40.0",
+   "vocab_size": 30527
+ }
config_sentence_transformers.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "__version__": {
+     "sentence_transformers": "3.0.1",
+     "transformers": "4.40.0",
+     "pytorch": "2.2.2+cu121"
+   },
+   "prompts": {},
+   "default_prompt_name": null,
+   "similarity_fn_name": "cosine"
+ }
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b6824258ca50bcd7ffb16d0b4afdf907af0440e8040ca54589b8f8ee935d9220
+ size 437967672
modules.json ADDED
@@ -0,0 +1,14 @@
+ [
+   {
+     "idx": 0,
+     "name": "0",
+     "path": "",
+     "type": "sentence_transformers.models.Transformer"
+   },
+   {
+     "idx": 1,
+     "name": "1",
+     "path": "1_Pooling",
+     "type": "sentence_transformers.models.Pooling"
+   }
+ ]
sentence_bert_config.json ADDED
@@ -0,0 +1,4 @@
+ {
+   "max_seq_length": 512,
+   "do_lower_case": false
+ }
special_tokens_map.json ADDED
@@ -0,0 +1,51 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "cls_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "mask_token": {
+     "content": "<mask>",
+     "lstrip": true,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<pad>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "sep_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "[UNK]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,72 @@
+ {
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<pad>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "3": {
+       "content": "<unk>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "104": {
+       "content": "[UNK]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "30526": {
+       "content": "<mask>",
+       "lstrip": true,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<s>",
+   "clean_up_tokenization_spaces": true,
+   "cls_token": "<s>",
+   "do_lower_case": true,
+   "eos_token": "</s>",
+   "mask_token": "<mask>",
+   "max_length": 512,
+   "model_max_length": 512,
+   "pad_to_multiple_of": null,
+   "pad_token": "<pad>",
+   "pad_token_type_id": 0,
+   "padding_side": "right",
+   "sep_token": "</s>",
+   "stride": 0,
+   "strip_accents": null,
+   "tokenize_chinese_chars": true,
+   "tokenizer_class": "MPNetTokenizer",
+   "truncation_side": "right",
+   "truncation_strategy": "longest_first",
+   "unk_token": "[UNK]"
+ }
vocab.txt ADDED
The diff for this file is too large to render. See raw diff