Commit 0245eac
Parent(s): 85c27b9

update readme.md

Files changed (1)
  1. README.md +256 -0
README.md CHANGED
@@ -4,6 +4,7 @@ language:
 - ar
 library_name: sentence-transformers
 tags:
+- mteb
 - sentence-transformers
 - sentence-similarity
 - feature-extraction
@@ -57,6 +58,261 @@ widget:
 - الشاب نائم بينما الأم تقود ابنتها إلى الحديقة
 pipeline_tag: sentence-similarity
 model-index:
+- name: Omartificial-Intelligence-Space/Arabic-labse-Matryoshka
+  results:
+  - dataset:
+      config: default
+      name: MTEB BIOSSES (default)
+      revision: d3fb88f8f02e40887cd149695127462bbcf29b4a
+      split: test
+      type: mteb/biosses-sts
+    metrics:
+    - type: cosine_pearson
+      value: 0.7646793455363026
+    - type: cosine_spearman
+      value: 0.7666439745271297
+    - type: euclidean_pearson
+      value: 0.7652075986969038
+    - type: euclidean_spearman
+      value: 0.7666439745271297
+    - type: main_score
+      value: 0.7666439745271297
+    - type: manhattan_pearson
+      value: 0.766800186746739
+    - type: manhattan_spearman
+      value: 0.7673066402288269
+    task:
+      type: STS
+  - dataset:
+      config: default
+      name: MTEB SICK-R (default)
+      revision: 20a6d6f312dd54037fe07a32d58e5e168867909d
+      split: test
+      type: mteb/sickr-sts
+    metrics:
+    - type: cosine_pearson
+      value: 0.7967657888969051
+    - type: cosine_spearman
+      value: 0.7703286109413092
+    - type: euclidean_pearson
+      value: 0.7819607355683363
+    - type: euclidean_spearman
+      value: 0.7703284984176336
+    - type: main_score
+      value: 0.7703286109413092
+    - type: manhattan_pearson
+      value: 0.7825627279409032
+    - type: manhattan_spearman
+      value: 0.7700431033772339
+    task:
+      type: STS
+  - dataset:
+      config: default
+      name: MTEB STS12 (default)
+      revision: a0d554a64d88156834ff5ae9920b964011b16384
+      split: test
+      type: mteb/sts12-sts
+    metrics:
+    - type: cosine_pearson
+      value: 0.8394288958816406
+    - type: cosine_spearman
+      value: 0.7921432094488725
+    - type: euclidean_pearson
+      value: 0.8121333332608397
+    - type: euclidean_spearman
+      value: 0.7921511763341483
+    - type: main_score
+      value: 0.7921432094488725
+    - type: manhattan_pearson
+      value: 0.8116910808144705
+    - type: manhattan_spearman
+      value: 0.7908747588244719
+    task:
+      type: STS
+  - dataset:
+      config: default
+      name: MTEB STS13 (default)
+      revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca
+      split: test
+      type: mteb/sts13-sts
+    metrics:
+    - type: cosine_pearson
+      value: 0.7748393894472824
+    - type: cosine_spearman
+      value: 0.7954963868861196
+    - type: euclidean_pearson
+      value: 0.7928415966325013
+    - type: euclidean_spearman
+      value: 0.7954963861790114
+    - type: main_score
+      value: 0.7954963868861196
+    - type: manhattan_pearson
+      value: 0.7918653941335837
+    - type: manhattan_spearman
+      value: 0.7946719007113934
+    task:
+      type: STS
+  - dataset:
+      config: default
+      name: MTEB STS14 (default)
+      revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375
+      split: test
+      type: mteb/sts14-sts
+    metrics:
+    - type: cosine_pearson
+      value: 0.7851596254385209
+    - type: cosine_spearman
+      value: 0.7884601914737768
+    - type: euclidean_pearson
+      value: 0.7855199771287279
+    - type: euclidean_spearman
+      value: 0.7884602020117785
+    - type: main_score
+      value: 0.7884601914737768
+    - type: manhattan_pearson
+      value: 0.7852780144651308
+    - type: manhattan_spearman
+      value: 0.787774430076152
+    task:
+      type: STS
+  - dataset:
+      config: default
+      name: MTEB STS15 (default)
+      revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3
+      split: test
+      type: mteb/sts15-sts
+    metrics:
+    - type: cosine_pearson
+      value: 0.8453393469117113
+    - type: cosine_spearman
+      value: 0.8560821849381648
+    - type: euclidean_pearson
+      value: 0.8532813929769605
+    - type: euclidean_spearman
+      value: 0.8560818604001905
+    - type: main_score
+      value: 0.8560821849381648
+    - type: manhattan_pearson
+      value: 0.8532782084710221
+    - type: manhattan_spearman
+      value: 0.8558098668045864
+    task:
+      type: STS
+  - dataset:
+      config: default
+      name: MTEB STS16 (default)
+      revision: 4d8694f8f0e0100860b497b999b3dbed754a0513
+      split: test
+      type: mteb/sts16-sts
+    metrics:
+    - type: cosine_pearson
+      value: 0.7700197085349484
+    - type: cosine_spearman
+      value: 0.79930951699069
+    - type: euclidean_pearson
+      value: 0.7943196781811703
+    - type: euclidean_spearman
+      value: 0.7993095112410258
+    - type: main_score
+      value: 0.79930951699069
+    - type: manhattan_pearson
+      value: 0.7933744400544465
+    - type: manhattan_spearman
+      value: 0.7982939266539602
+    task:
+      type: STS
+  - dataset:
+      config: ar-ar
+      name: MTEB STS17 (ar-ar)
+      revision: faeb762787bd10488a50c8b5be4a3b82e411949c
+      split: test
+      type: mteb/sts17-crosslingual-sts
+    metrics:
+    - type: cosine_pearson
+      value: 0.8160289458371599
+    - type: cosine_spearman
+      value: 0.8246806381979653
+    - type: euclidean_pearson
+      value: 0.813223525262988
+    - type: euclidean_spearman
+      value: 0.8248167872078906
+    - type: main_score
+      value: 0.8246806381979653
+    - type: manhattan_pearson
+      value: 0.8143885482754425
+    - type: manhattan_spearman
+      value: 0.8240446963802593
+    task:
+      type: STS
+  - dataset:
+      config: ar
+      name: MTEB STS22 (ar)
+      revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
+      split: test
+      type: mteb/sts22-crosslingual-sts
+    metrics:
+    - type: cosine_pearson
+      value: 0.4958293768761313
+    - type: cosine_spearman
+      value: 0.5726188878983287
+    - type: euclidean_pearson
+      value: 0.5336549109538782
+    - type: euclidean_spearman
+      value: 0.5726188878983287
+    - type: main_score
+      value: 0.5726188878983287
+    - type: manhattan_pearson
+      value: 0.5306640323833928
+    - type: manhattan_spearman
+      value: 0.5705837935512948
+    task:
+      type: STS
+  - dataset:
+      config: default
+      name: MTEB STSBenchmark (default)
+      revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831
+      split: test
+      type: mteb/stsbenchmark-sts
+    metrics:
+    - type: cosine_pearson
+      value: 0.8143997878838204
+    - type: cosine_spearman
+      value: 0.8204996129795596
+    - type: euclidean_pearson
+      value: 0.8201917849577418
+    - type: euclidean_spearman
+      value: 0.8204996129795596
+    - type: main_score
+      value: 0.8204996129795596
+    - type: manhattan_pearson
+      value: 0.8203487073719467
+    - type: manhattan_spearman
+      value: 0.8203774605775651
+    task:
+      type: STS
+  - dataset:
+      config: default
+      name: MTEB SummEval (default)
+      revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c
+      split: test
+      type: mteb/summeval
+    metrics:
+    - type: cosine_pearson
+      value: 0.32113473699294887
+    - type: cosine_spearman
+      value: 0.32171942337648785
+    - type: dot_pearson
+      value: 0.3211346945795219
+    - type: dot_spearman
+      value: 0.32168802810995234
+    - type: main_score
+      value: 0.32171942337648785
+    - type: pearson
+      value: 0.3211347400449732
+    - type: spearman
+      value: 0.32171942337648785
+    task:
+      type: Summarization
 - name: SentenceTransformer based on sentence-transformers/LaBSE
   results:
   - task:
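
The model-index entries added by this commit follow the layout that the `mteb` library produces when its per-task scores are folded back into the card metadata (note that in every entry above, `main_score` equals `cosine_spearman`). As an illustration only, not part of the commit, here is a minimal sketch of how such scores are typically generated, assuming a recent `mteb` release and the model id taken from the metadata:

```python
# Hedged sketch: reproduce MTEB-style STS/Summarization scores for the model
# named in the added model-index. Task names mirror the datasets listed above.
import mteb
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("Omartificial-Intelligence-Space/Arabic-labse-Matryoshka")

# STS17/STS22 cover several language configs; the card reports their
# ar-ar / ar subsets among the results.
task_names = [
    "BIOSSES", "SICK-R", "STS12", "STS13", "STS14", "STS15", "STS16",
    "STS17", "STS22", "STSBenchmark", "SummEval",
]
tasks = mteb.get_tasks(tasks=task_names)

evaluation = mteb.MTEB(tasks=tasks)
# Writes per-task JSON files containing the cosine/euclidean/manhattan
# Pearson and Spearman correlations shown in the metadata above.
results = evaluation.run(model, output_folder="results")
```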
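
For completeness, the front matter also declares `library_name: sentence-transformers` and `pipeline_tag: sentence-similarity`; a small usage sketch of that pipeline follows. The first sentence is the widget example from the card, the second is an invented paraphrase used only for illustration:

```python
# Hedged sketch of the sentence-similarity pipeline declared in the metadata.
from sentence_transformers import SentenceTransformer
from sentence_transformers.util import cos_sim

model = SentenceTransformer("Omartificial-Intelligence-Space/Arabic-labse-Matryoshka")

sentences = [
    "الشاب نائم بينما الأم تقود ابنتها إلى الحديقة",  # widget example from the card
    "الطفل نائم",  # hypothetical second sentence ("the child is asleep")
]

embeddings = model.encode(sentences)                 # ndarray of shape (2, hidden_dim)
print(cos_sim(embeddings[0], embeddings[1]).item())  # cosine similarity of the pair
```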