metadata
tags:
- sentence-transformers
- sentence-similarity
- feature-extraction
- generated_from_trainer
- dataset_size:18644575
- loss:RZTKMatryoshka2dLoss
base_model: intfloat/multilingual-e5-base
widget:
- source_sentence: 'query: кисточки для макияжа'
sentences:
- 'passage: Парасоля компактна складана Airton Z3510 механіка Чорна'
- 'passage: Корпус FrimeCom LB-081 BL 400W 12cm'
- 'passage: Кисті для макіяжу Kylie 12 шт набір кистей пензлика 12 шт Білі'
- source_sentence: 'query: hg средство'
sentences:
- 'passage: Відеореєстратор Globex GE-115'
- 'passage: Плямовивідник для тканин HG Oxi 0.5 кг (324050106)'
- >-
passage: Мережевий подовжувач MERLION B530, 10А 220В, 5 розеток, 3,0 м,
перетин 3х0,75мм, чорний Q30
- source_sentence: 'query: 471 картридж'
sentences:
- 'passage: Картридж Canon CLI-471 XL PIXMA MG5740/MG6840 Grey (0350C001)'
- 'passage: Ключница Valenta кожаная Синяя (ХК41612)'
- 'passage: Біговели Діаметр коліс 12" (30.5 см)'
- source_sentence: 'query: кольцо'
sentences:
- >-
passage: Сумки SumWin Для кого Для женщин Вид Сумки. Цвет Черный
Количество грузовых мест 1 Модель сумки Кросс-боди Материал
Искусственная кожа Страна регистрации бренда Украина
Страна-производитель товара Китай Тип гарантийного талона Гарантия по
чеку Форма Круглая Доставка Доставка в магазины ROZETKA
- >-
passage: Корпуси Phanteks Форм-фактор материнської плати ATX Тип корпусу
Fulltower Колір Чорний Кількість внутрішніх відсіків 3.5" 13 теги Круті
Матеріал Алюміній
- 'passage: Кольцо с бабочкой "Mini Butterfly", серебро'
- source_sentence: 'query: сумочка женская'
sentences:
- >-
passage: Сумки Без бренда Для кого Для женщин Цвет Черный Стиль
Повседневные Модель сумки Кросс-боди Материал Экокожа Страна регистрации
бренда Украина Страна-производитель товара Китай Количество отделений 3
Форма Трапеция Застежка Магнит
- >-
passage: Пенали Kite Гарантія 14 днів Колір Бірюзовий Стать Для дівчаток
Матеріал Поліестер Кількість відділень 1 Кількість вантажних місць 1
Країна реєстрації бренда Німеччина Країна-виробник товару Китай Вага, г
350 Тип гарантійного талона Гарантія по чеку Особливості З наповненням
Форма Книжка
- >-
passage: Шампунь PROFIStyle Класс косметики Профессиональная Пол Для
женщин Количество грузовых мест 1 Страна регистрации бренда Украина
Серия Profistyle Страна-производитель товара Украина Объем 5 л Тип волос
Все типы волос Назначение Для очищения волос Тип гарантийного талона Без
гарантийного талона Доставка Доставка в магазины ROZETKA Доставка Готов
к отправке
pipeline_tag: sentence-similarity
library_name: sentence-transformers
metrics:
- dot_accuracy@1
- dot_accuracy@3
- dot_accuracy@5
- dot_accuracy@10
- dot_precision@1
- dot_precision@3
- dot_precision@5
- dot_precision@10
- dot_recall@1
- dot_recall@3
- dot_recall@5
- dot_recall@10
- dot_ndcg@10
- dot_mrr@10
- dot_map@100
- dot_ndcg@1
- dot_mrr@1
model-index:
- name: SentenceTransformer based on intfloat/multilingual-e5-base
results:
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: bm full
type: bm-full
metrics:
- type: dot_accuracy@1
value: 0.47841472045293704
name: Dot Accuracy@1
- type: dot_accuracy@3
value: 0.6553432413305025
name: Dot Accuracy@3
- type: dot_accuracy@5
value: 0.7331917905166313
name: Dot Accuracy@5
- type: dot_accuracy@10
value: 0.8283793347487615
name: Dot Accuracy@10
- type: dot_precision@1
value: 0.47841472045293704
name: Dot Precision@1
- type: dot_precision@3
value: 0.4861995753715499
name: Dot Precision@3
- type: dot_precision@5
value: 0.4876150035385704
name: Dot Precision@5
- type: dot_precision@10
value: 0.4910474168435951
name: Dot Precision@10
- type: dot_recall@1
value: 0.011351462591853162
name: Dot Recall@1
- type: dot_recall@3
value: 0.03449117733770484
name: Dot Recall@3
- type: dot_recall@5
value: 0.057669566486942436
name: Dot Recall@5
- type: dot_recall@10
value: 0.11452942341940178
name: Dot Recall@10
- type: dot_ndcg@10
value: 0.48926390789530216
name: Dot Ndcg@10
- type: dot_mrr@10
value: 0.5827980543479477
name: Dot Mrr@10
- type: dot_map@100
value: 0.36280520756352586
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: core uk title
type: core-uk-title
metrics:
- type: dot_accuracy@1
value: 0.6303901437371663
name: Dot Accuracy@1
- type: dot_accuracy@3
value: 0.8542094455852156
name: Dot Accuracy@3
- type: dot_accuracy@5
value: 0.9240246406570842
name: Dot Accuracy@5
- type: dot_accuracy@10
value: 0.973305954825462
name: Dot Accuracy@10
- type: dot_precision@1
value: 0.6303901437371663
name: Dot Precision@1
- type: dot_precision@3
value: 0.6379192334017795
name: Dot Precision@3
- type: dot_precision@5
value: 0.624640657084189
name: Dot Precision@5
- type: dot_precision@10
value: 0.5196098562628337
name: Dot Precision@10
- type: dot_recall@1
value: 0.06432071345934735
name: Dot Recall@1
- type: dot_recall@3
value: 0.1941943610200646
name: Dot Recall@3
- type: dot_recall@5
value: 0.3154921649259734
name: Dot Recall@5
- type: dot_recall@10
value: 0.5135267830369895
name: Dot Recall@10
- type: dot_ndcg@10
value: 0.5824689476221301
name: Dot Ndcg@10
- type: dot_mrr@10
value: 0.7525264007040191
name: Dot Mrr@10
- type: dot_map@100
value: 0.5641261600874217
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: core ru title
type: core-ru-title
metrics:
- type: dot_accuracy@1
value: 0.6416837782340863
name: Dot Accuracy@1
- type: dot_accuracy@3
value: 0.8562628336755647
name: Dot Accuracy@3
- type: dot_accuracy@5
value: 0.9229979466119097
name: Dot Accuracy@5
- type: dot_accuracy@10
value: 0.9691991786447639
name: Dot Accuracy@10
- type: dot_precision@1
value: 0.6416837782340863
name: Dot Precision@1
- type: dot_precision@3
value: 0.6471594798083503
name: Dot Precision@3
- type: dot_precision@5
value: 0.633264887063655
name: Dot Precision@5
- type: dot_precision@10
value: 0.5252566735112937
name: Dot Precision@10
- type: dot_recall@1
value: 0.0656523606676101
name: Dot Recall@1
- type: dot_recall@3
value: 0.19734738384711206
name: Dot Recall@3
- type: dot_recall@5
value: 0.32075535697878377
name: Dot Recall@5
- type: dot_recall@10
value: 0.518985171764795
name: Dot Recall@10
- type: dot_ndcg@10
value: 0.5898234843670869
name: Dot Ndcg@10
- type: dot_mrr@10
value: 0.7593062481666181
name: Dot Mrr@10
- type: dot_map@100
value: 0.5728620912840142
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: core uk options
type: core-uk-options
metrics:
- type: dot_accuracy@1
value: 0.4948665297741273
name: Dot Accuracy@1
- type: dot_accuracy@3
value: 0.7464065708418891
name: Dot Accuracy@3
- type: dot_accuracy@5
value: 0.837782340862423
name: Dot Accuracy@5
- type: dot_accuracy@10
value: 0.9322381930184805
name: Dot Accuracy@10
- type: dot_precision@1
value: 0.4948665297741273
name: Dot Precision@1
- type: dot_precision@3
value: 0.4989733059548255
name: Dot Precision@3
- type: dot_precision@5
value: 0.49507186858316227
name: Dot Precision@5
- type: dot_precision@10
value: 0.45400410677618075
name: Dot Precision@10
- type: dot_recall@1
value: 0.04964465358761168
name: Dot Recall@1
- type: dot_recall@3
value: 0.15084259771646535
name: Dot Recall@3
- type: dot_recall@5
value: 0.24819367614844123
name: Dot Recall@5
- type: dot_recall@10
value: 0.4471062523959915
name: Dot Recall@10
- type: dot_ndcg@10
value: 0.4895423721577878
name: Dot Ndcg@10
- type: dot_mrr@10
value: 0.6382761318079595
name: Dot Mrr@10
- type: dot_map@100
value: 0.49557058138522575
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: core ru options
type: core-ru-options
metrics:
- type: dot_accuracy@1
value: 0.48767967145790553
name: Dot Accuracy@1
- type: dot_accuracy@3
value: 0.7505133470225873
name: Dot Accuracy@3
- type: dot_accuracy@5
value: 0.8367556468172485
name: Dot Accuracy@5
- type: dot_accuracy@10
value: 0.9291581108829569
name: Dot Accuracy@10
- type: dot_precision@1
value: 0.48767967145790553
name: Dot Precision@1
- type: dot_precision@3
value: 0.4986310746064339
name: Dot Precision@3
- type: dot_precision@5
value: 0.4975359342915811
name: Dot Precision@5
- type: dot_precision@10
value: 0.45195071868583164
name: Dot Precision@10
- type: dot_recall@1
value: 0.04851468328413007
name: Dot Recall@1
- type: dot_recall@3
value: 0.14950617034051025
name: Dot Recall@3
- type: dot_recall@5
value: 0.2481739767794847
name: Dot Recall@5
- type: dot_recall@10
value: 0.44488472424288944
name: Dot Recall@10
- type: dot_ndcg@10
value: 0.48827111188574646
name: Dot Ndcg@10
- type: dot_mrr@10
value: 0.6368082037743232
name: Dot Mrr@10
- type: dot_map@100
value: 0.4951823868475039
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: options uk title
type: options-uk-title
metrics:
- type: dot_accuracy@1
value: 0.7572383073496659
name: Dot Accuracy@1
- type: dot_accuracy@3
value: 0.9376391982182628
name: Dot Accuracy@3
- type: dot_accuracy@5
value: 0.9665924276169265
name: Dot Accuracy@5
- type: dot_accuracy@10
value: 0.9933184855233853
name: Dot Accuracy@10
- type: dot_precision@1
value: 0.7572383073496659
name: Dot Precision@1
- type: dot_precision@3
value: 0.7490720118782479
name: Dot Precision@3
- type: dot_precision@5
value: 0.711804008908686
name: Dot Precision@5
- type: dot_precision@10
value: 0.5541202672605791
name: Dot Precision@10
- type: dot_recall@1
value: 0.11543915129661232
name: Dot Recall@1
- type: dot_recall@3
value: 0.34200363482100676
name: Dot Recall@3
- type: dot_recall@5
value: 0.5370094518201423
name: Dot Recall@5
- type: dot_recall@10
value: 0.7924881972766159
name: Dot Recall@10
- type: dot_ndcg@10
value: 0.7650169670738622
name: Dot Ndcg@10
- type: dot_mrr@10
value: 0.850054795489094
name: Dot Mrr@10
- type: dot_map@100
value: 0.7380727317887708
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: options ru title
type: options-ru-title
metrics:
- type: dot_accuracy@1
value: 0.7706013363028953
name: Dot Accuracy@1
- type: dot_accuracy@3
value: 0.9309576837416481
name: Dot Accuracy@3
- type: dot_accuracy@5
value: 0.9665924276169265
name: Dot Accuracy@5
- type: dot_accuracy@10
value: 0.9933184855233853
name: Dot Accuracy@10
- type: dot_precision@1
value: 0.7706013363028953
name: Dot Precision@1
- type: dot_precision@3
value: 0.746844840386043
name: Dot Precision@3
- type: dot_precision@5
value: 0.712249443207127
name: Dot Precision@5
- type: dot_precision@10
value: 0.5505567928730514
name: Dot Precision@10
- type: dot_recall@1
value: 0.11822576705650203
name: Dot Recall@1
- type: dot_recall@3
value: 0.3409996400530922
name: Dot Recall@3
- type: dot_recall@5
value: 0.5359948514736934
name: Dot Recall@5
- type: dot_recall@10
value: 0.7883290428947444
name: Dot Recall@10
- type: dot_ndcg@10
value: 0.7637672715459831
name: Dot Ndcg@10
- type: dot_mrr@10
value: 0.8557897620815217
name: Dot Mrr@10
- type: dot_map@100
value: 0.7395006608870638
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: options uk options
type: options-uk-options
metrics:
- type: dot_accuracy@1
value: 0.6325167037861915
name: Dot Accuracy@1
- type: dot_accuracy@3
value: 0.844097995545657
name: Dot Accuracy@3
- type: dot_accuracy@5
value: 0.910913140311804
name: Dot Accuracy@5
- type: dot_accuracy@10
value: 0.9487750556792873
name: Dot Accuracy@10
- type: dot_precision@1
value: 0.6325167037861915
name: Dot Precision@1
- type: dot_precision@3
value: 0.6221232368225686
name: Dot Precision@3
- type: dot_precision@5
value: 0.602672605790646
name: Dot Precision@5
- type: dot_precision@10
value: 0.49643652561247215
name: Dot Precision@10
- type: dot_recall@1
value: 0.09247718997162206
name: Dot Recall@1
- type: dot_recall@3
value: 0.27593618334152853
name: Dot Recall@3
- type: dot_recall@5
value: 0.4409379348688926
name: Dot Recall@5
- type: dot_recall@10
value: 0.6951361370626404
name: Dot Recall@10
- type: dot_ndcg@10
value: 0.6589801178305443
name: Dot Ndcg@10
- type: dot_mrr@10
value: 0.7480468059532647
name: Dot Mrr@10
- type: dot_map@100
value: 0.6414571076888178
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: options ru options
type: options-ru-options
metrics:
- type: dot_accuracy@1
value: 0.6369710467706013
name: Dot Accuracy@1
- type: dot_accuracy@3
value: 0.8374164810690423
name: Dot Accuracy@3
- type: dot_accuracy@5
value: 0.8930957683741648
name: Dot Accuracy@5
- type: dot_accuracy@10
value: 0.9465478841870824
name: Dot Accuracy@10
- type: dot_precision@1
value: 0.6369710467706013
name: Dot Precision@1
- type: dot_precision@3
value: 0.635486265775798
name: Dot Precision@3
- type: dot_precision@5
value: 0.6071269487750557
name: Dot Precision@5
- type: dot_precision@10
value: 0.4939866369710468
name: Dot Precision@10
- type: dot_recall@1
value: 0.09363496562828412
name: Dot Recall@1
- type: dot_recall@3
value: 0.2826698419126036
name: Dot Recall@3
- type: dot_recall@5
value: 0.44549921743685666
name: Dot Recall@5
- type: dot_recall@10
value: 0.6956133880966844
name: Dot Recall@10
- type: dot_ndcg@10
value: 0.6610343624368801
name: Dot Ndcg@10
- type: dot_mrr@10
value: 0.7464338742178385
name: Dot Mrr@10
- type: dot_map@100
value: 0.6451679716029399
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: rusisms uk title
type: rusisms-uk-title
metrics:
- type: dot_accuracy@1
value: 0.6412698412698413
name: Dot Accuracy@1
- type: dot_accuracy@3
value: 0.7904761904761904
name: Dot Accuracy@3
- type: dot_accuracy@5
value: 0.8285714285714286
name: Dot Accuracy@5
- type: dot_accuracy@10
value: 0.8857142857142857
name: Dot Accuracy@10
- type: dot_precision@1
value: 0.6412698412698413
name: Dot Precision@1
- type: dot_precision@3
value: 0.6634920634920635
name: Dot Precision@3
- type: dot_precision@5
value: 0.6552380952380952
name: Dot Precision@5
- type: dot_precision@10
value: 0.6234920634920637
name: Dot Precision@10
- type: dot_recall@1
value: 0.04133075530701777
name: Dot Recall@1
- type: dot_recall@3
value: 0.1170592846495341
name: Dot Recall@3
- type: dot_recall@5
value: 0.17830553909997546
name: Dot Recall@5
- type: dot_recall@10
value: 0.31008571089134707
name: Dot Recall@10
- type: dot_ndcg@10
value: 0.6647746031552833
name: Dot Ndcg@10
- type: dot_mrr@10
value: 0.7268543713781809
name: Dot Mrr@10
- type: dot_map@100
value: 0.5853570509786064
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: rusisms ru title
type: rusisms-ru-title
metrics:
- type: dot_accuracy@1
value: 0.6698412698412698
name: Dot Accuracy@1
- type: dot_accuracy@3
value: 0.7777777777777778
name: Dot Accuracy@3
- type: dot_accuracy@5
value: 0.834920634920635
name: Dot Accuracy@5
- type: dot_accuracy@10
value: 0.8920634920634921
name: Dot Accuracy@10
- type: dot_precision@1
value: 0.6698412698412698
name: Dot Precision@1
- type: dot_precision@3
value: 0.6645502645502644
name: Dot Precision@3
- type: dot_precision@5
value: 0.6577777777777778
name: Dot Precision@5
- type: dot_precision@10
value: 0.626031746031746
name: Dot Precision@10
- type: dot_recall@1
value: 0.04434422970262397
name: Dot Recall@1
- type: dot_recall@3
value: 0.11704562106444193
name: Dot Recall@3
- type: dot_recall@5
value: 0.1773543477691105
name: Dot Recall@5
- type: dot_recall@10
value: 0.30781876553915866
name: Dot Recall@10
- type: dot_ndcg@10
value: 0.6682830160464889
name: Dot Ndcg@10
- type: dot_mrr@10
value: 0.7408213655832702
name: Dot Mrr@10
- type: dot_map@100
value: 0.592275762720651
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: rusisms uk options
type: rusisms-uk-options
metrics:
- type: dot_accuracy@1
value: 0.5079365079365079
name: Dot Accuracy@1
- type: dot_accuracy@3
value: 0.6285714285714286
name: Dot Accuracy@3
- type: dot_accuracy@5
value: 0.7174603174603175
name: Dot Accuracy@5
- type: dot_accuracy@10
value: 0.7904761904761904
name: Dot Accuracy@10
- type: dot_precision@1
value: 0.5079365079365079
name: Dot Precision@1
- type: dot_precision@3
value: 0.5058201058201058
name: Dot Precision@3
- type: dot_precision@5
value: 0.5161904761904762
name: Dot Precision@5
- type: dot_precision@10
value: 0.5111111111111111
name: Dot Precision@10
- type: dot_recall@1
value: 0.032681042657417864
name: Dot Recall@1
- type: dot_recall@3
value: 0.08490359132017175
name: Dot Recall@3
- type: dot_recall@5
value: 0.14131766688622155
name: Dot Recall@5
- type: dot_recall@10
value: 0.25180769140267506
name: Dot Recall@10
- type: dot_ndcg@10
value: 0.5372281406420663
name: Dot Ndcg@10
- type: dot_mrr@10
value: 0.5902317964222725
name: Dot Mrr@10
- type: dot_map@100
value: 0.4986827236995346
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: rusisms ru options
type: rusisms-ru-options
metrics:
- type: dot_accuracy@1
value: 0.4984126984126984
name: Dot Accuracy@1
- type: dot_accuracy@3
value: 0.6476190476190476
name: Dot Accuracy@3
- type: dot_accuracy@5
value: 0.7492063492063492
name: Dot Accuracy@5
- type: dot_accuracy@10
value: 0.8063492063492064
name: Dot Accuracy@10
- type: dot_precision@1
value: 0.4984126984126984
name: Dot Precision@1
- type: dot_precision@3
value: 0.5195767195767197
name: Dot Precision@3
- type: dot_precision@5
value: 0.5276190476190475
name: Dot Precision@5
- type: dot_precision@10
value: 0.5177777777777778
name: Dot Precision@10
- type: dot_recall@1
value: 0.03202754604702237
name: Dot Recall@1
- type: dot_recall@3
value: 0.08547386088540315
name: Dot Recall@3
- type: dot_recall@5
value: 0.14166544702843223
name: Dot Recall@5
- type: dot_recall@10
value: 0.25453386918000204
name: Dot Recall@10
- type: dot_ndcg@10
value: 0.5433437983757069
name: Dot Ndcg@10
- type: dot_mrr@10
value: 0.5972423784328544
name: Dot Mrr@10
- type: dot_map@100
value: 0.5053350497126974
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: rusisms corrected uk title
type: rusisms_corrected-uk-title
metrics:
- type: dot_accuracy@1
value: 0.7120253164556962
name: Dot Accuracy@1
- type: dot_accuracy@3
value: 0.819620253164557
name: Dot Accuracy@3
- type: dot_accuracy@5
value: 0.870253164556962
name: Dot Accuracy@5
- type: dot_accuracy@10
value: 0.9113924050632911
name: Dot Accuracy@10
- type: dot_precision@1
value: 0.7120253164556962
name: Dot Precision@1
- type: dot_precision@3
value: 0.7183544303797469
name: Dot Precision@3
- type: dot_precision@5
value: 0.7082278481012658
name: Dot Precision@5
- type: dot_precision@10
value: 0.6683544303797468
name: Dot Precision@10
- type: dot_recall@1
value: 0.04969812837853694
name: Dot Recall@1
- type: dot_recall@3
value: 0.12741782983341862
name: Dot Recall@3
- type: dot_recall@5
value: 0.19731659021523865
name: Dot Recall@5
- type: dot_recall@10
value: 0.3409923887206817
name: Dot Recall@10
- type: dot_ndcg@10
value: 0.7194597470114055
name: Dot Ndcg@10
- type: dot_mrr@10
value: 0.7808808016877635
name: Dot Mrr@10
- type: dot_map@100
value: 0.6388697195804478
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: rusisms corrected ru title
type: rusisms_corrected-ru-title
metrics:
- type: dot_accuracy@1
value: 0.7088607594936709
name: Dot Accuracy@1
- type: dot_accuracy@3
value: 0.8291139240506329
name: Dot Accuracy@3
- type: dot_accuracy@5
value: 0.870253164556962
name: Dot Accuracy@5
- type: dot_accuracy@10
value: 0.9113924050632911
name: Dot Accuracy@10
- type: dot_precision@1
value: 0.7088607594936709
name: Dot Precision@1
- type: dot_precision@3
value: 0.7162447257383966
name: Dot Precision@3
- type: dot_precision@5
value: 0.7037974683544304
name: Dot Precision@5
- type: dot_precision@10
value: 0.6629746835443039
name: Dot Precision@10
- type: dot_recall@1
value: 0.04888449873376417
name: Dot Recall@1
- type: dot_recall@3
value: 0.12829212684740135
name: Dot Recall@3
- type: dot_recall@5
value: 0.19987613332490306
name: Dot Recall@5
- type: dot_recall@10
value: 0.3356192325461046
name: Dot Recall@10
- type: dot_ndcg@10
value: 0.7127299992462074
name: Dot Ndcg@10
- type: dot_mrr@10
value: 0.7792708961221617
name: Dot Mrr@10
- type: dot_map@100
value: 0.6395009156047453
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: rusisms corrected uk options
type: rusisms_corrected-uk-options
metrics:
- type: dot_accuracy@1
value: 0.5537974683544303
name: Dot Accuracy@1
- type: dot_accuracy@3
value: 0.7278481012658228
name: Dot Accuracy@3
- type: dot_accuracy@5
value: 0.7848101265822784
name: Dot Accuracy@5
- type: dot_accuracy@10
value: 0.8544303797468354
name: Dot Accuracy@10
- type: dot_precision@1
value: 0.5537974683544303
name: Dot Precision@1
- type: dot_precision@3
value: 0.5738396624472574
name: Dot Precision@3
- type: dot_precision@5
value: 0.5759493670886076
name: Dot Precision@5
- type: dot_precision@10
value: 0.5645569620253166
name: Dot Precision@10
- type: dot_recall@1
value: 0.0398666442289475
name: Dot Recall@1
- type: dot_recall@3
value: 0.10622990404547561
name: Dot Recall@3
- type: dot_recall@5
value: 0.16577310801842357
name: Dot Recall@5
- type: dot_recall@10
value: 0.28425407965410443
name: Dot Recall@10
- type: dot_ndcg@10
value: 0.5965577106105705
name: Dot Ndcg@10
- type: dot_mrr@10
value: 0.6513147980711272
name: Dot Mrr@10
- type: dot_map@100
value: 0.5594863722365065
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: rusisms corrected ru options
type: rusisms_corrected-ru-options
metrics:
- type: dot_accuracy@1
value: 0.5727848101265823
name: Dot Accuracy@1
- type: dot_accuracy@3
value: 0.7215189873417721
name: Dot Accuracy@3
- type: dot_accuracy@5
value: 0.7848101265822784
name: Dot Accuracy@5
- type: dot_accuracy@10
value: 0.8607594936708861
name: Dot Accuracy@10
- type: dot_precision@1
value: 0.5727848101265823
name: Dot Precision@1
- type: dot_precision@3
value: 0.5822784810126582
name: Dot Precision@3
- type: dot_precision@5
value: 0.5810126582278481
name: Dot Precision@5
- type: dot_precision@10
value: 0.5655063291139241
name: Dot Precision@10
- type: dot_recall@1
value: 0.039125841492212286
name: Dot Recall@1
- type: dot_recall@3
value: 0.10198185350545384
name: Dot Recall@3
- type: dot_recall@5
value: 0.16299351965480724
name: Dot Recall@5
- type: dot_recall@10
value: 0.28079825575547895
name: Dot Recall@10
- type: dot_ndcg@10
value: 0.5986535833917944
name: Dot Ndcg@10
- type: dot_mrr@10
value: 0.6613936608398633
name: Dot Mrr@10
- type: dot_map@100
value: 0.5634230712770681
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: core typos uk title
type: core_typos-uk-title
metrics:
- type: dot_accuracy@1
value: 0.5451745379876797
name: Dot Accuracy@1
- type: dot_accuracy@3
value: 0.7874743326488707
name: Dot Accuracy@3
- type: dot_accuracy@5
value: 0.8490759753593429
name: Dot Accuracy@5
- type: dot_accuracy@10
value: 0.9117043121149897
name: Dot Accuracy@10
- type: dot_precision@1
value: 0.5451745379876797
name: Dot Precision@1
- type: dot_precision@3
value: 0.5492813141683778
name: Dot Precision@3
- type: dot_precision@5
value: 0.5277207392197125
name: Dot Precision@5
- type: dot_precision@10
value: 0.4458932238193019
name: Dot Precision@10
- type: dot_recall@1
value: 0.055501817487715537
name: Dot Recall@1
- type: dot_recall@3
value: 0.16799075803409055
name: Dot Recall@3
- type: dot_recall@5
value: 0.26738880088922357
name: Dot Recall@5
- type: dot_recall@10
value: 0.4420220810379646
name: Dot Recall@10
- type: dot_ndcg@10
value: 0.5009599181825904
name: Dot Ndcg@10
- type: dot_mrr@10
value: 0.6744923568332187
name: Dot Mrr@10
- type: dot_map@100
value: 0.4756860853091724
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: core typos ru title
type: core_typos-ru-title
metrics:
- type: dot_accuracy@1
value: 0.5677618069815195
name: Dot Accuracy@1
- type: dot_accuracy@3
value: 0.7802874743326489
name: Dot Accuracy@3
- type: dot_accuracy@5
value: 0.8490759753593429
name: Dot Accuracy@5
- type: dot_accuracy@10
value: 0.9075975359342916
name: Dot Accuracy@10
- type: dot_precision@1
value: 0.5677618069815195
name: Dot Precision@1
- type: dot_precision@3
value: 0.553388090349076
name: Dot Precision@3
- type: dot_precision@5
value: 0.535523613963039
name: Dot Precision@5
- type: dot_precision@10
value: 0.4521560574948666
name: Dot Precision@10
- type: dot_recall@1
value: 0.05788962617685893
name: Dot Recall@1
- type: dot_recall@3
value: 0.16991013827739276
name: Dot Recall@3
- type: dot_recall@5
value: 0.2724072401441347
name: Dot Recall@5
- type: dot_recall@10
value: 0.4493398516770904
name: Dot Recall@10
- type: dot_ndcg@10
value: 0.5093051901273817
name: Dot Ndcg@10
- type: dot_mrr@10
value: 0.6864704540269218
name: Dot Mrr@10
- type: dot_map@100
value: 0.48302575127668773
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: core typos uk options
type: core_typos-uk-options
metrics:
- type: dot_accuracy@1
value: 0.42505133470225875
name: Dot Accuracy@1
- type: dot_accuracy@3
value: 0.6478439425051334
name: Dot Accuracy@3
- type: dot_accuracy@5
value: 0.7433264887063655
name: Dot Accuracy@5
- type: dot_accuracy@10
value: 0.8429158110882957
name: Dot Accuracy@10
- type: dot_precision@1
value: 0.42505133470225875
name: Dot Precision@1
- type: dot_precision@3
value: 0.4182067077344285
name: Dot Precision@3
- type: dot_precision@5
value: 0.41704312114989733
name: Dot Precision@5
- type: dot_precision@10
value: 0.37997946611909655
name: Dot Precision@10
- type: dot_recall@1
value: 0.04281943186324754
name: Dot Recall@1
- type: dot_recall@3
value: 0.12630907755998344
name: Dot Recall@3
- type: dot_recall@5
value: 0.20887045080712793
name: Dot Recall@5
- type: dot_recall@10
value: 0.37542821461999504
name: Dot Recall@10
- type: dot_ndcg@10
value: 0.41173177598646793
name: Dot Ndcg@10
- type: dot_mrr@10
value: 0.5581076236107032
name: Dot Mrr@10
- type: dot_map@100
value: 0.40947735972105836
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: core typos ru options
type: core_typos-ru-options
metrics:
- type: dot_accuracy@1
value: 0.42299794661190965
name: Dot Accuracy@1
- type: dot_accuracy@3
value: 0.6509240246406571
name: Dot Accuracy@3
- type: dot_accuracy@5
value: 0.7464065708418891
name: Dot Accuracy@5
- type: dot_accuracy@10
value: 0.8459958932238193
name: Dot Accuracy@10
- type: dot_precision@1
value: 0.42299794661190965
name: Dot Precision@1
- type: dot_precision@3
value: 0.42231348391512663
name: Dot Precision@3
- type: dot_precision@5
value: 0.41581108829568786
name: Dot Precision@5
- type: dot_precision@10
value: 0.376694045174538
name: Dot Precision@10
- type: dot_recall@1
value: 0.04208886721644905
name: Dot Recall@1
- type: dot_recall@3
value: 0.12759403691355015
name: Dot Recall@3
- type: dot_recall@5
value: 0.2076836983626753
name: Dot Recall@5
- type: dot_recall@10
value: 0.37292863983662994
name: Dot Recall@10
- type: dot_ndcg@10
value: 0.4101083135252108
name: Dot Ndcg@10
- type: dot_mrr@10
value: 0.5588503471203672
name: Dot Mrr@10
- type: dot_map@100
value: 0.40851869049399947
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: 'bm full matryoshka dim 768 '
type: bm-full--matryoshka_dim-768--
metrics:
- type: dot_accuracy@1
value: 0.47841472045293704
name: Dot Accuracy@1
- type: dot_precision@1
value: 0.47841472045293704
name: Dot Precision@1
- type: dot_recall@1
value: 0.011351462591853162
name: Dot Recall@1
- type: dot_ndcg@1
value: 0.47841472045293704
name: Dot Ndcg@1
- type: dot_mrr@1
value: 0.47841472045293704
name: Dot Mrr@1
- type: dot_map@100
value: 0.36280520756352586
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: 'bm full matryoshka dim 512 '
type: bm-full--matryoshka_dim-512--
metrics:
- type: dot_accuracy@1
value: 0.4759377211606511
name: Dot Accuracy@1
- type: dot_precision@1
value: 0.4759377211606511
name: Dot Precision@1
- type: dot_recall@1
value: 0.0114070381458067
name: Dot Recall@1
- type: dot_ndcg@1
value: 0.4759377211606511
name: Dot Ndcg@1
- type: dot_mrr@1
value: 0.4759377211606511
name: Dot Mrr@1
- type: dot_map@100
value: 0.36005063767775514
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: 'bm full matryoshka dim 256 '
type: bm-full--matryoshka_dim-256--
metrics:
- type: dot_accuracy@1
value: 0.4759377211606511
name: Dot Accuracy@1
- type: dot_precision@1
value: 0.4759377211606511
name: Dot Precision@1
- type: dot_recall@1
value: 0.011372889899440053
name: Dot Recall@1
- type: dot_ndcg@1
value: 0.4759377211606511
name: Dot Ndcg@1
- type: dot_mrr@1
value: 0.4759377211606511
name: Dot Mrr@1
- type: dot_map@100
value: 0.3488370117998616
name: Dot Map@100
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: 'bm full matryoshka dim 128 '
type: bm-full--matryoshka_dim-128--
metrics:
- type: dot_accuracy@1
value: 0.45222929936305734
name: Dot Accuracy@1
- type: dot_precision@1
value: 0.45222929936305734
name: Dot Precision@1
- type: dot_recall@1
value: 0.010638577599638174
name: Dot Recall@1
- type: dot_ndcg@1
value: 0.45222929936305734
name: Dot Ndcg@1
- type: dot_mrr@1
value: 0.45222929936305734
name: Dot Mrr@1
- type: dot_map@100
value: 0.32466551163194907
name: Dot Map@100
SentenceTransformer based on intfloat/multilingual-e5-base
This is a sentence-transformers model finetuned from intfloat/multilingual-e5-base on the rozetka_positive_pairs dataset. It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
Model Details
Model Description
- Model Type: Sentence Transformer
- Base model: intfloat/multilingual-e5-base
- Maximum Sequence Length: 512 tokens
- Output Dimensionality: 768 dimensions
- Similarity Function: Dot Product
- Training Dataset:
- rozetka_positive_pairs
Model Sources
- Documentation: Sentence Transformers Documentation
- Repository: Sentence Transformers on GitHub
- Hugging Face: Sentence Transformers on Hugging Face
Full Model Architecture
RZTKSentenceTransformer(
(0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: XLMRobertaModel
(1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
(2): Normalize()
)
Usage
Direct Usage (Sentence Transformers)
First install the Sentence Transformers library:
pip install -U sentence-transformers
Then you can load this model and run inference.
from sentence_transformers import SentenceTransformer
# Download from the 🤗 Hub
model = SentenceTransformer("yklymchuk-rztk/multilingual-e5-base-matryoshka2d-mnr-4-continue")
# Run inference
sentences = [
'query: сумочка женская',
'passage: Сумки Без бренда Для кого Для женщин Цвет Черный Стиль Повседневные Модель сумки Кросс-боди Материал Экокожа Страна регистрации бренда Украина Страна-производитель товара Китай Количество отделений 3 Форма Трапеция Застежка Магнит',
'passage: Пенали Kite Гарантія 14 днів Колір Бірюзовий Стать Для дівчаток Матеріал Поліестер Кількість відділень 1 Кількість вантажних місць 1 Країна реєстрації бренда Німеччина Країна-виробник товару Китай Вага, г 350 Тип гарантійного талона Гарантія по чеку Особливості З наповненням Форма Книжка',
]
embeddings = model.encode(sentences)
print(embeddings.shape)
# [3, 768]
# Get the similarity scores for the embeddings
similarities = model.similarity(embeddings, embeddings)
print(similarities.shape)
# [3, 3]
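Since both queries and passages were embedded with E5-style prefixes during training, retrieval works best when queries carry the query: prefix and documents the passage: prefix. A minimal ranking sketch under that assumption, reusing illustrative product titles from the examples above:
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("yklymchuk-rztk/multilingual-e5-base-matryoshka2d-mnr-4-continue")

# E5-style prefixes: "query: " for search queries, "passage: " for documents
query = "query: кисточки для макияжа"
passages = [
    "passage: Кисті для макіяжу Kylie 12 шт набір кистей пензлика 12 шт Білі",
    "passage: Корпус FrimeCom LB-081 BL 400W 12cm",
    "passage: Відеореєстратор Globex GE-115",
]

query_emb = model.encode(query)
passage_embs = model.encode(passages)

# The final Normalize() module L2-normalizes embeddings, so these
# dot-product scores behave like cosine similarities.
scores = model.similarity(query_emb, passage_embs)  # shape: [1, 3]
for passage, score in sorted(zip(passages, scores[0].tolist()), key=lambda x: -x[1]):
    print(f"{score:.4f}  {passage}")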
Evaluation
Metrics
Information Retrieval
- Datasets: bm-full, core-uk-title, core-ru-title, core-uk-options, core-ru-options, options-uk-title, options-ru-title, options-uk-options, options-ru-options, rusisms-uk-title, rusisms-ru-title, rusisms-uk-options, rusisms-ru-options, rusisms_corrected-uk-title, rusisms_corrected-ru-title, rusisms_corrected-uk-options, rusisms_corrected-ru-options, core_typos-uk-title, core_typos-ru-title, core_typos-uk-options and core_typos-ru-options
- Evaluated with InformationRetrievalEvaluator
Metric | bm-full | core-uk-title | core-ru-title | core-uk-options | core-ru-options | options-uk-title | options-ru-title | options-uk-options | options-ru-options | rusisms-uk-title | rusisms-ru-title | rusisms-uk-options | rusisms-ru-options | rusisms_corrected-uk-title | rusisms_corrected-ru-title | rusisms_corrected-uk-options | rusisms_corrected-ru-options | core_typos-uk-title | core_typos-ru-title | core_typos-uk-options | core_typos-ru-options |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
dot_accuracy@1 | 0.4784 | 0.6304 | 0.6417 | 0.4949 | 0.4877 | 0.7572 | 0.7706 | 0.6325 | 0.637 | 0.6413 | 0.6698 | 0.5079 | 0.4984 | 0.712 | 0.7089 | 0.5538 | 0.5728 | 0.5452 | 0.5678 | 0.4251 | 0.423 |
dot_accuracy@3 | 0.6553 | 0.8542 | 0.8563 | 0.7464 | 0.7505 | 0.9376 | 0.931 | 0.8441 | 0.8374 | 0.7905 | 0.7778 | 0.6286 | 0.6476 | 0.8196 | 0.8291 | 0.7278 | 0.7215 | 0.7875 | 0.7803 | 0.6478 | 0.6509 |
dot_accuracy@5 | 0.7332 | 0.924 | 0.923 | 0.8378 | 0.8368 | 0.9666 | 0.9666 | 0.9109 | 0.8931 | 0.8286 | 0.8349 | 0.7175 | 0.7492 | 0.8703 | 0.8703 | 0.7848 | 0.7848 | 0.8491 | 0.8491 | 0.7433 | 0.7464 |
dot_accuracy@10 | 0.8284 | 0.9733 | 0.9692 | 0.9322 | 0.9292 | 0.9933 | 0.9933 | 0.9488 | 0.9465 | 0.8857 | 0.8921 | 0.7905 | 0.8063 | 0.9114 | 0.9114 | 0.8544 | 0.8608 | 0.9117 | 0.9076 | 0.8429 | 0.846 |
dot_precision@1 | 0.4784 | 0.6304 | 0.6417 | 0.4949 | 0.4877 | 0.7572 | 0.7706 | 0.6325 | 0.637 | 0.6413 | 0.6698 | 0.5079 | 0.4984 | 0.712 | 0.7089 | 0.5538 | 0.5728 | 0.5452 | 0.5678 | 0.4251 | 0.423 |
dot_precision@3 | 0.4862 | 0.6379 | 0.6472 | 0.499 | 0.4986 | 0.7491 | 0.7468 | 0.6221 | 0.6355 | 0.6635 | 0.6646 | 0.5058 | 0.5196 | 0.7184 | 0.7162 | 0.5738 | 0.5823 | 0.5493 | 0.5534 | 0.4182 | 0.4223 |
dot_precision@5 | 0.4876 | 0.6246 | 0.6333 | 0.4951 | 0.4975 | 0.7118 | 0.7122 | 0.6027 | 0.6071 | 0.6552 | 0.6578 | 0.5162 | 0.5276 | 0.7082 | 0.7038 | 0.5759 | 0.581 | 0.5277 | 0.5355 | 0.417 | 0.4158 |
dot_precision@10 | 0.491 | 0.5196 | 0.5253 | 0.454 | 0.452 | 0.5541 | 0.5506 | 0.4964 | 0.494 | 0.6235 | 0.626 | 0.5111 | 0.5178 | 0.6684 | 0.663 | 0.5646 | 0.5655 | 0.4459 | 0.4522 | 0.38 | 0.3767 |
dot_recall@1 | 0.0114 | 0.0643 | 0.0657 | 0.0496 | 0.0485 | 0.1154 | 0.1182 | 0.0925 | 0.0936 | 0.0413 | 0.0443 | 0.0327 | 0.032 | 0.0497 | 0.0489 | 0.0399 | 0.0391 | 0.0555 | 0.0579 | 0.0428 | 0.0421 |
dot_recall@3 | 0.0345 | 0.1942 | 0.1973 | 0.1508 | 0.1495 | 0.342 | 0.341 | 0.2759 | 0.2827 | 0.1171 | 0.117 | 0.0849 | 0.0855 | 0.1274 | 0.1283 | 0.1062 | 0.102 | 0.168 | 0.1699 | 0.1263 | 0.1276 |
dot_recall@5 | 0.0577 | 0.3155 | 0.3208 | 0.2482 | 0.2482 | 0.537 | 0.536 | 0.4409 | 0.4455 | 0.1783 | 0.1774 | 0.1413 | 0.1417 | 0.1973 | 0.1999 | 0.1658 | 0.163 | 0.2674 | 0.2724 | 0.2089 | 0.2077 |
dot_recall@10 | 0.1145 | 0.5135 | 0.519 | 0.4471 | 0.4449 | 0.7925 | 0.7883 | 0.6951 | 0.6956 | 0.3101 | 0.3078 | 0.2518 | 0.2545 | 0.341 | 0.3356 | 0.2843 | 0.2808 | 0.442 | 0.4493 | 0.3754 | 0.3729 |
dot_ndcg@10 | 0.4893 | 0.5825 | 0.5898 | 0.4895 | 0.4883 | 0.765 | 0.7638 | 0.659 | 0.661 | 0.6648 | 0.6683 | 0.5372 | 0.5433 | 0.7195 | 0.7127 | 0.5966 | 0.5987 | 0.501 | 0.5093 | 0.4117 | 0.4101 |
dot_mrr@10 | 0.5828 | 0.7525 | 0.7593 | 0.6383 | 0.6368 | 0.8501 | 0.8558 | 0.748 | 0.7464 | 0.7269 | 0.7408 | 0.5902 | 0.5972 | 0.7809 | 0.7793 | 0.6513 | 0.6614 | 0.6745 | 0.6865 | 0.5581 | 0.5589 |
dot_map@100 | 0.3628 | 0.5641 | 0.5729 | 0.4956 | 0.4952 | 0.7381 | 0.7395 | 0.6415 | 0.6452 | 0.5854 | 0.5923 | 0.4987 | 0.5053 | 0.6389 | 0.6395 | 0.5595 | 0.5634 | 0.4757 | 0.483 | 0.4095 | 0.4085 |
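The metrics above come from Sentence Transformers' InformationRetrievalEvaluator. A minimal sketch of how a comparable evaluation can be wired up (the toy queries, corpus, and relevance judgments below are placeholders, not the actual benchmark data):
from sentence_transformers import SentenceTransformer
from sentence_transformers.evaluation import InformationRetrievalEvaluator

model = SentenceTransformer("yklymchuk-rztk/multilingual-e5-base-matryoshka2d-mnr-4-continue")

# Toy example: query/document IDs mapped to texts, plus relevance judgments.
queries = {"q1": "query: кисточки для макияжа"}
corpus = {
    "d1": "passage: Кисті для макіяжу Kylie 12 шт набір кистей пензлика 12 шт Білі",
    "d2": "passage: Корпус FrimeCom LB-081 BL 400W 12cm",
    "d3": "passage: Відеореєстратор Globex GE-115",
}
relevant_docs = {"q1": {"d1"}}

evaluator = InformationRetrievalEvaluator(
    queries=queries,
    corpus=corpus,
    relevant_docs=relevant_docs,
    name="toy-ir-eval",
)
results = evaluator(model)
print(results)  # accuracy@k, precision@k, recall@k, ndcg@10, mrr@10, map@100, ...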
Information Retrieval
- Datasets: bm-full--matryoshka_dim-768--, bm-full--matryoshka_dim-512--, bm-full--matryoshka_dim-256-- and bm-full--matryoshka_dim-128--
- Evaluated with InformationRetrievalEvaluator
Metric | bm-full--matryoshka_dim-768-- | bm-full--matryoshka_dim-512-- | bm-full--matryoshka_dim-256-- | bm-full--matryoshka_dim-128-- |
---|---|---|---|---|
dot_accuracy@1 | 0.4784 | 0.4759 | 0.4759 | 0.4522 |
dot_precision@1 | 0.4784 | 0.4759 | 0.4759 | 0.4522 |
dot_recall@1 | 0.0114 | 0.0114 | 0.0114 | 0.0106 |
dot_ndcg@1 | 0.4784 | 0.4759 | 0.4759 | 0.4522 |
dot_mrr@1 | 0.4784 | 0.4759 | 0.4759 | 0.4522 |
dot_map@100 | 0.3628 | 0.3601 | 0.3488 | 0.3247 |
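Because of the Matryoshka objective, the first 512, 256, or 128 dimensions of each embedding remain usable on their own, with the modest quality drop shown above. A hedged sketch of encoding at a reduced dimension via the truncate_dim argument of SentenceTransformer:
from sentence_transformers import SentenceTransformer

# Truncate output embeddings to the first 256 Matryoshka dimensions.
model = SentenceTransformer(
    "yklymchuk-rztk/multilingual-e5-base-matryoshka2d-mnr-4-continue",
    truncate_dim=256,
)

embeddings = model.encode([
    "query: сумочка женская",
    "passage: Сумки Без бренда Для кого Для женщин Цвет Черный",
])
print(embeddings.shape)  # (2, 256)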
Training Details
Training Dataset
rozetka_positive_pairs
- Dataset: rozetka_positive_pairs
- Size: 18,644,575 training samples
- Columns: query and text
- Approximate statistics based on the first 1000 samples:
 | query | text |
---|---|---|
type | string | string |
details | min: 6 tokens, mean: 12.04 tokens, max: 30 tokens | min: 8 tokens, mean: 55.98 tokens, max: 512 tokens |
- Samples:
query | text |
---|---|
query: xsiomi 9c скло | passage: Защитные стекла Назначение Для мобильных телефонов Цвет Черный Теги Теги Наличие рамки C рамкой Форм-фактор Плоское Клеевой слой По всей поверхности |
query: xsiomi 9c скло | passage: Захисне скло Glass Full Glue для Xiaomi Redmi 9A/9C/10A (Чорний) |
query: xsiomi 9c скло | passage: Захисне скло Призначення Для мобільних телефонів Колір Чорний Теги Теги Наявність рамки З рамкою Форм-фактор Плоске Клейовий шар По всій поверхні |
- Loss: sentence_transformers_training.model.matryoshka2d_loss.RZTKMatryoshka2dLoss with these parameters:
{
    "loss": "RZTKMultipleNegativesRankingLoss",
    "n_layers_per_step": 1,
    "last_layer_weight": 1.0,
    "prior_layers_weight": 1.0,
    "kl_div_weight": 1.0,
    "kl_temperature": 0.3,
    "matryoshka_dims": [768, 512, 256, 128],
    "matryoshka_weights": [1, 1, 1, 1],
    "n_dims_per_step": 1
}
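RZTKMatryoshka2dLoss is a project-specific loss; its parameters mirror the public sentence_transformers.losses.Matryoshka2dLoss, which wraps a MultipleNegativesRankingLoss with adaptive-layer and Matryoshka objectives. A rough sketch of the analogous setup with the public API (an approximation, not the exact training code used here):
from sentence_transformers import SentenceTransformer
from sentence_transformers.losses import Matryoshka2dLoss, MultipleNegativesRankingLoss

model = SentenceTransformer("intfloat/multilingual-e5-base")

# In-batch negatives ranking loss, wrapped so that truncated dimensions
# (768/512/256/128) and intermediate layers are all trained to rank well.
base_loss = MultipleNegativesRankingLoss(model)
loss = Matryoshka2dLoss(
    model,
    base_loss,
    matryoshka_dims=[768, 512, 256, 128],
    matryoshka_weights=[1, 1, 1, 1],
    n_layers_per_step=1,
    n_dims_per_step=1,
    last_layer_weight=1.0,
    prior_layers_weight=1.0,
    kl_div_weight=1.0,
    kl_temperature=0.3,
)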
Evaluation Dataset
rozetka_positive_pairs
- Dataset: rozetka_positive_pairs
- Size: 202,564 evaluation samples
- Columns: query and text
- Approximate statistics based on the first 1000 samples:
 | query | text |
---|---|---|
type | string | string |
details | min: 6 tokens, mean: 8.57 tokens, max: 17 tokens | min: 8 tokens, mean: 53.17 tokens, max: 512 tokens |
- Samples:
query | text |
---|---|
query: создаем нейронную сеть | passage: Створюємо нейронну мережу |
query: создаем нейронную сеть | passage: Научная и техническая литература Переплет Мягкий |
query: создаем нейронную сеть | passage: Создаем нейронную сеть (1666498) |
- Loss: sentence_transformers_training.model.matryoshka2d_loss.RZTKMatryoshka2dLoss with these parameters:
{
    "loss": "RZTKMultipleNegativesRankingLoss",
    "n_layers_per_step": 1,
    "last_layer_weight": 1.0,
    "prior_layers_weight": 1.0,
    "kl_div_weight": 1.0,
    "kl_temperature": 0.3,
    "matryoshka_dims": [768, 512, 256, 128],
    "matryoshka_weights": [1, 1, 1, 1],
    "n_dims_per_step": 1
}
Training Hyperparameters
Non-Default Hyperparameters
- eval_strategy: steps
- per_device_train_batch_size: 88
- per_device_eval_batch_size: 88
- learning_rate: 2e-05
- num_train_epochs: 5.0
- warmup_ratio: 0.1
- bf16: True
- bf16_full_eval: True
- tf32: True
- dataloader_num_workers: 8
- load_best_model_at_end: True
- optim: adafactor
- push_to_hub: True
- hub_model_id: yklymchuk-rztk/multilingual-e5-base-matryoshka2d-mnr-4-continue
- hub_private_repo: True
- prompts: {'query': 'query: ', 'text': 'passage: '}
- batch_sampler: no_duplicates
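These settings map onto Sentence Transformers' SentenceTransformerTrainingArguments. A hedged sketch of expressing them with the public API (output_dir is a placeholder; unlisted arguments keep their defaults):
from sentence_transformers.training_args import (
    BatchSamplers,
    SentenceTransformerTrainingArguments,
)

# Placeholder output_dir; the remaining values mirror the non-default
# hyperparameters listed above.
args = SentenceTransformerTrainingArguments(
    output_dir="outputs/multilingual-e5-base-matryoshka2d-mnr-4-continue",
    eval_strategy="steps",
    per_device_train_batch_size=88,
    per_device_eval_batch_size=88,
    learning_rate=2e-5,
    num_train_epochs=5.0,
    warmup_ratio=0.1,
    bf16=True,
    bf16_full_eval=True,
    tf32=True,
    dataloader_num_workers=8,
    load_best_model_at_end=True,
    optim="adafactor",
    push_to_hub=True,
    hub_model_id="yklymchuk-rztk/multilingual-e5-base-matryoshka2d-mnr-4-continue",
    hub_private_repo=True,
    prompts={"query": "query: ", "text": "passage: "},
    batch_sampler=BatchSamplers.NO_DUPLICATES,
)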
All Hyperparameters
- overwrite_output_dir: False
- do_predict: False
- eval_strategy: steps
- prediction_loss_only: True
- per_device_train_batch_size: 88
- per_device_eval_batch_size: 88
- per_gpu_train_batch_size: None
- per_gpu_eval_batch_size: None
- gradient_accumulation_steps: 1
- eval_accumulation_steps: None
- torch_empty_cache_steps: None
- learning_rate: 2e-05
- weight_decay: 0.0
- adam_beta1: 0.9
- adam_beta2: 0.999
- adam_epsilon: 1e-08
- max_grad_norm: 1.0
- num_train_epochs: 5.0
- max_steps: -1
- lr_scheduler_type: linear
- lr_scheduler_kwargs: {}
- warmup_ratio: 0.1
- warmup_steps: 0
- log_level: passive
- log_level_replica: warning
- log_on_each_node: True
- logging_nan_inf_filter: True
- save_safetensors: True
- save_on_each_node: False
- save_only_model: False
- restore_callback_states_from_checkpoint: False
- no_cuda: False
- use_cpu: False
- use_mps_device: False
- seed: 42
- data_seed: None
- jit_mode_eval: False
- use_ipex: False
- bf16: True
- fp16: False
- fp16_opt_level: O1
- half_precision_backend: auto
- bf16_full_eval: True
- fp16_full_eval: False
- tf32: True
- local_rank: 0
- ddp_backend: None
- tpu_num_cores: None
- tpu_metrics_debug: False
- debug: []
- dataloader_drop_last: True
- dataloader_num_workers: 8
- dataloader_prefetch_factor: None
- past_index: -1
- disable_tqdm: False
- remove_unused_columns: True
- label_names: None
- load_best_model_at_end: True
- ignore_data_skip: False
- fsdp: []
- fsdp_min_num_params: 0
- fsdp_config: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
- fsdp_transformer_layer_cls_to_wrap: None
- accelerator_config: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
- deepspeed: None
- label_smoothing_factor: 0.0
- optim: adafactor
- optim_args: None
- adafactor: False
- group_by_length: False
- length_column_name: length
- ddp_find_unused_parameters: None
- ddp_bucket_cap_mb: None
- ddp_broadcast_buffers: False
- dataloader_pin_memory: True
- dataloader_persistent_workers: False
- skip_memory_metrics: True
- use_legacy_prediction_loop: False
- push_to_hub: True
- resume_from_checkpoint: None
- hub_model_id: yklymchuk-rztk/multilingual-e5-base-matryoshka2d-mnr-4-continue
- hub_strategy: every_save
- hub_private_repo: True
- hub_always_push: False
- gradient_checkpointing: False
- gradient_checkpointing_kwargs: None
- include_inputs_for_metrics: False
- include_for_metrics: []
- eval_do_concat_batches: True
- fp16_backend: auto
- push_to_hub_model_id: None
- push_to_hub_organization: None
- mp_parameters:
- auto_find_batch_size: False
- full_determinism: False
- torchdynamo: None
- ray_scope: last
- ddp_timeout: 1800
- torch_compile: False
- torch_compile_backend: None
- torch_compile_mode: None
- dispatch_batches: None
- split_batches: None
- include_tokens_per_second: False
- include_num_input_tokens_seen: False
- neftune_noise_alpha: None
- optim_target_modules: None
- batch_eval_metrics: False
- eval_on_start: False
- use_liger_kernel: False
- eval_use_gather_object: False
- average_tokens_across_devices: False
- prompts: {'query': 'query: ', 'text': 'passage: '}
- batch_sampler: no_duplicates
- multi_dataset_batch_sampler: proportional
- ddp_static_graph: False
- ddp_comm_hook: bf16
- gradient_as_bucket_view: False
- num_proc: 30
Training Logs
Epoch | Step | Training Loss | Validation Loss | bm-full_dot_ndcg@10 | core-uk-title_dot_ndcg@10 | core-ru-title_dot_ndcg@10 | core-uk-options_dot_ndcg@10 | core-ru-options_dot_ndcg@10 | options-uk-title_dot_ndcg@10 | options-ru-title_dot_ndcg@10 | options-uk-options_dot_ndcg@10 | options-ru-options_dot_ndcg@10 | rusisms-uk-title_dot_ndcg@10 | rusisms-ru-title_dot_ndcg@10 | rusisms-uk-options_dot_ndcg@10 | rusisms-ru-options_dot_ndcg@10 | rusisms_corrected-uk-title_dot_ndcg@10 | rusisms_corrected-ru-title_dot_ndcg@10 | rusisms_corrected-uk-options_dot_ndcg@10 | rusisms_corrected-ru-options_dot_ndcg@10 | core_typos-uk-title_dot_ndcg@10 | core_typos-ru-title_dot_ndcg@10 | core_typos-uk-options_dot_ndcg@10 | core_typos-ru-options_dot_ndcg@10 | bm-full--matryoshka_dim-768--_dot_ndcg@1 | bm-full--matryoshka_dim-512--_dot_ndcg@1 | bm-full--matryoshka_dim-256--_dot_ndcg@1 | bm-full--matryoshka_dim-128--_dot_ndcg@1 |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2.7017 | 143100 | 0.7397 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
2.7167 | 143895 | 0.7745 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
2.7317 | 144690 | 0.8018 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
2.7467 | 145485 | 0.7712 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
2.7617 | 146280 | 0.7634 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
2.7767 | 147075 | 0.7801 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
2.7917 | 147870 | 0.7608 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
2.8067 | 148665 | 0.7886 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
2.8218 | 149460 | 0.7534 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
2.8368 | 150255 | 0.7848 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
2.8518 | 151050 | 0.7657 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
2.8668 | 151845 | 0.7943 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
2.8818 | 152640 | 0.7683 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
2.8968 | 153435 | 0.7555 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
2.9118 | 154230 | 0.7575 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
2.9268 | 155025 | 0.7253 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
2.9418 | 155820 | 0.7538 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
2.9568 | 156615 | 0.7708 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
2.9719 | 157410 | 0.7582 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
2.9869 | 158205 | 0.7987 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.0002 | 158910 | - | 0.4537 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.0019 | 159000 | 0.7604 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.0169 | 159795 | 0.7485 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.0319 | 160590 | 0.7761 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.0469 | 161385 | 0.7606 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.0619 | 162180 | 0.7752 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.0769 | 162975 | 0.7624 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.0919 | 163770 | 0.7764 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.1070 | 164565 | 0.7714 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.1220 | 165360 | 0.7916 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.1370 | 166155 | 0.7484 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.1520 | 166950 | 0.7751 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.1670 | 167745 | 0.7634 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.1820 | 168540 | 0.7549 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.1970 | 169335 | 0.7538 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.2120 | 170130 | 0.7545 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.2270 | 170925 | 0.7738 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.2420 | 171720 | 0.7513 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.2570 | 172515 | 0.7479 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.2721 | 173310 | 0.751 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.2871 | 174105 | 0.7583 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.3002 | 174801 | - | 0.4436 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.3021 | 174900 | 0.7593 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.3171 | 175695 | 0.7346 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.3321 | 176490 | 0.759 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.3471 | 177285 | 0.7639 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.3621 | 178080 | 0.7699 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.3771 | 178875 | 0.7463 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.3921 | 179670 | 0.7659 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.4071 | 180465 | 0.7811 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.4221 | 181260 | 0.7658 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.4372 | 182055 | 0.7529 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.4522 | 182850 | 0.7448 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.4672 | 183645 | 0.7308 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.4822 | 184440 | 0.7567 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.4972 | 185235 | 0.7634 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.5122 | 186030 | 0.7619 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.5272 | 186825 | 0.7184 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.5422 | 187620 | 0.7555 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.5572 | 188415 | 0.7801 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.5722 | 189210 | 0.7764 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.5873 | 190005 | 0.7659 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.6002 | 190692 | - | 0.4584 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.6023 | 190800 | 0.7329 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.6173 | 191595 | 0.7439 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.6323 | 192390 | 0.7605 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.6473 | 193185 | 0.7511 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.6623 | 193980 | 0.7458 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.6773 | 194775 | 0.7508 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.6923 | 195570 | 0.7467 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.7073 | 196365 | 0.7463 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.7223 | 197160 | 0.7389 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.7373 | 197955 | 0.772 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.7524 | 198750 | 0.7859 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.7674 | 199545 | 0.7543 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.7824 | 200340 | 0.7635 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.7974 | 201135 | 0.7706 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.8124 | 201930 | 0.7748 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.8274 | 202725 | 0.7552 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.8424 | 203520 | 0.7484 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.8574 | 204315 | 0.7535 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.8724 | 205110 | 0.7615 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.8874 | 205905 | 0.7536 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.9002 | 206583 | - | 0.4789 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.9024 | 206700 | 0.7566 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.9175 | 207495 | 0.7747 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.9325 | 208290 | 0.7526 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.9475 | 209085 | 0.759 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.9625 | 209880 | 0.7477 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.9775 | 210675 | 0.7632 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
3.9925 | 211470 | 0.7625 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.0075 | 212265 | 0.7535 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.0225 | 213060 | 0.745 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.0376 | 213855 | 0.7311 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.0526 | 214650 | 0.7327 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.0676 | 215445 | 0.7385 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.0826 | 216240 | 0.7521 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.0976 | 217035 | 0.7579 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.1126 | 217830 | 0.7378 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.1276 | 218625 | 0.7641 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.1426 | 219420 | 0.7637 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.1576 | 220215 | 0.7676 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.1726 | 221010 | 0.7789 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.1876 | 221805 | 0.7677 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.2003 | 222474 | - | 0.4703 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.2027 | 222600 | 0.77 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.2177 | 223395 | 0.7386 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.2327 | 224190 | 0.7432 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.2477 | 224985 | 0.7436 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.2627 | 225780 | 0.7366 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.2777 | 226575 | 0.7254 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.2927 | 227370 | 0.7594 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.3077 | 228165 | 0.7646 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.3227 | 228960 | 0.7524 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.3377 | 229755 | 0.7625 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.3527 | 230550 | 0.7647 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.3678 | 231345 | 0.7425 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.3828 | 232140 | 0.7568 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.3978 | 232935 | 0.7809 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.4128 | 233730 | 0.7762 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.4278 | 234525 | 0.7579 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.4428 | 235320 | 0.7625 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.4578 | 236115 | 0.7664 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.4728 | 236910 | 0.7357 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.4878 | 237705 | 0.7316 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.5003 | 238365 | - | 0.4811 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.5028 | 238500 | 0.7568 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.5179 | 239295 | 0.7522 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.5329 | 240090 | 0.7529 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.5479 | 240885 | 0.7468 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.5629 | 241680 | 0.7304 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.5779 | 242475 | 0.749 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.5929 | 243270 | 0.7391 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.6079 | 244065 | 0.7483 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.6229 | 244860 | 0.7682 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.6379 | 245655 | 0.7636 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.6529 | 246450 | 0.7705 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.6679 | 247245 | 0.7516 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.6830 | 248040 | 0.7632 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.6980 | 248835 | 0.7659 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.7130 | 249630 | 0.7254 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.7280 | 250425 | 0.7163 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.7430 | 251220 | 0.7552 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.7580 | 252015 | 0.7654 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.7730 | 252810 | 0.7308 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.7880 | 253605 | 0.7513 | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
4.8003 | 254256 | - | 0.4811 | 0.4893 | 0.5825 | 0.5898 | 0.4895 | 0.4883 | 0.7650 | 0.7638 | 0.6590 | 0.6610 | 0.6648 | 0.6683 | 0.5372 | 0.5433 | 0.7195 | 0.7127 | 0.5966 | 0.5987 | 0.5010 | 0.5093 | 0.4117 | 0.4101 | 0.4784 | 0.4759 | 0.4759 | 0.4522 |
- The bold row denotes the saved checkpoint.
Framework Versions
- Python: 3.11.10
- Sentence Transformers: 3.3.0
- Transformers: 4.46.3
- PyTorch: 2.5.1+cu124
- Accelerate: 1.1.1
- Datasets: 3.1.0
- Tokenizers: 0.20.3
Citation
BibTeX
Sentence Transformers
@inproceedings{reimers-2019-sentence-bert,
title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
author = "Reimers, Nils and Gurevych, Iryna",
booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
month = "11",
year = "2019",
publisher = "Association for Computational Linguistics",
url = "https://arxiv.org/abs/1908.10084",
}