Commit db0aadb
Parent(s): 1616d1b

Add results (#40)

* Add
* Add res
* fix: reduce sizes of files and added makefile command
* Add
* Add res
* formatted flores files
* Add
* Add res
* make preprush
* Add OAI T3Small MMTEB
* update ci dependencies to >3.9
* tmp
* reduce large file sizes
* Add overview table to readme
* Add missing TE3L results
* Make prepush
* Add res
* reduce size
* AddRes
* Add
* fix: run make pre-push
* add revision to openai text-embedding-large, add "openai/" to model name
* minor fix to model meta
* make sure pip is upgraded installing packages
---------
Co-authored-by: Kenneth Enevoldsen <kennethcenevoldsen@gmail.com>
This view is limited to 50 files because it contains too many changes.
- .github/workflows/test.yml +0 -1
- .gitignore +3 -0
- README.md +17 -1
- makefile +2 -2
- pyproject.toml +7 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/AILAStatutes.json +158 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/AfriSentiClassification.json +755 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/AlloProfClusteringS2S.v2.json +34 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/AlloprofReranking.json +26 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/AmazonCounterfactualClassification.json +685 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/ArXivHierarchicalClusteringP2P.json +46 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/ArXivHierarchicalClusteringS2S.json +46 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/ArguAna.json +158 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/ArmenianParaphrasePC.json +58 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/BUCC.v2.json +59 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/BelebeleRetrieval.json +0 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/BibleNLPBitextMining.json +0 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/BigPatentClustering.v2.json +34 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/BiorxivClusteringP2P.v2.json +34 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/BornholmBitextMining.json +22 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/BrazilianToxicTweetsClassification.json +73 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/BulgarianStoreReviewSentimentClassfication.json +73 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/CEDRClassification.json +73 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/CLSClusteringP2P.v2.json +34 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/CSFDSKMovieReviewSentimentClassification.json +73 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/CTKFactsNLI.json +107 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/CataloniaTweetClassification.json +261 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/CovidRetrieval.json +158 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/CyrillicTurkicLangClassification.json +81 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/CzechProductReviewSentimentClassification.json +73 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/DBpediaClassification.json +73 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/DalajClassification.json +95 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/DiaBlaBitextMining.json +35 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/EstonianValenceClassification.json +73 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/FaroeseSTS.json +26 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/FilipinoShopeeReviewsClassification.json +137 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/FinParaSTS.json +43 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/FinancialPhrasebankClassification.json +73 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/FloresBitextMining.json +0 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/GermanSTSBenchmark.json +43 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/GreekLegalCodeClassification.json +137 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/GujaratiNewsClassification.json +73 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/HALClusteringS2S.v2.json +34 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/IN22GenBitextMining.json +0 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/IndicCrosslingualSTS.json +203 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/IndicGenBenchFloresBitextMining.json +1405 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/IndicLangClassification.json +96 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/IndonesianIdClickbaitClassification.json +95 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/IsiZuluNewsClassification.json +73 -0
- results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/ItaCaseholdClassification.json +73 -0
.github/workflows/test.yml
CHANGED
@@ -17,7 +17,6 @@ jobs:
       matrix:
         os: [ubuntu-latest] #, macos-latest, windows-latest]
         python-version: ["3.9"]
-
     steps:
       - uses: actions/checkout@v3
.gitignore
CHANGED
@@ -10,3 +10,6 @@ tmp.py
 
 # MacOS
 .DS_Store
+
+# UV
+uv.lock
README.md
CHANGED
@@ -2,4 +2,20 @@
 benchmark: mteb
 type: evaluation
 submission_name: MTEB
----
+---
+
+This repository contain the results of the embedding benchmark evaluated using the package `mteb`.
+
+
+| Reference | |
+| ------------------- | ---------------------------------------------------------------------------------------- |
+| 🦾 **[Leaderboard]** | An up to date leaderboard of embedding models |
+| 📚 **[mteb]** | Guides and instructions on how to use `mteb`, including running, submitting scores, etc. |
+| 🙋 **[Questions]** | Questions about the results |
+| 🙋 **[Issues]** | Issues or bugs you have found |
+
+
+[Leaderboard]: https://huggingface.co/spaces/mteb/leaderboard
+[mteb]: https://github.com/embeddings-benchmark/mteb
+[Questions]: https://github.com/embeddings-benchmark/mteb/discussions
+[Issues]: https://github.com/embeddings-benchmark/mteb/issues
makefile
CHANGED
@@ -1,7 +1,7 @@
 install-for-tests:
 	@echo "--- Installing dependencies for tests ---"
-
-	pip install
+	pip install pip --upgrade
+	pip install .
 
 test:
 	@echo "--- Running tests ---"
pyproject.toml
ADDED
@@ -0,0 +1,7 @@
+[project]
+name = "results"
+version = "0.1.0"
+description = "The result repository for mteb"
+readme = "README.md"
+requires-python = ">=3.9"
+dependencies = ["mteb[dev]>=1.13.0"]
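The new pyproject.toml pins `requires-python` to ">=3.9" and declares `mteb[dev]>=1.13.0` as the only dependency, which is what `make install-for-tests` now installs via `pip install .`. A minimal sketch of checking both constraints at runtime (hypothetical helper, not part of this commit):

```python
# Hypothetical check, not part of this commit: confirm the environment matches
# the constraints declared in pyproject.toml (Python >= 3.9, mteb >= 1.13.0).
import sys
from importlib.metadata import PackageNotFoundError, version


def check_environment() -> None:
    if sys.version_info < (3, 9):
        raise RuntimeError(f"Python >= 3.9 required, found {sys.version.split()[0]}")
    try:
        installed = version("mteb")
    except PackageNotFoundError as err:
        raise RuntimeError("mteb is not installed; run `pip install .`") from err
    # Compare only the leading numeric components of the version string.
    parts = tuple(int(p) for p in installed.split(".")[:3] if p.isdigit())
    if parts < (1, 13, 0):
        raise RuntimeError(f"mteb >= 1.13.0 required, found {installed}")
    print(f"OK: Python {sys.version.split()[0]}, mteb {installed}")


if __name__ == "__main__":
    check_environment()
```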
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/AILAStatutes.json
ADDED
@@ -0,0 +1,158 @@
{
  "dataset_revision": "ebfcd844eadd3d667efa3c57fc5c8c87f5c2867e",
  "evaluation_time": 17.8603093624115,
  "kg_co2_emissions": 0.0017055287025926752,
  "mteb_version": "1.18.0",
  "scores": {
    "test": [
      {
        "hf_subset": "default",
        "languages": ["eng-Latn"],
        "main_score": 0.33765,
        "map_at_1": 0.084,
        "map_at_10": 0.21694,
        "map_at_100": 0.28429,
        "map_at_1000": 0.28429,
        "map_at_20": 0.24574,
        "map_at_3": 0.1425,
        "map_at_5": 0.18367,
        "mrr_at_1": 0.3,
        "mrr_at_10": 0.454095238095238,
        "mrr_at_100": 0.46212957304133767,
        "mrr_at_1000": 0.46212957304133767,
        "mrr_at_20": 0.4612962397080043,
        "mrr_at_3": 0.39333333333333337,
        "mrr_at_5": 0.42533333333333334,
        "nauc_map_at_1000_diff1": 0.1257589273091899, "nauc_map_at_1000_max": 0.4362811047451273, "nauc_map_at_1000_std": 0.15526294395684537,
        "nauc_map_at_100_diff1": 0.1257589273091899, "nauc_map_at_100_max": 0.4362811047451273, "nauc_map_at_100_std": 0.15526294395684537,
        "nauc_map_at_10_diff1": 0.12251716861218423, "nauc_map_at_10_max": 0.44750464684869273, "nauc_map_at_10_std": 0.1665813067303774,
        "nauc_map_at_1_diff1": 0.1603550346774055, "nauc_map_at_1_max": 0.3557488770716093, "nauc_map_at_1_std": -0.03334365911163875,
        "nauc_map_at_20_diff1": 0.11168162103830682, "nauc_map_at_20_max": 0.42834021224498536, "nauc_map_at_20_std": 0.18809254528446484,
        "nauc_map_at_3_diff1": 0.1820738040919183, "nauc_map_at_3_max": 0.43576454065851916, "nauc_map_at_3_std": 0.03987196225744017,
        "nauc_map_at_5_diff1": 0.1516707675586077, "nauc_map_at_5_max": 0.4343805548656787, "nauc_map_at_5_std": 0.12289883063537198,
        "nauc_mrr_at_1000_diff1": 0.21369110302271785, "nauc_mrr_at_1000_max": 0.4482852535080846, "nauc_mrr_at_1000_std": 0.04709212363192428,
        "nauc_mrr_at_100_diff1": 0.21369110302271785, "nauc_mrr_at_100_max": 0.4482852535080846, "nauc_mrr_at_100_std": 0.04709212363192428,
        "nauc_mrr_at_10_diff1": 0.20469773018048376, "nauc_mrr_at_10_max": 0.4376008078285497, "nauc_mrr_at_10_std": 0.05364942361920574,
        "nauc_mrr_at_1_diff1": 0.2885015538440751, "nauc_mrr_at_1_max": 0.4731252533441427, "nauc_mrr_at_1_std": -0.03950817457100396,
        "nauc_mrr_at_20_diff1": 0.2115713156894796, "nauc_mrr_at_20_max": 0.4485740159869434, "nauc_mrr_at_20_std": 0.04933466533602979,
        "nauc_mrr_at_3_diff1": 0.21908223819854072, "nauc_mrr_at_3_max": 0.46955286229090865, "nauc_mrr_at_3_std": 0.027972999623501044,
        "nauc_mrr_at_5_diff1": 0.18683833661325422, "nauc_mrr_at_5_max": 0.44082372073744047, "nauc_mrr_at_5_std": 0.07459968781624475,
        "nauc_ndcg_at_1000_diff1": 0.14747261650165755, "nauc_ndcg_at_1000_max": 0.439315617438903, "nauc_ndcg_at_1000_std": 0.13888603634732072,
        "nauc_ndcg_at_100_diff1": 0.14747261650165755, "nauc_ndcg_at_100_max": 0.439315617438903, "nauc_ndcg_at_100_std": 0.13888603634732072,
        "nauc_ndcg_at_10_diff1": 0.10151157379963689, "nauc_ndcg_at_10_max": 0.4287131093173576, "nauc_ndcg_at_10_std": 0.21756532293248818,
        "nauc_ndcg_at_1_diff1": 0.2885015538440751, "nauc_ndcg_at_1_max": 0.4731252533441427, "nauc_ndcg_at_1_std": -0.03950817457100396,
        "nauc_ndcg_at_20_diff1": 0.08771431706990646, "nauc_ndcg_at_20_max": 0.41146211509813463, "nauc_ndcg_at_20_std": 0.25283813479379036,
        "nauc_ndcg_at_3_diff1": 0.20179057017543273, "nauc_ndcg_at_3_max": 0.4596305993876074, "nauc_ndcg_at_3_std": 0.05068535832174184,
        "nauc_ndcg_at_5_diff1": 0.1346805085198524, "nauc_ndcg_at_5_max": 0.434665851425462, "nauc_ndcg_at_5_std": 0.176648112448584,
        "nauc_precision_at_1000_diff1": -0.001920930828195194, "nauc_precision_at_1000_max": -0.2500503100931195, "nauc_precision_at_1000_std": -0.25191635718336514,
        "nauc_precision_at_100_diff1": -0.0019209308281937865, "nauc_precision_at_100_max": -0.25005031009311424, "nauc_precision_at_100_std": -0.2519163571833613,
        "nauc_precision_at_10_diff1": -0.018183872856718023, "nauc_precision_at_10_max": 0.264066818027717, "nauc_precision_at_10_std": 0.2958423653417436,
        "nauc_precision_at_1_diff1": 0.2885015538440751, "nauc_precision_at_1_max": 0.4731252533441427, "nauc_precision_at_1_std": -0.03950817457100396,
        "nauc_precision_at_20_diff1": -0.06991681775526426, "nauc_precision_at_20_max": 0.10627830064049434, "nauc_precision_at_20_std": 0.29626727648757734,
        "nauc_precision_at_3_diff1": 0.19630950310198872, "nauc_precision_at_3_max": 0.4547747434336407, "nauc_precision_at_3_std": 0.09030556038731391,
        "nauc_precision_at_5_diff1": 0.06223329362070833, "nauc_precision_at_5_max": 0.3396512509476874, "nauc_precision_at_5_std": 0.25769522365428343,
        "nauc_recall_at_1000_diff1": NaN, "nauc_recall_at_1000_max": NaN, "nauc_recall_at_1000_std": NaN,
        "nauc_recall_at_100_diff1": NaN, "nauc_recall_at_100_max": NaN, "nauc_recall_at_100_std": NaN,
        "nauc_recall_at_10_diff1": -0.018606080799264093, "nauc_recall_at_10_max": 0.34265022294542885, "nauc_recall_at_10_std": 0.38239558029948173,
        "nauc_recall_at_1_diff1": 0.1603550346774055, "nauc_recall_at_1_max": 0.3557488770716093, "nauc_recall_at_1_std": -0.03334365911163875,
        "nauc_recall_at_20_diff1": -0.07765209201197695, "nauc_recall_at_20_max": 0.20771577314708548, "nauc_recall_at_20_std": 0.430869184234819,
        "nauc_recall_at_3_diff1": 0.15971238444535218, "nauc_recall_at_3_max": 0.44263477633603043, "nauc_recall_at_3_std": 0.10476761601816997,
        "nauc_recall_at_5_diff1": 0.061713450179887205, "nauc_recall_at_5_max": 0.374497951016796, "nauc_recall_at_5_std": 0.2932912634434461,
        "ndcg_at_1": 0.3,
        "ndcg_at_10": 0.33765,
        "ndcg_at_100": 0.53487,
        "ndcg_at_1000": 0.53487,
        "ndcg_at_20": 0.40329,
        "ndcg_at_3": 0.26148,
        "ndcg_at_5": 0.2707,
        "precision_at_1": 0.3,
        "precision_at_10": 0.17,
        "precision_at_100": 0.0434,
        "precision_at_1000": 0.00434,
        "precision_at_20": 0.122,
        "precision_at_3": 0.24,
        "precision_at_5": 0.224,
        "recall_at_1": 0.084,
        "recall_at_10": 0.39633,
        "recall_at_100": 1.0,
        "recall_at_1000": 1.0,
        "recall_at_20": 0.558,
        "recall_at_3": 0.177,
        "recall_at_5": 0.26767
      }
    ]
  },
  "task_name": "AILAStatutes"
}
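Every result file added in this commit follows the same layout: top-level metadata (dataset_revision, mteb_version, evaluation_time, kg_co2_emissions), a "scores" mapping from split name to a list of per-subset entries, and a task_name. A minimal sketch of reading the AILAStatutes file above and printing its headline scores (the path comes from the file list; the loop itself is illustrative, not part of the commit):

```python
# Read one result JSON added in this commit and print the headline scores.
# Python's json module accepts the NaN literals that appear in these files.
import json
from pathlib import Path

result_file = Path(
    "results/Alibaba-NLP__gte-Qwen2-7B-instruct/"
    "e26182b2122f4435e8b3ebecbf363990f409b45b/AILAStatutes.json"
)
data = json.loads(result_file.read_text())

print(data["task_name"], "| mteb", data["mteb_version"], "| revision", data["dataset_revision"])
for split, entries in data["scores"].items():
    for entry in entries:
        # One entry per hf_subset / language combination.
        print(f"  {split}/{entry['hf_subset']}: main_score={entry['main_score']}")
```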
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/AfriSentiClassification.json
ADDED
@@ -0,0 +1,755 @@
{
  "dataset_revision": "b52e930385cf5ed7f063072c3f7bd17b599a16cf",
  "evaluation_time": 565.641083240509,
  "kg_co2_emissions": 0.05327796906903396,
  "mteb_version": "1.18.0",
  "scores": {
    "test": [
      {
        "accuracy": 0.3440220110055028,
        "f1": 0.2952631850815317,
        "f1_weighted": 0.3630343634603874,
        "hf_subset": "amh",
        "languages": ["amh-Ethi"],
        "main_score": 0.3440220110055028,
        "scores_per_experiment": [
          {"accuracy": 0.2846423211605803, "f1": 0.2649921213752659, "f1_weighted": 0.2962271668828367},
          {"accuracy": 0.5287643821910956, "f1": 0.3462814989462357, "f1_weighted": 0.5244779881517598},
          {"accuracy": 0.2546273136568284, "f1": 0.24673611952989782, "f1_weighted": 0.25760643513278514},
          {"accuracy": 0.41570785392696347, "f1": 0.3113886088108449, "f1_weighted": 0.453388195491647},
          {"accuracy": 0.2681340670335168, "f1": 0.2533826203772091, "f1_weighted": 0.2934749727301081},
          {"accuracy": 0.34217108554277137, "f1": 0.30093210051767166, "f1_weighted": 0.36666743108168},
          {"accuracy": 0.2861430715357679, "f1": 0.27695303434279656, "f1_weighted": 0.2849336178170178},
          {"accuracy": 0.29214607303651824, "f1": 0.2819352705607214, "f1_weighted": 0.32004565294140097},
          {"accuracy": 0.3611805902951476, "f1": 0.3128023787305001, "f1_weighted": 0.3820226669618049},
          {"accuracy": 0.40670335167583793, "f1": 0.3572280976241738, "f1_weighted": 0.4514995074128338}
        ]
      },
      {
        "accuracy": 0.5377870563674321,
        "f1": 0.5078749704137754,
        "f1_weighted": 0.5466908921310866,
        "hf_subset": "arq",
        "languages": ["arq-Arab"],
        "main_score": 0.5377870563674321,
        "scores_per_experiment": [
          {"accuracy": 0.5334029227557411, "f1": 0.5190004387939487, "f1_weighted": 0.5618874493789515},
          {"accuracy": 0.5041753653444676, "f1": 0.4881678751492771, "f1_weighted": 0.5132897327943338},
          {"accuracy": 0.615866388308977, "f1": 0.5271794490238074, "f1_weighted": 0.6018429175423871},
          {"accuracy": 0.5271398747390397, "f1": 0.5095051716826331, "f1_weighted": 0.5317873179473804},
          {"accuracy": 0.4989561586638831, "f1": 0.460856467658368, "f1_weighted": 0.49857224620719504},
          {"accuracy": 0.5083507306889353, "f1": 0.47798003671211103, "f1_weighted": 0.5281766435917458},
          {"accuracy": 0.47390396659707723, "f1": 0.46173943900377123, "f1_weighted": 0.5035408205708626},
          {"accuracy": 0.5292275574112735, "f1": 0.5123349361322483, "f1_weighted": 0.5206617662299967},
          {"accuracy": 0.6263048016701461, "f1": 0.5783173023025826, "f1_weighted": 0.628960302602882},
          {"accuracy": 0.5605427974947808, "f1": 0.5436685876790063, "f1_weighted": 0.5781897244451311}
        ]
      },
      {
        "accuracy": 0.50517578125,
        "f1": 0.49304373021310105,
        "f1_weighted": 0.4988697211656731,
        "hf_subset": "ary",
        "languages": ["ary-Arab"],
        "main_score": 0.50517578125,
        "scores_per_experiment": [
          {"accuracy": 0.53173828125, "f1": 0.5310898782718756, "f1_weighted": 0.5338427868430252},
          {"accuracy": 0.4775390625, "f1": 0.4776416387086107, "f1_weighted": 0.48005233941789954},
          {"accuracy": 0.3603515625, "f1": 0.3462570450074205, "f1_weighted": 0.3460526151564849},
          {"accuracy": 0.556640625, "f1": 0.5491909659831732, "f1_weighted": 0.5533891931272991},
          {"accuracy": 0.55224609375, "f1": 0.5390187358364881, "f1_weighted": 0.5481175869103161},
          {"accuracy": 0.4296875, "f1": 0.42142597263787157, "f1_weighted": 0.42264198050476426},
          {"accuracy": 0.5439453125, "f1": 0.5135653489767846, "f1_weighted": 0.5260338817798391},
          {"accuracy": 0.529296875, "f1": 0.5284379699007784, "f1_weighted": 0.5338923414561169},
          {"accuracy": 0.53515625, "f1": 0.5198093787501153, "f1_weighted": 0.527525668908803},
          {"accuracy": 0.53515625, "f1": 0.5040003680578925, "f1_weighted": 0.517148817552182}
        ]
      },
      {
        "accuracy": 0.6546875,
        "f1": 0.3643087544436122,
        "f1_weighted": 0.7395779999564863,
        "hf_subset": "hau",
        "languages": ["hau-Latn"],
        "main_score": 0.6546875,
        "scores_per_experiment": [
          {"accuracy": 0.70703125, "f1": 0.4205628831767816, "f1_weighted": 0.7766259200554595},
          {"accuracy": 0.66064453125, "f1": 0.411153096050707, "f1_weighted": 0.7383019918351568},
          {"accuracy": 0.6357421875, "f1": 0.404650384229335, "f1_weighted": 0.7328226302355939},
          {"accuracy": 0.55615234375, "f1": 0.33317188809026604, "f1_weighted": 0.6857060854638739},
          {"accuracy": 0.62109375, "f1": 0.30472888596779496, "f1_weighted": 0.7191492112404494},
          {"accuracy": 0.67578125, "f1": 0.2845979847830557, "f1_weighted": 0.7316447216224553},
          {"accuracy": 0.61279296875, "f1": 0.3137028061864798, "f1_weighted": 0.7181539527296742},
          {"accuracy": 0.65234375, "f1": 0.3567790518269163, "f1_weighted": 0.7460254273513933},
          {"accuracy": 0.65771484375, "f1": 0.3885804525378143, "f1_weighted": 0.7479547778332963},
          {"accuracy": 0.767578125, "f1": 0.42516011158697103, "f1_weighted": 0.7993952811975094}
        ]
      },
      {
        "accuracy": 0.493017578125,
        "f1": 0.35122565698817526,
        "f1_weighted": 0.5278703656080557,
        "hf_subset": "ibo",
        "languages": ["ibo-Latn"],
        "main_score": 0.493017578125,
        "scores_per_experiment": [
          {"accuracy": 0.49951171875, "f1": 0.3359596359093417, "f1_weighted": 0.4978920214599997},
          {"accuracy": 0.59130859375, "f1": 0.4187764505546791, "f1_weighted": 0.6352989157228983},
          {"accuracy": 0.2978515625, "f1": 0.2484078756404555, "f1_weighted": 0.39242972608011134},
          {"accuracy": 0.541015625, "f1": 0.38544040036500643, "f1_weighted": 0.572848737877612},
          {"accuracy": 0.56396484375, "f1": 0.414472520594851, "f1_weighted": 0.6243179368019787},
          {"accuracy": 0.54443359375, "f1": 0.3846831625657747, "f1_weighted": 0.5805182186903713},
          {"accuracy": 0.3916015625, "f1": 0.32631599173320675, "f1_weighted": 0.4894764959251987},
          {"accuracy": 0.5390625, "f1": 0.33675079761015064, "f1_weighted": 0.49097596276502076},
          {"accuracy": 0.56640625, "f1": 0.36681641250823144, "f1_weighted": 0.5403668967598606},
          {"accuracy": 0.39501953125, "f1": 0.2946333224000553, "f1_weighted": 0.45457874399750486}
        ]
      },
      {
        "accuracy": 0.4394736842105263,
        "f1": 0.4327233880802699,
        "f1_weighted": 0.43346938396258244,
        "hf_subset": "kin",
        "languages": ["kin-Latn"],
        "main_score": 0.4394736842105263,
        "scores_per_experiment": [
          {"accuracy": 0.45321637426900585, "f1": 0.4505995480668042, "f1_weighted": 0.44853965649540023},
          {"accuracy": 0.4239766081871345, "f1": 0.41362734457651174, "f1_weighted": 0.41570353442607466},
          {"accuracy": 0.42105263157894735, "f1": 0.41315330824210345, "f1_weighted": 0.4046384921539682},
          {"accuracy": 0.44834307992202727, "f1": 0.4417463314517404, "f1_weighted": 0.4490503134638931},
          {"accuracy": 0.4697855750487329, "f1": 0.46787672080069737, "f1_weighted": 0.4728935630708826},
          {"accuracy": 0.4405458089668616, "f1": 0.43389637420339655, "f1_weighted": 0.43329225295841256},
          {"accuracy": 0.4834307992202729, "f1": 0.48457495693243285, "f1_weighted": 0.4821718376280889},
          {"accuracy": 0.43762183235867447, "f1": 0.41881096517035843, "f1_weighted": 0.4252584121811742},
          {"accuracy": 0.4171539961013645, "f1": 0.40874203215576266, "f1_weighted": 0.4105945998153718},
          {"accuracy": 0.3996101364522417, "f1": 0.39420629920289163, "f1_weighted": 0.39255117743255846}
        ]
      },
      {
        "accuracy": 0.39619140625,
        "f1": 0.3941878740238879,
        "f1_weighted": 0.3747544501156019,
        "hf_subset": "por",
        "languages": ["por-Latn"],
        "main_score": 0.39619140625,
        "scores_per_experiment": [
          {"accuracy": 0.404296875, "f1": 0.4129678381633413, "f1_weighted": 0.40008193829440963},
          {"accuracy": 0.3837890625, "f1": 0.39102267687508424, "f1_weighted": 0.3748516999236242},
          {"accuracy": 0.5556640625, "f1": 0.48164790906224564, "f1_weighted": 0.5620253672560956},
          {"accuracy": 0.396484375, "f1": 0.40975936159170484, "f1_weighted": 0.40497195246243445},
          {"accuracy": 0.29931640625, "f1": 0.32298011033060775, "f1_weighted": 0.23208427807771204},
          {"accuracy": 0.39599609375, "f1": 0.41136035768779333, "f1_weighted": 0.38674900906777915},
          {"accuracy": 0.3173828125, "f1": 0.3217855715999651, "f1_weighted": 0.2255254568100868},
          {"accuracy": 0.28076171875, "f1": 0.2919257051534559, "f1_weighted": 0.19243179026695342},
          {"accuracy": 0.44384765625, "f1": 0.43933579442775605, "f1_weighted": 0.45577199617427233},
          {"accuracy": 0.484375, "f1": 0.45909341534692477, "f1_weighted": 0.5130510128226513}
        ]
      },
      {
        "accuracy": 0.42080078125,
        "f1": 0.3275713353235739,
        "f1_weighted": 0.458876312633339,
        "hf_subset": "pcm",
        "languages": ["pcm-Latn"],
        "main_score": 0.42080078125,
        "scores_per_experiment": [
          {"accuracy": 0.38623046875, "f1": 0.2978953751345687, "f1_weighted": 0.4427315880700096},
          {"accuracy": 0.46337890625, "f1": 0.3571171773121317, "f1_weighted": 0.5024623580534701},
          {"accuracy": 0.5224609375, "f1": 0.34728632977341073, "f1_weighted": 0.5290271964893006},
          {"accuracy": 0.29296875, "f1": 0.24069131084135317, "f1_weighted": 0.35451211857704035},
          {"accuracy": 0.5107421875, "f1": 0.3258729675827334, "f1_weighted": 0.5101957888477968},
          {"accuracy": 0.3466796875, "f1": 0.2643538926385412, "f1_weighted": 0.4058541499630756},
          {"accuracy": 0.3544921875, "f1": 0.2869893328870674, "f1_weighted": 0.4146762338170529},
          {"accuracy": 0.400390625, "f1": 0.3526867771845939, "f1_weighted": 0.4357531607234683},
          {"accuracy": 0.44140625, "f1": 0.36916988156391056, "f1_weighted": 0.473969581712152},
          {"accuracy": 0.4892578125, "f1": 0.4336503083174281, "f1_weighted": 0.5195809500800236}
        ]
      },
      {
        "accuracy": 0.41002673796791445,
        "f1": 0.3775338145199986,
        "f1_weighted": 0.4272960287710961,
        "hf_subset": "swa",
        "languages": ["swa-Latn"],
        "main_score": 0.41002673796791445,
        "scores_per_experiment": [
          {"accuracy": 0.3716577540106952, "f1": 0.3523369997958748, "f1_weighted": 0.3765187081032237},
          {"accuracy": 0.32219251336898397, "f1": 0.3024578022526575, "f1_weighted": 0.35076676997357237},
          {"accuracy": 0.4117647058823529, "f1": 0.3894847250649521, "f1_weighted": 0.43425486536599267},
          {"accuracy": 0.34358288770053474, "f1": 0.3280728067637791, "f1_weighted": 0.36567254811575917},
          {"accuracy": 0.42780748663101603, "f1": 0.4005876601358889, "f1_weighted": 0.45369425348032283},
          {"accuracy": 0.4799465240641711, "f1": 0.43080724038790436, "f1_weighted": 0.4942171923569025},
          {"accuracy": 0.43983957219251335, "f1": 0.4077896036494763, "f1_weighted": 0.4651800026216729},
          {"accuracy": 0.393048128342246, "f1": 0.38050525181049116, "f1_weighted": 0.3975679965132585},
          {"accuracy": 0.48128342245989303, "f1": 0.4046606172712897, "f1_weighted": 0.48167746871348915},
          {"accuracy": 0.42914438502673796, "f1": 0.37863543806767225, "f1_weighted": 0.45341048246676696}
        ]
      },
      {
        "accuracy": 0.40505795574288717,
        "f1": 0.3681781375796969,
        "f1_weighted": 0.4120850000313431,
        "hf_subset": "twi",
        "languages": ["twi-Latn"],
        "main_score": 0.40505795574288717,
        "scores_per_experiment": [
          {"accuracy": 0.422550052687039, "f1": 0.38509513966682435, "f1_weighted": 0.4317038758973267},
          {"accuracy": 0.40147523709167543, "f1": 0.36941235745399476, "f1_weighted": 0.4131077783791736},
          {"accuracy": 0.3972602739726027, "f1": 0.3618208218737775, "f1_weighted": 0.40257376508040155},
          {"accuracy": 0.4320337197049526, "f1": 0.4064352982547666, "f1_weighted": 0.4394909309467544},
          {"accuracy": 0.422550052687039, "f1": 0.3651593178080124, "f1_weighted": 0.4219338029905163},
          {"accuracy": 0.3656480505795574, "f1": 0.34067658499493886, "f1_weighted": 0.38211280236041456},
          {"accuracy": 0.3519494204425711, "f1": 0.3364559060842147, "f1_weighted": 0.37482704518189863},
          {"accuracy": 0.40252897787144365, "f1": 0.36913740210181795, "f1_weighted": 0.40715937182434453},
          {"accuracy": 0.4488935721812434, "f1": 0.37854620754940244, "f1_weighted": 0.4387052043273322},
          {"accuracy": 0.40569020021074814, "f1": 0.36904234000921976, "f1_weighted": 0.4092354233252685}
        ]
      },
      {
        "accuracy": 0.37362204724409454,
        "f1": 0.3493943797641518,
        "f1_weighted": 0.3854690556974921,
        "hf_subset": "tso",
        "languages": ["tso-Latn"],
        "main_score": 0.37362204724409454,
        "scores_per_experiment": [
          {"accuracy": 0.3228346456692913, "f1": 0.3075459260066948, "f1_weighted": 0.34153995948474486},
          {"accuracy": 0.36220472440944884, "f1": 0.3560578042599822, "f1_weighted": 0.39353253228799445},
          {"accuracy": 0.2952755905511811, "f1": 0.29364995782626074, "f1_weighted": 0.31947556667482935},
          {"accuracy": 0.35826771653543305, "f1": 0.3323540537676541, "f1_weighted": 0.3706659578882402},
          {"accuracy": 0.39763779527559057, "f1": 0.35926928101312167, "f1_weighted": 0.4027029940757667},
          {"accuracy": 0.4409448818897638, "f1": 0.40611525332375137, "f1_weighted": 0.437687221841835},
          {"accuracy": 0.41732283464566927, "f1": 0.38580246913580246, "f1_weighted": 0.4265820939049285},
          {"accuracy": 0.37401574803149606, "f1": 0.33526470475972364, "f1_weighted": 0.3799033623030031},
          {"accuracy": 0.3937007874015748, "f1": 0.3647999319641111, "f1_weighted": 0.3936008429485924},
          {"accuracy": 0.37401574803149606, "f1": 0.35308441558441556, "f1_weighted": 0.38900002556498625}
        ]
      },
      {
        "accuracy": 0.346240234375,
        "f1": 0.19875513245072227,
        "f1_weighted": 0.4292295491167349,
        "hf_subset": "yor",
        "languages": ["yor-Latn"],
        "main_score": 0.346240234375,
        "scores_per_experiment": [
          {"accuracy": 0.05615234375, "f1": 0.0592235025433406, "f1_weighted": 0.10512614717961694},
          {"accuracy": 0.4599609375, "f1": 0.2509996501224571, "f1_weighted": 0.5788391177345741},
          {"accuracy": 0.119140625, "f1": 0.11315480088385434, "f1_weighted": 0.19015341196305274},
          {"accuracy": 0.68505859375, "f1": 0.33821770302200543, "f1_weighted": 0.7775558269132272},
          {"accuracy": 0.2744140625, "f1": 0.18359963558639056, "f1_weighted": 0.3876506316602938},
          {"accuracy": 0.62255859375, "f1": 0.3065700267762263, "f1_weighted": 0.7257637609949273},
          {"accuracy": 0.13623046875, "f1": 0.11730776099146399, "f1_weighted": 0.23543228873687452},
          {"accuracy": 0.36181640625, "f1": 0.21260502346609447, "f1_weighted": 0.47107641740287953},
          {"accuracy": 0.6962890625, "f1": 0.34095699851692424, "f1_weighted": 0.7867569991075509},
          {"accuracy": 0.05078125, "f1": 0.06491622259846568, "f1_weighted": 0.03394088947435239}
        ]
      }
    ]
  },
  "task_name": "AfriSentiClassification"
}
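For classification tasks the harness stores ten per-run scores under scores_per_experiment next to the aggregate. In this file the aggregate appears to be the plain mean of those runs (for the "amh" subset the ten accuracies average to roughly 0.34402, matching the stored value). A short sketch that recomputes the mean per subset for comparison:

```python
# Recompute each subset's accuracy as the mean of its per-experiment runs and
# compare it with the aggregate stored in the file.
import json
from pathlib import Path
from statistics import fmean

path = Path(
    "results/Alibaba-NLP__gte-Qwen2-7B-instruct/"
    "e26182b2122f4435e8b3ebecbf363990f409b45b/AfriSentiClassification.json"
)
data = json.loads(path.read_text())

for entry in data["scores"]["test"]:
    recomputed = fmean(run["accuracy"] for run in entry["scores_per_experiment"])
    print(f"{entry['hf_subset']}: stored={entry['accuracy']:.5f} mean_of_runs={recomputed:.5f}")
```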
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/AlloProfClusteringS2S.v2.json
ADDED
@@ -0,0 +1,34 @@
{
  "dataset_revision": "392ba3f5bcc8c51f578786c1fc3dae648662cb9b",
  "evaluation_time": 69.57186484336853,
  "kg_co2_emissions": 0.003869526806573268,
  "mteb_version": "1.18.0",
  "scores": {
    "test": [
      {
        "hf_subset": "default",
        "languages": ["fra-Latn"],
        "main_score": 0.5750644814745391,
        "v_measure": 0.5750644814745391,
        "v_measure_std": 0.016989046338955083,
        "v_measures": {
          "Level 0": [
            0.5994381023709426, 0.535340900948235, 0.57983132574561, 0.567432951515727, 0.5835125091070451,
            0.5891465729499775, 0.5852441770031983, 0.56318707844079, 0.5661569823205506, 0.5813542143433149
          ]
        }
      }
    ]
  },
  "task_name": "AlloProfClusteringS2S.v2"
}
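Clustering results additionally keep the individual per-run v-measures under v_measures; here v_measure and v_measure_std look like the mean and the population standard deviation of the ten "Level 0" values. A small check, assuming that convention:

```python
# Compare the stored v_measure / v_measure_std against the mean and population
# standard deviation (ddof=0) of the per-run "Level 0" v-measures.
import json
from pathlib import Path
from statistics import fmean, pstdev

path = Path(
    "results/Alibaba-NLP__gte-Qwen2-7B-instruct/"
    "e26182b2122f4435e8b3ebecbf363990f409b45b/AlloProfClusteringS2S.v2.json"
)
entry = json.loads(path.read_text())["scores"]["test"][0]
runs = entry["v_measures"]["Level 0"]

print("mean of runs  :", fmean(runs), "stored v_measure    :", entry["v_measure"])
print("pstdev of runs:", pstdev(runs), "stored v_measure_std:", entry["v_measure_std"])
```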
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/AlloprofReranking.json
ADDED
@@ -0,0 +1,26 @@
{
  "dataset_revision": "65393d0d7a08a10b4e348135e824f385d420b0fd",
  "evaluation_time": 410.71631264686584,
  "kg_co2_emissions": 0.039784753360187844,
  "mteb_version": "1.18.0",
  "scores": {
    "test": [
      {
        "hf_subset": "default",
        "languages": ["fra-Latn"],
        "main_score": 0.8109504605331439,
        "map": 0.8109504605331439,
        "mrr": 0.8221603681772076,
        "nAUC_map_diff1": 0.6136238421407625,
        "nAUC_map_max": 0.24895078996827502,
        "nAUC_map_std": 0.22855379586267022,
        "nAUC_mrr_diff1": 0.6083804831904698,
        "nAUC_mrr_max": 0.24806073204422838,
        "nAUC_mrr_std": 0.21841228664905984
      }
    ]
  },
  "task_name": "AlloprofReranking"
}
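The commit message also mentions adding an overview table to the README. The aggregation script itself is not part of this diff, but given the results/<model>/<revision>/<Task>.json layout shown above, a hypothetical sketch of collecting one headline score per task could look like this:

```python
# Hypothetical aggregation sketch (not the script used for the README table):
# walk results/<model>/<revision>/<Task>.json and collect main_score per task.
import json
from collections import defaultdict
from pathlib import Path

overview = defaultdict(dict)  # (model, revision) -> {task_name: main_score}

for path in Path("results").glob("*/*/*.json"):
    model, revision = path.parts[1], path.parts[2]
    data = json.loads(path.read_text())
    splits = data.get("scores", {})
    # Prefer the "test" split; otherwise fall back to whichever split exists.
    entries = splits.get("test") or next(iter(splits.values()), [])
    if entries:
        overview[(model, revision)][data["task_name"]] = entries[0]["main_score"]

for (model, revision), task_scores in sorted(overview.items()):
    print(f"{model} @ {revision[:8]}: {len(task_scores)} tasks scored")
```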
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/AmazonCounterfactualClassification.json
ADDED
@@ -0,0 +1,685 @@
{
  "dataset_revision": "e8379541af4e31359cca9fbcf4b00f2671dba205",
  "evaluation_time": 352.4132328033447,
  "kg_co2_emissions": 0.028789806143806357,
  "mteb_version": "1.18.0",
  "scores": {
    "test": [
      {
        "accuracy": 0.9304347826086955,
        "ap": 0.5538738229694485,
        "ap_weighted": 0.5538738229694485,
        "f1": 0.8422386973276866,
        "f1_weighted": 0.9356129367904652,
        "hf_subset": "en-ext",
        "languages": ["eng-Latn"],
        "main_score": 0.9304347826086955,
        "scores_per_experiment": [
          {"accuracy": 0.9332833583208395, "ap": 0.564697941024296, "ap_weighted": 0.564697941024296, "f1": 0.847308550520542, "f1_weighted": 0.9380072488593175},
          {"accuracy": 0.9287856071964018, "ap": 0.5481393116391444, "ap_weighted": 0.5481393116391444, "f1": 0.8394869865781078, "f1_weighted": 0.9342601959981264},
          {"accuracy": 0.9340329835082459, "ap": 0.564964118565976, "ap_weighted": 0.564964118565976, "f1": 0.8478471628172226, "f1_weighted": 0.9384967909543945},
          {"accuracy": 0.9227886056971514, "ap": 0.5275471699678661, "ap_weighted": 0.5275471699678661, "f1": 0.8293963026470504, "f1_weighted": 0.9293175508951829},
          {"accuracy": 0.9287856071964018, "ap": 0.5481393116391444, "ap_weighted": 0.5481393116391444, "f1": 0.8394869865781078, "f1_weighted": 0.9342601959981264},
          {"accuracy": 0.9302848575712144, "ap": 0.5535473488457427, "ap_weighted": 0.5535473488457427, "f1": 0.8420692842057828, "f1_weighted": 0.9355052375396804},
          {"accuracy": 0.9340329835082459, "ap": 0.5675578079301945, "ap_weighted": 0.5675578079301945, "f1": 0.8486342630797161, "f1_weighted": 0.9386353097588853},
          {"accuracy": 0.9287856071964018, "ap": 0.5481393116391444, "ap_weighted": 0.5481393116391444, "f1": 0.8394869865781078, "f1_weighted": 0.9342601959981264},
          {"accuracy": 0.9347826086956522, "ap": 0.567866596803832, "ap_weighted": 0.567866596803832, "f1": 0.8491834636941202, "f1_weighted": 0.9391264459046851},
          {"accuracy": 0.9287856071964018, "ap": 0.5481393116391444, "ap_weighted": 0.5481393116391444, "f1": 0.8394869865781078, "f1_weighted": 0.9342601959981264}
        ]
      },
      {
        "accuracy": 0.9132835820895522,
        "ap": 0.6768363465454691,
        "ap_weighted": 0.6768363465454691,
        "f1": 0.8755259305015682,
        "f1_weighted": 0.9172679993615311,
        "hf_subset": "en",
        "languages": ["eng-Latn"],
        "main_score": 0.9132835820895522,
        "scores_per_experiment": [
          {"accuracy": 0.9134328358208955, "ap": 0.6778674592752055, "ap_weighted": 0.6778674592752055, "f1": 0.8760351669665302, "f1_weighted": 0.9174977998268049},
          {"accuracy": 0.9134328358208955, "ap": 0.6762470227119968, "ap_weighted": 0.6762470227119968, "f1": 0.8754487179487179, "f1_weighted": 0.9173339073861463},
          {"accuracy": 0.9164179104477612, "ap": 0.684167042467418, "ap_weighted": 0.684167042467418, "f1": 0.8791688673047444, "f1_weighted": 0.9200226565583757},
          {"accuracy": 0.9164179104477612, "ap": 0.684167042467418, "ap_weighted": 0.684167042467418, "f1": 0.8791688673047444, "f1_weighted": 0.9200226565583757},
          {"accuracy": 0.9134328358208955, "ap": 0.6778674592752055, "ap_weighted": 0.6778674592752055, "f1": 0.8760351669665302, "f1_weighted": 0.9174977998268049},
          {"accuracy": 0.917910447761194, "ap": 0.6866841550949404, "ap_weighted": 0.6866841550949404, "f1": 0.8804638715432649, "f1_weighted": 0.9212057464683717},
          {"accuracy": 0.9149253731343283, "ap": 0.6817758982367015, "ap_weighted": 0.6817758982367015, "f1": 0.8778853932512414, "f1_weighted": 0.918840167756118},
          {"accuracy": 0.9194029850746268, "ap": 0.6907962179455366, "ap_weighted": 0.6907962179455366, "f1": 0.8823506458032544, "f1_weighted": 0.9225563756509139},
          {"accuracy": 0.9134328358208955, "ap": 0.6778674592752055, "ap_weighted": 0.6778674592752055, "f1": 0.8760351669665302, "f1_weighted": 0.9174977998268049},
          {"accuracy": 0.8940298507462686, "ap": 0.6309237087050641, "ap_weighted": 0.6309237087050641, "f1": 0.8526674409601239, "f1_weighted": 0.9002050837565945}
        ]
      },
      {
        "accuracy": 0.7562098501070664,
        "ap": 0.865224865907753,
        "ap_weighted": 0.865224865907753,
        "f1": 0.7448635914709125,
        "f1_weighted": 0.7657890777339263,
        "hf_subset": "de",
        "languages": ["deu-Latn"],
        "main_score": 0.7562098501070664,
        "scores_per_experiment": [
          {"accuracy": 0.7366167023554604, "ap": 0.8658356730424517, "ap_weighted": 0.8658356730424517, "f1": 0.7288574827234622, "f1_weighted": 0.7468313712380912},
          {"accuracy": 0.7601713062098501, "ap": 0.8615397035616177, "ap_weighted": 0.8615397035616177, "f1": 0.7471441071678921, "f1_weighted": 0.7696344602308951},
          {"accuracy": 0.7708779443254818, "ap": 0.8661425893722701, "ap_weighted": 0.8661425893722701, "f1": 0.7571585474903775, "f1_weighted": 0.7797770125428468},
          {"accuracy": 0.7462526766595289, "ap": 0.8672066901980827, "ap_weighted": 0.8672066901980827, "f1": 0.7372377665958, "f1_weighted": 0.7563097728577929},
          {"accuracy": 0.7633832976445396, "ap": 0.8664372474932578, "ap_weighted": 0.8664372474932578, "f1": 0.7511606291447812, "f1_weighted": 0.7727717241733396},
          {"accuracy": 0.7676659528907923, "ap": 0.864761410125968, "ap_weighted": 0.864761410125968, "f1": 0.7541494065344889, "f1_weighted": 0.7767387031847494},
          {"accuracy": 0.7698072805139187, "ap": 0.8613766225874082, "ap_weighted": 0.8613766225874082, "f1": 0.7548092479197065, "f1_weighted": 0.7785723644975492},
          {"accuracy": 0.7526766595289079, "ap": 0.8636019402824784, "ap_weighted": 0.8636019402824784, "f1": 0.7416450826061809, "f1_weighted": 0.7625650678586061},
          {"accuracy": 0.7462526766595289, "ap": 0.868139122942615, "ap_weighted": 0.868139122942615, "f1": 0.7374521862379391, "f1_weighted": 0.7562883236315173},
          {"accuracy": 0.7483940042826552, "ap": 0.8672076594713818, "ap_weighted": 0.8672076594713818, "f1": 0.7390214582884973, "f1_weighted": 0.7584019771238748}
        ]
      },
      {
        "accuracy": 0.8358672376873661,
        "ap": 0.29600689334785024,
        "ap_weighted": 0.29600689334785024,
        "f1": 0.6942400840274383,
        "f1_weighted": 0.8594875714422647,
        "hf_subset": "ja",
        "languages": ["jpn-Jpan"],
        "main_score": 0.8358672376873661,
        "scores_per_experiment": [
          {"accuracy": 0.8211991434689507, "ap": 0.280433639401744, "ap_weighted": 0.280433639401744, "f1": 0.6795290878645615, "f1_weighted": 0.848803131759017},
          {"accuracy": 0.8511777301927195, "ap": 0.31060668537916936, "ap_weighted": 0.31060668537916936, "f1": 0.7088757184149685, "f1_weighted": 0.8705726445085232},
          {"accuracy": 0.8490364025695931, "ap": 0.30757096249050525, "ap_weighted": 0.30757096249050525, "f1": 0.7064038969580095, "f1_weighted": 0.8689742796980939},
          {"accuracy": 0.828693790149893, "ap": 0.2853113160929006, "ap_weighted": 0.2853113160929006, "f1": 0.6856912101224929, "f1_weighted": 0.8541164710436531},
          {"accuracy": 0.8565310492505354, "ap": 0.31069059336675103, "ap_weighted": 0.31069059336675103, "f1": 0.7116965207135486, "f1_weighted": 0.8740337113093857},
          {"accuracy": 0.860813704496788, "ap": 0.321180170978037, "ap_weighted": 0.321180170978037, "f1": 0.7185834144532517, "f1_weighted": 0.8775214795320228},
          {"accuracy": 0.8340471092077087, "ap": 0.29911821093973595, "ap_weighted": 0.29911821093973595, "f1": 0.6947036556690813, "f1_weighted": 0.8585591905750427},
          {"accuracy": 0.8137044967880086, "ap": 0.27947407162177396, "ap_weighted": 0.27947407162177396, "f1": 0.6751343312224142, "f1_weighted": 0.84369017195958},
          {"accuracy": 0.8179871520342612, "ap": 0.2840631811142551, "ap_weighted": 0.2840631811142551, "f1": 0.6794654392764858, "f1_weighted": 0.8468646426417454},
          {"accuracy": 0.8254817987152034, "ap": 0.28162010209362986, "ap_weighted": 0.28162010209362986, "f1": 0.6823175655795688, "f1_weighted": 0.851739991395583}
        ]
      }
    ],
    "validation": [
      {
        "accuracy": 0.9325825825825825,
        "ap": 0.5494992841460222,
        "ap_weighted": 0.5494992841460222,
        "f1": 0.8416363221613246,
        "f1_weighted": 0.9374979822845108,
        "hf_subset": "en-ext",
        "languages": ["eng-Latn"],
        "main_score": 0.9325825825825825,
        "scores_per_experiment": [
          {"accuracy": 0.9354354354354354, "ap": 0.5592630268165635, "ap_weighted": 0.5592630268165635, "f1": 0.8464508034550982, "f1_weighted": 0.9398232378013297},
          {"accuracy": 0.9309309309309309, "ap": 0.542028831659625, "ap_weighted": 0.542028831659625, "f1": 0.838366571699905, "f1_weighted": 0.9360733953326547},
          {"accuracy": 0.9354354354354354, "ap": 0.5592630268165635, "ap_weighted": 0.5592630268165635, "f1": 0.8464508034550982, "f1_weighted": 0.9398232378013297},
          {"accuracy": 0.9219219219219219, "ap": 0.5164316613381965, "ap_weighted": 0.5164316613381965, "f1": 0.8246836083831123, "f1_weighted": 0.9289795414529645},
          {"accuracy": 0.9309309309309309, "ap": 0.542028831659625, "ap_weighted": 0.542028831659625, "f1": 0.838366571699905, "f1_weighted": 0.9360733953326547},
          {"accuracy": 0.9309309309309309, "ap": 0.542028831659625, "ap_weighted": 0.542028831659625, "f1": 0.838366571699905, "f1_weighted": 0.9360733953326547},
          {"accuracy": 0.9354354354354354, "ap": 0.5592630268165635, "ap_weighted": 0.5592630268165635, "f1": 0.8464508034550982, "f1_weighted": 0.9398232378013297},
          {"accuracy": 0.9309309309309309, "ap": 0.542028831659625, "ap_weighted": 0.542028831659625, "f1": 0.838366571699905, "f1_weighted": 0.9360733953326547},
          {"accuracy": 0.9429429429429429, "ap": 0.5906279413742099, "ap_weighted": 0.5906279413742099, "f1": 0.8604943443653121, "f1_weighted": 0.9461635913248817},
          {"accuracy": 0.9309309309309309, "ap": 0.542028831659625, "ap_weighted": 0.542028831659625, "f1": 0.838366571699905, "f1_weighted": 0.9360733953326547}
        ]
      },
      {
        "accuracy": 0.8853731343283583,
        "ap": 0.5681645722055897,
        "ap_weighted": 0.5681645722055897

(The rendered view of this file stops here; per the hunk header, the full added file has 685 lines.)
|
434 |
+
"f1": 0.8279835406995065,
|
435 |
+
"f1_weighted": 0.8929259033207757,
|
436 |
+
"hf_subset": "en",
|
437 |
+
"languages": [
|
438 |
+
"eng-Latn"
|
439 |
+
],
|
440 |
+
"main_score": 0.8853731343283583,
|
441 |
+
"scores_per_experiment": [
|
442 |
+
{
|
443 |
+
"accuracy": 0.8805970149253731,
|
444 |
+
"ap": 0.5600114902631987,
|
445 |
+
"ap_weighted": 0.5600114902631987,
|
446 |
+
"f1": 0.8230882974228981,
|
447 |
+
"f1_weighted": 0.8890276122555789
|
448 |
+
},
|
449 |
+
{
|
450 |
+
"accuracy": 0.8895522388059701,
|
451 |
+
"ap": 0.5745353691313547,
|
452 |
+
"ap_weighted": 0.5745353691313547,
|
453 |
+
"f1": 0.8320665501497108,
|
454 |
+
"f1_weighted": 0.8962980083931842
|
455 |
+
},
|
456 |
+
{
|
457 |
+
"accuracy": 0.8895522388059701,
|
458 |
+
"ap": 0.5745353691313547,
|
459 |
+
"ap_weighted": 0.5745353691313547,
|
460 |
+
"f1": 0.8320665501497108,
|
461 |
+
"f1_weighted": 0.8962980083931842
|
462 |
+
},
|
463 |
+
{
|
464 |
+
"accuracy": 0.8895522388059701,
|
465 |
+
"ap": 0.5745353691313547,
|
466 |
+
"ap_weighted": 0.5745353691313547,
|
467 |
+
"f1": 0.8320665501497108,
|
468 |
+
"f1_weighted": 0.8962980083931842
|
469 |
+
},
|
470 |
+
{
|
471 |
+
"accuracy": 0.8805970149253731,
|
472 |
+
"ap": 0.5600114902631987,
|
473 |
+
"ap_weighted": 0.5600114902631987,
|
474 |
+
"f1": 0.8230882974228981,
|
475 |
+
"f1_weighted": 0.8890276122555789
|
476 |
+
},
|
477 |
+
{
|
478 |
+
"accuracy": 0.8955223880597015,
|
479 |
+
"ap": 0.5885515677830837,
|
480 |
+
"ap_weighted": 0.5885515677830837,
|
481 |
+
"f1": 0.839429753084729,
|
482 |
+
"f1_weighted": 0.9014716069206834
|
483 |
+
},
|
484 |
+
{
|
485 |
+
"accuracy": 0.8865671641791045,
|
486 |
+
"ap": 0.5677836636832657,
|
487 |
+
"ap_weighted": 0.5677836636832657,
|
488 |
+
"f1": 0.8284366576819406,
|
489 |
+
"f1_weighted": 0.8937216880556785
|
490 |
+
},
|
491 |
+
{
|
492 |
+
"accuracy": 0.8895522388059701,
|
493 |
+
"ap": 0.5745353691313547,
|
494 |
+
"ap_weighted": 0.5745353691313547,
|
495 |
+
"f1": 0.8320665501497108,
|
496 |
+
"f1_weighted": 0.8962980083931842
|
497 |
+
},
|
498 |
+
{
|
499 |
+
"accuracy": 0.8805970149253731,
|
500 |
+
"ap": 0.5600114902631987,
|
501 |
+
"ap_weighted": 0.5600114902631987,
|
502 |
+
"f1": 0.8230882974228981,
|
503 |
+
"f1_weighted": 0.8890276122555789
|
504 |
+
},
|
505 |
+
{
|
506 |
+
"accuracy": 0.8716417910447761,
|
507 |
+
"ap": 0.547134543274533,
|
508 |
+
"ap_weighted": 0.547134543274533,
|
509 |
+
"f1": 0.8144379033608574,
|
510 |
+
"f1_weighted": 0.8817908678919231
|
511 |
+
}
|
512 |
+
]
|
513 |
+
},
|
514 |
+
{
|
515 |
+
"accuracy": 0.7894849785407725,
|
516 |
+
"ap": 0.8803517957249118,
|
517 |
+
"ap_weighted": 0.8803517957249118,
|
518 |
+
"f1": 0.7758745619198354,
|
519 |
+
"f1_weighted": 0.7975723112289664,
|
520 |
+
"hf_subset": "de",
|
521 |
+
"languages": [
|
522 |
+
"deu-Latn"
|
523 |
+
],
|
524 |
+
"main_score": 0.7894849785407725,
|
525 |
+
"scores_per_experiment": [
|
526 |
+
{
|
527 |
+
"accuracy": 0.7682403433476395,
|
528 |
+
"ap": 0.8811992160884499,
|
529 |
+
"ap_weighted": 0.8811992160884499,
|
530 |
+
"f1": 0.758410138248848,
|
531 |
+
"f1_weighted": 0.7776522418464824
|
532 |
+
},
|
533 |
+
{
|
534 |
+
"accuracy": 0.7982832618025751,
|
535 |
+
"ap": 0.8811462328040602,
|
536 |
+
"ap_weighted": 0.8811462328040602,
|
537 |
+
"f1": 0.7834401202341401,
|
538 |
+
"f1_weighted": 0.8058264976816158
|
539 |
+
},
|
540 |
+
{
|
541 |
+
"accuracy": 0.7982832618025751,
|
542 |
+
"ap": 0.8793608700797947,
|
543 |
+
"ap_weighted": 0.8793608700797947,
|
544 |
+
"f1": 0.782912082466052,
|
545 |
+
"f1_weighted": 0.8057209292234734
|
546 |
+
},
|
547 |
+
{
|
548 |
+
"accuracy": 0.778969957081545,
|
549 |
+
"ap": 0.878306744480772,
|
550 |
+
"ap_weighted": 0.878306744480772,
|
551 |
+
"f1": 0.7666681250516512,
|
552 |
+
"f1_weighted": 0.7878226773273567
|
553 |
+
},
|
554 |
+
{
|
555 |
+
"accuracy": 0.8004291845493562,
|
556 |
+
"ap": 0.8838705238672475,
|
557 |
+
"ap_weighted": 0.8838705238672475,
|
558 |
+
"f1": 0.7860010369602252,
|
559 |
+
"f1_weighted": 0.8079413605668376
|
560 |
+
},
|
561 |
+
{
|
562 |
+
"accuracy": 0.8004291845493562,
|
563 |
+
"ap": 0.8802792554181756,
|
564 |
+
"ap_weighted": 0.8802792554181756,
|
565 |
+
"f1": 0.7849561605906783,
|
566 |
+
"f1_weighted": 0.8077324518578523
|
567 |
+
},
|
568 |
+
{
|
569 |
+
"accuracy": 0.796137339055794,
|
570 |
+
"ap": 0.8714792787369158,
|
571 |
+
"ap_weighted": 0.8714792787369158,
|
572 |
+
"f1": 0.7786444661116527,
|
573 |
+
"f1_weighted": 0.8032146082622021
|
574 |
+
},
|
575 |
+
{
|
576 |
+
"accuracy": 0.7982832618025751,
|
577 |
+
"ap": 0.886612319562003,
|
578 |
+
"ap_weighted": 0.886612319562003,
|
579 |
+
"f1": 0.7849582719685813,
|
580 |
+
"f1_weighted": 0.8060944627397438
|
581 |
+
},
|
582 |
+
{
|
583 |
+
"accuracy": 0.7725321888412017,
|
584 |
+
"ap": 0.8773862538669406,
|
585 |
+
"ap_weighted": 0.8773862538669406,
|
586 |
+
"f1": 0.7610857452406748,
|
587 |
+
"f1_weighted": 0.7817342317357431
|
588 |
+
},
|
589 |
+
{
|
590 |
+
"accuracy": 0.7832618025751072,
|
591 |
+
"ap": 0.8838772623447595,
|
592 |
+
"ap_weighted": 0.8838772623447595,
|
593 |
+
"f1": 0.771669472325851,
|
594 |
+
"f1_weighted": 0.7919836510483573
|
595 |
+
}
|
596 |
+
]
|
597 |
+
},
|
598 |
+
{
|
599 |
+
"accuracy": 0.8489270386266095,
|
600 |
+
"ap": 0.31653126580452356,
|
601 |
+
"ap_weighted": 0.31653126580452356,
|
602 |
+
"f1": 0.7093006675126466,
|
603 |
+
"f1_weighted": 0.8708958262399417,
|
604 |
+
"hf_subset": "ja",
|
605 |
+
"languages": [
|
606 |
+
"jpn-Jpan"
|
607 |
+
],
|
608 |
+
"main_score": 0.8489270386266095,
|
609 |
+
"scores_per_experiment": [
|
610 |
+
{
|
611 |
+
"accuracy": 0.8347639484978541,
|
612 |
+
"ap": 0.3105440581221563,
|
613 |
+
"ap_weighted": 0.3105440581221563,
|
614 |
+
"f1": 0.698943676743269,
|
615 |
+
"f1_weighted": 0.8612337139196742
|
616 |
+
},
|
617 |
+
{
|
618 |
+
"accuracy": 0.8605150214592274,
|
619 |
+
"ap": 0.3228235185585102,
|
620 |
+
"ap_weighted": 0.3228235185585102,
|
621 |
+
"f1": 0.7184813420697987,
|
622 |
+
"f1_weighted": 0.878966526636706
|
623 |
+
},
|
624 |
+
{
|
625 |
+
"accuracy": 0.8583690987124464,
|
626 |
+
"ap": 0.31132419549101764,
|
627 |
+
"ap_weighted": 0.31132419549101764,
|
628 |
+
"f1": 0.7123779598249355,
|
629 |
+
"f1_weighted": 0.8768378572464085
|
630 |
+
},
|
631 |
+
{
|
632 |
+
"accuracy": 0.8433476394849786,
|
633 |
+
"ap": 0.32193732709877404,
|
634 |
+
"ap_weighted": 0.32193732709877404,
|
635 |
+
"f1": 0.7084179759486402,
|
636 |
+
"f1_weighted": 0.8676094401839416
|
637 |
+
},
|
638 |
+
{
|
639 |
+
"accuracy": 0.8669527896995708,
|
640 |
+
"ap": 0.31660325233593817,
|
641 |
+
"ap_weighted": 0.31660325233593817,
|
642 |
+
"f1": 0.7192552860696517,
|
643 |
+
"f1_weighted": 0.8826838847607456
|
644 |
+
},
|
645 |
+
{
|
646 |
+
"accuracy": 0.8755364806866953,
|
647 |
+
"ap": 0.3314087013937732,
|
648 |
+
"ap_weighted": 0.3314087013937732,
|
649 |
+
"f1": 0.7302379431491537,
|
650 |
+
"f1_weighted": 0.8891316654855297
|
651 |
+
},
|
652 |
+
{
|
653 |
+
"accuracy": 0.851931330472103,
|
654 |
+
"ap": 0.3261263930222403,
|
655 |
+
"ap_weighted": 0.3261263930222403,
|
656 |
+
"f1": 0.7150150229998139,
|
657 |
+
"f1_weighted": 0.8735496948098328
|
658 |
+
},
|
659 |
+
{
|
660 |
+
"accuracy": 0.8304721030042919,
|
661 |
+
"ap": 0.3051609998442131,
|
662 |
+
"ap_weighted": 0.3051609998442131,
|
663 |
+
"f1": 0.6943247394860298,
|
664 |
+
"f1_weighted": 0.8580517940063837
|
665 |
+
},
|
666 |
+
{
|
667 |
+
"accuracy": 0.8240343347639485,
|
668 |
+
"ap": 0.297446885079839,
|
669 |
+
"ap_weighted": 0.297446885079839,
|
670 |
+
"f1": 0.6875347528865339,
|
671 |
+
"f1_weighted": 0.8532842451662517
|
672 |
+
},
|
673 |
+
{
|
674 |
+
"accuracy": 0.8433476394849786,
|
675 |
+
"ap": 0.32193732709877404,
|
676 |
+
"ap_weighted": 0.32193732709877404,
|
677 |
+
"f1": 0.7084179759486402,
|
678 |
+
"f1_weighted": 0.8676094401839416
|
679 |
+
}
|
680 |
+
]
|
681 |
+
}
|
682 |
+
]
|
683 |
+
},
|
684 |
+
"task_name": "AmazonCounterfactualClassification"
|
685 |
+
}
|
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/ArXivHierarchicalClusteringP2P.json
ADDED
@@ -0,0 +1,46 @@
+{
+  "dataset_revision": "0bbdb47bcbe3a90093699aefeed338a0f28a7ee8",
+  "evaluation_time": 45.555355072021484,
+  "kg_co2_emissions": 0.0018720042012877254,
+  "mteb_version": "1.18.0",
+  "scores": {
+    "test": [
+      {
+        "hf_subset": "default",
+        "languages": [
+          "eng-Latn"
+        ],
+        "main_score": 0.681929207141039,
+        "v_measure": 0.681929207141039,
+        "v_measure_std": 0.07225266477429353,
+        "v_measures": {
+          "Level 0": [
+            0.7546364823028016,
+            0.7626260245454501,
+            0.7470885131666014,
+            0.7384479332176149,
+            0.7270104745768311,
+            0.7707537422114561,
+            0.7605832529767644,
+            0.7390076367277514,
+            0.7761204221040745,
+            0.7548712369247051
+          ],
+          "Level 1": [
+            0.6079794816992645,
+            0.5940890870804622,
+            0.596815674840517,
+            0.6203666755313353,
+            0.6220351572758381,
+            0.6238251403480186,
+            0.6042593314628849,
+            0.6150524815904298,
+            0.6132259714514102,
+            0.6097894227865677
+          ]
+        }
+      }
+    ]
+  },
+  "task_name": "ArXivHierarchicalClusteringP2P"
+}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/ArXivHierarchicalClusteringS2S.json
ADDED
@@ -0,0 +1,46 @@
+{
+  "dataset_revision": "b73bd54100e5abfa6e3a23dcafb46fe4d2438dc3",
+  "evaluation_time": 46.06328725814819,
+  "kg_co2_emissions": 0.0013632233210812876,
+  "mteb_version": "1.18.0",
+  "scores": {
+    "test": [
+      {
+        "hf_subset": "default",
+        "languages": [
+          "eng-Latn"
+        ],
+        "main_score": 0.6484694347666395,
+        "v_measure": 0.6484694347666395,
+        "v_measure_std": 0.05640974380427167,
+        "v_measures": {
+          "Level 0": [
+            0.7235792496159184,
+            0.7132639596320951,
+            0.7231253199080615,
+            0.7249634172443954,
+            0.6885975630559426,
+            0.726093120717297,
+            0.6939083913830572,
+            0.6582305799034276,
+            0.6765054925525877,
+            0.692698528156315
+          ],
+          "Level 1": [
+            0.5842514702273333,
+            0.5978773484717144,
+            0.5748578237256415,
+            0.613060567312002,
+            0.5983809077977338,
+            0.5896824526560589,
+            0.5967159435176793,
+            0.6080497550648092,
+            0.5936733404086417,
+            0.591873463982077
+          ]
+        }
+      }
+    ]
+  },
+  "task_name": "ArXivHierarchicalClusteringS2S"
+}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/ArguAna.json
ADDED
@@ -0,0 +1,158 @@
1 |
+
{
|
2 |
+
"dataset_revision": "c22ab2a51041ffd869aaddef7af8d8215647e41a",
|
3 |
+
"evaluation_time": 951.7596683502197,
|
4 |
+
"kg_co2_emissions": 0.09263817488977462,
|
5 |
+
"mteb_version": "1.18.0",
|
6 |
+
"scores": {
|
7 |
+
"test": [
|
8 |
+
{
|
9 |
+
"hf_subset": "default",
|
10 |
+
"languages": [
|
11 |
+
"eng-Latn"
|
12 |
+
],
|
13 |
+
"main_score": 0.54565,
|
14 |
+
"map_at_1": 0.31294,
|
15 |
+
"map_at_10": 0.46035,
|
16 |
+
"map_at_100": 0.47053,
|
17 |
+
"map_at_1000": 0.47056,
|
18 |
+
"map_at_20": 0.46889,
|
19 |
+
"map_at_3": 0.41145,
|
20 |
+
"map_at_5": 0.43851,
|
21 |
+
"mrr_at_1": 0.3150782361308677,
|
22 |
+
"mrr_at_10": 0.46120650725913914,
|
23 |
+
"mrr_at_100": 0.47138569676213693,
|
24 |
+
"mrr_at_1000": 0.47141646449230296,
|
25 |
+
"mrr_at_20": 0.4697460888891961,
|
26 |
+
"mrr_at_3": 0.41216216216216234,
|
27 |
+
"mrr_at_5": 0.43936699857752515,
|
28 |
+
"nauc_map_at_1000_diff1": 0.2286933815986634,
|
29 |
+
"nauc_map_at_1000_max": -0.031127014711469888,
|
30 |
+
"nauc_map_at_1000_std": -0.16684606034423152,
|
31 |
+
"nauc_map_at_100_diff1": 0.22869524424763388,
|
32 |
+
"nauc_map_at_100_max": -0.031080251410193334,
|
33 |
+
"nauc_map_at_100_std": -0.16686239646660395,
|
34 |
+
"nauc_map_at_10_diff1": 0.22758236789859138,
|
35 |
+
"nauc_map_at_10_max": -0.02949207084704569,
|
36 |
+
"nauc_map_at_10_std": -0.16469305495308986,
|
37 |
+
"nauc_map_at_1_diff1": 0.25026888600462033,
|
38 |
+
"nauc_map_at_1_max": -0.07178601561166408,
|
39 |
+
"nauc_map_at_1_std": -0.17408016835155235,
|
40 |
+
"nauc_map_at_20_diff1": 0.22895339181387592,
|
41 |
+
"nauc_map_at_20_max": -0.02959852334002841,
|
42 |
+
"nauc_map_at_20_std": -0.16685577976472074,
|
43 |
+
"nauc_map_at_3_diff1": 0.22537685351120187,
|
44 |
+
"nauc_map_at_3_max": -0.0323826901159538,
|
45 |
+
"nauc_map_at_3_std": -0.1693394058007679,
|
46 |
+
"nauc_map_at_5_diff1": 0.2241579784383877,
|
47 |
+
"nauc_map_at_5_max": -0.030983305471005335,
|
48 |
+
"nauc_map_at_5_std": -0.16866244876632336,
|
49 |
+
"nauc_mrr_at_1000_diff1": 0.22204972463526157,
|
50 |
+
"nauc_mrr_at_1000_max": -0.03298386574317435,
|
51 |
+
"nauc_mrr_at_1000_std": -0.16628657377601674,
|
52 |
+
"nauc_mrr_at_100_diff1": 0.2220522463820602,
|
53 |
+
"nauc_mrr_at_100_max": -0.032936953327508664,
|
54 |
+
"nauc_mrr_at_100_std": -0.16630295312329985,
|
55 |
+
"nauc_mrr_at_10_diff1": 0.22120132128996917,
|
56 |
+
"nauc_mrr_at_10_max": -0.031263669259484886,
|
57 |
+
"nauc_mrr_at_10_std": -0.16414025878780175,
|
58 |
+
"nauc_mrr_at_1_diff1": 0.2438771876530539,
|
59 |
+
"nauc_mrr_at_1_max": -0.06998155295029498,
|
60 |
+
"nauc_mrr_at_1_std": -0.17324792921846088,
|
61 |
+
"nauc_mrr_at_20_diff1": 0.2223400118168156,
|
62 |
+
"nauc_mrr_at_20_max": -0.031446956427028036,
|
63 |
+
"nauc_mrr_at_20_std": -0.16629774520006327,
|
64 |
+
"nauc_mrr_at_3_diff1": 0.21766214026207303,
|
65 |
+
"nauc_mrr_at_3_max": -0.03561242789784182,
|
66 |
+
"nauc_mrr_at_3_std": -0.16882920970572063,
|
67 |
+
"nauc_mrr_at_5_diff1": 0.21797073985000423,
|
68 |
+
"nauc_mrr_at_5_max": -0.03268933363748191,
|
69 |
+
"nauc_mrr_at_5_std": -0.16814517465863682,
|
70 |
+
"nauc_ndcg_at_1000_diff1": 0.22665400917717615,
|
71 |
+
"nauc_ndcg_at_1000_max": -0.0226640118518625,
|
72 |
+
"nauc_ndcg_at_1000_std": -0.1629214422208459,
|
73 |
+
"nauc_ndcg_at_100_diff1": 0.22655639818806642,
|
74 |
+
"nauc_ndcg_at_100_max": -0.02151896285119171,
|
75 |
+
"nauc_ndcg_at_100_std": -0.16308953645652344,
|
76 |
+
"nauc_ndcg_at_10_diff1": 0.22356021087900935,
|
77 |
+
"nauc_ndcg_at_10_max": -0.010135171505378224,
|
78 |
+
"nauc_ndcg_at_10_std": -0.15443785792495143,
|
79 |
+
"nauc_ndcg_at_1_diff1": 0.25026888600462033,
|
80 |
+
"nauc_ndcg_at_1_max": -0.07178601561166408,
|
81 |
+
"nauc_ndcg_at_1_std": -0.17408016835155235,
|
82 |
+
"nauc_ndcg_at_20_diff1": 0.22855892095292815,
|
83 |
+
"nauc_ndcg_at_20_max": -0.009326413662931186,
|
84 |
+
"nauc_ndcg_at_20_std": -0.1622544501530113,
|
85 |
+
"nauc_ndcg_at_3_diff1": 0.21935070695766123,
|
86 |
+
"nauc_ndcg_at_3_max": -0.019017557090745558,
|
87 |
+
"nauc_ndcg_at_3_std": -0.16719669403196924,
|
88 |
+
"nauc_ndcg_at_5_diff1": 0.2169401620915874,
|
89 |
+
"nauc_ndcg_at_5_max": -0.01467406602806841,
|
90 |
+
"nauc_ndcg_at_5_std": -0.16576917788755632,
|
91 |
+
"nauc_precision_at_1000_diff1": -0.04249093618360151,
|
92 |
+
"nauc_precision_at_1000_max": 0.152996011292648,
|
93 |
+
"nauc_precision_at_1000_std": 0.5532908402747854,
|
94 |
+
"nauc_precision_at_100_diff1": 0.0850547562806181,
|
95 |
+
"nauc_precision_at_100_max": 0.2876600359822113,
|
96 |
+
"nauc_precision_at_100_std": 0.11369193117720497,
|
97 |
+
"nauc_precision_at_10_diff1": 0.20505390902773002,
|
98 |
+
"nauc_precision_at_10_max": 0.09860282145509164,
|
99 |
+
"nauc_precision_at_10_std": -0.08892198164639387,
|
100 |
+
"nauc_precision_at_1_diff1": 0.25026888600462033,
|
101 |
+
"nauc_precision_at_1_max": -0.07178601561166408,
|
102 |
+
"nauc_precision_at_1_std": -0.17408016835155235,
|
103 |
+
"nauc_precision_at_20_diff1": 0.25041676316333766,
|
104 |
+
"nauc_precision_at_20_max": 0.3045363579979527,
|
105 |
+
"nauc_precision_at_20_std": -0.09617709847999481,
|
106 |
+
"nauc_precision_at_3_diff1": 0.20213091085406856,
|
107 |
+
"nauc_precision_at_3_max": 0.02079988238855567,
|
108 |
+
"nauc_precision_at_3_std": -0.16065088867646998,
|
109 |
+
"nauc_precision_at_5_diff1": 0.1929252711562353,
|
110 |
+
"nauc_precision_at_5_max": 0.04250762647429577,
|
111 |
+
"nauc_precision_at_5_std": -0.1554234399764316,
|
112 |
+
"nauc_recall_at_1000_diff1": -0.04249093618364137,
|
113 |
+
"nauc_recall_at_1000_max": 0.1529960112926506,
|
114 |
+
"nauc_recall_at_1000_std": 0.5532908402747452,
|
115 |
+
"nauc_recall_at_100_diff1": 0.08505475628059786,
|
116 |
+
"nauc_recall_at_100_max": 0.2876600359822007,
|
117 |
+
"nauc_recall_at_100_std": 0.11369193117714292,
|
118 |
+
"nauc_recall_at_10_diff1": 0.20505390902772958,
|
119 |
+
"nauc_recall_at_10_max": 0.0986028214550914,
|
120 |
+
"nauc_recall_at_10_std": -0.08892198164639517,
|
121 |
+
"nauc_recall_at_1_diff1": 0.25026888600462033,
|
122 |
+
"nauc_recall_at_1_max": -0.07178601561166408,
|
123 |
+
"nauc_recall_at_1_std": -0.17408016835155235,
|
124 |
+
"nauc_recall_at_20_diff1": 0.25041676316333916,
|
125 |
+
"nauc_recall_at_20_max": 0.30453635799795414,
|
126 |
+
"nauc_recall_at_20_std": -0.09617709847999148,
|
127 |
+
"nauc_recall_at_3_diff1": 0.20213091085406912,
|
128 |
+
"nauc_recall_at_3_max": 0.020799882388555863,
|
129 |
+
"nauc_recall_at_3_std": -0.16065088867646968,
|
130 |
+
"nauc_recall_at_5_diff1": 0.1929252711562358,
|
131 |
+
"nauc_recall_at_5_max": 0.042507626474296144,
|
132 |
+
"nauc_recall_at_5_std": -0.15542343997643077,
|
133 |
+
"ndcg_at_1": 0.31294,
|
134 |
+
"ndcg_at_10": 0.54565,
|
135 |
+
"ndcg_at_100": 0.5864,
|
136 |
+
"ndcg_at_1000": 0.58707,
|
137 |
+
"ndcg_at_20": 0.57629,
|
138 |
+
"ndcg_at_3": 0.44402,
|
139 |
+
"ndcg_at_5": 0.49287,
|
140 |
+
"precision_at_1": 0.31294,
|
141 |
+
"precision_at_10": 0.08201,
|
142 |
+
"precision_at_100": 0.00991,
|
143 |
+
"precision_at_1000": 0.001,
|
144 |
+
"precision_at_20": 0.04701,
|
145 |
+
"precision_at_3": 0.17947,
|
146 |
+
"precision_at_5": 0.13144,
|
147 |
+
"recall_at_1": 0.31294,
|
148 |
+
"recall_at_10": 0.82006,
|
149 |
+
"recall_at_100": 0.99147,
|
150 |
+
"recall_at_1000": 0.99644,
|
151 |
+
"recall_at_20": 0.94026,
|
152 |
+
"recall_at_3": 0.53841,
|
153 |
+
"recall_at_5": 0.65718
|
154 |
+
}
|
155 |
+
]
|
156 |
+
},
|
157 |
+
"task_name": "ArguAna"
|
158 |
+
}
|
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/ArmenianParaphrasePC.json
ADDED
@@ -0,0 +1,58 @@
+{
+  "dataset_revision": "f43b4f32987048043a8b31e5e26be4d360c2438f",
+  "evaluation_time": 177.86594676971436,
+  "kg_co2_emissions": 0.01676962653409378,
+  "mteb_version": "1.18.0",
+  "scores": {
+    "test": [
+      {
+        "cosine_accuracy": 0.8965986394557823,
+        "cosine_accuracy_threshold": 0.6267118453979492,
+        "cosine_ap": 0.9256920739644772,
+        "cosine_f1": 0.9302752293577982,
+        "cosine_f1_threshold": 0.6148819923400879,
+        "cosine_precision": 0.8748921484037964,
+        "cosine_recall": 0.9931439764936337,
+        "dot_accuracy": 0.8965986394557823,
+        "dot_accuracy_threshold": 0.6267120242118835,
+        "dot_ap": 0.9256917562562058,
+        "dot_f1": 0.9302752293577982,
+        "dot_f1_threshold": 0.6148821115493774,
+        "dot_precision": 0.8748921484037964,
+        "dot_recall": 0.9931439764936337,
+        "euclidean_accuracy": 0.8965986394557823,
+        "euclidean_accuracy_threshold": 0.8640101552009583,
+        "euclidean_ap": 0.9256917562562058,
+        "euclidean_f1": 0.9302752293577982,
+        "euclidean_f1_threshold": 0.8776306509971619,
+        "euclidean_precision": 0.8748921484037964,
+        "euclidean_recall": 0.9931439764936337,
+        "hf_subset": "default",
+        "languages": [
+          "hye-Armn"
+        ],
+        "main_score": 0.9265075872104955,
+        "manhattan_accuracy": 0.8972789115646258,
+        "manhattan_accuracy_threshold": 38.7757568359375,
+        "manhattan_ap": 0.9265075872104955,
+        "manhattan_f1": 0.9307021569527306,
+        "manhattan_f1_threshold": 39.96601104736328,
+        "manhattan_precision": 0.8756476683937824,
+        "manhattan_recall": 0.9931439764936337,
+        "max_accuracy": 0.8972789115646258,
+        "max_ap": 0.9265075872104955,
+        "max_f1": 0.9307021569527306,
+        "max_precision": 0.8756476683937824,
+        "max_recall": 0.9931439764936337,
+        "similarity_accuracy": 0.8965986394557823,
+        "similarity_accuracy_threshold": 0.6267118453979492,
+        "similarity_ap": 0.9256920739644772,
+        "similarity_f1": 0.9302752293577982,
+        "similarity_f1_threshold": 0.6148819923400879,
+        "similarity_precision": 0.8748921484037964,
+        "similarity_recall": 0.9931439764936337
+      }
+    ]
+  },
+  "task_name": "ArmenianParaphrasePC"
+}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/BUCC.v2.json
ADDED
@@ -0,0 +1,59 @@
+{
+  "dataset_revision": "1739dc11ffe9b7bfccd7f3d585aeb4c544fc6677",
+  "evaluation_time": 1129.315413236618,
+  "kg_co2_emissions": 0.10436803931435228,
+  "mteb_version": "1.18.0",
+  "scores": {
+    "test": [
+      {
+        "accuracy": 0.9958246346555324,
+        "f1": 0.9945024356297841,
+        "hf_subset": "de-en",
+        "languages": [
+          "deu-Latn",
+          "eng-Latn"
+        ],
+        "main_score": 0.9945024356297841,
+        "precision": 0.9938413361169103,
+        "recall": 0.9958246346555324
+      },
+      {
+        "accuracy": 0.9907550077041603,
+        "f1": 0.9877467165602758,
+        "hf_subset": "fr-en",
+        "languages": [
+          "fra-Latn",
+          "eng-Latn"
+        ],
+        "main_score": 0.9877467165602758,
+        "precision": 0.9862425709883337,
+        "recall": 0.9907550077041603
+      },
+      {
+        "accuracy": 0.9800484932455836,
+        "f1": 0.9739106338759959,
+        "hf_subset": "ru-en",
+        "languages": [
+          "rus-Cyrl",
+          "eng-Latn"
+        ],
+        "main_score": 0.9739106338759959,
+        "precision": 0.9708867336335296,
+        "recall": 0.9800484932455836
+      },
+      {
+        "accuracy": 0.9942074776197999,
+        "f1": 0.9922766368264,
+        "hf_subset": "zh-en",
+        "languages": [
+          "cmn-Hans",
+          "eng-Latn"
+        ],
+        "main_score": 0.9922766368264,
+        "precision": 0.9913112164296999,
+        "recall": 0.9942074776197999
+      }
+    ]
+  },
+  "task_name": "BUCC.v2"
+}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/BelebeleRetrieval.json
ADDED
The diff for this file is too large to render.
See raw diff
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/BibleNLPBitextMining.json
ADDED
The diff for this file is too large to render.
See raw diff
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/BigPatentClustering.v2.json
ADDED
@@ -0,0 +1,34 @@
+{
+  "dataset_revision": "58a863a958586a5d6ba51088b94ac74a46aa864f",
+  "evaluation_time": 37.04621124267578,
+  "kg_co2_emissions": 0.0018524731407994774,
+  "mteb_version": "1.18.0",
+  "scores": {
+    "test": [
+      {
+        "hf_subset": "default",
+        "languages": [
+          "eng-Latn"
+        ],
+        "main_score": 0.2879449921867366,
+        "v_measure": 0.2879449921867366,
+        "v_measure_std": 0.03995189788950775,
+        "v_measures": {
+          "Level 0": [
+            0.27926691756671307,
+            0.2054046875809588,
+            0.2860227791674624,
+            0.23011871922582547,
+            0.34473027995464833,
+            0.3066212802637793,
+            0.3021808878293252,
+            0.32367882520007846,
+            0.28754490979101427,
+            0.31388063528756066
+          ]
+        }
+      }
+    ]
+  },
+  "task_name": "BigPatentClustering.v2"
+}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/BiorxivClusteringP2P.v2.json
ADDED
@@ -0,0 +1,34 @@
+{
+  "dataset_revision": "f5dbc242e11dd8e24def4c4268607a49e02946dc",
+  "evaluation_time": 352.80616521835327,
+  "kg_co2_emissions": 0.029541158024354048,
+  "mteb_version": "1.18.0",
+  "scores": {
+    "test": [
+      {
+        "hf_subset": "default",
+        "languages": [
+          "eng-Latn"
+        ],
+        "main_score": 0.5324389713638713,
+        "v_measure": 0.5324389713638713,
+        "v_measure_std": 0.004321808724403448,
+        "v_measures": {
+          "Level 0": [
+            0.538266542302606,
+            0.5277275783520335,
+            0.536466486704368,
+            0.5297569158880998,
+            0.53429639398093,
+            0.5261347581520012,
+            0.5394250356083765,
+            0.5326314473952563,
+            0.5289237740670015,
+            0.5307607811880404
+          ]
+        }
+      }
+    ]
+  },
+  "task_name": "BiorxivClusteringP2P.v2"
+}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/BornholmBitextMining.json
ADDED
@@ -0,0 +1,22 @@
+{
+  "dataset_revision": "3bc5cfb4ec514264fe2db5615fac9016f7251552",
+  "evaluation_time": 17.779478549957275,
+  "kg_co2_emissions": 0.0015814270998781685,
+  "mteb_version": "1.18.0",
+  "scores": {
+    "test": [
+      {
+        "accuracy": 0.564,
+        "f1": 0.5016380952380952,
+        "hf_subset": "default",
+        "languages": [
+          "dan-Latn"
+        ],
+        "main_score": 0.5016380952380952,
+        "precision": 0.4761222222222222,
+        "recall": 0.564
+      }
+    ]
+  },
+  "task_name": "BornholmBitextMining"
+}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/BrazilianToxicTweetsClassification.json
ADDED
@@ -0,0 +1,73 @@
+{
+  "dataset_revision": "f333c1fcfa3ab43f008a327c8bd0140441354d34",
+  "evaluation_time": 60.98862099647522,
+  "kg_co2_emissions": 0.00554059896901255,
+  "mteb_version": "1.18.0",
+  "scores": {
+    "test": [
+      {
+        "accuracy": 0.257861328125,
+        "f1": 0.1709170149464855,
+        "hf_subset": "default",
+        "languages": [
+          "por-Latn"
+        ],
+        "lrap": 0.7975124782986118,
+        "main_score": 0.257861328125,
+        "scores_per_experiment": [
+          {
+            "accuracy": 0.16845703125,
+            "f1": 0.18128135677703963,
+            "lrap": 0.8056233723958339
+          },
+          {
+            "accuracy": 0.1708984375,
+            "f1": 0.17819909856590266,
+            "lrap": 0.7755805121527786
+          },
+          {
+            "accuracy": 0.1884765625,
+            "f1": 0.16233409542055455,
+            "lrap": 0.8379448784722227
+          },
+          {
+            "accuracy": 0.31103515625,
+            "f1": 0.1740491433378385,
+            "lrap": 0.7801920572916675
+          },
+          {
+            "accuracy": 0.3984375,
+            "f1": 0.17938509605176275,
+            "lrap": 0.7632921006944453
+          },
+          {
+            "accuracy": 0.27783203125,
+            "f1": 0.16177159341635883,
+            "lrap": 0.8261990017361116
+          },
+          {
+            "accuracy": 0.19970703125,
+            "f1": 0.16873802356110665,
+            "lrap": 0.7766655815972232
+          },
+          {
+            "accuracy": 0.228515625,
+            "f1": 0.15617685211757085,
+            "lrap": 0.817355685763889
+          },
+          {
+            "accuracy": 0.39697265625,
+            "f1": 0.18795459233841064,
+            "lrap": 0.7765435112847232
+          },
+          {
+            "accuracy": 0.23828125,
+            "f1": 0.15928029787830977,
+            "lrap": 0.815728081597223
+          }
+        ]
+      }
+    ]
+  },
+  "task_name": "BrazilianToxicTweetsClassification"
+}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/BulgarianStoreReviewSentimentClassfication.json
ADDED
@@ -0,0 +1,73 @@
+{
+  "dataset_revision": "701984d6c6efea0e14a1c7850ef70e464c5577c0",
+  "evaluation_time": 51.376137018203735,
+  "kg_co2_emissions": 0.004514008822518244,
+  "mteb_version": "1.18.0",
+  "scores": {
+    "test": [
+      {
+        "accuracy": 0.7291208791208791,
+        "f1": 0.5297121413972887,
+        "f1_weighted": 0.7448143418572062,
+        "hf_subset": "default",
+        "languages": [
+          "bul-Cyrl"
+        ],
+        "main_score": 0.7291208791208791,
+        "scores_per_experiment": [
+          {
+            "accuracy": 0.6978021978021978,
+            "f1": 0.55089075063797,
+            "f1_weighted": 0.7445205475828038
+          },
+          {
+            "accuracy": 0.7747252747252747,
+            "f1": 0.5430989729790529,
+            "f1_weighted": 0.7722356638317427
+          },
+          {
+            "accuracy": 0.7417582417582418,
+            "f1": 0.548191287570419,
+            "f1_weighted": 0.7699617984347217
+          },
+          {
+            "accuracy": 0.8076923076923077,
+            "f1": 0.5433147975842474,
+            "f1_weighted": 0.7889985583127356
+          },
+          {
+            "accuracy": 0.6263736263736264,
+            "f1": 0.4347072879330944,
+            "f1_weighted": 0.6705528641012513
+          },
+          {
+            "accuracy": 0.6978021978021978,
+            "f1": 0.5289219243575515,
+            "f1_weighted": 0.7252835207991872
+          },
+          {
+            "accuracy": 0.7527472527472527,
+            "f1": 0.5354469507101085,
+            "f1_weighted": 0.7614317845896793
+          },
+          {
+            "accuracy": 0.7472527472527473,
+            "f1": 0.565494505562998,
+            "f1_weighted": 0.7682377077775016
+          },
+          {
+            "accuracy": 0.7417582417582418,
+            "f1": 0.570857041251778,
+            "f1_weighted": 0.7329752395541869
+          },
+          {
+            "accuracy": 0.7032967032967034,
+            "f1": 0.47619789538566776,
+            "f1_weighted": 0.7139457335882525
+          }
+        ]
+      }
+    ]
+  },
+  "task_name": "BulgarianStoreReviewSentimentClassfication"
+}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/CEDRClassification.json
ADDED
@@ -0,0 +1,73 @@
+{
+  "dataset_revision": "c0ba03d058e3e1b2f3fd20518875a4563dd12db4",
+  "evaluation_time": 66.53453516960144,
+  "kg_co2_emissions": 0.006086084994675389,
+  "mteb_version": "1.18.0",
+  "scores": {
+    "test": [
+      {
+        "accuracy": 0.5307120085015941,
+        "f1": 0.5917230343930433,
+        "hf_subset": "default",
+        "languages": [
+          "rus-Cyrl"
+        ],
+        "lrap": 0.8592109458023435,
+        "main_score": 0.5307120085015941,
+        "scores_per_experiment": [
+          {
+            "accuracy": 0.5642933049946866,
+            "f1": 0.6070292680114152,
+            "lrap": 0.8628055260361375
+          },
+          {
+            "accuracy": 0.5765143464399575,
+            "f1": 0.5857093544461771,
+            "lrap": 0.8492029755579235
+          },
+          {
+            "accuracy": 0.6036131774707758,
+            "f1": 0.62644976226418,
+            "lrap": 0.8632837407013871
+          },
+          {
+            "accuracy": 0.48831030818278426,
+            "f1": 0.5777644438978535,
+            "lrap": 0.8585547290116954
+          },
+          {
+            "accuracy": 0.5090329436769394,
+            "f1": 0.6004348584160987,
+            "lrap": 0.8719978746014928
+          },
+          {
+            "accuracy": 0.5302869287991498,
+            "f1": 0.5946970891286132,
+            "lrap": 0.8562167906482523
+          },
+          {
+            "accuracy": 0.5297555791710946,
+            "f1": 0.5857774068968041,
+            "lrap": 0.8560042507970299
+          },
+          {
+            "accuracy": 0.5074388947927736,
+            "f1": 0.5779301896872774,
+            "lrap": 0.8629383634431511
+          },
+          {
+            "accuracy": 0.5132837407013815,
+            "f1": 0.5894929342611266,
+            "lrap": 0.849787460148784
+          },
+          {
+            "accuracy": 0.48459086078639746,
+            "f1": 0.5719450369208869,
+            "lrap": 0.8613177470775822
+          }
+        ]
+      }
+    ]
+  },
+  "task_name": "CEDRClassification"
+}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/CLSClusteringP2P.v2.json
ADDED
@@ -0,0 +1,34 @@
+{
+  "dataset_revision": "4b6227591c6c1a73bc76b1055f3b7f3588e72476",
+  "evaluation_time": 215.09249591827393,
+  "kg_co2_emissions": 0.017260182451138098,
+  "mteb_version": "1.18.0",
+  "scores": {
+    "test": [
+      {
+        "hf_subset": "default",
+        "languages": [
+          "cmn-Hans"
+        ],
+        "main_score": 0.48419272227510957,
+        "v_measure": 0.48419272227510957,
+        "v_measure_std": 0.012018814651294568,
+        "v_measures": {
+          "Level 0": [
+            0.4925874969369356,
+            0.4823297695996668,
+            0.4811471614609842,
+            0.47204957848373275,
+            0.4978429058449318,
+            0.4660273291033593,
+            0.46881414008640165,
+            0.49996784860819815,
+            0.49866050572723347,
+            0.48250048689965225
+          ]
+        }
+      }
+    ]
+  },
+  "task_name": "CLSClusteringP2P.v2"
+}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/CSFDSKMovieReviewSentimentClassification.json
ADDED
@@ -0,0 +1,73 @@
+{
+  "dataset_revision": "23a20c659d868740ef9c54854de631fe19cd5c17",
+  "evaluation_time": 307.13925337791443,
+  "kg_co2_emissions": 0.02930775210545278,
+  "mteb_version": "1.18.0",
+  "scores": {
+    "test": [
+      {
+        "accuracy": 0.3787109375,
+        "f1": 0.36280243164217146,
+        "f1_weighted": 0.36518776208657266,
+        "hf_subset": "default",
+        "languages": [
+          "slk-Latn"
+        ],
+        "main_score": 0.3787109375,
+        "scores_per_experiment": [
+          {
+            "accuracy": 0.376953125,
+            "f1": 0.35726554413878625,
+            "f1_weighted": 0.35973595058927843
+          },
+          {
+            "accuracy": 0.40771484375,
+            "f1": 0.380275253112823,
+            "f1_weighted": 0.3837369374400642
+          },
+          {
+            "accuracy": 0.37353515625,
+            "f1": 0.3486461376901155,
+            "f1_weighted": 0.35181051442039696
+          },
+          {
+            "accuracy": 0.373046875,
+            "f1": 0.3656428855105605,
+            "f1_weighted": 0.3676624018713957
+          },
+          {
+            "accuracy": 0.36962890625,
+            "f1": 0.3499566503629401,
+            "f1_weighted": 0.3517410359101369
+          },
+          {
+            "accuracy": 0.36572265625,
+            "f1": 0.3408223066978333,
+            "f1_weighted": 0.3435367528603117
+          },
+          {
+            "accuracy": 0.36865234375,
+            "f1": 0.3406808074036305,
+            "f1_weighted": 0.34365865849364546
+          },
+          {
+            "accuracy": 0.39208984375,
+            "f1": 0.3921873024823534,
+            "f1_weighted": 0.3938835861516945
+          },
+          {
+            "accuracy": 0.38232421875,
+            "f1": 0.3717309727232007,
+            "f1_weighted": 0.37455797511329647
+          },
+          {
+            "accuracy": 0.37744140625,
+            "f1": 0.3808164562994714,
+            "f1_weighted": 0.3815538080155059
+          }
+        ]
+      }
+    ]
+  },
+  "task_name": "CSFDSKMovieReviewSentimentClassification"
+}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/CTKFactsNLI.json
ADDED
@@ -0,0 +1,107 @@
+{
+  "dataset_revision": "387ae4582c8054cb52ef57ef0941f19bd8012abf",
+  "evaluation_time": 46.08272576332092,
+  "kg_co2_emissions": 0.004427805719562056,
+  "mteb_version": "1.18.0",
+  "scores": {
+    "test": [
+      {
+        "cosine_accuracy": 0.808,
+        "cosine_accuracy_threshold": 0.5067664980888367,
+        "cosine_ap": 0.8978736155852208,
+        "cosine_f1": 0.8646748681898067,
+        "cosine_f1_threshold": 0.4422992765903473,
+        "cosine_precision": 0.7961165048543689,
+        "cosine_recall": 0.9461538461538461,
+        "dot_accuracy": 0.808,
+        "dot_accuracy_threshold": 0.506766676902771,
+        "dot_ap": 0.8978736155852208,
+        "dot_f1": 0.8646748681898067,
+        "dot_f1_threshold": 0.44229966402053833,
+        "dot_precision": 0.7961165048543689,
+        "dot_recall": 0.9461538461538461,
+        "euclidean_accuracy": 0.808,
+        "euclidean_accuracy_threshold": 0.993209958076477,
+        "euclidean_ap": 0.8978736155852208,
+        "euclidean_f1": 0.8646748681898067,
+        "euclidean_f1_threshold": 1.0561256408691406,
+        "euclidean_precision": 0.7961165048543689,
+        "euclidean_recall": 0.9461538461538461,
+        "hf_subset": "default",
+        "languages": [
+          "ces-Latn"
+        ],
+        "main_score": 0.8978736155852208,
+        "manhattan_accuracy": 0.808,
+        "manhattan_accuracy_threshold": 46.84822082519531,
+        "manhattan_ap": 0.8969834247414092,
+        "manhattan_f1": 0.8641509433962264,
+        "manhattan_f1_threshold": 46.84822082519531,
+        "manhattan_precision": 0.8481481481481481,
+        "manhattan_recall": 0.8807692307692307,
+        "max_accuracy": 0.808,
+        "max_ap": 0.8978736155852208,
+        "max_f1": 0.8646748681898067,
+        "max_precision": 0.8481481481481481,
+        "max_recall": 0.9461538461538461,
+        "similarity_accuracy": 0.808,
+        "similarity_accuracy_threshold": 0.5067664980888367,
+        "similarity_ap": 0.8978736155852208,
+        "similarity_f1": 0.8646748681898067,
+        "similarity_f1_threshold": 0.4422992765903473,
+        "similarity_precision": 0.7961165048543689,
+        "similarity_recall": 0.9461538461538461
+      }
+    ],
+    "validation": [
+      {
+        "cosine_accuracy": 0.7573770491803279,
+        "cosine_accuracy_threshold": 0.5435369610786438,
+        "cosine_ap": 0.8571540610834577,
+        "cosine_f1": 0.8082901554404145,
+        "cosine_f1_threshold": 0.5428924560546875,
+        "cosine_precision": 0.8,
+        "cosine_recall": 0.8167539267015707,
+        "dot_accuracy": 0.7573770491803279,
+        "dot_accuracy_threshold": 0.5435371398925781,
+        "dot_ap": 0.8571540610834577,
+        "dot_f1": 0.8082901554404145,
+        "dot_f1_threshold": 0.5428921580314636,
+        "dot_precision": 0.8,
+        "dot_recall": 0.8167539267015707,
+        "euclidean_accuracy": 0.7573770491803279,
+        "euclidean_accuracy_threshold": 0.9554716348648071,
+        "euclidean_ap": 0.8571540610834577,
+        "euclidean_f1": 0.8082901554404145,
+        "euclidean_f1_threshold": 0.9561457633972168,
+        "euclidean_precision": 0.8,
+        "euclidean_recall": 0.8167539267015707,
+        "hf_subset": "default",
+        "languages": [
+          "ces-Latn"
+        ],
+        "main_score": 0.8580929104433286,
+        "manhattan_accuracy": 0.760655737704918,
+        "manhattan_accuracy_threshold": 44.14118576049805,
+        "manhattan_ap": 0.8580929104433286,
+        "manhattan_f1": 0.8102564102564103,
+        "manhattan_f1_threshold": 44.617271423339844,
+        "manhattan_precision": 0.7939698492462312,
+        "manhattan_recall": 0.8272251308900523,
+        "max_accuracy": 0.760655737704918,
+        "max_ap": 0.8580929104433286,
+        "max_f1": 0.8102564102564103,
+        "max_precision": 0.8,
+        "max_recall": 0.8272251308900523,
+        "similarity_accuracy": 0.7573770491803279,
+        "similarity_accuracy_threshold": 0.5435369610786438,
+        "similarity_ap": 0.8571540610834577,
+        "similarity_f1": 0.8082901554404145,
+        "similarity_f1_threshold": 0.5428924560546875,
+        "similarity_precision": 0.8,
+        "similarity_recall": 0.8167539267015707
+      }
+    ]
+  },
+  "task_name": "CTKFactsNLI"
+}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/CataloniaTweetClassification.json
ADDED
@@ -0,0 +1,261 @@
1 |
+
{
|
2 |
+
"dataset_revision": "cf24d44e517efa534f048e5fc5981f399ed25bee",
|
3 |
+
"evaluation_time": 336.5800883769989,
|
4 |
+
"kg_co2_emissions": 0.03109053805766521,
|
5 |
+
"mteb_version": "1.18.0",
|
6 |
+
"scores": {
|
7 |
+
"test": [
|
8 |
+
{
|
9 |
+
"accuracy": 0.5073908730158732,
|
10 |
+
"f1": 0.5216399719604181,
|
11 |
+
"f1_weighted": 0.5019810508331334,
|
12 |
+
"hf_subset": "spanish",
|
13 |
+
"languages": [
|
14 |
+
"spa-Latn"
|
15 |
+
],
|
16 |
+
"main_score": 0.5073908730158732,
|
17 |
+
"scores_per_experiment": [
|
18 |
+
{
|
19 |
+
"accuracy": 0.5471230158730159,
|
20 |
+
"f1": 0.5584995391336549,
|
21 |
+
"f1_weighted": 0.5474645500956254
|
22 |
+
},
|
23 |
+
{
|
24 |
+
"accuracy": 0.5461309523809523,
|
25 |
+
"f1": 0.5636652422110583,
|
26 |
+
"f1_weighted": 0.5426412248210002
|
27 |
+
},
|
28 |
+
{
|
29 |
+
"accuracy": 0.44146825396825395,
|
30 |
+
"f1": 0.4418344078378123,
|
31 |
+
"f1_weighted": 0.42992738352993765
|
32 |
+
},
|
33 |
+
{
|
34 |
+
"accuracy": 0.44047619047619047,
|
35 |
+
"f1": 0.45272955732776515,
|
36 |
+
"f1_weighted": 0.43299122401919643
|
37 |
+
},
|
38 |
+
{
|
39 |
+
"accuracy": 0.4895833333333333,
|
40 |
+
"f1": 0.49823311139018633,
|
41 |
+
"f1_weighted": 0.4807625196972007
|
42 |
+
},
|
43 |
+
{
|
44 |
+
"accuracy": 0.5267857142857143,
|
45 |
+
"f1": 0.5330990594550613,
|
46 |
+
"f1_weighted": 0.5252174754707422
|
47 |
+
},
|
48 |
+
{
|
49 |
+
"accuracy": 0.5520833333333334,
|
50 |
+
"f1": 0.5609355765746405,
|
51 |
+
"f1_weighted": 0.5367573368605056
|
52 |
+
},
|
53 |
+
{
|
54 |
+
"accuracy": 0.49007936507936506,
|
55 |
+
"f1": 0.49728991645107684,
|
56 |
+
"f1_weighted": 0.4844586553150564
|
57 |
+
},
|
58 |
+
{
|
59 |
+
"accuracy": 0.5486111111111112,
|
60 |
+
"f1": 0.5694911709887868,
|
61 |
+
"f1_weighted": 0.5453066955672065
|
62 |
+
},
|
63 |
+
{
|
64 |
+
"accuracy": 0.4915674603174603,
|
65 |
+
"f1": 0.5406221382341395,
|
66 |
+
"f1_weighted": 0.49428344295486354
|
67 |
+
}
|
68 |
+
]
|
69 |
+
},
|
70 |
+
{
|
71 |
+
"accuracy": 0.543134328358209,
|
72 |
+
"f1": 0.5412810032848061,
|
73 |
+
"f1_weighted": 0.5401635740228715,
|
74 |
+
"hf_subset": "catalan",
|
75 |
+
"languages": [
|
76 |
+
"cat-Latn"
|
77 |
+
],
|
78 |
+
"main_score": 0.543134328358209,
|
79 |
+
"scores_per_experiment": [
|
80 |
+
{
|
81 |
+
"accuracy": 0.5472636815920398,
|
82 |
+
"f1": 0.5441226660918383,
|
83 |
+
"f1_weighted": 0.5503893943718985
|
84 |
+
},
|
85 |
+
{
|
86 |
+
"accuracy": 0.5243781094527363,
|
87 |
+
"f1": 0.53548076310171,
|
88 |
+
"f1_weighted": 0.5202161657740486
|
89 |
+
},
|
90 |
+
{
|
91 |
+
"accuracy": 0.5845771144278606,
|
92 |
+
"f1": 0.5860390775204515,
|
93 |
+
"f1_weighted": 0.5816557708508682
|
94 |
+
},
|
95 |
+
{
|
96 |
+
"accuracy": 0.56318407960199,
|
97 |
+
"f1": 0.5620458637577862,
|
98 |
+
"f1_weighted": 0.5610003876132434
|
99 |
+
},
|
100 |
+
{
|
101 |
+
"accuracy": 0.5194029850746269,
|
102 |
+
"f1": 0.49022032242734,
|
103 |
+
"f1_weighted": 0.5095182789099585
|
104 |
+
},
|
105 |
+
{
|
106 |
+
"accuracy": 0.5293532338308458,
|
107 |
+
"f1": 0.5281256581066325,
|
108 |
+
"f1_weighted": 0.5274701767087412
|
109 |
+
},
|
110 |
+
{
|
111 |
+
"accuracy": 0.5701492537313433,
|
112 |
+
"f1": 0.5715991743619179,
|
113 |
+
"f1_weighted": 0.568654760668206
|
114 |
+
},
|
115 |
+
{
|
116 |
+
"accuracy": 0.4975124378109453,
|
117 |
+
"f1": 0.49755625382895435,
|
118 |
+
"f1_weighted": 0.4930645894802533
|
119 |
+
},
|
120 |
+
{
|
121 |
+
"accuracy": 0.5348258706467661,
|
122 |
+
"f1": 0.5361309106745549,
|
123 |
+
"f1_weighted": 0.5323540081479557
|
124 |
+
},
|
125 |
+
{
|
126 |
+
"accuracy": 0.5606965174129354,
|
127 |
+
"f1": 0.5614893429768765,
|
128 |
+
"f1_weighted": 0.5573122077035422
|
129 |
+
}
|
130 |
+
]
|
131 |
+
}
|
132 |
+
],
|
133 |
+
"validation": [
|
134 |
+
{
|
135 |
+
"accuracy": 0.5039205955334987,
|
136 |
+
"f1": 0.5178403916935721,
|
137 |
+
"f1_weighted": 0.49880297512672456,
|
138 |
+
"hf_subset": "spanish",
|
139 |
+
"languages": [
|
140 |
+
"spa-Latn"
|
141 |
+
],
|
142 |
+
"main_score": 0.5039205955334987,
|
143 |
+
"scores_per_experiment": [
|
144 |
+
{
|
145 |
+
"accuracy": 0.5493796526054591,
|
146 |
+
"f1": 0.5617115583231908,
|
147 |
+
"f1_weighted": 0.5494839083973598
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"accuracy": 0.5617866004962779,
|
151 |
+
"f1": 0.5803364613848666,
|
152 |
+
"f1_weighted": 0.5604497062682285
|
153 |
+
},
|
154 |
+
{
|
155 |
+
"accuracy": 0.43424317617866004,
|
156 |
+
"f1": 0.4357312197997952,
|
157 |
+
"f1_weighted": 0.4208664040377133
|
158 |
+
},
|
159 |
+
{
|
160 |
+
"accuracy": 0.4660049627791563,
|
161 |
+
"f1": 0.4708961922516706,
|
162 |
+
"f1_weighted": 0.45798087475610244
|
163 |
+
},
|
164 |
+
{
|
165 |
+
"accuracy": 0.4674937965260546,
|
166 |
+
"f1": 0.480416090595676,
|
167 |
+
"f1_weighted": 0.4596561284020734
|
168 |
+
},
|
169 |
+
{
|
170 |
+
"accuracy": 0.5230769230769231,
|
171 |
+
"f1": 0.5278684836287595,
|
172 |
+
"f1_weighted": 0.5218945055828049
|
173 |
+
},
|
174 |
+
{
|
175 |
+
"accuracy": 0.5196029776674937,
|
176 |
+
"f1": 0.5301582042020166,
|
177 |
+
"f1_weighted": 0.5046005888076389
|
178 |
+
},
|
179 |
+
{
|
180 |
+
"accuracy": 0.47096774193548385,
|
181 |
+
"f1": 0.4805601330885007,
|
182 |
+
"f1_weighted": 0.46326546615810577
|
183 |
+
},
|
184 |
+
{
|
185 |
+
"accuracy": 0.5558312655086849,
|
186 |
+
"f1": 0.5748648364012071,
|
187 |
+
"f1_weighted": 0.5552838078485394
|
188 |
+
},
|
189 |
+
{
|
190 |
+
"accuracy": 0.49081885856079405,
|
191 |
+
"f1": 0.5358607372600376,
|
192 |
+
"f1_weighted": 0.49454836100867927
|
193 |
+
}
|
194 |
+
]
|
195 |
+
},
|
196 |
+
{
|
197 |
+
"accuracy": 0.5390547263681592,
|
198 |
+
"f1": 0.5369145161859703,
|
199 |
+
"f1_weighted": 0.5368880320299417,
|
200 |
+
"hf_subset": "catalan",
|
201 |
+
"languages": [
|
202 |
+
"cat-Latn"
|
203 |
+
],
|
204 |
+
"main_score": 0.5390547263681592,
|
205 |
+
"scores_per_experiment": [
|
206 |
+
{
|
207 |
+
"accuracy": 0.5432835820895522,
|
208 |
+
"f1": 0.5414547222102367,
|
209 |
+
"f1_weighted": 0.5445585355677282
|
210 |
+
},
|
211 |
+
{
|
212 |
+
"accuracy": 0.5079601990049751,
|
213 |
+
"f1": 0.518856960139363,
|
214 |
+
"f1_weighted": 0.5042390613853022
|
215 |
+
},
|
216 |
+
{
|
217 |
+
"accuracy": 0.5781094527363184,
|
218 |
+
"f1": 0.5795040495897905,
|
219 |
+
"f1_weighted": 0.5752699181478103
|
220 |
+
},
|
221 |
+
{
|
222 |
+
"accuracy": 0.56318407960199,
|
223 |
+
"f1": 0.5628819713603804,
|
224 |
+
"f1_weighted": 0.5619599081939008
|
225 |
+
},
|
226 |
+
{
|
227 |
+
"accuracy": 0.5094527363184079,
|
228 |
+
"f1": 0.4731656405520552,
|
229 |
+
"f1_weighted": 0.502393804766539
|
230 |
+
},
|
231 |
+
{
|
232 |
+
"accuracy": 0.5358208955223881,
|
233 |
+
"f1": 0.5354592688601407,
|
234 |
+
"f1_weighted": 0.5355285976086468
|
235 |
+
},
|
236 |
+
{
|
237 |
+
"accuracy": 0.5577114427860697,
|
238 |
+
"f1": 0.562302951573555,
|
239 |
+
"f1_weighted": 0.5557599050814612
|
240 |
+
},
|
241 |
+
{
|
242 |
+
"accuracy": 0.4880597014925373,
|
243 |
+
"f1": 0.48769316860021567,
|
244 |
+
"f1_weighted": 0.4854664106149461
|
245 |
+
},
|
246 |
+
{
|
247 |
+
"accuracy": 0.5407960199004975,
|
248 |
+
"f1": 0.5413286001650545,
|
249 |
+
"f1_weighted": 0.5400113804552278
|
250 |
+
},
|
251 |
+
{
|
252 |
+
"accuracy": 0.5661691542288557,
|
253 |
+
"f1": 0.5664978288089113,
|
254 |
+
"f1_weighted": 0.5636927984778541
|
255 |
+
}
|
256 |
+
]
|
257 |
+
}
|
258 |
+
]
|
259 |
+
},
|
260 |
+
"task_name": "CataloniaTweetClassification"
|
261 |
+
}
|
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/CovidRetrieval.json
ADDED
@@ -0,0 +1,158 @@
{
    "dataset_revision": "1271c7809071a13532e05f25fb53511ffce77117",
    "evaluation_time": 9220.718297481537,
    "kg_co2_emissions": 0.9002823502535313,
    "mteb_version": "1.18.0",
    "scores": {
        "dev": [
            {
                "hf_subset": "default",
                "languages": ["cmn-Hans"],
                "main_score": 0.81041,
                "ndcg_at_10": 0.81041,
                "ndcg_at_100": 0.82411,
                "ndcg_at_1000": 0.82548,
                "map_at_10": 0.77098,
                "mrr_at_10": 0.7713579038921506,
                "precision_at_10": 0.0941,
                "recall_at_10": 0.93151,
                "recall_at_100": 0.98946,
                "recall_at_1000": 1.0
                [remaining map/mrr/ndcg/precision/recall cutoffs and nauc_* metrics omitted in this rendered view; see raw diff]
            }
        ]
    },
    "task_name": "CovidRetrieval"
}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/CyrillicTurkicLangClassification.json
ADDED
@@ -0,0 +1,81 @@
{
    "dataset_revision": "e42d330f33d65b7b72dfd408883daf1661f06f18",
    "evaluation_time": 99.5006628036499,
    "kg_co2_emissions": 0.0089220831691798,
    "mteb_version": "1.18.0",
    "scores": {
        "test": [
            {
                "accuracy": 0.384033203125,
                "f1": 0.36242713397186427,
                "f1_weighted": 0.36245176911147386,
                "hf_subset": "default",
                "languages": ["bak-Cyrl", "chv-Cyrl", "tat-Cyrl", "kir-Cyrl", "rus-Cyrl", "kaz-Cyrl", "tyv-Cyrl", "krc-Cyrl", "sah-Cyrl"],
                "main_score": 0.384033203125
                [scores_per_experiment omitted in this rendered view (10 runs, accuracy 0.350–0.423); see raw diff]
            }
        ]
    },
    "task_name": "CyrillicTurkicLangClassification"
}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/CzechProductReviewSentimentClassification.json
ADDED
@@ -0,0 +1,73 @@
{
    "dataset_revision": "2e6fedf42c9c104e83dfd95c3a453721e683e244",
    "evaluation_time": 134.51006937026978,
    "kg_co2_emissions": 0.012521920813826444,
    "mteb_version": "1.18.0",
    "scores": {
        "test": [
            {
                "accuracy": 0.55,
                "f1": 0.5417289119972084,
                "f1_weighted": 0.5416713146034206,
                "hf_subset": "default",
                "languages": ["ces-Latn"],
                "main_score": 0.55
                [scores_per_experiment omitted in this rendered view (10 runs, accuracy 0.501–0.589); see raw diff]
            }
        ]
    },
    "task_name": "CzechProductReviewSentimentClassification"
}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/DBpediaClassification.json
ADDED
@@ -0,0 +1,73 @@
{
    "dataset_revision": "9abd46cf7fc8b4c64290f26993c540b92aa145ac",
    "evaluation_time": 133.2152075767517,
    "kg_co2_emissions": 0.012255297516817688,
    "mteb_version": "1.18.0",
    "scores": {
        "test": [
            {
                "accuracy": 0.954638671875,
                "f1": 0.9538295598950448,
                "f1_weighted": 0.9538232483392111,
                "hf_subset": "default",
                "languages": ["eng-Latn"],
                "main_score": 0.954638671875
                [scores_per_experiment omitted in this rendered view (10 runs, accuracy 0.944–0.967); see raw diff]
            }
        ]
    },
    "task_name": "DBpediaClassification"
}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/DalajClassification.json
ADDED
@@ -0,0 +1,95 @@
{
    "dataset_revision": "7ebf0b4caa7b2ae39698a889de782c09e6f5ee56",
    "evaluation_time": 37.661991119384766,
    "kg_co2_emissions": 0.0031006173030195786,
    "mteb_version": "1.18.0",
    "scores": {
        "test": [
            {
                "accuracy": 0.5352477477477479,
                "ap": 0.5206473601882469,
                "ap_weighted": 0.5206473601882469,
                "f1": 0.5256406729207511,
                "f1_weighted": 0.5256406729207511,
                "hf_subset": "default",
                "languages": ["swe-Latn"],
                "main_score": 0.5352477477477479
                [scores_per_experiment omitted in this rendered view (10 runs, accuracy 0.456–0.563); see raw diff]
            }
        ]
    },
    "task_name": "DalajClassification"
}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/DiaBlaBitextMining.json
ADDED
@@ -0,0 +1,35 @@
{
    "dataset_revision": "5345895c56a601afe1a98519ce3199be60a27dba",
    "evaluation_time": 323.1442668437958,
    "kg_co2_emissions": 0.028681875459993,
    "mteb_version": "1.18.0",
    "scores": {
        "test": [
            {
                "accuracy": 0.8742171189979123,
                "f1": 0.8514633147722919,
                "hf_subset": "fr-en",
                "languages": ["fra-Latn", "eng-Latn"],
                "main_score": 0.8514633147722919,
                "precision": 0.8422302920345718,
                "recall": 0.8742171189979123
            },
            {
                "accuracy": 0.8742171189979123,
                "f1": 0.8514633147722919,
                "hf_subset": "en-fr",
                "languages": ["eng-Latn", "fra-Latn"],
                "main_score": 0.8514633147722919,
                "precision": 0.8422302920345718,
                "recall": 0.8742171189979123
            }
        ]
    },
    "task_name": "DiaBlaBitextMining"
}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/EstonianValenceClassification.json
ADDED
@@ -0,0 +1,73 @@
{
    "dataset_revision": "9157397f05a127b3ac93b93dd88abf1bdf710c22",
    "evaluation_time": 89.2679431438446,
    "kg_co2_emissions": 0.008138506389326146,
    "mteb_version": "1.18.0",
    "scores": {
        "test": [
            {
                "accuracy": 0.426161369193154,
                "f1": 0.3784671240487333,
                "f1_weighted": 0.4433601044459242,
                "hf_subset": "default",
                "languages": ["est-Latn"],
                "main_score": 0.426161369193154
                [scores_per_experiment omitted in this rendered view (10 runs, accuracy 0.374–0.476); see raw diff]
            }
        ]
    },
    "task_name": "EstonianValenceClassification"
}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/FaroeseSTS.json
ADDED
@@ -0,0 +1,26 @@
{
    "dataset_revision": "8cb36efa69428b3dc290e1125995a999963163c5",
    "evaluation_time": 24.739478826522827,
    "kg_co2_emissions": 0.002296686574845424,
    "mteb_version": "1.18.0",
    "scores": {
        "train": [
            {
                "cosine_pearson": 0.7261547567131743,
                "cosine_spearman": 0.7247030523460988,
                "euclidean_pearson": 0.7192279416000671,
                "euclidean_spearman": 0.7247030579578652,
                "hf_subset": "default",
                "languages": ["fao-Latn"],
                "main_score": 0.7247030523460988,
                "manhattan_pearson": 0.7291975521816488,
                "manhattan_spearman": 0.736299385947159,
                "pearson": 0.7261547567131743,
                "spearman": 0.7247030523460988
            }
        ]
    },
    "task_name": "FaroeseSTS"
}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/FilipinoShopeeReviewsClassification.json
ADDED
@@ -0,0 +1,137 @@
{
    "dataset_revision": "d096f402fdc76886458c0cfb5dedc829bea2b935",
    "evaluation_time": 158.7307903766632,
    "kg_co2_emissions": 0.014064496452949604,
    "mteb_version": "1.18.0",
    "scores": {
        "test": [
            {
                "accuracy": 0.386279296875,
                "f1": 0.3659579620828562,
                "f1_weighted": 0.3659591194401009,
                "hf_subset": "default",
                "languages": ["fil-Latn"],
                "main_score": 0.386279296875
                [scores_per_experiment omitted in this rendered view (10 runs, accuracy 0.327–0.430); see raw diff]
            }
        ],
        "validation": [
            {
                "accuracy": 0.386083984375,
                "f1": 0.36491845787714405,
                "f1_weighted": 0.36491763360979307,
                "hf_subset": "default",
                "languages": ["fil-Latn"],
                "main_score": 0.386083984375
                [scores_per_experiment omitted in this rendered view (10 runs, accuracy 0.335–0.416); see raw diff]
            }
        ]
    },
    "task_name": "FilipinoShopeeReviewsClassification"
}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/FinParaSTS.json
ADDED
@@ -0,0 +1,43 @@
{
    "dataset_revision": "e4428e399de70a21b8857464e76f0fe859cabe05",
    "evaluation_time": 81.72847199440002,
    "kg_co2_emissions": 0.007744690048168353,
    "mteb_version": "1.18.0",
    "scores": {
        "test": [
            {
                "cosine_pearson": 0.26129308455791184,
                "cosine_spearman": 0.2499385297583571,
                "hf_subset": "default",
                "languages": ["fin-Latn"],
                "main_score": 0.2499385297583571
                [euclidean and manhattan pearson/spearman omitted in this rendered view; see raw diff]
            }
        ],
        "validation": [
            {
                "cosine_pearson": 0.27013614814107684,
                "cosine_spearman": 0.26445252877126885,
                "hf_subset": "default",
                "languages": ["fin-Latn"],
                "main_score": 0.26445252877126885
                [euclidean and manhattan pearson/spearman omitted in this rendered view; see raw diff]
            }
        ]
    },
    "task_name": "FinParaSTS"
}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/FinancialPhrasebankClassification.json
ADDED
@@ -0,0 +1,73 @@
{
    "dataset_revision": "1484d06fe7af23030c7c977b12556108d1f67039",
    "evaluation_time": 63.24325442314148,
    "kg_co2_emissions": 0.00542104370138445,
    "mteb_version": "1.18.0",
    "scores": {
        "train": [
            {
                "accuracy": 0.7537985865724381,
                "f1": 0.7170248724964065,
                "f1_weighted": 0.7615990756420153,
                "hf_subset": "default",
                "languages": ["eng-Latn"],
                "main_score": 0.7537985865724381
                [scores_per_experiment omitted in this rendered view (10 runs, accuracy 0.636–0.821); see raw diff]
            }
        ]
    },
    "task_name": "FinancialPhrasebankClassification"
}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/FloresBitextMining.json
ADDED
The diff for this file is too large to render.
See raw diff
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/GermanSTSBenchmark.json
ADDED
@@ -0,0 +1,43 @@
{
    "dataset_revision": "e36907544d44c3a247898ed81540310442329e20",
    "evaluation_time": 93.49348282814026,
    "kg_co2_emissions": 0.008422138538230903,
    "mteb_version": "1.18.0",
    "scores": {
        "test": [
            {
                "cosine_pearson": 0.847445913096246,
                "cosine_spearman": 0.8461459187369128,
                "hf_subset": "default",
                "languages": ["deu-Latn"],
                "main_score": 0.8461459187369128
                [euclidean and manhattan pearson/spearman omitted in this rendered view; see raw diff]
            }
        ],
        "validation": [
            {
                "cosine_pearson": 0.8475683295605461,
                "cosine_spearman": 0.8489483031710026,
                "hf_subset": "default",
                "languages": ["deu-Latn"],
                "main_score": 0.8489483031710026
                [euclidean and manhattan pearson/spearman omitted in this rendered view; see raw diff]
            }
        ]
    },
    "task_name": "GermanSTSBenchmark"
}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/GreekLegalCodeClassification.json
ADDED
@@ -0,0 +1,137 @@
{
    "dataset_revision": "de0fdb34424f07d1ac6f0ede23ee0ed44bd9f5d1",
    "evaluation_time": 9870.178301811218,
    "kg_co2_emissions": 0.9085740703411566,
    "mteb_version": "1.18.0",
    "scores": {
        "test": [
            {
                "accuracy": 0.22021484375,
                "f1": 0.17350085819476033,
                "f1_weighted": 0.19374556903113624,
                "hf_subset": "default",
                "languages": ["ell-Grek"],
                "main_score": 0.22021484375
                [scores_per_experiment omitted in this rendered view (10 runs, accuracy 0.206–0.233); see raw diff]
            }
        ],
        "validation": [
            {
                "accuracy": 0.233251953125,
                "f1": 0.16612325981130974,
                "f1_weighted": 0.2092314472341919,
                "hf_subset": "default",
                "languages": ["ell-Grek"],
                "main_score": 0.233251953125
                [scores_per_experiment omitted in this rendered view (10 runs, accuracy 0.227–0.242); see raw diff]
            }
        ]
    },
    "task_name": "GreekLegalCodeClassification"
}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/GujaratiNewsClassification.json
ADDED
@@ -0,0 +1,73 @@
{
    "dataset_revision": "1a5f2fa2914bfeff4fcdc6fff4194fa8ec8fa19e",
    "evaluation_time": 67.71335864067078,
    "kg_co2_emissions": 0.005966830301404906,
    "mteb_version": "1.18.0",
    "scores": {
        "test": [
            {
                "accuracy": 0.8795902883156298,
                "f1": 0.858880586657641,
                "f1_weighted": 0.8809111445115556,
                "hf_subset": "default",
                "languages": ["guj-Gujr"],
                "main_score": 0.8795902883156298
                [scores_per_experiment omitted in this rendered view (10 runs, accuracy 0.859–0.896); see raw diff]
            }
        ]
    },
    "task_name": "GujaratiNewsClassification"
}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/HALClusteringS2S.v2.json
ADDED
@@ -0,0 +1,34 @@
{
    "dataset_revision": "e06ebbbb123f8144bef1a5d18796f3dec9ae2915",
    "evaluation_time": 62.18480968475342,
    "kg_co2_emissions": 0.004473789352205918,
    "mteb_version": "1.18.0",
    "scores": {
        "test": [
            {
                "hf_subset": "default",
                "languages": ["fra-Latn"],
                "main_score": 0.314552198945281,
                "v_measure": 0.314552198945281,
                "v_measure_std": 0.01434255621680581
                ["v_measures" ("Level 0") omitted in this rendered view (10 values, 0.290–0.338); see raw diff]
            }
        ]
    },
    "task_name": "HALClusteringS2S.v2"
}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/IN22GenBitextMining.json
ADDED
The diff for this file is too large to render.
See raw diff
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/IndicCrosslingualSTS.json
ADDED
@@ -0,0 +1,203 @@
{
    "dataset_revision": "0ca7b87dda68ef4ebb2f50a20a62b9dbebcac3e4",
    "evaluation_time": 238.41262125968933,
    "kg_co2_emissions": 0.02278846375454172,
    "mteb_version": "1.18.0",
    "scores": {
        "test": [
            12 English-Indic subsets; main_score is cosine_spearman:
            en-ur 0.2744837332307833, en-gu 0.5088424225044093, en-kn 0.3033226468182194,
            en-ml 0.4936526986837295, en-te 0.254179840566601, en-bn 0.6062606867884527,
            en-or 0.032465549285528396, en-pa 0.49042773373931275, en-mr 0.5155777810787209,
            en-hi 0.7239190839898189, en-ta 0.2876018252094678, en-as 0.5269755903395872
            [per-subset pearson and euclidean/manhattan correlations omitted in this rendered view; see raw diff]
        ]
    },
    "task_name": "IndicCrosslingualSTS"
}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/IndicGenBenchFloresBitextMining.json
ADDED
@@ -0,0 +1,1405 @@
{
  "dataset_revision": "f8650438298df086750ff4973661bb58a201a5ee",
  "evaluation_time": 9962.678290128708,
  "kg_co2_emissions": 0.961574595113728,
  "mteb_version": "1.18.0",
  "scores": {
    "test": [
      {"accuracy": 1.0, "f1": 1.0, "hf_subset": "ben-eng", "languages": ["ben-Beng", "eng-Latn"], "main_score": 1.0, "precision": 1.0, "recall": 1.0},
      {"accuracy": 1.0, "f1": 1.0, "hf_subset": "eng-ben", "languages": ["eng-Latn", "ben-Beng"], "main_score": 1.0, "precision": 1.0, "recall": 1.0},
      {"accuracy": 1.0, "f1": 1.0, "hf_subset": "guj-eng", "languages": ["guj-Gujr", "eng-Latn"], "main_score": 1.0, "precision": 1.0, "recall": 1.0},
      {"accuracy": 1.0, "f1": 1.0, "hf_subset": "eng-guj", "languages": ["eng-Latn", "guj-Gujr"], "main_score": 1.0, "precision": 1.0, "recall": 1.0},
      {"accuracy": 0.9990118577075099, "f1": 0.9986824769433466, "hf_subset": "hin-eng", "languages": ["hin-Deva", "eng-Latn"], "main_score": 0.9986824769433466, "precision": 0.9985177865612648, "recall": 0.9990118577075099},
      {"accuracy": 1.0, "f1": 1.0, "hf_subset": "eng-hin", "languages": ["eng-Latn", "hin-Deva"], "main_score": 1.0, "precision": 1.0, "recall": 1.0},
      {"accuracy": 0.9891304347826086, "f1": 0.9855072463768116, "hf_subset": "kan-eng", "languages": ["kan-Knda", "eng-Latn"], "main_score": 0.9855072463768116, "precision": 0.9836956521739131, "recall": 0.9891304347826086},
      {"accuracy": 0.991106719367589, "f1": 0.9881422924901185, "hf_subset": "eng-kan", "languages": ["eng-Latn", "kan-Knda"], "main_score": 0.9881422924901185, "precision": 0.9866600790513834, "recall": 0.991106719367589},
      {"accuracy": 0.9970355731225297, "f1": 0.9960474308300395, "hf_subset": "mal-eng", "languages": ["mal-Mlym", "eng-Latn"], "main_score": 0.9960474308300395, "precision": 0.9955533596837944, "recall": 0.9970355731225297},
      {"accuracy": 0.9980237154150198, "f1": 0.997364953886693, "hf_subset": "eng-mal", "languages": ["eng-Latn", "mal-Mlym"], "main_score": 0.997364953886693, "precision": 0.9970355731225297, "recall": 0.9980237154150198},
      {"accuracy": 1.0, "f1": 1.0, "hf_subset": "mar-eng", "languages": ["mar-Deva", "eng-Latn"], "main_score": 1.0, "precision": 1.0, "recall": 1.0},
      {"accuracy": 0.9990118577075099, "f1": 0.9986824769433466, "hf_subset": "eng-mar", "languages": ["eng-Latn", "mar-Deva"], "main_score": 0.9986824769433466, "precision": 0.9985177865612648, "recall": 0.9990118577075099},
      {"accuracy": 0.9871541501976284, "f1": 0.983201581027668, "hf_subset": "tam-eng", "languages": ["tam-Taml", "eng-Latn"], "main_score": 0.983201581027668, "precision": 0.9812252964426877, "recall": 0.9871541501976284},
      {"accuracy": 0.9861660079051383, "f1": 0.9818840579710145, "hf_subset": "eng-tam", "languages": ["eng-Latn", "tam-Taml"], "main_score": 0.9818840579710145, "precision": 0.9797430830039525, "recall": 0.9861660079051383},
      {"accuracy": 0.991106719367589, "f1": 0.9881422924901185, "hf_subset": "tel-eng", "languages": ["tel-Telu", "eng-Latn"], "main_score": 0.9881422924901185, "precision": 0.9866600790513834, "recall": 0.991106719367589},
      {"accuracy": 0.991106719367589, "f1": 0.9883069828722003, "hf_subset": "eng-tel", "languages": ["eng-Latn", "tel-Telu"], "main_score": 0.9883069828722003, "precision": 0.9869894598155466, "recall": 0.991106719367589},
      {"accuracy": 0.9990118577075099, "f1": 0.9986824769433466, "hf_subset": "urd-eng", "languages": ["urd-Arab", "eng-Latn"], "main_score": 0.9986824769433466, "precision": 0.9985177865612648, "recall": 0.9990118577075099},
      {"accuracy": 0.9990118577075099, "f1": 0.9986824769433466, "hf_subset": "eng-urd", "languages": ["eng-Latn", "urd-Arab"], "main_score": 0.9986824769433466, "precision": 0.9985177865612648, "recall": 0.9990118577075099},
      {"accuracy": 0.9940711462450593, "f1": 0.9920948616600791, "hf_subset": "asm-eng", "languages": ["asm-Beng", "eng-Latn"], "main_score": 0.9920948616600791, "precision": 0.991106719367589, "recall": 0.9940711462450593},
      {"accuracy": 0.9960474308300395, "f1": 0.9947299077733861, "hf_subset": "eng-asm", "languages": ["eng-Latn", "asm-Beng"], "main_score": 0.9947299077733861, "precision": 0.9940711462450593, "recall": 0.9960474308300395},
      {"accuracy": 0.9970355731225297, "f1": 0.9960474308300395, "hf_subset": "bho-eng", "languages": ["bho-Deva", "eng-Latn"], "main_score": 0.9960474308300395, "precision": 0.9955533596837944, "recall": 0.9970355731225297},
      {"accuracy": 0.9960474308300395, "f1": 0.9948945981554677, "hf_subset": "eng-bho", "languages": ["eng-Latn", "bho-Deva"], "main_score": 0.9948945981554677, "precision": 0.9944005270092227, "recall": 0.9960474308300395},
      {"accuracy": 0.9970355731225297, "f1": 0.9963768115942029, "hf_subset": "nep-eng", "languages": ["nep-Deva", "eng-Latn"], "main_score": 0.9963768115942029, "precision": 0.9960474308300395, "recall": 0.9970355731225297},
      {"accuracy": 0.9940711462450593, "f1": 0.9927536231884058, "hf_subset": "eng-nep", "languages": ["eng-Latn", "nep-Deva"], "main_score": 0.9927536231884058, "precision": 0.9920948616600791, "recall": 0.9940711462450593},
      {"accuracy": 0.9990118577075099, "f1": 0.9986824769433464, "hf_subset": "ory-eng", "languages": ["ory-Orya", "eng-Latn"], "main_score": 0.9986824769433464, "precision": 0.9985177865612648, "recall": 0.9990118577075099},
      {"accuracy": 0.9980237154150198, "f1": 0.997364953886693, "hf_subset": "eng-ory", "languages": ["eng-Latn", "ory-Orya"], "main_score": 0.997364953886693, "precision": 0.9970355731225297, "recall": 0.9980237154150198},
      {"accuracy": 1.0, "f1": 1.0, "hf_subset": "pan-eng", "languages": ["pan-Guru", "eng-Latn"], "main_score": 1.0, "precision": 1.0, "recall": 1.0},
      {"accuracy": 0.9990118577075099, "f1": 0.9986824769433466, "hf_subset": "eng-pan", "languages": ["eng-Latn", "pan-Guru"], "main_score": 0.9986824769433466, "precision": 0.9985177865612648, "recall": 0.9990118577075099},
      {"accuracy": 0.9920948616600791, "f1": 0.989459815546772, "hf_subset": "pus-eng", "languages": ["pus-Arab", "eng-Latn"], "main_score": 0.989459815546772, "precision": 0.9881422924901185, "recall": 0.9920948616600791},
      {"accuracy": 0.9950592885375494, "f1": 0.9934123847167324, "hf_subset": "eng-pus", "languages": ["eng-Latn", "pus-Arab"], "main_score": 0.9934123847167324, "precision": 0.9925889328063241, "recall": 0.9950592885375494},
      {"accuracy": 0.9891304347826086, "f1": 0.9856719367588933, "hf_subset": "san-eng", "languages": ["san-Deva", "eng-Latn"], "main_score": 0.9856719367588933, "precision": 0.9840250329380763, "recall": 0.9891304347826086},
      {"accuracy": 0.9901185770750988, "f1": 0.9876482213438735, "hf_subset": "eng-san", "languages": ["eng-Latn", "san-Deva"], "main_score": 0.9876482213438735, "precision": 0.9864953886693018, "recall": 0.9901185770750988},
      {"accuracy": 0.991106719367589, "f1": 0.9888010540184454, "hf_subset": "awa-eng", "languages": ["awa-Deva", "eng-Latn"], "main_score": 0.9888010540184454, "precision": 0.9876482213438735, "recall": 0.991106719367589},
      {"accuracy": 0.9881422924901185, "f1": 0.9850131752305664, "hf_subset": "eng-awa", "languages": ["eng-Latn", "awa-Deva"], "main_score": 0.9850131752305664, "precision": 0.9835309617918314, "recall": 0.9881422924901185},
      {"accuracy": 1.0, "f1": 1.0, "hf_subset": "bgc-eng", "languages": ["bgc-Deva", "eng-Latn"], "main_score": 1.0, "precision": 1.0, "recall": 1.0},
      {"accuracy": 0.9990118577075099, "f1": 0.9986824769433466, "hf_subset": "eng-bgc", "languages": ["eng-Latn", "bgc-Deva"], "main_score": 0.9986824769433466, "precision": 0.9985177865612648, "recall": 0.9990118577075099},
      {"accuracy": 0.17588932806324112, "f1": 0.15074273907394112, "hf_subset": "bod-eng", "languages": ["bod-Tibt", "eng-Latn"], "main_score": 0.15074273907394112, "precision": 0.14365088858277375, "recall": 0.17588932806324112},
      {"accuracy": 0.22924901185770752, "f1": 0.19153452224104398, "hf_subset": "eng-bod", "languages": ["eng-Latn", "bod-Tibt"], "main_score": 0.19153452224104398, "precision": 0.1793529236464019, "recall": 0.22924901185770752},
      {"accuracy": 0.5899209486166008, "f1": 0.5396116125859208, "hf_subset": "boy-eng", "languages": ["boy-Deva", "eng-Latn"], "main_score": 0.5396116125859208, "precision": 0.5239119094371361, "recall": 0.5899209486166008},
      {"accuracy": 0.6511857707509882, "f1": 0.6089254344689128, "hf_subset": "eng-boy", "languages": ["eng-Latn", "boy-Deva"], "main_score": 0.6089254344689128, "precision": 0.5923106199001339, "recall": 0.6511857707509882},
      {"accuracy": 0.9990118577075099, "f1": 0.9986824769433464, "hf_subset": "gbm-eng", "languages": ["gbm-Deva", "eng-Latn"], "main_score": 0.9986824769433464, "precision": 0.9985177865612648, "recall": 0.9990118577075099},
      {"accuracy": 0.9990118577075099, "f1": 0.9986824769433464, "hf_subset": "eng-gbm", "languages": ["eng-Latn", "gbm-Deva"], "main_score": 0.9986824769433464, "precision": 0.9985177865612648, "recall": 0.9990118577075099},
      {"accuracy": 0.9723320158102767, "f1": 0.9640974967061923, "hf_subset": "gom-eng", "languages": ["gom-Deva", "eng-Latn"], "main_score": 0.9640974967061923, "precision": 0.9603096179183136, "recall": 0.9723320158102767},
      {"accuracy": 0.9861660079051383, "f1": 0.9815546772068511, "hf_subset": "eng-gom", "languages": ["eng-Latn", "gom-Deva"], "main_score": 0.9815546772068511, "precision": 0.9792490118577075, "recall": 0.9861660079051383},
      {"accuracy": 0.9930830039525692, "f1": 0.9907773386034255, "hf_subset": "hne-eng", "languages": ["hne-Deva", "eng-Latn"], "main_score": 0.9907773386034255, "precision": 0.9896245059288538, "recall": 0.9930830039525692},
      {"accuracy": 0.9950592885375494, "f1": 0.9934123847167324, "hf_subset": "eng-hne", "languages": ["eng-Latn", "hne-Deva"], "main_score": 0.9934123847167324, "precision": 0.9925889328063241, "recall": 0.9950592885375494},
      {"accuracy": 1.0, "f1": 1.0, "hf_subset": "raj-eng", "languages": ["raj-Deva", "eng-Latn"], "main_score": 1.0, "precision": 1.0, "recall": 1.0},
      {"accuracy": 1.0, "f1": 1.0, "hf_subset": "eng-raj", "languages": ["eng-Latn", "raj-Deva"], "main_score": 1.0, "precision": 1.0, "recall": 1.0},
      {"accuracy": 0.9990118577075099, "f1": 0.9986824769433464, "hf_subset": "mai-eng", "languages": ["mai-Deva", "eng-Latn"], "main_score": 0.9986824769433464, "precision": 0.9985177865612648, "recall": 0.9990118577075099},
      {"accuracy": 0.9990118577075099, "f1": 0.9986824769433464, "hf_subset": "eng-mai", "languages": ["eng-Latn", "mai-Deva"], "main_score": 0.9986824769433464, "precision": 0.9985177865612648, "recall": 0.9990118577075099},
      {"accuracy": 0.6679841897233202, "f1": 0.6267081341669047, "hf_subset": "mni-eng", "languages": ["mni-Mtei", "eng-Latn"], "main_score": 0.6267081341669047, "precision": 0.612257639703292, "recall": 0.6679841897233202},
      {"accuracy": 0.733201581027668, "f1": 0.7001646903820816, "hf_subset": "eng-mni", "languages": ["eng-Latn", "mni-Mtei"], "main_score": 0.7001646903820816, "precision": 0.6858036890645586, "recall": 0.733201581027668},
      {"accuracy": 0.9990118577075099, "f1": 0.9986824769433464, "hf_subset": "mup-eng", "languages": ["mup-Deva", "eng-Latn"], "main_score": 0.9986824769433464, "precision": 0.9985177865612648, "recall": 0.9990118577075099},
      {"accuracy": 1.0, "f1": 1.0, "hf_subset": "eng-mup", "languages": ["eng-Latn", "mup-Deva"], "main_score": 1.0, "precision": 1.0, "recall": 1.0},
      {"accuracy": 0.9970355731225297, "f1": 0.9960474308300395, "hf_subset": "mwr-eng", "languages": ["mwr-Deva", "eng-Latn"], "main_score": 0.9960474308300395, "precision": 0.9955533596837944, "recall": 0.9970355731225297},
      {"accuracy": 0.9980237154150198, "f1": 0.997364953886693, "hf_subset": "eng-mwr", "languages": ["eng-Latn", "mwr-Deva"], "main_score": 0.997364953886693, "precision": 0.9970355731225297, "recall": 0.9980237154150198},
      {"accuracy": 0.03557312252964427, "f1": 0.02704713759802576, "hf_subset": "sat-eng", "languages": ["sat-Olck", "eng-Latn"], "main_score": 0.02704713759802576, "precision": 0.025477898002648037, "recall": 0.03557312252964427},
      {"accuracy": 0.046442687747035576, "f1": 0.023896379839577594, "hf_subset": "eng-sat", "languages": ["eng-Latn", "sat-Olck"], "main_score": 0.023896379839577594, "precision": 0.018917803899786416, "recall": 0.046442687747035576}
    ],
    "validation": [
      {"accuracy": 1.0, "f1": 1.0, "hf_subset": "ben-eng", "languages": ["ben-Beng", "eng-Latn"], "main_score": 1.0, "precision": 1.0, "recall": 1.0},
      {"accuracy": 1.0, "f1": 1.0, "hf_subset": "eng-ben", "languages": ["eng-Latn", "ben-Beng"], "main_score": 1.0, "precision": 1.0, "recall": 1.0},
      {"accuracy": 0.9989969909729187, "f1": 0.9986626546305584, "hf_subset": "guj-eng", "languages": ["guj-Gujr", "eng-Latn"], "main_score": 0.9986626546305584, "precision": 0.9984954864593781, "recall": 0.9989969909729187},
      {"accuracy": 0.995987963891675, "f1": 0.9946506185222335, "hf_subset": "eng-guj", "languages": ["eng-Latn", "guj-Gujr"], "main_score": 0.9946506185222335, "precision": 0.9939819458375125, "recall": 0.995987963891675},
      {"accuracy": 1.0, "f1": 1.0, "hf_subset": "hin-eng", "languages": ["hin-Deva", "eng-Latn"], "main_score": 1.0, "precision": 1.0, "recall": 1.0},
      {"accuracy": 0.9989969909729187, "f1": 0.9986626546305583, "hf_subset": "eng-hin", "languages": ["eng-Latn", "hin-Deva"], "main_score": 0.9986626546305583, "precision": 0.9984954864593781, "recall": 0.9989969909729187},
      {"accuracy": 0.9849548645937813, "f1": 0.9799398194583752, "hf_subset": "kan-eng", "languages": ["kan-Knda", "eng-Latn"], "main_score": 0.9799398194583752, "precision": 0.977432296890672, "recall": 0.9849548645937813},
      {"accuracy": 0.9819458375125376, "f1": 0.976596456034771, "hf_subset": "eng-kan", "languages": ["eng-Latn", "kan-Knda"], "main_score": 0.976596456034771, "precision": 0.9739217652958877, "recall": 0.9819458375125376},
      {"accuracy": 0.9929789368104313, "f1": 0.9906385824139085, "hf_subset": "mal-eng", "languages": ["mal-Mlym", "eng-Latn"], "main_score": 0.9906385824139085, "precision": 0.9894684052156469, "recall": 0.9929789368104313},
      {"accuracy": 0.9979939819458375, "f1": 0.9973253092611166, "hf_subset": "eng-mal", "languages": ["eng-Latn", "mal-Mlym"], "main_score": 0.9973253092611166, "precision": 0.9969909729187563, "recall": 0.9979939819458375},
      {"accuracy": 1.0, "f1": 1.0, "hf_subset": "mar-eng", "languages": ["mar-Deva", "eng-Latn"], "main_score": 1.0, "precision": 1.0, "recall": 1.0},
      {"accuracy": 1.0, "f1": 1.0, "hf_subset": "eng-mar", "languages": ["eng-Latn", "mar-Deva"], "main_score": 1.0, "precision": 1.0, "recall": 1.0},
      {"accuracy": 0.9869608826479438, "f1": 0.9834503510531595, "hf_subset": "tam-eng", "languages": ["tam-Taml", "eng-Latn"], "main_score": 0.9834503510531595, "precision": 0.9817786693413575, "recall": 0.9869608826479438},
      {"accuracy": 0.9889669007021064, "f1": 0.9852892009361417, "hf_subset": "eng-tam", "languages": ["eng-Latn", "tam-Taml"], "main_score": 0.9852892009361417, "precision": 0.9834503510531595, "recall": 0.9889669007021064},
      {"accuracy": 0.9859578736208626, "f1": 0.9816783684386493, "hf_subset": "tel-eng", "languages": ["tel-Telu", "eng-Latn"], "main_score": 0.9816783684386493, "precision": 0.9796890672016049, "recall": 0.9859578736208626},
      {"accuracy": 0.9779338014042126, "f1": 0.970912738214644, "hf_subset": "eng-tel", "languages": ["eng-Latn", "tel-Telu"], "main_score": 0.970912738214644, "precision": 0.9674022066198595, "recall": 0.9779338014042126},
      {"accuracy": 0.995987963891675, "f1": 0.9946506185222335, "hf_subset": "urd-eng", "languages": ["urd-Arab", "eng-Latn"], "main_score": 0.9946506185222335, "precision": 0.9939819458375125, "recall": 0.995987963891675},
      {"accuracy": 0.9979939819458375, "f1": 0.9973253092611166, "hf_subset": "eng-urd", "languages": ["eng-Latn", "urd-Arab"], "main_score": 0.9973253092611166, "precision": 0.9969909729187563, "recall": 0.9979939819458375},
      {"accuracy": 0.9949849548645938, "f1": 0.9933132731527916, "hf_subset": "asm-eng", "languages": ["asm-Beng", "eng-Latn"], "main_score": 0.9933132731527916, "precision": 0.9924774322968907, "recall": 0.9949849548645938},
      {"accuracy": 0.9949849548645938, "f1": 0.9933132731527916, "hf_subset": "eng-asm", "languages": ["eng-Latn", "asm-Beng"], "main_score": 0.9933132731527916, "precision": 0.9924774322968907, "recall": 0.9949849548645938},
      {"accuracy": 0.9949849548645938, "f1": 0.9933132731527916, "hf_subset": "bho-eng", "languages": ["bho-Deva", "eng-Latn"], "main_score": 0.9933132731527916, "precision": 0.9924774322968907, "recall": 0.9949849548645938},
      {"accuracy": 0.9939819458375125, "f1": 0.9919759277833501, "hf_subset": "eng-bho", "languages": ["eng-Latn", "bho-Deva"], "main_score": 0.9919759277833501, "precision": 0.9909729187562688, "recall": 0.9939819458375125},
      {"accuracy": 0.9879638916750251, "f1": 0.9842861919090604, "hf_subset": "nep-eng", "languages": ["nep-Deva", "eng-Latn"], "main_score": 0.9842861919090604, "precision": 0.9824473420260782, "recall": 0.9879638916750251},
      {"accuracy": 0.9919759277833501, "f1": 0.9899699097291875, "hf_subset": "eng-nep", "languages": ["eng-Latn", "nep-Deva"], "main_score": 0.9899699097291875, "precision": 0.9889669007021064, "recall": 0.9919759277833501},
      {"accuracy": 0.9989969909729187, "f1": 0.9986626546305584, "hf_subset": "ory-eng", "languages": ["ory-Orya", "eng-Latn"], "main_score": 0.9986626546305584, "precision": 0.9984954864593781, "recall": 0.9989969909729187},
      {"accuracy": 0.9989969909729187, "f1": 0.9986626546305583, "hf_subset": "eng-ory", "languages": ["eng-Latn", "ory-Orya"], "main_score": 0.9986626546305583, "precision": 0.9984954864593781, "recall": 0.9989969909729187},
      {"accuracy": 1.0, "f1": 1.0, "hf_subset": "pan-eng", "languages": ["pan-Guru", "eng-Latn"], "main_score": 1.0, "precision": 1.0, "recall": 1.0},
      {"accuracy": 0.9989969909729187, "f1": 0.9986626546305583, "hf_subset": "eng-pan", "languages": ["eng-Latn", "pan-Guru"], "main_score": 0.9986626546305583, "precision": 0.9984954864593781, "recall": 0.9989969909729187},
      {"accuracy": 0.9769307923771314, "f1": 0.9697425610163826, "hf_subset": "pus-eng", "languages": ["pus-Arab", "eng-Latn"], "main_score": 0.9697425610163826, "precision": 0.9662320294215981, "recall": 0.9769307923771314},
      {"accuracy": 0.9859578736208626, "f1": 0.9819458375125376, "hf_subset": "eng-pus", "languages": ["eng-Latn", "pus-Arab"], "main_score": 0.9819458375125376, "precision": 0.9799398194583752, "recall": 0.9859578736208626},
      {"accuracy": 0.9919759277833501, "f1": 0.9893012370444668, "hf_subset": "san-eng", "languages": ["san-Deva", "eng-Latn"], "main_score": 0.9893012370444668, "precision": 0.9879638916750251, "recall": 0.9919759277833501},
      {"accuracy": 0.9909729187562688, "f1": 0.988632564359746, "hf_subset": "eng-san", "languages": ["eng-Latn", "san-Deva"], "main_score": 0.988632564359746, "precision": 0.9874623871614845, "recall": 0.9909729187562688},
      {"accuracy": 0.9949849548645938, "f1": 0.9939819458375125, "hf_subset": "awa-eng", "languages": ["awa-Deva", "eng-Latn"], "main_score": 0.9939819458375125, "precision": 0.993480441323972, "recall": 0.9949849548645938},
      {"accuracy": 0.9929789368104313, "f1": 0.9919759277833501, "hf_subset": "eng-awa", "languages": ["eng-Latn", "awa-Deva"], "main_score": 0.9919759277833501, "precision": 0.9914744232698094, "recall": 0.9929789368104313},
      {"accuracy": 0.9989969909729187, "f1": 0.9986626546305584, "hf_subset": "bgc-eng", "languages": ["bgc-Deva", "eng-Latn"], "main_score": 0.9986626546305584, "precision": 0.9984954864593781, "recall": 0.9989969909729187},
      {"accuracy": 0.9989969909729187, "f1": 0.9986626546305584, "hf_subset": "eng-bgc", "languages": ["eng-Latn", "bgc-Deva"], "main_score": 0.9986626546305584, "precision": 0.9984954864593781, "recall": 0.9989969909729187},
      {"accuracy": 0.18956870611835505, "f1": 0.16293861115648617, "hf_subset": "bod-eng", "languages": ["bod-Tibt", "eng-Latn"], "main_score": 0.16293861115648617, "precision": 0.15651273401704532, "recall": 0.18956870611835505},
      {"accuracy": 0.23470411233701102, "f1": 0.19038066580694465, "hf_subset": "eng-bod", "languages": ["eng-Latn", "bod-Tibt"], "main_score": 0.19038066580694465, "precision": 0.17490447533075415, "recall": 0.23470411233701102},
      {"accuracy": 0.5857572718154463, "f1": 0.5339294578861105, "hf_subset": "boy-eng", "languages": ["boy-Deva", "eng-Latn"], "main_score": 0.5339294578861105, "precision": 0.5168006238718376, "recall": 0.5857572718154463},
      {"accuracy": 0.6760280842527583, "f1": 0.6343697759946506, "hf_subset": "eng-boy", "languages": ["eng-Latn", "boy-Deva"], "main_score": 0.6343697759946506, "precision": 0.6166165162153125, "recall": 0.6760280842527583},
      {"accuracy": 0.9989969909729187, "f1": 0.9986626546305583, "hf_subset": "gbm-eng", "languages": ["gbm-Deva", "eng-Latn"], "main_score": 0.9986626546305583, "precision": 0.9984954864593781, "recall": 0.9989969909729187},
      {"accuracy": 0.9979939819458375, "f1": 0.9973253092611166, "hf_subset": "eng-gbm", "languages": ["eng-Latn", "gbm-Deva"], "main_score": 0.9973253092611166, "precision": 0.9969909729187563, "recall": 0.9979939819458375},
      {"accuracy": 0.9749247743229689, "f1": 0.967803410230692, "hf_subset": "gom-eng", "languages": ["gom-Deva", "eng-Latn"], "main_score": 0.967803410230692, "precision": 0.9644767636242059, "recall": 0.9749247743229689},
      {"accuracy": 0.9829488465396189, "f1": 0.9780006686726846, "hf_subset": "eng-gom", "languages": ["eng-Latn", "gom-Deva"], "main_score": 0.9780006686726846, "precision": 0.9756770310932799, "recall": 0.9829488465396189},
      {"accuracy": 0.9969909729187563, "f1": 0.995987963891675, "hf_subset": "hne-eng", "languages": ["hne-Deva", "eng-Latn"], "main_score": 0.995987963891675, "precision": 0.9954864593781344, "recall": 0.9969909729187563},
      {"accuracy": 0.995987963891675, "f1": 0.9949849548645938, "hf_subset": "eng-hne", "languages": ["eng-Latn", "hne-Deva"], "main_score": 0.9949849548645938, "precision": 0.9944834503510531, "recall": 0.995987963891675},
      {"accuracy": 0.9989969909729187, "f1": 0.9986626546305583, "hf_subset": "raj-eng", "languages": ["raj-Deva", "eng-Latn"], "main_score": 0.9986626546305583, "precision": 0.9984954864593781, "recall": 0.9989969909729187},
      {"accuracy": 0.9989969909729187, "f1": 0.9986626546305583, "hf_subset": "eng-raj", "languages": ["eng-Latn", "raj-Deva"], "main_score": 0.9986626546305583, "precision": 0.9984954864593781, "recall": 0.9989969909729187},
      {"accuracy": 0.9989969909729187, "f1": 0.9986626546305584, "hf_subset": "mai-eng", "languages": ["mai-Deva", "eng-Latn"], "main_score": 0.9986626546305584, "precision": 0.9984954864593781, "recall": 0.9989969909729187},
      {"accuracy": 1.0, "f1": 1.0, "hf_subset": "eng-mai", "languages": ["eng-Latn", "mai-Deva"], "main_score": 1.0, "precision": 1.0, "recall": 1.0},
      {"accuracy": 0.6690070210631895, "f1": 0.6255715268142793, "hf_subset": "mni-eng", "languages": ["mni-Mtei", "eng-Latn"], "main_score": 0.6255715268142793, "precision": 0.6110208330619564, "recall": 0.6690070210631895},
      {"accuracy": 0.7331995987963892, "f1": 0.6994340163347185, "hf_subset": "eng-mni", "languages": ["eng-Latn", "mni-Mtei"], "main_score": 0.6994340163347185, "precision": 0.6848641161579978, "recall": 0.7331995987963892},
      {"accuracy": 0.9979939819458375, "f1": 0.9973253092611166, "hf_subset": "mup-eng", "languages": ["mup-Deva", "eng-Latn"], "main_score": 0.9973253092611166, "precision": 0.9969909729187563, "recall": 0.9979939819458375},
      {"accuracy": 0.9979939819458375, "f1": 0.9973253092611167, "hf_subset": "eng-mup", "languages": ["eng-Latn", "mup-Deva"], "main_score": 0.9973253092611167, "precision": 0.9969909729187563, "recall": 0.9979939819458375},
      {"accuracy": 1.0, "f1": 1.0, "hf_subset": "mwr-eng", "languages": ["mwr-Deva", "eng-Latn"], "main_score": 1.0, "precision": 1.0, "recall": 1.0},
      {"accuracy": 1.0, "f1": 1.0, "hf_subset": "eng-mwr", "languages": ["eng-Latn", "mwr-Deva"], "main_score": 1.0, "precision": 1.0, "recall": 1.0},
      {"accuracy": 0.01905717151454363, "f1": 0.012679753896260559, "hf_subset": "sat-eng", "languages": ["sat-Olck", "eng-Latn"], "main_score": 0.012679753896260559, "precision": 0.011886025364638455, "recall": 0.01905717151454363},
      {"accuracy": 0.034102306920762285, "f1": 0.01917941812889815, "hf_subset": "eng-sat", "languages": ["eng-Latn", "sat-Olck"], "main_score": 0.01917941812889815, "precision": 0.016042792792554272, "recall": 0.034102306920762285}
    ]
  },
  "task_name": "IndicGenBenchFloresBitextMining"
}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/IndicLangClassification.json
ADDED
@@ -0,0 +1,96 @@
{
  "dataset_revision": "c54a95d9b9d62c891a03bd5da60715df7176b097",
  "evaluation_time": 2130.055163383484,
  "kg_co2_emissions": 0.20619872512539605,
  "mteb_version": "1.18.0",
  "scores": {
    "test": [
      {
        "accuracy": 0.19078506147675717,
        "f1": 0.19064958410415278,
        "f1_weighted": 0.18286273566477707,
        "hf_subset": "default",
        "languages": ["asm-Beng", "brx-Deva", "ben-Beng", "doi-Deva", "gom-Deva", "guj-Gujr", "hin-Deva", "kan-Knda", "kas-Arab", "kas-Deva", "mai-Deva", "mal-Mlym", "mar-Deva", "mni-Beng", "mni-Mtei", "npi-Deva", "ory-Orya", "pan-Guru", "san-Deva", "sat-Olck", "snd-Arab", "tam-Taml", "tel-Telu", "urd-Arab"],
        "main_score": 0.19078506147675717,
        "scores_per_experiment": [
          {"accuracy": 0.1632914721546453, "f1": 0.1619558017758214, "f1_weighted": 0.1528376215992452},
          {"accuracy": 0.17959760668025512, "f1": 0.17706215686751697, "f1_weighted": 0.1745522987104454},
          {"accuracy": 0.19462160562824643, "f1": 0.19939128226295122, "f1_weighted": 0.18870436335646837},
          {"accuracy": 0.20168978894075876, "f1": 0.20437138375350972, "f1_weighted": 0.19041247907872438},
          {"accuracy": 0.20050627917680322, "f1": 0.20055001421484217, "f1_weighted": 0.19508194149187583},
          {"accuracy": 0.19291209152475508, "f1": 0.19341383064193723, "f1_weighted": 0.187381076304858},
          {"accuracy": 0.18778354921428103, "f1": 0.18151254067954978, "f1_weighted": 0.17131923450160655},
          {"accuracy": 0.19435860345847852, "f1": 0.1899721902824806, "f1_weighted": 0.19190292930559977},
          {"accuracy": 0.19080807416661189, "f1": 0.1908801988825718, "f1_weighted": 0.1789548956434686},
          {"accuracy": 0.20228154382273653, "f1": 0.20738644168034673, "f1_weighted": 0.19748051665547844}
        ]
      }
    ]
  },
  "task_name": "IndicLangClassification"
}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/IndonesianIdClickbaitClassification.json
ADDED
@@ -0,0 +1,95 @@
{
  "dataset_revision": "9fa4d0824015fe537ae2c8166781f5c79873da2c",
  "evaluation_time": 44.192094564437866,
  "kg_co2_emissions": 0.0035279698272123773,
  "mteb_version": "1.18.0",
  "scores": {
    "train": [
      {
        "accuracy": 0.616748046875,
        "ap": 0.5009022998115311,
        "ap_weighted": 0.5009022998115311,
        "f1": 0.611236448842879,
        "f1_weighted": 0.6165580786413948,
        "hf_subset": "default",
        "languages": ["ind-Latn"],
        "main_score": 0.611236448842879,
        "scores_per_experiment": [
          {"accuracy": 0.68359375, "ap": 0.5420947765210787, "ap_weighted": 0.5420947765210787, "f1": 0.6645474399417617, "f1_weighted": 0.6774271168254065},
          {"accuracy": 0.44970703125, "ap": 0.40243265128948946, "ap_weighted": 0.40243265128948946, "f1": 0.4497006023715773, "f1_weighted": 0.4493975266745077},
          {"accuracy": 0.6240234375, "ap": 0.48977765408845453, "ap_weighted": 0.48977765408845453, "f1": 0.6097251920487844, "f1_weighted": 0.6217619803112874},
          {"accuracy": 0.6201171875, "ap": 0.5047174159760517, "ap_weighted": 0.5047174159760517, "f1": 0.6201041448108799, "f1_weighted": 0.6197454708600769},
          {"accuracy": 0.68212890625, "ap": 0.5423402482376565, "ap_weighted": 0.5423402482376565, "f1": 0.6686541235941235, "f1_weighted": 0.6794208990818699},
          {"accuracy": 0.50634765625, "ap": 0.42609685169806955, "ap_weighted": 0.42609685169806955, "f1": 0.5055742436916497, "f1_weighted": 0.5087251837441881},
          {"accuracy": 0.69189453125, "ap": 0.5583798491587129, "ap_weighted": 0.5583798491587129, "f1": 0.6891337737893773, "f1_weighted": 0.693854239913758},
          {"accuracy": 0.677734375, "ap": 0.5462821210524533, "ap_weighted": 0.5462821210524533, "f1": 0.6758536276537375, "f1_weighted": 0.6798321316554468},
          {"accuracy": 0.66162109375, "ap": 0.5307569998798899, "ap_weighted": 0.5307569998798899, "f1": 0.6589572665715577, "f1_weighted": 0.6638139680571156},
          {"accuracy": 0.5703125, "ap": 0.466144430213454, "ap_weighted": 0.466144430213454, "f1": 0.5701140739553399, "f1_weighted": 0.5716022692902913}
        ]
      }
    ]
  },
  "task_name": "IndonesianIdClickbaitClassification"
}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/IsiZuluNewsClassification.json
ADDED
@@ -0,0 +1,73 @@
{
  "dataset_revision": "55caf0e52693a1ea63b15a4980a73fc137fb862b",
  "evaluation_time": 37.1495635509491,
  "kg_co2_emissions": 0.002721678399847738,
  "mteb_version": "1.18.0",
  "scores": {
    "train": [
      {
        "accuracy": 0.26462765957446804,
        "f1": 0.3077865690163965,
        "f1_weighted": 0.26225843282114025,
        "hf_subset": "default",
        "languages": ["zul-Latn"],
        "main_score": 0.26462765957446804,
        "scores_per_experiment": [
          {"accuracy": 0.2327127659574468, "f1": 0.2970673688928904, "f1_weighted": 0.22398980401663895},
          {"accuracy": 0.2779255319148936, "f1": 0.30304102511889863, "f1_weighted": 0.2834536995308719},
          {"accuracy": 0.2726063829787234, "f1": 0.32319413722977114, "f1_weighted": 0.26790976234715275},
          {"accuracy": 0.29388297872340424, "f1": 0.3170186719117538, "f1_weighted": 0.29771211784355367},
          {"accuracy": 0.2566489361702128, "f1": 0.3088925011585645, "f1_weighted": 0.24369904048189933},
          {"accuracy": 0.2752659574468085, "f1": 0.32242889579673917, "f1_weighted": 0.265841314815645},
          {"accuracy": 0.22606382978723405, "f1": 0.27919627008378345, "f1_weighted": 0.22876497133219853},
          {"accuracy": 0.29521276595744683, "f1": 0.3230994717011699, "f1_weighted": 0.2995323207068929},
          {"accuracy": 0.2566489361702128, "f1": 0.29004539739555735, "f1_weighted": 0.25853650875534384},
          {"accuracy": 0.25930851063829785, "f1": 0.31388195087483695, "f1_weighted": 0.2531447883812053}
        ]
      }
    ]
  },
  "task_name": "IsiZuluNewsClassification"
}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/ItaCaseholdClassification.json
ADDED
@@ -0,0 +1,73 @@
{
  "dataset_revision": "fafcfc4fee815f7017848e54b26c47ece8ff1626",
  "evaluation_time": 554.4675099849701,
  "kg_co2_emissions": 0.052839863467244105,
  "mteb_version": "1.18.0",
  "scores": {
    "test": [
      {
        "accuracy": 0.7158371040723981,
        "f1": 0.31868833542773223,
        "f1_weighted": 0.6676793240259407,
        "hf_subset": "default",
        "languages": ["ita-Latn"],
        "main_score": 0.7158371040723981,
        "scores_per_experiment": [
          {"accuracy": 0.7149321266968326, "f1": 0.32287647858536916, "f1_weighted": 0.667725145021117},
          {"accuracy": 0.7104072398190046, "f1": 0.31652732547058204, "f1_weighted": 0.6666233887834269},
          {"accuracy": 0.7239819004524887, "f1": 0.3179080065650477, "f1_weighted": 0.6768830105390045},
          {"accuracy": 0.7194570135746606, "f1": 0.3172887533701678, "f1_weighted": 0.6681901517814831},
          {"accuracy": 0.6968325791855203, "f1": 0.30799455866253905, "f1_weighted": 0.6565601814007769},
          {"accuracy": 0.7285067873303167, "f1": 0.3273109759476181, "f1_weighted": 0.6783869789060101},
          {"accuracy": 0.6968325791855203, "f1": 0.3163134988345418, "f1_weighted": 0.649647240090693},
          {"accuracy": 0.7149321266968326, "f1": 0.31773370930614625, "f1_weighted": 0.6593517736706592},
          {"accuracy": 0.7285067873303167, "f1": 0.3199560445037418, "f1_weighted": 0.683009216445749},
          {"accuracy": 0.7239819004524887, "f1": 0.3229740030315688, "f1_weighted": 0.670416153620488}
        ]
      }
    ]
  },
  "task_name": "ItaCaseholdClassification"
}