Spaces:
AIR-Bench
/
Running on CPU Upgrade

leaderboard / tests / toydata / test_results / bge-m3 / NoReranker / results_2023-11-21T18-10-08.json
nan's picture
chore: clean up the requests related codes
3b83af7
raw
history blame
No virus
1.81 kB
[
{
"config": {
"retrieval_model": "bge-m3",
"reranking_model": "NoReranker",
"task": "long_doc",
"metric": "ndcg_at_1"
},
"results": [
{
"domain": "law",
"lang": "en",
"dataset": "lex_files_500K-600K",
"value": 0.45723
}
]
},
{
"config": {
"retrieval_model": "bge-m3",
"reranking_model": "NoReranker",
"task": "long_doc",
"metric": "ndcg_at_3"
},
"results": [
{
"domain": "law",
"lang": "en",
"dataset": "lex_files_500K-600K",
"value": 0.49909
}
]
},
{
"config": {
"retrieval_model": "bge-m3",
"reranking_model": "NoReranker",
"task": "qa",
"metric": "ndcg_at_1"
},
"results": [
{
"domain": "wiki",
"lang": "en",
"dataset": "unknown",
"value": 0.49083
}
]
},
{
"config": {
"retrieval_model": "bge-m3",
"reranking_model": "NoReranker",
"task": "qa",
"metric": "ndcg_at_3"
},
"results": [
{
"domain": "wiki",
"lang": "en",
"dataset": "unknown",
"value": 0.43359
}
]
},
{
"config": {
"retrieval_model": "bge-m3",
"reranking_model": "NoReranker",
"task": "qa",
"metric": "ndcg_at_1"
},
"results": [
{
"domain": "wiki",
"lang": "zh",
"dataset": "unknown",
"value": 0.78358
}
]
},
{
"config": {
"retrieval_model": "bge-m3",
"reranking_model": "NoReranker",
"task": "qa",
"metric": "ndcg_at_3"
},
"results": [
{
"domain": "wiki",
"lang": "zh",
"dataset": "unknown",
"value": 0.78358
}
]
}
]