Commit be295a2
1 Parent(s): 66e90c5
Committed by hanhainebula

Update evaluation results
AIR-Bench_24.04/(LlamaIndex)Ada002+BM25+QueryFusionRetriever/NoReranker/results_20240521184012-c203e3788149c1fcb7fda31bd92f9523.json ADDED
@@ -0,0 +1,1367 @@
+ [
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "ndcg_at_1",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.3773
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.37685
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.47619
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.43373
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "ndcg_at_3",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.42982
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.45939
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.46198
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.55455
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "ndcg_at_5",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.46897
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.50069
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.48747
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.58636
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "ndcg_at_10",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.51928
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.54175
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.53722
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.60966
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "ndcg_at_50",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.57904
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.59033
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.59543
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.64013
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "ndcg_at_100",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.59125
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.59892
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.6084
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.64924
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "ndcg_at_1000",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.59823
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.60573
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.62381
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.65199
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "map_at_1",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.24921
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.31949
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.22908
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.43373
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "map_at_3",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.36211
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.41609
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.36551
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.52544
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "map_at_5",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.39853
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.44315
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.40707
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.54331
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "map_at_10",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.431
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.46353
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.4449
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.55288
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "map_at_50",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.45109
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.47709
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.4677
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.55954
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "map_at_100",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.45273
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.47807
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.46958
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.56033
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "map_at_1000",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.45313
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.4786
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.47051
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.56046
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "recall_at_1",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.24921
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.31949
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.22908
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.43373
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "recall_at_3",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.45169
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.52028
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.42672
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.63855
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "recall_at_5",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.55009
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.61053
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.52044
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.71486
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "recall_at_10",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.67371
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.72181
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.64453
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.78715
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "recall_at_50",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.88732
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.9095
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.83635
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.92369
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "recall_at_100",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.9521
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.95821
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.90021
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.97992
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "recall_at_1000",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 1.0
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 1.0
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 1.0
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 1.0
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "precision_at_1",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.3773
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.37685
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.47619
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.43373
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "precision_at_3",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.26176
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.22354
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.33147
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.21285
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "precision_at_5",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.20184
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.16499
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.25434
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.14297
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "precision_at_10",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.13037
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.10089
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.16779
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.07871
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "precision_at_50",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.03521
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.02653
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.04465
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.01847
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "precision_at_100",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.01874
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.01398
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.02375
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.0098
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "precision_at_1000",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.00195
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.00147
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.00259
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.001
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "mrr_at_1",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.3773
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.37685
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.47619
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.43373
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "mrr_at_3",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.48824
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.47527
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.55836
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.52544
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "mrr_at_5",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.50634
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.4959
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.5718
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.54331
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "mrr_at_10",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.51538
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.50917
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.58399
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.55288
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "mrr_at_50",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.52362
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.51699
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.58928
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.55954
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "mrr_at_100",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.52409
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.51746
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.58973
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.56033
+ }
+ ]
+ },
+ {
+ "config": {
+ "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
+ "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
+ "reranking_model": "NoReranker",
+ "reranking_model_link": null,
+ "task": "long-doc",
+ "metric": "mrr_at_1000",
+ "timestamp": "2024-05-21T18:40:12Z",
+ "is_anonymous": false,
+ "revision": "c203e3788149c1fcb7fda31bd92f9523"
+ },
+ "results": [
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llama2",
+ "value": 0.52427
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gpt3",
+ "value": 0.51759
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "llm-survey",
+ "value": 0.59
+ },
+ {
+ "domain": "arxiv",
+ "lang": "en",
+ "dataset": "gemini",
+ "value": 0.56046
+ }
+ ]
+ }
+ ]
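
Each record in the file above pairs one `config` object (retrieval model, reranker, task, metric name, timestamp, revision) with a `results` array holding one score per dataset. A minimal sketch of consuming it is below; the local file path matches this commit, but the aggregation shown (a plain mean over the four arxiv long-doc datasets) is only illustrative and is not necessarily how the AIR-Bench leaderboard aggregates scores.

```python
import json
from collections import defaultdict

# Path as committed in this repo (assumed to be downloaded locally).
PATH = (
    "AIR-Bench_24.04/(LlamaIndex)Ada002+BM25+QueryFusionRetriever/"
    "NoReranker/results_20240521184012-c203e3788149c1fcb7fda31bd92f9523.json"
)

with open(PATH) as f:
    records = json.load(f)  # top-level JSON array of {config, results} records

# Collect every per-dataset value under its metric name.
by_metric = defaultdict(list)
for record in records:
    metric = record["config"]["metric"]   # e.g. "ndcg_at_10"
    for result in record["results"]:      # one entry per dataset
        by_metric[metric].append(result["value"])

# Report a simple mean across the four arxiv long-doc datasets.
for metric, values in sorted(by_metric.items()):
    print(f"{metric}: {sum(values) / len(values):.5f}")
```

For `ndcg_at_10`, for example, this prints the mean of 0.51928, 0.54175, 0.53722 and 0.60966, i.e. 0.55198.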