PeterKruger committed on
Commit
11b5f6a
·
verified ·
1 Parent(s): fe799d1

Upload 7 files

Browse files
runs/agro1_2025-12-10/avg_latency.csv ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ model_name,ag_input_researcher,large_farm_operator,professional_farmer,small_farmer,Average (All Topics)
2
+ Claude-3.5-haiku,13.1078,14.8323,10.8955,10.1271,12.36590766
3
+ Claude-haiku-4.5,69.3721,56.4385,51.5493,27.6833,52.84063192
4
+ Claude-opus-4.5,66.3852,76.3004,64.6301,54.7895,66.00468475
5
+ Claude-sonnet-4.5,44.2239,50.3045,46.4167,25.3696,42.23496585
6
+ DeepSeek-R1-0528,52.3101,53.3305,48.557,61.5596,53.70458866
7
+ Deepseek-v3.1,34.0768,27.9003,28.3125,25.8293,29.33394455
8
+ Deepseek-v3.2-exp,61.6892,74.9174,65.0436,87.6184,71.34018597
9
+ DeepSeek-V3-0324,29.3007,27.8157,23.0962,23.1379,26.0879648
10
+ Gemini-2.5-flash,16.1984,21.0647,14.4246,16.1057,16.98406469
11
+ Gemini-2.5-flash-lite,9.6342,14.0787,10.6556,9.7651,10.97991167
12
+ Gemini-2.5-pro,47.8884,54.0435,54.4473,45.0034,50.43020029
13
+ Gemini-3-pro-preview,41.3062,54.0394,44.3436,45.0654,46.1533093
14
+ Gemma-3-27b-it,33.2778,34.0911,30.2504,23.5466,30.64022747
15
+ GLM-4.5,50.8355,59.1203,44.3903,48.3185,50.83704673
16
+ GLM-4.5-Air,35.9658,39.438,35.8384,28.6699,35.25712315
17
+ Gpt-5,117.089,111.9879,118.9011,98.3861,112.1926213
18
+ Gpt-5.1,132.4308,157.406,137.7383,135.4157,140.6559751
19
+ Gpt-5-mini,75.4383,75.0885,86.1547,59.0047,74.34256833
20
+ Gpt-oss-120b,30.3434,46.1344,35.0479,26.3054,34.62964683
21
+ Grok-3-mini,23.4522,24.5811,20.3284,24.969,23.29594754
22
+ Grok-4,68.3739,69.3112,73.7128,70.455,70.41157242
23
+ Grok-4.1-fast,25.0256,20.8067,27.1992,23.175,24.08985138
24
+ Grok-4.1-fast-thinking,40.5094,44.5835,43.8897,54.7598,45.40523004
25
+ Kimi-K2-Instruct,24.2689,24.7006,18.4177,15.6973,21.11152292
26
+ Kimi-k2-thinking,63.1766,74.4992,73.7857,60.0506,68.0282735
27
+ Llama-3.1-nemotron-ultra-253b-v1,29.2634,41.2077,31.6158,42.6803,35.678345
28
+ Llama-3.3-nemotron-super-49b-v1.5,29.7519,38.115,34.8889,41.0264,35.55935411
29
+ Llama-4-maverick,11.1642,13.0467,9.7799,14.8059,12.08569704
30
+ Llama-4-scout,16.0982,14.4672,15.4599,14.3707,15.15800778
31
+ Magistral-small-2506,7.3283,7.9912,6.527,8.3095,7.510135598
32
+ Minimax-m2,75.677,62.4322,78.3261,53.4083,68.35790329
33
+ Mistral-large-2512,69.0277,62.8374,60.8943,50.5348,61.59501498
34
+ Nemotron-nano-9b-v2,19.2514,18.184,17.6735,14.2169,17.50408955
35
+ Nova-lite-v1,6.4751,6.8216,6.4547,6.3224,6.525232401
36
+ Nova-pro-v1,8.4274,8.1033,7.5394,7.06,7.83512939
37
+ Phi-3-mini-128k-instruct,11.8013,24.0792,28.4105,16.388,19.88632743
38
+ Phi-4,14.6097,16.3131,12.971,15.6721,14.87012727
39
+ Qwen3-235B-A22B-Thinking-2507,73.2685,78.8224,68.3296,76.9412,74.1826127
40
+ Qwen3-30b-a3b-instruct-2507,22.3004,21.5667,28.993,13.7122,21.86853585
41
+ Qwen3-next-80b-a3b-thinking,28.6866,36.1919,32.1134,32.2459,32.18985488
runs/agro1_2025-12-10/correlations.json ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "correlations": {
3
+ "LMArena": 77.16,
4
+ "Artificial Analysis Intelligence Index": 87.08,
5
+ "MMLU": 80.68
6
+
7
+ },
8
+ "description": "Correlation percentages between AutoBench scores and other benchmark scores"
9
+ }
runs/agro1_2025-12-10/cost_data.csv ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ model_name,ag_input_researcher,large_farm_operator,professional_farmer,small_farmer,Average (All Topics)
2
+ Claude-3.5-haiku,0.00720051,0.00668192,0.00667434,0.0061498,0.006715112
3
+ Claude-haiku-4.5,0.02489296,0.02013418,0.01843746,0.01245574,0.019457209
4
+ Claude-opus-4.5,0.07110321,0.08098408,0.07469458,0.06461012,0.073115232
5
+ Claude-sonnet-4.5,0.024281,0.02470702,0.02045244,0.01217264,0.02082266
6
+ DeepSeek-R1-0528,0.00286639,0.0029719,0.00259405,0.0037869,0.003029674
7
+ Deepseek-v3.1,0.0009926,0.00103477,0.00089927,0.00086981,0.00095418
8
+ Deepseek-v3.2-exp,0.00066938,0.00082639,0.00067067,0.00099708,0.000778611
9
+ DeepSeek-V3-0324,0.00071608,0.00069401,0.000633,0.00059477,0.000664179
10
+ Gemini-2.5-flash,0.00423233,0.00520907,0.00395669,0.00375983,0.004311835
11
+ Gemini-2.5-flash-lite,0.00063077,0.00079674,0.00059305,0.00064157,0.000663739
12
+ Gemini-2.5-pro,0.03930419,0.04171929,0.03866309,0.03824411,0.039532906
13
+ Gemini-3-pro-preview,0.035101,0.04412368,0.03692442,0.03940919,0.038810289
14
+ Gemma-3-27b-it,0.00034033,0.00033513,0.00031045,0.00027395,0.00031739
15
+ GLM-4.5,0.00334257,0.0034681,0.00302012,0.00362297,0.003354639
16
+ GLM-4.5-Air,0.00171533,0.00164344,0.00145099,0.00134833,0.00155505
17
+ Gpt-5,0.05574779,0.05762194,0.05242862,0.05094797,0.054344102
18
+ Gpt-5.1,0.07600665,0.08595698,0.07228595,0.07323105,0.076993304
19
+ Gpt-5-mini,0.00837425,0.00861073,0.0082708,0.00703315,0.008105237
20
+ Gpt-oss-120b,0.00074131,0.00078443,0.00067502,0.0006203,0.000710107
21
+ Grok-3-mini,0.00093854,0.00100777,0.00094697,0.0009729,0.000965325
22
+ Grok-4,0.03467529,0.03134831,0.03279726,0.03809512,0.034134457
23
+ Grok-4.1-fast,0.00078635,0.00076243,0.00076237,0.00068448,0.000752717
24
+ Grok-4.1-fast-thinking,0.00076111,0.00074955,0.00076133,0.00066767,0.000738308
25
+ Kimi-K2-Instruct,0.00219623,0.00243434,0.00206643,0.00159412,0.002095738
26
+ Kimi-k2-thinking,0.00663416,0.00905259,0.00863894,0.00805154,0.008030023
27
+ Llama-3.1-nemotron-ultra-253b-v1,0.00184655,0.00212877,0.00191439,0.00241326,0.002052804
28
+ Llama-3.3-nemotron-super-49b-v1.5,0.00096714,0.00123108,0.00112008,0.00132855,0.001149371
29
+ Llama-4-maverick,0.00047313,0.00047299,0.00044071,0.00047342,0.00046525
30
+ Llama-4-scout,0.00023322,0.00025136,0.00023453,0.00021578,0.0002344
31
+ Magistral-small-2506,0.00103903,0.00113252,0.00097848,0.00087559,0.001012177
32
+ Minimax-m2,0.0038598,0.00330771,0.00424363,0.00273631,0.003582245
33
+ Mistral-large-2512,0.00352451,0.00354138,0.00324105,0.00268537,0.003281909
34
+ Nemotron-nano-9b-v2,0.0002837,0.00027182,0.00022324,0.00022866,0.000254074
35
+ Nova-lite-v1,0.00017192,0.00018757,0.0001659,0.00017551,0.00017519
36
+ Nova-pro-v1,0.00164883,0.00169965,0.00149093,0.00141377,0.001572756
37
+ Phi-3-mini-128k-instruct,0.00021982,0.00028748,0.00026441,0.0001653,0.000235708
38
+ Phi-4,0.00009619,0.00009927,0.00008531,0.0000851,0.000091938
39
+ Qwen3-235B-A22B-Thinking-2507,0.00130406,0.00134382,0.00139143,0.00130582,0.001336043
40
+ Qwen3-30b-a3b-instruct-2507,0.00035343,0.00040343,0.00033623,0.00028744,0.000347808
41
+ Qwen3-next-80b-a3b-thinking,0.00353934,0.00452363,0.00420806,0.00398458,0.004047989
runs/agro1_2025-12-10/domain_ranks.csv ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ model_name,ag_input_researcher,large_farm_operator,professional_farmer,small_farmer,Average (All Topics)
2
+ Gpt-5.1,4.863,4.8563,4.829,4.847,4.849488
3
+ Gpt-5,4.8291,4.8157,4.8326,4.83,4.826886
4
+ Gemini-3-pro-preview,4.666,4.6079,4.6675,4.6234,4.642161
5
+ Grok-4.1-fast-thinking,4.69,4.5766,4.6732,4.6105,4.640192
6
+ Gemini-2.5-pro,4.6355,4.6165,4.6455,4.6207,4.629951
7
+ Claude-opus-4.5,4.5737,4.6343,4.5622,4.6384,4.599825
8
+ Gpt-5-mini,4.7231,4.6051,4.5618,4.4562,4.593985
9
+ Mistral-large-2512,4.659,4.5366,4.6212,4.509,4.586085
10
+ Qwen3-235B-A22B-Thinking-2507,4.5097,4.5857,4.6659,4.5961,4.585353
11
+ Grok-4.1-fast,4.6785,4.5224,4.5885,4.5162,4.582359
12
+ Gpt-oss-120b,4.6087,4.5807,4.593,4.4973,4.573846
13
+ Kimi-k2-thinking,4.634,4.494,4.5895,4.4957,4.559271
14
+ GLM-4.5,4.6119,4.526,4.5857,4.4829,4.556426
15
+ DeepSeek-R1-0528,4.5051,4.6084,4.5048,4.5292,4.536355
16
+ Grok-4,4.5685,4.5007,4.5608,4.5004,4.535187
17
+ Minimax-m2,4.6066,4.4987,4.5125,4.4551,4.524406
18
+ Kimi-K2-Instruct,4.5832,4.5291,4.525,4.4066,4.517386
19
+ Gemini-2.5-flash,4.5413,4.4201,4.5151,4.4059,4.474788
20
+ Qwen3-30b-a3b-instruct-2507,4.54,4.4559,4.4617,4.3567,4.460202
21
+ Claude-sonnet-4.5,4.4441,4.4644,4.525,4.371,4.453395
22
+ Claude-haiku-4.5,4.4427,4.5125,4.43,4.3842,4.444505
23
+ Qwen3-next-80b-a3b-thinking,4.5092,4.4749,4.4289,4.3108,4.438935
24
+ Deepseek-v3.2-exp,4.4328,4.3592,4.3597,4.3478,4.378167
25
+ Deepseek-v3.1,4.4572,4.3484,4.419,4.2556,4.376998
26
+ Gemma-3-27b-it,4.394,4.3432,4.3801,4.2116,4.338555
27
+ Gemini-2.5-flash-lite,4.3488,4.3213,4.3988,4.2332,4.329005
28
+ Grok-3-mini,4.3717,4.2871,4.3487,4.2555,4.319847
29
+ GLM-4.5-Air,4.3656,4.2487,4.3356,4.1438,4.27902
30
+ Llama-3.3-nemotron-super-49b-v1.5,4.3147,4.2789,4.2996,4.1616,4.269078
31
+ DeepSeek-V3-0324,4.2683,4.1607,4.1949,4.0821,4.183133
32
+ Llama-3.1-nemotron-ultra-253b-v1,4.2203,4.1373,4.2135,4.0533,4.162814
33
+ Magistral-small-2506,3.9699,3.9472,3.9491,3.75,3.91141
34
+ Claude-3.5-haiku,3.702,3.6301,3.7107,3.6552,3.675851
35
+ Llama-4-maverick,3.7023,3.6229,3.6854,3.6135,3.658967
36
+ Llama-4-scout,3.6244,3.5716,3.6625,3.5826,3.611308
37
+ Nova-lite-v1,3.5711,3.5237,3.534,3.3979,3.512871
38
+ Nova-pro-v1,3.5688,3.4615,3.4772,3.3665,3.475812
39
+ Phi-4,3.5231,3.4759,3.4348,3.3119,3.444269
40
+ Nemotron-nano-9b-v2,3.4208,3.363,3.6285,3.3178,3.433889
41
+ Phi-3-mini-128k-instruct,2.9897,2.8978,2.9211,2.7415,2.900158
runs/agro1_2025-12-10/metadata.json ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "run_id": "agro1_2025-12-10",
3
+ "title": "AutoBench Agronomy LLM Benchmark - December 2025",
4
+ "date": "2025-12-10",
5
+ "description": "The first AutoBench run for the Agronomy domain with models Gemini 3 Pro, Gpt 5.1, Grok 4.1, Opus 4.5 and more",
6
+ "blog_url": "https://autobench.org/blog/autobench-Run-Agronomy-1",
7
+ "model_count": 40,
8
+ "is_latest": true
9
+ }
runs/agro1_2025-12-10/p99_latency.csv ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ model_name,ag_input_researcher,large_farm_operator,professional_farmer,small_farmer,Average (All Topics)
2
+ Claude-3.5-haiku,82.5566,135.9821,39.3398,34.8821,73.1902
3
+ Claude-haiku-4.5,505.3357,429.9719,428.2429,97.9653,365.379
4
+ Claude-opus-4.5,262.264,287.1955,218.8891,184.0394,238.097
5
+ Claude-sonnet-4.5,303.1843,327.0878,414.7977,88.6822,283.438
6
+ DeepSeek-R1-0528,140.1887,169.6385,128.3669,198.5396,159.1834
7
+ Deepseek-v3.1,183.559,139.6033,187.8543,111.7123,155.6822
8
+ Deepseek-v3.2-exp,244.6648,406.296,299.7771,574.8698,381.4019
9
+ DeepSeek-V3-0324,138.0578,103.0631,61.6246,99.771,100.6291
10
+ Gemini-2.5-flash,74.7668,114.7857,52.9284,117.9559,90.1092
11
+ Gemini-2.5-flash-lite,46.7497,126.2675,78.7248,67.6446,79.8466
12
+ Gemini-2.5-pro,111.8302,211.7131,333.2687,90.8846,186.9242
13
+ Gemini-3-pro-preview,85.0926,226.4012,123.7004,136.8691,143.0158
14
+ Gemma-3-27b-it,125.7667,143.5144,98.4628,78.5559,111.575
15
+ GLM-4.5,177.9138,268.271,156.8146,199.9109,200.7276
16
+ GLM-4.5-Air,133.3058,203.0477,111.3307,129.7452,144.3573
17
+ Gpt-5,270.6952,231.2776,460.3306,287.0378,312.3353
18
+ Gpt-5.1,258.928,421.9474,310.7583,398.995,347.6572
19
+ Gpt-5-mini,178.0177,200.8393,382.6333,135.2867,224.1943
20
+ Gpt-oss-120b,93.7805,232.8232,156.9678,126.4374,152.5022
21
+ Grok-3-mini,111.2626,95.6699,49.413,131.7218,97.0168
22
+ Grok-4,207.0462,201.0777,248.9985,222.3432,219.8664
23
+ Grok-4.1-fast,76.1989,50.1805,81.6435,51.9029,64.9814
24
+ Grok-4.1-fast-thinking,141.7984,183.0821,124.9736,257.7874,176.9104
25
+ Kimi-K2-Instruct,126.4835,101.5517,58.2718,59.561,86.467
26
+ Kimi-k2-thinking,295.3688,424.5914,404.3292,316.7339,360.2558
27
+ Llama-3.1-nemotron-ultra-253b-v1,77.4809,264.0078,111.0316,196.0713,162.1479
28
+ Llama-3.3-nemotron-super-49b-v1.5,144.0573,190.3735,157.552,172.3549,166.0844
29
+ Llama-4-maverick,46.0676,79.7149,42.1974,93.3224,65.3256
30
+ Llama-4-scout,84.3751,50.6148,64.8301,40.7168,60.1342
31
+ Magistral-small-2506,54.7524,39.98,40.8038,90.1499,56.4215
32
+ Minimax-m2,235.9933,245.0915,306.1565,166.9771,238.5546
33
+ Mistral-large-2512,172.1075,172.686,100.2139,127.0358,143.0108
34
+ Nemotron-nano-9b-v2,113.5692,74.4862,108.4956,59.4245,88.9939
35
+ Nova-lite-v1,46.3406,40.7874,44.4915,35.3865,41.7515
36
+ Nova-pro-v1,59.3802,45.6604,38.874,39.8618,45.9441
37
+ Phi-3-mini-128k-instruct,63.87,192.8367,201.1701,113.9741,142.9627
38
+ Phi-4,45.1202,48.9424,37.5591,108.0087,59.9076
39
+ Qwen3-235B-A22B-Thinking-2507,190.5779,230.2108,179.2427,418.9615,254.7482
40
+ Qwen3-30b-a3b-instruct-2507,138.3152,134.0822,368.9601,56.5183,174.469
41
+ Qwen3-next-80b-a3b-thinking,87.0402,165.2689,106.1542,149.1552,126.9046
runs/agro1_2025-12-10/summary_data.csv ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Model,Iterations,AutoBench,LMArena,AAI Index,MMLU-Pro,Costs (USD),Avg Answer Duration (sec),P99 Answer Duration (sec),Fail Rate %
2
+ Claude-3.5-haiku,205,3.676,,,,0.0067,12.37,73.19,0.00%
3
+ Claude-haiku-4.5,196,4.445,,,,0.0195,52.84,365.38,0.51%
4
+ Claude-opus-4.5,194,4.600,,,,0.0731,66.00,238.10,1.52%
5
+ Claude-sonnet-4.5,203,4.453,,,,0.0208,42.23,283.44,0.98%
6
+ DeepSeek-R1-0528,198,4.536,,,,0.0030,53.70,159.18,3.41%
7
+ Deepseek-v3.1,205,4.377,,,,0.0010,29.33,155.68,0.00%
8
+ Deepseek-v3.2-exp,194,4.378,,,,0.0008,71.34,381.40,1.52%
9
+ DeepSeek-V3-0324,205,4.183,,,,0.0007,26.09,100.63,0.00%
10
+ Gemini-2.5-flash,204,4.475,,,,0.0043,16.98,90.11,0.49%
11
+ Gemini-2.5-flash-lite,200,4.329,,,,0.0007,10.98,79.85,2.44%
12
+ Gemini-2.5-pro,205,4.630,,,,0.0395,50.43,186.92,0.00%
13
+ Gemini-3-pro-preview,194,4.642,,,,0.0388,46.15,143.02,1.52%
14
+ Gemma-3-27b-it,204,4.339,,,,0.0003,30.64,111.58,0.49%
15
+ GLM-4.5,204,4.556,,,,0.0034,50.84,200.73,0.49%
16
+ GLM-4.5-Air,196,4.279,,,,0.0016,35.26,144.36,4.39%
17
+ Gpt-5,192,4.827,,,,0.0543,112.19,312.34,1.54%
18
+ Gpt-5.1,195,4.849,,,,0.0770,140.66,347.66,1.02%
19
+ Gpt-5-mini,196,4.594,,,,0.0081,74.34,224.19,4.39%
20
+ Gpt-oss-120b,205,4.574,,,,0.0007,34.63,152.50,0.00%
21
+ Grok-3-mini,204,4.320,,,,0.0010,23.30,97.02,0.49%
22
+ Grok-4,197,4.535,,,,0.0341,70.41,219.87,3.90%
23
+ Grok-4.1-fast,197,4.582,,,,0.0008,24.09,64.98,0.00%
24
+ Grok-4.1-fast-thinking,197,4.640,,,,0.0007,45.41,176.91,0.00%
25
+ Kimi-K2-Instruct,205,4.517,,,,0.0021,21.11,86.47,0.00%
26
+ Kimi-k2-thinking,192,4.559,,,,0.0080,68.03,360.26,2.54%
27
+ Llama-3.1-nemotron-ultra-253b-v1,203,4.163,,,,0.0021,35.68,162.15,0.98%
28
+ Llama-3.3-nemotron-super-49b-v1.5,196,4.269,,,,0.0011,35.56,166.08,0.51%
29
+ Llama-4-maverick,205,3.659,,,,0.0005,12.09,65.33,0.00%
30
+ Llama-4-scout,205,3.611,,,,0.0002,15.16,60.13,0.00%
31
+ Magistral-small-2506,203,3.911,,,,0.0010,7.51,56.42,0.98%
32
+ Minimax-m2,193,4.524,,,,0.0036,68.36,238.55,2.03%
33
+ Mistral-large-2512,175,4.586,,,,0.0033,61.60,143.01,0.00%
34
+ Nemotron-nano-9b-v2,194,3.434,,,,0.0003,17.50,88.99,1.52%
35
+ Nova-lite-v1,205,3.513,,,,0.0002,6.53,41.75,0.00%
36
+ Nova-pro-v1,205,3.476,,,,0.0016,7.84,45.94,0.00%
37
+ Phi-3-mini-128k-instruct,186,2.900,,,,0.0002,19.89,142.96,5.58%
38
+ Phi-4,205,3.444,,,,0.0001,14.87,59.91,0.00%
39
+ Qwen3-235B-A22B-Thinking-2507,193,4.585,,,,0.0013,74.18,254.75,5.85%
40
+ Qwen3-30b-a3b-instruct-2507,204,4.460,,,,0.0003,21.87,174.47,0.49%
41
+ Qwen3-next-80b-a3b-thinking,204,4.439,,,,0.0040,32.19,126.90,0.49%