PeterKruger committed
Commit a0bc03a · verified · 1 Parent(s): d518d6b

Run of November 28, 2025

runs/run_2025-11-28/avg_latency.csv ADDED
@@ -0,0 +1,33 @@
+ model_name,coding,creative writing,current news,general culture,grammar,history,logics,math,science,technology,Average (All Topics),
+ Claude-haiku-4.5,58.7391,45.1571,33.8539,25.816,49.3062,33.4654,56.5625,39.3133,29.8127,45.016,41.7042,41.83
+ Claude-opus-4-1,160.936,84.6247,90.3992,92.8329,81.9117,85.4813,193.8744,174.7207,96.4217,74.0471,113.525,113.96
+ Claude-sonnet-4.5,89.2451,54.2669,64.1351,42.1343,54.5329,53.5087,70.5153,52.3109,43.103,53.4261,57.7178,58.05
+ DeepSeek-R1-0528,222.8545,45.3084,63.747,48.1999,57.561,61.3735,231.8319,225.2358,50.5751,61.3046,106.7992,107.45
+ Deepseek-v3.2-exp,169.0653,55.1251,56.8076,56.6134,78.9261,48.6402,215.1592,219.9205,43.0358,51.9097,99.5203,99.89
+ Gemini-2.5-flash,68.0633,27.6973,27.569,22.3158,33.6431,23.2926,59.9638,52.2344,47.3171,31.15,39.3246,40.03
+ Gemini-2.5-flash-lite,24.3631,10.261,11.3181,13.2746,12.9128,10.8387,20.9955,14.9352,12.5418,10.5412,14.1982,14.38
+ Gemini-2.5-pro,91.2418,38.1668,41.1249,38.6921,47.2019,41.8959,72.0911,66.6108,56.266,44.0559,53.7347,54.51
+ Gemini-3-pro-preview,63.677,44.6107,33.2165,30.1365,48.3047,32.3809,67.6419,63.0423,40.5995,31.455,45.5065,45.45
+ Gemma-3-27b-it,44.7719,50.4246,25.8422,45.4665,25.0368,32.6465,45.7737,33.2924,23.1611,34.8024,36.1218,36.21
+ GLM-4.6,133.1697,68.0359,61.7867,69.6313,98.0989,66.1604,184.1942,154.2589,88.9124,74.4897,99.8738,100.48
+ Gpt-5,196.5078,123.3507,162.4477,120.2568,140.436,153.6043,219.7226,123.2416,145.3858,125.5599,151.0513,151.90
+ Gpt-5.1,158.931,101.4765,119.542,88.9939,122.2953,137.4691,194.2428,158.5537,120.7889,90.0683,129.2361,129.78
+ Gpt-5-nano,91.1241,69.7506,53.5343,51.303,84.1889,53.8808,91.6712,77.4804,58.477,52.2229,68.3633,68.67
+ Gpt-oss-120b,27.4284,14.345,19.7344,14.2113,14.3535,17.0332,22.2331,13.2228,15.6451,11.4725,16.9679,17.14
+ Grok-3-mini,31.277,16.3344,17.4117,14.1025,20.144,14.2064,24.7938,27.0658,15.8205,16.2357,19.7392,19.90
+ Grok-4.1-fast,33.2591,16.3825,24.1466,18.5611,22.1003,23.044,34.6221,14.9256,23.1455,21.0239,23.1211,23.30
+ Grok-4.1-fast-thinking,98.0177,41.5683,32.4349,30.6874,81.1326,30.1515,134.1396,96.4524,55.0547,31.5393,63.1178,63.81
+ Kimi-k2-0905,72.378,28.7963,39.4441,51.217,20.7327,39.6662,44.9747,41.3452,58.0402,49.5102,44.6104,45.14
+ Kimi-k2-thinking,126.1738,105.8907,47.8623,41.5867,79.1883,55.1272,174.7176,107.2654,63.4829,54.6113,85.5906,85.92
+ Llama-3.3-70b-instruct,27.1267,17.8344,15.8348,16.7581,15.2413,21.6736,21.2501,22.0717,14.4405,10.859,18.309,18.39
+ Llama-3.3-nemotron-super-49b-v1.5,80.6883,26.7635,24.3885,23.7254,42.3479,24.0025,94.4969,73.9853,26.5396,21.8686,43.8807,44.39
+ Llama-4-maverick,17.5852,10.1771,10.6483,10.8751,16.7924,10.6112,14.4204,17.7789,11.1265,9.3529,12.9368,13.00
+ Magistral-medium-2506,22.3257,10.4762,14.4269,8.2297,10.1116,8.7327,41.656,40.9489,8.7825,8.6307,17.4321,17.49
+ Mistral-small-3.2-24b-instruct,21.76,11.1941,13.1085,8.9558,14.8763,11.242,17.1284,20.9789,13.8524,12.3487,14.5445,14.65
+ Nemotron-nano-9b-v2,55.4504,12.8432,10.9387,11.4131,19.706,11.8203,52.1425,45.4582,15.994,9.5441,24.5311,24.98
+ Nova-premier-v1,20.2938,13.4422,12.3214,12.0255,11.8058,14.1318,11.3591,10.1795,13.8182,11.1906,13.0568,13.16
+ Nova-pro-v1,9.3119,8.2956,7.1086,6.4015,6.0344,5.6165,4.765,4.1713,5.3871,8.9823,6.6074,6.62
+ Phi-4,26.3609,19.7493,17.2262,14.0146,15.2968,15.8625,15.1601,20.1686,17.2634,16.7219,17.7824,17.88
+ Qwen3-235b-a22b-2507,74.2713,29.1597,37.7774,23.7464,43.2758,20.5732,78.3867,80.4367,27.6402,15.5055,43.0773,43.38
+ Qwen3-235B-A22B-Thinking-2507,231.3204,74.9485,88.9857,68.2265,85.9875,84.8865,287.2099,264.191,115.2909,81.0289,138.2076,135.80
+ Qwen3-30b-a3b-instruct-2507,65.3313,20.5034,18.1601,15.8745,28.0737,19.5814,93.9773,72.204,22.0913,19.7902,37.5587,38.14
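All of the per-topic CSVs in this run (avg_latency.csv, cost_data.csv, domain_ranks.csv, p99_latency.csv) share the same model_name / topic / "Average (All Topics)" layout, so they can be inspected the same way. A minimal sketch, assuming a local checkout of the repo and pandas installed (neither is part of this commit); the file path is an assumption:

```python
import pandas as pd

# Load one of the per-topic CSVs from a local checkout of this repo (path is an assumption).
df = pd.read_csv("runs/run_2025-11-28/avg_latency.csv")

# The avg_latency.csv header ends with a trailing comma, so pandas may create an
# extra "Unnamed" column; drop any such columns before analysis.
df = df.loc[:, ~df.columns.str.startswith("Unnamed")]

# Rank models by overall average answer latency (seconds), fastest first.
print(df.set_index("model_name")["Average (All Topics)"].sort_values().head(10))
```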
runs/run_2025-11-28/correlations.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "correlations": {
+ "LMArena": 86.85,
+ "Artificial Analysis Intelligence Index": 92.17,
+ "MMLU": 75.44
+ },
+ "description": "Correlation percentages between AutoBench scores and other benchmark scores"
+ }
runs/run_2025-11-28/cost_data.csv ADDED
@@ -0,0 +1,33 @@
+ model_name,coding,creative writing,current news,general culture,grammar,history,logics,math,science,technology,Average (All Topics)
+ Claude-haiku-4.5,0.02782324,0.0193475,0.01189335,0.00953859,0.01749148,0.01262032,0.03149576,0.027155,0.01324839,0.0167359,0.0188
+ Claude-opus-4-1,0.22495095,0.10686,0.0989385,0.10063364,0.11554266,0.10658129,0.28635984,0.3074815,0.104945,0.09116274,0.1544
+ Claude-sonnet-4.5,0.06396089,0.03733868,0.03408552,0.02401341,0.03588736,0.03018813,0.06222236,0.0570202,0.02872903,0.03085006,0.0406
+ DeepSeek-R1-0528,0.01278123,0.00216561,0.00260693,0.00230966,0.00263934,0.00277899,0.01225792,0.01500126,0.00232862,0.00294141,0.0058
+ Deepseek-v3.2-exp,0.00182209,0.00070114,0.00070576,0.00052683,0.00097487,0.00050592,0.00287864,0.00287508,0.0004787,0.00058898,0.0012
+ Gemini-2.5-flash,0.02397599,0.00951452,0.00860898,0.00661878,0.01186397,0.00708986,0.02935418,0.02453523,0.00984654,0.00911329,0.0142
+ Gemini-2.5-flash-lite,0.00223114,0.00092407,0.00096084,0.00080245,0.00132023,0.00092236,0.00230427,0.0023624,0.00120676,0.00093211,0.0014
+ Gemini-2.5-pro,0.07182939,0.03296976,0.03360315,0.03220809,0.04184051,0.03482206,0.07787136,0.08005488,0.03648847,0.03748657,0.0484
+ Gemini-3-pro-preview,0.06339117,0.04200606,0.02907568,0.02870759,0.04727903,0.02902923,0.08468593,0.07668755,0.03539894,0.02877477,0.0462
+ Gemma-3-27b-it,0.00049348,0.00041525,0.00026073,0.00035877,0.00027512,0.00031754,0.00040566,0.00036665,0.00025037,0.00031837,0.0003
+ GLM-4.6,0.01122536,0.00466124,0.0054479,0.00483747,0.00837963,0.00522661,0.01611536,0.01419832,0.0068397,0.00565101,0.0083
+ Gpt-5,0.08995687,0.06581961,0.07143667,0.0606533,0.07537652,0.0710823,0.12402037,0.07928714,0.07037777,0.05913202,0.0771
+ Gpt-5.1,0.08578476,0.06460375,0.06781714,0.04438633,0.08095288,0.06698198,0.13158449,0.0932006,0.06264889,0.05239137,0.0753
+ Gpt-5-nano,0.00294854,0.00279771,0.00199923,0.0019683,0.00355923,0.00214949,0.00368366,0.00313596,0.00207279,0.00184626,0.0026
+ Gpt-oss-120b,0.00083633,0.00050696,0.00060142,0.00045741,0.00051434,0.00054701,0.00081747,0.00062792,0.00053388,0.00049203,0.0006
+ Grok-3-mini,0.0016217,0.00086379,0.00081357,0.00072475,0.00102859,0.00073543,0.00144468,0.00155123,0.00077988,0.00075479,0.0010
+ Grok-4.1-fast,0.00097423,0.00049472,0.0006853,0.00058176,0.00064959,0.00067308,0.00107506,0.00055874,0.00064664,0.00061459,0.0007
+ Grok-4.1-fast-thinking,0.00274141,0.00121613,0.00092416,0.00092805,0.00243251,0.00087374,0.00437985,0.00346987,0.00140277,0.00091148,0.0019
+ Kimi-k2-0905,0.00263697,0.00158462,0.00197556,0.00161077,0.00132902,0.00191064,0.00207122,0.00116034,0.00150342,0.00158279,0.0017
+ Kimi-k2-thinking,0.01249258,0.01247993,0.00483629,0.00473048,0.00806634,0.00479222,0.02349011,0.01635415,0.00596194,0.00476654,0.0098
+ Llama-3.3-70b-instruct,0.00055486,0.00027292,0.00032242,0.0003072,0.00032997,0.0003389,0.00042672,0.0004164,0.00033429,0.00030021,0.0004
+ Llama-3.3-nemotron-super-49b-v1.5,0.00225777,0.00086849,0.00071712,0.00070383,0.00127625,0.00071879,0.00292434,0.00219246,0.00079359,0.00063317,0.0013
+ Llama-4-maverick,0.00071101,0.00039311,0.00041975,0.00041546,0.00048458,0.00044022,0.00061473,0.00067953,0.00039694,0.00039935,0.0005
+ Magistral-medium-2506,0.01149008,0.00473003,0.00432403,0.00324703,0.00472455,0.00393006,0.02409512,0.02440416,0.00408835,0.00369019,0.0089
+ Mistral-small-3.2-24b-instruct,0.00025285,0.00013076,0.00014449,0.00010989,0.00015809,0.00011975,0.00021938,0.00020709,0.00013364,0.00011866,0.0002
+ Nemotron-nano-9b-v2,0.00087371,0.00020862,0.00015202,0.00017596,0.00029949,0.00016478,0.00083356,0.00073602,0.00023461,0.00013783,0.0004
+ Nova-premier-v1,0.01354635,0.00751391,0.00769306,0.00737309,0.0085828,0.00804317,0.0101078,0.01209453,0.00823439,0.00725153,0.0091
+ Nova-pro-v1,0.0028077,0.0019437,0.00151352,0.00135518,0.00169447,0.00142795,0.00158109,0.00166325,0.00139602,0.00136266,0.0017
+ Phi-4,0.00015383,0.00014198,0.00009868,0.00008972,0.00010519,0.00009403,0.00011515,0.0001382,0.00009863,0.00008935,0.0001
+ Qwen3-235b-a22b-2507,0.00395615,0.00158927,0.00170797,0.00148831,0.0030372,0.00162377,0.00628657,0.00544896,0.00189379,0.00147577,0.0029
+ Qwen3-235B-A22B-Thinking-2507,0.00342316,0.00087996,0.00108686,0.00094438,0.00125686,0.00107364,0.00426387,0.00382737,0.0011338,0.00101468,0.0018
+ Qwen3-30b-a3b-instruct-2507,0.00067842,0.00028585,0.00027735,0.00025813,0.00042092,0.00027243,0.00116349,0.00101686,0.00027423,0.00026961,0.0005
runs/run_2025-11-28/domain_ranks.csv ADDED
@@ -0,0 +1,33 @@
+ model_name,coding,creative writing,current news,general culture,grammar,history,logics,math,science,technology,Average (All Topics)
+ Claude-haiku-4.5,4.1917,4.2772,4.3494,4.386,4.2697,4.4817,3.939,4.0344,4.3804,4.4381,4.2731
+ Claude-opus-4-1,4.0899,4.369,4.3966,4.4782,4.2203,4.4449,3.8805,4.0914,4.3603,4.3448,4.2662
+ Claude-sonnet-4.5,4.1416,4.4148,4.3544,4.4328,4.3189,4.5027,4.0525,4.1383,4.3837,4.4235,4.3139
+ DeepSeek-R1-0528,4.0838,4.2435,4.3456,4.3995,4.2069,4.4234,3.9294,3.9978,4.3339,4.3849,4.2340
+ Deepseek-v3.2-exp,4.1054,4.2336,4.2017,4.3101,4.0769,4.3366,3.864,4.0118,4.2454,4.2687,4.1646
+ Gemini-2.5-flash,4.3846,4.188,4.3455,4.3942,4.2847,4.3814,4.0551,4.2086,4.4161,4.3275,4.3004
+ Gemini-2.5-flash-lite,4.1562,4.2723,4.335,4.377,4.1955,4.3974,3.8953,3.9731,4.2734,4.3451,4.2201
+ Gemini-2.5-pro,4.3583,4.3686,4.3391,4.5184,4.3676,4.3718,4.2106,4.3112,4.4217,4.4253,4.3696
+ Gemini-3-pro-preview,4.325,4.4552,4.4234,4.4891,4.3237,4.4394,4.268,4.2951,4.416,4.4131,4.3859
+ Gemma-3-27b-it,3.2386,3.7528,4.1354,4.0563,3.6841,4.132,3.0037,2.8486,4.0812,4.1124,3.6967
+ GLM-4.6,4.1238,4.2281,4.3688,4.503,4.1579,4.4552,3.8243,4.0604,4.4665,4.3303,4.2525
+ Gpt-5,4.3926,4.4247,4.5446,4.6334,4.4041,4.5833,4.279,4.2304,4.4759,4.5755,4.4526
+ Gpt-5.1,4.4299,4.4492,4.5391,4.573,4.5075,4.5879,4.3681,4.2867,4.5862,4.5259,4.4855
+ Gpt-5-nano,4.2979,4.2957,4.3727,4.4591,4.1985,4.4156,3.9972,4.2141,4.4375,4.465,4.3161
+ Gpt-oss-120b,4.4229,4.2552,4.4324,4.4203,4.3395,4.4717,4.0555,4.2512,4.5624,4.4299,4.3651
+ Grok-3-mini,3.8811,4.1162,4.184,4.2495,4.1483,4.2647,3.7997,3.7182,4.212,4.2217,4.0764
+ Grok-4.1-fast,3.884,4.3391,4.4041,4.4098,4.2793,4.4266,3.6652,3.3695,4.4267,4.4218,4.1660
+ Grok-4.1-fast-thinking,4.2711,4.3638,4.3596,4.4644,4.3537,4.3997,4.1521,4.1448,4.4533,4.4475,4.3416
+ Kimi-k2-0905,4.1293,4.1777,4.5454,4.553,4.1704,4.4771,3.5109,3.6148,4.4204,4.4964,4.2095
+ Kimi-k2-thinking,4.3463,4.2593,4.5484,4.4647,4.2699,4.4278,4.1234,4.1716,4.4039,4.4046,4.3424
+ Llama-3.3-70b-instruct,3.046,3.4953,3.7739,4.0223,3.6222,3.8971,2.9618,3.0308,3.8422,3.8866,3.5529
+ Llama-3.3-nemotron-super-49b-v1.5,3.9181,4.1363,4.2179,4.2898,4.0581,4.3218,3.7955,3.7745,4.2692,4.2628,4.1026
+ Llama-4-maverick,3.2408,3.5467,3.7575,3.9292,3.5997,3.8662,3.1961,3.3612,3.7673,3.8701,3.6088
+ Magistral-medium-2506,3.776,4.0234,4.1889,4.1948,3.9164,4.1974,3.4782,3.4167,4.1359,4.1931,3.9490
+ Mistral-small-3.2-24b-instruct,3.469,3.8873,4.1444,4.1454,3.7286,4.1003,3.3739,3.2327,4.0151,4.0799,3.8118
+ Nemotron-nano-9b-v2,3.2732,3.6016,3.7992,3.9554,3.3375,3.9192,3.2896,3.246,3.757,3.8944,3.6025
+ Nova-premier-v1,3.3722,3.7159,3.847,3.9585,3.7453,4.059,3.1779,3.3042,3.9447,3.9287,3.7007
+ Nova-pro-v1,3.0891,3.4849,3.5463,3.7489,3.252,3.6208,2.892,2.5606,3.5804,3.7082,3.3464
+ Phi-4,3.0892,3.3096,3.7247,3.8606,3.2936,3.8322,2.9117,3.1191,3.7833,3.6977,3.4590
+ Qwen3-235b-a22b-2507,4.1716,4.2641,4.3589,4.3407,4.174,4.3734,4.079,3.9034,4.3938,4.2884,4.2354
+ Qwen3-235B-A22B-Thinking-2507,4.0006,4.3038,4.4797,4.5268,4.269,4.4649,3.9459,3.9141,4.3524,4.4599,4.2774
+ Qwen3-30b-a3b-instruct-2507,4.058,4.244,4.3333,4.4119,4.101,4.4201,3.8975,3.9898,4.3353,4.3511,4.2108
runs/run_2025-11-28/metadata.json ADDED
@@ -0,0 +1,9 @@
+ {
+ "run_id": "run_2025-11-28",
+ "title": "AutoBench Run 4 - November 2025",
+ "date": "2025-11-28",
+ "description": "Latest AutoBench run with models Gemini 3 Pro, Gpt 5.1, Grok 4.1 and more",
+ "blog_url": "https://huggingface.co/blog/PeterKruger/autobench-4th-run",
+ "model_count": 33,
+ "is_latest": true
+ }
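A minimal sketch for reading the run metadata above together with the correlations.json added earlier in this commit, assuming the JSON files are opened from a local checkout (paths are an assumption):

```python
import json

# Read the run metadata (run id, date, model count, latest flag).
with open("runs/run_2025-11-28/metadata.json") as f:
    meta = json.load(f)

# Read the correlation percentages against other benchmarks.
with open("runs/run_2025-11-28/correlations.json") as f:
    corr = json.load(f)["correlations"]

print(f'{meta["title"]} ({meta["date"]}), {meta["model_count"]} models, latest={meta["is_latest"]}')
for benchmark, pct in corr.items():
    print(f"Correlation with {benchmark}: {pct}%")
```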
runs/run_2025-11-28/p99_latency.csv ADDED
@@ -0,0 +1,33 @@
+ model_name,coding,creative writing,current news,general culture,grammar,history,logics,math,science,technology,Average (All Topics)
+ Claude-haiku-4.5,231.4126,182.211,124.5176,61.5514,195.6411,81.9893,240.0292,106.3872,93.3368,341.5921,165.8668
+ Claude-opus-4-1,591.2236,247.2937,224.1681,292.507,193.1293,132.6653,584.8942,700.2333,514.5808,138.2082,361.8904
+ Claude-sonnet-4.5,339.5576,180.1084,229.0065,87.0519,190.2576,125.2703,225.0131,133.9901,98.1213,256.3547,186.4731
+ DeepSeek-R1-0528,696.459,117.9992,203.3626,126.8014,191.3156,174.139,777.4036,618.7027,210.8926,358.7037,347.5779
+ Deepseek-v3.2-exp,788.6013,181.3498,249.1684,300.7517,407.0821,137.4158,581.9927,693.0214,238.8906,257.4904,383.5764
+ Gemini-2.5-flash,181.8915,93.1984,81.5899,48.3981,88.1856,43.7631,164.8749,190.1452,465.0054,185.2975,154.235
+ Gemini-2.5-flash-lite,107.4923,63.5219,29.3151,111.2271,32.5361,24.9558,147.5216,48.2513,81.0012,43.5632,68.9386
+ Gemini-2.5-pro,413.9983,97.4841,87.5893,52.1312,115.8275,61.5828,189.1193,161.1094,409.1035,109.0163,169.6962
+ Gemini-3-pro-preview,167.6045,99.8047,53.8969,48.0169,117.1739,79.4075,211.76,136.349,200.9484,71.4403,118.6402
+ Gemma-3-27b-it,297.5534,379.4592,87.3154,422.392,129.0642,278.2038,411.4459,158.624,71.5718,286.801,252.2431
+ GLM-4.6,451.619,198.5386,119.3534,149.1258,404.2955,121.1906,645.3553,800.4194,466.6574,261.8929,361.8448
+ Gpt-5,653.8983,308.8149,429.8379,271.8919,403.3629,259.6311,650.33,420.8029,535.197,407.1706,434.0938
+ Gpt-5.1,626.9056,248.2025,321.4456,197.7342,321.1098,258.3677,575.3715,583.1459,434.7925,292.2947,385.937
+ Gpt-5-nano,313.9615,224.0475,107.7527,129.2811,215.3463,105.3312,259.002,235.532,286.0344,195.7345,207.2023
+ Gpt-oss-120b,75.6918,54.4651,62.0918,45.3167,49.0986,54.4429,82.292,28.1551,67.1081,31.7049,55.0367
+ Grok-3-mini,79.3929,62.9878,37.2777,22.8073,45.5145,26.2726,82.4834,61.2882,52.9763,56.8612,52.7862
+ Grok-4.1-fast,86.2102,42.7469,48.1059,39.4626,47.0274,36.563,181.1955,47.925,71.8339,58.6299,65.97
+ Grok-4.1-fast-thinking,309.1013,131.2823,79.0504,128.7197,216.8412,45.2189,549.8862,386.6343,302.1156,85.9001,223.475
+ Kimi-k2-0905,273.4298,159.628,119.4317,145.4993,108.3082,132.8953,449.9851,134.4842,428.5853,252.4913,220.4738
+ Kimi-k2-thinking,734.0172,512.6387,402.2916,164.35,286.9011,208.542,575.2417,696.8476,451.5535,252.6533,428.5037
+ Llama-3.3-70b-instruct,115.8297,111.3032,60.4233,53.1394,52.3116,113.0242,91.5282,106.514,62.983,27.48,79.4537
+ Llama-3.3-nemotron-super-49b-v1.5,304.3072,65.6078,60.3173,55.7494,143.3374,32.297,212.3071,260.339,86.7385,59.646,128.0647
+ Llama-4-maverick,58.7196,46.823,37.6007,40.2208,70.609,29.8922,43.1468,65.041,60.5671,30.3509,48.2971
+ Magistral-medium-2506,109.369,19.3132,91.2219,25.9365,30.7296,16.7314,93.7537,136.713,23.7332,26.3281,57.383
+ Mistral-small-3.2-24b-instruct,67.1496,43.8659,34.5422,23.8522,54.6547,30.8032,78.3374,81.6273,31.9764,39.8893,48.6698
+ Nemotron-nano-9b-v2,312.8197,32.6838,17.9231,43.7383,131.372,21.3332,170.0116,178.1349,119.0989,25.5748,105.269
+ Nova-premier-v1,77.3661,28.8272,22.4307,18.9203,23.3051,24.1045,21.6599,18.8279,72.9933,24.942,33.3377
+ Nova-pro-v1,25.2831,26.0173,14.3486,11.8769,11.1153,13.8959,7.3166,8.004,13.482,80.0376,21.1377
+ Phi-4,69.8352,50.6929,31.2228,23.5781,56.3044,25.3018,35.5095,45.5401,75.5568,59.196,47.2738
+ Qwen3-235b-a22b-2507,450.7437,252.5096,337.092,82.0797,220.5607,62.453,231.1371,332.2794,196.0398,60.6231,222.5518
+ Qwen3-235B-A22B-Thinking-2507,712.4944,187.4573,223.1384,195.8595,220.9073,177.2165,900.7489,778.1525,532.4011,278.8527,420.7229
+ Qwen3-30b-a3b-instruct-2507,272.0321,125.7736,51.6887,37.7143,139.7261,78.4412,239.2966,213.1199,72.0778,83.0509,131.2921
runs/run_2025-11-28/summary_data.csv ADDED
@@ -0,0 +1,33 @@
+ Model,Iterations,AutoBench,LMArena,AAI Index,MMLU-Pro,Costs (USD),Avg Answer Duration (sec),P99 Answer Duration (sec),Fail Rate %
+ Claude-haiku-4.5,330,4.2731,1402,55,76%,0.0188,41.83,165.87,0.30%
+ Claude-opus-4-1,324,4.2662,1449,59,88%,0.1544,113.96,361.89,2.11%
+ Claude-sonnet-4.5,326,4.3139,1449,63,88%,0.0406,58.05,186.47,1.51%
+ DeepSeek-R1-0528,325,4.2340,1395,52,85%,0.0058,107.45,347.58,1.81%
+ Deepseek-v3.2-exp,328,4.1646,1421,57,85%,0.0012,99.89,383.58,0.91%
+ Gemini-2.5-flash,329,4.3004,1405,54,84%,0.0142,40.03,154.24,0.60%
+ Gemini-2.5-flash-lite,330,4.2201,1380,48,81%,0.0014,14.38,68.94,0.30%
+ Gemini-2.5-pro,329,4.3696,1451,60,86%,0.0484,54.51,169.70,0.60%
+ Gemini-3-pro-preview,324,4.3859,1495,73,90%,0.0462,45.45,118.64,2.11%
+ Gemma-3-27b-it,326,3.6967,1364,22,67%,0.0003,36.21,252.24,1.51%
+ GLM-4.6,327,4.2525,1426,56,83%,0.0083,100.48,361.84,1.21%
+ Gpt-5,328,4.4526,1437,68,87%,0.0771,151.90,434.09,0.91%
+ Gpt-5.1,328,4.4855,1454,70,87%,0.0753,129.78,385.94,0.91%
+ Gpt-5-nano,329,4.3161,1338,49,77%,0.0026,68.67,207.20,0.60%
+ Gpt-oss-120b,329,4.3651,1352,61,81%,0.0006,17.14,55.04,0.60%
+ Grok-3-mini,331,4.0764,1410,57,83%,0.0010,19.90,52.79,0.00%
+ Grok-4.1-fast,327,4.1660,1462,,,0.0007,23.30,65.97,1.21%
+ Grok-4.1-fast-thinking,329,4.3416,1481,64,85%,0.0019,63.81,223.48,0.60%
+ Kimi-k2-0905,328,4.2095,1416,50,82%,0.0017,45.14,220.47,0.91%
+ Kimi-k2-thinking,328,4.3424,1429,67,85%,0.0098,85.92,428.50,0.91%
+ Llama-3.3-70b-instruct,331,3.5529,1319,28,71%,0.0004,18.39,79.45,0.00%
+ Llama-3.3-nemotron-super-49b-v1.5,331,4.1026,1340,45,81%,0.0013,44.39,128.06,0.00%
+ Llama-4-maverick,330,3.6088,1327,36,81%,0.0005,13.00,48.30,0.30%
+ Magistral-medium-2506,331,3.9490,1305,33,82%,0.0089,17.49,57.38,0.00%
+ Mistral-small-3.2-24b-instruct,332,3.8118,1354,29,68%,0.0002,14.65,48.67,-0.30%
+ Nemotron-nano-9b-v2,331,3.6025,,37,74%,0.0004,24.98,105.27,0.00%
+ Nova-premier-v1,330,3.7007,,25,73%,0.0091,13.16,33.34,0.30%
+ Nova-pro-v1,330,3.3464,1288,32,69%,0.0017,6.62,21.14,0.30%
+ Phi-4,331,3.4590,1255,23,71%,0.0001,17.88,47.27,0.00%
+ Qwen3-235b-a22b-2507,330,4.2354,1374,45,83%,0.0029,43.38,222.55,0.30%
+ Qwen3-235B-A22B-Thinking-2507,319,4.2774,1397,57,84%,0.0018,135.80,420.72,3.63%
+ Qwen3-30b-a3b-instruct-2507,331,4.2108,1382,37,78%,0.0005,38.14,131.29,0.00%
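As a quick way to work with the summary table above, a minimal sketch that loads summary_data.csv, converts the percentage columns to numbers, and ranks models by AutoBench score; it assumes a local checkout of the repo and pandas, neither of which is part of this commit:

```python
import pandas as pd

# Load the per-run summary table (path assumes a local checkout of the repo).
df = pd.read_csv("runs/run_2025-11-28/summary_data.csv")

# "MMLU-Pro" and "Fail Rate %" are stored as strings like "88%"; convert to floats.
# Missing entries (e.g. no LMArena or MMLU-Pro score for some models) stay as NaN.
for col in ["MMLU-Pro", "Fail Rate %"]:
    df[col] = df[col].str.rstrip("%").astype(float)

# Rank models by AutoBench score, highest first, with cost and latency alongside.
ranked = df.sort_values("AutoBench", ascending=False)
print(ranked[["Model", "AutoBench", "Costs (USD)", "Avg Answer Duration (sec)"]].head(10))
```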