params | data | task | step | seed | chinchilla | tokens | compute | metrics
---|---|---|---|---|---|---|---|---
90M | FineWeb-Pro | mmlu_virology | 21,250 | small aux 3 | 5xC | 6,963,200,000 | 4,092,132,261,888,000,000 | {'predicted_index_raw': 1.572289156626506, 'predicted_index_per_token': 1.5481927710843373, 'predicted_index_per_char': 1.572289156626506, 'predicted_index_per_byte': 1.572289156626506, 'predicted_index_uncond': 1.4759036144578312, 'correct_choice': 1.3072289156626506, 'acc_raw': 0.20481927710843373, 'acc_per_token': 0.3192771084337349, 'acc_per_char': 0.26506024096385544, 'acc_per_byte': 0.26506024096385544, 'acc_uncond': 0.2469879518072289, 'no_answer': 0.0, 'sum_logits_corr': -23.87347162200744, 'logits_per_token_corr': -4.189659618609556, 'logits_per_char_corr': -0.8512747720704467, 'bits_per_byte_corr': 1.2230350734661486, 'correct_prob': 0.00026721138674673116, 'correct_prob_per_token': 0.03172221275231618, 'correct_prob_per_char': 0.44801736068566894, 'margin': -0.001378924231468439, 'margin_per_token': -0.018374277819877972, 'margin_per_char': -0.08412441651426296, 'total_prob': 0.002304060169571519, 'total_prob_per_token': 0.11601101185354565, 'total_prob_per_char': 1.7621646163167755, 'uncond_correct_prob': 4.479776743438574e-06, 'uncond_correct_prob_per_token': 0.009986651731848466, 'uncond_correct_prob_per_char': 0.3381252083547494, 'uncond_total_prob': 8.787645696277634e-05, 'norm_correct_prob': 0.2030882079685022, 'norm_correct_prob_per_token': 0.29562859134160496, 'norm_correct_prob_per_char': 0.25462059928639286, 'primary_metric': 0.20481927710843373} |
90M | FineWeb-Pro | mmlu_virology | 21,250 | default | 5xC | 6,963,200,000 | 4,092,132,261,888,000,000 | {'predicted_index_raw': 1.5180722891566265, 'predicted_index_per_token': 1.5240963855421688, 'predicted_index_per_char': 1.6626506024096386, 'predicted_index_per_byte': 1.5783132530120483, 'predicted_index_uncond': 1.3734939759036144, 'correct_choice': 1.3072289156626506, 'acc_raw': 0.2469879518072289, 'acc_per_token': 0.3253012048192771, 'acc_per_char': 0.2891566265060241, 'acc_per_byte': 0.21084337349397592, 'acc_uncond': 0.3072289156626506, 'no_answer': 0.0, 'sum_logits_corr': -24.065825126257288, 'logits_per_token_corr': -4.229442398171355, 'logits_per_char_corr': -0.8566327078747533, 'logits_per_byte_corr': 1.2305427077030195, 'correct_prob': 0.00042779480218285145, 'correct_prob_per_token': 0.032320618474396656, 'correct_prob_per_char': 0.4460323477622564, 'margin': -0.0007059565163747986, 'margin_per_token': -0.017822799556834013, 'margin_per_char': -0.08090285946713303, 'total_prob': 0.0017867127573647052, 'total_prob_per_token': 0.1153554017012609, 'total_prob_per_char': 1.7521542576419393, 'uncond_correct_prob': 3.9468756405216715e-06, 'uncond_correct_prob_per_token': 0.011194062262198191, 'uncond_correct_prob_per_char': 0.345629835683966, 'uncond_total_prob': 2.8792471137910096e-05, 'norm_correct_prob': 0.22667000392406, 'norm_correct_prob_per_token': 0.2898718200870894, 'norm_correct_prob_per_char': 0.25512566245417867, 'primary_metric': 0.2469879518072289} |
90M | FineWeb-Pro | mmlu_virology | 22,500 | small aux 2 | 5xC | 7,372,800,000 | 4,332,845,924,352,000,000 | {'predicted_index_raw': 1.4156626506024097, 'predicted_index_per_token': 1.4759036144578312, 'predicted_index_per_char': 1.644578313253012, 'predicted_index_per_byte': 1.644578313253012, 'predicted_index_uncond': 1.3795180722891567, 'correct_choice': 1.3072289156626506, 'acc_raw': 0.23493975903614459, 'acc_per_token': 0.3433734939759036, 'acc_per_char': 0.3192771084337349, 'acc_per_byte': 0.3192771084337349, 'acc_uncond': 0.26506024096385544, 'no_answer': 0.0, 'sum_logits_corr': -23.130785966494, 'logits_per_token_corr': -4.062508139728114, 'logits_per_char_corr': -0.8224986261065899, 'bits_per_byte_corr': 1.1815901082158002, 'correct_prob': 0.0003806886280593296, 'correct_prob_per_token': 0.03610272540397022, 'correct_prob_per_char': 0.45980769244739983, 'margin': -0.0008501034538827496, 'margin_per_token': -0.017819542198780004, 'margin_per_char': -0.07338650398260313, 'total_prob': 0.0018421149790395397, 'total_prob_per_token': 0.12526201893858227, 'total_prob_per_char': 1.785906210641369, 'uncond_correct_prob': 2.868594221672142e-06, 'uncond_correct_prob_per_token': 0.01086544240813436, 'uncond_correct_prob_per_char': 0.34173694198749055, 'uncond_total_prob': 3.0890953469767574e-05, 'norm_correct_prob': 0.22637252197824706, 'norm_correct_prob_per_token': 0.3048827006631835, 'norm_correct_prob_per_char': 0.25820763275201825, 'primary_metric': 0.23493975903614459} |
90M | FineWeb-Pro | mmlu_virology | 22,500 | small aux 3 | 5xC | 7,372,800,000 | 4,332,845,924,352,000,000 | {'predicted_index_raw': 1.5481927710843373, 'predicted_index_per_token': 1.5301204819277108, 'predicted_index_per_char': 1.644578313253012, 'predicted_index_per_byte': 1.644578313253012, 'predicted_index_uncond': 1.4879518072289157, 'correct_choice': 1.3072289156626506, 'acc_raw': 0.20481927710843373, 'acc_per_token': 0.3373493975903614, 'acc_per_char': 0.29518072289156627, 'acc_per_byte': 0.29518072289156627, 'acc_uncond': 0.30120481927710846, 'no_answer': 0.0, 'sum_logits_corr': -23.726838450833977, 'logits_per_token_corr': -4.154423747124487, 'logits_per_char_corr': -0.8437528203901434, 'bits_per_byte_corr': 1.211869431151492, 'correct_prob': 0.00041734324693532134, 'correct_prob_per_token': 0.03303696061844356, 'correct_prob_per_char': 0.4506915600254836, 'margin': -0.0017115786987588139, 'margin_per_token': -0.01978425397656874, 'margin_per_char': -0.08397035785364979, 'total_prob': 0.002876756962148444, 'total_prob_per_token': 0.12096244229600359, 'total_prob_per_char': 1.771951699688539, 'uncond_correct_prob': 4.551826366358837e-06, 'uncond_correct_prob_per_token': 0.010634540407086977, 'uncond_correct_prob_per_char': 0.3390942500081647, 'uncond_total_prob': 6.241084168019567e-05, 'norm_correct_prob': 0.2075517516614145, 'norm_correct_prob_per_token': 0.2976567706207428, 'norm_correct_prob_per_char': 0.2551542253368802, 'primary_metric': 0.20481927710843373} |
90M | FineWeb-Pro | mmlu_virology | 22,500 | default | 5xC | 7,372,800,000 | 4,332,845,924,352,000,000 | {'predicted_index_raw': 1.4879518072289157, 'predicted_index_per_token': 1.4939759036144578, 'predicted_index_per_char': 1.6566265060240963, 'predicted_index_per_byte': 1.536144578313253, 'predicted_index_uncond': 1.4457831325301205, 'correct_choice': 1.3072289156626506, 'acc_raw': 0.24096385542168675, 'acc_per_token': 0.30120481927710846, 'acc_per_char': 0.2891566265060241, 'acc_per_byte': 0.21084337349397592, 'acc_uncond': 0.26506024096385544, 'no_answer': 0.0, 'sum_logits_corr': -23.89169199782682, 'logits_per_token_corr': -4.184109405039772, 'logits_per_char_corr': -0.8459338192646013, 'logits_per_byte_corr': 1.2156279173445455, 'correct_prob': 0.0004686662809296123, 'correct_prob_per_token': 0.03384560709774735, 'correct_prob_per_char': 0.450193234300997, 'margin': -0.0012409613000193209, 'margin_per_token': -0.01787174820954577, 'margin_per_char': -0.07739996964006442, 'total_prob': 0.00240477761841723, 'total_prob_per_token': 0.11856259905797539, 'total_prob_per_char': 1.7604704173302645, 'uncond_correct_prob': 3.1823466730207415e-06, 'uncond_correct_prob_per_token': 0.010186906061178282, 'uncond_correct_prob_per_char': 0.3400168997744642, 'uncond_total_prob': 2.3641610995985197e-05, 'norm_correct_prob': 0.2373958771011884, 'norm_correct_prob_per_token': 0.29537732432711333, 'norm_correct_prob_per_char': 0.2563317347779636, 'primary_metric': 0.24096385542168675} |
90M | FineWeb-Pro | mmlu_virology | 23,750 | small aux 2 | 5xC | 7,782,400,000 | 4,573,559,586,816,000,000 | {'predicted_index_raw': 1.4819277108433735, 'predicted_index_per_token': 1.463855421686747, 'predicted_index_per_char': 1.6506024096385543, 'predicted_index_per_byte': 1.6506024096385543, 'predicted_index_uncond': 1.4156626506024097, 'correct_choice': 1.3072289156626506, 'acc_raw': 0.21084337349397592, 'acc_per_token': 0.35542168674698793, 'acc_per_char': 0.3132530120481928, 'acc_per_byte': 0.3132530120481928, 'acc_uncond': 0.2891566265060241, 'no_answer': 0.0, 'sum_logits_corr': -23.24033704435969, 'logits_per_token_corr': -4.063009371383702, 'logits_per_char_corr': -0.8234604983371766, 'bits_per_byte_corr': 1.1832003504536335, 'correct_prob': 0.00042285535541576353, 'correct_prob_per_token': 0.036627423617455794, 'correct_prob_per_char': 0.46002348072322613, 'margin': -0.0004998168332775674, 'margin_per_token': -0.015843539803270967, 'margin_per_char': -0.07101729805874868, 'total_prob': 0.0015657093709329785, 'total_prob_per_token': 0.12362128630159258, 'total_prob_per_char': 1.7774601337286444, 'uncond_correct_prob': 3.668373274370273e-06, 'uncond_correct_prob_per_token': 0.010779968211124846, 'uncond_correct_prob_per_char': 0.34254817172387425, 'uncond_total_prob': 2.1271433953040528e-05, 'norm_correct_prob': 0.2186949555460681, 'norm_correct_prob_per_token': 0.3074741188955827, 'norm_correct_prob_per_char': 0.25935687294831206, 'primary_metric': 0.21084337349397592} |
90M | FineWeb-Pro | mmlu_virology | 23,750 | small aux 3 | 5xC | 7,782,400,000 | 4,573,559,586,816,000,000 | {'predicted_index_raw': 1.4759036144578312, 'predicted_index_per_token': 1.5783132530120483, 'predicted_index_per_char': 1.6385542168674698, 'predicted_index_per_byte': 1.6385542168674698, 'predicted_index_uncond': 1.5421686746987953, 'correct_choice': 1.3072289156626506, 'acc_raw': 0.1927710843373494, 'acc_per_token': 0.3072289156626506, 'acc_per_char': 0.2710843373493976, 'acc_per_byte': 0.2710843373493976, 'acc_uncond': 0.25903614457831325, 'no_answer': 0.0, 'sum_logits_corr': -23.614559193691576, 'logits_per_token_corr': -4.129806129703952, 'logits_per_char_corr': -0.8353025424610226, 'bits_per_byte_corr': 1.1997186741045116, 'correct_prob': 0.0003916751614282612, 'correct_prob_per_token': 0.0332554392752223, 'correct_prob_per_char': 0.4528195532000811, 'margin': -0.0011644223084101828, 'margin_per_token': -0.018367136532259385, 'margin_per_char': -0.0819839314286029, 'total_prob': 0.0022664381290866515, 'total_prob_per_token': 0.11993131141192775, 'total_prob_per_char': 1.775460885089412, 'uncond_correct_prob': 5.217162977328824e-06, 'uncond_correct_prob_per_token': 0.009907969172914631, 'uncond_correct_prob_per_char': 0.33578461456945496, 'uncond_total_prob': 6.583720957609086e-05, 'norm_correct_prob': 0.20379440327230136, 'norm_correct_prob_per_token': 0.29707023440498187, 'norm_correct_prob_per_char': 0.2561975077234993, 'primary_metric': 0.1927710843373494} |
90M | FineWeb-Pro | mmlu_virology | 23,750 | default | 5xC | 7,782,400,000 | 4,573,559,586,816,000,000 | {'predicted_index_raw': 1.4819277108433735, 'predicted_index_per_token': 1.4939759036144578, 'predicted_index_per_char': 1.680722891566265, 'predicted_index_per_byte': 1.5120481927710843, 'predicted_index_uncond': 1.4939759036144578, 'correct_choice': 1.3072289156626506, 'acc_raw': 0.21686746987951808, 'acc_per_token': 0.3072289156626506, 'acc_per_char': 0.29518072289156627, 'acc_per_byte': 0.22289156626506024, 'acc_uncond': 0.2891566265060241, 'no_answer': 0.0, 'sum_logits_corr': -23.733206952910827, 'logits_per_token_corr': -4.16600162127933, 'logits_per_char_corr': -0.8399521119067388, 'logits_per_byte_corr': 1.2068605837325783, 'correct_prob': 0.00044563257027057225, 'correct_prob_per_token': 0.03390474819596349, 'correct_prob_per_char': 0.4516598637922918, 'margin': -0.0011856176162517362, 'margin_per_token': -0.016980384237924404, 'margin_per_char': -0.07751840732051735, 'total_prob': 0.002367055029467931, 'total_prob_per_token': 0.11889547895959822, 'total_prob_per_char': 1.7658531316481358, 'uncond_correct_prob': 5.1811251504859456e-06, 'uncond_correct_prob_per_token': 0.01124527394901736, 'uncond_correct_prob_per_char': 0.3522239212392723, 'uncond_total_prob': 4.0971601952127654e-05, 'norm_correct_prob': 0.22750845486602014, 'norm_correct_prob_per_token': 0.2947527323343874, 'norm_correct_prob_per_char': 0.256441858076364, 'primary_metric': 0.21686746987951808} |
90M | FineWeb-Pro | mmlu_virology | 25,000 | small aux 2 | 5xC | 8,192,000,000 | 4,814,273,249,280,000,000 | {'predicted_index_raw': 1.4518072289156627, 'predicted_index_per_token': 1.4397590361445782, 'predicted_index_per_char': 1.6144578313253013, 'predicted_index_per_byte': 1.6144578313253013, 'predicted_index_uncond': 1.463855421686747, 'correct_choice': 1.3072289156626506, 'acc_raw': 0.21686746987951808, 'acc_per_token': 0.3433734939759036, 'acc_per_char': 0.3253012048192771, 'acc_per_byte': 0.3253012048192771, 'acc_uncond': 0.29518072289156627, 'no_answer': 0.0, 'sum_logits_corr': -23.206358329359308, 'logits_per_token_corr': -4.068289831365768, 'logits_per_char_corr': -0.8243973375093465, 'bits_per_byte_corr': 1.1841851031797863, 'correct_prob': 0.000423970155697608, 'correct_prob_per_token': 0.03624942485802999, 'correct_prob_per_char': 0.4593010758098421, 'margin': -0.0005430042308114853, 'margin_per_token': -0.01640442611178531, 'margin_per_char': -0.07219821351856447, 'total_prob': 0.0015509126087060502, 'total_prob_per_token': 0.12283113517345667, 'total_prob_per_char': 1.7772183501889438, 'uncond_correct_prob': 2.4304303356032557e-06, 'uncond_correct_prob_per_token': 0.009953034931865461, 'uncond_correct_prob_per_char': 0.33536457885515913, 'uncond_total_prob': 1.8448096742451964e-05, 'norm_correct_prob': 0.22179894362054364, 'norm_correct_prob_per_token': 0.3074906652636167, 'norm_correct_prob_per_char': 0.2592022589041941, 'primary_metric': 0.21686746987951808} |
90M | FineWeb-Pro | mmlu_virology | 25,000 | small aux 3 | 5xC | 8,192,000,000 | 4,814,273,249,280,000,000 | {'predicted_index_raw': 1.5542168674698795, 'predicted_index_per_token': 1.536144578313253, 'predicted_index_per_char': 1.572289156626506, 'predicted_index_per_byte': 1.572289156626506, 'predicted_index_uncond': 1.4397590361445782, 'correct_choice': 1.3072289156626506, 'acc_raw': 0.22289156626506024, 'acc_per_token': 0.3433734939759036, 'acc_per_char': 0.2710843373493976, 'acc_per_byte': 0.2710843373493976, 'acc_uncond': 0.23493975903614459, 'no_answer': 0.0, 'sum_logits_corr': -23.452798171215747, 'logits_per_token_corr': -4.087176388557547, 'logits_per_char_corr': -0.8285473674239774, 'bits_per_byte_corr': 1.1898338661446737, 'correct_prob': 0.0004959437695713155, 'correct_prob_per_token': 0.034430042634646475, 'correct_prob_per_char': 0.45610205977286605, 'margin': -0.0016029000166980056, 'margin_per_token': -0.019187937398944765, 'margin_per_char': -0.08062390554429283, 'total_prob': 0.002945498751102437, 'total_prob_per_token': 0.12373438327738898, 'total_prob_per_char': 1.7876877482178455, 'uncond_correct_prob': 3.448208695514626e-06, 'uncond_correct_prob_per_token': 0.010271189014180353, 'uncond_correct_prob_per_char': 0.33903529353125744, 'uncond_total_prob': 4.836964764336049e-05, 'norm_correct_prob': 0.2226294641652722, 'norm_correct_prob_per_token': 0.2964799834880976, 'norm_correct_prob_per_char': 0.25599262428601044, 'primary_metric': 0.22289156626506024} |
90M | FineWeb-Pro | mmlu_virology | 25,000 | default | 5xC | 8,192,000,000 | 4,814,273,249,280,000,000 | {'predicted_index_raw': 1.463855421686747, 'predicted_index_per_token': 1.4578313253012047, 'predicted_index_per_char': 1.644578313253012, 'predicted_index_per_byte': 1.5301204819277108, 'predicted_index_uncond': 1.3734939759036144, 'correct_choice': 1.3072289156626506, 'acc_raw': 0.2289156626506024, 'acc_per_token': 0.30120481927710846, 'acc_per_char': 0.26506024096385544, 'acc_per_byte': 0.18674698795180722, 'acc_uncond': 0.30120481927710846, 'no_answer': 0.0, 'sum_logits_corr': -23.66986633496112, 'logits_per_token_corr': -4.164939452819837, 'logits_per_char_corr': -0.8396580303294918, 'logits_per_byte_corr': 1.2061355136857803, 'correct_prob': 0.0004601559775470046, 'correct_prob_per_token': 0.03379487033349893, 'correct_prob_per_char': 0.45170254742849447, 'margin': -0.0010633005038547874, 'margin_per_token': -0.018283335524555574, 'margin_per_char': -0.08022002061482254, 'total_prob': 0.0022240692813177356, 'total_prob_per_token': 0.1201908494004705, 'total_prob_per_char': 1.7676424338046213, 'uncond_correct_prob': 5.1540964154115646e-06, 'uncond_correct_prob_per_token': 0.010905526769230475, 'uncond_correct_prob_per_char': 0.34487444990295385, 'uncond_total_prob': 4.9988616399891933e-05, 'norm_correct_prob': 0.2257281391056197, 'norm_correct_prob_per_token': 0.29365338003801394, 'norm_correct_prob_per_char': 0.2565342485784579, 'primary_metric': 0.2289156626506024} |
90M | FineWeb-Pro | mmlu_virology | 27,500 | small aux 2 | 5xC | 9,011,200,000 | 5,295,700,574,208,000,000 | {'predicted_index_raw': 1.4457831325301205, 'predicted_index_per_token': 1.5120481927710843, 'predicted_index_per_char': 1.608433734939759, 'predicted_index_per_byte': 1.608433734939759, 'predicted_index_uncond': 1.4759036144578312, 'correct_choice': 1.3072289156626506, 'acc_raw': 0.21084337349397592, 'acc_per_token': 0.3433734939759036, 'acc_per_char': 0.3072289156626506, 'acc_per_byte': 0.3072289156626506, 'acc_uncond': 0.27710843373493976, 'no_answer': 0.0, 'sum_logits_corr': -22.92069582766797, 'logits_per_token_corr': -4.018937929473035, 'logits_per_char_corr': -0.8136335944690163, 'bits_per_byte_corr': 1.1691801100290156, 'correct_prob': 0.00038828028868858645, 'correct_prob_per_token': 0.03749957830807213, 'correct_prob_per_char': 0.4634848955473038, 'margin': -0.0005949940651347621, 'margin_per_token': -0.017029831503665604, 'margin_per_char': -0.07340791095043668, 'total_prob': 0.0015570771853317127, 'total_prob_per_token': 0.12764087824946968, 'total_prob_per_char': 1.7937447454336923, 'uncond_correct_prob': 3.1843275700458876e-06, 'uncond_correct_prob_per_token': 0.010425234332296774, 'uncond_correct_prob_per_char': 0.34191684683924894, 'uncond_total_prob': 2.565825513917246e-05, 'norm_correct_prob': 0.21581650440462297, 'norm_correct_prob_per_token': 0.3083308450163874, 'norm_correct_prob_per_char': 0.2592546206042209, 'primary_metric': 0.21084337349397592} |
90M | FineWeb-Pro | mmlu_virology | 27,500 | small aux 3 | 5xC | 9,011,200,000 | 5,295,700,574,208,000,000 | {'predicted_index_raw': 1.5481927710843373, 'predicted_index_per_token': 1.5903614457831325, 'predicted_index_per_char': 1.5602409638554218, 'predicted_index_per_byte': 1.5602409638554218, 'predicted_index_uncond': 1.4698795180722892, 'correct_choice': 1.3072289156626506, 'acc_raw': 0.22289156626506024, 'acc_per_token': 0.3192771084337349, 'acc_per_char': 0.2891566265060241, 'acc_per_byte': 0.2891566265060241, 'acc_uncond': 0.24096385542168675, 'no_answer': 0.0, 'sum_logits_corr': -23.212832209575605, 'logits_per_token_corr': -4.046983902056945, 'logits_per_char_corr': -0.8180294965955136, 'bits_per_byte_corr': 1.1750929914232826, 'correct_prob': 0.0004925846226952385, 'correct_prob_per_token': 0.035606045394312606, 'correct_prob_per_char': 0.459739423054946, 'margin': -0.0017032516920903967, 'margin_per_token': -0.02033065229599932, 'margin_per_char': -0.08127399130828446, 'total_prob': 0.0030414101063154647, 'total_prob_per_token': 0.12882741307960502, 'total_prob_per_char': 1.8033601416917506, 'uncond_correct_prob': 3.1520448499369506e-06, 'uncond_correct_prob_per_token': 0.01025969345623566, 'uncond_correct_prob_per_char': 0.33922754825385293, 'uncond_total_prob': 3.277825505189619e-05, 'norm_correct_prob': 0.22151297248193139, 'norm_correct_prob_per_token': 0.2945025550008486, 'norm_correct_prob_per_char': 0.255807647836315, 'primary_metric': 0.22289156626506024} |
90M | FineWeb-Pro | mmlu_virology | 27,500 | default | 5xC | 9,011,200,000 | 5,295,700,574,208,000,000 | {'predicted_index_raw': 1.4337349397590362, 'predicted_index_per_token': 1.4819277108433735, 'predicted_index_per_char': 1.608433734939759, 'predicted_index_per_byte': 1.5662650602409638, 'predicted_index_uncond': 1.4096385542168675, 'correct_choice': 1.3072289156626506, 'acc_raw': 0.2289156626506024, 'acc_per_token': 0.3132530120481928, 'acc_per_char': 0.29518072289156627, 'acc_per_byte': 0.18674698795180722, 'acc_uncond': 0.3132530120481928, 'no_answer': 0.0, 'sum_logits_corr': -23.32212627077677, 'logits_per_token_corr': -4.074888921400199, 'logits_per_char_corr': -0.821324416286663, 'logits_per_byte_corr': 1.1799277349560104, 'correct_prob': 0.0005377464073440965, 'correct_prob_per_token': 0.036315715070429154, 'correct_prob_per_char': 0.45931850217435494, 'margin': -0.0012601559542644496, 'margin_per_token': -0.01848817953358709, 'margin_per_char': -0.0772920264586319, 'total_prob': 0.0026551241674401694, 'total_prob_per_token': 0.12722095884886536, 'total_prob_per_char': 1.7909639804672213, 'uncond_correct_prob': 4.956927436671099e-06, 'uncond_correct_prob_per_token': 0.011329674697378144, 'uncond_correct_prob_per_char': 0.3475951013004923, 'uncond_total_prob': 4.1421144076954714e-05, 'norm_correct_prob': 0.23169861921178603, 'norm_correct_prob_per_token': 0.29641683951228154, 'norm_correct_prob_per_char': 0.2573389780074922, 'primary_metric': 0.2289156626506024} |
90M | FineWeb-Pro | mmlu_virology | 28,750 | small aux 2 | 5xC | 9,420,800,000 | 5,536,414,236,672,000,000 | {'predicted_index_raw': 1.4518072289156627, 'predicted_index_per_token': 1.5, 'predicted_index_per_char': 1.6144578313253013, 'predicted_index_per_byte': 1.6144578313253013, 'predicted_index_uncond': 1.4457831325301205, 'correct_choice': 1.3072289156626506, 'acc_raw': 0.21686746987951808, 'acc_per_token': 0.35542168674698793, 'acc_per_char': 0.3072289156626506, 'acc_per_byte': 0.3072289156626506, 'acc_uncond': 0.29518072289156627, 'no_answer': 0.0, 'sum_logits_corr': -22.842557222010143, 'logits_per_token_corr': -4.009041298619283, 'logits_per_char_corr': -0.8126222140343373, 'bits_per_byte_corr': 1.1676537783842307, 'correct_prob': 0.00044802457597236853, 'correct_prob_per_token': 0.03758049060049748, 'correct_prob_per_char': 0.46420394782156416, 'margin': -0.0006405210208403745, 'margin_per_token': -0.0175080048339638, 'margin_per_char': -0.07402994148313057, 'total_prob': 0.0017318254341045535, 'total_prob_per_token': 0.1289823078944765, 'total_prob_per_char': 1.7987398784739126, 'uncond_correct_prob': 3.077011467163931e-06, 'uncond_correct_prob_per_token': 0.010573691934399745, 'uncond_correct_prob_per_char': 0.3425516262529779, 'uncond_total_prob': 2.1930015293015052e-05, 'norm_correct_prob': 0.21964265307522624, 'norm_correct_prob_per_token': 0.30772655597233306, 'norm_correct_prob_per_char': 0.25890254230784093, 'primary_metric': 0.21686746987951808} |
90M | FineWeb-Pro | mmlu_virology | 28,750 | small aux 3 | 5xC | 9,420,800,000 | 5,536,414,236,672,000,000 | {'predicted_index_raw': 1.5180722891566265, 'predicted_index_per_token': 1.5240963855421688, 'predicted_index_per_char': 1.5662650602409638, 'predicted_index_per_byte': 1.5662650602409638, 'predicted_index_uncond': 1.4578313253012047, 'correct_choice': 1.3072289156626506, 'acc_raw': 0.22289156626506024, 'acc_per_token': 0.3433734939759036, 'acc_per_char': 0.29518072289156627, 'acc_per_byte': 0.29518072289156627, 'acc_uncond': 0.25301204819277107, 'no_answer': 0.0, 'sum_logits_corr': -23.202818472701384, 'logits_per_token_corr': -4.03684467741424, 'logits_per_char_corr': -0.8163840078079075, 'bits_per_byte_corr': 1.172698096655367, 'correct_prob': 0.0006247233973491697, 'correct_prob_per_token': 0.036144507969208664, 'correct_prob_per_char': 0.460846590799863, 'margin': -0.0016921127711049038, 'margin_per_token': -0.01958308696337259, 'margin_per_char': -0.08119012510127625, 'total_prob': 0.0033019448728336914, 'total_prob_per_token': 0.12948008977719921, 'total_prob_per_char': 1.8073157245442228, 'uncond_correct_prob': 3.459234093566186e-06, 'uncond_correct_prob_per_token': 0.010469240657703754, 'uncond_correct_prob_per_char': 0.33954730520394244, 'uncond_total_prob': 3.4008860995349754e-05, 'norm_correct_prob': 0.22424983349643055, 'norm_correct_prob_per_token': 0.29512992913374503, 'norm_correct_prob_per_char': 0.2557191049012343, 'primary_metric': 0.22289156626506024} |
90M | FineWeb-Pro | mmlu_virology | 28,750 | default | 5xC | 9,420,800,000 | 5,536,414,236,672,000,000 | {'predicted_index_raw': 1.4518072289156627, 'predicted_index_per_token': 1.4879518072289157, 'predicted_index_per_char': 1.608433734939759, 'predicted_index_per_byte': 1.5783132530120483, 'predicted_index_uncond': 1.3975903614457832, 'correct_choice': 1.3072289156626506, 'acc_raw': 0.21686746987951808, 'acc_per_token': 0.3253012048192771, 'acc_per_char': 0.30120481927710846, 'acc_per_byte': 0.19879518072289157, 'acc_uncond': 0.2891566265060241, 'no_answer': 0.0, 'sum_logits_corr': -23.359036256031818, 'logits_per_token_corr': -4.086400140648425, 'logits_per_char_corr': -0.8243980176571786, 'logits_per_byte_corr': 1.1843180878835082, 'correct_prob': 0.0004993873894917015, 'correct_prob_per_token': 0.035627101891533765, 'correct_prob_per_char': 0.45797376951082225, 'margin': -0.0011828221032913356, 'margin_per_token': -0.019177247253004775, 'margin_per_char': -0.07861114383368384, 'total_prob': 0.002487569354919774, 'total_prob_per_token': 0.12629896684628017, 'total_prob_per_char': 1.7910345792376687, 'uncond_correct_prob': 4.72979366985381e-06, 'uncond_correct_prob_per_token': 0.01159290355486686, 'uncond_correct_prob_per_char': 0.35030153418351917, 'uncond_total_prob': 3.910192818908715e-05, 'norm_correct_prob': 0.2265255436354676, 'norm_correct_prob_per_token': 0.29407418951488107, 'norm_correct_prob_per_char': 0.2565510377220957, 'primary_metric': 0.21686746987951808} |
90M | FineWeb-Pro | mmlu_virology | 29,901 | small aux 2 | 5xC | 9,797,959,680 | 5,758,063,377,068,851,000 | {'predicted_index_raw': 1.4518072289156627, 'predicted_index_per_token': 1.4759036144578312, 'predicted_index_per_char': 1.6265060240963856, 'predicted_index_per_byte': 1.6265060240963856, 'predicted_index_uncond': 1.4879518072289157, 'correct_choice': 1.3072289156626506, 'acc_raw': 0.22289156626506024, 'acc_per_token': 0.3493975903614458, 'acc_per_char': 0.3253012048192771, 'acc_per_byte': 0.3253012048192771, 'acc_uncond': 0.27710843373493976, 'no_answer': 0.0, 'sum_logits_corr': -22.856062120701893, 'logits_per_token_corr': -4.0137253439836, 'logits_per_char_corr': -0.813117219032735, 'bits_per_byte_corr': 1.1683413929084734, 'correct_prob': 0.00040896650306832704, 'correct_prob_per_token': 0.03747588447042543, 'correct_prob_per_char': 0.463950841405073, 'margin': -0.0006696304838207183, 'margin_per_token': -0.017683158091270197, 'margin_per_char': -0.07393199563717802, 'total_prob': 0.001689833408228164, 'total_prob_per_token': 0.1291874653481484, 'total_prob_per_char': 1.7995819531386044, 'uncond_correct_prob': 3.376082225952049e-06, 'uncond_correct_prob_per_token': 0.010636772465392044, 'uncond_correct_prob_per_char': 0.3427591113798666, 'uncond_total_prob': 2.5291897119237657e-05, 'norm_correct_prob': 0.2191804596998064, 'norm_correct_prob_per_token': 0.30608158407630764, 'norm_correct_prob_per_char': 0.2585986439503621, 'primary_metric': 0.22289156626506024} |
90M | FineWeb-Pro | mmlu_virology | 29,901 | small aux 3 | 5xC | 9,797,959,680 | 5,758,063,377,068,851,000 | {'predicted_index_raw': 1.4939759036144578, 'predicted_index_per_token': 1.5180722891566265, 'predicted_index_per_char': 1.572289156626506, 'predicted_index_per_byte': 1.572289156626506, 'predicted_index_uncond': 1.4819277108433735, 'correct_choice': 1.3072289156626506, 'acc_raw': 0.22289156626506024, 'acc_per_token': 0.3373493975903614, 'acc_per_char': 0.2891566265060241, 'acc_per_byte': 0.2891566265060241, 'acc_uncond': 0.2469879518072289, 'no_answer': 0.0, 'sum_logits_corr': -23.16814472589148, 'logits_per_token_corr': -4.030860913276904, 'logits_per_char_corr': -0.8158251505591488, 'bits_per_byte_corr': 1.1717570132470978, 'correct_prob': 0.0005727606331840246, 'correct_prob_per_token': 0.03592858337657747, 'correct_prob_per_char': 0.461183864385112, 'margin': -0.0017647009936292146, 'margin_per_token': -0.020215652525035776, 'margin_per_char': -0.08064411987086051, 'total_prob': 0.003294850692431297, 'total_prob_per_token': 0.13000491016953858, 'total_prob_per_char': 1.8096537185208927, 'uncond_correct_prob': 3.614082653934181e-06, 'uncond_correct_prob_per_token': 0.010596262837136026, 'uncond_correct_prob_per_char': 0.3400131765971194, 'uncond_total_prob': 3.560359505375809e-05, 'norm_correct_prob': 0.21799523824156092, 'norm_correct_prob_per_token': 0.2949405350017645, 'norm_correct_prob_per_char': 0.25563117705354504, 'primary_metric': 0.22289156626506024} |
90M | FineWeb-Pro | mmlu_virology | 29,901 | default | 5xC | 9,797,959,680 | 5,758,063,377,068,851,000 | {'predicted_index_raw': 1.4698795180722892, 'predicted_index_per_token': 1.5301204819277108, 'predicted_index_per_char': 1.5963855421686748, 'predicted_index_per_byte': 1.5843373493975903, 'predicted_index_uncond': 1.3674698795180722, 'correct_choice': 1.3072289156626506, 'acc_raw': 0.24096385542168675, 'acc_per_token': 0.3072289156626506, 'acc_per_char': 0.28313253012048195, 'acc_per_byte': 0.20481927710843373, 'acc_uncond': 0.28313253012048195, 'no_answer': 0.0, 'sum_logits_corr': -23.310043016111994, 'logits_per_token_corr': -4.075102518282097, 'logits_per_char_corr': -0.8219997658235018, 'logits_per_byte_corr': 1.1809829879241223, 'correct_prob': 0.0005334391346419029, 'correct_prob_per_token': 0.036407080452114035, 'correct_prob_per_char': 0.45930458729297896, 'margin': -0.0013636274157887715, 'margin_per_token': -0.019708619498512176, 'margin_per_char': -0.07996686292234566, 'total_prob': 0.0027249355354042617, 'total_prob_per_token': 0.1289076352656195, 'total_prob_per_char': 1.7963053886292983, 'uncond_correct_prob': 4.957656481682182e-06, 'uncond_correct_prob_per_token': 0.011445586243580962, 'uncond_correct_prob_per_char': 0.34986020777555676, 'uncond_total_prob': 4.011049438170104e-05, 'norm_correct_prob': 0.23074218419528117, 'norm_correct_prob_per_token': 0.29317719133792003, 'norm_correct_prob_per_char': 0.25653860186366123, 'primary_metric': 0.24096385542168675} |
90M | FineWeb-Pro | mmlu_world_religions | 0 | small aux 2 | 5xC | 0 | 0 | {'predicted_index_raw': 1.4619883040935673, 'predicted_index_per_token': 1.456140350877193, 'predicted_index_per_char': 1.432748538011696, 'predicted_index_per_byte': 1.432748538011696, 'predicted_index_uncond': 1.567251461988304, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.14035087719298245, 'acc_per_token': 0.23391812865497075, 'acc_per_char': 0.21637426900584794, 'acc_per_byte': 0.21637426900584794, 'acc_uncond': 0.21637426900584794, 'no_answer': 0.0, 'sum_logits_corr': -39.68329947176036, 'logits_per_token_corr': -10.964687526945344, 'logits_per_char_corr': -3.162100281288852, 'bits_per_byte_corr': 4.5488348880474865, 'correct_prob': 1.6031250806743696e-06, 'correct_prob_per_token': 1.8151148794995195e-05, 'correct_prob_per_char': 0.08021056003781696, 'margin': -4.276340191648496e-06, 'margin_per_token': -5.1527027431576465e-06, 'margin_per_char': -0.048127243653292094, 'total_prob': 9.986719910981435e-06, 'total_prob_per_token': 7.448071675461697e-05, 'total_prob_per_char': 0.33370328355084583, 'uncond_correct_prob': 2.01308391675656e-06, 'uncond_correct_prob_per_token': 1.8510695085755593e-05, 'uncond_correct_prob_per_char': 0.08133757963923789, 'uncond_total_prob': 1.0005239521256558e-05, 'norm_correct_prob': 0.1680120508586986, 'norm_correct_prob_per_token': 0.24412239565405916, 'norm_correct_prob_per_char': 0.23447249577709092, 'primary_metric': 0.14035087719298245} |
90M | FineWeb-Pro | mmlu_world_religions | 0 | small aux 3 | 5xC | 0 | 0 | {'predicted_index_raw': 1.3157894736842106, 'predicted_index_per_token': 1.432748538011696, 'predicted_index_per_char': 1.391812865497076, 'predicted_index_per_byte': 1.391812865497076, 'predicted_index_uncond': 1.4736842105263157, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.19883040935672514, 'acc_per_token': 0.25146198830409355, 'acc_per_char': 0.22807017543859648, 'acc_per_byte': 0.22807017543859648, 'acc_uncond': 0.2631578947368421, 'no_answer': 0.0, 'sum_logits_corr': -39.77966526098419, 'logits_per_token_corr': -10.971166115414343, 'logits_per_char_corr': -3.175058522963393, 'bits_per_byte_corr': 4.567538964914378, 'correct_prob': 1.726407081079528e-06, 'correct_prob_per_token': 1.7855322032316623e-05, 'correct_prob_per_char': 0.08106500054933626, 'margin': -4.114766849132014e-06, 'margin_per_token': -5.148422051231149e-06, 'margin_per_char': -0.048222714406458825, 'total_prob': 1.0111742103227712e-05, 'total_prob_per_token': 7.377675549469166e-05, 'total_prob_per_char': 0.33469946617041485, 'uncond_correct_prob': 2.427470788154174e-06, 'uncond_correct_prob_per_token': 1.956344362646949e-05, 'uncond_correct_prob_per_char': 0.08225918825918326, 'uncond_total_prob': 1.064107042950399e-05, 'norm_correct_prob': 0.17969446858675148, 'norm_correct_prob_per_token': 0.24314071087193798, 'norm_correct_prob_per_char': 0.235163969733486, 'primary_metric': 0.19883040935672514} |
90M | FineWeb-Pro | mmlu_world_religions | 0 | default | 5xC | 0 | 0 | {'predicted_index_raw': 1.409356725146199, 'predicted_index_per_token': 1.5555555555555556, 'predicted_index_per_char': 1.409356725146199, 'predicted_index_per_byte': 1.5029239766081872, 'predicted_index_uncond': 1.5555555555555556, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.1871345029239766, 'acc_per_token': 0.2807017543859649, 'acc_per_char': 0.24561403508771928, 'acc_per_byte': 0.2631578947368421, 'acc_uncond': 0.26900584795321636, 'no_answer': 0.0, 'sum_logits_corr': -39.52530189982632, 'logits_per_token_corr': -10.914843000973377, 'logits_per_char_corr': -3.1556027967771967, 'logits_per_byte_corr': 4.539701565798926, 'correct_prob': 1.9450982084507753e-06, 'correct_prob_per_token': 1.8795653237383682e-05, 'correct_prob_per_char': 0.0816710423336206, 'margin': -3.2224031610340408e-06, 'margin_per_token': -4.377749874415074e-06, 'margin_per_char': -0.047736482780485416, 'total_prob': 9.298601938234744e-06, 'total_prob_per_token': 7.513504374440647e-05, 'total_prob_per_char': 0.33606279095946223, 'uncond_correct_prob': 2.0417164469408453e-06, 'uncond_correct_prob_per_token': 1.8546329205589118e-05, 'uncond_correct_prob_per_char': 0.08146724452289143, 'uncond_total_prob': 1.02386610524983e-05, 'norm_correct_prob': 0.17728064487578593, 'norm_correct_prob_per_token': 0.25128967534050045, 'norm_correct_prob_per_char': 0.23599932068845433, 'primary_metric': 0.1871345029239766} |
90M | FineWeb-Pro | mmlu_world_religions | 1,250 | small aux 2 | 5xC | 409,600,000 | 240,713,662,464,000,000 | {'predicted_index_raw': 1.4619883040935673, 'predicted_index_per_token': 1.5555555555555556, 'predicted_index_per_char': 1.590643274853801, 'predicted_index_per_byte': 1.590643274853801, 'predicted_index_uncond': 1.5087719298245614, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.15204678362573099, 'acc_per_token': 0.2631578947368421, 'acc_per_char': 0.19298245614035087, 'acc_per_byte': 0.19298245614035087, 'acc_uncond': 0.2573099415204678, 'no_answer': 0.0, 'sum_logits_corr': -19.581442043795224, 'logits_per_token_corr': -6.052054053221176, 'logits_per_char_corr': -1.6771389074106224, 'bits_per_byte_corr': 2.4153655907876357, 'correct_prob': 0.0001564601596851168, 'correct_prob_per_token': 0.009678156568885462, 'correct_prob_per_char': 0.2381056020042755, 'margin': -0.0003913095367380479, 'margin_per_token': -0.011977525364190653, 'margin_per_char': -0.08646768757178629, 'total_prob': 0.0007493089409242909, 'total_prob_per_token': 0.04244351679649196, 'total_prob_per_char': 0.983891594170018, 'uncond_correct_prob': 1.2812529017704719e-05, 'uncond_correct_prob_per_token': 0.003595283010623166, 'uncond_correct_prob_per_char': 0.18322321571402625, 'uncond_total_prob': 6.067916969049977e-05, 'norm_correct_prob': 0.16581225889372023, 'norm_correct_prob_per_token': 0.23755206002973167, 'norm_correct_prob_per_char': 0.2366184832659655, 'primary_metric': 0.15204678362573099} |
90M | FineWeb-Pro | mmlu_world_religions | 1,250 | small aux 3 | 5xC | 409,600,000 | 240,713,662,464,000,000 | {'predicted_index_raw': 1.4795321637426901, 'predicted_index_per_token': 1.6608187134502923, 'predicted_index_per_char': 1.5614035087719298, 'predicted_index_per_byte': 1.567251461988304, 'predicted_index_uncond': 1.5146198830409356, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.14619883040935672, 'acc_per_token': 0.21637426900584794, 'acc_per_char': 0.21052631578947367, 'acc_per_byte': 0.21052631578947367, 'acc_uncond': 0.22807017543859648, 'no_answer': 0.0, 'sum_logits_corr': -20.616470035753753, 'logits_per_token_corr': -6.384016491971946, 'logits_per_char_corr': -1.7610957808048444, 'bits_per_byte_corr': 2.536719760275022, 'correct_prob': 2.5782956246561835e-05, 'correct_prob_per_token': 0.007171406090034148, 'correct_prob_per_char': 0.2194149097589895, 'margin': -0.0001685184273186614, 'margin_per_token': -0.008413691001805284, 'margin_per_char': -0.07783984342687564, 'total_prob': 0.00024051738307318124, 'total_prob_per_token': 0.03063227696348589, 'total_prob_per_char': 0.9051354161126477, 'uncond_correct_prob': 7.352371519857431e-06, 'uncond_correct_prob_per_token': 0.0027143626242270417, 'uncond_correct_prob_per_char': 0.17406303341919474, 'uncond_total_prob': 3.597186503442658e-05, 'norm_correct_prob': 0.15894063982095435, 'norm_correct_prob_per_token': 0.240435410628231, 'norm_correct_prob_per_char': 0.23829712819998494, 'primary_metric': 0.14619883040935672} |
90M | FineWeb-Pro | mmlu_world_religions | 1,250 | default | 5xC | 409,600,000 | 240,713,662,464,000,000 | {'predicted_index_raw': 1.631578947368421, 'predicted_index_per_token': 1.695906432748538, 'predicted_index_per_char': 1.6023391812865497, 'predicted_index_per_byte': 1.543859649122807, 'predicted_index_uncond': 1.456140350877193, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.15789473684210525, 'acc_per_token': 0.2222222222222222, 'acc_per_char': 0.1871345029239766, 'acc_per_byte': 0.25146198830409355, 'acc_uncond': 0.25146198830409355, 'no_answer': 0.0, 'sum_logits_corr': -20.789787303634554, 'logits_per_token_corr': -6.5163128497019125, 'logits_per_char_corr': -1.8078828101657525, 'logits_per_byte_corr': 2.6042361650287926, 'correct_prob': 3.370082746188146e-05, 'correct_prob_per_token': 0.007137289701775195, 'correct_prob_per_char': 0.21722782811743496, 'margin': -0.00017097771094338668, 'margin_per_token': -0.008453742850817323, 'margin_per_char': -0.07667196411127501, 'total_prob': 0.00026084195091286125, 'total_prob_per_token': 0.030487540436269764, 'total_prob_per_char': 0.8977509780695823, 'uncond_correct_prob': 4.203113566723225e-06, 'uncond_correct_prob_per_token': 0.0036020346217643293, 'uncond_correct_prob_per_char': 0.18452663927002533, 'uncond_total_prob': 3.4480671930669764e-05, 'norm_correct_prob': 0.16098642236789162, 'norm_correct_prob_per_token': 0.2405542156158497, 'norm_correct_prob_per_char': 0.23730327102101792, 'primary_metric': 0.15789473684210525} |
90M | FineWeb-Pro | mmlu_world_religions | 2,500 | small aux 2 | 5xC | 819,200,000 | 481,427,324,928,000,000 | {'predicted_index_raw': 1.4385964912280702, 'predicted_index_per_token': 1.5321637426900585, 'predicted_index_per_char': 1.543859649122807, 'predicted_index_per_byte': 1.543859649122807, 'predicted_index_uncond': 1.4795321637426901, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.1871345029239766, 'acc_per_token': 0.25146198830409355, 'acc_per_char': 0.22807017543859648, 'acc_per_byte': 0.22807017543859648, 'acc_uncond': 0.2573099415204678, 'no_answer': 0.0, 'sum_logits_corr': -18.796715549558225, 'logits_per_token_corr': -5.882148000158526, 'logits_per_char_corr': -1.6178484477560306, 'bits_per_byte_corr': 2.329879928189721, 'correct_prob': 0.00011856947672938313, 'correct_prob_per_token': 0.012385989949462432, 'correct_prob_per_char': 0.25072651714093713, 'margin': -0.0003072414702284445, 'margin_per_token': -0.013024064944971717, 'margin_per_char': -0.079814827654114, 'total_prob': 0.0006651745994127514, 'total_prob_per_token': 0.05059136623630312, 'total_prob_per_char': 1.0259707441982775, 'uncond_correct_prob': 8.472173700662699e-06, 'uncond_correct_prob_per_token': 0.003909329496132586, 'uncond_correct_prob_per_char': 0.1883560373736204, 'uncond_total_prob': 3.433434122247778e-05, 'norm_correct_prob': 0.18483337820465892, 'norm_correct_prob_per_token': 0.24635150520013263, 'norm_correct_prob_per_char': 0.23863004862728343, 'primary_metric': 0.1871345029239766} |
90M | FineWeb-Pro | mmlu_world_religions | 2,500 | small aux 3 | 5xC | 819,200,000 | 481,427,324,928,000,000 | {'predicted_index_raw': 1.5146198830409356, 'predicted_index_per_token': 1.6432748538011697, 'predicted_index_per_char': 1.6140350877192982, 'predicted_index_per_byte': 1.6140350877192982, 'predicted_index_uncond': 1.5964912280701755, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.1695906432748538, 'acc_per_token': 0.24561403508771928, 'acc_per_char': 0.23976608187134502, 'acc_per_byte': 0.23976608187134502, 'acc_uncond': 0.2222222222222222, 'no_answer': 0.0, 'sum_logits_corr': -18.875471670028062, 'logits_per_token_corr': -5.881620400887566, 'logits_per_char_corr': -1.6203377673104644, 'bits_per_byte_corr': 2.333917550164067, 'correct_prob': 0.00014555542985654753, 'correct_prob_per_token': 0.012545819937395798, 'correct_prob_per_char': 0.2493852924561364, 'margin': -0.00040301103164555726, 'margin_per_token': -0.013424029782950382, 'margin_per_char': -0.07810194396501632, 'total_prob': 0.0007773341209686203, 'total_prob_per_token': 0.050860090785206286, 'total_prob_per_char': 1.0161805900530003, 'uncond_correct_prob': 5.153829860807068e-06, 'uncond_correct_prob_per_token': 0.0041558731403733866, 'uncond_correct_prob_per_char': 0.19030022965536636, 'uncond_total_prob': 3.347811664999451e-05, 'norm_correct_prob': 0.184493429832297, 'norm_correct_prob_per_token': 0.24295415793024114, 'norm_correct_prob_per_char': 0.240536970651776, 'primary_metric': 0.1695906432748538} |
90M | FineWeb-Pro | mmlu_world_religions | 2,500 | default | 5xC | 819,200,000 | 481,427,324,928,000,000 | {'predicted_index_raw': 1.4035087719298245, 'predicted_index_per_token': 1.5555555555555556, 'predicted_index_per_char': 1.4444444444444444, 'predicted_index_per_byte': 1.5555555555555556, 'predicted_index_uncond': 1.3801169590643274, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.16374269005847952, 'acc_per_token': 0.2573099415204678, 'acc_per_char': 0.1871345029239766, 'acc_per_byte': 0.29239766081871343, 'acc_uncond': 0.2573099415204678, 'no_answer': 0.0, 'sum_logits_corr': -18.411394951636332, 'logits_per_token_corr': -5.734079885695866, 'logits_per_char_corr': -1.5882772339085611, 'logits_per_byte_corr': 2.2874967348562047, 'correct_prob': 0.0011193199925607364, 'correct_prob_per_token': 0.01587686656658927, 'correct_prob_per_char': 0.2618901294990244, 'margin': -0.0002518486618243464, 'margin_per_token': -0.011629721951358148, 'margin_per_char': -0.08116352976855368, 'total_prob': 0.0026046236055851497, 'total_prob_per_token': 0.05856394916493114, 'total_prob_per_char': 1.0716050465165774, 'uncond_correct_prob': 4.905648192944516e-06, 'uncond_correct_prob_per_token': 0.004806120917142246, 'uncond_correct_prob_per_char': 0.1971659848916903, 'uncond_total_prob': 3.5806973394046745e-05, 'norm_correct_prob': 0.18038308083348442, 'norm_correct_prob_per_token': 0.23956882726315665, 'norm_correct_prob_per_char': 0.23790688363644152, 'primary_metric': 0.16374269005847952} |
90M | FineWeb-Pro | mmlu_world_religions | 3,750 | small aux 2 | 5xC | 1,228,800,000 | 722,140,987,392,000,000 | {'predicted_index_raw': 1.4912280701754386, 'predicted_index_per_token': 1.4795321637426901, 'predicted_index_per_char': 1.5321637426900585, 'predicted_index_per_byte': 1.5321637426900585, 'predicted_index_uncond': 1.7076023391812865, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.1871345029239766, 'acc_per_token': 0.2573099415204678, 'acc_per_char': 0.23391812865497075, 'acc_per_byte': 0.23391812865497075, 'acc_uncond': 0.2573099415204678, 'no_answer': 0.0, 'sum_logits_corr': -17.79723739066319, 'logits_per_token_corr': -5.588895434640885, 'logits_per_char_corr': -1.5326458481917375, 'bits_per_byte_corr': 2.207175917627625, 'correct_prob': 0.00014611422724492172, 'correct_prob_per_token': 0.01641325444698336, 'correct_prob_per_char': 0.26910578520599493, 'margin': -0.000584432731519038, 'margin_per_token': -0.017370879715954638, 'margin_per_char': -0.07962401565949277, 'total_prob': 0.0009714605848672577, 'total_prob_per_token': 0.06641495220192606, 'total_prob_per_char': 1.08586721958293, 'uncond_correct_prob': 5.724889831011119e-06, 'uncond_correct_prob_per_token': 0.004963087802996345, 'uncond_correct_prob_per_char': 0.19545256969937588, 'uncond_total_prob': 3.0357606966500255e-05, 'norm_correct_prob': 0.19153881504447845, 'norm_correct_prob_per_token': 0.2595721093333131, 'norm_correct_prob_per_char': 0.2425288983157661, 'primary_metric': 0.1871345029239766} |
90M | FineWeb-Pro | mmlu_world_religions | 3,750 | small aux 3 | 5xC | 1,228,800,000 | 722,140,987,392,000,000 | {'predicted_index_raw': 1.4152046783625731, 'predicted_index_per_token': 1.5789473684210527, 'predicted_index_per_char': 1.590643274853801, 'predicted_index_per_byte': 1.590643274853801, 'predicted_index_uncond': 1.4912280701754386, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.1871345029239766, 'acc_per_token': 0.2573099415204678, 'acc_per_char': 0.2222222222222222, 'acc_per_byte': 0.2222222222222222, 'acc_uncond': 0.21637426900584794, 'no_answer': 0.0, 'sum_logits_corr': -18.076841407352024, 'logits_per_token_corr': -5.58938123881121, 'logits_per_char_corr': -1.5425186800070236, 'bits_per_byte_corr': 2.221728736777451, 'correct_prob': 0.00045892006767128907, 'correct_prob_per_token': 0.015051772218266675, 'correct_prob_per_char': 0.2663758906922368, 'margin': -0.0006283597587474502, 'margin_per_token': -0.018990292388983167, 'margin_per_char': -0.08885327275305022, 'total_prob': 0.0016929627389675923, 'total_prob_per_token': 0.0658036206096856, 'total_prob_per_char': 1.0985045105149613, 'uncond_correct_prob': 3.7004498618917496e-06, 'uncond_correct_prob_per_token': 0.004529266590741231, 'uncond_correct_prob_per_char': 0.1955086862454833, 'uncond_total_prob': 2.6454019819588117e-05, 'norm_correct_prob': 0.18343241735542587, 'norm_correct_prob_per_token': 0.2476604955272727, 'norm_correct_prob_per_char': 0.2387858959401555, 'primary_metric': 0.1871345029239766} |
90M | FineWeb-Pro | mmlu_world_religions | 3,750 | default | 5xC | 1,228,800,000 | 722,140,987,392,000,000 | {'predicted_index_raw': 1.4502923976608186, 'predicted_index_per_token': 1.5321637426900585, 'predicted_index_per_char': 1.6198830409356726, 'predicted_index_per_byte': 1.567251461988304, 'predicted_index_uncond': 1.567251461988304, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.14619883040935672, 'acc_per_token': 0.21637426900584794, 'acc_per_char': 0.21637426900584794, 'acc_per_byte': 0.26900584795321636, 'acc_uncond': 0.24561403508771928, 'no_answer': 0.0, 'sum_logits_corr': -17.944411212240745, 'logits_per_token_corr': -5.48543674097177, 'logits_per_char_corr': -1.5152309724644724, 'logits_per_byte_corr': 2.1820724137113627, 'correct_prob': 0.00046406683504267293, 'correct_prob_per_token': 0.01608834149611517, 'correct_prob_per_char': 0.2715566724115962, 'margin': -0.00024362750517948218, 'margin_per_token': -0.016355891732102015, 'margin_per_char': -0.0838541619791452, 'total_prob': 0.0013589922479850685, 'total_prob_per_token': 0.06563162247762025, 'total_prob_per_char': 1.1143340975052225, 'uncond_correct_prob': 4.657857804600817e-06, 'uncond_correct_prob_per_token': 0.004561002353201138, 'uncond_correct_prob_per_char': 0.1915126111638022, 'uncond_total_prob': 2.5993030757675104e-05, 'norm_correct_prob': 0.16636403676413394, 'norm_correct_prob_per_token': 0.2340736266900831, 'norm_correct_prob_per_char': 0.23739284052014514, 'primary_metric': 0.14619883040935672} |
90M | FineWeb-Pro | mmlu_world_religions | 6,250 | small aux 2 | 5xC | 2,048,000,000 | 1,203,568,312,320,000,000 | {'predicted_index_raw': 1.3976608187134503, 'predicted_index_per_token': 1.4736842105263157, 'predicted_index_per_char': 1.4853801169590644, 'predicted_index_per_byte': 1.4853801169590644, 'predicted_index_uncond': 1.4385964912280702, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.19883040935672514, 'acc_per_token': 0.23976608187134502, 'acc_per_char': 0.24561403508771928, 'acc_per_byte': 0.24561403508771928, 'acc_uncond': 0.30994152046783624, 'no_answer': 0.0, 'sum_logits_corr': -17.18111410754466, 'logits_per_token_corr': -5.317559374849537, 'logits_per_char_corr': -1.443465755801496, 'bits_per_byte_corr': 2.0787353377708406, 'correct_prob': 0.000342198998260763, 'correct_prob_per_token': 0.020058057291886925, 'correct_prob_per_char': 0.2844354820504754, 'margin': -0.0008984250815955, 'margin_per_token': -0.02005300148918488, 'margin_per_char': -0.07964303710183925, 'total_prob': 0.0017973653524356195, 'total_prob_per_token': 0.08088732357660865, 'total_prob_per_char': 1.1423313516201716, 'uncond_correct_prob': 6.9483479573055225e-06, 'uncond_correct_prob_per_token': 0.004538480488848933, 'uncond_correct_prob_per_char': 0.19574443590267623, 'uncond_total_prob': 4.0055075499033764e-05, 'norm_correct_prob': 0.20141901813271307, 'norm_correct_prob_per_token': 0.2604312864462256, 'norm_correct_prob_per_char': 0.24464311705175437, 'primary_metric': 0.19883040935672514} |
90M | FineWeb-Pro | mmlu_world_religions | 6,250 | small aux 3 | 5xC | 2,048,000,000 | 1,203,568,312,320,000,000 | {'predicted_index_raw': 1.391812865497076, 'predicted_index_per_token': 1.5730994152046784, 'predicted_index_per_char': 1.5730994152046784, 'predicted_index_per_byte': 1.5730994152046784, 'predicted_index_uncond': 1.456140350877193, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.16374269005847952, 'acc_per_token': 0.22807017543859648, 'acc_per_char': 0.21052631578947367, 'acc_per_byte': 0.21052631578947367, 'acc_uncond': 0.21637426900584794, 'no_answer': 0.0, 'sum_logits_corr': -16.9926185259345, 'logits_per_token_corr': -5.244088534126467, 'logits_per_char_corr': -1.4320461720054634, 'bits_per_byte_corr': 2.0629545094548853, 'correct_prob': 0.0003937015776255762, 'correct_prob_per_token': 0.019831619287883507, 'correct_prob_per_char': 0.28640193254788865, 'margin': -0.0009547675193023921, 'margin_per_token': -0.02036618623384403, 'margin_per_char': -0.08622353496651339, 'total_prob': 0.0020817457155002978, 'total_prob_per_token': 0.08296581604442203, 'total_prob_per_char': 1.172849729744828, 'uncond_correct_prob': 4.8221866923064605e-06, 'uncond_correct_prob_per_token': 0.005297232526650757, 'uncond_correct_prob_per_char': 0.2018830372045086, 'uncond_total_prob': 3.694986648788753e-05, 'norm_correct_prob': 0.18732861356758967, 'norm_correct_prob_per_token': 0.24647118067684154, 'norm_correct_prob_per_char': 0.2405231410533244, 'primary_metric': 0.16374269005847952} |
90M | FineWeb-Pro | mmlu_world_religions | 6,250 | default | 5xC | 2,048,000,000 | 1,203,568,312,320,000,000 | {'predicted_index_raw': 1.5380116959064327, 'predicted_index_per_token': 1.4912280701754386, 'predicted_index_per_char': 1.5029239766081872, 'predicted_index_per_byte': 1.6608187134502923, 'predicted_index_uncond': 1.5263157894736843, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.1871345029239766, 'acc_per_token': 0.24561403508771928, 'acc_per_char': 0.2631578947368421, 'acc_per_byte': 0.2631578947368421, 'acc_uncond': 0.22807017543859648, 'no_answer': 0.0, 'sum_logits_corr': -16.85971364779779, 'logits_per_token_corr': -5.169977500003467, 'logits_per_char_corr': -1.419175503036134, 'logits_per_byte_corr': 2.0435781002094977, 'correct_prob': 0.0008584436526862034, 'correct_prob_per_token': 0.023240958455558822, 'correct_prob_per_char': 0.2952922402603406, 'margin': -0.0005985072237188475, 'margin_per_token': -0.020984320288370585, 'margin_per_char': -0.08411391489343463, 'total_prob': 0.002614256127493833, 'total_prob_per_token': 0.08836485162180721, 'total_prob_per_char': 1.1921226650150314, 'uncond_correct_prob': 5.5892853518882164e-06, 'uncond_correct_prob_per_token': 0.005494186317388381, 'uncond_correct_prob_per_char': 0.1991009615798796, 'uncond_total_prob': 3.488751668037105e-05, 'norm_correct_prob': 0.1847209196664655, 'norm_correct_prob_per_token': 0.24909533146042626, 'norm_correct_prob_per_char': 0.24229086563738092, 'primary_metric': 0.1871345029239766} |
90M | FineWeb-Pro | mmlu_world_religions | 7,500 | small aux 2 | 5xC | 2,457,600,000 | 1,444,281,974,784,000,000 | {'predicted_index_raw': 1.4269005847953216, 'predicted_index_per_token': 1.4853801169590644, 'predicted_index_per_char': 1.5087719298245614, 'predicted_index_per_byte': 1.5087719298245614, 'predicted_index_uncond': 1.5555555555555556, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.17543859649122806, 'acc_per_token': 0.25146198830409355, 'acc_per_char': 0.22807017543859648, 'acc_per_byte': 0.22807017543859648, 'acc_uncond': 0.2807017543859649, 'no_answer': 0.0, 'sum_logits_corr': -17.439990045034396, 'logits_per_token_corr': -5.360288001484853, 'logits_per_char_corr': -1.471672441743688, 'bits_per_byte_corr': 2.1196575751272366, 'correct_prob': 0.0003589404785054984, 'correct_prob_per_token': 0.018786598155597415, 'correct_prob_per_char': 0.2804163030778817, 'margin': -0.0012274099955907791, 'margin_per_token': -0.01990505615700935, 'margin_per_char': -0.08330741917470227, 'total_prob': 0.002204228863702127, 'total_prob_per_token': 0.07748201106780195, 'total_prob_per_char': 1.1313470859630532, 'uncond_correct_prob': 4.3567907821396e-06, 'uncond_correct_prob_per_token': 0.004793434605200941, 'uncond_correct_prob_per_char': 0.19479930746355537, 'uncond_total_prob': 2.6565724663160683e-05, 'norm_correct_prob': 0.1966222871450486, 'norm_correct_prob_per_token': 0.25133765785880857, 'norm_correct_prob_per_char': 0.24471869841290822, 'primary_metric': 0.17543859649122806} |
90M | FineWeb-Pro | mmlu_world_religions | 7,500 | small aux 3 | 5xC | 2,457,600,000 | 1,444,281,974,784,000,000 | {'predicted_index_raw': 1.3976608187134503, 'predicted_index_per_token': 1.456140350877193, 'predicted_index_per_char': 1.5087719298245614, 'predicted_index_per_byte': 1.5087719298245614, 'predicted_index_uncond': 1.4853801169590644, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.1871345029239766, 'acc_per_token': 0.2982456140350877, 'acc_per_char': 0.23391812865497075, 'acc_per_byte': 0.23391812865497075, 'acc_uncond': 0.23391812865497075, 'no_answer': 0.0, 'sum_logits_corr': -16.555220037873028, 'logits_per_token_corr': -5.101983401119704, 'logits_per_char_corr': -1.4033335053208498, 'bits_per_byte_corr': 2.0212177523680457, 'correct_prob': 0.0009279998675182859, 'correct_prob_per_token': 0.022915800837528862, 'correct_prob_per_char': 0.29757269321535773, 'margin': -0.001749926535329893, 'margin_per_token': -0.02420622607744293, 'margin_per_char': -0.09270037442069823, 'total_prob': 0.004121543684498305, 'total_prob_per_token': 0.09601783184539686, 'total_prob_per_char': 1.2181302872680173, 'uncond_correct_prob': 1.9195442156874338e-05, 'uncond_correct_prob_per_token': 0.005686229041239293, 'uncond_correct_prob_per_char': 0.20366007608858977, 'uncond_total_prob': 8.492324442981062e-05, 'norm_correct_prob': 0.18811424215010017, 'norm_correct_prob_per_token': 0.2541767376573461, 'norm_correct_prob_per_char': 0.24068498168333124, 'primary_metric': 0.1871345029239766} |
90M | FineWeb-Pro | mmlu_world_religions | 7,500 | default | 5xC | 2,457,600,000 | 1,444,281,974,784,000,000 | {'predicted_index_raw': 1.4736842105263157, 'predicted_index_per_token': 1.4502923976608186, 'predicted_index_per_char': 1.4619883040935673, 'predicted_index_per_byte': 1.6608187134502923, 'predicted_index_uncond': 1.4970760233918128, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.19883040935672514, 'acc_per_token': 0.29239766081871343, 'acc_per_char': 0.2573099415204678, 'acc_per_byte': 0.22807017543859648, 'acc_uncond': 0.23391812865497075, 'no_answer': 0.0, 'sum_logits_corr': -16.70568001061155, 'logits_per_token_corr': -5.125564272754894, 'logits_per_char_corr': -1.4175239955131829, 'logits_per_byte_corr': 2.04116471069933, 'correct_prob': 0.0019927459660305314, 'correct_prob_per_token': 0.02429587393959514, 'correct_prob_per_char': 0.297612886697799, 'margin': -0.00024695403211642114, 'margin_per_token': -0.01812927409193319, 'margin_per_char': -0.08389747392910424, 'total_prob': 0.004560190576775116, 'total_prob_per_token': 0.09022093075015748, 'total_prob_per_char': 1.1992770527546288, 'uncond_correct_prob': 4.972662714252523e-06, 'uncond_correct_prob_per_token': 0.005872253756008749, 'uncond_correct_prob_per_char': 0.20222871977383788, 'uncond_total_prob': 3.42545513871729e-05, 'norm_correct_prob': 0.1991086507637898, 'norm_correct_prob_per_token': 0.25656307718329185, 'norm_correct_prob_per_char': 0.2429765980483223, 'primary_metric': 0.19883040935672514} |
90M | FineWeb-Pro | mmlu_world_religions | 8,750 | small aux 2 | 5xC | 2,867,200,000 | 1,684,995,637,248,000,000 | {'predicted_index_raw': 1.3976608187134503, 'predicted_index_per_token': 1.4619883040935673, 'predicted_index_per_char': 1.5204678362573099, 'predicted_index_per_byte': 1.5204678362573099, 'predicted_index_uncond': 1.4736842105263157, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.21052631578947367, 'acc_per_token': 0.27485380116959063, 'acc_per_char': 0.25146198830409355, 'acc_per_byte': 0.25146198830409355, 'acc_uncond': 0.2807017543859649, 'no_answer': 0.0, 'sum_logits_corr': -16.13412760572824, 'logits_per_token_corr': -4.981236905283373, 'logits_per_char_corr': -1.3526624061702082, 'bits_per_byte_corr': 1.9475792137473713, 'correct_prob': 0.0006173674347817638, 'correct_prob_per_token': 0.02668296354825973, 'correct_prob_per_char': 0.3086233884586338, 'margin': -0.0017240094063585652, 'margin_per_token': -0.020907951934261256, 'margin_per_char': -0.08229438694886902, 'total_prob': 0.0034031264259052722, 'total_prob_per_token': 0.10117993305899708, 'total_prob_per_char': 1.2318211813204987, 'uncond_correct_prob': 8.125368766753415e-06, 'uncond_correct_prob_per_token': 0.005822310313617663, 'uncond_correct_prob_per_char': 0.20693047197864622, 'uncond_total_prob': 4.369968740333479e-05, 'norm_correct_prob': 0.20760747057895956, 'norm_correct_prob_per_token': 0.2628259767487796, 'norm_correct_prob_per_char': 0.24807875712053828, 'primary_metric': 0.21052631578947367} |
90M | FineWeb-Pro | mmlu_world_religions | 8,750 | small aux 3 | 5xC | 2,867,200,000 | 1,684,995,637,248,000,000 | {'predicted_index_raw': 1.4619883040935673, 'predicted_index_per_token': 1.5321637426900585, 'predicted_index_per_char': 1.4619883040935673, 'predicted_index_per_byte': 1.4619883040935673, 'predicted_index_uncond': 1.5555555555555556, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.21052631578947367, 'acc_per_token': 0.28654970760233917, 'acc_per_char': 0.22807017543859648, 'acc_per_byte': 0.22807017543859648, 'acc_uncond': 0.2573099415204678, 'no_answer': 0.0, 'sum_logits_corr': -16.286725040764836, 'logits_per_token_corr': -5.002889255930488, 'logits_per_char_corr': -1.3778376541225175, 'bits_per_byte_corr': 1.984465621158317, 'correct_prob': 0.001348551054326393, 'correct_prob_per_token': 0.023490157068151905, 'correct_prob_per_char': 0.3027675471021748, 'margin': -0.0006874422717686643, 'margin_per_token': -0.022464042950071126, 'margin_per_char': -0.08037722179418645, 'total_prob': 0.003819024997982092, 'total_prob_per_token': 0.09212232284290589, 'total_prob_per_char': 1.207223156481905, 'uncond_correct_prob': 3.834123665215564e-06, 'uncond_correct_prob_per_token': 0.005917490181802813, 'uncond_correct_prob_per_char': 0.20668217053711888, 'uncond_total_prob': 2.7462342542030733e-05, 'norm_correct_prob': 0.21043269173256104, 'norm_correct_prob_per_token': 0.2656330606561891, 'norm_correct_prob_per_char': 0.24689152069533976, 'primary_metric': 0.21052631578947367} |
90M | FineWeb-Pro | mmlu_world_religions | 8,750 | default | 5xC | 2,867,200,000 | 1,684,995,637,248,000,000 | {'predicted_index_raw': 1.4152046783625731, 'predicted_index_per_token': 1.5204678362573099, 'predicted_index_per_char': 1.5204678362573099, 'predicted_index_per_byte': 1.5789473684210527, 'predicted_index_uncond': 1.4444444444444444, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.17543859649122806, 'acc_per_token': 0.2807017543859649, 'acc_per_char': 0.27485380116959063, 'acc_per_byte': 0.27485380116959063, 'acc_uncond': 0.2631578947368421, 'no_answer': 0.0, 'sum_logits_corr': -16.869321098104557, 'logits_per_token_corr': -5.2111717410030565, 'logits_per_char_corr': -1.4254071648671192, 'logits_per_byte_corr': 2.052608213150081, 'correct_prob': 0.0007907405228271106, 'correct_prob_per_token': 0.021400508102081498, 'correct_prob_per_char': 0.2897735623098505, 'margin': -0.0005369623694645665, 'margin_per_token': -0.02075857229916592, 'margin_per_char': -0.08604158916706098, 'total_prob': 0.002326900305668641, 'total_prob_per_token': 0.08444835222555412, 'total_prob_per_char': 1.1750697351260158, 'uncond_correct_prob': 6.360678207905004e-06, 'uncond_correct_prob_per_token': 0.005573625659382251, 'uncond_correct_prob_per_char': 0.20040532926030832, 'uncond_total_prob': 4.21353518307591e-05, 'norm_correct_prob': 0.18782857132690042, 'norm_correct_prob_per_token': 0.2561647904463535, 'norm_correct_prob_per_char': 0.2424818287619152, 'primary_metric': 0.17543859649122806} |
90M | FineWeb-Pro | mmlu_world_religions | 10,000 | small aux 2 | 5xC | 3,276,800,000 | 1,925,709,299,712,000,000 | {'predicted_index_raw': 1.3157894736842106, 'predicted_index_per_token': 1.3859649122807018, 'predicted_index_per_char': 1.3976608187134503, 'predicted_index_per_byte': 1.3976608187134503, 'predicted_index_uncond': 1.4444444444444444, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.21637426900584794, 'acc_per_token': 0.30994152046783624, 'acc_per_char': 0.24561403508771928, 'acc_per_byte': 0.24561403508771928, 'acc_uncond': 0.29239766081871343, 'no_answer': 0.0, 'sum_logits_corr': -16.26228230180796, 'logits_per_token_corr': -4.986851913533586, 'logits_per_char_corr': -1.3544074177123735, 'bits_per_byte_corr': 1.9502774152175604, 'correct_prob': 0.0010279215313945855, 'correct_prob_per_token': 0.028877594450802, 'correct_prob_per_char': 0.3092146667622755, 'margin': -0.001174058295229654, 'margin_per_token': -0.023282394710458086, 'margin_per_char': -0.07903730634413285, 'total_prob': 0.003765990631324747, 'total_prob_per_token': 0.1081655305087976, 'total_prob_per_char': 1.2264537561849091, 'uncond_correct_prob': 9.362159207586286e-06, 'uncond_correct_prob_per_token': 0.0058320486857320894, 'uncond_correct_prob_per_char': 0.20720476124861137, 'uncond_total_prob': 6.0149627764891736e-05, 'norm_correct_prob': 0.21387123037799413, 'norm_correct_prob_per_token': 0.26684358186355417, 'norm_correct_prob_per_char': 0.24808630033699966, 'primary_metric': 0.21637426900584794} |
90M | FineWeb-Pro | mmlu_world_religions | 10,000 | small aux 3 | 5xC | 3,276,800,000 | 1,925,709,299,712,000,000 | {'predicted_index_raw': 1.2982456140350878, 'predicted_index_per_token': 1.409356725146199, 'predicted_index_per_char': 1.5029239766081872, 'predicted_index_per_byte': 1.5029239766081872, 'predicted_index_uncond': 1.543859649122807, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.21637426900584794, 'acc_per_token': 0.30994152046783624, 'acc_per_char': 0.2573099415204678, 'acc_per_byte': 0.2573099415204678, 'acc_uncond': 0.2982456140350877, 'no_answer': 0.0, 'sum_logits_corr': -16.00301348326499, 'logits_per_token_corr': -4.895210548264211, 'logits_per_char_corr': -1.3446640274032229, 'bits_per_byte_corr': 1.9369946779854197, 'correct_prob': 0.002414904377230935, 'correct_prob_per_token': 0.02777537456704997, 'correct_prob_per_char': 0.31149841009700546, 'margin': -0.0013098054416845685, 'margin_per_token': -0.02178449192682363, 'margin_per_char': -0.07819700274477388, 'total_prob': 0.006835249338723788, 'total_prob_per_token': 0.10338869307102042, 'total_prob_per_char': 1.2376804045547085, 'uncond_correct_prob': 6.262673560317602e-06, 'uncond_correct_prob_per_token': 0.0060311790057746804, 'uncond_correct_prob_per_char': 0.2084612970923314, 'uncond_total_prob': 4.0427627707521206e-05, 'norm_correct_prob': 0.21217794546525565, 'norm_correct_prob_per_token': 0.27363198345611534, 'norm_correct_prob_per_char': 0.24975212690574963, 'primary_metric': 0.21637426900584794} |
90M | FineWeb-Pro | mmlu_world_religions | 10,000 | default | 5xC | 3,276,800,000 | 1,925,709,299,712,000,000 | {'predicted_index_raw': 1.4853801169590644, 'predicted_index_per_token': 1.5029239766081872, 'predicted_index_per_char': 1.4853801169590644, 'predicted_index_per_byte': 1.567251461988304, 'predicted_index_uncond': 1.5087719298245614, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.21052631578947367, 'acc_per_token': 0.2573099415204678, 'acc_per_char': 0.25146198830409355, 'acc_per_byte': 0.3157894736842105, 'acc_uncond': 0.2573099415204678, 'no_answer': 0.0, 'sum_logits_corr': -16.35612007470159, 'logits_per_token_corr': -4.999125792261816, 'logits_per_char_corr': -1.3749801044377565, 'logits_per_byte_corr': 1.979898280072095, 'correct_prob': 0.0014276591701793066, 'correct_prob_per_token': 0.024275101846008684, 'correct_prob_per_char': 0.3039937529926707, 'margin': -0.001230218080468572, 'margin_per_token': -0.02085465627338231, 'margin_per_char': -0.08520074633935372, 'total_prob': 0.004509038404975792, 'total_prob_per_token': 0.09487745599199246, 'total_prob_per_char': 1.2295216549414407, 'uncond_correct_prob': 4.982217203190054e-06, 'uncond_correct_prob_per_token': 0.0053831935772691375, 'uncond_correct_prob_per_char': 0.19584404446208145, 'uncond_total_prob': 2.8796468253643732e-05, 'norm_correct_prob': 0.1958633618152197, 'norm_correct_prob_per_token': 0.2510308607980574, 'norm_correct_prob_per_char': 0.24245126651201096, 'primary_metric': 0.21052631578947367} |
90M | FineWeb-Pro | mmlu_world_religions | 11,250 | small aux 2 | 5xC | 3,686,400,000 | 2,166,422,962,176,000,000 | {'predicted_index_raw': 1.4152046783625731, 'predicted_index_per_token': 1.4444444444444444, 'predicted_index_per_char': 1.4210526315789473, 'predicted_index_per_byte': 1.4210526315789473, 'predicted_index_uncond': 1.4970760233918128, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.21637426900584794, 'acc_per_token': 0.23976608187134502, 'acc_per_char': 0.21637426900584794, 'acc_per_byte': 0.21637426900584794, 'acc_uncond': 0.28654970760233917, 'no_answer': 0.0, 'sum_logits_corr': -16.500161327116672, 'logits_per_token_corr': -5.1373688319821715, 'logits_per_char_corr': -1.3976342482985462, 'bits_per_byte_corr': 2.012723005758734, 'correct_prob': 0.00039279386811770837, 'correct_prob_per_token': 0.02453167336308944, 'correct_prob_per_char': 0.2984734100315149, 'margin': -0.0014672744296223234, 'margin_per_token': -0.02179888138234741, 'margin_per_char': -0.08377456700296096, 'total_prob': 0.0026398790492380537, 'total_prob_per_token': 0.09553503233151998, 'total_prob_per_char': 1.1972407042640552, 'uncond_correct_prob': 3.2818602794083778e-06, 'uncond_correct_prob_per_token': 0.005584320990944291, 'uncond_correct_prob_per_char': 0.20256795148660986, 'uncond_total_prob': 2.9273869099736086e-05, 'norm_correct_prob': 0.20852918385971425, 'norm_correct_prob_per_token': 0.2542958123936061, 'norm_correct_prob_per_char': 0.24451056350854566, 'primary_metric': 0.21637426900584794} |
90M | FineWeb-Pro | mmlu_world_religions | 11,250 | small aux 3 | 5xC | 3,686,400,000 | 2,166,422,962,176,000,000 | {'predicted_index_raw': 1.4444444444444444, 'predicted_index_per_token': 1.5029239766081872, 'predicted_index_per_char': 1.4970760233918128, 'predicted_index_per_byte': 1.4970760233918128, 'predicted_index_uncond': 1.4912280701754386, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.2046783625730994, 'acc_per_token': 0.2573099415204678, 'acc_per_char': 0.23391812865497075, 'acc_per_byte': 0.23391812865497075, 'acc_uncond': 0.2573099415204678, 'no_answer': 0.0, 'sum_logits_corr': -16.883306132422554, 'logits_per_token_corr': -5.255888400857136, 'logits_per_char_corr': -1.4418308928261028, 'bits_per_byte_corr': 2.0762878856037563, 'correct_prob': 0.0006540389536048241, 'correct_prob_per_token': 0.021125574630028846, 'correct_prob_per_char': 0.28948477469651585, 'margin': -0.0009733012154700706, 'margin_per_token': -0.018027055418271497, 'margin_per_char': -0.08011403131167319, 'total_prob': 0.0025558658834323815, 'total_prob_per_token': 0.08230247318477425, 'total_prob_per_char': 1.1623877754476726, 'uncond_correct_prob': 5.6001434224546264e-06, 'uncond_correct_prob_per_token': 0.005476033202994907, 'uncond_correct_prob_per_char': 0.20212028145409452, 'uncond_total_prob': 3.15187960399204e-05, 'norm_correct_prob': 0.19683896314002627, 'norm_correct_prob_per_token': 0.2588984180311869, 'norm_correct_prob_per_char': 0.2455701808351748, 'primary_metric': 0.2046783625730994} |
90M | FineWeb-Pro | mmlu_world_religions | 11,250 | default | 5xC | 3,686,400,000 | 2,166,422,962,176,000,000 | {'predicted_index_raw': 1.391812865497076, 'predicted_index_per_token': 1.4385964912280702, 'predicted_index_per_char': 1.5087719298245614, 'predicted_index_per_byte': 1.5847953216374269, 'predicted_index_uncond': 1.432748538011696, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.19298245614035087, 'acc_per_token': 0.2631578947368421, 'acc_per_char': 0.2222222222222222, 'acc_per_byte': 0.24561403508771928, 'acc_uncond': 0.25146198830409355, 'no_answer': 0.0, 'sum_logits_corr': -16.116219887259412, 'logits_per_token_corr': -4.934021645514459, 'logits_per_char_corr': -1.3581966173998576, 'logits_per_byte_corr': 1.9556727758007482, 'correct_prob': 0.0007212616980918088, 'correct_prob_per_token': 0.02687514191153689, 'correct_prob_per_char': 0.31119363336756717, 'margin': -0.0018297805161912293, 'margin_per_token': -0.023071706967231827, 'margin_per_char': -0.0904044104513365, 'total_prob': 0.003794825653721116, 'total_prob_per_token': 0.10568358542139858, 'total_prob_per_char': 1.2599239190888618, 'uncond_correct_prob': 2.050697213681638e-06, 'uncond_correct_prob_per_token': 0.00523071414232418, 'uncond_correct_prob_per_char': 0.19535713125827212, 'uncond_total_prob': 1.6301884662745856e-05, 'norm_correct_prob': 0.19251373919820547, 'norm_correct_prob_per_token': 0.25192361748184766, 'norm_correct_prob_per_char': 0.2424442996891088, 'primary_metric': 0.19298245614035087} |
90M | FineWeb-Pro | mmlu_world_religions | 12,500 | small aux 2 | 5xC | 4,096,000,000 | 2,407,136,624,640,000,000 | {'predicted_index_raw': 1.4152046783625731, 'predicted_index_per_token': 1.4619883040935673, 'predicted_index_per_char': 1.4385964912280702, 'predicted_index_per_byte': 1.432748538011696, 'predicted_index_uncond': 1.5204678362573099, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.19883040935672514, 'acc_per_token': 0.2631578947368421, 'acc_per_char': 0.22807017543859648, 'acc_per_byte': 0.22807017543859648, 'acc_uncond': 0.2631578947368421, 'no_answer': 0.0, 'sum_logits_corr': -16.11474585672568, 'logits_per_token_corr': -4.980201379109923, 'logits_per_char_corr': -1.3501401796453394, 'bits_per_byte_corr': 1.9447117151260325, 'correct_prob': 0.0005145344130678771, 'correct_prob_per_token': 0.027206679722253966, 'correct_prob_per_char': 0.3078255089916152, 'margin': -0.0018180894011945682, 'margin_per_token': -0.023985291708525824, 'margin_per_char': -0.08607942502916481, 'total_prob': 0.003288059456240885, 'total_prob_per_token': 0.106548362011567, 'total_prob_per_char': 1.2427364424142116, 'uncond_correct_prob': 3.116374447133029e-06, 'uncond_correct_prob_per_token': 0.0055892148945119145, 'uncond_correct_prob_per_char': 0.20125672103578723, 'uncond_total_prob': 2.5005251219012056e-05, 'norm_correct_prob': 0.20094072952754674, 'norm_correct_prob_per_token': 0.252401871400801, 'norm_correct_prob_per_char': 0.2429520977891646, 'primary_metric': 0.19883040935672514} |
90M | FineWeb-Pro | mmlu_world_religions | 12,500 | small aux 3 | 5xC | 4,096,000,000 | 2,407,136,624,640,000,000 | {'predicted_index_raw': 1.3742690058479532, 'predicted_index_per_token': 1.4619883040935673, 'predicted_index_per_char': 1.5029239766081872, 'predicted_index_per_byte': 1.5029239766081872, 'predicted_index_uncond': 1.5029239766081872, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.2222222222222222, 'acc_per_token': 0.30409356725146197, 'acc_per_char': 0.2222222222222222, 'acc_per_byte': 0.2222222222222222, 'acc_uncond': 0.24561403508771928, 'no_answer': 0.0, 'sum_logits_corr': -16.008815495591413, 'logits_per_token_corr': -4.941696253316162, 'logits_per_char_corr': -1.3539118508841277, 'bits_per_byte_corr': 1.9498485645215102, 'correct_prob': 0.0018856062095043652, 'correct_prob_per_token': 0.028194423691959573, 'correct_prob_per_char': 0.3107981420344059, 'margin': -0.000556961370029634, 'margin_per_token': -0.0206731855708238, 'margin_per_char': -0.08286595195461306, 'total_prob': 0.004829596202798666, 'total_prob_per_token': 0.10460531400774395, 'total_prob_per_char': 1.2464898539523408, 'uncond_correct_prob': 4.82307560568848e-06, 'uncond_correct_prob_per_token': 0.005228018694709702, 'uncond_correct_prob_per_char': 0.19851939678937874, 'uncond_total_prob': 2.5861282203106635e-05, 'norm_correct_prob': 0.2100000158010319, 'norm_correct_prob_per_token': 0.2610683097907215, 'norm_correct_prob_per_char': 0.2461618841270836, 'primary_metric': 0.2222222222222222} |
90M | FineWeb-Pro | mmlu_world_religions | 12,500 | default | 5xC | 4,096,000,000 | 2,407,136,624,640,000,000 | {'predicted_index_raw': 1.391812865497076, 'predicted_index_per_token': 1.5263157894736843, 'predicted_index_per_char': 1.4970760233918128, 'predicted_index_per_byte': 1.5789473684210527, 'predicted_index_uncond': 1.5146198830409356, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.17543859649122806, 'acc_per_token': 0.26900584795321636, 'acc_per_char': 0.22807017543859648, 'acc_per_byte': 0.26900584795321636, 'acc_uncond': 0.25146198830409355, 'no_answer': 0.0, 'sum_logits_corr': -16.31386473304347, 'logits_per_token_corr': -5.035505977534168, 'logits_per_char_corr': -1.375178127950969, 'logits_per_byte_corr': 1.9802295474748897, 'correct_prob': 0.0004804291060515349, 'correct_prob_per_token': 0.02387932410832479, 'correct_prob_per_char': 0.3031230460159716, 'margin': -0.0020365172934364746, 'margin_per_token': -0.02142520685615742, 'margin_per_char': -0.08215619860721086, 'total_prob': 0.0034039464506592544, 'total_prob_per_token': 0.0941215251055036, 'total_prob_per_char': 1.2259912371867983, 'uncond_correct_prob': 3.927463072881002e-06, 'uncond_correct_prob_per_token': 0.005151167456674842, 'uncond_correct_prob_per_char': 0.1951035253068412, 'uncond_total_prob': 2.694885306109514e-05, 'norm_correct_prob': 0.18955851759720904, 'norm_correct_prob_per_token': 0.25351149811264284, 'norm_correct_prob_per_char': 0.24252125875822508, 'primary_metric': 0.17543859649122806} |
90M | FineWeb-Pro | mmlu_world_religions | 13,750 | small aux 2 | 5xC | 4,505,600,000 | 2,647,850,287,104,000,000 | {'predicted_index_raw': 1.4502923976608186, 'predicted_index_per_token': 1.4853801169590644, 'predicted_index_per_char': 1.5964912280701755, 'predicted_index_per_byte': 1.5964912280701755, 'predicted_index_uncond': 1.4736842105263157, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.22807017543859648, 'acc_per_token': 0.23976608187134502, 'acc_per_char': 0.21637426900584794, 'acc_per_byte': 0.21637426900584794, 'acc_uncond': 0.2573099415204678, 'no_answer': 0.0, 'sum_logits_corr': -15.888531715549224, 'logits_per_token_corr': -4.877047783183528, 'logits_per_char_corr': -1.3184853412584538, 'bits_per_byte_corr': 1.8986626409047702, 'correct_prob': 0.0008180262968230838, 'correct_prob_per_token': 0.028437735822015288, 'correct_prob_per_char': 0.31416853589886606, 'margin': -0.0023619975761889895, 'margin_per_token': -0.022580298912188105, 'margin_per_char': -0.08616003723857589, 'total_prob': 0.004463916404618118, 'total_prob_per_token': 0.10804666280466074, 'total_prob_per_char': 1.2620986624387989, 'uncond_correct_prob': 3.2293413357538676e-06, 'uncond_correct_prob_per_token': 0.005615202272874811, 'uncond_correct_prob_per_char': 0.20316128892455132, 'uncond_total_prob': 2.2261991871500734e-05, 'norm_correct_prob': 0.21084723627540478, 'norm_correct_prob_per_token': 0.25410124318938515, 'norm_correct_prob_per_char': 0.2462650879858174, 'primary_metric': 0.22807017543859648} |
90M | FineWeb-Pro | mmlu_world_religions | 13,750 | small aux 3 | 5xC | 4,505,600,000 | 2,647,850,287,104,000,000 | {'predicted_index_raw': 1.4210526315789473, 'predicted_index_per_token': 1.5204678362573099, 'predicted_index_per_char': 1.5321637426900585, 'predicted_index_per_byte': 1.5321637426900585, 'predicted_index_uncond': 1.4970760233918128, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.24561403508771928, 'acc_per_token': 0.30409356725146197, 'acc_per_char': 0.2573099415204678, 'acc_per_byte': 0.2573099415204678, 'acc_uncond': 0.3157894736842105, 'no_answer': 0.0, 'sum_logits_corr': -15.601090743527775, 'logits_per_token_corr': -4.81500831720863, 'logits_per_char_corr': -1.3190581472104976, 'bits_per_byte_corr': 1.8999091744893173, 'correct_prob': 0.001522627721099684, 'correct_prob_per_token': 0.029993987596524063, 'correct_prob_per_char': 0.318475039803457, 'margin': -0.0014762545880348873, 'margin_per_token': -0.021529775068273707, 'margin_per_char': -0.07888166624550143, 'total_prob': 0.005268140111709159, 'total_prob_per_token': 0.10940183065417652, 'total_prob_per_char': 1.265931576818713, 'uncond_correct_prob': 5.855977875883163e-06, 'uncond_correct_prob_per_token': 0.005902016626795971, 'uncond_correct_prob_per_char': 0.20369347151245823, 'uncond_total_prob': 3.652990600294403e-05, 'norm_correct_prob': 0.23278367173363224, 'norm_correct_prob_per_token': 0.27198957662514045, 'norm_correct_prob_per_char': 0.24886856664178927, 'primary_metric': 0.24561403508771928} |
90M | FineWeb-Pro | mmlu_world_religions | 13,750 | default | 5xC | 4,505,600,000 | 2,647,850,287,104,000,000 | {'predicted_index_raw': 1.368421052631579, 'predicted_index_per_token': 1.5029239766081872, 'predicted_index_per_char': 1.4444444444444444, 'predicted_index_per_byte': 1.6491228070175439, 'predicted_index_uncond': 1.4619883040935673, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.18128654970760233, 'acc_per_token': 0.24561403508771928, 'acc_per_char': 0.2573099415204678, 'acc_per_byte': 0.23976608187134502, 'acc_uncond': 0.27485380116959063, 'no_answer': 0.0, 'sum_logits_corr': -16.08347803946824, 'logits_per_token_corr': -4.934799093455596, 'logits_per_char_corr': -1.340145813297492, 'logits_per_byte_corr': 1.9294473224721453, 'correct_prob': 0.0008221245245664372, 'correct_prob_per_token': 0.027272969496090118, 'correct_prob_per_char': 0.310469247505207, 'margin': -0.0016697212002930494, 'margin_per_token': -0.023067660896731835, 'margin_per_char': -0.08127145513492491, 'total_prob': 0.0037712151692044517, 'total_prob_per_token': 0.10509132089622603, 'total_prob_per_char': 1.2502917809920095, 'uncond_correct_prob': 7.57019052984137e-06, 'uncond_correct_prob_per_token': 0.006169615971506807, 'uncond_correct_prob_per_char': 0.2052750942092204, 'uncond_total_prob': 3.8900272041369077e-05, 'norm_correct_prob': 0.1947670822336955, 'norm_correct_prob_per_token': 0.2582943810504493, 'norm_correct_prob_per_char': 0.24439130066008502, 'primary_metric': 0.18128654970760233} |
90M | FineWeb-Pro | mmlu_world_religions | 15,000 | small aux 2 | 5xC | 4,915,200,000 | 2,888,563,949,568,000,000 | {'predicted_index_raw': 1.4444444444444444, 'predicted_index_per_token': 1.4736842105263157, 'predicted_index_per_char': 1.409356725146199, 'predicted_index_per_byte': 1.409356725146199, 'predicted_index_uncond': 1.5380116959064327, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.19883040935672514, 'acc_per_token': 0.2573099415204678, 'acc_per_char': 0.22807017543859648, 'acc_per_byte': 0.22807017543859648, 'acc_uncond': 0.23976608187134502, 'no_answer': 0.0, 'sum_logits_corr': -15.844110962940238, 'logits_per_token_corr': -4.901825792189153, 'logits_per_char_corr': -1.3315498351118664, 'bits_per_byte_corr': 1.9177058682404469, 'correct_prob': 0.000809099713018077, 'correct_prob_per_token': 0.028947311705222238, 'correct_prob_per_char': 0.31516205627293703, 'margin': -0.00142296602456892, 'margin_per_token': -0.02493652321958096, 'margin_per_char': -0.08638635298291016, 'total_prob': 0.003562451330203738, 'total_prob_per_token': 0.11208341319377581, 'total_prob_per_char': 1.2680050353909806, 'uncond_correct_prob': 2.9237596390853365e-06, 'uncond_correct_prob_per_token': 0.005559766537938038, 'uncond_correct_prob_per_char': 0.20032545198401966, 'uncond_total_prob': 1.873297999990715e-05, 'norm_correct_prob': 0.2005495276221039, 'norm_correct_prob_per_token': 0.2526041684474644, 'norm_correct_prob_per_char': 0.24501822166871218, 'primary_metric': 0.19883040935672514} |
90M | FineWeb-Pro | mmlu_world_religions | 15,000 | small aux 3 | 5xC | 4,915,200,000 | 2,888,563,949,568,000,000 | {'predicted_index_raw': 1.3625730994152048, 'predicted_index_per_token': 1.4912280701754386, 'predicted_index_per_char': 1.4444444444444444, 'predicted_index_per_byte': 1.4444444444444444, 'predicted_index_uncond': 1.5087719298245614, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.26900584795321636, 'acc_per_token': 0.3216374269005848, 'acc_per_char': 0.28654970760233917, 'acc_per_byte': 0.28654970760233917, 'acc_uncond': 0.3391812865497076, 'no_answer': 0.0, 'sum_logits_corr': -15.556796356948496, 'logits_per_token_corr': -4.795307634968977, 'logits_per_char_corr': -1.3089805077417922, 'bits_per_byte_corr': 1.8849343957236564, 'correct_prob': 0.0010682180750654223, 'correct_prob_per_token': 0.029452768726978495, 'correct_prob_per_char': 0.3201917113594888, 'margin': -0.0015536217519069278, 'margin_per_token': -0.021162760046346606, 'margin_per_char': -0.07741157466737279, 'total_prob': 0.004287963814054991, 'total_prob_per_token': 0.10865016279656317, 'total_prob_per_char': 1.2636411871094857, 'uncond_correct_prob': 8.975563917935991e-06, 'uncond_correct_prob_per_token': 0.004828150388652458, 'uncond_correct_prob_per_char': 0.19840017782448197, 'uncond_total_prob': 4.286705409851461e-05, 'norm_correct_prob': 0.23793819296898935, 'norm_correct_prob_per_token': 0.27613483183204873, 'norm_correct_prob_per_char': 0.25068681939863413, 'primary_metric': 0.26900584795321636} |
90M | FineWeb-Pro | mmlu_world_religions | 15,000 | default | 5xC | 4,915,200,000 | 2,888,563,949,568,000,000 | {'predicted_index_raw': 1.3333333333333333, 'predicted_index_per_token': 1.5029239766081872, 'predicted_index_per_char': 1.4385964912280702, 'predicted_index_per_byte': 1.590643274853801, 'predicted_index_uncond': 1.5555555555555556, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.19883040935672514, 'acc_per_token': 0.26900584795321636, 'acc_per_char': 0.24561403508771928, 'acc_per_byte': 0.27485380116959063, 'acc_uncond': 0.26900584795321636, 'no_answer': 0.0, 'sum_logits_corr': -15.651027411745305, 'logits_per_token_corr': -4.817236776094229, 'logits_per_char_corr': -1.3120373775887375, 'logits_per_byte_corr': 1.8892934333075762, 'correct_prob': 0.0009201361920501567, 'correct_prob_per_token': 0.03054759992759369, 'correct_prob_per_char': 0.31989752645250147, 'margin': -0.0018628168533370625, 'margin_per_token': -0.02628819955668071, 'margin_per_char': -0.08483084651809254, 'total_prob': 0.004377488817105749, 'total_prob_per_token': 0.11788792623396914, 'total_prob_per_char': 1.28613361549633, 'uncond_correct_prob': 8.673336598251728e-06, 'uncond_correct_prob_per_token': 0.006565453295430156, 'uncond_correct_prob_per_char': 0.2071209177644511, 'uncond_total_prob': 5.2682450180110686e-05, 'norm_correct_prob': 0.1994500672211267, 'norm_correct_prob_per_token': 0.2569612311616095, 'norm_correct_prob_per_char': 0.24458477574940377, 'primary_metric': 0.19883040935672514} |
90M | FineWeb-Pro | mmlu_world_religions | 17,500 | small aux 2 | 5xC | 5,734,400,000 | 3,369,991,274,496,000,000 | {'predicted_index_raw': 1.4210526315789473, 'predicted_index_per_token': 1.4444444444444444, 'predicted_index_per_char': 1.5321637426900585, 'predicted_index_per_byte': 1.5321637426900585, 'predicted_index_uncond': 1.432748538011696, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.23391812865497075, 'acc_per_token': 0.3216374269005848, 'acc_per_char': 0.2631578947368421, 'acc_per_byte': 0.2631578947368421, 'acc_uncond': 0.25146198830409355, 'no_answer': 0.0, 'sum_logits_corr': -15.153000094040095, 'logits_per_token_corr': -4.685389054696169, 'logits_per_char_corr': -1.2698493935852713, 'bits_per_byte_corr': 1.8287856712382224, 'correct_prob': 0.0008348501839328547, 'correct_prob_per_token': 0.03309076718032204, 'correct_prob_per_char': 0.33021156346058617, 'margin': -0.0017307935288026531, 'margin_per_token': -0.022610022100715776, 'margin_per_char': -0.07498005686955463, 'total_prob': 0.004220846760339005, 'total_prob_per_token': 0.12154435176582681, 'total_prob_per_char': 1.303262518202412, 'uncond_correct_prob': 4.4234408593101334e-06, 'uncond_correct_prob_per_token': 0.0070021239330902586, 'uncond_correct_prob_per_char': 0.21305717641985336, 'uncond_total_prob': 3.0764173920888995e-05, 'norm_correct_prob': 0.22885949537326167, 'norm_correct_prob_per_token': 0.27494400508792044, 'norm_correct_prob_per_char': 0.2508964631709146, 'primary_metric': 0.23391812865497075} |
90M | FineWeb-Pro | mmlu_world_religions | 17,500 | small aux 3 | 5xC | 5,734,400,000 | 3,369,991,274,496,000,000 | {'predicted_index_raw': 1.3859649122807018, 'predicted_index_per_token': 1.5204678362573099, 'predicted_index_per_char': 1.5204678362573099, 'predicted_index_per_byte': 1.5204678362573099, 'predicted_index_uncond': 1.4269005847953216, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.2046783625730994, 'acc_per_token': 0.30409356725146197, 'acc_per_char': 0.27485380116959063, 'acc_per_byte': 0.27485380116959063, 'acc_uncond': 0.2573099415204678, 'no_answer': 0.0, 'sum_logits_corr': -15.630797200732761, 'logits_per_token_corr': -4.866663993705713, 'logits_per_char_corr': -1.3260953790167263, 'bits_per_byte_corr': 1.9096980034896833, 'correct_prob': 0.0008984968974379539, 'correct_prob_per_token': 0.030576598653923932, 'correct_prob_per_char': 0.3180129722966147, 'margin': -0.0018154670718026815, 'margin_per_token': -0.022259736151581902, 'margin_per_char': -0.0768433380625149, 'total_prob': 0.004348433215640446, 'total_prob_per_token': 0.11406616683869451, 'total_prob_per_char': 1.265239679955277, 'uncond_correct_prob': 7.4185952978810165e-06, 'uncond_correct_prob_per_token': 0.006260400658251332, 'uncond_correct_prob_per_char': 0.2063654244218547, 'uncond_total_prob': 4.07176385982189e-05, 'norm_correct_prob': 0.204714162079921, 'norm_correct_prob_per_token': 0.27259719693165696, 'norm_correct_prob_per_char': 0.24866526931392746, 'primary_metric': 0.2046783625730994} |
90M | FineWeb-Pro | mmlu_world_religions | 17,500 | default | 5xC | 5,734,400,000 | 3,369,991,274,496,000,000 | {'predicted_index_raw': 1.409356725146199, 'predicted_index_per_token': 1.4619883040935673, 'predicted_index_per_char': 1.4035087719298245, 'predicted_index_per_byte': 1.654970760233918, 'predicted_index_uncond': 1.4619883040935673, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.22807017543859648, 'acc_per_token': 0.32748538011695905, 'acc_per_char': 0.2807017543859649, 'acc_per_byte': 0.27485380116959063, 'acc_uncond': 0.3157894736842105, 'no_answer': 0.0, 'sum_logits_corr': -15.154332526246009, 'logits_per_token_corr': -4.624517601475568, 'logits_per_char_corr': -1.2709880107171816, 'logits_per_byte_corr': 1.830195760446534, 'correct_prob': 0.0016031564085411854, 'correct_prob_per_token': 0.03500386049235959, 'correct_prob_per_char': 0.33403323483578046, 'margin': -0.00233771936764326, 'margin_per_token': -0.02176506080765797, 'margin_per_char': -0.08221870552391645, 'total_prob': 0.006602723148443335, 'total_prob_per_token': 0.12737317022894853, 'total_prob_per_char': 1.33110615031547, 'uncond_correct_prob': 3.5980981636776485e-06, 'uncond_correct_prob_per_token': 0.006101862427142784, 'uncond_correct_prob_per_char': 0.20075908118959562, 'uncond_total_prob': 2.239791665893875e-05, 'norm_correct_prob': 0.22431668257011725, 'norm_correct_prob_per_token': 0.27422635388700906, 'norm_correct_prob_per_char': 0.2472788784605638, 'primary_metric': 0.22807017543859648} |
90M | FineWeb-Pro | mmlu_world_religions | 18,750 | small aux 2 | 5xC | 6,144,000,000 | 3,610,704,936,960,000,000 | {'predicted_index_raw': 1.4619883040935673, 'predicted_index_per_token': 1.5263157894736843, 'predicted_index_per_char': 1.5321637426900585, 'predicted_index_per_byte': 1.5263157894736843, 'predicted_index_uncond': 1.5263157894736843, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.2046783625730994, 'acc_per_token': 0.2807017543859649, 'acc_per_char': 0.22807017543859648, 'acc_per_byte': 0.22807017543859648, 'acc_uncond': 0.2982456140350877, 'no_answer': 0.0, 'sum_logits_corr': -15.145830374712135, 'logits_per_token_corr': -4.676105827326549, 'logits_per_char_corr': -1.2619674936999443, 'bits_per_byte_corr': 1.817515318979805, 'correct_prob': 0.00100255211014232, 'correct_prob_per_token': 0.03652534002118361, 'correct_prob_per_char': 0.33250503633173506, 'margin': -0.002305246485948774, 'margin_per_token': -0.024019330081971848, 'margin_per_char': -0.07820945000647234, 'total_prob': 0.00529363940873176, 'total_prob_per_token': 0.13406269482794497, 'total_prob_per_char': 1.3220306899845498, 'uncond_correct_prob': 4.160139971762994e-06, 'uncond_correct_prob_per_token': 0.006733150309420307, 'uncond_correct_prob_per_char': 0.21199333974472717, 'uncond_total_prob': 3.20019514973298e-05, 'norm_correct_prob': 0.2263995325072242, 'norm_correct_prob_per_token': 0.27339420434316375, 'norm_correct_prob_per_char': 0.24939721271958937, 'primary_metric': 0.2046783625730994} |
90M | FineWeb-Pro | mmlu_world_religions | 18,750 | small aux 3 | 5xC | 6,144,000,000 | 3,610,704,936,960,000,000 | {'predicted_index_raw': 1.3567251461988303, 'predicted_index_per_token': 1.5555555555555556, 'predicted_index_per_char': 1.4152046783625731, 'predicted_index_per_byte': 1.4152046783625731, 'predicted_index_uncond': 1.4912280701754386, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.25146198830409355, 'acc_per_token': 0.30994152046783624, 'acc_per_char': 0.26900584795321636, 'acc_per_byte': 0.26900584795321636, 'acc_uncond': 0.30409356725146197, 'no_answer': 0.0, 'sum_logits_corr': -14.627450618130421, 'logits_per_token_corr': -4.490781293360155, 'logits_per_char_corr': -1.2370608794865465, 'bits_per_byte_corr': 1.7812743186146187, 'correct_prob': 0.002409304346337359, 'correct_prob_per_token': 0.037808420268498504, 'correct_prob_per_char': 0.34460189978804967, 'margin': -0.001311154662836719, 'margin_per_token': -0.025416655373616447, 'margin_per_char': -0.0756936401913966, 'total_prob': 0.007241110648736334, 'total_prob_per_token': 0.1349535865334901, 'total_prob_per_char': 1.3518111784727338, 'uncond_correct_prob': 5.425834890982316e-06, 'uncond_correct_prob_per_token': 0.005781129588983638, 'uncond_correct_prob_per_char': 0.2043306358394741, 'uncond_total_prob': 3.042020983191191e-05, 'norm_correct_prob': 0.2335249388458982, 'norm_correct_prob_per_token': 0.28089870513096843, 'norm_correct_prob_per_char': 0.2507786299296194, 'primary_metric': 0.25146198830409355} |
90M | FineWeb-Pro | mmlu_world_religions | 18,750 | default | 5xC | 6,144,000,000 | 3,610,704,936,960,000,000 | {'predicted_index_raw': 1.327485380116959, 'predicted_index_per_token': 1.456140350877193, 'predicted_index_per_char': 1.432748538011696, 'predicted_index_per_byte': 1.608187134502924, 'predicted_index_uncond': 1.4385964912280702, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.2573099415204678, 'acc_per_token': 0.3216374269005848, 'acc_per_char': 0.25146198830409355, 'acc_per_byte': 0.2631578947368421, 'acc_uncond': 0.28654970760233917, 'no_answer': 0.0, 'sum_logits_corr': -14.952786908512227, 'logits_per_token_corr': -4.609493742907413, 'logits_per_char_corr': -1.2642445292170466, 'logits_per_byte_corr': 1.8202602992530381, 'correct_prob': 0.0019334955289092935, 'correct_prob_per_token': 0.03393578194930712, 'correct_prob_per_char': 0.3346153541525787, 'margin': -0.0011956813885805255, 'margin_per_token': -0.02565625375175161, 'margin_per_char': -0.08149816344128345, 'total_prob': 0.005600607331362247, 'total_prob_per_token': 0.1263415386117427, 'total_prob_per_char': 1.3334859639911343, 'uncond_correct_prob': 3.125178202136131e-06, 'uncond_correct_prob_per_token': 0.006531070772689289, 'uncond_correct_prob_per_char': 0.2063103990439582, 'uncond_total_prob': 2.2320056700314525e-05, 'norm_correct_prob': 0.22716596479339551, 'norm_correct_prob_per_token': 0.27293695115479827, 'norm_correct_prob_per_char': 0.247647286198127, 'primary_metric': 0.2573099415204678} |
90M | FineWeb-Pro | mmlu_world_religions | 20,000 | small aux 2 | 5xC | 6,553,600,000 | 3,851,418,599,424,000,000 | {'predicted_index_raw': 1.327485380116959, 'predicted_index_per_token': 1.456140350877193, 'predicted_index_per_char': 1.4269005847953216, 'predicted_index_per_byte': 1.4269005847953216, 'predicted_index_uncond': 1.5087719298245614, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.23391812865497075, 'acc_per_token': 0.3157894736842105, 'acc_per_char': 0.2631578947368421, 'acc_per_byte': 0.2631578947368421, 'acc_uncond': 0.32748538011695905, 'no_answer': 0.0, 'sum_logits_corr': -14.579628068801256, 'logits_per_token_corr': -4.481362689118254, 'logits_per_char_corr': -1.2216319785553431, 'bits_per_byte_corr': 1.75920213057457, 'correct_prob': 0.001236318023573273, 'correct_prob_per_token': 0.037375184541866474, 'correct_prob_per_char': 0.34541896237891984, 'margin': -0.0016421079629361744, 'margin_per_token': -0.02299760331498423, 'margin_per_char': -0.0725093104198182, 'total_prob': 0.005417896311599067, 'total_prob_per_token': 0.13656549623954597, 'total_prob_per_char': 1.3590566499838477, 'uncond_correct_prob': 3.171931429273329e-06, 'uncond_correct_prob_per_token': 0.007107273781528132, 'uncond_correct_prob_per_char': 0.21066949782139222, 'uncond_total_prob': 2.2438450869517452e-05, 'norm_correct_prob': 0.21948572639005265, 'norm_correct_prob_per_token': 0.2769484119232747, 'norm_correct_prob_per_char': 0.2508121835031694, 'primary_metric': 0.23391812865497075} |
90M | FineWeb-Pro | mmlu_world_religions | 20,000 | small aux 3 | 5xC | 6,553,600,000 | 3,851,418,599,424,000,000 | {'predicted_index_raw': 1.432748538011696, 'predicted_index_per_token': 1.5204678362573099, 'predicted_index_per_char': 1.4152046783625731, 'predicted_index_per_byte': 1.4152046783625731, 'predicted_index_uncond': 1.4736842105263157, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.24561403508771928, 'acc_per_token': 0.28654970760233917, 'acc_per_char': 0.29239766081871343, 'acc_per_byte': 0.29239766081871343, 'acc_uncond': 0.26900584795321636, 'no_answer': 0.0, 'sum_logits_corr': -14.644913225146064, 'logits_per_token_corr': -4.534601777460099, 'logits_per_char_corr': -1.2330076285580214, 'bits_per_byte_corr': 1.7755720849712116, 'correct_prob': 0.0016307729214126343, 'correct_prob_per_token': 0.03921772115575557, 'correct_prob_per_char': 0.34425343957916527, 'margin': -0.002716102924610404, 'margin_per_token': -0.025107184917216026, 'margin_per_char': -0.07550060638510762, 'total_prob': 0.007089502003081815, 'total_prob_per_token': 0.139746879463358, 'total_prob_per_char': 1.3517668167555656, 'uncond_correct_prob': 7.28864890944732e-06, 'uncond_correct_prob_per_token': 0.006833190626742185, 'uncond_correct_prob_per_char': 0.2145923385213629, 'uncond_total_prob': 4.178840675761425e-05, 'norm_correct_prob': 0.23830683456883703, 'norm_correct_prob_per_token': 0.28167250943336203, 'norm_correct_prob_per_char': 0.251401263666305, 'primary_metric': 0.24561403508771928} |
90M | FineWeb-Pro | mmlu_world_religions | 20,000 | default | 5xC | 6,553,600,000 | 3,851,418,599,424,000,000 | {'predicted_index_raw': 1.3391812865497077, 'predicted_index_per_token': 1.456140350877193, 'predicted_index_per_char': 1.4912280701754386, 'predicted_index_per_byte': 1.6023391812865497, 'predicted_index_uncond': 1.5263157894736843, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.23976608187134502, 'acc_per_token': 0.30994152046783624, 'acc_per_char': 0.26900584795321636, 'acc_per_byte': 0.2573099415204678, 'acc_uncond': 0.29239766081871343, 'no_answer': 0.0, 'sum_logits_corr': -14.819255890204893, 'logits_per_token_corr': -4.560064889147918, 'logits_per_char_corr': -1.2444915891834087, 'logits_per_byte_corr': 1.7921542406650446, 'correct_prob': 0.0020180849433295302, 'correct_prob_per_token': 0.03756107191247466, 'correct_prob_per_char': 0.34092663954157765, 'margin': -0.0021727342465889996, 'margin_per_token': -0.027567829492635556, 'margin_per_char': -0.08272897734002906, 'total_prob': 0.007022290269236232, 'total_prob_per_token': 0.14053844802474663, 'total_prob_per_char': 1.3630565120229872, 'uncond_correct_prob': 5.28173746362308e-06, 'uncond_correct_prob_per_token': 0.006900177332789822, 'uncond_correct_prob_per_char': 0.2110527465554516, 'uncond_total_prob': 3.5472899809202715e-05, 'norm_correct_prob': 0.22228688413913905, 'norm_correct_prob_per_token': 0.27109612029280983, 'norm_correct_prob_per_char': 0.24622578554314548, 'primary_metric': 0.23976608187134502} |
90M | FineWeb-Pro | mmlu_world_religions | 21,250 | small aux 2 | 5xC | 6,963,200,000 | 4,092,132,261,888,000,000 | {'predicted_index_raw': 1.4619883040935673, 'predicted_index_per_token': 1.5146198830409356, 'predicted_index_per_char': 1.590643274853801, 'predicted_index_per_byte': 1.5964912280701755, 'predicted_index_uncond': 1.5964912280701755, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.21637426900584794, 'acc_per_token': 0.30994152046783624, 'acc_per_char': 0.25146198830409355, 'acc_per_byte': 0.25146198830409355, 'acc_uncond': 0.28654970760233917, 'no_answer': 0.0, 'sum_logits_corr': -14.876497522432205, 'logits_per_token_corr': -4.588303808089131, 'logits_per_char_corr': -1.2457043362044944, 'bits_per_byte_corr': 1.7940569209461075, 'correct_prob': 0.0011970191092043716, 'correct_prob_per_token': 0.037398669192669855, 'correct_prob_per_char': 0.3388141588764236, 'margin': -0.0018870031610266414, 'margin_per_token': -0.024068868232580583, 'margin_per_char': -0.07594722410585043, 'total_prob': 0.005429427574629705, 'total_prob_per_token': 0.13533915293726345, 'total_prob_per_char': 1.3334466811267582, 'uncond_correct_prob': 2.8317651887909463e-06, 'uncond_correct_prob_per_token': 0.006515671827565871, 'uncond_correct_prob_per_char': 0.2059297179572143, 'uncond_total_prob': 1.9547237485088545e-05, 'norm_correct_prob': 0.22204471985847246, 'norm_correct_prob_per_token': 0.2728395370063903, 'norm_correct_prob_per_char': 0.2513237557576629, 'primary_metric': 0.21637426900584794} |
90M | FineWeb-Pro | mmlu_world_religions | 21,250 | small aux 3 | 5xC | 6,963,200,000 | 4,092,132,261,888,000,000 | {'predicted_index_raw': 1.4444444444444444, 'predicted_index_per_token': 1.5263157894736843, 'predicted_index_per_char': 1.391812865497076, 'predicted_index_per_byte': 1.391812865497076, 'predicted_index_uncond': 1.5730994152046784, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.2573099415204678, 'acc_per_token': 0.32748538011695905, 'acc_per_char': 0.26900584795321636, 'acc_per_byte': 0.26900584795321636, 'acc_uncond': 0.26900584795321636, 'no_answer': 0.0, 'sum_logits_corr': -14.42449913178271, 'logits_per_token_corr': -4.439711866551445, 'logits_per_char_corr': -1.2096252584077256, 'bits_per_byte_corr': 1.741747174853121, 'correct_prob': 0.002692397879232936, 'correct_prob_per_token': 0.041751206153258655, 'correct_prob_per_char': 0.34991263651997484, 'margin': -0.0011982807947383556, 'margin_per_token': -0.02388165456459053, 'margin_per_char': -0.0752078319800486, 'total_prob': 0.007704434054590076, 'total_prob_per_token': 0.1457338096810178, 'total_prob_per_char': 1.380048914002401, 'uncond_correct_prob': 1.1136444231112432e-05, 'uncond_correct_prob_per_token': 0.006378861558424298, 'uncond_correct_prob_per_char': 0.2093373835769738, 'uncond_total_prob': 5.6773590799802635e-05, 'norm_correct_prob': 0.24067661752911976, 'norm_correct_prob_per_token': 0.278102872988301, 'norm_correct_prob_per_char': 0.24983945851882855, 'primary_metric': 0.2573099415204678} |
90M | FineWeb-Pro | mmlu_world_religions | 21,250 | default | 5xC | 6,963,200,000 | 4,092,132,261,888,000,000 | {'predicted_index_raw': 1.3508771929824561, 'predicted_index_per_token': 1.4385964912280702, 'predicted_index_per_char': 1.4385964912280702, 'predicted_index_per_byte': 1.6198830409356726, 'predicted_index_uncond': 1.4210526315789473, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.23976608187134502, 'acc_per_token': 0.3157894736842105, 'acc_per_char': 0.2807017543859649, 'acc_per_byte': 0.2573099415204678, 'acc_uncond': 0.2807017543859649, 'no_answer': 0.0, 'sum_logits_corr': -14.595210566158183, 'logits_per_token_corr': -4.491784118755873, 'logits_per_char_corr': -1.2198395428006594, 'logits_per_byte_corr': 1.756428174127175, 'correct_prob': 0.0014848843166815142, 'correct_prob_per_token': 0.039036861932325154, 'correct_prob_per_char': 0.3457467351202173, 'margin': -0.0019080464036353093, 'margin_per_token': -0.023192108780858485, 'margin_per_char': -0.07416080054437602, 'total_prob': 0.00558291748721822, 'total_prob_per_token': 0.13755841914239336, 'total_prob_per_char': 1.3646440137505271, 'uncond_correct_prob': 2.9601971163848123e-06, 'uncond_correct_prob_per_token': 0.006549010403065643, 'uncond_correct_prob_per_char': 0.20588917872369566, 'uncond_total_prob': 1.981947039782031e-05, 'norm_correct_prob': 0.2306978372938426, 'norm_correct_prob_per_token': 0.28031153316667207, 'norm_correct_prob_per_char': 0.25045695978218063, 'primary_metric': 0.23976608187134502} |
90M | FineWeb-Pro | mmlu_world_religions | 22,500 | small aux 2 | 5xC | 7,372,800,000 | 4,332,845,924,352,000,000 | {'predicted_index_raw': 1.3625730994152048, 'predicted_index_per_token': 1.4619883040935673, 'predicted_index_per_char': 1.5029239766081872, 'predicted_index_per_byte': 1.5029239766081872, 'predicted_index_uncond': 1.391812865497076, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.25146198830409355, 'acc_per_token': 0.30409356725146197, 'acc_per_char': 0.2982456140350877, 'acc_per_byte': 0.2982456140350877, 'acc_uncond': 0.2982456140350877, 'no_answer': 0.0, 'sum_logits_corr': -14.421177020546986, 'logits_per_token_corr': -4.4281051820331845, 'logits_per_char_corr': -1.205368967778019, 'bits_per_byte_corr': 1.735892098024442, 'correct_prob': 0.0018443038243036818, 'correct_prob_per_token': 0.04249587093967253, 'correct_prob_per_char': 0.3517801791060605, 'margin': -0.0012238395275540612, 'margin_per_token': -0.020424425893102734, 'margin_per_char': -0.06877748584378857, 'total_prob': 0.006060187583512973, 'total_prob_per_token': 0.14466265414556445, 'total_prob_per_char': 1.3761256671083264, 'uncond_correct_prob': 3.2468326690070124e-06, 'uncond_correct_prob_per_token': 0.006801466230178979, 'uncond_correct_prob_per_char': 0.20926355127788063, 'uncond_total_prob': 2.4597071473509055e-05, 'norm_correct_prob': 0.23755499307770542, 'norm_correct_prob_per_token': 0.28023951726934077, 'norm_correct_prob_per_char': 0.2520151976136715, 'primary_metric': 0.25146198830409355} |
90M | FineWeb-Pro | mmlu_world_religions | 22,500 | small aux 3 | 5xC | 7,372,800,000 | 4,332,845,924,352,000,000 | {'predicted_index_raw': 1.3157894736842106, 'predicted_index_per_token': 1.4502923976608186, 'predicted_index_per_char': 1.391812865497076, 'predicted_index_per_byte': 1.391812865497076, 'predicted_index_uncond': 1.3742690058479532, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.2573099415204678, 'acc_per_token': 0.30409356725146197, 'acc_per_char': 0.26900584795321636, 'acc_per_byte': 0.26900584795321636, 'acc_uncond': 0.30994152046783624, 'no_answer': 0.0, 'sum_logits_corr': -14.428421173179359, 'logits_per_token_corr': -4.439686270864364, 'logits_per_char_corr': -1.2124381567167666, 'bits_per_byte_corr': 1.7460976825301753, 'correct_prob': 0.0026708426487677883, 'correct_prob_per_token': 0.041648254163749766, 'correct_prob_per_char': 0.3502983314308839, 'margin': -0.001352143241693716, 'margin_per_token': -0.027313362536476756, 'margin_per_char': -0.07830590961570362, 'total_prob': 0.007838412344560713, 'total_prob_per_token': 0.15196862048844692, 'total_prob_per_char': 1.3875754124240223, 'uncond_correct_prob': 6.931742443626864e-06, 'uncond_correct_prob_per_token': 0.006486468012857501, 'uncond_correct_prob_per_char': 0.20893193298652646, 'uncond_total_prob': 3.839052048131762e-05, 'norm_correct_prob': 0.23799038196156413, 'norm_correct_prob_per_token': 0.2726607207782463, 'norm_correct_prob_per_char': 0.2489425503838679, 'primary_metric': 0.2573099415204678} |
90M | FineWeb-Pro | mmlu_world_religions | 22,500 | default | 5xC | 7,372,800,000 | 4,332,845,924,352,000,000 | {'predicted_index_raw': 1.409356725146199, 'predicted_index_per_token': 1.4678362573099415, 'predicted_index_per_char': 1.4736842105263157, 'predicted_index_per_byte': 1.5847953216374269, 'predicted_index_uncond': 1.4795321637426901, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.2222222222222222, 'acc_per_token': 0.2982456140350877, 'acc_per_char': 0.2573099415204678, 'acc_per_byte': 0.2573099415204678, 'acc_uncond': 0.29239766081871343, 'no_answer': 0.0, 'sum_logits_corr': -14.798376360831902, 'logits_per_token_corr': -4.5679310456893765, 'logits_per_char_corr': -1.2361895353865402, 'logits_per_byte_corr': 1.7797241205313914, 'correct_prob': 0.0015759320228020837, 'correct_prob_per_token': 0.03967397142382932, 'correct_prob_per_char': 0.3435540782795732, 'margin': -0.0022254288863551327, 'margin_per_token': -0.027621767165924186, 'margin_per_char': -0.07930357805224986, 'total_prob': 0.006208415918168792, 'total_prob_per_token': 0.1455002623757665, 'total_prob_per_char': 1.3646357313049848, 'uncond_correct_prob': 3.225433733847093e-06, 'uncond_correct_prob_per_token': 0.006708690683928742, 'uncond_correct_prob_per_char': 0.20879302701794325, 'uncond_total_prob': 2.5710651966480636e-05, 'norm_correct_prob': 0.2263348443388079, 'norm_correct_prob_per_token': 0.2721908081097411, 'norm_correct_prob_per_char': 0.24753036034684278, 'primary_metric': 0.2222222222222222} |
90M | FineWeb-Pro | mmlu_world_religions | 23,750 | small aux 2 | 5xC | 7,782,400,000 | 4,573,559,586,816,000,000 | {'predicted_index_raw': 1.3859649122807018, 'predicted_index_per_token': 1.5146198830409356, 'predicted_index_per_char': 1.4795321637426901, 'predicted_index_per_byte': 1.4853801169590644, 'predicted_index_uncond': 1.4736842105263157, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.23976608187134502, 'acc_per_token': 0.2982456140350877, 'acc_per_char': 0.30409356725146197, 'acc_per_byte': 0.30409356725146197, 'acc_uncond': 0.30409356725146197, 'no_answer': 0.0, 'sum_logits_corr': -14.526749270701268, 'logits_per_token_corr': -4.46179810545631, 'logits_per_char_corr': -1.210885211035201, 'bits_per_byte_corr': 1.7439010974289673, 'correct_prob': 0.0016209219749454748, 'correct_prob_per_token': 0.040728132527394906, 'correct_prob_per_char': 0.3486049516129197, 'margin': -0.0015796152573308367, 'margin_per_token': -0.024247291343274517, 'margin_per_char': -0.07363952668393829, 'total_prob': 0.0057997958245829, 'total_prob_per_token': 0.14374094775676594, 'total_prob_per_char': 1.373601861481254, 'uncond_correct_prob': 3.020377292251638e-06, 'uncond_correct_prob_per_token': 0.00727068811856818, 'uncond_correct_prob_per_char': 0.21030915034587183, 'uncond_total_prob': 1.997124796087392e-05, 'norm_correct_prob': 0.23287920729876832, 'norm_correct_prob_per_token': 0.2806871835828777, 'norm_correct_prob_per_char': 0.2507798829108989, 'primary_metric': 0.23976608187134502} |
90M | FineWeb-Pro | mmlu_world_religions | 23,750 | small aux 3 | 5xC | 7,782,400,000 | 4,573,559,586,816,000,000 | {'predicted_index_raw': 1.3742690058479532, 'predicted_index_per_token': 1.4795321637426901, 'predicted_index_per_char': 1.4210526315789473, 'predicted_index_per_byte': 1.4152046783625731, 'predicted_index_uncond': 1.4678362573099415, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.23976608187134502, 'acc_per_token': 0.3333333333333333, 'acc_per_char': 0.2807017543859649, 'acc_per_byte': 0.2807017543859649, 'acc_uncond': 0.32748538011695905, 'no_answer': 0.0, 'sum_logits_corr': -14.160958449045816, 'logits_per_token_corr': -4.368328417561689, 'logits_per_char_corr': -1.1916040616074495, 'bits_per_byte_corr': 1.7160688750380073, 'correct_prob': 0.002978419344433886, 'correct_prob_per_token': 0.04234596746191551, 'correct_prob_per_char': 0.3548698529537911, 'margin': -0.001374965012128114, 'margin_per_token': -0.024751897107945557, 'margin_per_char': -0.0744146327345798, 'total_prob': 0.008484495700078443, 'total_prob_per_token': 0.15030364429357593, 'total_prob_per_char': 1.392967837107893, 'uncond_correct_prob': 8.533078716522163e-06, 'uncond_correct_prob_per_token': 0.007156301577027687, 'uncond_correct_prob_per_char': 0.21443016540876303, 'uncond_total_prob': 4.686277452671828e-05, 'norm_correct_prob': 0.24065650635469998, 'norm_correct_prob_per_token': 0.2792918179355125, 'norm_correct_prob_per_char': 0.2516306753576523, 'primary_metric': 0.23976608187134502} |
90M | FineWeb-Pro | mmlu_world_religions | 23,750 | default | 5xC | 7,782,400,000 | 4,573,559,586,816,000,000 | {'predicted_index_raw': 1.3567251461988303, 'predicted_index_per_token': 1.5087719298245614, 'predicted_index_per_char': 1.4970760233918128, 'predicted_index_per_byte': 1.5789473684210527, 'predicted_index_uncond': 1.5204678362573099, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.26900584795321636, 'acc_per_token': 0.3216374269005848, 'acc_per_char': 0.30409356725146197, 'acc_per_byte': 0.24561403508771928, 'acc_uncond': 0.29239766081871343, 'no_answer': 0.0, 'sum_logits_corr': -14.20292996523673, 'logits_per_token_corr': -4.400784587526468, 'logits_per_char_corr': -1.1903444262574805, 'logits_per_byte_corr': 1.7141379175693328, 'correct_prob': 0.0014152873279751712, 'correct_prob_per_token': 0.04261763754019556, 'correct_prob_per_char': 0.35459160010335344, 'margin': -0.001932230497419324, 'margin_per_token': -0.02474781395075565, 'margin_per_char': -0.07293903016921857, 'total_prob': 0.00559558906151135, 'total_prob_per_token': 0.1495953755108946, 'total_prob_per_char': 1.3979946744510792, 'uncond_correct_prob': 4.275090603490834e-06, 'uncond_correct_prob_per_token': 0.007403450555412236, 'uncond_correct_prob_per_char': 0.21287932475927482, 'uncond_total_prob': 2.4740415433305734e-05, 'norm_correct_prob': 0.2394782283127633, 'norm_correct_prob_per_token': 0.28197306874962536, 'norm_correct_prob_per_char': 0.25034529947701584, 'primary_metric': 0.26900584795321636} |
90M | FineWeb-Pro | mmlu_world_religions | 25,000 | small aux 2 | 5xC | 8,192,000,000 | 4,814,273,249,280,000,000 | {'predicted_index_raw': 1.3099415204678362, 'predicted_index_per_token': 1.4970760233918128, 'predicted_index_per_char': 1.456140350877193, 'predicted_index_per_byte': 1.456140350877193, 'predicted_index_uncond': 1.4795321637426901, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.2631578947368421, 'acc_per_token': 0.3333333333333333, 'acc_per_char': 0.30409356725146197, 'acc_per_byte': 0.30409356725146197, 'acc_uncond': 0.29239766081871343, 'no_answer': 0.0, 'sum_logits_corr': -14.408418835255137, 'logits_per_token_corr': -4.435934104237618, 'logits_per_char_corr': -1.1997872384907502, 'bits_per_byte_corr': 1.7278044505451946, 'correct_prob': 0.0018618372555459055, 'correct_prob_per_token': 0.043632699986176283, 'correct_prob_per_char': 0.35349396091172064, 'margin': -0.0017022341756884943, 'margin_per_token': -0.02728566116125379, 'margin_per_char': -0.07328665743701722, 'total_prob': 0.0067137664967070335, 'total_prob_per_token': 0.15536116510992043, 'total_prob_per_char': 1.3853862592259174, 'uncond_correct_prob': 2.611771697975886e-06, 'uncond_correct_prob_per_token': 0.007474884224382176, 'uncond_correct_prob_per_char': 0.21211259666571816, 'uncond_total_prob': 2.0224130726937042e-05, 'norm_correct_prob': 0.24145694080637878, 'norm_correct_prob_per_token': 0.28631477656436743, 'norm_correct_prob_per_char': 0.25283506741378586, 'primary_metric': 0.2631578947368421} |
90M | FineWeb-Pro | mmlu_world_religions | 25,000 | small aux 3 | 5xC | 8,192,000,000 | 4,814,273,249,280,000,000 | {'predicted_index_raw': 1.2690058479532165, 'predicted_index_per_token': 1.432748538011696, 'predicted_index_per_char': 1.432748538011696, 'predicted_index_per_byte': 1.432748538011696, 'predicted_index_uncond': 1.5029239766081872, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.2631578947368421, 'acc_per_token': 0.32748538011695905, 'acc_per_char': 0.30409356725146197, 'acc_per_byte': 0.30409356725146197, 'acc_uncond': 0.3157894736842105, 'no_answer': 0.0, 'sum_logits_corr': -14.055883852361935, 'logits_per_token_corr': -4.316967692438305, 'logits_per_char_corr': -1.174618030442045, 'bits_per_byte_corr': 1.6916274296955602, 'correct_prob': 0.0030785314716664084, 'correct_prob_per_token': 0.04461331042875006, 'correct_prob_per_char': 0.3592971468404756, 'margin': -0.001037989371260754, 'margin_per_token': -0.024603795371106164, 'margin_per_char': -0.07327701927835997, 'total_prob': 0.00833197075032814, 'total_prob_per_token': 0.15568078676813396, 'total_prob_per_char': 1.4095587603521487, 'uncond_correct_prob': 7.208505503614085e-06, 'uncond_correct_prob_per_token': 0.007316806525870637, 'uncond_correct_prob_per_char': 0.2141512861089786, 'uncond_total_prob': 3.985997976073148e-05, 'norm_correct_prob': 0.24640976190975752, 'norm_correct_prob_per_token': 0.28027795853040416, 'norm_correct_prob_per_char': 0.251382911738386, 'primary_metric': 0.2631578947368421} |
90M | FineWeb-Pro | mmlu_world_religions | 25,000 | default | 5xC | 8,192,000,000 | 4,814,273,249,280,000,000 | {'predicted_index_raw': 1.3391812865497077, 'predicted_index_per_token': 1.4152046783625731, 'predicted_index_per_char': 1.4152046783625731, 'predicted_index_per_byte': 1.6198830409356726, 'predicted_index_uncond': 1.4678362573099415, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.23976608187134502, 'acc_per_token': 0.3157894736842105, 'acc_per_char': 0.26900584795321636, 'acc_per_byte': 0.24561403508771928, 'acc_uncond': 0.27485380116959063, 'no_answer': 0.0, 'sum_logits_corr': -14.293224799005609, 'logits_per_token_corr': -4.396720952146747, 'logits_per_char_corr': -1.1953101070391485, 'logits_per_byte_corr': 1.721065330332123, 'correct_prob': 0.002154593110878567, 'correct_prob_per_token': 0.043319978520529934, 'correct_prob_per_char': 0.35499610105755375, 'margin': -0.002079046709632855, 'margin_per_token': -0.02671799245566066, 'margin_per_char': -0.0767475970089286, 'total_prob': 0.007585473471889371, 'total_prob_per_token': 0.15585284723746173, 'total_prob_per_char': 1.406346889596211, 'uncond_correct_prob': 5.523688071945983e-06, 'uncond_correct_prob_per_token': 0.007081974326732693, 'uncond_correct_prob_per_char': 0.20988739964089162, 'uncond_total_prob': 3.312515686825026e-05, 'norm_correct_prob': 0.23253685031489693, 'norm_correct_prob_per_token': 0.27725242765144276, 'norm_correct_prob_per_char': 0.2490693167211365, 'primary_metric': 0.23976608187134502} |
90M | FineWeb-Pro | mmlu_world_religions | 27,500 | small aux 2 | 5xC | 9,011,200,000 | 5,295,700,574,208,000,000 | {'predicted_index_raw': 1.3801169590643274, 'predicted_index_per_token': 1.5204678362573099, 'predicted_index_per_char': 1.4444444444444444, 'predicted_index_per_byte': 1.4444444444444444, 'predicted_index_uncond': 1.4619883040935673, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.26900584795321636, 'acc_per_token': 0.34502923976608185, 'acc_per_char': 0.30994152046783624, 'acc_per_byte': 0.30994152046783624, 'acc_uncond': 0.2807017543859649, 'no_answer': 0.0, 'sum_logits_corr': -14.115238548022264, 'logits_per_token_corr': -4.3466038267356994, 'logits_per_char_corr': -1.1739855250237288, 'bits_per_byte_corr': 1.690685993377618, 'correct_prob': 0.0018494719259351698, 'correct_prob_per_token': 0.04583701662982223, 'correct_prob_per_char': 0.35976227366531405, 'margin': -0.0016166280952485512, 'margin_per_token': -0.023836683763912066, 'margin_per_char': -0.06890486869502974, 'total_prob': 0.0065379699096922185, 'total_prob_per_token': 0.1571783263546187, 'total_prob_per_char': 1.4019720077287647, 'uncond_correct_prob': 3.3192493473728104e-06, 'uncond_correct_prob_per_token': 0.007611682864946846, 'uncond_correct_prob_per_char': 0.21488009467148328, 'uncond_total_prob': 2.272526870814672e-05, 'norm_correct_prob': 0.24735319003894804, 'norm_correct_prob_per_token': 0.2890254950716765, 'norm_correct_prob_per_char': 0.25442035764831167, 'primary_metric': 0.26900584795321636} |
90M | FineWeb-Pro | mmlu_world_religions | 27,500 | small aux 3 | 5xC | 9,011,200,000 | 5,295,700,574,208,000,000 | {'predicted_index_raw': 1.4152046783625731, 'predicted_index_per_token': 1.4678362573099415, 'predicted_index_per_char': 1.432748538011696, 'predicted_index_per_byte': 1.432748538011696, 'predicted_index_uncond': 1.4444444444444444, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.2573099415204678, 'acc_per_token': 0.30994152046783624, 'acc_per_char': 0.29239766081871343, 'acc_per_byte': 0.29239766081871343, 'acc_uncond': 0.3216374269005848, 'no_answer': 0.0, 'sum_logits_corr': -13.80126715194412, 'logits_per_token_corr': -4.235411428301921, 'logits_per_char_corr': -1.1548955348338517, 'bits_per_byte_corr': 1.6629773422610632, 'correct_prob': 0.0028673260213010667, 'correct_prob_per_token': 0.046060948737336245, 'correct_prob_per_char': 0.36589860212715963, 'margin': -0.0010434768391117378, 'margin_per_token': -0.025424503845182002, 'margin_per_char': -0.07119390826824575, 'total_prob': 0.008090903062155432, 'total_prob_per_token': 0.16109645878833834, 'total_prob_per_char': 1.4318767798507266, 'uncond_correct_prob': 5.251511183167919e-06, 'uncond_correct_prob_per_token': 0.00738389684126042, 'uncond_correct_prob_per_char': 0.21567027473770936, 'uncond_total_prob': 3.0611948863870094e-05, 'norm_correct_prob': 0.2467490699182351, 'norm_correct_prob_per_token': 0.2812456720410481, 'norm_correct_prob_per_char': 0.25215502814361956, 'primary_metric': 0.2573099415204678} |
90M | FineWeb-Pro | mmlu_world_religions | 27,500 | default | 5xC | 9,011,200,000 | 5,295,700,574,208,000,000 | {'predicted_index_raw': 1.3391812865497077, 'predicted_index_per_token': 1.456140350877193, 'predicted_index_per_char': 1.4970760233918128, 'predicted_index_per_byte': 1.6666666666666667, 'predicted_index_uncond': 1.5380116959064327, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.24561403508771928, 'acc_per_token': 0.3157894736842105, 'acc_per_char': 0.2631578947368421, 'acc_per_byte': 0.22807017543859648, 'acc_uncond': 0.30994152046783624, 'no_answer': 0.0, 'sum_logits_corr': -13.915864750655771, 'logits_per_token_corr': -4.2791546916442815, 'logits_per_char_corr': -1.163228247700677, 'logits_per_byte_corr': 1.6748891112704114, 'correct_prob': 0.002643820624764585, 'correct_prob_per_token': 0.04711663719895042, 'correct_prob_per_char': 0.3645622584967814, 'margin': -0.0026392871901517326, 'margin_per_token': -0.027708449999780997, 'margin_per_char': -0.07684354438183873, 'total_prob': 0.00916644891597153, 'total_prob_per_token': 0.16592003643449016, 'total_prob_per_char': 1.4362094515013868, 'uncond_correct_prob': 5.379341731918951e-06, 'uncond_correct_prob_per_token': 0.007582353399612705, 'uncond_correct_prob_per_char': 0.2149619043645247, 'uncond_total_prob': 3.3730501960682415e-05, 'norm_correct_prob': 0.24002094465788357, 'norm_correct_prob_per_token': 0.2810597745798599, 'norm_correct_prob_per_char': 0.2508452354109235, 'primary_metric': 0.24561403508771928} |
90M | FineWeb-Pro | mmlu_world_religions | 28,750 | small aux 2 | 5xC | 9,420,800,000 | 5,536,414,236,672,000,000 | {'predicted_index_raw': 1.345029239766082, 'predicted_index_per_token': 1.4970760233918128, 'predicted_index_per_char': 1.4912280701754386, 'predicted_index_per_byte': 1.4912280701754386, 'predicted_index_uncond': 1.4619883040935673, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.26900584795321636, 'acc_per_token': 0.34502923976608185, 'acc_per_char': 0.29239766081871343, 'acc_per_byte': 0.29239766081871343, 'acc_uncond': 0.2982456140350877, 'no_answer': 0.0, 'sum_logits_corr': -13.990795965083162, 'logits_per_token_corr': -4.305448539745765, 'logits_per_char_corr': -1.164200976628785, 'bits_per_byte_corr': 1.6765740006205083, 'correct_prob': 0.0019773759116725573, 'correct_prob_per_token': 0.04765901502828128, 'correct_prob_per_char': 0.36345973209476196, 'margin': -0.0016565444332210437, 'margin_per_token': -0.024100939360946858, 'margin_per_char': -0.0691086112347125, 'total_prob': 0.0069398073122972845, 'total_prob_per_token': 0.1627626201798855, 'total_prob_per_char': 1.416585729413399, 'uncond_correct_prob': 2.819856510722099e-06, 'uncond_correct_prob_per_token': 0.007829644746003485, 'uncond_correct_prob_per_char': 0.21623461434302196, 'uncond_total_prob': 2.2086897484940056e-05, 'norm_correct_prob': 0.24799355198281664, 'norm_correct_prob_per_token': 0.28699140263122475, 'norm_correct_prob_per_char': 0.25367983249263715, 'primary_metric': 0.26900584795321636} |
90M | FineWeb-Pro | mmlu_world_religions | 28,750 | small aux 3 | 5xC | 9,420,800,000 | 5,536,414,236,672,000,000 | {'predicted_index_raw': 1.3333333333333333, 'predicted_index_per_token': 1.4912280701754386, 'predicted_index_per_char': 1.3859649122807018, 'predicted_index_per_byte': 1.3859649122807018, 'predicted_index_uncond': 1.4385964912280702, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.2631578947368421, 'acc_per_token': 0.3157894736842105, 'acc_per_char': 0.28654970760233917, 'acc_per_byte': 0.28654970760233917, 'acc_uncond': 0.2982456140350877, 'no_answer': 0.0, 'sum_logits_corr': -13.785472927037736, 'logits_per_token_corr': -4.229182601706431, 'logits_per_char_corr': -1.1536251378800797, 'bits_per_byte_corr': 1.6611491056098315, 'correct_prob': 0.0030169919020990557, 'correct_prob_per_token': 0.04638408738771343, 'correct_prob_per_char': 0.3663013385511082, 'margin': -0.0012785186267183725, 'margin_per_token': -0.027090369712684956, 'margin_per_char': -0.07310412981879967, 'total_prob': 0.008587180296623491, 'total_prob_per_token': 0.16394124282047748, 'total_prob_per_char': 1.4355298101470213, 'uncond_correct_prob': 4.9416140715705565e-06, 'uncond_correct_prob_per_token': 0.007575007568058831, 'uncond_correct_prob_per_char': 0.21699347539359756, 'uncond_total_prob': 3.0624881570731254e-05, 'norm_correct_prob': 0.24770006527104266, 'norm_correct_prob_per_token': 0.28141457769614703, 'norm_correct_prob_per_char': 0.2518144016247412, 'primary_metric': 0.2631578947368421} |
90M | FineWeb-Pro | mmlu_world_religions | 28,750 | default | 5xC | 9,420,800,000 | 5,536,414,236,672,000,000 | {'predicted_index_raw': 1.3508771929824561, 'predicted_index_per_token': 1.4269005847953216, 'predicted_index_per_char': 1.4678362573099415, 'predicted_index_per_byte': 1.5964912280701755, 'predicted_index_uncond': 1.4502923976608186, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.23976608187134502, 'acc_per_token': 0.3157894736842105, 'acc_per_char': 0.2807017543859649, 'acc_per_byte': 0.25146198830409355, 'acc_uncond': 0.3157894736842105, 'no_answer': 0.0, 'sum_logits_corr': -13.828651044800965, 'logits_per_token_corr': -4.254742621283774, 'logits_per_char_corr': -1.1564067887757692, 'logits_per_byte_corr': 1.6651171019639306, 'correct_prob': 0.00249512418506234, 'correct_prob_per_token': 0.04844063081976474, 'correct_prob_per_char': 0.3671402447886115, 'margin': -0.002266324841499922, 'margin_per_token': -0.02675002180112167, 'margin_per_char': -0.07404530571793064, 'total_prob': 0.008546105658464687, 'total_prob_per_token': 0.16840342284320636, 'total_prob_per_char': 1.4445433632860083, 'uncond_correct_prob': 4.27862336844089e-06, 'uncond_correct_prob_per_token': 0.0075977688841449564, 'uncond_correct_prob_per_char': 0.21371482094099167, 'uncond_total_prob': 2.74870578796796e-05, 'norm_correct_prob': 0.24182772036737815, 'norm_correct_prob_per_token': 0.2815162131824431, 'norm_correct_prob_per_char': 0.2508922714010882, 'primary_metric': 0.23976608187134502} |
90M | FineWeb-Pro | mmlu_world_religions | 29,901 | small aux 2 | 5xC | 9,797,959,680 | 5,758,063,377,068,851,000 | {'predicted_index_raw': 1.3333333333333333, 'predicted_index_per_token': 1.4970760233918128, 'predicted_index_per_char': 1.3742690058479532, 'predicted_index_per_byte': 1.3742690058479532, 'predicted_index_uncond': 1.4269005847953216, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.27485380116959063, 'acc_per_token': 0.3391812865497076, 'acc_per_char': 0.28654970760233917, 'acc_per_byte': 0.28654970760233917, 'acc_uncond': 0.28654970760233917, 'no_answer': 0.0, 'sum_logits_corr': -13.97867993165178, 'logits_per_token_corr': -4.296596700280949, 'logits_per_char_corr': -1.1629692229320774, 'bits_per_byte_corr': 1.6748304555219673, 'correct_prob': 0.002040134994132104, 'correct_prob_per_token': 0.047118243175431354, 'correct_prob_per_char': 0.3635561234458885, 'margin': -0.0016807381563587963, 'margin_per_token': -0.024875410882859637, 'margin_per_char': -0.06997232228029861, 'total_prob': 0.007086668681894939, 'total_prob_per_token': 0.16301997404656976, 'total_prob_per_char': 1.4191909109689718, 'uncond_correct_prob': 3.0412317133273337e-06, 'uncond_correct_prob_per_token': 0.007958692570680066, 'uncond_correct_prob_per_char': 0.21685684517521867, 'uncond_total_prob': 2.2568324339522333e-05, 'norm_correct_prob': 0.24437093730086565, 'norm_correct_prob_per_token': 0.2856941042899367, 'norm_correct_prob_per_char': 0.25354911688576026, 'primary_metric': 0.27485380116959063} |
90M | FineWeb-Pro | mmlu_world_religions | 29,901 | small aux 3 | 5xC | 9,797,959,680 | 5,758,063,377,068,851,000 | {'predicted_index_raw': 1.3859649122807018, 'predicted_index_per_token': 1.5029239766081872, 'predicted_index_per_char': 1.4269005847953216, 'predicted_index_per_byte': 1.4210526315789473, 'predicted_index_uncond': 1.5029239766081872, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.25146198830409355, 'acc_per_token': 0.30409356725146197, 'acc_per_char': 0.30409356725146197, 'acc_per_byte': 0.30409356725146197, 'acc_uncond': 0.26900584795321636, 'no_answer': 0.0, 'sum_logits_corr': -13.659256229623717, 'logits_per_token_corr': -4.193699571368217, 'logits_per_char_corr': -1.144265454862218, 'bits_per_byte_corr': 1.647703231808063, 'correct_prob': 0.002962764836280774, 'correct_prob_per_token': 0.047493197660596335, 'correct_prob_per_char': 0.36951103932934826, 'margin': -0.0012047015187487465, 'margin_per_token': -0.026618958002482027, 'margin_per_char': -0.07128178137090323, 'total_prob': 0.008448322524545512, 'total_prob_per_token': 0.16679491013337835, 'total_prob_per_char': 1.4455506825338729, 'uncond_correct_prob': 5.311821943843723e-06, 'uncond_correct_prob_per_token': 0.0075981552247081675, 'uncond_correct_prob_per_char': 0.21684873594296025, 'uncond_total_prob': 3.036191841293459e-05, 'norm_correct_prob': 0.24807176759576988, 'norm_correct_prob_per_token': 0.28150393752470293, 'norm_correct_prob_per_char': 0.25213460138808247, 'primary_metric': 0.25146198830409355} |
90M | FineWeb-Pro | mmlu_world_religions | 29,901 | default | 5xC | 9,797,959,680 | 5,758,063,377,068,851,000 | {'predicted_index_raw': 1.3333333333333333, 'predicted_index_per_token': 1.4385964912280702, 'predicted_index_per_char': 1.4210526315789473, 'predicted_index_per_byte': 1.6374269005847952, 'predicted_index_uncond': 1.5263157894736843, 'correct_choice': 1.3216374269005848, 'acc_raw': 0.23391812865497075, 'acc_per_token': 0.2982456140350877, 'acc_per_char': 0.28654970760233917, 'acc_per_byte': 0.23391812865497075, 'acc_uncond': 0.29239766081871343, 'no_answer': 0.0, 'sum_logits_corr': -13.861789043883832, 'logits_per_token_corr': -4.260177755037301, 'logits_per_char_corr': -1.1591228425208662, 'logits_per_byte_corr': 1.6689528451710363, 'correct_prob': 0.002536895906402849, 'correct_prob_per_token': 0.04822995423659005, 'correct_prob_per_char': 0.36670351673112994, 'margin': -0.002373782740330825, 'margin_per_token': -0.026943916128182657, 'margin_per_char': -0.07474052532028477, 'total_prob': 0.008742518856622167, 'total_prob_per_token': 0.16820015151805082, 'total_prob_per_char': 1.4438118169852785, 'uncond_correct_prob': 4.660888125867902e-06, 'uncond_correct_prob_per_token': 0.00743769678998727, 'uncond_correct_prob_per_char': 0.2124089503028578, 'uncond_total_prob': 2.7853329909113434e-05, 'norm_correct_prob': 0.24143381221721238, 'norm_correct_prob_per_token': 0.28114425532399745, 'norm_correct_prob_per_char': 0.2505549985223396, 'primary_metric': 0.23391812865497075} |
90M | FineWeb-Pro | openbookqa | 0 | small aux 2 | 5xC | 0 | 0 | {'predicted_index_raw': 1.584, 'predicted_index_per_token': 1.5, 'predicted_index_per_char': 1.564, 'predicted_index_per_byte': 1.564, 'predicted_index_uncond': 1.506, 'correct_choice': 1.404, 'acc_raw': 0.156, 'acc_per_token': 0.25, 'acc_per_char': 0.276, 'acc_per_byte': 0.276, 'acc_uncond': 0.254, 'no_answer': 0.0, 'sum_logits_corr': -41.585481206893924, 'logits_per_token_corr': -10.923995176174671, 'logits_per_char_corr': -2.229170820859721, 'bits_per_byte_corr': 3.2160136885510098, 'correct_prob': 4.122442427439258e-06, 'correct_prob_per_token': 1.8985604200482566e-05, 'correct_prob_per_char': 0.14066834176589355, 'margin': -5.442998434639735e-06, 'margin_per_token': -5.117543966870787e-06, 'margin_per_char': -0.04919778243535624, 'total_prob': 2.060834545107081e-05, 'total_prob_per_token': 7.667203838845801e-05, 'total_prob_per_char': 0.5556767423404071, 'uncond_correct_prob': 3.983596149955655e-06, 'uncond_correct_prob_per_token': 1.8845295373734396e-05, 'uncond_correct_prob_per_char': 0.14065476477862832, 'uncond_total_prob': 1.9708652324325048e-05, 'norm_correct_prob': 0.15892884271873836, 'norm_correct_prob_per_token': 0.2478929790192008, 'norm_correct_prob_per_char': 0.24736542788583524, 'primary_metric': 0.254} |
90M | FineWeb-Pro | openbookqa | 0 | small aux 3 | 5xC | 0 | 0 | {'predicted_index_raw': 1.502, 'predicted_index_per_token': 1.422, 'predicted_index_per_char': 1.5, 'predicted_index_per_byte': 1.5, 'predicted_index_uncond': 1.378, 'correct_choice': 1.404, 'acc_raw': 0.144, 'acc_per_token': 0.238, 'acc_per_char': 0.286, 'acc_per_byte': 0.286, 'acc_uncond': 0.248, 'no_answer': 0.0, 'sum_logits_corr': -41.62327026939392, 'logits_per_token_corr': -10.9319485480692, 'logits_per_char_corr': -2.2326367276894854, 'bits_per_byte_corr': 3.2210139351464977, 'correct_prob': 3.898622606172354e-06, 'correct_prob_per_token': 1.8615070823933117e-05, 'correct_prob_per_char': 0.14050487593387487, 'margin': -5.323173242297608e-06, 'margin_per_token': -4.841747070559768e-06, 'margin_per_char': -0.04836682239972026, 'total_prob': 2.0112198865996224e-05, 'total_prob_per_token': 7.510479773564663e-05, 'total_prob_per_char': 0.5539455355572289, 'uncond_correct_prob': 4.060876680705528e-06, 'uncond_correct_prob_per_token': 1.7914369774353913e-05, 'uncond_correct_prob_per_char': 0.1394386358379211, 'uncond_total_prob': 1.919646772620334e-05, 'norm_correct_prob': 0.1618036543842497, 'norm_correct_prob_per_token': 0.2486906618288445, 'norm_correct_prob_per_char': 0.2478850870763878, 'primary_metric': 0.248} |
90M | FineWeb-Pro | openbookqa | 0 | default | 5xC | 0 | 0 | {'predicted_index_raw': 1.604, 'predicted_index_per_token': 1.522, 'predicted_index_per_char': 1.55, 'predicted_index_per_byte': 1.474, 'predicted_index_uncond': 1.516, 'correct_choice': 1.404, 'acc_raw': 0.188, 'acc_per_token': 0.252, 'acc_per_char': 0.28, 'acc_per_byte': 0.27, 'acc_uncond': 0.25, 'no_answer': 0.0, 'sum_logits_corr': -41.64579520606995, 'logits_per_token_corr': -10.935844894821201, 'logits_per_char_corr': -2.2302226565387135, 'logits_per_byte_corr': 3.2175311666689237, 'correct_prob': 4.0669373811540846e-06, 'correct_prob_per_token': 1.870535679338645e-05, 'correct_prob_per_char': 0.14021184873574116, 'margin': -5.058141811893041e-06, 'margin_per_token': -4.775944670431008e-06, 'margin_per_char': -0.04871719732432419, 'total_prob': 2.0434138767060828e-05, 'total_prob_per_token': 7.555198690587703e-05, 'total_prob_per_char': 0.5541185474650129, 'uncond_correct_prob': 4.168154425475185e-06, 'uncond_correct_prob_per_token': 1.8705796556693423e-05, 'uncond_correct_prob_per_char': 0.14035245953833062, 'uncond_total_prob': 2.049647100815e-05, 'norm_correct_prob': 0.16691451731940143, 'norm_correct_prob_per_token': 0.24813665325322762, 'norm_correct_prob_per_char': 0.24775714176440722, 'primary_metric': 0.25} |
90M | FineWeb-Pro | openbookqa | 1,250 | small aux 2 | 5xC | 409,600,000 | 240,713,662,464,000,000 | {'predicted_index_raw': 1.502, 'predicted_index_per_token': 1.448, 'predicted_index_per_char': 1.568, 'predicted_index_per_byte': 1.568, 'predicted_index_uncond': 1.46, 'correct_choice': 1.404, 'acc_raw': 0.122, 'acc_per_token': 0.21, 'acc_per_char': 0.224, 'acc_per_byte': 0.224, 'acc_uncond': 0.232, 'no_answer': 0.0, 'sum_logits_corr': -22.55113637161255, 'logits_per_token_corr': -6.852262725492499, 'logits_per_char_corr': -1.3200856134178327, 'bits_per_byte_corr': 1.9044809680281407, 'correct_prob': 0.00015158967841115623, 'correct_prob_per_token': 0.005164345573444461, 'correct_prob_per_char': 0.29681591549175185, 'margin': -0.00046476088706693617, 'margin_per_token': -0.00736763735712847, 'margin_per_char': -0.08318089039370391, 'total_prob': 0.0008516127444237599, 'total_prob_per_token': 0.024719878273905965, 'total_prob_per_char': 1.2103434784273763, 'uncond_correct_prob': 9.852848979196431e-06, 'uncond_correct_prob_per_token': 0.0017911825336414222, 'uncond_correct_prob_per_char': 0.22897379286557154, 'uncond_total_prob': 2.898229078958896e-05, 'norm_correct_prob': 0.12747740763985227, 'norm_correct_prob_per_token': 0.23221274904596784, 'norm_correct_prob_per_char': 0.24434768900707113, 'primary_metric': 0.232} |
90M | FineWeb-Pro | openbookqa | 1,250 | small aux 3 | 5xC | 409,600,000 | 240,713,662,464,000,000 | {'predicted_index_raw': 1.544, 'predicted_index_per_token': 1.498, 'predicted_index_per_char': 1.528, 'predicted_index_per_byte': 1.528, 'predicted_index_uncond': 1.528, 'correct_choice': 1.404, 'acc_raw': 0.116, 'acc_per_token': 0.202, 'acc_per_char': 0.218, 'acc_per_byte': 0.218, 'acc_uncond': 0.264, 'no_answer': 0.0, 'sum_logits_corr': -22.446445271492003, 'logits_per_token_corr': -6.778267162487493, 'logits_per_char_corr': -1.306326897338522, 'bits_per_byte_corr': 1.8846313365715055, 'correct_prob': 0.00010770428847577915, 'correct_prob_per_token': 0.0054383467719437955, 'correct_prob_per_char': 0.2996849750454324, 'margin': -0.0004534961426371281, 'margin_per_token': -0.007380636019747131, 'margin_per_char': -0.0805437889345748, 'total_prob': 0.0007943663521774059, 'total_prob_per_token': 0.025415808582304772, 'total_prob_per_char': 1.2240938004776563, 'uncond_correct_prob': 7.251477592288811e-06, 'uncond_correct_prob_per_token': 0.0016584281473007352, 'uncond_correct_prob_per_char': 0.2339097093650734, 'uncond_total_prob': 2.8507447021958556e-05, 'norm_correct_prob': 0.12859182053343676, 'norm_correct_prob_per_token': 0.23452766141636439, 'norm_correct_prob_per_char': 0.24446780333058404, 'primary_metric': 0.264} |
90M | FineWeb-Pro | openbookqa | 1,250 | default | 5xC | 409,600,000 | 240,713,662,464,000,000 | {'predicted_index_raw': 1.56, 'predicted_index_per_token': 1.522, 'predicted_index_per_char': 1.548, 'predicted_index_per_byte': 1.512, 'predicted_index_uncond': 1.464, 'correct_choice': 1.404, 'acc_raw': 0.12, 'acc_per_token': 0.22, 'acc_per_char': 0.226, 'acc_per_byte': 0.258, 'acc_uncond': 0.274, 'no_answer': 0.0, 'sum_logits_corr': -22.919330923080445, 'logits_per_token_corr': -6.916875274947478, 'logits_per_char_corr': -1.3358646054403585, 'logits_per_byte_corr': 1.9272452415692818, 'correct_prob': 0.0001340609057920537, 'correct_prob_per_token': 0.004916366410328838, 'correct_prob_per_char': 0.2929764200670471, 'margin': -0.0004238499688653331, 'margin_per_token': -0.006506645152835425, 'margin_per_char': -0.0797954806111405, 'total_prob': 0.0007656488140085103, 'total_prob_per_token': 0.02272506914196771, 'total_prob_per_char': 1.1889033709524164, 'uncond_correct_prob': 4.936049622264514e-06, 'uncond_correct_prob_per_token': 0.0016947314489274851, 'uncond_correct_prob_per_char': 0.22391511824502106, 'uncond_total_prob': 2.0389349423330013e-05, 'norm_correct_prob': 0.13185796219014748, 'norm_correct_prob_per_token': 0.2409913496597902, 'norm_correct_prob_per_char': 0.24550120416282947, 'primary_metric': 0.274} |
90M | FineWeb-Pro | openbookqa | 2,500 | small aux 2 | 5xC | 819,200,000 | 481,427,324,928,000,000 | {'predicted_index_raw': 1.542, 'predicted_index_per_token': 1.466, 'predicted_index_per_char': 1.482, 'predicted_index_per_byte': 1.482, 'predicted_index_uncond': 1.442, 'correct_choice': 1.404, 'acc_raw': 0.12, 'acc_per_token': 0.218, 'acc_per_char': 0.218, 'acc_per_byte': 0.218, 'acc_uncond': 0.266, 'no_answer': 0.0, 'sum_logits_corr': -21.815967455387117, 'logits_per_token_corr': -6.661819671098303, 'logits_per_char_corr': -1.283158448109847, 'bits_per_byte_corr': 1.8512063297641843, 'correct_prob': 0.00020613404962418715, 'correct_prob_per_token': 0.006127133816308538, 'correct_prob_per_char': 0.3080377207544481, 'margin': -0.0006081526016702465, 'margin_per_token': -0.00817460899426739, 'margin_per_char': -0.08341021934819678, 'total_prob': 0.0011575364927995783, 'total_prob_per_token': 0.028801046191549533, 'total_prob_per_char': 1.2502696223268013, 'uncond_correct_prob': 7.90638421348833e-06, 'uncond_correct_prob_per_token': 0.0023893521619026093, 'uncond_correct_prob_per_char': 0.24628605919520447, 'uncond_total_prob': 3.0375037923637175e-05, 'norm_correct_prob': 0.13266843533847741, 'norm_correct_prob_per_token': 0.2348547527628754, 'norm_correct_prob_per_char': 0.24530773532248099, 'primary_metric': 0.266} |
90M | FineWeb-Pro | openbookqa | 2,500 | small aux 3 | 5xC | 819,200,000 | 481,427,324,928,000,000 | {'predicted_index_raw': 1.508, 'predicted_index_per_token': 1.472, 'predicted_index_per_char': 1.552, 'predicted_index_per_byte': 1.552, 'predicted_index_uncond': 1.432, 'correct_choice': 1.404, 'acc_raw': 0.102, 'acc_per_token': 0.19, 'acc_per_char': 0.22, 'acc_per_byte': 0.22, 'acc_uncond': 0.256, 'no_answer': 0.0, 'sum_logits_corr': -21.836268607139587, 'logits_per_token_corr': -6.66460556194444, 'logits_per_char_corr': -1.2796010320518867, 'bits_per_byte_corr': 1.8460740632589825, 'correct_prob': 0.00014863969956808404, 'correct_prob_per_token': 0.006307929117461782, 'correct_prob_per_char': 0.3075859291322966, 'margin': -0.0008801382983146829, 'margin_per_token': -0.009017362535975338, 'margin_per_char': -0.08826257479245463, 'total_prob': 0.001331783298802681, 'total_prob_per_token': 0.03013417165490964, 'total_prob_per_char': 1.262095252309855, 'uncond_correct_prob': 7.100386056247155e-06, 'uncond_correct_prob_per_token': 0.002236683189873287, 'uncond_correct_prob_per_char': 0.2449953835315943, 'uncond_total_prob': 2.9249691351837392e-05, 'norm_correct_prob': 0.12339876132010116, 'norm_correct_prob_per_token': 0.23196405508294207, 'norm_correct_prob_per_char': 0.24383124950755444, 'primary_metric': 0.256} |
90M | FineWeb-Pro | openbookqa | 2,500 | default | 5xC | 819,200,000 | 481,427,324,928,000,000 | {'predicted_index_raw': 1.508, 'predicted_index_per_token': 1.45, 'predicted_index_per_char': 1.516, 'predicted_index_per_byte': 1.546, 'predicted_index_uncond': 1.418, 'correct_choice': 1.404, 'acc_raw': 0.138, 'acc_per_token': 0.224, 'acc_per_char': 0.242, 'acc_per_byte': 0.282, 'acc_uncond': 0.28, 'no_answer': 0.0, 'sum_logits_corr': -21.399845056533813, 'logits_per_token_corr': -6.448103928539726, 'logits_per_char_corr': -1.247687871290966, 'logits_per_byte_corr': 1.8000331044900773, 'correct_prob': 0.00030002676546722173, 'correct_prob_per_token': 0.007103792249726698, 'correct_prob_per_char': 0.31884494756546367, 'margin': -0.0005564015829214959, 'margin_per_token': -0.007633626530070234, 'margin_per_char': -0.07943392833049419, 'total_prob': 0.0013894554866573131, 'total_prob_per_token': 0.030573578969451686, 'total_prob_per_char': 1.2887938123668181, 'uncond_correct_prob': 6.455775325522047e-06, 'uncond_correct_prob_per_token': 0.0021969867843945673, 'uncond_correct_prob_per_char': 0.241066210730311, 'uncond_total_prob': 2.910950503066054e-05, 'norm_correct_prob': 0.14315297651818287, 'norm_correct_prob_per_token': 0.24018765641719494, 'norm_correct_prob_per_char': 0.24614584307720758, 'primary_metric': 0.28} |
90M | FineWeb-Pro | openbookqa | 3,750 | small aux 2 | 5xC | 1,228,800,000 | 722,140,987,392,000,000 | {'predicted_index_raw': 1.534, 'predicted_index_per_token': 1.544, 'predicted_index_per_char': 1.554, 'predicted_index_per_byte': 1.554, 'predicted_index_uncond': 1.514, 'correct_choice': 1.404, 'acc_raw': 0.122, 'acc_per_token': 0.23, 'acc_per_char': 0.216, 'acc_per_byte': 0.216, 'acc_uncond': 0.28, 'no_answer': 0.0, 'sum_logits_corr': -22.25384042453766, 'logits_per_token_corr': -6.939440874489723, 'logits_per_char_corr': -1.3335533323374815, 'bits_per_byte_corr': 1.9239107793256185, 'correct_prob': 6.839982293591653e-05, 'correct_prob_per_token': 0.005644539264879901, 'correct_prob_per_char': 0.29631485151707476, 'margin': -0.00016573449215324135, 'margin_per_token': -0.006860058517858496, 'margin_per_char': -0.07840091133107889, 'total_prob': 0.0003635574307108394, 'total_prob_per_token': 0.02551752238883247, 'total_prob_per_char': 1.2000963103018016, 'uncond_correct_prob': 4.339389976513786e-06, 'uncond_correct_prob_per_token': 0.0018135545414746065, 'uncond_correct_prob_per_char': 0.22448390928480574, 'uncond_total_prob': 1.6181758984563678e-05, 'norm_correct_prob': 0.1287669788603936, 'norm_correct_prob_per_token': 0.23641096296850625, 'norm_correct_prob_per_char': 0.24595910133362747, 'primary_metric': 0.28} |
90M | FineWeb-Pro | openbookqa | 3,750 | small aux 3 | 5xC | 1,228,800,000 | 722,140,987,392,000,000 | {'predicted_index_raw': 1.52, 'predicted_index_per_token': 1.5, 'predicted_index_per_char': 1.504, 'predicted_index_per_byte': 1.504, 'predicted_index_uncond': 1.486, 'correct_choice': 1.404, 'acc_raw': 0.116, 'acc_per_token': 0.212, 'acc_per_char': 0.228, 'acc_per_byte': 0.228, 'acc_uncond': 0.264, 'no_answer': 0.0, 'sum_logits_corr': -21.256048666000368, 'logits_per_token_corr': -6.466080228582406, 'logits_per_char_corr': -1.240986461072744, 'bits_per_byte_corr': 1.7903650132012783, 'correct_prob': 0.00020390633728004375, 'correct_prob_per_token': 0.007336181525108797, 'correct_prob_per_char': 0.3180462599284427, 'margin': -0.001129736475522297, 'margin_per_token': -0.0096294019516033, 'margin_per_char': -0.08867486497425645, 'total_prob': 0.001726279551970651, 'total_prob_per_token': 0.034090336438426606, 'total_prob_per_char': 1.301342218822794, 'uncond_correct_prob': 5.6513375700686005e-06, 'uncond_correct_prob_per_token': 0.002213388891919561, 'uncond_correct_prob_per_char': 0.24039541977551157, 'uncond_total_prob': 2.1894700766173213e-05, 'norm_correct_prob': 0.12407792290556867, 'norm_correct_prob_per_token': 0.23098072370653405, 'norm_correct_prob_per_char': 0.24418045311496284, 'primary_metric': 0.264} |
90M | FineWeb-Pro | openbookqa | 3,750 | default | 5xC | 1,228,800,000 | 722,140,987,392,000,000 | {'predicted_index_raw': 1.522, 'predicted_index_per_token': 1.46, 'predicted_index_per_char': 1.496, 'predicted_index_per_byte': 1.564, 'predicted_index_uncond': 1.494, 'correct_choice': 1.404, 'acc_raw': 0.124, 'acc_per_token': 0.222, 'acc_per_char': 0.214, 'acc_per_byte': 0.278, 'acc_uncond': 0.274, 'no_answer': 0.0, 'sum_logits_corr': -21.02560558605194, 'logits_per_token_corr': -6.3412360069146505, 'logits_per_char_corr': -1.222573073430942, 'logits_per_byte_corr': 1.7638001101644654, 'correct_prob': 0.0002591248083030571, 'correct_prob_per_token': 0.007390804035692896, 'correct_prob_per_char': 0.3235868452948291, 'margin': -0.000980512091602433, 'margin_per_token': -0.008752606484117907, 'margin_per_char': -0.08390350722510517, 'total_prob': 0.001757934995684827, 'total_prob_per_token': 0.03285884901145718, 'total_prob_per_char': 1.3112633532280327, 'uncond_correct_prob': 4.952110294473205e-06, 'uncond_correct_prob_per_token': 0.0020442175616577902, 'uncond_correct_prob_per_char': 0.23384251558958727, 'uncond_total_prob': 2.033145351838058e-05, 'norm_correct_prob': 0.13027891030608854, 'norm_correct_prob_per_token': 0.24257972629270225, 'norm_correct_prob_per_char': 0.24648243638688466, 'primary_metric': 0.274} |
90M | FineWeb-Pro | openbookqa | 6,250 | small aux 2 | 5xC | 2,048,000,000 | 1,203,568,312,320,000,000 | {'predicted_index_raw': 1.502, 'predicted_index_per_token': 1.462, 'predicted_index_per_char': 1.552, 'predicted_index_per_byte': 1.552, 'predicted_index_uncond': 1.466, 'correct_choice': 1.404, 'acc_raw': 0.142, 'acc_per_token': 0.236, 'acc_per_char': 0.238, 'acc_per_byte': 0.238, 'acc_uncond': 0.276, 'no_answer': 0.0, 'sum_logits_corr': -20.96873089981079, 'logits_per_token_corr': -6.363607889075388, 'logits_per_char_corr': -1.224658332414623, 'bits_per_byte_corr': 1.7668085029591931, 'correct_prob': 0.00026804902426541565, 'correct_prob_per_token': 0.008100995942672487, 'correct_prob_per_char': 0.3236379842996321, 'margin': -0.0007115107326224606, 'margin_per_token': -0.008700012182098853, 'margin_per_char': -0.07946280282045745, 'total_prob': 0.0014497322768389196, 'total_prob_per_token': 0.034841905488341166, 'total_prob_per_char': 1.3084534004253365, 'uncond_correct_prob': 4.685601350066101e-06, 'uncond_correct_prob_per_token': 0.002226097135612149, 'uncond_correct_prob_per_char': 0.23134412084594685, 'uncond_total_prob': 2.0626286293402514e-05, 'norm_correct_prob': 0.14496683970736696, 'norm_correct_prob_per_token': 0.24376818137819858, 'norm_correct_prob_per_char': 0.2464301034211814, 'primary_metric': 0.276} |
90M | FineWeb-Pro | openbookqa | 6,250 | small aux 3 | 5xC | 2,048,000,000 | 1,203,568,312,320,000,000 | {'predicted_index_raw': 1.518, 'predicted_index_per_token': 1.524, 'predicted_index_per_char': 1.544, 'predicted_index_per_byte': 1.544, 'predicted_index_uncond': 1.474, 'correct_choice': 1.404, 'acc_raw': 0.122, 'acc_per_token': 0.236, 'acc_per_char': 0.228, 'acc_per_byte': 0.228, 'acc_uncond': 0.284, 'no_answer': 0.0, 'sum_logits_corr': -20.89834846496582, 'logits_per_token_corr': -6.410600215111143, 'logits_per_char_corr': -1.2331758733349985, 'bits_per_byte_corr': 1.779096717005597, 'correct_prob': 0.0002004270970187676, 'correct_prob_per_token': 0.0087501145710138, 'correct_prob_per_char': 0.32376740438111856, 'margin': -0.0007046937413346685, 'margin_per_token': -0.008922901017412256, 'margin_per_char': -0.08462287407020672, 'total_prob': 0.0013200323008142564, 'total_prob_per_token': 0.03678151618825092, 'total_prob_per_char': 1.3097729118840258, 'uncond_correct_prob': 4.466969665790973e-06, 'uncond_correct_prob_per_token': 0.0022668183407005002, 'uncond_correct_prob_per_char': 0.23532515280203226, 'uncond_total_prob': 1.799112488146145e-05, 'norm_correct_prob': 0.13719422326562344, 'norm_correct_prob_per_token': 0.240200310853121, 'norm_correct_prob_per_char': 0.2463119256943626, 'primary_metric': 0.284} |
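A minimal parsing sketch, assuming the rows above are exported as pipe-separated text with the final `metrics` column stored as a Python-literal dict (single-quoted keys). The `row` string below is an abridged copy of the last row above, keeping only a few metric keys for brevity; field names and the helper variable names are illustrative, not part of the dataset schema.

```python
import ast

# Abridged copy of the last row above: pipe-separated columns,
# with the final column being a Python-style dict of metrics.
row = ("90M | FineWeb-Pro | openbookqa | 6,250 | small aux 3 | 5xC | "
       "2,048,000,000 | 1,203,568,312,320,000,000 | "
       "{'acc_raw': 0.122, 'acc_uncond': 0.284, 'primary_metric': 0.284}")

# Split on the first 8 pipes so the metrics dict stays intact as the 9th field.
fields = [f.strip() for f in row.split("|", 8)]
params, data, task, step, seed, chinchilla, tokens, compute, metrics_str = fields

# The metrics field uses Python literal syntax (single quotes),
# so ast.literal_eval is the safe way to turn it into a dict.
metrics = ast.literal_eval(metrics_str)

print(task, int(step.replace(",", "")), metrics["primary_metric"])
# -> openbookqa 6250 0.284
```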