{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.998691442030882, |
|
"eval_steps": 500, |
|
"global_step": 477, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.010468463752944255, |
|
"grad_norm": 3.843380266792351, |
|
"learning_rate": 5.208333333333333e-08, |
|
"logits/chosen": -2.8176448345184326, |
|
"logits/rejected": -2.775940418243408, |
|
"logps/chosen": -1.0736682415008545, |
|
"logps/rejected": -1.1332499980926514, |
|
"loss": -0.0244, |
|
"rewards/accuracies": 0.512499988079071, |
|
"rewards/chosen": -1.0736682415008545, |
|
"rewards/margins": 0.05958189815282822, |
|
"rewards/rejected": -1.1332499980926514, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.02093692750588851, |
|
"grad_norm": 4.13922138458207, |
|
"learning_rate": 1.0416666666666667e-07, |
|
"logits/chosen": -2.9982171058654785, |
|
"logits/rejected": -2.952279806137085, |
|
"logps/chosen": -1.052896499633789, |
|
"logps/rejected": -1.194782018661499, |
|
"loss": -0.0274, |
|
"rewards/accuracies": 0.5625, |
|
"rewards/chosen": -1.052896499633789, |
|
"rewards/margins": 0.1418854296207428, |
|
"rewards/rejected": -1.194782018661499, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.031405391258832765, |
|
"grad_norm": 4.388036242315528, |
|
"learning_rate": 1.5624999999999999e-07, |
|
"logits/chosen": -2.905001401901245, |
|
"logits/rejected": -2.881582021713257, |
|
"logps/chosen": -1.0749495029449463, |
|
"logps/rejected": -1.1714584827423096, |
|
"loss": -0.0311, |
|
"rewards/accuracies": 0.5375000238418579, |
|
"rewards/chosen": -1.0749495029449463, |
|
"rewards/margins": 0.09650889784097672, |
|
"rewards/rejected": -1.1714584827423096, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.04187385501177702, |
|
"grad_norm": 4.170757799961158, |
|
"learning_rate": 2.0833333333333333e-07, |
|
"logits/chosen": -2.9079184532165527, |
|
"logits/rejected": -2.850598096847534, |
|
"logps/chosen": -0.9996750950813293, |
|
"logps/rejected": -1.0603716373443604, |
|
"loss": -0.0326, |
|
"rewards/accuracies": 0.512499988079071, |
|
"rewards/chosen": -0.9996750950813293, |
|
"rewards/margins": 0.060696445405483246, |
|
"rewards/rejected": -1.0603716373443604, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.05234231876472128, |
|
"grad_norm": 4.2140301502962565, |
|
"learning_rate": 2.604166666666667e-07, |
|
"logits/chosen": -2.9368786811828613, |
|
"logits/rejected": -2.880016565322876, |
|
"logps/chosen": -0.8968712687492371, |
|
"logps/rejected": -1.018384337425232, |
|
"loss": -0.0413, |
|
"rewards/accuracies": 0.5625, |
|
"rewards/chosen": -0.8968712687492371, |
|
"rewards/margins": 0.12151306867599487, |
|
"rewards/rejected": -1.018384337425232, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.06281078251766553, |
|
"grad_norm": 5.648380861586244, |
|
"learning_rate": 3.1249999999999997e-07, |
|
"logits/chosen": -2.9270145893096924, |
|
"logits/rejected": -2.897721767425537, |
|
"logps/chosen": -0.8545770645141602, |
|
"logps/rejected": -0.9005380868911743, |
|
"loss": -0.0369, |
|
"rewards/accuracies": 0.574999988079071, |
|
"rewards/chosen": -0.8545770645141602, |
|
"rewards/margins": 0.04596105217933655, |
|
"rewards/rejected": -0.9005380868911743, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.07327924627060979, |
|
"grad_norm": 5.194083890339375, |
|
"learning_rate": 3.645833333333333e-07, |
|
"logits/chosen": -3.003455638885498, |
|
"logits/rejected": -2.9747724533081055, |
|
"logps/chosen": -0.8473809957504272, |
|
"logps/rejected": -0.9919956922531128, |
|
"loss": -0.0475, |
|
"rewards/accuracies": 0.6000000238418579, |
|
"rewards/chosen": -0.8473809957504272, |
|
"rewards/margins": 0.14461484551429749, |
|
"rewards/rejected": -0.9919956922531128, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.08374771002355404, |
|
"grad_norm": 4.598851505341545, |
|
"learning_rate": 4.1666666666666667e-07, |
|
"logits/chosen": -2.9274966716766357, |
|
"logits/rejected": -2.9125096797943115, |
|
"logps/chosen": -0.875938892364502, |
|
"logps/rejected": -1.0193151235580444, |
|
"loss": -0.0484, |
|
"rewards/accuracies": 0.574999988079071, |
|
"rewards/chosen": -0.875938892364502, |
|
"rewards/margins": 0.1433762013912201, |
|
"rewards/rejected": -1.0193151235580444, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0942161737764983, |
|
"grad_norm": 4.789508927144662, |
|
"learning_rate": 4.6874999999999996e-07, |
|
"logits/chosen": -2.9352455139160156, |
|
"logits/rejected": -2.911196231842041, |
|
"logps/chosen": -0.8146079182624817, |
|
"logps/rejected": -0.9217915534973145, |
|
"loss": -0.0617, |
|
"rewards/accuracies": 0.5874999761581421, |
|
"rewards/chosen": -0.8146079182624817, |
|
"rewards/margins": 0.10718371719121933, |
|
"rewards/rejected": -0.9217915534973145, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.10468463752944256, |
|
"grad_norm": 5.456564382160052, |
|
"learning_rate": 4.999731868769026e-07, |
|
"logits/chosen": -2.9189658164978027, |
|
"logits/rejected": -2.9572436809539795, |
|
"logps/chosen": -0.8167168498039246, |
|
"logps/rejected": -1.0531059503555298, |
|
"loss": -0.0526, |
|
"rewards/accuracies": 0.675000011920929, |
|
"rewards/chosen": -0.8167168498039246, |
|
"rewards/margins": 0.23638899624347687, |
|
"rewards/rejected": -1.0531059503555298, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.11515310128238682, |
|
"grad_norm": 7.50323556887671, |
|
"learning_rate": 4.996716052911017e-07, |
|
"logits/chosen": -2.9651036262512207, |
|
"logits/rejected": -2.993089199066162, |
|
"logps/chosen": -0.9721449017524719, |
|
"logps/rejected": -1.0521472692489624, |
|
"loss": -0.0482, |
|
"rewards/accuracies": 0.6000000238418579, |
|
"rewards/chosen": -0.9721449017524719, |
|
"rewards/margins": 0.08000238239765167, |
|
"rewards/rejected": -1.0521472692489624, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.12562156503533106, |
|
"grad_norm": 8.485162916006143, |
|
"learning_rate": 4.990353313429303e-07, |
|
"logits/chosen": -2.9055917263031006, |
|
"logits/rejected": -2.9019391536712646, |
|
"logps/chosen": -0.9157769083976746, |
|
"logps/rejected": -1.0977026224136353, |
|
"loss": -0.0682, |
|
"rewards/accuracies": 0.5625, |
|
"rewards/chosen": -0.9157769083976746, |
|
"rewards/margins": 0.18192583322525024, |
|
"rewards/rejected": -1.0977026224136353, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.1360900287882753, |
|
"grad_norm": 6.60792180172325, |
|
"learning_rate": 4.980652179769217e-07, |
|
"logits/chosen": -2.966867446899414, |
|
"logits/rejected": -2.991152048110962, |
|
"logps/chosen": -0.953170895576477, |
|
"logps/rejected": -1.0807805061340332, |
|
"loss": -0.0605, |
|
"rewards/accuracies": 0.550000011920929, |
|
"rewards/chosen": -0.953170895576477, |
|
"rewards/margins": 0.12760967016220093, |
|
"rewards/rejected": -1.0807805061340332, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.14655849254121958, |
|
"grad_norm": 7.134153653486866, |
|
"learning_rate": 4.967625656594781e-07, |
|
"logits/chosen": -2.9338037967681885, |
|
"logits/rejected": -2.921638011932373, |
|
"logps/chosen": -0.9601171612739563, |
|
"logps/rejected": -1.170008659362793, |
|
"loss": -0.0809, |
|
"rewards/accuracies": 0.637499988079071, |
|
"rewards/chosen": -0.9601171612739563, |
|
"rewards/margins": 0.20989160239696503, |
|
"rewards/rejected": -1.170008659362793, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.15702695629416383, |
|
"grad_norm": 8.86078567599782, |
|
"learning_rate": 4.951291206355559e-07, |
|
"logits/chosen": -2.9449570178985596, |
|
"logits/rejected": -2.863473415374756, |
|
"logps/chosen": -1.019167423248291, |
|
"logps/rejected": -1.3563578128814697, |
|
"loss": -0.0632, |
|
"rewards/accuracies": 0.6000000238418579, |
|
"rewards/chosen": -1.019167423248291, |
|
"rewards/margins": 0.3371904492378235, |
|
"rewards/rejected": -1.3563578128814697, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.16749542004710807, |
|
"grad_norm": 6.323638159923026, |
|
"learning_rate": 4.93167072587771e-07, |
|
"logits/chosen": -2.9299821853637695, |
|
"logits/rejected": -2.91919207572937, |
|
"logps/chosen": -0.993580162525177, |
|
"logps/rejected": -1.4255390167236328, |
|
"loss": -0.0782, |
|
"rewards/accuracies": 0.75, |
|
"rewards/chosen": -0.993580162525177, |
|
"rewards/margins": 0.43195900321006775, |
|
"rewards/rejected": -1.4255390167236328, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.17796388380005235, |
|
"grad_norm": 10.148627109182378, |
|
"learning_rate": 4.908790517010636e-07, |
|
"logits/chosen": -2.9560999870300293, |
|
"logits/rejected": -2.9236154556274414, |
|
"logps/chosen": -0.9319014549255371, |
|
"logps/rejected": -1.2665565013885498, |
|
"loss": -0.0829, |
|
"rewards/accuracies": 0.6875, |
|
"rewards/chosen": -0.9319014549255371, |
|
"rewards/margins": 0.3346549868583679, |
|
"rewards/rejected": -1.2665565013885498, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.1884323475529966, |
|
"grad_norm": 6.282748689638336, |
|
"learning_rate": 4.882681251368548e-07, |
|
"logits/chosen": -3.0003504753112793, |
|
"logits/rejected": -2.9371325969696045, |
|
"logps/chosen": -0.9956916570663452, |
|
"logps/rejected": -1.5206801891326904, |
|
"loss": -0.085, |
|
"rewards/accuracies": 0.6499999761581421, |
|
"rewards/chosen": -0.9956916570663452, |
|
"rewards/margins": 0.5249885320663452, |
|
"rewards/rejected": -1.5206801891326904, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.19890081130594087, |
|
"grad_norm": 6.0015528777597265, |
|
"learning_rate": 4.853377929214243e-07, |
|
"logits/chosen": -2.938925266265869, |
|
"logits/rejected": -2.9378859996795654, |
|
"logps/chosen": -0.9370375871658325, |
|
"logps/rejected": -1.2533988952636719, |
|
"loss": -0.073, |
|
"rewards/accuracies": 0.675000011920929, |
|
"rewards/chosen": -0.9370375871658325, |
|
"rewards/margins": 0.3163612484931946, |
|
"rewards/rejected": -1.2533988952636719, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.2093692750588851, |
|
"grad_norm": 9.968256329447078, |
|
"learning_rate": 4.820919832540181e-07, |
|
"logits/chosen": -3.1243631839752197, |
|
"logits/rejected": -3.087338924407959, |
|
"logps/chosen": -0.9904219508171082, |
|
"logps/rejected": -1.245534062385559, |
|
"loss": -0.0777, |
|
"rewards/accuracies": 0.5625, |
|
"rewards/chosen": -0.9904219508171082, |
|
"rewards/margins": 0.2551119923591614, |
|
"rewards/rejected": -1.245534062385559, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.21983773881182936, |
|
"grad_norm": 13.342769400160067, |
|
"learning_rate": 4.785350472409791e-07, |
|
"logits/chosen": -2.987121105194092, |
|
"logits/rejected": -3.0028722286224365, |
|
"logps/chosen": -1.1532871723175049, |
|
"logps/rejected": -1.6190894842147827, |
|
"loss": -0.1063, |
|
"rewards/accuracies": 0.625, |
|
"rewards/chosen": -1.1532871723175049, |
|
"rewards/margins": 0.4658023715019226, |
|
"rewards/rejected": -1.6190894842147827, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.23030620256477363, |
|
"grad_norm": 11.26110556096259, |
|
"learning_rate": 4.7467175306295647e-07, |
|
"logits/chosen": -2.966667652130127, |
|
"logits/rejected": -3.0194077491760254, |
|
"logps/chosen": -1.0292152166366577, |
|
"logps/rejected": -1.59182608127594, |
|
"loss": -0.1066, |
|
"rewards/accuracies": 0.699999988079071, |
|
"rewards/chosen": -1.0292152166366577, |
|
"rewards/margins": 0.562610924243927, |
|
"rewards/rejected": -1.59182608127594, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.24077466631771788, |
|
"grad_norm": 8.176863863279703, |
|
"learning_rate": 4.70507279583015e-07, |
|
"logits/chosen": -3.0338950157165527, |
|
"logits/rejected": -3.0541911125183105, |
|
"logps/chosen": -1.1478749513626099, |
|
"logps/rejected": -1.4170184135437012, |
|
"loss": -0.0877, |
|
"rewards/accuracies": 0.6000000238418579, |
|
"rewards/chosen": -1.1478749513626099, |
|
"rewards/margins": 0.2691434919834137, |
|
"rewards/rejected": -1.4170184135437012, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.2512431300706621, |
|
"grad_norm": 8.289689216777566, |
|
"learning_rate": 4.6604720940421207e-07, |
|
"logits/chosen": -3.0846564769744873, |
|
"logits/rejected": -3.0871291160583496, |
|
"logps/chosen": -1.1712749004364014, |
|
"logps/rejected": -1.6515331268310547, |
|
"loss": -0.0702, |
|
"rewards/accuracies": 0.637499988079071, |
|
"rewards/chosen": -1.1712749004364014, |
|
"rewards/margins": 0.48025816679000854, |
|
"rewards/rejected": -1.6515331268310547, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.26171159382360637, |
|
"grad_norm": 8.632430270339013, |
|
"learning_rate": 4.612975213859487e-07, |
|
"logits/chosen": -3.0736565589904785, |
|
"logits/rejected": -3.0235087871551514, |
|
"logps/chosen": -1.246620535850525, |
|
"logps/rejected": -1.603502869606018, |
|
"loss": -0.0801, |
|
"rewards/accuracies": 0.6875, |
|
"rewards/chosen": -1.246620535850525, |
|
"rewards/margins": 0.3568824827671051, |
|
"rewards/rejected": -1.603502869606018, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.2721800575765506, |
|
"grad_norm": 6.951210195379089, |
|
"learning_rate": 4.5626458262912735e-07, |
|
"logits/chosen": -3.0484471321105957, |
|
"logits/rejected": -3.0786657333374023, |
|
"logps/chosen": -1.2631396055221558, |
|
"logps/rejected": -1.8509140014648438, |
|
"loss": -0.1013, |
|
"rewards/accuracies": 0.6625000238418579, |
|
"rewards/chosen": -1.2631396055221558, |
|
"rewards/margins": 0.5877745747566223, |
|
"rewards/rejected": -1.8509140014648438, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.2826485213294949, |
|
"grad_norm": 6.6831446876498966, |
|
"learning_rate": 4.5095513994085974e-07, |
|
"logits/chosen": -2.9543702602386475, |
|
"logits/rejected": -2.9393744468688965, |
|
"logps/chosen": -1.0893765687942505, |
|
"logps/rejected": -1.7171491384506226, |
|
"loss": -0.0945, |
|
"rewards/accuracies": 0.6875, |
|
"rewards/chosen": -1.0893765687942505, |
|
"rewards/margins": 0.6277726292610168, |
|
"rewards/rejected": -1.7171491384506226, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.29311698508243916, |
|
"grad_norm": 11.237918474787836, |
|
"learning_rate": 4.453763107901675e-07, |
|
"logits/chosen": -3.0231282711029053, |
|
"logits/rejected": -3.0941128730773926, |
|
"logps/chosen": -1.3833149671554565, |
|
"logps/rejected": -1.7585477828979492, |
|
"loss": -0.0791, |
|
"rewards/accuracies": 0.5874999761581421, |
|
"rewards/chosen": -1.3833149671554565, |
|
"rewards/margins": 0.37523263692855835, |
|
"rewards/rejected": -1.7585477828979492, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.3035854488353834, |
|
"grad_norm": 11.485846701864144, |
|
"learning_rate": 4.395355737667985e-07, |
|
"logits/chosen": -3.0041990280151367, |
|
"logits/rejected": -3.0510060787200928, |
|
"logps/chosen": -1.1283174753189087, |
|
"logps/rejected": -1.8526252508163452, |
|
"loss": -0.1012, |
|
"rewards/accuracies": 0.675000011920929, |
|
"rewards/chosen": -1.1283174753189087, |
|
"rewards/margins": 0.7243077158927917, |
|
"rewards/rejected": -1.8526252508163452, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.31405391258832765, |
|
"grad_norm": 11.357824867739286, |
|
"learning_rate": 4.3344075855595097e-07, |
|
"logits/chosen": -3.0792462825775146, |
|
"logits/rejected": -3.0518386363983154, |
|
"logps/chosen": -1.247040033340454, |
|
"logps/rejected": -1.7509548664093018, |
|
"loss": -0.0889, |
|
"rewards/accuracies": 0.625, |
|
"rewards/chosen": -1.247040033340454, |
|
"rewards/margins": 0.5039147734642029, |
|
"rewards/rejected": -1.7509548664093018, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.3245223763412719, |
|
"grad_norm": 11.335355380541145, |
|
"learning_rate": 4.271000354423425e-07, |
|
"logits/chosen": -2.9956650733947754, |
|
"logits/rejected": -3.0046334266662598, |
|
"logps/chosen": -1.3198177814483643, |
|
"logps/rejected": -2.0559017658233643, |
|
"loss": -0.111, |
|
"rewards/accuracies": 0.675000011920929, |
|
"rewards/chosen": -1.3198177814483643, |
|
"rewards/margins": 0.7360838651657104, |
|
"rewards/rejected": -2.0559017658233643, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.33499084009421615, |
|
"grad_norm": 16.693424155067344, |
|
"learning_rate": 4.2052190435769554e-07, |
|
"logits/chosen": -2.9203574657440186, |
|
"logits/rejected": -2.9545798301696777, |
|
"logps/chosen": -1.1988856792449951, |
|
"logps/rejected": -1.823240876197815, |
|
"loss": -0.1107, |
|
"rewards/accuracies": 0.675000011920929, |
|
"rewards/chosen": -1.1988856792449951, |
|
"rewards/margins": 0.6243550181388855, |
|
"rewards/rejected": -1.823240876197815, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.34545930384716045, |
|
"grad_norm": 26.190973492572102, |
|
"learning_rate": 4.137151834863213e-07, |
|
"logits/chosen": -2.992180347442627, |
|
"logits/rejected": -2.9608774185180664, |
|
"logps/chosen": -1.3537108898162842, |
|
"logps/rejected": -1.587428331375122, |
|
"loss": -0.114, |
|
"rewards/accuracies": 0.5874999761581421, |
|
"rewards/chosen": -1.3537108898162842, |
|
"rewards/margins": 0.2337172031402588, |
|
"rewards/rejected": -1.587428331375122, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.3559277676001047, |
|
"grad_norm": 11.938074204901223, |
|
"learning_rate": 4.0668899744407567e-07, |
|
"logits/chosen": -2.943603992462158, |
|
"logits/rejected": -2.9042210578918457, |
|
"logps/chosen": -1.1778740882873535, |
|
"logps/rejected": -1.7121427059173584, |
|
"loss": -0.1062, |
|
"rewards/accuracies": 0.625, |
|
"rewards/chosen": -1.1778740882873535, |
|
"rewards/margins": 0.5342684984207153, |
|
"rewards/rejected": -1.7121427059173584, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.36639623135304894, |
|
"grad_norm": 9.013768613990127, |
|
"learning_rate": 3.994527650465352e-07, |
|
"logits/chosen": -2.767575263977051, |
|
"logits/rejected": -2.7348756790161133, |
|
"logps/chosen": -1.2128514051437378, |
|
"logps/rejected": -2.0194027423858643, |
|
"loss": -0.1222, |
|
"rewards/accuracies": 0.6625000238418579, |
|
"rewards/chosen": -1.2128514051437378, |
|
"rewards/margins": 0.8065516352653503, |
|
"rewards/rejected": -2.0194027423858643, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.3768646951059932, |
|
"grad_norm": 20.304059925871332, |
|
"learning_rate": 3.920161866827889e-07, |
|
"logits/chosen": -2.6296277046203613, |
|
"logits/rejected": -2.5583887100219727, |
|
"logps/chosen": -1.4558690786361694, |
|
"logps/rejected": -2.660147190093994, |
|
"loss": -0.104, |
|
"rewards/accuracies": 0.6499999761581421, |
|
"rewards/chosen": -1.4558690786361694, |
|
"rewards/margins": 1.2042779922485352, |
|
"rewards/rejected": -2.660147190093994, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.38733315885893743, |
|
"grad_norm": 12.56852650611151, |
|
"learning_rate": 3.8438923131177237e-07, |
|
"logits/chosen": -2.646308660507202, |
|
"logits/rejected": -2.5967037677764893, |
|
"logps/chosen": -1.2808468341827393, |
|
"logps/rejected": -2.6707491874694824, |
|
"loss": -0.1264, |
|
"rewards/accuracies": 0.7250000238418579, |
|
"rewards/chosen": -1.2808468341827393, |
|
"rewards/margins": 1.3899024724960327, |
|
"rewards/rejected": -2.6707491874694824, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.39780162261188173, |
|
"grad_norm": 10.671381825602081, |
|
"learning_rate": 3.765821230985757e-07, |
|
"logits/chosen": -2.6582224369049072, |
|
"logits/rejected": -2.643031358718872, |
|
"logps/chosen": -1.2568540573120117, |
|
"logps/rejected": -2.3855767250061035, |
|
"loss": -0.1248, |
|
"rewards/accuracies": 0.675000011920929, |
|
"rewards/chosen": -1.2568540573120117, |
|
"rewards/margins": 1.1287227869033813, |
|
"rewards/rejected": -2.3855767250061035, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.408270086364826, |
|
"grad_norm": 12.305445384315465, |
|
"learning_rate": 3.6860532770864005e-07, |
|
"logits/chosen": -2.6588711738586426, |
|
"logits/rejected": -2.650085926055908, |
|
"logps/chosen": -1.2373688220977783, |
|
"logps/rejected": -2.440690755844116, |
|
"loss": -0.1086, |
|
"rewards/accuracies": 0.737500011920929, |
|
"rewards/chosen": -1.2373688220977783, |
|
"rewards/margins": 1.2033220529556274, |
|
"rewards/rejected": -2.440690755844116, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.4187385501177702, |
|
"grad_norm": 114.17793963840649, |
|
"learning_rate": 3.604695382782159e-07, |
|
"logits/chosen": -2.5837745666503906, |
|
"logits/rejected": -2.5190796852111816, |
|
"logps/chosen": -1.535466194152832, |
|
"logps/rejected": -2.662421703338623, |
|
"loss": -0.1231, |
|
"rewards/accuracies": 0.637499988079071, |
|
"rewards/chosen": -1.535466194152832, |
|
"rewards/margins": 1.126955509185791, |
|
"rewards/rejected": -2.662421703338623, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.42920701387071447, |
|
"grad_norm": 29.258244612720908, |
|
"learning_rate": 3.5218566107988867e-07, |
|
"logits/chosen": -2.4933407306671143, |
|
"logits/rejected": -2.3973355293273926, |
|
"logps/chosen": -1.6263624429702759, |
|
"logps/rejected": -3.2516510486602783, |
|
"loss": -0.1144, |
|
"rewards/accuracies": 0.6499999761581421, |
|
"rewards/chosen": -1.6263624429702759, |
|
"rewards/margins": 1.6252882480621338, |
|
"rewards/rejected": -3.2516510486602783, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.4396754776236587, |
|
"grad_norm": 39.526967700856005, |
|
"learning_rate": 3.4376480090239047e-07, |
|
"logits/chosen": -2.342941999435425, |
|
"logits/rejected": -2.3288662433624268, |
|
"logps/chosen": -2.3925395011901855, |
|
"logps/rejected": -3.234570264816284, |
|
"loss": -0.0999, |
|
"rewards/accuracies": 0.637499988079071, |
|
"rewards/chosen": -2.3925395011901855, |
|
"rewards/margins": 0.8420308232307434, |
|
"rewards/rejected": -3.234570264816284, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.45014394137660296, |
|
"grad_norm": 15.657987448676648, |
|
"learning_rate": 3.3521824616429284e-07, |
|
"logits/chosen": -2.3377509117126465, |
|
"logits/rejected": -2.3475136756896973, |
|
"logps/chosen": -2.209704875946045, |
|
"logps/rejected": -3.1227197647094727, |
|
"loss": -0.1112, |
|
"rewards/accuracies": 0.699999988079071, |
|
"rewards/chosen": -2.209704875946045, |
|
"rewards/margins": 0.9130150079727173, |
|
"rewards/rejected": -3.1227197647094727, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.46061240512954726, |
|
"grad_norm": 24.63970678663639, |
|
"learning_rate": 3.265574537815398e-07, |
|
"logits/chosen": -2.3190903663635254, |
|
"logits/rejected": -2.222909927368164, |
|
"logps/chosen": -1.5355461835861206, |
|
"logps/rejected": -2.3923590183258057, |
|
"loss": -0.125, |
|
"rewards/accuracies": 0.6875, |
|
"rewards/chosen": -1.5355461835861206, |
|
"rewards/margins": 0.8568128347396851, |
|
"rewards/rejected": -2.3923590183258057, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.4710808688824915, |
|
"grad_norm": 10.677074157142071, |
|
"learning_rate": 3.1779403380910425e-07, |
|
"logits/chosen": -2.237248182296753, |
|
"logits/rejected": -2.051560163497925, |
|
"logps/chosen": -1.662973165512085, |
|
"logps/rejected": -2.656738519668579, |
|
"loss": -0.1184, |
|
"rewards/accuracies": 0.75, |
|
"rewards/chosen": -1.662973165512085, |
|
"rewards/margins": 0.9937652349472046, |
|
"rewards/rejected": -2.656738519668579, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.48154933263543576, |
|
"grad_norm": 63.22829689035937, |
|
"learning_rate": 3.0893973387735683e-07, |
|
"logits/chosen": -2.1220011711120605, |
|
"logits/rejected": -1.9981296062469482, |
|
"logps/chosen": -1.8179805278778076, |
|
"logps/rejected": -3.1246554851531982, |
|
"loss": -0.1233, |
|
"rewards/accuracies": 0.6875, |
|
"rewards/chosen": -1.8179805278778076, |
|
"rewards/margins": 1.3066750764846802, |
|
"rewards/rejected": -3.1246554851531982, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.49201779638838, |
|
"grad_norm": 16.030732819551737, |
|
"learning_rate": 3.000064234440111e-07, |
|
"logits/chosen": -2.058353900909424, |
|
"logits/rejected": -1.8866300582885742, |
|
"logps/chosen": -1.891436219215393, |
|
"logps/rejected": -2.721259832382202, |
|
"loss": -0.1073, |
|
"rewards/accuracies": 0.637499988079071, |
|
"rewards/chosen": -1.891436219215393, |
|
"rewards/margins": 0.8298239707946777, |
|
"rewards/rejected": -2.721259832382202, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.5024862601413242, |
|
"grad_norm": 13.634356149094991, |
|
"learning_rate": 2.910060778827554e-07, |
|
"logits/chosen": -1.8825008869171143, |
|
"logits/rejected": -1.8133189678192139, |
|
"logps/chosen": -2.548449993133545, |
|
"logps/rejected": -3.235548496246338, |
|
"loss": -0.1088, |
|
"rewards/accuracies": 0.6499999761581421, |
|
"rewards/chosen": -2.548449993133545, |
|
"rewards/margins": 0.6870983839035034, |
|
"rewards/rejected": -3.235548496246338, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.5129547238942685, |
|
"grad_norm": 29.272649537767588, |
|
"learning_rate": 2.8195076242990116e-07, |
|
"logits/chosen": -2.0280842781066895, |
|
"logits/rejected": -1.8670088052749634, |
|
"logps/chosen": -1.8938684463500977, |
|
"logps/rejected": -3.3442654609680176, |
|
"loss": -0.1234, |
|
"rewards/accuracies": 0.675000011920929, |
|
"rewards/chosen": -1.8938684463500977, |
|
"rewards/margins": 1.4503968954086304, |
|
"rewards/rejected": -3.3442654609680176, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.5234231876472127, |
|
"grad_norm": 20.57218560544621, |
|
"learning_rate": 2.7285261601056697e-07, |
|
"logits/chosen": -2.137603759765625, |
|
"logits/rejected": -1.8585504293441772, |
|
"logps/chosen": -1.5400477647781372, |
|
"logps/rejected": -3.192748546600342, |
|
"loss": -0.1016, |
|
"rewards/accuracies": 0.762499988079071, |
|
"rewards/chosen": -1.5400477647781372, |
|
"rewards/margins": 1.6527007818222046, |
|
"rewards/rejected": -3.192748546600342, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.533891651400157, |
|
"grad_norm": 32.87314594164391, |
|
"learning_rate": 2.6372383496608186e-07, |
|
"logits/chosen": -2.2034177780151367, |
|
"logits/rejected": -2.07490873336792, |
|
"logps/chosen": -1.890310525894165, |
|
"logps/rejected": -2.4908692836761475, |
|
"loss": -0.1104, |
|
"rewards/accuracies": 0.5874999761581421, |
|
"rewards/chosen": -1.890310525894165, |
|
"rewards/margins": 0.6005589365959167, |
|
"rewards/rejected": -2.4908692836761475, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.5443601151531012, |
|
"grad_norm": 13.702145049518174, |
|
"learning_rate": 2.5457665670441937e-07, |
|
"logits/chosen": -1.9207513332366943, |
|
"logits/rejected": -1.8922897577285767, |
|
"logps/chosen": -1.7071243524551392, |
|
"logps/rejected": -2.4078123569488525, |
|
"loss": -0.1183, |
|
"rewards/accuracies": 0.6499999761581421, |
|
"rewards/chosen": -1.7071243524551392, |
|
"rewards/margins": 0.7006877064704895, |
|
"rewards/rejected": -2.4078123569488525, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.5548285789060455, |
|
"grad_norm": 38.3020358782431, |
|
"learning_rate": 2.454233432955807e-07, |
|
"logits/chosen": -2.05800199508667, |
|
"logits/rejected": -2.0371463298797607, |
|
"logps/chosen": -1.9121761322021484, |
|
"logps/rejected": -2.3142733573913574, |
|
"loss": -0.1309, |
|
"rewards/accuracies": 0.675000011920929, |
|
"rewards/chosen": -1.9121761322021484, |
|
"rewards/margins": 0.4020973742008209, |
|
"rewards/rejected": -2.3142733573913574, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.5652970426589898, |
|
"grad_norm": 24.19175613282827, |
|
"learning_rate": 2.3627616503391812e-07, |
|
"logits/chosen": -1.966522455215454, |
|
"logits/rejected": -2.0543503761291504, |
|
"logps/chosen": -1.5488941669464111, |
|
"logps/rejected": -2.404010534286499, |
|
"loss": -0.1062, |
|
"rewards/accuracies": 0.6625000238418579, |
|
"rewards/chosen": -1.5488941669464111, |
|
"rewards/margins": 0.8551164865493774, |
|
"rewards/rejected": -2.404010534286499, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.575765506411934, |
|
"grad_norm": 32.544264458959944, |
|
"learning_rate": 2.2714738398943308e-07, |
|
"logits/chosen": -2.1266486644744873, |
|
"logits/rejected": -1.9693682193756104, |
|
"logps/chosen": -1.345821499824524, |
|
"logps/rejected": -2.7840895652770996, |
|
"loss": -0.1459, |
|
"rewards/accuracies": 0.75, |
|
"rewards/chosen": -1.345821499824524, |
|
"rewards/margins": 1.4382681846618652, |
|
"rewards/rejected": -2.7840895652770996, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.5862339701648783, |
|
"grad_norm": 21.822579332484686, |
|
"learning_rate": 2.1804923757009882e-07, |
|
"logits/chosen": -2.1691737174987793, |
|
"logits/rejected": -1.9860219955444336, |
|
"logps/chosen": -1.2002053260803223, |
|
"logps/rejected": -2.559629201889038, |
|
"loss": -0.1202, |
|
"rewards/accuracies": 0.6625000238418579, |
|
"rewards/chosen": -1.2002053260803223, |
|
"rewards/margins": 1.359424114227295, |
|
"rewards/rejected": -2.559629201889038, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.5967024339178225, |
|
"grad_norm": 35.5262923825776, |
|
"learning_rate": 2.089939221172446e-07, |
|
"logits/chosen": -2.1386303901672363, |
|
"logits/rejected": -2.101440906524658, |
|
"logps/chosen": -1.337780237197876, |
|
"logps/rejected": -2.1833016872406006, |
|
"loss": -0.1175, |
|
"rewards/accuracies": 0.6499999761581421, |
|
"rewards/chosen": -1.337780237197876, |
|
"rewards/margins": 0.8455215692520142, |
|
"rewards/rejected": -2.1833016872406006, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.6071708976707668, |
|
"grad_norm": 27.60629251004708, |
|
"learning_rate": 1.9999357655598891e-07, |
|
"logits/chosen": -2.098567008972168, |
|
"logits/rejected": -1.9679734706878662, |
|
"logps/chosen": -1.6509898900985718, |
|
"logps/rejected": -2.216376781463623, |
|
"loss": -0.1164, |
|
"rewards/accuracies": 0.6000000238418579, |
|
"rewards/chosen": -1.6509898900985718, |
|
"rewards/margins": 0.5653868913650513, |
|
"rewards/rejected": -2.216376781463623, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.6176393614237111, |
|
"grad_norm": 29.13174757981831, |
|
"learning_rate": 1.9106026612264315e-07, |
|
"logits/chosen": -1.9900572299957275, |
|
"logits/rejected": -1.8516021966934204, |
|
"logps/chosen": -1.4829915761947632, |
|
"logps/rejected": -2.072486400604248, |
|
"loss": -0.1277, |
|
"rewards/accuracies": 0.6625000238418579, |
|
"rewards/chosen": -1.4829915761947632, |
|
"rewards/margins": 0.5894945859909058, |
|
"rewards/rejected": -2.072486400604248, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.6281078251766553, |
|
"grad_norm": 18.174081755957985, |
|
"learning_rate": 1.8220596619089573e-07, |
|
"logits/chosen": -1.8426250219345093, |
|
"logits/rejected": -1.7023963928222656, |
|
"logps/chosen": -1.5617187023162842, |
|
"logps/rejected": -2.6344046592712402, |
|
"loss": -0.1179, |
|
"rewards/accuracies": 0.7250000238418579, |
|
"rewards/chosen": -1.5617187023162842, |
|
"rewards/margins": 1.072685956954956, |
|
"rewards/rejected": -2.6344046592712402, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.6385762889295996, |
|
"grad_norm": 23.872227813445928, |
|
"learning_rate": 1.7344254621846017e-07, |
|
"logits/chosen": -1.8805307149887085, |
|
"logits/rejected": -1.748567819595337, |
|
"logps/chosen": -1.3314863443374634, |
|
"logps/rejected": -1.9546124935150146, |
|
"loss": -0.1335, |
|
"rewards/accuracies": 0.637499988079071, |
|
"rewards/chosen": -1.3314863443374634, |
|
"rewards/margins": 0.623126208782196, |
|
"rewards/rejected": -1.9546124935150146, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.6490447526825438, |
|
"grad_norm": 29.431070606486642, |
|
"learning_rate": 1.647817538357072e-07, |
|
"logits/chosen": -1.8305540084838867, |
|
"logits/rejected": -1.5959422588348389, |
|
"logps/chosen": -1.4600447416305542, |
|
"logps/rejected": -3.613452196121216, |
|
"loss": -0.1338, |
|
"rewards/accuracies": 0.737500011920929, |
|
"rewards/chosen": -1.4600447416305542, |
|
"rewards/margins": 2.153407573699951, |
|
"rewards/rejected": -3.613452196121216, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.6595132164354881, |
|
"grad_norm": 20.33172315066391, |
|
"learning_rate": 1.562351990976095e-07, |
|
"logits/chosen": -1.7097995281219482, |
|
"logits/rejected": -1.444483995437622, |
|
"logps/chosen": -1.8937146663665771, |
|
"logps/rejected": -2.9181923866271973, |
|
"loss": -0.147, |
|
"rewards/accuracies": 0.762499988079071, |
|
"rewards/chosen": -1.8937146663665771, |
|
"rewards/margins": 1.0244777202606201, |
|
"rewards/rejected": -2.9181923866271973, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.6699816801884323, |
|
"grad_norm": 11.69882174941769, |
|
"learning_rate": 1.478143389201113e-07, |
|
"logits/chosen": -1.5626455545425415, |
|
"logits/rejected": -1.408778190612793, |
|
"logps/chosen": -1.8121535778045654, |
|
"logps/rejected": -2.9141504764556885, |
|
"loss": -0.1327, |
|
"rewards/accuracies": 0.6499999761581421, |
|
"rewards/chosen": -1.8121535778045654, |
|
"rewards/margins": 1.1019970178604126, |
|
"rewards/rejected": -2.9141504764556885, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.6804501439413766, |
|
"grad_norm": 31.4758832690907, |
|
"learning_rate": 1.3953046172178413e-07, |
|
"logits/chosen": -1.538895606994629, |
|
"logits/rejected": -1.3528716564178467, |
|
"logps/chosen": -1.577697515487671, |
|
"logps/rejected": -3.2226383686065674, |
|
"loss": -0.108, |
|
"rewards/accuracies": 0.7250000238418579, |
|
"rewards/chosen": -1.577697515487671, |
|
"rewards/margins": 1.6449410915374756, |
|
"rewards/rejected": -3.2226383686065674, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.6909186076943209, |
|
"grad_norm": 30.00262748267467, |
|
"learning_rate": 1.3139467229135998e-07, |
|
"logits/chosen": -1.633194923400879, |
|
"logits/rejected": -1.5735431909561157, |
|
"logps/chosen": -1.4919952154159546, |
|
"logps/rejected": -3.3074562549591064, |
|
"loss": -0.1222, |
|
"rewards/accuracies": 0.762499988079071, |
|
"rewards/chosen": -1.4919952154159546, |
|
"rewards/margins": 1.8154609203338623, |
|
"rewards/rejected": -3.3074562549591064, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.7013870714472651, |
|
"grad_norm": 148.5006608776355, |
|
"learning_rate": 1.2341787690142435e-07, |
|
"logits/chosen": -1.653297781944275, |
|
"logits/rejected": -1.3402432203292847, |
|
"logps/chosen": -1.81472647190094, |
|
"logps/rejected": -3.3855223655700684, |
|
"loss": -0.118, |
|
"rewards/accuracies": 0.737500011920929, |
|
"rewards/chosen": -1.81472647190094, |
|
"rewards/margins": 1.5707957744598389, |
|
"rewards/rejected": -3.3855223655700684, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.7118555352002094, |
|
"grad_norm": 19.087270572643252, |
|
"learning_rate": 1.1561076868822755e-07, |
|
"logits/chosen": -1.3242827653884888, |
|
"logits/rejected": -1.1375129222869873, |
|
"logps/chosen": -1.8913873434066772, |
|
"logps/rejected": -3.8381896018981934, |
|
"loss": -0.1367, |
|
"rewards/accuracies": 0.6625000238418579, |
|
"rewards/chosen": -1.8913873434066772, |
|
"rewards/margins": 1.9468021392822266, |
|
"rewards/rejected": -3.8381896018981934, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.7223239989531536, |
|
"grad_norm": 12.352384869534044, |
|
"learning_rate": 1.0798381331721107e-07, |
|
"logits/chosen": -1.622854471206665, |
|
"logits/rejected": -1.4534351825714111, |
|
"logps/chosen": -1.520581603050232, |
|
"logps/rejected": -2.94417142868042, |
|
"loss": -0.1259, |
|
"rewards/accuracies": 0.7875000238418579, |
|
"rewards/chosen": -1.520581603050232, |
|
"rewards/margins": 1.423590064048767, |
|
"rewards/rejected": -2.94417142868042, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.7327924627060979, |
|
"grad_norm": 14.805439320214807, |
|
"learning_rate": 1.0054723495346482e-07, |
|
"logits/chosen": -1.6887247562408447, |
|
"logits/rejected": -1.1422927379608154, |
|
"logps/chosen": -1.5419939756393433, |
|
"logps/rejected": -3.1307151317596436, |
|
"loss": -0.1286, |
|
"rewards/accuracies": 0.75, |
|
"rewards/chosen": -1.5419939756393433, |
|
"rewards/margins": 1.5887210369110107, |
|
"rewards/rejected": -3.1307151317596436, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.7432609264590422, |
|
"grad_norm": 20.09481183401406, |
|
"learning_rate": 9.331100255592436e-08, |
|
"logits/chosen": -1.481506109237671, |
|
"logits/rejected": -1.2393898963928223, |
|
"logps/chosen": -1.6499707698822021, |
|
"logps/rejected": -2.86677885055542, |
|
"loss": -0.1247, |
|
"rewards/accuracies": 0.625, |
|
"rewards/chosen": -1.6499707698822021, |
|
"rewards/margins": 1.2168083190917969, |
|
"rewards/rejected": -2.86677885055542, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.7537293902119864, |
|
"grad_norm": 21.363222549077037, |
|
"learning_rate": 8.628481651367875e-08, |
|
"logits/chosen": -1.531849980354309, |
|
"logits/rejected": -1.4370256662368774, |
|
"logps/chosen": -1.5883554220199585, |
|
"logps/rejected": -3.119661808013916, |
|
"loss": -0.1282, |
|
"rewards/accuracies": 0.699999988079071, |
|
"rewards/chosen": -1.5883554220199585, |
|
"rewards/margins": 1.531306505203247, |
|
"rewards/rejected": -3.119661808013916, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.7641978539649307, |
|
"grad_norm": 24.973104208608277, |
|
"learning_rate": 7.947809564230445e-08, |
|
"logits/chosen": -1.7802162170410156, |
|
"logits/rejected": -1.3090742826461792, |
|
"logps/chosen": -1.4277862310409546, |
|
"logps/rejected": -3.4141273498535156, |
|
"loss": -0.1417, |
|
"rewards/accuracies": 0.737500011920929, |
|
"rewards/chosen": -1.4277862310409546, |
|
"rewards/margins": 1.986340880393982, |
|
"rewards/rejected": -3.4141273498535156, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.7746663177178749, |
|
"grad_norm": 20.21283934356993, |
|
"learning_rate": 7.289996455765748e-08, |
|
"logits/chosen": -1.6334741115570068, |
|
"logits/rejected": -1.4152132272720337, |
|
"logps/chosen": -1.3029416799545288, |
|
"logps/rejected": -2.3641180992126465, |
|
"loss": -0.162, |
|
"rewards/accuracies": 0.800000011920929, |
|
"rewards/chosen": -1.3029416799545288, |
|
"rewards/margins": 1.0611765384674072, |
|
"rewards/rejected": -2.3641180992126465, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.7851347814708192, |
|
"grad_norm": 19.15939039084446, |
|
"learning_rate": 6.655924144404906e-08, |
|
"logits/chosen": -1.5635571479797363, |
|
"logits/rejected": -1.3937865495681763, |
|
"logps/chosen": -1.0526535511016846, |
|
"logps/rejected": -3.306544780731201, |
|
"loss": -0.1278, |
|
"rewards/accuracies": 0.699999988079071, |
|
"rewards/chosen": -1.0526535511016846, |
|
"rewards/margins": 2.2538914680480957, |
|
"rewards/rejected": -3.306544780731201, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.7956032452237635, |
|
"grad_norm": 14.808944976338482, |
|
"learning_rate": 6.046442623320145e-08, |
|
"logits/chosen": -1.7755823135375977, |
|
"logits/rejected": -1.715908408164978, |
|
"logps/chosen": -1.3490417003631592, |
|
"logps/rejected": -1.8757483959197998, |
|
"loss": -0.1173, |
|
"rewards/accuracies": 0.637499988079071, |
|
"rewards/chosen": -1.3490417003631592, |
|
"rewards/margins": 0.5267067551612854, |
|
"rewards/rejected": -1.8757483959197998, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.8060717089767077, |
|
"grad_norm": 37.744351087223556, |
|
"learning_rate": 5.4623689209832484e-08, |
|
"logits/chosen": -1.621198296546936, |
|
"logits/rejected": -1.5765188932418823, |
|
"logps/chosen": -1.1883819103240967, |
|
"logps/rejected": -2.3331074714660645, |
|
"loss": -0.1254, |
|
"rewards/accuracies": 0.6875, |
|
"rewards/chosen": -1.1883819103240967, |
|
"rewards/margins": 1.1447254419326782, |
|
"rewards/rejected": -2.3331074714660645, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.816540172729652, |
|
"grad_norm": 40.457315431893065, |
|
"learning_rate": 4.904486005914027e-08, |
|
"logits/chosen": -1.804915189743042, |
|
"logits/rejected": -1.42214834690094, |
|
"logps/chosen": -1.3127548694610596, |
|
"logps/rejected": -2.009843349456787, |
|
"loss": -0.1109, |
|
"rewards/accuracies": 0.675000011920929, |
|
"rewards/chosen": -1.3127548694610596, |
|
"rewards/margins": 0.697088360786438, |
|
"rewards/rejected": -2.009843349456787, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.8270086364825961, |
|
"grad_norm": 24.94087598352193, |
|
"learning_rate": 4.373541737087263e-08, |
|
"logits/chosen": -1.7377560138702393, |
|
"logits/rejected": -1.3866163492202759, |
|
"logps/chosen": -1.1465688943862915, |
|
"logps/rejected": -2.7002861499786377, |
|
"loss": -0.12, |
|
"rewards/accuracies": 0.612500011920929, |
|
"rewards/chosen": -1.1465688943862915, |
|
"rewards/margins": 1.5537172555923462, |
|
"rewards/rejected": -2.7002861499786377, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.8374771002355405, |
|
"grad_norm": 88.79784088043822, |
|
"learning_rate": 3.8702478614051345e-08, |
|
"logits/chosen": -1.7607088088989258, |
|
"logits/rejected": -1.5547138452529907, |
|
"logps/chosen": -1.3398617506027222, |
|
"logps/rejected": -2.071460008621216, |
|
"loss": -0.1154, |
|
"rewards/accuracies": 0.637499988079071, |
|
"rewards/chosen": -1.3398617506027222, |
|
"rewards/margins": 0.7315981388092041, |
|
"rewards/rejected": -2.071460008621216, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.8479455639884846, |
|
"grad_norm": 9.248843953744515, |
|
"learning_rate": 3.3952790595787986e-08, |
|
"logits/chosen": -1.6915082931518555, |
|
"logits/rejected": -1.4412751197814941, |
|
"logps/chosen": -1.4074960947036743, |
|
"logps/rejected": -2.708440065383911, |
|
"loss": -0.1226, |
|
"rewards/accuracies": 0.699999988079071, |
|
"rewards/chosen": -1.4074960947036743, |
|
"rewards/margins": 1.3009440898895264, |
|
"rewards/rejected": -2.708440065383911, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.8584140277414289, |
|
"grad_norm": 19.94643713892694, |
|
"learning_rate": 2.9492720416985e-08, |
|
"logits/chosen": -1.532260537147522, |
|
"logits/rejected": -1.374741554260254, |
|
"logps/chosen": -1.6915867328643799, |
|
"logps/rejected": -3.1600654125213623, |
|
"loss": -0.137, |
|
"rewards/accuracies": 0.637499988079071, |
|
"rewards/chosen": -1.6915867328643799, |
|
"rewards/margins": 1.4684789180755615, |
|
"rewards/rejected": -3.1600654125213623, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.8688824914943732, |
|
"grad_norm": 130.75278775438778, |
|
"learning_rate": 2.5328246937043525e-08, |
|
"logits/chosen": -1.5330517292022705, |
|
"logits/rejected": -1.3144365549087524, |
|
"logps/chosen": -1.8018486499786377, |
|
"logps/rejected": -2.8836231231689453, |
|
"loss": -0.1262, |
|
"rewards/accuracies": 0.6499999761581421, |
|
"rewards/chosen": -1.8018486499786377, |
|
"rewards/margins": 1.0817744731903076, |
|
"rewards/rejected": -2.8836231231689453, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.8793509552473174, |
|
"grad_norm": 25.478377748856754, |
|
"learning_rate": 2.1464952759020856e-08, |
|
"logits/chosen": -1.623173475265503, |
|
"logits/rejected": -1.3844332695007324, |
|
"logps/chosen": -1.4715114831924438, |
|
"logps/rejected": -2.6988463401794434, |
|
"loss": -0.1389, |
|
"rewards/accuracies": 0.625, |
|
"rewards/chosen": -1.4715114831924438, |
|
"rewards/margins": 1.2273352146148682, |
|
"rewards/rejected": -2.6988463401794434, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.8898194190002617, |
|
"grad_norm": 44.22845937144706, |
|
"learning_rate": 1.7908016745981856e-08, |
|
"logits/chosen": -1.6746991872787476, |
|
"logits/rejected": -1.4371649026870728, |
|
"logps/chosen": -1.6984422206878662, |
|
"logps/rejected": -2.9744515419006348, |
|
"loss": -0.1342, |
|
"rewards/accuracies": 0.637499988079071, |
|
"rewards/chosen": -1.6984422206878662, |
|
"rewards/margins": 1.2760093212127686, |
|
"rewards/rejected": -2.9744515419006348, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.9002878827532059, |
|
"grad_norm": 10.062945084378958, |
|
"learning_rate": 1.4662207078575684e-08, |
|
"logits/chosen": -1.5444406270980835, |
|
"logits/rejected": -1.2706645727157593, |
|
"logps/chosen": -1.5169013738632202, |
|
"logps/rejected": -3.1792664527893066, |
|
"loss": -0.1458, |
|
"rewards/accuracies": 0.6625000238418579, |
|
"rewards/chosen": -1.5169013738632202, |
|
"rewards/margins": 1.6623647212982178, |
|
"rewards/rejected": -3.1792664527893066, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.9107563465061502, |
|
"grad_norm": 26.84204726450129, |
|
"learning_rate": 1.1731874863145142e-08, |
|
"logits/chosen": -1.4832799434661865, |
|
"logits/rejected": -1.6065374612808228, |
|
"logps/chosen": -1.2497235536575317, |
|
"logps/rejected": -2.3406825065612793, |
|
"loss": -0.1329, |
|
"rewards/accuracies": 0.612500011920929, |
|
"rewards/chosen": -1.2497235536575317, |
|
"rewards/margins": 1.0909589529037476, |
|
"rewards/rejected": -2.3406825065612793, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.9212248102590945, |
|
"grad_norm": 14.511762413527599, |
|
"learning_rate": 9.12094829893642e-09, |
|
"logits/chosen": -1.6549100875854492, |
|
"logits/rejected": -1.6303857564926147, |
|
"logps/chosen": -1.6263110637664795, |
|
"logps/rejected": -2.7538211345672607, |
|
"loss": -0.1285, |
|
"rewards/accuracies": 0.6000000238418579, |
|
"rewards/chosen": -1.6263110637664795, |
|
"rewards/margins": 1.1275101900100708, |
|
"rewards/rejected": -2.7538211345672607, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.9316932740120387, |
|
"grad_norm": 115.57689083092143, |
|
"learning_rate": 6.832927412229017e-09, |
|
"logits/chosen": -1.6242246627807617, |
|
"logits/rejected": -1.3819233179092407, |
|
"logps/chosen": -1.624022126197815, |
|
"logps/rejected": -2.9669742584228516, |
|
"loss": -0.1358, |
|
"rewards/accuracies": 0.737500011920929, |
|
"rewards/chosen": -1.624022126197815, |
|
"rewards/margins": 1.3429521322250366, |
|
"rewards/rejected": -2.9669742584228516, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.942161737764983, |
|
"grad_norm": 34.008733673808756, |
|
"learning_rate": 4.8708793644441086e-09, |
|
"logits/chosen": -1.7152725458145142, |
|
"logits/rejected": -1.4042075872421265, |
|
"logps/chosen": -1.1938213109970093, |
|
"logps/rejected": -2.202075958251953, |
|
"loss": -0.1383, |
|
"rewards/accuracies": 0.574999988079071, |
|
"rewards/chosen": -1.1938213109970093, |
|
"rewards/margins": 1.0082545280456543, |
|
"rewards/rejected": -2.202075958251953, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.9526302015179272, |
|
"grad_norm": 17.121700600376354, |
|
"learning_rate": 3.2374343405217884e-09, |
|
"logits/chosen": -1.6548484563827515, |
|
"logits/rejected": -1.4442580938339233, |
|
"logps/chosen": -1.8814382553100586, |
|
"logps/rejected": -3.200958251953125, |
|
"loss": -0.1119, |
|
"rewards/accuracies": 0.7250000238418579, |
|
"rewards/chosen": -1.8814382553100586, |
|
"rewards/margins": 1.3195202350616455, |
|
"rewards/rejected": -3.200958251953125, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.9630986652708715, |
|
"grad_norm": 71.31324195680072, |
|
"learning_rate": 1.9347820230782295e-09, |
|
"logits/chosen": -1.741633415222168, |
|
"logits/rejected": -1.2778360843658447, |
|
"logps/chosen": -1.1031286716461182, |
|
"logps/rejected": -2.995805025100708, |
|
"loss": -0.1387, |
|
"rewards/accuracies": 0.737500011920929, |
|
"rewards/chosen": -1.1031286716461182, |
|
"rewards/margins": 1.892676591873169, |
|
"rewards/rejected": -2.995805025100708, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.9735671290238157, |
|
"grad_norm": 17.102606051768348, |
|
"learning_rate": 9.64668657069706e-10, |
|
"logits/chosen": -1.6080182790756226, |
|
"logits/rejected": -1.2144018411636353, |
|
"logps/chosen": -1.5425249338150024, |
|
"logps/rejected": -2.9966964721679688, |
|
"loss": -0.1402, |
|
"rewards/accuracies": 0.7749999761581421, |
|
"rewards/chosen": -1.5425249338150024, |
|
"rewards/margins": 1.454171895980835, |
|
"rewards/rejected": -2.9966964721679688, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.98403559277676, |
|
"grad_norm": 13.352840323224417, |
|
"learning_rate": 3.2839470889836627e-10, |
|
"logits/chosen": -1.6037025451660156, |
|
"logits/rejected": -1.2192766666412354, |
|
"logps/chosen": -1.357714295387268, |
|
"logps/rejected": -3.102246046066284, |
|
"loss": -0.1326, |
|
"rewards/accuracies": 0.75, |
|
"rewards/chosen": -1.357714295387268, |
|
"rewards/margins": 1.7445319890975952, |
|
"rewards/rejected": -3.102246046066284, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.9945040565297043, |
|
"grad_norm": 57.54326325826418, |
|
"learning_rate": 2.6813123097352287e-11, |
|
"logits/chosen": -1.570783257484436, |
|
"logits/rejected": -1.3488304615020752, |
|
"logps/chosen": -1.446059226989746, |
|
"logps/rejected": -3.1210341453552246, |
|
"loss": -0.1382, |
|
"rewards/accuracies": 0.6499999761581421, |
|
"rewards/chosen": -1.446059226989746, |
|
"rewards/margins": 1.674975037574768, |
|
"rewards/rejected": -3.1210341453552246, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.998691442030882, |
|
"step": 477, |
|
"total_flos": 0.0, |
|
"train_loss": -0.10643900037436616, |
|
"train_runtime": 11655.0761, |
|
"train_samples_per_second": 5.245, |
|
"train_steps_per_second": 0.041 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 477, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 239, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 0.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |