|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.146177459435755,
  "eval_steps": 500,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00146177459435755,
      "grad_norm": 1.1480557918548584,
      "learning_rate": 2.4752475247524754e-05,
      "loss": 2.0355,
      "step": 10
    },
    {
      "epoch": 0.0029235491887151,
      "grad_norm": 1.2355608940124512,
      "learning_rate": 2.4502450245024505e-05,
      "loss": 0.9343,
      "step": 20
    },
    {
      "epoch": 0.00438532378307265,
      "grad_norm": 1.6818450689315796,
      "learning_rate": 2.4252425242524256e-05,
      "loss": 0.294,
      "step": 30
    },
    {
      "epoch": 0.0058470983774302,
      "grad_norm": 0.5871298909187317,
      "learning_rate": 2.4002400240024003e-05,
      "loss": 0.2372,
      "step": 40
    },
    {
      "epoch": 0.00730887297178775,
      "grad_norm": 0.7146134972572327,
      "learning_rate": 2.3752375237523754e-05,
      "loss": 0.2836,
      "step": 50
    },
    {
      "epoch": 0.0087706475661453,
      "grad_norm": 0.7070990800857544,
      "learning_rate": 2.3502350235023505e-05,
      "loss": 0.2088,
      "step": 60
    },
    {
      "epoch": 0.01023242216050285,
      "grad_norm": 0.4255610704421997,
      "learning_rate": 2.3252325232523252e-05,
      "loss": 0.1978,
      "step": 70
    },
    {
      "epoch": 0.0116941967548604,
      "grad_norm": 0.49717941880226135,
      "learning_rate": 2.3002300230023006e-05,
      "loss": 0.1968,
      "step": 80
    },
    {
      "epoch": 0.01315597134921795,
      "grad_norm": 0.40119174122810364,
      "learning_rate": 2.2752275227522754e-05,
      "loss": 0.1913,
      "step": 90
    },
    {
      "epoch": 0.0146177459435755,
      "grad_norm": 0.7866990566253662,
      "learning_rate": 2.2502250225022505e-05,
      "loss": 0.2131,
      "step": 100
    },
    {
      "epoch": 0.016079520537933052,
      "grad_norm": 0.8951889872550964,
      "learning_rate": 2.2252225222522255e-05,
      "loss": 0.1765,
      "step": 110
    },
    {
      "epoch": 0.0175412951322906,
      "grad_norm": 0.29221057891845703,
      "learning_rate": 2.2002200220022003e-05,
      "loss": 0.1407,
      "step": 120
    },
    {
      "epoch": 0.019003069726648152,
      "grad_norm": 0.49533528089523315,
      "learning_rate": 2.1752175217521754e-05,
      "loss": 0.1644,
      "step": 130
    },
    {
      "epoch": 0.0204648443210057,
      "grad_norm": 0.3729006350040436,
      "learning_rate": 2.1502150215021505e-05,
      "loss": 0.099,
      "step": 140
    },
    {
      "epoch": 0.021926618915363252,
      "grad_norm": 0.8305023312568665,
      "learning_rate": 2.1252125212521255e-05,
      "loss": 0.1278,
      "step": 150
    },
    {
      "epoch": 0.0233883935097208,
      "grad_norm": 0.4704670011997223,
      "learning_rate": 2.1002100210021003e-05,
      "loss": 0.1521,
      "step": 160
    },
    {
      "epoch": 0.024850168104078352,
      "grad_norm": 0.787459135055542,
      "learning_rate": 2.0752075207520754e-05,
      "loss": 0.1326,
      "step": 170
    },
    {
      "epoch": 0.0263119426984359,
      "grad_norm": 0.5290590524673462,
      "learning_rate": 2.0502050205020504e-05,
      "loss": 0.1146,
      "step": 180
    },
    {
      "epoch": 0.027773717292793453,
      "grad_norm": 0.46197742223739624,
      "learning_rate": 2.0252025202520252e-05,
      "loss": 0.1278,
      "step": 190
    },
    {
      "epoch": 0.029235491887151,
      "grad_norm": 0.6425529718399048,
      "learning_rate": 2.0002000200020006e-05,
      "loss": 0.1063,
      "step": 200
    },
    {
      "epoch": 0.030697266481508553,
      "grad_norm": 0.7542579770088196,
      "learning_rate": 1.9751975197519753e-05,
      "loss": 0.117,
      "step": 210
    },
    {
      "epoch": 0.032159041075866104,
      "grad_norm": 0.38565704226493835,
      "learning_rate": 1.9501950195019504e-05,
      "loss": 0.1298,
      "step": 220
    },
    {
      "epoch": 0.03362081567022365,
      "grad_norm": 0.31743547320365906,
      "learning_rate": 1.9251925192519255e-05,
      "loss": 0.1127,
      "step": 230
    },
    {
      "epoch": 0.0350825902645812,
      "grad_norm": 0.9804937839508057,
      "learning_rate": 1.9001900190019003e-05,
      "loss": 0.1271,
      "step": 240
    },
    {
      "epoch": 0.03654436485893875,
      "grad_norm": 0.5180727243423462,
      "learning_rate": 1.8751875187518753e-05,
      "loss": 0.1222,
      "step": 250
    },
    {
      "epoch": 0.038006139453296305,
      "grad_norm": 0.5256487131118774,
      "learning_rate": 1.8501850185018504e-05,
      "loss": 0.0975,
      "step": 260
    },
    {
      "epoch": 0.03946791404765385,
      "grad_norm": 0.3315429091453552,
      "learning_rate": 1.8251825182518255e-05,
      "loss": 0.0949,
      "step": 270
    },
    {
      "epoch": 0.0409296886420114,
      "grad_norm": 0.4036615788936615,
      "learning_rate": 1.8001800180018002e-05,
      "loss": 0.1127,
      "step": 280
    },
    {
      "epoch": 0.04239146323636895,
      "grad_norm": 0.35830047726631165,
      "learning_rate": 1.7751775177517753e-05,
      "loss": 0.1042,
      "step": 290
    },
    {
      "epoch": 0.043853237830726505,
      "grad_norm": 0.7400040626525879,
      "learning_rate": 1.7501750175017504e-05,
      "loss": 0.1095,
      "step": 300
    },
    {
      "epoch": 0.04531501242508405,
      "grad_norm": 0.4160325229167938,
      "learning_rate": 1.725172517251725e-05,
      "loss": 0.0921,
      "step": 310
    },
    {
      "epoch": 0.0467767870194416,
      "grad_norm": 0.6937474012374878,
      "learning_rate": 1.7001700170017006e-05,
      "loss": 0.1096,
      "step": 320
    },
    {
      "epoch": 0.04823856161379915,
      "grad_norm": 0.3352717161178589,
      "learning_rate": 1.6751675167516753e-05,
      "loss": 0.0958,
      "step": 330
    },
    {
      "epoch": 0.049700336208156705,
      "grad_norm": 0.5310590267181396,
      "learning_rate": 1.6501650165016504e-05,
      "loss": 0.0866,
      "step": 340
    },
    {
      "epoch": 0.05116211080251425,
      "grad_norm": 0.44820308685302734,
      "learning_rate": 1.6251625162516255e-05,
      "loss": 0.0768,
      "step": 350
    },
    {
      "epoch": 0.0526238853968718,
      "grad_norm": 0.3603011965751648,
      "learning_rate": 1.6001600160016002e-05,
      "loss": 0.1096,
      "step": 360
    },
    {
      "epoch": 0.05408565999122935,
      "grad_norm": 0.4311069846153259,
      "learning_rate": 1.5751575157515753e-05,
      "loss": 0.0957,
      "step": 370
    },
    {
      "epoch": 0.055547434585586905,
      "grad_norm": 0.36236947774887085,
      "learning_rate": 1.5501550155015504e-05,
      "loss": 0.1151,
      "step": 380
    },
    {
      "epoch": 0.05700920917994445,
      "grad_norm": 0.405987948179245,
      "learning_rate": 1.5251525152515253e-05,
      "loss": 0.1184,
      "step": 390
    },
    {
      "epoch": 0.058470983774302,
      "grad_norm": 0.7587026357650757,
      "learning_rate": 1.5001500150015002e-05,
      "loss": 0.0836,
      "step": 400
    },
    {
      "epoch": 0.05993275836865955,
      "grad_norm": 0.7230392694473267,
      "learning_rate": 1.4751475147514754e-05,
      "loss": 0.0796,
      "step": 410
    },
    {
      "epoch": 0.061394532963017105,
      "grad_norm": 0.3412224352359772,
      "learning_rate": 1.4501450145014504e-05,
      "loss": 0.0883,
      "step": 420
    },
    {
      "epoch": 0.06285630755737465,
      "grad_norm": 0.4818565845489502,
      "learning_rate": 1.4251425142514253e-05,
      "loss": 0.0683,
      "step": 430
    },
    {
      "epoch": 0.06431808215173221,
      "grad_norm": 0.6868996620178223,
      "learning_rate": 1.4001400140014002e-05,
      "loss": 0.0923,
      "step": 440
    },
    {
      "epoch": 0.06577985674608976,
      "grad_norm": 0.3734653890132904,
      "learning_rate": 1.3751375137513753e-05,
      "loss": 0.0841,
      "step": 450
    },
    {
      "epoch": 0.0672416313404473,
      "grad_norm": 0.4107135534286499,
      "learning_rate": 1.3501350135013502e-05,
      "loss": 0.0797,
      "step": 460
    },
    {
      "epoch": 0.06870340593480485,
      "grad_norm": 0.43387386202812195,
      "learning_rate": 1.3251325132513251e-05,
      "loss": 0.0721,
      "step": 470
    },
    {
      "epoch": 0.0701651805291624,
      "grad_norm": 0.5933758020401001,
      "learning_rate": 1.3001300130013003e-05,
      "loss": 0.1408,
      "step": 480
    },
    {
      "epoch": 0.07162695512351995,
      "grad_norm": 0.42749252915382385,
      "learning_rate": 1.2751275127512753e-05,
      "loss": 0.0926,
      "step": 490
    },
    {
      "epoch": 0.0730887297178775,
      "grad_norm": 0.5452368855476379,
      "learning_rate": 1.2501250125012502e-05,
      "loss": 0.0871,
      "step": 500
    },
    {
      "epoch": 0.07455050431223505,
      "grad_norm": 0.42915862798690796,
      "learning_rate": 1.2251225122512252e-05,
      "loss": 0.0779,
      "step": 510
    },
    {
      "epoch": 0.07601227890659261,
      "grad_norm": 0.5489547252655029,
      "learning_rate": 1.2001200120012002e-05,
      "loss": 0.0649,
      "step": 520
    },
    {
      "epoch": 0.07747405350095016,
      "grad_norm": 0.3804190158843994,
      "learning_rate": 1.1751175117511752e-05,
      "loss": 0.0692,
      "step": 530
    },
    {
      "epoch": 0.0789358280953077,
      "grad_norm": 0.4363003075122833,
      "learning_rate": 1.1501150115011503e-05,
      "loss": 0.1051,
      "step": 540
    },
    {
      "epoch": 0.08039760268966525,
      "grad_norm": 0.352827787399292,
      "learning_rate": 1.1251125112511252e-05,
      "loss": 0.0924,
      "step": 550
    },
    {
      "epoch": 0.0818593772840228,
      "grad_norm": 0.3126923143863678,
      "learning_rate": 1.1001100110011001e-05,
      "loss": 0.08,
      "step": 560
    },
    {
      "epoch": 0.08332115187838035,
      "grad_norm": 0.8426673412322998,
      "learning_rate": 1.0751075107510752e-05,
      "loss": 0.0856,
      "step": 570
    },
    {
      "epoch": 0.0847829264727379,
      "grad_norm": 0.862802267074585,
      "learning_rate": 1.0501050105010501e-05,
      "loss": 0.0839,
      "step": 580
    },
    {
      "epoch": 0.08624470106709545,
      "grad_norm": 0.3358764946460724,
      "learning_rate": 1.0251025102510252e-05,
      "loss": 0.0661,
      "step": 590
    },
    {
      "epoch": 0.08770647566145301,
      "grad_norm": 0.27090683579444885,
      "learning_rate": 1.0001000100010003e-05,
      "loss": 0.0756,
      "step": 600
    },
    {
      "epoch": 0.08916825025581056,
      "grad_norm": 0.2274910807609558,
      "learning_rate": 9.750975097509752e-06,
      "loss": 0.0803,
      "step": 610
    },
    {
      "epoch": 0.0906300248501681,
      "grad_norm": 0.38698479533195496,
      "learning_rate": 9.500950095009501e-06,
      "loss": 0.0792,
      "step": 620
    },
    {
      "epoch": 0.09209179944452565,
      "grad_norm": 0.4514479637145996,
      "learning_rate": 9.250925092509252e-06,
      "loss": 0.0732,
      "step": 630
    },
    {
      "epoch": 0.0935535740388832,
      "grad_norm": 0.5197333693504333,
      "learning_rate": 9.000900090009001e-06,
      "loss": 0.0669,
      "step": 640
    },
    {
      "epoch": 0.09501534863324075,
      "grad_norm": 0.4484429657459259,
      "learning_rate": 8.750875087508752e-06,
      "loss": 0.0842,
      "step": 650
    },
    {
      "epoch": 0.0964771232275983,
      "grad_norm": 0.2479228973388672,
      "learning_rate": 8.500850085008503e-06,
      "loss": 0.0599,
      "step": 660
    },
    {
      "epoch": 0.09793889782195585,
      "grad_norm": 0.46314725279808044,
      "learning_rate": 8.250825082508252e-06,
      "loss": 0.0707,
      "step": 670
    },
    {
      "epoch": 0.09940067241631341,
      "grad_norm": 0.33118072152137756,
      "learning_rate": 8.000800080008001e-06,
      "loss": 0.0742,
      "step": 680
    },
    {
      "epoch": 0.10086244701067096,
      "grad_norm": 0.4030800759792328,
      "learning_rate": 7.750775077507752e-06,
      "loss": 0.0759,
      "step": 690
    },
    {
      "epoch": 0.1023242216050285,
      "grad_norm": 0.31615331768989563,
      "learning_rate": 7.500750075007501e-06,
      "loss": 0.0912,
      "step": 700
    },
    {
      "epoch": 0.10378599619938605,
      "grad_norm": 0.32550713419914246,
      "learning_rate": 7.250725072507252e-06,
      "loss": 0.0864,
      "step": 710
    },
    {
      "epoch": 0.1052477707937436,
      "grad_norm": 0.47825929522514343,
      "learning_rate": 7.000700070007001e-06,
      "loss": 0.0735,
      "step": 720
    },
    {
      "epoch": 0.10670954538810115,
      "grad_norm": 0.534595787525177,
      "learning_rate": 6.750675067506751e-06,
      "loss": 0.0742,
      "step": 730
    },
    {
      "epoch": 0.1081713199824587,
      "grad_norm": 0.5879881978034973,
      "learning_rate": 6.500650065006502e-06,
      "loss": 0.0816,
      "step": 740
    },
    {
      "epoch": 0.10963309457681626,
      "grad_norm": 0.7548266053199768,
      "learning_rate": 6.250625062506251e-06,
      "loss": 0.0612,
      "step": 750
    },
    {
      "epoch": 0.11109486917117381,
      "grad_norm": 1.1165082454681396,
      "learning_rate": 6.000600060006001e-06,
      "loss": 0.0766,
      "step": 760
    },
    {
      "epoch": 0.11255664376553136,
      "grad_norm": 0.5112439393997192,
      "learning_rate": 5.750575057505752e-06,
      "loss": 0.0628,
      "step": 770
    },
    {
      "epoch": 0.1140184183598889,
      "grad_norm": 0.416564017534256,
      "learning_rate": 5.500550055005501e-06,
      "loss": 0.0751,
      "step": 780
    },
    {
      "epoch": 0.11548019295424646,
      "grad_norm": 0.9259045124053955,
      "learning_rate": 5.250525052505251e-06,
      "loss": 0.1037,
      "step": 790
    },
    {
      "epoch": 0.116941967548604,
      "grad_norm": 0.3240310847759247,
      "learning_rate": 5.0005000500050015e-06,
      "loss": 0.0826,
      "step": 800
    },
    {
      "epoch": 0.11840374214296155,
      "grad_norm": 0.32784464955329895,
      "learning_rate": 4.750475047504751e-06,
      "loss": 0.0849,
      "step": 810
    },
    {
      "epoch": 0.1198655167373191,
      "grad_norm": 0.198726087808609,
      "learning_rate": 4.500450045004501e-06,
      "loss": 0.0554,
      "step": 820
    },
    {
      "epoch": 0.12132729133167666,
      "grad_norm": 0.47690388560295105,
      "learning_rate": 4.250425042504251e-06,
      "loss": 0.07,
      "step": 830
    },
    {
      "epoch": 0.12278906592603421,
      "grad_norm": 0.6009281873703003,
      "learning_rate": 4.0004000400040005e-06,
      "loss": 0.1019,
      "step": 840
    },
    {
      "epoch": 0.12425084052039176,
      "grad_norm": 0.37418490648269653,
      "learning_rate": 3.7503750375037505e-06,
      "loss": 0.0935,
      "step": 850
    },
    {
      "epoch": 0.1257126151147493,
      "grad_norm": 0.36421412229537964,
      "learning_rate": 3.5003500350035005e-06,
      "loss": 0.0883,
      "step": 860
    },
    {
      "epoch": 0.12717438970910686,
      "grad_norm": 0.44606733322143555,
      "learning_rate": 3.250325032503251e-06,
      "loss": 0.1029,
      "step": 870
    },
    {
      "epoch": 0.12863616430346442,
      "grad_norm": 0.4448641836643219,
      "learning_rate": 3.0003000300030004e-06,
      "loss": 0.0802,
      "step": 880
    },
    {
      "epoch": 0.13009793889782195,
      "grad_norm": 0.42409053444862366,
      "learning_rate": 2.7502750275027504e-06,
      "loss": 0.0688,
      "step": 890
    },
    {
      "epoch": 0.13155971349217951,
      "grad_norm": 0.43361061811447144,
      "learning_rate": 2.5002500250025008e-06,
      "loss": 0.0575,
      "step": 900
    },
    {
      "epoch": 0.13302148808653705,
      "grad_norm": 0.3293386995792389,
      "learning_rate": 2.2502250225022503e-06,
      "loss": 0.0547,
      "step": 910
    },
    {
      "epoch": 0.1344832626808946,
      "grad_norm": 1.1192256212234497,
      "learning_rate": 2.0002000200020003e-06,
      "loss": 0.0809,
      "step": 920
    },
    {
      "epoch": 0.13594503727525215,
      "grad_norm": 0.45779263973236084,
      "learning_rate": 1.7501750175017502e-06,
      "loss": 0.0993,
      "step": 930
    },
    {
      "epoch": 0.1374068118696097,
      "grad_norm": 0.3892732560634613,
      "learning_rate": 1.5001500150015002e-06,
      "loss": 0.0467,
      "step": 940
    },
    {
      "epoch": 0.13886858646396727,
      "grad_norm": 0.621871829032898,
      "learning_rate": 1.2501250125012504e-06,
      "loss": 0.0756,
      "step": 950
    },
    {
      "epoch": 0.1403303610583248,
      "grad_norm": 0.37552908062934875,
      "learning_rate": 1.0001000100010001e-06,
      "loss": 0.0774,
      "step": 960
    },
    {
      "epoch": 0.14179213565268237,
      "grad_norm": 0.42678821086883545,
      "learning_rate": 7.500750075007501e-07,
      "loss": 0.0657,
      "step": 970
    },
    {
      "epoch": 0.1432539102470399,
      "grad_norm": 0.552952766418457,
      "learning_rate": 5.000500050005001e-07,
      "loss": 0.0517,
      "step": 980
    },
    {
      "epoch": 0.14471568484139746,
      "grad_norm": 0.2972164750099182,
      "learning_rate": 2.5002500250025003e-07,
      "loss": 0.0597,
      "step": 990
    },
    {
      "epoch": 0.146177459435755,
      "grad_norm": 0.501569926738739,
      "learning_rate": 0.0,
      "loss": 0.0774,
      "step": 1000
    }
  ],
  "logging_steps": 10,
  "max_steps": 1000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2645444001792000.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}
|
|