{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.041235696367822414,
  "eval_steps": 500,
  "global_step": 600,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00048108312429126146,
      "grad_norm": 8.154312133789062,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.8456,
      "step": 7
    },
    {
      "epoch": 0.0009621662485825229,
      "grad_norm": 2.8053348064422607,
      "learning_rate": 8.666666666666668e-06,
      "loss": 1.0127,
      "step": 14
    },
    {
      "epoch": 0.0014432493728737843,
      "grad_norm": 5.31929349899292,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.791,
      "step": 21
    },
    {
      "epoch": 0.0019243324971650458,
      "grad_norm": 5.091348171234131,
      "learning_rate": 1.8e-05,
      "loss": 0.8497,
      "step": 28
    },
    {
      "epoch": 0.002405415621456307,
      "grad_norm": 3.4551076889038086,
      "learning_rate": 2.2666666666666668e-05,
      "loss": 0.5847,
      "step": 35
    },
    {
      "epoch": 0.0028864987457475687,
      "grad_norm": 2.0914766788482666,
      "learning_rate": 2.733333333333333e-05,
      "loss": 0.6838,
      "step": 42
    },
    {
      "epoch": 0.00336758187003883,
      "grad_norm": 4.122292518615723,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 0.5328,
      "step": 49
    },
    {
      "epoch": 0.0038486649943300917,
      "grad_norm": 4.441967964172363,
      "learning_rate": 3.6666666666666666e-05,
      "loss": 0.7609,
      "step": 56
    },
    {
      "epoch": 0.004329748118621354,
      "grad_norm": 2.408400535583496,
      "learning_rate": 4.133333333333333e-05,
      "loss": 0.7353,
      "step": 63
    },
    {
      "epoch": 0.004810831242912614,
      "grad_norm": 4.386236190795898,
      "learning_rate": 4.600000000000001e-05,
      "loss": 0.5822,
      "step": 70
    },
    {
      "epoch": 0.005291914367203876,
      "grad_norm": 2.6810431480407715,
      "learning_rate": 5.0666666666666674e-05,
      "loss": 0.5084,
      "step": 77
    },
    {
      "epoch": 0.005772997491495137,
      "grad_norm": 10.199173927307129,
      "learning_rate": 5.5333333333333334e-05,
      "loss": 1.0656,
      "step": 84
    },
    {
      "epoch": 0.006254080615786399,
      "grad_norm": 2.62958025932312,
      "learning_rate": 6e-05,
      "loss": 0.9499,
      "step": 91
    },
    {
      "epoch": 0.00673516374007766,
      "grad_norm": 3.928619861602783,
      "learning_rate": 6.466666666666666e-05,
      "loss": 0.7763,
      "step": 98
    },
    {
      "epoch": 0.007216246864368922,
      "grad_norm": 7.499195098876953,
      "learning_rate": 6.933333333333334e-05,
      "loss": 1.0431,
      "step": 105
    },
    {
      "epoch": 0.007697329988660183,
      "grad_norm": 1.4210686683654785,
      "learning_rate": 7.4e-05,
      "loss": 0.5441,
      "step": 112
    },
    {
      "epoch": 0.008178413112951446,
      "grad_norm": 3.6974828243255615,
      "learning_rate": 7.866666666666666e-05,
      "loss": 0.6841,
      "step": 119
    },
    {
      "epoch": 0.008659496237242707,
      "grad_norm": 7.652034282684326,
      "learning_rate": 8.333333333333334e-05,
      "loss": 0.8487,
      "step": 126
    },
    {
      "epoch": 0.009140579361533967,
      "grad_norm": 11.174522399902344,
      "learning_rate": 8.800000000000001e-05,
      "loss": 0.9727,
      "step": 133
    },
    {
      "epoch": 0.009621662485825229,
      "grad_norm": 11.040456771850586,
      "learning_rate": 9.266666666666666e-05,
      "loss": 1.122,
      "step": 140
    },
    {
      "epoch": 0.01010274561011649,
      "grad_norm": 8.87497329711914,
      "learning_rate": 9.733333333333335e-05,
      "loss": 1.4102,
      "step": 147
    },
    {
      "epoch": 0.010583828734407752,
      "grad_norm": 9.45786190032959,
      "learning_rate": 9.999878153526974e-05,
      "loss": 1.2933,
      "step": 154
    },
    {
      "epoch": 0.011064911858699013,
      "grad_norm": 8.866625785827637,
      "learning_rate": 9.998646205897309e-05,
      "loss": 1.2747,
      "step": 161
    },
    {
      "epoch": 0.011545994982990275,
      "grad_norm": 6.202242374420166,
      "learning_rate": 9.996087868740243e-05,
      "loss": 1.7031,
      "step": 168
    },
    {
      "epoch": 0.012027078107281536,
      "grad_norm": 10.355546951293945,
      "learning_rate": 9.992203820909906e-05,
      "loss": 1.8137,
      "step": 175
    },
    {
      "epoch": 0.012508161231572798,
      "grad_norm": 8.716798782348633,
      "learning_rate": 9.986995093037421e-05,
      "loss": 1.2634,
      "step": 182
    },
    {
      "epoch": 0.01298924435586406,
      "grad_norm": 11.727725982666016,
      "learning_rate": 9.980463067257437e-05,
      "loss": 0.8589,
      "step": 189
    },
    {
      "epoch": 0.01347032748015532,
      "grad_norm": 7.918148517608643,
      "learning_rate": 9.972609476841367e-05,
      "loss": 1.4014,
      "step": 196
    },
    {
      "epoch": 0.013951410604446582,
      "grad_norm": 28.750410079956055,
      "learning_rate": 9.963436405737476e-05,
      "loss": 1.2997,
      "step": 203
    },
    {
      "epoch": 0.014432493728737844,
      "grad_norm": 15.347841262817383,
      "learning_rate": 9.952946288017899e-05,
      "loss": 1.766,
      "step": 210
    },
    {
      "epoch": 0.014913576853029105,
      "grad_norm": 7.8105950355529785,
      "learning_rate": 9.941141907232765e-05,
      "loss": 1.4616,
      "step": 217
    },
    {
      "epoch": 0.015394659977320367,
      "grad_norm": 16.68589973449707,
      "learning_rate": 9.928026395671576e-05,
      "loss": 1.0106,
      "step": 224
    },
    {
      "epoch": 0.01587574310161163,
      "grad_norm": 14.563543319702148,
      "learning_rate": 9.913603233532067e-05,
      "loss": 1.2966,
      "step": 231
    },
    {
      "epoch": 0.01635682622590289,
      "grad_norm": 11.76324462890625,
      "learning_rate": 9.89787624799672e-05,
      "loss": 1.4801,
      "step": 238
    },
    {
      "epoch": 0.016837909350194153,
      "grad_norm": 10.999220848083496,
      "learning_rate": 9.880849612217238e-05,
      "loss": 1.7505,
      "step": 245
    },
    {
      "epoch": 0.017318992474485415,
      "grad_norm": 17.89532470703125,
      "learning_rate": 9.862527844207189e-05,
      "loss": 0.8774,
      "step": 252
    },
    {
      "epoch": 0.017800075598776673,
      "grad_norm": 7.980606555938721,
      "learning_rate": 9.842915805643155e-05,
      "loss": 1.7281,
      "step": 259
    },
    {
      "epoch": 0.018281158723067934,
      "grad_norm": 17.765323638916016,
      "learning_rate": 9.822018700574695e-05,
      "loss": 1.2403,
      "step": 266
    },
    {
      "epoch": 0.018762241847359196,
      "grad_norm": 18.817676544189453,
      "learning_rate": 9.799842074043439e-05,
      "loss": 1.6112,
      "step": 273
    },
    {
      "epoch": 0.019243324971650457,
      "grad_norm": 15.211094856262207,
      "learning_rate": 9.776391810611718e-05,
      "loss": 0.784,
      "step": 280
    },
    {
      "epoch": 0.01972440809594172,
      "grad_norm": 13.791271209716797,
      "learning_rate": 9.751674132801107e-05,
      "loss": 1.5379,
      "step": 287
    },
    {
      "epoch": 0.02020549122023298,
      "grad_norm": 12.567949295043945,
      "learning_rate": 9.725695599441258e-05,
      "loss": 1.7221,
      "step": 294
    },
    {
      "epoch": 0.02068657434452424,
      "grad_norm": 119.533447265625,
      "learning_rate": 9.698463103929542e-05,
      "loss": 2.089,
      "step": 301
    },
    {
      "epoch": 0.021167657468815503,
      "grad_norm": 11.758546829223633,
      "learning_rate": 9.669983872401867e-05,
      "loss": 1.3068,
      "step": 308
    },
    {
      "epoch": 0.021648740593106765,
      "grad_norm": 14.528461456298828,
      "learning_rate": 9.640265461815234e-05,
      "loss": 1.6841,
      "step": 315
    },
    {
      "epoch": 0.022129823717398026,
      "grad_norm": 12.57625675201416,
      "learning_rate": 9.609315757942503e-05,
      "loss": 1.2772,
      "step": 322
    },
    {
      "epoch": 0.022610906841689288,
      "grad_norm": 20.562517166137695,
      "learning_rate": 9.577142973279896e-05,
      "loss": 1.1584,
      "step": 329
    },
    {
      "epoch": 0.02309198996598055,
      "grad_norm": 15.014174461364746,
      "learning_rate": 9.543755644867822e-05,
      "loss": 1.9943,
      "step": 336
    },
    {
      "epoch": 0.02357307309027181,
      "grad_norm": 3.206474542617798,
      "learning_rate": 9.50916263202557e-05,
      "loss": 1.1005,
      "step": 343
    },
    {
      "epoch": 0.024054156214563072,
      "grad_norm": 20.237760543823242,
      "learning_rate": 9.473373114000492e-05,
      "loss": 1.1736,
      "step": 350
    },
    {
      "epoch": 0.024535239338854334,
      "grad_norm": 23.357084274291992,
      "learning_rate": 9.436396587532296e-05,
      "loss": 1.6269,
      "step": 357
    },
    {
      "epoch": 0.025016322463145595,
      "grad_norm": 25.333106994628906,
      "learning_rate": 9.398242864333083e-05,
      "loss": 1.1378,
      "step": 364
    },
    {
      "epoch": 0.025497405587436857,
      "grad_norm": 28.834636688232422,
      "learning_rate": 9.358922068483812e-05,
      "loss": 0.9558,
      "step": 371
    },
    {
      "epoch": 0.02597848871172812,
      "grad_norm": 10.581332206726074,
      "learning_rate": 9.318444633747883e-05,
      "loss": 1.4568,
      "step": 378
    },
    {
      "epoch": 0.02645957183601938,
      "grad_norm": 22.025737762451172,
      "learning_rate": 9.276821300802534e-05,
      "loss": 1.2215,
      "step": 385
    },
    {
      "epoch": 0.02694065496031064,
      "grad_norm": 23.992765426635742,
      "learning_rate": 9.234063114388809e-05,
      "loss": 2.0521,
      "step": 392
    },
    {
      "epoch": 0.027421738084601903,
      "grad_norm": 18.102312088012695,
      "learning_rate": 9.190181420380836e-05,
      "loss": 1.1839,
      "step": 399
    },
    {
      "epoch": 0.027902821208893164,
      "grad_norm": 21.294204711914062,
      "learning_rate": 9.145187862775209e-05,
      "loss": 1.0416,
      "step": 406
    },
    {
      "epoch": 0.028383904333184426,
      "grad_norm": 11.440023422241211,
      "learning_rate": 9.099094380601244e-05,
      "loss": 0.9043,
      "step": 413
    },
    {
      "epoch": 0.028864987457475687,
      "grad_norm": 18.921781539916992,
      "learning_rate": 9.051913204752972e-05,
      "loss": 1.373,
      "step": 420
    },
    {
      "epoch": 0.02934607058176695,
      "grad_norm": 27.478322982788086,
      "learning_rate": 9.003656854743667e-05,
      "loss": 1.8896,
      "step": 427
    },
    {
      "epoch": 0.02982715370605821,
      "grad_norm": 24.056333541870117,
      "learning_rate": 8.954338135383804e-05,
      "loss": 1.8207,
      "step": 434
    },
    {
      "epoch": 0.030308236830349472,
      "grad_norm": 29.121618270874023,
      "learning_rate": 8.903970133383297e-05,
      "loss": 2.3176,
      "step": 441
    },
    {
      "epoch": 0.030789319954640734,
      "grad_norm": 24.770221710205078,
      "learning_rate": 8.852566213878947e-05,
      "loss": 1.4976,
      "step": 448
    },
    {
      "epoch": 0.031270403078931995,
      "grad_norm": 12.233966827392578,
      "learning_rate": 8.800140016888009e-05,
      "loss": 1.1954,
      "step": 455
    },
    {
      "epoch": 0.03175148620322326,
      "grad_norm": 28.35350799560547,
      "learning_rate": 8.746705453688814e-05,
      "loss": 1.8075,
      "step": 462
    },
    {
      "epoch": 0.03223256932751452,
      "grad_norm": 15.999966621398926,
      "learning_rate": 8.692276703129421e-05,
      "loss": 1.3143,
      "step": 469
    },
    {
      "epoch": 0.03271365245180578,
      "grad_norm": 10.898785591125488,
      "learning_rate": 8.636868207865244e-05,
      "loss": 1.2549,
      "step": 476
    },
    {
      "epoch": 0.03319473557609704,
      "grad_norm": 26.46807861328125,
      "learning_rate": 8.580494670526725e-05,
      "loss": 1.5415,
      "step": 483
    },
    {
      "epoch": 0.033675818700388306,
      "grad_norm": 33.8633918762207,
      "learning_rate": 8.523171049817974e-05,
      "loss": 2.2099,
      "step": 490
    },
    {
      "epoch": 0.034156901824679564,
      "grad_norm": 22.85337257385254,
      "learning_rate": 8.464912556547486e-05,
      "loss": 1.7322,
      "step": 497
    },
    {
      "epoch": 0.03463798494897083,
      "grad_norm": 39.19854736328125,
      "learning_rate": 8.405734649591963e-05,
      "loss": 1.7439,
      "step": 504
    },
    {
      "epoch": 0.03511906807326209,
      "grad_norm": 24.207155227661133,
      "learning_rate": 8.345653031794292e-05,
      "loss": 2.3688,
      "step": 511
    },
    {
      "epoch": 0.035600151197553345,
      "grad_norm": 13.661985397338867,
      "learning_rate": 8.284683645796813e-05,
      "loss": 1.2163,
      "step": 518
    },
    {
      "epoch": 0.03608123432184461,
      "grad_norm": 13.821810722351074,
      "learning_rate": 8.222842669810935e-05,
      "loss": 2.1381,
      "step": 525
    },
    {
      "epoch": 0.03656231744613587,
      "grad_norm": 17.461071014404297,
      "learning_rate": 8.160146513324254e-05,
      "loss": 1.834,
      "step": 532
    },
    {
      "epoch": 0.03704340057042713,
      "grad_norm": 27.663408279418945,
      "learning_rate": 8.096611812746301e-05,
      "loss": 1.7051,
      "step": 539
    },
    {
      "epoch": 0.03752448369471839,
      "grad_norm": 20.866432189941406,
      "learning_rate": 8.032255426994069e-05,
      "loss": 1.5578,
      "step": 546
    },
    {
      "epoch": 0.038005566819009656,
      "grad_norm": 18.067535400390625,
      "learning_rate": 7.967094433018508e-05,
      "loss": 1.9054,
      "step": 553
    },
    {
      "epoch": 0.038486649943300914,
      "grad_norm": 19.965730667114258,
      "learning_rate": 7.901146121273164e-05,
      "loss": 1.2985,
      "step": 560
    },
    {
      "epoch": 0.03896773306759218,
      "grad_norm": 13.658997535705566,
      "learning_rate": 7.834427991126155e-05,
      "loss": 1.419,
      "step": 567
    },
    {
      "epoch": 0.03944881619188344,
      "grad_norm": 53.16463088989258,
      "learning_rate": 7.766957746216721e-05,
      "loss": 1.7695,
      "step": 574
    },
    {
      "epoch": 0.0399298993161747,
      "grad_norm": 17.96440887451172,
      "learning_rate": 7.698753289757565e-05,
      "loss": 2.298,
      "step": 581
    },
    {
      "epoch": 0.04041098244046596,
      "grad_norm": 34.66753387451172,
      "learning_rate": 7.629832719784245e-05,
      "loss": 1.3731,
      "step": 588
    },
    {
      "epoch": 0.040892065564757225,
      "grad_norm": 15.050975799560547,
      "learning_rate": 7.560214324352858e-05,
      "loss": 1.7391,
      "step": 595
    }
  ],
  "logging_steps": 7,
  "max_steps": 1500,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 300,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.259314668896256e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}