{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.020617848183911207,
  "eval_steps": 500,
  "global_step": 300,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00048108312429126146,
      "grad_norm": 8.154312133789062,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.8456,
      "step": 7
    },
    {
      "epoch": 0.0009621662485825229,
      "grad_norm": 2.8053348064422607,
      "learning_rate": 8.666666666666668e-06,
      "loss": 1.0127,
      "step": 14
    },
    {
      "epoch": 0.0014432493728737843,
      "grad_norm": 5.31929349899292,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.791,
      "step": 21
    },
    {
      "epoch": 0.0019243324971650458,
      "grad_norm": 5.091348171234131,
      "learning_rate": 1.8e-05,
      "loss": 0.8497,
      "step": 28
    },
    {
      "epoch": 0.002405415621456307,
      "grad_norm": 3.4551076889038086,
      "learning_rate": 2.2666666666666668e-05,
      "loss": 0.5847,
      "step": 35
    },
    {
      "epoch": 0.0028864987457475687,
      "grad_norm": 2.0914766788482666,
      "learning_rate": 2.733333333333333e-05,
      "loss": 0.6838,
      "step": 42
    },
    {
      "epoch": 0.00336758187003883,
      "grad_norm": 4.122292518615723,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 0.5328,
      "step": 49
    },
    {
      "epoch": 0.0038486649943300917,
      "grad_norm": 4.441967964172363,
      "learning_rate": 3.6666666666666666e-05,
      "loss": 0.7609,
      "step": 56
    },
    {
      "epoch": 0.004329748118621354,
      "grad_norm": 2.408400535583496,
      "learning_rate": 4.133333333333333e-05,
      "loss": 0.7353,
      "step": 63
    },
    {
      "epoch": 0.004810831242912614,
      "grad_norm": 4.386236190795898,
      "learning_rate": 4.600000000000001e-05,
      "loss": 0.5822,
      "step": 70
    },
    {
      "epoch": 0.005291914367203876,
      "grad_norm": 2.6810431480407715,
      "learning_rate": 5.0666666666666674e-05,
      "loss": 0.5084,
      "step": 77
    },
    {
      "epoch": 0.005772997491495137,
      "grad_norm": 10.199173927307129,
      "learning_rate": 5.5333333333333334e-05,
      "loss": 1.0656,
      "step": 84
    },
    {
      "epoch": 0.006254080615786399,
      "grad_norm": 2.62958025932312,
      "learning_rate": 6e-05,
      "loss": 0.9499,
      "step": 91
    },
    {
      "epoch": 0.00673516374007766,
      "grad_norm": 3.928619861602783,
      "learning_rate": 6.466666666666666e-05,
      "loss": 0.7763,
      "step": 98
    },
    {
      "epoch": 0.007216246864368922,
      "grad_norm": 7.499195098876953,
      "learning_rate": 6.933333333333334e-05,
      "loss": 1.0431,
      "step": 105
    },
    {
      "epoch": 0.007697329988660183,
      "grad_norm": 1.4210686683654785,
      "learning_rate": 7.4e-05,
      "loss": 0.5441,
      "step": 112
    },
    {
      "epoch": 0.008178413112951446,
      "grad_norm": 3.6974828243255615,
      "learning_rate": 7.866666666666666e-05,
      "loss": 0.6841,
      "step": 119
    },
    {
      "epoch": 0.008659496237242707,
      "grad_norm": 7.652034282684326,
      "learning_rate": 8.333333333333334e-05,
      "loss": 0.8487,
      "step": 126
    },
    {
      "epoch": 0.009140579361533967,
      "grad_norm": 11.174522399902344,
      "learning_rate": 8.800000000000001e-05,
      "loss": 0.9727,
      "step": 133
    },
    {
      "epoch": 0.009621662485825229,
      "grad_norm": 11.040456771850586,
      "learning_rate": 9.266666666666666e-05,
      "loss": 1.122,
      "step": 140
    },
    {
      "epoch": 0.01010274561011649,
      "grad_norm": 8.87497329711914,
      "learning_rate": 9.733333333333335e-05,
      "loss": 1.4102,
      "step": 147
    },
    {
      "epoch": 0.010583828734407752,
      "grad_norm": 9.45786190032959,
      "learning_rate": 9.999878153526974e-05,
      "loss": 1.2933,
      "step": 154
    },
    {
      "epoch": 0.011064911858699013,
      "grad_norm": 8.866625785827637,
      "learning_rate": 9.998646205897309e-05,
      "loss": 1.2747,
      "step": 161
    },
    {
      "epoch": 0.011545994982990275,
      "grad_norm": 6.202242374420166,
      "learning_rate": 9.996087868740243e-05,
      "loss": 1.7031,
      "step": 168
    },
    {
      "epoch": 0.012027078107281536,
      "grad_norm": 10.355546951293945,
      "learning_rate": 9.992203820909906e-05,
      "loss": 1.8137,
      "step": 175
    },
    {
      "epoch": 0.012508161231572798,
      "grad_norm": 8.716798782348633,
      "learning_rate": 9.986995093037421e-05,
      "loss": 1.2634,
      "step": 182
    },
    {
      "epoch": 0.01298924435586406,
      "grad_norm": 11.727725982666016,
      "learning_rate": 9.980463067257437e-05,
      "loss": 0.8589,
      "step": 189
    },
    {
      "epoch": 0.01347032748015532,
      "grad_norm": 7.918148517608643,
      "learning_rate": 9.972609476841367e-05,
      "loss": 1.4014,
      "step": 196
    },
    {
      "epoch": 0.013951410604446582,
      "grad_norm": 28.750410079956055,
      "learning_rate": 9.963436405737476e-05,
      "loss": 1.2997,
      "step": 203
    },
    {
      "epoch": 0.014432493728737844,
      "grad_norm": 15.347841262817383,
      "learning_rate": 9.952946288017899e-05,
      "loss": 1.766,
      "step": 210
    },
    {
      "epoch": 0.014913576853029105,
      "grad_norm": 7.8105950355529785,
      "learning_rate": 9.941141907232765e-05,
      "loss": 1.4616,
      "step": 217
    },
    {
      "epoch": 0.015394659977320367,
      "grad_norm": 16.68589973449707,
      "learning_rate": 9.928026395671576e-05,
      "loss": 1.0106,
      "step": 224
    },
    {
      "epoch": 0.01587574310161163,
      "grad_norm": 14.563543319702148,
      "learning_rate": 9.913603233532067e-05,
      "loss": 1.2966,
      "step": 231
    },
    {
      "epoch": 0.01635682622590289,
      "grad_norm": 11.76324462890625,
      "learning_rate": 9.89787624799672e-05,
      "loss": 1.4801,
      "step": 238
    },
    {
      "epoch": 0.016837909350194153,
      "grad_norm": 10.999220848083496,
      "learning_rate": 9.880849612217238e-05,
      "loss": 1.7505,
      "step": 245
    },
    {
      "epoch": 0.017318992474485415,
      "grad_norm": 17.89532470703125,
      "learning_rate": 9.862527844207189e-05,
      "loss": 0.8774,
      "step": 252
    },
    {
      "epoch": 0.017800075598776673,
      "grad_norm": 7.980606555938721,
      "learning_rate": 9.842915805643155e-05,
      "loss": 1.7281,
      "step": 259
    },
    {
      "epoch": 0.018281158723067934,
      "grad_norm": 17.765323638916016,
      "learning_rate": 9.822018700574695e-05,
      "loss": 1.2403,
      "step": 266
    },
    {
      "epoch": 0.018762241847359196,
      "grad_norm": 18.817676544189453,
      "learning_rate": 9.799842074043439e-05,
      "loss": 1.6112,
      "step": 273
    },
    {
      "epoch": 0.019243324971650457,
      "grad_norm": 15.211094856262207,
      "learning_rate": 9.776391810611718e-05,
      "loss": 0.784,
      "step": 280
    },
    {
      "epoch": 0.01972440809594172,
      "grad_norm": 13.791271209716797,
      "learning_rate": 9.751674132801107e-05,
      "loss": 1.5379,
      "step": 287
    },
    {
      "epoch": 0.02020549122023298,
      "grad_norm": 12.567949295043945,
      "learning_rate": 9.725695599441258e-05,
      "loss": 1.7221,
      "step": 294
    }
  ],
  "logging_steps": 7,
  "max_steps": 1500,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 300,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 6.29657334448128e+16,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}