{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 6.897196261682243,
  "eval_steps": 500,
  "global_step": 420,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.08307372793354102,
      "grad_norm": 0.5125107169151306,
      "learning_rate": 2.0833333333333334e-06,
      "loss": 3.5515,
      "step": 5
    },
    {
      "epoch": 0.16614745586708204,
      "grad_norm": 0.4532736539840698,
      "learning_rate": 4.166666666666667e-06,
      "loss": 3.6807,
      "step": 10
    },
    {
      "epoch": 0.24922118380062305,
      "grad_norm": 0.4483909606933594,
      "learning_rate": 6.25e-06,
      "loss": 3.5357,
      "step": 15
    },
    {
      "epoch": 0.3322949117341641,
      "grad_norm": 0.5020395517349243,
      "learning_rate": 8.333333333333334e-06,
      "loss": 3.645,
      "step": 20
    },
    {
      "epoch": 0.4153686396677051,
      "grad_norm": 0.8230155110359192,
      "learning_rate": 1.0416666666666668e-05,
      "loss": 3.5682,
      "step": 25
    },
    {
      "epoch": 0.4984423676012461,
      "grad_norm": 1.3333394527435303,
      "learning_rate": 1.25e-05,
      "loss": 3.5155,
      "step": 30
    },
    {
      "epoch": 0.5815160955347871,
      "grad_norm": 0.7118450999259949,
      "learning_rate": 1.4583333333333335e-05,
      "loss": 3.4724,
      "step": 35
    },
    {
      "epoch": 0.6645898234683282,
      "grad_norm": 1.0282073020935059,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 3.4205,
      "step": 40
    },
    {
      "epoch": 0.7476635514018691,
      "grad_norm": 1.0468826293945312,
      "learning_rate": 1.8750000000000002e-05,
      "loss": 3.3295,
      "step": 45
    },
    {
      "epoch": 0.8307372793354102,
      "grad_norm": 0.8730693459510803,
      "learning_rate": 2.0833333333333336e-05,
      "loss": 3.0831,
      "step": 50
    },
    {
      "epoch": 0.9138110072689511,
      "grad_norm": 0.9377574324607849,
      "learning_rate": 2.2916666666666667e-05,
      "loss": 2.8412,
      "step": 55
    },
    {
      "epoch": 0.9968847352024922,
      "grad_norm": 1.0129190683364868,
      "learning_rate": 2.5e-05,
      "loss": 2.3229,
      "step": 60
    },
    {
      "epoch": 1.066458982346833,
      "grad_norm": 0.8808080554008484,
      "learning_rate": 2.7083333333333332e-05,
      "loss": 2.0489,
      "step": 65
    },
    {
      "epoch": 1.1495327102803738,
      "grad_norm": 0.8092290759086609,
      "learning_rate": 2.916666666666667e-05,
      "loss": 1.9307,
      "step": 70
    },
    {
      "epoch": 1.2326064382139148,
      "grad_norm": 0.8002105951309204,
      "learning_rate": 3.125e-05,
      "loss": 1.7402,
      "step": 75
    },
    {
      "epoch": 1.3156801661474558,
      "grad_norm": 0.8581973314285278,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 1.6696,
      "step": 80
    },
    {
      "epoch": 1.398753894080997,
      "grad_norm": 0.7443532347679138,
      "learning_rate": 3.541666666666667e-05,
      "loss": 1.5175,
      "step": 85
    },
    {
      "epoch": 1.4818276220145379,
      "grad_norm": 0.9584633708000183,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 1.3968,
      "step": 90
    },
    {
      "epoch": 1.5649013499480788,
      "grad_norm": 0.8683136105537415,
      "learning_rate": 3.958333333333333e-05,
      "loss": 1.299,
      "step": 95
    },
    {
      "epoch": 1.64797507788162,
      "grad_norm": 0.7825261950492859,
      "learning_rate": 4.166666666666667e-05,
      "loss": 1.1271,
      "step": 100
    },
    {
      "epoch": 1.731048805815161,
      "grad_norm": 0.654670000076294,
      "learning_rate": 4.375e-05,
      "loss": 1.14,
      "step": 105
    },
    {
      "epoch": 1.814122533748702,
      "grad_norm": 0.7511588335037231,
      "learning_rate": 4.5833333333333334e-05,
      "loss": 1.1614,
      "step": 110
    },
    {
      "epoch": 1.897196261682243,
      "grad_norm": 0.6596113443374634,
      "learning_rate": 4.791666666666667e-05,
      "loss": 1.0587,
      "step": 115
    },
    {
      "epoch": 1.980269989615784,
      "grad_norm": 0.7166474461555481,
      "learning_rate": 5e-05,
      "loss": 1.0169,
      "step": 120
    },
    {
      "epoch": 2.0498442367601246,
      "grad_norm": 0.6620935797691345,
      "learning_rate": 4.999735579817769e-05,
      "loss": 0.985,
      "step": 125
    },
    {
      "epoch": 2.132917964693666,
      "grad_norm": 0.7846258878707886,
      "learning_rate": 4.998942375205502e-05,
      "loss": 0.9594,
      "step": 130
    },
    {
      "epoch": 2.2159916926272065,
      "grad_norm": 0.652454137802124,
      "learning_rate": 4.997620553954645e-05,
      "loss": 0.9322,
      "step": 135
    },
    {
      "epoch": 2.2990654205607477,
      "grad_norm": 0.7020059823989868,
      "learning_rate": 4.995770395678171e-05,
      "loss": 0.9242,
      "step": 140
    },
    {
      "epoch": 2.382139148494289,
      "grad_norm": 0.8148695826530457,
      "learning_rate": 4.993392291751431e-05,
      "loss": 0.8394,
      "step": 145
    },
    {
      "epoch": 2.4652128764278296,
      "grad_norm": 0.8813133835792542,
      "learning_rate": 4.990486745229364e-05,
      "loss": 0.992,
      "step": 150
    },
    {
      "epoch": 2.5482866043613708,
      "grad_norm": 0.7893730401992798,
      "learning_rate": 4.987054370740083e-05,
      "loss": 0.899,
      "step": 155
    },
    {
      "epoch": 2.6313603322949115,
      "grad_norm": 0.7719221711158752,
      "learning_rate": 4.983095894354858e-05,
      "loss": 0.9416,
      "step": 160
    },
    {
      "epoch": 2.7144340602284527,
      "grad_norm": 0.8439667820930481,
      "learning_rate": 4.9786121534345265e-05,
      "loss": 0.8401,
      "step": 165
    },
    {
      "epoch": 2.797507788161994,
      "grad_norm": 0.833251953125,
      "learning_rate": 4.973604096452361e-05,
      "loss": 0.817,
      "step": 170
    },
    {
      "epoch": 2.8805815160955346,
      "grad_norm": 0.7526916265487671,
      "learning_rate": 4.9680727827934354e-05,
      "loss": 0.8191,
      "step": 175
    },
    {
      "epoch": 2.9636552440290758,
      "grad_norm": 1.0640058517456055,
      "learning_rate": 4.962019382530521e-05,
      "loss": 0.8607,
      "step": 180
    },
    {
      "epoch": 3.0332294911734166,
      "grad_norm": 0.784114420413971,
      "learning_rate": 4.9554451761765766e-05,
      "loss": 0.761,
      "step": 185
    },
    {
      "epoch": 3.1163032191069573,
      "grad_norm": 0.9396352767944336,
      "learning_rate": 4.948351554413879e-05,
      "loss": 0.6821,
      "step": 190
    },
    {
      "epoch": 3.1993769470404985,
      "grad_norm": 0.9770582914352417,
      "learning_rate": 4.940740017799833e-05,
      "loss": 0.742,
      "step": 195
    },
    {
      "epoch": 3.2824506749740396,
      "grad_norm": 0.8752853870391846,
      "learning_rate": 4.9326121764495596e-05,
      "loss": 0.69,
      "step": 200
    },
    {
      "epoch": 3.3655244029075804,
      "grad_norm": 0.8496589064598083,
      "learning_rate": 4.92396974969529e-05,
      "loss": 0.769,
      "step": 205
    },
    {
      "epoch": 3.4485981308411215,
      "grad_norm": 0.8192639350891113,
      "learning_rate": 4.914814565722671e-05,
      "loss": 0.7298,
      "step": 210
    },
    {
      "epoch": 3.5316718587746623,
      "grad_norm": 1.00070059299469,
      "learning_rate": 4.905148561184033e-05,
      "loss": 0.7815,
      "step": 215
    },
    {
      "epoch": 3.6147455867082035,
      "grad_norm": 1.1614112854003906,
      "learning_rate": 4.894973780788722e-05,
      "loss": 0.7975,
      "step": 220
    },
    {
      "epoch": 3.6978193146417446,
      "grad_norm": 1.014397382736206,
      "learning_rate": 4.884292376870567e-05,
      "loss": 0.696,
      "step": 225
    },
    {
      "epoch": 3.7808930425752854,
      "grad_norm": 0.9468530416488647,
      "learning_rate": 4.873106608932585e-05,
      "loss": 0.7194,
      "step": 230
    },
    {
      "epoch": 3.8639667705088265,
      "grad_norm": 1.2201341390609741,
      "learning_rate": 4.8614188431690125e-05,
      "loss": 0.7795,
      "step": 235
    },
    {
      "epoch": 3.9470404984423677,
      "grad_norm": 0.9476346373558044,
      "learning_rate": 4.849231551964771e-05,
      "loss": 0.7202,
      "step": 240
    },
    {
      "epoch": 4.0166147455867085,
      "grad_norm": 0.9945451021194458,
      "learning_rate": 4.836547313372471e-05,
      "loss": 0.6931,
      "step": 245
    },
    {
      "epoch": 4.099688473520249,
      "grad_norm": 1.1113696098327637,
      "learning_rate": 4.823368810567056e-05,
      "loss": 0.6557,
      "step": 250
    },
    {
      "epoch": 4.18276220145379,
      "grad_norm": 1.3515146970748901,
      "learning_rate": 4.8096988312782174e-05,
      "loss": 0.6295,
      "step": 255
    },
    {
      "epoch": 4.265835929387332,
      "grad_norm": 1.375272274017334,
      "learning_rate": 4.7955402672006854e-05,
      "loss": 0.6423,
      "step": 260
    },
    {
      "epoch": 4.348909657320872,
      "grad_norm": 1.1683202981948853,
      "learning_rate": 4.780896113382536e-05,
      "loss": 0.6167,
      "step": 265
    },
    {
      "epoch": 4.431983385254413,
      "grad_norm": 1.2091785669326782,
      "learning_rate": 4.765769467591625e-05,
      "loss": 0.6373,
      "step": 270
    },
    {
      "epoch": 4.515057113187955,
      "grad_norm": 1.3007235527038574,
      "learning_rate": 4.750163529660303e-05,
      "loss": 0.5941,
      "step": 275
    },
    {
      "epoch": 4.598130841121495,
      "grad_norm": 1.2785530090332031,
      "learning_rate": 4.734081600808531e-05,
      "loss": 0.6384,
      "step": 280
    },
    {
      "epoch": 4.681204569055036,
      "grad_norm": 1.2808083295822144,
      "learning_rate": 4.717527082945554e-05,
      "loss": 0.6592,
      "step": 285
    },
    {
      "epoch": 4.764278296988578,
      "grad_norm": 1.1978651285171509,
      "learning_rate": 4.700503477950278e-05,
      "loss": 0.6285,
      "step": 290
    },
    {
      "epoch": 4.8473520249221185,
      "grad_norm": 1.3983532190322876,
      "learning_rate": 4.68301438693049e-05,
      "loss": 0.5941,
      "step": 295
    },
    {
      "epoch": 4.930425752855659,
      "grad_norm": 1.286287784576416,
      "learning_rate": 4.665063509461097e-05,
      "loss": 0.5652,
      "step": 300
    },
    {
      "epoch": 5.0,
      "grad_norm": 3.1714959144592285,
      "learning_rate": 4.6466546428015336e-05,
      "loss": 0.5657,
      "step": 305
    },
    {
      "epoch": 5.083073727933541,
      "grad_norm": 1.0886554718017578,
      "learning_rate": 4.627791681092499e-05,
      "loss": 0.4776,
      "step": 310
    },
    {
      "epoch": 5.166147455867082,
      "grad_norm": 1.3735958337783813,
      "learning_rate": 4.608478614532215e-05,
      "loss": 0.5114,
      "step": 315
    },
    {
      "epoch": 5.249221183800623,
      "grad_norm": 1.5809016227722168,
      "learning_rate": 4.588719528532342e-05,
      "loss": 0.5537,
      "step": 320
    },
    {
      "epoch": 5.332294911734164,
      "grad_norm": 1.401424765586853,
      "learning_rate": 4.568518602853776e-05,
      "loss": 0.5299,
      "step": 325
    },
    {
      "epoch": 5.415368639667705,
      "grad_norm": 1.467054009437561,
      "learning_rate": 4.54788011072248e-05,
      "loss": 0.5236,
      "step": 330
    },
    {
      "epoch": 5.498442367601246,
      "grad_norm": 1.253794550895691,
      "learning_rate": 4.526808417925531e-05,
      "loss": 0.5632,
      "step": 335
    },
    {
      "epoch": 5.581516095534787,
      "grad_norm": 1.4579623937606812,
      "learning_rate": 4.50530798188761e-05,
      "loss": 0.5556,
      "step": 340
    },
    {
      "epoch": 5.6645898234683285,
      "grad_norm": 1.171007513999939,
      "learning_rate": 4.4833833507280884e-05,
      "loss": 0.4934,
      "step": 345
    },
    {
      "epoch": 5.747663551401869,
      "grad_norm": 1.4656859636306763,
      "learning_rate": 4.4610391622989396e-05,
      "loss": 0.5697,
      "step": 350
    },
    {
      "epoch": 5.83073727933541,
      "grad_norm": 1.4671518802642822,
      "learning_rate": 4.438280143203665e-05,
      "loss": 0.5289,
      "step": 355
    },
    {
      "epoch": 5.913811007268951,
      "grad_norm": 1.4010504484176636,
      "learning_rate": 4.415111107797445e-05,
      "loss": 0.5205,
      "step": 360
    },
    {
      "epoch": 5.996884735202492,
      "grad_norm": 1.6909047365188599,
      "learning_rate": 4.391536957168733e-05,
      "loss": 0.5469,
      "step": 365
    },
    {
      "epoch": 6.066458982346833,
      "grad_norm": 1.3057804107666016,
      "learning_rate": 4.36756267810249e-05,
      "loss": 0.5087,
      "step": 370
    },
    {
      "epoch": 6.149532710280374,
      "grad_norm": 1.7389354705810547,
      "learning_rate": 4.34319334202531e-05,
      "loss": 0.446,
      "step": 375
    },
    {
      "epoch": 6.232606438213915,
      "grad_norm": 1.6178336143493652,
      "learning_rate": 4.318434103932622e-05,
      "loss": 0.4888,
      "step": 380
    },
    {
      "epoch": 6.315680166147456,
      "grad_norm": 1.6565852165222168,
      "learning_rate": 4.293290201298223e-05,
      "loss": 0.4784,
      "step": 385
    },
    {
      "epoch": 6.398753894080997,
      "grad_norm": 1.4080264568328857,
      "learning_rate": 4.267766952966369e-05,
      "loss": 0.4155,
      "step": 390
    },
    {
      "epoch": 6.481827622014538,
      "grad_norm": 1.4906692504882812,
      "learning_rate": 4.241869758026638e-05,
      "loss": 0.4854,
      "step": 395
    },
    {
      "epoch": 6.564901349948079,
      "grad_norm": 1.6544594764709473,
      "learning_rate": 4.215604094671835e-05,
      "loss": 0.4753,
      "step": 400
    },
    {
      "epoch": 6.64797507788162,
      "grad_norm": 1.3182097673416138,
      "learning_rate": 4.188975519039151e-05,
      "loss": 0.4352,
      "step": 405
    },
    {
      "epoch": 6.731048805815161,
      "grad_norm": 1.531633734703064,
      "learning_rate": 4.1619896640348445e-05,
      "loss": 0.4786,
      "step": 410
    },
    {
      "epoch": 6.814122533748702,
      "grad_norm": 1.358223557472229,
      "learning_rate": 4.1346522381426744e-05,
      "loss": 0.4108,
      "step": 415
    },
    {
      "epoch": 6.897196261682243,
      "grad_norm": 1.6483898162841797,
      "learning_rate": 4.1069690242163484e-05,
      "loss": 0.4828,
      "step": 420
    }
  ],
  "logging_steps": 5,
  "max_steps": 1200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 20,
  "save_steps": 60,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.8136776925249536e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}