{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 19.681204569055037,
  "eval_steps": 500,
  "global_step": 1200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.08307372793354102,
      "grad_norm": 0.5125107169151306,
      "learning_rate": 2.0833333333333334e-06,
      "loss": 3.5515,
      "step": 5
    },
    {
      "epoch": 0.16614745586708204,
      "grad_norm": 0.4532736539840698,
      "learning_rate": 4.166666666666667e-06,
      "loss": 3.6807,
      "step": 10
    },
    {
      "epoch": 0.24922118380062305,
      "grad_norm": 0.4483909606933594,
      "learning_rate": 6.25e-06,
      "loss": 3.5357,
      "step": 15
    },
    {
      "epoch": 0.3322949117341641,
      "grad_norm": 0.5020395517349243,
      "learning_rate": 8.333333333333334e-06,
      "loss": 3.645,
      "step": 20
    },
    {
      "epoch": 0.4153686396677051,
      "grad_norm": 0.8230155110359192,
      "learning_rate": 1.0416666666666668e-05,
      "loss": 3.5682,
      "step": 25
    },
    {
      "epoch": 0.4984423676012461,
      "grad_norm": 1.3333394527435303,
      "learning_rate": 1.25e-05,
      "loss": 3.5155,
      "step": 30
    },
    {
      "epoch": 0.5815160955347871,
      "grad_norm": 0.7118450999259949,
      "learning_rate": 1.4583333333333335e-05,
      "loss": 3.4724,
      "step": 35
    },
    {
      "epoch": 0.6645898234683282,
      "grad_norm": 1.0282073020935059,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 3.4205,
      "step": 40
    },
    {
      "epoch": 0.7476635514018691,
      "grad_norm": 1.0468826293945312,
      "learning_rate": 1.8750000000000002e-05,
      "loss": 3.3295,
      "step": 45
    },
    {
      "epoch": 0.8307372793354102,
      "grad_norm": 0.8730693459510803,
      "learning_rate": 2.0833333333333336e-05,
      "loss": 3.0831,
      "step": 50
    },
    {
      "epoch": 0.9138110072689511,
      "grad_norm": 0.9377574324607849,
      "learning_rate": 2.2916666666666667e-05,
      "loss": 2.8412,
      "step": 55
    },
    {
      "epoch": 0.9968847352024922,
      "grad_norm": 1.0129190683364868,
      "learning_rate": 2.5e-05,
      "loss": 2.3229,
      "step": 60
    },
    {
      "epoch": 1.066458982346833,
      "grad_norm": 0.8808080554008484,
      "learning_rate": 2.7083333333333332e-05,
      "loss": 2.0489,
      "step": 65
    },
    {
      "epoch": 1.1495327102803738,
      "grad_norm": 0.8092290759086609,
      "learning_rate": 2.916666666666667e-05,
      "loss": 1.9307,
      "step": 70
    },
    {
      "epoch": 1.2326064382139148,
      "grad_norm": 0.8002105951309204,
      "learning_rate": 3.125e-05,
      "loss": 1.7402,
      "step": 75
    },
    {
      "epoch": 1.3156801661474558,
      "grad_norm": 0.8581973314285278,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 1.6696,
      "step": 80
    },
    {
      "epoch": 1.398753894080997,
      "grad_norm": 0.7443532347679138,
      "learning_rate": 3.541666666666667e-05,
      "loss": 1.5175,
      "step": 85
    },
    {
      "epoch": 1.4818276220145379,
      "grad_norm": 0.9584633708000183,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 1.3968,
      "step": 90
    },
    {
      "epoch": 1.5649013499480788,
      "grad_norm": 0.8683136105537415,
      "learning_rate": 3.958333333333333e-05,
      "loss": 1.299,
      "step": 95
    },
    {
      "epoch": 1.64797507788162,
      "grad_norm": 0.7825261950492859,
      "learning_rate": 4.166666666666667e-05,
      "loss": 1.1271,
      "step": 100
    },
    {
      "epoch": 1.731048805815161,
      "grad_norm": 0.654670000076294,
      "learning_rate": 4.375e-05,
      "loss": 1.14,
      "step": 105
    },
    {
      "epoch": 1.814122533748702,
      "grad_norm": 0.7511588335037231,
      "learning_rate": 4.5833333333333334e-05,
      "loss": 1.1614,
      "step": 110
    },
    {
      "epoch": 1.897196261682243,
      "grad_norm": 0.6596113443374634,
      "learning_rate": 4.791666666666667e-05,
      "loss": 1.0587,
      "step": 115
    },
    {
      "epoch": 1.980269989615784,
      "grad_norm": 0.7166474461555481,
      "learning_rate": 5e-05,
      "loss": 1.0169,
      "step": 120
    },
    {
      "epoch": 2.0498442367601246,
      "grad_norm": 0.6620935797691345,
      "learning_rate": 4.999735579817769e-05,
      "loss": 0.985,
      "step": 125
    },
    {
      "epoch": 2.132917964693666,
      "grad_norm": 0.7846258878707886,
      "learning_rate": 4.998942375205502e-05,
      "loss": 0.9594,
      "step": 130
    },
    {
      "epoch": 2.2159916926272065,
      "grad_norm": 0.652454137802124,
      "learning_rate": 4.997620553954645e-05,
      "loss": 0.9322,
      "step": 135
    },
    {
      "epoch": 2.2990654205607477,
      "grad_norm": 0.7020059823989868,
      "learning_rate": 4.995770395678171e-05,
      "loss": 0.9242,
      "step": 140
    },
    {
      "epoch": 2.382139148494289,
      "grad_norm": 0.8148695826530457,
      "learning_rate": 4.993392291751431e-05,
      "loss": 0.8394,
      "step": 145
    },
    {
      "epoch": 2.4652128764278296,
      "grad_norm": 0.8813133835792542,
      "learning_rate": 4.990486745229364e-05,
      "loss": 0.992,
      "step": 150
    },
    {
      "epoch": 2.5482866043613708,
      "grad_norm": 0.7893730401992798,
      "learning_rate": 4.987054370740083e-05,
      "loss": 0.899,
      "step": 155
    },
    {
      "epoch": 2.6313603322949115,
      "grad_norm": 0.7719221711158752,
      "learning_rate": 4.983095894354858e-05,
      "loss": 0.9416,
      "step": 160
    },
    {
      "epoch": 2.7144340602284527,
      "grad_norm": 0.8439667820930481,
      "learning_rate": 4.9786121534345265e-05,
      "loss": 0.8401,
      "step": 165
    },
    {
      "epoch": 2.797507788161994,
      "grad_norm": 0.833251953125,
      "learning_rate": 4.973604096452361e-05,
      "loss": 0.817,
      "step": 170
    },
    {
      "epoch": 2.8805815160955346,
      "grad_norm": 0.7526916265487671,
      "learning_rate": 4.9680727827934354e-05,
      "loss": 0.8191,
      "step": 175
    },
    {
      "epoch": 2.9636552440290758,
      "grad_norm": 1.0640058517456055,
      "learning_rate": 4.962019382530521e-05,
      "loss": 0.8607,
      "step": 180
    },
    {
      "epoch": 3.0332294911734166,
      "grad_norm": 0.784114420413971,
      "learning_rate": 4.9554451761765766e-05,
      "loss": 0.761,
      "step": 185
    },
    {
      "epoch": 3.1163032191069573,
      "grad_norm": 0.9396352767944336,
      "learning_rate": 4.948351554413879e-05,
      "loss": 0.6821,
      "step": 190
    },
    {
      "epoch": 3.1993769470404985,
      "grad_norm": 0.9770582914352417,
      "learning_rate": 4.940740017799833e-05,
      "loss": 0.742,
      "step": 195
    },
    {
      "epoch": 3.2824506749740396,
      "grad_norm": 0.8752853870391846,
      "learning_rate": 4.9326121764495596e-05,
      "loss": 0.69,
      "step": 200
    },
    {
      "epoch": 3.3655244029075804,
      "grad_norm": 0.8496589064598083,
      "learning_rate": 4.92396974969529e-05,
      "loss": 0.769,
      "step": 205
    },
    {
      "epoch": 3.4485981308411215,
      "grad_norm": 0.8192639350891113,
      "learning_rate": 4.914814565722671e-05,
      "loss": 0.7298,
      "step": 210
    },
    {
      "epoch": 3.5316718587746623,
      "grad_norm": 1.00070059299469,
      "learning_rate": 4.905148561184033e-05,
      "loss": 0.7815,
      "step": 215
    },
    {
      "epoch": 3.6147455867082035,
      "grad_norm": 1.1614112854003906,
      "learning_rate": 4.894973780788722e-05,
      "loss": 0.7975,
      "step": 220
    },
    {
      "epoch": 3.6978193146417446,
      "grad_norm": 1.014397382736206,
      "learning_rate": 4.884292376870567e-05,
      "loss": 0.696,
      "step": 225
    },
    {
      "epoch": 3.7808930425752854,
      "grad_norm": 0.9468530416488647,
      "learning_rate": 4.873106608932585e-05,
      "loss": 0.7194,
      "step": 230
    },
    {
      "epoch": 3.8639667705088265,
      "grad_norm": 1.2201341390609741,
      "learning_rate": 4.8614188431690125e-05,
      "loss": 0.7795,
      "step": 235
    },
    {
      "epoch": 3.9470404984423677,
      "grad_norm": 0.9476346373558044,
      "learning_rate": 4.849231551964771e-05,
      "loss": 0.7202,
      "step": 240
    },
    {
      "epoch": 4.0166147455867085,
      "grad_norm": 0.9945451021194458,
      "learning_rate": 4.836547313372471e-05,
      "loss": 0.6931,
      "step": 245
    },
    {
      "epoch": 4.099688473520249,
      "grad_norm": 1.1113696098327637,
      "learning_rate": 4.823368810567056e-05,
      "loss": 0.6557,
      "step": 250
    },
    {
      "epoch": 4.18276220145379,
      "grad_norm": 1.3515146970748901,
      "learning_rate": 4.8096988312782174e-05,
      "loss": 0.6295,
      "step": 255
    },
    {
      "epoch": 4.265835929387332,
      "grad_norm": 1.375272274017334,
      "learning_rate": 4.7955402672006854e-05,
      "loss": 0.6423,
      "step": 260
    },
    {
      "epoch": 4.348909657320872,
      "grad_norm": 1.1683202981948853,
      "learning_rate": 4.780896113382536e-05,
      "loss": 0.6167,
      "step": 265
    },
    {
      "epoch": 4.431983385254413,
      "grad_norm": 1.2091785669326782,
      "learning_rate": 4.765769467591625e-05,
      "loss": 0.6373,
      "step": 270
    },
    {
      "epoch": 4.515057113187955,
      "grad_norm": 1.3007235527038574,
      "learning_rate": 4.750163529660303e-05,
      "loss": 0.5941,
      "step": 275
    },
    {
      "epoch": 4.598130841121495,
      "grad_norm": 1.2785530090332031,
      "learning_rate": 4.734081600808531e-05,
      "loss": 0.6384,
      "step": 280
    },
    {
      "epoch": 4.681204569055036,
      "grad_norm": 1.2808083295822144,
      "learning_rate": 4.717527082945554e-05,
      "loss": 0.6592,
      "step": 285
    },
    {
      "epoch": 4.764278296988578,
      "grad_norm": 1.1978651285171509,
      "learning_rate": 4.700503477950278e-05,
      "loss": 0.6285,
      "step": 290
    },
    {
      "epoch": 4.8473520249221185,
      "grad_norm": 1.3983532190322876,
      "learning_rate": 4.68301438693049e-05,
      "loss": 0.5941,
      "step": 295
    },
    {
      "epoch": 4.930425752855659,
      "grad_norm": 1.286287784576416,
      "learning_rate": 4.665063509461097e-05,
      "loss": 0.5652,
      "step": 300
    },
    {
      "epoch": 5.0,
      "grad_norm": 3.1714959144592285,
      "learning_rate": 4.6466546428015336e-05,
      "loss": 0.5657,
      "step": 305
    },
    {
      "epoch": 5.083073727933541,
      "grad_norm": 1.0886554718017578,
      "learning_rate": 4.627791681092499e-05,
      "loss": 0.4776,
      "step": 310
    },
    {
      "epoch": 5.166147455867082,
      "grad_norm": 1.3735958337783813,
      "learning_rate": 4.608478614532215e-05,
      "loss": 0.5114,
      "step": 315
    },
    {
      "epoch": 5.249221183800623,
      "grad_norm": 1.5809016227722168,
      "learning_rate": 4.588719528532342e-05,
      "loss": 0.5537,
      "step": 320
    },
    {
      "epoch": 5.332294911734164,
      "grad_norm": 1.401424765586853,
      "learning_rate": 4.568518602853776e-05,
      "loss": 0.5299,
      "step": 325
    },
    {
      "epoch": 5.415368639667705,
      "grad_norm": 1.467054009437561,
      "learning_rate": 4.54788011072248e-05,
      "loss": 0.5236,
      "step": 330
    },
    {
      "epoch": 5.498442367601246,
      "grad_norm": 1.253794550895691,
      "learning_rate": 4.526808417925531e-05,
      "loss": 0.5632,
      "step": 335
    },
    {
      "epoch": 5.581516095534787,
      "grad_norm": 1.4579623937606812,
      "learning_rate": 4.50530798188761e-05,
      "loss": 0.5556,
      "step": 340
    },
    {
      "epoch": 5.6645898234683285,
      "grad_norm": 1.171007513999939,
      "learning_rate": 4.4833833507280884e-05,
      "loss": 0.4934,
      "step": 345
    },
    {
      "epoch": 5.747663551401869,
      "grad_norm": 1.4656859636306763,
      "learning_rate": 4.4610391622989396e-05,
      "loss": 0.5697,
      "step": 350
    },
    {
      "epoch": 5.83073727933541,
      "grad_norm": 1.4671518802642822,
      "learning_rate": 4.438280143203665e-05,
      "loss": 0.5289,
      "step": 355
    },
    {
      "epoch": 5.913811007268951,
      "grad_norm": 1.4010504484176636,
      "learning_rate": 4.415111107797445e-05,
      "loss": 0.5205,
      "step": 360
    },
    {
      "epoch": 5.996884735202492,
      "grad_norm": 1.6909047365188599,
      "learning_rate": 4.391536957168733e-05,
      "loss": 0.5469,
      "step": 365
    },
    {
      "epoch": 6.066458982346833,
      "grad_norm": 1.3057804107666016,
      "learning_rate": 4.36756267810249e-05,
      "loss": 0.5087,
      "step": 370
    },
    {
      "epoch": 6.149532710280374,
      "grad_norm": 1.7389354705810547,
      "learning_rate": 4.34319334202531e-05,
      "loss": 0.446,
      "step": 375
    },
    {
      "epoch": 6.232606438213915,
      "grad_norm": 1.6178336143493652,
      "learning_rate": 4.318434103932622e-05,
      "loss": 0.4888,
      "step": 380
    },
    {
      "epoch": 6.315680166147456,
      "grad_norm": 1.6565852165222168,
      "learning_rate": 4.293290201298223e-05,
      "loss": 0.4784,
      "step": 385
    },
    {
      "epoch": 6.398753894080997,
      "grad_norm": 1.4080264568328857,
      "learning_rate": 4.267766952966369e-05,
      "loss": 0.4155,
      "step": 390
    },
    {
      "epoch": 6.481827622014538,
      "grad_norm": 1.4906692504882812,
      "learning_rate": 4.241869758026638e-05,
      "loss": 0.4854,
      "step": 395
    },
    {
      "epoch": 6.564901349948079,
      "grad_norm": 1.6544594764709473,
      "learning_rate": 4.215604094671835e-05,
      "loss": 0.4753,
      "step": 400
    },
    {
      "epoch": 6.64797507788162,
      "grad_norm": 1.3182097673416138,
      "learning_rate": 4.188975519039151e-05,
      "loss": 0.4352,
      "step": 405
    },
    {
      "epoch": 6.731048805815161,
      "grad_norm": 1.531633734703064,
      "learning_rate": 4.1619896640348445e-05,
      "loss": 0.4786,
      "step": 410
    },
    {
      "epoch": 6.814122533748702,
      "grad_norm": 1.358223557472229,
      "learning_rate": 4.1346522381426744e-05,
      "loss": 0.4108,
      "step": 415
    },
    {
      "epoch": 6.897196261682243,
      "grad_norm": 1.6483898162841797,
      "learning_rate": 4.1069690242163484e-05,
      "loss": 0.4828,
      "step": 420
    },
    {
      "epoch": 6.980269989615784,
      "grad_norm": 1.5941141843795776,
      "learning_rate": 4.078945878256244e-05,
      "loss": 0.4276,
      "step": 425
    },
    {
      "epoch": 7.049844236760125,
      "grad_norm": 1.367634892463684,
      "learning_rate": 4.05058872817065e-05,
      "loss": 0.3997,
      "step": 430
    },
    {
      "epoch": 7.132917964693665,
      "grad_norm": 1.3028111457824707,
      "learning_rate": 4.021903572521802e-05,
      "loss": 0.3919,
      "step": 435
    },
    {
      "epoch": 7.215991692627207,
      "grad_norm": 1.8897424936294556,
      "learning_rate": 3.9928964792569655e-05,
      "loss": 0.3778,
      "step": 440
    },
    {
      "epoch": 7.299065420560748,
      "grad_norm": 1.6549698114395142,
      "learning_rate": 3.963573584424852e-05,
      "loss": 0.408,
      "step": 445
    },
    {
      "epoch": 7.382139148494288,
      "grad_norm": 1.3696308135986328,
      "learning_rate": 3.933941090877615e-05,
      "loss": 0.4262,
      "step": 450
    },
    {
      "epoch": 7.46521287642783,
      "grad_norm": 1.6997928619384766,
      "learning_rate": 3.9040052669587325e-05,
      "loss": 0.3982,
      "step": 455
    },
    {
      "epoch": 7.548286604361371,
      "grad_norm": 1.3938806056976318,
      "learning_rate": 3.873772445177015e-05,
      "loss": 0.4113,
      "step": 460
    },
    {
      "epoch": 7.6313603322949115,
      "grad_norm": 1.8341262340545654,
      "learning_rate": 3.84324902086706e-05,
      "loss": 0.4265,
      "step": 465
    },
    {
      "epoch": 7.714434060228453,
      "grad_norm": 1.6543951034545898,
      "learning_rate": 3.8124414508364e-05,
      "loss": 0.4112,
      "step": 470
    },
    {
      "epoch": 7.797507788161994,
      "grad_norm": 1.5131211280822754,
      "learning_rate": 3.781356251999663e-05,
      "loss": 0.4098,
      "step": 475
    },
    {
      "epoch": 7.880581516095535,
      "grad_norm": 1.7143309116363525,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 0.4183,
      "step": 480
    },
    {
      "epoch": 7.963655244029075,
      "grad_norm": 1.4201680421829224,
      "learning_rate": 3.718379327818106e-05,
      "loss": 0.388,
      "step": 485
    },
    {
      "epoch": 8.033229491173417,
      "grad_norm": 1.36739182472229,
      "learning_rate": 3.686500924369101e-05,
      "loss": 0.3927,
      "step": 490
    },
    {
      "epoch": 8.116303219106957,
      "grad_norm": 1.348694086074829,
      "learning_rate": 3.654371533087586e-05,
      "loss": 0.3614,
      "step": 495
    },
    {
      "epoch": 8.199376947040498,
      "grad_norm": 1.7005705833435059,
      "learning_rate": 3.621997950501156e-05,
      "loss": 0.3509,
      "step": 500
    },
    {
      "epoch": 8.28245067497404,
      "grad_norm": 1.4668842554092407,
      "learning_rate": 3.589387024792699e-05,
      "loss": 0.344,
      "step": 505
    },
    {
      "epoch": 8.36552440290758,
      "grad_norm": 1.6029119491577148,
      "learning_rate": 3.556545654351749e-05,
      "loss": 0.3599,
      "step": 510
    },
    {
      "epoch": 8.448598130841122,
      "grad_norm": 1.5699125528335571,
      "learning_rate": 3.523480786315231e-05,
      "loss": 0.3579,
      "step": 515
    },
    {
      "epoch": 8.531671858774663,
      "grad_norm": 1.4507211446762085,
      "learning_rate": 3.490199415097892e-05,
      "loss": 0.3341,
      "step": 520
    },
    {
      "epoch": 8.614745586708203,
      "grad_norm": 1.460924744606018,
      "learning_rate": 3.456708580912725e-05,
      "loss": 0.3762,
      "step": 525
    },
    {
      "epoch": 8.697819314641745,
      "grad_norm": 1.6329962015151978,
      "learning_rate": 3.423015368281711e-05,
      "loss": 0.3948,
      "step": 530
    },
    {
      "epoch": 8.780893042575286,
      "grad_norm": 1.5261616706848145,
      "learning_rate": 3.389126904537192e-05,
      "loss": 0.3594,
      "step": 535
    },
    {
      "epoch": 8.863966770508826,
      "grad_norm": 1.6733344793319702,
      "learning_rate": 3.355050358314172e-05,
      "loss": 0.3856,
      "step": 540
    },
    {
      "epoch": 8.947040498442368,
      "grad_norm": 1.4160898923873901,
      "learning_rate": 3.3207929380339034e-05,
      "loss": 0.3875,
      "step": 545
    },
    {
      "epoch": 9.016614745586708,
      "grad_norm": 1.2456085681915283,
      "learning_rate": 3.2863618903790346e-05,
      "loss": 0.3318,
      "step": 550
    },
    {
      "epoch": 9.09968847352025,
      "grad_norm": 1.309977650642395,
      "learning_rate": 3.251764498760683e-05,
      "loss": 0.3239,
      "step": 555
    },
    {
      "epoch": 9.18276220145379,
      "grad_norm": 1.6747145652770996,
      "learning_rate": 3.217008081777726e-05,
      "loss": 0.342,
      "step": 560
    },
    {
      "epoch": 9.26583592938733,
      "grad_norm": 1.6204376220703125,
      "learning_rate": 3.182099991668653e-05,
      "loss": 0.3368,
      "step": 565
    },
    {
      "epoch": 9.348909657320872,
      "grad_norm": 1.6952364444732666,
      "learning_rate": 3.147047612756302e-05,
      "loss": 0.333,
      "step": 570
    },
    {
      "epoch": 9.431983385254414,
      "grad_norm": 1.6254109144210815,
      "learning_rate": 3.11185835988581e-05,
      "loss": 0.3488,
      "step": 575
    },
    {
      "epoch": 9.515057113187954,
      "grad_norm": 1.7684122323989868,
      "learning_rate": 3.076539676856101e-05,
      "loss": 0.3461,
      "step": 580
    },
    {
      "epoch": 9.598130841121495,
      "grad_norm": 1.480283260345459,
      "learning_rate": 3.0410990348452573e-05,
      "loss": 0.3457,
      "step": 585
    },
    {
      "epoch": 9.681204569055037,
      "grad_norm": 1.4912574291229248,
      "learning_rate": 3.0055439308300952e-05,
      "loss": 0.3307,
      "step": 590
    },
    {
      "epoch": 9.764278296988577,
      "grad_norm": 1.5759741067886353,
      "learning_rate": 2.9698818860002797e-05,
      "loss": 0.3387,
      "step": 595
    },
    {
      "epoch": 9.847352024922118,
      "grad_norm": 1.475316047668457,
      "learning_rate": 2.9341204441673266e-05,
      "loss": 0.342,
      "step": 600
    },
    {
      "epoch": 9.93042575285566,
      "grad_norm": 1.5280460119247437,
      "learning_rate": 2.898267170168807e-05,
      "loss": 0.3325,
      "step": 605
    },
    {
      "epoch": 10.0,
      "grad_norm": 4.123524188995361,
      "learning_rate": 2.8623296482681166e-05,
      "loss": 0.3528,
      "step": 610
    },
    {
      "epoch": 10.083073727933542,
      "grad_norm": 1.3620072603225708,
      "learning_rate": 2.8263154805501297e-05,
      "loss": 0.3282,
      "step": 615
    },
    {
      "epoch": 10.166147455867081,
      "grad_norm": 1.8294273614883423,
      "learning_rate": 2.7902322853130757e-05,
      "loss": 0.3066,
      "step": 620
    },
    {
      "epoch": 10.249221183800623,
      "grad_norm": 1.5608752965927124,
      "learning_rate": 2.7540876954570048e-05,
      "loss": 0.3033,
      "step": 625
    },
    {
      "epoch": 10.332294911734165,
      "grad_norm": 1.7856054306030273,
      "learning_rate": 2.717889356869146e-05,
      "loss": 0.2969,
      "step": 630
    },
    {
      "epoch": 10.415368639667705,
      "grad_norm": 1.4942408800125122,
      "learning_rate": 2.681644926806527e-05,
      "loss": 0.3153,
      "step": 635
    },
    {
      "epoch": 10.498442367601246,
      "grad_norm": 1.6234569549560547,
      "learning_rate": 2.6453620722761896e-05,
      "loss": 0.3277,
      "step": 640
    },
    {
      "epoch": 10.581516095534788,
      "grad_norm": 1.6694717407226562,
      "learning_rate": 2.6090484684133404e-05,
      "loss": 0.3228,
      "step": 645
    },
    {
      "epoch": 10.664589823468328,
      "grad_norm": 1.6755950450897217,
      "learning_rate": 2.5727117968577784e-05,
      "loss": 0.3316,
      "step": 650
    },
    {
      "epoch": 10.74766355140187,
      "grad_norm": 1.3962074518203735,
      "learning_rate": 2.536359744128957e-05,
      "loss": 0.3188,
      "step": 655
    },
    {
      "epoch": 10.83073727933541,
      "grad_norm": 1.7147635221481323,
      "learning_rate": 2.5e-05,
      "loss": 0.3463,
      "step": 660
    },
    {
      "epoch": 10.91381100726895,
      "grad_norm": 1.5997766256332397,
      "learning_rate": 2.4636402558710432e-05,
      "loss": 0.335,
      "step": 665
    },
    {
      "epoch": 10.996884735202492,
      "grad_norm": 1.483155369758606,
      "learning_rate": 2.4272882031422215e-05,
      "loss": 0.3084,
      "step": 670
    },
    {
      "epoch": 11.066458982346832,
      "grad_norm": 1.3599936962127686,
      "learning_rate": 2.3909515315866605e-05,
      "loss": 0.311,
      "step": 675
    },
    {
      "epoch": 11.149532710280374,
      "grad_norm": 1.5479919910430908,
      "learning_rate": 2.3546379277238107e-05,
      "loss": 0.2999,
      "step": 680
    },
    {
      "epoch": 11.232606438213915,
      "grad_norm": 1.5448635816574097,
      "learning_rate": 2.3183550731934735e-05,
      "loss": 0.291,
      "step": 685
    },
    {
      "epoch": 11.315680166147455,
      "grad_norm": 1.4930479526519775,
      "learning_rate": 2.2821106431308544e-05,
      "loss": 0.2879,
      "step": 690
    },
    {
      "epoch": 11.398753894080997,
      "grad_norm": 1.578282356262207,
      "learning_rate": 2.2459123045429954e-05,
      "loss": 0.3033,
      "step": 695
    },
    {
      "epoch": 11.481827622014539,
      "grad_norm": 1.4327830076217651,
      "learning_rate": 2.2097677146869242e-05,
      "loss": 0.2946,
      "step": 700
    },
    {
      "epoch": 11.564901349948078,
      "grad_norm": 1.75092351436615,
      "learning_rate": 2.173684519449872e-05,
      "loss": 0.2965,
      "step": 705
    },
    {
      "epoch": 11.64797507788162,
      "grad_norm": 1.4633839130401611,
      "learning_rate": 2.1376703517318837e-05,
      "loss": 0.3082,
      "step": 710
    },
    {
      "epoch": 11.731048805815162,
      "grad_norm": 1.4483939409255981,
      "learning_rate": 2.101732829831194e-05,
      "loss": 0.3048,
      "step": 715
    },
    {
      "epoch": 11.814122533748701,
      "grad_norm": 1.4273526668548584,
      "learning_rate": 2.0658795558326743e-05,
      "loss": 0.3093,
      "step": 720
    },
    {
      "epoch": 11.897196261682243,
      "grad_norm": 1.536641240119934,
      "learning_rate": 2.0301181139997205e-05,
      "loss": 0.3185,
      "step": 725
    },
    {
      "epoch": 11.980269989615785,
      "grad_norm": 1.6694177389144897,
      "learning_rate": 1.9944560691699057e-05,
      "loss": 0.3197,
      "step": 730
    },
    {
      "epoch": 12.049844236760125,
      "grad_norm": 1.5738413333892822,
      "learning_rate": 1.958900965154743e-05,
      "loss": 0.3235,
      "step": 735
    },
    {
      "epoch": 12.132917964693666,
      "grad_norm": 1.4949880838394165,
      "learning_rate": 1.9234603231438995e-05,
      "loss": 0.2752,
      "step": 740
    },
    {
      "epoch": 12.215991692627206,
      "grad_norm": 1.7407513856887817,
      "learning_rate": 1.8881416401141904e-05,
      "loss": 0.282,
      "step": 745
    },
    {
      "epoch": 12.299065420560748,
      "grad_norm": 1.6446201801300049,
      "learning_rate": 1.852952387243698e-05,
      "loss": 0.2847,
      "step": 750
    },
    {
      "epoch": 12.38213914849429,
      "grad_norm": 1.5613073110580444,
      "learning_rate": 1.8179000083313483e-05,
      "loss": 0.2923,
      "step": 755
    },
    {
      "epoch": 12.46521287642783,
      "grad_norm": 1.3661807775497437,
      "learning_rate": 1.7829919182222752e-05,
      "loss": 0.2881,
      "step": 760
    },
    {
      "epoch": 12.54828660436137,
      "grad_norm": 1.5903205871582031,
      "learning_rate": 1.7482355012393177e-05,
      "loss": 0.2897,
      "step": 765
    },
    {
      "epoch": 12.631360332294912,
      "grad_norm": 1.5441614389419556,
      "learning_rate": 1.7136381096209664e-05,
      "loss": 0.292,
      "step": 770
    },
    {
      "epoch": 12.714434060228452,
      "grad_norm": 1.6512565612792969,
      "learning_rate": 1.6792070619660975e-05,
      "loss": 0.289,
      "step": 775
    },
    {
      "epoch": 12.797507788161994,
      "grad_norm": 1.5514371395111084,
      "learning_rate": 1.6449496416858284e-05,
      "loss": 0.3012,
      "step": 780
    },
    {
      "epoch": 12.880581516095535,
      "grad_norm": 1.7717050313949585,
      "learning_rate": 1.6108730954628093e-05,
      "loss": 0.3055,
      "step": 785
    },
    {
      "epoch": 12.963655244029075,
      "grad_norm": 1.4902904033660889,
      "learning_rate": 1.5769846317182893e-05,
      "loss": 0.2913,
      "step": 790
    },
    {
      "epoch": 13.033229491173417,
      "grad_norm": 1.3144031763076782,
      "learning_rate": 1.5432914190872757e-05,
      "loss": 0.3069,
      "step": 795
    },
    {
      "epoch": 13.116303219106957,
      "grad_norm": 1.3925071954727173,
      "learning_rate": 1.509800584902108e-05,
      "loss": 0.2701,
      "step": 800
    },
    {
      "epoch": 13.199376947040498,
      "grad_norm": 1.7400624752044678,
      "learning_rate": 1.4765192136847685e-05,
      "loss": 0.2742,
      "step": 805
    },
    {
      "epoch": 13.28245067497404,
      "grad_norm": 1.5998958349227905,
      "learning_rate": 1.443454345648252e-05,
      "loss": 0.2778,
      "step": 810
    },
    {
      "epoch": 13.36552440290758,
      "grad_norm": 1.7994098663330078,
      "learning_rate": 1.4106129752073022e-05,
      "loss": 0.2836,
      "step": 815
    },
    {
      "epoch": 13.448598130841122,
      "grad_norm": 1.672831416130066,
      "learning_rate": 1.3780020494988446e-05,
      "loss": 0.2709,
      "step": 820
    },
    {
      "epoch": 13.531671858774663,
      "grad_norm": 1.7876700162887573,
      "learning_rate": 1.3456284669124158e-05,
      "loss": 0.2687,
      "step": 825
    },
    {
      "epoch": 13.614745586708203,
      "grad_norm": 1.6950455904006958,
      "learning_rate": 1.313499075630899e-05,
      "loss": 0.2904,
      "step": 830
    },
    {
      "epoch": 13.697819314641745,
      "grad_norm": 1.6819844245910645,
      "learning_rate": 1.2816206721818944e-05,
      "loss": 0.2876,
      "step": 835
    },
    {
      "epoch": 13.780893042575286,
      "grad_norm": 1.5784549713134766,
      "learning_rate": 1.2500000000000006e-05,
      "loss": 0.286,
      "step": 840
    },
    {
      "epoch": 13.863966770508826,
      "grad_norm": 1.6307650804519653,
      "learning_rate": 1.2186437480003372e-05,
      "loss": 0.3009,
      "step": 845
    },
    {
      "epoch": 13.947040498442368,
      "grad_norm": 1.4225823879241943,
      "learning_rate": 1.1875585491636e-05,
      "loss": 0.2741,
      "step": 850
    },
    {
      "epoch": 14.016614745586708,
      "grad_norm": 1.2541974782943726,
      "learning_rate": 1.1567509791329401e-05,
      "loss": 0.271,
      "step": 855
    },
    {
      "epoch": 14.09968847352025,
      "grad_norm": 1.3735196590423584,
      "learning_rate": 1.126227554822985e-05,
      "loss": 0.2576,
      "step": 860
    },
    {
      "epoch": 14.18276220145379,
      "grad_norm": 1.4082770347595215,
      "learning_rate": 1.0959947330412682e-05,
      "loss": 0.2705,
      "step": 865
    },
    {
      "epoch": 14.26583592938733,
      "grad_norm": 1.5013236999511719,
      "learning_rate": 1.0660589091223855e-05,
      "loss": 0.2641,
      "step": 870
    },
    {
      "epoch": 14.348909657320872,
      "grad_norm": 1.8321263790130615,
      "learning_rate": 1.0364264155751488e-05,
      "loss": 0.2646,
      "step": 875
    },
    {
      "epoch": 14.431983385254414,
      "grad_norm": 1.5073516368865967,
      "learning_rate": 1.0071035207430352e-05,
      "loss": 0.267,
      "step": 880
    },
    {
      "epoch": 14.515057113187954,
      "grad_norm": 1.5713279247283936,
      "learning_rate": 9.780964274781984e-06,
      "loss": 0.275,
      "step": 885
    },
    {
      "epoch": 14.598130841121495,
      "grad_norm": 1.4692318439483643,
      "learning_rate": 9.494112718293501e-06,
      "loss": 0.2674,
      "step": 890
    },
    {
      "epoch": 14.681204569055037,
      "grad_norm": 1.9062477350234985,
      "learning_rate": 9.210541217437565e-06,
      "loss": 0.2703,
      "step": 895
    },
    {
      "epoch": 14.764278296988577,
      "grad_norm": 1.4207799434661865,
      "learning_rate": 8.930309757836517e-06,
      "loss": 0.2751,
      "step": 900
    },
    {
      "epoch": 14.847352024922118,
      "grad_norm": 1.4608951807022095,
      "learning_rate": 8.65347761857326e-06,
      "loss": 0.2828,
      "step": 905
    },
    {
      "epoch": 14.93042575285566,
      "grad_norm": 1.5288583040237427,
      "learning_rate": 8.380103359651553e-06,
      "loss": 0.2739,
      "step": 910
    },
    {
      "epoch": 15.0,
      "grad_norm": 3.328437328338623,
      "learning_rate": 8.110244809608495e-06,
      "loss": 0.2715,
      "step": 915
    },
    {
      "epoch": 15.083073727933542,
      "grad_norm": 1.4342622756958008,
      "learning_rate": 7.843959053281663e-06,
      "loss": 0.2562,
      "step": 920
    },
    {
      "epoch": 15.166147455867081,
      "grad_norm": 1.3755466938018799,
      "learning_rate": 7.581302419733632e-06,
      "loss": 0.2488,
      "step": 925
    },
    {
      "epoch": 15.249221183800623,
      "grad_norm": 1.6790114641189575,
      "learning_rate": 7.3223304703363135e-06,
      "loss": 0.2675,
      "step": 930
    },
    {
      "epoch": 15.332294911734165,
      "grad_norm": 1.7311410903930664,
      "learning_rate": 7.067097987017762e-06,
      "loss": 0.2678,
      "step": 935
    },
    {
      "epoch": 15.415368639667705,
      "grad_norm": 1.565371036529541,
      "learning_rate": 6.815658960673782e-06,
      "loss": 0.269,
      "step": 940
    },
    {
      "epoch": 15.498442367601246,
      "grad_norm": 1.6030529737472534,
      "learning_rate": 6.568066579746901e-06,
      "loss": 0.2539,
      "step": 945
    },
    {
      "epoch": 15.581516095534788,
      "grad_norm": 1.7192659378051758,
      "learning_rate": 6.324373218975105e-06,
      "loss": 0.261,
      "step": 950
    },
    {
      "epoch": 15.664589823468328,
      "grad_norm": 1.6753857135772705,
      "learning_rate": 6.08463042831268e-06,
      "loss": 0.2521,
      "step": 955
    },
    {
      "epoch": 15.74766355140187,
      "grad_norm": 1.9638776779174805,
      "learning_rate": 5.848888922025553e-06,
      "loss": 0.2615,
      "step": 960
    },
    {
      "epoch": 15.83073727933541,
      "grad_norm": 1.5389426946640015,
      "learning_rate": 5.617198567963352e-06,
      "loss": 0.2584,
      "step": 965
    },
    {
      "epoch": 15.91381100726895,
      "grad_norm": 1.8820122480392456,
      "learning_rate": 5.389608377010608e-06,
      "loss": 0.2667,
      "step": 970
    },
    {
      "epoch": 15.996884735202492,
      "grad_norm": 1.8412877321243286,
      "learning_rate": 5.166166492719124e-06,
      "loss": 0.2697,
      "step": 975
    },
    {
      "epoch": 16.066458982346834,
      "grad_norm": 1.7076417207717896,
      "learning_rate": 4.946920181123904e-06,
      "loss": 0.2451,
      "step": 980
    },
    {
      "epoch": 16.149532710280372,
      "grad_norm": 1.4559515714645386,
      "learning_rate": 4.731915820744695e-06,
      "loss": 0.2448,
      "step": 985
    },
    {
      "epoch": 16.232606438213914,
      "grad_norm": 1.4542195796966553,
      "learning_rate": 4.521198892775203e-06,
      "loss": 0.2525,
      "step": 990
    },
    {
      "epoch": 16.315680166147455,
      "grad_norm": 1.7294296026229858,
      "learning_rate": 4.314813971462237e-06,
      "loss": 0.2452,
      "step": 995
    },
    {
      "epoch": 16.398753894080997,
      "grad_norm": 1.694813847541809,
      "learning_rate": 4.112804714676594e-06,
      "loss": 0.2533,
      "step": 1000
    },
    {
      "epoch": 16.48182762201454,
      "grad_norm": 1.7992663383483887,
      "learning_rate": 3.9152138546778625e-06,
      "loss": 0.2604,
      "step": 1005
    },
    {
      "epoch": 16.56490134994808,
      "grad_norm": 1.520974040031433,
      "learning_rate": 3.7220831890750067e-06,
      "loss": 0.2571,
      "step": 1010
    },
    {
      "epoch": 16.64797507788162,
      "grad_norm": 1.9732450246810913,
      "learning_rate": 3.5334535719846766e-06,
      "loss": 0.2659,
      "step": 1015
    },
    {
      "epoch": 16.73104880581516,
      "grad_norm": 1.7136303186416626,
      "learning_rate": 3.3493649053890326e-06,
      "loss": 0.2565,
      "step": 1020
    },
    {
      "epoch": 16.8141225337487,
      "grad_norm": 1.6919972896575928,
      "learning_rate": 3.1698561306951064e-06,
      "loss": 0.254,
      "step": 1025
    },
    {
      "epoch": 16.897196261682243,
      "grad_norm": 1.8692378997802734,
      "learning_rate": 2.9949652204972254e-06,
      "loss": 0.2529,
      "step": 1030
    },
    {
      "epoch": 16.980269989615785,
      "grad_norm": 1.798110842704773,
      "learning_rate": 2.8247291705444575e-06,
      "loss": 0.2631,
      "step": 1035
    },
    {
      "epoch": 17.049844236760123,
      "grad_norm": 1.5230650901794434,
      "learning_rate": 2.659183991914696e-06,
      "loss": 0.2521,
      "step": 1040
    },
    {
      "epoch": 17.132917964693664,
      "grad_norm": 1.6472920179367065,
      "learning_rate": 2.4983647033969714e-06,
      "loss": 0.2463,
      "step": 1045
    },
    {
      "epoch": 17.215991692627206,
      "grad_norm": 1.6704550981521606,
      "learning_rate": 2.3423053240837515e-06,
      "loss": 0.2453,
      "step": 1050
    },
    {
      "epoch": 17.299065420560748,
      "grad_norm": 1.550706386566162,
      "learning_rate": 2.1910388661746493e-06,
      "loss": 0.2487,
      "step": 1055
    },
    {
      "epoch": 17.38213914849429,
      "grad_norm": 1.7238280773162842,
      "learning_rate": 2.044597327993153e-06,
      "loss": 0.2511,
      "step": 1060
    },
    {
      "epoch": 17.46521287642783,
      "grad_norm": 1.5124459266662598,
      "learning_rate": 1.9030116872178316e-06,
      "loss": 0.2436,
      "step": 1065
    },
    {
      "epoch": 17.54828660436137,
      "grad_norm": 1.7043637037277222,
      "learning_rate": 1.7663118943294366e-06,
      "loss": 0.2491,
      "step": 1070
    },
    {
      "epoch": 17.63136033229491,
      "grad_norm": 1.7300227880477905,
      "learning_rate": 1.6345268662752906e-06,
      "loss": 0.2464,
      "step": 1075
    },
    {
      "epoch": 17.714434060228452,
      "grad_norm": 1.9400359392166138,
      "learning_rate": 1.5076844803522922e-06,
      "loss": 0.2576,
      "step": 1080
    },
    {
      "epoch": 17.797507788161994,
      "grad_norm": 1.5764034986495972,
      "learning_rate": 1.3858115683098832e-06,
      "loss": 0.2512,
      "step": 1085
    },
    {
      "epoch": 17.880581516095535,
      "grad_norm": 1.7540063858032227,
      "learning_rate": 1.2689339106741527e-06,
      "loss": 0.2484,
      "step": 1090
    },
    {
      "epoch": 17.963655244029077,
      "grad_norm": 1.5876474380493164,
      "learning_rate": 1.1570762312943295e-06,
      "loss": 0.247,
      "step": 1095
    },
    {
      "epoch": 18.033229491173415,
      "grad_norm": 1.723120093345642,
      "learning_rate": 1.0502621921127776e-06,
      "loss": 0.2509,
      "step": 1100
    },
    {
      "epoch": 18.116303219106957,
      "grad_norm": 1.549980640411377,
      "learning_rate": 9.485143881596714e-07,
      "loss": 0.2565,
      "step": 1105
    },
    {
      "epoch": 18.1993769470405,
      "grad_norm": 1.685396671295166,
      "learning_rate": 8.51854342773295e-07,
      "loss": 0.2386,
      "step": 1110
    },
    {
      "epoch": 18.28245067497404,
      "grad_norm": 1.5742353200912476,
      "learning_rate": 7.603025030471e-07,
      "loss": 0.2429,
      "step": 1115
    },
    {
      "epoch": 18.36552440290758,
      "grad_norm": 1.8015878200531006,
      "learning_rate": 6.738782355044049e-07,
      "loss": 0.2441,
      "step": 1120
    },
    {
      "epoch": 18.44859813084112,
      "grad_norm": 1.6814690828323364,
      "learning_rate": 5.925998220016659e-07,
      "loss": 0.2445,
      "step": 1125
    },
    {
      "epoch": 18.53167185877466,
      "grad_norm": 1.7425857782363892,
      "learning_rate": 5.164844558612131e-07,
      "loss": 0.2429,
      "step": 1130
    },
    {
      "epoch": 18.614745586708203,
      "grad_norm": 1.6815589666366577,
      "learning_rate": 4.4554823823423354e-07,
      "loss": 0.2523,
      "step": 1135
    },
    {
      "epoch": 18.697819314641745,
      "grad_norm": 1.819124698638916,
      "learning_rate": 3.7980617469479953e-07,
      "loss": 0.2567,
      "step": 1140
    },
    {
      "epoch": 18.780893042575286,
      "grad_norm": 1.7623286247253418,
      "learning_rate": 3.192721720656489e-07,
      "loss": 0.2354,
      "step": 1145
    },
    {
      "epoch": 18.863966770508828,
      "grad_norm": 1.9344545602798462,
      "learning_rate": 2.6395903547638825e-07,
      "loss": 0.2535,
      "step": 1150
    },
    {
      "epoch": 18.947040498442366,
      "grad_norm": 1.6039988994598389,
      "learning_rate": 2.1387846565474045e-07,
      "loss": 0.2361,
      "step": 1155
    },
    {
      "epoch": 19.016614745586708,
      "grad_norm": 1.6311196088790894,
      "learning_rate": 1.6904105645142444e-07,
      "loss": 0.2456,
      "step": 1160
    },
    {
      "epoch": 19.09968847352025,
      "grad_norm": 1.8545303344726562,
      "learning_rate": 1.2945629259917546e-07,
      "loss": 0.2582,
      "step": 1165
    },
    {
      "epoch": 19.18276220145379,
      "grad_norm": 1.8312244415283203,
      "learning_rate": 9.513254770636137e-08,
      "loss": 0.2447,
      "step": 1170
    },
    {
      "epoch": 19.265835929387332,
      "grad_norm": 1.65512216091156,
      "learning_rate": 6.607708248569377e-08,
      "loss": 0.2456,
      "step": 1175
    },
    {
      "epoch": 19.34890965732087,
      "grad_norm": 1.5663472414016724,
      "learning_rate": 4.229604321829561e-08,
      "loss": 0.246,
      "step": 1180
    },
    {
      "epoch": 19.431983385254412,
      "grad_norm": 1.8947585821151733,
      "learning_rate": 2.3794460453555047e-08,
      "loss": 0.243,
      "step": 1185
    },
    {
      "epoch": 19.515057113187954,
      "grad_norm": 1.7258800268173218,
      "learning_rate": 1.0576247944985018e-08,
      "loss": 0.2435,
      "step": 1190
    },
    {
      "epoch": 19.598130841121495,
      "grad_norm": 1.59407639503479,
      "learning_rate": 2.6442018223132857e-09,
      "loss": 0.2415,
      "step": 1195
    },
    {
      "epoch": 19.681204569055037,
      "grad_norm": 1.5066112279891968,
      "learning_rate": 0.0,
      "loss": 0.2383,
      "step": 1200
    }
  ],
  "logging_steps": 5,
  "max_steps": 1200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 20,
  "save_steps": 60,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.17534376829911e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}