{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 0,
  "global_step": 239,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0041841004184100415,
      "grad_norm": 0.6393154263496399,
      "learning_rate": 9.958158995815901e-06,
      "loss": 1.9217,
      "step": 1
    },
    {
      "epoch": 0.008368200836820083,
      "grad_norm": 0.63503098487854,
      "learning_rate": 9.9163179916318e-06,
      "loss": 1.9545,
      "step": 2
    },
    {
      "epoch": 0.012552301255230125,
      "grad_norm": 0.6524788737297058,
      "learning_rate": 9.874476987447699e-06,
      "loss": 2.0203,
      "step": 3
    },
    {
      "epoch": 0.016736401673640166,
      "grad_norm": 0.6311175227165222,
      "learning_rate": 9.8326359832636e-06,
      "loss": 1.9856,
      "step": 4
    },
    {
      "epoch": 0.02092050209205021,
      "grad_norm": 0.5862050652503967,
      "learning_rate": 9.790794979079498e-06,
      "loss": 1.921,
      "step": 5
    },
    {
      "epoch": 0.02510460251046025,
      "grad_norm": 0.5720086693763733,
      "learning_rate": 9.748953974895399e-06,
      "loss": 1.8198,
      "step": 6
    },
    {
      "epoch": 0.029288702928870293,
      "grad_norm": 0.6093158721923828,
      "learning_rate": 9.707112970711298e-06,
      "loss": 1.9426,
      "step": 7
    },
    {
      "epoch": 0.03347280334728033,
      "grad_norm": 0.5525884628295898,
      "learning_rate": 9.665271966527198e-06,
      "loss": 1.8915,
      "step": 8
    },
    {
      "epoch": 0.03765690376569038,
      "grad_norm": 0.5531294941902161,
      "learning_rate": 9.623430962343097e-06,
      "loss": 1.926,
      "step": 9
    },
    {
      "epoch": 0.04184100418410042,
      "grad_norm": 0.5190953612327576,
      "learning_rate": 9.581589958158996e-06,
      "loss": 1.8451,
      "step": 10
    },
    {
      "epoch": 0.04602510460251046,
      "grad_norm": 0.510351300239563,
      "learning_rate": 9.539748953974896e-06,
      "loss": 1.8092,
      "step": 11
    },
    {
      "epoch": 0.0502092050209205,
      "grad_norm": 0.4969535171985626,
      "learning_rate": 9.497907949790795e-06,
      "loss": 1.7935,
      "step": 12
    },
    {
      "epoch": 0.05439330543933055,
      "grad_norm": 0.5025241374969482,
      "learning_rate": 9.456066945606696e-06,
      "loss": 1.9057,
      "step": 13
    },
    {
      "epoch": 0.058577405857740586,
      "grad_norm": 0.5688336491584778,
      "learning_rate": 9.414225941422594e-06,
      "loss": 1.8393,
      "step": 14
    },
    {
      "epoch": 0.06276150627615062,
      "grad_norm": 0.47667714953422546,
      "learning_rate": 9.372384937238495e-06,
      "loss": 1.8035,
      "step": 15
    },
    {
      "epoch": 0.06694560669456066,
      "grad_norm": 0.5087830424308777,
      "learning_rate": 9.330543933054394e-06,
      "loss": 1.8427,
      "step": 16
    },
    {
      "epoch": 0.07112970711297072,
      "grad_norm": 0.4639821946620941,
      "learning_rate": 9.288702928870293e-06,
      "loss": 1.7071,
      "step": 17
    },
    {
      "epoch": 0.07531380753138076,
      "grad_norm": 0.4799270033836365,
      "learning_rate": 9.246861924686193e-06,
      "loss": 1.782,
      "step": 18
    },
    {
      "epoch": 0.0794979079497908,
      "grad_norm": 0.4342242181301117,
      "learning_rate": 9.205020920502092e-06,
      "loss": 1.7014,
      "step": 19
    },
    {
      "epoch": 0.08368200836820083,
      "grad_norm": 0.44706302881240845,
      "learning_rate": 9.163179916317992e-06,
      "loss": 1.7027,
      "step": 20
    },
    {
      "epoch": 0.08786610878661087,
      "grad_norm": 0.4259682893753052,
      "learning_rate": 9.121338912133893e-06,
      "loss": 1.745,
      "step": 21
    },
    {
      "epoch": 0.09205020920502092,
      "grad_norm": 0.40614959597587585,
      "learning_rate": 9.079497907949792e-06,
      "loss": 1.6348,
      "step": 22
    },
    {
      "epoch": 0.09623430962343096,
      "grad_norm": 0.41480347514152527,
      "learning_rate": 9.03765690376569e-06,
      "loss": 1.6336,
      "step": 23
    },
    {
      "epoch": 0.100418410041841,
      "grad_norm": 0.40425121784210205,
      "learning_rate": 8.995815899581591e-06,
      "loss": 1.6534,
      "step": 24
    },
    {
      "epoch": 0.10460251046025104,
      "grad_norm": 0.40777212381362915,
      "learning_rate": 8.95397489539749e-06,
      "loss": 1.7132,
      "step": 25
    },
    {
      "epoch": 0.1087866108786611,
      "grad_norm": 0.41130122542381287,
      "learning_rate": 8.91213389121339e-06,
      "loss": 1.6169,
      "step": 26
    },
    {
      "epoch": 0.11297071129707113,
      "grad_norm": 0.47633782029151917,
      "learning_rate": 8.87029288702929e-06,
      "loss": 1.6586,
      "step": 27
    },
    {
      "epoch": 0.11715481171548117,
      "grad_norm": 0.43167147040367126,
      "learning_rate": 8.82845188284519e-06,
      "loss": 1.6174,
      "step": 28
    },
    {
      "epoch": 0.12133891213389121,
      "grad_norm": 0.4019918739795685,
      "learning_rate": 8.786610878661089e-06,
      "loss": 1.6277,
      "step": 29
    },
    {
      "epoch": 0.12552301255230125,
      "grad_norm": 0.3712836802005768,
      "learning_rate": 8.744769874476987e-06,
      "loss": 1.6287,
      "step": 30
    },
    {
      "epoch": 0.1297071129707113,
      "grad_norm": 0.4354589283466339,
      "learning_rate": 8.702928870292888e-06,
      "loss": 1.6389,
      "step": 31
    },
    {
      "epoch": 0.13389121338912133,
      "grad_norm": 0.386348694562912,
      "learning_rate": 8.661087866108787e-06,
      "loss": 1.6217,
      "step": 32
    },
    {
      "epoch": 0.13807531380753138,
      "grad_norm": 0.3922019600868225,
      "learning_rate": 8.619246861924687e-06,
      "loss": 1.6063,
      "step": 33
    },
    {
      "epoch": 0.14225941422594143,
      "grad_norm": 0.34336555004119873,
      "learning_rate": 8.577405857740586e-06,
      "loss": 1.5818,
      "step": 34
    },
    {
      "epoch": 0.14644351464435146,
      "grad_norm": 0.39002758264541626,
      "learning_rate": 8.535564853556487e-06,
      "loss": 1.6544,
      "step": 35
    },
    {
      "epoch": 0.1506276150627615,
      "grad_norm": 0.32121986150741577,
      "learning_rate": 8.493723849372385e-06,
      "loss": 1.5688,
      "step": 36
    },
    {
      "epoch": 0.15481171548117154,
      "grad_norm": 0.33398061990737915,
      "learning_rate": 8.451882845188284e-06,
      "loss": 1.5353,
      "step": 37
    },
    {
      "epoch": 0.1589958158995816,
      "grad_norm": 0.32710814476013184,
      "learning_rate": 8.410041841004185e-06,
      "loss": 1.5489,
      "step": 38
    },
    {
      "epoch": 0.16317991631799164,
      "grad_norm": 0.3649294674396515,
      "learning_rate": 8.368200836820084e-06,
      "loss": 1.5735,
      "step": 39
    },
    {
      "epoch": 0.16736401673640167,
      "grad_norm": 0.3286899924278259,
      "learning_rate": 8.326359832635984e-06,
      "loss": 1.5176,
      "step": 40
    },
    {
      "epoch": 0.17154811715481172,
      "grad_norm": 0.4433610439300537,
      "learning_rate": 8.284518828451885e-06,
      "loss": 1.4534,
      "step": 41
    },
    {
      "epoch": 0.17573221757322174,
      "grad_norm": 0.2977966368198395,
      "learning_rate": 8.242677824267783e-06,
      "loss": 1.4857,
      "step": 42
    },
    {
      "epoch": 0.1799163179916318,
      "grad_norm": 0.356638103723526,
      "learning_rate": 8.200836820083682e-06,
      "loss": 1.5164,
      "step": 43
    },
    {
      "epoch": 0.18410041841004185,
      "grad_norm": 0.3443019390106201,
      "learning_rate": 8.158995815899581e-06,
      "loss": 1.6193,
      "step": 44
    },
    {
      "epoch": 0.18828451882845187,
      "grad_norm": 0.35785749554634094,
      "learning_rate": 8.117154811715482e-06,
      "loss": 1.5293,
      "step": 45
    },
    {
      "epoch": 0.19246861924686193,
      "grad_norm": 0.3255775272846222,
      "learning_rate": 8.075313807531382e-06,
      "loss": 1.5232,
      "step": 46
    },
    {
      "epoch": 0.19665271966527198,
      "grad_norm": 0.3624202311038971,
      "learning_rate": 8.033472803347281e-06,
      "loss": 1.4129,
      "step": 47
    },
    {
      "epoch": 0.200836820083682,
      "grad_norm": 0.3205145001411438,
      "learning_rate": 7.991631799163181e-06,
      "loss": 1.4812,
      "step": 48
    },
    {
      "epoch": 0.20502092050209206,
      "grad_norm": 0.43222349882125854,
      "learning_rate": 7.94979079497908e-06,
      "loss": 1.5202,
      "step": 49
    },
    {
      "epoch": 0.20920502092050208,
      "grad_norm": 0.31052061915397644,
      "learning_rate": 7.907949790794979e-06,
      "loss": 1.5129,
      "step": 50
    },
    {
      "epoch": 0.21338912133891214,
      "grad_norm": 0.33750268816947937,
      "learning_rate": 7.86610878661088e-06,
      "loss": 1.4717,
      "step": 51
    },
    {
      "epoch": 0.2175732217573222,
      "grad_norm": 0.3067304790019989,
      "learning_rate": 7.824267782426778e-06,
      "loss": 1.4624,
      "step": 52
    },
    {
      "epoch": 0.2217573221757322,
      "grad_norm": 0.3119943141937256,
      "learning_rate": 7.782426778242679e-06,
      "loss": 1.426,
      "step": 53
    },
    {
      "epoch": 0.22594142259414227,
      "grad_norm": 0.32612940669059753,
      "learning_rate": 7.740585774058578e-06,
      "loss": 1.4734,
      "step": 54
    },
    {
      "epoch": 0.2301255230125523,
      "grad_norm": 0.3576732277870178,
      "learning_rate": 7.698744769874478e-06,
      "loss": 1.4905,
      "step": 55
    },
    {
      "epoch": 0.23430962343096234,
      "grad_norm": 0.30397745966911316,
      "learning_rate": 7.656903765690377e-06,
      "loss": 1.4504,
      "step": 56
    },
    {
      "epoch": 0.2384937238493724,
      "grad_norm": 0.2985887825489044,
      "learning_rate": 7.615062761506277e-06,
      "loss": 1.4289,
      "step": 57
    },
    {
      "epoch": 0.24267782426778242,
      "grad_norm": 0.3366187512874603,
      "learning_rate": 7.573221757322176e-06,
      "loss": 1.4332,
      "step": 58
    },
    {
      "epoch": 0.24686192468619247,
      "grad_norm": 0.3567534387111664,
      "learning_rate": 7.531380753138075e-06,
      "loss": 1.3826,
      "step": 59
    },
    {
      "epoch": 0.2510460251046025,
      "grad_norm": 0.3008073568344116,
      "learning_rate": 7.489539748953976e-06,
      "loss": 1.4754,
      "step": 60
    },
    {
      "epoch": 0.25523012552301255,
      "grad_norm": 0.29970571398735046,
      "learning_rate": 7.4476987447698746e-06,
      "loss": 1.459,
      "step": 61
    },
    {
      "epoch": 0.2594142259414226,
      "grad_norm": 0.4011840522289276,
      "learning_rate": 7.405857740585774e-06,
      "loss": 1.4442,
      "step": 62
    },
    {
      "epoch": 0.26359832635983266,
      "grad_norm": 0.3857061564922333,
      "learning_rate": 7.364016736401675e-06,
      "loss": 1.4669,
      "step": 63
    },
    {
      "epoch": 0.26778242677824265,
      "grad_norm": 0.34745919704437256,
      "learning_rate": 7.3221757322175736e-06,
      "loss": 1.386,
      "step": 64
    },
    {
      "epoch": 0.2719665271966527,
      "grad_norm": 0.37241384387016296,
      "learning_rate": 7.280334728033473e-06,
      "loss": 1.4307,
      "step": 65
    },
    {
      "epoch": 0.27615062761506276,
      "grad_norm": 0.3590523898601532,
      "learning_rate": 7.238493723849372e-06,
      "loss": 1.4181,
      "step": 66
    },
    {
      "epoch": 0.2803347280334728,
      "grad_norm": 0.3953969180583954,
      "learning_rate": 7.1966527196652726e-06,
      "loss": 1.3858,
      "step": 67
    },
    {
      "epoch": 0.28451882845188287,
      "grad_norm": 0.43755462765693665,
      "learning_rate": 7.154811715481172e-06,
      "loss": 1.4155,
      "step": 68
    },
    {
      "epoch": 0.28870292887029286,
      "grad_norm": 0.355756938457489,
      "learning_rate": 7.112970711297071e-06,
      "loss": 1.3971,
      "step": 69
    },
    {
      "epoch": 0.2928870292887029,
      "grad_norm": 0.4370933175086975,
      "learning_rate": 7.0711297071129716e-06,
      "loss": 1.3027,
      "step": 70
    },
    {
      "epoch": 0.29707112970711297,
      "grad_norm": 0.31537461280822754,
      "learning_rate": 7.02928870292887e-06,
      "loss": 1.4147,
      "step": 71
    },
    {
      "epoch": 0.301255230125523,
      "grad_norm": 0.32908475399017334,
      "learning_rate": 6.98744769874477e-06,
      "loss": 1.4637,
      "step": 72
    },
    {
      "epoch": 0.3054393305439331,
      "grad_norm": 0.2952563464641571,
      "learning_rate": 6.9456066945606706e-06,
      "loss": 1.3477,
      "step": 73
    },
    {
      "epoch": 0.30962343096234307,
      "grad_norm": 0.29872894287109375,
      "learning_rate": 6.903765690376569e-06,
      "loss": 1.4093,
      "step": 74
    },
    {
      "epoch": 0.3138075313807531,
      "grad_norm": 0.3141920566558838,
      "learning_rate": 6.861924686192469e-06,
      "loss": 1.3911,
      "step": 75
    },
    {
      "epoch": 0.3179916317991632,
      "grad_norm": 0.3398382067680359,
      "learning_rate": 6.820083682008368e-06,
      "loss": 1.437,
      "step": 76
    },
    {
      "epoch": 0.32217573221757323,
      "grad_norm": 0.40376701951026917,
      "learning_rate": 6.778242677824268e-06,
      "loss": 1.3147,
      "step": 77
    },
    {
      "epoch": 0.3263598326359833,
      "grad_norm": 0.30617496371269226,
      "learning_rate": 6.736401673640168e-06,
      "loss": 1.3865,
      "step": 78
    },
    {
      "epoch": 0.3305439330543933,
      "grad_norm": 0.338013619184494,
      "learning_rate": 6.694560669456067e-06,
      "loss": 1.3723,
      "step": 79
    },
    {
      "epoch": 0.33472803347280333,
      "grad_norm": 0.4524187445640564,
      "learning_rate": 6.652719665271967e-06,
      "loss": 1.3681,
      "step": 80
    },
    {
      "epoch": 0.3389121338912134,
      "grad_norm": 0.33039766550064087,
      "learning_rate": 6.610878661087866e-06,
      "loss": 1.4012,
      "step": 81
    },
    {
      "epoch": 0.34309623430962344,
      "grad_norm": 0.434511661529541,
      "learning_rate": 6.569037656903766e-06,
      "loss": 1.4246,
      "step": 82
    },
    {
      "epoch": 0.3472803347280335,
      "grad_norm": 0.3561013340950012,
      "learning_rate": 6.527196652719666e-06,
      "loss": 1.3597,
      "step": 83
    },
    {
      "epoch": 0.3514644351464435,
      "grad_norm": 0.4421449899673462,
      "learning_rate": 6.485355648535565e-06,
      "loss": 1.4445,
      "step": 84
    },
    {
      "epoch": 0.35564853556485354,
      "grad_norm": 0.332782119512558,
      "learning_rate": 6.443514644351465e-06,
      "loss": 1.4093,
      "step": 85
    },
    {
      "epoch": 0.3598326359832636,
      "grad_norm": 0.35183823108673096,
      "learning_rate": 6.401673640167364e-06,
      "loss": 1.3283,
      "step": 86
    },
    {
      "epoch": 0.36401673640167365,
      "grad_norm": 0.33525529503822327,
      "learning_rate": 6.359832635983264e-06,
      "loss": 1.4419,
      "step": 87
    },
    {
      "epoch": 0.3682008368200837,
      "grad_norm": 0.33096978068351746,
      "learning_rate": 6.317991631799164e-06,
      "loss": 1.3691,
      "step": 88
    },
    {
      "epoch": 0.3723849372384937,
      "grad_norm": 0.3147584795951843,
      "learning_rate": 6.276150627615063e-06,
      "loss": 1.4014,
      "step": 89
    },
    {
      "epoch": 0.37656903765690375,
      "grad_norm": 0.3018977642059326,
      "learning_rate": 6.234309623430963e-06,
      "loss": 1.3661,
      "step": 90
    },
    {
      "epoch": 0.3807531380753138,
      "grad_norm": 0.31138119101524353,
      "learning_rate": 6.192468619246862e-06,
      "loss": 1.3163,
      "step": 91
    },
    {
      "epoch": 0.38493723849372385,
      "grad_norm": 0.35807517170906067,
      "learning_rate": 6.150627615062762e-06,
      "loss": 1.3546,
      "step": 92
    },
    {
      "epoch": 0.3891213389121339,
      "grad_norm": 0.3687175512313843,
      "learning_rate": 6.108786610878662e-06,
      "loss": 1.3204,
      "step": 93
    },
    {
      "epoch": 0.39330543933054396,
      "grad_norm": 0.3205777704715729,
      "learning_rate": 6.066945606694561e-06,
      "loss": 1.3651,
      "step": 94
    },
    {
      "epoch": 0.39748953974895396,
      "grad_norm": 0.3424644470214844,
      "learning_rate": 6.025104602510461e-06,
      "loss": 1.3837,
      "step": 95
    },
    {
      "epoch": 0.401673640167364,
      "grad_norm": 0.334182471036911,
      "learning_rate": 5.9832635983263595e-06,
      "loss": 1.3737,
      "step": 96
    },
    {
      "epoch": 0.40585774058577406,
      "grad_norm": 0.32831668853759766,
      "learning_rate": 5.94142259414226e-06,
      "loss": 1.3455,
      "step": 97
    },
    {
      "epoch": 0.4100418410041841,
      "grad_norm": 0.2965930700302124,
      "learning_rate": 5.89958158995816e-06,
      "loss": 1.3143,
      "step": 98
    },
    {
      "epoch": 0.41422594142259417,
      "grad_norm": 0.38945430517196655,
      "learning_rate": 5.8577405857740585e-06,
      "loss": 1.3119,
      "step": 99
    },
    {
      "epoch": 0.41841004184100417,
      "grad_norm": 0.3289240896701813,
      "learning_rate": 5.815899581589959e-06,
      "loss": 1.3754,
      "step": 100
    },
    {
      "epoch": 0.4225941422594142,
      "grad_norm": 0.3681870996952057,
      "learning_rate": 5.774058577405858e-06,
      "loss": 1.3732,
      "step": 101
    },
    {
      "epoch": 0.42677824267782427,
      "grad_norm": 0.36226436495780945,
      "learning_rate": 5.7322175732217575e-06,
      "loss": 1.3542,
      "step": 102
    },
    {
      "epoch": 0.4309623430962343,
      "grad_norm": 0.5703559517860413,
      "learning_rate": 5.690376569037658e-06,
      "loss": 1.4442,
      "step": 103
    },
    {
      "epoch": 0.4351464435146444,
      "grad_norm": 0.3629617393016815,
      "learning_rate": 5.648535564853557e-06,
      "loss": 1.3407,
      "step": 104
    },
    {
      "epoch": 0.4393305439330544,
      "grad_norm": 0.31508779525756836,
      "learning_rate": 5.6066945606694565e-06,
      "loss": 1.369,
      "step": 105
    },
    {
      "epoch": 0.4435146443514644,
      "grad_norm": 0.4534814655780792,
      "learning_rate": 5.564853556485355e-06,
      "loss": 1.3886,
      "step": 106
    },
    {
      "epoch": 0.4476987447698745,
      "grad_norm": 0.3670834004878998,
      "learning_rate": 5.523012552301256e-06,
      "loss": 1.3609,
      "step": 107
    },
    {
      "epoch": 0.45188284518828453,
      "grad_norm": 0.3328559994697571,
      "learning_rate": 5.4811715481171555e-06,
      "loss": 1.2958,
      "step": 108
    },
    {
      "epoch": 0.4560669456066946,
      "grad_norm": 0.30234208703041077,
      "learning_rate": 5.439330543933054e-06,
      "loss": 1.3321,
      "step": 109
    },
    {
      "epoch": 0.4602510460251046,
      "grad_norm": 0.3368891179561615,
      "learning_rate": 5.397489539748955e-06,
      "loss": 1.4353,
      "step": 110
    },
    {
      "epoch": 0.46443514644351463,
      "grad_norm": 0.3192000389099121,
      "learning_rate": 5.355648535564854e-06,
      "loss": 1.3775,
      "step": 111
    },
    {
      "epoch": 0.4686192468619247,
      "grad_norm": 0.4967532753944397,
      "learning_rate": 5.313807531380753e-06,
      "loss": 1.3357,
      "step": 112
    },
    {
      "epoch": 0.47280334728033474,
      "grad_norm": 0.3520500063896179,
      "learning_rate": 5.271966527196654e-06,
      "loss": 1.3523,
      "step": 113
    },
    {
      "epoch": 0.4769874476987448,
      "grad_norm": 0.3255818486213684,
      "learning_rate": 5.230125523012553e-06,
      "loss": 1.3396,
      "step": 114
    },
    {
      "epoch": 0.4811715481171548,
      "grad_norm": 0.34796151518821716,
      "learning_rate": 5.188284518828452e-06,
      "loss": 1.3115,
      "step": 115
    },
    {
      "epoch": 0.48535564853556484,
      "grad_norm": 0.3277406692504883,
      "learning_rate": 5.146443514644351e-06,
      "loss": 1.3258,
      "step": 116
    },
    {
      "epoch": 0.4895397489539749,
      "grad_norm": 0.33442819118499756,
      "learning_rate": 5.104602510460252e-06,
      "loss": 1.348,
      "step": 117
    },
    {
      "epoch": 0.49372384937238495,
      "grad_norm": 0.32950884103775024,
      "learning_rate": 5.062761506276151e-06,
      "loss": 1.3909,
      "step": 118
    },
    {
      "epoch": 0.497907949790795,
      "grad_norm": 0.33458420634269714,
      "learning_rate": 5.02092050209205e-06,
      "loss": 1.2859,
      "step": 119
    },
    {
      "epoch": 0.502092050209205,
      "grad_norm": 0.396798312664032,
      "learning_rate": 4.979079497907951e-06,
      "loss": 1.3061,
      "step": 120
    },
    {
      "epoch": 0.5062761506276151,
      "grad_norm": 0.3303851783275604,
      "learning_rate": 4.9372384937238495e-06,
      "loss": 1.3011,
      "step": 121
    },
    {
      "epoch": 0.5104602510460251,
      "grad_norm": 0.3201311528682709,
      "learning_rate": 4.895397489539749e-06,
      "loss": 1.274,
      "step": 122
    },
    {
      "epoch": 0.5146443514644351,
      "grad_norm": 0.48614901304244995,
      "learning_rate": 4.853556485355649e-06,
      "loss": 1.3494,
      "step": 123
    },
    {
      "epoch": 0.5188284518828452,
      "grad_norm": 0.31757262349128723,
      "learning_rate": 4.8117154811715485e-06,
      "loss": 1.3184,
      "step": 124
    },
    {
      "epoch": 0.5230125523012552,
      "grad_norm": 0.329109251499176,
      "learning_rate": 4.769874476987448e-06,
      "loss": 1.3895,
      "step": 125
    },
    {
      "epoch": 0.5271966527196653,
      "grad_norm": 0.5232523679733276,
      "learning_rate": 4.728033472803348e-06,
      "loss": 1.3904,
      "step": 126
    },
    {
      "epoch": 0.5313807531380753,
      "grad_norm": 0.332783043384552,
      "learning_rate": 4.6861924686192475e-06,
      "loss": 1.3277,
      "step": 127
    },
    {
      "epoch": 0.5355648535564853,
      "grad_norm": 0.31772381067276,
      "learning_rate": 4.644351464435146e-06,
      "loss": 1.3137,
      "step": 128
    },
    {
      "epoch": 0.5397489539748954,
      "grad_norm": 0.4796607792377472,
      "learning_rate": 4.602510460251046e-06,
      "loss": 1.3967,
      "step": 129
    },
    {
      "epoch": 0.5439330543933054,
      "grad_norm": 0.4687912166118622,
      "learning_rate": 4.5606694560669465e-06,
      "loss": 1.3456,
      "step": 130
    },
    {
      "epoch": 0.5481171548117155,
      "grad_norm": 0.33422547578811646,
      "learning_rate": 4.518828451882845e-06,
      "loss": 1.2768,
      "step": 131
    },
    {
      "epoch": 0.5523012552301255,
      "grad_norm": 0.3974786698818207,
      "learning_rate": 4.476987447698745e-06,
      "loss": 1.2981,
      "step": 132
    },
    {
      "epoch": 0.5564853556485355,
      "grad_norm": 0.34788477420806885,
      "learning_rate": 4.435146443514645e-06,
      "loss": 1.2914,
      "step": 133
    },
    {
      "epoch": 0.5606694560669456,
      "grad_norm": 0.3345721364021301,
      "learning_rate": 4.393305439330544e-06,
      "loss": 1.3216,
      "step": 134
    },
    {
      "epoch": 0.5648535564853556,
      "grad_norm": 0.3529014587402344,
      "learning_rate": 4.351464435146444e-06,
      "loss": 1.2673,
      "step": 135
    },
    {
      "epoch": 0.5690376569037657,
      "grad_norm": 0.32846778631210327,
      "learning_rate": 4.309623430962344e-06,
      "loss": 1.3022,
      "step": 136
    },
    {
      "epoch": 0.5732217573221757,
      "grad_norm": 0.5123084187507629,
      "learning_rate": 4.267782426778243e-06,
      "loss": 1.3141,
      "step": 137
    },
    {
      "epoch": 0.5774058577405857,
      "grad_norm": 0.3312026560306549,
      "learning_rate": 4.225941422594142e-06,
      "loss": 1.2946,
      "step": 138
    },
    {
      "epoch": 0.5815899581589958,
      "grad_norm": 0.4771478772163391,
      "learning_rate": 4.184100418410042e-06,
      "loss": 1.2487,
      "step": 139
    },
    {
      "epoch": 0.5857740585774058,
      "grad_norm": 0.38952118158340454,
      "learning_rate": 4.142259414225942e-06,
      "loss": 1.2993,
      "step": 140
    },
    {
      "epoch": 0.5899581589958159,
      "grad_norm": 0.43234747648239136,
      "learning_rate": 4.100418410041841e-06,
      "loss": 1.2749,
      "step": 141
    },
    {
      "epoch": 0.5941422594142259,
      "grad_norm": 0.5283378958702087,
      "learning_rate": 4.058577405857741e-06,
      "loss": 1.3033,
      "step": 142
    },
    {
      "epoch": 0.5983263598326359,
      "grad_norm": 0.3307763934135437,
      "learning_rate": 4.0167364016736405e-06,
      "loss": 1.289,
      "step": 143
    },
    {
      "epoch": 0.602510460251046,
      "grad_norm": 0.3551783263683319,
      "learning_rate": 3.97489539748954e-06,
      "loss": 1.3522,
      "step": 144
    },
    {
      "epoch": 0.606694560669456,
      "grad_norm": 0.35077103972435,
      "learning_rate": 3.93305439330544e-06,
      "loss": 1.3701,
      "step": 145
    },
    {
      "epoch": 0.6108786610878661,
      "grad_norm": 0.32242652773857117,
      "learning_rate": 3.8912133891213395e-06,
      "loss": 1.3106,
      "step": 146
    },
    {
      "epoch": 0.6150627615062761,
      "grad_norm": 0.33282139897346497,
      "learning_rate": 3.849372384937239e-06,
      "loss": 1.2679,
      "step": 147
    },
    {
      "epoch": 0.6192468619246861,
      "grad_norm": 0.35546955466270447,
      "learning_rate": 3.8075313807531384e-06,
      "loss": 1.256,
      "step": 148
    },
    {
      "epoch": 0.6234309623430963,
      "grad_norm": 0.3453517556190491,
      "learning_rate": 3.7656903765690376e-06,
      "loss": 1.32,
      "step": 149
    },
    {
      "epoch": 0.6276150627615062,
      "grad_norm": 0.36277034878730774,
      "learning_rate": 3.7238493723849373e-06,
      "loss": 1.3104,
      "step": 150
    },
    {
      "epoch": 0.6317991631799164,
      "grad_norm": 0.3291323482990265,
      "learning_rate": 3.6820083682008374e-06,
      "loss": 1.2807,
      "step": 151
    },
    {
      "epoch": 0.6359832635983264,
      "grad_norm": 0.5009307265281677,
      "learning_rate": 3.6401673640167366e-06,
      "loss": 1.2196,
      "step": 152
    },
    {
      "epoch": 0.6401673640167364,
      "grad_norm": 0.3647055923938751,
      "learning_rate": 3.5983263598326363e-06,
      "loss": 1.2719,
      "step": 153
    },
    {
      "epoch": 0.6443514644351465,
      "grad_norm": 0.3235703110694885,
      "learning_rate": 3.5564853556485355e-06,
      "loss": 1.2871,
      "step": 154
    },
    {
      "epoch": 0.6485355648535565,
      "grad_norm": 0.4133061170578003,
      "learning_rate": 3.514644351464435e-06,
      "loss": 1.3943,
      "step": 155
    },
    {
      "epoch": 0.6527196652719666,
      "grad_norm": 0.3607695400714874,
      "learning_rate": 3.4728033472803353e-06,
      "loss": 1.3791,
      "step": 156
    },
    {
      "epoch": 0.6569037656903766,
      "grad_norm": 0.5460565686225891,
      "learning_rate": 3.4309623430962345e-06,
      "loss": 1.2695,
      "step": 157
    },
    {
      "epoch": 0.6610878661087866,
      "grad_norm": 0.3771589994430542,
      "learning_rate": 3.389121338912134e-06,
      "loss": 1.2643,
      "step": 158
    },
    {
      "epoch": 0.6652719665271967,
      "grad_norm": 0.3366248309612274,
      "learning_rate": 3.3472803347280334e-06,
      "loss": 1.2972,
      "step": 159
    },
    {
      "epoch": 0.6694560669456067,
      "grad_norm": 0.3988908529281616,
      "learning_rate": 3.305439330543933e-06,
      "loss": 1.2929,
      "step": 160
    },
    {
      "epoch": 0.6736401673640168,
      "grad_norm": 0.339682400226593,
      "learning_rate": 3.263598326359833e-06,
      "loss": 1.3184,
      "step": 161
    },
    {
      "epoch": 0.6778242677824268,
      "grad_norm": 0.3286002576351166,
      "learning_rate": 3.2217573221757324e-06,
      "loss": 1.2801,
      "step": 162
    },
    {
      "epoch": 0.6820083682008368,
      "grad_norm": 0.33581218123435974,
      "learning_rate": 3.179916317991632e-06,
      "loss": 1.3073,
      "step": 163
    },
    {
      "epoch": 0.6861924686192469,
      "grad_norm": 0.3373326361179352,
      "learning_rate": 3.1380753138075313e-06,
      "loss": 1.2853,
      "step": 164
    },
    {
      "epoch": 0.6903765690376569,
      "grad_norm": 0.3551255464553833,
      "learning_rate": 3.096234309623431e-06,
      "loss": 1.2795,
      "step": 165
    },
    {
      "epoch": 0.694560669456067,
      "grad_norm": 0.33370694518089294,
      "learning_rate": 3.054393305439331e-06,
      "loss": 1.3062,
      "step": 166
    },
    {
      "epoch": 0.698744769874477,
      "grad_norm": 0.3263309895992279,
      "learning_rate": 3.0125523012552303e-06,
      "loss": 1.3348,
      "step": 167
    },
    {
      "epoch": 0.702928870292887,
      "grad_norm": 0.3350699245929718,
      "learning_rate": 2.97071129707113e-06,
      "loss": 1.3154,
      "step": 168
    },
    {
      "epoch": 0.7071129707112971,
      "grad_norm": 0.33460667729377747,
      "learning_rate": 2.9288702928870293e-06,
      "loss": 1.2403,
      "step": 169
    },
    {
      "epoch": 0.7112970711297071,
      "grad_norm": 0.42305517196655273,
      "learning_rate": 2.887029288702929e-06,
      "loss": 1.2679,
      "step": 170
    },
    {
      "epoch": 0.7154811715481172,
      "grad_norm": 0.5009521245956421,
      "learning_rate": 2.845188284518829e-06,
      "loss": 1.2979,
      "step": 171
    },
    {
      "epoch": 0.7196652719665272,
      "grad_norm": 0.35044097900390625,
      "learning_rate": 2.8033472803347283e-06,
      "loss": 1.2417,
      "step": 172
    },
    {
      "epoch": 0.7238493723849372,
      "grad_norm": 0.3329603672027588,
      "learning_rate": 2.761506276150628e-06,
      "loss": 1.2727,
      "step": 173
    },
    {
      "epoch": 0.7280334728033473,
      "grad_norm": 0.553231954574585,
      "learning_rate": 2.719665271966527e-06,
      "loss": 1.2906,
      "step": 174
    },
    {
      "epoch": 0.7322175732217573,
      "grad_norm": 0.3310469686985016,
      "learning_rate": 2.677824267782427e-06,
      "loss": 1.2466,
      "step": 175
    },
    {
      "epoch": 0.7364016736401674,
      "grad_norm": 0.3337252736091614,
      "learning_rate": 2.635983263598327e-06,
      "loss": 1.2753,
      "step": 176
    },
    {
      "epoch": 0.7405857740585774,
      "grad_norm": 0.5241883993148804,
      "learning_rate": 2.594142259414226e-06,
      "loss": 1.3364,
      "step": 177
    },
    {
      "epoch": 0.7447698744769874,
      "grad_norm": 0.3621697723865509,
      "learning_rate": 2.552301255230126e-06,
      "loss": 1.2091,
      "step": 178
    },
    {
      "epoch": 0.7489539748953975,
      "grad_norm": 0.40887123346328735,
      "learning_rate": 2.510460251046025e-06,
      "loss": 1.3031,
      "step": 179
    },
    {
      "epoch": 0.7531380753138075,
      "grad_norm": 0.34628698229789734,
      "learning_rate": 2.4686192468619247e-06,
      "loss": 1.2093,
      "step": 180
    },
    {
      "epoch": 0.7573221757322176,
      "grad_norm": 0.3536050617694855,
      "learning_rate": 2.4267782426778244e-06,
      "loss": 1.3339,
      "step": 181
    },
    {
      "epoch": 0.7615062761506276,
      "grad_norm": 0.44155943393707275,
      "learning_rate": 2.384937238493724e-06,
      "loss": 1.3256,
      "step": 182
    },
    {
      "epoch": 0.7656903765690377,
      "grad_norm": 0.3510695695877075,
      "learning_rate": 2.3430962343096237e-06,
      "loss": 1.246,
      "step": 183
    },
    {
      "epoch": 0.7698744769874477,
      "grad_norm": 0.39234158396720886,
      "learning_rate": 2.301255230125523e-06,
      "loss": 1.1959,
      "step": 184
    },
    {
      "epoch": 0.7740585774058577,
      "grad_norm": 0.38711047172546387,
      "learning_rate": 2.2594142259414227e-06,
      "loss": 1.2806,
      "step": 185
    },
    {
      "epoch": 0.7782426778242678,
      "grad_norm": 0.34728190302848816,
      "learning_rate": 2.2175732217573223e-06,
      "loss": 1.2284,
      "step": 186
    },
    {
      "epoch": 0.7824267782426778,
      "grad_norm": 0.4292190372943878,
      "learning_rate": 2.175732217573222e-06,
      "loss": 1.317,
      "step": 187
    },
    {
      "epoch": 0.7866108786610879,
      "grad_norm": 0.3527109920978546,
      "learning_rate": 2.1338912133891217e-06,
      "loss": 1.2481,
      "step": 188
    },
    {
      "epoch": 0.7907949790794979,
      "grad_norm": 0.41887742280960083,
      "learning_rate": 2.092050209205021e-06,
      "loss": 1.2332,
      "step": 189
    },
    {
      "epoch": 0.7949790794979079,
      "grad_norm": 0.40693023800849915,
      "learning_rate": 2.0502092050209206e-06,
      "loss": 1.2143,
      "step": 190
    },
    {
      "epoch": 0.799163179916318,
      "grad_norm": 0.33459046483039856,
      "learning_rate": 2.0083682008368202e-06,
      "loss": 1.2407,
      "step": 191
    },
    {
      "epoch": 0.803347280334728,
      "grad_norm": 0.3630668520927429,
      "learning_rate": 1.96652719665272e-06,
      "loss": 1.2559,
      "step": 192
    },
    {
      "epoch": 0.8075313807531381,
      "grad_norm": 0.4772534668445587,
      "learning_rate": 1.9246861924686196e-06,
      "loss": 1.2456,
      "step": 193
    },
    {
      "epoch": 0.8117154811715481,
      "grad_norm": 0.36172667145729065,
      "learning_rate": 1.8828451882845188e-06,
      "loss": 1.2749,
      "step": 194
    },
    {
      "epoch": 0.8158995815899581,
      "grad_norm": 0.37000811100006104,
      "learning_rate": 1.8410041841004187e-06,
      "loss": 1.2395,
      "step": 195
    },
    {
      "epoch": 0.8200836820083682,
      "grad_norm": 0.49782806634902954,
      "learning_rate": 1.7991631799163181e-06,
      "loss": 1.2788,
      "step": 196
    },
    {
      "epoch": 0.8242677824267782,
      "grad_norm": 0.3391241133213043,
      "learning_rate": 1.7573221757322176e-06,
      "loss": 1.2772,
      "step": 197
    },
    {
      "epoch": 0.8284518828451883,
      "grad_norm": 0.3950854539871216,
      "learning_rate": 1.7154811715481173e-06,
      "loss": 1.231,
      "step": 198
    },
    {
      "epoch": 0.8326359832635983,
      "grad_norm": 0.4793711304664612,
      "learning_rate": 1.6736401673640167e-06,
      "loss": 1.248,
      "step": 199
    },
    {
      "epoch": 0.8368200836820083,
      "grad_norm": 0.51435387134552,
      "learning_rate": 1.6317991631799166e-06,
      "loss": 1.2203,
      "step": 200
    },
    {
      "epoch": 0.8410041841004184,
      "grad_norm": 0.6365242600440979,
      "learning_rate": 1.589958158995816e-06,
      "loss": 1.3182,
      "step": 201
    },
    {
      "epoch": 0.8451882845188284,
      "grad_norm": 0.3626181483268738,
      "learning_rate": 1.5481171548117155e-06,
      "loss": 1.286,
      "step": 202
    },
    {
      "epoch": 0.8493723849372385,
      "grad_norm": 0.5900272130966187,
      "learning_rate": 1.5062761506276152e-06,
      "loss": 1.2312,
      "step": 203
    },
    {
      "epoch": 0.8535564853556485,
      "grad_norm": 0.3442046642303467,
      "learning_rate": 1.4644351464435146e-06,
      "loss": 1.2708,
      "step": 204
    },
    {
      "epoch": 0.8577405857740585,
      "grad_norm": 0.3256811797618866,
      "learning_rate": 1.4225941422594145e-06,
      "loss": 1.2402,
      "step": 205
    },
    {
      "epoch": 0.8619246861924686,
      "grad_norm": 0.3342282474040985,
      "learning_rate": 1.380753138075314e-06,
      "loss": 1.2461,
      "step": 206
    },
    {
      "epoch": 0.8661087866108786,
      "grad_norm": 0.33777275681495667,
      "learning_rate": 1.3389121338912134e-06,
      "loss": 1.2926,
      "step": 207
    },
    {
      "epoch": 0.8702928870292888,
      "grad_norm": 0.3379373848438263,
      "learning_rate": 1.297071129707113e-06,
      "loss": 1.2772,
      "step": 208
    },
    {
      "epoch": 0.8744769874476988,
      "grad_norm": 0.6433287262916565,
      "learning_rate": 1.2552301255230125e-06,
      "loss": 1.2669,
      "step": 209
    },
    {
      "epoch": 0.8786610878661087,
      "grad_norm": 0.6349135041236877,
      "learning_rate": 1.2133891213389122e-06,
      "loss": 1.305,
      "step": 210
    },
    {
      "epoch": 0.8828451882845189,
      "grad_norm": 0.33414891362190247,
      "learning_rate": 1.1715481171548119e-06,
      "loss": 1.2529,
      "step": 211
    },
    {
      "epoch": 0.8870292887029289,
      "grad_norm": 0.3989071547985077,
      "learning_rate": 1.1297071129707113e-06,
      "loss": 1.2506,
      "step": 212
    },
    {
      "epoch": 0.891213389121339,
      "grad_norm": 0.3862692415714264,
      "learning_rate": 1.087866108786611e-06,
      "loss": 1.3046,
      "step": 213
    },
    {
      "epoch": 0.895397489539749,
      "grad_norm": 0.34327417612075806,
      "learning_rate": 1.0460251046025104e-06,
      "loss": 1.227,
      "step": 214
    },
    {
      "epoch": 0.899581589958159,
      "grad_norm": 0.3878503441810608,
      "learning_rate": 1.0041841004184101e-06,
      "loss": 1.2668,
      "step": 215
    },
    {
      "epoch": 0.9037656903765691,
      "grad_norm": 0.32297104597091675,
      "learning_rate": 9.623430962343098e-07,
      "loss": 1.2597,
      "step": 216
    },
    {
      "epoch": 0.9079497907949791,
      "grad_norm": 0.44788116216659546,
      "learning_rate": 9.205020920502093e-07,
      "loss": 1.3145,
      "step": 217
    },
    {
      "epoch": 0.9121338912133892,
      "grad_norm": 0.35166341066360474,
      "learning_rate": 8.786610878661088e-07,
      "loss": 1.2948,
      "step": 218
    },
    {
      "epoch": 0.9163179916317992,
      "grad_norm": 0.3332233428955078,
      "learning_rate": 8.368200836820084e-07,
      "loss": 1.2554,
      "step": 219
    },
    {
      "epoch": 0.9205020920502092,
      "grad_norm": 0.3346772789955139,
      "learning_rate": 7.94979079497908e-07,
      "loss": 1.2279,
      "step": 220
    },
    {
      "epoch": 0.9246861924686193,
      "grad_norm": 0.3380833864212036,
      "learning_rate": 7.531380753138076e-07,
      "loss": 1.2176,
      "step": 221
    },
    {
      "epoch": 0.9288702928870293,
      "grad_norm": 0.5502627491950989,
      "learning_rate": 7.112970711297073e-07,
      "loss": 1.2271,
      "step": 222
    },
    {
      "epoch": 0.9330543933054394,
      "grad_norm": 0.3738647401332855,
      "learning_rate": 6.694560669456067e-07,
      "loss": 1.3101,
      "step": 223
    },
    {
      "epoch": 0.9372384937238494,
      "grad_norm": 0.4273400902748108,
      "learning_rate": 6.276150627615063e-07,
      "loss": 1.2648,
      "step": 224
    },
    {
      "epoch": 0.9414225941422594,
      "grad_norm": 0.5694935917854309,
      "learning_rate": 5.857740585774059e-07,
      "loss": 1.301,
      "step": 225
    },
    {
      "epoch": 0.9456066945606695,
      "grad_norm": 0.37318360805511475,
      "learning_rate": 5.439330543933055e-07,
      "loss": 1.2353,
      "step": 226
    },
    {
      "epoch": 0.9497907949790795,
      "grad_norm": 0.37486758828163147,
      "learning_rate": 5.020920502092051e-07,
      "loss": 1.2324,
      "step": 227
    },
    {
      "epoch": 0.9539748953974896,
      "grad_norm": 0.342326283454895,
      "learning_rate": 4.6025104602510467e-07,
      "loss": 1.265,
      "step": 228
    },
    {
      "epoch": 0.9581589958158996,
      "grad_norm": 0.3409275710582733,
      "learning_rate": 4.184100418410042e-07,
      "loss": 1.2498,
      "step": 229
    },
    {
      "epoch": 0.9623430962343096,
      "grad_norm": 0.41361624002456665,
      "learning_rate": 3.765690376569038e-07,
      "loss": 1.243,
      "step": 230
    },
    {
      "epoch": 0.9665271966527197,
      "grad_norm": 0.3393700122833252,
      "learning_rate": 3.3472803347280335e-07,
      "loss": 1.2796,
      "step": 231
    },
    {
      "epoch": 0.9707112970711297,
      "grad_norm": 0.3456774055957794,
      "learning_rate": 2.9288702928870297e-07,
      "loss": 1.3252,
      "step": 232
    },
    {
      "epoch": 0.9748953974895398,
      "grad_norm": 0.5081372261047363,
      "learning_rate": 2.5104602510460253e-07,
      "loss": 1.2795,
      "step": 233
    },
    {
      "epoch": 0.9790794979079498,
      "grad_norm": 0.35145166516304016,
      "learning_rate": 2.092050209205021e-07,
      "loss": 1.2994,
      "step": 234
    },
    {
      "epoch": 0.9832635983263598,
      "grad_norm": 0.40834537148475647,
      "learning_rate": 1.6736401673640168e-07,
      "loss": 1.2504,
      "step": 235
    },
    {
      "epoch": 0.9874476987447699,
      "grad_norm": 0.338552862405777,
      "learning_rate": 1.2552301255230126e-07,
      "loss": 1.2258,
      "step": 236
    },
    {
      "epoch": 0.9916317991631799,
      "grad_norm": 0.35948339104652405,
      "learning_rate": 8.368200836820084e-08,
      "loss": 1.2088,
      "step": 237
    },
    {
      "epoch": 0.99581589958159,
      "grad_norm": 0.36100009083747864,
      "learning_rate": 4.184100418410042e-08,
      "loss": 1.1964,
      "step": 238
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.3361973166465759,
      "learning_rate": 0.0,
      "loss": 1.2125,
      "step": 239
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 239,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 0,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 8.011575614609818e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}