{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9984496124031006,
  "eval_steps": 500,
  "global_step": 1449,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.020671834625323,
      "grad_norm": 19.961172103881836,
      "learning_rate": 6.896551724137932e-06,
      "loss": 1.4671,
      "step": 10
    },
    {
      "epoch": 0.041343669250646,
      "grad_norm": 10.104804039001465,
      "learning_rate": 1.3793103448275863e-05,
      "loss": 1.2814,
      "step": 20
    },
    {
      "epoch": 0.06201550387596899,
      "grad_norm": 10.660629272460938,
      "learning_rate": 2.0689655172413793e-05,
      "loss": 1.0494,
      "step": 30
    },
    {
      "epoch": 0.082687338501292,
      "grad_norm": 4.328464031219482,
      "learning_rate": 2.7586206896551727e-05,
      "loss": 0.8603,
      "step": 40
    },
    {
      "epoch": 0.10335917312661498,
      "grad_norm": 2.360297918319702,
      "learning_rate": 3.4482758620689657e-05,
      "loss": 0.7456,
      "step": 50
    },
    {
      "epoch": 0.12403100775193798,
      "grad_norm": 2.756227731704712,
      "learning_rate": 4.1379310344827587e-05,
      "loss": 0.6606,
      "step": 60
    },
    {
      "epoch": 0.14470284237726097,
      "grad_norm": 2.7377092838287354,
      "learning_rate": 4.827586206896552e-05,
      "loss": 0.6343,
      "step": 70
    },
    {
      "epoch": 0.165374677002584,
      "grad_norm": 2.682518243789673,
      "learning_rate": 5.517241379310345e-05,
      "loss": 0.5962,
      "step": 80
    },
    {
      "epoch": 0.18604651162790697,
      "grad_norm": 2.2480661869049072,
      "learning_rate": 6.206896551724138e-05,
      "loss": 0.5689,
      "step": 90
    },
    {
      "epoch": 0.20671834625322996,
      "grad_norm": 2.5809719562530518,
      "learning_rate": 6.896551724137931e-05,
      "loss": 0.5812,
      "step": 100
    },
    {
      "epoch": 0.22739018087855298,
      "grad_norm": 1.933281660079956,
      "learning_rate": 7.586206896551724e-05,
      "loss": 0.5438,
      "step": 110
    },
    {
      "epoch": 0.24806201550387597,
      "grad_norm": 2.1786251068115234,
      "learning_rate": 8.275862068965517e-05,
      "loss": 0.5379,
      "step": 120
    },
    {
      "epoch": 0.268733850129199,
      "grad_norm": 1.6331806182861328,
      "learning_rate": 8.96551724137931e-05,
      "loss": 0.5383,
      "step": 130
    },
    {
      "epoch": 0.28940568475452194,
      "grad_norm": 1.713693380355835,
      "learning_rate": 9.655172413793105e-05,
      "loss": 0.5359,
      "step": 140
    },
    {
      "epoch": 0.31007751937984496,
      "grad_norm": 1.6057534217834473,
      "learning_rate": 9.999637240054163e-05,
      "loss": 0.5957,
      "step": 150
    },
    {
      "epoch": 0.330749354005168,
      "grad_norm": 1.2600524425506592,
      "learning_rate": 9.996735476307291e-05,
      "loss": 0.5528,
      "step": 160
    },
    {
      "epoch": 0.35142118863049093,
      "grad_norm": 1.258549451828003,
      "learning_rate": 9.990933632982306e-05,
      "loss": 0.5274,
      "step": 170
    },
    {
      "epoch": 0.37209302325581395,
      "grad_norm": 1.1361678838729858,
      "learning_rate": 9.982235077439241e-05,
      "loss": 0.5363,
      "step": 180
    },
    {
      "epoch": 0.39276485788113696,
      "grad_norm": 1.1429818868637085,
      "learning_rate": 9.970644858275008e-05,
      "loss": 0.5626,
      "step": 190
    },
    {
      "epoch": 0.4134366925064599,
      "grad_norm": 1.168615460395813,
      "learning_rate": 9.956169702393215e-05,
      "loss": 0.5331,
      "step": 200
    },
    {
      "epoch": 0.43410852713178294,
      "grad_norm": 1.353784441947937,
      "learning_rate": 9.938818011099905e-05,
      "loss": 0.5232,
      "step": 210
    },
    {
      "epoch": 0.45478036175710596,
      "grad_norm": 1.1773430109024048,
      "learning_rate": 9.918599855227486e-05,
      "loss": 0.536,
      "step": 220
    },
    {
      "epoch": 0.4754521963824289,
      "grad_norm": 1.1550217866897583,
      "learning_rate": 9.895526969289671e-05,
      "loss": 0.5162,
      "step": 230
    },
    {
      "epoch": 0.49612403100775193,
      "grad_norm": 1.0943946838378906,
      "learning_rate": 9.869612744670818e-05,
      "loss": 0.4939,
      "step": 240
    },
    {
      "epoch": 0.5167958656330749,
      "grad_norm": 1.0564571619033813,
      "learning_rate": 9.84087222185365e-05,
      "loss": 0.5053,
      "step": 250
    },
    {
      "epoch": 0.537467700258398,
      "grad_norm": 0.9963967800140381,
      "learning_rate": 9.809322081689833e-05,
      "loss": 0.5277,
      "step": 260
    },
    {
      "epoch": 0.5581395348837209,
      "grad_norm": 1.1690253019332886,
      "learning_rate": 9.774980635718494e-05,
      "loss": 0.5568,
      "step": 270
    },
    {
      "epoch": 0.5788113695090439,
      "grad_norm": 1.0432251691818237,
      "learning_rate": 9.737867815538293e-05,
      "loss": 0.5567,
      "step": 280
    },
    {
      "epoch": 0.599483204134367,
      "grad_norm": 1.033795714378357,
      "learning_rate": 9.698005161239236e-05,
      "loss": 0.5373,
      "step": 290
    },
    {
      "epoch": 0.6201550387596899,
      "grad_norm": 1.0568259954452515,
      "learning_rate": 9.655415808900914e-05,
      "loss": 0.528,
      "step": 300
    },
    {
      "epoch": 0.6408268733850129,
      "grad_norm": 1.0200306177139282,
      "learning_rate": 9.610124477164435e-05,
      "loss": 0.5364,
      "step": 310
    },
    {
      "epoch": 0.661498708010336,
      "grad_norm": 1.0433367490768433,
      "learning_rate": 9.56215745288586e-05,
      "loss": 0.546,
      "step": 320
    },
    {
      "epoch": 0.6821705426356589,
      "grad_norm": 0.936161458492279,
      "learning_rate": 9.51154257587944e-05,
      "loss": 0.5283,
      "step": 330
    },
    {
      "epoch": 0.7028423772609819,
      "grad_norm": 0.8251316547393799,
      "learning_rate": 9.45830922275954e-05,
      "loss": 0.5165,
      "step": 340
    },
    {
      "epoch": 0.7235142118863049,
      "grad_norm": 0.9545398354530334,
      "learning_rate": 9.402488289890591e-05,
      "loss": 0.5344,
      "step": 350
    },
    {
      "epoch": 0.7441860465116279,
      "grad_norm": 1.0172969102859497,
      "learning_rate": 9.344112175455013e-05,
      "loss": 0.5108,
      "step": 360
    },
    {
      "epoch": 0.7648578811369509,
      "grad_norm": 0.9597331881523132,
      "learning_rate": 9.283214760649461e-05,
      "loss": 0.5082,
      "step": 370
    },
    {
      "epoch": 0.7855297157622739,
      "grad_norm": 0.9407294988632202,
      "learning_rate": 9.219831390020366e-05,
      "loss": 0.5438,
      "step": 380
    },
    {
      "epoch": 0.8062015503875969,
      "grad_norm": 0.8958045840263367,
      "learning_rate": 9.153998850950132e-05,
      "loss": 0.5383,
      "step": 390
    },
    {
      "epoch": 0.8268733850129198,
      "grad_norm": 1.0094271898269653,
      "learning_rate": 9.08575535230594e-05,
      "loss": 0.4745,
      "step": 400
    },
    {
      "epoch": 0.8475452196382429,
      "grad_norm": 0.9919396638870239,
      "learning_rate": 9.015140502263506e-05,
      "loss": 0.4849,
      "step": 410
    },
    {
      "epoch": 0.8682170542635659,
      "grad_norm": 1.1863609552383423,
      "learning_rate": 8.942195285318704e-05,
      "loss": 0.5239,
      "step": 420
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 0.9280211925506592,
      "learning_rate": 8.866962038500379e-05,
      "loss": 0.4919,
      "step": 430
    },
    {
      "epoch": 0.9095607235142119,
      "grad_norm": 1.0376496315002441,
      "learning_rate": 8.789484426798146e-05,
      "loss": 0.5155,
      "step": 440
    },
    {
      "epoch": 0.9302325581395349,
      "grad_norm": 0.9633381962776184,
      "learning_rate": 8.709807417819465e-05,
      "loss": 0.5188,
      "step": 450
    },
    {
      "epoch": 0.9509043927648578,
      "grad_norm": 1.0249930620193481,
      "learning_rate": 8.627977255690657e-05,
      "loss": 0.5031,
      "step": 460
    },
    {
      "epoch": 0.9715762273901809,
      "grad_norm": 1.0092591047286987,
      "learning_rate": 8.544041434217068e-05,
      "loss": 0.513,
      "step": 470
    },
    {
      "epoch": 0.9922480620155039,
      "grad_norm": 0.8698219060897827,
      "learning_rate": 8.458048669317896e-05,
      "loss": 0.5067,
      "step": 480
    },
    {
      "epoch": 1.014470284237726,
      "grad_norm": 0.8564507365226746,
      "learning_rate": 8.370048870751733e-05,
      "loss": 0.4898,
      "step": 490
    },
    {
      "epoch": 1.0351421188630492,
      "grad_norm": 1.9524496793746948,
      "learning_rate": 8.280093113149182e-05,
      "loss": 0.4215,
      "step": 500
    },
    {
      "epoch": 1.0558139534883721,
      "grad_norm": 1.0247145891189575,
      "learning_rate": 8.188233606369422e-05,
      "loss": 0.428,
      "step": 510
    },
    {
      "epoch": 1.076485788113695,
      "grad_norm": 1.123751163482666,
      "learning_rate": 8.094523665197862e-05,
      "loss": 0.4318,
      "step": 520
    },
    {
      "epoch": 1.097157622739018,
      "grad_norm": 1.0905773639678955,
      "learning_rate": 7.999017678402519e-05,
      "loss": 0.4303,
      "step": 530
    },
    {
      "epoch": 1.117829457364341,
      "grad_norm": 0.9824921488761902,
      "learning_rate": 7.901771077167052e-05,
      "loss": 0.4068,
      "step": 540
    },
    {
      "epoch": 1.138501291989664,
      "grad_norm": 0.9725044965744019,
      "learning_rate": 7.802840302918795e-05,
      "loss": 0.3728,
      "step": 550
    },
    {
      "epoch": 1.1591731266149872,
      "grad_norm": 0.9440091848373413,
      "learning_rate": 7.702282774570435e-05,
      "loss": 0.4264,
      "step": 560
    },
    {
      "epoch": 1.1798449612403101,
      "grad_norm": 1.0231075286865234,
      "learning_rate": 7.600156855194376e-05,
      "loss": 0.3708,
      "step": 570
    },
    {
      "epoch": 1.200516795865633,
      "grad_norm": 0.9228759407997131,
      "learning_rate": 7.496521818149114e-05,
      "loss": 0.3771,
      "step": 580
    },
    {
      "epoch": 1.221188630490956,
      "grad_norm": 0.9908185601234436,
      "learning_rate": 7.391437812677273e-05,
      "loss": 0.424,
      "step": 590
    },
    {
      "epoch": 1.241860465116279,
      "grad_norm": 0.8130841255187988,
      "learning_rate": 7.284965828995309e-05,
      "loss": 0.4202,
      "step": 600
    },
    {
      "epoch": 1.262532299741602,
      "grad_norm": 1.2041234970092773,
      "learning_rate": 7.177167662895087e-05,
      "loss": 0.3821,
      "step": 610
    },
    {
      "epoch": 1.2832041343669252,
      "grad_norm": 0.9226158261299133,
      "learning_rate": 7.068105879877933e-05,
      "loss": 0.4288,
      "step": 620
    },
    {
      "epoch": 1.3038759689922481,
      "grad_norm": 0.9127386808395386,
      "learning_rate": 6.957843778841937e-05,
      "loss": 0.4177,
      "step": 630
    },
    {
      "epoch": 1.324547803617571,
      "grad_norm": 0.9465218186378479,
      "learning_rate": 6.84644535534359e-05,
      "loss": 0.4326,
      "step": 640
    },
    {
      "epoch": 1.345219638242894,
      "grad_norm": 0.9808584451675415,
      "learning_rate": 6.733975264455097e-05,
      "loss": 0.391,
      "step": 650
    },
    {
      "epoch": 1.365891472868217,
      "grad_norm": 0.8926200866699219,
      "learning_rate": 6.620498783238911e-05,
      "loss": 0.4072,
      "step": 660
    },
    {
      "epoch": 1.3865633074935402,
      "grad_norm": 1.060142993927002,
      "learning_rate": 6.506081772861254e-05,
      "loss": 0.4062,
      "step": 670
    },
    {
      "epoch": 1.407235142118863,
      "grad_norm": 0.931640625,
      "learning_rate": 6.390790640366635e-05,
      "loss": 0.4287,
      "step": 680
    },
    {
      "epoch": 1.427906976744186,
      "grad_norm": 0.8396949172019958,
      "learning_rate": 6.27469230013555e-05,
      "loss": 0.3781,
      "step": 690
    },
    {
      "epoch": 1.448578811369509,
      "grad_norm": 0.8786910176277161,
      "learning_rate": 6.157854135047707e-05,
      "loss": 0.4089,
      "step": 700
    },
    {
      "epoch": 1.469250645994832,
      "grad_norm": 0.877200722694397,
      "learning_rate": 6.0403439573733666e-05,
      "loss": 0.4034,
      "step": 710
    },
    {
      "epoch": 1.489922480620155,
      "grad_norm": 0.8826427459716797,
      "learning_rate": 5.9222299694154316e-05,
      "loss": 0.4237,
      "step": 720
    },
    {
      "epoch": 1.510594315245478,
      "grad_norm": 1.0366460084915161,
      "learning_rate": 5.803580723925193e-05,
      "loss": 0.4131,
      "step": 730
    },
    {
      "epoch": 1.5312661498708011,
      "grad_norm": 0.8167237639427185,
      "learning_rate": 5.6844650843146595e-05,
      "loss": 0.3821,
      "step": 740
    },
    {
      "epoch": 1.551937984496124,
      "grad_norm": 0.8496309518814087,
      "learning_rate": 5.5649521846885876e-05,
      "loss": 0.3998,
      "step": 750
    },
    {
      "epoch": 1.572609819121447,
      "grad_norm": 1.0631760358810425,
      "learning_rate": 5.445111389719408e-05,
      "loss": 0.4448,
      "step": 760
    },
    {
      "epoch": 1.59328165374677,
      "grad_norm": 0.9399399757385254,
      "learning_rate": 5.3250122543883197e-05,
      "loss": 0.4061,
      "step": 770
    },
    {
      "epoch": 1.613953488372093,
      "grad_norm": 0.9156986474990845,
      "learning_rate": 5.20472448361594e-05,
      "loss": 0.4181,
      "step": 780
    },
    {
      "epoch": 1.6346253229974161,
      "grad_norm": 0.9186833500862122,
      "learning_rate": 5.084317891805928e-05,
      "loss": 0.3888,
      "step": 790
    },
    {
      "epoch": 1.6552971576227389,
      "grad_norm": 1.0180436372756958,
      "learning_rate": 4.963862362325058e-05,
      "loss": 0.4265,
      "step": 800
    },
    {
      "epoch": 1.675968992248062,
      "grad_norm": 0.9096766710281372,
      "learning_rate": 4.8434278069432834e-05,
      "loss": 0.3958,
      "step": 810
    },
    {
      "epoch": 1.696640826873385,
      "grad_norm": 0.88401198387146,
      "learning_rate": 4.7230841252572906e-05,
      "loss": 0.3952,
      "step": 820
    },
    {
      "epoch": 1.717312661498708,
      "grad_norm": 0.834771454334259,
      "learning_rate": 4.602901164121145e-05,
      "loss": 0.3905,
      "step": 830
    },
    {
      "epoch": 1.7379844961240312,
      "grad_norm": 1.0104914903640747,
      "learning_rate": 4.482948677107533e-05,
      "loss": 0.3677,
      "step": 840
    },
    {
      "epoch": 1.758656330749354,
      "grad_norm": 0.9692713618278503,
      "learning_rate": 4.363296284023139e-05,
      "loss": 0.4024,
      "step": 850
    },
    {
      "epoch": 1.779328165374677,
      "grad_norm": 0.9530401229858398,
      "learning_rate": 4.24401343050168e-05,
      "loss": 0.3809,
      "step": 860
    },
    {
      "epoch": 1.8,
      "grad_norm": 1.4324836730957031,
      "learning_rate": 4.125169347698009e-05,
      "loss": 0.4011,
      "step": 870
    },
    {
      "epoch": 1.820671834625323,
      "grad_norm": 0.9663997888565063,
      "learning_rate": 4.006833012106707e-05,
      "loss": 0.3726,
      "step": 880
    },
    {
      "epoch": 1.841343669250646,
      "grad_norm": 0.8397408127784729,
      "learning_rate": 3.8890731055284893e-05,
      "loss": 0.3845,
      "step": 890
    },
    {
      "epoch": 1.862015503875969,
      "grad_norm": 0.9423091411590576,
      "learning_rate": 3.77195797520763e-05,
      "loss": 0.4097,
      "step": 900
    },
    {
      "epoch": 1.882687338501292,
      "grad_norm": 0.9247391223907471,
      "learning_rate": 3.6555555941635865e-05,
      "loss": 0.3979,
      "step": 910
    },
    {
      "epoch": 1.9033591731266148,
      "grad_norm": 0.9327929019927979,
      "learning_rate": 3.539933521739793e-05,
      "loss": 0.4075,
      "step": 920
    },
    {
      "epoch": 1.924031007751938,
      "grad_norm": 0.93698650598526,
      "learning_rate": 3.425158864392576e-05,
      "loss": 0.3885,
      "step": 930
    },
    {
      "epoch": 1.944702842377261,
      "grad_norm": 0.8598852753639221,
      "learning_rate": 3.31129823674291e-05,
      "loss": 0.402,
      "step": 940
    },
    {
      "epoch": 1.965374677002584,
      "grad_norm": 0.8426728844642639,
      "learning_rate": 3.1984177229136285e-05,
      "loss": 0.3899,
      "step": 950
    },
    {
      "epoch": 1.9860465116279071,
      "grad_norm": 0.8942709565162659,
      "learning_rate": 3.086582838174551e-05,
      "loss": 0.3789,
      "step": 960
    },
    {
      "epoch": 2.0082687338501293,
      "grad_norm": 0.7830824255943298,
      "learning_rate": 2.9758584909177433e-05,
      "loss": 0.4181,
      "step": 970
    },
    {
      "epoch": 2.028940568475452,
      "grad_norm": 0.9163928627967834,
      "learning_rate": 2.8663089449850243e-05,
      "loss": 0.3397,
      "step": 980
    },
    {
      "epoch": 2.049612403100775,
      "grad_norm": 0.9074885845184326,
      "learning_rate": 2.7579977823695723e-05,
      "loss": 0.298,
      "step": 990
    },
    {
      "epoch": 2.0702842377260984,
      "grad_norm": 0.9496545791625977,
      "learning_rate": 2.6509878663132514e-05,
      "loss": 0.3163,
      "step": 1000
    },
    {
      "epoch": 2.090956072351421,
      "grad_norm": 0.9921571016311646,
      "learning_rate": 2.5453413048211172e-05,
      "loss": 0.3213,
      "step": 1010
    },
    {
      "epoch": 2.1116279069767443,
      "grad_norm": 1.1148159503936768,
      "learning_rate": 2.4411194146142407e-05,
      "loss": 0.2921,
      "step": 1020
    },
    {
      "epoch": 2.132299741602067,
      "grad_norm": 1.0226593017578125,
      "learning_rate": 2.338382685541806e-05,
      "loss": 0.2943,
      "step": 1030
    },
    {
      "epoch": 2.15297157622739,
      "grad_norm": 0.955251932144165,
      "learning_rate": 2.2371907454731167e-05,
      "loss": 0.3286,
      "step": 1040
    },
    {
      "epoch": 2.173643410852713,
      "grad_norm": 1.0427072048187256,
      "learning_rate": 2.137602325689873e-05,
      "loss": 0.3214,
      "step": 1050
    },
    {
      "epoch": 2.194315245478036,
      "grad_norm": 1.0497926473617554,
      "learning_rate": 2.039675226798854e-05,
      "loss": 0.3143,
      "step": 1060
    },
    {
      "epoch": 2.2149870801033593,
      "grad_norm": 1.0081120729446411,
      "learning_rate": 1.9434662851847325e-05,
      "loss": 0.3213,
      "step": 1070
    },
    {
      "epoch": 2.235658914728682,
      "grad_norm": 0.9074374437332153,
      "learning_rate": 1.8490313400225294e-05,
      "loss": 0.3204,
      "step": 1080
    },
    {
      "epoch": 2.2563307493540052,
      "grad_norm": 0.9639903903007507,
      "learning_rate": 1.7564252008688514e-05,
      "loss": 0.3066,
      "step": 1090
    },
    {
      "epoch": 2.277002583979328,
      "grad_norm": 0.9500643610954285,
      "learning_rate": 1.6657016158506966e-05,
      "loss": 0.3079,
      "step": 1100
    },
    {
      "epoch": 2.297674418604651,
      "grad_norm": 0.9133275151252747,
      "learning_rate": 1.5769132404703274e-05,
      "loss": 0.3127,
      "step": 1110
    },
    {
      "epoch": 2.3183462532299743,
      "grad_norm": 0.9715905785560608,
      "learning_rate": 1.4901116070442772e-05,
      "loss": 0.3221,
      "step": 1120
    },
    {
      "epoch": 2.339018087855297,
      "grad_norm": 1.0215041637420654,
      "learning_rate": 1.4053470947942693e-05,
      "loss": 0.3112,
      "step": 1130
    },
    {
      "epoch": 2.3596899224806203,
      "grad_norm": 0.9813144207000732,
      "learning_rate": 1.3226689006073712e-05,
      "loss": 0.3053,
      "step": 1140
    },
    {
      "epoch": 2.380361757105943,
      "grad_norm": 0.9730923175811768,
      "learning_rate": 1.2421250104823684e-05,
      "loss": 0.3172,
      "step": 1150
    },
    {
      "epoch": 2.401033591731266,
      "grad_norm": 1.038800597190857,
      "learning_rate": 1.163762171678951e-05,
      "loss": 0.3085,
      "step": 1160
    },
    {
      "epoch": 2.4217054263565894,
      "grad_norm": 0.9317840933799744,
      "learning_rate": 1.0876258655858307e-05,
      "loss": 0.3281,
      "step": 1170
    },
    {
      "epoch": 2.442377260981912,
      "grad_norm": 1.0310304164886475,
      "learning_rate": 1.0137602813235824e-05,
      "loss": 0.3118,
      "step": 1180
    },
    {
      "epoch": 2.4630490956072353,
      "grad_norm": 0.9994140267372131,
      "learning_rate": 9.422082900975104e-06,
      "loss": 0.309,
      "step": 1190
    },
    {
      "epoch": 2.483720930232558,
      "grad_norm": 1.1645585298538208,
      "learning_rate": 8.730114203154144e-06,
      "loss": 0.3219,
      "step": 1200
    },
    {
      "epoch": 2.504392764857881,
      "grad_norm": 1.0404480695724487,
      "learning_rate": 8.062098334847184e-06,
      "loss": 0.3165,
      "step": 1210
    },
    {
      "epoch": 2.525064599483204,
      "grad_norm": 1.055424451828003,
      "learning_rate": 7.418423009029362e-06,
      "loss": 0.2978,
      "step": 1220
    },
    {
      "epoch": 2.545736434108527,
      "grad_norm": 0.9607129693031311,
      "learning_rate": 6.799461811550101e-06,
      "loss": 0.3369,
      "step": 1230
    },
    {
      "epoch": 2.5664082687338503,
      "grad_norm": 1.0298357009887695,
      "learning_rate": 6.205573984305857e-06,
      "loss": 0.3295,
      "step": 1240
    },
    {
      "epoch": 2.587080103359173,
      "grad_norm": 0.9973547458648682,
      "learning_rate": 5.6371042167378706e-06,
      "loss": 0.3166,
      "step": 1250
    },
    {
      "epoch": 2.6077519379844962,
      "grad_norm": 1.4321331977844238,
      "learning_rate": 5.09438244577628e-06,
      "loss": 0.339,
      "step": 1260
    },
    {
      "epoch": 2.628423772609819,
      "grad_norm": 0.9741699695587158,
      "learning_rate": 4.5777236643463786e-06,
      "loss": 0.3011,
      "step": 1270
    },
    {
      "epoch": 2.649095607235142,
      "grad_norm": 0.980993390083313,
      "learning_rate": 4.087427738548321e-06,
      "loss": 0.3072,
      "step": 1280
    },
    {
      "epoch": 2.669767441860465,
      "grad_norm": 1.0376126766204834,
      "learning_rate": 3.623779233616442e-06,
      "loss": 0.3159,
      "step": 1290
    },
    {
      "epoch": 2.690439276485788,
      "grad_norm": 1.0413364171981812,
      "learning_rate": 3.1870472487589843e-06,
      "loss": 0.2953,
      "step": 1300
    },
    {
      "epoch": 2.7111111111111112,
      "grad_norm": 0.9663779139518738,
      "learning_rate": 2.7774852609743886e-06,
      "loss": 0.3072,
      "step": 1310
    },
    {
      "epoch": 2.731782945736434,
      "grad_norm": 0.9956408739089966,
      "learning_rate": 2.39533097793444e-06,
      "loss": 0.2828,
      "step": 1320
    },
    {
      "epoch": 2.752454780361757,
      "grad_norm": 0.9878751039505005,
      "learning_rate": 2.0408062000200625e-06,
      "loss": 0.3278,
      "step": 1330
    },
    {
      "epoch": 2.7731266149870804,
      "grad_norm": 1.0932310819625854,
      "learning_rate": 1.714116691589457e-06,
      "loss": 0.3071,
      "step": 1340
    },
    {
      "epoch": 2.793798449612403,
      "grad_norm": 1.0455061197280884,
      "learning_rate": 1.4154520615535183e-06,
      "loss": 0.3256,
      "step": 1350
    },
    {
      "epoch": 2.814470284237726,
      "grad_norm": 1.0817687511444092,
      "learning_rate": 1.1449856533278625e-06,
      "loss": 0.3147,
      "step": 1360
    },
    {
      "epoch": 2.835142118863049,
      "grad_norm": 1.0755105018615723,
      "learning_rate": 9.028744442251102e-07,
      "loss": 0.3353,
      "step": 1370
    },
    {
      "epoch": 2.855813953488372,
      "grad_norm": 1.0853437185287476,
      "learning_rate": 6.892589543461392e-07,
      "loss": 0.3174,
      "step": 1380
    },
    {
      "epoch": 2.876485788113695,
      "grad_norm": 0.9791375398635864,
      "learning_rate": 5.042631650229058e-07,
      "loss": 0.3113,
      "step": 1390
    },
    {
      "epoch": 2.897157622739018,
      "grad_norm": 0.9765632748603821,
      "learning_rate": 3.4799444686031693e-07,
      "loss": 0.2972,
      "step": 1400
    },
    {
      "epoch": 2.9178294573643413,
      "grad_norm": 1.0426656007766724,
      "learning_rate": 2.2054349741888468e-07,
      "loss": 0.296,
      "step": 1410
    },
    {
      "epoch": 2.938501291989664,
      "grad_norm": 0.9949988722801208,
      "learning_rate": 1.219842885743394e-07,
      "loss": 0.2913,
      "step": 1420
    },
    {
      "epoch": 2.959173126614987,
      "grad_norm": 0.9954948425292969,
      "learning_rate": 5.237402358471144e-08,
      "loss": 0.3192,
      "step": 1430
    },
    {
      "epoch": 2.97984496124031,
      "grad_norm": 1.0401126146316528,
      "learning_rate": 1.1753103889883266e-08,
      "loss": 0.288,
      "step": 1440
    },
    {
      "epoch": 2.9984496124031006,
      "step": 1449,
      "total_flos": 2.170312406348071e+18,
      "train_loss": 0.43735031344464437,
      "train_runtime": 9561.9202,
      "train_samples_per_second": 2.428,
      "train_steps_per_second": 0.152
    }
  ],
  "logging_steps": 10,
  "max_steps": 1449,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.170312406348071e+18,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}