|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.966536472078539, |
|
"eval_steps": 500, |
|
"global_step": 27468, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0010908195221506739, |
|
"grad_norm": 27.24474334716797, |
|
"learning_rate": 1.0157273918741808e-06, |
|
"loss": 9.0396, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.0021816390443013477, |
|
"grad_norm": 16.773351669311523, |
|
"learning_rate": 2.0314547837483616e-06, |
|
"loss": 7.9471, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.0032724585664520214, |
|
"grad_norm": 15.084428787231445, |
|
"learning_rate": 3.0471821756225426e-06, |
|
"loss": 6.5868, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.0043632780886026954, |
|
"grad_norm": 6.7851786613464355, |
|
"learning_rate": 4.062909567496723e-06, |
|
"loss": 5.3744, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.0054540976107533695, |
|
"grad_norm": 7.081949234008789, |
|
"learning_rate": 5.078636959370905e-06, |
|
"loss": 4.6378, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.006544917132904043, |
|
"grad_norm": 8.945405960083008, |
|
"learning_rate": 6.094364351245085e-06, |
|
"loss": 4.2292, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.007635736655054717, |
|
"grad_norm": 15.078816413879395, |
|
"learning_rate": 7.110091743119267e-06, |
|
"loss": 3.9767, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.008726556177205391, |
|
"grad_norm": 8.799291610717773, |
|
"learning_rate": 8.125819134993446e-06, |
|
"loss": 3.765, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.009817375699356064, |
|
"grad_norm": 9.741459846496582, |
|
"learning_rate": 9.141546526867629e-06, |
|
"loss": 3.6496, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.010908195221506739, |
|
"grad_norm": 9.403132438659668, |
|
"learning_rate": 1.015727391874181e-05, |
|
"loss": 3.4936, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.011999014743657412, |
|
"grad_norm": 8.479109764099121, |
|
"learning_rate": 1.117300131061599e-05, |
|
"loss": 3.3879, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.013089834265808085, |
|
"grad_norm": 4.988598823547363, |
|
"learning_rate": 1.218872870249017e-05, |
|
"loss": 3.3147, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.01418065378795876, |
|
"grad_norm": 5.518200874328613, |
|
"learning_rate": 1.3204456094364351e-05, |
|
"loss": 3.2022, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.015271473310109434, |
|
"grad_norm": 5.685606002807617, |
|
"learning_rate": 1.4220183486238533e-05, |
|
"loss": 3.1393, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.01636229283226011, |
|
"grad_norm": 5.6373491287231445, |
|
"learning_rate": 1.5235910878112714e-05, |
|
"loss": 3.05, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.017453112354410782, |
|
"grad_norm": 5.174935340881348, |
|
"learning_rate": 1.6251638269986893e-05, |
|
"loss": 2.992, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.018543931876561455, |
|
"grad_norm": 5.266006946563721, |
|
"learning_rate": 1.7267365661861077e-05, |
|
"loss": 2.9558, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.019634751398712128, |
|
"grad_norm": 4.111412048339844, |
|
"learning_rate": 1.8283093053735257e-05, |
|
"loss": 2.9089, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.0207255709208628, |
|
"grad_norm": 4.149659156799316, |
|
"learning_rate": 1.9298820445609438e-05, |
|
"loss": 2.879, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.021816390443013478, |
|
"grad_norm": 4.711772441864014, |
|
"learning_rate": 2.031454783748362e-05, |
|
"loss": 2.8057, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.02290720996516415, |
|
"grad_norm": 4.372698783874512, |
|
"learning_rate": 2.13302752293578e-05, |
|
"loss": 2.7954, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.023998029487314824, |
|
"grad_norm": 3.9052813053131104, |
|
"learning_rate": 2.234600262123198e-05, |
|
"loss": 2.7286, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.025088849009465498, |
|
"grad_norm": 3.544926404953003, |
|
"learning_rate": 2.336173001310616e-05, |
|
"loss": 2.6875, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.02617966853161617, |
|
"grad_norm": 4.380484104156494, |
|
"learning_rate": 2.437745740498034e-05, |
|
"loss": 2.6703, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.027270488053766848, |
|
"grad_norm": 4.843706130981445, |
|
"learning_rate": 2.5393184796854525e-05, |
|
"loss": 2.6387, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.02836130757591752, |
|
"grad_norm": 3.555110454559326, |
|
"learning_rate": 2.6408912188728702e-05, |
|
"loss": 2.5996, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.029452127098068194, |
|
"grad_norm": 3.3113982677459717, |
|
"learning_rate": 2.7424639580602886e-05, |
|
"loss": 2.5823, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.030542946620218867, |
|
"grad_norm": 3.4552953243255615, |
|
"learning_rate": 2.8440366972477066e-05, |
|
"loss": 2.5643, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.03163376614236954, |
|
"grad_norm": 2.9786770343780518, |
|
"learning_rate": 2.9456094364351244e-05, |
|
"loss": 2.5426, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.03272458566452022, |
|
"grad_norm": 3.227999210357666, |
|
"learning_rate": 3.0471821756225428e-05, |
|
"loss": 2.4834, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.03381540518667089, |
|
"grad_norm": 3.0225250720977783, |
|
"learning_rate": 3.148754914809961e-05, |
|
"loss": 2.4755, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.034906224708821564, |
|
"grad_norm": 2.6292648315429688, |
|
"learning_rate": 3.2503276539973785e-05, |
|
"loss": 2.4393, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.03599704423097224, |
|
"grad_norm": 2.767667055130005, |
|
"learning_rate": 3.351900393184797e-05, |
|
"loss": 2.4124, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.03708786375312291, |
|
"grad_norm": 2.6588900089263916, |
|
"learning_rate": 3.453473132372215e-05, |
|
"loss": 2.3551, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.03817868327527359, |
|
"grad_norm": 2.7497403621673584, |
|
"learning_rate": 3.555045871559633e-05, |
|
"loss": 2.3576, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.039269502797424256, |
|
"grad_norm": 2.5148332118988037, |
|
"learning_rate": 3.6566186107470514e-05, |
|
"loss": 2.3078, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.04036032231957493, |
|
"grad_norm": 3.0025594234466553, |
|
"learning_rate": 3.7581913499344695e-05, |
|
"loss": 2.303, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.0414511418417256, |
|
"grad_norm": 2.6871118545532227, |
|
"learning_rate": 3.8597640891218876e-05, |
|
"loss": 2.3062, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.04254196136387628, |
|
"grad_norm": 10.630929946899414, |
|
"learning_rate": 3.9613368283093056e-05, |
|
"loss": 2.307, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.043632780886026956, |
|
"grad_norm": 2.571220874786377, |
|
"learning_rate": 4.062909567496724e-05, |
|
"loss": 2.2837, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.044723600408177626, |
|
"grad_norm": 3.5510289669036865, |
|
"learning_rate": 4.164482306684142e-05, |
|
"loss": 2.2797, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 0.0458144199303283, |
|
"grad_norm": 2.794853925704956, |
|
"learning_rate": 4.26605504587156e-05, |
|
"loss": 2.2797, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.04690523945247897, |
|
"grad_norm": 6.132640838623047, |
|
"learning_rate": 4.367627785058978e-05, |
|
"loss": 2.2504, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 0.04799605897462965, |
|
"grad_norm": 6.17495584487915, |
|
"learning_rate": 4.469200524246396e-05, |
|
"loss": 2.2414, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 0.049086878496780326, |
|
"grad_norm": 2.3518564701080322, |
|
"learning_rate": 4.570773263433814e-05, |
|
"loss": 2.2198, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.050177698018930995, |
|
"grad_norm": 2.2073466777801514, |
|
"learning_rate": 4.672346002621232e-05, |
|
"loss": 2.2057, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 0.05126851754108167, |
|
"grad_norm": 2.018749237060547, |
|
"learning_rate": 4.77391874180865e-05, |
|
"loss": 2.1965, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 0.05235933706323234, |
|
"grad_norm": 2.7125244140625, |
|
"learning_rate": 4.875491480996068e-05, |
|
"loss": 2.19, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 0.05345015658538302, |
|
"grad_norm": 1.9725641012191772, |
|
"learning_rate": 4.977064220183487e-05, |
|
"loss": 2.1669, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 0.054540976107533695, |
|
"grad_norm": 2.0198206901550293, |
|
"learning_rate": 4.9999915451558777e-05, |
|
"loss": 2.1486, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.055631795629684365, |
|
"grad_norm": 2.068424940109253, |
|
"learning_rate": 4.999955597496219e-05, |
|
"loss": 2.1476, |
|
"step": 1581 |
|
}, |
|
{ |
|
"epoch": 0.05672261515183504, |
|
"grad_norm": 1.8509161472320557, |
|
"learning_rate": 4.9998914381774255e-05, |
|
"loss": 2.1129, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 0.05781343467398571, |
|
"grad_norm": 1.805915355682373, |
|
"learning_rate": 4.999799067923527e-05, |
|
"loss": 2.1014, |
|
"step": 1643 |
|
}, |
|
{ |
|
"epoch": 0.05890425419613639, |
|
"grad_norm": 1.9447969198226929, |
|
"learning_rate": 4.999678487776908e-05, |
|
"loss": 2.1067, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 0.059995073718287065, |
|
"grad_norm": 2.131700277328491, |
|
"learning_rate": 4.9995296990983006e-05, |
|
"loss": 2.0817, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.061085893240437734, |
|
"grad_norm": 1.9113484621047974, |
|
"learning_rate": 4.999352703566763e-05, |
|
"loss": 2.067, |
|
"step": 1736 |
|
}, |
|
{ |
|
"epoch": 0.06217671276258841, |
|
"grad_norm": 1.8802495002746582, |
|
"learning_rate": 4.999147503179668e-05, |
|
"loss": 2.0617, |
|
"step": 1767 |
|
}, |
|
{ |
|
"epoch": 0.06326753228473908, |
|
"grad_norm": 1.6704238653182983, |
|
"learning_rate": 4.998914100252672e-05, |
|
"loss": 2.0671, |
|
"step": 1798 |
|
}, |
|
{ |
|
"epoch": 0.06435835180688976, |
|
"grad_norm": 4.303380966186523, |
|
"learning_rate": 4.998652497419696e-05, |
|
"loss": 2.0436, |
|
"step": 1829 |
|
}, |
|
{ |
|
"epoch": 0.06544917132904043, |
|
"grad_norm": 1.8054529428482056, |
|
"learning_rate": 4.9983626976328927e-05, |
|
"loss": 2.0499, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.06653999085119111, |
|
"grad_norm": 1.663872241973877, |
|
"learning_rate": 4.998044704162613e-05, |
|
"loss": 2.0227, |
|
"step": 1891 |
|
}, |
|
{ |
|
"epoch": 0.06763081037334177, |
|
"grad_norm": 10.130581855773926, |
|
"learning_rate": 4.9976985205973705e-05, |
|
"loss": 2.0022, |
|
"step": 1922 |
|
}, |
|
{ |
|
"epoch": 0.06872162989549245, |
|
"grad_norm": 3.7727627754211426, |
|
"learning_rate": 4.997324150843799e-05, |
|
"loss": 2.0172, |
|
"step": 1953 |
|
}, |
|
{ |
|
"epoch": 0.06981244941764313, |
|
"grad_norm": 1.8669508695602417, |
|
"learning_rate": 4.99692159912661e-05, |
|
"loss": 2.0214, |
|
"step": 1984 |
|
}, |
|
{ |
|
"epoch": 0.0709032689397938, |
|
"grad_norm": 17.45224952697754, |
|
"learning_rate": 4.996490869988546e-05, |
|
"loss": 1.9959, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 0.07199408846194448, |
|
"grad_norm": 1.7007793188095093, |
|
"learning_rate": 4.996031968290326e-05, |
|
"loss": 2.0139, |
|
"step": 2046 |
|
}, |
|
{ |
|
"epoch": 0.07308490798409514, |
|
"grad_norm": 1.6912076473236084, |
|
"learning_rate": 4.995544899210594e-05, |
|
"loss": 1.9963, |
|
"step": 2077 |
|
}, |
|
{ |
|
"epoch": 0.07417572750624582, |
|
"grad_norm": 1.6827666759490967, |
|
"learning_rate": 4.9950296682458583e-05, |
|
"loss": 1.973, |
|
"step": 2108 |
|
}, |
|
{ |
|
"epoch": 0.0752665470283965, |
|
"grad_norm": 1.767269253730774, |
|
"learning_rate": 4.994486281210429e-05, |
|
"loss": 1.9949, |
|
"step": 2139 |
|
}, |
|
{ |
|
"epoch": 0.07635736655054717, |
|
"grad_norm": 7.507472038269043, |
|
"learning_rate": 4.9939147442363566e-05, |
|
"loss": 1.9837, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.07744818607269784, |
|
"grad_norm": 1.6840234994888306, |
|
"learning_rate": 4.9933150637733574e-05, |
|
"loss": 1.9789, |
|
"step": 2201 |
|
}, |
|
{ |
|
"epoch": 0.07853900559484851, |
|
"grad_norm": 1.6442159414291382, |
|
"learning_rate": 4.992687246588743e-05, |
|
"loss": 1.9436, |
|
"step": 2232 |
|
}, |
|
{ |
|
"epoch": 0.07962982511699919, |
|
"grad_norm": 1.546859622001648, |
|
"learning_rate": 4.992031299767347e-05, |
|
"loss": 1.9341, |
|
"step": 2263 |
|
}, |
|
{ |
|
"epoch": 0.08072064463914987, |
|
"grad_norm": 1.4932371377944946, |
|
"learning_rate": 4.9913472307114386e-05, |
|
"loss": 1.9583, |
|
"step": 2294 |
|
}, |
|
{ |
|
"epoch": 0.08181146416130054, |
|
"grad_norm": 1.4531840085983276, |
|
"learning_rate": 4.9906350471406446e-05, |
|
"loss": 1.9414, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.0829022836834512, |
|
"grad_norm": 1.5285487174987793, |
|
"learning_rate": 4.989894757091861e-05, |
|
"loss": 1.9502, |
|
"step": 2356 |
|
}, |
|
{ |
|
"epoch": 0.08399310320560188, |
|
"grad_norm": 1.4159945249557495, |
|
"learning_rate": 4.989126368919158e-05, |
|
"loss": 1.9299, |
|
"step": 2387 |
|
}, |
|
{ |
|
"epoch": 0.08508392272775256, |
|
"grad_norm": 1.4361426830291748, |
|
"learning_rate": 4.988329891293693e-05, |
|
"loss": 1.9266, |
|
"step": 2418 |
|
}, |
|
{ |
|
"epoch": 0.08617474224990324, |
|
"grad_norm": 1.4310742616653442, |
|
"learning_rate": 4.987505333203608e-05, |
|
"loss": 1.9116, |
|
"step": 2449 |
|
}, |
|
{ |
|
"epoch": 0.08726556177205391, |
|
"grad_norm": 1.3780311346054077, |
|
"learning_rate": 4.9866527039539276e-05, |
|
"loss": 1.9189, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.08835638129420458, |
|
"grad_norm": 1.3917787075042725, |
|
"learning_rate": 4.9857720131664594e-05, |
|
"loss": 1.9107, |
|
"step": 2511 |
|
}, |
|
{ |
|
"epoch": 0.08944720081635525, |
|
"grad_norm": 1.3963998556137085, |
|
"learning_rate": 4.9848632707796773e-05, |
|
"loss": 1.899, |
|
"step": 2542 |
|
}, |
|
{ |
|
"epoch": 0.09053802033850593, |
|
"grad_norm": 1.3573299646377563, |
|
"learning_rate": 4.9839264870486155e-05, |
|
"loss": 1.9084, |
|
"step": 2573 |
|
}, |
|
{ |
|
"epoch": 0.0916288398606566, |
|
"grad_norm": 1.362552523612976, |
|
"learning_rate": 4.9829616725447526e-05, |
|
"loss": 1.8954, |
|
"step": 2604 |
|
}, |
|
{ |
|
"epoch": 0.09271965938280728, |
|
"grad_norm": 1.3538340330123901, |
|
"learning_rate": 4.981968838155888e-05, |
|
"loss": 1.9035, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 0.09381047890495794, |
|
"grad_norm": 1.407291054725647, |
|
"learning_rate": 4.980947995086024e-05, |
|
"loss": 1.8778, |
|
"step": 2666 |
|
}, |
|
{ |
|
"epoch": 0.09490129842710862, |
|
"grad_norm": 1.3554202318191528, |
|
"learning_rate": 4.979899154855234e-05, |
|
"loss": 1.8783, |
|
"step": 2697 |
|
}, |
|
{ |
|
"epoch": 0.0959921179492593, |
|
"grad_norm": 1.3513182401657104, |
|
"learning_rate": 4.9788223292995386e-05, |
|
"loss": 1.8857, |
|
"step": 2728 |
|
}, |
|
{ |
|
"epoch": 0.09708293747140997, |
|
"grad_norm": 1.345706582069397, |
|
"learning_rate": 4.977717530570768e-05, |
|
"loss": 1.8882, |
|
"step": 2759 |
|
}, |
|
{ |
|
"epoch": 0.09817375699356065, |
|
"grad_norm": 1.3450254201889038, |
|
"learning_rate": 4.976584771136425e-05, |
|
"loss": 1.8746, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.09926457651571131, |
|
"grad_norm": 1.7820961475372314, |
|
"learning_rate": 4.975424063779547e-05, |
|
"loss": 1.8557, |
|
"step": 2821 |
|
}, |
|
{ |
|
"epoch": 0.10035539603786199, |
|
"grad_norm": 1.3493785858154297, |
|
"learning_rate": 4.974235421598557e-05, |
|
"loss": 1.8683, |
|
"step": 2852 |
|
}, |
|
{ |
|
"epoch": 0.10144621556001267, |
|
"grad_norm": 1.3066306114196777, |
|
"learning_rate": 4.973018858007122e-05, |
|
"loss": 1.8777, |
|
"step": 2883 |
|
}, |
|
{ |
|
"epoch": 0.10253703508216334, |
|
"grad_norm": 1.2991000413894653, |
|
"learning_rate": 4.9717743867339963e-05, |
|
"loss": 1.8605, |
|
"step": 2914 |
|
}, |
|
{ |
|
"epoch": 0.10362785460431402, |
|
"grad_norm": 1.3748496770858765, |
|
"learning_rate": 4.9705020218228695e-05, |
|
"loss": 1.8683, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 0.10471867412646468, |
|
"grad_norm": 1.3229252099990845, |
|
"learning_rate": 4.969201777632205e-05, |
|
"loss": 1.858, |
|
"step": 2976 |
|
}, |
|
{ |
|
"epoch": 0.10580949364861536, |
|
"grad_norm": 1.2874318361282349, |
|
"learning_rate": 4.9678736688350846e-05, |
|
"loss": 1.8435, |
|
"step": 3007 |
|
}, |
|
{ |
|
"epoch": 0.10690031317076604, |
|
"grad_norm": 1.3341971635818481, |
|
"learning_rate": 4.966517710419033e-05, |
|
"loss": 1.8663, |
|
"step": 3038 |
|
}, |
|
{ |
|
"epoch": 0.10799113269291671, |
|
"grad_norm": 1.276282548904419, |
|
"learning_rate": 4.965133917685858e-05, |
|
"loss": 1.854, |
|
"step": 3069 |
|
}, |
|
{ |
|
"epoch": 0.10908195221506739, |
|
"grad_norm": 1.2999404668807983, |
|
"learning_rate": 4.9637223062514714e-05, |
|
"loss": 1.8498, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.11017277173721805, |
|
"grad_norm": 1.2905164957046509, |
|
"learning_rate": 4.962282892045718e-05, |
|
"loss": 1.8436, |
|
"step": 3131 |
|
}, |
|
{ |
|
"epoch": 0.11126359125936873, |
|
"grad_norm": 1.9952430725097656, |
|
"learning_rate": 4.9608156913121904e-05, |
|
"loss": 1.8316, |
|
"step": 3162 |
|
}, |
|
{ |
|
"epoch": 0.1123544107815194, |
|
"grad_norm": 1.2637739181518555, |
|
"learning_rate": 4.959320720608049e-05, |
|
"loss": 1.8391, |
|
"step": 3193 |
|
}, |
|
{ |
|
"epoch": 0.11344523030367008, |
|
"grad_norm": 1.3711034059524536, |
|
"learning_rate": 4.9577979968038354e-05, |
|
"loss": 1.814, |
|
"step": 3224 |
|
}, |
|
{ |
|
"epoch": 0.11453604982582076, |
|
"grad_norm": 1.3050326108932495, |
|
"learning_rate": 4.956247537083282e-05, |
|
"loss": 1.8635, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 0.11562686934797142, |
|
"grad_norm": 1.230220079421997, |
|
"learning_rate": 4.9546693589431145e-05, |
|
"loss": 1.8203, |
|
"step": 3286 |
|
}, |
|
{ |
|
"epoch": 0.1167176888701221, |
|
"grad_norm": 1.2535375356674194, |
|
"learning_rate": 4.9530634801928595e-05, |
|
"loss": 1.8319, |
|
"step": 3317 |
|
}, |
|
{ |
|
"epoch": 0.11780850839227278, |
|
"grad_norm": 1.227552890777588, |
|
"learning_rate": 4.9514299189546395e-05, |
|
"loss": 1.8157, |
|
"step": 3348 |
|
}, |
|
{ |
|
"epoch": 0.11889932791442345, |
|
"grad_norm": 1.2049622535705566, |
|
"learning_rate": 4.949768693662973e-05, |
|
"loss": 1.8135, |
|
"step": 3379 |
|
}, |
|
{ |
|
"epoch": 0.11999014743657413, |
|
"grad_norm": 1.1608529090881348, |
|
"learning_rate": 4.948079823064559e-05, |
|
"loss": 1.8191, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.12108096695872479, |
|
"grad_norm": 1.2294845581054688, |
|
"learning_rate": 4.946363326218074e-05, |
|
"loss": 1.7977, |
|
"step": 3441 |
|
}, |
|
{ |
|
"epoch": 0.12217178648087547, |
|
"grad_norm": 1.1883920431137085, |
|
"learning_rate": 4.9446192224939525e-05, |
|
"loss": 1.8153, |
|
"step": 3472 |
|
}, |
|
{ |
|
"epoch": 0.12326260600302615, |
|
"grad_norm": 1.2770321369171143, |
|
"learning_rate": 4.942847531574167e-05, |
|
"loss": 1.8064, |
|
"step": 3503 |
|
}, |
|
{ |
|
"epoch": 0.12435342552517682, |
|
"grad_norm": 1.5442144870758057, |
|
"learning_rate": 4.941048273452008e-05, |
|
"loss": 1.8134, |
|
"step": 3534 |
|
}, |
|
{ |
|
"epoch": 0.12544424504732749, |
|
"grad_norm": 1.1753895282745361, |
|
"learning_rate": 4.9392214684318605e-05, |
|
"loss": 1.7909, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 0.12653506456947816, |
|
"grad_norm": 1.2292894124984741, |
|
"learning_rate": 4.93736713712897e-05, |
|
"loss": 1.7894, |
|
"step": 3596 |
|
}, |
|
{ |
|
"epoch": 0.12762588409162884, |
|
"grad_norm": 1.1696261167526245, |
|
"learning_rate": 4.9354853004692124e-05, |
|
"loss": 1.7913, |
|
"step": 3627 |
|
}, |
|
{ |
|
"epoch": 0.12871670361377952, |
|
"grad_norm": 1.1935744285583496, |
|
"learning_rate": 4.93357597968886e-05, |
|
"loss": 1.8103, |
|
"step": 3658 |
|
}, |
|
{ |
|
"epoch": 0.1298075231359302, |
|
"grad_norm": 1.1842865943908691, |
|
"learning_rate": 4.931639196334338e-05, |
|
"loss": 1.7778, |
|
"step": 3689 |
|
}, |
|
{ |
|
"epoch": 0.13089834265808087, |
|
"grad_norm": 1.187715768814087, |
|
"learning_rate": 4.9296749722619826e-05, |
|
"loss": 1.7955, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.13198916218023155, |
|
"grad_norm": 1.1973644495010376, |
|
"learning_rate": 4.9276833296377966e-05, |
|
"loss": 1.7786, |
|
"step": 3751 |
|
}, |
|
{ |
|
"epoch": 0.13307998170238222, |
|
"grad_norm": 1.129618525505066, |
|
"learning_rate": 4.925664290937196e-05, |
|
"loss": 1.7819, |
|
"step": 3782 |
|
}, |
|
{ |
|
"epoch": 0.13417080122453287, |
|
"grad_norm": 1.1466953754425049, |
|
"learning_rate": 4.9236178789447576e-05, |
|
"loss": 1.7732, |
|
"step": 3813 |
|
}, |
|
{ |
|
"epoch": 0.13526162074668355, |
|
"grad_norm": 1.1432143449783325, |
|
"learning_rate": 4.921544116753962e-05, |
|
"loss": 1.7872, |
|
"step": 3844 |
|
}, |
|
{ |
|
"epoch": 0.13635244026883422, |
|
"grad_norm": 1.1669626235961914, |
|
"learning_rate": 4.919443027766935e-05, |
|
"loss": 1.7708, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 0.1374432597909849, |
|
"grad_norm": 1.1273548603057861, |
|
"learning_rate": 4.91731463569418e-05, |
|
"loss": 1.7767, |
|
"step": 3906 |
|
}, |
|
{ |
|
"epoch": 0.13853407931313558, |
|
"grad_norm": 1.146063208580017, |
|
"learning_rate": 4.915158964554312e-05, |
|
"loss": 1.7695, |
|
"step": 3937 |
|
}, |
|
{ |
|
"epoch": 0.13962489883528625, |
|
"grad_norm": 1.1724731922149658, |
|
"learning_rate": 4.912976038673786e-05, |
|
"loss": 1.7709, |
|
"step": 3968 |
|
}, |
|
{ |
|
"epoch": 0.14071571835743693, |
|
"grad_norm": 1.122382402420044, |
|
"learning_rate": 4.9107658826866254e-05, |
|
"loss": 1.7871, |
|
"step": 3999 |
|
}, |
|
{ |
|
"epoch": 0.1418065378795876, |
|
"grad_norm": 1.2005928754806519, |
|
"learning_rate": 4.908528521534139e-05, |
|
"loss": 1.7896, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.14289735740173828, |
|
"grad_norm": 1.1000254154205322, |
|
"learning_rate": 4.906263980464644e-05, |
|
"loss": 1.7645, |
|
"step": 4061 |
|
}, |
|
{ |
|
"epoch": 0.14398817692388896, |
|
"grad_norm": 1.1797596216201782, |
|
"learning_rate": 4.903972285033178e-05, |
|
"loss": 1.7702, |
|
"step": 4092 |
|
}, |
|
{ |
|
"epoch": 0.1450789964460396, |
|
"grad_norm": 1.145271897315979, |
|
"learning_rate": 4.901653461101213e-05, |
|
"loss": 1.7541, |
|
"step": 4123 |
|
}, |
|
{ |
|
"epoch": 0.1461698159681903, |
|
"grad_norm": 1.1336746215820312, |
|
"learning_rate": 4.8993075348363626e-05, |
|
"loss": 1.7604, |
|
"step": 4154 |
|
}, |
|
{ |
|
"epoch": 0.14726063549034096, |
|
"grad_norm": 1.1435718536376953, |
|
"learning_rate": 4.896934532712084e-05, |
|
"loss": 1.7461, |
|
"step": 4185 |
|
}, |
|
{ |
|
"epoch": 0.14835145501249164, |
|
"grad_norm": 1.1105029582977295, |
|
"learning_rate": 4.8945344815073846e-05, |
|
"loss": 1.7691, |
|
"step": 4216 |
|
}, |
|
{ |
|
"epoch": 0.14944227453464232, |
|
"grad_norm": 1.1397055387496948, |
|
"learning_rate": 4.892107408306516e-05, |
|
"loss": 1.7599, |
|
"step": 4247 |
|
}, |
|
{ |
|
"epoch": 0.150533094056793, |
|
"grad_norm": 1.1158910989761353, |
|
"learning_rate": 4.889653340498669e-05, |
|
"loss": 1.7537, |
|
"step": 4278 |
|
}, |
|
{ |
|
"epoch": 0.15162391357894367, |
|
"grad_norm": 1.1238468885421753, |
|
"learning_rate": 4.8871723057776664e-05, |
|
"loss": 1.7672, |
|
"step": 4309 |
|
}, |
|
{ |
|
"epoch": 0.15271473310109435, |
|
"grad_norm": 1.1303473711013794, |
|
"learning_rate": 4.8846643321416476e-05, |
|
"loss": 1.7542, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.15380555262324502, |
|
"grad_norm": 1.117262840270996, |
|
"learning_rate": 4.882129447892753e-05, |
|
"loss": 1.7378, |
|
"step": 4371 |
|
}, |
|
{ |
|
"epoch": 0.15489637214539567, |
|
"grad_norm": 1.1086496114730835, |
|
"learning_rate": 4.8795676816368076e-05, |
|
"loss": 1.7439, |
|
"step": 4402 |
|
}, |
|
{ |
|
"epoch": 0.15598719166754635, |
|
"grad_norm": 1.276014804840088, |
|
"learning_rate": 4.876979062282995e-05, |
|
"loss": 1.7699, |
|
"step": 4433 |
|
}, |
|
{ |
|
"epoch": 0.15707801118969703, |
|
"grad_norm": 1.1294457912445068, |
|
"learning_rate": 4.8743636190435325e-05, |
|
"loss": 1.7292, |
|
"step": 4464 |
|
}, |
|
{ |
|
"epoch": 0.1581688307118477, |
|
"grad_norm": 1.1111900806427002, |
|
"learning_rate": 4.871721381433344e-05, |
|
"loss": 1.7487, |
|
"step": 4495 |
|
}, |
|
{ |
|
"epoch": 0.15925965023399838, |
|
"grad_norm": 1.0868422985076904, |
|
"learning_rate": 4.869052379269719e-05, |
|
"loss": 1.7397, |
|
"step": 4526 |
|
}, |
|
{ |
|
"epoch": 0.16035046975614906, |
|
"grad_norm": 1.0772205591201782, |
|
"learning_rate": 4.866356642671985e-05, |
|
"loss": 1.7377, |
|
"step": 4557 |
|
}, |
|
{ |
|
"epoch": 0.16144128927829973, |
|
"grad_norm": 1.0649960041046143, |
|
"learning_rate": 4.8636342020611634e-05, |
|
"loss": 1.7253, |
|
"step": 4588 |
|
}, |
|
{ |
|
"epoch": 0.1625321088004504, |
|
"grad_norm": 1.0653575658798218, |
|
"learning_rate": 4.860885088159626e-05, |
|
"loss": 1.7319, |
|
"step": 4619 |
|
}, |
|
{ |
|
"epoch": 0.16362292832260109, |
|
"grad_norm": 1.0669118165969849, |
|
"learning_rate": 4.858109331990751e-05, |
|
"loss": 1.7425, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.16471374784475176, |
|
"grad_norm": 1.0559724569320679, |
|
"learning_rate": 4.855306964878567e-05, |
|
"loss": 1.7364, |
|
"step": 4681 |
|
}, |
|
{ |
|
"epoch": 0.1658045673669024, |
|
"grad_norm": 1.038191318511963, |
|
"learning_rate": 4.8524780184474084e-05, |
|
"loss": 1.7299, |
|
"step": 4712 |
|
}, |
|
{ |
|
"epoch": 0.1668953868890531, |
|
"grad_norm": 1.0602952241897583, |
|
"learning_rate": 4.8496225246215496e-05, |
|
"loss": 1.7256, |
|
"step": 4743 |
|
}, |
|
{ |
|
"epoch": 0.16798620641120376, |
|
"grad_norm": 1.0466082096099854, |
|
"learning_rate": 4.8467405156248505e-05, |
|
"loss": 1.7148, |
|
"step": 4774 |
|
}, |
|
{ |
|
"epoch": 0.16907702593335444, |
|
"grad_norm": 1.051708698272705, |
|
"learning_rate": 4.843832023980392e-05, |
|
"loss": 1.722, |
|
"step": 4805 |
|
}, |
|
{ |
|
"epoch": 0.17016784545550512, |
|
"grad_norm": 1.065427541732788, |
|
"learning_rate": 4.840897082510106e-05, |
|
"loss": 1.7088, |
|
"step": 4836 |
|
}, |
|
{ |
|
"epoch": 0.1712586649776558, |
|
"grad_norm": 1.0983659029006958, |
|
"learning_rate": 4.8379357243344084e-05, |
|
"loss": 1.7422, |
|
"step": 4867 |
|
}, |
|
{ |
|
"epoch": 0.17234948449980647, |
|
"grad_norm": 1.0919371843338013, |
|
"learning_rate": 4.8349479828718236e-05, |
|
"loss": 1.7253, |
|
"step": 4898 |
|
}, |
|
{ |
|
"epoch": 0.17344030402195715, |
|
"grad_norm": 1.0881919860839844, |
|
"learning_rate": 4.8319338918386075e-05, |
|
"loss": 1.7185, |
|
"step": 4929 |
|
}, |
|
{ |
|
"epoch": 0.17453112354410782, |
|
"grad_norm": 1.0374606847763062, |
|
"learning_rate": 4.828893485248369e-05, |
|
"loss": 1.7255, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.1756219430662585, |
|
"grad_norm": 1.0947892665863037, |
|
"learning_rate": 4.825826797411682e-05, |
|
"loss": 1.7095, |
|
"step": 4991 |
|
}, |
|
{ |
|
"epoch": 0.17671276258840915, |
|
"grad_norm": 1.1086746454238892, |
|
"learning_rate": 4.822733862935702e-05, |
|
"loss": 1.7233, |
|
"step": 5022 |
|
}, |
|
{ |
|
"epoch": 0.17780358211055983, |
|
"grad_norm": 1.0662850141525269, |
|
"learning_rate": 4.819614716723775e-05, |
|
"loss": 1.7426, |
|
"step": 5053 |
|
}, |
|
{ |
|
"epoch": 0.1788944016327105, |
|
"grad_norm": 1.0137659311294556, |
|
"learning_rate": 4.8164693939750425e-05, |
|
"loss": 1.7358, |
|
"step": 5084 |
|
}, |
|
{ |
|
"epoch": 0.17998522115486118, |
|
"grad_norm": 1.049338698387146, |
|
"learning_rate": 4.813297930184042e-05, |
|
"loss": 1.6981, |
|
"step": 5115 |
|
}, |
|
{ |
|
"epoch": 0.18107604067701186, |
|
"grad_norm": 1.08591628074646, |
|
"learning_rate": 4.810100361140314e-05, |
|
"loss": 1.7178, |
|
"step": 5146 |
|
}, |
|
{ |
|
"epoch": 0.18216686019916253, |
|
"grad_norm": 1.0983690023422241, |
|
"learning_rate": 4.8068767229279885e-05, |
|
"loss": 1.7179, |
|
"step": 5177 |
|
}, |
|
{ |
|
"epoch": 0.1832576797213132, |
|
"grad_norm": 1.0783159732818604, |
|
"learning_rate": 4.8036270519253854e-05, |
|
"loss": 1.7198, |
|
"step": 5208 |
|
}, |
|
{ |
|
"epoch": 0.1843484992434639, |
|
"grad_norm": 1.0519956350326538, |
|
"learning_rate": 4.8003513848046e-05, |
|
"loss": 1.7154, |
|
"step": 5239 |
|
}, |
|
{ |
|
"epoch": 0.18543931876561456, |
|
"grad_norm": 1.0484226942062378, |
|
"learning_rate": 4.79704975853109e-05, |
|
"loss": 1.7081, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 0.18653013828776524, |
|
"grad_norm": 1.0263608694076538, |
|
"learning_rate": 4.793722210363262e-05, |
|
"loss": 1.6961, |
|
"step": 5301 |
|
}, |
|
{ |
|
"epoch": 0.1876209578099159, |
|
"grad_norm": 0.9863409399986267, |
|
"learning_rate": 4.7903687778520414e-05, |
|
"loss": 1.7198, |
|
"step": 5332 |
|
}, |
|
{ |
|
"epoch": 0.18871177733206657, |
|
"grad_norm": 1.0293922424316406, |
|
"learning_rate": 4.7869894988404593e-05, |
|
"loss": 1.6958, |
|
"step": 5363 |
|
}, |
|
{ |
|
"epoch": 0.18980259685421724, |
|
"grad_norm": 0.9902015924453735, |
|
"learning_rate": 4.783584411463221e-05, |
|
"loss": 1.7162, |
|
"step": 5394 |
|
}, |
|
{ |
|
"epoch": 0.19089341637636792, |
|
"grad_norm": 1.0472259521484375, |
|
"learning_rate": 4.780153554146274e-05, |
|
"loss": 1.7189, |
|
"step": 5425 |
|
}, |
|
{ |
|
"epoch": 0.1919842358985186, |
|
"grad_norm": 1.0958000421524048, |
|
"learning_rate": 4.7766969656063766e-05, |
|
"loss": 1.7288, |
|
"step": 5456 |
|
}, |
|
{ |
|
"epoch": 0.19307505542066927, |
|
"grad_norm": 1.0132781267166138, |
|
"learning_rate": 4.773214684850662e-05, |
|
"loss": 1.6915, |
|
"step": 5487 |
|
}, |
|
{ |
|
"epoch": 0.19416587494281995, |
|
"grad_norm": 1.0274685621261597, |
|
"learning_rate": 4.769706751176193e-05, |
|
"loss": 1.7063, |
|
"step": 5518 |
|
}, |
|
{ |
|
"epoch": 0.19525669446497063, |
|
"grad_norm": 1.0145214796066284, |
|
"learning_rate": 4.7661732041695264e-05, |
|
"loss": 1.7072, |
|
"step": 5549 |
|
}, |
|
{ |
|
"epoch": 0.1963475139871213, |
|
"grad_norm": 1.053489327430725, |
|
"learning_rate": 4.762614083706258e-05, |
|
"loss": 1.6897, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.19743833350927198, |
|
"grad_norm": 1.0426928997039795, |
|
"learning_rate": 4.759029429950581e-05, |
|
"loss": 1.6868, |
|
"step": 5611 |
|
}, |
|
{ |
|
"epoch": 0.19852915303142263, |
|
"grad_norm": 1.0594595670700073, |
|
"learning_rate": 4.7554192833548235e-05, |
|
"loss": 1.6978, |
|
"step": 5642 |
|
}, |
|
{ |
|
"epoch": 0.1996199725535733, |
|
"grad_norm": 1.0251655578613281, |
|
"learning_rate": 4.751783684659e-05, |
|
"loss": 1.6834, |
|
"step": 5673 |
|
}, |
|
{ |
|
"epoch": 0.20071079207572398, |
|
"grad_norm": 1.0762938261032104, |
|
"learning_rate": 4.748122674890348e-05, |
|
"loss": 1.6825, |
|
"step": 5704 |
|
}, |
|
{ |
|
"epoch": 0.20180161159787466, |
|
"grad_norm": 1.0672742128372192, |
|
"learning_rate": 4.7444362953628654e-05, |
|
"loss": 1.6811, |
|
"step": 5735 |
|
}, |
|
{ |
|
"epoch": 0.20289243112002533, |
|
"grad_norm": 1.0267951488494873, |
|
"learning_rate": 4.7407245876768424e-05, |
|
"loss": 1.7024, |
|
"step": 5766 |
|
}, |
|
{ |
|
"epoch": 0.203983250642176, |
|
"grad_norm": 1.0520440340042114, |
|
"learning_rate": 4.736987593718397e-05, |
|
"loss": 1.7008, |
|
"step": 5797 |
|
}, |
|
{ |
|
"epoch": 0.2050740701643267, |
|
"grad_norm": 1.0106167793273926, |
|
"learning_rate": 4.733225355658999e-05, |
|
"loss": 1.6721, |
|
"step": 5828 |
|
}, |
|
{ |
|
"epoch": 0.20616488968647737, |
|
"grad_norm": 0.9883444905281067, |
|
"learning_rate": 4.7294379159549926e-05, |
|
"loss": 1.6884, |
|
"step": 5859 |
|
}, |
|
{ |
|
"epoch": 0.20725570920862804, |
|
"grad_norm": 0.9873078465461731, |
|
"learning_rate": 4.725625317347119e-05, |
|
"loss": 1.7011, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 0.2083465287307787, |
|
"grad_norm": 0.9925514459609985, |
|
"learning_rate": 4.7217876028600374e-05, |
|
"loss": 1.6965, |
|
"step": 5921 |
|
}, |
|
{ |
|
"epoch": 0.20943734825292937, |
|
"grad_norm": 1.0054575204849243, |
|
"learning_rate": 4.717924815801832e-05, |
|
"loss": 1.6923, |
|
"step": 5952 |
|
}, |
|
{ |
|
"epoch": 0.21052816777508004, |
|
"grad_norm": 1.0051021575927734, |
|
"learning_rate": 4.714036999763532e-05, |
|
"loss": 1.6864, |
|
"step": 5983 |
|
}, |
|
{ |
|
"epoch": 0.21161898729723072, |
|
"grad_norm": 1.0312145948410034, |
|
"learning_rate": 4.7101241986186116e-05, |
|
"loss": 1.6776, |
|
"step": 6014 |
|
}, |
|
{ |
|
"epoch": 0.2127098068193814, |
|
"grad_norm": 0.9964466094970703, |
|
"learning_rate": 4.7061864565225e-05, |
|
"loss": 1.6811, |
|
"step": 6045 |
|
}, |
|
{ |
|
"epoch": 0.21380062634153207, |
|
"grad_norm": 0.9997463226318359, |
|
"learning_rate": 4.702223817912081e-05, |
|
"loss": 1.6805, |
|
"step": 6076 |
|
}, |
|
{ |
|
"epoch": 0.21489144586368275, |
|
"grad_norm": 0.9945687055587769, |
|
"learning_rate": 4.698236327505195e-05, |
|
"loss": 1.6746, |
|
"step": 6107 |
|
}, |
|
{ |
|
"epoch": 0.21598226538583343, |
|
"grad_norm": 1.0496478080749512, |
|
"learning_rate": 4.694224030300127e-05, |
|
"loss": 1.6791, |
|
"step": 6138 |
|
}, |
|
{ |
|
"epoch": 0.2170730849079841, |
|
"grad_norm": 1.0148327350616455, |
|
"learning_rate": 4.690186971575107e-05, |
|
"loss": 1.6848, |
|
"step": 6169 |
|
}, |
|
{ |
|
"epoch": 0.21816390443013478, |
|
"grad_norm": 0.9963369965553284, |
|
"learning_rate": 4.6861251968877916e-05, |
|
"loss": 1.7055, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 0.21925472395228543, |
|
"grad_norm": 0.9630306959152222, |
|
"learning_rate": 4.68203875207476e-05, |
|
"loss": 1.6716, |
|
"step": 6231 |
|
}, |
|
{ |
|
"epoch": 0.2203455434744361, |
|
"grad_norm": 0.9671465158462524, |
|
"learning_rate": 4.677927683250983e-05, |
|
"loss": 1.6754, |
|
"step": 6262 |
|
}, |
|
{ |
|
"epoch": 0.22143636299658678, |
|
"grad_norm": 0.9531513452529907, |
|
"learning_rate": 4.6737920368093156e-05, |
|
"loss": 1.6792, |
|
"step": 6293 |
|
}, |
|
{ |
|
"epoch": 0.22252718251873746, |
|
"grad_norm": 0.9835436344146729, |
|
"learning_rate": 4.669631859419965e-05, |
|
"loss": 1.6651, |
|
"step": 6324 |
|
}, |
|
{ |
|
"epoch": 0.22361800204088814, |
|
"grad_norm": 0.9870143532752991, |
|
"learning_rate": 4.6654471980299676e-05, |
|
"loss": 1.6903, |
|
"step": 6355 |
|
}, |
|
{ |
|
"epoch": 0.2247088215630388, |
|
"grad_norm": 1.0126017332077026, |
|
"learning_rate": 4.661238099862658e-05, |
|
"loss": 1.6883, |
|
"step": 6386 |
|
}, |
|
{ |
|
"epoch": 0.2257996410851895, |
|
"grad_norm": 1.0292701721191406, |
|
"learning_rate": 4.657004612417138e-05, |
|
"loss": 1.6841, |
|
"step": 6417 |
|
}, |
|
{ |
|
"epoch": 0.22689046060734017, |
|
"grad_norm": 0.9697965979576111, |
|
"learning_rate": 4.6527467834677374e-05, |
|
"loss": 1.684, |
|
"step": 6448 |
|
}, |
|
{ |
|
"epoch": 0.22798128012949084, |
|
"grad_norm": 0.9756552577018738, |
|
"learning_rate": 4.648464661063478e-05, |
|
"loss": 1.6756, |
|
"step": 6479 |
|
}, |
|
{ |
|
"epoch": 0.22907209965164152, |
|
"grad_norm": 1.1156080961227417, |
|
"learning_rate": 4.6441582935275264e-05, |
|
"loss": 1.6707, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 0.23016291917379217, |
|
"grad_norm": 0.975688636302948, |
|
"learning_rate": 4.6398277294566586e-05, |
|
"loss": 1.6738, |
|
"step": 6541 |
|
}, |
|
{ |
|
"epoch": 0.23125373869594285, |
|
"grad_norm": 0.9863653779029846, |
|
"learning_rate": 4.6354730177207e-05, |
|
"loss": 1.6748, |
|
"step": 6572 |
|
}, |
|
{ |
|
"epoch": 0.23234455821809352, |
|
"grad_norm": 0.9869725108146667, |
|
"learning_rate": 4.6310942074619787e-05, |
|
"loss": 1.6715, |
|
"step": 6603 |
|
}, |
|
{ |
|
"epoch": 0.2334353777402442, |
|
"grad_norm": 0.9991948008537292, |
|
"learning_rate": 4.626691348094777e-05, |
|
"loss": 1.6692, |
|
"step": 6634 |
|
}, |
|
{ |
|
"epoch": 0.23452619726239488, |
|
"grad_norm": 0.9353803992271423, |
|
"learning_rate": 4.622264489304762e-05, |
|
"loss": 1.6571, |
|
"step": 6665 |
|
}, |
|
{ |
|
"epoch": 0.23561701678454555, |
|
"grad_norm": 0.9929242134094238, |
|
"learning_rate": 4.617813681048434e-05, |
|
"loss": 1.6614, |
|
"step": 6696 |
|
}, |
|
{ |
|
"epoch": 0.23670783630669623, |
|
"grad_norm": 1.003203272819519, |
|
"learning_rate": 4.61333897355256e-05, |
|
"loss": 1.6577, |
|
"step": 6727 |
|
}, |
|
{ |
|
"epoch": 0.2377986558288469, |
|
"grad_norm": 0.9584728479385376, |
|
"learning_rate": 4.608840417313604e-05, |
|
"loss": 1.6862, |
|
"step": 6758 |
|
}, |
|
{ |
|
"epoch": 0.23888947535099758, |
|
"grad_norm": 0.9825113415718079, |
|
"learning_rate": 4.6043180630971646e-05, |
|
"loss": 1.6651, |
|
"step": 6789 |
|
}, |
|
{ |
|
"epoch": 0.23998029487314826, |
|
"grad_norm": 0.9904153347015381, |
|
"learning_rate": 4.599771961937391e-05, |
|
"loss": 1.6665, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 0.2410711143952989, |
|
"grad_norm": 0.9444073438644409, |
|
"learning_rate": 4.5952021651364204e-05, |
|
"loss": 1.6591, |
|
"step": 6851 |
|
}, |
|
{ |
|
"epoch": 0.24216193391744958, |
|
"grad_norm": 0.9723752737045288, |
|
"learning_rate": 4.590608724263786e-05, |
|
"loss": 1.6711, |
|
"step": 6882 |
|
}, |
|
{ |
|
"epoch": 0.24325275343960026, |
|
"grad_norm": 1.0078589916229248, |
|
"learning_rate": 4.585991691155845e-05, |
|
"loss": 1.6764, |
|
"step": 6913 |
|
}, |
|
{ |
|
"epoch": 0.24434357296175094, |
|
"grad_norm": 0.9442929625511169, |
|
"learning_rate": 4.581351117915188e-05, |
|
"loss": 1.6484, |
|
"step": 6944 |
|
}, |
|
{ |
|
"epoch": 0.24543439248390161, |
|
"grad_norm": 0.9573075771331787, |
|
"learning_rate": 4.5766870569100534e-05, |
|
"loss": 1.6638, |
|
"step": 6975 |
|
}, |
|
{ |
|
"epoch": 0.2465252120060523, |
|
"grad_norm": 0.9371550679206848, |
|
"learning_rate": 4.571999560773736e-05, |
|
"loss": 1.6565, |
|
"step": 7006 |
|
}, |
|
{ |
|
"epoch": 0.24761603152820297, |
|
"grad_norm": 0.9628654718399048, |
|
"learning_rate": 4.5672886824039915e-05, |
|
"loss": 1.653, |
|
"step": 7037 |
|
}, |
|
{ |
|
"epoch": 0.24870685105035364, |
|
"grad_norm": 1.265812635421753, |
|
"learning_rate": 4.5625544749624435e-05, |
|
"loss": 1.6661, |
|
"step": 7068 |
|
}, |
|
{ |
|
"epoch": 0.24979767057250432, |
|
"grad_norm": 0.9990148544311523, |
|
"learning_rate": 4.5577969918739794e-05, |
|
"loss": 1.6536, |
|
"step": 7099 |
|
}, |
|
{ |
|
"epoch": 0.25088849009465497, |
|
"grad_norm": 0.9349009394645691, |
|
"learning_rate": 4.5530162868261486e-05, |
|
"loss": 1.6578, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 0.25197930961680565, |
|
"grad_norm": 0.9215208292007446, |
|
"learning_rate": 4.548212413768558e-05, |
|
"loss": 1.6454, |
|
"step": 7161 |
|
}, |
|
{ |
|
"epoch": 0.2530701291389563, |
|
"grad_norm": 0.9580157995223999, |
|
"learning_rate": 4.543385426912261e-05, |
|
"loss": 1.6643, |
|
"step": 7192 |
|
}, |
|
{ |
|
"epoch": 0.254160948661107, |
|
"grad_norm": 0.9289986491203308, |
|
"learning_rate": 4.53853538072915e-05, |
|
"loss": 1.6659, |
|
"step": 7223 |
|
}, |
|
{ |
|
"epoch": 0.2552517681832577, |
|
"grad_norm": 0.9344154000282288, |
|
"learning_rate": 4.533662329951336e-05, |
|
"loss": 1.6772, |
|
"step": 7254 |
|
}, |
|
{ |
|
"epoch": 0.25634258770540835, |
|
"grad_norm": 0.9400781393051147, |
|
"learning_rate": 4.528766329570536e-05, |
|
"loss": 1.6493, |
|
"step": 7285 |
|
}, |
|
{ |
|
"epoch": 0.25743340722755903, |
|
"grad_norm": 0.963218092918396, |
|
"learning_rate": 4.523847434837447e-05, |
|
"loss": 1.6394, |
|
"step": 7316 |
|
}, |
|
{ |
|
"epoch": 0.2585242267497097, |
|
"grad_norm": 0.9619491696357727, |
|
"learning_rate": 4.518905701261128e-05, |
|
"loss": 1.6574, |
|
"step": 7347 |
|
}, |
|
{ |
|
"epoch": 0.2596150462718604, |
|
"grad_norm": 0.9310999512672424, |
|
"learning_rate": 4.5139411846083715e-05, |
|
"loss": 1.673, |
|
"step": 7378 |
|
}, |
|
{ |
|
"epoch": 0.26070586579401106, |
|
"grad_norm": 1.1424349546432495, |
|
"learning_rate": 4.508953940903073e-05, |
|
"loss": 1.6626, |
|
"step": 7409 |
|
}, |
|
{ |
|
"epoch": 0.26179668531616174, |
|
"grad_norm": 0.9687772393226624, |
|
"learning_rate": 4.5039440264255994e-05, |
|
"loss": 1.6548, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 0.2628875048383124, |
|
"grad_norm": 0.9415249824523926, |
|
"learning_rate": 4.498911497712155e-05, |
|
"loss": 1.6452, |
|
"step": 7471 |
|
}, |
|
{ |
|
"epoch": 0.2639783243604631, |
|
"grad_norm": 0.9598653316497803, |
|
"learning_rate": 4.493856411554142e-05, |
|
"loss": 1.6527, |
|
"step": 7502 |
|
}, |
|
{ |
|
"epoch": 0.26506914388261377, |
|
"grad_norm": 0.9518615007400513, |
|
"learning_rate": 4.4887788249975206e-05, |
|
"loss": 1.6445, |
|
"step": 7533 |
|
}, |
|
{ |
|
"epoch": 0.26615996340476444, |
|
"grad_norm": 0.9153622388839722, |
|
"learning_rate": 4.4836787953421656e-05, |
|
"loss": 1.6255, |
|
"step": 7564 |
|
}, |
|
{ |
|
"epoch": 0.26725078292691506, |
|
"grad_norm": 0.9497701525688171, |
|
"learning_rate": 4.478556380141218e-05, |
|
"loss": 1.6499, |
|
"step": 7595 |
|
}, |
|
{ |
|
"epoch": 0.26834160244906574, |
|
"grad_norm": 0.9468094706535339, |
|
"learning_rate": 4.4734116372004375e-05, |
|
"loss": 1.6269, |
|
"step": 7626 |
|
}, |
|
{ |
|
"epoch": 0.2694324219712164, |
|
"grad_norm": 0.9106606245040894, |
|
"learning_rate": 4.4682446245775477e-05, |
|
"loss": 1.6396, |
|
"step": 7657 |
|
}, |
|
{ |
|
"epoch": 0.2705232414933671, |
|
"grad_norm": 0.946415901184082, |
|
"learning_rate": 4.463055400581586e-05, |
|
"loss": 1.6443, |
|
"step": 7688 |
|
}, |
|
{ |
|
"epoch": 0.27161406101551777, |
|
"grad_norm": 0.9249156713485718, |
|
"learning_rate": 4.4578440237722374e-05, |
|
"loss": 1.649, |
|
"step": 7719 |
|
}, |
|
{ |
|
"epoch": 0.27270488053766845, |
|
"grad_norm": 0.9326067566871643, |
|
"learning_rate": 4.452610552959183e-05, |
|
"loss": 1.6399, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 0.2737957000598191, |
|
"grad_norm": 0.9351608157157898, |
|
"learning_rate": 4.447355047201428e-05, |
|
"loss": 1.6286, |
|
"step": 7781 |
|
}, |
|
{ |
|
"epoch": 0.2748865195819698, |
|
"grad_norm": 0.9721508622169495, |
|
"learning_rate": 4.4420775658066414e-05, |
|
"loss": 1.6503, |
|
"step": 7812 |
|
}, |
|
{ |
|
"epoch": 0.2759773391041205, |
|
"grad_norm": 0.9394717812538147, |
|
"learning_rate": 4.436778168330484e-05, |
|
"loss": 1.6514, |
|
"step": 7843 |
|
}, |
|
{ |
|
"epoch": 0.27706815862627115, |
|
"grad_norm": 0.9202150702476501, |
|
"learning_rate": 4.4314569145759353e-05, |
|
"loss": 1.6252, |
|
"step": 7874 |
|
}, |
|
{ |
|
"epoch": 0.27815897814842183, |
|
"grad_norm": 0.9670141339302063, |
|
"learning_rate": 4.42611386459262e-05, |
|
"loss": 1.6349, |
|
"step": 7905 |
|
}, |
|
{ |
|
"epoch": 0.2792497976705725, |
|
"grad_norm": 0.9340211749076843, |
|
"learning_rate": 4.420749078676133e-05, |
|
"loss": 1.642, |
|
"step": 7936 |
|
}, |
|
{ |
|
"epoch": 0.2803406171927232, |
|
"grad_norm": 0.9478600025177002, |
|
"learning_rate": 4.4153626173673516e-05, |
|
"loss": 1.6552, |
|
"step": 7967 |
|
}, |
|
{ |
|
"epoch": 0.28143143671487386, |
|
"grad_norm": 0.9464743733406067, |
|
"learning_rate": 4.409954541451762e-05, |
|
"loss": 1.6378, |
|
"step": 7998 |
|
}, |
|
{ |
|
"epoch": 0.28252225623702454, |
|
"grad_norm": 0.9389581084251404, |
|
"learning_rate": 4.404524911958764e-05, |
|
"loss": 1.6325, |
|
"step": 8029 |
|
}, |
|
{ |
|
"epoch": 0.2836130757591752, |
|
"grad_norm": 0.9357293248176575, |
|
"learning_rate": 4.399073790160989e-05, |
|
"loss": 1.6422, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 0.2847038952813259, |
|
"grad_norm": 0.9474714398384094, |
|
"learning_rate": 4.393601237573607e-05, |
|
"loss": 1.6595, |
|
"step": 8091 |
|
}, |
|
{ |
|
"epoch": 0.28579471480347657, |
|
"grad_norm": 0.9302358031272888, |
|
"learning_rate": 4.388107315953628e-05, |
|
"loss": 1.6439, |
|
"step": 8122 |
|
}, |
|
{ |
|
"epoch": 0.28688553432562724, |
|
"grad_norm": 0.948805570602417, |
|
"learning_rate": 4.382592087299212e-05, |
|
"loss": 1.6459, |
|
"step": 8153 |
|
}, |
|
{ |
|
"epoch": 0.2879763538477779, |
|
"grad_norm": 0.9055014252662659, |
|
"learning_rate": 4.377055613848964e-05, |
|
"loss": 1.6427, |
|
"step": 8184 |
|
}, |
|
{ |
|
"epoch": 0.28906717336992854, |
|
"grad_norm": 0.9128564596176147, |
|
"learning_rate": 4.3714979580812355e-05, |
|
"loss": 1.6254, |
|
"step": 8215 |
|
}, |
|
{ |
|
"epoch": 0.2901579928920792, |
|
"grad_norm": 0.8954737782478333, |
|
"learning_rate": 4.365919182713416e-05, |
|
"loss": 1.6265, |
|
"step": 8246 |
|
}, |
|
{ |
|
"epoch": 0.2912488124142299, |
|
"grad_norm": 0.9502245783805847, |
|
"learning_rate": 4.360319350701226e-05, |
|
"loss": 1.6366, |
|
"step": 8277 |
|
}, |
|
{ |
|
"epoch": 0.2923396319363806, |
|
"grad_norm": 0.907567024230957, |
|
"learning_rate": 4.3546985252380115e-05, |
|
"loss": 1.6222, |
|
"step": 8308 |
|
}, |
|
{ |
|
"epoch": 0.29343045145853125, |
|
"grad_norm": 0.9578949809074402, |
|
"learning_rate": 4.349056769754021e-05, |
|
"loss": 1.6411, |
|
"step": 8339 |
|
}, |
|
{ |
|
"epoch": 0.2945212709806819, |
|
"grad_norm": 0.9497959613800049, |
|
"learning_rate": 4.3433941479156994e-05, |
|
"loss": 1.632, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 0.2956120905028326, |
|
"grad_norm": 1.1019785404205322, |
|
"learning_rate": 4.3377107236249647e-05, |
|
"loss": 1.6495, |
|
"step": 8401 |
|
}, |
|
{ |
|
"epoch": 0.2967029100249833, |
|
"grad_norm": 0.9178378582000732, |
|
"learning_rate": 4.332006561018488e-05, |
|
"loss": 1.6298, |
|
"step": 8432 |
|
}, |
|
{ |
|
"epoch": 0.29779372954713396, |
|
"grad_norm": 0.9462074041366577, |
|
"learning_rate": 4.3262817244669683e-05, |
|
"loss": 1.6327, |
|
"step": 8463 |
|
}, |
|
{ |
|
"epoch": 0.29888454906928463, |
|
"grad_norm": 0.9369425177574158, |
|
"learning_rate": 4.3205362785744083e-05, |
|
"loss": 1.6372, |
|
"step": 8494 |
|
}, |
|
{ |
|
"epoch": 0.2999753685914353, |
|
"grad_norm": 0.918860912322998, |
|
"learning_rate": 4.314770288177384e-05, |
|
"loss": 1.6205, |
|
"step": 8525 |
|
}, |
|
{ |
|
"epoch": 0.301066188113586, |
|
"grad_norm": 0.9392362236976624, |
|
"learning_rate": 4.308983818344313e-05, |
|
"loss": 1.6184, |
|
"step": 8556 |
|
}, |
|
{ |
|
"epoch": 0.30215700763573666, |
|
"grad_norm": 0.9137156009674072, |
|
"learning_rate": 4.3031769343747206e-05, |
|
"loss": 1.6253, |
|
"step": 8587 |
|
}, |
|
{ |
|
"epoch": 0.30324782715788734, |
|
"grad_norm": 0.9086781144142151, |
|
"learning_rate": 4.297349701798505e-05, |
|
"loss": 1.6264, |
|
"step": 8618 |
|
}, |
|
{ |
|
"epoch": 0.304338646680038, |
|
"grad_norm": 0.9457340240478516, |
|
"learning_rate": 4.2915021863751916e-05, |
|
"loss": 1.6098, |
|
"step": 8649 |
|
}, |
|
{ |
|
"epoch": 0.3054294662021887, |
|
"grad_norm": 0.9216846823692322, |
|
"learning_rate": 4.285634454093198e-05, |
|
"loss": 1.6325, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 0.30652028572433937, |
|
"grad_norm": 0.9189222455024719, |
|
"learning_rate": 4.279746571169086e-05, |
|
"loss": 1.6164, |
|
"step": 8711 |
|
}, |
|
{ |
|
"epoch": 0.30761110524649005, |
|
"grad_norm": 0.9216083288192749, |
|
"learning_rate": 4.2738386040468136e-05, |
|
"loss": 1.6084, |
|
"step": 8742 |
|
}, |
|
{ |
|
"epoch": 0.3087019247686407, |
|
"grad_norm": 0.9195507764816284, |
|
"learning_rate": 4.2679106193969866e-05, |
|
"loss": 1.6249, |
|
"step": 8773 |
|
}, |
|
{ |
|
"epoch": 0.30979274429079134, |
|
"grad_norm": 0.9332253932952881, |
|
"learning_rate": 4.261962684116106e-05, |
|
"loss": 1.633, |
|
"step": 8804 |
|
}, |
|
{ |
|
"epoch": 0.310883563812942, |
|
"grad_norm": 0.9223430752754211, |
|
"learning_rate": 4.2559948653258145e-05, |
|
"loss": 1.6117, |
|
"step": 8835 |
|
}, |
|
{ |
|
"epoch": 0.3119743833350927, |
|
"grad_norm": 0.8972631692886353, |
|
"learning_rate": 4.250007230372134e-05, |
|
"loss": 1.5931, |
|
"step": 8866 |
|
}, |
|
{ |
|
"epoch": 0.3130652028572434, |
|
"grad_norm": 0.8789275288581848, |
|
"learning_rate": 4.2439998468247126e-05, |
|
"loss": 1.6384, |
|
"step": 8897 |
|
}, |
|
{ |
|
"epoch": 0.31415602237939405, |
|
"grad_norm": 0.8869777321815491, |
|
"learning_rate": 4.2379727824760566e-05, |
|
"loss": 1.6043, |
|
"step": 8928 |
|
}, |
|
{ |
|
"epoch": 0.3152468419015447, |
|
"grad_norm": 0.9047911167144775, |
|
"learning_rate": 4.231926105340768e-05, |
|
"loss": 1.6301, |
|
"step": 8959 |
|
}, |
|
{ |
|
"epoch": 0.3163376614236954, |
|
"grad_norm": 0.8834474086761475, |
|
"learning_rate": 4.225859883654776e-05, |
|
"loss": 1.624, |
|
"step": 8990 |
|
}, |
|
{ |
|
"epoch": 0.3174284809458461, |
|
"grad_norm": 0.9142968058586121, |
|
"learning_rate": 4.219774185874569e-05, |
|
"loss": 1.6232, |
|
"step": 9021 |
|
}, |
|
{ |
|
"epoch": 0.31851930046799676, |
|
"grad_norm": 0.9930256009101868, |
|
"learning_rate": 4.213669080676418e-05, |
|
"loss": 1.6091, |
|
"step": 9052 |
|
}, |
|
{ |
|
"epoch": 0.31961011999014743, |
|
"grad_norm": 0.9061794877052307, |
|
"learning_rate": 4.2075446369556056e-05, |
|
"loss": 1.6468, |
|
"step": 9083 |
|
}, |
|
{ |
|
"epoch": 0.3207009395122981, |
|
"grad_norm": 0.8935773372650146, |
|
"learning_rate": 4.201400923825648e-05, |
|
"loss": 1.598, |
|
"step": 9114 |
|
}, |
|
{ |
|
"epoch": 0.3217917590344488, |
|
"grad_norm": 0.9085298180580139, |
|
"learning_rate": 4.195238010617511e-05, |
|
"loss": 1.6136, |
|
"step": 9145 |
|
}, |
|
{ |
|
"epoch": 0.32288257855659946, |
|
"grad_norm": 0.9264605045318604, |
|
"learning_rate": 4.1890559668788344e-05, |
|
"loss": 1.6174, |
|
"step": 9176 |
|
}, |
|
{ |
|
"epoch": 0.32397339807875014, |
|
"grad_norm": 0.9291498064994812, |
|
"learning_rate": 4.1828548623731405e-05, |
|
"loss": 1.6179, |
|
"step": 9207 |
|
}, |
|
{ |
|
"epoch": 0.3250642176009008, |
|
"grad_norm": 0.8831276893615723, |
|
"learning_rate": 4.1766347670790506e-05, |
|
"loss": 1.6119, |
|
"step": 9238 |
|
}, |
|
{ |
|
"epoch": 0.3261550371230515, |
|
"grad_norm": 0.9102134108543396, |
|
"learning_rate": 4.170395751189495e-05, |
|
"loss": 1.6266, |
|
"step": 9269 |
|
}, |
|
{ |
|
"epoch": 0.32724585664520217, |
|
"grad_norm": 0.8952730298042297, |
|
"learning_rate": 4.164137885110921e-05, |
|
"loss": 1.622, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 0.32833667616735285, |
|
"grad_norm": 0.9174513220787048, |
|
"learning_rate": 4.157861239462495e-05, |
|
"loss": 1.6163, |
|
"step": 9331 |
|
}, |
|
{ |
|
"epoch": 0.3294274956895035, |
|
"grad_norm": 0.880759060382843, |
|
"learning_rate": 4.1515658850753114e-05, |
|
"loss": 1.5997, |
|
"step": 9362 |
|
}, |
|
{ |
|
"epoch": 0.3305183152116542, |
|
"grad_norm": 0.8925946950912476, |
|
"learning_rate": 4.145251892991588e-05, |
|
"loss": 1.5974, |
|
"step": 9393 |
|
}, |
|
{ |
|
"epoch": 0.3316091347338048, |
|
"grad_norm": 0.9017757773399353, |
|
"learning_rate": 4.138919334463868e-05, |
|
"loss": 1.6019, |
|
"step": 9424 |
|
}, |
|
{ |
|
"epoch": 0.3326999542559555, |
|
"grad_norm": 0.9045889377593994, |
|
"learning_rate": 4.1325682809542124e-05, |
|
"loss": 1.6318, |
|
"step": 9455 |
|
}, |
|
{ |
|
"epoch": 0.3337907737781062, |
|
"grad_norm": 0.870001494884491, |
|
"learning_rate": 4.126198804133398e-05, |
|
"loss": 1.6092, |
|
"step": 9486 |
|
}, |
|
{ |
|
"epoch": 0.33488159330025685, |
|
"grad_norm": 0.8898991942405701, |
|
"learning_rate": 4.1198109758801055e-05, |
|
"loss": 1.6015, |
|
"step": 9517 |
|
}, |
|
{ |
|
"epoch": 0.33597241282240753, |
|
"grad_norm": 0.8767693638801575, |
|
"learning_rate": 4.113404868280107e-05, |
|
"loss": 1.5911, |
|
"step": 9548 |
|
}, |
|
{ |
|
"epoch": 0.3370632323445582, |
|
"grad_norm": 0.8673516511917114, |
|
"learning_rate": 4.106980553625457e-05, |
|
"loss": 1.6137, |
|
"step": 9579 |
|
}, |
|
{ |
|
"epoch": 0.3381540518667089, |
|
"grad_norm": 0.9067351818084717, |
|
"learning_rate": 4.100538104413674e-05, |
|
"loss": 1.5993, |
|
"step": 9610 |
|
}, |
|
{ |
|
"epoch": 0.33924487138885956, |
|
"grad_norm": 0.9343692660331726, |
|
"learning_rate": 4.09407759334692e-05, |
|
"loss": 1.6074, |
|
"step": 9641 |
|
}, |
|
{ |
|
"epoch": 0.34033569091101024, |
|
"grad_norm": 0.9002503752708435, |
|
"learning_rate": 4.087599093331186e-05, |
|
"loss": 1.6267, |
|
"step": 9672 |
|
}, |
|
{ |
|
"epoch": 0.3414265104331609, |
|
"grad_norm": 0.9147412776947021, |
|
"learning_rate": 4.081102677475462e-05, |
|
"loss": 1.6106, |
|
"step": 9703 |
|
}, |
|
{ |
|
"epoch": 0.3425173299553116, |
|
"grad_norm": 0.9228988885879517, |
|
"learning_rate": 4.0745884190909194e-05, |
|
"loss": 1.6258, |
|
"step": 9734 |
|
}, |
|
{ |
|
"epoch": 0.34360814947746227, |
|
"grad_norm": 0.8599364757537842, |
|
"learning_rate": 4.0680563916900796e-05, |
|
"loss": 1.6039, |
|
"step": 9765 |
|
}, |
|
{ |
|
"epoch": 0.34469896899961294, |
|
"grad_norm": 0.844178318977356, |
|
"learning_rate": 4.0615066689859815e-05, |
|
"loss": 1.5899, |
|
"step": 9796 |
|
}, |
|
{ |
|
"epoch": 0.3457897885217636, |
|
"grad_norm": 0.9241573810577393, |
|
"learning_rate": 4.0549393248913584e-05, |
|
"loss": 1.5916, |
|
"step": 9827 |
|
}, |
|
{ |
|
"epoch": 0.3468806080439143, |
|
"grad_norm": 0.9131301641464233, |
|
"learning_rate": 4.048354433517794e-05, |
|
"loss": 1.6045, |
|
"step": 9858 |
|
}, |
|
{ |
|
"epoch": 0.347971427566065, |
|
"grad_norm": 0.8763211965560913, |
|
"learning_rate": 4.0417520691748916e-05, |
|
"loss": 1.6088, |
|
"step": 9889 |
|
}, |
|
{ |
|
"epoch": 0.34906224708821565, |
|
"grad_norm": 0.9151899218559265, |
|
"learning_rate": 4.035132306369438e-05, |
|
"loss": 1.5976, |
|
"step": 9920 |
|
}, |
|
{ |
|
"epoch": 0.3501530666103663, |
|
"grad_norm": 0.8976767063140869, |
|
"learning_rate": 4.028495219804555e-05, |
|
"loss": 1.6129, |
|
"step": 9951 |
|
}, |
|
{ |
|
"epoch": 0.351243886132517, |
|
"grad_norm": 0.8878459930419922, |
|
"learning_rate": 4.021840884378864e-05, |
|
"loss": 1.5994, |
|
"step": 9982 |
|
}, |
|
{ |
|
"epoch": 0.3523347056546677, |
|
"grad_norm": 0.8572807312011719, |
|
"learning_rate": 4.015169375185633e-05, |
|
"loss": 1.6002, |
|
"step": 10013 |
|
}, |
|
{ |
|
"epoch": 0.3534255251768183, |
|
"grad_norm": 0.8996985554695129, |
|
"learning_rate": 4.0084807675119396e-05, |
|
"loss": 1.6136, |
|
"step": 10044 |
|
}, |
|
{ |
|
"epoch": 0.354516344698969, |
|
"grad_norm": 0.8947619795799255, |
|
"learning_rate": 4.0017751368378106e-05, |
|
"loss": 1.598, |
|
"step": 10075 |
|
}, |
|
{ |
|
"epoch": 0.35560716422111965, |
|
"grad_norm": 0.9073825478553772, |
|
"learning_rate": 3.995052558835377e-05, |
|
"loss": 1.6164, |
|
"step": 10106 |
|
}, |
|
{ |
|
"epoch": 0.35669798374327033, |
|
"grad_norm": 0.8670364022254944, |
|
"learning_rate": 3.988313109368017e-05, |
|
"loss": 1.6012, |
|
"step": 10137 |
|
}, |
|
{ |
|
"epoch": 0.357788803265421, |
|
"grad_norm": 0.8530169725418091, |
|
"learning_rate": 3.981556864489504e-05, |
|
"loss": 1.5947, |
|
"step": 10168 |
|
}, |
|
{ |
|
"epoch": 0.3588796227875717, |
|
"grad_norm": 0.8809841275215149, |
|
"learning_rate": 3.974783900443142e-05, |
|
"loss": 1.6033, |
|
"step": 10199 |
|
}, |
|
{ |
|
"epoch": 0.35997044230972236, |
|
"grad_norm": 0.8582516312599182, |
|
"learning_rate": 3.9679942936609095e-05, |
|
"loss": 1.6029, |
|
"step": 10230 |
|
}, |
|
{ |
|
"epoch": 0.36106126183187304, |
|
"grad_norm": 0.8762260675430298, |
|
"learning_rate": 3.961188120762596e-05, |
|
"loss": 1.5921, |
|
"step": 10261 |
|
}, |
|
{ |
|
"epoch": 0.3621520813540237, |
|
"grad_norm": 0.8775869011878967, |
|
"learning_rate": 3.954365458554938e-05, |
|
"loss": 1.6101, |
|
"step": 10292 |
|
}, |
|
{ |
|
"epoch": 0.3632429008761744, |
|
"grad_norm": 0.9066540002822876, |
|
"learning_rate": 3.947526384030751e-05, |
|
"loss": 1.5928, |
|
"step": 10323 |
|
}, |
|
{ |
|
"epoch": 0.36433372039832507, |
|
"grad_norm": 0.9105854630470276, |
|
"learning_rate": 3.9406709743680624e-05, |
|
"loss": 1.5973, |
|
"step": 10354 |
|
}, |
|
{ |
|
"epoch": 0.36542453992047574, |
|
"grad_norm": 0.8927069902420044, |
|
"learning_rate": 3.9337993069292366e-05, |
|
"loss": 1.6159, |
|
"step": 10385 |
|
}, |
|
{ |
|
"epoch": 0.3665153594426264, |
|
"grad_norm": 0.9006061553955078, |
|
"learning_rate": 3.926911459260109e-05, |
|
"loss": 1.6111, |
|
"step": 10416 |
|
}, |
|
{ |
|
"epoch": 0.3676061789647771, |
|
"grad_norm": 0.8990784883499146, |
|
"learning_rate": 3.920007509089102e-05, |
|
"loss": 1.5975, |
|
"step": 10447 |
|
}, |
|
{ |
|
"epoch": 0.3686969984869278, |
|
"grad_norm": 0.9229722619056702, |
|
"learning_rate": 3.913087534326357e-05, |
|
"loss": 1.5962, |
|
"step": 10478 |
|
}, |
|
{ |
|
"epoch": 0.36978781800907845, |
|
"grad_norm": 0.9110054969787598, |
|
"learning_rate": 3.9061516130628475e-05, |
|
"loss": 1.6129, |
|
"step": 10509 |
|
}, |
|
{ |
|
"epoch": 0.3708786375312291, |
|
"grad_norm": 0.917635440826416, |
|
"learning_rate": 3.8991998235695025e-05, |
|
"loss": 1.5975, |
|
"step": 10540 |
|
}, |
|
{ |
|
"epoch": 0.3719694570533798, |
|
"grad_norm": 0.8891541957855225, |
|
"learning_rate": 3.8922322442963224e-05, |
|
"loss": 1.5711, |
|
"step": 10571 |
|
}, |
|
{ |
|
"epoch": 0.3730602765755305, |
|
"grad_norm": 0.8646354675292969, |
|
"learning_rate": 3.885248953871491e-05, |
|
"loss": 1.6038, |
|
"step": 10602 |
|
}, |
|
{ |
|
"epoch": 0.3741510960976811, |
|
"grad_norm": 0.8377805948257446, |
|
"learning_rate": 3.8782500311004915e-05, |
|
"loss": 1.6113, |
|
"step": 10633 |
|
}, |
|
{ |
|
"epoch": 0.3752419156198318, |
|
"grad_norm": 0.8636441826820374, |
|
"learning_rate": 3.871235554965218e-05, |
|
"loss": 1.5802, |
|
"step": 10664 |
|
}, |
|
{ |
|
"epoch": 0.37633273514198246, |
|
"grad_norm": 0.8898093700408936, |
|
"learning_rate": 3.864205604623078e-05, |
|
"loss": 1.6054, |
|
"step": 10695 |
|
}, |
|
{ |
|
"epoch": 0.37742355466413313, |
|
"grad_norm": 0.887256383895874, |
|
"learning_rate": 3.857160259406107e-05, |
|
"loss": 1.5933, |
|
"step": 10726 |
|
}, |
|
{ |
|
"epoch": 0.3785143741862838, |
|
"grad_norm": 0.8577413558959961, |
|
"learning_rate": 3.8500995988200674e-05, |
|
"loss": 1.5875, |
|
"step": 10757 |
|
}, |
|
{ |
|
"epoch": 0.3796051937084345, |
|
"grad_norm": 0.8893071413040161, |
|
"learning_rate": 3.843023702543556e-05, |
|
"loss": 1.5857, |
|
"step": 10788 |
|
}, |
|
{ |
|
"epoch": 0.38069601323058516, |
|
"grad_norm": 0.847111165523529, |
|
"learning_rate": 3.8359326504270984e-05, |
|
"loss": 1.5837, |
|
"step": 10819 |
|
}, |
|
{ |
|
"epoch": 0.38178683275273584, |
|
"grad_norm": 0.8827951550483704, |
|
"learning_rate": 3.828826522492255e-05, |
|
"loss": 1.5748, |
|
"step": 10850 |
|
}, |
|
{ |
|
"epoch": 0.3828776522748865, |
|
"grad_norm": 0.8470546007156372, |
|
"learning_rate": 3.821705398930713e-05, |
|
"loss": 1.5995, |
|
"step": 10881 |
|
}, |
|
{ |
|
"epoch": 0.3839684717970372, |
|
"grad_norm": 0.987573504447937, |
|
"learning_rate": 3.814569360103385e-05, |
|
"loss": 1.5926, |
|
"step": 10912 |
|
}, |
|
{ |
|
"epoch": 0.38505929131918787, |
|
"grad_norm": 0.8766013383865356, |
|
"learning_rate": 3.807418486539499e-05, |
|
"loss": 1.581, |
|
"step": 10943 |
|
}, |
|
{ |
|
"epoch": 0.38615011084133855, |
|
"grad_norm": 0.9069483876228333, |
|
"learning_rate": 3.80025285893569e-05, |
|
"loss": 1.5912, |
|
"step": 10974 |
|
}, |
|
{ |
|
"epoch": 0.3872409303634892, |
|
"grad_norm": 0.9085988998413086, |
|
"learning_rate": 3.793072558155093e-05, |
|
"loss": 1.5871, |
|
"step": 11005 |
|
}, |
|
{ |
|
"epoch": 0.3883317498856399, |
|
"grad_norm": 0.9167149066925049, |
|
"learning_rate": 3.785877665226426e-05, |
|
"loss": 1.5902, |
|
"step": 11036 |
|
}, |
|
{ |
|
"epoch": 0.3894225694077906, |
|
"grad_norm": 0.8932046294212341, |
|
"learning_rate": 3.778668261343079e-05, |
|
"loss": 1.6048, |
|
"step": 11067 |
|
}, |
|
{ |
|
"epoch": 0.39051338892994125, |
|
"grad_norm": 0.8533083200454712, |
|
"learning_rate": 3.771444427862192e-05, |
|
"loss": 1.5803, |
|
"step": 11098 |
|
}, |
|
{ |
|
"epoch": 0.39160420845209193, |
|
"grad_norm": 0.8877330422401428, |
|
"learning_rate": 3.7642062463037465e-05, |
|
"loss": 1.5855, |
|
"step": 11129 |
|
}, |
|
{ |
|
"epoch": 0.3926950279742426, |
|
"grad_norm": 0.8700180649757385, |
|
"learning_rate": 3.7569537983496373e-05, |
|
"loss": 1.5843, |
|
"step": 11160 |
|
}, |
|
{ |
|
"epoch": 0.3937858474963933, |
|
"grad_norm": 0.8378989100456238, |
|
"learning_rate": 3.749687165842753e-05, |
|
"loss": 1.5664, |
|
"step": 11191 |
|
}, |
|
{ |
|
"epoch": 0.39487666701854396, |
|
"grad_norm": 0.8885537981987, |
|
"learning_rate": 3.7424064307860536e-05, |
|
"loss": 1.5822, |
|
"step": 11222 |
|
}, |
|
{ |
|
"epoch": 0.3959674865406946, |
|
"grad_norm": 0.8644111752510071, |
|
"learning_rate": 3.735111675341645e-05, |
|
"loss": 1.5893, |
|
"step": 11253 |
|
}, |
|
{ |
|
"epoch": 0.39705830606284526, |
|
"grad_norm": 0.8942341804504395, |
|
"learning_rate": 3.7278029818298524e-05, |
|
"loss": 1.5909, |
|
"step": 11284 |
|
}, |
|
{ |
|
"epoch": 0.39814912558499593, |
|
"grad_norm": 0.858748733997345, |
|
"learning_rate": 3.720480432728287e-05, |
|
"loss": 1.5832, |
|
"step": 11315 |
|
}, |
|
{ |
|
"epoch": 0.3992399451071466, |
|
"grad_norm": 0.8919023275375366, |
|
"learning_rate": 3.71314411067092e-05, |
|
"loss": 1.5804, |
|
"step": 11346 |
|
}, |
|
{ |
|
"epoch": 0.4003307646292973, |
|
"grad_norm": 0.8735581636428833, |
|
"learning_rate": 3.70579409844715e-05, |
|
"loss": 1.5856, |
|
"step": 11377 |
|
}, |
|
{ |
|
"epoch": 0.40142158415144796, |
|
"grad_norm": 0.843586266040802, |
|
"learning_rate": 3.698430479000865e-05, |
|
"loss": 1.5814, |
|
"step": 11408 |
|
}, |
|
{ |
|
"epoch": 0.40251240367359864, |
|
"grad_norm": 0.8684964776039124, |
|
"learning_rate": 3.691053335429509e-05, |
|
"loss": 1.5823, |
|
"step": 11439 |
|
}, |
|
{ |
|
"epoch": 0.4036032231957493, |
|
"grad_norm": 0.8695327639579773, |
|
"learning_rate": 3.683662750983147e-05, |
|
"loss": 1.5817, |
|
"step": 11470 |
|
}, |
|
{ |
|
"epoch": 0.4046940427179, |
|
"grad_norm": 0.9438059329986572, |
|
"learning_rate": 3.676258809063518e-05, |
|
"loss": 1.6003, |
|
"step": 11501 |
|
}, |
|
{ |
|
"epoch": 0.40578486224005067, |
|
"grad_norm": 0.8711549043655396, |
|
"learning_rate": 3.6688415932231004e-05, |
|
"loss": 1.5843, |
|
"step": 11532 |
|
}, |
|
{ |
|
"epoch": 0.40687568176220135, |
|
"grad_norm": 0.9110082387924194, |
|
"learning_rate": 3.661411187164166e-05, |
|
"loss": 1.5942, |
|
"step": 11563 |
|
}, |
|
{ |
|
"epoch": 0.407966501284352, |
|
"grad_norm": 0.8768247961997986, |
|
"learning_rate": 3.65396767473784e-05, |
|
"loss": 1.6002, |
|
"step": 11594 |
|
}, |
|
{ |
|
"epoch": 0.4090573208065027, |
|
"grad_norm": 0.8380584120750427, |
|
"learning_rate": 3.6465111399431465e-05, |
|
"loss": 1.5755, |
|
"step": 11625 |
|
}, |
|
{ |
|
"epoch": 0.4101481403286534, |
|
"grad_norm": 0.8617690801620483, |
|
"learning_rate": 3.6390416669260674e-05, |
|
"loss": 1.5817, |
|
"step": 11656 |
|
}, |
|
{ |
|
"epoch": 0.41123895985080405, |
|
"grad_norm": 0.8809701800346375, |
|
"learning_rate": 3.63155933997859e-05, |
|
"loss": 1.5779, |
|
"step": 11687 |
|
}, |
|
{ |
|
"epoch": 0.41232977937295473, |
|
"grad_norm": 0.8623855113983154, |
|
"learning_rate": 3.624064243537758e-05, |
|
"loss": 1.5749, |
|
"step": 11718 |
|
}, |
|
{ |
|
"epoch": 0.4134205988951054, |
|
"grad_norm": 0.9026786684989929, |
|
"learning_rate": 3.616556462184716e-05, |
|
"loss": 1.5676, |
|
"step": 11749 |
|
}, |
|
{ |
|
"epoch": 0.4145114184172561, |
|
"grad_norm": 0.8784968852996826, |
|
"learning_rate": 3.609036080643755e-05, |
|
"loss": 1.6103, |
|
"step": 11780 |
|
}, |
|
{ |
|
"epoch": 0.41560223793940676, |
|
"grad_norm": 0.8578303456306458, |
|
"learning_rate": 3.60150318378136e-05, |
|
"loss": 1.6004, |
|
"step": 11811 |
|
}, |
|
{ |
|
"epoch": 0.4166930574615574, |
|
"grad_norm": 0.8620411157608032, |
|
"learning_rate": 3.5939578566052465e-05, |
|
"loss": 1.5821, |
|
"step": 11842 |
|
}, |
|
{ |
|
"epoch": 0.41778387698370806, |
|
"grad_norm": 0.8622260093688965, |
|
"learning_rate": 3.586400184263408e-05, |
|
"loss": 1.5735, |
|
"step": 11873 |
|
}, |
|
{ |
|
"epoch": 0.41887469650585873, |
|
"grad_norm": 0.871585488319397, |
|
"learning_rate": 3.578830252043148e-05, |
|
"loss": 1.5705, |
|
"step": 11904 |
|
}, |
|
{ |
|
"epoch": 0.4199655160280094, |
|
"grad_norm": 0.8741966485977173, |
|
"learning_rate": 3.571248145370125e-05, |
|
"loss": 1.5898, |
|
"step": 11935 |
|
}, |
|
{ |
|
"epoch": 0.4210563355501601, |
|
"grad_norm": 0.8671961426734924, |
|
"learning_rate": 3.5636539498073794e-05, |
|
"loss": 1.5662, |
|
"step": 11966 |
|
}, |
|
{ |
|
"epoch": 0.42214715507231076, |
|
"grad_norm": 0.8873665928840637, |
|
"learning_rate": 3.556047751054378e-05, |
|
"loss": 1.5856, |
|
"step": 11997 |
|
}, |
|
{ |
|
"epoch": 0.42323797459446144, |
|
"grad_norm": 0.8614113926887512, |
|
"learning_rate": 3.548429634946039e-05, |
|
"loss": 1.5609, |
|
"step": 12028 |
|
}, |
|
{ |
|
"epoch": 0.4243287941166121, |
|
"grad_norm": 0.8387187719345093, |
|
"learning_rate": 3.540799687451768e-05, |
|
"loss": 1.5728, |
|
"step": 12059 |
|
}, |
|
{ |
|
"epoch": 0.4254196136387628, |
|
"grad_norm": 0.8875289559364319, |
|
"learning_rate": 3.533157994674485e-05, |
|
"loss": 1.5749, |
|
"step": 12090 |
|
}, |
|
{ |
|
"epoch": 0.42651043316091347, |
|
"grad_norm": 0.8648973107337952, |
|
"learning_rate": 3.5255046428496546e-05, |
|
"loss": 1.5695, |
|
"step": 12121 |
|
}, |
|
{ |
|
"epoch": 0.42760125268306415, |
|
"grad_norm": 0.8634358048439026, |
|
"learning_rate": 3.517839718344311e-05, |
|
"loss": 1.5685, |
|
"step": 12152 |
|
}, |
|
{ |
|
"epoch": 0.4286920722052148, |
|
"grad_norm": 0.8736028075218201, |
|
"learning_rate": 3.510163307656086e-05, |
|
"loss": 1.5721, |
|
"step": 12183 |
|
}, |
|
{ |
|
"epoch": 0.4297828917273655, |
|
"grad_norm": 0.8463747501373291, |
|
"learning_rate": 3.5024754974122324e-05, |
|
"loss": 1.5759, |
|
"step": 12214 |
|
}, |
|
{ |
|
"epoch": 0.4308737112495162, |
|
"grad_norm": 0.8596286177635193, |
|
"learning_rate": 3.494776374368643e-05, |
|
"loss": 1.5906, |
|
"step": 12245 |
|
}, |
|
{ |
|
"epoch": 0.43196453077166685, |
|
"grad_norm": 0.8681018948554993, |
|
"learning_rate": 3.4870660254088724e-05, |
|
"loss": 1.5579, |
|
"step": 12276 |
|
}, |
|
{ |
|
"epoch": 0.43305535029381753, |
|
"grad_norm": 0.9080244302749634, |
|
"learning_rate": 3.479344537543164e-05, |
|
"loss": 1.5618, |
|
"step": 12307 |
|
}, |
|
{ |
|
"epoch": 0.4341461698159682, |
|
"grad_norm": 0.8841635584831238, |
|
"learning_rate": 3.4716119979074565e-05, |
|
"loss": 1.5873, |
|
"step": 12338 |
|
}, |
|
{ |
|
"epoch": 0.4352369893381189, |
|
"grad_norm": 0.8563128709793091, |
|
"learning_rate": 3.463868493762412e-05, |
|
"loss": 1.5673, |
|
"step": 12369 |
|
}, |
|
{ |
|
"epoch": 0.43632780886026956, |
|
"grad_norm": 0.8570665121078491, |
|
"learning_rate": 3.456114112492418e-05, |
|
"loss": 1.5634, |
|
"step": 12400 |
|
}, |
|
{ |
|
"epoch": 0.43741862838242024, |
|
"grad_norm": 0.8617071509361267, |
|
"learning_rate": 3.4483489416046164e-05, |
|
"loss": 1.5555, |
|
"step": 12431 |
|
}, |
|
{ |
|
"epoch": 0.43850944790457086, |
|
"grad_norm": 0.8495959639549255, |
|
"learning_rate": 3.440573068727905e-05, |
|
"loss": 1.5792, |
|
"step": 12462 |
|
}, |
|
{ |
|
"epoch": 0.43960026742672154, |
|
"grad_norm": 0.8845837712287903, |
|
"learning_rate": 3.4327865816119495e-05, |
|
"loss": 1.5587, |
|
"step": 12493 |
|
}, |
|
{ |
|
"epoch": 0.4406910869488722, |
|
"grad_norm": 0.8790666460990906, |
|
"learning_rate": 3.4249895681262025e-05, |
|
"loss": 1.5765, |
|
"step": 12524 |
|
}, |
|
{ |
|
"epoch": 0.4417819064710229, |
|
"grad_norm": 0.8437421917915344, |
|
"learning_rate": 3.417182116258899e-05, |
|
"loss": 1.557, |
|
"step": 12555 |
|
}, |
|
{ |
|
"epoch": 0.44287272599317357, |
|
"grad_norm": 0.868320882320404, |
|
"learning_rate": 3.409364314116074e-05, |
|
"loss": 1.5656, |
|
"step": 12586 |
|
}, |
|
{ |
|
"epoch": 0.44396354551532424, |
|
"grad_norm": 0.8495018482208252, |
|
"learning_rate": 3.401536249920559e-05, |
|
"loss": 1.5709, |
|
"step": 12617 |
|
}, |
|
{ |
|
"epoch": 0.4450543650374749, |
|
"grad_norm": 0.865176796913147, |
|
"learning_rate": 3.393698012010998e-05, |
|
"loss": 1.5564, |
|
"step": 12648 |
|
}, |
|
{ |
|
"epoch": 0.4461451845596256, |
|
"grad_norm": 0.8581182956695557, |
|
"learning_rate": 3.385849688840839e-05, |
|
"loss": 1.5653, |
|
"step": 12679 |
|
}, |
|
{ |
|
"epoch": 0.4472360040817763, |
|
"grad_norm": 0.8474507331848145, |
|
"learning_rate": 3.3779913689773414e-05, |
|
"loss": 1.5645, |
|
"step": 12710 |
|
}, |
|
{ |
|
"epoch": 0.44832682360392695, |
|
"grad_norm": 0.8472003936767578, |
|
"learning_rate": 3.370123141100578e-05, |
|
"loss": 1.5511, |
|
"step": 12741 |
|
}, |
|
{ |
|
"epoch": 0.4494176431260776, |
|
"grad_norm": 0.852480947971344, |
|
"learning_rate": 3.3622450940024305e-05, |
|
"loss": 1.5502, |
|
"step": 12772 |
|
}, |
|
{ |
|
"epoch": 0.4505084626482283, |
|
"grad_norm": 0.8494676947593689, |
|
"learning_rate": 3.35435731658559e-05, |
|
"loss": 1.5574, |
|
"step": 12803 |
|
}, |
|
{ |
|
"epoch": 0.451599282170379, |
|
"grad_norm": 0.8634653091430664, |
|
"learning_rate": 3.346459897862552e-05, |
|
"loss": 1.5666, |
|
"step": 12834 |
|
}, |
|
{ |
|
"epoch": 0.45269010169252966, |
|
"grad_norm": 0.8515731692314148, |
|
"learning_rate": 3.338552926954613e-05, |
|
"loss": 1.5723, |
|
"step": 12865 |
|
}, |
|
{ |
|
"epoch": 0.45378092121468033, |
|
"grad_norm": 0.858883261680603, |
|
"learning_rate": 3.330636493090868e-05, |
|
"loss": 1.5869, |
|
"step": 12896 |
|
}, |
|
{ |
|
"epoch": 0.454871740736831, |
|
"grad_norm": 0.87376868724823, |
|
"learning_rate": 3.322710685607193e-05, |
|
"loss": 1.5599, |
|
"step": 12927 |
|
}, |
|
{ |
|
"epoch": 0.4559625602589817, |
|
"grad_norm": 0.8701228499412537, |
|
"learning_rate": 3.314775593945251e-05, |
|
"loss": 1.5644, |
|
"step": 12958 |
|
}, |
|
{ |
|
"epoch": 0.45705337978113236, |
|
"grad_norm": 0.8871962428092957, |
|
"learning_rate": 3.3068313076514714e-05, |
|
"loss": 1.5628, |
|
"step": 12989 |
|
}, |
|
{ |
|
"epoch": 0.45814419930328304, |
|
"grad_norm": 0.8973777294158936, |
|
"learning_rate": 3.298877916376047e-05, |
|
"loss": 1.5506, |
|
"step": 13020 |
|
}, |
|
{ |
|
"epoch": 0.4592350188254337, |
|
"grad_norm": 0.8526966571807861, |
|
"learning_rate": 3.290915509871915e-05, |
|
"loss": 1.5692, |
|
"step": 13051 |
|
}, |
|
{ |
|
"epoch": 0.46032583834758434, |
|
"grad_norm": 0.853767454624176, |
|
"learning_rate": 3.282944177993753e-05, |
|
"loss": 1.5603, |
|
"step": 13082 |
|
}, |
|
{ |
|
"epoch": 0.461416657869735, |
|
"grad_norm": 0.868984580039978, |
|
"learning_rate": 3.274964010696957e-05, |
|
"loss": 1.5692, |
|
"step": 13113 |
|
}, |
|
{ |
|
"epoch": 0.4625074773918857, |
|
"grad_norm": 0.8377254605293274, |
|
"learning_rate": 3.266975098036629e-05, |
|
"loss": 1.5627, |
|
"step": 13144 |
|
}, |
|
{ |
|
"epoch": 0.46359829691403637, |
|
"grad_norm": 0.8568862676620483, |
|
"learning_rate": 3.258977530166562e-05, |
|
"loss": 1.5593, |
|
"step": 13175 |
|
}, |
|
{ |
|
"epoch": 0.46468911643618704, |
|
"grad_norm": 0.8657397031784058, |
|
"learning_rate": 3.250971397338227e-05, |
|
"loss": 1.5449, |
|
"step": 13206 |
|
}, |
|
{ |
|
"epoch": 0.4657799359583377, |
|
"grad_norm": 0.8855077028274536, |
|
"learning_rate": 3.2429567898997404e-05, |
|
"loss": 1.57, |
|
"step": 13237 |
|
}, |
|
{ |
|
"epoch": 0.4668707554804884, |
|
"grad_norm": 0.8459128737449646, |
|
"learning_rate": 3.234933798294859e-05, |
|
"loss": 1.5474, |
|
"step": 13268 |
|
}, |
|
{ |
|
"epoch": 0.4679615750026391, |
|
"grad_norm": 0.8592433929443359, |
|
"learning_rate": 3.2269025130619535e-05, |
|
"loss": 1.5635, |
|
"step": 13299 |
|
}, |
|
{ |
|
"epoch": 0.46905239452478975, |
|
"grad_norm": 0.8622168898582458, |
|
"learning_rate": 3.218863024832985e-05, |
|
"loss": 1.5597, |
|
"step": 13330 |
|
}, |
|
{ |
|
"epoch": 0.4701432140469404, |
|
"grad_norm": 0.8521956205368042, |
|
"learning_rate": 3.2108154243324864e-05, |
|
"loss": 1.558, |
|
"step": 13361 |
|
}, |
|
{ |
|
"epoch": 0.4712340335690911, |
|
"grad_norm": 0.8465380072593689, |
|
"learning_rate": 3.2027598023765345e-05, |
|
"loss": 1.5481, |
|
"step": 13392 |
|
}, |
|
{ |
|
"epoch": 0.4723248530912418, |
|
"grad_norm": 0.8600775599479675, |
|
"learning_rate": 3.194696249871729e-05, |
|
"loss": 1.5646, |
|
"step": 13423 |
|
}, |
|
{ |
|
"epoch": 0.47341567261339246, |
|
"grad_norm": 0.8471152186393738, |
|
"learning_rate": 3.186624857814164e-05, |
|
"loss": 1.5603, |
|
"step": 13454 |
|
}, |
|
{ |
|
"epoch": 0.47450649213554313, |
|
"grad_norm": 0.8752865195274353, |
|
"learning_rate": 3.178545717288401e-05, |
|
"loss": 1.5475, |
|
"step": 13485 |
|
}, |
|
{ |
|
"epoch": 0.4755973116576938, |
|
"grad_norm": 0.883955717086792, |
|
"learning_rate": 3.170458919466444e-05, |
|
"loss": 1.5522, |
|
"step": 13516 |
|
}, |
|
{ |
|
"epoch": 0.4766881311798445, |
|
"grad_norm": 0.864668607711792, |
|
"learning_rate": 3.1623645556067063e-05, |
|
"loss": 1.5532, |
|
"step": 13547 |
|
}, |
|
{ |
|
"epoch": 0.47777895070199516, |
|
"grad_norm": 0.823254406452179, |
|
"learning_rate": 3.154262717052985e-05, |
|
"loss": 1.5464, |
|
"step": 13578 |
|
}, |
|
{ |
|
"epoch": 0.47886977022414584, |
|
"grad_norm": 0.8613137006759644, |
|
"learning_rate": 3.146153495233426e-05, |
|
"loss": 1.5492, |
|
"step": 13609 |
|
}, |
|
{ |
|
"epoch": 0.4799605897462965, |
|
"grad_norm": 0.8392694592475891, |
|
"learning_rate": 3.1380369816594944e-05, |
|
"loss": 1.5496, |
|
"step": 13640 |
|
}, |
|
{ |
|
"epoch": 0.48105140926844714, |
|
"grad_norm": 0.8625919222831726, |
|
"learning_rate": 3.129913267924946e-05, |
|
"loss": 1.5505, |
|
"step": 13671 |
|
}, |
|
{ |
|
"epoch": 0.4821422287905978, |
|
"grad_norm": 0.8749135732650757, |
|
"learning_rate": 3.121782445704782e-05, |
|
"loss": 1.5475, |
|
"step": 13702 |
|
}, |
|
{ |
|
"epoch": 0.4832330483127485, |
|
"grad_norm": 0.8929088115692139, |
|
"learning_rate": 3.11364460675423e-05, |
|
"loss": 1.5703, |
|
"step": 13733 |
|
}, |
|
{ |
|
"epoch": 0.48432386783489917, |
|
"grad_norm": 0.8600450158119202, |
|
"learning_rate": 3.1054998429076934e-05, |
|
"loss": 1.5423, |
|
"step": 13764 |
|
}, |
|
{ |
|
"epoch": 0.48541468735704985, |
|
"grad_norm": 0.8586206436157227, |
|
"learning_rate": 3.097348246077728e-05, |
|
"loss": 1.577, |
|
"step": 13795 |
|
}, |
|
{ |
|
"epoch": 0.4865055068792005, |
|
"grad_norm": 0.8514885306358337, |
|
"learning_rate": 3.0891899082539924e-05, |
|
"loss": 1.5704, |
|
"step": 13826 |
|
}, |
|
{ |
|
"epoch": 0.4875963264013512, |
|
"grad_norm": 0.8599662184715271, |
|
"learning_rate": 3.0810249215022233e-05, |
|
"loss": 1.547, |
|
"step": 13857 |
|
}, |
|
{ |
|
"epoch": 0.4886871459235019, |
|
"grad_norm": 0.842303454875946, |
|
"learning_rate": 3.0728533779631865e-05, |
|
"loss": 1.5256, |
|
"step": 13888 |
|
}, |
|
{ |
|
"epoch": 0.48977796544565255, |
|
"grad_norm": 0.884244978427887, |
|
"learning_rate": 3.064675369851637e-05, |
|
"loss": 1.558, |
|
"step": 13919 |
|
}, |
|
{ |
|
"epoch": 0.49086878496780323, |
|
"grad_norm": 0.8688809275627136, |
|
"learning_rate": 3.056490989455289e-05, |
|
"loss": 1.5322, |
|
"step": 13950 |
|
}, |
|
{ |
|
"epoch": 0.4919596044899539, |
|
"grad_norm": 0.8296154141426086, |
|
"learning_rate": 3.0483003291337596e-05, |
|
"loss": 1.5606, |
|
"step": 13981 |
|
}, |
|
{ |
|
"epoch": 0.4930504240121046, |
|
"grad_norm": 0.8469901084899902, |
|
"learning_rate": 3.040103481317539e-05, |
|
"loss": 1.5575, |
|
"step": 14012 |
|
}, |
|
{ |
|
"epoch": 0.49414124353425526, |
|
"grad_norm": 0.8387740254402161, |
|
"learning_rate": 3.03190053850694e-05, |
|
"loss": 1.5617, |
|
"step": 14043 |
|
}, |
|
{ |
|
"epoch": 0.49523206305640594, |
|
"grad_norm": 0.8529262542724609, |
|
"learning_rate": 3.0236915932710573e-05, |
|
"loss": 1.5594, |
|
"step": 14074 |
|
}, |
|
{ |
|
"epoch": 0.4963228825785566, |
|
"grad_norm": 0.904517650604248, |
|
"learning_rate": 3.0154767382467232e-05, |
|
"loss": 1.558, |
|
"step": 14105 |
|
}, |
|
{ |
|
"epoch": 0.4974137021007073, |
|
"grad_norm": 0.8514305949211121, |
|
"learning_rate": 3.0072560661374582e-05, |
|
"loss": 1.5582, |
|
"step": 14136 |
|
}, |
|
{ |
|
"epoch": 0.49850452162285797, |
|
"grad_norm": 0.8396731615066528, |
|
"learning_rate": 2.999029669712431e-05, |
|
"loss": 1.5374, |
|
"step": 14167 |
|
}, |
|
{ |
|
"epoch": 0.49959534114500864, |
|
"grad_norm": 0.8450112342834473, |
|
"learning_rate": 2.990797641805408e-05, |
|
"loss": 1.5659, |
|
"step": 14198 |
|
}, |
|
{ |
|
"epoch": 0.5006861606671593, |
|
"grad_norm": 0.8592528104782104, |
|
"learning_rate": 2.982560075313704e-05, |
|
"loss": 1.5605, |
|
"step": 14229 |
|
}, |
|
{ |
|
"epoch": 0.5017769801893099, |
|
"grad_norm": 0.8634563684463501, |
|
"learning_rate": 2.9743170631971368e-05, |
|
"loss": 1.5529, |
|
"step": 14260 |
|
}, |
|
{ |
|
"epoch": 0.5028677997114607, |
|
"grad_norm": 0.8532223105430603, |
|
"learning_rate": 2.9660686984769792e-05, |
|
"loss": 1.5534, |
|
"step": 14291 |
|
}, |
|
{ |
|
"epoch": 0.5039586192336113, |
|
"grad_norm": 0.8415837287902832, |
|
"learning_rate": 2.9578150742349047e-05, |
|
"loss": 1.5379, |
|
"step": 14322 |
|
}, |
|
{ |
|
"epoch": 0.505049438755762, |
|
"grad_norm": 0.8862072825431824, |
|
"learning_rate": 2.949556283611942e-05, |
|
"loss": 1.55, |
|
"step": 14353 |
|
}, |
|
{ |
|
"epoch": 0.5061402582779126, |
|
"grad_norm": 0.8563070297241211, |
|
"learning_rate": 2.9412924198074206e-05, |
|
"loss": 1.5492, |
|
"step": 14384 |
|
}, |
|
{ |
|
"epoch": 0.5072310778000634, |
|
"grad_norm": 0.8677307963371277, |
|
"learning_rate": 2.9330235760779208e-05, |
|
"loss": 1.557, |
|
"step": 14415 |
|
}, |
|
{ |
|
"epoch": 0.508321897322214, |
|
"grad_norm": 0.8541677594184875, |
|
"learning_rate": 2.9247498457362188e-05, |
|
"loss": 1.5328, |
|
"step": 14446 |
|
}, |
|
{ |
|
"epoch": 0.5094127168443647, |
|
"grad_norm": 0.8333261013031006, |
|
"learning_rate": 2.9164713221502373e-05, |
|
"loss": 1.551, |
|
"step": 14477 |
|
}, |
|
{ |
|
"epoch": 0.5105035363665154, |
|
"grad_norm": 0.8497917056083679, |
|
"learning_rate": 2.9081880987419912e-05, |
|
"loss": 1.5471, |
|
"step": 14508 |
|
}, |
|
{ |
|
"epoch": 0.5115943558886661, |
|
"grad_norm": 0.8271819353103638, |
|
"learning_rate": 2.8999002689865296e-05, |
|
"loss": 1.5568, |
|
"step": 14539 |
|
}, |
|
{ |
|
"epoch": 0.5126851754108167, |
|
"grad_norm": 0.8571930527687073, |
|
"learning_rate": 2.8916079264108852e-05, |
|
"loss": 1.5604, |
|
"step": 14570 |
|
}, |
|
{ |
|
"epoch": 0.5137759949329673, |
|
"grad_norm": 0.8891891241073608, |
|
"learning_rate": 2.883311164593017e-05, |
|
"loss": 1.5324, |
|
"step": 14601 |
|
}, |
|
{ |
|
"epoch": 0.5148668144551181, |
|
"grad_norm": 0.8179088234901428, |
|
"learning_rate": 2.875010077160754e-05, |
|
"loss": 1.55, |
|
"step": 14632 |
|
}, |
|
{ |
|
"epoch": 0.5159576339772687, |
|
"grad_norm": 0.8480364680290222, |
|
"learning_rate": 2.866704757790741e-05, |
|
"loss": 1.5279, |
|
"step": 14663 |
|
}, |
|
{ |
|
"epoch": 0.5170484534994194, |
|
"grad_norm": 0.8580318093299866, |
|
"learning_rate": 2.858395300207376e-05, |
|
"loss": 1.5502, |
|
"step": 14694 |
|
}, |
|
{ |
|
"epoch": 0.51813927302157, |
|
"grad_norm": 0.8466590046882629, |
|
"learning_rate": 2.8500817981817607e-05, |
|
"loss": 1.5418, |
|
"step": 14725 |
|
}, |
|
{ |
|
"epoch": 0.5192300925437208, |
|
"grad_norm": 0.8700029253959656, |
|
"learning_rate": 2.8417643455306336e-05, |
|
"loss": 1.5429, |
|
"step": 14756 |
|
}, |
|
{ |
|
"epoch": 0.5203209120658714, |
|
"grad_norm": 0.8434004187583923, |
|
"learning_rate": 2.8334430361153185e-05, |
|
"loss": 1.5313, |
|
"step": 14787 |
|
}, |
|
{ |
|
"epoch": 0.5214117315880221, |
|
"grad_norm": 0.8457748889923096, |
|
"learning_rate": 2.8251179638406612e-05, |
|
"loss": 1.5397, |
|
"step": 14818 |
|
}, |
|
{ |
|
"epoch": 0.5225025511101727, |
|
"grad_norm": 0.8471390008926392, |
|
"learning_rate": 2.8167892226539704e-05, |
|
"loss": 1.5458, |
|
"step": 14849 |
|
}, |
|
{ |
|
"epoch": 0.5235933706323235, |
|
"grad_norm": 0.8333294987678528, |
|
"learning_rate": 2.8084569065439588e-05, |
|
"loss": 1.5382, |
|
"step": 14880 |
|
}, |
|
{ |
|
"epoch": 0.5246841901544741, |
|
"grad_norm": 0.8390504717826843, |
|
"learning_rate": 2.8001211095396807e-05, |
|
"loss": 1.5209, |
|
"step": 14911 |
|
}, |
|
{ |
|
"epoch": 0.5257750096766248, |
|
"grad_norm": 0.8801223039627075, |
|
"learning_rate": 2.791781925709473e-05, |
|
"loss": 1.5539, |
|
"step": 14942 |
|
}, |
|
{ |
|
"epoch": 0.5268658291987754, |
|
"grad_norm": 0.8821811079978943, |
|
"learning_rate": 2.7834394491598908e-05, |
|
"loss": 1.5288, |
|
"step": 14973 |
|
}, |
|
{ |
|
"epoch": 0.5279566487209262, |
|
"grad_norm": 0.8448256254196167, |
|
"learning_rate": 2.7750937740346485e-05, |
|
"loss": 1.5434, |
|
"step": 15004 |
|
}, |
|
{ |
|
"epoch": 0.5290474682430768, |
|
"grad_norm": 0.8483582735061646, |
|
"learning_rate": 2.7667449945135564e-05, |
|
"loss": 1.5408, |
|
"step": 15035 |
|
}, |
|
{ |
|
"epoch": 0.5301382877652275, |
|
"grad_norm": 0.8402996063232422, |
|
"learning_rate": 2.7583932048114557e-05, |
|
"loss": 1.5457, |
|
"step": 15066 |
|
}, |
|
{ |
|
"epoch": 0.5312291072873782, |
|
"grad_norm": 0.868419349193573, |
|
"learning_rate": 2.7500384991771587e-05, |
|
"loss": 1.5264, |
|
"step": 15097 |
|
}, |
|
{ |
|
"epoch": 0.5323199268095289, |
|
"grad_norm": 0.8620421290397644, |
|
"learning_rate": 2.7416809718923825e-05, |
|
"loss": 1.5486, |
|
"step": 15128 |
|
}, |
|
{ |
|
"epoch": 0.5334107463316795, |
|
"grad_norm": 0.843984067440033, |
|
"learning_rate": 2.7333207172706864e-05, |
|
"loss": 1.5385, |
|
"step": 15159 |
|
}, |
|
{ |
|
"epoch": 0.5345015658538301, |
|
"grad_norm": 0.8699086308479309, |
|
"learning_rate": 2.7249578296564088e-05, |
|
"loss": 1.5473, |
|
"step": 15190 |
|
}, |
|
{ |
|
"epoch": 0.5355923853759809, |
|
"grad_norm": 0.8395929932594299, |
|
"learning_rate": 2.7165924034235973e-05, |
|
"loss": 1.5436, |
|
"step": 15221 |
|
}, |
|
{ |
|
"epoch": 0.5366832048981315, |
|
"grad_norm": 0.8811730146408081, |
|
"learning_rate": 2.708224532974953e-05, |
|
"loss": 1.5402, |
|
"step": 15252 |
|
}, |
|
{ |
|
"epoch": 0.5377740244202822, |
|
"grad_norm": 0.8491471409797668, |
|
"learning_rate": 2.6998543127407538e-05, |
|
"loss": 1.5451, |
|
"step": 15283 |
|
}, |
|
{ |
|
"epoch": 0.5388648439424328, |
|
"grad_norm": 0.8612546920776367, |
|
"learning_rate": 2.6914818371777988e-05, |
|
"loss": 1.5549, |
|
"step": 15314 |
|
}, |
|
{ |
|
"epoch": 0.5399556634645836, |
|
"grad_norm": 0.8519375920295715, |
|
"learning_rate": 2.6831072007683373e-05, |
|
"loss": 1.5347, |
|
"step": 15345 |
|
}, |
|
{ |
|
"epoch": 0.5410464829867342, |
|
"grad_norm": 0.8696710467338562, |
|
"learning_rate": 2.6747304980190018e-05, |
|
"loss": 1.5324, |
|
"step": 15376 |
|
}, |
|
{ |
|
"epoch": 0.5421373025088849, |
|
"grad_norm": 0.8403492569923401, |
|
"learning_rate": 2.6663518234597453e-05, |
|
"loss": 1.5364, |
|
"step": 15407 |
|
}, |
|
{ |
|
"epoch": 0.5432281220310355, |
|
"grad_norm": 0.8714444041252136, |
|
"learning_rate": 2.6579712716427696e-05, |
|
"loss": 1.5495, |
|
"step": 15438 |
|
}, |
|
{ |
|
"epoch": 0.5443189415531863, |
|
"grad_norm": 0.8570107817649841, |
|
"learning_rate": 2.6495889371414652e-05, |
|
"loss": 1.5301, |
|
"step": 15469 |
|
}, |
|
{ |
|
"epoch": 0.5454097610753369, |
|
"grad_norm": 0.8590418100357056, |
|
"learning_rate": 2.6412049145493367e-05, |
|
"loss": 1.5222, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.5465005805974876, |
|
"grad_norm": 0.8773406147956848, |
|
"learning_rate": 2.632819298478939e-05, |
|
"loss": 1.5338, |
|
"step": 15531 |
|
}, |
|
{ |
|
"epoch": 0.5475914001196382, |
|
"grad_norm": 1.2176588773727417, |
|
"learning_rate": 2.6244321835608105e-05, |
|
"loss": 1.526, |
|
"step": 15562 |
|
}, |
|
{ |
|
"epoch": 0.548682219641789, |
|
"grad_norm": 0.846109926700592, |
|
"learning_rate": 2.6160436644424024e-05, |
|
"loss": 1.5415, |
|
"step": 15593 |
|
}, |
|
{ |
|
"epoch": 0.5497730391639396, |
|
"grad_norm": 0.831891655921936, |
|
"learning_rate": 2.6076538357870133e-05, |
|
"loss": 1.526, |
|
"step": 15624 |
|
}, |
|
{ |
|
"epoch": 0.5508638586860903, |
|
"grad_norm": 0.8248304128646851, |
|
"learning_rate": 2.5992627922727196e-05, |
|
"loss": 1.5244, |
|
"step": 15655 |
|
}, |
|
{ |
|
"epoch": 0.551954678208241, |
|
"grad_norm": 0.841568648815155, |
|
"learning_rate": 2.5908706285913066e-05, |
|
"loss": 1.54, |
|
"step": 15686 |
|
}, |
|
{ |
|
"epoch": 0.5530454977303917, |
|
"grad_norm": 0.8265954256057739, |
|
"learning_rate": 2.5824774394472008e-05, |
|
"loss": 1.5389, |
|
"step": 15717 |
|
}, |
|
{ |
|
"epoch": 0.5541363172525423, |
|
"grad_norm": 0.8356689810752869, |
|
"learning_rate": 2.5740833195563996e-05, |
|
"loss": 1.5329, |
|
"step": 15748 |
|
}, |
|
{ |
|
"epoch": 0.555227136774693, |
|
"grad_norm": 0.8584093451499939, |
|
"learning_rate": 2.5656883636454067e-05, |
|
"loss": 1.5282, |
|
"step": 15779 |
|
}, |
|
{ |
|
"epoch": 0.5563179562968437, |
|
"grad_norm": 0.8550272583961487, |
|
"learning_rate": 2.557292666450159e-05, |
|
"loss": 1.5379, |
|
"step": 15810 |
|
}, |
|
{ |
|
"epoch": 0.5574087758189943, |
|
"grad_norm": 0.8530085682868958, |
|
"learning_rate": 2.5488963227149566e-05, |
|
"loss": 1.5489, |
|
"step": 15841 |
|
}, |
|
{ |
|
"epoch": 0.558499595341145, |
|
"grad_norm": 0.8257724046707153, |
|
"learning_rate": 2.5404994271913983e-05, |
|
"loss": 1.5305, |
|
"step": 15872 |
|
}, |
|
{ |
|
"epoch": 0.5595904148632956, |
|
"grad_norm": 0.8309580683708191, |
|
"learning_rate": 2.5321020746373085e-05, |
|
"loss": 1.531, |
|
"step": 15903 |
|
}, |
|
{ |
|
"epoch": 0.5606812343854464, |
|
"grad_norm": 0.8664075136184692, |
|
"learning_rate": 2.52370435981567e-05, |
|
"loss": 1.5372, |
|
"step": 15934 |
|
}, |
|
{ |
|
"epoch": 0.561772053907597, |
|
"grad_norm": 0.8499038219451904, |
|
"learning_rate": 2.5153063774935533e-05, |
|
"loss": 1.5258, |
|
"step": 15965 |
|
}, |
|
{ |
|
"epoch": 0.5628628734297477, |
|
"grad_norm": 0.8438250422477722, |
|
"learning_rate": 2.506908222441045e-05, |
|
"loss": 1.5309, |
|
"step": 15996 |
|
}, |
|
{ |
|
"epoch": 0.5639536929518983, |
|
"grad_norm": 0.8702678084373474, |
|
"learning_rate": 2.498509989430187e-05, |
|
"loss": 1.5376, |
|
"step": 16027 |
|
}, |
|
{ |
|
"epoch": 0.5650445124740491, |
|
"grad_norm": 0.833772599697113, |
|
"learning_rate": 2.4901117732338958e-05, |
|
"loss": 1.5369, |
|
"step": 16058 |
|
}, |
|
{ |
|
"epoch": 0.5661353319961997, |
|
"grad_norm": 0.8213962912559509, |
|
"learning_rate": 2.481713668624899e-05, |
|
"loss": 1.5243, |
|
"step": 16089 |
|
}, |
|
{ |
|
"epoch": 0.5672261515183504, |
|
"grad_norm": 0.8809503316879272, |
|
"learning_rate": 2.4733157703746663e-05, |
|
"loss": 1.5337, |
|
"step": 16120 |
|
}, |
|
{ |
|
"epoch": 0.568316971040501, |
|
"grad_norm": 0.8623045086860657, |
|
"learning_rate": 2.4649181732523392e-05, |
|
"loss": 1.5284, |
|
"step": 16151 |
|
}, |
|
{ |
|
"epoch": 0.5694077905626518, |
|
"grad_norm": 0.8352603316307068, |
|
"learning_rate": 2.4565209720236582e-05, |
|
"loss": 1.5434, |
|
"step": 16182 |
|
}, |
|
{ |
|
"epoch": 0.5704986100848024, |
|
"grad_norm": 0.8410354852676392, |
|
"learning_rate": 2.4481242614498975e-05, |
|
"loss": 1.543, |
|
"step": 16213 |
|
}, |
|
{ |
|
"epoch": 0.5715894296069531, |
|
"grad_norm": 0.8443413376808167, |
|
"learning_rate": 2.439728136286796e-05, |
|
"loss": 1.5398, |
|
"step": 16244 |
|
}, |
|
{ |
|
"epoch": 0.5726802491291038, |
|
"grad_norm": 0.8590528964996338, |
|
"learning_rate": 2.4313326912834852e-05, |
|
"loss": 1.5302, |
|
"step": 16275 |
|
}, |
|
{ |
|
"epoch": 0.5737710686512545, |
|
"grad_norm": 0.8705992102622986, |
|
"learning_rate": 2.4229380211814206e-05, |
|
"loss": 1.5314, |
|
"step": 16306 |
|
}, |
|
{ |
|
"epoch": 0.5748618881734051, |
|
"grad_norm": 0.8387395143508911, |
|
"learning_rate": 2.4145442207133124e-05, |
|
"loss": 1.5393, |
|
"step": 16337 |
|
}, |
|
{ |
|
"epoch": 0.5759527076955558, |
|
"grad_norm": 0.8432089686393738, |
|
"learning_rate": 2.406151384602059e-05, |
|
"loss": 1.5231, |
|
"step": 16368 |
|
}, |
|
{ |
|
"epoch": 0.5770435272177065, |
|
"grad_norm": 0.8307583332061768, |
|
"learning_rate": 2.3977596075596747e-05, |
|
"loss": 1.5354, |
|
"step": 16399 |
|
}, |
|
{ |
|
"epoch": 0.5781343467398571, |
|
"grad_norm": 0.8590096235275269, |
|
"learning_rate": 2.3893689842862223e-05, |
|
"loss": 1.531, |
|
"step": 16430 |
|
}, |
|
{ |
|
"epoch": 0.5792251662620078, |
|
"grad_norm": 0.8796259164810181, |
|
"learning_rate": 2.3809796094687475e-05, |
|
"loss": 1.5311, |
|
"step": 16461 |
|
}, |
|
{ |
|
"epoch": 0.5803159857841584, |
|
"grad_norm": 0.9219604134559631, |
|
"learning_rate": 2.372591577780202e-05, |
|
"loss": 1.5163, |
|
"step": 16492 |
|
}, |
|
{ |
|
"epoch": 0.5814068053063092, |
|
"grad_norm": 0.870782732963562, |
|
"learning_rate": 2.3642049838783838e-05, |
|
"loss": 1.5405, |
|
"step": 16523 |
|
}, |
|
{ |
|
"epoch": 0.5824976248284598, |
|
"grad_norm": 0.8680859208106995, |
|
"learning_rate": 2.3558199224048666e-05, |
|
"loss": 1.5465, |
|
"step": 16554 |
|
}, |
|
{ |
|
"epoch": 0.5835884443506105, |
|
"grad_norm": 0.8209128379821777, |
|
"learning_rate": 2.347436487983929e-05, |
|
"loss": 1.5134, |
|
"step": 16585 |
|
}, |
|
{ |
|
"epoch": 0.5846792638727611, |
|
"grad_norm": 0.8626577854156494, |
|
"learning_rate": 2.3390547752214888e-05, |
|
"loss": 1.5382, |
|
"step": 16616 |
|
}, |
|
{ |
|
"epoch": 0.5857700833949119, |
|
"grad_norm": 0.8471314311027527, |
|
"learning_rate": 2.330674878704035e-05, |
|
"loss": 1.5079, |
|
"step": 16647 |
|
}, |
|
{ |
|
"epoch": 0.5868609029170625, |
|
"grad_norm": 0.8420313000679016, |
|
"learning_rate": 2.322296892997561e-05, |
|
"loss": 1.5354, |
|
"step": 16678 |
|
}, |
|
{ |
|
"epoch": 0.5879517224392132, |
|
"grad_norm": 0.823274552822113, |
|
"learning_rate": 2.313920912646497e-05, |
|
"loss": 1.5335, |
|
"step": 16709 |
|
}, |
|
{ |
|
"epoch": 0.5890425419613639, |
|
"grad_norm": 0.8439893126487732, |
|
"learning_rate": 2.305547032172643e-05, |
|
"loss": 1.528, |
|
"step": 16740 |
|
}, |
|
{ |
|
"epoch": 0.5901333614835146, |
|
"grad_norm": 0.8543256521224976, |
|
"learning_rate": 2.2971753460741014e-05, |
|
"loss": 1.541, |
|
"step": 16771 |
|
}, |
|
{ |
|
"epoch": 0.5912241810056652, |
|
"grad_norm": 0.8447850346565247, |
|
"learning_rate": 2.288805948824212e-05, |
|
"loss": 1.5215, |
|
"step": 16802 |
|
}, |
|
{ |
|
"epoch": 0.5923150005278159, |
|
"grad_norm": 0.8680288791656494, |
|
"learning_rate": 2.2804389348704858e-05, |
|
"loss": 1.5196, |
|
"step": 16833 |
|
}, |
|
{ |
|
"epoch": 0.5934058200499666, |
|
"grad_norm": 0.8200852870941162, |
|
"learning_rate": 2.2720743986335374e-05, |
|
"loss": 1.5123, |
|
"step": 16864 |
|
}, |
|
{ |
|
"epoch": 0.5944966395721173, |
|
"grad_norm": 0.8493021726608276, |
|
"learning_rate": 2.2637124345060233e-05, |
|
"loss": 1.5169, |
|
"step": 16895 |
|
}, |
|
{ |
|
"epoch": 0.5955874590942679, |
|
"grad_norm": 0.8634234666824341, |
|
"learning_rate": 2.2553531368515695e-05, |
|
"loss": 1.5349, |
|
"step": 16926 |
|
}, |
|
{ |
|
"epoch": 0.5966782786164186, |
|
"grad_norm": 0.8441646695137024, |
|
"learning_rate": 2.2469966000037144e-05, |
|
"loss": 1.5072, |
|
"step": 16957 |
|
}, |
|
{ |
|
"epoch": 0.5977690981385693, |
|
"grad_norm": 0.843173623085022, |
|
"learning_rate": 2.2386429182648417e-05, |
|
"loss": 1.5101, |
|
"step": 16988 |
|
}, |
|
{ |
|
"epoch": 0.5988599176607199, |
|
"grad_norm": 0.8240063786506653, |
|
"learning_rate": 2.230292185905114e-05, |
|
"loss": 1.5332, |
|
"step": 17019 |
|
}, |
|
{ |
|
"epoch": 0.5999507371828706, |
|
"grad_norm": 0.8468651175498962, |
|
"learning_rate": 2.2219444971614116e-05, |
|
"loss": 1.5135, |
|
"step": 17050 |
|
}, |
|
{ |
|
"epoch": 0.6010415567050212, |
|
"grad_norm": 0.858584463596344, |
|
"learning_rate": 2.2135999462362655e-05, |
|
"loss": 1.5276, |
|
"step": 17081 |
|
}, |
|
{ |
|
"epoch": 0.602132376227172, |
|
"grad_norm": 0.8320130109786987, |
|
"learning_rate": 2.2052586272968003e-05, |
|
"loss": 1.5309, |
|
"step": 17112 |
|
}, |
|
{ |
|
"epoch": 0.6032231957493226, |
|
"grad_norm": 0.8636554479598999, |
|
"learning_rate": 2.196920634473666e-05, |
|
"loss": 1.5286, |
|
"step": 17143 |
|
}, |
|
{ |
|
"epoch": 0.6043140152714733, |
|
"grad_norm": 0.8534757494926453, |
|
"learning_rate": 2.1885860618599787e-05, |
|
"loss": 1.5259, |
|
"step": 17174 |
|
}, |
|
{ |
|
"epoch": 0.605404834793624, |
|
"grad_norm": 0.8796008825302124, |
|
"learning_rate": 2.1802550035102577e-05, |
|
"loss": 1.5249, |
|
"step": 17205 |
|
}, |
|
{ |
|
"epoch": 0.6064956543157747, |
|
"grad_norm": 0.8278890252113342, |
|
"learning_rate": 2.171927553439363e-05, |
|
"loss": 1.5209, |
|
"step": 17236 |
|
}, |
|
{ |
|
"epoch": 0.6075864738379253, |
|
"grad_norm": 0.8656840920448303, |
|
"learning_rate": 2.1636038056214376e-05, |
|
"loss": 1.5378, |
|
"step": 17267 |
|
}, |
|
{ |
|
"epoch": 0.608677293360076, |
|
"grad_norm": 0.8418081402778625, |
|
"learning_rate": 2.155283853988844e-05, |
|
"loss": 1.5181, |
|
"step": 17298 |
|
}, |
|
{ |
|
"epoch": 0.6097681128822267, |
|
"grad_norm": 0.8615252375602722, |
|
"learning_rate": 2.146967792431106e-05, |
|
"loss": 1.5211, |
|
"step": 17329 |
|
}, |
|
{ |
|
"epoch": 0.6108589324043774, |
|
"grad_norm": 0.8344350457191467, |
|
"learning_rate": 2.138655714793849e-05, |
|
"loss": 1.5173, |
|
"step": 17360 |
|
}, |
|
{ |
|
"epoch": 0.611949751926528, |
|
"grad_norm": 0.8445223569869995, |
|
"learning_rate": 2.1303477148777367e-05, |
|
"loss": 1.5157, |
|
"step": 17391 |
|
}, |
|
{ |
|
"epoch": 0.6130405714486787, |
|
"grad_norm": 0.8439963459968567, |
|
"learning_rate": 2.122043886437421e-05, |
|
"loss": 1.5193, |
|
"step": 17422 |
|
}, |
|
{ |
|
"epoch": 0.6141313909708294, |
|
"grad_norm": 0.833831250667572, |
|
"learning_rate": 2.1137443231804765e-05, |
|
"loss": 1.5319, |
|
"step": 17453 |
|
}, |
|
{ |
|
"epoch": 0.6152222104929801, |
|
"grad_norm": 0.8764674663543701, |
|
"learning_rate": 2.105449118766347e-05, |
|
"loss": 1.519, |
|
"step": 17484 |
|
}, |
|
{ |
|
"epoch": 0.6163130300151307, |
|
"grad_norm": 0.8398875594139099, |
|
"learning_rate": 2.097158366805287e-05, |
|
"loss": 1.5154, |
|
"step": 17515 |
|
}, |
|
{ |
|
"epoch": 0.6174038495372814, |
|
"grad_norm": 0.8722443580627441, |
|
"learning_rate": 2.0888721608573047e-05, |
|
"loss": 1.5324, |
|
"step": 17546 |
|
}, |
|
{ |
|
"epoch": 0.6184946690594321, |
|
"grad_norm": 0.8242905139923096, |
|
"learning_rate": 2.0805905944311087e-05, |
|
"loss": 1.5149, |
|
"step": 17577 |
|
}, |
|
{ |
|
"epoch": 0.6195854885815827, |
|
"grad_norm": 0.8585349321365356, |
|
"learning_rate": 2.0723137609830497e-05, |
|
"loss": 1.5194, |
|
"step": 17608 |
|
}, |
|
{ |
|
"epoch": 0.6206763081037334, |
|
"grad_norm": 0.8693426847457886, |
|
"learning_rate": 2.0640417539160686e-05, |
|
"loss": 1.501, |
|
"step": 17639 |
|
}, |
|
{ |
|
"epoch": 0.621767127625884, |
|
"grad_norm": 0.8580697774887085, |
|
"learning_rate": 2.0557746665786427e-05, |
|
"loss": 1.5132, |
|
"step": 17670 |
|
}, |
|
{ |
|
"epoch": 0.6228579471480348, |
|
"grad_norm": 0.8322170376777649, |
|
"learning_rate": 2.0475125922637256e-05, |
|
"loss": 1.5147, |
|
"step": 17701 |
|
}, |
|
{ |
|
"epoch": 0.6239487666701854, |
|
"grad_norm": 0.8389680981636047, |
|
"learning_rate": 2.0392556242077047e-05, |
|
"loss": 1.4992, |
|
"step": 17732 |
|
}, |
|
{ |
|
"epoch": 0.6250395861923361, |
|
"grad_norm": 0.8317235112190247, |
|
"learning_rate": 2.031003855589343e-05, |
|
"loss": 1.5265, |
|
"step": 17763 |
|
}, |
|
{ |
|
"epoch": 0.6261304057144867, |
|
"grad_norm": 0.8752438426017761, |
|
"learning_rate": 2.022757379528727e-05, |
|
"loss": 1.5077, |
|
"step": 17794 |
|
}, |
|
{ |
|
"epoch": 0.6272212252366375, |
|
"grad_norm": 0.937268853187561, |
|
"learning_rate": 2.0145162890862184e-05, |
|
"loss": 1.5182, |
|
"step": 17825 |
|
}, |
|
{ |
|
"epoch": 0.6283120447587881, |
|
"grad_norm": 0.8401532173156738, |
|
"learning_rate": 2.0062806772614022e-05, |
|
"loss": 1.5112, |
|
"step": 17856 |
|
}, |
|
{ |
|
"epoch": 0.6294028642809388, |
|
"grad_norm": 0.8764299750328064, |
|
"learning_rate": 1.9980506369920392e-05, |
|
"loss": 1.5133, |
|
"step": 17887 |
|
}, |
|
{ |
|
"epoch": 0.6304936838030895, |
|
"grad_norm": 0.8382470607757568, |
|
"learning_rate": 1.989826261153015e-05, |
|
"loss": 1.5295, |
|
"step": 17918 |
|
}, |
|
{ |
|
"epoch": 0.6315845033252402, |
|
"grad_norm": 0.8590472340583801, |
|
"learning_rate": 1.9816076425552923e-05, |
|
"loss": 1.5159, |
|
"step": 17949 |
|
}, |
|
{ |
|
"epoch": 0.6326753228473908, |
|
"grad_norm": 0.8332110047340393, |
|
"learning_rate": 1.9733948739448676e-05, |
|
"loss": 1.5012, |
|
"step": 17980 |
|
}, |
|
{ |
|
"epoch": 0.6337661423695415, |
|
"grad_norm": 0.8751401305198669, |
|
"learning_rate": 1.9651880480017155e-05, |
|
"loss": 1.5361, |
|
"step": 18011 |
|
}, |
|
{ |
|
"epoch": 0.6348569618916922, |
|
"grad_norm": 0.845674991607666, |
|
"learning_rate": 1.9569872573387516e-05, |
|
"loss": 1.5064, |
|
"step": 18042 |
|
}, |
|
{ |
|
"epoch": 0.6359477814138429, |
|
"grad_norm": 0.8510170578956604, |
|
"learning_rate": 1.9487925945007854e-05, |
|
"loss": 1.5145, |
|
"step": 18073 |
|
}, |
|
{ |
|
"epoch": 0.6370386009359935, |
|
"grad_norm": 0.8701625466346741, |
|
"learning_rate": 1.9406041519634726e-05, |
|
"loss": 1.51, |
|
"step": 18104 |
|
}, |
|
{ |
|
"epoch": 0.6381294204581442, |
|
"grad_norm": 0.82243412733078, |
|
"learning_rate": 1.932422022132275e-05, |
|
"loss": 1.5284, |
|
"step": 18135 |
|
}, |
|
{ |
|
"epoch": 0.6392202399802949, |
|
"grad_norm": 0.849349856376648, |
|
"learning_rate": 1.924246297341414e-05, |
|
"loss": 1.5056, |
|
"step": 18166 |
|
}, |
|
{ |
|
"epoch": 0.6403110595024456, |
|
"grad_norm": 0.8479828834533691, |
|
"learning_rate": 1.9160770698528338e-05, |
|
"loss": 1.5159, |
|
"step": 18197 |
|
}, |
|
{ |
|
"epoch": 0.6414018790245962, |
|
"grad_norm": 0.8278518319129944, |
|
"learning_rate": 1.907914431855156e-05, |
|
"loss": 1.5187, |
|
"step": 18228 |
|
}, |
|
{ |
|
"epoch": 0.6424926985467468, |
|
"grad_norm": 0.8431084156036377, |
|
"learning_rate": 1.8997584754626412e-05, |
|
"loss": 1.5181, |
|
"step": 18259 |
|
}, |
|
{ |
|
"epoch": 0.6435835180688976, |
|
"grad_norm": 0.8304771780967712, |
|
"learning_rate": 1.8916092927141486e-05, |
|
"loss": 1.5361, |
|
"step": 18290 |
|
}, |
|
{ |
|
"epoch": 0.6446743375910482, |
|
"grad_norm": 0.8463602662086487, |
|
"learning_rate": 1.883466975572098e-05, |
|
"loss": 1.532, |
|
"step": 18321 |
|
}, |
|
{ |
|
"epoch": 0.6457651571131989, |
|
"grad_norm": 0.8624921441078186, |
|
"learning_rate": 1.8753316159214312e-05, |
|
"loss": 1.5142, |
|
"step": 18352 |
|
}, |
|
{ |
|
"epoch": 0.6468559766353495, |
|
"grad_norm": 0.8516690135002136, |
|
"learning_rate": 1.8672033055685766e-05, |
|
"loss": 1.5106, |
|
"step": 18383 |
|
}, |
|
{ |
|
"epoch": 0.6479467961575003, |
|
"grad_norm": 0.8513640761375427, |
|
"learning_rate": 1.8590821362404116e-05, |
|
"loss": 1.5242, |
|
"step": 18414 |
|
}, |
|
{ |
|
"epoch": 0.6490376156796509, |
|
"grad_norm": 0.845120370388031, |
|
"learning_rate": 1.8509681995832294e-05, |
|
"loss": 1.5037, |
|
"step": 18445 |
|
}, |
|
{ |
|
"epoch": 0.6501284352018016, |
|
"grad_norm": 0.839189887046814, |
|
"learning_rate": 1.8428615871617004e-05, |
|
"loss": 1.4979, |
|
"step": 18476 |
|
}, |
|
{ |
|
"epoch": 0.6512192547239523, |
|
"grad_norm": 0.8610638976097107, |
|
"learning_rate": 1.8347623904578448e-05, |
|
"loss": 1.5174, |
|
"step": 18507 |
|
}, |
|
{ |
|
"epoch": 0.652310074246103, |
|
"grad_norm": 0.8210963010787964, |
|
"learning_rate": 1.8266707008699975e-05, |
|
"loss": 1.4945, |
|
"step": 18538 |
|
}, |
|
{ |
|
"epoch": 0.6534008937682536, |
|
"grad_norm": 0.8418785333633423, |
|
"learning_rate": 1.818586609711774e-05, |
|
"loss": 1.5294, |
|
"step": 18569 |
|
}, |
|
{ |
|
"epoch": 0.6544917132904043, |
|
"grad_norm": 0.8562109470367432, |
|
"learning_rate": 1.8105102082110462e-05, |
|
"loss": 1.5056, |
|
"step": 18600 |
|
}, |
|
{ |
|
"epoch": 0.655582532812555, |
|
"grad_norm": 0.8791701793670654, |
|
"learning_rate": 1.8024415875089058e-05, |
|
"loss": 1.4997, |
|
"step": 18631 |
|
}, |
|
{ |
|
"epoch": 0.6566733523347057, |
|
"grad_norm": 0.8357826471328735, |
|
"learning_rate": 1.7943808386586407e-05, |
|
"loss": 1.4965, |
|
"step": 18662 |
|
}, |
|
{ |
|
"epoch": 0.6577641718568563, |
|
"grad_norm": 0.8441767692565918, |
|
"learning_rate": 1.7863280526247073e-05, |
|
"loss": 1.5243, |
|
"step": 18693 |
|
}, |
|
{ |
|
"epoch": 0.658854991379007, |
|
"grad_norm": 0.8386645317077637, |
|
"learning_rate": 1.7782833202817003e-05, |
|
"loss": 1.5157, |
|
"step": 18724 |
|
}, |
|
{ |
|
"epoch": 0.6599458109011577, |
|
"grad_norm": 0.865395188331604, |
|
"learning_rate": 1.7702467324133327e-05, |
|
"loss": 1.5175, |
|
"step": 18755 |
|
}, |
|
{ |
|
"epoch": 0.6610366304233084, |
|
"grad_norm": 0.8624420762062073, |
|
"learning_rate": 1.7622183797114042e-05, |
|
"loss": 1.4891, |
|
"step": 18786 |
|
}, |
|
{ |
|
"epoch": 0.662127449945459, |
|
"grad_norm": 0.8843603134155273, |
|
"learning_rate": 1.7541983527747838e-05, |
|
"loss": 1.5266, |
|
"step": 18817 |
|
}, |
|
{ |
|
"epoch": 0.6632182694676096, |
|
"grad_norm": 0.8440201878547668, |
|
"learning_rate": 1.746186742108387e-05, |
|
"loss": 1.5213, |
|
"step": 18848 |
|
}, |
|
{ |
|
"epoch": 0.6643090889897604, |
|
"grad_norm": 0.8229241371154785, |
|
"learning_rate": 1.73818363812215e-05, |
|
"loss": 1.4987, |
|
"step": 18879 |
|
}, |
|
{ |
|
"epoch": 0.665399908511911, |
|
"grad_norm": 0.8973398804664612, |
|
"learning_rate": 1.7301891311300153e-05, |
|
"loss": 1.5114, |
|
"step": 18910 |
|
}, |
|
{ |
|
"epoch": 0.6664907280340617, |
|
"grad_norm": 0.8309417963027954, |
|
"learning_rate": 1.7222033113489055e-05, |
|
"loss": 1.5001, |
|
"step": 18941 |
|
}, |
|
{ |
|
"epoch": 0.6675815475562124, |
|
"grad_norm": 0.8955307006835938, |
|
"learning_rate": 1.7142262688977127e-05, |
|
"loss": 1.4995, |
|
"step": 18972 |
|
}, |
|
{ |
|
"epoch": 0.6686723670783631, |
|
"grad_norm": 0.8319917917251587, |
|
"learning_rate": 1.7062580937962764e-05, |
|
"loss": 1.4931, |
|
"step": 19003 |
|
}, |
|
{ |
|
"epoch": 0.6697631866005137, |
|
"grad_norm": 0.8668432235717773, |
|
"learning_rate": 1.698298875964369e-05, |
|
"loss": 1.5232, |
|
"step": 19034 |
|
}, |
|
{ |
|
"epoch": 0.6708540061226644, |
|
"grad_norm": 0.8770532608032227, |
|
"learning_rate": 1.690348705220684e-05, |
|
"loss": 1.5177, |
|
"step": 19065 |
|
}, |
|
{ |
|
"epoch": 0.6719448256448151, |
|
"grad_norm": 0.8868733644485474, |
|
"learning_rate": 1.6824076712818156e-05, |
|
"loss": 1.5302, |
|
"step": 19096 |
|
}, |
|
{ |
|
"epoch": 0.6730356451669658, |
|
"grad_norm": 0.8315250277519226, |
|
"learning_rate": 1.6744758637612533e-05, |
|
"loss": 1.5092, |
|
"step": 19127 |
|
}, |
|
{ |
|
"epoch": 0.6741264646891164, |
|
"grad_norm": 0.8182474374771118, |
|
"learning_rate": 1.6665533721683664e-05, |
|
"loss": 1.5077, |
|
"step": 19158 |
|
}, |
|
{ |
|
"epoch": 0.6752172842112671, |
|
"grad_norm": 0.8515792489051819, |
|
"learning_rate": 1.6586402859073974e-05, |
|
"loss": 1.5042, |
|
"step": 19189 |
|
}, |
|
{ |
|
"epoch": 0.6763081037334178, |
|
"grad_norm": 0.8750668168067932, |
|
"learning_rate": 1.6507366942764463e-05, |
|
"loss": 1.5225, |
|
"step": 19220 |
|
}, |
|
{ |
|
"epoch": 0.6773989232555685, |
|
"grad_norm": 0.8479793667793274, |
|
"learning_rate": 1.6428426864664732e-05, |
|
"loss": 1.5076, |
|
"step": 19251 |
|
}, |
|
{ |
|
"epoch": 0.6784897427777191, |
|
"grad_norm": 0.8461045026779175, |
|
"learning_rate": 1.6349583515602816e-05, |
|
"loss": 1.4932, |
|
"step": 19282 |
|
}, |
|
{ |
|
"epoch": 0.6795805622998699, |
|
"grad_norm": 0.8231117725372314, |
|
"learning_rate": 1.6270837785315208e-05, |
|
"loss": 1.5178, |
|
"step": 19313 |
|
}, |
|
{ |
|
"epoch": 0.6806713818220205, |
|
"grad_norm": 0.8422976732254028, |
|
"learning_rate": 1.619219056243676e-05, |
|
"loss": 1.5039, |
|
"step": 19344 |
|
}, |
|
{ |
|
"epoch": 0.6817622013441712, |
|
"grad_norm": 0.8593392968177795, |
|
"learning_rate": 1.6113642734490698e-05, |
|
"loss": 1.4901, |
|
"step": 19375 |
|
}, |
|
{ |
|
"epoch": 0.6828530208663218, |
|
"grad_norm": 0.8338266015052795, |
|
"learning_rate": 1.6035195187878577e-05, |
|
"loss": 1.5152, |
|
"step": 19406 |
|
}, |
|
{ |
|
"epoch": 0.6839438403884724, |
|
"grad_norm": 0.8571571111679077, |
|
"learning_rate": 1.5956848807870305e-05, |
|
"loss": 1.5135, |
|
"step": 19437 |
|
}, |
|
{ |
|
"epoch": 0.6850346599106232, |
|
"grad_norm": 0.8638436794281006, |
|
"learning_rate": 1.587860447859413e-05, |
|
"loss": 1.5174, |
|
"step": 19468 |
|
}, |
|
{ |
|
"epoch": 0.6861254794327738, |
|
"grad_norm": 0.9013044834136963, |
|
"learning_rate": 1.5800463083026686e-05, |
|
"loss": 1.5075, |
|
"step": 19499 |
|
}, |
|
{ |
|
"epoch": 0.6872162989549245, |
|
"grad_norm": 0.8595010638237, |
|
"learning_rate": 1.572242550298298e-05, |
|
"loss": 1.4825, |
|
"step": 19530 |
|
}, |
|
{ |
|
"epoch": 0.6883071184770752, |
|
"grad_norm": 0.8541821241378784, |
|
"learning_rate": 1.56444926191065e-05, |
|
"loss": 1.5039, |
|
"step": 19561 |
|
}, |
|
{ |
|
"epoch": 0.6893979379992259, |
|
"grad_norm": 0.8488218784332275, |
|
"learning_rate": 1.5566665310859257e-05, |
|
"loss": 1.4839, |
|
"step": 19592 |
|
}, |
|
{ |
|
"epoch": 0.6904887575213765, |
|
"grad_norm": 0.8451359868049622, |
|
"learning_rate": 1.5488944456511846e-05, |
|
"loss": 1.4954, |
|
"step": 19623 |
|
}, |
|
{ |
|
"epoch": 0.6915795770435272, |
|
"grad_norm": 0.8344883918762207, |
|
"learning_rate": 1.5411330933133546e-05, |
|
"loss": 1.4964, |
|
"step": 19654 |
|
}, |
|
{ |
|
"epoch": 0.6926703965656779, |
|
"grad_norm": 0.8662464022636414, |
|
"learning_rate": 1.533382561658241e-05, |
|
"loss": 1.5033, |
|
"step": 19685 |
|
}, |
|
{ |
|
"epoch": 0.6937612160878286, |
|
"grad_norm": 0.8548587560653687, |
|
"learning_rate": 1.525642938149541e-05, |
|
"loss": 1.5025, |
|
"step": 19716 |
|
}, |
|
{ |
|
"epoch": 0.6948520356099792, |
|
"grad_norm": 0.8340123295783997, |
|
"learning_rate": 1.5179143101278536e-05, |
|
"loss": 1.5146, |
|
"step": 19747 |
|
}, |
|
{ |
|
"epoch": 0.69594285513213, |
|
"grad_norm": 0.825764536857605, |
|
"learning_rate": 1.5101967648096955e-05, |
|
"loss": 1.5002, |
|
"step": 19778 |
|
}, |
|
{ |
|
"epoch": 0.6970336746542806, |
|
"grad_norm": 0.8531391024589539, |
|
"learning_rate": 1.5024903892865172e-05, |
|
"loss": 1.4916, |
|
"step": 19809 |
|
}, |
|
{ |
|
"epoch": 0.6981244941764313, |
|
"grad_norm": 0.8312798738479614, |
|
"learning_rate": 1.4947952705237184e-05, |
|
"loss": 1.5029, |
|
"step": 19840 |
|
}, |
|
{ |
|
"epoch": 0.6992153136985819, |
|
"grad_norm": 0.8844854831695557, |
|
"learning_rate": 1.4871114953596682e-05, |
|
"loss": 1.5297, |
|
"step": 19871 |
|
}, |
|
{ |
|
"epoch": 0.7003061332207327, |
|
"grad_norm": 0.8333762288093567, |
|
"learning_rate": 1.4794391505047256e-05, |
|
"loss": 1.4796, |
|
"step": 19902 |
|
}, |
|
{ |
|
"epoch": 0.7013969527428833, |
|
"grad_norm": 0.8612198829650879, |
|
"learning_rate": 1.4717783225402596e-05, |
|
"loss": 1.4999, |
|
"step": 19933 |
|
}, |
|
{ |
|
"epoch": 0.702487772265034, |
|
"grad_norm": 0.8647238612174988, |
|
"learning_rate": 1.4641290979176735e-05, |
|
"loss": 1.5138, |
|
"step": 19964 |
|
}, |
|
{ |
|
"epoch": 0.7035785917871846, |
|
"grad_norm": 0.8337448835372925, |
|
"learning_rate": 1.4564915629574246e-05, |
|
"loss": 1.5069, |
|
"step": 19995 |
|
}, |
|
{ |
|
"epoch": 0.7046694113093354, |
|
"grad_norm": 0.8397265076637268, |
|
"learning_rate": 1.4488658038480601e-05, |
|
"loss": 1.4936, |
|
"step": 20026 |
|
}, |
|
{ |
|
"epoch": 0.705760230831486, |
|
"grad_norm": 0.8537731766700745, |
|
"learning_rate": 1.4412519066452323e-05, |
|
"loss": 1.5015, |
|
"step": 20057 |
|
}, |
|
{ |
|
"epoch": 0.7068510503536366, |
|
"grad_norm": 0.8549908995628357, |
|
"learning_rate": 1.4336499572707373e-05, |
|
"loss": 1.507, |
|
"step": 20088 |
|
}, |
|
{ |
|
"epoch": 0.7079418698757873, |
|
"grad_norm": 0.8688744306564331, |
|
"learning_rate": 1.4260600415115433e-05, |
|
"loss": 1.5045, |
|
"step": 20119 |
|
}, |
|
{ |
|
"epoch": 0.709032689397938, |
|
"grad_norm": 0.8355604410171509, |
|
"learning_rate": 1.4184822450188137e-05, |
|
"loss": 1.5, |
|
"step": 20150 |
|
}, |
|
{ |
|
"epoch": 0.7101235089200887, |
|
"grad_norm": 0.8467512130737305, |
|
"learning_rate": 1.410916653306954e-05, |
|
"loss": 1.5093, |
|
"step": 20181 |
|
}, |
|
{ |
|
"epoch": 0.7112143284422393, |
|
"grad_norm": 0.8274648785591125, |
|
"learning_rate": 1.403363351752639e-05, |
|
"loss": 1.4932, |
|
"step": 20212 |
|
}, |
|
{ |
|
"epoch": 0.71230514796439, |
|
"grad_norm": 0.8433395624160767, |
|
"learning_rate": 1.3958224255938485e-05, |
|
"loss": 1.5064, |
|
"step": 20243 |
|
}, |
|
{ |
|
"epoch": 0.7133959674865407, |
|
"grad_norm": 0.8672435879707336, |
|
"learning_rate": 1.388293959928911e-05, |
|
"loss": 1.5008, |
|
"step": 20274 |
|
}, |
|
{ |
|
"epoch": 0.7144867870086914, |
|
"grad_norm": 0.8505216240882874, |
|
"learning_rate": 1.3807780397155379e-05, |
|
"loss": 1.4892, |
|
"step": 20305 |
|
}, |
|
{ |
|
"epoch": 0.715577606530842, |
|
"grad_norm": 0.8569562435150146, |
|
"learning_rate": 1.3732747497698655e-05, |
|
"loss": 1.4895, |
|
"step": 20336 |
|
}, |
|
{ |
|
"epoch": 0.7166684260529927, |
|
"grad_norm": 0.8675398826599121, |
|
"learning_rate": 1.3657841747655038e-05, |
|
"loss": 1.4879, |
|
"step": 20367 |
|
}, |
|
{ |
|
"epoch": 0.7177592455751434, |
|
"grad_norm": 0.8449987769126892, |
|
"learning_rate": 1.3583063992325706e-05, |
|
"loss": 1.4887, |
|
"step": 20398 |
|
}, |
|
{ |
|
"epoch": 0.7188500650972941, |
|
"grad_norm": 0.8670520782470703, |
|
"learning_rate": 1.3508415075567496e-05, |
|
"loss": 1.5038, |
|
"step": 20429 |
|
}, |
|
{ |
|
"epoch": 0.7199408846194447, |
|
"grad_norm": 0.8580642938613892, |
|
"learning_rate": 1.343389583978327e-05, |
|
"loss": 1.4946, |
|
"step": 20460 |
|
}, |
|
{ |
|
"epoch": 0.7210317041415955, |
|
"grad_norm": 0.8375786542892456, |
|
"learning_rate": 1.3359507125912468e-05, |
|
"loss": 1.5131, |
|
"step": 20491 |
|
}, |
|
{ |
|
"epoch": 0.7221225236637461, |
|
"grad_norm": 0.8435585498809814, |
|
"learning_rate": 1.3285249773421627e-05, |
|
"loss": 1.4937, |
|
"step": 20522 |
|
}, |
|
{ |
|
"epoch": 0.7232133431858968, |
|
"grad_norm": 0.8349316716194153, |
|
"learning_rate": 1.3211124620294884e-05, |
|
"loss": 1.5134, |
|
"step": 20553 |
|
}, |
|
{ |
|
"epoch": 0.7243041627080474, |
|
"grad_norm": 0.8518332839012146, |
|
"learning_rate": 1.313713250302451e-05, |
|
"loss": 1.4998, |
|
"step": 20584 |
|
}, |
|
{ |
|
"epoch": 0.7253949822301982, |
|
"grad_norm": 0.8346456289291382, |
|
"learning_rate": 1.3063274256601479e-05, |
|
"loss": 1.5013, |
|
"step": 20615 |
|
}, |
|
{ |
|
"epoch": 0.7264858017523488, |
|
"grad_norm": 0.851972222328186, |
|
"learning_rate": 1.2989550714506086e-05, |
|
"loss": 1.4916, |
|
"step": 20646 |
|
}, |
|
{ |
|
"epoch": 0.7275766212744994, |
|
"grad_norm": 0.8488277196884155, |
|
"learning_rate": 1.291596270869846e-05, |
|
"loss": 1.4971, |
|
"step": 20677 |
|
}, |
|
{ |
|
"epoch": 0.7286674407966501, |
|
"grad_norm": 0.8665834665298462, |
|
"learning_rate": 1.284251106960927e-05, |
|
"loss": 1.4792, |
|
"step": 20708 |
|
}, |
|
{ |
|
"epoch": 0.7297582603188008, |
|
"grad_norm": 0.8406128287315369, |
|
"learning_rate": 1.2769196626130263e-05, |
|
"loss": 1.4847, |
|
"step": 20739 |
|
}, |
|
{ |
|
"epoch": 0.7308490798409515, |
|
"grad_norm": 0.8546832799911499, |
|
"learning_rate": 1.2696020205604969e-05, |
|
"loss": 1.5155, |
|
"step": 20770 |
|
}, |
|
{ |
|
"epoch": 0.7319398993631021, |
|
"grad_norm": 0.8713514804840088, |
|
"learning_rate": 1.2622982633819359e-05, |
|
"loss": 1.4893, |
|
"step": 20801 |
|
}, |
|
{ |
|
"epoch": 0.7330307188852528, |
|
"grad_norm": 0.8382667899131775, |
|
"learning_rate": 1.2550084734992484e-05, |
|
"loss": 1.5017, |
|
"step": 20832 |
|
}, |
|
{ |
|
"epoch": 0.7341215384074035, |
|
"grad_norm": 0.8436316847801208, |
|
"learning_rate": 1.247732733176724e-05, |
|
"loss": 1.4876, |
|
"step": 20863 |
|
}, |
|
{ |
|
"epoch": 0.7352123579295542, |
|
"grad_norm": 0.8521105647087097, |
|
"learning_rate": 1.2404711245201044e-05, |
|
"loss": 1.5145, |
|
"step": 20894 |
|
}, |
|
{ |
|
"epoch": 0.7363031774517048, |
|
"grad_norm": 0.8565787076950073, |
|
"learning_rate": 1.2332237294756535e-05, |
|
"loss": 1.497, |
|
"step": 20925 |
|
}, |
|
{ |
|
"epoch": 0.7373939969738555, |
|
"grad_norm": 0.8692595958709717, |
|
"learning_rate": 1.225990629829241e-05, |
|
"loss": 1.488, |
|
"step": 20956 |
|
}, |
|
{ |
|
"epoch": 0.7384848164960062, |
|
"grad_norm": 0.8533311486244202, |
|
"learning_rate": 1.2187719072054136e-05, |
|
"loss": 1.5012, |
|
"step": 20987 |
|
}, |
|
{ |
|
"epoch": 0.7395756360181569, |
|
"grad_norm": 0.8499316573143005, |
|
"learning_rate": 1.2115676430664735e-05, |
|
"loss": 1.4937, |
|
"step": 21018 |
|
}, |
|
{ |
|
"epoch": 0.7406664555403075, |
|
"grad_norm": 0.8615989089012146, |
|
"learning_rate": 1.2043779187115647e-05, |
|
"loss": 1.5005, |
|
"step": 21049 |
|
}, |
|
{ |
|
"epoch": 0.7417572750624583, |
|
"grad_norm": 0.8634625673294067, |
|
"learning_rate": 1.1972028152757476e-05, |
|
"loss": 1.5067, |
|
"step": 21080 |
|
}, |
|
{ |
|
"epoch": 0.7428480945846089, |
|
"grad_norm": 0.885278582572937, |
|
"learning_rate": 1.1900424137290889e-05, |
|
"loss": 1.4832, |
|
"step": 21111 |
|
}, |
|
{ |
|
"epoch": 0.7439389141067596, |
|
"grad_norm": 0.8779427409172058, |
|
"learning_rate": 1.1828967948757482e-05, |
|
"loss": 1.4774, |
|
"step": 21142 |
|
}, |
|
{ |
|
"epoch": 0.7450297336289102, |
|
"grad_norm": 0.8246616125106812, |
|
"learning_rate": 1.175766039353062e-05, |
|
"loss": 1.4921, |
|
"step": 21173 |
|
}, |
|
{ |
|
"epoch": 0.746120553151061, |
|
"grad_norm": 0.8430258631706238, |
|
"learning_rate": 1.1686502276306382e-05, |
|
"loss": 1.512, |
|
"step": 21204 |
|
}, |
|
{ |
|
"epoch": 0.7472113726732116, |
|
"grad_norm": 0.873651921749115, |
|
"learning_rate": 1.1615494400094445e-05, |
|
"loss": 1.4958, |
|
"step": 21235 |
|
}, |
|
{ |
|
"epoch": 0.7483021921953622, |
|
"grad_norm": 0.8593094348907471, |
|
"learning_rate": 1.1544637566209029e-05, |
|
"loss": 1.4965, |
|
"step": 21266 |
|
}, |
|
{ |
|
"epoch": 0.7493930117175129, |
|
"grad_norm": 0.839512825012207, |
|
"learning_rate": 1.1473932574259886e-05, |
|
"loss": 1.4759, |
|
"step": 21297 |
|
}, |
|
{ |
|
"epoch": 0.7504838312396636, |
|
"grad_norm": 0.8661448955535889, |
|
"learning_rate": 1.1403380222143247e-05, |
|
"loss": 1.5006, |
|
"step": 21328 |
|
}, |
|
{ |
|
"epoch": 0.7515746507618143, |
|
"grad_norm": 0.8609933257102966, |
|
"learning_rate": 1.1332981306032808e-05, |
|
"loss": 1.4816, |
|
"step": 21359 |
|
}, |
|
{ |
|
"epoch": 0.7526654702839649, |
|
"grad_norm": 0.8672799468040466, |
|
"learning_rate": 1.1262736620370762e-05, |
|
"loss": 1.482, |
|
"step": 21390 |
|
}, |
|
{ |
|
"epoch": 0.7537562898061156, |
|
"grad_norm": 0.8496883511543274, |
|
"learning_rate": 1.1192646957858854e-05, |
|
"loss": 1.4735, |
|
"step": 21421 |
|
}, |
|
{ |
|
"epoch": 0.7548471093282663, |
|
"grad_norm": 0.8749503493309021, |
|
"learning_rate": 1.1122713109449381e-05, |
|
"loss": 1.4944, |
|
"step": 21452 |
|
}, |
|
{ |
|
"epoch": 0.755937928850417, |
|
"grad_norm": 1.626726508140564, |
|
"learning_rate": 1.105293586433634e-05, |
|
"loss": 1.492, |
|
"step": 21483 |
|
}, |
|
{ |
|
"epoch": 0.7570287483725676, |
|
"grad_norm": 0.8712369203567505, |
|
"learning_rate": 1.0983316009946446e-05, |
|
"loss": 1.4879, |
|
"step": 21514 |
|
}, |
|
{ |
|
"epoch": 0.7581195678947183, |
|
"grad_norm": 0.875504195690155, |
|
"learning_rate": 1.0913854331930282e-05, |
|
"loss": 1.4961, |
|
"step": 21545 |
|
}, |
|
{ |
|
"epoch": 0.759210387416869, |
|
"grad_norm": 0.8538352251052856, |
|
"learning_rate": 1.0844551614153456e-05, |
|
"loss": 1.4951, |
|
"step": 21576 |
|
}, |
|
{ |
|
"epoch": 0.7603012069390197, |
|
"grad_norm": 0.8490918278694153, |
|
"learning_rate": 1.0775408638687725e-05, |
|
"loss": 1.5021, |
|
"step": 21607 |
|
}, |
|
{ |
|
"epoch": 0.7613920264611703, |
|
"grad_norm": 0.8524895906448364, |
|
"learning_rate": 1.0706426185802165e-05, |
|
"loss": 1.5031, |
|
"step": 21638 |
|
}, |
|
{ |
|
"epoch": 0.7624828459833211, |
|
"grad_norm": 0.8854188323020935, |
|
"learning_rate": 1.0637605033954371e-05, |
|
"loss": 1.4909, |
|
"step": 21669 |
|
}, |
|
{ |
|
"epoch": 0.7635736655054717, |
|
"grad_norm": 0.8424195051193237, |
|
"learning_rate": 1.05689459597817e-05, |
|
"loss": 1.4788, |
|
"step": 21700 |
|
}, |
|
{ |
|
"epoch": 0.7646644850276224, |
|
"grad_norm": 0.8683730363845825, |
|
"learning_rate": 1.050044973809246e-05, |
|
"loss": 1.4905, |
|
"step": 21731 |
|
}, |
|
{ |
|
"epoch": 0.765755304549773, |
|
"grad_norm": 0.8552282452583313, |
|
"learning_rate": 1.043211714185722e-05, |
|
"loss": 1.4984, |
|
"step": 21762 |
|
}, |
|
{ |
|
"epoch": 0.7668461240719238, |
|
"grad_norm": 0.8360662460327148, |
|
"learning_rate": 1.036394894220003e-05, |
|
"loss": 1.4726, |
|
"step": 21793 |
|
}, |
|
{ |
|
"epoch": 0.7679369435940744, |
|
"grad_norm": 0.8638837337493896, |
|
"learning_rate": 1.0295945908389751e-05, |
|
"loss": 1.4891, |
|
"step": 21824 |
|
}, |
|
{ |
|
"epoch": 0.7690277631162251, |
|
"grad_norm": 0.839370608329773, |
|
"learning_rate": 1.0228108807831393e-05, |
|
"loss": 1.4942, |
|
"step": 21855 |
|
}, |
|
{ |
|
"epoch": 0.7701185826383757, |
|
"grad_norm": 0.8426961898803711, |
|
"learning_rate": 1.01604384060574e-05, |
|
"loss": 1.4975, |
|
"step": 21886 |
|
}, |
|
{ |
|
"epoch": 0.7712094021605264, |
|
"grad_norm": 0.8592336773872375, |
|
"learning_rate": 1.009293546671907e-05, |
|
"loss": 1.4714, |
|
"step": 21917 |
|
}, |
|
{ |
|
"epoch": 0.7723002216826771, |
|
"grad_norm": 0.87094646692276, |
|
"learning_rate": 1.002560075157791e-05, |
|
"loss": 1.4973, |
|
"step": 21948 |
|
}, |
|
{ |
|
"epoch": 0.7733910412048277, |
|
"grad_norm": 0.820807158946991, |
|
"learning_rate": 9.958435020496995e-06, |
|
"loss": 1.4818, |
|
"step": 21979 |
|
}, |
|
{ |
|
"epoch": 0.7744818607269784, |
|
"grad_norm": 0.8604585528373718, |
|
"learning_rate": 9.89143903143249e-06, |
|
"loss": 1.4837, |
|
"step": 22010 |
|
}, |
|
{ |
|
"epoch": 0.7755726802491291, |
|
"grad_norm": 0.8644665479660034, |
|
"learning_rate": 9.824613540425038e-06, |
|
"loss": 1.4973, |
|
"step": 22041 |
|
}, |
|
{ |
|
"epoch": 0.7766634997712798, |
|
"grad_norm": 0.88955157995224, |
|
"learning_rate": 9.757959301591197e-06, |
|
"loss": 1.5109, |
|
"step": 22072 |
|
}, |
|
{ |
|
"epoch": 0.7777543192934304, |
|
"grad_norm": 0.8649373054504395, |
|
"learning_rate": 9.691477067115017e-06, |
|
"loss": 1.4953, |
|
"step": 22103 |
|
}, |
|
{ |
|
"epoch": 0.7788451388155812, |
|
"grad_norm": 0.8767892718315125, |
|
"learning_rate": 9.625167587239467e-06, |
|
"loss": 1.4927, |
|
"step": 22134 |
|
}, |
|
{ |
|
"epoch": 0.7799359583377318, |
|
"grad_norm": 0.8520987033843994, |
|
"learning_rate": 9.559031610258007e-06, |
|
"loss": 1.4849, |
|
"step": 22165 |
|
}, |
|
{ |
|
"epoch": 0.7810267778598825, |
|
"grad_norm": 0.8774919509887695, |
|
"learning_rate": 9.493069882506164e-06, |
|
"loss": 1.4916, |
|
"step": 22196 |
|
}, |
|
{ |
|
"epoch": 0.7821175973820331, |
|
"grad_norm": 0.860447108745575, |
|
"learning_rate": 9.427283148353056e-06, |
|
"loss": 1.4923, |
|
"step": 22227 |
|
}, |
|
{ |
|
"epoch": 0.7832084169041839, |
|
"grad_norm": 0.8607807159423828, |
|
"learning_rate": 9.361672150193052e-06, |
|
"loss": 1.5016, |
|
"step": 22258 |
|
}, |
|
{ |
|
"epoch": 0.7842992364263345, |
|
"grad_norm": 0.8656525015830994, |
|
"learning_rate": 9.29623762843734e-06, |
|
"loss": 1.4969, |
|
"step": 22289 |
|
}, |
|
{ |
|
"epoch": 0.7853900559484852, |
|
"grad_norm": 0.8755804300308228, |
|
"learning_rate": 9.230980321505594e-06, |
|
"loss": 1.4974, |
|
"step": 22320 |
|
}, |
|
{ |
|
"epoch": 0.7864808754706358, |
|
"grad_norm": 0.8609233498573303, |
|
"learning_rate": 9.165900965817668e-06, |
|
"loss": 1.4707, |
|
"step": 22351 |
|
}, |
|
{ |
|
"epoch": 0.7875716949927866, |
|
"grad_norm": 0.885404646396637, |
|
"learning_rate": 9.101000295785245e-06, |
|
"loss": 1.4872, |
|
"step": 22382 |
|
}, |
|
{ |
|
"epoch": 0.7886625145149372, |
|
"grad_norm": 0.8608178496360779, |
|
"learning_rate": 9.036279043803565e-06, |
|
"loss": 1.4854, |
|
"step": 22413 |
|
}, |
|
{ |
|
"epoch": 0.7897533340370879, |
|
"grad_norm": 0.8458565473556519, |
|
"learning_rate": 8.971737940243147e-06, |
|
"loss": 1.48, |
|
"step": 22444 |
|
}, |
|
{ |
|
"epoch": 0.7908441535592385, |
|
"grad_norm": 0.8370563983917236, |
|
"learning_rate": 8.907377713441592e-06, |
|
"loss": 1.4736, |
|
"step": 22475 |
|
}, |
|
{ |
|
"epoch": 0.7919349730813892, |
|
"grad_norm": 0.8452663421630859, |
|
"learning_rate": 8.843199089695293e-06, |
|
"loss": 1.484, |
|
"step": 22506 |
|
}, |
|
{ |
|
"epoch": 0.7930257926035399, |
|
"grad_norm": 0.857646644115448, |
|
"learning_rate": 8.779202793251311e-06, |
|
"loss": 1.4787, |
|
"step": 22537 |
|
}, |
|
{ |
|
"epoch": 0.7941166121256905, |
|
"grad_norm": 0.869369387626648, |
|
"learning_rate": 8.715389546299149e-06, |
|
"loss": 1.4712, |
|
"step": 22568 |
|
}, |
|
{ |
|
"epoch": 0.7952074316478412, |
|
"grad_norm": 0.8824241161346436, |
|
"learning_rate": 8.651760068962617e-06, |
|
"loss": 1.4883, |
|
"step": 22599 |
|
}, |
|
{ |
|
"epoch": 0.7962982511699919, |
|
"grad_norm": 0.8725176453590393, |
|
"learning_rate": 8.588315079291733e-06, |
|
"loss": 1.4669, |
|
"step": 22630 |
|
}, |
|
{ |
|
"epoch": 0.7973890706921426, |
|
"grad_norm": 0.8481059074401855, |
|
"learning_rate": 8.52505529325457e-06, |
|
"loss": 1.4945, |
|
"step": 22661 |
|
}, |
|
{ |
|
"epoch": 0.7984798902142932, |
|
"grad_norm": 0.8689181208610535, |
|
"learning_rate": 8.461981424729216e-06, |
|
"loss": 1.4812, |
|
"step": 22692 |
|
}, |
|
{ |
|
"epoch": 0.799570709736444, |
|
"grad_norm": 0.8892386555671692, |
|
"learning_rate": 8.399094185495725e-06, |
|
"loss": 1.4929, |
|
"step": 22723 |
|
}, |
|
{ |
|
"epoch": 0.8006615292585946, |
|
"grad_norm": 0.8685445785522461, |
|
"learning_rate": 8.336394285228017e-06, |
|
"loss": 1.4848, |
|
"step": 22754 |
|
}, |
|
{ |
|
"epoch": 0.8017523487807453, |
|
"grad_norm": 0.8544855713844299, |
|
"learning_rate": 8.273882431485952e-06, |
|
"loss": 1.481, |
|
"step": 22785 |
|
}, |
|
{ |
|
"epoch": 0.8028431683028959, |
|
"grad_norm": 0.8621202707290649, |
|
"learning_rate": 8.211559329707316e-06, |
|
"loss": 1.4539, |
|
"step": 22816 |
|
}, |
|
{ |
|
"epoch": 0.8039339878250467, |
|
"grad_norm": 0.8611202836036682, |
|
"learning_rate": 8.149425683199823e-06, |
|
"loss": 1.4865, |
|
"step": 22847 |
|
}, |
|
{ |
|
"epoch": 0.8050248073471973, |
|
"grad_norm": 0.8885299563407898, |
|
"learning_rate": 8.08748219313325e-06, |
|
"loss": 1.4865, |
|
"step": 22878 |
|
}, |
|
{ |
|
"epoch": 0.806115626869348, |
|
"grad_norm": 0.8586752414703369, |
|
"learning_rate": 8.025729558531453e-06, |
|
"loss": 1.4754, |
|
"step": 22909 |
|
}, |
|
{ |
|
"epoch": 0.8072064463914986, |
|
"grad_norm": 0.8655884861946106, |
|
"learning_rate": 7.964168476264508e-06, |
|
"loss": 1.4644, |
|
"step": 22940 |
|
}, |
|
{ |
|
"epoch": 0.8082972659136494, |
|
"grad_norm": 0.8527194261550903, |
|
"learning_rate": 7.902799641040884e-06, |
|
"loss": 1.4824, |
|
"step": 22971 |
|
}, |
|
{ |
|
"epoch": 0.8093880854358, |
|
"grad_norm": 0.8602740168571472, |
|
"learning_rate": 7.841623745399523e-06, |
|
"loss": 1.4832, |
|
"step": 23002 |
|
}, |
|
{ |
|
"epoch": 0.8104789049579507, |
|
"grad_norm": 0.8708512783050537, |
|
"learning_rate": 7.780641479702114e-06, |
|
"loss": 1.4808, |
|
"step": 23033 |
|
}, |
|
{ |
|
"epoch": 0.8115697244801013, |
|
"grad_norm": 0.8606247305870056, |
|
"learning_rate": 7.719853532125227e-06, |
|
"loss": 1.4971, |
|
"step": 23064 |
|
}, |
|
{ |
|
"epoch": 0.812660544002252, |
|
"grad_norm": 0.8713460564613342, |
|
"learning_rate": 7.65926058865258e-06, |
|
"loss": 1.4983, |
|
"step": 23095 |
|
}, |
|
{ |
|
"epoch": 0.8137513635244027, |
|
"grad_norm": 0.8963667750358582, |
|
"learning_rate": 7.598863333067313e-06, |
|
"loss": 1.4911, |
|
"step": 23126 |
|
}, |
|
{ |
|
"epoch": 0.8148421830465533, |
|
"grad_norm": 0.873379647731781, |
|
"learning_rate": 7.538662446944253e-06, |
|
"loss": 1.4898, |
|
"step": 23157 |
|
}, |
|
{ |
|
"epoch": 0.815933002568704, |
|
"grad_norm": 0.8980262279510498, |
|
"learning_rate": 7.478658609642211e-06, |
|
"loss": 1.4838, |
|
"step": 23188 |
|
}, |
|
{ |
|
"epoch": 0.8170238220908547, |
|
"grad_norm": 0.8681792616844177, |
|
"learning_rate": 7.418852498296327e-06, |
|
"loss": 1.4766, |
|
"step": 23219 |
|
}, |
|
{ |
|
"epoch": 0.8181146416130054, |
|
"grad_norm": 0.8889434933662415, |
|
"learning_rate": 7.359244787810457e-06, |
|
"loss": 1.4892, |
|
"step": 23250 |
|
}, |
|
{ |
|
"epoch": 0.819205461135156, |
|
"grad_norm": 0.8408189415931702, |
|
"learning_rate": 7.299836150849493e-06, |
|
"loss": 1.481, |
|
"step": 23281 |
|
}, |
|
{ |
|
"epoch": 0.8202962806573068, |
|
"grad_norm": 0.8755449056625366, |
|
"learning_rate": 7.240627257831847e-06, |
|
"loss": 1.4793, |
|
"step": 23312 |
|
}, |
|
{ |
|
"epoch": 0.8213871001794574, |
|
"grad_norm": 0.8448764085769653, |
|
"learning_rate": 7.1816187769218195e-06, |
|
"loss": 1.4857, |
|
"step": 23343 |
|
}, |
|
{ |
|
"epoch": 0.8224779197016081, |
|
"grad_norm": 0.8427819013595581, |
|
"learning_rate": 7.1228113740220895e-06, |
|
"loss": 1.4762, |
|
"step": 23374 |
|
}, |
|
{ |
|
"epoch": 0.8235687392237587, |
|
"grad_norm": 0.8959159255027771, |
|
"learning_rate": 7.064205712766226e-06, |
|
"loss": 1.5094, |
|
"step": 23405 |
|
}, |
|
{ |
|
"epoch": 0.8246595587459095, |
|
"grad_norm": 0.8826150298118591, |
|
"learning_rate": 7.005802454511129e-06, |
|
"loss": 1.4803, |
|
"step": 23436 |
|
}, |
|
{ |
|
"epoch": 0.8257503782680601, |
|
"grad_norm": 0.8280698657035828, |
|
"learning_rate": 6.947602258329639e-06, |
|
"loss": 1.4791, |
|
"step": 23467 |
|
}, |
|
{ |
|
"epoch": 0.8268411977902108, |
|
"grad_norm": 0.8621764779090881, |
|
"learning_rate": 6.889605781003078e-06, |
|
"loss": 1.4622, |
|
"step": 23498 |
|
}, |
|
{ |
|
"epoch": 0.8279320173123614, |
|
"grad_norm": 0.8792335987091064, |
|
"learning_rate": 6.831813677013776e-06, |
|
"loss": 1.4717, |
|
"step": 23529 |
|
}, |
|
{ |
|
"epoch": 0.8290228368345122, |
|
"grad_norm": 0.8960312604904175, |
|
"learning_rate": 6.774226598537792e-06, |
|
"loss": 1.4805, |
|
"step": 23560 |
|
}, |
|
{ |
|
"epoch": 0.8301136563566628, |
|
"grad_norm": 0.8715002536773682, |
|
"learning_rate": 6.716845195437482e-06, |
|
"loss": 1.4845, |
|
"step": 23591 |
|
}, |
|
{ |
|
"epoch": 0.8312044758788135, |
|
"grad_norm": 0.8723446726799011, |
|
"learning_rate": 6.659670115254168e-06, |
|
"loss": 1.4704, |
|
"step": 23622 |
|
}, |
|
{ |
|
"epoch": 0.8322952954009641, |
|
"grad_norm": 0.8784072399139404, |
|
"learning_rate": 6.602702003200872e-06, |
|
"loss": 1.4895, |
|
"step": 23653 |
|
}, |
|
{ |
|
"epoch": 0.8333861149231148, |
|
"grad_norm": 0.8655444383621216, |
|
"learning_rate": 6.545941502154992e-06, |
|
"loss": 1.4764, |
|
"step": 23684 |
|
}, |
|
{ |
|
"epoch": 0.8344769344452655, |
|
"grad_norm": 0.8808048963546753, |
|
"learning_rate": 6.489389252651057e-06, |
|
"loss": 1.4776, |
|
"step": 23715 |
|
}, |
|
{ |
|
"epoch": 0.8355677539674161, |
|
"grad_norm": 0.8857665657997131, |
|
"learning_rate": 6.4330458928735325e-06, |
|
"loss": 1.4986, |
|
"step": 23746 |
|
}, |
|
{ |
|
"epoch": 0.8366585734895668, |
|
"grad_norm": 0.8763651847839355, |
|
"learning_rate": 6.376912058649559e-06, |
|
"loss": 1.4727, |
|
"step": 23777 |
|
}, |
|
{ |
|
"epoch": 0.8377493930117175, |
|
"grad_norm": 0.8747501969337463, |
|
"learning_rate": 6.320988383441845e-06, |
|
"loss": 1.4894, |
|
"step": 23808 |
|
}, |
|
{ |
|
"epoch": 0.8388402125338682, |
|
"grad_norm": 0.860714316368103, |
|
"learning_rate": 6.265275498341452e-06, |
|
"loss": 1.4767, |
|
"step": 23839 |
|
}, |
|
{ |
|
"epoch": 0.8399310320560188, |
|
"grad_norm": 0.8668991923332214, |
|
"learning_rate": 6.209774032060714e-06, |
|
"loss": 1.4834, |
|
"step": 23870 |
|
}, |
|
{ |
|
"epoch": 0.8410218515781696, |
|
"grad_norm": 0.8528944849967957, |
|
"learning_rate": 6.1544846109261365e-06, |
|
"loss": 1.4694, |
|
"step": 23901 |
|
}, |
|
{ |
|
"epoch": 0.8421126711003202, |
|
"grad_norm": 0.8633365631103516, |
|
"learning_rate": 6.099407858871342e-06, |
|
"loss": 1.493, |
|
"step": 23932 |
|
}, |
|
{ |
|
"epoch": 0.8432034906224709, |
|
"grad_norm": 0.8978642225265503, |
|
"learning_rate": 6.044544397429958e-06, |
|
"loss": 1.4812, |
|
"step": 23963 |
|
}, |
|
{ |
|
"epoch": 0.8442943101446215, |
|
"grad_norm": 0.8880563378334045, |
|
"learning_rate": 5.989894845728708e-06, |
|
"loss": 1.4707, |
|
"step": 23994 |
|
}, |
|
{ |
|
"epoch": 0.8453851296667723, |
|
"grad_norm": 0.8570977449417114, |
|
"learning_rate": 5.9354598204803605e-06, |
|
"loss": 1.4792, |
|
"step": 24025 |
|
}, |
|
{ |
|
"epoch": 0.8464759491889229, |
|
"grad_norm": 0.8557071685791016, |
|
"learning_rate": 5.881239935976762e-06, |
|
"loss": 1.4645, |
|
"step": 24056 |
|
}, |
|
{ |
|
"epoch": 0.8475667687110736, |
|
"grad_norm": 0.8857972025871277, |
|
"learning_rate": 5.827235804081954e-06, |
|
"loss": 1.4774, |
|
"step": 24087 |
|
}, |
|
{ |
|
"epoch": 0.8486575882332242, |
|
"grad_norm": 0.8797951936721802, |
|
"learning_rate": 5.773448034225221e-06, |
|
"loss": 1.472, |
|
"step": 24118 |
|
}, |
|
{ |
|
"epoch": 0.849748407755375, |
|
"grad_norm": 0.9085538983345032, |
|
"learning_rate": 5.719877233394228e-06, |
|
"loss": 1.493, |
|
"step": 24149 |
|
}, |
|
{ |
|
"epoch": 0.8508392272775256, |
|
"grad_norm": 0.8451566696166992, |
|
"learning_rate": 5.666524006128191e-06, |
|
"loss": 1.4836, |
|
"step": 24180 |
|
}, |
|
{ |
|
"epoch": 0.8519300467996763, |
|
"grad_norm": 0.8434094786643982, |
|
"learning_rate": 5.613388954511015e-06, |
|
"loss": 1.4696, |
|
"step": 24211 |
|
}, |
|
{ |
|
"epoch": 0.8530208663218269, |
|
"grad_norm": 0.8790538311004639, |
|
"learning_rate": 5.560472678164552e-06, |
|
"loss": 1.4776, |
|
"step": 24242 |
|
}, |
|
{ |
|
"epoch": 0.8541116858439777, |
|
"grad_norm": 0.8586615920066833, |
|
"learning_rate": 5.507775774241775e-06, |
|
"loss": 1.4761, |
|
"step": 24273 |
|
}, |
|
{ |
|
"epoch": 0.8552025053661283, |
|
"grad_norm": 0.8723267912864685, |
|
"learning_rate": 5.4552988374200945e-06, |
|
"loss": 1.4985, |
|
"step": 24304 |
|
}, |
|
{ |
|
"epoch": 0.8562933248882789, |
|
"grad_norm": 0.8562921285629272, |
|
"learning_rate": 5.403042459894597e-06, |
|
"loss": 1.4926, |
|
"step": 24335 |
|
}, |
|
{ |
|
"epoch": 0.8573841444104296, |
|
"grad_norm": 0.8985985517501831, |
|
"learning_rate": 5.3510072313714135e-06, |
|
"loss": 1.4866, |
|
"step": 24366 |
|
}, |
|
{ |
|
"epoch": 0.8584749639325803, |
|
"grad_norm": 0.8635309934616089, |
|
"learning_rate": 5.2991937390610205e-06, |
|
"loss": 1.4769, |
|
"step": 24397 |
|
}, |
|
{ |
|
"epoch": 0.859565783454731, |
|
"grad_norm": 0.8855069875717163, |
|
"learning_rate": 5.247602567671625e-06, |
|
"loss": 1.4879, |
|
"step": 24428 |
|
}, |
|
{ |
|
"epoch": 0.8606566029768816, |
|
"grad_norm": 0.8451640605926514, |
|
"learning_rate": 5.196234299402603e-06, |
|
"loss": 1.4658, |
|
"step": 24459 |
|
}, |
|
{ |
|
"epoch": 0.8617474224990324, |
|
"grad_norm": 0.8517320156097412, |
|
"learning_rate": 5.145089513937865e-06, |
|
"loss": 1.4812, |
|
"step": 24490 |
|
}, |
|
{ |
|
"epoch": 0.862838242021183, |
|
"grad_norm": 0.8421504497528076, |
|
"learning_rate": 5.094168788439369e-06, |
|
"loss": 1.4678, |
|
"step": 24521 |
|
}, |
|
{ |
|
"epoch": 0.8639290615433337, |
|
"grad_norm": 0.8663870096206665, |
|
"learning_rate": 5.043472697540594e-06, |
|
"loss": 1.4621, |
|
"step": 24552 |
|
}, |
|
{ |
|
"epoch": 0.8650198810654843, |
|
"grad_norm": 0.8404938578605652, |
|
"learning_rate": 4.993001813340012e-06, |
|
"loss": 1.4847, |
|
"step": 24583 |
|
}, |
|
{ |
|
"epoch": 0.8661107005876351, |
|
"grad_norm": 0.8644596338272095, |
|
"learning_rate": 4.942756705394702e-06, |
|
"loss": 1.4696, |
|
"step": 24614 |
|
}, |
|
{ |
|
"epoch": 0.8672015201097857, |
|
"grad_norm": 0.8497793674468994, |
|
"learning_rate": 4.892737940713884e-06, |
|
"loss": 1.4885, |
|
"step": 24645 |
|
}, |
|
{ |
|
"epoch": 0.8682923396319364, |
|
"grad_norm": 0.8994928598403931, |
|
"learning_rate": 4.842946083752511e-06, |
|
"loss": 1.4637, |
|
"step": 24676 |
|
}, |
|
{ |
|
"epoch": 0.869383159154087, |
|
"grad_norm": 0.8935580849647522, |
|
"learning_rate": 4.79338169640493e-06, |
|
"loss": 1.4833, |
|
"step": 24707 |
|
}, |
|
{ |
|
"epoch": 0.8704739786762378, |
|
"grad_norm": 0.8669634461402893, |
|
"learning_rate": 4.74404533799851e-06, |
|
"loss": 1.4759, |
|
"step": 24738 |
|
}, |
|
{ |
|
"epoch": 0.8715647981983884, |
|
"grad_norm": 0.8927462100982666, |
|
"learning_rate": 4.694937565287344e-06, |
|
"loss": 1.4846, |
|
"step": 24769 |
|
}, |
|
{ |
|
"epoch": 0.8726556177205391, |
|
"grad_norm": 0.8816105127334595, |
|
"learning_rate": 4.646058932445985e-06, |
|
"loss": 1.4674, |
|
"step": 24800 |
|
}, |
|
{ |
|
"epoch": 0.8737464372426897, |
|
"grad_norm": 0.8687214851379395, |
|
"learning_rate": 4.597409991063148e-06, |
|
"loss": 1.4809, |
|
"step": 24831 |
|
}, |
|
{ |
|
"epoch": 0.8748372567648405, |
|
"grad_norm": 0.8640971183776855, |
|
"learning_rate": 4.5489912901355375e-06, |
|
"loss": 1.4476, |
|
"step": 24862 |
|
}, |
|
{ |
|
"epoch": 0.8759280762869911, |
|
"grad_norm": 0.8814966082572937, |
|
"learning_rate": 4.500803376061608e-06, |
|
"loss": 1.474, |
|
"step": 24893 |
|
}, |
|
{ |
|
"epoch": 0.8770188958091417, |
|
"grad_norm": 0.8621838092803955, |
|
"learning_rate": 4.45284679263541e-06, |
|
"loss": 1.4672, |
|
"step": 24924 |
|
}, |
|
{ |
|
"epoch": 0.8781097153312925, |
|
"grad_norm": 0.8565123081207275, |
|
"learning_rate": 4.4051220810404775e-06, |
|
"loss": 1.481, |
|
"step": 24955 |
|
}, |
|
{ |
|
"epoch": 0.8792005348534431, |
|
"grad_norm": 0.8686378002166748, |
|
"learning_rate": 4.3576297798437025e-06, |
|
"loss": 1.4758, |
|
"step": 24986 |
|
}, |
|
{ |
|
"epoch": 0.8802913543755938, |
|
"grad_norm": 0.8388208150863647, |
|
"learning_rate": 4.3103704249892436e-06, |
|
"loss": 1.4649, |
|
"step": 25017 |
|
}, |
|
{ |
|
"epoch": 0.8813821738977444, |
|
"grad_norm": 0.8457533121109009, |
|
"learning_rate": 4.263344549792487e-06, |
|
"loss": 1.4858, |
|
"step": 25048 |
|
}, |
|
{ |
|
"epoch": 0.8824729934198952, |
|
"grad_norm": 0.8637520670890808, |
|
"learning_rate": 4.216552684934056e-06, |
|
"loss": 1.4781, |
|
"step": 25079 |
|
}, |
|
{ |
|
"epoch": 0.8835638129420458, |
|
"grad_norm": 0.8748880624771118, |
|
"learning_rate": 4.169995358453777e-06, |
|
"loss": 1.4826, |
|
"step": 25110 |
|
}, |
|
{ |
|
"epoch": 0.8846546324641965, |
|
"grad_norm": 0.920896589756012, |
|
"learning_rate": 4.123673095744757e-06, |
|
"loss": 1.4707, |
|
"step": 25141 |
|
}, |
|
{ |
|
"epoch": 0.8857454519863471, |
|
"grad_norm": 0.880017876625061, |
|
"learning_rate": 4.077586419547435e-06, |
|
"loss": 1.4727, |
|
"step": 25172 |
|
}, |
|
{ |
|
"epoch": 0.8868362715084979, |
|
"grad_norm": 0.9001519680023193, |
|
"learning_rate": 4.03173584994368e-06, |
|
"loss": 1.4822, |
|
"step": 25203 |
|
}, |
|
{ |
|
"epoch": 0.8879270910306485, |
|
"grad_norm": 0.8996759653091431, |
|
"learning_rate": 3.986121904350948e-06, |
|
"loss": 1.4839, |
|
"step": 25234 |
|
}, |
|
{ |
|
"epoch": 0.8890179105527992, |
|
"grad_norm": 0.8671228885650635, |
|
"learning_rate": 3.940745097516407e-06, |
|
"loss": 1.4715, |
|
"step": 25265 |
|
}, |
|
{ |
|
"epoch": 0.8901087300749498, |
|
"grad_norm": 0.8500885963439941, |
|
"learning_rate": 3.89560594151116e-06, |
|
"loss": 1.4731, |
|
"step": 25296 |
|
}, |
|
{ |
|
"epoch": 0.8911995495971006, |
|
"grad_norm": 0.8616065979003906, |
|
"learning_rate": 3.850704945724456e-06, |
|
"loss": 1.4727, |
|
"step": 25327 |
|
}, |
|
{ |
|
"epoch": 0.8922903691192512, |
|
"grad_norm": 0.9085158109664917, |
|
"learning_rate": 3.8060426168579077e-06, |
|
"loss": 1.4756, |
|
"step": 25358 |
|
}, |
|
{ |
|
"epoch": 0.8933811886414019, |
|
"grad_norm": 0.8764066100120544, |
|
"learning_rate": 3.7616194589198407e-06, |
|
"loss": 1.4914, |
|
"step": 25389 |
|
}, |
|
{ |
|
"epoch": 0.8944720081635525, |
|
"grad_norm": 0.8965243101119995, |
|
"learning_rate": 3.7174359732195574e-06, |
|
"loss": 1.4795, |
|
"step": 25420 |
|
}, |
|
{ |
|
"epoch": 0.8955628276857033, |
|
"grad_norm": 0.889213502407074, |
|
"learning_rate": 3.673492658361677e-06, |
|
"loss": 1.48, |
|
"step": 25451 |
|
}, |
|
{ |
|
"epoch": 0.8966536472078539, |
|
"grad_norm": 0.8793845772743225, |
|
"learning_rate": 3.6297900102405467e-06, |
|
"loss": 1.4674, |
|
"step": 25482 |
|
}, |
|
{ |
|
"epoch": 0.8977444667300045, |
|
"grad_norm": 0.8472430109977722, |
|
"learning_rate": 3.586328522034607e-06, |
|
"loss": 1.4944, |
|
"step": 25513 |
|
}, |
|
{ |
|
"epoch": 0.8988352862521553, |
|
"grad_norm": 0.8861201405525208, |
|
"learning_rate": 3.543108684200838e-06, |
|
"loss": 1.4778, |
|
"step": 25544 |
|
}, |
|
{ |
|
"epoch": 0.8999261057743059, |
|
"grad_norm": 0.8611108064651489, |
|
"learning_rate": 3.5001309844692464e-06, |
|
"loss": 1.4599, |
|
"step": 25575 |
|
}, |
|
{ |
|
"epoch": 0.9010169252964566, |
|
"grad_norm": 0.8657307624816895, |
|
"learning_rate": 3.4573959078373215e-06, |
|
"loss": 1.473, |
|
"step": 25606 |
|
}, |
|
{ |
|
"epoch": 0.9021077448186072, |
|
"grad_norm": 0.8999704122543335, |
|
"learning_rate": 3.4149039365646063e-06, |
|
"loss": 1.4805, |
|
"step": 25637 |
|
}, |
|
{ |
|
"epoch": 0.903198564340758, |
|
"grad_norm": 0.8825884461402893, |
|
"learning_rate": 3.3726555501672143e-06, |
|
"loss": 1.4845, |
|
"step": 25668 |
|
}, |
|
{ |
|
"epoch": 0.9042893838629086, |
|
"grad_norm": 0.9047882556915283, |
|
"learning_rate": 3.33065122541244e-06, |
|
"loss": 1.487, |
|
"step": 25699 |
|
}, |
|
{ |
|
"epoch": 0.9053802033850593, |
|
"grad_norm": 0.8389387130737305, |
|
"learning_rate": 3.288891436313385e-06, |
|
"loss": 1.4695, |
|
"step": 25730 |
|
}, |
|
{ |
|
"epoch": 0.9064710229072099, |
|
"grad_norm": 0.8764780163764954, |
|
"learning_rate": 3.2473766541235963e-06, |
|
"loss": 1.4967, |
|
"step": 25761 |
|
}, |
|
{ |
|
"epoch": 0.9075618424293607, |
|
"grad_norm": 0.8840581774711609, |
|
"learning_rate": 3.2061073473317466e-06, |
|
"loss": 1.471, |
|
"step": 25792 |
|
}, |
|
{ |
|
"epoch": 0.9086526619515113, |
|
"grad_norm": 0.8746303915977478, |
|
"learning_rate": 3.1650839816563444e-06, |
|
"loss": 1.4801, |
|
"step": 25823 |
|
}, |
|
{ |
|
"epoch": 0.909743481473662, |
|
"grad_norm": 0.8652745485305786, |
|
"learning_rate": 3.1243070200405093e-06, |
|
"loss": 1.4726, |
|
"step": 25854 |
|
}, |
|
{ |
|
"epoch": 0.9108343009958126, |
|
"grad_norm": 0.8579427599906921, |
|
"learning_rate": 3.0837769226467e-06, |
|
"loss": 1.4756, |
|
"step": 25885 |
|
}, |
|
{ |
|
"epoch": 0.9119251205179634, |
|
"grad_norm": 0.8902285099029541, |
|
"learning_rate": 3.0434941468515666e-06, |
|
"loss": 1.47, |
|
"step": 25916 |
|
}, |
|
{ |
|
"epoch": 0.913015940040114, |
|
"grad_norm": 0.8812154531478882, |
|
"learning_rate": 3.003459147240753e-06, |
|
"loss": 1.4857, |
|
"step": 25947 |
|
}, |
|
{ |
|
"epoch": 0.9141067595622647, |
|
"grad_norm": 0.8862067461013794, |
|
"learning_rate": 2.9636723756037875e-06, |
|
"loss": 1.4696, |
|
"step": 25978 |
|
}, |
|
{ |
|
"epoch": 0.9151975790844153, |
|
"grad_norm": 0.8698602914810181, |
|
"learning_rate": 2.9241342809289833e-06, |
|
"loss": 1.4682, |
|
"step": 26009 |
|
}, |
|
{ |
|
"epoch": 0.9162883986065661, |
|
"grad_norm": 0.859427809715271, |
|
"learning_rate": 2.8848453093983594e-06, |
|
"loss": 1.4713, |
|
"step": 26040 |
|
}, |
|
{ |
|
"epoch": 0.9173792181287167, |
|
"grad_norm": 0.8523387908935547, |
|
"learning_rate": 2.8458059043826257e-06, |
|
"loss": 1.4689, |
|
"step": 26071 |
|
}, |
|
{ |
|
"epoch": 0.9184700376508674, |
|
"grad_norm": 0.8701951503753662, |
|
"learning_rate": 2.807016506436172e-06, |
|
"loss": 1.4703, |
|
"step": 26102 |
|
}, |
|
{ |
|
"epoch": 0.919560857173018, |
|
"grad_norm": 0.8645933866500854, |
|
"learning_rate": 2.7684775532920566e-06, |
|
"loss": 1.4759, |
|
"step": 26133 |
|
}, |
|
{ |
|
"epoch": 0.9206516766951687, |
|
"grad_norm": 0.883593738079071, |
|
"learning_rate": 2.7301894798571425e-06, |
|
"loss": 1.4789, |
|
"step": 26164 |
|
}, |
|
{ |
|
"epoch": 0.9217424962173194, |
|
"grad_norm": 0.8780304789543152, |
|
"learning_rate": 2.6921527182071386e-06, |
|
"loss": 1.4669, |
|
"step": 26195 |
|
}, |
|
{ |
|
"epoch": 0.92283331573947, |
|
"grad_norm": 0.8663890361785889, |
|
"learning_rate": 2.654367697581725e-06, |
|
"loss": 1.4775, |
|
"step": 26226 |
|
}, |
|
{ |
|
"epoch": 0.9239241352616208, |
|
"grad_norm": 0.879367470741272, |
|
"learning_rate": 2.6168348443797175e-06, |
|
"loss": 1.4906, |
|
"step": 26257 |
|
}, |
|
{ |
|
"epoch": 0.9250149547837714, |
|
"grad_norm": 0.8769102096557617, |
|
"learning_rate": 2.5795545821542757e-06, |
|
"loss": 1.4632, |
|
"step": 26288 |
|
}, |
|
{ |
|
"epoch": 0.9261057743059221, |
|
"grad_norm": 0.8605228662490845, |
|
"learning_rate": 2.54252733160808e-06, |
|
"loss": 1.4729, |
|
"step": 26319 |
|
}, |
|
{ |
|
"epoch": 0.9271965938280727, |
|
"grad_norm": 0.8758933544158936, |
|
"learning_rate": 2.5057535105886294e-06, |
|
"loss": 1.4971, |
|
"step": 26350 |
|
}, |
|
{ |
|
"epoch": 0.9282874133502235, |
|
"grad_norm": 0.8632429242134094, |
|
"learning_rate": 2.4692335340834953e-06, |
|
"loss": 1.4876, |
|
"step": 26381 |
|
}, |
|
{ |
|
"epoch": 0.9293782328723741, |
|
"grad_norm": 0.8739559054374695, |
|
"learning_rate": 2.432967814215639e-06, |
|
"loss": 1.4797, |
|
"step": 26412 |
|
}, |
|
{ |
|
"epoch": 0.9304690523945248, |
|
"grad_norm": 0.898240864276886, |
|
"learning_rate": 2.396956760238794e-06, |
|
"loss": 1.4764, |
|
"step": 26443 |
|
}, |
|
{ |
|
"epoch": 0.9315598719166754, |
|
"grad_norm": 0.8729254603385925, |
|
"learning_rate": 2.361200778532796e-06, |
|
"loss": 1.4736, |
|
"step": 26474 |
|
}, |
|
{ |
|
"epoch": 0.9326506914388262, |
|
"grad_norm": 0.9099497199058533, |
|
"learning_rate": 2.325700272599049e-06, |
|
"loss": 1.4698, |
|
"step": 26505 |
|
}, |
|
{ |
|
"epoch": 0.9337415109609768, |
|
"grad_norm": 0.8835102319717407, |
|
"learning_rate": 2.2904556430559415e-06, |
|
"loss": 1.4838, |
|
"step": 26536 |
|
}, |
|
{ |
|
"epoch": 0.9348323304831275, |
|
"grad_norm": 0.856787919998169, |
|
"learning_rate": 2.2554672876343106e-06, |
|
"loss": 1.484, |
|
"step": 26567 |
|
}, |
|
{ |
|
"epoch": 0.9359231500052781, |
|
"grad_norm": 0.8656269311904907, |
|
"learning_rate": 2.220735601173002e-06, |
|
"loss": 1.4923, |
|
"step": 26598 |
|
}, |
|
{ |
|
"epoch": 0.9370139695274289, |
|
"grad_norm": 0.8644323945045471, |
|
"learning_rate": 2.186260975614382e-06, |
|
"loss": 1.4754, |
|
"step": 26629 |
|
}, |
|
{ |
|
"epoch": 0.9381047890495795, |
|
"grad_norm": 0.8761284351348877, |
|
"learning_rate": 2.1520437999999034e-06, |
|
"loss": 1.4775, |
|
"step": 26660 |
|
}, |
|
{ |
|
"epoch": 0.9391956085717302, |
|
"grad_norm": 0.8988614678382874, |
|
"learning_rate": 2.1180844604657526e-06, |
|
"loss": 1.4579, |
|
"step": 26691 |
|
}, |
|
{ |
|
"epoch": 0.9402864280938809, |
|
"grad_norm": 0.8772773742675781, |
|
"learning_rate": 2.084383340238455e-06, |
|
"loss": 1.4808, |
|
"step": 26722 |
|
}, |
|
{ |
|
"epoch": 0.9413772476160315, |
|
"grad_norm": 0.8579154014587402, |
|
"learning_rate": 2.0509408196305704e-06, |
|
"loss": 1.4722, |
|
"step": 26753 |
|
}, |
|
{ |
|
"epoch": 0.9424680671381822, |
|
"grad_norm": 0.8794492483139038, |
|
"learning_rate": 2.017757276036403e-06, |
|
"loss": 1.4794, |
|
"step": 26784 |
|
}, |
|
{ |
|
"epoch": 0.9435588866603328, |
|
"grad_norm": 0.8876990675926208, |
|
"learning_rate": 1.984833083927726e-06, |
|
"loss": 1.4942, |
|
"step": 26815 |
|
}, |
|
{ |
|
"epoch": 0.9446497061824836, |
|
"grad_norm": 0.8663929104804993, |
|
"learning_rate": 1.952168614849581e-06, |
|
"loss": 1.4749, |
|
"step": 26846 |
|
}, |
|
{ |
|
"epoch": 0.9457405257046342, |
|
"grad_norm": 0.8618311882019043, |
|
"learning_rate": 1.919764237416058e-06, |
|
"loss": 1.4683, |
|
"step": 26877 |
|
}, |
|
{ |
|
"epoch": 0.9468313452267849, |
|
"grad_norm": 0.8830083012580872, |
|
"learning_rate": 1.8876203173061463e-06, |
|
"loss": 1.4686, |
|
"step": 26908 |
|
}, |
|
{ |
|
"epoch": 0.9479221647489355, |
|
"grad_norm": 0.8856537342071533, |
|
"learning_rate": 1.8557372172596206e-06, |
|
"loss": 1.4712, |
|
"step": 26939 |
|
}, |
|
{ |
|
"epoch": 0.9490129842710863, |
|
"grad_norm": 0.857903778553009, |
|
"learning_rate": 1.8241152970729341e-06, |
|
"loss": 1.4765, |
|
"step": 26970 |
|
}, |
|
{ |
|
"epoch": 0.9501038037932369, |
|
"grad_norm": 0.8622974753379822, |
|
"learning_rate": 1.7927549135951572e-06, |
|
"loss": 1.4674, |
|
"step": 27001 |
|
}, |
|
{ |
|
"epoch": 0.9511946233153876, |
|
"grad_norm": 0.8996565341949463, |
|
"learning_rate": 1.7616564207239477e-06, |
|
"loss": 1.4731, |
|
"step": 27032 |
|
}, |
|
{ |
|
"epoch": 0.9522854428375382, |
|
"grad_norm": 0.8489288091659546, |
|
"learning_rate": 1.730820169401584e-06, |
|
"loss": 1.4804, |
|
"step": 27063 |
|
}, |
|
{ |
|
"epoch": 0.953376262359689, |
|
"grad_norm": 0.8844205141067505, |
|
"learning_rate": 1.7002465076109558e-06, |
|
"loss": 1.4579, |
|
"step": 27094 |
|
}, |
|
{ |
|
"epoch": 0.9544670818818396, |
|
"grad_norm": 0.8494292497634888, |
|
"learning_rate": 1.6699357803716898e-06, |
|
"loss": 1.4798, |
|
"step": 27125 |
|
}, |
|
{ |
|
"epoch": 0.9555579014039903, |
|
"grad_norm": 0.872955322265625, |
|
"learning_rate": 1.6398883297362305e-06, |
|
"loss": 1.4595, |
|
"step": 27156 |
|
}, |
|
{ |
|
"epoch": 0.956648720926141, |
|
"grad_norm": 0.9201899766921997, |
|
"learning_rate": 1.6101044947859606e-06, |
|
"loss": 1.4847, |
|
"step": 27187 |
|
}, |
|
{ |
|
"epoch": 0.9577395404482917, |
|
"grad_norm": 0.8855260014533997, |
|
"learning_rate": 1.5805846116274114e-06, |
|
"loss": 1.4802, |
|
"step": 27218 |
|
}, |
|
{ |
|
"epoch": 0.9588303599704423, |
|
"grad_norm": 0.862812340259552, |
|
"learning_rate": 1.5513290133884611e-06, |
|
"loss": 1.4841, |
|
"step": 27249 |
|
}, |
|
{ |
|
"epoch": 0.959921179492593, |
|
"grad_norm": 0.8608940839767456, |
|
"learning_rate": 1.5223380302145512e-06, |
|
"loss": 1.4747, |
|
"step": 27280 |
|
}, |
|
{ |
|
"epoch": 0.9610119990147437, |
|
"grad_norm": 0.8537495136260986, |
|
"learning_rate": 1.4936119892649925e-06, |
|
"loss": 1.4742, |
|
"step": 27311 |
|
}, |
|
{ |
|
"epoch": 0.9621028185368943, |
|
"grad_norm": 0.8615581393241882, |
|
"learning_rate": 1.4651512147092482e-06, |
|
"loss": 1.4616, |
|
"step": 27342 |
|
}, |
|
{ |
|
"epoch": 0.963193638059045, |
|
"grad_norm": 0.8731122612953186, |
|
"learning_rate": 1.4369560277232908e-06, |
|
"loss": 1.4677, |
|
"step": 27373 |
|
}, |
|
{ |
|
"epoch": 0.9642844575811956, |
|
"grad_norm": 0.8720675110816956, |
|
"learning_rate": 1.409026746485978e-06, |
|
"loss": 1.4828, |
|
"step": 27404 |
|
}, |
|
{ |
|
"epoch": 0.9653752771033464, |
|
"grad_norm": 0.887996256351471, |
|
"learning_rate": 1.3813636861754464e-06, |
|
"loss": 1.4753, |
|
"step": 27435 |
|
}, |
|
{ |
|
"epoch": 0.966466096625497, |
|
"grad_norm": 0.8772040009498596, |
|
"learning_rate": 1.3539671589655773e-06, |
|
"loss": 1.459, |
|
"step": 27466 |
|
} |
|
], |
|
"logging_steps": 31, |
|
"max_steps": 30517, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 3052, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.9185168775775257e+19, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |