{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9983579638752053,
  "eval_steps": 500,
  "global_step": 456,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0021893814997263274,
      "grad_norm": 1.099035580545203,
      "learning_rate": 4.347826086956522e-06,
      "loss": 1.3884,
      "step": 1
    },
    {
      "epoch": 0.010946907498631636,
      "grad_norm": 1.0633282817267398,
      "learning_rate": 2.173913043478261e-05,
      "loss": 1.3896,
      "step": 5
    },
    {
      "epoch": 0.021893814997263273,
      "grad_norm": 0.2753570266695847,
      "learning_rate": 4.347826086956522e-05,
      "loss": 1.3001,
      "step": 10
    },
    {
      "epoch": 0.03284072249589491,
      "grad_norm": 0.23486140029795313,
      "learning_rate": 6.521739130434783e-05,
      "loss": 1.2641,
      "step": 15
    },
    {
      "epoch": 0.043787629994526546,
      "grad_norm": 0.17661059915753233,
      "learning_rate": 8.695652173913044e-05,
      "loss": 1.244,
      "step": 20
    },
    {
      "epoch": 0.05473453749315818,
      "grad_norm": 0.13588516740211107,
      "learning_rate": 0.00010869565217391305,
      "loss": 1.2062,
      "step": 25
    },
    {
      "epoch": 0.06568144499178982,
      "grad_norm": 0.09952038895662578,
      "learning_rate": 0.00013043478260869567,
      "loss": 1.2036,
      "step": 30
    },
    {
      "epoch": 0.07662835249042145,
      "grad_norm": 0.08662921860732123,
      "learning_rate": 0.00015217391304347827,
      "loss": 1.1697,
      "step": 35
    },
    {
      "epoch": 0.08757525998905309,
      "grad_norm": 0.08556102174203851,
      "learning_rate": 0.00017391304347826088,
      "loss": 1.168,
      "step": 40
    },
    {
      "epoch": 0.09852216748768473,
      "grad_norm": 0.08239874598331373,
      "learning_rate": 0.0001956521739130435,
      "loss": 1.1752,
      "step": 45
    },
    {
      "epoch": 0.10946907498631636,
      "grad_norm": 0.08011130479266197,
      "learning_rate": 0.0001999530335191093,
      "loss": 1.1317,
      "step": 50
    },
    {
      "epoch": 0.120415982484948,
      "grad_norm": 0.09091690871262173,
      "learning_rate": 0.00019976230779866525,
      "loss": 1.1235,
      "step": 55
    },
    {
      "epoch": 0.13136288998357964,
      "grad_norm": 0.10018228425429193,
      "learning_rate": 0.000199425167134466,
      "loss": 1.14,
      "step": 60
    },
    {
      "epoch": 0.1423097974822113,
      "grad_norm": 0.08568976262387419,
      "learning_rate": 0.00019894210632692745,
      "loss": 1.1184,
      "step": 65
    },
    {
      "epoch": 0.1532567049808429,
      "grad_norm": 0.09058910078299229,
      "learning_rate": 0.00019831383433439797,
      "loss": 1.1182,
      "step": 70
    },
    {
      "epoch": 0.16420361247947454,
      "grad_norm": 0.07075651635331383,
      "learning_rate": 0.00019754127323266428,
      "loss": 1.1107,
      "step": 75
    },
    {
      "epoch": 0.17515051997810618,
      "grad_norm": 0.08205972533630575,
      "learning_rate": 0.00019662555686167808,
      "loss": 1.1146,
      "step": 80
    },
    {
      "epoch": 0.18609742747673783,
      "grad_norm": 0.09822244044433728,
      "learning_rate": 0.00019556802916148962,
      "loss": 1.1206,
      "step": 85
    },
    {
      "epoch": 0.19704433497536947,
      "grad_norm": 0.08491467009411377,
      "learning_rate": 0.00019437024219983028,
      "loss": 1.1091,
      "step": 90
    },
    {
      "epoch": 0.20799124247400108,
      "grad_norm": 0.08166420957630437,
      "learning_rate": 0.00019303395389423918,
      "loss": 1.1115,
      "step": 95
    },
    {
      "epoch": 0.21893814997263272,
      "grad_norm": 0.08260201243872915,
      "learning_rate": 0.00019156112543207673,
      "loss": 1.1175,
      "step": 100
    },
    {
      "epoch": 0.22988505747126436,
      "grad_norm": 0.08255167959693703,
      "learning_rate": 0.0001899539183922119,
      "loss": 1.0968,
      "step": 105
    },
    {
      "epoch": 0.240831964969896,
      "grad_norm": 0.07836100445915845,
      "learning_rate": 0.00018821469157260685,
      "loss": 1.0888,
      "step": 110
    },
    {
      "epoch": 0.25177887246852765,
      "grad_norm": 0.07207251942982666,
      "learning_rate": 0.00018634599752845592,
      "loss": 1.1074,
      "step": 115
    },
    {
      "epoch": 0.2627257799671593,
      "grad_norm": 0.08720514170327426,
      "learning_rate": 0.00018435057882595882,
      "loss": 1.1052,
      "step": 120
    },
    {
      "epoch": 0.27367268746579093,
      "grad_norm": 0.0853293654672411,
      "learning_rate": 0.0001822313640172265,
      "loss": 1.0952,
      "step": 125
    },
    {
      "epoch": 0.2846195949644226,
      "grad_norm": 0.09875940431911555,
      "learning_rate": 0.00017999146334222695,
      "loss": 1.1227,
      "step": 130
    },
    {
      "epoch": 0.2955665024630542,
      "grad_norm": 0.07021884431606892,
      "learning_rate": 0.00017763416416407952,
      "loss": 1.0942,
      "step": 135
    },
    {
      "epoch": 0.3065134099616858,
      "grad_norm": 0.06842699832201858,
      "learning_rate": 0.00017516292614439585,
      "loss": 1.0953,
      "step": 140
    },
    {
      "epoch": 0.31746031746031744,
      "grad_norm": 0.07208694079619522,
      "learning_rate": 0.0001725813761657495,
      "loss": 1.0967,
      "step": 145
    },
    {
      "epoch": 0.3284072249589491,
      "grad_norm": 0.06921789319582454,
      "learning_rate": 0.00016989330300872576,
      "loss": 1.0801,
      "step": 150
    },
    {
      "epoch": 0.3393541324575807,
      "grad_norm": 0.07271469189791151,
      "learning_rate": 0.0001671026517913634,
      "loss": 1.0833,
      "step": 155
    },
    {
      "epoch": 0.35030103995621237,
      "grad_norm": 0.06997352100806448,
      "learning_rate": 0.00016421351817915024,
      "loss": 1.0965,
      "step": 160
    },
    {
      "epoch": 0.361247947454844,
      "grad_norm": 0.07856963780328721,
      "learning_rate": 0.00016123014237406913,
      "loss": 1.1091,
      "step": 165
    },
    {
      "epoch": 0.37219485495347565,
      "grad_norm": 0.07209466893505154,
      "learning_rate": 0.00015815690289151658,
      "loss": 1.0899,
      "step": 170
    },
    {
      "epoch": 0.3831417624521073,
      "grad_norm": 0.07481315197731042,
      "learning_rate": 0.00015499831013422803,
      "loss": 1.1041,
      "step": 175
    },
    {
      "epoch": 0.39408866995073893,
      "grad_norm": 0.08084183121949147,
      "learning_rate": 0.00015175899977263964,
      "loss": 1.0955,
      "step": 180
    },
    {
      "epoch": 0.4050355774493706,
      "grad_norm": 0.0818129087053093,
      "learning_rate": 0.0001484437259414027,
      "loss": 1.1145,
      "step": 185
    },
    {
      "epoch": 0.41598248494800216,
      "grad_norm": 0.06950621743678526,
      "learning_rate": 0.00014505735426203543,
      "loss": 1.1149,
      "step": 190
    },
    {
      "epoch": 0.4269293924466338,
      "grad_norm": 0.07548210193666667,
      "learning_rate": 0.00014160485470195245,
      "loss": 1.11,
      "step": 195
    },
    {
      "epoch": 0.43787629994526545,
      "grad_norm": 0.07058243846026176,
      "learning_rate": 0.00013809129428035227,
      "loss": 1.0846,
      "step": 200
    },
    {
      "epoch": 0.4488232074438971,
      "grad_norm": 0.07020891379155109,
      "learning_rate": 0.00013452182963166794,
      "loss": 1.0848,
      "step": 205
    },
    {
      "epoch": 0.45977011494252873,
      "grad_norm": 0.06835760784196979,
      "learning_rate": 0.00013090169943749476,
      "loss": 1.0851,
      "step": 210
    },
    {
      "epoch": 0.47071702244116037,
      "grad_norm": 0.07605646093102703,
      "learning_rate": 0.00012723621673810275,
      "loss": 1.1047,
      "step": 215
    },
    {
      "epoch": 0.481663929939792,
      "grad_norm": 0.0705995475810648,
      "learning_rate": 0.0001235307611348174,
      "loss": 1.0808,
      "step": 220
    },
    {
      "epoch": 0.49261083743842365,
      "grad_norm": 0.07066082603796292,
      "learning_rate": 0.00011979077089471287,
      "loss": 1.0823,
      "step": 225
    },
    {
      "epoch": 0.5035577449370553,
      "grad_norm": 0.06843040733212367,
      "learning_rate": 0.0001160217349692051,
      "loss": 1.093,
      "step": 230
    },
    {
      "epoch": 0.5145046524356869,
      "grad_norm": 0.07581398076733512,
      "learning_rate": 0.00011222918493825876,
      "loss": 1.0799,
      "step": 235
    },
    {
      "epoch": 0.5254515599343186,
      "grad_norm": 0.06701479053423436,
      "learning_rate": 0.00010841868689203071,
      "loss": 1.0984,
      "step": 240
    },
    {
      "epoch": 0.5363984674329502,
      "grad_norm": 0.06919851726403416,
      "learning_rate": 0.00010459583326186533,
      "loss": 1.0919,
      "step": 245
    },
    {
      "epoch": 0.5473453749315819,
      "grad_norm": 0.07632131476135244,
      "learning_rate": 0.00010076623461263018,
      "loss": 1.0801,
      "step": 250
    },
    {
      "epoch": 0.5582922824302134,
      "grad_norm": 0.0661733510360967,
      "learning_rate": 9.693551140843847e-05,
      "loss": 1.0824,
      "step": 255
    },
    {
      "epoch": 0.5692391899288451,
      "grad_norm": 0.06851059826679573,
      "learning_rate": 9.310928576384293e-05,
      "loss": 1.0956,
      "step": 260
    },
    {
      "epoch": 0.5801860974274767,
      "grad_norm": 0.07225957964642454,
      "learning_rate": 8.929317319260726e-05,
      "loss": 1.0818,
      "step": 265
    },
    {
      "epoch": 0.5911330049261084,
      "grad_norm": 0.07526177174581719,
      "learning_rate": 8.549277436616551e-05,
      "loss": 1.0876,
      "step": 270
    },
    {
      "epoch": 0.60207991242474,
      "grad_norm": 0.0688828763506549,
      "learning_rate": 8.171366689386432e-05,
      "loss": 1.0834,
      "step": 275
    },
    {
      "epoch": 0.6130268199233716,
      "grad_norm": 0.07233185605708406,
      "learning_rate": 7.796139713705213e-05,
      "loss": 1.0904,
      "step": 280
    },
    {
      "epoch": 0.6239737274220033,
      "grad_norm": 0.07238689140745656,
      "learning_rate": 7.424147206902891e-05,
      "loss": 1.078,
      "step": 285
    },
    {
      "epoch": 0.6349206349206349,
      "grad_norm": 0.07045252374699128,
      "learning_rate": 7.055935119280369e-05,
      "loss": 1.0746,
      "step": 290
    },
    {
      "epoch": 0.6458675424192666,
      "grad_norm": 0.06885925177718841,
      "learning_rate": 6.692043852852079e-05,
      "loss": 1.0914,
      "step": 295
    },
    {
      "epoch": 0.6568144499178982,
      "grad_norm": 0.0714964117404916,
      "learning_rate": 6.333007468231521e-05,
      "loss": 1.0829,
      "step": 300
    },
    {
      "epoch": 0.6677613574165299,
      "grad_norm": 0.0670654539976087,
      "learning_rate": 5.9793529008236625e-05,
      "loss": 1.0756,
      "step": 305
    },
    {
      "epoch": 0.6787082649151615,
      "grad_norm": 0.06833244959359351,
      "learning_rate": 5.63159918747457e-05,
      "loss": 1.0779,
      "step": 310
    },
    {
      "epoch": 0.6896551724137931,
      "grad_norm": 0.07144684011257411,
      "learning_rate": 5.29025670471325e-05,
      "loss": 1.0983,
      "step": 315
    },
    {
      "epoch": 0.7006020799124247,
      "grad_norm": 0.07142805023654634,
      "learning_rate": 4.955826419703735e-05,
      "loss": 1.0867,
      "step": 320
    },
    {
      "epoch": 0.7115489874110563,
      "grad_norm": 0.07674467638840121,
      "learning_rate": 4.628799155006669e-05,
      "loss": 1.0651,
      "step": 325
    },
    {
      "epoch": 0.722495894909688,
      "grad_norm": 0.06634612204586297,
      "learning_rate": 4.30965486822953e-05,
      "loss": 1.0938,
      "step": 330
    },
    {
      "epoch": 0.7334428024083196,
      "grad_norm": 0.06579811491660853,
      "learning_rate": 3.9988619476226355e-05,
      "loss": 1.0781,
      "step": 335
    },
    {
      "epoch": 0.7443897099069513,
      "grad_norm": 0.06839286253175804,
      "learning_rate": 3.69687652465482e-05,
      "loss": 1.0807,
      "step": 340
    },
    {
      "epoch": 0.7553366174055829,
      "grad_norm": 0.06644270765658328,
      "learning_rate": 3.4041418045775895e-05,
      "loss": 1.074,
      "step": 345
    },
    {
      "epoch": 0.7662835249042146,
      "grad_norm": 0.06772025228639957,
      "learning_rate": 3.121087415960304e-05,
      "loss": 1.0762,
      "step": 350
    },
    {
      "epoch": 0.7772304324028462,
      "grad_norm": 0.06615084228702964,
      "learning_rate": 2.848128780150995e-05,
      "loss": 1.0779,
      "step": 355
    },
    {
      "epoch": 0.7881773399014779,
      "grad_norm": 0.0706561964889078,
      "learning_rate": 2.5856665015882685e-05,
      "loss": 1.0904,
      "step": 360
    },
    {
      "epoch": 0.7991242474001095,
      "grad_norm": 0.06748598590158515,
      "learning_rate": 2.334085779859041e-05,
      "loss": 1.0654,
      "step": 365
    },
    {
      "epoch": 0.8100711548987412,
      "grad_norm": 0.06669809967764875,
      "learning_rate": 2.0937558443650206e-05,
      "loss": 1.0835,
      "step": 370
    },
    {
      "epoch": 0.8210180623973727,
      "grad_norm": 0.06776479655939238,
      "learning_rate": 1.8650294124276556e-05,
      "loss": 1.0836,
      "step": 375
    },
    {
      "epoch": 0.8319649698960043,
      "grad_norm": 0.06672866236853266,
      "learning_rate": 1.6482421716268215e-05,
      "loss": 1.0909,
      "step": 380
    },
    {
      "epoch": 0.842911877394636,
      "grad_norm": 0.0670546035798045,
      "learning_rate": 1.4437122871329955e-05,
      "loss": 1.0984,
      "step": 385
    },
    {
      "epoch": 0.8538587848932676,
      "grad_norm": 0.06639094648741257,
      "learning_rate": 1.2517399347560167e-05,
      "loss": 1.1008,
      "step": 390
    },
    {
      "epoch": 0.8648056923918993,
      "grad_norm": 0.06852807789172832,
      "learning_rate": 1.0726068603956741e-05,
      "loss": 1.0783,
      "step": 395
    },
    {
      "epoch": 0.8757525998905309,
      "grad_norm": 0.06914284979355585,
      "learning_rate": 9.065759665407515e-06,
      "loss": 1.0828,
      "step": 400
    },
    {
      "epoch": 0.8866995073891626,
      "grad_norm": 0.06687442460085294,
      "learning_rate": 7.538909264233751e-06,
      "loss": 1.0845,
      "step": 405
    },
    {
      "epoch": 0.8976464148877942,
      "grad_norm": 0.07046699389113141,
      "learning_rate": 6.147758263949321e-06,
      "loss": 1.0784,
      "step": 410
    },
    {
      "epoch": 0.9085933223864259,
      "grad_norm": 0.06774076270250588,
      "learning_rate": 4.8943483704846475e-06,
      "loss": 1.0914,
      "step": 415
    },
    {
      "epoch": 0.9195402298850575,
      "grad_norm": 0.06693453050067996,
      "learning_rate": 3.7805191357015657e-06,
      "loss": 1.0986,
      "step": 420
    },
    {
      "epoch": 0.9304871373836892,
      "grad_norm": 0.06389085654103847,
      "learning_rate": 2.8079052575973765e-06,
      "loss": 1.087,
      "step": 425
    },
    {
      "epoch": 0.9414340448823207,
      "grad_norm": 0.06811361952720432,
      "learning_rate": 1.9779341811600794e-06,
      "loss": 1.07,
      "step": 430
    },
    {
      "epoch": 0.9523809523809523,
      "grad_norm": 0.06592264001597521,
      "learning_rate": 1.2918240033960027e-06,
      "loss": 1.0825,
      "step": 435
    },
    {
      "epoch": 0.963327859879584,
      "grad_norm": 0.06657689313692897,
      "learning_rate": 7.505816856045012e-07,
      "loss": 1.0843,
      "step": 440
    },
    {
      "epoch": 0.9742747673782156,
      "grad_norm": 0.06488852108049653,
      "learning_rate": 3.5500157552337264e-07,
      "loss": 1.0923,
      "step": 445
    },
    {
      "epoch": 0.9852216748768473,
      "grad_norm": 0.06657099771817772,
      "learning_rate": 1.0566424151401411e-07,
      "loss": 1.0882,
      "step": 450
    },
    {
      "epoch": 0.9961685823754789,
      "grad_norm": 0.06558284673944498,
      "learning_rate": 2.935620497301894e-09,
      "loss": 1.0893,
      "step": 455
    },
    {
      "epoch": 0.9983579638752053,
      "eval_loss": 0.9926520586013794,
      "eval_runtime": 2.1266,
      "eval_samples_per_second": 3.292,
      "eval_steps_per_second": 0.94,
      "step": 456
    },
    {
      "epoch": 0.9983579638752053,
      "step": 456,
      "total_flos": 1.447234595979264e+16,
      "train_loss": 1.1072515829613334,
      "train_runtime": 17333.1907,
      "train_samples_per_second": 3.373,
      "train_steps_per_second": 0.026
    }
  ],
  "logging_steps": 5,
  "max_steps": 456,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.447234595979264e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}