{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 100,
  "global_step": 2730,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.001098901098901099,
      "grad_norm": 2.5938866235339426,
      "learning_rate": 7.326007326007327e-08,
      "loss": 1.4331,
      "step": 1
    },
    {
      "epoch": 0.005494505494505495,
      "grad_norm": 2.3305040971210316,
      "learning_rate": 3.6630036630036635e-07,
      "loss": 1.3861,
      "step": 5
    },
    {
      "epoch": 0.01098901098901099,
      "grad_norm": 2.3032315871842037,
      "learning_rate": 7.326007326007327e-07,
      "loss": 1.3892,
      "step": 10
    },
    {
      "epoch": 0.016483516483516484,
      "grad_norm": 2.0293111765459386,
      "learning_rate": 1.098901098901099e-06,
      "loss": 1.3496,
      "step": 15
    },
    {
      "epoch": 0.02197802197802198,
      "grad_norm": 1.4027172304084468,
      "learning_rate": 1.4652014652014654e-06,
      "loss": 1.2629,
      "step": 20
    },
    {
      "epoch": 0.027472527472527472,
      "grad_norm": 1.4093182507371345,
      "learning_rate": 1.8315018315018316e-06,
      "loss": 1.1094,
      "step": 25
    },
    {
      "epoch": 0.03296703296703297,
      "grad_norm": 0.6639612973057083,
      "learning_rate": 2.197802197802198e-06,
      "loss": 1.0554,
      "step": 30
    },
    {
      "epoch": 0.038461538461538464,
      "grad_norm": 0.6143428370522973,
      "learning_rate": 2.564102564102564e-06,
      "loss": 0.9925,
      "step": 35
    },
    {
      "epoch": 0.04395604395604396,
      "grad_norm": 0.49724360049126803,
      "learning_rate": 2.930402930402931e-06,
      "loss": 0.964,
      "step": 40
    },
    {
      "epoch": 0.04945054945054945,
      "grad_norm": 0.4775849339023127,
      "learning_rate": 3.2967032967032968e-06,
      "loss": 0.9479,
      "step": 45
    },
    {
      "epoch": 0.054945054945054944,
      "grad_norm": 0.33493876785224186,
      "learning_rate": 3.663003663003663e-06,
      "loss": 0.9174,
      "step": 50
    },
    {
      "epoch": 0.06043956043956044,
      "grad_norm": 0.32489963376779446,
      "learning_rate": 4.0293040293040296e-06,
      "loss": 0.8859,
      "step": 55
    },
    {
      "epoch": 0.06593406593406594,
      "grad_norm": 0.492730880028892,
      "learning_rate": 4.395604395604396e-06,
      "loss": 0.9012,
      "step": 60
    },
    {
      "epoch": 0.07142857142857142,
      "grad_norm": 0.3109753841977473,
      "learning_rate": 4.761904761904762e-06,
      "loss": 0.8766,
      "step": 65
    },
    {
      "epoch": 0.07692307692307693,
      "grad_norm": 0.2867877510996371,
      "learning_rate": 5.128205128205128e-06,
      "loss": 0.8571,
      "step": 70
    },
    {
      "epoch": 0.08241758241758242,
      "grad_norm": 0.27490143167772046,
      "learning_rate": 5.494505494505495e-06,
      "loss": 0.8589,
      "step": 75
    },
    {
      "epoch": 0.08791208791208792,
      "grad_norm": 0.28545809911171605,
      "learning_rate": 5.860805860805862e-06,
      "loss": 0.868,
      "step": 80
    },
    {
      "epoch": 0.09340659340659341,
      "grad_norm": 0.2757573337248957,
      "learning_rate": 6.227106227106228e-06,
      "loss": 0.8488,
      "step": 85
    },
    {
      "epoch": 0.0989010989010989,
      "grad_norm": 0.28168284468857463,
      "learning_rate": 6.5934065934065935e-06,
      "loss": 0.8415,
      "step": 90
    },
    {
      "epoch": 0.1043956043956044,
      "grad_norm": 0.2789025930294136,
      "learning_rate": 6.95970695970696e-06,
      "loss": 0.8478,
      "step": 95
    },
    {
      "epoch": 0.10989010989010989,
      "grad_norm": 0.2793158898424537,
      "learning_rate": 7.326007326007326e-06,
      "loss": 0.8402,
      "step": 100
    },
    {
      "epoch": 0.10989010989010989,
      "eval_loss": 0.8307385444641113,
      "eval_runtime": 58.2285,
      "eval_samples_per_second": 23.665,
      "eval_steps_per_second": 0.103,
      "step": 100
    },
    {
      "epoch": 0.11538461538461539,
      "grad_norm": 0.28714132101165046,
      "learning_rate": 7.692307692307694e-06,
      "loss": 0.8132,
      "step": 105
    },
    {
      "epoch": 0.12087912087912088,
      "grad_norm": 0.25219489491882496,
      "learning_rate": 8.058608058608059e-06,
      "loss": 0.8212,
      "step": 110
    },
    {
      "epoch": 0.12637362637362637,
      "grad_norm": 0.2621955241800005,
      "learning_rate": 8.424908424908426e-06,
      "loss": 0.8152,
      "step": 115
    },
    {
      "epoch": 0.13186813186813187,
      "grad_norm": 0.2699161655817323,
      "learning_rate": 8.791208791208792e-06,
      "loss": 0.8227,
      "step": 120
    },
    {
      "epoch": 0.13736263736263737,
      "grad_norm": 0.283439689013222,
      "learning_rate": 9.157509157509158e-06,
      "loss": 0.7977,
      "step": 125
    },
    {
      "epoch": 0.14285714285714285,
      "grad_norm": 0.2513858523994278,
      "learning_rate": 9.523809523809525e-06,
      "loss": 0.7834,
      "step": 130
    },
    {
      "epoch": 0.14835164835164835,
      "grad_norm": 0.2563724649349777,
      "learning_rate": 9.890109890109892e-06,
      "loss": 0.8214,
      "step": 135
    },
    {
      "epoch": 0.15384615384615385,
      "grad_norm": 0.26746604704320437,
      "learning_rate": 1.0256410256410256e-05,
      "loss": 0.7852,
      "step": 140
    },
    {
      "epoch": 0.15934065934065933,
      "grad_norm": 0.280443649102156,
      "learning_rate": 1.0622710622710623e-05,
      "loss": 0.7943,
      "step": 145
    },
    {
      "epoch": 0.16483516483516483,
      "grad_norm": 0.2661703492350103,
      "learning_rate": 1.098901098901099e-05,
      "loss": 0.7846,
      "step": 150
    },
    {
      "epoch": 0.17032967032967034,
      "grad_norm": 0.25295008227570925,
      "learning_rate": 1.1355311355311356e-05,
      "loss": 0.79,
      "step": 155
    },
    {
      "epoch": 0.17582417582417584,
      "grad_norm": 0.2572179650619883,
      "learning_rate": 1.1721611721611723e-05,
      "loss": 0.7945,
      "step": 160
    },
    {
      "epoch": 0.1813186813186813,
      "grad_norm": 0.28163509900788125,
      "learning_rate": 1.2087912087912089e-05,
      "loss": 0.7699,
      "step": 165
    },
    {
      "epoch": 0.18681318681318682,
      "grad_norm": 0.2998101858443158,
      "learning_rate": 1.2454212454212456e-05,
      "loss": 0.789,
      "step": 170
    },
    {
      "epoch": 0.19230769230769232,
      "grad_norm": 0.259731613700112,
      "learning_rate": 1.2820512820512823e-05,
      "loss": 0.7518,
      "step": 175
    },
    {
      "epoch": 0.1978021978021978,
      "grad_norm": 0.26085689374421417,
      "learning_rate": 1.3186813186813187e-05,
      "loss": 0.7778,
      "step": 180
    },
    {
      "epoch": 0.2032967032967033,
      "grad_norm": 0.25933950442397596,
      "learning_rate": 1.3553113553113554e-05,
      "loss": 0.7752,
      "step": 185
    },
    {
      "epoch": 0.2087912087912088,
      "grad_norm": 0.2655972864098073,
      "learning_rate": 1.391941391941392e-05,
      "loss": 0.7754,
      "step": 190
    },
    {
      "epoch": 0.21428571428571427,
      "grad_norm": 0.3027958618309835,
      "learning_rate": 1.4285714285714287e-05,
      "loss": 0.7738,
      "step": 195
    },
    {
      "epoch": 0.21978021978021978,
      "grad_norm": 0.2636680908875259,
      "learning_rate": 1.4652014652014653e-05,
      "loss": 0.7611,
      "step": 200
    },
    {
      "epoch": 0.21978021978021978,
      "eval_loss": 0.7792791724205017,
      "eval_runtime": 60.1437,
      "eval_samples_per_second": 22.912,
      "eval_steps_per_second": 0.1,
      "step": 200
    },
    {
      "epoch": 0.22527472527472528,
      "grad_norm": 0.28418095535412446,
      "learning_rate": 1.501831501831502e-05,
      "loss": 0.7773,
      "step": 205
    },
    {
      "epoch": 0.23076923076923078,
      "grad_norm": 0.253579945341443,
      "learning_rate": 1.5384615384615387e-05,
      "loss": 0.7466,
      "step": 210
    },
    {
      "epoch": 0.23626373626373626,
      "grad_norm": 0.2709322711520115,
      "learning_rate": 1.575091575091575e-05,
      "loss": 0.7486,
      "step": 215
    },
    {
      "epoch": 0.24175824175824176,
      "grad_norm": 0.25784522395547,
      "learning_rate": 1.6117216117216118e-05,
      "loss": 0.7602,
      "step": 220
    },
    {
      "epoch": 0.24725274725274726,
      "grad_norm": 0.25800289350422184,
      "learning_rate": 1.6483516483516486e-05,
      "loss": 0.7593,
      "step": 225
    },
    {
      "epoch": 0.25274725274725274,
      "grad_norm": 0.4335652422653119,
      "learning_rate": 1.6849816849816853e-05,
      "loss": 0.7357,
      "step": 230
    },
    {
      "epoch": 0.25824175824175827,
      "grad_norm": 0.3289531244189634,
      "learning_rate": 1.721611721611722e-05,
      "loss": 0.7456,
      "step": 235
    },
    {
      "epoch": 0.26373626373626374,
      "grad_norm": 1.2095875802009959,
      "learning_rate": 1.7582417582417584e-05,
      "loss": 0.7226,
      "step": 240
    },
    {
      "epoch": 0.2692307692307692,
      "grad_norm": 0.2550501846115449,
      "learning_rate": 1.794871794871795e-05,
      "loss": 0.7304,
      "step": 245
    },
    {
      "epoch": 0.27472527472527475,
      "grad_norm": 0.32241036727466216,
      "learning_rate": 1.8315018315018315e-05,
      "loss": 0.7322,
      "step": 250
    },
    {
      "epoch": 0.2802197802197802,
      "grad_norm": 0.23942812421802775,
      "learning_rate": 1.8681318681318682e-05,
      "loss": 0.7261,
      "step": 255
    },
    {
      "epoch": 0.2857142857142857,
      "grad_norm": 0.2532890631657975,
      "learning_rate": 1.904761904761905e-05,
      "loss": 0.752,
      "step": 260
    },
    {
      "epoch": 0.29120879120879123,
      "grad_norm": 0.24364925383721642,
      "learning_rate": 1.9413919413919417e-05,
      "loss": 0.755,
      "step": 265
    },
    {
      "epoch": 0.2967032967032967,
      "grad_norm": 0.24181710554401747,
      "learning_rate": 1.9780219780219784e-05,
      "loss": 0.7554,
      "step": 270
    },
    {
      "epoch": 0.3021978021978022,
      "grad_norm": 0.23828479627116422,
      "learning_rate": 1.9999967302150437e-05,
      "loss": 0.7246,
      "step": 275
    },
    {
      "epoch": 0.3076923076923077,
      "grad_norm": 0.26360754943139847,
      "learning_rate": 1.9999599453798523e-05,
      "loss": 0.7293,
      "step": 280
    },
    {
      "epoch": 0.3131868131868132,
      "grad_norm": 0.2624620835053123,
      "learning_rate": 1.9998822899867633e-05,
      "loss": 0.7282,
      "step": 285
    },
    {
      "epoch": 0.31868131868131866,
      "grad_norm": 0.24888154268515242,
      "learning_rate": 1.9997637672097222e-05,
      "loss": 0.7343,
      "step": 290
    },
    {
      "epoch": 0.3241758241758242,
      "grad_norm": 0.23324534028548077,
      "learning_rate": 1.9996043818930153e-05,
      "loss": 0.7158,
      "step": 295
    },
    {
      "epoch": 0.32967032967032966,
      "grad_norm": 0.23821533213282017,
      "learning_rate": 1.9994041405510705e-05,
      "loss": 0.7361,
      "step": 300
    },
    {
      "epoch": 0.32967032967032966,
      "eval_loss": 0.7524750828742981,
      "eval_runtime": 58.5251,
      "eval_samples_per_second": 23.545,
      "eval_steps_per_second": 0.103,
      "step": 300
    },
    {
      "epoch": 0.33516483516483514,
      "grad_norm": 0.2419083485626964,
      "learning_rate": 1.999163051368191e-05,
      "loss": 0.7392,
      "step": 305
    },
    {
      "epoch": 0.34065934065934067,
      "grad_norm": 0.23969743620339276,
      "learning_rate": 1.9988811241982206e-05,
      "loss": 0.722,
      "step": 310
    },
    {
      "epoch": 0.34615384615384615,
      "grad_norm": 0.24732719030591468,
      "learning_rate": 1.9985583705641418e-05,
      "loss": 0.7375,
      "step": 315
    },
    {
      "epoch": 0.3516483516483517,
      "grad_norm": 0.2524078352421696,
      "learning_rate": 1.9981948036576045e-05,
      "loss": 0.7174,
      "step": 320
    },
    {
      "epoch": 0.35714285714285715,
      "grad_norm": 0.2450128000419284,
      "learning_rate": 1.997790438338385e-05,
      "loss": 0.7193,
      "step": 325
    },
    {
      "epoch": 0.3626373626373626,
      "grad_norm": 0.22547733974392728,
      "learning_rate": 1.997345291133783e-05,
      "loss": 0.7121,
      "step": 330
    },
    {
      "epoch": 0.36813186813186816,
      "grad_norm": 0.23499045457962084,
      "learning_rate": 1.9968593802379405e-05,
      "loss": 0.7067,
      "step": 335
    },
    {
      "epoch": 0.37362637362637363,
      "grad_norm": 0.24009998902460783,
      "learning_rate": 1.9963327255111033e-05,
      "loss": 0.724,
      "step": 340
    },
    {
      "epoch": 0.3791208791208791,
      "grad_norm": 0.27381888592320386,
      "learning_rate": 1.9957653484788054e-05,
      "loss": 0.7088,
      "step": 345
    },
    {
      "epoch": 0.38461538461538464,
      "grad_norm": 0.23470929489474682,
      "learning_rate": 1.9951572723309918e-05,
      "loss": 0.7123,
      "step": 350
    },
    {
      "epoch": 0.3901098901098901,
      "grad_norm": 0.234461070541495,
      "learning_rate": 1.99450852192107e-05,
      "loss": 0.7132,
      "step": 355
    },
    {
      "epoch": 0.3956043956043956,
      "grad_norm": 0.25459804884418324,
      "learning_rate": 1.9938191237648924e-05,
      "loss": 0.7076,
      "step": 360
    },
    {
      "epoch": 0.4010989010989011,
      "grad_norm": 0.245152397924957,
      "learning_rate": 1.9930891060396757e-05,
      "loss": 0.6918,
      "step": 365
    },
    {
      "epoch": 0.4065934065934066,
      "grad_norm": 0.23860847532916626,
      "learning_rate": 1.992318498582846e-05,
      "loss": 0.7304,
      "step": 370
    },
    {
      "epoch": 0.41208791208791207,
      "grad_norm": 0.2519260957646762,
      "learning_rate": 1.9915073328908217e-05,
      "loss": 0.709,
      "step": 375
    },
    {
      "epoch": 0.4175824175824176,
      "grad_norm": 0.23360487704945615,
      "learning_rate": 1.9906556421177256e-05,
      "loss": 0.7258,
      "step": 380
    },
    {
      "epoch": 0.4230769230769231,
      "grad_norm": 0.23254394099980408,
      "learning_rate": 1.989763461074029e-05,
      "loss": 0.707,
      "step": 385
    },
    {
      "epoch": 0.42857142857142855,
      "grad_norm": 0.22834651014549015,
      "learning_rate": 1.9888308262251286e-05,
      "loss": 0.7085,
      "step": 390
    },
    {
      "epoch": 0.4340659340659341,
      "grad_norm": 0.22350055020087028,
      "learning_rate": 1.987857775689859e-05,
      "loss": 0.7153,
      "step": 395
    },
    {
      "epoch": 0.43956043956043955,
      "grad_norm": 0.2559347137216262,
      "learning_rate": 1.9868443492389307e-05,
      "loss": 0.6854,
      "step": 400
    },
    {
      "epoch": 0.43956043956043955,
      "eval_loss": 0.7337242960929871,
      "eval_runtime": 57.135,
      "eval_samples_per_second": 24.118,
      "eval_steps_per_second": 0.105,
      "step": 400
    },
    {
      "epoch": 0.44505494505494503,
      "grad_norm": 0.27398370193799876,
      "learning_rate": 1.985790588293308e-05,
      "loss": 0.716,
      "step": 405
    },
    {
      "epoch": 0.45054945054945056,
      "grad_norm": 0.49293536001562244,
      "learning_rate": 1.9846965359225127e-05,
      "loss": 0.7187,
      "step": 410
    },
    {
      "epoch": 0.45604395604395603,
      "grad_norm": 0.23991864498334772,
      "learning_rate": 1.9835622368428673e-05,
      "loss": 0.6915,
      "step": 415
    },
    {
      "epoch": 0.46153846153846156,
      "grad_norm": 0.2223500820145743,
      "learning_rate": 1.9823877374156647e-05,
      "loss": 0.7076,
      "step": 420
    },
    {
      "epoch": 0.46703296703296704,
      "grad_norm": 0.2327646676318351,
      "learning_rate": 1.9811730856452754e-05,
      "loss": 0.6865,
      "step": 425
    },
    {
      "epoch": 0.4725274725274725,
      "grad_norm": 0.23366623666692093,
      "learning_rate": 1.9799183311771823e-05,
      "loss": 0.697,
      "step": 430
    },
    {
      "epoch": 0.47802197802197804,
      "grad_norm": 0.23211216480431496,
      "learning_rate": 1.9786235252959555e-05,
      "loss": 0.6949,
      "step": 435
    },
    {
      "epoch": 0.4835164835164835,
      "grad_norm": 0.2335049326202,
      "learning_rate": 1.977288720923153e-05,
      "loss": 0.688,
      "step": 440
    },
    {
      "epoch": 0.489010989010989,
      "grad_norm": 0.23612214303202572,
      "learning_rate": 1.9759139726151597e-05,
      "loss": 0.7006,
      "step": 445
    },
    {
      "epoch": 0.4945054945054945,
      "grad_norm": 0.2296371958742712,
      "learning_rate": 1.9744993365609563e-05,
      "loss": 0.7096,
      "step": 450
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.23792816575947545,
      "learning_rate": 1.973044870579824e-05,
      "loss": 0.6799,
      "step": 455
    },
    {
      "epoch": 0.5054945054945055,
      "grad_norm": 0.23888207855755858,
      "learning_rate": 1.9715506341189795e-05,
      "loss": 0.7189,
      "step": 460
    },
    {
      "epoch": 0.510989010989011,
      "grad_norm": 0.22873637589635504,
      "learning_rate": 1.970016688251147e-05,
      "loss": 0.7041,
      "step": 465
    },
    {
      "epoch": 0.5164835164835165,
      "grad_norm": 0.22109885665030304,
      "learning_rate": 1.9684430956720613e-05,
      "loss": 0.6889,
      "step": 470
    },
    {
      "epoch": 0.521978021978022,
      "grad_norm": 0.22943401975482253,
      "learning_rate": 1.966829920697905e-05,
      "loss": 0.701,
      "step": 475
    },
    {
      "epoch": 0.5274725274725275,
      "grad_norm": 0.24368759924523126,
      "learning_rate": 1.9651772292626804e-05,
      "loss": 0.6798,
      "step": 480
    },
    {
      "epoch": 0.532967032967033,
      "grad_norm": 0.22502323506511537,
      "learning_rate": 1.963485088915514e-05,
      "loss": 0.6732,
      "step": 485
    },
    {
      "epoch": 0.5384615384615384,
      "grad_norm": 0.23406619299034903,
      "learning_rate": 1.961753568817896e-05,
      "loss": 0.6808,
      "step": 490
    },
    {
      "epoch": 0.5439560439560439,
      "grad_norm": 0.24379416412543173,
      "learning_rate": 1.959982739740854e-05,
      "loss": 0.6732,
      "step": 495
    },
    {
      "epoch": 0.5494505494505495,
      "grad_norm": 0.22395929004707257,
      "learning_rate": 1.9581726740620585e-05,
      "loss": 0.6926,
      "step": 500
    },
    {
      "epoch": 0.5494505494505495,
      "eval_loss": 0.7197047472000122,
      "eval_runtime": 57.3825,
      "eval_samples_per_second": 24.014,
      "eval_steps_per_second": 0.105,
      "step": 500
    },
    {
      "epoch": 0.554945054945055,
      "grad_norm": 0.23635721578380114,
      "learning_rate": 1.9563234457628678e-05,
      "loss": 0.7019,
      "step": 505
    },
    {
      "epoch": 0.5604395604395604,
      "grad_norm": 0.22361881412683712,
      "learning_rate": 1.954435130425301e-05,
      "loss": 0.6725,
      "step": 510
    },
    {
      "epoch": 0.5659340659340659,
      "grad_norm": 0.27988489835986297,
      "learning_rate": 1.952507805228951e-05,
      "loss": 0.6806,
      "step": 515
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 0.22714617789786667,
      "learning_rate": 1.9505415489478293e-05,
      "loss": 0.6642,
      "step": 520
    },
    {
      "epoch": 0.5769230769230769,
      "grad_norm": 0.234738186404094,
      "learning_rate": 1.9485364419471454e-05,
      "loss": 0.6788,
      "step": 525
    },
    {
      "epoch": 0.5824175824175825,
      "grad_norm": 0.22212354431865575,
      "learning_rate": 1.9464925661800247e-05,
      "loss": 0.6714,
      "step": 530
    },
    {
      "epoch": 0.5879120879120879,
      "grad_norm": 0.2260832469755207,
      "learning_rate": 1.9444100051841556e-05,
      "loss": 0.6742,
      "step": 535
    },
    {
      "epoch": 0.5934065934065934,
      "grad_norm": 0.22847007927311688,
      "learning_rate": 1.9422888440783773e-05,
      "loss": 0.6825,
      "step": 540
    },
    {
      "epoch": 0.5989010989010989,
      "grad_norm": 0.24175296826977385,
      "learning_rate": 1.9401291695592e-05,
      "loss": 0.6748,
      "step": 545
    },
    {
      "epoch": 0.6043956043956044,
      "grad_norm": 0.22595935576781812,
      "learning_rate": 1.9379310698972618e-05,
      "loss": 0.6922,
      "step": 550
    },
    {
      "epoch": 0.6098901098901099,
      "grad_norm": 0.2357931340905214,
      "learning_rate": 1.935694634933721e-05,
      "loss": 0.6961,
      "step": 555
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 0.23436108471733894,
      "learning_rate": 1.933419956076584e-05,
      "loss": 0.6539,
      "step": 560
    },
    {
      "epoch": 0.6208791208791209,
      "grad_norm": 0.21960475435288737,
      "learning_rate": 1.9311071262969675e-05,
      "loss": 0.6815,
      "step": 565
    },
    {
      "epoch": 0.6263736263736264,
      "grad_norm": 0.22404812837767762,
      "learning_rate": 1.9287562401253023e-05,
      "loss": 0.6654,
      "step": 570
    },
    {
      "epoch": 0.6318681318681318,
      "grad_norm": 0.2283357687835303,
      "learning_rate": 1.9263673936474662e-05,
      "loss": 0.6791,
      "step": 575
    },
    {
      "epoch": 0.6373626373626373,
      "grad_norm": 0.22007214714544407,
      "learning_rate": 1.9239406845008583e-05,
      "loss": 0.6707,
      "step": 580
    },
    {
      "epoch": 0.6428571428571429,
      "grad_norm": 0.21680436366260772,
      "learning_rate": 1.921476211870408e-05,
      "loss": 0.6875,
      "step": 585
    },
    {
      "epoch": 0.6483516483516484,
      "grad_norm": 0.2206990118778276,
      "learning_rate": 1.918974076484521e-05,
      "loss": 0.689,
      "step": 590
    },
    {
      "epoch": 0.6538461538461539,
      "grad_norm": 0.23773830363891224,
      "learning_rate": 1.916434380610963e-05,
      "loss": 0.6904,
      "step": 595
    },
    {
      "epoch": 0.6593406593406593,
      "grad_norm": 0.22280953580435744,
      "learning_rate": 1.9138572280526795e-05,
      "loss": 0.7125,
      "step": 600
    },
    {
      "epoch": 0.6593406593406593,
      "eval_loss": 0.709740936756134,
      "eval_runtime": 56.8842,
      "eval_samples_per_second": 24.225,
      "eval_steps_per_second": 0.105,
      "step": 600
    },
    {
      "epoch": 0.6648351648351648,
      "grad_norm": 0.21474109145946474,
      "learning_rate": 1.911242724143552e-05,
      "loss": 0.6883,
      "step": 605
    },
    {
      "epoch": 0.6703296703296703,
      "grad_norm": 0.22132757673296177,
      "learning_rate": 1.908590975744094e-05,
      "loss": 0.6664,
      "step": 610
    },
    {
      "epoch": 0.6758241758241759,
      "grad_norm": 0.22862364892697207,
      "learning_rate": 1.9059020912370836e-05,
      "loss": 0.6628,
      "step": 615
    },
    {
      "epoch": 0.6813186813186813,
      "grad_norm": 0.22871592729546456,
      "learning_rate": 1.9031761805231322e-05,
      "loss": 0.6672,
      "step": 620
    },
    {
      "epoch": 0.6868131868131868,
      "grad_norm": 0.22248256090434504,
      "learning_rate": 1.9004133550161953e-05,
      "loss": 0.6601,
      "step": 625
    },
    {
      "epoch": 0.6923076923076923,
      "grad_norm": 0.22365938486859313,
      "learning_rate": 1.8976137276390145e-05,
      "loss": 0.6826,
      "step": 630
    },
    {
      "epoch": 0.6978021978021978,
      "grad_norm": 0.2199011359745548,
      "learning_rate": 1.894777412818506e-05,
      "loss": 0.6641,
      "step": 635
    },
    {
      "epoch": 0.7032967032967034,
      "grad_norm": 0.22510686451837608,
      "learning_rate": 1.891904526481083e-05,
      "loss": 0.6859,
      "step": 640
    },
    {
      "epoch": 0.7087912087912088,
      "grad_norm": 0.21671102889261787,
      "learning_rate": 1.8889951860479165e-05,
      "loss": 0.6714,
      "step": 645
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 0.2281707606944894,
      "learning_rate": 1.8860495104301346e-05,
      "loss": 0.6606,
      "step": 650
    },
    {
      "epoch": 0.7197802197802198,
      "grad_norm": 0.2307798038244349,
      "learning_rate": 1.8830676200239666e-05,
      "loss": 0.6743,
      "step": 655
    },
    {
      "epoch": 0.7252747252747253,
      "grad_norm": 0.2231346316824852,
      "learning_rate": 1.8800496367058187e-05,
      "loss": 0.6779,
      "step": 660
    },
    {
      "epoch": 0.7307692307692307,
      "grad_norm": 0.2163289792210269,
      "learning_rate": 1.8769956838272937e-05,
      "loss": 0.6589,
      "step": 665
    },
    {
      "epoch": 0.7362637362637363,
      "grad_norm": 0.21367697388384943,
      "learning_rate": 1.8739058862101487e-05,
      "loss": 0.6663,
      "step": 670
    },
    {
      "epoch": 0.7417582417582418,
      "grad_norm": 0.2129068920296721,
      "learning_rate": 1.8707803701411946e-05,
      "loss": 0.6476,
      "step": 675
    },
    {
      "epoch": 0.7472527472527473,
      "grad_norm": 0.225469317489798,
      "learning_rate": 1.8676192633671342e-05,
      "loss": 0.6617,
      "step": 680
    },
    {
      "epoch": 0.7527472527472527,
      "grad_norm": 0.22021116546638458,
      "learning_rate": 1.8644226950893394e-05,
      "loss": 0.6709,
      "step": 685
    },
    {
      "epoch": 0.7582417582417582,
      "grad_norm": 0.21699187730804526,
      "learning_rate": 1.861190795958573e-05,
      "loss": 0.6758,
      "step": 690
    },
    {
      "epoch": 0.7637362637362637,
      "grad_norm": 0.22500716309661084,
      "learning_rate": 1.857923698069646e-05,
      "loss": 0.6623,
      "step": 695
    },
    {
      "epoch": 0.7692307692307693,
      "grad_norm": 0.22095635026785526,
      "learning_rate": 1.8546215349560204e-05,
      "loss": 0.6662,
      "step": 700
    },
    {
      "epoch": 0.7692307692307693,
      "eval_loss": 0.7014729976654053,
      "eval_runtime": 58.7488,
      "eval_samples_per_second": 23.456,
      "eval_steps_per_second": 0.102,
      "step": 700
    },
    {
      "epoch": 0.7747252747252747,
      "grad_norm": 0.2309523218504365,
      "learning_rate": 1.8512844415843514e-05,
      "loss": 0.6638,
      "step": 705
    },
    {
      "epoch": 0.7802197802197802,
      "grad_norm": 0.2603467352522321,
      "learning_rate": 1.8479125543489694e-05,
      "loss": 0.6585,
      "step": 710
    },
    {
      "epoch": 0.7857142857142857,
      "grad_norm": 0.25516737652528354,
      "learning_rate": 1.844506011066308e-05,
      "loss": 0.6731,
      "step": 715
    },
    {
      "epoch": 0.7912087912087912,
      "grad_norm": 0.20905095426392303,
      "learning_rate": 1.841064950969268e-05,
      "loss": 0.6516,
      "step": 720
    },
    {
      "epoch": 0.7967032967032966,
      "grad_norm": 0.21250771141180422,
      "learning_rate": 1.8375895147015285e-05,
      "loss": 0.6597,
      "step": 725
    },
    {
      "epoch": 0.8021978021978022,
      "grad_norm": 0.21331846730755302,
      "learning_rate": 1.8340798443117992e-05,
      "loss": 0.6551,
      "step": 730
    },
    {
      "epoch": 0.8076923076923077,
      "grad_norm": 0.21452172763665409,
      "learning_rate": 1.8305360832480118e-05,
      "loss": 0.6284,
      "step": 735
    },
    {
      "epoch": 0.8131868131868132,
      "grad_norm": 0.22124518551235928,
      "learning_rate": 1.8269583763514603e-05,
      "loss": 0.6629,
      "step": 740
    },
    {
      "epoch": 0.8186813186813187,
      "grad_norm": 0.20994044274257415,
      "learning_rate": 1.8233468698508786e-05,
      "loss": 0.6379,
      "step": 745
    },
    {
      "epoch": 0.8241758241758241,
      "grad_norm": 0.21519633358820012,
      "learning_rate": 1.819701711356464e-05,
      "loss": 0.657,
      "step": 750
    },
    {
      "epoch": 0.8296703296703297,
      "grad_norm": 0.2131452043844892,
      "learning_rate": 1.8160230498538464e-05,
      "loss": 0.6607,
      "step": 755
    },
    {
      "epoch": 0.8351648351648352,
      "grad_norm": 0.21693178791987028,
      "learning_rate": 1.8123110356979955e-05,
      "loss": 0.6691,
      "step": 760
    },
    {
      "epoch": 0.8406593406593407,
      "grad_norm": 0.2122186850793786,
      "learning_rate": 1.808565820607078e-05,
      "loss": 0.6605,
      "step": 765
    },
    {
      "epoch": 0.8461538461538461,
      "grad_norm": 0.21668819567741526,
      "learning_rate": 1.8047875576562556e-05,
      "loss": 0.6518,
      "step": 770
    },
    {
      "epoch": 0.8516483516483516,
      "grad_norm": 0.21597653641956766,
      "learning_rate": 1.8009764012714283e-05,
      "loss": 0.6552,
      "step": 775
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 0.21403410821941848,
      "learning_rate": 1.7971325072229227e-05,
      "loss": 0.6524,
      "step": 780
    },
    {
      "epoch": 0.8626373626373627,
      "grad_norm": 0.21858357009497015,
      "learning_rate": 1.7932560326191265e-05,
      "loss": 0.6432,
      "step": 785
    },
    {
      "epoch": 0.8681318681318682,
      "grad_norm": 0.20835225444574582,
      "learning_rate": 1.789347135900066e-05,
      "loss": 0.6521,
      "step": 790
    },
    {
      "epoch": 0.8736263736263736,
      "grad_norm": 0.2147631378246422,
      "learning_rate": 1.7854059768309292e-05,
      "loss": 0.6635,
      "step": 795
    },
    {
      "epoch": 0.8791208791208791,
      "grad_norm": 0.21742833943392462,
      "learning_rate": 1.7814327164955388e-05,
      "loss": 0.6517,
      "step": 800
    },
    {
      "epoch": 0.8791208791208791,
      "eval_loss": 0.6937416791915894,
      "eval_runtime": 58.6272,
      "eval_samples_per_second": 23.504,
      "eval_steps_per_second": 0.102,
      "step": 800
    },
    {
      "epoch": 0.8846153846153846,
      "grad_norm": 0.21815615754521292,
      "learning_rate": 1.777427517289766e-05,
      "loss": 0.6513,
      "step": 805
    },
    {
      "epoch": 0.8901098901098901,
      "grad_norm": 0.21132299920641998,
      "learning_rate": 1.773390542914894e-05,
      "loss": 0.6451,
      "step": 810
    },
    {
      "epoch": 0.8956043956043956,
      "grad_norm": 0.2153981254584012,
      "learning_rate": 1.7693219583709266e-05,
      "loss": 0.6472,
      "step": 815
    },
    {
      "epoch": 0.9010989010989011,
      "grad_norm": 0.21516183847901768,
      "learning_rate": 1.765221929949845e-05,
      "loss": 0.6455,
      "step": 820
    },
    {
      "epoch": 0.9065934065934066,
      "grad_norm": 0.21374150031251749,
      "learning_rate": 1.7610906252288097e-05,
      "loss": 0.6599,
      "step": 825
    },
    {
      "epoch": 0.9120879120879121,
      "grad_norm": 0.22521849898029836,
      "learning_rate": 1.7569282130633137e-05,
      "loss": 0.6618,
      "step": 830
    },
    {
      "epoch": 0.9175824175824175,
      "grad_norm": 0.20871378565296117,
      "learning_rate": 1.752734863580278e-05,
      "loss": 0.6435,
      "step": 835
    },
    {
      "epoch": 0.9230769230769231,
      "grad_norm": 0.20579905659289663,
      "learning_rate": 1.7485107481711014e-05,
      "loss": 0.6384,
      "step": 840
    },
    {
      "epoch": 0.9285714285714286,
      "grad_norm": 0.24382815508466385,
      "learning_rate": 1.7442560394846518e-05,
      "loss": 0.6433,
      "step": 845
    },
    {
      "epoch": 0.9340659340659341,
      "grad_norm": 0.20272371923265584,
      "learning_rate": 1.739970911420213e-05,
      "loss": 0.6647,
      "step": 850
    },
    {
      "epoch": 0.9395604395604396,
      "grad_norm": 0.21061337303398175,
      "learning_rate": 1.7356555391203745e-05,
      "loss": 0.6627,
      "step": 855
    },
    {
      "epoch": 0.945054945054945,
      "grad_norm": 0.22526349877300406,
      "learning_rate": 1.7313100989638745e-05,
      "loss": 0.6528,
      "step": 860
    },
    {
      "epoch": 0.9505494505494505,
      "grad_norm": 0.2405922721275717,
      "learning_rate": 1.7269347685583913e-05,
      "loss": 0.641,
      "step": 865
    },
    {
      "epoch": 0.9560439560439561,
      "grad_norm": 0.22058952892730063,
      "learning_rate": 1.7225297267332815e-05,
      "loss": 0.6632,
      "step": 870
    },
    {
      "epoch": 0.9615384615384616,
      "grad_norm": 0.21098073518114888,
      "learning_rate": 1.7180951535322742e-05,
      "loss": 0.651,
      "step": 875
    },
    {
      "epoch": 0.967032967032967,
      "grad_norm": 0.21900984210996413,
      "learning_rate": 1.7136312302061097e-05,
      "loss": 0.6651,
      "step": 880
    },
    {
      "epoch": 0.9725274725274725,
      "grad_norm": 0.20847336917155376,
      "learning_rate": 1.7091381392051333e-05,
      "loss": 0.6364,
      "step": 885
    },
    {
      "epoch": 0.978021978021978,
      "grad_norm": 0.22051072508635897,
      "learning_rate": 1.704616064171836e-05,
      "loss": 0.6577,
      "step": 890
    },
    {
      "epoch": 0.9835164835164835,
      "grad_norm": 0.21269562851079712,
      "learning_rate": 1.7000651899333512e-05,
      "loss": 0.6457,
      "step": 895
    },
    {
      "epoch": 0.989010989010989,
      "grad_norm": 0.20529825370314203,
      "learning_rate": 1.6954857024938976e-05,
      "loss": 0.6234,
      "step": 900
    },
    {
      "epoch": 0.989010989010989,
      "eval_loss": 0.6869306564331055,
      "eval_runtime": 57.713,
      "eval_samples_per_second": 23.877,
      "eval_steps_per_second": 0.104,
      "step": 900
    },
    {
      "epoch": 0.9945054945054945,
      "grad_norm": 0.2434572328244998,
      "learning_rate": 1.6908777890271794e-05,
      "loss": 0.6504,
      "step": 905
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.21741173594960767,
      "learning_rate": 1.686241637868734e-05,
      "loss": 0.6335,
      "step": 910
    },
    {
      "epoch": 1.0054945054945055,
      "grad_norm": 0.2130051055875561,
      "learning_rate": 1.6815774385082355e-05,
      "loss": 0.5875,
      "step": 915
    },
    {
      "epoch": 1.010989010989011,
      "grad_norm": 0.21037185236973288,
      "learning_rate": 1.6768853815817506e-05,
      "loss": 0.5925,
      "step": 920
    },
    {
      "epoch": 1.0164835164835164,
      "grad_norm": 0.22203340887583678,
      "learning_rate": 1.6721656588639444e-05,
      "loss": 0.6027,
      "step": 925
    },
    {
      "epoch": 1.021978021978022,
      "grad_norm": 0.2835076660446732,
      "learning_rate": 1.6674184632602447e-05,
      "loss": 0.5949,
      "step": 930
    },
    {
      "epoch": 1.0274725274725274,
      "grad_norm": 0.21059794691991446,
      "learning_rate": 1.6626439887989552e-05,
      "loss": 0.5952,
      "step": 935
    },
    {
      "epoch": 1.032967032967033,
      "grad_norm": 0.21091351684344237,
      "learning_rate": 1.6578424306233282e-05,
      "loss": 0.5912,
      "step": 940
    },
    {
      "epoch": 1.0384615384615385,
      "grad_norm": 0.21087167061209808,
      "learning_rate": 1.653013984983585e-05,
      "loss": 0.5862,
      "step": 945
    },
    {
      "epoch": 1.043956043956044,
      "grad_norm": 0.21380595384587936,
      "learning_rate": 1.6481588492288985e-05,
      "loss": 0.599,
      "step": 950
    },
    {
      "epoch": 1.0494505494505495,
      "grad_norm": 0.20714451035024295,
      "learning_rate": 1.643277221799323e-05,
      "loss": 0.5841,
      "step": 955
    },
    {
      "epoch": 1.054945054945055,
      "grad_norm": 0.2159347638582695,
      "learning_rate": 1.638369302217687e-05,
      "loss": 0.5919,
      "step": 960
    },
    {
      "epoch": 1.0604395604395604,
      "grad_norm": 0.20846702806767928,
      "learning_rate": 1.633435291081437e-05,
      "loss": 0.5943,
      "step": 965
    },
    {
      "epoch": 1.065934065934066,
      "grad_norm": 0.21732557354687126,
      "learning_rate": 1.6284753900544384e-05,
      "loss": 0.6074,
      "step": 970
    },
    {
      "epoch": 1.0714285714285714,
      "grad_norm": 0.21491296524750328,
      "learning_rate": 1.6234898018587336e-05,
      "loss": 0.6033,
      "step": 975
    },
    {
      "epoch": 1.0769230769230769,
      "grad_norm": 0.2420317609022985,
      "learning_rate": 1.618478730266255e-05,
      "loss": 0.6081,
      "step": 980
    },
    {
      "epoch": 1.0824175824175823,
      "grad_norm": 0.21022308551794014,
      "learning_rate": 1.6134423800904985e-05,
      "loss": 0.5755,
      "step": 985
    },
    {
      "epoch": 1.0879120879120878,
      "grad_norm": 0.20675117493646727,
      "learning_rate": 1.6083809571781498e-05,
      "loss": 0.5977,
      "step": 990
    },
    {
      "epoch": 1.0934065934065935,
      "grad_norm": 0.21348585093868772,
      "learning_rate": 1.6032946684006745e-05,
      "loss": 0.5859,
      "step": 995
    },
    {
      "epoch": 1.098901098901099,
      "grad_norm": 0.2840550425577152,
      "learning_rate": 1.598183721645858e-05,
      "loss": 0.5925,
      "step": 1000
    },
    {
      "epoch": 1.098901098901099,
      "eval_loss": 0.6866068243980408,
      "eval_runtime": 56.3212,
      "eval_samples_per_second": 24.467,
      "eval_steps_per_second": 0.107,
      "step": 1000
    },
    {
      "epoch": 1.1043956043956045,
      "grad_norm": 0.21980107752404765,
      "learning_rate": 1.5930483258093144e-05,
      "loss": 0.5916,
      "step": 1005
    },
    {
      "epoch": 1.10989010989011,
      "grad_norm": 0.22069775219844529,
      "learning_rate": 1.5878886907859423e-05,
      "loss": 0.6005,
      "step": 1010
    },
    {
      "epoch": 1.1153846153846154,
      "grad_norm": 0.22181894571988076,
      "learning_rate": 1.5827050274613512e-05,
      "loss": 0.5862,
      "step": 1015
    },
    {
      "epoch": 1.120879120879121,
      "grad_norm": 0.20906581970205632,
      "learning_rate": 1.57749754770324e-05,
      "loss": 0.5745,
      "step": 1020
    },
    {
      "epoch": 1.1263736263736264,
      "grad_norm": 0.21467487180581052,
      "learning_rate": 1.5722664643527362e-05,
      "loss": 0.5796,
      "step": 1025
    },
    {
      "epoch": 1.1318681318681318,
      "grad_norm": 0.2126168321569666,
      "learning_rate": 1.567011991215699e-05,
      "loss": 0.5842,
      "step": 1030
    },
    {
      "epoch": 1.1373626373626373,
      "grad_norm": 0.21522455237738478,
      "learning_rate": 1.561734343053979e-05,
      "loss": 0.5979,
      "step": 1035
    },
    {
      "epoch": 1.1428571428571428,
      "grad_norm": 0.22633801737454678,
      "learning_rate": 1.5564337355766412e-05,
      "loss": 0.5759,
      "step": 1040
    },
    {
      "epoch": 1.1483516483516483,
      "grad_norm": 0.21358958977145795,
      "learning_rate": 1.551110385431148e-05,
      "loss": 0.5939,
      "step": 1045
    },
    {
      "epoch": 1.1538461538461537,
      "grad_norm": 0.23012592316508443,
      "learning_rate": 1.5457645101945046e-05,
      "loss": 0.5844,
      "step": 1050
    },
    {
      "epoch": 1.1593406593406592,
      "grad_norm": 0.22381921198752516,
      "learning_rate": 1.540396328364367e-05,
      "loss": 0.5952,
      "step": 1055
    },
    {
      "epoch": 1.164835164835165,
      "grad_norm": 0.21581592323525847,
      "learning_rate": 1.5350060593501086e-05,
      "loss": 0.5964,
      "step": 1060
    },
    {
      "epoch": 1.1703296703296704,
      "grad_norm": 0.21213106121775074,
      "learning_rate": 1.5295939234638566e-05,
      "loss": 0.5948,
      "step": 1065
    },
    {
      "epoch": 1.1758241758241759,
      "grad_norm": 0.2148049280696334,
      "learning_rate": 1.5241601419114842e-05,
      "loss": 0.5764,
      "step": 1070
    },
    {
      "epoch": 1.1813186813186813,
      "grad_norm": 0.22960383958999012,
      "learning_rate": 1.5187049367835709e-05,
      "loss": 0.6003,
      "step": 1075
    },
    {
      "epoch": 1.1868131868131868,
      "grad_norm": 0.21916919776904648,
      "learning_rate": 1.5132285310463243e-05,
      "loss": 0.5821,
      "step": 1080
    },
    {
      "epoch": 1.1923076923076923,
      "grad_norm": 0.20455516360752496,
      "learning_rate": 1.507731148532468e-05,
      "loss": 0.5725,
      "step": 1085
    },
    {
      "epoch": 1.1978021978021978,
      "grad_norm": 0.21171630637088715,
      "learning_rate": 1.5022130139320916e-05,
      "loss": 0.6011,
      "step": 1090
    },
    {
      "epoch": 1.2032967032967032,
      "grad_norm": 0.21663323232960344,
      "learning_rate": 1.4966743527834691e-05,
      "loss": 0.5824,
      "step": 1095
    },
    {
      "epoch": 1.2087912087912087,
      "grad_norm": 0.21254281294030117,
      "learning_rate": 1.4911153914638388e-05,
      "loss": 0.585,
      "step": 1100
    },
    {
      "epoch": 1.2087912087912087,
      "eval_loss": 0.6832450032234192,
      "eval_runtime": 57.3698,
      "eval_samples_per_second": 24.02,
      "eval_steps_per_second": 0.105,
      "step": 1100
    },
    {
      "epoch": 1.2142857142857142,
      "grad_norm": 0.21064353654883214,
      "learning_rate": 1.4855363571801523e-05,
      "loss": 0.5825,
      "step": 1105
    },
    {
      "epoch": 1.2197802197802199,
      "grad_norm": 0.21183460637710103,
      "learning_rate": 1.4799374779597866e-05,
      "loss": 0.5845,
      "step": 1110
    },
    {
      "epoch": 1.2252747252747254,
      "grad_norm": 0.21260336129469928,
      "learning_rate": 1.474318982641225e-05,
      "loss": 0.5677,
      "step": 1115
    },
    {
      "epoch": 1.2307692307692308,
      "grad_norm": 0.2126511780407026,
      "learning_rate": 1.4686811008647037e-05,
      "loss": 0.5804,
      "step": 1120
    },
    {
      "epoch": 1.2362637362637363,
      "grad_norm": 0.21779018291407423,
      "learning_rate": 1.463024063062827e-05,
      "loss": 0.5863,
      "step": 1125
    },
    {
      "epoch": 1.2417582417582418,
      "grad_norm": 0.20654603994279122,
      "learning_rate": 1.457348100451146e-05,
      "loss": 0.594,
      "step": 1130
    },
    {
      "epoch": 1.2472527472527473,
      "grad_norm": 0.2085523679075223,
      "learning_rate": 1.4516534450187126e-05,
      "loss": 0.5755,
      "step": 1135
    },
    {
      "epoch": 1.2527472527472527,
      "grad_norm": 0.20261083379143977,
      "learning_rate": 1.4459403295185933e-05,
      "loss": 0.5898,
      "step": 1140
    },
    {
      "epoch": 1.2582417582417582,
      "grad_norm": 0.21173700024568526,
      "learning_rate": 1.4402089874583594e-05,
      "loss": 0.5757,
      "step": 1145
    },
    {
      "epoch": 1.2637362637362637,
      "grad_norm": 0.20789003864449884,
      "learning_rate": 1.4344596530905412e-05,
      "loss": 0.5956,
      "step": 1150
    },
    {
      "epoch": 1.2692307692307692,
      "grad_norm": 0.21703411260373892,
      "learning_rate": 1.4286925614030542e-05,
      "loss": 0.5783,
      "step": 1155
    },
    {
      "epoch": 1.2747252747252746,
      "grad_norm": 0.20821140503325464,
      "learning_rate": 1.4229079481095949e-05,
      "loss": 0.5698,
      "step": 1160
    },
    {
      "epoch": 1.2802197802197801,
      "grad_norm": 0.20754015036739895,
      "learning_rate": 1.4171060496400055e-05,
      "loss": 0.5683,
      "step": 1165
    },
    {
      "epoch": 1.2857142857142856,
      "grad_norm": 0.21632631037889694,
      "learning_rate": 1.4112871031306118e-05,
      "loss": 0.5722,
      "step": 1170
    },
    {
      "epoch": 1.2912087912087913,
      "grad_norm": 0.21139448126540564,
      "learning_rate": 1.4054513464145303e-05,
      "loss": 0.5941,
      "step": 1175
    },
    {
      "epoch": 1.2967032967032968,
      "grad_norm": 0.2005961523532221,
      "learning_rate": 1.3995990180119478e-05,
      "loss": 0.5783,
      "step": 1180
    },
    {
      "epoch": 1.3021978021978022,
      "grad_norm": 0.22506737587889664,
      "learning_rate": 1.3937303571203718e-05,
      "loss": 0.5815,
      "step": 1185
    },
    {
      "epoch": 1.3076923076923077,
      "grad_norm": 0.20776135745507532,
      "learning_rate": 1.387845603604855e-05,
      "loss": 0.5754,
      "step": 1190
    },
    {
      "epoch": 1.3131868131868132,
      "grad_norm": 0.20550133841197799,
      "learning_rate": 1.3819449979881907e-05,
      "loss": 0.5784,
      "step": 1195
    },
    {
      "epoch": 1.3186813186813187,
      "grad_norm": 0.20805118712132745,
      "learning_rate": 1.3760287814410822e-05,
      "loss": 0.5857,
      "step": 1200
    },
    {
      "epoch": 1.3186813186813187,
      "eval_loss": 0.6798372864723206,
      "eval_runtime": 56.9599,
      "eval_samples_per_second": 24.192,
      "eval_steps_per_second": 0.105,
      "step": 1200
    },
    {
      "epoch": 1.3241758241758241,
      "grad_norm": 0.2139723251065959,
      "learning_rate": 1.3700971957722861e-05,
      "loss": 0.5755,
      "step": 1205
    },
    {
      "epoch": 1.3296703296703296,
      "grad_norm": 0.20154696968263214,
      "learning_rate": 1.3641504834187288e-05,
      "loss": 0.5905,
      "step": 1210
    },
    {
      "epoch": 1.335164835164835,
      "grad_norm": 0.21481447035842982,
      "learning_rate": 1.3581888874355969e-05,
      "loss": 0.5968,
      "step": 1215
    },
    {
      "epoch": 1.3406593406593408,
      "grad_norm": 0.23044920349242504,
      "learning_rate": 1.3522126514864047e-05,
      "loss": 0.5788,
      "step": 1220
    },
    {
      "epoch": 1.3461538461538463,
      "grad_norm": 0.19789912123306916,
      "learning_rate": 1.346222019833033e-05,
      "loss": 0.5808,
      "step": 1225
    },
    {
      "epoch": 1.3516483516483517,
      "grad_norm": 0.21068837876070498,
      "learning_rate": 1.3402172373257466e-05,
      "loss": 0.5777,
      "step": 1230
    },
    {
      "epoch": 1.3571428571428572,
      "grad_norm": 0.20881025023596286,
      "learning_rate": 1.3341985493931877e-05,
      "loss": 0.5918,
      "step": 1235
    },
    {
      "epoch": 1.3626373626373627,
      "grad_norm": 0.21040201908308628,
      "learning_rate": 1.3281662020323434e-05,
      "loss": 0.5678,
      "step": 1240
    },
    {
      "epoch": 1.3681318681318682,
      "grad_norm": 0.21648376396617017,
      "learning_rate": 1.3221204417984907e-05,
      "loss": 0.5816,
      "step": 1245
    },
    {
      "epoch": 1.3736263736263736,
      "grad_norm": 0.23069884686246664,
      "learning_rate": 1.3160615157951218e-05,
      "loss": 0.5825,
      "step": 1250
    },
    {
      "epoch": 1.379120879120879,
      "grad_norm": 0.21219224260123462,
      "learning_rate": 1.3099896716638414e-05,
      "loss": 0.5862,
      "step": 1255
    },
    {
      "epoch": 1.3846153846153846,
      "grad_norm": 0.20937858254743488,
      "learning_rate": 1.303905157574247e-05,
      "loss": 0.5754,
      "step": 1260
    },
    {
      "epoch": 1.39010989010989,
      "grad_norm": 0.287029581105537,
      "learning_rate": 1.297808222213785e-05,
      "loss": 0.5723,
      "step": 1265
    },
    {
      "epoch": 1.3956043956043955,
      "grad_norm": 0.2115500774984686,
      "learning_rate": 1.2916991147775867e-05,
      "loss": 0.5903,
      "step": 1270
    },
    {
      "epoch": 1.401098901098901,
      "grad_norm": 0.2360214862879323,
      "learning_rate": 1.2855780849582828e-05,
      "loss": 0.5995,
      "step": 1275
    },
    {
      "epoch": 1.4065934065934065,
      "grad_norm": 0.20079252362217995,
      "learning_rate": 1.2794453829357974e-05,
      "loss": 0.5845,
      "step": 1280
    },
    {
      "epoch": 1.412087912087912,
      "grad_norm": 0.21328822844172537,
      "learning_rate": 1.2733012593671235e-05,
      "loss": 0.5764,
      "step": 1285
    },
    {
      "epoch": 1.4175824175824177,
      "grad_norm": 0.20595464274671632,
      "learning_rate": 1.2671459653760781e-05,
      "loss": 0.5722,
      "step": 1290
    },
    {
      "epoch": 1.4230769230769231,
      "grad_norm": 0.21114157296745673,
      "learning_rate": 1.2609797525430374e-05,
      "loss": 0.5851,
      "step": 1295
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 0.28130303512847965,
      "learning_rate": 1.2548028728946548e-05,
      "loss": 0.5736,
      "step": 1300
    },
    {
      "epoch": 1.4285714285714286,
      "eval_loss": 0.6746197938919067,
      "eval_runtime": 56.6881,
      "eval_samples_per_second": 24.308,
      "eval_steps_per_second": 0.106,
      "step": 1300
    },
    {
      "epoch": 1.434065934065934,
      "grad_norm": 0.2112732742944607,
      "learning_rate": 1.2486155788935599e-05,
      "loss": 0.5856,
      "step": 1305
    },
    {
      "epoch": 1.4395604395604396,
      "grad_norm": 0.21296297985278367,
      "learning_rate": 1.24241812342804e-05,
      "loss": 0.5741,
      "step": 1310
    },
    {
      "epoch": 1.445054945054945,
      "grad_norm": 0.20477018149128742,
      "learning_rate": 1.2362107598017037e-05,
      "loss": 0.5747,
      "step": 1315
    },
    {
      "epoch": 1.4505494505494505,
      "grad_norm": 0.20359961146660085,
      "learning_rate": 1.2299937417231269e-05,
      "loss": 0.5909,
      "step": 1320
    },
    {
      "epoch": 1.456043956043956,
      "grad_norm": 0.19983003339561645,
      "learning_rate": 1.2237673232954854e-05,
      "loss": 0.5819,
      "step": 1325
    },
    {
      "epoch": 1.4615384615384617,
      "grad_norm": 0.21585126952570063,
      "learning_rate": 1.2175317590061676e-05,
      "loss": 0.5744,
      "step": 1330
    },
    {
      "epoch": 1.4670329670329672,
      "grad_norm": 0.2164749075408294,
      "learning_rate": 1.2112873037163728e-05,
      "loss": 0.5921,
      "step": 1335
    },
    {
      "epoch": 1.4725274725274726,
      "grad_norm": 0.21214564202757996,
      "learning_rate": 1.2050342126506958e-05,
      "loss": 0.6007,
      "step": 1340
    },
    {
      "epoch": 1.478021978021978,
      "grad_norm": 0.20961989493961736,
      "learning_rate": 1.1987727413866936e-05,
      "loss": 0.5922,
      "step": 1345
    },
    {
      "epoch": 1.4835164835164836,
      "grad_norm": 0.2070781399263859,
      "learning_rate": 1.1925031458444416e-05,
      "loss": 0.5579,
      "step": 1350
    },
    {
      "epoch": 1.489010989010989,
      "grad_norm": 0.21230956255901673,
      "learning_rate": 1.1862256822760704e-05,
      "loss": 0.583,
      "step": 1355
    },
    {
      "epoch": 1.4945054945054945,
      "grad_norm": 0.20764551222516942,
      "learning_rate": 1.1799406072552963e-05,
      "loss": 0.5881,
      "step": 1360
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.2216686537947161,
      "learning_rate": 1.1736481776669307e-05,
      "loss": 0.5884,
      "step": 1365
    },
    {
      "epoch": 1.5054945054945055,
      "grad_norm": 0.20026575192100984,
      "learning_rate": 1.1673486506963824e-05,
      "loss": 0.5789,
      "step": 1370
    },
    {
      "epoch": 1.510989010989011,
      "grad_norm": 0.20025791445692404,
      "learning_rate": 1.1610422838191473e-05,
      "loss": 0.5757,
      "step": 1375
    },
    {
      "epoch": 1.5164835164835164,
      "grad_norm": 0.21762293248064593,
      "learning_rate": 1.1547293347902813e-05,
      "loss": 0.595,
      "step": 1380
    },
    {
      "epoch": 1.521978021978022,
      "grad_norm": 0.20522965991028572,
      "learning_rate": 1.148410061633869e-05,
      "loss": 0.5825,
      "step": 1385
    },
    {
      "epoch": 1.5274725274725274,
      "grad_norm": 0.2007877147929741,
      "learning_rate": 1.1420847226324746e-05,
      "loss": 0.5699,
      "step": 1390
    },
    {
      "epoch": 1.5329670329670328,
      "grad_norm": 0.2117099052435157,
      "learning_rate": 1.135753576316588e-05,
      "loss": 0.5814,
      "step": 1395
    },
    {
      "epoch": 1.5384615384615383,
      "grad_norm": 0.26509052806023586,
      "learning_rate": 1.1294168814540554e-05,
      "loss": 0.5906,
      "step": 1400
    },
    {
      "epoch": 1.5384615384615383,
      "eval_loss": 0.6723337769508362,
      "eval_runtime": 55.2482,
      "eval_samples_per_second": 24.942,
      "eval_steps_per_second": 0.109,
      "step": 1400
    },
    {
      "epoch": 1.5439560439560438,
      "grad_norm": 0.21156955434544839,
      "learning_rate": 1.1230748970395056e-05,
      "loss": 0.577,
      "step": 1405
    },
    {
      "epoch": 1.5494505494505495,
      "grad_norm": 0.20751359958793425,
      "learning_rate": 1.1167278822837621e-05,
      "loss": 0.5655,
      "step": 1410
    },
    {
      "epoch": 1.554945054945055,
      "grad_norm": 0.21010912643848495,
      "learning_rate": 1.1103760966032497e-05,
      "loss": 0.5657,
      "step": 1415
    },
    {
      "epoch": 1.5604395604395604,
      "grad_norm": 0.20804119817991984,
      "learning_rate": 1.1040197996093915e-05,
      "loss": 0.5829,
      "step": 1420
    },
    {
      "epoch": 1.565934065934066,
      "grad_norm": 0.200793527641334,
      "learning_rate": 1.0976592510979982e-05,
      "loss": 0.5695,
      "step": 1425
    },
    {
      "epoch": 1.5714285714285714,
      "grad_norm": 0.21571689584942777,
      "learning_rate": 1.0912947110386484e-05,
      "loss": 0.5687,
      "step": 1430
    },
    {
      "epoch": 1.5769230769230769,
      "grad_norm": 0.20881262462287056,
      "learning_rate": 1.084926439564065e-05,
      "loss": 0.5813,
      "step": 1435
    },
    {
      "epoch": 1.5824175824175826,
      "grad_norm": 0.19724870907211803,
      "learning_rate": 1.0785546969594813e-05,
      "loss": 0.5695,
      "step": 1440
    },
    {
      "epoch": 1.587912087912088,
      "grad_norm": 0.19796826932631745,
      "learning_rate": 1.0721797436520044e-05,
      "loss": 0.5745,
      "step": 1445
    },
    {
      "epoch": 1.5934065934065935,
      "grad_norm": 0.20623299794109134,
      "learning_rate": 1.0658018401999681e-05,
      "loss": 0.5916,
      "step": 1450
    },
    {
      "epoch": 1.598901098901099,
      "grad_norm": 0.22551529540554374,
      "learning_rate": 1.0594212472822865e-05,
      "loss": 0.5844,
      "step": 1455
    },
    {
      "epoch": 1.6043956043956045,
      "grad_norm": 0.20013753224483868,
      "learning_rate": 1.053038225687798e-05,
      "loss": 0.5767,
      "step": 1460
    },
    {
      "epoch": 1.60989010989011,
      "grad_norm": 0.20025644857315295,
      "learning_rate": 1.0466530363046057e-05,
      "loss": 0.57,
      "step": 1465
    },
    {
      "epoch": 1.6153846153846154,
      "grad_norm": 0.22069359110995249,
      "learning_rate": 1.0402659401094154e-05,
      "loss": 0.5803,
      "step": 1470
    },
    {
      "epoch": 1.620879120879121,
      "grad_norm": 0.2016457322498759,
      "learning_rate": 1.033877198156868e-05,
      "loss": 0.5711,
      "step": 1475
    },
    {
      "epoch": 1.6263736263736264,
      "grad_norm": 0.20948058749001328,
      "learning_rate": 1.0274870715688713e-05,
      "loss": 0.5614,
      "step": 1480
    },
    {
      "epoch": 1.6318681318681318,
      "grad_norm": 0.20613326525368722,
      "learning_rate": 1.0210958215239249e-05,
      "loss": 0.5617,
      "step": 1485
    },
    {
      "epoch": 1.6373626373626373,
      "grad_norm": 0.20057743312747597,
      "learning_rate": 1.0147037092464469e-05,
      "loss": 0.5749,
      "step": 1490
    },
    {
      "epoch": 1.6428571428571428,
      "grad_norm": 0.21757021973274904,
      "learning_rate": 1.0083109959960974e-05,
      "loss": 0.5719,
      "step": 1495
    },
    {
      "epoch": 1.6483516483516483,
      "grad_norm": 0.1988420530692502,
      "learning_rate": 1.0019179430570984e-05,
      "loss": 0.569,
      "step": 1500
    },
    {
      "epoch": 1.6483516483516483,
      "eval_loss": 0.6686215996742249,
      "eval_runtime": 57.8791,
      "eval_samples_per_second": 23.808,
      "eval_steps_per_second": 0.104,
      "step": 1500
    },
    {
      "epoch": 1.6538461538461537,
      "grad_norm": 0.21660113082980018,
      "learning_rate": 9.955248117275566e-06,
      "loss": 0.5584,
      "step": 1505
    },
    {
      "epoch": 1.6593406593406592,
      "grad_norm": 0.23375649427094025,
      "learning_rate": 9.891318633087831e-06,
      "loss": 0.5515,
      "step": 1510
    },
    {
      "epoch": 1.6648351648351647,
      "grad_norm": 0.20598295650184129,
      "learning_rate": 9.827393590946116e-06,
      "loss": 0.5591,
      "step": 1515
    },
    {
      "epoch": 1.6703296703296702,
      "grad_norm": 0.19812798829418632,
      "learning_rate": 9.763475603607215e-06,
      "loss": 0.5589,
      "step": 1520
    },
    {
      "epoch": 1.6758241758241759,
      "grad_norm": 0.20951557993767064,
      "learning_rate": 9.699567283539567e-06,
      "loss": 0.5777,
      "step": 1525
    },
    {
      "epoch": 1.6813186813186813,
      "grad_norm": 0.20861568891956944,
      "learning_rate": 9.635671242816503e-06,
      "loss": 0.5793,
      "step": 1530
    },
    {
      "epoch": 1.6868131868131868,
      "grad_norm": 0.2045724615591832,
      "learning_rate": 9.571790093009445e-06,
      "loss": 0.5507,
      "step": 1535
    },
    {
      "epoch": 1.6923076923076923,
      "grad_norm": 0.2062400013339212,
      "learning_rate": 9.50792644508122e-06,
      "loss": 0.5859,
      "step": 1540
    },
    {
      "epoch": 1.6978021978021978,
      "grad_norm": 0.20539716997080384,
      "learning_rate": 9.44408290927929e-06,
      "loss": 0.5883,
      "step": 1545
    },
    {
      "epoch": 1.7032967032967035,
      "grad_norm": 0.2007069765651346,
      "learning_rate": 9.380262095029113e-06,
      "loss": 0.5433,
      "step": 1550
    },
    {
      "epoch": 1.708791208791209,
      "grad_norm": 0.19988561870754676,
      "learning_rate": 9.316466610827446e-06,
      "loss": 0.5681,
      "step": 1555
    },
    {
      "epoch": 1.7142857142857144,
      "grad_norm": 0.20674989840235833,
      "learning_rate": 9.252699064135759e-06,
      "loss": 0.56,
      "step": 1560
    },
    {
      "epoch": 1.7197802197802199,
      "grad_norm": 0.19685538726018847,
      "learning_rate": 9.188962061273664e-06,
      "loss": 0.5762,
      "step": 1565
    },
    {
      "epoch": 1.7252747252747254,
      "grad_norm": 0.20541519207163753,
      "learning_rate": 9.125258207312365e-06,
      "loss": 0.5637,
      "step": 1570
    },
    {
      "epoch": 1.7307692307692308,
      "grad_norm": 0.1975839480015212,
      "learning_rate": 9.061590105968208e-06,
      "loss": 0.5529,
      "step": 1575
    },
    {
      "epoch": 1.7362637362637363,
      "grad_norm": 0.22500565969016117,
      "learning_rate": 8.997960359496248e-06,
      "loss": 0.5736,
      "step": 1580
    },
    {
      "epoch": 1.7417582417582418,
      "grad_norm": 0.19091706107610168,
      "learning_rate": 8.934371568583893e-06,
      "loss": 0.5683,
      "step": 1585
    },
    {
      "epoch": 1.7472527472527473,
      "grad_norm": 0.2105931306233096,
      "learning_rate": 8.8708263322446e-06,
      "loss": 0.5751,
      "step": 1590
    },
    {
      "epoch": 1.7527472527472527,
      "grad_norm": 0.20458297854035193,
      "learning_rate": 8.807327247711667e-06,
      "loss": 0.5873,
      "step": 1595
    },
    {
      "epoch": 1.7582417582417582,
      "grad_norm": 0.19955511509412213,
      "learning_rate": 8.743876910332057e-06,
      "loss": 0.5756,
      "step": 1600
    },
    {
      "epoch": 1.7582417582417582,
      "eval_loss": 0.6655026078224182,
      "eval_runtime": 58.1146,
      "eval_samples_per_second": 23.712,
      "eval_steps_per_second": 0.103,
      "step": 1600
    },
    {
      "epoch": 1.7637362637362637,
      "grad_norm": 0.20167066864184663,
      "learning_rate": 8.680477913460339e-06,
      "loss": 0.5625,
      "step": 1605
    },
    {
      "epoch": 1.7692307692307692,
      "grad_norm": 0.20048559966418272,
      "learning_rate": 8.617132848352672e-06,
      "loss": 0.589,
      "step": 1610
    },
    {
      "epoch": 1.7747252747252746,
      "grad_norm": 0.20227923458206726,
      "learning_rate": 8.553844304060908e-06,
      "loss": 0.5601,
      "step": 1615
    },
    {
      "epoch": 1.7802197802197801,
      "grad_norm": 0.19698401864090306,
      "learning_rate": 8.490614867326775e-06,
      "loss": 0.5653,
      "step": 1620
    },
    {
      "epoch": 1.7857142857142856,
      "grad_norm": 0.20175985199043966,
      "learning_rate": 8.427447122476148e-06,
      "loss": 0.576,
      "step": 1625
    },
    {
      "epoch": 1.791208791208791,
      "grad_norm": 0.20649958059932677,
      "learning_rate": 8.364343651313406e-06,
      "loss": 0.5789,
      "step": 1630
    },
    {
      "epoch": 1.7967032967032965,
      "grad_norm": 0.20362236096913583,
      "learning_rate": 8.301307033015928e-06,
      "loss": 0.5743,
      "step": 1635
    },
    {
      "epoch": 1.8021978021978022,
      "grad_norm": 0.1986543134675767,
      "learning_rate": 8.23833984402868e-06,
      "loss": 0.5643,
      "step": 1640
    },
    {
      "epoch": 1.8076923076923077,
      "grad_norm": 0.198913330885529,
      "learning_rate": 8.175444657958875e-06,
      "loss": 0.5735,
      "step": 1645
    },
    {
      "epoch": 1.8131868131868132,
      "grad_norm": 0.19864368535876992,
      "learning_rate": 8.112624045470834e-06,
      "loss": 0.5717,
      "step": 1650
    },
    {
      "epoch": 1.8186813186813187,
      "grad_norm": 0.19883717015134883,
      "learning_rate": 8.04988057418088e-06,
      "loss": 0.5557,
      "step": 1655
    },
    {
      "epoch": 1.8241758241758241,
      "grad_norm": 0.20332693450451117,
      "learning_rate": 7.987216808552409e-06,
      "loss": 0.5736,
      "step": 1660
    },
    {
      "epoch": 1.8296703296703298,
      "grad_norm": 0.2022305746327796,
      "learning_rate": 7.924635309791065e-06,
      "loss": 0.5605,
      "step": 1665
    },
    {
      "epoch": 1.8351648351648353,
      "grad_norm": 0.20043133551344314,
      "learning_rate": 7.862138635740078e-06,
      "loss": 0.5509,
      "step": 1670
    },
    {
      "epoch": 1.8406593406593408,
      "grad_norm": 0.22455898506641436,
      "learning_rate": 7.799729340775688e-06,
      "loss": 0.5869,
      "step": 1675
    },
    {
      "epoch": 1.8461538461538463,
      "grad_norm": 0.19030565857505835,
      "learning_rate": 7.73740997570278e-06,
      "loss": 0.5657,
      "step": 1680
    },
    {
      "epoch": 1.8516483516483517,
      "grad_norm": 0.20815080209673342,
      "learning_rate": 7.675183087650592e-06,
      "loss": 0.566,
      "step": 1685
    },
    {
      "epoch": 1.8571428571428572,
      "grad_norm": 0.20412451434740683,
      "learning_rate": 7.613051219968624e-06,
      "loss": 0.5571,
      "step": 1690
    },
    {
      "epoch": 1.8626373626373627,
      "grad_norm": 0.20467604912515247,
      "learning_rate": 7.551016912122692e-06,
      "loss": 0.5677,
      "step": 1695
    },
    {
      "epoch": 1.8681318681318682,
      "grad_norm": 0.2049947541178511,
      "learning_rate": 7.489082699591128e-06,
      "loss": 0.545,
      "step": 1700
    },
    {
      "epoch": 1.8681318681318682,
      "eval_loss": 0.6621970534324646,
      "eval_runtime": 53.6631,
      "eval_samples_per_second": 25.679,
      "eval_steps_per_second": 0.112,
      "step": 1700
    },
    {
      "epoch": 1.8736263736263736,
      "grad_norm": 0.20749354283076393,
      "learning_rate": 7.4272511137611405e-06,
      "loss": 0.5875,
      "step": 1705
    },
    {
      "epoch": 1.879120879120879,
      "grad_norm": 0.19337165187460637,
      "learning_rate": 7.3655246818253626e-06,
      "loss": 0.5633,
      "step": 1710
    },
    {
      "epoch": 1.8846153846153846,
      "grad_norm": 0.20317650293507142,
      "learning_rate": 7.303905926678565e-06,
      "loss": 0.5625,
      "step": 1715
    },
    {
      "epoch": 1.89010989010989,
      "grad_norm": 0.20166481148644033,
      "learning_rate": 7.242397366814516e-06,
      "loss": 0.5444,
      "step": 1720
    },
    {
      "epoch": 1.8956043956043955,
|
"grad_norm": 0.20130249873133135, |
|
"learning_rate": 7.181001516223074e-06, |
|
"loss": 0.5501, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 1.901098901098901, |
|
"grad_norm": 0.20104391428043816, |
|
"learning_rate": 7.1197208842874175e-06, |
|
"loss": 0.5568, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 1.9065934065934065, |
|
"grad_norm": 0.19669831797494047, |
|
"learning_rate": 7.058557975681488e-06, |
|
"loss": 0.5691, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 1.912087912087912, |
|
"grad_norm": 0.19994455544845724, |
|
"learning_rate": 6.997515290267611e-06, |
|
"loss": 0.5641, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 1.9175824175824174, |
|
"grad_norm": 0.2022020053476063, |
|
"learning_rate": 6.936595322994328e-06, |
|
"loss": 0.5625, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 1.9230769230769231, |
|
"grad_norm": 0.20426025496672773, |
|
"learning_rate": 6.8758005637944245e-06, |
|
"loss": 0.5662, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.9285714285714286, |
|
"grad_norm": 0.19660653769715136, |
|
"learning_rate": 6.815133497483157e-06, |
|
"loss": 0.575, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 1.934065934065934, |
|
"grad_norm": 0.2091108562276251, |
|
"learning_rate": 6.754596603656687e-06, |
|
"loss": 0.5668, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.9395604395604396, |
|
"grad_norm": 0.1990872798241454, |
|
"learning_rate": 6.694192356590743e-06, |
|
"loss": 0.5677, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 1.945054945054945, |
|
"grad_norm": 0.2035184385391121, |
|
"learning_rate": 6.633923225139498e-06, |
|
"loss": 0.5848, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.9505494505494505, |
|
"grad_norm": 0.2087152884886787, |
|
"learning_rate": 6.573791672634638e-06, |
|
"loss": 0.5779, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 1.9560439560439562, |
|
"grad_norm": 0.19903589160386567, |
|
"learning_rate": 6.513800156784709e-06, |
|
"loss": 0.5545, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.9615384615384617, |
|
"grad_norm": 0.1988257810600432, |
|
"learning_rate": 6.453951129574644e-06, |
|
"loss": 0.5591, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 1.9670329670329672, |
|
"grad_norm": 0.2440643065747747, |
|
"learning_rate": 6.394247037165559e-06, |
|
"loss": 0.5575, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.9725274725274726, |
|
"grad_norm": 0.19915770600503882, |
|
"learning_rate": 6.3346903197947564e-06, |
|
"loss": 0.5713, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 1.978021978021978, |
|
"grad_norm": 0.19824070469872943, |
|
"learning_rate": 6.275283411676008e-06, |
|
"loss": 0.5505, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.978021978021978, |
|
"eval_loss": 0.6606090068817139, |
|
"eval_runtime": 57.2329, |
|
"eval_samples_per_second": 24.077, |
|
"eval_steps_per_second": 0.105, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.9835164835164836, |
|
"grad_norm": 0.20291412687117444, |
|
"learning_rate": 6.216028740900042e-06, |
|
"loss": 0.5715, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 1.989010989010989, |
|
"grad_norm": 0.1992635144672437, |
|
"learning_rate": 6.1569287293353274e-06, |
|
"loss": 0.5648, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.9945054945054945, |
|
"grad_norm": 0.20133042378281404, |
|
"learning_rate": 6.097985792529055e-06, |
|
"loss": 0.5718, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.18939749792824842, |
|
"learning_rate": 6.039202339608432e-06, |
|
"loss": 0.5509, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 2.0054945054945055, |
|
"grad_norm": 0.21548294059114115, |
|
"learning_rate": 5.980580773182214e-06, |
|
"loss": 0.5084, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 2.010989010989011, |
|
"grad_norm": 0.19938738432640987, |
|
"learning_rate": 5.922123489242499e-06, |
|
"loss": 0.5162, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 2.0164835164835164, |
|
"grad_norm": 0.2027789624856937, |
|
"learning_rate": 5.8638328770667905e-06, |
|
"loss": 0.5119, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 2.021978021978022, |
|
"grad_norm": 0.20197197450501087, |
|
"learning_rate": 5.805711319120358e-06, |
|
"loss": 0.5093, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 2.0274725274725274, |
|
"grad_norm": 0.20124091857994694, |
|
"learning_rate": 5.747761190958859e-06, |
|
"loss": 0.5118, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 2.032967032967033, |
|
"grad_norm": 0.2060214343274437, |
|
"learning_rate": 5.689984861131221e-06, |
|
"loss": 0.529, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 2.0384615384615383, |
|
"grad_norm": 0.1949171402387555, |
|
"learning_rate": 5.632384691082874e-06, |
|
"loss": 0.5038, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 2.043956043956044, |
|
"grad_norm": 0.2065371272676731, |
|
"learning_rate": 5.5749630350592e-06, |
|
"loss": 0.524, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 2.0494505494505493, |
|
"grad_norm": 0.20211687410000778, |
|
"learning_rate": 5.517722240009319e-06, |
|
"loss": 0.5249, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 2.0549450549450547, |
|
"grad_norm": 0.21155781039557575, |
|
"learning_rate": 5.460664645490172e-06, |
|
"loss": 0.5222, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 2.0604395604395602, |
|
"grad_norm": 0.20557792586063273, |
|
"learning_rate": 5.403792583570884e-06, |
|
"loss": 0.5122, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 2.065934065934066, |
|
"grad_norm": 0.20892091915093894, |
|
"learning_rate": 5.347108378737469e-06, |
|
"loss": 0.5209, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 2.0714285714285716, |
|
"grad_norm": 0.21130418206749751, |
|
"learning_rate": 5.290614347797802e-06, |
|
"loss": 0.4999, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 2.076923076923077, |
|
"grad_norm": 0.20265157501265516, |
|
"learning_rate": 5.234312799786921e-06, |
|
"loss": 0.5113, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 2.0824175824175826, |
|
"grad_norm": 0.2380811857415137, |
|
"learning_rate": 5.1782060358726885e-06, |
|
"loss": 0.5137, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 2.087912087912088, |
|
"grad_norm": 0.20021684752472454, |
|
"learning_rate": 5.122296349261695e-06, |
|
"loss": 0.5149, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 2.087912087912088, |
|
"eval_loss": 0.6648159027099609, |
|
"eval_runtime": 57.1476, |
|
"eval_samples_per_second": 24.113, |
|
"eval_steps_per_second": 0.105, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 2.0934065934065935, |
|
"grad_norm": 0.20298609856841238, |
|
"learning_rate": 5.066586025105558e-06, |
|
"loss": 0.527, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 2.098901098901099, |
|
"grad_norm": 0.2035387697050701, |
|
"learning_rate": 5.011077340407509e-06, |
|
"loss": 0.5055, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 2.1043956043956045, |
|
"grad_norm": 0.19786708962281988, |
|
"learning_rate": 4.955772563929334e-06, |
|
"loss": 0.5151, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 2.10989010989011, |
|
"grad_norm": 0.20387216209111395, |
|
"learning_rate": 4.900673956098644e-06, |
|
"loss": 0.5049, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 2.1153846153846154, |
|
"grad_norm": 0.20554401944132458, |
|
"learning_rate": 4.845783768916482e-06, |
|
"loss": 0.519, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 2.120879120879121, |
|
"grad_norm": 0.2111896896707393, |
|
"learning_rate": 4.79110424586528e-06, |
|
"loss": 0.5193, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 2.1263736263736264, |
|
"grad_norm": 0.20409361712091278, |
|
"learning_rate": 4.736637621817176e-06, |
|
"loss": 0.5134, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 2.131868131868132, |
|
"grad_norm": 0.20305198178835793, |
|
"learning_rate": 4.682386122942649e-06, |
|
"loss": 0.5338, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 2.1373626373626373, |
|
"grad_norm": 0.20969960596506004, |
|
"learning_rate": 4.628351966619531e-06, |
|
"loss": 0.5285, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 2.142857142857143, |
|
"grad_norm": 0.2016497737059224, |
|
"learning_rate": 4.5745373613424075e-06, |
|
"loss": 0.5196, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 2.1483516483516483, |
|
"grad_norm": 0.20780959230200222, |
|
"learning_rate": 4.520944506632314e-06, |
|
"loss": 0.5102, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 2.1538461538461537, |
|
"grad_norm": 0.20807222664357544, |
|
"learning_rate": 4.467575592946865e-06, |
|
"loss": 0.5325, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 2.159340659340659, |
|
"grad_norm": 0.2167502541161845, |
|
"learning_rate": 4.414432801590703e-06, |
|
"loss": 0.5078, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 2.1648351648351647, |
|
"grad_norm": 0.21490727997644427, |
|
"learning_rate": 4.361518304626366e-06, |
|
"loss": 0.5407, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 2.17032967032967, |
|
"grad_norm": 0.21734918740860756, |
|
"learning_rate": 4.308834264785483e-06, |
|
"loss": 0.5141, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 2.1758241758241756, |
|
"grad_norm": 0.1991788993982973, |
|
"learning_rate": 4.256382835380421e-06, |
|
"loss": 0.5069, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 2.181318681318681, |
|
"grad_norm": 0.2143454079537833, |
|
"learning_rate": 4.204166160216216e-06, |
|
"loss": 0.5249, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 2.186813186813187, |
|
"grad_norm": 0.19879558037154738, |
|
"learning_rate": 4.1521863735030065e-06, |
|
"loss": 0.4902, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 2.1923076923076925, |
|
"grad_norm": 0.20002933623961325, |
|
"learning_rate": 4.100445599768774e-06, |
|
"loss": 0.5051, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 2.197802197802198, |
|
"grad_norm": 0.20285409795219067, |
|
"learning_rate": 4.048945953772504e-06, |
|
"loss": 0.5234, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 2.197802197802198, |
|
"eval_loss": 0.6638170480728149, |
|
"eval_runtime": 57.5818, |
|
"eval_samples_per_second": 23.931, |
|
"eval_steps_per_second": 0.104, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 2.2032967032967035, |
|
"grad_norm": 0.2018945000710767, |
|
"learning_rate": 3.99768954041778e-06, |
|
"loss": 0.5071, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 2.208791208791209, |
|
"grad_norm": 0.20646923773133413, |
|
"learning_rate": 3.946678454666719e-06, |
|
"loss": 0.4988, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 2.2142857142857144, |
|
"grad_norm": 0.20067691812172808, |
|
"learning_rate": 3.89591478145437e-06, |
|
"loss": 0.5182, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 2.21978021978022, |
|
"grad_norm": 0.21205391255701184, |
|
"learning_rate": 3.845400595603482e-06, |
|
"loss": 0.5307, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 2.2252747252747254, |
|
"grad_norm": 0.20460875351302696, |
|
"learning_rate": 3.79513796173971e-06, |
|
"loss": 0.5031, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 2.230769230769231, |
|
"grad_norm": 0.1973070576811456, |
|
"learning_rate": 3.745128934207225e-06, |
|
"loss": 0.5284, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 2.2362637362637363, |
|
"grad_norm": 0.21339279534660974, |
|
"learning_rate": 3.695375556984764e-06, |
|
"loss": 0.5252, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 2.241758241758242, |
|
"grad_norm": 0.20247153184924946, |
|
"learning_rate": 3.6458798636020477e-06, |
|
"loss": 0.5128, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 2.2472527472527473, |
|
"grad_norm": 0.19749668386741256, |
|
"learning_rate": 3.59664387705672e-06, |
|
"loss": 0.5062, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 2.2527472527472527, |
|
"grad_norm": 0.21402795878492867, |
|
"learning_rate": 3.5476696097316253e-06, |
|
"loss": 0.5109, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 2.258241758241758, |
|
"grad_norm": 0.2077870859543597, |
|
"learning_rate": 3.4989590633125583e-06, |
|
"loss": 0.5274, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 2.2637362637362637, |
|
"grad_norm": 0.2660838503313638, |
|
"learning_rate": 3.450514228706482e-06, |
|
"loss": 0.5059, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 2.269230769230769, |
|
"grad_norm": 0.20089637585406683, |
|
"learning_rate": 3.4023370859601192e-06, |
|
"loss": 0.5044, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 2.2747252747252746, |
|
"grad_norm": 0.19730583864844892, |
|
"learning_rate": 3.3544296041790457e-06, |
|
"loss": 0.5002, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 2.28021978021978, |
|
"grad_norm": 0.202178274091821, |
|
"learning_rate": 3.3067937414471986e-06, |
|
"loss": 0.5244, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 2.2857142857142856, |
|
"grad_norm": 0.19886623288328378, |
|
"learning_rate": 3.2594314447468457e-06, |
|
"loss": 0.5115, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 2.291208791208791, |
|
"grad_norm": 0.20284655112337058, |
|
"learning_rate": 3.2123446498790214e-06, |
|
"loss": 0.5108, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 2.2967032967032965, |
|
"grad_norm": 0.21307262525015022, |
|
"learning_rate": 3.1655352813843886e-06, |
|
"loss": 0.5205, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 2.302197802197802, |
|
"grad_norm": 0.2016117227949298, |
|
"learning_rate": 3.1190052524645752e-06, |
|
"loss": 0.5036, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 2.3076923076923075, |
|
"grad_norm": 0.20155504502839525, |
|
"learning_rate": 3.0727564649040066e-06, |
|
"loss": 0.5239, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 2.3076923076923075, |
|
"eval_loss": 0.6632149815559387, |
|
"eval_runtime": 55.6194, |
|
"eval_samples_per_second": 24.776, |
|
"eval_steps_per_second": 0.108, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 2.313186813186813, |
|
"grad_norm": 0.21425188656541483, |
|
"learning_rate": 3.0267908089921438e-06, |
|
"loss": 0.5151, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 2.3186813186813184, |
|
"grad_norm": 0.20081101739821433, |
|
"learning_rate": 2.9811101634462414e-06, |
|
"loss": 0.5147, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 2.3241758241758244, |
|
"grad_norm": 0.20232270966888258, |
|
"learning_rate": 2.93571639533455e-06, |
|
"loss": 0.522, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 2.32967032967033, |
|
"grad_norm": 0.20125405245316808, |
|
"learning_rate": 2.8906113600000153e-06, |
|
"loss": 0.5209, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 2.3351648351648353, |
|
"grad_norm": 0.20454677450030168, |
|
"learning_rate": 2.8457969009844354e-06, |
|
"loss": 0.5131, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 2.340659340659341, |
|
"grad_norm": 0.2068065615757479, |
|
"learning_rate": 2.8012748499531195e-06, |
|
"loss": 0.5268, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 2.3461538461538463, |
|
"grad_norm": 0.20463770808980064, |
|
"learning_rate": 2.7570470266200177e-06, |
|
"loss": 0.5269, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 2.3516483516483517, |
|
"grad_norm": 0.20997582101579929, |
|
"learning_rate": 2.713115238673356e-06, |
|
"loss": 0.5247, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 2.357142857142857, |
|
"grad_norm": 0.20156173202574512, |
|
"learning_rate": 2.669481281701739e-06, |
|
"loss": 0.5039, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 2.3626373626373627, |
|
"grad_norm": 0.19654077766547687, |
|
"learning_rate": 2.626146939120757e-06, |
|
"loss": 0.5185, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 2.368131868131868, |
|
"grad_norm": 0.20382023726525478, |
|
"learning_rate": 2.5831139821001184e-06, |
|
"loss": 0.5259, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 2.3736263736263736, |
|
"grad_norm": 0.20680053202116233, |
|
"learning_rate": 2.5403841694912333e-06, |
|
"loss": 0.513, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 2.379120879120879, |
|
"grad_norm": 0.1941492774491302, |
|
"learning_rate": 2.497959247755335e-06, |
|
"loss": 0.5239, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 2.3846153846153846, |
|
"grad_norm": 0.20405640437952138, |
|
"learning_rate": 2.455840950892099e-06, |
|
"loss": 0.5156, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 2.39010989010989, |
|
"grad_norm": 0.2843212523020798, |
|
"learning_rate": 2.414031000368767e-06, |
|
"loss": 0.5034, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 2.3956043956043955, |
|
"grad_norm": 0.20197161260199611, |
|
"learning_rate": 2.372531105049789e-06, |
|
"loss": 0.5003, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 2.401098901098901, |
|
"grad_norm": 0.20727591825649075, |
|
"learning_rate": 2.331342961126988e-06, |
|
"loss": 0.5079, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 2.4065934065934065, |
|
"grad_norm": 0.2054230166031501, |
|
"learning_rate": 2.290468252050204e-06, |
|
"loss": 0.5034, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 2.412087912087912, |
|
"grad_norm": 0.2011557586955495, |
|
"learning_rate": 2.2499086484585255e-06, |
|
"loss": 0.5023, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 2.4175824175824174, |
|
"grad_norm": 0.2043529990444942, |
|
"learning_rate": 2.2096658081119793e-06, |
|
"loss": 0.5142, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 2.4175824175824174, |
|
"eval_loss": 0.662318229675293, |
|
"eval_runtime": 56.3794, |
|
"eval_samples_per_second": 24.442, |
|
"eval_steps_per_second": 0.106, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 2.423076923076923, |
|
"grad_norm": 0.2007104518256576, |
|
"learning_rate": 2.1697413758237785e-06, |
|
"loss": 0.5068, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 2.4285714285714284, |
|
"grad_norm": 0.20519816271013885, |
|
"learning_rate": 2.130136983393112e-06, |
|
"loss": 0.5153, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 2.4340659340659343, |
|
"grad_norm": 0.19385746620653593, |
|
"learning_rate": 2.0908542495384276e-06, |
|
"loss": 0.4915, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 2.4395604395604398, |
|
"grad_norm": 0.20504945100422145, |
|
"learning_rate": 2.051894779831286e-06, |
|
"loss": 0.5117, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 2.4450549450549453, |
|
"grad_norm": 0.20350739624315656, |
|
"learning_rate": 2.0132601666307295e-06, |
|
"loss": 0.5118, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 2.4505494505494507, |
|
"grad_norm": 0.2038803889957168, |
|
"learning_rate": 1.9749519890182035e-06, |
|
"loss": 0.5144, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 2.456043956043956, |
|
"grad_norm": 0.20292180332730542, |
|
"learning_rate": 1.936971812733012e-06, |
|
"loss": 0.513, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 2.4615384615384617, |
|
"grad_norm": 0.19993755435515206, |
|
"learning_rate": 1.8993211901083353e-06, |
|
"loss": 0.5145, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 2.467032967032967, |
|
"grad_norm": 0.20670573681391916, |
|
"learning_rate": 1.8620016600077516e-06, |
|
"loss": 0.5159, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 2.4725274725274726, |
|
"grad_norm": 0.19479382540898788, |
|
"learning_rate": 1.8250147477623836e-06, |
|
"loss": 0.5068, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 2.478021978021978, |
|
"grad_norm": 0.20005895519952888, |
|
"learning_rate": 1.7883619651085194e-06, |
|
"loss": 0.5165, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 2.4835164835164836, |
|
"grad_norm": 0.2009142357322209, |
|
"learning_rate": 1.7520448101258325e-06, |
|
"loss": 0.4934, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 2.489010989010989, |
|
"grad_norm": 0.20615350297111454, |
|
"learning_rate": 1.716064767176172e-06, |
|
"loss": 0.4966, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 2.4945054945054945, |
|
"grad_norm": 0.19574790617423804, |
|
"learning_rate": 1.6804233068428678e-06, |
|
"loss": 0.4991, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 0.1968274274952375, |
|
"learning_rate": 1.6451218858706374e-06, |
|
"loss": 0.5132, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 2.5054945054945055, |
|
"grad_norm": 0.19627672556133136, |
|
"learning_rate": 1.6101619471060415e-06, |
|
"loss": 0.4986, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 2.510989010989011, |
|
"grad_norm": 0.232139426765921, |
|
"learning_rate": 1.5755449194385164e-06, |
|
"loss": 0.5119, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 2.5164835164835164, |
|
"grad_norm": 0.20274183634685633, |
|
"learning_rate": 1.5412722177419658e-06, |
|
"loss": 0.5023, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 2.521978021978022, |
|
"grad_norm": 0.1991951421133761, |
|
"learning_rate": 1.5073452428169444e-06, |
|
"loss": 0.4949, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 2.5274725274725274, |
|
"grad_norm": 0.19379452216149648, |
|
"learning_rate": 1.4737653813333774e-06, |
|
"loss": 0.5086, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 2.5274725274725274, |
|
"eval_loss": 0.6616050004959106, |
|
"eval_runtime": 55.0037, |
|
"eval_samples_per_second": 25.053, |
|
"eval_steps_per_second": 0.109, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 2.532967032967033, |
|
"grad_norm": 0.21494093339800838, |
|
"learning_rate": 1.4405340057739203e-06, |
|
"loss": 0.5233, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 2.5384615384615383, |
|
"grad_norm": 0.20790725753107414, |
|
"learning_rate": 1.407652474377832e-06, |
|
"loss": 0.5262, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 2.543956043956044, |
|
"grad_norm": 0.23490670194522875, |
|
"learning_rate": 1.3751221310854778e-06, |
|
"loss": 0.5064, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 2.5494505494505493, |
|
"grad_norm": 0.2045871257636731, |
|
"learning_rate": 1.3429443054833913e-06, |
|
"loss": 0.5022, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 2.5549450549450547, |
|
"grad_norm": 0.20260454811288686, |
|
"learning_rate": 1.311120312749935e-06, |
|
"loss": 0.5001, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 2.5604395604395602, |
|
"grad_norm": 0.20417611729894805, |
|
"learning_rate": 1.2796514536015492e-06, |
|
"loss": 0.5051, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 2.5659340659340657, |
|
"grad_norm": 0.20980337074664027, |
|
"learning_rate": 1.2485390142395793e-06, |
|
"loss": 0.5191, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 2.571428571428571, |
|
"grad_norm": 0.20318999346422076, |
|
"learning_rate": 1.2177842662977136e-06, |
|
"loss": 0.5247, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 2.5769230769230766, |
|
"grad_norm": 0.19629810633248806, |
|
"learning_rate": 1.1873884667900125e-06, |
|
"loss": 0.5117, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 2.5824175824175826, |
|
"grad_norm": 0.20122953995800785, |
|
"learning_rate": 1.1573528580595195e-06, |
|
"loss": 0.5022, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 2.587912087912088, |
|
"grad_norm": 0.19407168789538073, |
|
"learning_rate": 1.1276786677274866e-06, |
|
"loss": 0.5018, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 2.5934065934065935, |
|
"grad_norm": 0.20866298600964597, |
|
"learning_rate": 1.0983671086432146e-06, |
|
"loss": 0.5131, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 2.598901098901099, |
|
"grad_norm": 0.19859110363436613, |
|
"learning_rate": 1.069419378834461e-06, |
|
"loss": 0.5072, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 2.6043956043956045, |
|
"grad_norm": 0.1972544497689168, |
|
"learning_rate": 1.040836661458482e-06, |
|
"loss": 0.5121, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 2.60989010989011, |
|
"grad_norm": 0.2229095733014629, |
|
"learning_rate": 1.0126201247536783e-06, |
|
"loss": 0.5023, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 2.6153846153846154, |
|
"grad_norm": 0.2078898580527157, |
|
"learning_rate": 9.8477092199184e-07, |
|
"loss": 0.4999, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 2.620879120879121, |
|
"grad_norm": 0.20385043702217534, |
|
"learning_rate": 9.57290191431013e-07, |
|
"loss": 0.4973, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 2.6263736263736264, |
|
"grad_norm": 0.20024077890781708, |
|
"learning_rate": 9.301790562689794e-07, |
|
"loss": 0.5115, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 2.631868131868132, |
|
"grad_norm": 0.20229645144805108, |
|
"learning_rate": 9.034386245973359e-07, |
|
"loss": 0.518, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 2.6373626373626373, |
|
"grad_norm": 0.20806472260190578, |
|
"learning_rate": 8.770699893562273e-07, |
|
"loss": 0.4998, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 2.6373626373626373, |
|
"eval_loss": 0.6603860259056091, |
|
"eval_runtime": 55.4261, |
|
"eval_samples_per_second": 24.862, |
|
"eval_steps_per_second": 0.108, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 2.642857142857143, |
|
"grad_norm": 0.20212616235290484, |
|
"learning_rate": 8.510742282896545e-07, |
|
"loss": 0.5176, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 2.6483516483516483, |
|
"grad_norm": 0.19926286905249757, |
|
"learning_rate": 8.254524039014289e-07, |
|
"loss": 0.504, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 2.6538461538461537, |
|
"grad_norm": 0.20491396527113304, |
|
"learning_rate": 8.002055634117578e-07, |
|
"loss": 0.5127, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 2.659340659340659, |
|
"grad_norm": 0.1984394044529964, |
|
"learning_rate": 7.753347387144294e-07, |
|
"loss": 0.4961, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 2.6648351648351647, |
|
"grad_norm": 0.20457728804801084, |
|
"learning_rate": 7.508409463346389e-07, |
|
"loss": 0.5166, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 2.67032967032967, |
|
"grad_norm": 0.2053681782347483, |
|
"learning_rate": 7.26725187387446e-07, |
|
"loss": 0.4985, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 2.675824175824176, |
|
"grad_norm": 0.21034306160224336, |
|
"learning_rate": 7.029884475368542e-07, |
|
"loss": 0.5286, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 2.6813186813186816, |
|
"grad_norm": 0.21208091059236112, |
|
"learning_rate": 6.796316969555205e-07, |
|
"loss": 0.5135, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 2.686813186813187, |
|
"grad_norm": 0.20061617557170722, |
|
"learning_rate": 6.566558902851161e-07, |
|
"loss": 0.5248, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 2.6923076923076925, |
|
"grad_norm": 0.2077389312882985, |
|
"learning_rate": 6.340619665972847e-07, |
|
"loss": 0.5265, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 2.697802197802198, |
|
"grad_norm": 0.1907623497336921, |
|
"learning_rate": 6.118508493552866e-07, |
|
"loss": 0.5048, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 2.7032967032967035, |
|
"grad_norm": 0.19886591589245772, |
|
"learning_rate": 5.900234463762367e-07, |
|
"loss": 0.506, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 2.708791208791209, |
|
"grad_norm": 0.19691007260870758, |
|
"learning_rate": 5.685806497940027e-07, |
|
"loss": 0.511, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 2.7142857142857144, |
|
"grad_norm": 0.19864992732710893, |
|
"learning_rate": 5.475233360227516e-07, |
|
"loss": 0.5093, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 2.71978021978022, |
|
"grad_norm": 0.20129564143877238, |
|
"learning_rate": 5.268523657211188e-07, |
|
"loss": 0.4973, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 2.7252747252747254, |
|
"grad_norm": 0.19814396411057444, |
|
"learning_rate": 5.065685837570312e-07, |
|
"loss": 0.4985, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 2.730769230769231, |
|
"grad_norm": 0.19965550111609479, |
|
"learning_rate": 4.866728191731829e-07, |
|
"loss": 0.5111, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 2.7362637362637363, |
|
"grad_norm": 0.19457379885149514, |
|
"learning_rate": 4.671658851531424e-07, |
|
"loss": 0.5057, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 2.741758241758242, |
|
"grad_norm": 0.19940679212384896, |
|
"learning_rate": 4.480485789881217e-07, |
|
"loss": 0.5127, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 2.7472527472527473, |
|
"grad_norm": 0.19251317932524864, |
|
"learning_rate": 4.293216820443891e-07, |
|
"loss": 0.5029, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 2.7472527472527473, |
|
"eval_loss": 0.6601957678794861, |
|
"eval_runtime": 54.7865, |
|
"eval_samples_per_second": 25.152, |
|
"eval_steps_per_second": 0.11, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 2.7527472527472527, |
|
"grad_norm": 0.20141031688990624, |
|
"learning_rate": 4.109859597313237e-07, |
|
"loss": 0.5199, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 2.758241758241758, |
|
"grad_norm": 0.2031083447799231, |
|
"learning_rate": 3.9304216147014853e-07, |
|
"loss": 0.5058, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 2.7637362637362637, |
|
"grad_norm": 0.20105232675523538, |
|
"learning_rate": 3.7549102066328226e-07, |
|
"loss": 0.512, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 2.769230769230769, |
|
"grad_norm": 0.39484901944232603, |
|
"learning_rate": 3.5833325466437697e-07, |
|
"loss": 0.5012, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 2.7747252747252746, |
|
"grad_norm": 0.19709914420778502, |
|
"learning_rate": 3.4156956474898805e-07, |
|
"loss": 0.5045, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 2.78021978021978, |
|
"grad_norm": 0.20621710050936062, |
|
"learning_rate": 3.2520063608592165e-07, |
|
"loss": 0.5121, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 2.7857142857142856, |
|
"grad_norm": 0.1991105530357979, |
|
"learning_rate": 3.0922713770922155e-07, |
|
"loss": 0.5203, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 2.791208791208791, |
|
"grad_norm": 0.20012961338063315, |
|
"learning_rate": 2.9364972249082747e-07, |
|
"loss": 0.5184, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 2.7967032967032965, |
|
"grad_norm": 0.19896717513784648, |
|
"learning_rate": 2.7846902711389236e-07, |
|
"loss": 0.5108, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 2.802197802197802, |
|
"grad_norm": 0.1977548907547512, |
|
"learning_rate": 2.636856720467573e-07, |
|
"loss": 0.5208, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 2.8076923076923075, |
|
"grad_norm": 0.20270402323570907, |
|
"learning_rate": 2.493002615175977e-07, |
|
"loss": 0.5006, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 2.813186813186813, |
|
"grad_norm": 0.1942837253435945, |
|
"learning_rate": 2.3531338348971366e-07, |
|
"loss": 0.5119, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 2.8186813186813184, |
|
"grad_norm": 0.20474339797776248, |
|
"learning_rate": 2.217256096375131e-07, |
|
"loss": 0.5176, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 2.824175824175824, |
|
"grad_norm": 0.2010552477495884, |
|
"learning_rate": 2.0853749532314006e-07, |
|
"loss": 0.5073, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 2.82967032967033, |
|
"grad_norm": 0.2035067000005909, |
|
"learning_rate": 1.9574957957377294e-07, |
|
"loss": 0.5096, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 2.8351648351648353, |
|
"grad_norm": 0.20125633333431672, |
|
"learning_rate": 1.8336238505959892e-07, |
|
"loss": 0.5121, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 2.840659340659341, |
|
"grad_norm": 0.20332908603793057, |
|
"learning_rate": 1.7137641807244754e-07, |
|
"loss": 0.5125, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 2.8461538461538463, |
|
"grad_norm": 0.19665455030518153, |
|
"learning_rate": 1.5979216850509848e-07, |
|
"loss": 0.4987, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 2.8516483516483517, |
|
"grad_norm": 0.1966270172986848, |
|
"learning_rate": 1.4861010983126202e-07, |
|
"loss": 0.4874, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 2.857142857142857, |
|
"grad_norm": 0.19834917582035766, |
|
"learning_rate": 1.3783069908621772e-07, |
|
"loss": 0.5146, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 2.857142857142857, |
|
"eval_loss": 0.6599460244178772, |
|
"eval_runtime": 56.5256, |
|
"eval_samples_per_second": 24.378, |
|
"eval_steps_per_second": 0.106, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 2.8626373626373627, |
|
"grad_norm": 0.19885509465905268, |
|
"learning_rate": 1.274543768481451e-07, |
|
"loss": 0.5192, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 2.868131868131868, |
|
"grad_norm": 0.19541257662283346, |
|
"learning_rate": 1.1748156722011128e-07, |
|
"loss": 0.4926, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 2.8736263736263736, |
|
"grad_norm": 0.19388744876156347, |
|
"learning_rate": 1.0791267781273263e-07, |
|
"loss": 0.5094, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 2.879120879120879, |
|
"grad_norm": 0.1976970112287548, |
|
"learning_rate": 9.874809972752697e-08, |
|
"loss": 0.5156, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 2.8846153846153846, |
|
"grad_norm": 0.20191971179188523, |
|
"learning_rate": 8.99882075409153e-08, |
|
"loss": 0.5176, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 2.89010989010989, |
|
"grad_norm": 0.20009589256600918, |
|
"learning_rate": 8.16333592889207e-08, |
|
"loss": 0.5135, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 2.8956043956043955, |
|
"grad_norm": 0.19825056208420425, |
|
"learning_rate": 7.368389645252772e-08, |
|
"loss": 0.5048, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 2.901098901098901, |
|
"grad_norm": 0.20207333625508234, |
|
"learning_rate": 6.61401439437348e-08, |
|
"loss": 0.4971, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 2.9065934065934065, |
|
"grad_norm": 0.20352522502880846, |
|
"learning_rate": 5.9002410092262593e-08, |
|
"loss": 0.5106, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 2.912087912087912, |
|
"grad_norm": 0.20403786605478308, |
|
"learning_rate": 5.227098663296404e-08, |
|
"loss": 0.5229, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 2.9175824175824174, |
|
"grad_norm": 0.2041806025949136, |
|
"learning_rate": 4.594614869388947e-08, |
|
"loss": 0.5118, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 2.9230769230769234, |
|
"grad_norm": 0.20136601507382682, |
|
"learning_rate": 4.002815478505007e-08, |
|
"loss": 0.4982, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 2.928571428571429, |
|
"grad_norm": 0.20662702533640964, |
|
"learning_rate": 3.451724678784518e-08, |
|
"loss": 0.5103, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 2.9340659340659343, |
|
"grad_norm": 0.21824571202036225, |
|
"learning_rate": 2.9413649945182475e-08, |
|
"loss": 0.5227, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 2.9395604395604398, |
|
"grad_norm": 0.20023301939180432, |
|
"learning_rate": 2.47175728522675e-08, |
|
"loss": 0.5141, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 2.9450549450549453, |
|
"grad_norm": 0.20718821800921122, |
|
"learning_rate": 2.0429207448078302e-08, |
|
"loss": 0.5065, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 2.9505494505494507, |
|
"grad_norm": 0.20188941781097472, |
|
"learning_rate": 1.654872900752169e-08, |
|
"loss": 0.5202, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 2.956043956043956, |
|
"grad_norm": 0.20379828773126438, |
|
"learning_rate": 1.3076296134271194e-08, |
|
"loss": 0.5097, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 2.9615384615384617, |
|
"grad_norm": 0.2043103342841997, |
|
"learning_rate": 1.0012050754277802e-08, |
|
"loss": 0.5127, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 2.967032967032967, |
|
"grad_norm": 0.19873220074389136, |
|
"learning_rate": 7.356118109977939e-09, |
|
"loss": 0.5293, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 2.967032967032967, |
|
"eval_loss": 0.6600582599639893, |
|
"eval_runtime": 57.0547, |
|
"eval_samples_per_second": 24.152, |
|
"eval_steps_per_second": 0.105, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 2.9725274725274726, |
|
"grad_norm": 0.20314150689373117, |
|
"learning_rate": 5.108606755168666e-09, |
|
"loss": 0.5146, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 2.978021978021978, |
|
"grad_norm": 0.19470505277621175, |
|
"learning_rate": 3.269608550571235e-09, |
|
"loss": 0.4919, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 2.9835164835164836, |
|
"grad_norm": 0.19203738679178015, |
|
"learning_rate": 1.839198660079644e-09, |
|
"loss": 0.5043, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 2.989010989010989, |
|
"grad_norm": 0.20387845762850706, |
|
"learning_rate": 8.174355476864293e-10, |
|
"loss": 0.5199, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 2.9945054945054945, |
|
"grad_norm": 0.19811095343066756, |
|
"learning_rate": 2.0436097509235475e-10, |
|
"loss": 0.5033, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 0.1948663079714202, |
|
"learning_rate": 0.0, |
|
"loss": 0.4979, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 2730, |
|
"total_flos": 1.183665069490176e+16, |
|
"train_loss": 0.5413940727492392, |
|
"train_runtime": 63958.3461, |
|
"train_samples_per_second": 5.459, |
|
"train_steps_per_second": 0.043 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 2730, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.183665069490176e+16, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|