{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 20.0,
  "eval_steps": 500,
  "global_step": 640,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.03125, "grad_norm": 46.110501381920706, "learning_rate": 3.125e-07, "loss": 12.2656, "step": 1 },
    { "epoch": 0.0625, "grad_norm": 44.3840095403182, "learning_rate": 6.25e-07, "loss": 12.25, "step": 2 },
    { "epoch": 0.09375, "grad_norm": 45.08551841682328, "learning_rate": 9.375000000000001e-07, "loss": 12.2344, "step": 3 },
    { "epoch": 0.125, "grad_norm": 45.1316287538876, "learning_rate": 1.25e-06, "loss": 12.2344, "step": 4 },
    { "epoch": 0.15625, "grad_norm": 46.41560905837161, "learning_rate": 1.5625e-06, "loss": 12.2031, "step": 5 },
    { "epoch": 0.1875, "grad_norm": 45.99984421115488, "learning_rate": 1.8750000000000003e-06, "loss": 12.2188, "step": 6 },
    { "epoch": 0.21875, "grad_norm": 46.38339519477593, "learning_rate": 2.1875000000000002e-06, "loss": 12.1406, "step": 7 },
    { "epoch": 0.25, "grad_norm": 46.40529439389694, "learning_rate": 2.5e-06, "loss": 12.1719, "step": 8 },
    { "epoch": 0.28125, "grad_norm": 43.65578500040681, "learning_rate": 2.8125e-06, "loss": 12.0156, "step": 9 },
    { "epoch": 0.3125, "grad_norm": 44.68853723233476, "learning_rate": 3.125e-06, "loss": 11.9688, "step": 10 },
    { "epoch": 0.34375, "grad_norm": 44.78101790123477, "learning_rate": 3.4375e-06, "loss": 11.9219, "step": 11 },
    { "epoch": 0.375, "grad_norm": 46.09518551380032, "learning_rate": 3.7500000000000005e-06, "loss": 11.2031, "step": 12 },
    { "epoch": 0.40625, "grad_norm": 43.314408501134345, "learning_rate": 4.0625000000000005e-06, "loss": 11.1406, "step": 13 },
    { "epoch": 0.4375, "grad_norm": 43.67395224267264, "learning_rate": 4.3750000000000005e-06, "loss": 10.9844, "step": 14 },
    { "epoch": 0.46875, "grad_norm": 44.942577210615966, "learning_rate": 4.6875000000000004e-06, "loss": 10.9062, "step": 15 },
    { "epoch": 0.5, "grad_norm": 39.795501724737136, "learning_rate": 5e-06, "loss": 9.2031, "step": 16 },
    { "epoch": 0.53125, "grad_norm": 36.85445424560905, "learning_rate": 5.3125e-06, "loss": 9.25, "step": 17 },
    { "epoch": 0.5625, "grad_norm": 36.386407981382206, "learning_rate": 5.625e-06, "loss": 9.0, "step": 18 },
    { "epoch": 0.59375, "grad_norm": 31.01202194392733, "learning_rate": 5.9375e-06, "loss": 8.4844, "step": 19 },
    { "epoch": 0.625, "grad_norm": 29.63788066305587, "learning_rate": 6.25e-06, "loss": 8.4062, "step": 20 },
    { "epoch": 0.65625, "grad_norm": 26.180432188386177, "learning_rate": 6.5625e-06, "loss": 8.2969, "step": 21 },
    { "epoch": 0.6875, "grad_norm": 19.359932695283444, "learning_rate": 6.875e-06, "loss": 7.7578, "step": 22 },
    { "epoch": 0.71875, "grad_norm": 16.80095311236084, "learning_rate": 7.1875e-06, "loss": 7.6016, "step": 23 },
    { "epoch": 0.75, "grad_norm": 12.119680802563018, "learning_rate": 7.500000000000001e-06, "loss": 7.3125, "step": 24 },
    { "epoch": 0.78125, "grad_norm": 12.600638957374223, "learning_rate": 7.8125e-06, "loss": 7.4453, "step": 25 },
    { "epoch": 0.8125, "grad_norm": 19.42990002822014, "learning_rate": 8.125000000000001e-06, "loss": 7.5469, "step": 26 },
    { "epoch": 0.84375, "grad_norm": 22.374131848811093, "learning_rate": 8.4375e-06, "loss": 7.375, "step": 27 },
    { "epoch": 0.875, "grad_norm": 19.57930909452537, "learning_rate": 8.750000000000001e-06, "loss": 7.2109, "step": 28 },
    { "epoch": 0.90625, "grad_norm": 20.324224063135294, "learning_rate": 9.0625e-06, "loss": 7.0781, "step": 29 },
    { "epoch": 0.9375, "grad_norm": 18.50485383281225, "learning_rate": 9.375000000000001e-06, "loss": 7.1016, "step": 30 },
    { "epoch": 0.96875, "grad_norm": 19.282887599812298, "learning_rate": 9.6875e-06, "loss": 6.7734, "step": 31 },
    { "epoch": 1.0, "grad_norm": 13.922627090211678, "learning_rate": 1e-05, "loss": 6.7344, "step": 32 },
    { "epoch": 1.03125, "grad_norm": 13.050915366974877, "learning_rate": 9.983552631578947e-06, "loss": 6.6406, "step": 33 },
    { "epoch": 1.0625, "grad_norm": 9.21367695459411, "learning_rate": 9.967105263157895e-06, "loss": 6.1484, "step": 34 },
    { "epoch": 1.09375, "grad_norm": 7.469938122435962, "learning_rate": 9.950657894736842e-06, "loss": 6.2891, "step": 35 },
    { "epoch": 1.125, "grad_norm": 6.322117149479666, "learning_rate": 9.93421052631579e-06, "loss": 6.0703, "step": 36 },
    { "epoch": 1.15625, "grad_norm": 6.067036130357828, "learning_rate": 9.917763157894738e-06, "loss": 6.2734, "step": 37 },
    { "epoch": 1.1875, "grad_norm": 6.159787369412575, "learning_rate": 9.901315789473686e-06, "loss": 6.0703, "step": 38 },
    { "epoch": 1.21875, "grad_norm": 6.0713071098842395, "learning_rate": 9.884868421052633e-06, "loss": 6.0, "step": 39 },
    { "epoch": 1.25, "grad_norm": 6.592393486649236, "learning_rate": 9.868421052631579e-06, "loss": 5.6641, "step": 40 },
    { "epoch": 1.28125, "grad_norm": 7.035653985047366, "learning_rate": 9.851973684210527e-06, "loss": 5.6016, "step": 41 },
    { "epoch": 1.3125, "grad_norm": 6.717323149397708, "learning_rate": 9.835526315789474e-06, "loss": 5.5469, "step": 42 },
    { "epoch": 1.34375, "grad_norm": 5.970768286182251, "learning_rate": 9.819078947368422e-06, "loss": 5.4297, "step": 43 },
    { "epoch": 1.375, "grad_norm": 5.723626744044176, "learning_rate": 9.80263157894737e-06, "loss": 5.7734, "step": 44 },
    { "epoch": 1.40625, "grad_norm": 5.1471050746329485, "learning_rate": 9.786184210526316e-06, "loss": 5.2812, "step": 45 },
    { "epoch": 1.4375, "grad_norm": 4.323727754176942, "learning_rate": 9.769736842105264e-06, "loss": 5.3047, "step": 46 },
    { "epoch": 1.46875, "grad_norm": 3.9055527444416493, "learning_rate": 9.753289473684211e-06, "loss": 5.3906, "step": 47 },
    { "epoch": 1.5, "grad_norm": 4.228742611105712, "learning_rate": 9.736842105263159e-06, "loss": 5.0391, "step": 48 },
    { "epoch": 1.53125, "grad_norm": 4.066259863295825, "learning_rate": 9.720394736842105e-06, "loss": 4.9141, "step": 49 },
    { "epoch": 1.5625, "grad_norm": 4.649487456233776, "learning_rate": 9.703947368421054e-06, "loss": 4.7344, "step": 50 },
    { "epoch": 1.59375, "grad_norm": 4.284492602425991, "learning_rate": 9.6875e-06, "loss": 4.7891, "step": 51 },
    { "epoch": 1.625, "grad_norm": 4.255834421159205, "learning_rate": 9.671052631578948e-06, "loss": 4.7656, "step": 52 },
    { "epoch": 1.65625, "grad_norm": 3.3905156951443787, "learning_rate": 9.654605263157896e-06, "loss": 4.8203, "step": 53 },
    { "epoch": 1.6875, "grad_norm": 3.372000616668671, "learning_rate": 9.638157894736843e-06, "loss": 4.6562, "step": 54 },
    { "epoch": 1.71875, "grad_norm": 3.3716451672730043, "learning_rate": 9.62171052631579e-06, "loss": 4.5703, "step": 55 },
    { "epoch": 1.75, "grad_norm": 3.4701893070848757, "learning_rate": 9.605263157894737e-06, "loss": 4.5469, "step": 56 },
    { "epoch": 1.78125, "grad_norm": 3.160350331885458, "learning_rate": 9.588815789473685e-06, "loss": 4.2344, "step": 57 },
    { "epoch": 1.8125, "grad_norm": 3.3967526879578323, "learning_rate": 9.572368421052632e-06, "loss": 4.3828, "step": 58 },
    { "epoch": 1.84375, "grad_norm": 3.0675497190805037, "learning_rate": 9.55592105263158e-06, "loss": 4.1836, "step": 59 },
    { "epoch": 1.875, "grad_norm": 3.2209407679699535, "learning_rate": 9.539473684210528e-06, "loss": 4.5391, "step": 60 },
    { "epoch": 1.90625, "grad_norm": 3.0374784801495185, "learning_rate": 9.523026315789474e-06, "loss": 4.5234, "step": 61 },
    { "epoch": 1.9375, "grad_norm": 3.1584530825465893, "learning_rate": 9.506578947368423e-06, "loss": 4.3125, "step": 62 },
    { "epoch": 1.96875, "grad_norm": 2.71281632857147, "learning_rate": 9.490131578947369e-06, "loss": 3.8281, "step": 63 },
    { "epoch": 2.0, "grad_norm": 2.7121073900509622, "learning_rate": 9.473684210526315e-06, "loss": 4.1562, "step": 64 },
    { "epoch": 2.03125, "grad_norm": 2.7419832449346178, "learning_rate": 9.457236842105264e-06, "loss": 3.6016, "step": 65 },
    { "epoch": 2.0625, "grad_norm": 2.822322918514082, "learning_rate": 9.440789473684212e-06, "loss": 3.8555, "step": 66 },
    { "epoch": 2.09375, "grad_norm": 2.6876019984970316, "learning_rate": 9.424342105263158e-06, "loss": 4.2031, "step": 67 },
    { "epoch": 2.125, "grad_norm": 2.7014572688765037, "learning_rate": 9.407894736842106e-06, "loss": 3.9922, "step": 68 },
    { "epoch": 2.15625, "grad_norm": 2.5749081557210487, "learning_rate": 9.391447368421054e-06, "loss": 4.0, "step": 69 },
    { "epoch": 2.1875, "grad_norm": 2.5094384154907816, "learning_rate": 9.375000000000001e-06, "loss": 3.5703, "step": 70 },
    { "epoch": 2.21875, "grad_norm": 2.3759939616387262, "learning_rate": 9.358552631578947e-06, "loss": 3.5703, "step": 71 },
    { "epoch": 2.25, "grad_norm": 2.396832813190029, "learning_rate": 9.342105263157895e-06, "loss": 3.5039, "step": 72 },
    { "epoch": 2.28125, "grad_norm": 2.2974057602228353, "learning_rate": 9.325657894736842e-06, "loss": 3.6836, "step": 73 },
    { "epoch": 2.3125, "grad_norm": 2.299078356511566, "learning_rate": 9.30921052631579e-06, "loss": 3.6328, "step": 74 },
    { "epoch": 2.34375, "grad_norm": 2.2167536329414, "learning_rate": 9.292763157894738e-06, "loss": 3.6289, "step": 75 },
    { "epoch": 2.375, "grad_norm": 2.291424274819479, "learning_rate": 9.276315789473686e-06, "loss": 3.5898, "step": 76 },
    { "epoch": 2.40625, "grad_norm": 2.0711161335212536, "learning_rate": 9.259868421052633e-06, "loss": 3.3398, "step": 77 },
    { "epoch": 2.4375, "grad_norm": 2.168829352719516, "learning_rate": 9.243421052631579e-06, "loss": 3.6367, "step": 78 },
    { "epoch": 2.46875, "grad_norm": 2.0050888506256443, "learning_rate": 9.226973684210527e-06, "loss": 3.3594, "step": 79 },
    { "epoch": 2.5, "grad_norm": 2.213218342065591, "learning_rate": 9.210526315789474e-06, "loss": 3.4688, "step": 80 },
    { "epoch": 2.53125, "grad_norm": 2.1783865648233567, "learning_rate": 9.194078947368422e-06, "loss": 3.5781, "step": 81 },
    { "epoch": 2.5625, "grad_norm": 1.8789106547226722, "learning_rate": 9.17763157894737e-06, "loss": 3.2969, "step": 82 },
    { "epoch": 2.59375, "grad_norm": 2.197246298703413, "learning_rate": 9.161184210526316e-06, "loss": 3.3203, "step": 83 },
    { "epoch": 2.625, "grad_norm": 1.9163387667758176, "learning_rate": 9.144736842105264e-06, "loss": 3.1172, "step": 84 },
    { "epoch": 2.65625, "grad_norm": 2.0702491303544193, "learning_rate": 9.128289473684211e-06, "loss": 3.0586, "step": 85 },
    { "epoch": 2.6875, "grad_norm": 1.8964079143443917, "learning_rate": 9.111842105263159e-06, "loss": 2.9219, "step": 86 },
    { "epoch": 2.71875, "grad_norm": 1.9913389941190272, "learning_rate": 9.095394736842105e-06, "loss": 3.3828, "step": 87 },
    { "epoch": 2.75, "grad_norm": 1.9481029459662997, "learning_rate": 9.078947368421054e-06, "loss": 2.8828, "step": 88 },
    { "epoch": 2.78125, "grad_norm": 1.897304436878228, "learning_rate": 9.0625e-06, "loss": 2.8633, "step": 89 },
    { "epoch": 2.8125, "grad_norm": 1.7962340916408015, "learning_rate": 9.046052631578948e-06, "loss": 3.0625, "step": 90 },
    { "epoch": 2.84375, "grad_norm": 1.8381297453196523, "learning_rate": 9.029605263157896e-06, "loss": 3.1094, "step": 91 },
    { "epoch": 2.875, "grad_norm": 1.7794526602162275, "learning_rate": 9.013157894736843e-06, "loss": 2.8594, "step": 92 },
    { "epoch": 2.90625, "grad_norm": 1.8047121766254635, "learning_rate": 8.996710526315791e-06, "loss": 3.1406, "step": 93 },
    { "epoch": 2.9375, "grad_norm": 1.9699073388267623, "learning_rate": 8.980263157894737e-06, "loss": 3.0078, "step": 94 },
    { "epoch": 2.96875, "grad_norm": 1.8731129484295872, "learning_rate": 8.963815789473685e-06, "loss": 2.875, "step": 95 },
    { "epoch": 3.0, "grad_norm": 1.6982200970791388, "learning_rate": 8.947368421052632e-06, "loss": 2.5625, "step": 96 },
    { "epoch": 3.03125, "grad_norm": 1.9031029201560958, "learning_rate": 8.93092105263158e-06, "loss": 2.9844, "step": 97 },
    { "epoch": 3.0625, "grad_norm": 1.6695451188240085, "learning_rate": 8.914473684210528e-06, "loss": 2.6523, "step": 98 },
    { "epoch": 3.09375, "grad_norm": 1.730680102186145, "learning_rate": 8.898026315789475e-06, "loss": 3.0859, "step": 99 },
    { "epoch": 3.125, "grad_norm": 1.6884819836662068, "learning_rate": 8.881578947368423e-06, "loss": 2.7148, "step": 100 },
    { "epoch": 3.15625, "grad_norm": 1.728765765557413, "learning_rate": 8.865131578947369e-06, "loss": 3.0, "step": 101 },
    { "epoch": 3.1875, "grad_norm": 1.8602197774702887, "learning_rate": 8.848684210526316e-06, "loss": 2.75, "step": 102 },
    { "epoch": 3.21875, "grad_norm": 1.7228141108566688, "learning_rate": 8.832236842105264e-06, "loss": 2.5664, "step": 103 },
    { "epoch": 3.25, "grad_norm": 1.810657852919142, "learning_rate": 8.81578947368421e-06, "loss": 2.7617, "step": 104 },
    { "epoch": 3.28125, "grad_norm": 1.649528540621614, "learning_rate": 8.799342105263158e-06, "loss": 2.9102, "step": 105 },
    { "epoch": 3.3125, "grad_norm": 1.6281918056322846, "learning_rate": 8.782894736842106e-06, "loss": 2.7109, "step": 106 },
    { "epoch": 3.34375, "grad_norm": 1.6464336351267892, "learning_rate": 8.766447368421054e-06, "loss": 2.6055, "step": 107 },
    { "epoch": 3.375, "grad_norm": 1.8980671641413749, "learning_rate": 8.750000000000001e-06, "loss": 2.8438, "step": 108 },
    { "epoch": 3.40625, "grad_norm": 1.8109357611137384, "learning_rate": 8.733552631578947e-06, "loss": 2.8516, "step": 109 },
    { "epoch": 3.4375, "grad_norm": 1.7292602276559284, "learning_rate": 8.717105263157895e-06, "loss": 2.8945, "step": 110 },
    { "epoch": 3.46875, "grad_norm": 1.5948507175911883, "learning_rate": 8.700657894736842e-06, "loss": 2.5156, "step": 111 },
    { "epoch": 3.5, "grad_norm": 1.806808364445775, "learning_rate": 8.68421052631579e-06, "loss": 2.7656, "step": 112 },
    { "epoch": 3.53125, "grad_norm": 1.5695807259145398, "learning_rate": 8.667763157894738e-06, "loss": 2.4023, "step": 113 },
    { "epoch": 3.5625, "grad_norm": 1.5483755852685912, "learning_rate": 8.651315789473685e-06, "loss": 2.543, "step": 114 },
    { "epoch": 3.59375, "grad_norm": 1.6190500817619615, "learning_rate": 8.634868421052633e-06, "loss": 2.4062, "step": 115 },
    { "epoch": 3.625, "grad_norm": 1.725081555813222, "learning_rate": 8.61842105263158e-06, "loss": 2.668, "step": 116 },
    { "epoch": 3.65625, "grad_norm": 1.5223857347255458, "learning_rate": 8.601973684210527e-06, "loss": 2.582, "step": 117 },
    { "epoch": 3.6875, "grad_norm": 1.4573586184299665, "learning_rate": 8.585526315789474e-06, "loss": 2.1621, "step": 118 },
    { "epoch": 3.71875, "grad_norm": 1.551630975643171, "learning_rate": 8.569078947368422e-06, "loss": 2.25, "step": 119 },
    { "epoch": 3.75, "grad_norm": 1.7115247638697302, "learning_rate": 8.552631578947368e-06, "loss": 2.5078, "step": 120 },
    { "epoch": 3.78125, "grad_norm": 1.5161089621031751, "learning_rate": 8.536184210526316e-06, "loss": 2.4844, "step": 121 },
    { "epoch": 3.8125, "grad_norm": 1.5463172048875915, "learning_rate": 8.519736842105265e-06, "loss": 2.3594, "step": 122 },
    { "epoch": 3.84375, "grad_norm": 1.632449509289891, "learning_rate": 8.503289473684211e-06, "loss": 2.3711, "step": 123 },
    { "epoch": 3.875, "grad_norm": 1.5220532463788692, "learning_rate": 8.486842105263159e-06, "loss": 2.5781, "step": 124 },
    { "epoch": 3.90625, "grad_norm": 1.4018744182292626, "learning_rate": 8.470394736842106e-06, "loss": 2.125, "step": 125 },
    { "epoch": 3.9375, "grad_norm": 1.452042757407898, "learning_rate": 8.453947368421054e-06, "loss": 2.2168, "step": 126 },
    { "epoch": 3.96875, "grad_norm": 1.5840590841632787, "learning_rate": 8.4375e-06, "loss": 2.375, "step": 127 },
    { "epoch": 4.0, "grad_norm": 1.6041264843039755, "learning_rate": 8.421052631578948e-06, "loss": 2.5898, "step": 128 },
    { "epoch": 4.03125, "grad_norm": 1.5449529169706775, "learning_rate": 8.404605263157896e-06, "loss": 2.1406, "step": 129 },
    { "epoch": 4.0625, "grad_norm": 1.4114794133075035, "learning_rate": 8.388157894736843e-06, "loss": 2.3242, "step": 130 },
    { "epoch": 4.09375, "grad_norm": 1.4120631432352837, "learning_rate": 8.371710526315791e-06, "loss": 2.4492, "step": 131 },
    { "epoch": 4.125, "grad_norm": 1.611229701046415, "learning_rate": 8.355263157894737e-06, "loss": 2.4121, "step": 132 },
    { "epoch": 4.15625, "grad_norm": 1.342411831838034, "learning_rate": 8.338815789473685e-06, "loss": 1.9609, "step": 133 },
    { "epoch": 4.1875, "grad_norm": 1.841969844503565, "learning_rate": 8.322368421052632e-06, "loss": 2.5645, "step": 134 },
    { "epoch": 4.21875, "grad_norm": 1.5185920240220854, "learning_rate": 8.30592105263158e-06, "loss": 2.2754, "step": 135 },
    { "epoch": 4.25, "grad_norm": 1.7834691967758824, "learning_rate": 8.289473684210526e-06, "loss": 2.4453, "step": 136 },
    { "epoch": 4.28125, "grad_norm": 1.5536983087480682, "learning_rate": 8.273026315789475e-06, "loss": 2.3867, "step": 137 },
    { "epoch": 4.3125, "grad_norm": 1.6713110707162413, "learning_rate": 8.256578947368423e-06, "loss": 2.5352, "step": 138 },
    { "epoch": 4.34375, "grad_norm": 1.4508550182948057, "learning_rate": 8.24013157894737e-06, "loss": 2.0938, "step": 139 },
    { "epoch": 4.375, "grad_norm": 1.5433665435103934, "learning_rate": 8.223684210526316e-06, "loss": 2.3633, "step": 140 },
    { "epoch": 4.40625, "grad_norm": 1.4646130800229755, "learning_rate": 8.207236842105264e-06, "loss": 2.1895, "step": 141 },
    { "epoch": 4.4375, "grad_norm": 1.2612998058796774, "learning_rate": 8.19078947368421e-06, "loss": 1.8887, "step": 142 },
    { "epoch": 4.46875, "grad_norm": 1.4242229934206938, "learning_rate": 8.174342105263158e-06, "loss": 2.248, "step": 143 },
    { "epoch": 4.5, "grad_norm": 1.3568311619256344, "learning_rate": 8.157894736842106e-06, "loss": 2.2656, "step": 144 },
    { "epoch": 4.53125, "grad_norm": 1.5675469731233866, "learning_rate": 8.141447368421055e-06, "loss": 2.3906, "step": 145 },
    { "epoch": 4.5625, "grad_norm": 1.4677734689265398, "learning_rate": 8.125000000000001e-06, "loss": 2.166, "step": 146 },
    { "epoch": 4.59375, "grad_norm": 1.4095494134291513, "learning_rate": 8.108552631578947e-06, "loss": 2.0801, "step": 147 },
    { "epoch": 4.625, "grad_norm": 1.2878578828666303, "learning_rate": 8.092105263157896e-06, "loss": 1.9707, "step": 148 },
    { "epoch": 4.65625, "grad_norm": 1.3374208950735806, "learning_rate": 8.075657894736842e-06, "loss": 2.0273, "step": 149 },
    { "epoch": 4.6875, "grad_norm": 1.3729371606088205, "learning_rate": 8.05921052631579e-06, "loss": 2.1289, "step": 150 },
    { "epoch": 4.71875, "grad_norm": 1.3241742252417898, "learning_rate": 8.042763157894737e-06, "loss": 2.0879, "step": 151 },
    { "epoch": 4.75, "grad_norm": 1.3471247242485715, "learning_rate": 8.026315789473685e-06, "loss": 1.8652, "step": 152 },
    { "epoch": 4.78125, "grad_norm": 1.4796814241372787, "learning_rate": 8.009868421052633e-06, "loss": 2.0898, "step": 153 },
    { "epoch": 4.8125, "grad_norm": 1.495355734738571, "learning_rate": 7.99342105263158e-06, "loss": 2.2305, "step": 154 },
    { "epoch": 4.84375, "grad_norm": 1.5568481473400642, "learning_rate": 7.976973684210527e-06, "loss": 2.0957, "step": 155 },
    { "epoch": 4.875, "grad_norm": 1.418950047941357, "learning_rate": 7.960526315789474e-06, "loss": 2.0879, "step": 156 },
    { "epoch": 4.90625, "grad_norm": 1.2150012585524623, "learning_rate": 7.944078947368422e-06, "loss": 1.7637, "step": 157 },
    { "epoch": 4.9375, "grad_norm": 1.5502099489754206, "learning_rate": 7.927631578947368e-06, "loss": 2.3086, "step": 158 },
    { "epoch": 4.96875, "grad_norm": 1.4300086207532696, "learning_rate": 7.911184210526316e-06, "loss": 1.7852, "step": 159 },
    { "epoch": 5.0, "grad_norm": 1.348782645249337, "learning_rate": 7.894736842105265e-06, "loss": 1.8965, "step": 160 },
    { "epoch": 5.03125, "grad_norm": 1.345313969026504, "learning_rate": 7.878289473684211e-06, "loss": 2.0703, "step": 161 },
    { "epoch": 5.0625, "grad_norm": 1.3410519038383748, "learning_rate": 7.86184210526316e-06, "loss": 1.9863, "step": 162 },
    { "epoch": 5.09375, "grad_norm": 1.3415275737905352, "learning_rate": 7.845394736842106e-06, "loss": 1.9258, "step": 163 },
    { "epoch": 5.125, "grad_norm": 1.4042294952658152, "learning_rate": 7.828947368421054e-06, "loss": 2.125, "step": 164 },
    { "epoch": 5.15625, "grad_norm": 1.3663594348069061, "learning_rate": 7.8125e-06, "loss": 2.0352, "step": 165 },
    { "epoch": 5.1875, "grad_norm": 1.5744500230349077, "learning_rate": 7.796052631578948e-06, "loss": 2.2285, "step": 166 },
    { "epoch": 5.21875, "grad_norm": 1.3562739084585815, "learning_rate": 7.779605263157895e-06, "loss": 2.0586, "step": 167 },
    { "epoch": 5.25, "grad_norm": 1.2277460241295945, "learning_rate": 7.763157894736843e-06, "loss": 1.7441, "step": 168 },
    { "epoch": 5.28125, "grad_norm": 1.3433735977540853, "learning_rate": 7.746710526315791e-06, "loss": 2.043, "step": 169 },
    { "epoch": 5.3125, "grad_norm": 1.5745506941777736, "learning_rate": 7.730263157894737e-06, "loss": 2.041, "step": 170 },
    { "epoch": 5.34375, "grad_norm": 1.466403250520866, "learning_rate": 7.713815789473686e-06, "loss": 2.1289, "step": 171 },
    { "epoch": 5.375, "grad_norm": 1.3116428932247919, "learning_rate": 7.697368421052632e-06, "loss": 1.9102, "step": 172 },
    { "epoch": 5.40625, "grad_norm": 1.41620004981607, "learning_rate": 7.680921052631578e-06, "loss": 1.9766, "step": 173 },
    { "epoch": 5.4375, "grad_norm": 1.2068214874535954, "learning_rate": 7.664473684210527e-06, "loss": 1.7871, "step": 174 },
    { "epoch": 5.46875, "grad_norm": 1.3520693075990964, "learning_rate": 7.648026315789475e-06, "loss": 1.9141, "step": 175 },
    { "epoch": 5.5, "grad_norm": 1.2305509194591682, "learning_rate": 7.631578947368423e-06, "loss": 1.8008, "step": 176 },
    { "epoch": 5.53125, "grad_norm": 1.349983596276008, "learning_rate": 7.615131578947369e-06, "loss": 1.7227, "step": 177 },
    { "epoch": 5.5625, "grad_norm": 1.4158833046390478, "learning_rate": 7.598684210526316e-06, "loss": 2.002, "step": 178 },
    { "epoch": 5.59375, "grad_norm": 1.2163070728239471, "learning_rate": 7.582236842105264e-06, "loss": 1.918, "step": 179 },
    { "epoch": 5.625, "grad_norm": 1.2891781817034034, "learning_rate": 7.565789473684211e-06, "loss": 1.752, "step": 180 },
    { "epoch": 5.65625, "grad_norm": 1.2873813695474143, "learning_rate": 7.549342105263159e-06, "loss": 1.8574, "step": 181 },
    { "epoch": 5.6875, "grad_norm": 1.4129403193981556, "learning_rate": 7.532894736842106e-06, "loss": 2.1094, "step": 182 },
    { "epoch": 5.71875, "grad_norm": 1.272823109386593, "learning_rate": 7.516447368421054e-06, "loss": 1.7344, "step": 183 },
    { "epoch": 5.75, "grad_norm": 1.4755129897251515, "learning_rate": 7.500000000000001e-06, "loss": 2.0078, "step": 184 },
    { "epoch": 5.78125, "grad_norm": 1.2740980855037742, "learning_rate": 7.4835526315789475e-06, "loss": 1.873, "step": 185 },
    { "epoch": 5.8125, "grad_norm": 1.23447908173342, "learning_rate": 7.467105263157896e-06, "loss": 1.8242, "step": 186 },
    { "epoch": 5.84375, "grad_norm": 1.3585314479888622, "learning_rate": 7.450657894736842e-06, "loss": 1.8633, "step": 187 },
    { "epoch": 5.875, "grad_norm": 1.2678054921308055, "learning_rate": 7.43421052631579e-06, "loss": 1.8125, "step": 188 },
    { "epoch": 5.90625, "grad_norm": 1.3108222757243544, "learning_rate": 7.4177631578947374e-06, "loss": 1.6895, "step": 189 },
    { "epoch": 5.9375, "grad_norm": 1.4562794514992154, "learning_rate": 7.4013157894736856e-06, "loss": 1.9023, "step": 190 },
    { "epoch": 5.96875, "grad_norm": 1.2807964774559686, "learning_rate": 7.384868421052632e-06, "loss": 1.9629, "step": 191 },
    { "epoch": 6.0, "grad_norm": 1.340863110638616, "learning_rate": 7.368421052631579e-06, "loss": 1.9531, "step": 192 },
    { "epoch": 6.03125, "grad_norm": 1.2127416869589702, "learning_rate": 7.351973684210527e-06, "loss": 1.8223, "step": 193 },
    { "epoch": 6.0625, "grad_norm": 1.3467940572703827, "learning_rate": 7.335526315789474e-06, "loss": 2.0371, "step": 194 },
    { "epoch": 6.09375, "grad_norm": 1.1536204273784294, "learning_rate": 7.319078947368422e-06, "loss": 1.584, "step": 195 },
    { "epoch": 6.125, "grad_norm": 1.2729596706853674, "learning_rate": 7.302631578947369e-06, "loss": 1.8203, "step": 196 },
    { "epoch": 6.15625, "grad_norm": 1.1709559405981904, "learning_rate": 7.286184210526316e-06, "loss": 1.6777, "step": 197 },
    { "epoch": 6.1875, "grad_norm": 1.352052761699238, "learning_rate": 7.269736842105264e-06, "loss": 1.9102, "step": 198 },
    { "epoch": 6.21875, "grad_norm": 1.2750580445285065, "learning_rate": 7.253289473684211e-06, "loss": 1.7734, "step": 199 },
    { "epoch": 6.25, "grad_norm": 1.2939217003267616, "learning_rate": 7.236842105263158e-06, "loss": 1.791, "step": 200 },
    { "epoch": 6.28125, "grad_norm": 1.2650695751339902, "learning_rate": 7.220394736842106e-06, "loss": 1.6094, "step": 201 },
    { "epoch": 6.3125, "grad_norm": 1.25464131412363, "learning_rate": 7.203947368421054e-06, "loss": 1.7402, "step": 202 },
    { "epoch": 6.34375, "grad_norm": 1.5497524942602627, "learning_rate": 7.1875e-06, "loss": 1.7734, "step": 203 },
    { "epoch": 6.375, "grad_norm": 1.2160427199624535, "learning_rate": 7.1710526315789475e-06, "loss": 1.7285, "step": 204 },
    { "epoch": 6.40625, "grad_norm": 1.3487818927806237, "learning_rate": 7.154605263157896e-06, "loss": 1.7402, "step": 205 },
    { "epoch": 6.4375, "grad_norm": 1.107072434898807, "learning_rate": 7.138157894736842e-06, "loss": 1.6641, "step": 206 },
    { "epoch": 6.46875, "grad_norm": 1.4016573341641314, "learning_rate": 7.12171052631579e-06, "loss": 2.1387, "step": 207 },
    { "epoch": 6.5, "grad_norm": 1.3392867039415794, "learning_rate": 7.1052631578947375e-06, "loss": 1.7852, "step": 208 },
    { "epoch": 6.53125, "grad_norm": 1.344829225483593, "learning_rate": 7.088815789473686e-06, "loss": 1.8262, "step": 209 },
    { "epoch": 6.5625, "grad_norm": 1.2901220099849333, "learning_rate": 7.072368421052632e-06, "loss": 1.7832, "step": 210 },
    { "epoch": 6.59375, "grad_norm": 1.2583202389216346, "learning_rate": 7.055921052631579e-06, "loss": 1.625, "step": 211 },
    { "epoch": 6.625, "grad_norm": 1.2024207284992958, "learning_rate": 7.0394736842105274e-06, "loss": 1.7324, "step": 212 },
    { "epoch": 6.65625, "grad_norm": 1.574981670155728, "learning_rate": 7.023026315789474e-06, "loss": 1.8496, "step": 213 },
    { "epoch": 6.6875, "grad_norm": 1.2969168811442906, "learning_rate": 7.006578947368422e-06, "loss": 1.5938, "step": 214 },
    { "epoch": 6.71875, "grad_norm": 1.1616206155336706, "learning_rate": 6.990131578947369e-06, "loss": 1.4629, "step": 215 },
    { "epoch": 6.75, "grad_norm": 1.365223546599706, "learning_rate": 6.973684210526316e-06, "loss": 1.8008, "step": 216 },
    { "epoch": 6.78125, "grad_norm": 1.3510596482406705, "learning_rate": 6.957236842105264e-06, "loss": 1.8281, "step": 217 },
    { "epoch": 6.8125, "grad_norm": 1.4694592813286227, "learning_rate": 6.940789473684211e-06, "loss": 1.7637, "step": 218 },
    { "epoch": 6.84375, "grad_norm": 1.4286299323547051, "learning_rate": 6.924342105263158e-06, "loss": 1.7031, "step": 219 },
    { "epoch": 6.875, "grad_norm": 1.5197759621356106, "learning_rate": 6.907894736842106e-06, "loss": 1.5879, "step": 220 },
    { "epoch": 6.90625, "grad_norm": 1.1308149311661615, "learning_rate": 6.891447368421054e-06, "loss": 1.666, "step": 221 },
    { "epoch": 6.9375, "grad_norm": 1.290793747756769, "learning_rate": 6.875e-06, "loss": 1.7852, "step": 222 },
    { "epoch": 6.96875, "grad_norm": 1.4260685025654767, "learning_rate": 6.8585526315789475e-06, "loss": 1.8496, "step": 223 },
    { "epoch": 7.0, "grad_norm": 1.2591429399622196, "learning_rate": 6.842105263157896e-06, "loss": 1.6543, "step": 224 },
    { "epoch": 7.03125, "grad_norm": 1.3838081743754846, "learning_rate": 6.825657894736842e-06, "loss": 1.8633, "step": 225 },
    { "epoch": 7.0625, "grad_norm": 1.3226514168347299, "learning_rate": 6.80921052631579e-06, "loss": 1.918, "step": 226 },
    { "epoch": 7.09375, "grad_norm": 1.119759607546033, "learning_rate": 6.7927631578947375e-06, "loss": 1.3574, "step": 227 },
    { "epoch": 7.125, "grad_norm": 1.1770895940221853, "learning_rate": 6.776315789473686e-06, "loss": 1.5879, "step": 228 },
    { "epoch": 7.15625, "grad_norm": 1.4013517475514878, "learning_rate": 6.759868421052632e-06, "loss": 1.916, "step": 229 },
    { "epoch": 7.1875, "grad_norm": 1.2035057060414645, "learning_rate": 6.743421052631579e-06, "loss": 1.4551, "step": 230 },
    { "epoch": 7.21875, "grad_norm": 1.3524206532170369, "learning_rate": 6.7269736842105275e-06, "loss": 1.5488, "step": 231 },
    { "epoch": 7.25, "grad_norm": 1.3000002404533175, "learning_rate": 6.710526315789474e-06, "loss": 1.6367, "step": 232 },
    { "epoch": 7.28125, "grad_norm": 1.3002929917889723, "learning_rate": 6.694078947368422e-06, "loss": 1.6387, "step": 233 },
    { "epoch": 7.3125, "grad_norm": 1.3198740119814931, "learning_rate": 6.677631578947369e-06, "loss": 1.6973, "step": 234 },
    { "epoch": 7.34375, "grad_norm": 1.3505488242950343, "learning_rate": 6.661184210526316e-06, "loss": 1.7246, "step": 235 },
    { "epoch": 7.375, "grad_norm": 1.087590027191493, "learning_rate": 6.644736842105264e-06, "loss": 1.3516, "step": 236 },
    { "epoch": 7.40625, "grad_norm": 1.2635273727752099, "learning_rate": 6.62828947368421e-06, "loss": 1.5234, "step": 237 },
    { "epoch": 7.4375, "grad_norm": 1.2982082251620062, "learning_rate": 6.6118421052631584e-06, "loss": 1.6328, "step": 238 },
    { "epoch": 7.46875, "grad_norm": 1.194402347892603, "learning_rate": 6.595394736842106e-06, "loss": 1.5547, "step": 239 },
    { "epoch": 7.5, "grad_norm": 1.3089950328485123, "learning_rate": 6.578947368421054e-06, "loss": 1.7637, "step": 240 },
    { "epoch": 7.53125, "grad_norm": 1.2762780422661464, "learning_rate": 6.5625e-06, "loss": 1.5586, "step": 241 },
    { "epoch": 7.5625, "grad_norm": 1.1284842098871226, "learning_rate": 6.5460526315789476e-06, "loss": 1.4492, "step": 242 },
    { "epoch": 7.59375, "grad_norm": 1.529655443376855, "learning_rate": 6.529605263157896e-06, "loss": 1.7422, "step": 243 },
    { "epoch": 7.625, "grad_norm": 1.1665841904157732, "learning_rate": 6.513157894736842e-06, "loss": 1.5293, "step": 244 },
    { "epoch": 7.65625, "grad_norm": 1.1960803290051991, "learning_rate": 6.49671052631579e-06, "loss": 1.498, "step": 245 },
    { "epoch": 7.6875, "grad_norm": 1.03923554987526, "learning_rate": 6.4802631578947375e-06, "loss": 1.3477, "step": 246 },
    { "epoch": 7.71875, "grad_norm": 1.3337104732841285, "learning_rate": 6.463815789473686e-06, "loss": 1.918, "step": 247 },
    { "epoch": 7.75, "grad_norm": 1.2816634472422972, "learning_rate": 6.447368421052632e-06, "loss": 1.7285, "step": 248 },
    { "epoch": 7.78125, "grad_norm": 1.3927598479283319, "learning_rate": 6.430921052631579e-06, "loss": 1.7148, "step": 249 },
    { "epoch": 7.8125, "grad_norm": 1.1575457380088763, "learning_rate": 6.4144736842105275e-06, "loss": 1.4395, "step": 250 },
    { "epoch": 7.84375, "grad_norm": 1.3948540383365446, "learning_rate": 6.398026315789474e-06, "loss": 1.7207, "step": 251 },
    { "epoch": 7.875, "grad_norm": 1.1846914337228318, "learning_rate": 6.381578947368422e-06, "loss": 1.6621, "step": 252 },
    { "epoch": 7.90625, "grad_norm": 1.3729206538265337, "learning_rate": 6.3651315789473685e-06, "loss": 1.8105, "step": 253 },
    { "epoch": 7.9375, "grad_norm": 1.092566362043007, "learning_rate": 6.348684210526316e-06, "loss": 1.5332, "step": 254 },
    { "epoch": 7.96875, "grad_norm": 1.1938075771886005, "learning_rate": 6.332236842105264e-06, "loss": 1.5332, "step": 255 },
    { "epoch": 8.0, "grad_norm": 1.1884574116970386, "learning_rate": 6.31578947368421e-06, "loss": 1.6016, "step": 256 },
    { "epoch": 8.03125, "grad_norm": 1.2323040477702536, "learning_rate": 6.2993421052631585e-06, "loss": 1.7051, "step": 257 },
    { "epoch": 8.0625, "grad_norm": 1.1826155398481955, "learning_rate": 6.282894736842106e-06, "loss": 1.4453, "step": 258 },
    { "epoch": 8.09375, "grad_norm": 1.4256490905240415, "learning_rate": 6.266447368421054e-06, "loss": 2.0039, "step": 259 },
    { "epoch": 8.125, "grad_norm": 1.137042790902281, "learning_rate": 6.25e-06, "loss": 1.4004, "step": 260 },
    { "epoch": 8.15625, "grad_norm": 1.1547256937707602, "learning_rate": 6.233552631578948e-06, "loss": 1.4336, "step": 261 },
    { "epoch": 8.1875, "grad_norm": 1.3234819920803873, "learning_rate": 6.217105263157896e-06, "loss": 1.7266, "step": 262 },
    { "epoch": 8.21875, "grad_norm": 1.1223848680889128, "learning_rate": 6.200657894736842e-06, "loss": 1.4199, "step": 263 },
    { "epoch": 8.25, "grad_norm": 1.1469925068381117, "learning_rate": 6.18421052631579e-06, "loss": 1.4414, "step": 264 },
    { "epoch": 8.28125, "grad_norm": 1.2977669850108717, "learning_rate": 6.1677631578947376e-06, "loss": 1.6523, "step": 265 },
    { "epoch": 8.3125, "grad_norm": 1.4735184954231604, "learning_rate": 6.151315789473685e-06, "loss": 1.8223, "step": 266 },
    { "epoch": 8.34375, "grad_norm": 1.040621746376745, "learning_rate": 6.134868421052632e-06, "loss": 1.4766, "step": 267 },
    { "epoch": 8.375, "grad_norm": 1.34186018093504, "learning_rate": 6.118421052631579e-06, "loss": 1.8047, "step": 268 },
    { "epoch": 8.40625, "grad_norm": 1.1839311529004948, "learning_rate": 6.101973684210527e-06, "loss": 1.4316, "step": 269 },
    { "epoch": 8.4375, "grad_norm": 1.2199376693433344, "learning_rate": 6.085526315789474e-06, "loss": 1.543, "step": 270 },
    { "epoch": 8.46875, "grad_norm": 1.2316121100991364, "learning_rate": 6.069078947368422e-06, "loss": 1.5156, "step": 271 },
    { "epoch": 8.5, "grad_norm": 1.3823753070802702, "learning_rate": 6.0526315789473685e-06, "loss": 1.5879, "step": 272 },
    { "epoch": 8.53125, "grad_norm": 1.32128273050164, "learning_rate": 6.036184210526316e-06, "loss": 1.5781, "step": 273 },
    { "epoch": 8.5625, "grad_norm": 1.3424113003624236, "learning_rate": 6.019736842105264e-06, "loss": 1.748, "step": 274 },
    { "epoch": 8.59375, "grad_norm": 1.1466546955884545, "learning_rate": 6.00328947368421e-06, "loss": 1.3809, "step": 275 },
    { "epoch": 8.625, "grad_norm": 1.2076133131499247, "learning_rate": 5.9868421052631585e-06, "loss": 1.6699, "step": 276 },
    { "epoch": 8.65625, "grad_norm": 1.2213610720940913, "learning_rate": 5.970394736842106e-06, "loss": 1.3398, "step": 277 },
    { "epoch": 8.6875, "grad_norm": 0.9935233872059256, "learning_rate": 5.953947368421054e-06, "loss": 1.2568, "step": 278 },
    { "epoch": 8.71875, "grad_norm": 1.2865221082169063, "learning_rate": 5.9375e-06, "loss": 1.4844, "step": 279 },
    { "epoch": 8.75, "grad_norm": 1.3289498848746482, "learning_rate": 5.921052631578948e-06, "loss": 1.6348, "step": 280 },
    { "epoch": 8.78125, "grad_norm": 1.1164359514707043, "learning_rate": 5.904605263157896e-06, "loss": 1.3809, "step": 281 },
    { "epoch": 8.8125, "grad_norm": 1.2970501761466038, "learning_rate": 5.888157894736842e-06, "loss": 1.4102, "step": 282 },
    { "epoch": 8.84375, "grad_norm": 1.3343317408525743, "learning_rate": 5.87171052631579e-06, "loss": 1.6523, "step": 283 },
    { "epoch": 8.875, "grad_norm": 1.2378422677250207, "learning_rate": 5.855263157894738e-06, "loss": 1.3047, "step": 284 },
    { "epoch": 8.90625, "grad_norm": 1.119983613468547, "learning_rate": 5.838815789473685e-06, "loss": 1.4043, "step": 285 },
    { "epoch": 8.9375, "grad_norm": 1.1996702075618084, "learning_rate": 5.822368421052632e-06, "loss": 1.5703, "step": 286 },
    { "epoch": 8.96875, "grad_norm": 1.0778663070491803, "learning_rate": 5.805921052631579e-06, "loss": 1.3828, "step": 287 },
    { "epoch": 9.0, "grad_norm": 1.068292333670538, "learning_rate": 5.789473684210527e-06, "loss": 1.4727, "step": 288 },
    { "epoch": 9.03125, "grad_norm": 1.2192159062465524, "learning_rate": 5.773026315789474e-06, "loss": 1.5195, "step": 289 },
    { "epoch": 9.0625, "grad_norm": 1.3481737413765114, "learning_rate": 5.756578947368422e-06, "loss": 1.6465, "step": 290 },
    { "epoch": 9.09375, "grad_norm": 1.3284157045718723, "learning_rate": 5.7401315789473685e-06, "loss": 1.668, "step": 291 },
    { "epoch": 9.125, "grad_norm": 1.1523606928804837, "learning_rate": 5.723684210526316e-06, "loss": 1.3711, "step": 292 },
    { "epoch": 9.15625, "grad_norm": 1.2624009140169858, "learning_rate": 5.707236842105264e-06, "loss": 1.4805, "step": 293 },
    { "epoch": 9.1875, "grad_norm": 1.1974249834174664, "learning_rate": 5.69078947368421e-06, "loss": 1.416, "step": 294 },
    { "epoch": 9.21875, "grad_norm": 1.0880104341393793, "learning_rate": 5.6743421052631585e-06, "loss": 1.416, "step": 295 },
    { "epoch": 9.25, "grad_norm": 1.0648083314607524, "learning_rate": 5.657894736842106e-06, "loss": 1.4922, "step": 296 },
    { "epoch": 9.28125, "grad_norm": 1.3924241155835761, "learning_rate": 5.641447368421054e-06, "loss": 1.8535, "step": 297 },
    { "epoch": 9.3125, "grad_norm": 1.1997511105605825, "learning_rate": 5.625e-06, "loss": 1.5508, "step": 298 },
    { "epoch": 9.34375, "grad_norm": 0.9991377489525408, "learning_rate": 5.608552631578948e-06, "loss": 1.2793, "step": 299 },
    { "epoch": 9.375, "grad_norm": 0.9245773551113274, "learning_rate": 5.592105263157896e-06, "loss": 1.1406, "step": 300 },
    { "epoch": 9.40625, "grad_norm": 1.3441639892433364, "learning_rate": 5.575657894736842e-06, "loss": 1.8691, "step": 301 },
    { "epoch": 9.4375, "grad_norm": 1.2026188224693601, "learning_rate": 5.55921052631579e-06, "loss": 1.3486, "step": 302 },
    { "epoch": 9.46875, "grad_norm": 1.1016342505763965, "learning_rate": 5.542763157894737e-06, "loss": 1.3008, "step": 303 },
    { "epoch": 9.5, "grad_norm": 1.0605253903830034, "learning_rate": 5.526315789473685e-06, "loss": 1.3301, "step": 304 },
    { "epoch": 9.53125, "grad_norm": 1.2775271089840106, "learning_rate": 5.509868421052632e-06, "loss": 1.5605, "step": 305 },
    { "epoch": 9.5625, "grad_norm": 1.1104372435375556, "learning_rate": 5.493421052631579e-06, "loss": 1.3984, "step": 306 },
    { "epoch": 9.59375, "grad_norm": 1.1918158892068837, "learning_rate": 5.476973684210527e-06, "loss": 1.3594, "step": 307 },
    { "epoch": 9.625, "grad_norm": 1.06273551473133, "learning_rate": 5.460526315789474e-06, "loss": 1.3809, "step": 308 },
    { "epoch": 9.65625, "grad_norm": 1.2788595464616823, "learning_rate": 5.444078947368422e-06, "loss": 1.6016, "step": 309 },
    { "epoch": 9.6875, "grad_norm": 1.2042036204677522, "learning_rate": 5.4276315789473686e-06, "loss": 1.334, "step": 310 },
    { "epoch": 9.71875, "grad_norm": 1.118364091577098, "learning_rate": 5.411184210526316e-06, "loss": 1.4043, "step": 311 },
    { "epoch": 9.75, "grad_norm": 1.2565371105509984, "learning_rate": 5.394736842105264e-06, "loss": 1.4629, "step": 312 },
    { "epoch": 9.78125, "grad_norm": 1.1689553385554297, "learning_rate": 5.37828947368421e-06, "loss": 1.5547, "step": 313 },
    { "epoch": 9.8125, "grad_norm": 1.0306781671470933, "learning_rate": 5.3618421052631585e-06, "loss": 1.3008, "step": 314 },
    { "epoch": 9.84375, "grad_norm": 1.1832250443460393, "learning_rate": 5.345394736842106e-06, "loss": 1.6035, "step": 315 },
    { "epoch": 9.875, "grad_norm": 1.204745943784607, "learning_rate": 5.328947368421054e-06, "loss": 1.3887, "step": 316 },
    { "epoch": 9.90625, "grad_norm": 1.1435805601154991, "learning_rate": 5.3125e-06, "loss": 1.3242, "step": 317 },
    { "epoch": 9.9375, "grad_norm": 1.1279236877871361, "learning_rate": 5.296052631578948e-06, "loss": 1.3496, "step": 318 },
    { "epoch": 9.96875, "grad_norm": 1.206055707255631, "learning_rate": 5.279605263157895e-06, "loss": 1.4844, "step": 319 },
    { "epoch": 10.0, "grad_norm": 1.0754848156346868, "learning_rate": 5.263157894736842e-06, "loss": 1.3359, "step": 320 },
    { "epoch": 10.03125, "grad_norm": 1.2154135689523968, "learning_rate": 5.24671052631579e-06, "loss": 1.5293, "step": 321 },
    { "epoch": 10.0625, "grad_norm": 1.129676688837292, "learning_rate": 5.230263157894737e-06, "loss": 1.3799, "step": 322 },
    { "epoch": 10.09375, "grad_norm": 1.1816669829377744, "learning_rate": 5.213815789473685e-06, "loss": 1.4102, "step": 323 },
    { "epoch": 10.125, "grad_norm": 1.2594276176240513, "learning_rate": 5.197368421052632e-06, "loss": 1.377, "step": 324 },
    { "epoch": 10.15625, "grad_norm": 1.008987196838564, "learning_rate": 5.180921052631579e-06, "loss": 1.2754, "step": 325 },
    { "epoch": 10.1875, "grad_norm": 1.0077078292816104, "learning_rate": 5.164473684210527e-06, "loss": 1.2441, "step": 326 },
    { "epoch": 10.21875, "grad_norm": 1.1493149526564261, "learning_rate": 5.148026315789474e-06, "loss": 1.5977, "step": 327 },
    { "epoch": 10.25, "grad_norm": 1.21245285652362, "learning_rate": 5.131578947368422e-06, "loss": 1.2969, "step": 328 },
    { "epoch": 10.28125, "grad_norm": 1.1945879462228663, "learning_rate": 5.115131578947369e-06, "loss": 1.5371, "step": 329 },
    { "epoch": 10.3125, "grad_norm": 1.2197917228914175, "learning_rate": 5.098684210526316e-06, "loss": 1.5391, "step": 330 },
    { "epoch": 10.34375, "grad_norm": 1.3025833566173235, "learning_rate": 5.082236842105264e-06, "loss": 1.5586, "step": 331 },
    { "epoch": 10.375, "grad_norm": 1.186750044727453, "learning_rate": 5.0657894736842104e-06, "loss": 1.4277, "step": 332 },
    { "epoch": 10.40625, "grad_norm": 1.3934048340733807, "learning_rate": 5.0493421052631586e-06, "loss": 1.6484, "step": 333 },
    { "epoch": 10.4375, "grad_norm": 1.1105348891949298, "learning_rate": 5.032894736842106e-06, "loss": 1.2539, "step": 334 },
    { "epoch": 10.46875, "grad_norm": 1.1506739890579292, "learning_rate": 5.016447368421053e-06, "loss": 1.2949, "step": 335 },
    { "epoch": 10.5, "grad_norm": 1.0957711868381537, "learning_rate": 5e-06, "loss": 1.2129, "step": 336 },
    { "epoch": 10.53125, "grad_norm": 1.1175328217521991, "learning_rate": 4.983552631578948e-06, "loss": 1.5078, "step": 337 },
    { "epoch": 10.5625, "grad_norm": 1.1896931608496566, "learning_rate": 4.967105263157895e-06, "loss": 1.418, "step": 338 },
    { "epoch": 10.59375, "grad_norm": 1.1953282095258284, "learning_rate": 4.950657894736843e-06, "loss": 1.4043, "step": 339 },
    { "epoch": 10.625, "grad_norm": 1.0534650726069181, "learning_rate": 4.9342105263157895e-06, "loss": 1.1973, "step": 340 },
    { "epoch": 10.65625, "grad_norm": 1.0926845658185267, "learning_rate": 4.917763157894737e-06, "loss": 1.3262, "step": 341 },
    { "epoch": 10.6875, "grad_norm": 1.1569298118442002, "learning_rate": 4.901315789473685e-06, "loss": 1.3516, "step": 342 },
    { "epoch": 10.71875, "grad_norm": 1.118753536452266, "learning_rate": 4.884868421052632e-06, "loss": 1.2559, "step": 343 },
    { "epoch": 10.75, "grad_norm": 1.165228582996436, "learning_rate": 4.8684210526315795e-06, "loss": 1.377, "step": 344 },
    { "epoch": 10.78125, "grad_norm": 1.2039659932149558, "learning_rate": 4.851973684210527e-06, "loss": 1.3125, "step": 345 },
    { "epoch": 10.8125, "grad_norm": 1.1220609308798875, "learning_rate": 4.835526315789474e-06, "loss": 1.4238, "step": 346 },
    { "epoch": 10.84375, "grad_norm": 1.2507059658078532, "learning_rate": 4.819078947368421e-06, "loss": 1.4609, "step": 347 },
    { "epoch": 10.875, "grad_norm": 1.3271056298248607, "learning_rate": 4.802631578947369e-06, "loss": 1.6504, "step": 348 },
    { "epoch": 10.90625, "grad_norm": 1.0787012990106282, "learning_rate": 4.786184210526316e-06, "loss": 1.2803, "step": 349 },
    { "epoch": 10.9375, "grad_norm": 1.154705302242999, "learning_rate": 4.769736842105264e-06, "loss": 1.3359, "step": 350 },
    { "epoch": 10.96875, "grad_norm": 1.0937048750337814, "learning_rate": 4.753289473684211e-06, "loss": 1.2041, "step": 351 },
    { "epoch": 11.0, "grad_norm": 1.1617527213309744, "learning_rate": 4.736842105263158e-06, "loss": 1.3711, "step": 352 },
    { "epoch": 11.03125, "grad_norm": 1.1883830201473682, "learning_rate": 4.720394736842106e-06, "loss": 1.5977, "step": 353 },
    { "epoch": 11.0625, "grad_norm": 1.0139784705048083, "learning_rate": 4.703947368421053e-06, "loss": 1.2891, "step": 354 },
    { "epoch": 11.09375, "grad_norm": 1.2579898083868604, "learning_rate": 4.6875000000000004e-06, "loss": 1.5254, "step": 355 },
    { "epoch": 11.125, "grad_norm": 1.1725513734302704, "learning_rate": 4.671052631578948e-06, "loss": 1.373, "step": 356 },
    { "epoch": 11.15625, "grad_norm": 1.1987413898195225, "learning_rate": 4.654605263157895e-06, "loss": 1.4863, "step": 357 },
    { "epoch": 11.1875, "grad_norm": 1.093239113754782, "learning_rate": 4.638157894736843e-06, "loss": 1.4727, "step": 358 },
    { "epoch": 11.21875, "grad_norm": 1.1208003088492746, "learning_rate": 4.6217105263157896e-06, "loss": 1.332, "step": 359 },
    { "epoch": 11.25, "grad_norm": 1.3151133748590742, "learning_rate": 4.605263157894737e-06, "loss": 1.5781, "step": 360 },
    { "epoch": 11.28125, "grad_norm": 1.0342040777680257, "learning_rate": 4.588815789473685e-06, "loss": 1.3066, "step": 361 },
    { "epoch": 11.3125, "grad_norm": 1.040927136579576, "learning_rate": 4.572368421052632e-06, "loss": 1.377, "step": 362 },
    { "epoch": 11.34375, "grad_norm": 1.1625272195538854, "learning_rate": 4.5559210526315795e-06, "loss": 1.4863, "step": 363 },
    { "epoch": 11.375, "grad_norm": 1.2739101469375733, "learning_rate": 4.539473684210527e-06, "loss": 1.418, "step": 364 },
    { "epoch": 11.40625, "grad_norm": 0.9656151744390092, "learning_rate": 4.523026315789474e-06, "loss": 1.0996, "step": 365 },
    { "epoch": 11.4375, "grad_norm": 1.1015967438212477, "learning_rate": 4.506578947368421e-06, "loss": 1.2646, "step": 366 },
    { "epoch": 11.46875, "grad_norm": 1.2010035878113032, "learning_rate": 4.490131578947369e-06, "loss": 1.3281, "step": 367 },
    { "epoch": 11.5, "grad_norm": 1.1519349789068691, "learning_rate": 4.473684210526316e-06, "loss": 1.2158, "step": 368 },
    { "epoch": 11.53125, "grad_norm": 1.1196243544282645, "learning_rate": 4.457236842105264e-06, "loss": 1.2852, "step": 369 },
    { "epoch": 11.5625, "grad_norm": 1.0907715090076417, "learning_rate": 4.440789473684211e-06, "loss": 1.2773, "step": 370 },
    { "epoch": 11.59375, "grad_norm": 0.9841441198723676, "learning_rate": 4.424342105263158e-06, "loss": 1.2598, "step": 371 },
    { "epoch": 11.625, "grad_norm": 1.1345770150297678, "learning_rate": 4.407894736842105e-06, "loss": 1.4023, "step": 372 },
    { "epoch": 11.65625, "grad_norm": 1.1185447421477923, "learning_rate": 4.391447368421053e-06, "loss": 1.3691, "step": 373 },
    { "epoch": 11.6875, "grad_norm": 1.1540730230879181, "learning_rate": 4.3750000000000005e-06, "loss": 1.3203, "step": 374 },
    { "epoch": 11.71875, "grad_norm": 1.184669271964522, "learning_rate": 4.358552631578948e-06, "loss": 1.1768, "step": 375 },
    { "epoch": 11.75, "grad_norm": 1.1637275265772447, "learning_rate": 4.342105263157895e-06, "loss": 1.4512, "step": 376 },
    { "epoch": 11.78125, "grad_norm": 1.0358827548745186, "learning_rate": 4.325657894736842e-06, "loss": 1.2461, "step": 377 },
    { "epoch": 11.8125, "grad_norm": 1.1128314718491639, "learning_rate": 4.30921052631579e-06, "loss": 1.3018, "step": 378 },
    { "epoch": 11.84375, "grad_norm": 1.0161634189398387, "learning_rate": 4.292763157894737e-06, "loss": 1.1387, "step": 379 },
    { "epoch": 11.875, "grad_norm": 1.4521991968396184, "learning_rate": 4.276315789473684e-06, "loss": 1.5, "step": 380 },
    { "epoch": 11.90625, "grad_norm": 1.1899659944122705, "learning_rate": 4.259868421052632e-06, "loss": 1.3926, "step": 381 },
    { "epoch": 11.9375, "grad_norm": 1.1121528138192258, "learning_rate": 4.2434210526315796e-06, "loss": 1.207, "step": 382 },
    { "epoch": 11.96875, "grad_norm": 1.0588783355137454, "learning_rate": 4.226973684210527e-06, "loss": 1.1055, "step": 383 },
    { "epoch": 12.0, "grad_norm": 1.1614232418764687, "learning_rate": 4.210526315789474e-06, "loss": 1.4883, "step": 384 },
    { "epoch": 12.03125, "grad_norm": 1.0746097740254226, "learning_rate": 4.194078947368421e-06, "loss": 1.2393, "step": 385 },
    { "epoch": 12.0625, "grad_norm": 0.9854229757382881, "learning_rate": 4.177631578947369e-06, "loss": 1.123, "step": 386 },
    { "epoch": 12.09375, "grad_norm": 1.1065232705122436, "learning_rate": 4.161184210526316e-06, "loss": 1.3027, "step": 387 },
    { "epoch": 12.125, "grad_norm": 1.2394820431877753, "learning_rate": 4.144736842105263e-06, "loss": 1.4453, "step": 388 },
    { "epoch": 12.15625, "grad_norm": 1.283661056628274, "learning_rate": 4.128289473684211e-06, "loss": 1.416, "step": 389 },
    { "epoch": 12.1875, "grad_norm": 0.9969260727592318, "learning_rate": 4.111842105263158e-06, "loss": 1.1445, "step": 390 },
    { "epoch": 12.21875, "grad_norm": 1.1882926090414414, "learning_rate": 4.095394736842105e-06, "loss": 1.4688, "step": 391 },
    { "epoch": 12.25, "grad_norm": 1.0949407531874138, "learning_rate": 4.078947368421053e-06, "loss": 1.1846, "step": 392 },
    { "epoch": 12.28125, "grad_norm": 1.1279581192159158, "learning_rate": 4.0625000000000005e-06, "loss": 1.4238, "step": 393 },
    { "epoch": 12.3125, "grad_norm": 1.0976208579632025, "learning_rate": 4.046052631578948e-06, "loss": 1.2354, "step": 394 },
    { "epoch": 12.34375, "grad_norm": 1.047171336883935, "learning_rate": 4.029605263157895e-06, "loss": 1.1592, "step": 395 },
    { "epoch": 12.375, "grad_norm": 1.0235369257101183, "learning_rate": 4.013157894736842e-06, "loss": 1.1787, "step": 396 },
    { "epoch": 12.40625, "grad_norm": 1.1017349959377196, "learning_rate": 3.99671052631579e-06, "loss": 1.1836, "step": 397 },
    { "epoch": 12.4375, "grad_norm": 1.2601490910061588, "learning_rate": 3.980263157894737e-06, "loss": 1.5059, "step": 398 },
    { "epoch": 12.46875, "grad_norm": 1.1846034199737945, "learning_rate": 3.963815789473684e-06, "loss": 1.4277, "step": 399 },
    { "epoch": 12.5, "grad_norm": 1.1414440302505462, "learning_rate": 3.947368421052632e-06, "loss": 1.3496, "step": 400 },
    { "epoch": 12.53125, "grad_norm": 1.1382135616662248, "learning_rate": 3.93092105263158e-06, "loss": 1.3652, "step": 401 },
    { "epoch": 12.5625, "grad_norm": 1.0393915037961405, "learning_rate": 3.914473684210527e-06, "loss": 1.21, "step": 402 },
{ |
|
"epoch": 12.59375, |
|
"grad_norm": 1.1448568792854905, |
|
"learning_rate": 3.898026315789474e-06, |
|
"loss": 1.3174, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 12.625, |
|
"grad_norm": 1.0592051048429247, |
|
"learning_rate": 3.8815789473684214e-06, |
|
"loss": 1.2021, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 12.65625, |
|
"grad_norm": 1.0218252432617492, |
|
"learning_rate": 3.865131578947369e-06, |
|
"loss": 1.1309, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 12.6875, |
|
"grad_norm": 1.19885665360683, |
|
"learning_rate": 3.848684210526316e-06, |
|
"loss": 1.4141, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 12.71875, |
|
"grad_norm": 1.2686967780371439, |
|
"learning_rate": 3.832236842105263e-06, |
|
"loss": 1.4121, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 12.75, |
|
"grad_norm": 1.209271788347019, |
|
"learning_rate": 3.815789473684211e-06, |
|
"loss": 1.3906, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 12.78125, |
|
"grad_norm": 1.0673063533229126, |
|
"learning_rate": 3.799342105263158e-06, |
|
"loss": 1.2637, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 12.8125, |
|
"grad_norm": 0.9011876750636091, |
|
"learning_rate": 3.7828947368421055e-06, |
|
"loss": 1.0303, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 12.84375, |
|
"grad_norm": 1.082837212073044, |
|
"learning_rate": 3.766447368421053e-06, |
|
"loss": 1.2461, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 12.875, |
|
"grad_norm": 1.100263665438574, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 1.249, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 12.90625, |
|
"grad_norm": 1.0885744676023275, |
|
"learning_rate": 3.733552631578948e-06, |
|
"loss": 1.3848, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 12.9375, |
|
"grad_norm": 1.3329324019634943, |
|
"learning_rate": 3.717105263157895e-06, |
|
"loss": 1.4492, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 12.96875, |
|
"grad_norm": 1.3882784181575505, |
|
"learning_rate": 3.7006578947368428e-06, |
|
"loss": 1.6699, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"grad_norm": 1.0716251354423374, |
|
"learning_rate": 3.6842105263157896e-06, |
|
"loss": 1.2139, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 13.03125, |
|
"grad_norm": 1.287986851928492, |
|
"learning_rate": 3.667763157894737e-06, |
|
"loss": 1.5117, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 13.0625, |
|
"grad_norm": 1.0840412540341369, |
|
"learning_rate": 3.6513157894736846e-06, |
|
"loss": 1.3145, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 13.09375, |
|
"grad_norm": 1.1037476377552207, |
|
"learning_rate": 3.634868421052632e-06, |
|
"loss": 1.2344, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 13.125, |
|
"grad_norm": 1.0800076547795563, |
|
"learning_rate": 3.618421052631579e-06, |
|
"loss": 1.2266, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 13.15625, |
|
"grad_norm": 0.9998065477775393, |
|
"learning_rate": 3.601973684210527e-06, |
|
"loss": 1.2285, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 13.1875, |
|
"grad_norm": 1.175154093974312, |
|
"learning_rate": 3.5855263157894737e-06, |
|
"loss": 1.2109, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 13.21875, |
|
"grad_norm": 1.0116582403798422, |
|
"learning_rate": 3.569078947368421e-06, |
|
"loss": 1.2529, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 13.25, |
|
"grad_norm": 1.202897202439205, |
|
"learning_rate": 3.5526315789473687e-06, |
|
"loss": 1.4688, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 13.28125, |
|
"grad_norm": 1.004145952160525, |
|
"learning_rate": 3.536184210526316e-06, |
|
"loss": 1.1387, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 13.3125, |
|
"grad_norm": 1.0297417125754638, |
|
"learning_rate": 3.5197368421052637e-06, |
|
"loss": 1.1719, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 13.34375, |
|
"grad_norm": 1.07787501319166, |
|
"learning_rate": 3.503289473684211e-06, |
|
"loss": 1.3145, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 13.375, |
|
"grad_norm": 1.150503122039903, |
|
"learning_rate": 3.486842105263158e-06, |
|
"loss": 1.2793, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 13.40625, |
|
"grad_norm": 1.1739991668311942, |
|
"learning_rate": 3.4703947368421056e-06, |
|
"loss": 1.4102, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 13.4375, |
|
"grad_norm": 1.2153703174808637, |
|
"learning_rate": 3.453947368421053e-06, |
|
"loss": 1.2109, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 13.46875, |
|
"grad_norm": 1.1554295615769916, |
|
"learning_rate": 3.4375e-06, |
|
"loss": 1.3301, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 13.5, |
|
"grad_norm": 1.2893469286620574, |
|
"learning_rate": 3.421052631578948e-06, |
|
"loss": 1.3027, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 13.53125, |
|
"grad_norm": 1.0075597242681218, |
|
"learning_rate": 3.404605263157895e-06, |
|
"loss": 1.1182, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 13.5625, |
|
"grad_norm": 1.3509099608240223, |
|
"learning_rate": 3.388157894736843e-06, |
|
"loss": 1.3809, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 13.59375, |
|
"grad_norm": 1.069498050149035, |
|
"learning_rate": 3.3717105263157897e-06, |
|
"loss": 1.1846, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 13.625, |
|
"grad_norm": 1.114890453198544, |
|
"learning_rate": 3.355263157894737e-06, |
|
"loss": 1.1875, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 13.65625, |
|
"grad_norm": 1.129765979380437, |
|
"learning_rate": 3.3388157894736847e-06, |
|
"loss": 1.3633, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 13.6875, |
|
"grad_norm": 1.0998066220104334, |
|
"learning_rate": 3.322368421052632e-06, |
|
"loss": 1.2539, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 13.71875, |
|
"grad_norm": 0.9304831734458692, |
|
"learning_rate": 3.3059210526315792e-06, |
|
"loss": 1.0859, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 13.75, |
|
"grad_norm": 1.0694382341833957, |
|
"learning_rate": 3.289473684210527e-06, |
|
"loss": 1.3496, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 13.78125, |
|
"grad_norm": 1.1708315245441043, |
|
"learning_rate": 3.2730263157894738e-06, |
|
"loss": 1.3828, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 13.8125, |
|
"grad_norm": 1.0656716652628695, |
|
"learning_rate": 3.256578947368421e-06, |
|
"loss": 1.4023, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 13.84375, |
|
"grad_norm": 1.0607375221223125, |
|
"learning_rate": 3.2401315789473688e-06, |
|
"loss": 1.2344, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 13.875, |
|
"grad_norm": 1.2210966070357725, |
|
"learning_rate": 3.223684210526316e-06, |
|
"loss": 1.3945, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 13.90625, |
|
"grad_norm": 0.9014517324691765, |
|
"learning_rate": 3.2072368421052637e-06, |
|
"loss": 0.998, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 13.9375, |
|
"grad_norm": 1.125188484311246, |
|
"learning_rate": 3.190789473684211e-06, |
|
"loss": 1.2012, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 13.96875, |
|
"grad_norm": 1.0457511830501967, |
|
"learning_rate": 3.174342105263158e-06, |
|
"loss": 1.1953, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"grad_norm": 1.2950505608894367, |
|
"learning_rate": 3.157894736842105e-06, |
|
"loss": 1.457, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 14.03125, |
|
"grad_norm": 1.170056852408912, |
|
"learning_rate": 3.141447368421053e-06, |
|
"loss": 1.3926, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 14.0625, |
|
"grad_norm": 1.2493616975451318, |
|
"learning_rate": 3.125e-06, |
|
"loss": 1.5918, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 14.09375, |
|
"grad_norm": 0.8888799675990482, |
|
"learning_rate": 3.108552631578948e-06, |
|
"loss": 0.9805, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 14.125, |
|
"grad_norm": 1.204210154016948, |
|
"learning_rate": 3.092105263157895e-06, |
|
"loss": 1.4355, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 14.15625, |
|
"grad_norm": 1.054523469321179, |
|
"learning_rate": 3.0756578947368424e-06, |
|
"loss": 1.2363, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 14.1875, |
|
"grad_norm": 0.9613996471643123, |
|
"learning_rate": 3.0592105263157897e-06, |
|
"loss": 1.1055, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 14.21875, |
|
"grad_norm": 1.0551733236211187, |
|
"learning_rate": 3.042763157894737e-06, |
|
"loss": 1.2793, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 14.25, |
|
"grad_norm": 0.9152832465100885, |
|
"learning_rate": 3.0263157894736843e-06, |
|
"loss": 0.9717, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 14.28125, |
|
"grad_norm": 0.9987134923379365, |
|
"learning_rate": 3.009868421052632e-06, |
|
"loss": 1.1562, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 14.3125, |
|
"grad_norm": 0.9956381989955094, |
|
"learning_rate": 2.9934210526315792e-06, |
|
"loss": 1.0273, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 14.34375, |
|
"grad_norm": 1.0938183028544668, |
|
"learning_rate": 2.976973684210527e-06, |
|
"loss": 1.25, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 14.375, |
|
"grad_norm": 1.1362098980479445, |
|
"learning_rate": 2.960526315789474e-06, |
|
"loss": 1.1943, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 14.40625, |
|
"grad_norm": 1.2643759923619813, |
|
"learning_rate": 2.944078947368421e-06, |
|
"loss": 1.4707, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 14.4375, |
|
"grad_norm": 0.9737358507532656, |
|
"learning_rate": 2.927631578947369e-06, |
|
"loss": 1.1211, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 14.46875, |
|
"grad_norm": 1.0496208877927968, |
|
"learning_rate": 2.911184210526316e-06, |
|
"loss": 1.1504, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 14.5, |
|
"grad_norm": 1.2256958010679917, |
|
"learning_rate": 2.8947368421052634e-06, |
|
"loss": 1.5234, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 14.53125, |
|
"grad_norm": 1.173766625463973, |
|
"learning_rate": 2.878289473684211e-06, |
|
"loss": 1.2148, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 14.5625, |
|
"grad_norm": 0.9930757226403226, |
|
"learning_rate": 2.861842105263158e-06, |
|
"loss": 1.1084, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 14.59375, |
|
"grad_norm": 0.8924636979654819, |
|
"learning_rate": 2.845394736842105e-06, |
|
"loss": 1.0645, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 14.625, |
|
"grad_norm": 1.0071866945186874, |
|
"learning_rate": 2.828947368421053e-06, |
|
"loss": 1.0977, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 14.65625, |
|
"grad_norm": 1.161231836469243, |
|
"learning_rate": 2.8125e-06, |
|
"loss": 1.2852, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 14.6875, |
|
"grad_norm": 1.1586427690801844, |
|
"learning_rate": 2.796052631578948e-06, |
|
"loss": 1.4531, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 14.71875, |
|
"grad_norm": 1.0735965447794522, |
|
"learning_rate": 2.779605263157895e-06, |
|
"loss": 1.1904, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 14.75, |
|
"grad_norm": 1.1132905570623621, |
|
"learning_rate": 2.7631578947368424e-06, |
|
"loss": 1.416, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 14.78125, |
|
"grad_norm": 1.159521186004377, |
|
"learning_rate": 2.7467105263157893e-06, |
|
"loss": 1.2432, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 14.8125, |
|
"grad_norm": 1.0656935107635523, |
|
"learning_rate": 2.730263157894737e-06, |
|
"loss": 1.2812, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 14.84375, |
|
"grad_norm": 1.0011066736142082, |
|
"learning_rate": 2.7138157894736843e-06, |
|
"loss": 1.1523, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 14.875, |
|
"grad_norm": 1.14999073916424, |
|
"learning_rate": 2.697368421052632e-06, |
|
"loss": 1.4043, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 14.90625, |
|
"grad_norm": 1.2325422262223233, |
|
"learning_rate": 2.6809210526315793e-06, |
|
"loss": 1.2559, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 14.9375, |
|
"grad_norm": 0.9847559909054264, |
|
"learning_rate": 2.664473684210527e-06, |
|
"loss": 1.1162, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 14.96875, |
|
"grad_norm": 1.3310005724561664, |
|
"learning_rate": 2.648026315789474e-06, |
|
"loss": 1.5029, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"grad_norm": 1.1154233561514635, |
|
"learning_rate": 2.631578947368421e-06, |
|
"loss": 1.2266, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 15.03125, |
|
"grad_norm": 1.2563971886551109, |
|
"learning_rate": 2.6151315789473684e-06, |
|
"loss": 1.3633, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 15.0625, |
|
"grad_norm": 1.105742026406257, |
|
"learning_rate": 2.598684210526316e-06, |
|
"loss": 1.2539, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 15.09375, |
|
"grad_norm": 1.0780836881301994, |
|
"learning_rate": 2.5822368421052634e-06, |
|
"loss": 1.1729, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 15.125, |
|
"grad_norm": 1.1413296775014503, |
|
"learning_rate": 2.565789473684211e-06, |
|
"loss": 1.2656, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 15.15625, |
|
"grad_norm": 1.2881008284721613, |
|
"learning_rate": 2.549342105263158e-06, |
|
"loss": 1.3848, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 15.1875, |
|
"grad_norm": 0.976527781645127, |
|
"learning_rate": 2.5328947368421052e-06, |
|
"loss": 1.2031, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 15.21875, |
|
"grad_norm": 0.9358594834433476, |
|
"learning_rate": 2.516447368421053e-06, |
|
"loss": 1.0771, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 15.25, |
|
"grad_norm": 1.1081336222960927, |
|
"learning_rate": 2.5e-06, |
|
"loss": 1.1973, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 15.28125, |
|
"grad_norm": 0.9289762234227542, |
|
"learning_rate": 2.4835526315789475e-06, |
|
"loss": 0.9883, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 15.3125, |
|
"grad_norm": 0.9842240371852441, |
|
"learning_rate": 2.4671052631578948e-06, |
|
"loss": 1.0938, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 15.34375, |
|
"grad_norm": 1.2307196149704511, |
|
"learning_rate": 2.4506578947368425e-06, |
|
"loss": 1.1777, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 15.375, |
|
"grad_norm": 1.0986796436184, |
|
"learning_rate": 2.4342105263157898e-06, |
|
"loss": 1.1787, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 15.40625, |
|
"grad_norm": 1.1798771532584746, |
|
"learning_rate": 2.417763157894737e-06, |
|
"loss": 1.3105, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 15.4375, |
|
"grad_norm": 1.2256001371386596, |
|
"learning_rate": 2.4013157894736843e-06, |
|
"loss": 1.3604, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 15.46875, |
|
"grad_norm": 1.021954952646941, |
|
"learning_rate": 2.384868421052632e-06, |
|
"loss": 1.2246, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 15.5, |
|
"grad_norm": 1.299346493712266, |
|
"learning_rate": 2.368421052631579e-06, |
|
"loss": 1.3535, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 15.53125, |
|
"grad_norm": 1.336121635352978, |
|
"learning_rate": 2.3519736842105266e-06, |
|
"loss": 1.6211, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 15.5625, |
|
"grad_norm": 0.9520793848137924, |
|
"learning_rate": 2.335526315789474e-06, |
|
"loss": 0.957, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 15.59375, |
|
"grad_norm": 1.0898192526364332, |
|
"learning_rate": 2.3190789473684216e-06, |
|
"loss": 1.2246, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 15.625, |
|
"grad_norm": 1.1748821308428201, |
|
"learning_rate": 2.3026315789473684e-06, |
|
"loss": 1.0957, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 15.65625, |
|
"grad_norm": 1.0083430893858323, |
|
"learning_rate": 2.286184210526316e-06, |
|
"loss": 1.1348, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 15.6875, |
|
"grad_norm": 1.2608117651126283, |
|
"learning_rate": 2.2697368421052634e-06, |
|
"loss": 1.4043, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 15.71875, |
|
"grad_norm": 1.0733128080248424, |
|
"learning_rate": 2.2532894736842107e-06, |
|
"loss": 1.3398, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 15.75, |
|
"grad_norm": 1.0173381426456558, |
|
"learning_rate": 2.236842105263158e-06, |
|
"loss": 1.1191, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 15.78125, |
|
"grad_norm": 0.9976139576050151, |
|
"learning_rate": 2.2203947368421057e-06, |
|
"loss": 1.0898, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 15.8125, |
|
"grad_norm": 1.0432828287429723, |
|
"learning_rate": 2.2039473684210525e-06, |
|
"loss": 1.1582, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 15.84375, |
|
"grad_norm": 1.3471757738180266, |
|
"learning_rate": 2.1875000000000002e-06, |
|
"loss": 1.4473, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 15.875, |
|
"grad_norm": 1.0421236696424703, |
|
"learning_rate": 2.1710526315789475e-06, |
|
"loss": 1.1934, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 15.90625, |
|
"grad_norm": 0.952901790435728, |
|
"learning_rate": 2.154605263157895e-06, |
|
"loss": 1.0947, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 15.9375, |
|
"grad_norm": 1.090112867461769, |
|
"learning_rate": 2.138157894736842e-06, |
|
"loss": 1.2266, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 15.96875, |
|
"grad_norm": 1.0259764617752856, |
|
"learning_rate": 2.1217105263157898e-06, |
|
"loss": 1.1895, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"grad_norm": 1.0780860545017692, |
|
"learning_rate": 2.105263157894737e-06, |
|
"loss": 1.2773, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 16.03125, |
|
"grad_norm": 1.192598478720439, |
|
"learning_rate": 2.0888157894736843e-06, |
|
"loss": 1.1406, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 16.0625, |
|
"grad_norm": 1.0157090658357621, |
|
"learning_rate": 2.0723684210526316e-06, |
|
"loss": 1.1953, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 16.09375, |
|
"grad_norm": 1.1696332490676487, |
|
"learning_rate": 2.055921052631579e-06, |
|
"loss": 1.2686, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 16.125, |
|
"grad_norm": 0.8971977511377294, |
|
"learning_rate": 2.0394736842105266e-06, |
|
"loss": 0.9502, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 16.15625, |
|
"grad_norm": 1.1312500938030132, |
|
"learning_rate": 2.023026315789474e-06, |
|
"loss": 1.2422, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 16.1875, |
|
"grad_norm": 1.2149617401537083, |
|
"learning_rate": 2.006578947368421e-06, |
|
"loss": 1.3496, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 16.21875, |
|
"grad_norm": 1.2223065264663917, |
|
"learning_rate": 1.9901315789473684e-06, |
|
"loss": 1.4199, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 16.25, |
|
"grad_norm": 1.1257780879283918, |
|
"learning_rate": 1.973684210526316e-06, |
|
"loss": 1.3555, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 16.28125, |
|
"grad_norm": 1.1391743777442347, |
|
"learning_rate": 1.9572368421052634e-06, |
|
"loss": 1.3047, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 16.3125, |
|
"grad_norm": 1.0831660200474136, |
|
"learning_rate": 1.9407894736842107e-06, |
|
"loss": 1.25, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 16.34375, |
|
"grad_norm": 1.0567996107762117, |
|
"learning_rate": 1.924342105263158e-06, |
|
"loss": 1.1465, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 16.375, |
|
"grad_norm": 1.1628482634546986, |
|
"learning_rate": 1.9078947368421057e-06, |
|
"loss": 1.2549, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 16.40625, |
|
"grad_norm": 0.9482355578709101, |
|
"learning_rate": 1.8914473684210528e-06, |
|
"loss": 1.1299, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 16.4375, |
|
"grad_norm": 0.9628299682769264, |
|
"learning_rate": 1.8750000000000003e-06, |
|
"loss": 1.0742, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 16.46875, |
|
"grad_norm": 1.107300320497425, |
|
"learning_rate": 1.8585526315789475e-06, |
|
"loss": 1.2012, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 16.5, |
|
"grad_norm": 1.0287657590791992, |
|
"learning_rate": 1.8421052631578948e-06, |
|
"loss": 1.2949, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 16.53125, |
|
"grad_norm": 0.9567055660381166, |
|
"learning_rate": 1.8256578947368423e-06, |
|
"loss": 1.1904, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 16.5625, |
|
"grad_norm": 1.0002030531549826, |
|
"learning_rate": 1.8092105263157896e-06, |
|
"loss": 1.0918, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 16.59375, |
|
"grad_norm": 1.0750461930124113, |
|
"learning_rate": 1.7927631578947369e-06, |
|
"loss": 1.123, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 16.625, |
|
"grad_norm": 1.0587518347896514, |
|
"learning_rate": 1.7763157894736844e-06, |
|
"loss": 1.2207, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 16.65625, |
|
"grad_norm": 1.2056106418322352, |
|
"learning_rate": 1.7598684210526319e-06, |
|
"loss": 1.2188, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 16.6875, |
|
"grad_norm": 1.140280544591844, |
|
"learning_rate": 1.743421052631579e-06, |
|
"loss": 1.2119, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 16.71875, |
|
"grad_norm": 1.2106066605287251, |
|
"learning_rate": 1.7269736842105264e-06, |
|
"loss": 1.4746, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 16.75, |
|
"grad_norm": 1.077182787791311, |
|
"learning_rate": 1.710526315789474e-06, |
|
"loss": 1.2598, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 16.78125, |
|
"grad_norm": 0.9360280704539896, |
|
"learning_rate": 1.6940789473684214e-06, |
|
"loss": 1.0205, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 16.8125, |
|
"grad_norm": 1.177830582531183, |
|
"learning_rate": 1.6776315789473685e-06, |
|
"loss": 1.165, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 16.84375, |
|
"grad_norm": 0.9833377707764928, |
|
"learning_rate": 1.661184210526316e-06, |
|
"loss": 0.9775, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 16.875, |
|
"grad_norm": 1.020582698769153, |
|
"learning_rate": 1.6447368421052635e-06, |
|
"loss": 1.2578, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 16.90625, |
|
"grad_norm": 0.8580065921387311, |
|
"learning_rate": 1.6282894736842105e-06, |
|
"loss": 0.9551, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 16.9375, |
|
"grad_norm": 1.3268669487142282, |
|
"learning_rate": 1.611842105263158e-06, |
|
"loss": 1.373, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 16.96875, |
|
"grad_norm": 1.1004982029921277, |
|
"learning_rate": 1.5953947368421055e-06, |
|
"loss": 1.3125, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"grad_norm": 1.000229419328728, |
|
"learning_rate": 1.5789473684210526e-06, |
|
"loss": 1.0898, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 17.03125, |
|
"grad_norm": 0.9935344747918229, |
|
"learning_rate": 1.5625e-06, |
|
"loss": 1.1025, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 17.0625, |
|
"grad_norm": 1.0543671143937776, |
|
"learning_rate": 1.5460526315789476e-06, |
|
"loss": 1.2422, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 17.09375, |
|
"grad_norm": 1.0267873484315073, |
|
"learning_rate": 1.5296052631578948e-06, |
|
"loss": 1.0781, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 17.125, |
|
"grad_norm": 1.1249613526875928, |
|
"learning_rate": 1.5131578947368421e-06, |
|
"loss": 1.1729, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 17.15625, |
|
"grad_norm": 1.0314777193884443, |
|
"learning_rate": 1.4967105263157896e-06, |
|
"loss": 1.0635, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 17.1875, |
|
"grad_norm": 0.9142765714159412, |
|
"learning_rate": 1.480263157894737e-06, |
|
"loss": 0.9785, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 17.21875, |
|
"grad_norm": 0.9901266850655975, |
|
"learning_rate": 1.4638157894736844e-06, |
|
"loss": 1.168, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 17.25, |
|
"grad_norm": 1.0319109364032018, |
|
"learning_rate": 1.4473684210526317e-06, |
|
"loss": 1.1641, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 17.28125, |
|
"grad_norm": 1.0709685127822617, |
|
"learning_rate": 1.430921052631579e-06, |
|
"loss": 1.2012, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 17.3125, |
|
"grad_norm": 1.0250829611009873, |
|
"learning_rate": 1.4144736842105264e-06, |
|
"loss": 1.125, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 17.34375, |
|
"grad_norm": 1.2173412186529595, |
|
"learning_rate": 1.398026315789474e-06, |
|
"loss": 1.3848, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 17.375, |
|
"grad_norm": 1.1661265073694773, |
|
"learning_rate": 1.3815789473684212e-06, |
|
"loss": 1.3242, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 17.40625, |
|
"grad_norm": 1.0121306619298118, |
|
"learning_rate": 1.3651315789473685e-06, |
|
"loss": 1.1172, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 17.4375, |
|
"grad_norm": 1.145353023757232, |
|
"learning_rate": 1.348684210526316e-06, |
|
"loss": 1.0859, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 17.46875, |
|
"grad_norm": 1.1543024421121426, |
|
"learning_rate": 1.3322368421052635e-06, |
|
"loss": 1.2773, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 17.5, |
|
"grad_norm": 1.1931114578200357, |
|
"learning_rate": 1.3157894736842106e-06, |
|
"loss": 1.4395, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 17.53125, |
|
"grad_norm": 1.0823108980476308, |
|
"learning_rate": 1.299342105263158e-06, |
|
"loss": 1.291, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 17.5625, |
|
"grad_norm": 1.2958777300604947, |
|
"learning_rate": 1.2828947368421055e-06, |
|
"loss": 1.5244, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 17.59375, |
|
"grad_norm": 1.0958459267557619, |
|
"learning_rate": 1.2664473684210526e-06, |
|
"loss": 1.2695, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 17.625, |
|
"grad_norm": 1.0560505531646338, |
|
"learning_rate": 1.25e-06, |
|
"loss": 1.127, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 17.65625, |
|
"grad_norm": 0.9502701291758148, |
|
"learning_rate": 1.2335526315789474e-06, |
|
"loss": 1.042, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 17.6875, |
|
"grad_norm": 1.167107395596191, |
|
"learning_rate": 1.2171052631578949e-06, |
|
"loss": 1.3281, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 17.71875, |
|
"grad_norm": 1.0014573626532628, |
|
"learning_rate": 1.2006578947368422e-06, |
|
"loss": 1.2539, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 17.75, |
|
"grad_norm": 1.0590612632945187, |
|
"learning_rate": 1.1842105263157894e-06, |
|
"loss": 1.1602, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 17.78125, |
|
"grad_norm": 1.1056440658535576, |
|
"learning_rate": 1.167763157894737e-06, |
|
"loss": 1.2578, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 17.8125, |
|
"grad_norm": 0.9707174845773445, |
|
"learning_rate": 1.1513157894736842e-06, |
|
"loss": 1.0381, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 17.84375, |
|
"grad_norm": 0.9319534550891547, |
|
"learning_rate": 1.1348684210526317e-06, |
|
"loss": 1.0469, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 17.875, |
|
"grad_norm": 1.07640867533832, |
|
"learning_rate": 1.118421052631579e-06, |
|
"loss": 1.2539, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 17.90625, |
|
"grad_norm": 1.0562839901611762, |
|
"learning_rate": 1.1019736842105263e-06, |
|
"loss": 1.1445, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 17.9375, |
|
"grad_norm": 0.9476515587217543, |
|
"learning_rate": 1.0855263157894738e-06, |
|
"loss": 0.9941, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 17.96875, |
|
"grad_norm": 1.243227986698938, |
|
"learning_rate": 1.069078947368421e-06, |
|
"loss": 1.4209, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"grad_norm": 1.0047531189502914, |
|
"learning_rate": 1.0526315789473685e-06, |
|
"loss": 1.168, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 18.03125, |
|
"grad_norm": 1.145263566255281, |
|
"learning_rate": 1.0361842105263158e-06, |
|
"loss": 1.1484, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 18.0625, |
|
"grad_norm": 1.0227699125913654, |
|
"learning_rate": 1.0197368421052633e-06, |
|
"loss": 1.1543, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 18.09375, |
|
"grad_norm": 1.0023191716319868, |
|
"learning_rate": 1.0032894736842106e-06, |
|
"loss": 1.1289, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 18.125, |
|
"grad_norm": 1.184064441667458, |
|
"learning_rate": 9.86842105263158e-07, |
|
"loss": 1.3711, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 18.15625, |
|
"grad_norm": 0.9164067698376666, |
|
"learning_rate": 9.703947368421054e-07, |
|
"loss": 1.0156, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 18.1875, |
|
"grad_norm": 1.068057644565925, |
|
"learning_rate": 9.539473684210528e-07, |
|
"loss": 1.2002, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 18.21875, |
|
"grad_norm": 0.9402667674389827, |
|
"learning_rate": 9.375000000000001e-07, |
|
"loss": 1.0391, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 18.25, |
|
"grad_norm": 1.0724473831725843, |
|
"learning_rate": 9.210526315789474e-07, |
|
"loss": 1.1973, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 18.28125, |
|
"grad_norm": 0.899157164043245, |
|
"learning_rate": 9.046052631578948e-07, |
|
"loss": 1.0166, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 18.3125, |
|
"grad_norm": 1.1592644941231875, |
|
"learning_rate": 8.881578947368422e-07, |
|
"loss": 1.2871, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 18.34375, |
|
"grad_norm": 1.1279497041498054, |
|
"learning_rate": 8.717105263157895e-07, |
|
"loss": 1.1748, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 18.375, |
|
"grad_norm": 1.0947637178030056, |
|
"learning_rate": 8.55263157894737e-07, |
|
"loss": 1.1621, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 18.40625, |
|
"grad_norm": 0.971340645847889, |
|
"learning_rate": 8.388157894736842e-07, |
|
"loss": 1.042, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 18.4375, |
|
"grad_norm": 1.135852458781184, |
|
"learning_rate": 8.223684210526317e-07, |
|
"loss": 1.3008, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 18.46875, |
|
"grad_norm": 1.0277173173937963, |
|
"learning_rate": 8.05921052631579e-07, |
|
"loss": 1.2139, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 18.5, |
|
"grad_norm": 0.9587889530985669, |
|
"learning_rate": 7.894736842105263e-07, |
|
"loss": 1.1016, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 18.53125, |
|
"grad_norm": 0.9531093861419058, |
|
"learning_rate": 7.730263157894738e-07, |
|
"loss": 1.0674, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 18.5625, |
|
"grad_norm": 1.1666257762014436, |
|
"learning_rate": 7.565789473684211e-07, |
|
"loss": 1.1885, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 18.59375, |
|
"grad_norm": 1.0813604792585834, |
|
"learning_rate": 7.401315789473685e-07, |
|
"loss": 1.2129, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 18.625, |
|
"grad_norm": 1.1302000825187128, |
|
"learning_rate": 7.236842105263158e-07, |
|
"loss": 1.2676, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 18.65625, |
|
"grad_norm": 0.9312416313068738, |
|
"learning_rate": 7.072368421052632e-07, |
|
"loss": 1.0625, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 18.6875, |
|
"grad_norm": 1.1066387434686906, |
|
"learning_rate": 6.907894736842106e-07, |
|
"loss": 1.1797, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 18.71875, |
|
"grad_norm": 1.1853531130328365, |
|
"learning_rate": 6.74342105263158e-07, |
|
"loss": 1.4082, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 18.75, |
|
"grad_norm": 1.208353836982986, |
|
"learning_rate": 6.578947368421053e-07, |
|
"loss": 1.3457, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 18.78125, |
|
"grad_norm": 1.0980397009903324, |
|
"learning_rate": 6.414473684210528e-07, |
|
"loss": 1.2666, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 18.8125, |
|
"grad_norm": 1.3324274671780056, |
|
"learning_rate": 6.25e-07, |
|
"loss": 1.4922, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 18.84375, |
|
"grad_norm": 1.0217051090320044, |
|
"learning_rate": 6.085526315789474e-07, |
|
"loss": 1.1758, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 18.875, |
|
"grad_norm": 1.1832208524104049, |
|
"learning_rate": 5.921052631578947e-07, |
|
"loss": 1.2168, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 18.90625, |
|
"grad_norm": 1.138472364101582, |
|
"learning_rate": 5.756578947368421e-07, |
|
"loss": 1.1719, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 18.9375, |
|
"grad_norm": 0.9489909061618802, |
|
"learning_rate": 5.592105263157895e-07, |
|
"loss": 1.0996, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 18.96875, |
|
"grad_norm": 1.0800080525984865, |
|
"learning_rate": 5.427631578947369e-07, |
|
"loss": 1.0498, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 19.0, |
|
"grad_norm": 1.1052351881529183, |
|
"learning_rate": 5.263157894736843e-07, |
|
"loss": 1.1875, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 19.03125, |
|
"grad_norm": 0.9529877481968042, |
|
"learning_rate": 5.098684210526317e-07, |
|
"loss": 1.0557, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 19.0625, |
|
"grad_norm": 1.2147697421430195, |
|
"learning_rate": 4.93421052631579e-07, |
|
"loss": 1.3379, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 19.09375, |
|
"grad_norm": 1.1820998596909449, |
|
"learning_rate": 4.769736842105264e-07, |
|
"loss": 1.2344, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 19.125, |
|
"grad_norm": 1.0063229019805375, |
|
"learning_rate": 4.605263157894737e-07, |
|
"loss": 1.2012, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 19.15625, |
|
"grad_norm": 1.2778895444138885, |
|
"learning_rate": 4.440789473684211e-07, |
|
"loss": 1.3174, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 19.1875, |
|
"grad_norm": 1.2037019784132217, |
|
"learning_rate": 4.276315789473685e-07, |
|
"loss": 1.3789, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 19.21875, |
|
"grad_norm": 1.1854865563566213, |
|
"learning_rate": 4.1118421052631586e-07, |
|
"loss": 1.3145, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 19.25, |
|
"grad_norm": 1.0783877288786168, |
|
"learning_rate": 3.9473684210526315e-07, |
|
"loss": 1.1973, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 19.28125, |
|
"grad_norm": 1.0345740226249058, |
|
"learning_rate": 3.7828947368421053e-07, |
|
"loss": 1.2031, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 19.3125, |
|
"grad_norm": 1.156444902840872, |
|
"learning_rate": 3.618421052631579e-07, |
|
"loss": 1.3125, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 19.34375, |
|
"grad_norm": 1.2384005537624165, |
|
"learning_rate": 3.453947368421053e-07, |
|
"loss": 1.293, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 19.375, |
|
"grad_norm": 1.0524947955888673, |
|
"learning_rate": 3.2894736842105264e-07, |
|
"loss": 1.2129, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 19.40625, |
|
"grad_norm": 0.9648764915337302, |
|
"learning_rate": 3.125e-07, |
|
"loss": 1.2207, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 19.4375, |
|
"grad_norm": 0.9444957798556137, |
|
"learning_rate": 2.9605263157894736e-07, |
|
"loss": 1.1064, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 19.46875, |
|
"grad_norm": 1.0658323116370334, |
|
"learning_rate": 2.7960526315789475e-07, |
|
"loss": 1.2363, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 19.5, |
|
"grad_norm": 0.885475098072067, |
|
"learning_rate": 2.6315789473684213e-07, |
|
"loss": 1.0254, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 19.53125, |
|
"grad_norm": 1.0065666057958396, |
|
"learning_rate": 2.467105263157895e-07, |
|
"loss": 1.0156, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 19.5625, |
|
"grad_norm": 0.921909489715102, |
|
"learning_rate": 2.3026315789473685e-07, |
|
"loss": 1.0195, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 19.59375, |
|
"grad_norm": 0.9649682488701493, |
|
"learning_rate": 2.1381578947368424e-07, |
|
"loss": 1.1338, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 19.625, |
|
"grad_norm": 1.053086822564824, |
|
"learning_rate": 1.9736842105263157e-07, |
|
"loss": 1.1055, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 19.65625, |
|
"grad_norm": 0.8521740297213248, |
|
"learning_rate": 1.8092105263157896e-07, |
|
"loss": 0.9922, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 19.6875, |
|
"grad_norm": 0.8978034476299945, |
|
"learning_rate": 1.6447368421052632e-07, |
|
"loss": 1.041, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 19.71875, |
|
"grad_norm": 1.2331631454932384, |
|
"learning_rate": 1.4802631578947368e-07, |
|
"loss": 1.334, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 19.75, |
|
"grad_norm": 1.1387006974305094, |
|
"learning_rate": 1.3157894736842107e-07, |
|
"loss": 1.2686, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 19.78125, |
|
"grad_norm": 1.032056506324884, |
|
"learning_rate": 1.1513157894736843e-07, |
|
"loss": 1.166, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 19.8125, |
|
"grad_norm": 1.0941401119246281, |
|
"learning_rate": 9.868421052631579e-08, |
|
"loss": 1.2197, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 19.84375, |
|
"grad_norm": 1.0184179316050685, |
|
"learning_rate": 8.223684210526316e-08, |
|
"loss": 1.123, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 19.875, |
|
"grad_norm": 1.014831481795213, |
|
"learning_rate": 6.578947368421053e-08, |
|
"loss": 1.1279, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 19.90625, |
|
"grad_norm": 1.044195891863528, |
|
"learning_rate": 4.934210526315789e-08, |
|
"loss": 1.0684, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 19.9375, |
|
"grad_norm": 0.980659591323473, |
|
"learning_rate": 3.2894736842105267e-08, |
|
"loss": 1.1396, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 19.96875, |
|
"grad_norm": 1.0564637321806472, |
|
"learning_rate": 1.6447368421052633e-08, |
|
"loss": 1.2861, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"grad_norm": 0.9521578921532778, |
|
"learning_rate": 0.0, |
|
"loss": 1.1064, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"step": 640, |
|
"total_flos": 7749606014976.0, |
|
"train_loss": 2.1912933349609376, |
|
"train_runtime": 187.775, |
|
"train_samples_per_second": 53.255, |
|
"train_steps_per_second": 3.408 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 640, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 20, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": false, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 7749606014976.0, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |