{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.030090270812437,
  "eval_steps": 1000,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05015045135406219,
      "grad_norm": 7.0412750244140625,
      "learning_rate": 5.000000000000001e-07,
      "loss": 1.2267,
      "step": 25
    },
    {
      "epoch": 0.10030090270812438,
      "grad_norm": 4.570739269256592,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.0287,
      "step": 50
    },
    {
      "epoch": 0.15045135406218657,
      "grad_norm": 4.061439514160156,
      "learning_rate": 1.5e-06,
      "loss": 0.716,
      "step": 75
    },
    {
      "epoch": 0.20060180541624875,
      "grad_norm": 3.5382726192474365,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.5682,
      "step": 100
    },
    {
      "epoch": 0.25075225677031093,
      "grad_norm": 3.460435152053833,
      "learning_rate": 2.5e-06,
      "loss": 0.5493,
      "step": 125
    },
    {
      "epoch": 0.30090270812437314,
      "grad_norm": 3.3355889320373535,
      "learning_rate": 3e-06,
      "loss": 0.4992,
      "step": 150
    },
    {
      "epoch": 0.3510531594784353,
      "grad_norm": 3.1046106815338135,
      "learning_rate": 3.5e-06,
      "loss": 0.495,
      "step": 175
    },
    {
      "epoch": 0.4012036108324975,
      "grad_norm": 3.444261074066162,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.4769,
      "step": 200
    },
    {
      "epoch": 0.45135406218655966,
      "grad_norm": 3.0431270599365234,
      "learning_rate": 4.5e-06,
      "loss": 0.4349,
      "step": 225
    },
    {
      "epoch": 0.5015045135406219,
      "grad_norm": 2.8672142028808594,
      "learning_rate": 5e-06,
      "loss": 0.4178,
      "step": 250
    },
    {
      "epoch": 0.551654964894684,
      "grad_norm": 3.4275848865509033,
      "learning_rate": 5.500000000000001e-06,
      "loss": 0.417,
      "step": 275
    },
    {
      "epoch": 0.6018054162487463,
      "grad_norm": 2.5542633533477783,
      "learning_rate": 6e-06,
      "loss": 0.409,
      "step": 300
    },
    {
      "epoch": 0.6519558676028084,
      "grad_norm": 2.6586201190948486,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 0.4022,
      "step": 325
    },
    {
      "epoch": 0.7021063189568706,
      "grad_norm": 3.106419324874878,
      "learning_rate": 7e-06,
      "loss": 0.4231,
      "step": 350
    },
    {
      "epoch": 0.7522567703109327,
      "grad_norm": 2.45819354057312,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.3943,
      "step": 375
    },
    {
      "epoch": 0.802407221664995,
      "grad_norm": 2.844208002090454,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.3942,
      "step": 400
    },
    {
      "epoch": 0.8525576730190572,
      "grad_norm": 2.8705875873565674,
      "learning_rate": 8.5e-06,
      "loss": 0.3947,
      "step": 425
    },
    {
      "epoch": 0.9027081243731193,
      "grad_norm": 2.5415868759155273,
      "learning_rate": 9e-06,
      "loss": 0.3928,
      "step": 450
    },
    {
      "epoch": 0.9528585757271816,
      "grad_norm": 3.9096789360046387,
      "learning_rate": 9.5e-06,
      "loss": 0.3802,
      "step": 475
    },
    {
      "epoch": 1.0030090270812437,
      "grad_norm": 2.4224791526794434,
      "learning_rate": 1e-05,
      "loss": 0.3726,
      "step": 500
    },
    {
      "epoch": 1.053159478435306,
      "grad_norm": 3.116126298904419,
      "learning_rate": 9.944444444444445e-06,
      "loss": 0.282,
      "step": 525
    },
    {
      "epoch": 1.103309929789368,
      "grad_norm": 3.0114049911499023,
      "learning_rate": 9.88888888888889e-06,
      "loss": 0.3006,
      "step": 550
    },
    {
      "epoch": 1.1534603811434303,
      "grad_norm": 2.352905035018921,
      "learning_rate": 9.833333333333333e-06,
      "loss": 0.2841,
      "step": 575
    },
    {
      "epoch": 1.2036108324974926,
      "grad_norm": 2.395094871520996,
      "learning_rate": 9.777777777777779e-06,
      "loss": 0.2905,
      "step": 600
    },
    {
      "epoch": 1.2537612838515546,
      "grad_norm": 2.6098897457122803,
      "learning_rate": 9.722222222222223e-06,
      "loss": 0.276,
      "step": 625
    },
    {
      "epoch": 1.3039117352056169,
      "grad_norm": 2.0559678077697754,
      "learning_rate": 9.666666666666667e-06,
      "loss": 0.2741,
      "step": 650
    },
    {
      "epoch": 1.3540621865596791,
      "grad_norm": 2.6575767993927,
      "learning_rate": 9.611111111111112e-06,
      "loss": 0.2903,
      "step": 675
    },
    {
      "epoch": 1.4042126379137412,
      "grad_norm": 2.2294228076934814,
      "learning_rate": 9.555555555555556e-06,
      "loss": 0.2818,
      "step": 700
    },
    {
      "epoch": 1.4543630892678034,
      "grad_norm": 2.2873806953430176,
      "learning_rate": 9.5e-06,
      "loss": 0.2778,
      "step": 725
    },
    {
      "epoch": 1.5045135406218657,
      "grad_norm": 3.035785436630249,
      "learning_rate": 9.444444444444445e-06,
      "loss": 0.2958,
      "step": 750
    },
    {
      "epoch": 1.5546639919759278,
      "grad_norm": 2.3122036457061768,
      "learning_rate": 9.38888888888889e-06,
      "loss": 0.2719,
      "step": 775
    },
    {
      "epoch": 1.60481444332999,
      "grad_norm": 2.1870856285095215,
      "learning_rate": 9.333333333333334e-06,
      "loss": 0.2696,
      "step": 800
    },
    {
      "epoch": 1.6549648946840523,
      "grad_norm": 2.747004270553589,
      "learning_rate": 9.277777777777778e-06,
      "loss": 0.276,
      "step": 825
    },
    {
      "epoch": 1.7051153460381143,
      "grad_norm": 2.084136486053467,
      "learning_rate": 9.222222222222224e-06,
      "loss": 0.2979,
      "step": 850
    },
    {
      "epoch": 1.7552657973921764,
      "grad_norm": 2.6127214431762695,
      "learning_rate": 9.166666666666666e-06,
      "loss": 0.2824,
      "step": 875
    },
    {
      "epoch": 1.8054162487462388,
      "grad_norm": 2.4067749977111816,
      "learning_rate": 9.111111111111112e-06,
      "loss": 0.2671,
      "step": 900
    },
    {
      "epoch": 1.855566700100301,
      "grad_norm": 2.577610731124878,
      "learning_rate": 9.055555555555556e-06,
      "loss": 0.2744,
      "step": 925
    },
    {
      "epoch": 1.905717151454363,
      "grad_norm": 2.2848598957061768,
      "learning_rate": 9e-06,
      "loss": 0.2658,
      "step": 950
    },
    {
      "epoch": 1.9558676028084254,
      "grad_norm": 2.707517385482788,
      "learning_rate": 8.944444444444446e-06,
      "loss": 0.2824,
      "step": 975
    },
    {
      "epoch": 2.0060180541624875,
      "grad_norm": 1.714905023574829,
      "learning_rate": 8.891111111111111e-06,
      "loss": 0.2506,
      "step": 1000
    },
    {
      "epoch": 2.0060180541624875,
      "eval_loss": 0.34494394063949585,
      "eval_runtime": 4608.7156,
      "eval_samples_per_second": 2.335,
      "eval_steps_per_second": 0.146,
      "eval_wer": 0.21134514589689432,
      "step": 1000
    },
    {
      "epoch": 2.0561685055165495,
      "grad_norm": 1.9843376874923706,
      "learning_rate": 8.835555555555557e-06,
      "loss": 0.1393,
      "step": 1025
    },
    {
      "epoch": 2.106318956870612,
      "grad_norm": 2.0423927307128906,
      "learning_rate": 8.78e-06,
      "loss": 0.1513,
      "step": 1050
    },
    {
      "epoch": 2.156469408224674,
      "grad_norm": 2.1197094917297363,
      "learning_rate": 8.724444444444445e-06,
      "loss": 0.1454,
      "step": 1075
    },
    {
      "epoch": 2.206619859578736,
      "grad_norm": 1.8990999460220337,
      "learning_rate": 8.66888888888889e-06,
      "loss": 0.1484,
      "step": 1100
    },
    {
      "epoch": 2.2567703109327986,
      "grad_norm": 1.6921278238296509,
      "learning_rate": 8.613333333333333e-06,
      "loss": 0.1461,
      "step": 1125
    },
    {
      "epoch": 2.3069207622868606,
      "grad_norm": 1.9527013301849365,
      "learning_rate": 8.557777777777778e-06,
      "loss": 0.1557,
      "step": 1150
    },
    {
      "epoch": 2.3570712136409226,
      "grad_norm": 2.20961332321167,
      "learning_rate": 8.502222222222223e-06,
      "loss": 0.1459,
      "step": 1175
    },
    {
      "epoch": 2.407221664994985,
      "grad_norm": 2.1781654357910156,
      "learning_rate": 8.446666666666668e-06,
      "loss": 0.156,
      "step": 1200
    },
    {
      "epoch": 2.457372116349047,
      "grad_norm": 2.2167611122131348,
      "learning_rate": 8.391111111111112e-06,
      "loss": 0.1527,
      "step": 1225
    },
    {
      "epoch": 2.507522567703109,
      "grad_norm": 2.0940988063812256,
      "learning_rate": 8.335555555555556e-06,
      "loss": 0.1535,
      "step": 1250
    },
    {
      "epoch": 2.5576730190571713,
      "grad_norm": 2.141779899597168,
      "learning_rate": 8.28e-06,
      "loss": 0.15,
      "step": 1275
    },
    {
      "epoch": 2.6078234704112337,
      "grad_norm": 1.9819048643112183,
      "learning_rate": 8.224444444444444e-06,
      "loss": 0.1483,
      "step": 1300
    },
    {
      "epoch": 2.657973921765296,
      "grad_norm": 1.7420545816421509,
      "learning_rate": 8.16888888888889e-06,
      "loss": 0.1487,
      "step": 1325
    },
    {
      "epoch": 2.7081243731193583,
      "grad_norm": 2.012843608856201,
      "learning_rate": 8.113333333333334e-06,
      "loss": 0.1549,
      "step": 1350
    },
    {
      "epoch": 2.7582748244734203,
      "grad_norm": 2.0659139156341553,
      "learning_rate": 8.057777777777778e-06,
      "loss": 0.1515,
      "step": 1375
    },
    {
      "epoch": 2.8084252758274824,
      "grad_norm": 2.046651840209961,
      "learning_rate": 8.002222222222222e-06,
      "loss": 0.1414,
      "step": 1400
    },
    {
      "epoch": 2.8585757271815444,
      "grad_norm": 2.0306413173675537,
      "learning_rate": 7.946666666666666e-06,
      "loss": 0.1486,
      "step": 1425
    },
    {
      "epoch": 2.908726178535607,
      "grad_norm": 2.124835729598999,
      "learning_rate": 7.891111111111112e-06,
      "loss": 0.1598,
      "step": 1450
    },
    {
      "epoch": 2.958876629889669,
      "grad_norm": 2.09136700630188,
      "learning_rate": 7.835555555555556e-06,
      "loss": 0.1507,
      "step": 1475
    },
    {
      "epoch": 3.0090270812437314,
      "grad_norm": 1.2488484382629395,
      "learning_rate": 7.78e-06,
      "loss": 0.1431,
      "step": 1500
    },
    {
      "epoch": 3.0591775325977935,
      "grad_norm": 1.3443397283554077,
      "learning_rate": 7.724444444444446e-06,
      "loss": 0.0791,
      "step": 1525
    },
    {
      "epoch": 3.1093279839518555,
      "grad_norm": 1.274675726890564,
      "learning_rate": 7.66888888888889e-06,
      "loss": 0.0807,
      "step": 1550
    },
    {
      "epoch": 3.1594784353059175,
      "grad_norm": 1.38137948513031,
      "learning_rate": 7.613333333333334e-06,
      "loss": 0.0814,
      "step": 1575
    },
    {
      "epoch": 3.20962888665998,
      "grad_norm": 1.5349384546279907,
      "learning_rate": 7.557777777777779e-06,
      "loss": 0.0792,
      "step": 1600
    },
    {
      "epoch": 3.259779338014042,
      "grad_norm": 1.2551178932189941,
      "learning_rate": 7.502222222222223e-06,
      "loss": 0.0786,
      "step": 1625
    },
    {
      "epoch": 3.3099297893681046,
      "grad_norm": 1.400694489479065,
      "learning_rate": 7.446666666666668e-06,
      "loss": 0.0827,
      "step": 1650
    },
    {
      "epoch": 3.3600802407221666,
      "grad_norm": 1.382521390914917,
      "learning_rate": 7.3911111111111125e-06,
      "loss": 0.0814,
      "step": 1675
    },
    {
      "epoch": 3.4102306920762286,
      "grad_norm": 1.4124006032943726,
      "learning_rate": 7.335555555555556e-06,
      "loss": 0.087,
      "step": 1700
    },
    {
      "epoch": 3.4603811434302907,
      "grad_norm": 1.130927562713623,
      "learning_rate": 7.280000000000001e-06,
      "loss": 0.0911,
      "step": 1725
    },
    {
      "epoch": 3.510531594784353,
      "grad_norm": 1.6954821348190308,
      "learning_rate": 7.224444444444445e-06,
      "loss": 0.0856,
      "step": 1750
    },
    {
      "epoch": 3.560682046138415,
      "grad_norm": 1.1873488426208496,
      "learning_rate": 7.1688888888888895e-06,
      "loss": 0.0829,
      "step": 1775
    },
    {
      "epoch": 3.6108324974924777,
      "grad_norm": 1.500501275062561,
      "learning_rate": 7.113333333333334e-06,
      "loss": 0.0828,
      "step": 1800
    },
    {
      "epoch": 3.6609829488465397,
      "grad_norm": 1.2689720392227173,
      "learning_rate": 7.057777777777778e-06,
      "loss": 0.0828,
      "step": 1825
    },
    {
      "epoch": 3.711133400200602,
      "grad_norm": 1.4504294395446777,
      "learning_rate": 7.0022222222222225e-06,
      "loss": 0.0875,
      "step": 1850
    },
    {
      "epoch": 3.761283851554664,
      "grad_norm": 1.286098599433899,
      "learning_rate": 6.946666666666667e-06,
      "loss": 0.082,
      "step": 1875
    },
    {
      "epoch": 3.8114343029087263,
      "grad_norm": 1.1930783987045288,
      "learning_rate": 6.891111111111111e-06,
      "loss": 0.0817,
      "step": 1900
    },
    {
      "epoch": 3.8615847542627884,
      "grad_norm": 1.4965267181396484,
      "learning_rate": 6.835555555555556e-06,
      "loss": 0.0808,
      "step": 1925
    },
    {
      "epoch": 3.9117352056168504,
      "grad_norm": 1.3505349159240723,
      "learning_rate": 6.780000000000001e-06,
      "loss": 0.0847,
      "step": 1950
    },
    {
      "epoch": 3.961885656970913,
      "grad_norm": 2.0747885704040527,
      "learning_rate": 6.724444444444444e-06,
      "loss": 0.0846,
      "step": 1975
    },
    {
      "epoch": 4.012036108324975,
      "grad_norm": 0.9830118417739868,
      "learning_rate": 6.668888888888889e-06,
      "loss": 0.0783,
      "step": 2000
    },
    {
      "epoch": 4.012036108324975,
      "eval_loss": 0.3656960725784302,
      "eval_runtime": 4598.7916,
      "eval_samples_per_second": 2.34,
      "eval_steps_per_second": 0.146,
      "eval_wer": 0.2009700383321599,
      "step": 2000
    },
    {
      "epoch": 4.062186559679037,
      "grad_norm": 1.436523675918579,
      "learning_rate": 6.613333333333334e-06,
      "loss": 0.0531,
      "step": 2025
    },
    {
      "epoch": 4.112337011033099,
      "grad_norm": 0.6627805233001709,
      "learning_rate": 6.557777777777778e-06,
      "loss": 0.0505,
      "step": 2050
    },
    {
      "epoch": 4.162487462387162,
      "grad_norm": 1.2392983436584473,
      "learning_rate": 6.502222222222223e-06,
      "loss": 0.0548,
      "step": 2075
    },
    {
      "epoch": 4.212637913741224,
      "grad_norm": 1.0560728311538696,
      "learning_rate": 6.446666666666668e-06,
      "loss": 0.0492,
      "step": 2100
    },
    {
      "epoch": 4.262788365095286,
      "grad_norm": 1.5553325414657593,
      "learning_rate": 6.391111111111111e-06,
      "loss": 0.0519,
      "step": 2125
    },
    {
      "epoch": 4.312938816449348,
      "grad_norm": 1.320204257965088,
      "learning_rate": 6.335555555555556e-06,
      "loss": 0.0534,
      "step": 2150
    },
    {
      "epoch": 4.36308926780341,
      "grad_norm": 0.9548274278640747,
      "learning_rate": 6.280000000000001e-06,
      "loss": 0.0506,
      "step": 2175
    },
    {
      "epoch": 4.413239719157472,
      "grad_norm": 1.0882978439331055,
      "learning_rate": 6.224444444444445e-06,
      "loss": 0.0513,
      "step": 2200
    },
    {
      "epoch": 4.463390170511534,
      "grad_norm": 1.3011126518249512,
      "learning_rate": 6.16888888888889e-06,
      "loss": 0.052,
      "step": 2225
    },
    {
      "epoch": 4.513540621865597,
      "grad_norm": 0.8610795140266418,
      "learning_rate": 6.113333333333333e-06,
      "loss": 0.0502,
      "step": 2250
    },
    {
      "epoch": 4.563691073219659,
      "grad_norm": 1.2139874696731567,
      "learning_rate": 6.057777777777778e-06,
      "loss": 0.0526,
      "step": 2275
    },
    {
      "epoch": 4.613841524573721,
      "grad_norm": 0.9586848020553589,
      "learning_rate": 6.002222222222223e-06,
      "loss": 0.0523,
      "step": 2300
    },
    {
      "epoch": 4.663991975927783,
      "grad_norm": 0.9463987350463867,
      "learning_rate": 5.946666666666668e-06,
      "loss": 0.0534,
      "step": 2325
    },
    {
      "epoch": 4.714142427281845,
      "grad_norm": 1.0908218622207642,
      "learning_rate": 5.891111111111112e-06,
      "loss": 0.0522,
      "step": 2350
    },
    {
      "epoch": 4.764292878635907,
      "grad_norm": 1.063152551651001,
      "learning_rate": 5.8355555555555565e-06,
      "loss": 0.0507,
      "step": 2375
    },
    {
      "epoch": 4.81444332998997,
      "grad_norm": 0.8035433292388916,
      "learning_rate": 5.78e-06,
      "loss": 0.052,
      "step": 2400
    },
    {
      "epoch": 4.864593781344032,
      "grad_norm": 1.3851343393325806,
      "learning_rate": 5.724444444444445e-06,
      "loss": 0.0511,
      "step": 2425
    },
    {
      "epoch": 4.914744232698094,
      "grad_norm": 0.9622118473052979,
      "learning_rate": 5.6688888888888895e-06,
      "loss": 0.0538,
      "step": 2450
    },
    {
      "epoch": 4.964894684052156,
      "grad_norm": 0.7414006590843201,
      "learning_rate": 5.613333333333334e-06,
      "loss": 0.0501,
      "step": 2475
    },
    {
      "epoch": 5.015045135406218,
      "grad_norm": 0.7375747561454773,
      "learning_rate": 5.557777777777778e-06,
      "loss": 0.0489,
      "step": 2500
    },
    {
      "epoch": 5.0651955867602805,
      "grad_norm": 0.4028901755809784,
      "learning_rate": 5.5022222222222224e-06,
      "loss": 0.0413,
      "step": 2525
    },
    {
      "epoch": 5.115346038114343,
      "grad_norm": 0.49548035860061646,
      "learning_rate": 5.4466666666666665e-06,
      "loss": 0.041,
      "step": 2550
    },
    {
      "epoch": 5.1654964894684054,
      "grad_norm": 0.7137969732284546,
      "learning_rate": 5.391111111111111e-06,
      "loss": 0.0393,
      "step": 2575
    },
    {
      "epoch": 5.2156469408224675,
      "grad_norm": 1.015681266784668,
      "learning_rate": 5.335555555555556e-06,
      "loss": 0.0396,
      "step": 2600
    },
    {
      "epoch": 5.2657973921765295,
      "grad_norm": 1.030476450920105,
      "learning_rate": 5.28e-06,
      "loss": 0.0386,
      "step": 2625
    },
    {
      "epoch": 5.315947843530592,
      "grad_norm": 1.2187836170196533,
      "learning_rate": 5.224444444444445e-06,
      "loss": 0.0391,
      "step": 2650
    },
    {
      "epoch": 5.366098294884654,
      "grad_norm": 0.6549712419509888,
      "learning_rate": 5.168888888888889e-06,
      "loss": 0.0388,
      "step": 2675
    },
    {
      "epoch": 5.4162487462387165,
      "grad_norm": 0.6556330919265747,
      "learning_rate": 5.113333333333333e-06,
      "loss": 0.0399,
      "step": 2700
    },
    {
      "epoch": 5.466399197592779,
      "grad_norm": 1.0968986749649048,
      "learning_rate": 5.057777777777778e-06,
      "loss": 0.039,
      "step": 2725
    },
    {
      "epoch": 5.516549648946841,
      "grad_norm": 0.8567794561386108,
      "learning_rate": 5.002222222222223e-06,
      "loss": 0.0395,
      "step": 2750
    },
    {
      "epoch": 5.566700100300903,
      "grad_norm": 0.48124751448631287,
      "learning_rate": 4.946666666666667e-06,
      "loss": 0.0385,
      "step": 2775
    },
    {
      "epoch": 5.616850551654965,
      "grad_norm": 0.16856200993061066,
      "learning_rate": 4.891111111111111e-06,
      "loss": 0.0381,
      "step": 2800
    },
    {
      "epoch": 5.667001003009027,
      "grad_norm": 0.4240492284297943,
      "learning_rate": 4.835555555555556e-06,
      "loss": 0.0401,
      "step": 2825
    },
    {
      "epoch": 5.717151454363089,
      "grad_norm": 0.7855473756790161,
      "learning_rate": 4.78e-06,
      "loss": 0.0397,
      "step": 2850
    },
    {
      "epoch": 5.767301905717152,
      "grad_norm": 1.1043782234191895,
      "learning_rate": 4.724444444444445e-06,
      "loss": 0.0384,
      "step": 2875
    },
    {
      "epoch": 5.817452357071214,
      "grad_norm": 0.4021261930465698,
      "learning_rate": 4.66888888888889e-06,
      "loss": 0.0387,
      "step": 2900
    },
    {
      "epoch": 5.867602808425276,
      "grad_norm": 0.9103265404701233,
      "learning_rate": 4.613333333333334e-06,
      "loss": 0.0429,
      "step": 2925
    },
    {
      "epoch": 5.917753259779338,
      "grad_norm": 2.1955697536468506,
      "learning_rate": 4.557777777777778e-06,
      "loss": 0.0395,
      "step": 2950
    },
    {
      "epoch": 5.9679037111334,
      "grad_norm": 0.39817795157432556,
      "learning_rate": 4.502222222222223e-06,
      "loss": 0.0386,
      "step": 2975
    },
    {
      "epoch": 6.018054162487463,
      "grad_norm": 0.6511321067810059,
      "learning_rate": 4.446666666666667e-06,
      "loss": 0.0377,
      "step": 3000
    },
    {
      "epoch": 6.018054162487463,
      "eval_loss": 0.4252609610557556,
      "eval_runtime": 4620.2895,
      "eval_samples_per_second": 2.33,
      "eval_steps_per_second": 0.146,
      "eval_wer": 0.19864272862395368,
      "step": 3000
    },
    {
      "epoch": 6.068204613841525,
      "grad_norm": 0.8066951036453247,
      "learning_rate": 4.391111111111112e-06,
      "loss": 0.0351,
      "step": 3025
    },
    {
      "epoch": 6.118355065195587,
      "grad_norm": 1.054024338722229,
      "learning_rate": 4.3355555555555565e-06,
      "loss": 0.0364,
      "step": 3050
    },
    {
      "epoch": 6.168505516549649,
      "grad_norm": 0.4796682894229889,
      "learning_rate": 4.2800000000000005e-06,
      "loss": 0.0348,
      "step": 3075
    },
    {
      "epoch": 6.218655967903711,
      "grad_norm": 1.1482007503509521,
      "learning_rate": 4.2244444444444446e-06,
      "loss": 0.0344,
      "step": 3100
    },
    {
      "epoch": 6.268806419257773,
      "grad_norm": 0.5434701442718506,
      "learning_rate": 4.168888888888889e-06,
      "loss": 0.0346,
      "step": 3125
    },
    {
      "epoch": 6.318956870611835,
      "grad_norm": 0.18123650550842285,
      "learning_rate": 4.1133333333333335e-06,
      "loss": 0.0342,
      "step": 3150
    },
    {
      "epoch": 6.369107321965898,
      "grad_norm": 0.48640918731689453,
      "learning_rate": 4.057777777777778e-06,
      "loss": 0.0345,
      "step": 3175
    },
    {
      "epoch": 6.41925777331996,
      "grad_norm": 0.16967743635177612,
      "learning_rate": 4.002222222222222e-06,
      "loss": 0.0339,
      "step": 3200
    },
    {
      "epoch": 6.469408224674022,
      "grad_norm": 1.0342961549758911,
      "learning_rate": 3.946666666666667e-06,
      "loss": 0.035,
      "step": 3225
    },
    {
      "epoch": 6.519558676028084,
      "grad_norm": 0.4811567962169647,
      "learning_rate": 3.891111111111111e-06,
      "loss": 0.0346,
      "step": 3250
    },
    {
      "epoch": 6.569709127382146,
      "grad_norm": 0.41370365023612976,
      "learning_rate": 3.835555555555555e-06,
      "loss": 0.0347,
      "step": 3275
    },
    {
      "epoch": 6.619859578736209,
      "grad_norm": 0.32207295298576355,
      "learning_rate": 3.7800000000000002e-06,
      "loss": 0.0341,
      "step": 3300
    },
    {
      "epoch": 6.670010030090271,
      "grad_norm": 0.6185954213142395,
      "learning_rate": 3.724444444444445e-06,
      "loss": 0.034,
      "step": 3325
    },
    {
      "epoch": 6.720160481444333,
      "grad_norm": 0.23153382539749146,
      "learning_rate": 3.668888888888889e-06,
      "loss": 0.0345,
      "step": 3350
    },
    {
      "epoch": 6.770310932798395,
      "grad_norm": 0.8901228308677673,
      "learning_rate": 3.6133333333333336e-06,
      "loss": 0.0342,
      "step": 3375
    },
    {
      "epoch": 6.820461384152457,
      "grad_norm": 0.6802942156791687,
      "learning_rate": 3.5577777777777785e-06,
      "loss": 0.0355,
      "step": 3400
    },
    {
      "epoch": 6.870611835506519,
      "grad_norm": 0.41077226400375366,
      "learning_rate": 3.5022222222222225e-06,
      "loss": 0.0358,
      "step": 3425
    },
    {
      "epoch": 6.920762286860581,
      "grad_norm": 0.5837184190750122,
      "learning_rate": 3.446666666666667e-06,
      "loss": 0.0336,
      "step": 3450
    },
    {
      "epoch": 6.970912738214644,
      "grad_norm": 0.5418899655342102,
      "learning_rate": 3.391111111111111e-06,
      "loss": 0.0337,
      "step": 3475
    },
    {
      "epoch": 7.021063189568706,
      "grad_norm": 0.4523651599884033,
      "learning_rate": 3.335555555555556e-06,
      "loss": 0.0336,
      "step": 3500
    },
    {
      "epoch": 7.071213640922768,
      "grad_norm": 0.20040252804756165,
      "learning_rate": 3.2800000000000004e-06,
      "loss": 0.0332,
      "step": 3525
    },
    {
      "epoch": 7.12136409227683,
      "grad_norm": 0.8929922580718994,
      "learning_rate": 3.2244444444444444e-06,
      "loss": 0.0333,
      "step": 3550
    },
    {
      "epoch": 7.1715145436308925,
      "grad_norm": 0.19636617600917816,
      "learning_rate": 3.1688888888888893e-06,
      "loss": 0.0329,
      "step": 3575
    },
    {
      "epoch": 7.2216649949849545,
      "grad_norm": 0.5786032676696777,
      "learning_rate": 3.1133333333333337e-06,
      "loss": 0.0323,
      "step": 3600
    },
    {
      "epoch": 7.271815446339017,
      "grad_norm": 0.14882159233093262,
      "learning_rate": 3.0577777777777778e-06,
      "loss": 0.0323,
      "step": 3625
    },
    {
      "epoch": 7.3219658976930795,
      "grad_norm": 0.6977226138114929,
      "learning_rate": 3.0022222222222227e-06,
      "loss": 0.0337,
      "step": 3650
    },
    {
      "epoch": 7.3721163490471415,
      "grad_norm": 0.27530452609062195,
      "learning_rate": 2.946666666666667e-06,
      "loss": 0.034,
      "step": 3675
    },
    {
      "epoch": 7.422266800401204,
      "grad_norm": 0.16913309693336487,
      "learning_rate": 2.891111111111111e-06,
      "loss": 0.0333,
      "step": 3700
    },
    {
      "epoch": 7.472417251755266,
      "grad_norm": 0.42673996090888977,
      "learning_rate": 2.835555555555556e-06,
      "loss": 0.0349,
      "step": 3725
    },
    {
      "epoch": 7.522567703109328,
      "grad_norm": 1.0766425132751465,
      "learning_rate": 2.7800000000000005e-06,
      "loss": 0.0328,
      "step": 3750
    },
    {
      "epoch": 7.57271815446339,
      "grad_norm": 0.1712706983089447,
      "learning_rate": 2.7244444444444445e-06,
      "loss": 0.0332,
      "step": 3775
    },
    {
      "epoch": 7.622868605817453,
      "grad_norm": 0.3342408239841461,
      "learning_rate": 2.6688888888888894e-06,
      "loss": 0.0328,
      "step": 3800
    },
    {
      "epoch": 7.673019057171515,
      "grad_norm": 0.15629036724567413,
      "learning_rate": 2.6133333333333334e-06,
      "loss": 0.0334,
      "step": 3825
    },
    {
      "epoch": 7.723169508525577,
      "grad_norm": 0.923606276512146,
      "learning_rate": 2.557777777777778e-06,
      "loss": 0.0339,
      "step": 3850
    },
    {
      "epoch": 7.773319959879639,
      "grad_norm": 0.9420749545097351,
      "learning_rate": 2.5022222222222224e-06,
      "loss": 0.033,
      "step": 3875
    },
    {
      "epoch": 7.823470411233701,
      "grad_norm": 0.7262980341911316,
      "learning_rate": 2.446666666666667e-06,
      "loss": 0.0347,
      "step": 3900
    },
    {
      "epoch": 7.873620862587764,
      "grad_norm": 0.44713786244392395,
      "learning_rate": 2.3911111111111113e-06,
      "loss": 0.0319,
      "step": 3925
    },
    {
      "epoch": 7.923771313941826,
      "grad_norm": 0.16295509040355682,
      "learning_rate": 2.3355555555555557e-06,
      "loss": 0.0326,
      "step": 3950
    },
    {
      "epoch": 7.973921765295888,
      "grad_norm": 0.7272210717201233,
      "learning_rate": 2.28e-06,
      "loss": 0.0318,
      "step": 3975
    },
    {
      "epoch": 8.02407221664995,
      "grad_norm": 0.36710575222969055,
      "learning_rate": 2.2244444444444447e-06,
      "loss": 0.0326,
      "step": 4000
    },
    {
      "epoch": 8.02407221664995,
      "eval_loss": 0.4601472318172455,
      "eval_runtime": 4510.0106,
      "eval_samples_per_second": 2.386,
      "eval_steps_per_second": 0.149,
      "eval_wer": 0.19771376046311506,
      "step": 4000
    },
    {
      "epoch": 8.074222668004012,
      "grad_norm": 0.6596123576164246,
      "learning_rate": 2.168888888888889e-06,
      "loss": 0.0318,
      "step": 4025
    },
    {
      "epoch": 8.124373119358074,
      "grad_norm": 0.12991444766521454,
      "learning_rate": 2.1133333333333336e-06,
      "loss": 0.0329,
      "step": 4050
    },
    {
      "epoch": 8.174523570712136,
      "grad_norm": 0.17968349158763885,
      "learning_rate": 2.057777777777778e-06,
      "loss": 0.0315,
      "step": 4075
    },
    {
      "epoch": 8.224674022066198,
      "grad_norm": 0.2529272139072418,
      "learning_rate": 2.0022222222222225e-06,
      "loss": 0.0347,
      "step": 4100
    },
    {
      "epoch": 8.27482447342026,
      "grad_norm": 0.31072911620140076,
      "learning_rate": 1.9466666666666665e-06,
      "loss": 0.0329,
      "step": 4125
    },
    {
      "epoch": 8.324974924774324,
      "grad_norm": 0.3404042422771454,
      "learning_rate": 1.8911111111111114e-06,
      "loss": 0.0317,
      "step": 4150
    },
    {
      "epoch": 8.375125376128386,
      "grad_norm": 0.6961965560913086,
      "learning_rate": 1.8355555555555557e-06,
      "loss": 0.032,
      "step": 4175
    },
    {
      "epoch": 8.425275827482448,
      "grad_norm": 0.3431238830089569,
      "learning_rate": 1.7800000000000001e-06,
      "loss": 0.0327,
      "step": 4200
    },
    {
      "epoch": 8.47542627883651,
      "grad_norm": 0.9905340671539307,
      "learning_rate": 1.7244444444444448e-06,
      "loss": 0.032,
      "step": 4225
    },
    {
      "epoch": 8.525576730190572,
      "grad_norm": 0.17600752413272858,
      "learning_rate": 1.668888888888889e-06,
      "loss": 0.0321,
      "step": 4250
    },
    {
      "epoch": 8.575727181544634,
      "grad_norm": 0.4973747432231903,
      "learning_rate": 1.6133333333333335e-06,
      "loss": 0.0314,
      "step": 4275
    },
    {
      "epoch": 8.625877632898696,
      "grad_norm": 0.6604855060577393,
      "learning_rate": 1.5577777777777777e-06,
      "loss": 0.0325,
      "step": 4300
    },
    {
      "epoch": 8.676028084252758,
      "grad_norm": 0.46838390827178955,
      "learning_rate": 1.5022222222222224e-06,
      "loss": 0.0325,
      "step": 4325
    },
    {
      "epoch": 8.72617853560682,
      "grad_norm": 0.30177703499794006,
      "learning_rate": 1.4466666666666669e-06,
      "loss": 0.0316,
      "step": 4350
    },
    {
      "epoch": 8.776328986960882,
      "grad_norm": 0.6524584293365479,
      "learning_rate": 1.3911111111111111e-06,
      "loss": 0.0321,
      "step": 4375
    },
    {
      "epoch": 8.826479438314944,
      "grad_norm": 0.9478035569190979,
      "learning_rate": 1.3355555555555558e-06,
      "loss": 0.0318,
      "step": 4400
    },
    {
      "epoch": 8.876629889669006,
      "grad_norm": 0.1826964169740677,
      "learning_rate": 1.28e-06,
      "loss": 0.032,
      "step": 4425
    },
    {
      "epoch": 8.926780341023068,
      "grad_norm": 0.35147854685783386,
      "learning_rate": 1.2244444444444445e-06,
      "loss": 0.0323,
      "step": 4450
    },
    {
      "epoch": 8.976930792377132,
      "grad_norm": 0.12909051775932312,
      "learning_rate": 1.168888888888889e-06,
      "loss": 0.0327,
      "step": 4475
    },
    {
      "epoch": 9.027081243731194,
      "grad_norm": 0.18831254541873932,
      "learning_rate": 1.1133333333333334e-06,
      "loss": 0.0325,
      "step": 4500
    },
    {
      "epoch": 9.077231695085256,
      "grad_norm": 0.17683148384094238,
      "learning_rate": 1.0577777777777779e-06,
      "loss": 0.0328,
      "step": 4525
    },
    {
      "epoch": 9.127382146439318,
      "grad_norm": 0.47570478916168213,
      "learning_rate": 1.0022222222222223e-06,
      "loss": 0.0316,
      "step": 4550
    },
    {
      "epoch": 9.17753259779338,
      "grad_norm": 0.5286629796028137,
      "learning_rate": 9.466666666666667e-07,
      "loss": 0.0322,
      "step": 4575
    },
    {
      "epoch": 9.227683049147442,
      "grad_norm": 0.7967525720596313,
      "learning_rate": 8.911111111111112e-07,
      "loss": 0.0311,
      "step": 4600
    },
    {
      "epoch": 9.277833500501504,
      "grad_norm": 0.6575748324394226,
      "learning_rate": 8.355555555555556e-07,
      "loss": 0.0322,
      "step": 4625
    },
    {
      "epoch": 9.327983951855567,
      "grad_norm": 0.705969512462616,
      "learning_rate": 7.8e-07,
      "loss": 0.0316,
      "step": 4650
    },
    {
      "epoch": 9.378134403209629,
      "grad_norm": 0.385044664144516,
      "learning_rate": 7.244444444444446e-07,
      "loss": 0.0316,
      "step": 4675
    },
    {
      "epoch": 9.42828485456369,
      "grad_norm": 1.0980923175811768,
      "learning_rate": 6.68888888888889e-07,
      "loss": 0.0318,
      "step": 4700
    },
    {
      "epoch": 9.478435305917753,
      "grad_norm": 0.8150089979171753,
      "learning_rate": 6.133333333333333e-07,
      "loss": 0.0328,
      "step": 4725
    },
    {
      "epoch": 9.528585757271815,
      "grad_norm": 0.7437707185745239,
      "learning_rate": 5.577777777777779e-07,
      "loss": 0.0319,
      "step": 4750
    },
    {
      "epoch": 9.578736208625877,
      "grad_norm": 0.18542395532131195,
      "learning_rate": 5.022222222222222e-07,
      "loss": 0.0316,
      "step": 4775
    },
    {
      "epoch": 9.62888665997994,
      "grad_norm": 0.22823591530323029,
      "learning_rate": 4.466666666666667e-07,
      "loss": 0.0321,
      "step": 4800
    },
    {
      "epoch": 9.679037111334003,
      "grad_norm": 0.5161811113357544,
      "learning_rate": 3.9111111111111115e-07,
      "loss": 0.0323,
      "step": 4825
    },
    {
      "epoch": 9.729187562688065,
      "grad_norm": 0.797174334526062,
      "learning_rate": 3.3555555555555556e-07,
      "loss": 0.032,
      "step": 4850
    },
    {
      "epoch": 9.779338014042127,
      "grad_norm": 0.7822555899620056,
      "learning_rate": 2.8e-07,
      "loss": 0.0313,
      "step": 4875
    },
    {
      "epoch": 9.829488465396189,
      "grad_norm": 0.7201440930366516,
      "learning_rate": 2.2444444444444445e-07,
      "loss": 0.0319,
      "step": 4900
    },
    {
      "epoch": 9.87963891675025,
      "grad_norm": 0.06772810220718384,
      "learning_rate": 1.6888888888888888e-07,
      "loss": 0.032,
      "step": 4925
    },
    {
      "epoch": 9.929789368104313,
      "grad_norm": 0.12714877724647522,
      "learning_rate": 1.1333333333333336e-07,
      "loss": 0.0315,
      "step": 4950
    },
    {
      "epoch": 9.979939819458375,
      "grad_norm": 0.15611818432807922,
      "learning_rate": 5.777777777777778e-08,
      "loss": 0.0317,
      "step": 4975
    },
    {
      "epoch": 10.030090270812437,
      "grad_norm": 0.24346430599689484,
      "learning_rate": 2.2222222222222225e-09,
      "loss": 0.0313,
      "step": 5000
    },
    {
      "epoch": 10.030090270812437,
      "eval_loss": 0.4825207591056824,
      "eval_runtime": 4514.4484,
      "eval_samples_per_second": 2.384,
      "eval_steps_per_second": 0.149,
      "eval_wer": 0.19794844715637958,
      "step": 5000
    },
    {
      "epoch": 10.030090270812437,
      "step": 5000,
      "total_flos": 5.43090104303616e+20,
      "train_loss": 0.1254427011013031,
      "train_runtime": 51254.2105,
      "train_samples_per_second": 3.122,
      "train_steps_per_second": 0.098
    }
  ],
  "logging_steps": 25,
  "max_steps": 5000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 11,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.43090104303616e+20,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}