{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 30.01,
  "eval_steps": 3000,
  "global_step": 3000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008333333333333333,
      "grad_norm": 44.35525131225586,
      "learning_rate": 4.2000000000000006e-07,
      "loss": 2.441,
      "step": 25
    },
    {
      "epoch": 0.016666666666666666,
      "grad_norm": 22.19506072998047,
      "learning_rate": 9.200000000000001e-07,
      "loss": 1.7682,
      "step": 50
    },
    {
      "epoch": 0.025,
      "grad_norm": 9.717977523803711,
      "learning_rate": 1.42e-06,
      "loss": 0.9699,
      "step": 75
    },
    {
      "epoch": 1.0003333333333333,
      "grad_norm": 7.14320182800293,
      "learning_rate": 1.9200000000000003e-06,
      "loss": 0.5701,
      "step": 100
    },
    {
      "epoch": 1.0086666666666666,
      "grad_norm": 3.4074296951293945,
      "learning_rate": 2.42e-06,
      "loss": 0.3157,
      "step": 125
    },
    {
      "epoch": 1.017,
      "grad_norm": 3.311967372894287,
      "learning_rate": 2.92e-06,
      "loss": 0.2732,
      "step": 150
    },
    {
      "epoch": 1.0253333333333334,
      "grad_norm": 3.092350959777832,
      "learning_rate": 3.4200000000000007e-06,
      "loss": 0.2653,
      "step": 175
    },
    {
      "epoch": 2.0006666666666666,
      "grad_norm": 2.89245343208313,
      "learning_rate": 3.920000000000001e-06,
      "loss": 0.2502,
      "step": 200
    },
    {
      "epoch": 2.009,
      "grad_norm": 3.0979723930358887,
      "learning_rate": 4.42e-06,
      "loss": 0.1824,
      "step": 225
    },
    {
      "epoch": 2.017333333333333,
      "grad_norm": 3.1348249912261963,
      "learning_rate": 4.92e-06,
      "loss": 0.1828,
      "step": 250
    },
    {
      "epoch": 2.0256666666666665,
      "grad_norm": 2.990718364715576,
      "learning_rate": 5.420000000000001e-06,
      "loss": 0.187,
      "step": 275
    },
    {
      "epoch": 3.001,
      "grad_norm": 2.026869058609009,
      "learning_rate": 5.92e-06,
      "loss": 0.1859,
      "step": 300
    },
    {
      "epoch": 3.009333333333333,
      "grad_norm": 1.9744936227798462,
      "learning_rate": 6.42e-06,
      "loss": 0.1179,
      "step": 325
    },
    {
      "epoch": 3.0176666666666665,
      "grad_norm": 2.247405767440796,
      "learning_rate": 6.92e-06,
      "loss": 0.1157,
      "step": 350
    },
    {
      "epoch": 3.026,
      "grad_norm": 2.4352807998657227,
      "learning_rate": 7.420000000000001e-06,
      "loss": 0.1191,
      "step": 375
    },
    {
      "epoch": 4.001333333333333,
      "grad_norm": 1.5341213941574097,
      "learning_rate": 7.92e-06,
      "loss": 0.1136,
      "step": 400
    },
    {
      "epoch": 4.009666666666667,
      "grad_norm": 1.683750033378601,
      "learning_rate": 8.42e-06,
      "loss": 0.0566,
      "step": 425
    },
    {
      "epoch": 4.018,
      "grad_norm": 1.9196466207504272,
      "learning_rate": 8.920000000000001e-06,
      "loss": 0.0575,
      "step": 450
    },
    {
      "epoch": 4.0263333333333335,
      "grad_norm": 1.5782861709594727,
      "learning_rate": 9.42e-06,
      "loss": 0.0651,
      "step": 475
    },
    {
      "epoch": 5.001666666666667,
      "grad_norm": 1.724906325340271,
      "learning_rate": 9.920000000000002e-06,
      "loss": 0.0546,
      "step": 500
    },
    {
      "epoch": 5.01,
      "grad_norm": 1.6357895135879517,
      "learning_rate": 9.916000000000001e-06,
      "loss": 0.0253,
      "step": 525
    },
    {
      "epoch": 5.0183333333333335,
      "grad_norm": 1.2953633069992065,
      "learning_rate": 9.816e-06,
      "loss": 0.0254,
      "step": 550
    },
    {
      "epoch": 5.026666666666666,
      "grad_norm": 1.3112106323242188,
      "learning_rate": 9.716000000000002e-06,
      "loss": 0.0281,
      "step": 575
    },
    {
      "epoch": 6.002,
      "grad_norm": 1.5078754425048828,
      "learning_rate": 9.616e-06,
      "loss": 0.0243,
      "step": 600
    },
    {
      "epoch": 6.0103333333333335,
      "grad_norm": 0.5965685248374939,
      "learning_rate": 9.516e-06,
      "loss": 0.0149,
      "step": 625
    },
    {
      "epoch": 6.018666666666666,
      "grad_norm": 0.7676089406013489,
      "learning_rate": 9.416000000000001e-06,
      "loss": 0.0133,
      "step": 650
    },
    {
      "epoch": 6.027,
      "grad_norm": 0.9765186905860901,
      "learning_rate": 9.316e-06,
      "loss": 0.0137,
      "step": 675
    },
    {
      "epoch": 7.0023333333333335,
      "grad_norm": 1.1814440488815308,
      "learning_rate": 9.216000000000001e-06,
      "loss": 0.0145,
      "step": 700
    },
    {
      "epoch": 7.010666666666666,
      "grad_norm": 0.573243260383606,
      "learning_rate": 9.116e-06,
      "loss": 0.0095,
      "step": 725
    },
    {
      "epoch": 7.019,
      "grad_norm": 0.9459896683692932,
      "learning_rate": 9.016e-06,
      "loss": 0.0066,
      "step": 750
    },
    {
      "epoch": 7.027333333333333,
      "grad_norm": 1.4142969846725464,
      "learning_rate": 8.916e-06,
      "loss": 0.0076,
      "step": 775
    },
    {
      "epoch": 8.002666666666666,
      "grad_norm": 0.46104899048805237,
      "learning_rate": 8.816000000000002e-06,
      "loss": 0.0078,
      "step": 800
    },
    {
      "epoch": 8.011,
      "grad_norm": 0.6141907572746277,
      "learning_rate": 8.716000000000001e-06,
      "loss": 0.0064,
      "step": 825
    },
    {
      "epoch": 8.019333333333334,
      "grad_norm": 0.41213399171829224,
      "learning_rate": 8.616000000000002e-06,
      "loss": 0.005,
      "step": 850
    },
    {
      "epoch": 8.027666666666667,
      "grad_norm": 3.1000335216522217,
      "learning_rate": 8.516000000000001e-06,
      "loss": 0.0049,
      "step": 875
    },
    {
      "epoch": 9.003,
      "grad_norm": 0.7775946259498596,
      "learning_rate": 8.416e-06,
      "loss": 0.0047,
      "step": 900
    },
    {
      "epoch": 9.011333333333333,
      "grad_norm": 0.1238495409488678,
      "learning_rate": 8.316000000000001e-06,
      "loss": 0.0039,
      "step": 925
    },
    {
      "epoch": 9.019666666666666,
      "grad_norm": 0.17236466705799103,
      "learning_rate": 8.216e-06,
      "loss": 0.0034,
      "step": 950
    },
    {
      "epoch": 9.028,
      "grad_norm": 0.5582916140556335,
      "learning_rate": 8.116e-06,
      "loss": 0.0039,
      "step": 975
    },
    {
      "epoch": 10.003333333333334,
      "grad_norm": 0.07784730941057205,
      "learning_rate": 8.016e-06,
      "loss": 0.0027,
      "step": 1000
    },
    {
      "epoch": 10.011666666666667,
      "grad_norm": 0.07262728363275528,
      "learning_rate": 7.916e-06,
      "loss": 0.002,
      "step": 1025
    },
    {
      "epoch": 10.02,
      "grad_norm": 0.08925045281648636,
      "learning_rate": 7.816000000000001e-06,
      "loss": 0.0028,
      "step": 1050
    },
    {
      "epoch": 10.028333333333334,
      "grad_norm": 0.39910703897476196,
      "learning_rate": 7.716e-06,
      "loss": 0.002,
      "step": 1075
    },
    {
      "epoch": 11.003666666666666,
      "grad_norm": 0.0856546089053154,
      "learning_rate": 7.616000000000001e-06,
      "loss": 0.0015,
      "step": 1100
    },
    {
      "epoch": 11.012,
      "grad_norm": 0.8903342485427856,
      "learning_rate": 7.516000000000001e-06,
      "loss": 0.004,
      "step": 1125
    },
    {
      "epoch": 11.020333333333333,
      "grad_norm": 0.06683026254177094,
      "learning_rate": 7.416000000000001e-06,
      "loss": 0.0017,
      "step": 1150
    },
    {
      "epoch": 11.028666666666666,
      "grad_norm": 0.3519723415374756,
      "learning_rate": 7.316000000000001e-06,
      "loss": 0.0013,
      "step": 1175
    },
    {
      "epoch": 12.004,
      "grad_norm": 0.05283417925238609,
      "learning_rate": 7.216000000000001e-06,
      "loss": 0.0012,
      "step": 1200
    },
    {
      "epoch": 12.012333333333334,
      "grad_norm": 0.797774076461792,
      "learning_rate": 7.116000000000001e-06,
      "loss": 0.0018,
      "step": 1225
    },
    {
      "epoch": 12.020666666666667,
      "grad_norm": 0.26817482709884644,
      "learning_rate": 7.016e-06,
      "loss": 0.0022,
      "step": 1250
    },
    {
      "epoch": 12.029,
      "grad_norm": 0.04436012730002403,
      "learning_rate": 6.916e-06,
      "loss": 0.0013,
      "step": 1275
    },
    {
      "epoch": 13.004333333333333,
      "grad_norm": 0.039572861045598984,
      "learning_rate": 6.8160000000000005e-06,
      "loss": 0.0012,
      "step": 1300
    },
    {
      "epoch": 13.012666666666666,
      "grad_norm": 0.06484813243150711,
      "learning_rate": 6.716000000000001e-06,
      "loss": 0.0009,
      "step": 1325
    },
    {
      "epoch": 13.021,
      "grad_norm": 0.02630372904241085,
      "learning_rate": 6.616e-06,
      "loss": 0.0011,
      "step": 1350
    },
    {
      "epoch": 13.029333333333334,
      "grad_norm": 0.04369799420237541,
      "learning_rate": 6.516e-06,
      "loss": 0.0013,
      "step": 1375
    },
    {
      "epoch": 14.004666666666667,
      "grad_norm": 0.036155689507722855,
      "learning_rate": 6.416e-06,
      "loss": 0.0009,
      "step": 1400
    },
    {
      "epoch": 14.013,
      "grad_norm": 0.024141667410731316,
      "learning_rate": 6.316000000000001e-06,
      "loss": 0.0006,
      "step": 1425
    },
    {
      "epoch": 14.021333333333333,
      "grad_norm": 0.1784670650959015,
      "learning_rate": 6.216000000000001e-06,
      "loss": 0.0007,
      "step": 1450
    },
    {
      "epoch": 14.029666666666667,
      "grad_norm": 0.023341720923781395,
      "learning_rate": 6.116000000000001e-06,
      "loss": 0.0012,
      "step": 1475
    },
    {
      "epoch": 15.005,
      "grad_norm": 0.018143419176340103,
      "learning_rate": 6.0160000000000005e-06,
      "loss": 0.0008,
      "step": 1500
    },
    {
      "epoch": 15.013333333333334,
      "grad_norm": 0.02102494426071644,
      "learning_rate": 5.916000000000001e-06,
      "loss": 0.0007,
      "step": 1525
    },
    {
      "epoch": 15.021666666666667,
      "grad_norm": 0.023690875619649887,
      "learning_rate": 5.816000000000001e-06,
      "loss": 0.0006,
      "step": 1550
    },
    {
      "epoch": 15.03,
      "grad_norm": 0.04580727219581604,
      "learning_rate": 5.716000000000001e-06,
      "loss": 0.001,
      "step": 1575
    },
    {
      "epoch": 16.005333333333333,
      "grad_norm": 0.018508635461330414,
      "learning_rate": 5.616e-06,
      "loss": 0.0005,
      "step": 1600
    },
    {
      "epoch": 16.013666666666666,
      "grad_norm": 0.019894391298294067,
      "learning_rate": 5.516e-06,
      "loss": 0.0005,
      "step": 1625
    },
    {
      "epoch": 16.022,
      "grad_norm": 0.022277018055319786,
      "learning_rate": 5.416e-06,
      "loss": 0.0005,
      "step": 1650
    },
    {
      "epoch": 16.030333333333335,
      "grad_norm": 0.02107921987771988,
      "learning_rate": 5.3160000000000004e-06,
      "loss": 0.0006,
      "step": 1675
    },
    {
      "epoch": 17.005666666666666,
      "grad_norm": 0.0199015811085701,
      "learning_rate": 5.216e-06,
      "loss": 0.0004,
      "step": 1700
    },
    {
      "epoch": 17.014,
      "grad_norm": 0.014493563212454319,
      "learning_rate": 5.116000000000001e-06,
      "loss": 0.0004,
      "step": 1725
    },
    {
      "epoch": 17.022333333333332,
      "grad_norm": 0.017925165593624115,
      "learning_rate": 5.016000000000001e-06,
      "loss": 0.0008,
      "step": 1750
    },
    {
      "epoch": 17.030666666666665,
      "grad_norm": 0.014746254310011864,
      "learning_rate": 4.916e-06,
      "loss": 0.0004,
      "step": 1775
    },
    {
      "epoch": 18.006,
      "grad_norm": 0.01628812402486801,
      "learning_rate": 4.816e-06,
      "loss": 0.0004,
      "step": 1800
    },
    {
      "epoch": 18.014333333333333,
      "grad_norm": 0.015700463205575943,
      "learning_rate": 4.716e-06,
      "loss": 0.0005,
      "step": 1825
    },
    {
      "epoch": 18.022666666666666,
      "grad_norm": 0.012755331583321095,
      "learning_rate": 4.616e-06,
      "loss": 0.0005,
      "step": 1850
    },
    {
      "epoch": 18.031,
      "grad_norm": 0.018821561709046364,
      "learning_rate": 4.5160000000000005e-06,
      "loss": 0.0004,
      "step": 1875
    },
    {
      "epoch": 19.006333333333334,
      "grad_norm": 0.016657203435897827,
      "learning_rate": 4.416000000000001e-06,
      "loss": 0.0004,
      "step": 1900
    },
    {
      "epoch": 19.014666666666667,
      "grad_norm": 0.016449443995952606,
      "learning_rate": 4.316e-06,
      "loss": 0.0005,
      "step": 1925
    },
    {
      "epoch": 19.023,
      "grad_norm": 0.015951134264469147,
      "learning_rate": 4.216e-06,
      "loss": 0.0008,
      "step": 1950
    },
    {
      "epoch": 19.031333333333333,
      "grad_norm": 0.013866595923900604,
      "learning_rate": 4.116000000000001e-06,
      "loss": 0.0004,
      "step": 1975
    },
    {
      "epoch": 20.006666666666668,
      "grad_norm": 0.01308687124401331,
      "learning_rate": 4.016e-06,
      "loss": 0.0006,
      "step": 2000
    },
    {
      "epoch": 20.015,
      "grad_norm": 0.012808864936232567,
      "learning_rate": 3.916e-06,
      "loss": 0.0004,
      "step": 2025
    },
    {
      "epoch": 20.023333333333333,
      "grad_norm": 0.013972673565149307,
      "learning_rate": 3.816e-06,
      "loss": 0.0004,
      "step": 2050
    },
    {
      "epoch": 20.031666666666666,
      "grad_norm": 0.01527604553848505,
      "learning_rate": 3.716e-06,
      "loss": 0.0003,
      "step": 2075
    },
    {
      "epoch": 21.007,
      "grad_norm": 0.03139733895659447,
      "learning_rate": 3.616e-06,
      "loss": 0.0003,
      "step": 2100
    },
    {
      "epoch": 21.015333333333334,
      "grad_norm": 0.011607775464653969,
      "learning_rate": 3.5160000000000007e-06,
      "loss": 0.0003,
      "step": 2125
    },
    {
      "epoch": 21.023666666666667,
      "grad_norm": 0.012830990366637707,
      "learning_rate": 3.4160000000000004e-06,
      "loss": 0.0004,
      "step": 2150
    },
    {
      "epoch": 21.032,
      "grad_norm": 0.013420588336884975,
      "learning_rate": 3.3160000000000005e-06,
      "loss": 0.0004,
      "step": 2175
    },
    {
      "epoch": 22.00733333333333,
      "grad_norm": 0.013782207854092121,
      "learning_rate": 3.216e-06,
      "loss": 0.0003,
      "step": 2200
    },
    {
      "epoch": 22.015666666666668,
      "grad_norm": 0.009835809469223022,
      "learning_rate": 3.1160000000000003e-06,
      "loss": 0.0003,
      "step": 2225
    },
    {
      "epoch": 22.024,
      "grad_norm": 0.012228522449731827,
      "learning_rate": 3.016e-06,
      "loss": 0.0003,
      "step": 2250
    },
    {
      "epoch": 22.032333333333334,
      "grad_norm": 0.010861766524612904,
      "learning_rate": 2.9160000000000005e-06,
      "loss": 0.0004,
      "step": 2275
    },
    {
      "epoch": 23.007666666666665,
      "grad_norm": 0.0120700066909194,
      "learning_rate": 2.8160000000000002e-06,
      "loss": 0.0003,
      "step": 2300
    },
    {
      "epoch": 23.016,
      "grad_norm": 0.013041774742305279,
      "learning_rate": 2.7160000000000003e-06,
      "loss": 0.0003,
      "step": 2325
    },
    {
      "epoch": 23.024333333333335,
      "grad_norm": 0.01479949988424778,
      "learning_rate": 2.616e-06,
      "loss": 0.0003,
      "step": 2350
    },
    {
      "epoch": 23.032666666666668,
      "grad_norm": 0.010125643573701382,
      "learning_rate": 2.516e-06,
      "loss": 0.0003,
      "step": 2375
    },
    {
      "epoch": 24.008,
      "grad_norm": 0.011404238641262054,
      "learning_rate": 2.4160000000000002e-06,
      "loss": 0.0003,
      "step": 2400
    },
    {
      "epoch": 24.016333333333332,
      "grad_norm": 0.01367497444152832,
      "learning_rate": 2.3160000000000004e-06,
      "loss": 0.0003,
      "step": 2425
    },
    {
      "epoch": 24.02466666666667,
      "grad_norm": 0.0132884681224823,
      "learning_rate": 2.216e-06,
      "loss": 0.0003,
      "step": 2450
    },
    {
      "epoch": 24.033,
      "grad_norm": 0.01917206309735775,
      "learning_rate": 2.116e-06,
      "loss": 0.0003,
      "step": 2475
    },
    {
      "epoch": 25.008333333333333,
      "grad_norm": 0.012251622974872589,
      "learning_rate": 2.0160000000000003e-06,
      "loss": 0.0003,
      "step": 2500
    },
    {
      "epoch": 25.016666666666666,
      "grad_norm": 0.20725885033607483,
      "learning_rate": 1.916e-06,
      "loss": 0.0003,
      "step": 2525
    },
    {
      "epoch": 25.025,
      "grad_norm": 0.01084393635392189,
      "learning_rate": 1.8160000000000003e-06,
      "loss": 0.0003,
      "step": 2550
    },
    {
      "epoch": 26.000333333333334,
      "grad_norm": 0.01044237520545721,
      "learning_rate": 1.7160000000000002e-06,
      "loss": 0.0003,
      "step": 2575
    },
    {
      "epoch": 26.008666666666667,
      "grad_norm": 0.010727889835834503,
      "learning_rate": 1.616e-06,
      "loss": 0.0003,
      "step": 2600
    },
    {
      "epoch": 26.017,
      "grad_norm": 0.009735923260450363,
      "learning_rate": 1.5160000000000002e-06,
      "loss": 0.0003,
      "step": 2625
    },
    {
      "epoch": 26.025333333333332,
      "grad_norm": 0.010894649662077427,
      "learning_rate": 1.416e-06,
      "loss": 0.0003,
      "step": 2650
    },
    {
      "epoch": 27.000666666666667,
      "grad_norm": 0.009900378994643688,
      "learning_rate": 1.316e-06,
      "loss": 0.0002,
      "step": 2675
    },
    {
      "epoch": 27.009,
      "grad_norm": 0.010404952801764011,
      "learning_rate": 1.216e-06,
      "loss": 0.0002,
      "step": 2700
    },
    {
      "epoch": 27.017333333333333,
      "grad_norm": 0.009290441870689392,
      "learning_rate": 1.1160000000000002e-06,
      "loss": 0.0002,
      "step": 2725
    },
    {
      "epoch": 27.025666666666666,
      "grad_norm": 0.011125192977488041,
      "learning_rate": 1.016e-06,
      "loss": 0.0003,
      "step": 2750
    },
    {
      "epoch": 28.001,
      "grad_norm": 0.0103826392441988,
      "learning_rate": 9.160000000000001e-07,
      "loss": 0.0003,
      "step": 2775
    },
    {
      "epoch": 28.009333333333334,
      "grad_norm": 0.010299935936927795,
      "learning_rate": 8.160000000000001e-07,
      "loss": 0.0002,
      "step": 2800
    },
    {
      "epoch": 28.017666666666667,
      "grad_norm": 0.009835168719291687,
      "learning_rate": 7.16e-07,
      "loss": 0.0002,
      "step": 2825
    },
    {
      "epoch": 28.026,
      "grad_norm": 0.009076902642846107,
      "learning_rate": 6.160000000000001e-07,
      "loss": 0.0003,
      "step": 2850
    },
    {
      "epoch": 29.001333333333335,
      "grad_norm": 0.009419168345630169,
      "learning_rate": 5.16e-07,
      "loss": 0.0003,
      "step": 2875
    },
    {
      "epoch": 29.009666666666668,
      "grad_norm": 0.010406610555946827,
      "learning_rate": 4.16e-07,
      "loss": 0.0002,
      "step": 2900
    },
    {
      "epoch": 29.018,
      "grad_norm": 0.009127378463745117,
      "learning_rate": 3.160000000000001e-07,
      "loss": 0.0002,
      "step": 2925
    },
    {
      "epoch": 29.026333333333334,
      "grad_norm": 0.007270839996635914,
      "learning_rate": 2.1600000000000003e-07,
      "loss": 0.0002,
      "step": 2950
    },
    {
      "epoch": 30.001666666666665,
      "grad_norm": 0.009513996541500092,
      "learning_rate": 1.16e-07,
      "loss": 0.0003,
      "step": 2975
    },
    {
      "epoch": 30.01,
      "grad_norm": 0.008983739651739597,
      "learning_rate": 1.6e-08,
      "loss": 0.0002,
      "step": 3000
    },
    {
      "epoch": 30.01,
      "eval_loss": 1.3570799827575684,
      "eval_runtime": 1096.6232,
      "eval_samples_per_second": 2.963,
      "eval_steps_per_second": 0.371,
      "eval_wer": 32.550952630658976,
      "step": 3000
    },
    {
      "epoch": 30.01,
      "step": 3000,
      "total_flos": 2.76089652486144e+19,
      "train_loss": 0.07142243345221505,
      "train_runtime": 18533.1767,
      "train_samples_per_second": 5.18,
      "train_steps_per_second": 0.162
    }
  ],
  "logging_steps": 25,
  "max_steps": 3000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 3000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.76089652486144e+19,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}