{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 223,
  "global_step": 223,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004484304932735426,
      "grad_norm": 0.54296875,
      "learning_rate": 1e-05,
      "loss": 2.2275,
      "step": 1
    },
    {
      "epoch": 0.008968609865470852,
      "grad_norm": 0.5625,
      "learning_rate": 9.955156950672647e-06,
      "loss": 2.2741,
      "step": 2
    },
    {
      "epoch": 0.013452914798206279,
      "grad_norm": 0.54296875,
      "learning_rate": 9.910313901345292e-06,
      "loss": 2.2308,
      "step": 3
    },
    {
      "epoch": 0.017937219730941704,
      "grad_norm": 0.51171875,
      "learning_rate": 9.865470852017938e-06,
      "loss": 2.2035,
      "step": 4
    },
    {
      "epoch": 0.02242152466367713,
      "grad_norm": 0.50390625,
      "learning_rate": 9.820627802690584e-06,
      "loss": 2.2455,
      "step": 5
    },
    {
      "epoch": 0.026905829596412557,
      "grad_norm": 0.51953125,
      "learning_rate": 9.77578475336323e-06,
      "loss": 2.2096,
      "step": 6
    },
    {
      "epoch": 0.03139013452914798,
      "grad_norm": 0.484375,
      "learning_rate": 9.730941704035875e-06,
      "loss": 2.2212,
      "step": 7
    },
    {
      "epoch": 0.03587443946188341,
      "grad_norm": 0.443359375,
      "learning_rate": 9.686098654708521e-06,
      "loss": 2.1279,
      "step": 8
    },
    {
      "epoch": 0.04035874439461883,
      "grad_norm": 0.4453125,
      "learning_rate": 9.641255605381167e-06,
      "loss": 2.1524,
      "step": 9
    },
    {
      "epoch": 0.04484304932735426,
      "grad_norm": 0.42578125,
      "learning_rate": 9.596412556053812e-06,
      "loss": 2.112,
      "step": 10
    },
    {
      "epoch": 0.04932735426008968,
      "grad_norm": 0.41796875,
      "learning_rate": 9.551569506726458e-06,
      "loss": 2.0661,
      "step": 11
    },
    {
      "epoch": 0.053811659192825115,
      "grad_norm": 0.4140625,
      "learning_rate": 9.506726457399104e-06,
      "loss": 2.0643,
      "step": 12
    },
    {
      "epoch": 0.05829596412556054,
      "grad_norm": 0.427734375,
      "learning_rate": 9.46188340807175e-06,
      "loss": 2.0716,
      "step": 13
    },
    {
      "epoch": 0.06278026905829596,
      "grad_norm": 0.41796875,
      "learning_rate": 9.417040358744395e-06,
      "loss": 2.0337,
      "step": 14
    },
    {
      "epoch": 0.06726457399103139,
      "grad_norm": 0.40625,
      "learning_rate": 9.372197309417041e-06,
      "loss": 1.9808,
      "step": 15
    },
    {
      "epoch": 0.07174887892376682,
      "grad_norm": 0.400390625,
      "learning_rate": 9.327354260089687e-06,
      "loss": 1.9436,
      "step": 16
    },
    {
      "epoch": 0.07623318385650224,
      "grad_norm": 0.38671875,
      "learning_rate": 9.282511210762332e-06,
      "loss": 1.9335,
      "step": 17
    },
    {
      "epoch": 0.08071748878923767,
      "grad_norm": 0.380859375,
      "learning_rate": 9.237668161434978e-06,
      "loss": 1.925,
      "step": 18
    },
    {
      "epoch": 0.08520179372197309,
      "grad_norm": 0.3515625,
      "learning_rate": 9.192825112107624e-06,
      "loss": 1.8701,
      "step": 19
    },
    {
      "epoch": 0.08968609865470852,
      "grad_norm": 0.359375,
      "learning_rate": 9.14798206278027e-06,
      "loss": 1.895,
      "step": 20
    },
    {
      "epoch": 0.09417040358744394,
      "grad_norm": 0.349609375,
      "learning_rate": 9.103139013452915e-06,
      "loss": 1.8519,
      "step": 21
    },
    {
      "epoch": 0.09865470852017937,
      "grad_norm": 0.337890625,
      "learning_rate": 9.058295964125561e-06,
      "loss": 1.8597,
      "step": 22
    },
    {
      "epoch": 0.1031390134529148,
      "grad_norm": 0.345703125,
      "learning_rate": 9.013452914798207e-06,
      "loss": 1.8558,
      "step": 23
    },
    {
      "epoch": 0.10762331838565023,
      "grad_norm": 0.31640625,
      "learning_rate": 8.968609865470853e-06,
      "loss": 1.806,
      "step": 24
    },
    {
      "epoch": 0.11210762331838565,
      "grad_norm": 0.3203125,
      "learning_rate": 8.923766816143498e-06,
      "loss": 1.7923,
      "step": 25
    },
    {
      "epoch": 0.11659192825112108,
      "grad_norm": 0.2890625,
      "learning_rate": 8.878923766816144e-06,
      "loss": 1.7465,
      "step": 26
    },
    {
      "epoch": 0.1210762331838565,
      "grad_norm": 0.29296875,
      "learning_rate": 8.83408071748879e-06,
      "loss": 1.7441,
      "step": 27
    },
    {
      "epoch": 0.12556053811659193,
      "grad_norm": 0.31640625,
      "learning_rate": 8.789237668161435e-06,
      "loss": 1.7437,
      "step": 28
    },
    {
      "epoch": 0.13004484304932734,
      "grad_norm": 0.28515625,
      "learning_rate": 8.744394618834081e-06,
      "loss": 1.7009,
      "step": 29
    },
    {
      "epoch": 0.13452914798206278,
      "grad_norm": 0.28125,
      "learning_rate": 8.699551569506727e-06,
      "loss": 1.7144,
      "step": 30
    },
    {
      "epoch": 0.13901345291479822,
      "grad_norm": 0.2734375,
      "learning_rate": 8.654708520179373e-06,
      "loss": 1.7108,
      "step": 31
    },
    {
      "epoch": 0.14349775784753363,
      "grad_norm": 0.26171875,
      "learning_rate": 8.609865470852018e-06,
      "loss": 1.6834,
      "step": 32
    },
    {
      "epoch": 0.14798206278026907,
      "grad_norm": 0.255859375,
      "learning_rate": 8.565022421524664e-06,
      "loss": 1.6749,
      "step": 33
    },
    {
      "epoch": 0.15246636771300448,
      "grad_norm": 0.255859375,
      "learning_rate": 8.52017937219731e-06,
      "loss": 1.6493,
      "step": 34
    },
    {
      "epoch": 0.15695067264573992,
      "grad_norm": 0.2412109375,
      "learning_rate": 8.475336322869956e-06,
      "loss": 1.6519,
      "step": 35
    },
    {
      "epoch": 0.16143497757847533,
      "grad_norm": 0.2451171875,
      "learning_rate": 8.430493273542601e-06,
      "loss": 1.6414,
      "step": 36
    },
    {
      "epoch": 0.16591928251121077,
      "grad_norm": 0.2451171875,
      "learning_rate": 8.385650224215247e-06,
      "loss": 1.5904,
      "step": 37
    },
    {
      "epoch": 0.17040358744394618,
      "grad_norm": 0.25,
      "learning_rate": 8.340807174887893e-06,
      "loss": 1.6295,
      "step": 38
    },
    {
      "epoch": 0.17488789237668162,
      "grad_norm": 0.2470703125,
      "learning_rate": 8.295964125560539e-06,
      "loss": 1.5888,
      "step": 39
    },
    {
      "epoch": 0.17937219730941703,
      "grad_norm": 0.234375,
      "learning_rate": 8.251121076233184e-06,
      "loss": 1.5606,
      "step": 40
    },
    {
      "epoch": 0.18385650224215247,
      "grad_norm": 0.244140625,
      "learning_rate": 8.20627802690583e-06,
      "loss": 1.6187,
      "step": 41
    },
    {
      "epoch": 0.18834080717488788,
      "grad_norm": 0.2353515625,
      "learning_rate": 8.161434977578476e-06,
      "loss": 1.5723,
      "step": 42
    },
    {
      "epoch": 0.19282511210762332,
      "grad_norm": 0.2314453125,
      "learning_rate": 8.116591928251121e-06,
      "loss": 1.562,
      "step": 43
    },
    {
      "epoch": 0.19730941704035873,
      "grad_norm": 0.2275390625,
      "learning_rate": 8.071748878923767e-06,
      "loss": 1.5383,
      "step": 44
    },
    {
      "epoch": 0.20179372197309417,
      "grad_norm": 0.2119140625,
      "learning_rate": 8.026905829596413e-06,
      "loss": 1.5229,
      "step": 45
    },
    {
      "epoch": 0.2062780269058296,
      "grad_norm": 0.2177734375,
      "learning_rate": 7.982062780269059e-06,
      "loss": 1.5122,
      "step": 46
    },
    {
      "epoch": 0.21076233183856502,
      "grad_norm": 0.205078125,
      "learning_rate": 7.937219730941704e-06,
      "loss": 1.4984,
      "step": 47
    },
    {
      "epoch": 0.21524663677130046,
      "grad_norm": 0.22265625,
      "learning_rate": 7.89237668161435e-06,
      "loss": 1.529,
      "step": 48
    },
    {
      "epoch": 0.21973094170403587,
      "grad_norm": 0.203125,
      "learning_rate": 7.847533632286996e-06,
      "loss": 1.5143,
      "step": 49
    },
    {
      "epoch": 0.2242152466367713,
      "grad_norm": 0.2294921875,
      "learning_rate": 7.802690582959642e-06,
      "loss": 1.5457,
      "step": 50
    },
    {
      "epoch": 0.22869955156950672,
      "grad_norm": 0.2041015625,
      "learning_rate": 7.757847533632287e-06,
      "loss": 1.4941,
      "step": 51
    },
    {
      "epoch": 0.23318385650224216,
      "grad_norm": 0.201171875,
      "learning_rate": 7.713004484304933e-06,
      "loss": 1.4835,
      "step": 52
    },
    {
      "epoch": 0.23766816143497757,
      "grad_norm": 0.1962890625,
      "learning_rate": 7.668161434977579e-06,
      "loss": 1.5036,
      "step": 53
    },
    {
      "epoch": 0.242152466367713,
      "grad_norm": 0.2001953125,
      "learning_rate": 7.6233183856502244e-06,
      "loss": 1.4446,
      "step": 54
    },
    {
      "epoch": 0.24663677130044842,
      "grad_norm": 0.19140625,
      "learning_rate": 7.57847533632287e-06,
      "loss": 1.4724,
      "step": 55
    },
    {
      "epoch": 0.25112107623318386,
      "grad_norm": 0.1923828125,
      "learning_rate": 7.533632286995516e-06,
      "loss": 1.4845,
      "step": 56
    },
    {
      "epoch": 0.2556053811659193,
      "grad_norm": 0.201171875,
      "learning_rate": 7.4887892376681625e-06,
      "loss": 1.4574,
      "step": 57
    },
    {
      "epoch": 0.2600896860986547,
      "grad_norm": 0.181640625,
      "learning_rate": 7.443946188340808e-06,
      "loss": 1.4404,
      "step": 58
    },
    {
      "epoch": 0.2645739910313901,
      "grad_norm": 0.1982421875,
      "learning_rate": 7.399103139013454e-06,
      "loss": 1.4726,
      "step": 59
    },
    {
      "epoch": 0.26905829596412556,
      "grad_norm": 0.1787109375,
      "learning_rate": 7.3542600896861e-06,
      "loss": 1.4343,
      "step": 60
    },
    {
      "epoch": 0.273542600896861,
      "grad_norm": 0.1904296875,
      "learning_rate": 7.309417040358745e-06,
      "loss": 1.4508,
      "step": 61
    },
    {
      "epoch": 0.27802690582959644,
      "grad_norm": 0.1865234375,
      "learning_rate": 7.26457399103139e-06,
      "loss": 1.4306,
      "step": 62
    },
    {
      "epoch": 0.2825112107623318,
      "grad_norm": 0.19140625,
      "learning_rate": 7.219730941704036e-06,
      "loss": 1.4158,
      "step": 63
    },
    {
      "epoch": 0.28699551569506726,
      "grad_norm": 0.1962890625,
      "learning_rate": 7.174887892376682e-06,
      "loss": 1.4418,
      "step": 64
    },
    {
      "epoch": 0.2914798206278027,
      "grad_norm": 0.1845703125,
      "learning_rate": 7.1300448430493275e-06,
      "loss": 1.4084,
      "step": 65
    },
    {
      "epoch": 0.29596412556053814,
      "grad_norm": 0.197265625,
      "learning_rate": 7.085201793721974e-06,
      "loss": 1.4158,
      "step": 66
    },
    {
      "epoch": 0.3004484304932735,
      "grad_norm": 0.1748046875,
      "learning_rate": 7.04035874439462e-06,
      "loss": 1.4305,
      "step": 67
    },
    {
      "epoch": 0.30493273542600896,
      "grad_norm": 0.1826171875,
      "learning_rate": 6.9955156950672655e-06,
      "loss": 1.4109,
      "step": 68
    },
    {
      "epoch": 0.3094170403587444,
      "grad_norm": 0.181640625,
      "learning_rate": 6.950672645739911e-06,
      "loss": 1.4452,
      "step": 69
    },
    {
      "epoch": 0.31390134529147984,
      "grad_norm": 0.18359375,
      "learning_rate": 6.905829596412557e-06,
      "loss": 1.423,
      "step": 70
    },
    {
      "epoch": 0.3183856502242152,
      "grad_norm": 0.171875,
      "learning_rate": 6.860986547085202e-06,
      "loss": 1.4077,
      "step": 71
    },
    {
      "epoch": 0.32286995515695066,
      "grad_norm": 0.173828125,
      "learning_rate": 6.8161434977578476e-06,
      "loss": 1.4084,
      "step": 72
    },
    {
      "epoch": 0.3273542600896861,
      "grad_norm": 0.181640625,
      "learning_rate": 6.771300448430493e-06,
      "loss": 1.3865,
      "step": 73
    },
    {
      "epoch": 0.33183856502242154,
      "grad_norm": 0.162109375,
      "learning_rate": 6.72645739910314e-06,
      "loss": 1.3746,
      "step": 74
    },
    {
      "epoch": 0.336322869955157,
      "grad_norm": 0.18359375,
      "learning_rate": 6.681614349775786e-06,
      "loss": 1.3825,
      "step": 75
    },
    {
      "epoch": 0.34080717488789236,
      "grad_norm": 0.1845703125,
      "learning_rate": 6.636771300448431e-06,
      "loss": 1.3663,
      "step": 76
    },
    {
      "epoch": 0.3452914798206278,
      "grad_norm": 0.171875,
      "learning_rate": 6.591928251121077e-06,
      "loss": 1.3589,
      "step": 77
    },
    {
      "epoch": 0.34977578475336324,
      "grad_norm": 0.181640625,
      "learning_rate": 6.547085201793723e-06,
      "loss": 1.3903,
      "step": 78
    },
    {
      "epoch": 0.3542600896860987,
      "grad_norm": 0.173828125,
      "learning_rate": 6.5022421524663685e-06,
      "loss": 1.3759,
      "step": 79
    },
    {
      "epoch": 0.35874439461883406,
      "grad_norm": 0.169921875,
      "learning_rate": 6.457399103139013e-06,
      "loss": 1.3475,
      "step": 80
    },
    {
      "epoch": 0.3632286995515695,
      "grad_norm": 0.1865234375,
      "learning_rate": 6.412556053811659e-06,
      "loss": 1.3864,
      "step": 81
    },
    {
      "epoch": 0.36771300448430494,
      "grad_norm": 0.1845703125,
      "learning_rate": 6.367713004484305e-06,
      "loss": 1.3477,
      "step": 82
    },
    {
      "epoch": 0.3721973094170404,
      "grad_norm": 0.171875,
      "learning_rate": 6.322869955156951e-06,
      "loss": 1.3686,
      "step": 83
    },
    {
      "epoch": 0.37668161434977576,
      "grad_norm": 0.1806640625,
      "learning_rate": 6.278026905829597e-06,
      "loss": 1.3655,
      "step": 84
    },
    {
      "epoch": 0.3811659192825112,
      "grad_norm": 0.18359375,
      "learning_rate": 6.233183856502243e-06,
      "loss": 1.3452,
      "step": 85
    },
    {
      "epoch": 0.38565022421524664,
      "grad_norm": 0.185546875,
      "learning_rate": 6.188340807174889e-06,
      "loss": 1.3658,
      "step": 86
    },
    {
      "epoch": 0.3901345291479821,
      "grad_norm": 0.1787109375,
      "learning_rate": 6.143497757847534e-06,
      "loss": 1.362,
      "step": 87
    },
    {
      "epoch": 0.39461883408071746,
      "grad_norm": 0.169921875,
      "learning_rate": 6.09865470852018e-06,
      "loss": 1.3745,
      "step": 88
    },
    {
      "epoch": 0.3991031390134529,
      "grad_norm": 0.17578125,
      "learning_rate": 6.053811659192825e-06,
      "loss": 1.376,
      "step": 89
    },
    {
      "epoch": 0.40358744394618834,
      "grad_norm": 0.1748046875,
      "learning_rate": 6.008968609865471e-06,
      "loss": 1.3155,
      "step": 90
    },
    {
      "epoch": 0.4080717488789238,
      "grad_norm": 0.1796875,
      "learning_rate": 5.964125560538116e-06,
      "loss": 1.3238,
      "step": 91
    },
    {
      "epoch": 0.4125560538116592,
      "grad_norm": 0.1796875,
      "learning_rate": 5.919282511210763e-06,
      "loss": 1.3415,
      "step": 92
    },
    {
      "epoch": 0.4170403587443946,
      "grad_norm": 0.2021484375,
      "learning_rate": 5.874439461883409e-06,
      "loss": 1.3439,
      "step": 93
    },
    {
      "epoch": 0.42152466367713004,
      "grad_norm": 0.1787109375,
      "learning_rate": 5.8295964125560544e-06,
      "loss": 1.3371,
      "step": 94
    },
    {
      "epoch": 0.4260089686098655,
      "grad_norm": 0.173828125,
      "learning_rate": 5.7847533632287e-06,
      "loss": 1.3286,
      "step": 95
    },
    {
      "epoch": 0.4304932735426009,
      "grad_norm": 0.1904296875,
      "learning_rate": 5.739910313901346e-06,
      "loss": 1.3182,
      "step": 96
    },
    {
      "epoch": 0.4349775784753363,
      "grad_norm": 0.169921875,
      "learning_rate": 5.695067264573992e-06,
      "loss": 1.306,
      "step": 97
    },
    {
      "epoch": 0.43946188340807174,
      "grad_norm": 0.162109375,
      "learning_rate": 5.6502242152466365e-06,
      "loss": 1.3622,
      "step": 98
    },
    {
      "epoch": 0.4439461883408072,
      "grad_norm": 0.17578125,
      "learning_rate": 5.605381165919282e-06,
      "loss": 1.3065,
      "step": 99
    },
    {
      "epoch": 0.4484304932735426,
      "grad_norm": 0.1611328125,
      "learning_rate": 5.560538116591929e-06,
      "loss": 1.3182,
      "step": 100
    },
    {
      "epoch": 0.452914798206278,
      "grad_norm": 0.1806640625,
      "learning_rate": 5.5156950672645745e-06,
      "loss": 1.3018,
      "step": 101
    },
    {
      "epoch": 0.45739910313901344,
      "grad_norm": 0.1767578125,
      "learning_rate": 5.47085201793722e-06,
      "loss": 1.2976,
      "step": 102
    },
    {
      "epoch": 0.4618834080717489,
      "grad_norm": 0.171875,
      "learning_rate": 5.426008968609866e-06,
      "loss": 1.3101,
      "step": 103
    },
    {
      "epoch": 0.4663677130044843,
      "grad_norm": 0.173828125,
      "learning_rate": 5.381165919282512e-06,
      "loss": 1.315,
      "step": 104
    },
    {
      "epoch": 0.47085201793721976,
      "grad_norm": 0.173828125,
      "learning_rate": 5.3363228699551574e-06,
      "loss": 1.2834,
      "step": 105
    },
    {
      "epoch": 0.47533632286995514,
      "grad_norm": 0.1962890625,
      "learning_rate": 5.291479820627803e-06,
      "loss": 1.3312,
      "step": 106
    },
    {
      "epoch": 0.4798206278026906,
      "grad_norm": 0.1650390625,
      "learning_rate": 5.246636771300448e-06,
      "loss": 1.2831,
      "step": 107
    },
    {
      "epoch": 0.484304932735426,
      "grad_norm": 0.18359375,
      "learning_rate": 5.201793721973094e-06,
      "loss": 1.3327,
      "step": 108
    },
    {
      "epoch": 0.48878923766816146,
      "grad_norm": 0.1767578125,
      "learning_rate": 5.156950672645741e-06,
      "loss": 1.3041,
      "step": 109
    },
    {
      "epoch": 0.49327354260089684,
      "grad_norm": 0.1767578125,
      "learning_rate": 5.112107623318386e-06,
      "loss": 1.3387,
      "step": 110
    },
    {
      "epoch": 0.4977578475336323,
      "grad_norm": 0.1669921875,
      "learning_rate": 5.067264573991032e-06,
      "loss": 1.3188,
      "step": 111
    },
    {
      "epoch": 0.5022421524663677,
      "grad_norm": 0.1796875,
      "learning_rate": 5.0224215246636775e-06,
      "loss": 1.3294,
      "step": 112
    },
    {
      "epoch": 0.5067264573991032,
      "grad_norm": 0.18359375,
      "learning_rate": 4.977578475336323e-06,
      "loss": 1.2988,
      "step": 113
    },
    {
      "epoch": 0.5112107623318386,
      "grad_norm": 0.1708984375,
      "learning_rate": 4.932735426008969e-06,
      "loss": 1.2789,
      "step": 114
    },
    {
      "epoch": 0.515695067264574,
      "grad_norm": 0.1640625,
      "learning_rate": 4.887892376681615e-06,
      "loss": 1.3185,
      "step": 115
    },
    {
      "epoch": 0.5201793721973094,
      "grad_norm": 0.166015625,
      "learning_rate": 4.8430493273542605e-06,
      "loss": 1.2749,
      "step": 116
    },
    {
      "epoch": 0.5246636771300448,
      "grad_norm": 0.1689453125,
      "learning_rate": 4.798206278026906e-06,
      "loss": 1.3056,
      "step": 117
    },
    {
      "epoch": 0.5291479820627802,
      "grad_norm": 0.162109375,
      "learning_rate": 4.753363228699552e-06,
      "loss": 1.2906,
      "step": 118
    },
    {
      "epoch": 0.5336322869955157,
      "grad_norm": 0.1787109375,
      "learning_rate": 4.708520179372198e-06,
      "loss": 1.2802,
      "step": 119
    },
    {
      "epoch": 0.5381165919282511,
      "grad_norm": 0.1640625,
      "learning_rate": 4.663677130044843e-06,
      "loss": 1.2854,
      "step": 120
    },
    {
      "epoch": 0.5426008968609866,
      "grad_norm": 0.1787109375,
      "learning_rate": 4.618834080717489e-06,
      "loss": 1.2822,
      "step": 121
    },
    {
      "epoch": 0.547085201793722,
      "grad_norm": 0.1591796875,
      "learning_rate": 4.573991031390135e-06,
      "loss": 1.2886,
      "step": 122
    },
    {
      "epoch": 0.5515695067264574,
      "grad_norm": 0.1767578125,
      "learning_rate": 4.5291479820627806e-06,
      "loss": 1.2908,
      "step": 123
    },
    {
      "epoch": 0.5560538116591929,
      "grad_norm": 0.1640625,
      "learning_rate": 4.484304932735426e-06,
      "loss": 1.252,
      "step": 124
    },
    {
      "epoch": 0.5605381165919282,
      "grad_norm": 0.1689453125,
      "learning_rate": 4.439461883408072e-06,
      "loss": 1.2675,
      "step": 125
    },
    {
      "epoch": 0.5650224215246636,
      "grad_norm": 0.1708984375,
      "learning_rate": 4.394618834080718e-06,
      "loss": 1.3111,
      "step": 126
    },
    {
      "epoch": 0.5695067264573991,
      "grad_norm": 0.1865234375,
      "learning_rate": 4.3497757847533635e-06,
      "loss": 1.2815,
      "step": 127
    },
    {
      "epoch": 0.5739910313901345,
      "grad_norm": 0.17578125,
      "learning_rate": 4.304932735426009e-06,
      "loss": 1.2813,
      "step": 128
    },
    {
      "epoch": 0.57847533632287,
      "grad_norm": 0.169921875,
      "learning_rate": 4.260089686098655e-06,
      "loss": 1.2951,
      "step": 129
    },
    {
      "epoch": 0.5829596412556054,
      "grad_norm": 0.1650390625,
      "learning_rate": 4.215246636771301e-06,
      "loss": 1.3015,
      "step": 130
    },
    {
      "epoch": 0.5874439461883408,
      "grad_norm": 0.1708984375,
      "learning_rate": 4.170403587443946e-06,
      "loss": 1.2626,
      "step": 131
    },
    {
      "epoch": 0.5919282511210763,
      "grad_norm": 0.1806640625,
      "learning_rate": 4.125560538116592e-06,
      "loss": 1.2303,
      "step": 132
    },
    {
      "epoch": 0.5964125560538116,
      "grad_norm": 0.1787109375,
      "learning_rate": 4.080717488789238e-06,
      "loss": 1.2687,
      "step": 133
    },
    {
      "epoch": 0.600896860986547,
      "grad_norm": 0.177734375,
      "learning_rate": 4.0358744394618836e-06,
      "loss": 1.2653,
      "step": 134
    },
    {
      "epoch": 0.6053811659192825,
      "grad_norm": 0.1806640625,
      "learning_rate": 3.991031390134529e-06,
      "loss": 1.2764,
      "step": 135
    },
    {
      "epoch": 0.6098654708520179,
      "grad_norm": 0.197265625,
      "learning_rate": 3.946188340807175e-06,
      "loss": 1.2828,
      "step": 136
    },
    {
      "epoch": 0.6143497757847534,
      "grad_norm": 0.1845703125,
      "learning_rate": 3.901345291479821e-06,
      "loss": 1.2714,
      "step": 137
    },
    {
      "epoch": 0.6188340807174888,
      "grad_norm": 0.1708984375,
      "learning_rate": 3.8565022421524665e-06,
      "loss": 1.2921,
      "step": 138
    },
    {
      "epoch": 0.6233183856502242,
      "grad_norm": 0.17578125,
      "learning_rate": 3.8116591928251122e-06,
      "loss": 1.2781,
      "step": 139
    },
    {
      "epoch": 0.6278026905829597,
      "grad_norm": 0.1689453125,
      "learning_rate": 3.766816143497758e-06,
      "loss": 1.2432,
      "step": 140
    },
    {
      "epoch": 0.6322869955156951,
      "grad_norm": 0.2119140625,
      "learning_rate": 3.721973094170404e-06,
      "loss": 1.2984,
      "step": 141
    },
    {
      "epoch": 0.6367713004484304,
      "grad_norm": 0.1796875,
      "learning_rate": 3.67713004484305e-06,
      "loss": 1.2705,
      "step": 142
    },
    {
      "epoch": 0.6412556053811659,
      "grad_norm": 0.17578125,
      "learning_rate": 3.632286995515695e-06,
      "loss": 1.2889,
      "step": 143
    },
    {
      "epoch": 0.6457399103139013,
      "grad_norm": 0.185546875,
      "learning_rate": 3.587443946188341e-06,
      "loss": 1.2673,
      "step": 144
    },
    {
      "epoch": 0.6502242152466368,
      "grad_norm": 0.185546875,
      "learning_rate": 3.542600896860987e-06,
      "loss": 1.2667,
      "step": 145
    },
    {
      "epoch": 0.6547085201793722,
      "grad_norm": 0.1826171875,
      "learning_rate": 3.4977578475336327e-06,
      "loss": 1.2724,
      "step": 146
    },
    {
      "epoch": 0.6591928251121076,
      "grad_norm": 0.16796875,
      "learning_rate": 3.4529147982062785e-06,
      "loss": 1.2479,
      "step": 147
    },
    {
      "epoch": 0.6636771300448431,
      "grad_norm": 0.1611328125,
      "learning_rate": 3.4080717488789238e-06,
      "loss": 1.2538,
      "step": 148
    },
    {
      "epoch": 0.6681614349775785,
      "grad_norm": 0.173828125,
      "learning_rate": 3.36322869955157e-06,
      "loss": 1.2886,
      "step": 149
    },
    {
      "epoch": 0.672645739910314,
      "grad_norm": 0.16796875,
      "learning_rate": 3.3183856502242157e-06,
      "loss": 1.2383,
      "step": 150
    },
    {
      "epoch": 0.6771300448430493,
      "grad_norm": 0.1953125,
      "learning_rate": 3.2735426008968614e-06,
      "loss": 1.2533,
      "step": 151
    },
    {
      "epoch": 0.6816143497757847,
      "grad_norm": 0.177734375,
      "learning_rate": 3.2286995515695067e-06,
      "loss": 1.2204,
      "step": 152
    },
    {
      "epoch": 0.6860986547085202,
      "grad_norm": 0.2236328125,
      "learning_rate": 3.1838565022421524e-06,
      "loss": 1.2802,
      "step": 153
    },
    {
      "epoch": 0.6905829596412556,
      "grad_norm": 0.1943359375,
      "learning_rate": 3.1390134529147986e-06,
      "loss": 1.2289,
      "step": 154
    },
    {
      "epoch": 0.695067264573991,
      "grad_norm": 0.1884765625,
      "learning_rate": 3.0941704035874443e-06,
      "loss": 1.2532,
      "step": 155
    },
    {
      "epoch": 0.6995515695067265,
      "grad_norm": 0.18359375,
      "learning_rate": 3.04932735426009e-06,
      "loss": 1.2419,
      "step": 156
    },
    {
      "epoch": 0.7040358744394619,
      "grad_norm": 0.1728515625,
      "learning_rate": 3.0044843049327353e-06,
      "loss": 1.2441,
      "step": 157
    },
    {
      "epoch": 0.7085201793721974,
      "grad_norm": 0.1748046875,
      "learning_rate": 2.9596412556053815e-06,
      "loss": 1.2687,
      "step": 158
    },
    {
      "epoch": 0.7130044843049327,
      "grad_norm": 0.1748046875,
      "learning_rate": 2.9147982062780272e-06,
      "loss": 1.2804,
      "step": 159
    },
    {
      "epoch": 0.7174887892376681,
      "grad_norm": 0.1689453125,
      "learning_rate": 2.869955156950673e-06,
      "loss": 1.2303,
      "step": 160
    },
    {
      "epoch": 0.7219730941704036,
      "grad_norm": 0.181640625,
      "learning_rate": 2.8251121076233182e-06,
      "loss": 1.2568,
      "step": 161
    },
    {
      "epoch": 0.726457399103139,
      "grad_norm": 0.1845703125,
      "learning_rate": 2.7802690582959644e-06,
      "loss": 1.2491,
      "step": 162
    },
    {
      "epoch": 0.7309417040358744,
      "grad_norm": 0.169921875,
      "learning_rate": 2.73542600896861e-06,
      "loss": 1.2457,
      "step": 163
    },
    {
      "epoch": 0.7354260089686099,
      "grad_norm": 0.1708984375,
      "learning_rate": 2.690582959641256e-06,
      "loss": 1.2436,
      "step": 164
    },
    {
      "epoch": 0.7399103139013453,
      "grad_norm": 0.1708984375,
      "learning_rate": 2.6457399103139016e-06,
      "loss": 1.2438,
      "step": 165
    },
    {
      "epoch": 0.7443946188340808,
      "grad_norm": 0.1884765625,
      "learning_rate": 2.600896860986547e-06,
      "loss": 1.2719,
      "step": 166
    },
    {
      "epoch": 0.7488789237668162,
      "grad_norm": 0.1943359375,
      "learning_rate": 2.556053811659193e-06,
      "loss": 1.2614,
      "step": 167
    },
    {
      "epoch": 0.7533632286995515,
      "grad_norm": 0.1748046875,
      "learning_rate": 2.5112107623318388e-06,
      "loss": 1.285,
      "step": 168
    },
    {
      "epoch": 0.757847533632287,
      "grad_norm": 0.171875,
      "learning_rate": 2.4663677130044845e-06,
      "loss": 1.249,
      "step": 169
    },
    {
      "epoch": 0.7623318385650224,
      "grad_norm": 0.1826171875,
      "learning_rate": 2.4215246636771302e-06,
      "loss": 1.2864,
      "step": 170
    },
    {
      "epoch": 0.7668161434977578,
      "grad_norm": 0.1845703125,
      "learning_rate": 2.376681614349776e-06,
      "loss": 1.227,
      "step": 171
    },
    {
      "epoch": 0.7713004484304933,
      "grad_norm": 0.17578125,
      "learning_rate": 2.3318385650224217e-06,
      "loss": 1.2425,
      "step": 172
    },
    {
      "epoch": 0.7757847533632287,
      "grad_norm": 0.18359375,
      "learning_rate": 2.2869955156950674e-06,
      "loss": 1.2429,
      "step": 173
    },
    {
      "epoch": 0.7802690582959642,
      "grad_norm": 0.1728515625,
      "learning_rate": 2.242152466367713e-06,
      "loss": 1.2689,
      "step": 174
    },
    {
      "epoch": 0.7847533632286996,
      "grad_norm": 0.166015625,
      "learning_rate": 2.197309417040359e-06,
      "loss": 1.2351,
      "step": 175
    },
    {
      "epoch": 0.7892376681614349,
      "grad_norm": 0.1806640625,
      "learning_rate": 2.1524663677130046e-06,
      "loss": 1.2556,
      "step": 176
    },
    {
      "epoch": 0.7937219730941704,
      "grad_norm": 0.1708984375,
      "learning_rate": 2.1076233183856503e-06,
      "loss": 1.2379,
      "step": 177
    },
    {
      "epoch": 0.7982062780269058,
      "grad_norm": 0.1728515625,
      "learning_rate": 2.062780269058296e-06,
      "loss": 1.2595,
      "step": 178
    },
    {
      "epoch": 0.8026905829596412,
      "grad_norm": 0.1787109375,
      "learning_rate": 2.0179372197309418e-06,
      "loss": 1.237,
      "step": 179
    },
    {
      "epoch": 0.8071748878923767,
      "grad_norm": 0.1845703125,
      "learning_rate": 1.9730941704035875e-06,
      "loss": 1.2507,
      "step": 180
    },
    {
      "epoch": 0.8116591928251121,
      "grad_norm": 0.1796875,
      "learning_rate": 1.9282511210762332e-06,
      "loss": 1.2416,
      "step": 181
    },
    {
      "epoch": 0.8161434977578476,
      "grad_norm": 0.169921875,
      "learning_rate": 1.883408071748879e-06,
      "loss": 1.2529,
      "step": 182
    },
    {
      "epoch": 0.820627802690583,
      "grad_norm": 0.1796875,
      "learning_rate": 1.838565022421525e-06,
      "loss": 1.2119,
      "step": 183
    },
    {
      "epoch": 0.8251121076233184,
      "grad_norm": 0.1943359375,
      "learning_rate": 1.7937219730941704e-06,
      "loss": 1.2747,
      "step": 184
    },
    {
      "epoch": 0.8295964125560538,
      "grad_norm": 0.177734375,
      "learning_rate": 1.7488789237668164e-06,
      "loss": 1.2351,
      "step": 185
    },
    {
      "epoch": 0.8340807174887892,
      "grad_norm": 0.1962890625,
      "learning_rate": 1.7040358744394619e-06,
      "loss": 1.2667,
      "step": 186
    },
    {
      "epoch": 0.8385650224215246,
      "grad_norm": 0.166015625,
      "learning_rate": 1.6591928251121078e-06,
      "loss": 1.2366,
      "step": 187
    },
    {
      "epoch": 0.8430493273542601,
      "grad_norm": 0.181640625,
      "learning_rate": 1.6143497757847533e-06,
      "loss": 1.2678,
      "step": 188
    },
    {
      "epoch": 0.8475336322869955,
      "grad_norm": 0.1728515625,
      "learning_rate": 1.5695067264573993e-06,
      "loss": 1.2341,
      "step": 189
    },
    {
      "epoch": 0.852017937219731,
      "grad_norm": 0.1845703125,
      "learning_rate": 1.524663677130045e-06,
      "loss": 1.2729,
      "step": 190
    },
    {
      "epoch": 0.8565022421524664,
      "grad_norm": 0.1767578125,
      "learning_rate": 1.4798206278026907e-06,
      "loss": 1.2554,
      "step": 191
    },
    {
      "epoch": 0.8609865470852018,
      "grad_norm": 0.1884765625,
      "learning_rate": 1.4349775784753365e-06,
      "loss": 1.2458,
      "step": 192
    },
    {
      "epoch": 0.8654708520179372,
      "grad_norm": 0.1943359375,
      "learning_rate": 1.3901345291479822e-06,
      "loss": 1.2548,
      "step": 193
    },
    {
      "epoch": 0.8699551569506726,
      "grad_norm": 0.1806640625,
      "learning_rate": 1.345291479820628e-06,
      "loss": 1.249,
      "step": 194
    },
    {
      "epoch": 0.874439461883408,
      "grad_norm": 0.1767578125,
      "learning_rate": 1.3004484304932734e-06,
      "loss": 1.2348,
      "step": 195
    },
    {
      "epoch": 0.8789237668161435,
      "grad_norm": 0.17578125,
      "learning_rate": 1.2556053811659194e-06,
      "loss": 1.2901,
      "step": 196
    },
    {
      "epoch": 0.8834080717488789,
      "grad_norm": 0.177734375,
      "learning_rate": 1.2107623318385651e-06,
      "loss": 1.2534,
      "step": 197
    },
    {
      "epoch": 0.8878923766816144,
      "grad_norm": 0.1748046875,
      "learning_rate": 1.1659192825112108e-06,
      "loss": 1.2501,
      "step": 198
    },
    {
      "epoch": 0.8923766816143498,
      "grad_norm": 0.181640625,
      "learning_rate": 1.1210762331838566e-06,
      "loss": 1.2476,
      "step": 199
    },
    {
      "epoch": 0.8968609865470852,
      "grad_norm": 0.1650390625,
      "learning_rate": 1.0762331838565023e-06,
      "loss": 1.2431,
      "step": 200
    },
    {
      "epoch": 0.9013452914798207,
      "grad_norm": 0.1767578125,
      "learning_rate": 1.031390134529148e-06,
      "loss": 1.2111,
      "step": 201
    },
    {
      "epoch": 0.905829596412556,
      "grad_norm": 0.1767578125,
      "learning_rate": 9.865470852017938e-07,
      "loss": 1.2568,
      "step": 202
    },
    {
      "epoch": 0.9103139013452914,
      "grad_norm": 0.173828125,
      "learning_rate": 9.417040358744395e-07,
      "loss": 1.2448,
      "step": 203
    },
    {
      "epoch": 0.9147982062780269,
      "grad_norm": 0.1708984375,
      "learning_rate": 8.968609865470852e-07,
      "loss": 1.2646,
      "step": 204
    },
    {
      "epoch": 0.9192825112107623,
      "grad_norm": 0.1796875,
      "learning_rate": 8.520179372197309e-07,
      "loss": 1.2353,
      "step": 205
    },
    {
      "epoch": 0.9237668161434978,
      "grad_norm": 0.201171875,
      "learning_rate": 8.071748878923767e-07,
      "loss": 1.2665,
      "step": 206
    },
    {
      "epoch": 0.9282511210762332,
      "grad_norm": 0.173828125,
      "learning_rate": 7.623318385650225e-07,
      "loss": 1.2371,
      "step": 207
    },
    {
      "epoch": 0.9327354260089686,
      "grad_norm": 0.171875,
      "learning_rate": 7.174887892376682e-07,
      "loss": 1.2212,
      "step": 208
    },
    {
      "epoch": 0.9372197309417041,
      "grad_norm": 0.1748046875,
      "learning_rate": 6.72645739910314e-07,
      "loss": 1.2678,
      "step": 209
    },
    {
      "epoch": 0.9417040358744395,
      "grad_norm": 0.189453125,
      "learning_rate": 6.278026905829597e-07,
      "loss": 1.2491,
      "step": 210
    },
    {
      "epoch": 0.9461883408071748,
      "grad_norm": 0.177734375,
      "learning_rate": 5.829596412556054e-07,
      "loss": 1.2474,
      "step": 211
    },
    {
      "epoch": 0.9506726457399103,
      "grad_norm": 0.1875,
      "learning_rate": 5.381165919282512e-07,
      "loss": 1.2555,
      "step": 212
    },
    {
      "epoch": 0.9551569506726457,
      "grad_norm": 0.1806640625,
      "learning_rate": 4.932735426008969e-07,
      "loss": 1.2419,
      "step": 213
    },
    {
      "epoch": 0.9596412556053812,
      "grad_norm": 0.169921875,
      "learning_rate": 4.484304932735426e-07,
      "loss": 1.2552,
      "step": 214
    },
    {
      "epoch": 0.9641255605381166,
      "grad_norm": 0.1630859375,
      "learning_rate": 4.0358744394618834e-07,
      "loss": 1.2409,
      "step": 215
    },
    {
      "epoch": 0.968609865470852,
      "grad_norm": 0.1728515625,
      "learning_rate": 3.587443946188341e-07,
      "loss": 1.2568,
      "step": 216
    },
    {
      "epoch": 0.9730941704035875,
      "grad_norm": 0.1767578125,
      "learning_rate": 3.1390134529147985e-07,
      "loss": 1.2724,
      "step": 217
    },
    {
      "epoch": 0.9775784753363229,
      "grad_norm": 0.1884765625,
      "learning_rate": 2.690582959641256e-07,
      "loss": 1.2111,
      "step": 218
    },
    {
      "epoch": 0.9820627802690582,
      "grad_norm": 0.1708984375,
      "learning_rate": 2.242152466367713e-07,
      "loss": 1.2522,
      "step": 219
    },
    {
      "epoch": 0.9865470852017937,
      "grad_norm": 0.1787109375,
      "learning_rate": 1.7937219730941706e-07,
      "loss": 1.2562,
      "step": 220
    },
    {
      "epoch": 0.9910313901345291,
      "grad_norm": 0.1748046875,
      "learning_rate": 1.345291479820628e-07,
      "loss": 1.2403,
      "step": 221
    },
    {
      "epoch": 0.9955156950672646,
      "grad_norm": 0.16796875,
      "learning_rate": 8.968609865470853e-08,
      "loss": 1.2427,
      "step": 222
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.173828125,
      "learning_rate": 4.4843049327354265e-08,
      "loss": 1.2616,
      "step": 223
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.2369356155395508,
      "eval_runtime": 12.4002,
      "eval_samples_per_second": 2.742,
      "eval_steps_per_second": 0.403,
      "step": 223
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 223,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 0,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 7.646783793403003e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}