{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 0,
  "global_step": 222,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0045045045045045045,
      "grad_norm": 3.4618234634399414,
      "learning_rate": 1e-05,
      "loss": 2.6001,
      "step": 1
    },
    {
      "epoch": 0.009009009009009009,
      "grad_norm": 3.3645927906036377,
      "learning_rate": 9.954954954954956e-06,
      "loss": 2.6307,
      "step": 2
    },
    {
      "epoch": 0.013513513513513514,
      "grad_norm": 3.065110921859741,
      "learning_rate": 9.90990990990991e-06,
      "loss": 2.6657,
      "step": 3
    },
    {
      "epoch": 0.018018018018018018,
      "grad_norm": 2.7996578216552734,
      "learning_rate": 9.864864864864865e-06,
      "loss": 2.6424,
      "step": 4
    },
    {
      "epoch": 0.02252252252252252,
      "grad_norm": 2.491414785385132,
      "learning_rate": 9.81981981981982e-06,
      "loss": 2.4789,
      "step": 5
    },
    {
      "epoch": 0.02702702702702703,
      "grad_norm": 2.2924463748931885,
      "learning_rate": 9.774774774774776e-06,
      "loss": 2.5258,
      "step": 6
    },
    {
      "epoch": 0.03153153153153153,
      "grad_norm": 2.1462929248809814,
      "learning_rate": 9.729729729729732e-06,
      "loss": 2.5148,
      "step": 7
    },
    {
      "epoch": 0.036036036036036036,
      "grad_norm": 1.8647502660751343,
      "learning_rate": 9.684684684684685e-06,
      "loss": 2.5141,
      "step": 8
    },
    {
      "epoch": 0.04054054054054054,
      "grad_norm": 1.70541512966156,
      "learning_rate": 9.63963963963964e-06,
      "loss": 2.3598,
      "step": 9
    },
    {
      "epoch": 0.04504504504504504,
      "grad_norm": 1.575402855873108,
      "learning_rate": 9.594594594594594e-06,
      "loss": 2.4029,
      "step": 10
    },
    {
      "epoch": 0.04954954954954955,
      "grad_norm": 1.519256830215454,
      "learning_rate": 9.54954954954955e-06,
      "loss": 2.3365,
      "step": 11
    },
    {
      "epoch": 0.05405405405405406,
      "grad_norm": 1.4052317142486572,
      "learning_rate": 9.504504504504505e-06,
      "loss": 2.411,
      "step": 12
    },
    {
      "epoch": 0.05855855855855856,
      "grad_norm": 1.3125083446502686,
      "learning_rate": 9.45945945945946e-06,
      "loss": 2.3526,
      "step": 13
    },
    {
      "epoch": 0.06306306306306306,
      "grad_norm": 1.2365236282348633,
      "learning_rate": 9.414414414414416e-06,
      "loss": 2.4095,
      "step": 14
    },
    {
      "epoch": 0.06756756756756757,
      "grad_norm": 1.150956630706787,
      "learning_rate": 9.36936936936937e-06,
      "loss": 2.3604,
      "step": 15
    },
    {
      "epoch": 0.07207207207207207,
      "grad_norm": 1.049568772315979,
      "learning_rate": 9.324324324324325e-06,
      "loss": 2.3305,
      "step": 16
    },
    {
      "epoch": 0.07657657657657657,
      "grad_norm": 0.9859227538108826,
      "learning_rate": 9.27927927927928e-06,
      "loss": 2.297,
      "step": 17
    },
    {
      "epoch": 0.08108108108108109,
      "grad_norm": 0.9480313658714294,
      "learning_rate": 9.234234234234236e-06,
      "loss": 2.3005,
      "step": 18
    },
    {
      "epoch": 0.08558558558558559,
      "grad_norm": 0.8677704930305481,
      "learning_rate": 9.189189189189191e-06,
      "loss": 2.2562,
      "step": 19
    },
    {
      "epoch": 0.09009009009009009,
      "grad_norm": 0.9002907872200012,
      "learning_rate": 9.144144144144145e-06,
      "loss": 2.194,
      "step": 20
    },
    {
      "epoch": 0.0945945945945946,
      "grad_norm": 0.8388268351554871,
      "learning_rate": 9.0990990990991e-06,
      "loss": 2.2484,
      "step": 21
    },
    {
      "epoch": 0.0990990990990991,
      "grad_norm": 0.7674593925476074,
      "learning_rate": 9.054054054054054e-06,
      "loss": 2.2072,
      "step": 22
    },
    {
      "epoch": 0.1036036036036036,
      "grad_norm": 0.7858290076255798,
      "learning_rate": 9.00900900900901e-06,
      "loss": 2.2069,
      "step": 23
    },
    {
      "epoch": 0.10810810810810811,
      "grad_norm": 0.7265046834945679,
      "learning_rate": 8.963963963963965e-06,
      "loss": 2.155,
      "step": 24
    },
    {
      "epoch": 0.11261261261261261,
      "grad_norm": 0.7166778445243835,
      "learning_rate": 8.91891891891892e-06,
      "loss": 2.1444,
      "step": 25
    },
    {
      "epoch": 0.11711711711711711,
      "grad_norm": 0.6849911212921143,
      "learning_rate": 8.873873873873876e-06,
      "loss": 2.1187,
      "step": 26
    },
    {
      "epoch": 0.12162162162162163,
      "grad_norm": 0.7022340297698975,
      "learning_rate": 8.82882882882883e-06,
      "loss": 2.1996,
      "step": 27
    },
    {
      "epoch": 0.12612612612612611,
      "grad_norm": 0.633515477180481,
      "learning_rate": 8.783783783783785e-06,
      "loss": 2.0704,
      "step": 28
    },
    {
      "epoch": 0.13063063063063063,
      "grad_norm": 0.627775251865387,
      "learning_rate": 8.738738738738739e-06,
      "loss": 2.1014,
      "step": 29
    },
    {
      "epoch": 0.13513513513513514,
      "grad_norm": 0.6084970831871033,
      "learning_rate": 8.693693693693694e-06,
      "loss": 2.0819,
      "step": 30
    },
    {
      "epoch": 0.13963963963963963,
      "grad_norm": 0.6401589512825012,
      "learning_rate": 8.64864864864865e-06,
      "loss": 2.1203,
      "step": 31
    },
    {
      "epoch": 0.14414414414414414,
      "grad_norm": 0.6475760340690613,
      "learning_rate": 8.603603603603605e-06,
      "loss": 2.0912,
      "step": 32
    },
    {
      "epoch": 0.14864864864864866,
      "grad_norm": 0.6095289587974548,
      "learning_rate": 8.55855855855856e-06,
      "loss": 2.0502,
      "step": 33
    },
    {
      "epoch": 0.15315315315315314,
      "grad_norm": 0.5864396691322327,
      "learning_rate": 8.513513513513514e-06,
      "loss": 2.0702,
      "step": 34
    },
    {
      "epoch": 0.15765765765765766,
      "grad_norm": 0.5499593615531921,
      "learning_rate": 8.46846846846847e-06,
      "loss": 2.0222,
      "step": 35
    },
    {
      "epoch": 0.16216216216216217,
      "grad_norm": 0.5458055734634399,
      "learning_rate": 8.423423423423423e-06,
      "loss": 2.0236,
      "step": 36
    },
    {
      "epoch": 0.16666666666666666,
      "grad_norm": 0.5508635640144348,
      "learning_rate": 8.378378378378378e-06,
      "loss": 2.0447,
      "step": 37
    },
    {
      "epoch": 0.17117117117117117,
      "grad_norm": 0.5145464539527893,
      "learning_rate": 8.333333333333334e-06,
      "loss": 1.9726,
      "step": 38
    },
    {
      "epoch": 0.17567567567567569,
      "grad_norm": 0.5341795086860657,
      "learning_rate": 8.288288288288289e-06,
      "loss": 2.0392,
      "step": 39
    },
    {
      "epoch": 0.18018018018018017,
      "grad_norm": 0.5210247039794922,
      "learning_rate": 8.243243243243245e-06,
      "loss": 1.9915,
      "step": 40
    },
    {
      "epoch": 0.18468468468468469,
      "grad_norm": 0.49647679924964905,
      "learning_rate": 8.198198198198198e-06,
      "loss": 1.9797,
      "step": 41
    },
    {
      "epoch": 0.1891891891891892,
      "grad_norm": 0.5066887736320496,
      "learning_rate": 8.153153153153154e-06,
      "loss": 1.9937,
      "step": 42
    },
    {
      "epoch": 0.19369369369369369,
      "grad_norm": 0.5093082189559937,
      "learning_rate": 8.108108108108109e-06,
      "loss": 2.0124,
      "step": 43
    },
    {
      "epoch": 0.1981981981981982,
      "grad_norm": 0.4824818968772888,
      "learning_rate": 8.063063063063063e-06,
      "loss": 1.9615,
      "step": 44
    },
    {
      "epoch": 0.20270270270270271,
      "grad_norm": 0.5253979563713074,
      "learning_rate": 8.018018018018018e-06,
      "loss": 1.967,
      "step": 45
    },
    {
      "epoch": 0.2072072072072072,
      "grad_norm": 0.5245465040206909,
      "learning_rate": 7.972972972972974e-06,
      "loss": 2.0298,
      "step": 46
    },
    {
      "epoch": 0.21171171171171171,
      "grad_norm": 0.4816925823688507,
      "learning_rate": 7.927927927927929e-06,
      "loss": 2.0132,
      "step": 47
    },
    {
      "epoch": 0.21621621621621623,
      "grad_norm": 0.4973873794078827,
      "learning_rate": 7.882882882882884e-06,
      "loss": 2.0363,
      "step": 48
    },
    {
      "epoch": 0.22072072072072071,
      "grad_norm": 0.461952269077301,
      "learning_rate": 7.837837837837838e-06,
      "loss": 1.9707,
      "step": 49
    },
    {
      "epoch": 0.22522522522522523,
      "grad_norm": 0.450893759727478,
      "learning_rate": 7.792792792792793e-06,
      "loss": 1.9285,
      "step": 50
    },
    {
      "epoch": 0.22972972972972974,
      "grad_norm": 0.45231375098228455,
      "learning_rate": 7.747747747747749e-06,
      "loss": 1.9003,
      "step": 51
    },
    {
      "epoch": 0.23423423423423423,
      "grad_norm": 0.4198550581932068,
      "learning_rate": 7.702702702702704e-06,
      "loss": 1.8864,
      "step": 52
    },
    {
      "epoch": 0.23873873873873874,
      "grad_norm": 0.41821354627609253,
      "learning_rate": 7.657657657657658e-06,
      "loss": 1.9022,
      "step": 53
    },
    {
      "epoch": 0.24324324324324326,
      "grad_norm": 0.4159207344055176,
      "learning_rate": 7.612612612612613e-06,
      "loss": 1.8989,
      "step": 54
    },
    {
      "epoch": 0.24774774774774774,
      "grad_norm": 0.4249446392059326,
      "learning_rate": 7.567567567567569e-06,
      "loss": 1.8928,
      "step": 55
    },
    {
      "epoch": 0.25225225225225223,
      "grad_norm": 0.43254926800727844,
      "learning_rate": 7.5225225225225225e-06,
      "loss": 1.9572,
      "step": 56
    },
    {
      "epoch": 0.25675675675675674,
      "grad_norm": 0.5341597199440002,
      "learning_rate": 7.477477477477479e-06,
      "loss": 1.9215,
      "step": 57
    },
    {
      "epoch": 0.26126126126126126,
      "grad_norm": 0.41006702184677124,
      "learning_rate": 7.4324324324324324e-06,
      "loss": 1.9183,
      "step": 58
    },
    {
      "epoch": 0.26576576576576577,
      "grad_norm": 0.45759063959121704,
      "learning_rate": 7.387387387387388e-06,
      "loss": 1.8702,
      "step": 59
    },
    {
      "epoch": 0.2702702702702703,
      "grad_norm": 0.41543591022491455,
      "learning_rate": 7.342342342342343e-06,
      "loss": 1.9162,
      "step": 60
    },
    {
      "epoch": 0.2747747747747748,
      "grad_norm": 0.4476662278175354,
      "learning_rate": 7.297297297297298e-06,
      "loss": 1.8759,
      "step": 61
    },
    {
      "epoch": 0.27927927927927926,
      "grad_norm": 0.43256688117980957,
      "learning_rate": 7.252252252252253e-06,
      "loss": 1.9299,
      "step": 62
    },
    {
      "epoch": 0.28378378378378377,
      "grad_norm": 0.46529731154441833,
      "learning_rate": 7.207207207207208e-06,
      "loss": 2.0264,
      "step": 63
    },
    {
      "epoch": 0.2882882882882883,
      "grad_norm": 0.39733076095581055,
      "learning_rate": 7.162162162162163e-06,
      "loss": 1.868,
      "step": 64
    },
    {
      "epoch": 0.2927927927927928,
      "grad_norm": 0.3892308473587036,
      "learning_rate": 7.117117117117117e-06,
      "loss": 1.8749,
      "step": 65
    },
    {
      "epoch": 0.2972972972972973,
      "grad_norm": 0.46271243691444397,
      "learning_rate": 7.072072072072072e-06,
      "loss": 1.8241,
      "step": 66
    },
    {
      "epoch": 0.30180180180180183,
      "grad_norm": 0.3786090016365051,
      "learning_rate": 7.027027027027028e-06,
      "loss": 1.7992,
      "step": 67
    },
    {
      "epoch": 0.3063063063063063,
      "grad_norm": 0.394674688577652,
      "learning_rate": 6.981981981981982e-06,
      "loss": 1.8436,
      "step": 68
    },
    {
      "epoch": 0.3108108108108108,
      "grad_norm": 0.36820194125175476,
      "learning_rate": 6.936936936936938e-06,
      "loss": 1.8284,
      "step": 69
    },
    {
      "epoch": 0.3153153153153153,
      "grad_norm": 0.389141708612442,
      "learning_rate": 6.891891891891892e-06,
      "loss": 1.8153,
      "step": 70
    },
    {
      "epoch": 0.31981981981981983,
      "grad_norm": 0.4735696613788605,
      "learning_rate": 6.846846846846848e-06,
      "loss": 1.8535,
      "step": 71
    },
    {
      "epoch": 0.32432432432432434,
      "grad_norm": 0.37137389183044434,
      "learning_rate": 6.801801801801803e-06,
      "loss": 1.8457,
      "step": 72
    },
    {
      "epoch": 0.32882882882882886,
      "grad_norm": 0.36660048365592957,
      "learning_rate": 6.7567567567567575e-06,
      "loss": 1.8191,
      "step": 73
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 0.36980313062667847,
      "learning_rate": 6.711711711711713e-06,
      "loss": 1.8441,
      "step": 74
    },
    {
      "epoch": 0.33783783783783783,
      "grad_norm": 0.3742966949939728,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.8521,
      "step": 75
    },
    {
      "epoch": 0.34234234234234234,
      "grad_norm": 0.37553393840789795,
      "learning_rate": 6.621621621621622e-06,
      "loss": 1.8133,
      "step": 76
    },
    {
      "epoch": 0.34684684684684686,
      "grad_norm": 0.48281577229499817,
      "learning_rate": 6.5765765765765775e-06,
      "loss": 1.8447,
      "step": 77
    },
    {
      "epoch": 0.35135135135135137,
      "grad_norm": 0.37523260712623596,
      "learning_rate": 6.531531531531532e-06,
      "loss": 1.8422,
      "step": 78
    },
    {
      "epoch": 0.35585585585585583,
      "grad_norm": 0.3609691262245178,
      "learning_rate": 6.486486486486487e-06,
      "loss": 1.8179,
      "step": 79
    },
    {
      "epoch": 0.36036036036036034,
      "grad_norm": 0.47471126914024353,
      "learning_rate": 6.441441441441442e-06,
      "loss": 1.828,
      "step": 80
    },
    {
      "epoch": 0.36486486486486486,
      "grad_norm": 0.3547884225845337,
      "learning_rate": 6.396396396396397e-06,
      "loss": 1.8362,
      "step": 81
    },
    {
      "epoch": 0.36936936936936937,
      "grad_norm": 0.38408973813056946,
      "learning_rate": 6.351351351351351e-06,
      "loss": 1.8652,
      "step": 82
    },
    {
      "epoch": 0.3738738738738739,
      "grad_norm": 0.36634501814842224,
      "learning_rate": 6.3063063063063065e-06,
      "loss": 1.799,
      "step": 83
    },
    {
      "epoch": 0.3783783783783784,
      "grad_norm": 0.3446308970451355,
      "learning_rate": 6.261261261261262e-06,
      "loss": 1.7622,
      "step": 84
    },
    {
      "epoch": 0.38288288288288286,
      "grad_norm": 0.36346280574798584,
      "learning_rate": 6.2162162162162164e-06,
      "loss": 1.7713,
      "step": 85
    },
    {
      "epoch": 0.38738738738738737,
      "grad_norm": 0.34694963693618774,
      "learning_rate": 6.171171171171172e-06,
      "loss": 1.7814,
      "step": 86
    },
    {
      "epoch": 0.3918918918918919,
      "grad_norm": 0.355396568775177,
      "learning_rate": 6.126126126126126e-06,
      "loss": 1.8025,
      "step": 87
    },
    {
      "epoch": 0.3963963963963964,
      "grad_norm": 0.3488863706588745,
      "learning_rate": 6.081081081081082e-06,
      "loss": 1.7832,
      "step": 88
    },
    {
      "epoch": 0.4009009009009009,
      "grad_norm": 0.3477393686771393,
      "learning_rate": 6.036036036036037e-06,
      "loss": 1.7375,
      "step": 89
    },
    {
      "epoch": 0.40540540540540543,
      "grad_norm": 0.3712894022464752,
      "learning_rate": 5.990990990990992e-06,
      "loss": 1.8348,
      "step": 90
    },
    {
      "epoch": 0.4099099099099099,
      "grad_norm": 0.44912734627723694,
      "learning_rate": 5.945945945945947e-06,
      "loss": 1.7269,
      "step": 91
    },
    {
      "epoch": 0.4144144144144144,
      "grad_norm": 0.35568878054618835,
      "learning_rate": 5.900900900900901e-06,
      "loss": 1.8012,
      "step": 92
    },
    {
      "epoch": 0.4189189189189189,
      "grad_norm": 0.3438522219657898,
      "learning_rate": 5.855855855855856e-06,
      "loss": 1.7039,
      "step": 93
    },
    {
      "epoch": 0.42342342342342343,
      "grad_norm": 0.5138081312179565,
      "learning_rate": 5.810810810810811e-06,
      "loss": 1.78,
      "step": 94
    },
    {
      "epoch": 0.42792792792792794,
      "grad_norm": 0.3379000723361969,
      "learning_rate": 5.765765765765766e-06,
      "loss": 1.7418,
      "step": 95
    },
    {
      "epoch": 0.43243243243243246,
      "grad_norm": 0.3370780348777771,
      "learning_rate": 5.720720720720722e-06,
      "loss": 1.7348,
      "step": 96
    },
    {
      "epoch": 0.4369369369369369,
      "grad_norm": 0.3390265107154846,
      "learning_rate": 5.675675675675676e-06,
      "loss": 1.721,
      "step": 97
    },
    {
      "epoch": 0.44144144144144143,
      "grad_norm": 0.3562930226325989,
      "learning_rate": 5.6306306306306316e-06,
      "loss": 1.7826,
      "step": 98
    },
    {
      "epoch": 0.44594594594594594,
      "grad_norm": 0.477252334356308,
      "learning_rate": 5.585585585585585e-06,
      "loss": 1.7175,
      "step": 99
    },
    {
      "epoch": 0.45045045045045046,
      "grad_norm": 0.334209680557251,
      "learning_rate": 5.540540540540541e-06,
      "loss": 1.7416,
      "step": 100
    },
    {
      "epoch": 0.45495495495495497,
      "grad_norm": 0.4595113694667816,
      "learning_rate": 5.495495495495496e-06,
      "loss": 1.7259,
      "step": 101
    },
    {
      "epoch": 0.4594594594594595,
      "grad_norm": 0.4791535437107086,
      "learning_rate": 5.450450450450451e-06,
      "loss": 1.7314,
      "step": 102
    },
    {
      "epoch": 0.46396396396396394,
      "grad_norm": 0.34569066762924194,
      "learning_rate": 5.405405405405406e-06,
      "loss": 1.7451,
      "step": 103
    },
    {
      "epoch": 0.46846846846846846,
      "grad_norm": 0.33820703625679016,
      "learning_rate": 5.360360360360361e-06,
      "loss": 1.7172,
      "step": 104
    },
    {
      "epoch": 0.47297297297297297,
      "grad_norm": 0.3545491695404053,
      "learning_rate": 5.315315315315316e-06,
      "loss": 1.7654,
      "step": 105
    },
    {
      "epoch": 0.4774774774774775,
      "grad_norm": 0.45435503125190735,
      "learning_rate": 5.2702702702702705e-06,
      "loss": 1.6965,
      "step": 106
    },
    {
      "epoch": 0.481981981981982,
      "grad_norm": 0.3440285921096802,
      "learning_rate": 5.225225225225226e-06,
      "loss": 1.7138,
      "step": 107
    },
    {
      "epoch": 0.4864864864864865,
      "grad_norm": 0.3427916467189789,
      "learning_rate": 5.180180180180181e-06,
      "loss": 1.7171,
      "step": 108
    },
    {
      "epoch": 0.49099099099099097,
      "grad_norm": 0.341746985912323,
      "learning_rate": 5.135135135135135e-06,
      "loss": 1.7144,
      "step": 109
    },
    {
      "epoch": 0.4954954954954955,
      "grad_norm": 0.3410569131374359,
      "learning_rate": 5.0900900900900905e-06,
      "loss": 1.7582,
      "step": 110
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.34173962473869324,
      "learning_rate": 5.045045045045045e-06,
      "loss": 1.7419,
      "step": 111
    },
    {
      "epoch": 0.5045045045045045,
      "grad_norm": 0.34266290068626404,
      "learning_rate": 5e-06,
      "loss": 1.7169,
      "step": 112
    },
    {
      "epoch": 0.509009009009009,
      "grad_norm": 0.3380753993988037,
      "learning_rate": 4.954954954954955e-06,
      "loss": 1.7385,
      "step": 113
    },
    {
      "epoch": 0.5135135135135135,
      "grad_norm": 0.45319607853889465,
      "learning_rate": 4.90990990990991e-06,
      "loss": 1.673,
      "step": 114
    },
    {
      "epoch": 0.5180180180180181,
      "grad_norm": 0.3970378339290619,
      "learning_rate": 4.864864864864866e-06,
      "loss": 1.7046,
      "step": 115
    },
    {
      "epoch": 0.5225225225225225,
      "grad_norm": 0.5082581043243408,
      "learning_rate": 4.81981981981982e-06,
      "loss": 1.7395,
      "step": 116
    },
    {
      "epoch": 0.527027027027027,
      "grad_norm": 0.33490023016929626,
      "learning_rate": 4.774774774774775e-06,
      "loss": 1.7578,
      "step": 117
    },
    {
      "epoch": 0.5315315315315315,
      "grad_norm": 0.3550601601600647,
      "learning_rate": 4.72972972972973e-06,
      "loss": 1.7359,
      "step": 118
    },
    {
      "epoch": 0.536036036036036,
      "grad_norm": 0.39418432116508484,
      "learning_rate": 4.684684684684685e-06,
      "loss": 1.7227,
      "step": 119
    },
    {
      "epoch": 0.5405405405405406,
      "grad_norm": 0.34488528966903687,
      "learning_rate": 4.63963963963964e-06,
      "loss": 1.7826,
      "step": 120
    },
    {
      "epoch": 0.545045045045045,
      "grad_norm": 0.36367273330688477,
      "learning_rate": 4.594594594594596e-06,
      "loss": 1.6977,
      "step": 121
    },
    {
      "epoch": 0.5495495495495496,
      "grad_norm": 0.3362616300582886,
      "learning_rate": 4.54954954954955e-06,
      "loss": 1.7513,
      "step": 122
    },
    {
      "epoch": 0.5540540540540541,
      "grad_norm": 0.35472577810287476,
      "learning_rate": 4.504504504504505e-06,
      "loss": 1.7207,
      "step": 123
    },
    {
      "epoch": 0.5585585585585585,
      "grad_norm": 0.33426880836486816,
      "learning_rate": 4.45945945945946e-06,
      "loss": 1.7081,
      "step": 124
    },
    {
      "epoch": 0.5630630630630631,
      "grad_norm": 0.44085991382598877,
      "learning_rate": 4.414414414414415e-06,
      "loss": 1.7064,
      "step": 125
    },
    {
      "epoch": 0.5675675675675675,
      "grad_norm": 0.34073606133461,
      "learning_rate": 4.369369369369369e-06,
      "loss": 1.7106,
      "step": 126
    },
    {
      "epoch": 0.5720720720720721,
      "grad_norm": 0.33393681049346924,
      "learning_rate": 4.324324324324325e-06,
      "loss": 1.7123,
      "step": 127
    },
    {
      "epoch": 0.5765765765765766,
      "grad_norm": 0.34808585047721863,
      "learning_rate": 4.27927927927928e-06,
      "loss": 1.746,
      "step": 128
    },
    {
      "epoch": 0.581081081081081,
      "grad_norm": 0.32998034358024597,
      "learning_rate": 4.234234234234235e-06,
      "loss": 1.6711,
      "step": 129
    },
    {
      "epoch": 0.5855855855855856,
      "grad_norm": 0.3403429090976715,
      "learning_rate": 4.189189189189189e-06,
      "loss": 1.6766,
      "step": 130
    },
    {
      "epoch": 0.5900900900900901,
      "grad_norm": 0.3442879617214203,
      "learning_rate": 4.1441441441441446e-06,
      "loss": 1.7167,
      "step": 131
    },
    {
      "epoch": 0.5945945945945946,
      "grad_norm": 0.35281968116760254,
      "learning_rate": 4.099099099099099e-06,
      "loss": 1.6911,
      "step": 132
    },
    {
      "epoch": 0.5990990990990991,
      "grad_norm": 0.36312294006347656,
      "learning_rate": 4.0540540540540545e-06,
      "loss": 1.6752,
      "step": 133
    },
    {
      "epoch": 0.6036036036036037,
      "grad_norm": 0.5125902891159058,
      "learning_rate": 4.009009009009009e-06,
      "loss": 1.7018,
      "step": 134
    },
    {
      "epoch": 0.6081081081081081,
      "grad_norm": 0.3540242314338684,
      "learning_rate": 3.9639639639639645e-06,
      "loss": 1.7434,
      "step": 135
    },
    {
      "epoch": 0.6126126126126126,
      "grad_norm": 0.3490309417247772,
      "learning_rate": 3.918918918918919e-06,
      "loss": 1.6623,
      "step": 136
    },
    {
      "epoch": 0.6171171171171171,
      "grad_norm": 0.3411913812160492,
      "learning_rate": 3.8738738738738744e-06,
      "loss": 1.6375,
      "step": 137
    },
    {
      "epoch": 0.6216216216216216,
      "grad_norm": 0.3555877208709717,
      "learning_rate": 3.828828828828829e-06,
      "loss": 1.6833,
      "step": 138
    },
    {
      "epoch": 0.6261261261261262,
      "grad_norm": 0.391271710395813,
      "learning_rate": 3.7837837837837844e-06,
      "loss": 1.6057,
      "step": 139
    },
    {
      "epoch": 0.6306306306306306,
      "grad_norm": 0.3273333013057709,
      "learning_rate": 3.7387387387387394e-06,
      "loss": 1.6553,
      "step": 140
    },
    {
      "epoch": 0.6351351351351351,
      "grad_norm": 0.3500949442386627,
      "learning_rate": 3.693693693693694e-06,
      "loss": 1.6151,
      "step": 141
    },
    {
      "epoch": 0.6396396396396397,
      "grad_norm": 0.36416658759117126,
      "learning_rate": 3.648648648648649e-06,
      "loss": 1.6769,
      "step": 142
    },
    {
      "epoch": 0.6441441441441441,
      "grad_norm": 0.3524824380874634,
      "learning_rate": 3.603603603603604e-06,
      "loss": 1.6869,
      "step": 143
    },
    {
      "epoch": 0.6486486486486487,
      "grad_norm": 0.3461499512195587,
      "learning_rate": 3.5585585585585584e-06,
      "loss": 1.665,
      "step": 144
    },
    {
      "epoch": 0.6531531531531531,
      "grad_norm": 0.3453364074230194,
      "learning_rate": 3.513513513513514e-06,
      "loss": 1.6884,
      "step": 145
    },
    {
      "epoch": 0.6576576576576577,
      "grad_norm": 0.3358488976955414,
      "learning_rate": 3.468468468468469e-06,
      "loss": 1.6457,
      "step": 146
    },
    {
      "epoch": 0.6621621621621622,
      "grad_norm": 0.3561137020587921,
      "learning_rate": 3.423423423423424e-06,
      "loss": 1.7074,
      "step": 147
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 0.3294833302497864,
      "learning_rate": 3.3783783783783788e-06,
      "loss": 1.6475,
      "step": 148
    },
    {
      "epoch": 0.6711711711711712,
      "grad_norm": 0.3332567512989044,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.6376,
      "step": 149
    },
    {
      "epoch": 0.6756756756756757,
      "grad_norm": 0.4869362711906433,
      "learning_rate": 3.2882882882882887e-06,
      "loss": 1.6186,
      "step": 150
    },
    {
      "epoch": 0.6801801801801802,
      "grad_norm": 0.33938735723495483,
      "learning_rate": 3.2432432432432437e-06,
      "loss": 1.6374,
      "step": 151
    },
    {
      "epoch": 0.6846846846846847,
      "grad_norm": 0.48691487312316895,
      "learning_rate": 3.1981981981981987e-06,
      "loss": 1.6214,
      "step": 152
    },
    {
      "epoch": 0.6891891891891891,
      "grad_norm": 0.3412449359893799,
      "learning_rate": 3.1531531531531532e-06,
      "loss": 1.6594,
      "step": 153
    },
    {
      "epoch": 0.6936936936936937,
      "grad_norm": 0.35119444131851196,
      "learning_rate": 3.1081081081081082e-06,
      "loss": 1.7138,
      "step": 154
    },
    {
      "epoch": 0.6981981981981982,
      "grad_norm": 0.37252432107925415,
      "learning_rate": 3.063063063063063e-06,
      "loss": 1.6712,
      "step": 155
    },
    {
      "epoch": 0.7027027027027027,
      "grad_norm": 0.32262712717056274,
      "learning_rate": 3.0180180180180186e-06,
      "loss": 1.6269,
      "step": 156
    },
    {
      "epoch": 0.7072072072072072,
      "grad_norm": 0.3406342566013336,
      "learning_rate": 2.9729729729729736e-06,
      "loss": 1.694,
      "step": 157
    },
    {
      "epoch": 0.7117117117117117,
      "grad_norm": 0.34315645694732666,
      "learning_rate": 2.927927927927928e-06,
      "loss": 1.7172,
      "step": 158
    },
    {
      "epoch": 0.7162162162162162,
      "grad_norm": 0.4974067807197571,
      "learning_rate": 2.882882882882883e-06,
      "loss": 1.6876,
      "step": 159
    },
    {
      "epoch": 0.7207207207207207,
      "grad_norm": 0.3327634334564209,
      "learning_rate": 2.837837837837838e-06,
      "loss": 1.668,
      "step": 160
    },
    {
      "epoch": 0.7252252252252253,
      "grad_norm": 0.363633930683136,
      "learning_rate": 2.7927927927927926e-06,
      "loss": 1.6732,
      "step": 161
    },
    {
      "epoch": 0.7297297297297297,
      "grad_norm": 0.35024866461753845,
      "learning_rate": 2.747747747747748e-06,
      "loss": 1.6703,
      "step": 162
    },
    {
      "epoch": 0.7342342342342343,
      "grad_norm": 0.34215882420539856,
      "learning_rate": 2.702702702702703e-06,
      "loss": 1.6729,
      "step": 163
    },
    {
      "epoch": 0.7387387387387387,
      "grad_norm": 0.34051352739334106,
      "learning_rate": 2.657657657657658e-06,
      "loss": 1.6548,
      "step": 164
    },
    {
      "epoch": 0.7432432432432432,
      "grad_norm": 0.5225317478179932,
      "learning_rate": 2.612612612612613e-06,
      "loss": 1.6344,
      "step": 165
    },
    {
      "epoch": 0.7477477477477478,
      "grad_norm": 0.40349280834198,
      "learning_rate": 2.5675675675675675e-06,
      "loss": 1.6252,
      "step": 166
    },
    {
      "epoch": 0.7522522522522522,
      "grad_norm": 0.3395436704158783,
      "learning_rate": 2.5225225225225225e-06,
      "loss": 1.6486,
      "step": 167
    },
    {
      "epoch": 0.7567567567567568,
      "grad_norm": 0.3456726670265198,
      "learning_rate": 2.4774774774774775e-06,
      "loss": 1.6756,
      "step": 168
    },
    {
      "epoch": 0.7612612612612613,
      "grad_norm": 0.3367043435573578,
      "learning_rate": 2.432432432432433e-06,
      "loss": 1.6589,
      "step": 169
    },
    {
      "epoch": 0.7657657657657657,
      "grad_norm": 0.3494514226913452,
      "learning_rate": 2.3873873873873874e-06,
      "loss": 1.6763,
      "step": 170
    },
    {
      "epoch": 0.7702702702702703,
      "grad_norm": 0.3343624770641327,
      "learning_rate": 2.3423423423423424e-06,
      "loss": 1.6118,
      "step": 171
    },
    {
      "epoch": 0.7747747747747747,
      "grad_norm": 0.33982744812965393,
      "learning_rate": 2.297297297297298e-06,
      "loss": 1.639,
      "step": 172
    },
    {
      "epoch": 0.7792792792792793,
      "grad_norm": 0.33141806721687317,
      "learning_rate": 2.2522522522522524e-06,
      "loss": 1.6643,
      "step": 173
    },
    {
      "epoch": 0.7837837837837838,
      "grad_norm": 0.3434072434902191,
      "learning_rate": 2.2072072072072073e-06,
      "loss": 1.5689,
      "step": 174
    },
    {
      "epoch": 0.7882882882882883,
      "grad_norm": 0.3629867136478424,
      "learning_rate": 2.1621621621621623e-06,
      "loss": 1.6995,
      "step": 175
    },
    {
      "epoch": 0.7927927927927928,
      "grad_norm": 0.33522751927375793,
      "learning_rate": 2.1171171171171173e-06,
      "loss": 1.6482,
      "step": 176
    },
    {
      "epoch": 0.7972972972972973,
      "grad_norm": 0.34863755106925964,
      "learning_rate": 2.0720720720720723e-06,
      "loss": 1.6613,
      "step": 177
    },
    {
      "epoch": 0.8018018018018018,
      "grad_norm": 0.3450457751750946,
      "learning_rate": 2.0270270270270273e-06,
      "loss": 1.6708,
      "step": 178
    },
    {
      "epoch": 0.8063063063063063,
      "grad_norm": 0.3388313055038452,
      "learning_rate": 1.9819819819819822e-06,
      "loss": 1.6517,
      "step": 179
    },
    {
      "epoch": 0.8108108108108109,
      "grad_norm": 0.3772425651550293,
      "learning_rate": 1.9369369369369372e-06,
      "loss": 1.5883,
      "step": 180
    },
    {
      "epoch": 0.8153153153153153,
      "grad_norm": 0.34325921535491943,
      "learning_rate": 1.8918918918918922e-06,
      "loss": 1.6263,
      "step": 181
    },
    {
      "epoch": 0.8198198198198198,
      "grad_norm": 0.31989938020706177,
      "learning_rate": 1.846846846846847e-06,
      "loss": 1.6133,
      "step": 182
    },
    {
      "epoch": 0.8243243243243243,
      "grad_norm": 0.3870126008987427,
      "learning_rate": 1.801801801801802e-06,
      "loss": 1.6299,
      "step": 183
    },
    {
      "epoch": 0.8288288288288288,
      "grad_norm": 0.3582821190357208,
      "learning_rate": 1.756756756756757e-06,
      "loss": 1.6416,
      "step": 184
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 0.3285435736179352,
      "learning_rate": 1.711711711711712e-06,
      "loss": 1.6556,
      "step": 185
    },
    {
      "epoch": 0.8378378378378378,
      "grad_norm": 0.3296816647052765,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 1.6513,
      "step": 186
    },
    {
      "epoch": 0.8423423423423423,
      "grad_norm": 0.32003799080848694,
      "learning_rate": 1.6216216216216219e-06,
      "loss": 1.6641,
      "step": 187
    },
    {
      "epoch": 0.8468468468468469,
      "grad_norm": 0.41711196303367615,
      "learning_rate": 1.5765765765765766e-06,
      "loss": 1.6022,
      "step": 188
    },
    {
      "epoch": 0.8513513513513513,
      "grad_norm": 0.3556946814060211,
      "learning_rate": 1.5315315315315316e-06,
      "loss": 1.6824,
      "step": 189
    },
    {
      "epoch": 0.8558558558558559,
      "grad_norm": 0.33673131465911865,
      "learning_rate": 1.4864864864864868e-06,
      "loss": 1.6419,
      "step": 190
    },
    {
      "epoch": 0.8603603603603603,
      "grad_norm": 0.41371607780456543,
      "learning_rate": 1.4414414414414416e-06,
      "loss": 1.6329,
      "step": 191
    },
    {
      "epoch": 0.8648648648648649,
      "grad_norm": 0.3232820928096771,
      "learning_rate": 1.3963963963963963e-06,
      "loss": 1.645,
      "step": 192
    },
    {
      "epoch": 0.8693693693693694,
      "grad_norm": 0.5039376616477966,
      "learning_rate": 1.3513513513513515e-06,
      "loss": 1.6429,
      "step": 193
    },
    {
      "epoch": 0.8738738738738738,
      "grad_norm": 0.4405737519264221,
      "learning_rate": 1.3063063063063065e-06,
      "loss": 1.6726,
      "step": 194
    },
    {
      "epoch": 0.8783783783783784,
      "grad_norm": 0.32625824213027954,
      "learning_rate": 1.2612612612612613e-06,
      "loss": 1.6614,
      "step": 195
    },
    {
      "epoch": 0.8828828828828829,
      "grad_norm": 0.34024447202682495,
      "learning_rate": 1.2162162162162164e-06,
      "loss": 1.6198,
      "step": 196
    },
    {
      "epoch": 0.8873873873873874,
      "grad_norm": 0.3739813566207886,
      "learning_rate": 1.1711711711711712e-06,
      "loss": 1.6033,
      "step": 197
    },
    {
      "epoch": 0.8918918918918919,
      "grad_norm": 0.35833895206451416,
      "learning_rate": 1.1261261261261262e-06,
      "loss": 1.7058,
      "step": 198
    },
    {
      "epoch": 0.8963963963963963,
      "grad_norm": 0.3302564322948456,
      "learning_rate": 1.0810810810810812e-06,
      "loss": 1.5926,
      "step": 199
    },
    {
      "epoch": 0.9009009009009009,
      "grad_norm": 0.32919058203697205,
      "learning_rate": 1.0360360360360361e-06,
      "loss": 1.6766,
      "step": 200
    },
    {
      "epoch": 0.9054054054054054,
      "grad_norm": 0.3549056649208069,
      "learning_rate": 9.909909909909911e-07,
      "loss": 1.644,
      "step": 201
    },
    {
      "epoch": 0.9099099099099099,
      "grad_norm": 0.3365701735019684,
      "learning_rate": 9.459459459459461e-07,
      "loss": 1.6275,
      "step": 202
    },
    {
      "epoch": 0.9144144144144144,
      "grad_norm": 0.3276926577091217,
      "learning_rate": 9.00900900900901e-07,
      "loss": 1.5943,
      "step": 203
    },
    {
      "epoch": 0.918918918918919,
      "grad_norm": 0.47995641827583313,
      "learning_rate": 8.55855855855856e-07,
      "loss": 1.6125,
      "step": 204
    },
    {
      "epoch": 0.9234234234234234,
      "grad_norm": 0.3210660517215729,
      "learning_rate": 8.108108108108109e-07,
      "loss": 1.5901,
      "step": 205
    },
    {
      "epoch": 0.9279279279279279,
      "grad_norm": 0.35450634360313416,
      "learning_rate": 7.657657657657658e-07,
      "loss": 1.6628,
      "step": 206
    },
    {
      "epoch": 0.9324324324324325,
      "grad_norm": 0.33351245522499084,
      "learning_rate": 7.207207207207208e-07,
      "loss": 1.5704,
      "step": 207
    },
    {
      "epoch": 0.9369369369369369,
      "grad_norm": 0.40133386850357056,
      "learning_rate": 6.756756756756758e-07,
      "loss": 1.572,
      "step": 208
    },
    {
      "epoch": 0.9414414414414415,
      "grad_norm": 0.35304591059684753,
      "learning_rate": 6.306306306306306e-07,
      "loss": 1.6827,
      "step": 209
    },
    {
      "epoch": 0.9459459459459459,
      "grad_norm": 0.4823031425476074,
      "learning_rate": 5.855855855855856e-07,
      "loss": 1.6265,
      "step": 210
    },
    {
      "epoch": 0.9504504504504504,
      "grad_norm": 0.3416145443916321,
      "learning_rate": 5.405405405405406e-07,
      "loss": 1.5836,
      "step": 211
    },
    {
      "epoch": 0.954954954954955,
      "grad_norm": 0.34280046820640564,
      "learning_rate": 4.954954954954956e-07,
      "loss": 1.61,
      "step": 212
    },
    {
      "epoch": 0.9594594594594594,
      "grad_norm": 0.3362351357936859,
      "learning_rate": 4.504504504504505e-07,
      "loss": 1.6703,
      "step": 213
    },
    {
      "epoch": 0.963963963963964,
      "grad_norm": 0.32132312655448914,
      "learning_rate": 4.0540540540540546e-07,
      "loss": 1.6469,
      "step": 214
    },
    {
      "epoch": 0.9684684684684685,
      "grad_norm": 0.3417748212814331,
      "learning_rate": 3.603603603603604e-07,
      "loss": 1.6052,
      "step": 215
    },
    {
      "epoch": 0.972972972972973,
      "grad_norm": 0.3503134548664093,
      "learning_rate": 3.153153153153153e-07,
      "loss": 1.6253,
      "step": 216
    },
    {
      "epoch": 0.9774774774774775,
      "grad_norm": 0.332475870847702,
      "learning_rate": 2.702702702702703e-07,
      "loss": 1.6158,
      "step": 217
    },
    {
      "epoch": 0.9819819819819819,
      "grad_norm": 0.3545438349246979,
      "learning_rate": 2.2522522522522524e-07,
      "loss": 1.67,
      "step": 218
    },
    {
      "epoch": 0.9864864864864865,
      "grad_norm": 0.3189946115016937,
      "learning_rate": 1.801801801801802e-07,
      "loss": 1.5921,
      "step": 219
    },
    {
      "epoch": 0.990990990990991,
      "grad_norm": 0.35738781094551086,
      "learning_rate": 1.3513513513513515e-07,
      "loss": 1.6595,
      "step": 220
    },
    {
      "epoch": 0.9954954954954955,
      "grad_norm": 0.41471225023269653,
      "learning_rate": 9.00900900900901e-08,
      "loss": 1.6064,
      "step": 221
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.33184003829956055,
      "learning_rate": 4.504504504504505e-08,
      "loss": 1.6121,
      "step": 222
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 222,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 0,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.5005440937033728e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}