|
{ |
|
"best_global_step": null, |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 5.0, |
|
"eval_steps": 500, |
|
"global_step": 410, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.012195121951219513, |
|
"grad_norm": 20.605318069458008, |
|
"learning_rate": 0.0, |
|
"loss": 1.7928, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.024390243902439025, |
|
"grad_norm": 20.76787567138672, |
|
"learning_rate": 2.439024390243903e-07, |
|
"loss": 1.7386, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.036585365853658534, |
|
"grad_norm": 21.81036949157715, |
|
"learning_rate": 4.878048780487805e-07, |
|
"loss": 1.8763, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.04878048780487805, |
|
"grad_norm": 20.621498107910156, |
|
"learning_rate": 7.317073170731707e-07, |
|
"loss": 1.8537, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.06097560975609756, |
|
"grad_norm": 22.98723793029785, |
|
"learning_rate": 9.75609756097561e-07, |
|
"loss": 1.8113, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.07317073170731707, |
|
"grad_norm": 19.314804077148438, |
|
"learning_rate": 1.2195121951219514e-06, |
|
"loss": 1.7677, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.08536585365853659, |
|
"grad_norm": 21.158281326293945, |
|
"learning_rate": 1.4634146341463414e-06, |
|
"loss": 1.7847, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.0975609756097561, |
|
"grad_norm": 16.294034957885742, |
|
"learning_rate": 1.707317073170732e-06, |
|
"loss": 1.6678, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.10975609756097561, |
|
"grad_norm": 16.788780212402344, |
|
"learning_rate": 1.951219512195122e-06, |
|
"loss": 1.6558, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.12195121951219512, |
|
"grad_norm": 12.225774765014648, |
|
"learning_rate": 2.1951219512195125e-06, |
|
"loss": 1.2956, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.13414634146341464, |
|
"grad_norm": 14.551143646240234, |
|
"learning_rate": 2.4390243902439027e-06, |
|
"loss": 1.5254, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.14634146341463414, |
|
"grad_norm": 11.28449535369873, |
|
"learning_rate": 2.682926829268293e-06, |
|
"loss": 1.3579, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.15853658536585366, |
|
"grad_norm": 7.676495552062988, |
|
"learning_rate": 2.926829268292683e-06, |
|
"loss": 1.2552, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.17073170731707318, |
|
"grad_norm": 6.054831027984619, |
|
"learning_rate": 3.1707317073170736e-06, |
|
"loss": 1.0942, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.18292682926829268, |
|
"grad_norm": 6.24427604675293, |
|
"learning_rate": 3.414634146341464e-06, |
|
"loss": 1.1486, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.1951219512195122, |
|
"grad_norm": 5.555965900421143, |
|
"learning_rate": 3.6585365853658537e-06, |
|
"loss": 1.0225, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.2073170731707317, |
|
"grad_norm": 4.953287124633789, |
|
"learning_rate": 3.902439024390244e-06, |
|
"loss": 1.0188, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.21951219512195122, |
|
"grad_norm": 4.212824821472168, |
|
"learning_rate": 4.146341463414634e-06, |
|
"loss": 0.9555, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.23170731707317074, |
|
"grad_norm": 4.176329135894775, |
|
"learning_rate": 4.390243902439025e-06, |
|
"loss": 0.9, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.24390243902439024, |
|
"grad_norm": 4.0246734619140625, |
|
"learning_rate": 4.634146341463416e-06, |
|
"loss": 0.9001, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.25609756097560976, |
|
"grad_norm": 4.022885322570801, |
|
"learning_rate": 4.8780487804878055e-06, |
|
"loss": 0.8557, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.2682926829268293, |
|
"grad_norm": 3.9502739906311035, |
|
"learning_rate": 5.121951219512195e-06, |
|
"loss": 0.9313, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.2804878048780488, |
|
"grad_norm": 3.4761359691619873, |
|
"learning_rate": 5.365853658536586e-06, |
|
"loss": 0.8559, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.2926829268292683, |
|
"grad_norm": 3.896311044692993, |
|
"learning_rate": 5.609756097560977e-06, |
|
"loss": 0.9048, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.3048780487804878, |
|
"grad_norm": 3.714123010635376, |
|
"learning_rate": 5.853658536585366e-06, |
|
"loss": 0.7699, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.3170731707317073, |
|
"grad_norm": 4.503406524658203, |
|
"learning_rate": 6.0975609756097564e-06, |
|
"loss": 0.8699, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.32926829268292684, |
|
"grad_norm": 3.643167734146118, |
|
"learning_rate": 6.341463414634147e-06, |
|
"loss": 0.8047, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.34146341463414637, |
|
"grad_norm": 3.93937087059021, |
|
"learning_rate": 6.585365853658538e-06, |
|
"loss": 0.8064, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.35365853658536583, |
|
"grad_norm": 3.669752836227417, |
|
"learning_rate": 6.829268292682928e-06, |
|
"loss": 0.7593, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.36585365853658536, |
|
"grad_norm": 3.5783209800720215, |
|
"learning_rate": 7.0731707317073175e-06, |
|
"loss": 0.7464, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.3780487804878049, |
|
"grad_norm": 3.4129626750946045, |
|
"learning_rate": 7.317073170731707e-06, |
|
"loss": 0.8218, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.3902439024390244, |
|
"grad_norm": 3.250596761703491, |
|
"learning_rate": 7.560975609756098e-06, |
|
"loss": 0.8161, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.4024390243902439, |
|
"grad_norm": 3.030006170272827, |
|
"learning_rate": 7.804878048780489e-06, |
|
"loss": 0.6851, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.4146341463414634, |
|
"grad_norm": 3.556096076965332, |
|
"learning_rate": 8.048780487804879e-06, |
|
"loss": 0.8649, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.4268292682926829, |
|
"grad_norm": 3.155592203140259, |
|
"learning_rate": 8.292682926829268e-06, |
|
"loss": 0.7146, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.43902439024390244, |
|
"grad_norm": 2.923524856567383, |
|
"learning_rate": 8.536585365853658e-06, |
|
"loss": 0.7535, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.45121951219512196, |
|
"grad_norm": 3.1197190284729004, |
|
"learning_rate": 8.78048780487805e-06, |
|
"loss": 0.7267, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.4634146341463415, |
|
"grad_norm": 2.902597188949585, |
|
"learning_rate": 9.02439024390244e-06, |
|
"loss": 0.7113, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.47560975609756095, |
|
"grad_norm": 3.2583975791931152, |
|
"learning_rate": 9.268292682926831e-06, |
|
"loss": 0.8452, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.4878048780487805, |
|
"grad_norm": 3.5036613941192627, |
|
"learning_rate": 9.51219512195122e-06, |
|
"loss": 0.7932, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 2.883305788040161, |
|
"learning_rate": 9.756097560975611e-06, |
|
"loss": 0.7578, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.5121951219512195, |
|
"grad_norm": 2.8983325958251953, |
|
"learning_rate": 1e-05, |
|
"loss": 0.6646, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.524390243902439, |
|
"grad_norm": 3.0411853790283203, |
|
"learning_rate": 9.999959340292497e-06, |
|
"loss": 0.743, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.5365853658536586, |
|
"grad_norm": 3.015455484390259, |
|
"learning_rate": 9.999837361831269e-06, |
|
"loss": 0.6727, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.5487804878048781, |
|
"grad_norm": 3.099972724914551, |
|
"learning_rate": 9.999634066600162e-06, |
|
"loss": 0.7748, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.5609756097560976, |
|
"grad_norm": 2.834282875061035, |
|
"learning_rate": 9.999349457905545e-06, |
|
"loss": 0.6954, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.573170731707317, |
|
"grad_norm": 3.012594223022461, |
|
"learning_rate": 9.998983540376262e-06, |
|
"loss": 0.8249, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.5853658536585366, |
|
"grad_norm": 3.121540069580078, |
|
"learning_rate": 9.99853631996355e-06, |
|
"loss": 0.7512, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.5975609756097561, |
|
"grad_norm": 2.814594030380249, |
|
"learning_rate": 9.99800780394095e-06, |
|
"loss": 0.749, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.6097560975609756, |
|
"grad_norm": 2.8075897693634033, |
|
"learning_rate": 9.997398000904185e-06, |
|
"loss": 0.7249, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.6219512195121951, |
|
"grad_norm": 3.2552330493927, |
|
"learning_rate": 9.996706920771024e-06, |
|
"loss": 0.7802, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.6341463414634146, |
|
"grad_norm": 3.095428705215454, |
|
"learning_rate": 9.995934574781108e-06, |
|
"loss": 0.753, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.6463414634146342, |
|
"grad_norm": 2.9792091846466064, |
|
"learning_rate": 9.995080975495786e-06, |
|
"loss": 0.7911, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.6585365853658537, |
|
"grad_norm": 3.0372695922851562, |
|
"learning_rate": 9.994146136797893e-06, |
|
"loss": 0.7471, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.6707317073170732, |
|
"grad_norm": 3.14581036567688, |
|
"learning_rate": 9.993130073891539e-06, |
|
"loss": 0.7912, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.6829268292682927, |
|
"grad_norm": 2.859478235244751, |
|
"learning_rate": 9.992032803301852e-06, |
|
"loss": 0.6547, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.6951219512195121, |
|
"grad_norm": 2.866575002670288, |
|
"learning_rate": 9.990854342874712e-06, |
|
"loss": 0.7098, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.7073170731707317, |
|
"grad_norm": 3.036907434463501, |
|
"learning_rate": 9.98959471177646e-06, |
|
"loss": 0.8274, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.7195121951219512, |
|
"grad_norm": 2.837873935699463, |
|
"learning_rate": 9.988253930493592e-06, |
|
"loss": 0.7151, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.7317073170731707, |
|
"grad_norm": 2.6678829193115234, |
|
"learning_rate": 9.986832020832416e-06, |
|
"loss": 0.6541, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.7439024390243902, |
|
"grad_norm": 2.9930105209350586, |
|
"learning_rate": 9.985329005918702e-06, |
|
"loss": 0.6892, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.7560975609756098, |
|
"grad_norm": 2.858548164367676, |
|
"learning_rate": 9.983744910197315e-06, |
|
"loss": 0.6988, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.7682926829268293, |
|
"grad_norm": 3.0590319633483887, |
|
"learning_rate": 9.982079759431797e-06, |
|
"loss": 0.6853, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.7804878048780488, |
|
"grad_norm": 2.8750498294830322, |
|
"learning_rate": 9.980333580703968e-06, |
|
"loss": 0.7181, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.7926829268292683, |
|
"grad_norm": 2.720283031463623, |
|
"learning_rate": 9.978506402413472e-06, |
|
"loss": 0.6339, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.8048780487804879, |
|
"grad_norm": 2.936540126800537, |
|
"learning_rate": 9.976598254277324e-06, |
|
"loss": 0.7085, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.8170731707317073, |
|
"grad_norm": 2.7820205688476562, |
|
"learning_rate": 9.974609167329425e-06, |
|
"loss": 0.6682, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.8292682926829268, |
|
"grad_norm": 2.852302312850952, |
|
"learning_rate": 9.972539173920048e-06, |
|
"loss": 0.7067, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.8414634146341463, |
|
"grad_norm": 2.763120651245117, |
|
"learning_rate": 9.970388307715326e-06, |
|
"loss": 0.6512, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.8536585365853658, |
|
"grad_norm": 2.834955930709839, |
|
"learning_rate": 9.968156603696696e-06, |
|
"loss": 0.692, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.8658536585365854, |
|
"grad_norm": 2.5952882766723633, |
|
"learning_rate": 9.965844098160326e-06, |
|
"loss": 0.6458, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.8780487804878049, |
|
"grad_norm": 2.793827533721924, |
|
"learning_rate": 9.963450828716543e-06, |
|
"loss": 0.7312, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.8902439024390244, |
|
"grad_norm": 2.7760300636291504, |
|
"learning_rate": 9.960976834289197e-06, |
|
"loss": 0.6733, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.9024390243902439, |
|
"grad_norm": 3.0652453899383545, |
|
"learning_rate": 9.958422155115044e-06, |
|
"loss": 0.7255, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.9146341463414634, |
|
"grad_norm": 2.7409512996673584, |
|
"learning_rate": 9.955786832743089e-06, |
|
"loss": 0.7146, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.926829268292683, |
|
"grad_norm": 2.671405553817749, |
|
"learning_rate": 9.953070910033904e-06, |
|
"loss": 0.7051, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.9390243902439024, |
|
"grad_norm": 3.065516233444214, |
|
"learning_rate": 9.95027443115894e-06, |
|
"loss": 0.7027, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.9512195121951219, |
|
"grad_norm": 2.724518060684204, |
|
"learning_rate": 9.947397441599801e-06, |
|
"loss": 0.7046, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.9634146341463414, |
|
"grad_norm": 2.762394428253174, |
|
"learning_rate": 9.944439988147509e-06, |
|
"loss": 0.6638, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.975609756097561, |
|
"grad_norm": 2.7874350547790527, |
|
"learning_rate": 9.941402118901743e-06, |
|
"loss": 0.6985, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.9878048780487805, |
|
"grad_norm": 2.785700798034668, |
|
"learning_rate": 9.938283883270051e-06, |
|
"loss": 0.6443, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 2.859963893890381, |
|
"learning_rate": 9.935085331967054e-06, |
|
"loss": 0.6987, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 1.0121951219512195, |
|
"grad_norm": 2.341641902923584, |
|
"learning_rate": 9.931806517013612e-06, |
|
"loss": 0.4309, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 1.024390243902439, |
|
"grad_norm": 2.2350566387176514, |
|
"learning_rate": 9.928447491735994e-06, |
|
"loss": 0.3769, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 1.0365853658536586, |
|
"grad_norm": 2.750514030456543, |
|
"learning_rate": 9.925008310764988e-06, |
|
"loss": 0.5076, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 1.048780487804878, |
|
"grad_norm": 2.627335548400879, |
|
"learning_rate": 9.921489030035036e-06, |
|
"loss": 0.359, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 1.0609756097560976, |
|
"grad_norm": 2.739978075027466, |
|
"learning_rate": 9.917889706783304e-06, |
|
"loss": 0.4735, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 1.0731707317073171, |
|
"grad_norm": 3.0831549167633057, |
|
"learning_rate": 9.914210399548768e-06, |
|
"loss": 0.5604, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 1.0853658536585367, |
|
"grad_norm": 3.0366146564483643, |
|
"learning_rate": 9.910451168171248e-06, |
|
"loss": 0.3986, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 1.0975609756097562, |
|
"grad_norm": 2.8682730197906494, |
|
"learning_rate": 9.906612073790443e-06, |
|
"loss": 0.4118, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 1.1097560975609757, |
|
"grad_norm": 2.9994473457336426, |
|
"learning_rate": 9.902693178844937e-06, |
|
"loss": 0.4581, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 1.1219512195121952, |
|
"grad_norm": 3.4703030586242676, |
|
"learning_rate": 9.898694547071177e-06, |
|
"loss": 0.5222, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 1.1341463414634148, |
|
"grad_norm": 2.6934309005737305, |
|
"learning_rate": 9.894616243502442e-06, |
|
"loss": 0.3656, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 1.146341463414634, |
|
"grad_norm": 2.379758834838867, |
|
"learning_rate": 9.890458334467784e-06, |
|
"loss": 0.3277, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 1.1585365853658536, |
|
"grad_norm": 2.7950727939605713, |
|
"learning_rate": 9.886220887590953e-06, |
|
"loss": 0.4012, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 1.170731707317073, |
|
"grad_norm": 2.668951988220215, |
|
"learning_rate": 9.881903971789285e-06, |
|
"loss": 0.4384, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 1.1829268292682926, |
|
"grad_norm": 2.785778522491455, |
|
"learning_rate": 9.877507657272596e-06, |
|
"loss": 0.4727, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 1.1951219512195121, |
|
"grad_norm": 2.7798571586608887, |
|
"learning_rate": 9.873032015542027e-06, |
|
"loss": 0.4594, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 1.2073170731707317, |
|
"grad_norm": 2.9862515926361084, |
|
"learning_rate": 9.868477119388897e-06, |
|
"loss": 0.4715, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 1.2195121951219512, |
|
"grad_norm": 2.749171495437622, |
|
"learning_rate": 9.863843042893499e-06, |
|
"loss": 0.4125, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.2317073170731707, |
|
"grad_norm": 2.4786319732666016, |
|
"learning_rate": 9.859129861423915e-06, |
|
"loss": 0.4036, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 1.2439024390243902, |
|
"grad_norm": 2.724829912185669, |
|
"learning_rate": 9.854337651634773e-06, |
|
"loss": 0.4688, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 1.2560975609756098, |
|
"grad_norm": 2.5419397354125977, |
|
"learning_rate": 9.849466491466017e-06, |
|
"loss": 0.4276, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 1.2682926829268293, |
|
"grad_norm": 2.508129596710205, |
|
"learning_rate": 9.844516460141622e-06, |
|
"loss": 0.401, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 1.2804878048780488, |
|
"grad_norm": 2.677839756011963, |
|
"learning_rate": 9.839487638168321e-06, |
|
"loss": 0.3839, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 1.2926829268292683, |
|
"grad_norm": 2.811065912246704, |
|
"learning_rate": 9.834380107334284e-06, |
|
"loss": 0.3876, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 1.3048780487804879, |
|
"grad_norm": 2.7741312980651855, |
|
"learning_rate": 9.829193950707798e-06, |
|
"loss": 0.4019, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 1.3170731707317074, |
|
"grad_norm": 2.604609727859497, |
|
"learning_rate": 9.823929252635905e-06, |
|
"loss": 0.3753, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 1.329268292682927, |
|
"grad_norm": 3.5267436504364014, |
|
"learning_rate": 9.818586098743038e-06, |
|
"loss": 0.5063, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 1.3414634146341464, |
|
"grad_norm": 2.785386085510254, |
|
"learning_rate": 9.813164575929628e-06, |
|
"loss": 0.4035, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.3536585365853657, |
|
"grad_norm": 2.7874209880828857, |
|
"learning_rate": 9.807664772370689e-06, |
|
"loss": 0.4387, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 1.3658536585365852, |
|
"grad_norm": 2.616459369659424, |
|
"learning_rate": 9.80208677751438e-06, |
|
"loss": 0.4403, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.3780487804878048, |
|
"grad_norm": 2.593151092529297, |
|
"learning_rate": 9.79643068208056e-06, |
|
"loss": 0.418, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.3902439024390243, |
|
"grad_norm": 2.3522331714630127, |
|
"learning_rate": 9.7906965780593e-06, |
|
"loss": 0.3226, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.4024390243902438, |
|
"grad_norm": 2.945878028869629, |
|
"learning_rate": 9.784884558709398e-06, |
|
"loss": 0.4744, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.4146341463414633, |
|
"grad_norm": 2.6254990100860596, |
|
"learning_rate": 9.778994718556856e-06, |
|
"loss": 0.3656, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.4268292682926829, |
|
"grad_norm": 2.6019349098205566, |
|
"learning_rate": 9.773027153393349e-06, |
|
"loss": 0.3957, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 1.4390243902439024, |
|
"grad_norm": 2.8025217056274414, |
|
"learning_rate": 9.766981960274653e-06, |
|
"loss": 0.4242, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.451219512195122, |
|
"grad_norm": 2.747736930847168, |
|
"learning_rate": 9.760859237519087e-06, |
|
"loss": 0.4247, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 1.4634146341463414, |
|
"grad_norm": 2.8022918701171875, |
|
"learning_rate": 9.754659084705893e-06, |
|
"loss": 0.3984, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.475609756097561, |
|
"grad_norm": 2.5835225582122803, |
|
"learning_rate": 9.748381602673633e-06, |
|
"loss": 0.4205, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 1.4878048780487805, |
|
"grad_norm": 2.7356934547424316, |
|
"learning_rate": 9.742026893518541e-06, |
|
"loss": 0.4098, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 2.6171412467956543, |
|
"learning_rate": 9.735595060592861e-06, |
|
"loss": 0.4281, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 1.5121951219512195, |
|
"grad_norm": 2.646216630935669, |
|
"learning_rate": 9.729086208503174e-06, |
|
"loss": 0.4301, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.524390243902439, |
|
"grad_norm": 3.031221866607666, |
|
"learning_rate": 9.722500443108687e-06, |
|
"loss": 0.5132, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.5365853658536586, |
|
"grad_norm": 2.813753843307495, |
|
"learning_rate": 9.715837871519518e-06, |
|
"loss": 0.464, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.548780487804878, |
|
"grad_norm": 2.7644271850585938, |
|
"learning_rate": 9.709098602094952e-06, |
|
"loss": 0.4589, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.5609756097560976, |
|
"grad_norm": 2.8581771850585938, |
|
"learning_rate": 9.70228274444168e-06, |
|
"loss": 0.4659, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.5731707317073171, |
|
"grad_norm": 2.6003692150115967, |
|
"learning_rate": 9.695390409412011e-06, |
|
"loss": 0.3784, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.5853658536585367, |
|
"grad_norm": 2.455249547958374, |
|
"learning_rate": 9.688421709102076e-06, |
|
"loss": 0.4141, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.5975609756097562, |
|
"grad_norm": 2.439664363861084, |
|
"learning_rate": 9.681376756850003e-06, |
|
"loss": 0.3995, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.6097560975609757, |
|
"grad_norm": 2.6555984020233154, |
|
"learning_rate": 9.67425566723407e-06, |
|
"loss": 0.4611, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.6219512195121952, |
|
"grad_norm": 2.4294567108154297, |
|
"learning_rate": 9.667058556070846e-06, |
|
"loss": 0.4316, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.6341463414634148, |
|
"grad_norm": 2.5822300910949707, |
|
"learning_rate": 9.659785540413303e-06, |
|
"loss": 0.4274, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.6463414634146343, |
|
"grad_norm": 2.7250919342041016, |
|
"learning_rate": 9.652436738548917e-06, |
|
"loss": 0.4271, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.6585365853658538, |
|
"grad_norm": 2.6819536685943604, |
|
"learning_rate": 9.645012269997747e-06, |
|
"loss": 0.4141, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.6707317073170733, |
|
"grad_norm": 2.830106496810913, |
|
"learning_rate": 9.637512255510475e-06, |
|
"loss": 0.466, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.6829268292682928, |
|
"grad_norm": 2.6315557956695557, |
|
"learning_rate": 9.629936817066459e-06, |
|
"loss": 0.4085, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.6951219512195121, |
|
"grad_norm": 2.916368246078491, |
|
"learning_rate": 9.622286077871748e-06, |
|
"loss": 0.4728, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.7073170731707317, |
|
"grad_norm": 3.0268235206604004, |
|
"learning_rate": 9.614560162357065e-06, |
|
"loss": 0.4548, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.7195121951219512, |
|
"grad_norm": 2.8294835090637207, |
|
"learning_rate": 9.606759196175799e-06, |
|
"loss": 0.4145, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.7317073170731707, |
|
"grad_norm": 2.861173391342163, |
|
"learning_rate": 9.598883306201949e-06, |
|
"loss": 0.4283, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.7439024390243902, |
|
"grad_norm": 2.8794517517089844, |
|
"learning_rate": 9.590932620528068e-06, |
|
"loss": 0.5036, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.7560975609756098, |
|
"grad_norm": 2.633896589279175, |
|
"learning_rate": 9.58290726846318e-06, |
|
"loss": 0.4355, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.7682926829268293, |
|
"grad_norm": 2.5964772701263428, |
|
"learning_rate": 9.57480738053067e-06, |
|
"loss": 0.443, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.7804878048780488, |
|
"grad_norm": 2.5255353450775146, |
|
"learning_rate": 9.566633088466169e-06, |
|
"loss": 0.4135, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.7926829268292683, |
|
"grad_norm": 2.3389077186584473, |
|
"learning_rate": 9.558384525215406e-06, |
|
"loss": 0.4233, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.8048780487804879, |
|
"grad_norm": 2.570801019668579, |
|
"learning_rate": 9.550061824932047e-06, |
|
"loss": 0.4227, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.8170731707317072, |
|
"grad_norm": 2.7482798099517822, |
|
"learning_rate": 9.54166512297552e-06, |
|
"loss": 0.4779, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.8292682926829267, |
|
"grad_norm": 3.0880026817321777, |
|
"learning_rate": 9.533194555908796e-06, |
|
"loss": 0.5231, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.8414634146341462, |
|
"grad_norm": 2.6744909286499023, |
|
"learning_rate": 9.524650261496195e-06, |
|
"loss": 0.4608, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.8536585365853657, |
|
"grad_norm": 2.891713857650757, |
|
"learning_rate": 9.516032378701117e-06, |
|
"loss": 0.473, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.8658536585365852, |
|
"grad_norm": 2.547239303588867, |
|
"learning_rate": 9.5073410476838e-06, |
|
"loss": 0.4051, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.8780487804878048, |
|
"grad_norm": 2.723076581954956, |
|
"learning_rate": 9.498576409799034e-06, |
|
"loss": 0.4558, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.8902439024390243, |
|
"grad_norm": 3.1596052646636963, |
|
"learning_rate": 9.489738607593867e-06, |
|
"loss": 0.4865, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.9024390243902438, |
|
"grad_norm": 2.7183949947357178, |
|
"learning_rate": 9.480827784805278e-06, |
|
"loss": 0.5014, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.9146341463414633, |
|
"grad_norm": 2.5864574909210205, |
|
"learning_rate": 9.471844086357848e-06, |
|
"loss": 0.4149, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 1.9268292682926829, |
|
"grad_norm": 2.5046157836914062, |
|
"learning_rate": 9.462787658361394e-06, |
|
"loss": 0.3962, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.9390243902439024, |
|
"grad_norm": 2.8331422805786133, |
|
"learning_rate": 9.453658648108604e-06, |
|
"loss": 0.3853, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 1.951219512195122, |
|
"grad_norm": 2.512298822402954, |
|
"learning_rate": 9.444457204072632e-06, |
|
"loss": 0.4437, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.9634146341463414, |
|
"grad_norm": 2.444852828979492, |
|
"learning_rate": 9.435183475904688e-06, |
|
"loss": 0.3504, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 1.975609756097561, |
|
"grad_norm": 2.8331000804901123, |
|
"learning_rate": 9.425837614431601e-06, |
|
"loss": 0.4716, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.9878048780487805, |
|
"grad_norm": 2.661059856414795, |
|
"learning_rate": 9.416419771653368e-06, |
|
"loss": 0.4385, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 2.646305799484253, |
|
"learning_rate": 9.406930100740686e-06, |
|
"loss": 0.4184, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 2.0121951219512195, |
|
"grad_norm": 2.712597608566284, |
|
"learning_rate": 9.397368756032445e-06, |
|
"loss": 0.2314, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 2.024390243902439, |
|
"grad_norm": 2.586576461791992, |
|
"learning_rate": 9.387735893033244e-06, |
|
"loss": 0.1831, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 2.0365853658536586, |
|
"grad_norm": 2.5278258323669434, |
|
"learning_rate": 9.378031668410836e-06, |
|
"loss": 0.2375, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 2.048780487804878, |
|
"grad_norm": 2.541187047958374, |
|
"learning_rate": 9.368256239993597e-06, |
|
"loss": 0.1981, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 2.0609756097560976, |
|
"grad_norm": 2.764477252960205, |
|
"learning_rate": 9.358409766767946e-06, |
|
"loss": 0.2029, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 2.073170731707317, |
|
"grad_norm": 2.4784131050109863, |
|
"learning_rate": 9.348492408875779e-06, |
|
"loss": 0.1535, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 2.0853658536585367, |
|
"grad_norm": 2.915125846862793, |
|
"learning_rate": 9.338504327611839e-06, |
|
"loss": 0.1598, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 2.097560975609756, |
|
"grad_norm": 2.7254488468170166, |
|
"learning_rate": 9.328445685421113e-06, |
|
"loss": 0.1462, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 2.1097560975609757, |
|
"grad_norm": 2.9409985542297363, |
|
"learning_rate": 9.318316645896182e-06, |
|
"loss": 0.203, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 2.1219512195121952, |
|
"grad_norm": 2.588385820388794, |
|
"learning_rate": 9.308117373774555e-06, |
|
"loss": 0.1795, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 2.1341463414634148, |
|
"grad_norm": 2.7931816577911377, |
|
"learning_rate": 9.297848034936007e-06, |
|
"loss": 0.1993, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 2.1463414634146343, |
|
"grad_norm": 2.3102173805236816, |
|
"learning_rate": 9.287508796399858e-06, |
|
"loss": 0.1839, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 2.158536585365854, |
|
"grad_norm": 2.3756439685821533, |
|
"learning_rate": 9.277099826322277e-06, |
|
"loss": 0.2063, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 2.1707317073170733, |
|
"grad_norm": 2.2752017974853516, |
|
"learning_rate": 9.266621293993534e-06, |
|
"loss": 0.1699, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 2.182926829268293, |
|
"grad_norm": 2.484127998352051, |
|
"learning_rate": 9.256073369835255e-06, |
|
"loss": 0.1763, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 2.1951219512195124, |
|
"grad_norm": 2.3598098754882812, |
|
"learning_rate": 9.245456225397642e-06, |
|
"loss": 0.1677, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 2.207317073170732, |
|
"grad_norm": 2.2330524921417236, |
|
"learning_rate": 9.23477003335669e-06, |
|
"loss": 0.185, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 2.2195121951219514, |
|
"grad_norm": 2.439162492752075, |
|
"learning_rate": 9.224014967511378e-06, |
|
"loss": 0.1582, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 2.231707317073171, |
|
"grad_norm": 2.601541042327881, |
|
"learning_rate": 9.213191202780835e-06, |
|
"loss": 0.1737, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 2.2439024390243905, |
|
"grad_norm": 2.3318488597869873, |
|
"learning_rate": 9.20229891520151e-06, |
|
"loss": 0.1688, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 2.2560975609756095, |
|
"grad_norm": 2.883798122406006, |
|
"learning_rate": 9.191338281924288e-06, |
|
"loss": 0.2094, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 2.2682926829268295, |
|
"grad_norm": 2.4024503231048584, |
|
"learning_rate": 9.180309481211629e-06, |
|
"loss": 0.183, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 2.2804878048780486, |
|
"grad_norm": 2.7932958602905273, |
|
"learning_rate": 9.169212692434658e-06, |
|
"loss": 0.2388, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 2.292682926829268, |
|
"grad_norm": 2.345780372619629, |
|
"learning_rate": 9.158048096070249e-06, |
|
"loss": 0.1698, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 2.3048780487804876, |
|
"grad_norm": 2.3633759021759033, |
|
"learning_rate": 9.14681587369809e-06, |
|
"loss": 0.1797, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 2.317073170731707, |
|
"grad_norm": 2.4073266983032227, |
|
"learning_rate": 9.13551620799773e-06, |
|
"loss": 0.1744, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 2.3292682926829267, |
|
"grad_norm": 2.4266092777252197, |
|
"learning_rate": 9.124149282745614e-06, |
|
"loss": 0.1874, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 2.341463414634146, |
|
"grad_norm": 2.277799129486084, |
|
"learning_rate": 9.112715282812081e-06, |
|
"loss": 0.2014, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 2.3536585365853657, |
|
"grad_norm": 2.5907177925109863, |
|
"learning_rate": 9.101214394158371e-06, |
|
"loss": 0.1911, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 2.3658536585365852, |
|
"grad_norm": 2.6057519912719727, |
|
"learning_rate": 9.089646803833589e-06, |
|
"loss": 0.2172, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 2.3780487804878048, |
|
"grad_norm": 2.3195533752441406, |
|
"learning_rate": 9.078012699971673e-06, |
|
"loss": 0.184, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 2.3902439024390243, |
|
"grad_norm": 2.62652850151062, |
|
"learning_rate": 9.066312271788323e-06, |
|
"loss": 0.2185, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 2.402439024390244, |
|
"grad_norm": 2.2538259029388428, |
|
"learning_rate": 9.054545709577939e-06, |
|
"loss": 0.1797, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 2.4146341463414633, |
|
"grad_norm": 2.573920965194702, |
|
"learning_rate": 9.042713204710509e-06, |
|
"loss": 0.1791, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 2.426829268292683, |
|
"grad_norm": 2.010896921157837, |
|
"learning_rate": 9.030814949628509e-06, |
|
"loss": 0.1471, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 2.4390243902439024, |
|
"grad_norm": 2.5009727478027344, |
|
"learning_rate": 9.018851137843765e-06, |
|
"loss": 0.1805, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 2.451219512195122, |
|
"grad_norm": 2.673194169998169, |
|
"learning_rate": 9.006821963934316e-06, |
|
"loss": 0.2134, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 2.4634146341463414, |
|
"grad_norm": 2.851163387298584, |
|
"learning_rate": 8.994727623541237e-06, |
|
"loss": 0.1902, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 2.475609756097561, |
|
"grad_norm": 3.064375877380371, |
|
"learning_rate": 8.982568313365467e-06, |
|
"loss": 0.2247, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 2.4878048780487805, |
|
"grad_norm": 2.5090184211730957, |
|
"learning_rate": 8.970344231164602e-06, |
|
"loss": 0.2022, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 2.2384963035583496, |
|
"learning_rate": 8.958055575749685e-06, |
|
"loss": 0.1954, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 2.5121951219512195, |
|
"grad_norm": 2.3855085372924805, |
|
"learning_rate": 8.94570254698197e-06, |
|
"loss": 0.2088, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 2.524390243902439, |
|
"grad_norm": 2.38485050201416, |
|
"learning_rate": 8.933285345769671e-06, |
|
"loss": 0.1926, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 2.5365853658536586, |
|
"grad_norm": 2.5828115940093994, |
|
"learning_rate": 8.920804174064697e-06, |
|
"loss": 0.2452, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 2.548780487804878, |
|
"grad_norm": 2.271554470062256, |
|
"learning_rate": 8.908259234859365e-06, |
|
"loss": 0.1858, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 2.5609756097560976, |
|
"grad_norm": 2.114044189453125, |
|
"learning_rate": 8.895650732183094e-06, |
|
"loss": 0.1766, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 2.573170731707317, |
|
"grad_norm": 2.3854148387908936, |
|
"learning_rate": 8.882978871099104e-06, |
|
"loss": 0.2026, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 2.5853658536585367, |
|
"grad_norm": 2.409749746322632, |
|
"learning_rate": 8.870243857701054e-06, |
|
"loss": 0.2135, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 2.597560975609756, |
|
"grad_norm": 2.269014596939087, |
|
"learning_rate": 8.857445899109716e-06, |
|
"loss": 0.173, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 2.6097560975609757, |
|
"grad_norm": 2.1958768367767334, |
|
"learning_rate": 8.84458520346959e-06, |
|
"loss": 0.1803, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 2.6219512195121952, |
|
"grad_norm": 2.2031567096710205, |
|
"learning_rate": 8.831661979945522e-06, |
|
"loss": 0.1701, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 2.6341463414634148, |
|
"grad_norm": 2.523292303085327, |
|
"learning_rate": 8.818676438719314e-06, |
|
"loss": 0.1988, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 2.6463414634146343, |
|
"grad_norm": 2.597362995147705, |
|
"learning_rate": 8.805628790986284e-06, |
|
"loss": 0.2264, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 2.658536585365854, |
|
"grad_norm": 2.802621603012085, |
|
"learning_rate": 8.792519248951851e-06, |
|
"loss": 0.2293, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 2.6707317073170733, |
|
"grad_norm": 2.707906484603882, |
|
"learning_rate": 8.779348025828071e-06, |
|
"loss": 0.2012, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 2.682926829268293, |
|
"grad_norm": 2.630911350250244, |
|
"learning_rate": 8.766115335830178e-06, |
|
"loss": 0.1975, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.6951219512195124, |
|
"grad_norm": 2.492384195327759, |
|
"learning_rate": 8.752821394173092e-06, |
|
"loss": 0.1893, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 2.7073170731707314, |
|
"grad_norm": 2.3401095867156982, |
|
"learning_rate": 8.739466417067926e-06, |
|
"loss": 0.1769, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 2.7195121951219514, |
|
"grad_norm": 2.6099853515625, |
|
"learning_rate": 8.726050621718462e-06, |
|
"loss": 0.1746, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 2.7317073170731705, |
|
"grad_norm": 2.4008710384368896, |
|
"learning_rate": 8.71257422631763e-06, |
|
"loss": 0.222, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 2.7439024390243905, |
|
"grad_norm": 2.5295095443725586, |
|
"learning_rate": 8.699037450043945e-06, |
|
"loss": 0.2196, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 2.7560975609756095, |
|
"grad_norm": 2.4341542720794678, |
|
"learning_rate": 8.685440513057955e-06, |
|
"loss": 0.2019, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 2.7682926829268295, |
|
"grad_norm": 2.379326343536377, |
|
"learning_rate": 8.671783636498652e-06, |
|
"loss": 0.2263, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 2.7804878048780486, |
|
"grad_norm": 2.4653515815734863, |
|
"learning_rate": 8.658067042479877e-06, |
|
"loss": 0.197, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 2.7926829268292686, |
|
"grad_norm": 2.4599173069000244, |
|
"learning_rate": 8.644290954086711e-06, |
|
"loss": 0.1995, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 2.8048780487804876, |
|
"grad_norm": 2.559979200363159, |
|
"learning_rate": 8.630455595371846e-06, |
|
"loss": 0.2138, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.817073170731707, |
|
"grad_norm": 2.173933267593384, |
|
"learning_rate": 8.616561191351934e-06, |
|
"loss": 0.1822, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 2.8292682926829267, |
|
"grad_norm": 2.4872312545776367, |
|
"learning_rate": 8.602607968003935e-06, |
|
"loss": 0.1805, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 2.841463414634146, |
|
"grad_norm": 2.255208730697632, |
|
"learning_rate": 8.588596152261447e-06, |
|
"loss": 0.1825, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 2.8536585365853657, |
|
"grad_norm": 2.602861166000366, |
|
"learning_rate": 8.574525972010997e-06, |
|
"loss": 0.2079, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 2.8658536585365852, |
|
"grad_norm": 2.6664066314697266, |
|
"learning_rate": 8.560397656088353e-06, |
|
"loss": 0.1909, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 2.8780487804878048, |
|
"grad_norm": 3.140064001083374, |
|
"learning_rate": 8.546211434274791e-06, |
|
"loss": 0.1985, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 2.8902439024390243, |
|
"grad_norm": 2.759251832962036, |
|
"learning_rate": 8.531967537293365e-06, |
|
"loss": 0.1862, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 2.902439024390244, |
|
"grad_norm": 2.6289727687835693, |
|
"learning_rate": 8.517666196805142e-06, |
|
"loss": 0.207, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 2.9146341463414633, |
|
"grad_norm": 2.671435594558716, |
|
"learning_rate": 8.503307645405461e-06, |
|
"loss": 0.2103, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 2.926829268292683, |
|
"grad_norm": 2.4491989612579346, |
|
"learning_rate": 8.488892116620114e-06, |
|
"loss": 0.2086, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 2.9390243902439024, |
|
"grad_norm": 2.1627562046051025, |
|
"learning_rate": 8.474419844901575e-06, |
|
"loss": 0.1785, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 2.951219512195122, |
|
"grad_norm": 2.683394432067871, |
|
"learning_rate": 8.459891065625184e-06, |
|
"loss": 0.2746, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 2.9634146341463414, |
|
"grad_norm": 2.3977043628692627, |
|
"learning_rate": 8.445306015085301e-06, |
|
"loss": 0.2042, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 2.975609756097561, |
|
"grad_norm": 2.0520613193511963, |
|
"learning_rate": 8.430664930491485e-06, |
|
"loss": 0.1897, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 2.9878048780487805, |
|
"grad_norm": 2.36509370803833, |
|
"learning_rate": 8.415968049964623e-06, |
|
"loss": 0.1859, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 2.167886257171631, |
|
"learning_rate": 8.401215612533056e-06, |
|
"loss": 0.1665, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 3.0121951219512195, |
|
"grad_norm": 1.8608198165893555, |
|
"learning_rate": 8.386407858128707e-06, |
|
"loss": 0.1037, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 3.024390243902439, |
|
"grad_norm": 1.8207582235336304, |
|
"learning_rate": 8.371545027583154e-06, |
|
"loss": 0.0807, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 3.0365853658536586, |
|
"grad_norm": 1.7909525632858276, |
|
"learning_rate": 8.356627362623742e-06, |
|
"loss": 0.0819, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 3.048780487804878, |
|
"grad_norm": 2.130682945251465, |
|
"learning_rate": 8.341655105869622e-06, |
|
"loss": 0.1154, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 3.0609756097560976, |
|
"grad_norm": 1.9704978466033936, |
|
"learning_rate": 8.326628500827826e-06, |
|
"loss": 0.0959, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 3.073170731707317, |
|
"grad_norm": 2.402252197265625, |
|
"learning_rate": 8.311547791889307e-06, |
|
"loss": 0.1006, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 3.0853658536585367, |
|
"grad_norm": 2.2904582023620605, |
|
"learning_rate": 8.296413224324944e-06, |
|
"loss": 0.0985, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 3.097560975609756, |
|
"grad_norm": 2.511240005493164, |
|
"learning_rate": 8.281225044281578e-06, |
|
"loss": 0.0695, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 3.1097560975609757, |
|
"grad_norm": 2.37315034866333, |
|
"learning_rate": 8.265983498777987e-06, |
|
"loss": 0.086, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 3.1219512195121952, |
|
"grad_norm": 2.4025444984436035, |
|
"learning_rate": 8.25068883570089e-06, |
|
"loss": 0.0877, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 3.1341463414634148, |
|
"grad_norm": 2.855544328689575, |
|
"learning_rate": 8.235341303800892e-06, |
|
"loss": 0.1104, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 3.1463414634146343, |
|
"grad_norm": 2.7334654331207275, |
|
"learning_rate": 8.219941152688459e-06, |
|
"loss": 0.0996, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 3.158536585365854, |
|
"grad_norm": 1.7848544120788574, |
|
"learning_rate": 8.204488632829848e-06, |
|
"loss": 0.0779, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 3.1707317073170733, |
|
"grad_norm": 2.5994298458099365, |
|
"learning_rate": 8.188983995543031e-06, |
|
"loss": 0.1027, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 3.182926829268293, |
|
"grad_norm": 2.1597657203674316, |
|
"learning_rate": 8.173427492993617e-06, |
|
"loss": 0.0974, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 3.1951219512195124, |
|
"grad_norm": 2.6595215797424316, |
|
"learning_rate": 8.157819378190743e-06, |
|
"loss": 0.1053, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 3.207317073170732, |
|
"grad_norm": 1.92975652217865, |
|
"learning_rate": 8.142159904982963e-06, |
|
"loss": 0.1003, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 3.2195121951219514, |
|
"grad_norm": 1.939504861831665, |
|
"learning_rate": 8.126449328054115e-06, |
|
"loss": 0.0948, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 3.231707317073171, |
|
"grad_norm": 2.238565444946289, |
|
"learning_rate": 8.110687902919185e-06, |
|
"loss": 0.1021, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 3.2439024390243905, |
|
"grad_norm": 2.1030704975128174, |
|
"learning_rate": 8.094875885920148e-06, |
|
"loss": 0.0961, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 3.2560975609756095, |
|
"grad_norm": 2.0035948753356934, |
|
"learning_rate": 8.079013534221798e-06, |
|
"loss": 0.0985, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 3.2682926829268295, |
|
"grad_norm": 2.1001100540161133, |
|
"learning_rate": 8.063101105807566e-06, |
|
"loss": 0.1089, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 3.2804878048780486, |
|
"grad_norm": 1.935497760772705, |
|
"learning_rate": 8.047138859475328e-06, |
|
"loss": 0.0882, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 3.292682926829268, |
|
"grad_norm": 2.4864578247070312, |
|
"learning_rate": 8.031127054833192e-06, |
|
"loss": 0.1085, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 3.3048780487804876, |
|
"grad_norm": 1.89180326461792, |
|
"learning_rate": 8.01506595229527e-06, |
|
"loss": 0.1096, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 3.317073170731707, |
|
"grad_norm": 2.166079521179199, |
|
"learning_rate": 7.998955813077457e-06, |
|
"loss": 0.0717, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 3.3292682926829267, |
|
"grad_norm": 2.1305079460144043, |
|
"learning_rate": 7.982796899193177e-06, |
|
"loss": 0.1042, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 3.341463414634146, |
|
"grad_norm": 2.0318334102630615, |
|
"learning_rate": 7.966589473449109e-06, |
|
"loss": 0.0943, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 3.3536585365853657, |
|
"grad_norm": 2.6421074867248535, |
|
"learning_rate": 7.95033379944093e-06, |
|
"loss": 0.1161, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 3.3658536585365852, |
|
"grad_norm": 2.3139538764953613, |
|
"learning_rate": 7.934030141549024e-06, |
|
"loss": 0.1219, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 3.3780487804878048, |
|
"grad_norm": 2.0743587017059326, |
|
"learning_rate": 7.917678764934169e-06, |
|
"loss": 0.1024, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 3.3902439024390243, |
|
"grad_norm": 2.187187671661377, |
|
"learning_rate": 7.901279935533248e-06, |
|
"loss": 0.0864, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 3.402439024390244, |
|
"grad_norm": 1.9640257358551025, |
|
"learning_rate": 7.8848339200549e-06, |
|
"loss": 0.0954, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 3.4146341463414633, |
|
"grad_norm": 2.0996806621551514, |
|
"learning_rate": 7.868340985975195e-06, |
|
"loss": 0.0941, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 3.426829268292683, |
|
"grad_norm": 2.0792341232299805, |
|
"learning_rate": 7.851801401533288e-06, |
|
"loss": 0.0908, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 3.4390243902439024, |
|
"grad_norm": 2.0881197452545166, |
|
"learning_rate": 7.835215435727042e-06, |
|
"loss": 0.1059, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 3.451219512195122, |
|
"grad_norm": 2.6827352046966553, |
|
"learning_rate": 7.818583358308664e-06, |
|
"loss": 0.1316, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 3.4634146341463414, |
|
"grad_norm": 2.0524280071258545, |
|
"learning_rate": 7.801905439780317e-06, |
|
"loss": 0.0957, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 3.475609756097561, |
|
"grad_norm": 2.184852361679077, |
|
"learning_rate": 7.785181951389718e-06, |
|
"loss": 0.1123, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 3.4878048780487805, |
|
"grad_norm": 2.2295339107513428, |
|
"learning_rate": 7.76841316512572e-06, |
|
"loss": 0.1198, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"grad_norm": 2.101522922515869, |
|
"learning_rate": 7.751599353713906e-06, |
|
"loss": 0.0991, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 3.5121951219512195, |
|
"grad_norm": 1.8743051290512085, |
|
"learning_rate": 7.734740790612137e-06, |
|
"loss": 0.0869, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 3.524390243902439, |
|
"grad_norm": 1.9927822351455688, |
|
"learning_rate": 7.717837750006106e-06, |
|
"loss": 0.1094, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 3.5365853658536586, |
|
"grad_norm": 2.079759359359741, |
|
"learning_rate": 7.700890506804895e-06, |
|
"loss": 0.1011, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 3.548780487804878, |
|
"grad_norm": 2.3300259113311768, |
|
"learning_rate": 7.68389933663648e-06, |
|
"loss": 0.1374, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 3.5609756097560976, |
|
"grad_norm": 2.1061301231384277, |
|
"learning_rate": 7.666864515843266e-06, |
|
"loss": 0.1123, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 3.573170731707317, |
|
"grad_norm": 1.9325755834579468, |
|
"learning_rate": 7.649786321477585e-06, |
|
"loss": 0.1052, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 3.5853658536585367, |
|
"grad_norm": 2.3022353649139404, |
|
"learning_rate": 7.632665031297193e-06, |
|
"loss": 0.102, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 3.597560975609756, |
|
"grad_norm": 1.8938615322113037, |
|
"learning_rate": 7.615500923760748e-06, |
|
"loss": 0.1065, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 3.6097560975609757, |
|
"grad_norm": 1.8526796102523804, |
|
"learning_rate": 7.59829427802329e-06, |
|
"loss": 0.0971, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 3.6219512195121952, |
|
"grad_norm": 2.010892391204834, |
|
"learning_rate": 7.581045373931691e-06, |
|
"loss": 0.0932, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 3.6341463414634148, |
|
"grad_norm": 2.140416383743286, |
|
"learning_rate": 7.563754492020108e-06, |
|
"loss": 0.0934, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 3.6463414634146343, |
|
"grad_norm": 1.9991627931594849, |
|
"learning_rate": 7.54642191350542e-06, |
|
"loss": 0.1137, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 3.658536585365854, |
|
"grad_norm": 1.98257577419281, |
|
"learning_rate": 7.5290479202826596e-06, |
|
"loss": 0.1058, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 3.6707317073170733, |
|
"grad_norm": 1.9862565994262695, |
|
"learning_rate": 7.511632794920419e-06, |
|
"loss": 0.0977, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 3.682926829268293, |
|
"grad_norm": 2.034688711166382, |
|
"learning_rate": 7.494176820656258e-06, |
|
"loss": 0.1248, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 3.6951219512195124, |
|
"grad_norm": 1.8107631206512451, |
|
"learning_rate": 7.4766802813921016e-06, |
|
"loss": 0.0888, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 3.7073170731707314, |
|
"grad_norm": 1.7797682285308838, |
|
"learning_rate": 7.4591434616896156e-06, |
|
"loss": 0.0971, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 3.7195121951219514, |
|
"grad_norm": 1.8483872413635254, |
|
"learning_rate": 7.4415666467655835e-06, |
|
"loss": 0.1033, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 3.7317073170731705, |
|
"grad_norm": 1.8434807062149048, |
|
"learning_rate": 7.423950122487269e-06, |
|
"loss": 0.0929, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 3.7439024390243905, |
|
"grad_norm": 2.006572961807251, |
|
"learning_rate": 7.406294175367758e-06, |
|
"loss": 0.1034, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 3.7560975609756095, |
|
"grad_norm": 2.015620708465576, |
|
"learning_rate": 7.388599092561315e-06, |
|
"loss": 0.1091, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 3.7682926829268295, |
|
"grad_norm": 2.08795428276062, |
|
"learning_rate": 7.3708651618586925e-06, |
|
"loss": 0.0908, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 3.7804878048780486, |
|
"grad_norm": 2.066549777984619, |
|
"learning_rate": 7.353092671682464e-06, |
|
"loss": 0.093, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 3.7926829268292686, |
|
"grad_norm": 2.227687120437622, |
|
"learning_rate": 7.335281911082332e-06, |
|
"loss": 0.1042, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 3.8048780487804876, |
|
"grad_norm": 2.5046164989471436, |
|
"learning_rate": 7.317433169730421e-06, |
|
"loss": 0.136, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 3.817073170731707, |
|
"grad_norm": 2.0135955810546875, |
|
"learning_rate": 7.299546737916574e-06, |
|
"loss": 0.0942, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 3.8292682926829267, |
|
"grad_norm": 2.3147573471069336, |
|
"learning_rate": 7.281622906543625e-06, |
|
"loss": 0.11, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 3.841463414634146, |
|
"grad_norm": 2.515584707260132, |
|
"learning_rate": 7.26366196712267e-06, |
|
"loss": 0.1248, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 3.8536585365853657, |
|
"grad_norm": 1.988805890083313, |
|
"learning_rate": 7.245664211768327e-06, |
|
"loss": 0.089, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 3.8658536585365852, |
|
"grad_norm": 2.0414860248565674, |
|
"learning_rate": 7.227629933193983e-06, |
|
"loss": 0.0991, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 3.8780487804878048, |
|
"grad_norm": 1.9820183515548706, |
|
"learning_rate": 7.209559424707034e-06, |
|
"loss": 0.1163, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 3.8902439024390243, |
|
"grad_norm": 1.9290958642959595, |
|
"learning_rate": 7.191452980204119e-06, |
|
"loss": 0.1201, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 3.902439024390244, |
|
"grad_norm": 1.9230592250823975, |
|
"learning_rate": 7.173310894166328e-06, |
|
"loss": 0.1138, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 3.9146341463414633, |
|
"grad_norm": 1.6345875263214111, |
|
"learning_rate": 7.155133461654429e-06, |
|
"loss": 0.0935, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 3.926829268292683, |
|
"grad_norm": 1.9335048198699951, |
|
"learning_rate": 7.136920978304056e-06, |
|
"loss": 0.1031, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 3.9390243902439024, |
|
"grad_norm": 1.7330572605133057, |
|
"learning_rate": 7.118673740320907e-06, |
|
"loss": 0.0945, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 3.951219512195122, |
|
"grad_norm": 1.8825818300247192, |
|
"learning_rate": 7.10039204447593e-06, |
|
"loss": 0.0966, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 3.9634146341463414, |
|
"grad_norm": 2.1690921783447266, |
|
"learning_rate": 7.082076188100483e-06, |
|
"loss": 0.1348, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 3.975609756097561, |
|
"grad_norm": 2.1976025104522705, |
|
"learning_rate": 7.063726469081511e-06, |
|
"loss": 0.1046, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 3.9878048780487805, |
|
"grad_norm": 2.0651566982269287, |
|
"learning_rate": 7.045343185856701e-06, |
|
"loss": 0.0848, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"grad_norm": 2.3218235969543457, |
|
"learning_rate": 7.026926637409615e-06, |
|
"loss": 0.1261, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 4.012195121951219, |
|
"grad_norm": 1.517854928970337, |
|
"learning_rate": 7.008477123264849e-06, |
|
"loss": 0.0424, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 4.024390243902439, |
|
"grad_norm": 1.6785979270935059, |
|
"learning_rate": 6.989994943483136e-06, |
|
"loss": 0.053, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 4.036585365853658, |
|
"grad_norm": 1.0940113067626953, |
|
"learning_rate": 6.971480398656488e-06, |
|
"loss": 0.0347, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 4.048780487804878, |
|
"grad_norm": 1.434532880783081, |
|
"learning_rate": 6.952933789903299e-06, |
|
"loss": 0.0468, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 4.060975609756097, |
|
"grad_norm": 1.7367973327636719, |
|
"learning_rate": 6.93435541886344e-06, |
|
"loss": 0.0439, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 4.073170731707317, |
|
"grad_norm": 1.4013808965682983, |
|
"learning_rate": 6.915745587693365e-06, |
|
"loss": 0.0341, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 4.085365853658536, |
|
"grad_norm": 1.7729628086090088, |
|
"learning_rate": 6.89710459906119e-06, |
|
"loss": 0.0524, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 4.097560975609756, |
|
"grad_norm": 1.8550630807876587, |
|
"learning_rate": 6.878432756141775e-06, |
|
"loss": 0.0559, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 4.109756097560975, |
|
"grad_norm": 1.9048420190811157, |
|
"learning_rate": 6.8597303626117886e-06, |
|
"loss": 0.0567, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 4.121951219512195, |
|
"grad_norm": 2.3313469886779785, |
|
"learning_rate": 6.8409977226447685e-06, |
|
"loss": 0.0589, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 4.134146341463414, |
|
"grad_norm": 1.5067005157470703, |
|
"learning_rate": 6.822235140906183e-06, |
|
"loss": 0.0415, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 4.146341463414634, |
|
"grad_norm": 1.7281876802444458, |
|
"learning_rate": 6.803442922548462e-06, |
|
"loss": 0.0491, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 4.158536585365853, |
|
"grad_norm": 1.7764736413955688, |
|
"learning_rate": 6.784621373206051e-06, |
|
"loss": 0.049, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 4.170731707317073, |
|
"grad_norm": 2.0232222080230713, |
|
"learning_rate": 6.765770798990423e-06, |
|
"loss": 0.0524, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 4.182926829268292, |
|
"grad_norm": 1.9550089836120605, |
|
"learning_rate": 6.746891506485112e-06, |
|
"loss": 0.0526, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 4.195121951219512, |
|
"grad_norm": 2.0394773483276367, |
|
"learning_rate": 6.727983802740723e-06, |
|
"loss": 0.0546, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 4.2073170731707314, |
|
"grad_norm": 1.6590560674667358, |
|
"learning_rate": 6.709047995269939e-06, |
|
"loss": 0.0422, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 4.219512195121951, |
|
"grad_norm": 1.8558006286621094, |
|
"learning_rate": 6.690084392042514e-06, |
|
"loss": 0.0518, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 4.2317073170731705, |
|
"grad_norm": 1.2415188550949097, |
|
"learning_rate": 6.671093301480276e-06, |
|
"loss": 0.0333, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 4.2439024390243905, |
|
"grad_norm": 1.7380534410476685, |
|
"learning_rate": 6.6520750324520965e-06, |
|
"loss": 0.0556, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 4.2560975609756095, |
|
"grad_norm": 1.4161667823791504, |
|
"learning_rate": 6.63302989426888e-06, |
|
"loss": 0.0414, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 4.2682926829268295, |
|
"grad_norm": 1.6313724517822266, |
|
"learning_rate": 6.613958196678525e-06, |
|
"loss": 0.0757, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 4.280487804878049, |
|
"grad_norm": 1.9501330852508545, |
|
"learning_rate": 6.594860249860888e-06, |
|
"loss": 0.0675, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 4.2926829268292686, |
|
"grad_norm": 1.5222731828689575, |
|
"learning_rate": 6.575736364422747e-06, |
|
"loss": 0.0537, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 4.304878048780488, |
|
"grad_norm": 1.367255687713623, |
|
"learning_rate": 6.55658685139273e-06, |
|
"loss": 0.0459, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 4.317073170731708, |
|
"grad_norm": 1.4813297986984253, |
|
"learning_rate": 6.5374120222162815e-06, |
|
"loss": 0.06, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 4.329268292682927, |
|
"grad_norm": 1.5068612098693848, |
|
"learning_rate": 6.518212188750579e-06, |
|
"loss": 0.0514, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 4.341463414634147, |
|
"grad_norm": 1.66206955909729, |
|
"learning_rate": 6.498987663259467e-06, |
|
"loss": 0.0675, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 4.353658536585366, |
|
"grad_norm": 1.4990217685699463, |
|
"learning_rate": 6.479738758408379e-06, |
|
"loss": 0.0695, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 4.365853658536586, |
|
"grad_norm": 1.5749341249465942, |
|
"learning_rate": 6.460465787259251e-06, |
|
"loss": 0.0508, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 4.378048780487805, |
|
"grad_norm": 1.499898076057434, |
|
"learning_rate": 6.44116906326543e-06, |
|
"loss": 0.0591, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 4.390243902439025, |
|
"grad_norm": 1.46736478805542, |
|
"learning_rate": 6.421848900266581e-06, |
|
"loss": 0.05, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 4.402439024390244, |
|
"grad_norm": 1.4807460308074951, |
|
"learning_rate": 6.402505612483569e-06, |
|
"loss": 0.0523, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 4.414634146341464, |
|
"grad_norm": 1.4587833881378174, |
|
"learning_rate": 6.383139514513368e-06, |
|
"loss": 0.0576, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 4.426829268292683, |
|
"grad_norm": 1.4291479587554932, |
|
"learning_rate": 6.363750921323929e-06, |
|
"loss": 0.0479, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 4.439024390243903, |
|
"grad_norm": 1.364157795906067, |
|
"learning_rate": 6.3443401482490615e-06, |
|
"loss": 0.0528, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 4.451219512195122, |
|
"grad_norm": 2.088580369949341, |
|
"learning_rate": 6.32490751098331e-06, |
|
"loss": 0.0605, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 4.463414634146342, |
|
"grad_norm": 1.5994398593902588, |
|
"learning_rate": 6.30545332557681e-06, |
|
"loss": 0.0525, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 4.475609756097561, |
|
"grad_norm": 1.7937228679656982, |
|
"learning_rate": 6.2859779084301584e-06, |
|
"loss": 0.0517, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 4.487804878048781, |
|
"grad_norm": 1.3765718936920166, |
|
"learning_rate": 6.266481576289263e-06, |
|
"loss": 0.041, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"grad_norm": 1.7616742849349976, |
|
"learning_rate": 6.246964646240186e-06, |
|
"loss": 0.0715, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 4.512195121951219, |
|
"grad_norm": 1.496747374534607, |
|
"learning_rate": 6.227427435703997e-06, |
|
"loss": 0.0633, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 4.524390243902439, |
|
"grad_norm": 1.53587007522583, |
|
"learning_rate": 6.207870262431599e-06, |
|
"loss": 0.0557, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 4.536585365853659, |
|
"grad_norm": 1.664995789527893, |
|
"learning_rate": 6.188293444498573e-06, |
|
"loss": 0.0599, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 4.548780487804878, |
|
"grad_norm": 1.8567813634872437, |
|
"learning_rate": 6.1686973002999935e-06, |
|
"loss": 0.0643, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 4.560975609756097, |
|
"grad_norm": 2.01507568359375, |
|
"learning_rate": 6.149082148545258e-06, |
|
"loss": 0.0637, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 4.573170731707317, |
|
"grad_norm": 1.800641417503357, |
|
"learning_rate": 6.129448308252899e-06, |
|
"loss": 0.0587, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 4.585365853658536, |
|
"grad_norm": 2.0126662254333496, |
|
"learning_rate": 6.109796098745398e-06, |
|
"loss": 0.0669, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 4.597560975609756, |
|
"grad_norm": 1.8245577812194824, |
|
"learning_rate": 6.090125839643991e-06, |
|
"loss": 0.0541, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 4.609756097560975, |
|
"grad_norm": 1.3531700372695923, |
|
"learning_rate": 6.070437850863472e-06, |
|
"loss": 0.0445, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 4.621951219512195, |
|
"grad_norm": 1.9308772087097168, |
|
"learning_rate": 6.0507324526069854e-06, |
|
"loss": 0.0608, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 4.634146341463414, |
|
"grad_norm": 1.5027072429656982, |
|
"learning_rate": 6.031009965360824e-06, |
|
"loss": 0.0634, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 4.646341463414634, |
|
"grad_norm": 1.3451308012008667, |
|
"learning_rate": 6.011270709889213e-06, |
|
"loss": 0.0411, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 4.658536585365853, |
|
"grad_norm": 1.618082046508789, |
|
"learning_rate": 5.991515007229093e-06, |
|
"loss": 0.0575, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 4.670731707317073, |
|
"grad_norm": 1.6030172109603882, |
|
"learning_rate": 5.971743178684901e-06, |
|
"loss": 0.0575, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 4.682926829268292, |
|
"grad_norm": 1.582740306854248, |
|
"learning_rate": 5.951955545823342e-06, |
|
"loss": 0.0613, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 4.695121951219512, |
|
"grad_norm": 1.7536263465881348, |
|
"learning_rate": 5.932152430468165e-06, |
|
"loss": 0.052, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 4.7073170731707314, |
|
"grad_norm": 2.1995296478271484, |
|
"learning_rate": 5.912334154694919e-06, |
|
"loss": 0.0629, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 4.719512195121951, |
|
"grad_norm": 1.8581688404083252, |
|
"learning_rate": 5.892501040825721e-06, |
|
"loss": 0.041, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 4.7317073170731705, |
|
"grad_norm": 1.8024824857711792, |
|
"learning_rate": 5.872653411424017e-06, |
|
"loss": 0.0708, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 4.7439024390243905, |
|
"grad_norm": 1.7822990417480469, |
|
"learning_rate": 5.85279158928933e-06, |
|
"loss": 0.0528, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 4.7560975609756095, |
|
"grad_norm": 1.9106731414794922, |
|
"learning_rate": 5.832915897452008e-06, |
|
"loss": 0.0643, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 4.7682926829268295, |
|
"grad_norm": 1.593004584312439, |
|
"learning_rate": 5.813026659167982e-06, |
|
"loss": 0.054, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 4.780487804878049, |
|
"grad_norm": 1.8973208665847778, |
|
"learning_rate": 5.793124197913492e-06, |
|
"loss": 0.0737, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 4.7926829268292686, |
|
"grad_norm": 1.9966886043548584, |
|
"learning_rate": 5.773208837379843e-06, |
|
"loss": 0.0634, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 4.804878048780488, |
|
"grad_norm": 1.5227646827697754, |
|
"learning_rate": 5.753280901468126e-06, |
|
"loss": 0.0496, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 4.817073170731708, |
|
"grad_norm": 1.6435083150863647, |
|
"learning_rate": 5.733340714283959e-06, |
|
"loss": 0.0664, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 4.829268292682927, |
|
"grad_norm": 1.3312773704528809, |
|
"learning_rate": 5.713388600132217e-06, |
|
"loss": 0.0534, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 4.841463414634147, |
|
"grad_norm": 1.868194580078125, |
|
"learning_rate": 5.693424883511748e-06, |
|
"loss": 0.0565, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 4.853658536585366, |
|
"grad_norm": 1.5551823377609253, |
|
"learning_rate": 5.6734498891101005e-06, |
|
"loss": 0.0604, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 4.865853658536586, |
|
"grad_norm": 1.8578870296478271, |
|
"learning_rate": 5.653463941798252e-06, |
|
"loss": 0.0728, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 4.878048780487805, |
|
"grad_norm": 1.5294170379638672, |
|
"learning_rate": 5.633467366625306e-06, |
|
"loss": 0.0637, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 4.890243902439025, |
|
"grad_norm": 1.2593622207641602, |
|
"learning_rate": 5.613460488813225e-06, |
|
"loss": 0.0512, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 4.902439024390244, |
|
"grad_norm": 1.7771371603012085, |
|
"learning_rate": 5.593443633751527e-06, |
|
"loss": 0.0658, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 4.914634146341464, |
|
"grad_norm": 1.5825587511062622, |
|
"learning_rate": 5.573417126992004e-06, |
|
"loss": 0.0671, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 4.926829268292683, |
|
"grad_norm": 1.6244094371795654, |
|
"learning_rate": 5.553381294243413e-06, |
|
"loss": 0.0585, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 4.939024390243903, |
|
"grad_norm": 1.501323938369751, |
|
"learning_rate": 5.5333364613662e-06, |
|
"loss": 0.0578, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 4.951219512195122, |
|
"grad_norm": 1.5930196046829224, |
|
"learning_rate": 5.513282954367179e-06, |
|
"loss": 0.064, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 4.963414634146341, |
|
"grad_norm": 1.4195719957351685, |
|
"learning_rate": 5.493221099394239e-06, |
|
"loss": 0.0443, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 4.975609756097561, |
|
"grad_norm": 1.3484866619110107, |
|
"learning_rate": 5.473151222731044e-06, |
|
"loss": 0.0577, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 4.987804878048781, |
|
"grad_norm": 1.677027940750122, |
|
"learning_rate": 5.453073650791724e-06, |
|
"loss": 0.0604, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"grad_norm": 1.7022733688354492, |
|
"learning_rate": 5.432988710115553e-06, |
|
"loss": 0.0674, |
|
"step": 410 |
|
} |
|
],
"logging_steps": 1,
"max_steps": 820,
"num_input_tokens_seen": 0,
"num_train_epochs": 10,
"save_steps": 1,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 7614453848064.0,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}