{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.999096657633243,
  "eval_steps": 500,
  "global_step": 553,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0018066847335140017, "grad_norm": 2.8401210755443786, "learning_rate": 0.0, "loss": 0.8251, "step": 1},
    {"epoch": 0.0036133694670280035, "grad_norm": 2.884596771811998, "learning_rate": 1.7857142857142857e-06, "loss": 0.8284, "step": 2},
    {"epoch": 0.005420054200542005, "grad_norm": 2.885426730353446, "learning_rate": 3.5714285714285714e-06, "loss": 0.8426, "step": 3},
    {"epoch": 0.007226738934056007, "grad_norm": 2.651910724864975, "learning_rate": 5.357142857142857e-06, "loss": 0.8329, "step": 4},
    {"epoch": 0.009033423667570008, "grad_norm": 2.138099539200911, "learning_rate": 7.142857142857143e-06, "loss": 0.8087, "step": 5},
    {"epoch": 0.01084010840108401, "grad_norm": 1.5516804515831568, "learning_rate": 8.92857142857143e-06, "loss": 0.7878, "step": 6},
    {"epoch": 0.012646793134598013, "grad_norm": 1.3537216088647457, "learning_rate": 1.0714285714285714e-05, "loss": 0.76, "step": 7},
    {"epoch": 0.014453477868112014, "grad_norm": 2.2798033166818095, "learning_rate": 1.25e-05, "loss": 0.7502, "step": 8},
    {"epoch": 0.016260162601626018, "grad_norm": 2.4280919600136315, "learning_rate": 1.4285714285714285e-05, "loss": 0.7449, "step": 9},
    {"epoch": 0.018066847335140017, "grad_norm": 1.931081683969551, "learning_rate": 1.6071428571428572e-05, "loss": 0.7271, "step": 10},
    {"epoch": 0.01987353206865402, "grad_norm": 2.033957326273334, "learning_rate": 1.785714285714286e-05, "loss": 0.7224, "step": 11},
    {"epoch": 0.02168021680216802, "grad_norm": 1.7440303482576578, "learning_rate": 1.9642857142857145e-05, "loss": 0.7373, "step": 12},
    {"epoch": 0.023486901535682024, "grad_norm": 1.1423427090231095, "learning_rate": 2.1428571428571428e-05, "loss": 0.7033, "step": 13},
    {"epoch": 0.025293586269196026, "grad_norm": 0.9350758811837806, "learning_rate": 2.3214285714285715e-05, "loss": 0.6886, "step": 14},
    {"epoch": 0.02710027100271003, "grad_norm": 0.9041529205527444, "learning_rate": 2.5e-05, "loss": 0.6858, "step": 15},
    {"epoch": 0.028906955736224028, "grad_norm": 0.7774159470583337, "learning_rate": 2.6785714285714288e-05, "loss": 0.676, "step": 16},
    {"epoch": 0.03071364046973803, "grad_norm": 0.6642849851627606, "learning_rate": 2.857142857142857e-05, "loss": 0.6816, "step": 17},
    {"epoch": 0.032520325203252036, "grad_norm": 0.6696862181877565, "learning_rate": 3.0357142857142857e-05, "loss": 0.6727, "step": 18},
    {"epoch": 0.03432700993676603, "grad_norm": 0.6199596133483118, "learning_rate": 3.2142857142857144e-05, "loss": 0.6678, "step": 19},
    {"epoch": 0.036133694670280034, "grad_norm": 0.5724104094717255, "learning_rate": 3.392857142857143e-05, "loss": 0.6448, "step": 20},
    {"epoch": 0.037940379403794036, "grad_norm": 0.5762214978597714, "learning_rate": 3.571428571428572e-05, "loss": 0.6506, "step": 21},
    {"epoch": 0.03974706413730804, "grad_norm": 0.5444644294356963, "learning_rate": 3.7500000000000003e-05, "loss": 0.6413, "step": 22},
    {"epoch": 0.04155374887082204, "grad_norm": 0.4801008888914334, "learning_rate": 3.928571428571429e-05, "loss": 0.6425, "step": 23},
    {"epoch": 0.04336043360433604, "grad_norm": 0.4780671650041637, "learning_rate": 4.107142857142857e-05, "loss": 0.6489, "step": 24},
    {"epoch": 0.045167118337850046, "grad_norm": 0.5145358853730851, "learning_rate": 4.2857142857142856e-05, "loss": 0.6306, "step": 25},
    {"epoch": 0.04697380307136405, "grad_norm": 0.4229563893889767, "learning_rate": 4.464285714285715e-05, "loss": 0.6453, "step": 26},
    {"epoch": 0.04878048780487805, "grad_norm": 0.4021332182222708, "learning_rate": 4.642857142857143e-05, "loss": 0.6251, "step": 27},
    {"epoch": 0.05058717253839205, "grad_norm": 0.48615223659558016, "learning_rate": 4.8214285714285716e-05, "loss": 0.6272, "step": 28},
    {"epoch": 0.052393857271906055, "grad_norm": 0.46304419453924084, "learning_rate": 5e-05, "loss": 0.6385, "step": 29},
    {"epoch": 0.05420054200542006, "grad_norm": 0.4055914485047887, "learning_rate": 4.999955240022902e-05, "loss": 0.6406, "step": 30},
    {"epoch": 0.05600722673893405, "grad_norm": 0.4320919468210144, "learning_rate": 4.999820961694372e-05, "loss": 0.6303, "step": 31},
    {"epoch": 0.057813911472448055, "grad_norm": 0.4042983593828653, "learning_rate": 4.999597169822646e-05, "loss": 0.6186, "step": 32},
    {"epoch": 0.05962059620596206, "grad_norm": 0.4310835292518631, "learning_rate": 4.9992838724212585e-05, "loss": 0.6287, "step": 33},
    {"epoch": 0.06142728093947606, "grad_norm": 0.412271814827728, "learning_rate": 4.9988810807087584e-05, "loss": 0.6165, "step": 34},
    {"epoch": 0.06323396567299007, "grad_norm": 0.40598837987994935, "learning_rate": 4.998388809108303e-05, "loss": 0.622, "step": 35},
    {"epoch": 0.06504065040650407, "grad_norm": 0.4223908220324365, "learning_rate": 4.997807075247146e-05, "loss": 0.6189, "step": 36},
    {"epoch": 0.06684733514001806, "grad_norm": 0.38197899028265064, "learning_rate": 4.997135899956001e-05, "loss": 0.623, "step": 37},
    {"epoch": 0.06865401987353206, "grad_norm": 0.3885330150920031, "learning_rate": 4.9963753072683025e-05, "loss": 0.6164, "step": 38},
    {"epoch": 0.07046070460704607, "grad_norm": 0.41115019726029983, "learning_rate": 4.9955253244193375e-05, "loss": 0.6182, "step": 39},
    {"epoch": 0.07226738934056007, "grad_norm": 0.3495037257115665, "learning_rate": 4.994585981845278e-05, "loss": 0.6072, "step": 40},
    {"epoch": 0.07407407407407407, "grad_norm": 0.3973831198418077, "learning_rate": 4.9935573131820854e-05, "loss": 0.6137, "step": 41},
    {"epoch": 0.07588075880758807, "grad_norm": 0.3339390206679997, "learning_rate": 4.9924393552643075e-05, "loss": 0.6064, "step": 42},
    {"epoch": 0.07768744354110207, "grad_norm": 0.3384678373992373, "learning_rate": 4.991232148123761e-05, "loss": 0.606, "step": 43},
    {"epoch": 0.07949412827461608, "grad_norm": 0.3764492630812678, "learning_rate": 4.989935734988098e-05, "loss": 0.601, "step": 44},
    {"epoch": 0.08130081300813008, "grad_norm": 0.35707353533847597, "learning_rate": 4.988550162279255e-05, "loss": 0.6097, "step": 45},
    {"epoch": 0.08310749774164408, "grad_norm": 0.34040533355863656, "learning_rate": 4.987075479611796e-05, "loss": 0.6094, "step": 46},
    {"epoch": 0.08491418247515808, "grad_norm": 0.34496537693497575, "learning_rate": 4.985511739791129e-05, "loss": 0.6129, "step": 47},
    {"epoch": 0.08672086720867209, "grad_norm": 0.3655410266089117, "learning_rate": 4.983858998811622e-05, "loss": 0.5994, "step": 48},
    {"epoch": 0.08852755194218609, "grad_norm": 0.32151319632227454, "learning_rate": 4.9821173158545936e-05, "loss": 0.607, "step": 49},
    {"epoch": 0.09033423667570009, "grad_norm": 0.3573869377855791, "learning_rate": 4.980286753286195e-05, "loss": 0.6067, "step": 50},
    {"epoch": 0.0921409214092141, "grad_norm": 0.3601371463623409, "learning_rate": 4.978367376655177e-05, "loss": 0.5965, "step": 51},
    {"epoch": 0.0939476061427281, "grad_norm": 0.3288839599265164, "learning_rate": 4.976359254690543e-05, "loss": 0.6128, "step": 52},
    {"epoch": 0.0957542908762421, "grad_norm": 0.3899144366212297, "learning_rate": 4.974262459299087e-05, "loss": 0.5992, "step": 53},
    {"epoch": 0.0975609756097561, "grad_norm": 0.31373036051997816, "learning_rate": 4.972077065562821e-05, "loss": 0.5894, "step": 54},
    {"epoch": 0.0993676603432701, "grad_norm": 0.3317064945312652, "learning_rate": 4.969803151736284e-05, "loss": 0.6046, "step": 55},
    {"epoch": 0.1011743450767841, "grad_norm": 0.4008096818415782, "learning_rate": 4.9674407992437394e-05, "loss": 0.6091, "step": 56},
    {"epoch": 0.10298102981029811, "grad_norm": 0.32205121061079506, "learning_rate": 4.964990092676263e-05, "loss": 0.5936, "step": 57},
    {"epoch": 0.10478771454381211, "grad_norm": 0.3834592472356572, "learning_rate": 4.962451119788709e-05, "loss": 0.6035, "step": 58},
    {"epoch": 0.10659439927732611, "grad_norm": 0.4364607834229955, "learning_rate": 4.959823971496574e-05, "loss": 0.5895, "step": 59},
    {"epoch": 0.10840108401084012, "grad_norm": 0.39798614932188364, "learning_rate": 4.957108741872736e-05, "loss": 0.5903, "step": 60},
    {"epoch": 0.1102077687443541, "grad_norm": 0.4078983073259993, "learning_rate": 4.954305528144085e-05, "loss": 0.594, "step": 61},
    {"epoch": 0.1120144534778681, "grad_norm": 0.5064139886264023, "learning_rate": 4.9514144306880506e-05, "loss": 0.5989, "step": 62},
    {"epoch": 0.11382113821138211, "grad_norm": 0.3049367414530056, "learning_rate": 4.9484355530289944e-05, "loss": 0.5982, "step": 63},
    {"epoch": 0.11562782294489611, "grad_norm": 0.45932591541254997, "learning_rate": 4.9453690018345144e-05, "loss": 0.598, "step": 64},
    {"epoch": 0.11743450767841011, "grad_norm": 0.34088884376991047, "learning_rate": 4.9422148869116194e-05, "loss": 0.5914, "step": 65},
    {"epoch": 0.11924119241192412, "grad_norm": 0.4408196994719187, "learning_rate": 4.938973321202799e-05, "loss": 0.5943, "step": 66},
    {"epoch": 0.12104787714543812, "grad_norm": 0.4034840944061305, "learning_rate": 4.935644420781978e-05, "loss": 0.5852, "step": 67},
    {"epoch": 0.12285456187895212, "grad_norm": 0.3532258693927161, "learning_rate": 4.932228304850363e-05, "loss": 0.6003, "step": 68},
    {"epoch": 0.12466124661246612, "grad_norm": 0.4041417285254443, "learning_rate": 4.928725095732169e-05, "loss": 0.6019, "step": 69},
    {"epoch": 0.12646793134598014, "grad_norm": 0.338119605860217, "learning_rate": 4.925134918870245e-05, "loss": 0.6056, "step": 70},
    {"epoch": 0.12827461607949414, "grad_norm": 0.2924641177682486, "learning_rate": 4.9214579028215776e-05, "loss": 0.5784, "step": 71},
    {"epoch": 0.13008130081300814, "grad_norm": 0.37576892630911196, "learning_rate": 4.917694179252692e-05, "loss": 0.5966, "step": 72},
    {"epoch": 0.13188798554652212, "grad_norm": 0.3066908816324021, "learning_rate": 4.91384388293493e-05, "loss": 0.5944, "step": 73},
    {"epoch": 0.13369467028003612, "grad_norm": 0.3296017371410444, "learning_rate": 4.909907151739633e-05, "loss": 0.5863, "step": 74},
    {"epoch": 0.13550135501355012, "grad_norm": 0.3510646842800697, "learning_rate": 4.9058841266332e-05, "loss": 0.5854, "step": 75},
    {"epoch": 0.13730803974706413, "grad_norm": 0.33050013383675975, "learning_rate": 4.90177495167204e-05, "loss": 0.5816, "step": 76},
    {"epoch": 0.13911472448057813, "grad_norm": 0.336545072709173, "learning_rate": 4.897579773997415e-05, "loss": 0.5768, "step": 77},
    {"epoch": 0.14092140921409213, "grad_norm": 0.3401542906091868, "learning_rate": 4.893298743830168e-05, "loss": 0.5877, "step": 78},
    {"epoch": 0.14272809394760613, "grad_norm": 0.30833272476392615, "learning_rate": 4.888932014465352e-05, "loss": 0.5949, "step": 79},
    {"epoch": 0.14453477868112014, "grad_norm": 0.40829913126640544, "learning_rate": 4.88447974226673e-05, "loss": 0.6045, "step": 80},
    {"epoch": 0.14634146341463414, "grad_norm": 0.3247095550760803, "learning_rate": 4.879942086661184e-05, "loss": 0.5901, "step": 81},
    {"epoch": 0.14814814814814814, "grad_norm": 0.40484730371305205, "learning_rate": 4.875319210133004e-05, "loss": 0.5825, "step": 82},
    {"epoch": 0.14995483288166214, "grad_norm": 0.37019522474324174, "learning_rate": 4.870611278218066e-05, "loss": 0.5918, "step": 83},
    {"epoch": 0.15176151761517614, "grad_norm": 0.3654744041300334, "learning_rate": 4.865818459497911e-05, "loss": 0.5865, "step": 84},
    {"epoch": 0.15356820234869015, "grad_norm": 0.3001653084612634, "learning_rate": 4.860940925593703e-05, "loss": 0.5889, "step": 85},
    {"epoch": 0.15537488708220415, "grad_norm": 0.3463142769052332, "learning_rate": 4.8559788511600876e-05, "loss": 0.5881, "step": 86},
    {"epoch": 0.15718157181571815, "grad_norm": 0.3221292284344934, "learning_rate": 4.850932413878934e-05, "loss": 0.5901, "step": 87},
    {"epoch": 0.15898825654923215, "grad_norm": 0.33253415770699135, "learning_rate": 4.8458017944529776e-05, "loss": 0.5952, "step": 88},
    {"epoch": 0.16079494128274616, "grad_norm": 0.37021022779245716, "learning_rate": 4.8405871765993433e-05, "loss": 0.5928, "step": 89},
    {"epoch": 0.16260162601626016, "grad_norm": 0.3197338147243217, "learning_rate": 4.8352887470429726e-05, "loss": 0.5837, "step": 90},
    {"epoch": 0.16440831074977416, "grad_norm": 0.35706780201968874, "learning_rate": 4.8299066955099335e-05, "loss": 0.5811, "step": 91},
    {"epoch": 0.16621499548328816, "grad_norm": 0.3730971582648582, "learning_rate": 4.8244412147206284e-05, "loss": 0.586, "step": 92},
    {"epoch": 0.16802168021680217, "grad_norm": 0.33467520904104064, "learning_rate": 4.8188925003828945e-05, "loss": 0.5919, "step": 93},
    {"epoch": 0.16982836495031617, "grad_norm": 0.41181901183464464, "learning_rate": 4.813260751184992e-05, "loss": 0.5922, "step": 94},
    {"epoch": 0.17163504968383017, "grad_norm": 0.31057237794043846, "learning_rate": 4.807546168788494e-05, "loss": 0.5835, "step": 95},
    {"epoch": 0.17344173441734417, "grad_norm": 0.4045185112342142, "learning_rate": 4.8017489578210604e-05, "loss": 0.5839, "step": 96},
    {"epoch": 0.17524841915085818, "grad_norm": 0.342091450071029, "learning_rate": 4.7958693258691167e-05, "loss": 0.5891, "step": 97},
    {"epoch": 0.17705510388437218, "grad_norm": 0.32193383766669476, "learning_rate": 4.7899074834704165e-05, "loss": 0.5814, "step": 98},
    {"epoch": 0.17886178861788618, "grad_norm": 0.33881983844597735, "learning_rate": 4.783863644106502e-05, "loss": 0.5951, "step": 99},
    {"epoch": 0.18066847335140018, "grad_norm": 0.3141049036238513, "learning_rate": 4.7777380241950645e-05, "loss": 0.5672, "step": 100},
    {"epoch": 0.18247515808491419, "grad_norm": 0.30258609960076854, "learning_rate": 4.7715308430821864e-05, "loss": 0.5831, "step": 101},
    {"epoch": 0.1842818428184282, "grad_norm": 0.33013650150122287, "learning_rate": 4.765242323034498e-05, "loss": 0.6019, "step": 102},
    {"epoch": 0.1860885275519422, "grad_norm": 0.31075076006352215, "learning_rate": 4.758872689231208e-05, "loss": 0.581, "step": 103},
    {"epoch": 0.1878952122854562, "grad_norm": 0.3376659425636789, "learning_rate": 4.752422169756048e-05, "loss": 0.5775, "step": 104},
    {"epoch": 0.1897018970189702, "grad_norm": 0.3373397970824128, "learning_rate": 4.745890995589101e-05, "loss": 0.5705, "step": 105},
    {"epoch": 0.1915085817524842, "grad_norm": 0.3159036744458245, "learning_rate": 4.7392794005985326e-05, "loss": 0.574, "step": 106},
    {"epoch": 0.1933152664859982, "grad_norm": 0.3157369760717556, "learning_rate": 4.732587621532214e-05, "loss": 0.58, "step": 107},
    {"epoch": 0.1951219512195122, "grad_norm": 0.3151534184596026, "learning_rate": 4.725815898009247e-05, "loss": 0.5785, "step": 108},
    {"epoch": 0.1969286359530262, "grad_norm": 0.3067792635656275, "learning_rate": 4.718964472511386e-05, "loss": 0.5864, "step": 109},
    {"epoch": 0.1987353206865402, "grad_norm": 0.34885189836356073, "learning_rate": 4.712033590374346e-05, "loss": 0.5685, "step": 110},
    {"epoch": 0.2005420054200542, "grad_norm": 0.2725346961548414, "learning_rate": 4.705023499779031e-05, "loss": 0.5876, "step": 111},
    {"epoch": 0.2023486901535682, "grad_norm": 0.34146491162388654, "learning_rate": 4.6979344517426345e-05, "loss": 0.5814, "step": 112},
    {"epoch": 0.2041553748870822, "grad_norm": 0.2979010304617956, "learning_rate": 4.690766700109659e-05, "loss": 0.5857, "step": 113},
    {"epoch": 0.20596205962059622, "grad_norm": 0.3317475544222318, "learning_rate": 4.6835205015428246e-05, "loss": 0.5685, "step": 114},
    {"epoch": 0.20776874435411022, "grad_norm": 0.3490432374306535, "learning_rate": 4.676196115513876e-05, "loss": 0.585, "step": 115},
    {"epoch": 0.20957542908762422, "grad_norm": 0.2845519792036694, "learning_rate": 4.668793804294294e-05, "loss": 0.5779, "step": 116},
    {"epoch": 0.21138211382113822, "grad_norm": 0.30626266068728963, "learning_rate": 4.661313832945904e-05, "loss": 0.5901, "step": 117},
    {"epoch": 0.21318879855465223, "grad_norm": 0.3411792829442568, "learning_rate": 4.653756469311381e-05, "loss": 0.5766, "step": 118},
    {"epoch": 0.21499548328816623, "grad_norm": 0.273095998341187, "learning_rate": 4.6461219840046654e-05, "loss": 0.5666, "step": 119},
    {"epoch": 0.21680216802168023, "grad_norm": 0.3070242568215688, "learning_rate": 4.638410650401267e-05, "loss": 0.5861, "step": 120},
    {"epoch": 0.2186088527551942, "grad_norm": 0.315496788680795, "learning_rate": 4.6306227446284775e-05, "loss": 0.5817, "step": 121},
    {"epoch": 0.2204155374887082, "grad_norm": 0.285486208489652, "learning_rate": 4.622758545555485e-05, "loss": 0.5862, "step": 122},
    {"epoch": 0.2222222222222222, "grad_norm": 0.3014286983026253, "learning_rate": 4.614818334783384e-05, "loss": 0.5756, "step": 123},
    {"epoch": 0.2240289069557362, "grad_norm": 0.3286311142757772, "learning_rate": 4.606802396635098e-05, "loss": 0.5622, "step": 124},
    {"epoch": 0.22583559168925021, "grad_norm": 0.3099894773692972, "learning_rate": 4.598711018145193e-05, "loss": 0.5702, "step": 125},
    {"epoch": 0.22764227642276422, "grad_norm": 0.3402171436347276, "learning_rate": 4.590544489049602e-05, "loss": 0.5822, "step": 126},
    {"epoch": 0.22944896115627822, "grad_norm": 0.2949342532195096, "learning_rate": 4.5823031017752485e-05, "loss": 0.5753, "step": 127},
    {"epoch": 0.23125564588979222, "grad_norm": 0.33271367144515557, "learning_rate": 4.5739871514295786e-05, "loss": 0.5746, "step": 128},
    {"epoch": 0.23306233062330622, "grad_norm": 0.2854287194746108, "learning_rate": 4.5655969357899874e-05, "loss": 0.5688, "step": 129},
    {"epoch": 0.23486901535682023, "grad_norm": 0.2911634608087971, "learning_rate": 4.5571327552931645e-05, "loss": 0.5875, "step": 130},
    {"epoch": 0.23667570009033423, "grad_norm": 0.3282856212432573, "learning_rate": 4.54859491302433e-05, "loss": 0.5828, "step": 131},
    {"epoch": 0.23848238482384823, "grad_norm": 0.31230217501155305, "learning_rate": 4.5399837147063825e-05, "loss": 0.5622, "step": 132},
    {"epoch": 0.24028906955736223, "grad_norm": 0.26497408669368616, "learning_rate": 4.531299468688955e-05, "loss": 0.576, "step": 133},
    {"epoch": 0.24209575429087624, "grad_norm": 0.31735124152798094, "learning_rate": 4.522542485937369e-05, "loss": 0.5772, "step": 134},
    {"epoch": 0.24390243902439024, "grad_norm": 0.3015639590739915, "learning_rate": 4.5137130800215025e-05, "loss": 0.5748, "step": 135},
    {"epoch": 0.24570912375790424, "grad_norm": 0.27756271594362325, "learning_rate": 4.50481156710456e-05, "loss": 0.5782, "step": 136},
    {"epoch": 0.24751580849141824, "grad_norm": 0.3311373615406198, "learning_rate": 4.495838265931754e-05, "loss": 0.5632, "step": 137},
    {"epoch": 0.24932249322493225, "grad_norm": 0.26966897365996273, "learning_rate": 4.486793497818889e-05, "loss": 0.5778, "step": 138},
    {"epoch": 0.25112917795844625, "grad_norm": 0.3458179405656153, "learning_rate": 4.477677586640854e-05, "loss": 0.5814, "step": 139},
    {"epoch": 0.2529358626919603, "grad_norm": 0.2722125808652539, "learning_rate": 4.4684908588200304e-05, "loss": 0.5804, "step": 140},
    {"epoch": 0.25474254742547425, "grad_norm": 0.29853453900528515, "learning_rate": 4.4592336433146e-05, "loss": 0.5622, "step": 141},
    {"epoch": 0.2565492321589883, "grad_norm": 0.2616025861086357, "learning_rate": 4.449906271606766e-05, "loss": 0.5816, "step": 142},
    {"epoch": 0.25835591689250226, "grad_norm": 0.28875897468487793, "learning_rate": 4.440509077690883e-05, "loss": 0.5751, "step": 143},
    {"epoch": 0.2601626016260163, "grad_norm": 0.31089280557100357, "learning_rate": 4.431042398061499e-05, "loss": 0.5614, "step": 144},
    {"epoch": 0.26196928635953026, "grad_norm": 0.24453740569318233, "learning_rate": 4.421506571701305e-05, "loss": 0.5739, "step": 145},
    {"epoch": 0.26377597109304424, "grad_norm": 0.3298892559409927, "learning_rate": 4.4119019400689967e-05, "loss": 0.5789, "step": 146},
    {"epoch": 0.26558265582655827, "grad_norm": 0.29175370949614915, "learning_rate": 4.402228847087047e-05, "loss": 0.5687, "step": 147},
    {"epoch": 0.26738934056007224, "grad_norm": 0.30364879113096566, "learning_rate": 4.3924876391293915e-05, "loss": 0.57, "step": 148},
    {"epoch": 0.26919602529358627, "grad_norm": 0.2924856865957413, "learning_rate": 4.382678665009028e-05, "loss": 0.5675, "step": 149},
    {"epoch": 0.27100271002710025, "grad_norm": 0.2831086406138475, "learning_rate": 4.372802275965521e-05, "loss": 0.5777, "step": 150},
    {"epoch": 0.2728093947606143, "grad_norm": 0.3017093604784243, "learning_rate": 4.3628588256524285e-05, "loss": 0.5589, "step": 151},
    {"epoch": 0.27461607949412825, "grad_norm": 0.2765860953865558, "learning_rate": 4.3528486701246376e-05, "loss": 0.5669, "step": 152},
    {"epoch": 0.2764227642276423, "grad_norm": 0.2904031273845622, "learning_rate": 4.3427721678256125e-05, "loss": 0.5671, "step": 153},
    {"epoch": 0.27822944896115626, "grad_norm": 0.2540349949087337, "learning_rate": 4.332629679574566e-05, "loss": 0.5702, "step": 154},
    {"epoch": 0.2800361336946703, "grad_norm": 0.3069633678570879, "learning_rate": 4.3224215685535294e-05, "loss": 0.5637, "step": 155},
    {"epoch": 0.28184281842818426, "grad_norm": 0.2542441755605158, "learning_rate": 4.312148200294355e-05, "loss": 0.5627, "step": 156},
    {"epoch": 0.2836495031616983, "grad_norm": 0.3018408188777611, "learning_rate": 4.301809942665625e-05, "loss": 0.5757, "step": 157},
    {"epoch": 0.28545618789521227, "grad_norm": 0.28743959410933695, "learning_rate": 4.2914071658594805e-05, "loss": 0.5734, "step": 158},
    {"epoch": 0.2872628726287263, "grad_norm": 0.2787808725595535, "learning_rate": 4.2809402423783624e-05, "loss": 0.5542, "step": 159},
    {"epoch": 0.28906955736224027, "grad_norm": 0.29206183996226404, "learning_rate": 4.2704095470216744e-05, "loss": 0.5635, "step": 160},
    {"epoch": 0.2908762420957543, "grad_norm": 0.28832091136901655, "learning_rate": 4.2598154568723626e-05, "loss": 0.5716, "step": 161},
    {"epoch": 0.2926829268292683, "grad_norm": 0.26053870722630096, "learning_rate": 4.249158351283414e-05, "loss": 0.5568, "step": 162},
    {"epoch": 0.2944896115627823, "grad_norm": 0.30602256840625275, "learning_rate": 4.2384386118642694e-05, "loss": 0.5661, "step": 163},
    {"epoch": 0.2962962962962963, "grad_norm": 0.24580296790912456, "learning_rate": 4.227656622467162e-05, "loss": 0.5751, "step": 164},
    {"epoch": 0.2981029810298103, "grad_norm": 0.29059059202913196, "learning_rate": 4.2168127691733706e-05, "loss": 0.5662, "step": 165},
    {"epoch": 0.2999096657633243, "grad_norm": 0.29414515714153977, "learning_rate": 4.205907440279395e-05, "loss": 0.5724, "step": 166},
    {"epoch": 0.3017163504968383, "grad_norm": 0.29414614437874254, "learning_rate": 4.1949410262830525e-05, "loss": 0.5717, "step": 167},
    {"epoch": 0.3035230352303523, "grad_norm": 0.2971349224474148, "learning_rate": 4.1839139198694946e-05, "loss": 0.5697, "step": 168},
    {"epoch": 0.3053297199638663, "grad_norm": 0.2793813743979144, "learning_rate": 4.172826515897146e-05, "loss": 0.5617, "step": 169},
    {"epoch": 0.3071364046973803, "grad_norm": 0.3023179135068114, "learning_rate": 4.161679211383565e-05, "loss": 0.5751, "step": 170},
    {"epoch": 0.3089430894308943, "grad_norm": 0.2477222878497878, "learning_rate": 4.150472405491226e-05, "loss": 0.5583, "step": 171},
    {"epoch": 0.3107497741644083, "grad_norm": 0.2707854956368433, "learning_rate": 4.139206499513231e-05, "loss": 0.5658, "step": 172},
    {"epoch": 0.31255645889792233, "grad_norm": 0.26805394435378294, "learning_rate": 4.127881896858934e-05, "loss": 0.5687, "step": 173},
    {"epoch": 0.3143631436314363, "grad_norm": 0.24818328664431513, "learning_rate": 4.116499003039499e-05, "loss": 0.5597, "step": 174},
    {"epoch": 0.31616982836495033, "grad_norm": 0.3076486841492656, "learning_rate": 4.105058225653381e-05, "loss": 0.5638, "step": 175},
    {"epoch": 0.3179765130984643, "grad_norm": 0.24484381551700804, "learning_rate": 4.093559974371725e-05, "loss": 0.578, "step": 176},
    {"epoch": 0.31978319783197834, "grad_norm": 0.25779831771912026, "learning_rate": 4.082004660923703e-05, "loss": 0.5536, "step": 177},
    {"epoch": 0.3215898825654923, "grad_norm": 0.2431924544197558, "learning_rate": 4.070392699081767e-05, "loss": 0.5606, "step": 178},
    {"epoch": 0.32339656729900634, "grad_norm": 0.24360453043613553, "learning_rate": 4.058724504646834e-05, "loss": 0.571, "step": 179},
    {"epoch": 0.3252032520325203, "grad_norm": 0.28406098303419186, "learning_rate": 4.047000495433397e-05, "loss": 0.5781, "step": 180},
    {"epoch": 0.32700993676603435, "grad_norm": 0.22058075555118684, "learning_rate": 4.035221091254563e-05, "loss": 0.5574, "step": 181},
    {"epoch": 0.3288166214995483, "grad_norm": 0.3009339432220675, "learning_rate": 4.023386713907021e-05, "loss": 0.5677, "step": 182},
    {"epoch": 0.33062330623306235, "grad_norm": 0.26774508776974565, "learning_rate": 4.011497787155938e-05, "loss": 0.577, "step": 183},
    {"epoch": 0.3324299909665763, "grad_norm": 0.284303170530224, "learning_rate": 3.9995547367197845e-05, "loss": 0.5735, "step": 184},
    {"epoch": 0.33423667570009036, "grad_norm": 0.2614285883935899, "learning_rate": 3.987557990255093e-05, "loss": 0.5695, "step": 185},
    {"epoch": 0.33604336043360433, "grad_norm": 0.28256380914027235, "learning_rate": 3.975507977341141e-05, "loss": 0.5648, "step": 186},
    {"epoch": 0.33785004516711836, "grad_norm": 0.28945389202706384, "learning_rate": 3.963405129464569e-05, "loss": 0.5618, "step": 187},
    {"epoch": 0.33965672990063234, "grad_norm": 0.2654043705467122, "learning_rate": 3.9512498800039335e-05, "loss": 0.5622, "step": 188},
    {"epoch": 0.34146341463414637, "grad_norm": 0.28212350075401693, "learning_rate": 3.939042664214184e-05, "loss": 0.5691, "step": 189},
    {"epoch": 0.34327009936766034, "grad_norm": 0.2666801982334387, "learning_rate": 3.92678391921108e-05, "loss": 0.5582, "step": 190},
    {"epoch": 0.34507678410117437, "grad_norm": 0.2658380267209616, "learning_rate": 3.914474083955537e-05, "loss": 0.5561, "step": 191},
    {"epoch": 0.34688346883468835, "grad_norm": 0.2767810249999182, "learning_rate": 3.902113599237911e-05, "loss": 0.5637, "step": 192},
    {"epoch": 0.3486901535682023, "grad_norm": 0.2686248952050933, "learning_rate": 3.8897029076622116e-05, "loss": 0.5648, "step": 193},
    {"epoch": 0.35049683830171635, "grad_norm": 0.2671044597386809, "learning_rate": 3.8772424536302564e-05, "loss": 0.5668, "step": 194},
    {"epoch": 0.3523035230352303, "grad_norm": 0.28240978604941985, "learning_rate": 3.8647326833257545e-05, "loss": 0.5503, "step": 195},
    {"epoch": 0.35411020776874436, "grad_norm": 0.2545745653651051, "learning_rate": 3.852174044698333e-05, "loss": 0.564, "step": 196},
    {"epoch": 0.35591689250225833, "grad_norm": 0.2788914079559153, "learning_rate": 3.8395669874474915e-05, "loss": 0.563, "step": 197},
    {"epoch": 0.35772357723577236, "grad_norm": 0.27664913837367605, "learning_rate": 3.826911963006507e-05, "loss": 0.5397, "step": 198},
    {"epoch": 0.35953026196928634, "grad_norm": 0.2854592929422416, "learning_rate": 3.814209424526262e-05, "loss": 0.5646, "step": 199},
    {"epoch": 0.36133694670280037, "grad_norm": 0.31461524287063203, "learning_rate": 3.801459826859022e-05, "loss": 0.5521, "step": 200},
    {"epoch": 0.36314363143631434, "grad_norm": 0.2571618033810251, "learning_rate": 3.788663626542146e-05, "loss": 0.547, "step": 201},
    {"epoch": 0.36495031616982837, "grad_norm": 0.2698316727635178, "learning_rate": 3.7758212817817405e-05, "loss": 0.5735, "step": 202},
    {"epoch": 0.36675700090334235, "grad_norm": 0.28386625197611576, "learning_rate": 3.762933252436253e-05, "loss": 0.5607, "step": 203},
    {"epoch": 0.3685636856368564, "grad_norm": 0.23910352580001357, "learning_rate": 3.7500000000000003e-05, "loss": 0.5729, "step": 204},
    {"epoch": 0.37037037037037035, "grad_norm": 0.25373354611118293, "learning_rate": 3.73702198758665e-05, "loss": 0.5636, "step": 205},
    {"epoch": 0.3721770551038844, "grad_norm": 0.26220155901200526, "learning_rate": 3.7239996799126314e-05, "loss": 0.5511, "step": 206},
    {"epoch": 0.37398373983739835, "grad_norm": 0.2703230132182625, "learning_rate": 3.7109335432805006e-05, "loss": 0.559, "step": 207},
    {"epoch": 0.3757904245709124, "grad_norm": 0.25300776840046674, "learning_rate": 3.697824045562238e-05, "loss": 0.5467, "step": 208},
    {"epoch": 0.37759710930442636, "grad_norm": 0.31024727576581884, "learning_rate": 3.6846716561824965e-05, "loss": 0.5681, "step": 209},
    {"epoch": 0.3794037940379404, "grad_norm": 0.2547242302465973, "learning_rate": 3.6714768461017965e-05, "loss": 0.5641, "step": 210},
    {"epoch": 0.38121047877145436, "grad_norm": 0.2832537591345291, "learning_rate": 3.6582400877996546e-05, "loss": 0.5631, "step": 211},
    {"epoch": 0.3830171635049684, "grad_norm": 0.23740735296590512, "learning_rate": 3.6449618552576695e-05, "loss": 0.5705, "step": 212},
    {"epoch": 0.38482384823848237, "grad_norm": 0.25138099601672315, "learning_rate": 3.6316426239425485e-05, "loss": 0.5712, "step": 213},
    {"epoch": 0.3866305329719964, "grad_norm": 0.2446936529822529, "learning_rate": 3.6182828707890816e-05, "loss": 0.5574, "step": 214},
    {"epoch": 0.3884372177055104, "grad_norm": 0.28278554244015486, "learning_rate": 3.604883074183068e-05, "loss": 0.5848, "step": 215},
    {"epoch": 0.3902439024390244, "grad_norm": 0.2868056773879424, "learning_rate": 3.591443713944175e-05, "loss": 0.5728, "step": 216},
    {"epoch": 0.3920505871725384, "grad_norm": 0.23733474164783092, "learning_rate": 3.577965271308771e-05, "loss": 0.5734, "step": 217},
    {"epoch": 0.3938572719060524, "grad_norm": 0.2418348363911958, "learning_rate": 3.564448228912682e-05, "loss": 0.5644, "step": 218},
    {"epoch": 0.3956639566395664, "grad_norm": 0.24258866663076342, "learning_rate": 3.550893070773914e-05, "loss": 0.5597, "step": 219},
    {"epoch": 0.3974706413730804, "grad_norm": 0.23248379507210618, "learning_rate": 3.5373002822753216e-05, "loss": 0.5619, "step": 220},
    {"epoch": 0.3992773261065944, "grad_norm": 0.23221387790837575, "learning_rate": 3.5236703501472266e-05, "loss": 0.551, "step": 221},
    {"epoch": 0.4010840108401084, "grad_norm": 0.27427489494087726, "learning_rate": 3.510003762449988e-05, "loss": 0.5685, "step": 222},
    {"epoch": 0.4028906955736224, "grad_norm": 0.25685297943543806, "learning_rate": 3.496301008556529e-05, "loss": 0.5508, "step": 223},
    {"epoch": 0.4046973803071364, "grad_norm": 0.23751224178133434, "learning_rate": 3.4825625791348096e-05, "loss": 0.5553, "step": 224},
    {"epoch": 0.4065040650406504, "grad_norm": 0.2533622057517779, "learning_rate": 3.4687889661302576e-05, "loss": 0.5616, "step": 225},
    {"epoch": 0.4083107497741644, "grad_norm": 0.25908397166669356, "learning_rate": 3.454980662748156e-05, "loss": 0.5659, "step": 226},
    {"epoch": 0.4101174345076784, "grad_norm": 0.25053206427350594, "learning_rate": 3.44113816343598e-05, "loss": 0.5701, "step": 227},
    {"epoch": 0.41192411924119243, "grad_norm": 0.2371124422311737, "learning_rate": 3.427261963865691e-05, "loss": 0.5567, "step": 228},
    {"epoch": 0.4137308039747064, "grad_norm": 0.26537475430824775, "learning_rate": 3.413352560915988e-05, "loss": 0.5594, "step": 229},
    {"epoch": 0.41553748870822044, "grad_norm": 0.24213575293916792, "learning_rate": 3.399410452654518e-05, "loss": 0.5612, "step": 230},
    {"epoch": 0.4173441734417344, "grad_norm": 0.24275309716779916, "learning_rate": 3.3854361383200374e-05, "loss": 0.561, "step": 231},
    {"epoch": 0.41915085817524844, "grad_norm": 0.23843095168601489, "learning_rate": 3.3714301183045385e-05, "loss": 0.5631, "step": 232},
    {"epoch": 0.4209575429087624, "grad_norm": 0.21791224002383097, "learning_rate": 3.357392894135329e-05, "loss": 0.5457, "step": 233},
    {"epoch": 0.42276422764227645, "grad_norm": 0.23785549929021552, "learning_rate": 3.343324968457076e-05, "loss": 0.5608, "step": 234},
    {"epoch": 0.4245709123757904, "grad_norm": 0.22396755626120254, "learning_rate": 3.329226845013802e-05, "loss": 0.5558, "step": 235},
    {"epoch": 0.42637759710930445, "grad_norm": 0.22217117995010718, "learning_rate": 3.315099028630855e-05, "loss": 0.5669, "step": 236},
    {"epoch": 0.4281842818428184, "grad_norm": 0.2368068176727353, "learning_rate": 3.3009420251968244e-05, "loss": 0.5676, "step": 237},
    {"epoch": 0.42999096657633246, "grad_norm": 0.2913166749962225, "learning_rate": 3.28675634164543e-05, "loss": 0.5757, "step": 238},
    {"epoch": 0.43179765130984643, "grad_norm": 0.2326443530840128, "learning_rate": 3.272542485937369e-05, "loss": 0.5562, "step": 239},
    {"epoch": 0.43360433604336046, "grad_norm": 0.21709853787458164, "learning_rate": 3.258300967042125e-05, "loss": 0.5566, "step": 240},
    {"epoch": 0.43541102077687444, "grad_norm": 0.24943018423195573, "learning_rate": 3.244032294919747e-05, "loss": 0.5558, "step": 241},
    {"epoch": 0.4372177055103884, "grad_norm": 0.2088255652107171, "learning_rate": 3.229736980502584e-05, "loss": 0.5561, "step": 242},
    {"epoch": 0.43902439024390244, "grad_norm": 0.2753534476789369, "learning_rate": 3.215415535676992e-05, "loss": 0.561, "step": 243},
    {"epoch": 0.4408310749774164, "grad_norm": 0.21949470480639668, "learning_rate": 3.201068473265007e-05, "loss": 0.5681, "step": 244},
    {"epoch": 0.44263775971093045, "grad_norm": 0.26944393055258614, "learning_rate": 3.186696307005976e-05, "loss": 0.5591, "step": 245},
    {"epoch": 0.4444444444444444, "grad_norm": 0.2191119214759813, "learning_rate": 3.172299551538164e-05, "loss": 0.566, "step": 246},
    {"epoch": 0.44625112917795845, "grad_norm": 0.23598250909414767, "learning_rate": 3.15787872238033e-05, "loss": 0.5474, "step": 247},
    {"epoch": 0.4480578139114724, "grad_norm": 0.2244716812824527, "learning_rate": 3.143434335913256e-05, "loss": 0.5665, "step": 248},
    {"epoch": 0.44986449864498645, "grad_norm": 0.2605217389849008, "learning_rate": 3.1289669093612714e-05, "loss": 0.55, "step": 249},
    {"epoch": 0.45167118337850043, "grad_norm": 0.23565788514397004, "learning_rate": 3.1144769607737204e-05, "loss": 0.5648, "step": 250},
    {"epoch": 0.45347786811201446, "grad_norm": 0.2520253908292615, "learning_rate": 3.099965009006415e-05, "loss": 0.5515, "step": 251},
    {"epoch": 0.45528455284552843, "grad_norm": 0.23382294482671243, "learning_rate": 3.0854315737030596e-05, "loss": 0.5343, "step": 252},
    {"epoch": 0.45709123757904246, "grad_norm": 0.21156862362731088, "learning_rate": 3.0708771752766394e-05, "loss": 0.5566, "step": 253},
    {"epoch": 0.45889792231255644, "grad_norm": 0.2858583262540506, "learning_rate": 3.056302334890786e-05, "loss": 0.5562, "step": 254},
    {"epoch": 0.46070460704607047, "grad_norm": 0.21446015297714893, "learning_rate": 3.0417075744411178e-05, "loss": 0.5491, "step": 255},
    {"epoch": 0.46251129177958444, "grad_norm": 0.27175171429400885, "learning_rate": 3.0270934165365478e-05, "loss": 0.549, "step": 256},
    {"epoch": 0.4643179765130985, "grad_norm": 0.2332022756162156, "learning_rate": 3.0124603844805767e-05, "loss": 0.5615, "step": 257},
    {"epoch": 0.46612466124661245, "grad_norm": 0.25726612906200885, "learning_rate": 2.997809002252546e-05, "loss": 0.5545, "step": 258},
    {"epoch": 0.4679313459801265, "grad_norm": 0.26470532087807114, "learning_rate": 2.9831397944888833e-05, "loss": 0.5586, "step": 259},
    {"epoch": 0.46973803071364045, "grad_norm": 0.244315447384743, "learning_rate": 2.9684532864643122e-05, "loss": 0.5719, "step": 260},
    {"epoch": 0.4715447154471545, "grad_norm": 0.2579290960374767, "learning_rate": 2.953750004073041e-05, "loss": 0.5625, "step": 261},
    {"epoch": 0.47335140018066846, "grad_norm": 0.2502355307959324, "learning_rate": 2.9390304738099384e-05, "loss": 0.5508, "step": 262},
    {"epoch": 0.4751580849141825, "grad_norm": 0.22745648145569985, "learning_rate": 2.9242952227516722e-05, "loss": 0.5558, "step": 263},
    {"epoch": 0.47696476964769646, "grad_norm": 0.2880551827736454, "learning_rate": 2.9095447785378443e-05, "loss": 0.5615, "step": 264},
    {"epoch": 0.4787714543812105, "grad_norm": 0.21091165839321852, "learning_rate": 2.89477966935209e-05, "loss": 0.543, "step": 265},
    {"epoch": 0.48057813911472447, "grad_norm": 0.3146456092706863, "learning_rate": 2.8800004239031684e-05, "loss": 0.5676, "step": 266},
    {"epoch": 0.4823848238482385, "grad_norm": 0.2187643898145053, "learning_rate": 2.8652075714060295e-05, "loss": 0.552, "step": 267},
    {"epoch": 0.48419150858175247, "grad_norm": 0.25789704379288414, "learning_rate": 2.850401641562865e-05, "loss": 0.5511, "step": 268},
    {"epoch": 0.4859981933152665, "grad_norm": 0.23735131054824035, "learning_rate": 2.8355831645441388e-05, "loss": 0.5593, "step": 269},
    {"epoch": 0.4878048780487805, "grad_norm": 0.22178545968987512, "learning_rate": 2.8207526709696057e-05, "loss": 0.54, "step": 270},
    {"epoch": 0.4896115627822945, "grad_norm": 0.2444983259488006, "learning_rate": 2.8059106918893068e-05, "loss": 0.5618, "step": 271},
    {"epoch": 0.4914182475158085, "grad_norm": 0.23074078288504987, "learning_rate": 2.791057758764557e-05, "loss": 0.5561, "step": 272},
    {"epoch": 0.4932249322493225, "grad_norm": 0.2122648915326668, "learning_rate": 2.7761944034489152e-05, "loss": 0.564, "step": 273},
    {"epoch": 0.4950316169828365, "grad_norm": 0.21492258086562654, "learning_rate": 2.761321158169134e-05, "loss": 0.5517, "step": 274},
    {"epoch": 0.4968383017163505, "grad_norm": 0.27472351877781365, "learning_rate": 2.746438555506109e-05, "loss": 0.5462, "step": 275},
    {"epoch": 0.4986449864498645, "grad_norm": 0.21428421421146693, "learning_rate": 2.7315471283758037e-05, "loss": 0.5378, "step": 276},
    {"epoch": 0.5004516711833785, "grad_norm": 0.22502606248228424, "learning_rate": 2.7166474100101673e-05, "loss": 0.5497, "step": 277},
    {"epoch": 0.5022583559168925, "grad_norm": 0.22837843189987042, "learning_rate": 2.7017399339380434e-05, "loss": 0.5371, "step": 278},
    {"epoch": 0.5040650406504065, "grad_norm": 0.24309416552693688, "learning_rate": 2.686825233966061e-05, "loss": 0.5521, "step": 279},
    {"epoch": 0.5058717253839206, "grad_norm": 0.21900766697042293, "learning_rate": 2.6719038441595233e-05, "loss": 0.5481, "step": 280},
    {"epoch": 0.5076784101174345, "grad_norm": 0.22388193391843347, "learning_rate": 2.656976298823284e-05, "loss": 0.5491, "step": 281},
    {"epoch": 0.5094850948509485, "grad_norm": 0.22518528793598785, "learning_rate": 2.6420431324826117e-05, "loss": 0.5437, "step": 282},
    {"epoch": 0.5112917795844625, "grad_norm": 0.22899910230756002, "learning_rate": 2.6271048798640547e-05, "loss": 0.5585, "step": 283},
    {"epoch": 0.5130984643179766, "grad_norm": 0.23580513860894517, "learning_rate": 2.6121620758762877e-05, "loss": 0.5466, "step": 284},
    {"epoch": 0.5149051490514905, "grad_norm": 0.2424173649022125, "learning_rate": 2.5972152555909625e-05, "loss": 0.542, "step": 285},
    {"epoch": 0.5167118337850045, "grad_norm": 0.21694029342367355, "learning_rate": 2.5822649542235466e-05, "loss": 0.5386, "step": 286},
    {"epoch": 0.5185185185185185, "grad_norm": 0.25827875560858626, "learning_rate": 2.5673117071141572e-05, "loss": 0.5601, "step": 287},
    {"epoch": 0.5203252032520326, "grad_norm": 0.23262800077998166, "learning_rate": 2.5523560497083926e-05, "loss": 0.5575, "step": 288},
    {"epoch": 0.5221318879855466, "grad_norm": 0.2761966922116131, "learning_rate": 2.5373985175381594e-05, "loss": 0.5418, "step": 289},
    {"epoch": 0.5239385727190605, "grad_norm": 0.26156166020108806, "learning_rate": 2.5224396462024947e-05, "loss": 0.5539, "step": 290},
    {"epoch": 0.5257452574525745, "grad_norm": 0.21779581948691018, "learning_rate": 2.507479971348391e-05, "loss": 0.5581, "step": 291},
    {"epoch": 0.5275519421860885, "grad_norm": 0.2377723759423303, "learning_rate": 2.4925200286516097e-05, "loss": 0.5621, "step": 292},
    {"epoch": 0.5293586269196026, "grad_norm": 0.25645028358412414, "learning_rate": 2.4775603537975052e-05, "loss": 0.5545, "step": 293},
    {"epoch": 0.5311653116531165, "grad_norm": 0.23027125665344333, "learning_rate": 2.4626014824618415e-05, "loss": 0.5466, "step": 294},
    {"epoch": 0.5329719963866305, "grad_norm": 0.21506264402370667, "learning_rate": 2.447643950291608e-05, "loss": 0.5444, "step": 295},
    {"epoch": 0.5347786811201445, "grad_norm": 0.23689584066775435, "learning_rate": 2.4326882928858434e-05, "loss": 0.5486, "step": 296},
    {"epoch": 0.5365853658536586, "grad_norm": 0.24755488327450098, "learning_rate": 2.417735045776453e-05, "loss": 0.553, "step": 297},
    {"epoch": 0.5383920505871725, "grad_norm": 0.23203698871403608, "learning_rate": 2.402784744409038e-05, "loss": 0.5484, "step": 298},
    {"epoch": 0.5401987353206865, "grad_norm": 0.24571656282529886, "learning_rate": 2.3878379241237136e-05, "loss": 0.5632, "step": 299},
    {"epoch": 0.5420054200542005, "grad_norm": 0.20706812965056257, "learning_rate": 2.372895120135946e-05, "loss": 0.5513, "step": 300},
    {"epoch": 0.5438121047877146, "grad_norm": 0.2232064747129914, "learning_rate": 2.3579568675173895e-05, "loss": 0.5538, "step": 301},
    {"epoch": 0.5456187895212286, "grad_norm": 0.22016741676196278, "learning_rate": 2.3430237011767167e-05, "loss": 0.5652, "step": 302},
    {"epoch": 0.5474254742547425, "grad_norm": 0.2235020612958559, "learning_rate": 2.3280961558404773e-05, "loss": 0.5464, "step": 303},
    {"epoch": 0.5492321589882565, "grad_norm": 0.22311333060737396, "learning_rate": 2.3131747660339394e-05, "loss": 0.5413, "step": 304},
    {"epoch": 0.5510388437217706, "grad_norm": 0.22680586256234805, "learning_rate": 2.2982600660619572e-05, "loss": 0.5485, "step": 305},
    {"epoch": 0.5528455284552846, "grad_norm": 0.21059821879953544, "learning_rate": 2.2833525899898326e-05, "loss": 0.5459, "step": 306},
    {"epoch": 0.5546522131887985, "grad_norm": 0.20743952304888144, "learning_rate": 2.268452871624197e-05, "loss": 0.5626, "step": 307},
    {"epoch": 0.5564588979223125, "grad_norm": 0.21066384195448498, "learning_rate": 2.2535614444938912e-05, "loss": 0.5422, "step": 308},
    {"epoch": 0.5582655826558266, "grad_norm": 0.2161934609194102, "learning_rate": 2.238678841830867e-05, "loss": 0.552, "step": 309},
    {"epoch": 0.5600722673893406, "grad_norm": 0.1908970151997147, "learning_rate": 2.223805596551085e-05, "loss": 0.5339, "step": 310},
    {"epoch": 0.5618789521228545, "grad_norm": 0.2116878435572703, "learning_rate": 2.2089422412354432e-05, "loss": 0.553, "step": 311},
    {"epoch": 0.5636856368563685, "grad_norm": 0.6821216144361701, "learning_rate": 2.1940893081106945e-05, "loss": 0.5522, "step": 312},
    {"epoch": 0.5654923215898826, "grad_norm": 0.19933075280256027, "learning_rate": 2.1792473290303946e-05, "loss": 0.5527, "step": 313},
    {"epoch": 0.5672990063233966, "grad_norm": 0.20221512416983153, "learning_rate": 2.164416835455862e-05, "loss": 0.5461, "step": 314},
    {"epoch": 0.5691056910569106, "grad_norm": 0.2068054666438288, "learning_rate": 2.1495983584371353e-05, "loss": 0.5466, "step": 315},
    {"epoch": 0.5709123757904245, "grad_norm": 0.2134121906837669, "learning_rate": 2.1347924285939714e-05, "loss": 0.5422, "step": 316},
    {"epoch": 0.5727190605239386, "grad_norm": 0.20455764217338732, "learning_rate": 2.119999576096832e-05, "loss": 0.5382, "step": 317},
    {"epoch": 0.5745257452574526, "grad_norm": 0.199331906522498, "learning_rate": 2.1052203306479105e-05, "loss": 0.5501, "step": 318},
    {"epoch": 0.5763324299909666, "grad_norm": 0.2412560852605461, "learning_rate": 2.090455221462156e-05, "loss": 0.5603, "step": 319},
    {"epoch": 0.5781391147244805, "grad_norm": 0.21519069134067864, "learning_rate": 2.075704777248328e-05, "loss": 0.5418, "step": 320},
    {"epoch": 0.5799457994579946, "grad_norm": 0.228727628427062, "learning_rate": 2.0609695261900622e-05, "loss": 0.5394, "step": 321},
    {"epoch": 0.5817524841915086, "grad_norm": 0.2176156146729185, "learning_rate": 2.0462499959269593e-05, "loss": 0.56, "step": 322},
    {"epoch": 0.5835591689250226, "grad_norm": 0.21488586345042776, "learning_rate": 2.031546713535688e-05, "loss": 0.5466, "step": 323},
    {"epoch": 0.5853658536585366, "grad_norm": 0.22926197483380642, "learning_rate": 2.0168602055111173e-05, "loss": 0.5274, "step": 324},
    {"epoch": 0.5871725383920506, "grad_norm": 0.2229955712111904, "learning_rate": 2.002190997747455e-05, "loss": 0.5448, "step": 325},
    {"epoch": 0.5889792231255646, "grad_norm": 0.21531963162859075, "learning_rate": 1.9875396155194242e-05, "loss": 0.5544, "step": 326},
    {"epoch": 0.5907859078590786, "grad_norm": 0.20885719552228893, "learning_rate": 1.972906583463453e-05, "loss": 0.5404, "step": 327},
    {"epoch": 0.5925925925925926, "grad_norm": 0.261458475205465, "learning_rate": 1.9582924255588828e-05, "loss": 0.5472, "step": 328},
    {"epoch": 0.5943992773261066, "grad_norm": 0.2103996141488197, "learning_rate": 1.9436976651092144e-05, "loss": 0.5368, "step": 329},
    {"epoch": 0.5962059620596206, "grad_norm": 0.21748833338313744, "learning_rate": 1.9291228247233605e-05, "loss": 0.5443, "step": 330},
    {"epoch": 0.5980126467931346, "grad_norm": 0.2520334978409223, "learning_rate": 1.9145684262969403e-05, "loss": 0.5477, "step": 331},
    {"epoch": 0.5998193315266486, "grad_norm": 0.1957961791185171, "learning_rate": 1.9000349909935853e-05, "loss": 0.5315, "step": 332},
    {"epoch": 0.6016260162601627, "grad_norm": 0.20783779398827862, "learning_rate": 1.885523039226281e-05, "loss": 0.5521, "step": 333},
    {"epoch": 0.6034327009936766, "grad_norm": 0.20898559191833543, "learning_rate": 1.871033090638729e-05, "loss": 0.5642, "step": 334},
    {"epoch": 0.6052393857271906, "grad_norm": 0.1873586938961715, "learning_rate": 1.8565656640867446e-05, "loss": 0.5479, "step": 335},
    {"epoch": 0.6070460704607046, "grad_norm": 0.2255988844983079, "learning_rate": 1.8421212776196712e-05, "loss": 0.5442, "step": 336},
    {"epoch": 0.6088527551942186, "grad_norm": 0.2132867422782542, "learning_rate": 1.827700448461836e-05, "loss": 0.5455, "step": 337},
    {"epoch": 0.6106594399277326, "grad_norm": 0.2013497795480394, "learning_rate": 1.813303692994025e-05, "loss": 0.5418, "step": 338},
    {"epoch": 0.6124661246612466, "grad_norm": 0.19449789688874272, "learning_rate": 1.7989315267349936e-05, "loss": 0.5503, "step": 339},
    {"epoch": 0.6142728093947606, "grad_norm": 0.20623159680652037, "learning_rate": 1.7845844643230086e-05, "loss": 0.5452, "step": 340},
    {"epoch": 0.6160794941282746, "grad_norm": 0.20573869402653808, "learning_rate": 1.7702630194974168e-05, "loss": 0.5445, "step": 341},
    {"epoch": 0.6178861788617886, "grad_norm": 0.1964685647724217, "learning_rate": 1.7559677050802544e-05, "loss": 0.5466, "step": 342},
    {"epoch": 0.6196928635953026, "grad_norm": 0.20315885913212622, "learning_rate": 1.7416990329578753e-05, "loss": 0.5415, "step": 343},
    {"epoch": 0.6214995483288166, "grad_norm": 0.18101549711166035, "learning_rate": 1.7274575140626318e-05, "loss": 0.5307, "step": 344},
    {"epoch": 0.6233062330623306, "grad_norm": 0.1994104297590127, "learning_rate": 1.71324365835457e-05, "loss": 0.5444, "step": 345},
    {"epoch": 0.6251129177958447, "grad_norm": 0.19723145686498575, "learning_rate": 1.699057974803176e-05, "loss": 0.5467, "step": 346},
    {"epoch": 0.6269196025293586, "grad_norm": 0.1850077678736798, "learning_rate": 1.6849009713691454e-05, "loss": 0.5323, "step": 347},
    {"epoch": 0.6287262872628726, "grad_norm": 0.276502296878791, "learning_rate": 1.670773154986199e-05, "loss": 0.5335, "step": 348},
    {"epoch": 0.6305329719963866, "grad_norm": 0.19317461968464228, "learning_rate": 1.6566750315429254e-05, "loss": 0.5375, "step": 349},
    {"epoch": 0.6323396567299007, "grad_norm": 0.19910869280957114, "learning_rate": 1.6426071058646717e-05, "loss": 0.549, "step": 350},
    {"epoch": 0.6341463414634146, "grad_norm": 0.18690184695616166, "learning_rate": 1.6285698816954624e-05, "loss": 0.5381, "step": 351},
    {"epoch": 0.6359530261969286, "grad_norm": 0.18174973617298104, "learning_rate": 1.6145638616799635e-05, "loss": 0.5521, "step": 352},
    {"epoch": 0.6377597109304426, "grad_norm": 0.19064784919043637, "learning_rate": 1.6005895473454834e-05, "loss": 0.5525, "step": 353},
    {"epoch": 0.6395663956639567, "grad_norm": 0.18767396978783557, "learning_rate": 1.5866474390840125e-05, "loss": 0.5474, "step": 354},
    {"epoch": 0.6413730803974707, "grad_norm": 0.1954432958053184, "learning_rate": 1.5727380361343103e-05, "loss": 0.5362, "step": 355},
    {"epoch": 0.6431797651309846, "grad_norm": 0.17705406426867062, "learning_rate": 1.55886183656402e-05, "loss": 0.5496, "step": 356},
    {"epoch": 0.6449864498644986, "grad_norm": 0.20060277252597722, "learning_rate": 1.545019337251844e-05, "loss": 0.5514, "step": 357},
    {"epoch": 0.6467931345980127, "grad_norm": 0.193669354774866, "learning_rate": 1.5312110338697426e-05, "loss": 0.5568, "step": 358},
    {"epoch": 0.6485998193315267, "grad_norm": 0.18018469221264521, "learning_rate": 1.5174374208651912e-05, "loss": 0.5306, "step": 359},
    {"epoch": 0.6504065040650406, "grad_norm": 0.1778288207244956, "learning_rate": 1.503698991443471e-05, "loss": 0.5392, "step": 360},
    {"epoch": 0.6522131887985546, "grad_norm": 0.2115671504855792, "learning_rate": 1.4899962375500121e-05, "loss": 0.544, "step": 361},
    {"epoch": 0.6540198735320687, "grad_norm": 0.18387092791329196, "learning_rate": 1.4763296498527743e-05, "loss": 0.5551, "step": 362},
    {"epoch": 0.6558265582655827, "grad_norm": 0.20242860658284711, "learning_rate": 1.4626997177246787e-05, "loss": 0.5457, "step": 363},
    {"epoch": 0.6576332429990966, "grad_norm": 0.19345607164662235, "learning_rate": 1.4491069292260868e-05, "loss": 0.5458, "step": 364},
    {"epoch": 0.6594399277326106, "grad_norm": 0.186250552017406, "learning_rate": 1.4355517710873184e-05, "loss": 0.537, "step": 365},
    {"epoch": 0.6612466124661247, "grad_norm": 0.19349404868910808, "learning_rate": 1.4220347286912294e-05, "loss": 0.558, "step": 366},
    {"epoch": 0.6630532971996387, "grad_norm": 0.1805348865094129, "learning_rate": 1.4085562860558255e-05, "loss": 0.5395, "step": 367},
}, |
|
{ |
|
"epoch": 0.6648599819331527, |
|
"grad_norm": 0.18495404781177935, |
|
"learning_rate": 1.3951169258169338e-05, |
|
"loss": 0.551, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 0.18688251287931992, |
|
"learning_rate": 1.3817171292109183e-05, |
|
"loss": 0.5516, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.6684733514001807, |
|
"grad_norm": 0.1898547525063398, |
|
"learning_rate": 1.3683573760574526e-05, |
|
"loss": 0.5506, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.6702800361336947, |
|
"grad_norm": 0.18239213301354457, |
|
"learning_rate": 1.3550381447423316e-05, |
|
"loss": 0.5381, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.6720867208672087, |
|
"grad_norm": 0.17814691094904994, |
|
"learning_rate": 1.3417599122003464e-05, |
|
"loss": 0.5555, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.6738934056007226, |
|
"grad_norm": 0.21908295063893554, |
|
"learning_rate": 1.3285231538982034e-05, |
|
"loss": 0.5415, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.6757000903342367, |
|
"grad_norm": 0.17230078309311048, |
|
"learning_rate": 1.3153283438175034e-05, |
|
"loss": 0.5317, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.6775067750677507, |
|
"grad_norm": 0.18162525160837265, |
|
"learning_rate": 1.3021759544377632e-05, |
|
"loss": 0.5453, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.6793134598012647, |
|
"grad_norm": 0.17485786762275105, |
|
"learning_rate": 1.2890664567194998e-05, |
|
"loss": 0.5304, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.6811201445347786, |
|
"grad_norm": 0.16794425941753846, |
|
"learning_rate": 1.2760003200873699e-05, |
|
"loss": 0.5464, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.6829268292682927, |
|
"grad_norm": 0.18163641320102142, |
|
"learning_rate": 1.2629780124133511e-05, |
|
"loss": 0.5472, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.6847335140018067, |
|
"grad_norm": 0.18356921181202607, |
|
"learning_rate": 1.2500000000000006e-05, |
|
"loss": 0.5503, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.6865401987353207, |
|
"grad_norm": 0.189587834239473, |
|
"learning_rate": 1.2370667475637473e-05, |
|
"loss": 0.5316, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.6883468834688347, |
|
"grad_norm": 0.19057832221881246, |
|
"learning_rate": 1.2241787182182595e-05, |
|
"loss": 0.5473, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.6901535682023487, |
|
"grad_norm": 0.1874557710612082, |
|
"learning_rate": 1.2113363734578548e-05, |
|
"loss": 0.5347, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.6919602529358627, |
|
"grad_norm": 0.17256058169239688, |
|
"learning_rate": 1.1985401731409792e-05, |
|
"loss": 0.5537, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.6937669376693767, |
|
"grad_norm": 0.1871844566367914, |
|
"learning_rate": 1.185790575473738e-05, |
|
"loss": 0.5506, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.6955736224028907, |
|
"grad_norm": 0.1894245222651623, |
|
"learning_rate": 1.1730880369934933e-05, |
|
"loss": 0.5338, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.6973803071364046, |
|
"grad_norm": 0.18773033629918157, |
|
"learning_rate": 1.1604330125525079e-05, |
|
"loss": 0.5455, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.6991869918699187, |
|
"grad_norm": 0.19204257911058253, |
|
"learning_rate": 1.1478259553016682e-05, |
|
"loss": 0.5455, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.7009936766034327, |
|
"grad_norm": 0.17625044703357365, |
|
"learning_rate": 1.135267316674246e-05, |
|
"loss": 0.5442, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.7028003613369467, |
|
"grad_norm": 0.18034125469253723, |
|
"learning_rate": 1.122757546369744e-05, |
|
"loss": 0.5467, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.7046070460704607, |
|
"grad_norm": 0.19225536088509088, |
|
"learning_rate": 1.1102970923377892e-05, |
|
"loss": 0.5418, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.7064137308039747, |
|
"grad_norm": 0.19942810091065274, |
|
"learning_rate": 1.0978864007620895e-05, |
|
"loss": 0.5407, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.7082204155374887, |
|
"grad_norm": 0.21501138455823338, |
|
"learning_rate": 1.0855259160444639e-05, |
|
"loss": 0.5476, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.7100271002710027, |
|
"grad_norm": 0.2884668010582998, |
|
"learning_rate": 1.0732160807889211e-05, |
|
"loss": 0.5519, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.7118337850045167, |
|
"grad_norm": 0.17022174290624156, |
|
"learning_rate": 1.0609573357858166e-05, |
|
"loss": 0.5546, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.7136404697380307, |
|
"grad_norm": 0.1854412182788549, |
|
"learning_rate": 1.0487501199960662e-05, |
|
"loss": 0.5484, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.7154471544715447, |
|
"grad_norm": 0.17931062842447656, |
|
"learning_rate": 1.0365948705354308e-05, |
|
"loss": 0.5432, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.7172538392050587, |
|
"grad_norm": 0.18675467992935785, |
|
"learning_rate": 1.0244920226588597e-05, |
|
"loss": 0.5433, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.7190605239385727, |
|
"grad_norm": 0.1813756337924599, |
|
"learning_rate": 1.0124420097449078e-05, |
|
"loss": 0.5448, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.7208672086720868, |
|
"grad_norm": 0.1827888408060404, |
|
"learning_rate": 1.0004452632802158e-05, |
|
"loss": 0.5467, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.7226738934056007, |
|
"grad_norm": 0.193764370666216, |
|
"learning_rate": 9.88502212844063e-06, |
|
"loss": 0.5443, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.7244805781391147, |
|
"grad_norm": 0.16545671285046895, |
|
"learning_rate": 9.7661328609298e-06, |
|
"loss": 0.5457, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.7262872628726287, |
|
"grad_norm": 0.17759841178915517, |
|
"learning_rate": 9.64778908745437e-06, |
|
"loss": 0.5523, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.7280939476061428, |
|
"grad_norm": 0.16977669465620818, |
|
"learning_rate": 9.52999504566604e-06, |
|
"loss": 0.5339, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.7299006323396567, |
|
"grad_norm": 0.1697665049131124, |
|
"learning_rate": 9.412754953531663e-06, |
|
"loss": 0.54, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.7317073170731707, |
|
"grad_norm": 0.17079386129703159, |
|
"learning_rate": 9.29607300918234e-06, |
|
"loss": 0.5376, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.7335140018066847, |
|
"grad_norm": 0.19277525431650847, |
|
"learning_rate": 9.179953390762977e-06, |
|
"loss": 0.5438, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.7353206865401988, |
|
"grad_norm": 0.16461495695395428, |
|
"learning_rate": 9.064400256282757e-06, |
|
"loss": 0.5358, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.7371273712737128, |
|
"grad_norm": 0.18879440447103327, |
|
"learning_rate": 8.9494177434662e-06, |
|
"loss": 0.5297, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.7389340560072267, |
|
"grad_norm": 0.19522621872944385, |
|
"learning_rate": 8.835009969605012e-06, |
|
"loss": 0.5501, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.7407407407407407, |
|
"grad_norm": 0.17214238999653064, |
|
"learning_rate": 8.72118103141066e-06, |
|
"loss": 0.5498, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.7425474254742548, |
|
"grad_norm": 0.1749110612132328, |
|
"learning_rate": 8.607935004867693e-06, |
|
"loss": 0.5524, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.7443541102077688, |
|
"grad_norm": 0.17609386516997122, |
|
"learning_rate": 8.495275945087744e-06, |
|
"loss": 0.5258, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.7461607949412827, |
|
"grad_norm": 0.1797618429427097, |
|
"learning_rate": 8.383207886164366e-06, |
|
"loss": 0.5412, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.7479674796747967, |
|
"grad_norm": 0.17992769342301995, |
|
"learning_rate": 8.271734841028553e-06, |
|
"loss": 0.5299, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.7497741644083108, |
|
"grad_norm": 0.17963490369072127, |
|
"learning_rate": 8.16086080130506e-06, |
|
"loss": 0.5504, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.7515808491418248, |
|
"grad_norm": 0.1816082193046965, |
|
"learning_rate": 8.050589737169485e-06, |
|
"loss": 0.5374, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.7533875338753387, |
|
"grad_norm": 0.17209070843288865, |
|
"learning_rate": 7.940925597206054e-06, |
|
"loss": 0.5577, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.7551942186088527, |
|
"grad_norm": 0.1772584025417161, |
|
"learning_rate": 7.831872308266305e-06, |
|
"loss": 0.5395, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.7570009033423668, |
|
"grad_norm": 0.16311139160161808, |
|
"learning_rate": 7.723433775328384e-06, |
|
"loss": 0.5417, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.7588075880758808, |
|
"grad_norm": 0.17003456646796242, |
|
"learning_rate": 7.615613881357314e-06, |
|
"loss": 0.555, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.7606142728093948, |
|
"grad_norm": 0.16854440477398433, |
|
"learning_rate": 7.508416487165862e-06, |
|
"loss": 0.5375, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.7624209575429087, |
|
"grad_norm": 0.15674937980320675, |
|
"learning_rate": 7.401845431276378e-06, |
|
"loss": 0.5411, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.7642276422764228, |
|
"grad_norm": 0.16794250291803633, |
|
"learning_rate": 7.2959045297832655e-06, |
|
"loss": 0.5493, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.7660343270099368, |
|
"grad_norm": 0.17102277468152005, |
|
"learning_rate": 7.190597576216385e-06, |
|
"loss": 0.5529, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.7678410117434508, |
|
"grad_norm": 0.17340617098247255, |
|
"learning_rate": 7.085928341405193e-06, |
|
"loss": 0.5316, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.7696476964769647, |
|
"grad_norm": 0.20436247869710572, |
|
"learning_rate": 6.98190057334375e-06, |
|
"loss": 0.5515, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.7714543812104788, |
|
"grad_norm": 0.16090455324449274, |
|
"learning_rate": 6.8785179970564575e-06, |
|
"loss": 0.5235, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.7732610659439928, |
|
"grad_norm": 0.16491506792849778, |
|
"learning_rate": 6.775784314464717e-06, |
|
"loss": 0.544, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.7750677506775068, |
|
"grad_norm": 0.17703605814499973, |
|
"learning_rate": 6.673703204254347e-06, |
|
"loss": 0.5392, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.7768744354110207, |
|
"grad_norm": 0.16598578274238826, |
|
"learning_rate": 6.572278321743871e-06, |
|
"loss": 0.54, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.7786811201445348, |
|
"grad_norm": 0.18841603342599839, |
|
"learning_rate": 6.471513298753634e-06, |
|
"loss": 0.5489, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.7804878048780488, |
|
"grad_norm": 0.16749176773882715, |
|
"learning_rate": 6.371411743475716e-06, |
|
"loss": 0.5443, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.7822944896115628, |
|
"grad_norm": 0.16428065339926168, |
|
"learning_rate": 6.271977240344795e-06, |
|
"loss": 0.5517, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.7841011743450768, |
|
"grad_norm": 0.15595782253122276, |
|
"learning_rate": 6.173213349909729e-06, |
|
"loss": 0.5498, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.7859078590785907, |
|
"grad_norm": 0.1687428753872847, |
|
"learning_rate": 6.075123608706093e-06, |
|
"loss": 0.5283, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.7877145438121048, |
|
"grad_norm": 0.17401608915458225, |
|
"learning_rate": 5.97771152912954e-06, |
|
"loss": 0.5419, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.7895212285456188, |
|
"grad_norm": 0.165969115970308, |
|
"learning_rate": 5.88098059931004e-06, |
|
"loss": 0.5414, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.7913279132791328, |
|
"grad_norm": 0.18993772688540833, |
|
"learning_rate": 5.784934282986956e-06, |
|
"loss": 0.5326, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.7931345980126467, |
|
"grad_norm": 0.17267310640404834, |
|
"learning_rate": 5.689576019385015e-06, |
|
"loss": 0.5498, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.7949412827461608, |
|
"grad_norm": 0.1636848023371795, |
|
"learning_rate": 5.59490922309118e-06, |
|
"loss": 0.5408, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.7967479674796748, |
|
"grad_norm": 0.15808909958482328, |
|
"learning_rate": 5.500937283932348e-06, |
|
"loss": 0.5268, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.7985546522131888, |
|
"grad_norm": 0.16564988647802914, |
|
"learning_rate": 5.4076635668540075e-06, |
|
"loss": 0.5355, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.8003613369467028, |
|
"grad_norm": 0.17071401506597764, |
|
"learning_rate": 5.3150914117996995e-06, |
|
"loss": 0.543, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.8021680216802168, |
|
"grad_norm": 0.1628448162977609, |
|
"learning_rate": 5.223224133591476e-06, |
|
"loss": 0.5482, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.8039747064137308, |
|
"grad_norm": 0.17103430640341877, |
|
"learning_rate": 5.132065021811122e-06, |
|
"loss": 0.5366, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.8057813911472448, |
|
"grad_norm": 0.16682698787825212, |
|
"learning_rate": 5.041617340682467e-06, |
|
"loss": 0.5346, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.8075880758807588, |
|
"grad_norm": 0.15971926510706536, |
|
"learning_rate": 4.951884328954401e-06, |
|
"loss": 0.5393, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.8093947606142728, |
|
"grad_norm": 0.15478384009393892, |
|
"learning_rate": 4.862869199784984e-06, |
|
"loss": 0.5421, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.8112014453477868, |
|
"grad_norm": 0.15933300652649096, |
|
"learning_rate": 4.7745751406263165e-06, |
|
"loss": 0.536, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.8130081300813008, |
|
"grad_norm": 0.16285567779331575, |
|
"learning_rate": 4.687005313110454e-06, |
|
"loss": 0.547, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.8148148148148148, |
|
"grad_norm": 0.1745816724015838, |
|
"learning_rate": 4.600162852936171e-06, |
|
"loss": 0.5475, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.8166214995483289, |
|
"grad_norm": 0.15772843709224638, |
|
"learning_rate": 4.514050869756703e-06, |
|
"loss": 0.5452, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.8184281842818428, |
|
"grad_norm": 0.17376869601451123, |
|
"learning_rate": 4.428672447068357e-06, |
|
"loss": 0.5365, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.8202348690153568, |
|
"grad_norm": 0.17830063470449606, |
|
"learning_rate": 4.344030642100133e-06, |
|
"loss": 0.5216, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.8220415537488708, |
|
"grad_norm": 0.1624558883333754, |
|
"learning_rate": 4.2601284857042266e-06, |
|
"loss": 0.5441, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.8238482384823849, |
|
"grad_norm": 0.1524054782501282, |
|
"learning_rate": 4.176968982247514e-06, |
|
"loss": 0.5373, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.8256549232158988, |
|
"grad_norm": 0.16912982631464618, |
|
"learning_rate": 4.094555109503983e-06, |
|
"loss": 0.551, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.8274616079494128, |
|
"grad_norm": 0.15367849586984553, |
|
"learning_rate": 4.012889818548069e-06, |
|
"loss": 0.5427, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.8292682926829268, |
|
"grad_norm": 0.15386717642477954, |
|
"learning_rate": 3.931976033649021e-06, |
|
"loss": 0.5327, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.8310749774164409, |
|
"grad_norm": 0.16360562053015648, |
|
"learning_rate": 3.851816652166165e-06, |
|
"loss": 0.542, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.8328816621499548, |
|
"grad_norm": 0.16611831314230519, |
|
"learning_rate": 3.772414544445163e-06, |
|
"loss": 0.5308, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.8346883468834688, |
|
"grad_norm": 0.1719087225544376, |
|
"learning_rate": 3.6937725537152274e-06, |
|
"loss": 0.5389, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.8364950316169828, |
|
"grad_norm": 0.1530697651515559, |
|
"learning_rate": 3.6158934959873353e-06, |
|
"loss": 0.5421, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.8383017163504969, |
|
"grad_norm": 0.16949870584991475, |
|
"learning_rate": 3.5387801599533475e-06, |
|
"loss": 0.5446, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.8401084010840109, |
|
"grad_norm": 0.1814848827742537, |
|
"learning_rate": 3.4624353068861943e-06, |
|
"loss": 0.5547, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.8419150858175248, |
|
"grad_norm": 0.1617536304146353, |
|
"learning_rate": 3.386861670540972e-06, |
|
"loss": 0.5374, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.8437217705510388, |
|
"grad_norm": 0.15306707484519436, |
|
"learning_rate": 3.312061957057061e-06, |
|
"loss": 0.5348, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.8455284552845529, |
|
"grad_norm": 0.23382324587988676, |
|
"learning_rate": 3.2380388448612437e-06, |
|
"loss": 0.5448, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.8473351400180669, |
|
"grad_norm": 0.1662985426730699, |
|
"learning_rate": 3.164794984571759e-06, |
|
"loss": 0.5342, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.8491418247515808, |
|
"grad_norm": 0.1867364488340354, |
|
"learning_rate": 3.092332998903416e-06, |
|
"loss": 0.531, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.8509485094850948, |
|
"grad_norm": 0.16499419657563977, |
|
"learning_rate": 3.020655482573659e-06, |
|
"loss": 0.5593, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.8527551942186089, |
|
"grad_norm": 0.16082015565225696, |
|
"learning_rate": 2.949765002209698e-06, |
|
"loss": 0.5312, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.8545618789521229, |
|
"grad_norm": 0.15597231184905325, |
|
"learning_rate": 2.8796640962565374e-06, |
|
"loss": 0.549, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.8563685636856369, |
|
"grad_norm": 0.39671617649713814, |
|
"learning_rate": 2.8103552748861476e-06, |
|
"loss": 0.5451, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.8581752484191508, |
|
"grad_norm": 0.17753504313617807, |
|
"learning_rate": 2.741841019907529e-06, |
|
"loss": 0.5348, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.8599819331526649, |
|
"grad_norm": 0.1637289948609238, |
|
"learning_rate": 2.6741237846778676e-06, |
|
"loss": 0.5409, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.8617886178861789, |
|
"grad_norm": 0.1585219127905966, |
|
"learning_rate": 2.6072059940146775e-06, |
|
"loss": 0.5388, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.8635953026196929, |
|
"grad_norm": 0.168840088490212, |
|
"learning_rate": 2.5410900441089903e-06, |
|
"loss": 0.5485, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.8654019873532068, |
|
"grad_norm": 0.16390582789463012, |
|
"learning_rate": 2.475778302439524e-06, |
|
"loss": 0.5344, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.8672086720867209, |
|
"grad_norm": 0.1520203237608367, |
|
"learning_rate": 2.411273107687925e-06, |
|
"loss": 0.5335, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.8690153568202349, |
|
"grad_norm": 0.16016367056006486, |
|
"learning_rate": 2.3475767696550327e-06, |
|
"loss": 0.5441, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.8708220415537489, |
|
"grad_norm": 0.16365201764510548, |
|
"learning_rate": 2.284691569178138e-06, |
|
"loss": 0.5481, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.8726287262872628, |
|
"grad_norm": 0.1580464673317223, |
|
"learning_rate": 2.222619758049366e-06, |
|
"loss": 0.5333, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.8744354110207768, |
|
"grad_norm": 0.156148786762582, |
|
"learning_rate": 2.1613635589349756e-06, |
|
"loss": 0.5291, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.8762420957542909, |
|
"grad_norm": 0.1659657301447351, |
|
"learning_rate": 2.1009251652958387e-06, |
|
"loss": 0.5394, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.8780487804878049, |
|
"grad_norm": 0.15790108586836402, |
|
"learning_rate": 2.041306741308832e-06, |
|
"loss": 0.5455, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.8798554652213189, |
|
"grad_norm": 0.1478785936292581, |
|
"learning_rate": 1.9825104217894018e-06, |
|
"loss": 0.5332, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.8816621499548328, |
|
"grad_norm": 0.1462534164047016, |
|
"learning_rate": 1.9245383121150677e-06, |
|
"loss": 0.5327, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.8834688346883469, |
|
"grad_norm": 0.14605069151679811, |
|
"learning_rate": 1.8673924881500826e-06, |
|
"loss": 0.5416, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.8852755194218609, |
|
"grad_norm": 0.1589930446245649, |
|
"learning_rate": 1.8110749961710584e-06, |
|
"loss": 0.5392, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.8870822041553749, |
|
"grad_norm": 0.14986223598486392, |
|
"learning_rate": 1.7555878527937164e-06, |
|
"loss": 0.5301, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.8888888888888888, |
|
"grad_norm": 0.15447871937241114, |
|
"learning_rate": 1.700933044900671e-06, |
|
"loss": 0.5319, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.8906955736224029, |
|
"grad_norm": 0.15296232399715282, |
|
"learning_rate": 1.6471125295702771e-06, |
|
"loss": 0.5261, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.8925022583559169, |
|
"grad_norm": 0.14757354919345866, |
|
"learning_rate": 1.59412823400657e-06, |
|
"loss": 0.5435, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.8943089430894309, |
|
"grad_norm": 0.15297818755134698, |
|
"learning_rate": 1.5419820554702314e-06, |
|
"loss": 0.5401, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.8961156278229448, |
|
"grad_norm": 0.15271018404390577, |
|
"learning_rate": 1.4906758612106636e-06, |
|
"loss": 0.5519, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.8979223125564589, |
|
"grad_norm": 0.1625002948888311, |
|
"learning_rate": 1.4402114883991318e-06, |
|
"loss": 0.5395, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.8997289972899729, |
|
"grad_norm": 0.16940382579519814, |
|
"learning_rate": 1.3905907440629752e-06, |
|
"loss": 0.539, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.9015356820234869, |
|
"grad_norm": 0.16321190702331076, |
|
"learning_rate": 1.3418154050208936e-06, |
|
"loss": 0.5475, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.9033423667570009, |
|
"grad_norm": 0.24374183136883182, |
|
"learning_rate": 1.2938872178193395e-06, |
|
"loss": 0.5465, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.9051490514905149, |
|
"grad_norm": 0.15051129764673166, |
|
"learning_rate": 1.2468078986699634e-06, |
|
"loss": 0.545, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.9069557362240289, |
|
"grad_norm": 0.15319443685481185, |
|
"learning_rate": 1.200579133388155e-06, |
|
"loss": 0.5389, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.9087624209575429, |
|
"grad_norm": 0.1451628487699734, |
|
"learning_rate": 1.1552025773327007e-06, |
|
"loss": 0.5358, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.9105691056910569, |
|
"grad_norm": 0.15134290540815853, |
|
"learning_rate": 1.1106798553464804e-06, |
|
"loss": 0.5511, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.912375790424571, |
|
"grad_norm": 0.1503501621168193, |
|
"learning_rate": 1.067012561698319e-06, |
|
"loss": 0.523, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.9141824751580849, |
|
"grad_norm": 0.14506084238294983, |
|
"learning_rate": 1.024202260025861e-06, |
|
"loss": 0.5428, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.9159891598915989, |
|
"grad_norm": 0.15033450056319353, |
|
"learning_rate": 9.822504832796036e-07, |
|
"loss": 0.5414, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.9177958446251129, |
|
"grad_norm": 0.14929667455668477, |
|
"learning_rate": 9.411587336679989e-07, |
|
"loss": 0.5339, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.919602529358627, |
|
"grad_norm": 0.15153312450878684, |
|
"learning_rate": 9.009284826036691e-07, |
|
"loss": 0.5529, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.9214092140921409, |
|
"grad_norm": 0.17053255660732877, |
|
"learning_rate": 8.615611706507043e-07, |
|
"loss": 0.5365, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.9232158988256549, |
|
"grad_norm": 0.1608923256793346, |
|
"learning_rate": 8.230582074730903e-07, |
|
"loss": 0.5264, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.9250225835591689, |
|
"grad_norm": 0.165778884389128, |
|
"learning_rate": 7.854209717842231e-07, |
|
"loss": 0.5541, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.926829268292683, |
|
"grad_norm": 0.14815134326147517, |
|
"learning_rate": 7.486508112975548e-07, |
|
"loss": 0.5362, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.928635953026197, |
|
"grad_norm": 0.15901694427467797, |
|
"learning_rate": 7.127490426783123e-07, |
|
"loss": 0.5325, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.9304426377597109, |
|
"grad_norm": 0.15383848504404457, |
|
"learning_rate": 6.777169514963766e-07, |
|
"loss": 0.5385, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.9322493224932249, |
|
"grad_norm": 0.142355498475389, |
|
"learning_rate": 6.435557921802254e-07, |
|
"loss": 0.5469, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.934056007226739, |
|
"grad_norm": 0.16876148808491534, |
|
"learning_rate": 6.102667879720164e-07, |
|
"loss": 0.5351, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.935862691960253, |
|
"grad_norm": 0.15425727197523914, |
|
"learning_rate": 5.778511308838108e-07, |
|
"loss": 0.5364, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.9376693766937669, |
|
"grad_norm": 0.15990766509390575, |
|
"learning_rate": 5.463099816548579e-07, |
|
"loss": 0.5264, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.9394760614272809, |
|
"grad_norm": 0.1502693196204282, |
|
"learning_rate": 5.15644469710061e-07, |
|
"loss": 0.5356, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.941282746160795, |
|
"grad_norm": 0.14369656200396672, |
|
"learning_rate": 4.858556931194996e-07, |
|
"loss": 0.5431, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.943089430894309, |
|
"grad_norm": 0.15290045489678736, |
|
"learning_rate": 4.5694471855914914e-07, |
|
"loss": 0.539, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.9448961156278229, |
|
"grad_norm": 0.13691631778640515, |
|
"learning_rate": 4.2891258127264745e-07, |
|
"loss": 0.5299, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.9467028003613369, |
|
"grad_norm": 0.15611569264256087, |
|
"learning_rate": 4.0176028503425835e-07, |
|
"loss": 0.539, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.948509485094851, |
|
"grad_norm": 0.14415668389225905, |
|
"learning_rate": 3.7548880211290825e-07, |
|
"loss": 0.5324, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.950316169828365, |
|
"grad_norm": 0.14724614624038865, |
|
"learning_rate": 3.5009907323737825e-07, |
|
"loss": 0.5369, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.952122854561879, |
|
"grad_norm": 0.15606099252189523, |
|
"learning_rate": 3.2559200756260846e-07, |
|
"loss": 0.537, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.9539295392953929, |
|
"grad_norm": 0.14368873287433334, |
|
"learning_rate": 3.0196848263716327e-07, |
|
"loss": 0.5357, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.955736224028907, |
|
"grad_norm": 0.1491717198389433, |
|
"learning_rate": 2.7922934437178695e-07, |
|
"loss": 0.5415, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.957542908762421, |
|
"grad_norm": 0.15475915986644267, |
|
"learning_rate": 2.573754070091278e-07, |
|
"loss": 0.5329, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.959349593495935, |
|
"grad_norm": 0.14163962631031565, |
|
"learning_rate": 2.3640745309457256e-07, |
|
"loss": 0.5457, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.9611562782294489, |
|
"grad_norm": 0.1440214341364719, |
|
"learning_rate": 2.16326233448233e-07, |
|
"loss": 0.5355, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.9629629629629629, |
|
"grad_norm": 0.15233722035050537, |
|
"learning_rate": 1.9713246713805588e-07, |
|
"loss": 0.5398, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.964769647696477, |
|
"grad_norm": 0.23433531335438026, |
|
"learning_rate": 1.7882684145406614e-07, |
|
"loss": 0.5355, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.966576332429991, |
|
"grad_norm": 0.16569253954096036, |
|
"learning_rate": 1.614100118837808e-07, |
|
"loss": 0.5363, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.9683830171635049, |
|
"grad_norm": 0.1439541745185468, |
|
"learning_rate": 1.4488260208871397e-07, |
|
"loss": 0.5449, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.9701897018970189, |
|
"grad_norm": 0.14639044655493838, |
|
"learning_rate": 1.2924520388204464e-07, |
|
"loss": 0.5512, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.971996386630533, |
|
"grad_norm": 0.17436828715544114, |
|
"learning_rate": 1.1449837720745037e-07, |
|
"loss": 0.5352, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.973803071364047, |
|
"grad_norm": 0.15070039688252895, |
|
"learning_rate": 1.006426501190233e-07, |
|
"loss": 0.5332, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.975609756097561, |
|
"grad_norm": 0.1514769323001164, |
|
"learning_rate": 8.767851876239074e-08, |
|
"loss": 0.5333, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.9774164408310749, |
|
"grad_norm": 0.14344393108318002, |
|
"learning_rate": 7.560644735692956e-08, |
|
"loss": 0.542, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.979223125564589, |
|
"grad_norm": 0.15491867081099303, |
|
"learning_rate": 6.442686817914877e-08, |
|
"loss": 0.5339, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.981029810298103, |
|
"grad_norm": 0.15798230842015154, |
|
"learning_rate": 5.414018154721867e-08, |
|
"loss": 0.5477, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.982836495031617, |
|
"grad_norm": 0.14860239453630705, |
|
"learning_rate": 4.474675580662113e-08, |
|
"loss": 0.5475, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.9846431797651309, |
|
"grad_norm": 0.1418692614977398, |
|
"learning_rate": 3.6246927316976875e-08, |
|
"loss": 0.5495, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.986449864498645, |
|
"grad_norm": 0.14636120337023928, |
|
"learning_rate": 2.864100043998563e-08, |
|
"loss": 0.5392, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.988256549232159, |
|
"grad_norm": 0.1522208123086209, |
|
"learning_rate": 2.192924752854042e-08, |
|
"loss": 0.5509, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.990063233965673, |
|
"grad_norm": 0.14909766126130344, |
|
"learning_rate": 1.6111908916965903e-08, |
|
"loss": 0.5479, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.991869918699187, |
|
"grad_norm": 0.14765596505567274, |
|
"learning_rate": 1.1189192912416934e-08, |
|
"loss": 0.5426, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.993676603432701, |
|
"grad_norm": 0.14558245782809656, |
|
"learning_rate": 7.1612757874151e-09, |
|
"loss": 0.5323, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.995483288166215, |
|
"grad_norm": 0.14980144301653261, |
|
"learning_rate": 4.028301773545407e-09, |
|
"loss": 0.5512, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.997289972899729, |
|
"grad_norm": 0.1466612660048151, |
|
"learning_rate": 1.7903830562826517e-09, |
|
"loss": 0.5335, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.999096657633243, |
|
"grad_norm": 0.14883550497430476, |
|
"learning_rate": 4.4759977098074446e-10, |
|
"loss": 0.5371, |
|
"step": 553 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 553, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 641050695434240.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|