|
{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.36133694670280037,
  "eval_steps": 500,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0018066847335140017,
      "grad_norm": 2.8401210755443786,
      "learning_rate": 0.0,
      "loss": 0.8251,
      "step": 1
    },
    {
      "epoch": 0.0036133694670280035,
      "grad_norm": 2.884596771811998,
      "learning_rate": 1.7857142857142857e-06,
      "loss": 0.8284,
      "step": 2
    },
    {
      "epoch": 0.005420054200542005,
      "grad_norm": 2.885426730353446,
      "learning_rate": 3.5714285714285714e-06,
      "loss": 0.8426,
      "step": 3
    },
    {
      "epoch": 0.007226738934056007,
      "grad_norm": 2.651910724864975,
      "learning_rate": 5.357142857142857e-06,
      "loss": 0.8329,
      "step": 4
    },
    {
      "epoch": 0.009033423667570008,
      "grad_norm": 2.138099539200911,
      "learning_rate": 7.142857142857143e-06,
      "loss": 0.8087,
      "step": 5
    },
    {
      "epoch": 0.01084010840108401,
      "grad_norm": 1.5516804515831568,
      "learning_rate": 8.92857142857143e-06,
      "loss": 0.7878,
      "step": 6
    },
    {
      "epoch": 0.012646793134598013,
      "grad_norm": 1.3537216088647457,
      "learning_rate": 1.0714285714285714e-05,
      "loss": 0.76,
      "step": 7
    },
    {
      "epoch": 0.014453477868112014,
      "grad_norm": 2.2798033166818095,
      "learning_rate": 1.25e-05,
      "loss": 0.7502,
      "step": 8
    },
    {
      "epoch": 0.016260162601626018,
      "grad_norm": 2.4280919600136315,
      "learning_rate": 1.4285714285714285e-05,
      "loss": 0.7449,
      "step": 9
    },
    {
      "epoch": 0.018066847335140017,
      "grad_norm": 1.931081683969551,
      "learning_rate": 1.6071428571428572e-05,
      "loss": 0.7271,
      "step": 10
    },
    {
      "epoch": 0.01987353206865402,
      "grad_norm": 2.033957326273334,
      "learning_rate": 1.785714285714286e-05,
      "loss": 0.7224,
      "step": 11
    },
    {
      "epoch": 0.02168021680216802,
      "grad_norm": 1.7440303482576578,
      "learning_rate": 1.9642857142857145e-05,
      "loss": 0.7373,
      "step": 12
    },
    {
      "epoch": 0.023486901535682024,
      "grad_norm": 1.1423427090231095,
      "learning_rate": 2.1428571428571428e-05,
      "loss": 0.7033,
      "step": 13
    },
    {
      "epoch": 0.025293586269196026,
      "grad_norm": 0.9350758811837806,
      "learning_rate": 2.3214285714285715e-05,
      "loss": 0.6886,
      "step": 14
    },
    {
      "epoch": 0.02710027100271003,
      "grad_norm": 0.9041529205527444,
      "learning_rate": 2.5e-05,
      "loss": 0.6858,
      "step": 15
    },
    {
      "epoch": 0.028906955736224028,
      "grad_norm": 0.7774159470583337,
      "learning_rate": 2.6785714285714288e-05,
      "loss": 0.676,
      "step": 16
    },
    {
      "epoch": 0.03071364046973803,
      "grad_norm": 0.6642849851627606,
      "learning_rate": 2.857142857142857e-05,
      "loss": 0.6816,
      "step": 17
    },
    {
      "epoch": 0.032520325203252036,
      "grad_norm": 0.6696862181877565,
      "learning_rate": 3.0357142857142857e-05,
      "loss": 0.6727,
      "step": 18
    },
    {
      "epoch": 0.03432700993676603,
      "grad_norm": 0.6199596133483118,
      "learning_rate": 3.2142857142857144e-05,
      "loss": 0.6678,
      "step": 19
    },
    {
      "epoch": 0.036133694670280034,
      "grad_norm": 0.5724104094717255,
      "learning_rate": 3.392857142857143e-05,
      "loss": 0.6448,
      "step": 20
    },
    {
      "epoch": 0.037940379403794036,
      "grad_norm": 0.5762214978597714,
      "learning_rate": 3.571428571428572e-05,
      "loss": 0.6506,
      "step": 21
    },
    {
      "epoch": 0.03974706413730804,
      "grad_norm": 0.5444644294356963,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 0.6413,
      "step": 22
    },
    {
      "epoch": 0.04155374887082204,
      "grad_norm": 0.4801008888914334,
      "learning_rate": 3.928571428571429e-05,
      "loss": 0.6425,
      "step": 23
    },
    {
      "epoch": 0.04336043360433604,
      "grad_norm": 0.4780671650041637,
      "learning_rate": 4.107142857142857e-05,
      "loss": 0.6489,
      "step": 24
    },
    {
      "epoch": 0.045167118337850046,
      "grad_norm": 0.5145358853730851,
      "learning_rate": 4.2857142857142856e-05,
      "loss": 0.6306,
      "step": 25
    },
    {
      "epoch": 0.04697380307136405,
      "grad_norm": 0.4229563893889767,
      "learning_rate": 4.464285714285715e-05,
      "loss": 0.6453,
      "step": 26
    },
    {
      "epoch": 0.04878048780487805,
      "grad_norm": 0.4021332182222708,
      "learning_rate": 4.642857142857143e-05,
      "loss": 0.6251,
      "step": 27
    },
    {
      "epoch": 0.05058717253839205,
      "grad_norm": 0.48615223659558016,
      "learning_rate": 4.8214285714285716e-05,
      "loss": 0.6272,
      "step": 28
    },
    {
      "epoch": 0.052393857271906055,
      "grad_norm": 0.46304419453924084,
      "learning_rate": 5e-05,
      "loss": 0.6385,
      "step": 29
    },
    {
      "epoch": 0.05420054200542006,
      "grad_norm": 0.4055914485047887,
      "learning_rate": 4.999955240022902e-05,
      "loss": 0.6406,
      "step": 30
    },
    {
      "epoch": 0.05600722673893405,
      "grad_norm": 0.4320919468210144,
      "learning_rate": 4.999820961694372e-05,
      "loss": 0.6303,
      "step": 31
    },
    {
      "epoch": 0.057813911472448055,
      "grad_norm": 0.4042983593828653,
      "learning_rate": 4.999597169822646e-05,
      "loss": 0.6186,
      "step": 32
    },
    {
      "epoch": 0.05962059620596206,
      "grad_norm": 0.4310835292518631,
      "learning_rate": 4.9992838724212585e-05,
      "loss": 0.6287,
      "step": 33
    },
    {
      "epoch": 0.06142728093947606,
      "grad_norm": 0.412271814827728,
      "learning_rate": 4.9988810807087584e-05,
      "loss": 0.6165,
      "step": 34
    },
    {
      "epoch": 0.06323396567299007,
      "grad_norm": 0.40598837987994935,
      "learning_rate": 4.998388809108303e-05,
      "loss": 0.622,
      "step": 35
    },
    {
      "epoch": 0.06504065040650407,
      "grad_norm": 0.4223908220324365,
      "learning_rate": 4.997807075247146e-05,
      "loss": 0.6189,
      "step": 36
    },
    {
      "epoch": 0.06684733514001806,
      "grad_norm": 0.38197899028265064,
      "learning_rate": 4.997135899956001e-05,
      "loss": 0.623,
      "step": 37
    },
    {
      "epoch": 0.06865401987353206,
      "grad_norm": 0.3885330150920031,
      "learning_rate": 4.9963753072683025e-05,
      "loss": 0.6164,
      "step": 38
    },
    {
      "epoch": 0.07046070460704607,
      "grad_norm": 0.41115019726029983,
      "learning_rate": 4.9955253244193375e-05,
      "loss": 0.6182,
      "step": 39
    },
    {
      "epoch": 0.07226738934056007,
      "grad_norm": 0.3495037257115665,
      "learning_rate": 4.994585981845278e-05,
      "loss": 0.6072,
      "step": 40
    },
    {
      "epoch": 0.07407407407407407,
      "grad_norm": 0.3973831198418077,
      "learning_rate": 4.9935573131820854e-05,
      "loss": 0.6137,
      "step": 41
    },
    {
      "epoch": 0.07588075880758807,
      "grad_norm": 0.3339390206679997,
      "learning_rate": 4.9924393552643075e-05,
      "loss": 0.6064,
      "step": 42
    },
    {
      "epoch": 0.07768744354110207,
      "grad_norm": 0.3384678373992373,
      "learning_rate": 4.991232148123761e-05,
      "loss": 0.606,
      "step": 43
    },
    {
      "epoch": 0.07949412827461608,
      "grad_norm": 0.3764492630812678,
      "learning_rate": 4.989935734988098e-05,
      "loss": 0.601,
      "step": 44
    },
    {
      "epoch": 0.08130081300813008,
      "grad_norm": 0.35707353533847597,
      "learning_rate": 4.988550162279255e-05,
      "loss": 0.6097,
      "step": 45
    },
    {
      "epoch": 0.08310749774164408,
      "grad_norm": 0.34040533355863656,
      "learning_rate": 4.987075479611796e-05,
      "loss": 0.6094,
      "step": 46
    },
    {
      "epoch": 0.08491418247515808,
      "grad_norm": 0.34496537693497575,
      "learning_rate": 4.985511739791129e-05,
      "loss": 0.6129,
      "step": 47
    },
    {
      "epoch": 0.08672086720867209,
      "grad_norm": 0.3655410266089117,
      "learning_rate": 4.983858998811622e-05,
      "loss": 0.5994,
      "step": 48
    },
    {
      "epoch": 0.08852755194218609,
      "grad_norm": 0.32151319632227454,
      "learning_rate": 4.9821173158545936e-05,
      "loss": 0.607,
      "step": 49
    },
    {
      "epoch": 0.09033423667570009,
      "grad_norm": 0.3573869377855791,
      "learning_rate": 4.980286753286195e-05,
      "loss": 0.6067,
      "step": 50
    },
    {
      "epoch": 0.0921409214092141,
      "grad_norm": 0.3601371463623409,
      "learning_rate": 4.978367376655177e-05,
      "loss": 0.5965,
      "step": 51
    },
    {
      "epoch": 0.0939476061427281,
      "grad_norm": 0.3288839599265164,
      "learning_rate": 4.976359254690543e-05,
      "loss": 0.6128,
      "step": 52
    },
    {
      "epoch": 0.0957542908762421,
      "grad_norm": 0.3899144366212297,
      "learning_rate": 4.974262459299087e-05,
      "loss": 0.5992,
      "step": 53
    },
    {
      "epoch": 0.0975609756097561,
      "grad_norm": 0.31373036051997816,
      "learning_rate": 4.972077065562821e-05,
      "loss": 0.5894,
      "step": 54
    },
    {
      "epoch": 0.0993676603432701,
      "grad_norm": 0.3317064945312652,
      "learning_rate": 4.969803151736284e-05,
      "loss": 0.6046,
      "step": 55
    },
    {
      "epoch": 0.1011743450767841,
      "grad_norm": 0.4008096818415782,
      "learning_rate": 4.9674407992437394e-05,
      "loss": 0.6091,
      "step": 56
    },
    {
      "epoch": 0.10298102981029811,
      "grad_norm": 0.32205121061079506,
      "learning_rate": 4.964990092676263e-05,
      "loss": 0.5936,
      "step": 57
    },
    {
      "epoch": 0.10478771454381211,
      "grad_norm": 0.3834592472356572,
      "learning_rate": 4.962451119788709e-05,
      "loss": 0.6035,
      "step": 58
    },
    {
      "epoch": 0.10659439927732611,
      "grad_norm": 0.4364607834229955,
      "learning_rate": 4.959823971496574e-05,
      "loss": 0.5895,
      "step": 59
    },
    {
      "epoch": 0.10840108401084012,
      "grad_norm": 0.39798614932188364,
      "learning_rate": 4.957108741872736e-05,
      "loss": 0.5903,
      "step": 60
    },
    {
      "epoch": 0.1102077687443541,
      "grad_norm": 0.4078983073259993,
      "learning_rate": 4.954305528144085e-05,
      "loss": 0.594,
      "step": 61
    },
    {
      "epoch": 0.1120144534778681,
      "grad_norm": 0.5064139886264023,
      "learning_rate": 4.9514144306880506e-05,
      "loss": 0.5989,
      "step": 62
    },
    {
      "epoch": 0.11382113821138211,
      "grad_norm": 0.3049367414530056,
      "learning_rate": 4.9484355530289944e-05,
      "loss": 0.5982,
      "step": 63
    },
    {
      "epoch": 0.11562782294489611,
      "grad_norm": 0.45932591541254997,
      "learning_rate": 4.9453690018345144e-05,
      "loss": 0.598,
      "step": 64
    },
    {
      "epoch": 0.11743450767841011,
      "grad_norm": 0.34088884376991047,
      "learning_rate": 4.9422148869116194e-05,
      "loss": 0.5914,
      "step": 65
    },
    {
      "epoch": 0.11924119241192412,
      "grad_norm": 0.4408196994719187,
      "learning_rate": 4.938973321202799e-05,
      "loss": 0.5943,
      "step": 66
    },
    {
      "epoch": 0.12104787714543812,
      "grad_norm": 0.4034840944061305,
      "learning_rate": 4.935644420781978e-05,
      "loss": 0.5852,
      "step": 67
    },
    {
      "epoch": 0.12285456187895212,
      "grad_norm": 0.3532258693927161,
      "learning_rate": 4.932228304850363e-05,
      "loss": 0.6003,
      "step": 68
    },
    {
      "epoch": 0.12466124661246612,
      "grad_norm": 0.4041417285254443,
      "learning_rate": 4.928725095732169e-05,
      "loss": 0.6019,
      "step": 69
    },
    {
      "epoch": 0.12646793134598014,
      "grad_norm": 0.338119605860217,
      "learning_rate": 4.925134918870245e-05,
      "loss": 0.6056,
      "step": 70
    },
    {
      "epoch": 0.12827461607949414,
      "grad_norm": 0.2924641177682486,
      "learning_rate": 4.9214579028215776e-05,
      "loss": 0.5784,
      "step": 71
    },
    {
      "epoch": 0.13008130081300814,
      "grad_norm": 0.37576892630911196,
      "learning_rate": 4.917694179252692e-05,
      "loss": 0.5966,
      "step": 72
    },
    {
      "epoch": 0.13188798554652212,
      "grad_norm": 0.3066908816324021,
      "learning_rate": 4.91384388293493e-05,
      "loss": 0.5944,
      "step": 73
    },
    {
      "epoch": 0.13369467028003612,
      "grad_norm": 0.3296017371410444,
      "learning_rate": 4.909907151739633e-05,
      "loss": 0.5863,
      "step": 74
    },
    {
      "epoch": 0.13550135501355012,
      "grad_norm": 0.3510646842800697,
      "learning_rate": 4.9058841266332e-05,
      "loss": 0.5854,
      "step": 75
    },
    {
      "epoch": 0.13730803974706413,
      "grad_norm": 0.33050013383675975,
      "learning_rate": 4.90177495167204e-05,
      "loss": 0.5816,
      "step": 76
    },
    {
      "epoch": 0.13911472448057813,
      "grad_norm": 0.336545072709173,
      "learning_rate": 4.897579773997415e-05,
      "loss": 0.5768,
      "step": 77
    },
    {
      "epoch": 0.14092140921409213,
      "grad_norm": 0.3401542906091868,
      "learning_rate": 4.893298743830168e-05,
      "loss": 0.5877,
      "step": 78
    },
    {
      "epoch": 0.14272809394760613,
      "grad_norm": 0.30833272476392615,
      "learning_rate": 4.888932014465352e-05,
      "loss": 0.5949,
      "step": 79
    },
    {
      "epoch": 0.14453477868112014,
      "grad_norm": 0.40829913126640544,
      "learning_rate": 4.88447974226673e-05,
      "loss": 0.6045,
      "step": 80
    },
    {
      "epoch": 0.14634146341463414,
      "grad_norm": 0.3247095550760803,
      "learning_rate": 4.879942086661184e-05,
      "loss": 0.5901,
      "step": 81
    },
    {
      "epoch": 0.14814814814814814,
      "grad_norm": 0.40484730371305205,
      "learning_rate": 4.875319210133004e-05,
      "loss": 0.5825,
      "step": 82
    },
    {
      "epoch": 0.14995483288166214,
      "grad_norm": 0.37019522474324174,
      "learning_rate": 4.870611278218066e-05,
      "loss": 0.5918,
      "step": 83
    },
    {
      "epoch": 0.15176151761517614,
      "grad_norm": 0.3654744041300334,
      "learning_rate": 4.865818459497911e-05,
      "loss": 0.5865,
      "step": 84
    },
    {
      "epoch": 0.15356820234869015,
      "grad_norm": 0.3001653084612634,
      "learning_rate": 4.860940925593703e-05,
      "loss": 0.5889,
      "step": 85
    },
    {
      "epoch": 0.15537488708220415,
      "grad_norm": 0.3463142769052332,
      "learning_rate": 4.8559788511600876e-05,
      "loss": 0.5881,
      "step": 86
    },
    {
      "epoch": 0.15718157181571815,
      "grad_norm": 0.3221292284344934,
      "learning_rate": 4.850932413878934e-05,
      "loss": 0.5901,
      "step": 87
    },
    {
      "epoch": 0.15898825654923215,
      "grad_norm": 0.33253415770699135,
      "learning_rate": 4.8458017944529776e-05,
      "loss": 0.5952,
      "step": 88
    },
    {
      "epoch": 0.16079494128274616,
      "grad_norm": 0.37021022779245716,
      "learning_rate": 4.8405871765993433e-05,
      "loss": 0.5928,
      "step": 89
    },
    {
      "epoch": 0.16260162601626016,
      "grad_norm": 0.3197338147243217,
      "learning_rate": 4.8352887470429726e-05,
      "loss": 0.5837,
      "step": 90
    },
    {
      "epoch": 0.16440831074977416,
      "grad_norm": 0.35706780201968874,
      "learning_rate": 4.8299066955099335e-05,
      "loss": 0.5811,
      "step": 91
    },
    {
      "epoch": 0.16621499548328816,
      "grad_norm": 0.3730971582648582,
      "learning_rate": 4.8244412147206284e-05,
      "loss": 0.586,
      "step": 92
    },
    {
      "epoch": 0.16802168021680217,
      "grad_norm": 0.33467520904104064,
      "learning_rate": 4.8188925003828945e-05,
      "loss": 0.5919,
      "step": 93
    },
    {
      "epoch": 0.16982836495031617,
      "grad_norm": 0.41181901183464464,
      "learning_rate": 4.813260751184992e-05,
      "loss": 0.5922,
      "step": 94
    },
    {
      "epoch": 0.17163504968383017,
      "grad_norm": 0.31057237794043846,
      "learning_rate": 4.807546168788494e-05,
      "loss": 0.5835,
      "step": 95
    },
    {
      "epoch": 0.17344173441734417,
      "grad_norm": 0.4045185112342142,
      "learning_rate": 4.8017489578210604e-05,
      "loss": 0.5839,
      "step": 96
    },
    {
      "epoch": 0.17524841915085818,
      "grad_norm": 0.342091450071029,
      "learning_rate": 4.7958693258691167e-05,
      "loss": 0.5891,
      "step": 97
    },
    {
      "epoch": 0.17705510388437218,
      "grad_norm": 0.32193383766669476,
      "learning_rate": 4.7899074834704165e-05,
      "loss": 0.5814,
      "step": 98
    },
    {
      "epoch": 0.17886178861788618,
      "grad_norm": 0.33881983844597735,
      "learning_rate": 4.783863644106502e-05,
      "loss": 0.5951,
      "step": 99
    },
    {
      "epoch": 0.18066847335140018,
      "grad_norm": 0.3141049036238513,
      "learning_rate": 4.7777380241950645e-05,
      "loss": 0.5672,
      "step": 100
    },
    {
      "epoch": 0.18247515808491419,
      "grad_norm": 0.30258609960076854,
      "learning_rate": 4.7715308430821864e-05,
      "loss": 0.5831,
      "step": 101
    },
    {
      "epoch": 0.1842818428184282,
      "grad_norm": 0.33013650150122287,
      "learning_rate": 4.765242323034498e-05,
      "loss": 0.6019,
      "step": 102
    },
    {
      "epoch": 0.1860885275519422,
      "grad_norm": 0.31075076006352215,
      "learning_rate": 4.758872689231208e-05,
      "loss": 0.581,
      "step": 103
    },
    {
      "epoch": 0.1878952122854562,
      "grad_norm": 0.3376659425636789,
      "learning_rate": 4.752422169756048e-05,
      "loss": 0.5775,
      "step": 104
    },
    {
      "epoch": 0.1897018970189702,
      "grad_norm": 0.3373397970824128,
      "learning_rate": 4.745890995589101e-05,
      "loss": 0.5705,
      "step": 105
    },
    {
      "epoch": 0.1915085817524842,
      "grad_norm": 0.3159036744458245,
      "learning_rate": 4.7392794005985326e-05,
      "loss": 0.574,
      "step": 106
    },
    {
      "epoch": 0.1933152664859982,
      "grad_norm": 0.3157369760717556,
      "learning_rate": 4.732587621532214e-05,
      "loss": 0.58,
      "step": 107
    },
    {
      "epoch": 0.1951219512195122,
      "grad_norm": 0.3151534184596026,
      "learning_rate": 4.725815898009247e-05,
      "loss": 0.5785,
      "step": 108
    },
    {
      "epoch": 0.1969286359530262,
      "grad_norm": 0.3067792635656275,
      "learning_rate": 4.718964472511386e-05,
      "loss": 0.5864,
      "step": 109
    },
    {
      "epoch": 0.1987353206865402,
      "grad_norm": 0.34885189836356073,
      "learning_rate": 4.712033590374346e-05,
      "loss": 0.5685,
      "step": 110
    },
    {
      "epoch": 0.2005420054200542,
      "grad_norm": 0.2725346961548414,
      "learning_rate": 4.705023499779031e-05,
      "loss": 0.5876,
      "step": 111
    },
    {
      "epoch": 0.2023486901535682,
      "grad_norm": 0.34146491162388654,
      "learning_rate": 4.6979344517426345e-05,
      "loss": 0.5814,
      "step": 112
    },
    {
      "epoch": 0.2041553748870822,
      "grad_norm": 0.2979010304617956,
      "learning_rate": 4.690766700109659e-05,
      "loss": 0.5857,
      "step": 113
    },
    {
      "epoch": 0.20596205962059622,
      "grad_norm": 0.3317475544222318,
      "learning_rate": 4.6835205015428246e-05,
      "loss": 0.5685,
      "step": 114
    },
    {
      "epoch": 0.20776874435411022,
      "grad_norm": 0.3490432374306535,
      "learning_rate": 4.676196115513876e-05,
      "loss": 0.585,
      "step": 115
    },
    {
      "epoch": 0.20957542908762422,
      "grad_norm": 0.2845519792036694,
      "learning_rate": 4.668793804294294e-05,
      "loss": 0.5779,
      "step": 116
    },
    {
      "epoch": 0.21138211382113822,
      "grad_norm": 0.30626266068728963,
      "learning_rate": 4.661313832945904e-05,
      "loss": 0.5901,
      "step": 117
    },
    {
      "epoch": 0.21318879855465223,
      "grad_norm": 0.3411792829442568,
      "learning_rate": 4.653756469311381e-05,
      "loss": 0.5766,
      "step": 118
    },
    {
      "epoch": 0.21499548328816623,
      "grad_norm": 0.273095998341187,
      "learning_rate": 4.6461219840046654e-05,
      "loss": 0.5666,
      "step": 119
    },
    {
      "epoch": 0.21680216802168023,
      "grad_norm": 0.3070242568215688,
      "learning_rate": 4.638410650401267e-05,
      "loss": 0.5861,
      "step": 120
    },
    {
      "epoch": 0.2186088527551942,
      "grad_norm": 0.315496788680795,
      "learning_rate": 4.6306227446284775e-05,
      "loss": 0.5817,
      "step": 121
    },
    {
      "epoch": 0.2204155374887082,
      "grad_norm": 0.285486208489652,
      "learning_rate": 4.622758545555485e-05,
      "loss": 0.5862,
      "step": 122
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 0.3014286983026253,
      "learning_rate": 4.614818334783384e-05,
      "loss": 0.5756,
      "step": 123
    },
    {
      "epoch": 0.2240289069557362,
      "grad_norm": 0.3286311142757772,
      "learning_rate": 4.606802396635098e-05,
      "loss": 0.5622,
      "step": 124
    },
    {
      "epoch": 0.22583559168925021,
      "grad_norm": 0.3099894773692972,
      "learning_rate": 4.598711018145193e-05,
      "loss": 0.5702,
      "step": 125
    },
    {
      "epoch": 0.22764227642276422,
      "grad_norm": 0.3402171436347276,
      "learning_rate": 4.590544489049602e-05,
      "loss": 0.5822,
      "step": 126
    },
    {
      "epoch": 0.22944896115627822,
      "grad_norm": 0.2949342532195096,
      "learning_rate": 4.5823031017752485e-05,
      "loss": 0.5753,
      "step": 127
    },
    {
      "epoch": 0.23125564588979222,
      "grad_norm": 0.33271367144515557,
      "learning_rate": 4.5739871514295786e-05,
      "loss": 0.5746,
      "step": 128
    },
    {
      "epoch": 0.23306233062330622,
      "grad_norm": 0.2854287194746108,
      "learning_rate": 4.5655969357899874e-05,
      "loss": 0.5688,
      "step": 129
    },
    {
      "epoch": 0.23486901535682023,
      "grad_norm": 0.2911634608087971,
      "learning_rate": 4.5571327552931645e-05,
      "loss": 0.5875,
      "step": 130
    },
    {
      "epoch": 0.23667570009033423,
      "grad_norm": 0.3282856212432573,
      "learning_rate": 4.54859491302433e-05,
      "loss": 0.5828,
      "step": 131
    },
    {
      "epoch": 0.23848238482384823,
      "grad_norm": 0.31230217501155305,
      "learning_rate": 4.5399837147063825e-05,
      "loss": 0.5622,
      "step": 132
    },
    {
      "epoch": 0.24028906955736223,
      "grad_norm": 0.26497408669368616,
      "learning_rate": 4.531299468688955e-05,
      "loss": 0.576,
      "step": 133
    },
    {
      "epoch": 0.24209575429087624,
      "grad_norm": 0.31735124152798094,
      "learning_rate": 4.522542485937369e-05,
      "loss": 0.5772,
      "step": 134
    },
    {
      "epoch": 0.24390243902439024,
      "grad_norm": 0.3015639590739915,
      "learning_rate": 4.5137130800215025e-05,
      "loss": 0.5748,
      "step": 135
    },
    {
      "epoch": 0.24570912375790424,
      "grad_norm": 0.27756271594362325,
      "learning_rate": 4.50481156710456e-05,
      "loss": 0.5782,
      "step": 136
    },
    {
      "epoch": 0.24751580849141824,
      "grad_norm": 0.3311373615406198,
      "learning_rate": 4.495838265931754e-05,
      "loss": 0.5632,
      "step": 137
    },
    {
      "epoch": 0.24932249322493225,
      "grad_norm": 0.26966897365996273,
      "learning_rate": 4.486793497818889e-05,
      "loss": 0.5778,
      "step": 138
    },
    {
      "epoch": 0.25112917795844625,
      "grad_norm": 0.3458179405656153,
      "learning_rate": 4.477677586640854e-05,
      "loss": 0.5814,
      "step": 139
    },
    {
      "epoch": 0.2529358626919603,
      "grad_norm": 0.2722125808652539,
      "learning_rate": 4.4684908588200304e-05,
      "loss": 0.5804,
      "step": 140
    },
    {
      "epoch": 0.25474254742547425,
      "grad_norm": 0.29853453900528515,
      "learning_rate": 4.4592336433146e-05,
      "loss": 0.5622,
      "step": 141
    },
    {
      "epoch": 0.2565492321589883,
      "grad_norm": 0.2616025861086357,
      "learning_rate": 4.449906271606766e-05,
      "loss": 0.5816,
      "step": 142
    },
    {
      "epoch": 0.25835591689250226,
      "grad_norm": 0.28875897468487793,
      "learning_rate": 4.440509077690883e-05,
      "loss": 0.5751,
      "step": 143
    },
    {
      "epoch": 0.2601626016260163,
      "grad_norm": 0.31089280557100357,
      "learning_rate": 4.431042398061499e-05,
      "loss": 0.5614,
      "step": 144
    },
    {
      "epoch": 0.26196928635953026,
      "grad_norm": 0.24453740569318233,
      "learning_rate": 4.421506571701305e-05,
      "loss": 0.5739,
      "step": 145
    },
    {
      "epoch": 0.26377597109304424,
      "grad_norm": 0.3298892559409927,
      "learning_rate": 4.4119019400689967e-05,
      "loss": 0.5789,
      "step": 146
    },
    {
      "epoch": 0.26558265582655827,
      "grad_norm": 0.29175370949614915,
      "learning_rate": 4.402228847087047e-05,
      "loss": 0.5687,
      "step": 147
    },
    {
      "epoch": 0.26738934056007224,
      "grad_norm": 0.30364879113096566,
      "learning_rate": 4.3924876391293915e-05,
      "loss": 0.57,
      "step": 148
    },
    {
      "epoch": 0.26919602529358627,
      "grad_norm": 0.2924856865957413,
      "learning_rate": 4.382678665009028e-05,
      "loss": 0.5675,
      "step": 149
    },
    {
      "epoch": 0.27100271002710025,
      "grad_norm": 0.2831086406138475,
      "learning_rate": 4.372802275965521e-05,
      "loss": 0.5777,
      "step": 150
    },
    {
      "epoch": 0.2728093947606143,
      "grad_norm": 0.3017093604784243,
      "learning_rate": 4.3628588256524285e-05,
      "loss": 0.5589,
      "step": 151
    },
    {
      "epoch": 0.27461607949412825,
      "grad_norm": 0.2765860953865558,
      "learning_rate": 4.3528486701246376e-05,
      "loss": 0.5669,
      "step": 152
    },
    {
      "epoch": 0.2764227642276423,
      "grad_norm": 0.2904031273845622,
      "learning_rate": 4.3427721678256125e-05,
      "loss": 0.5671,
      "step": 153
    },
    {
      "epoch": 0.27822944896115626,
      "grad_norm": 0.2540349949087337,
      "learning_rate": 4.332629679574566e-05,
      "loss": 0.5702,
      "step": 154
    },
    {
      "epoch": 0.2800361336946703,
      "grad_norm": 0.3069633678570879,
      "learning_rate": 4.3224215685535294e-05,
      "loss": 0.5637,
      "step": 155
    },
    {
      "epoch": 0.28184281842818426,
      "grad_norm": 0.2542441755605158,
      "learning_rate": 4.312148200294355e-05,
      "loss": 0.5627,
      "step": 156
    },
    {
      "epoch": 0.2836495031616983,
      "grad_norm": 0.3018408188777611,
      "learning_rate": 4.301809942665625e-05,
      "loss": 0.5757,
      "step": 157
    },
    {
      "epoch": 0.28545618789521227,
      "grad_norm": 0.28743959410933695,
      "learning_rate": 4.2914071658594805e-05,
      "loss": 0.5734,
      "step": 158
    },
    {
      "epoch": 0.2872628726287263,
      "grad_norm": 0.2787808725595535,
      "learning_rate": 4.2809402423783624e-05,
      "loss": 0.5542,
      "step": 159
    },
    {
      "epoch": 0.28906955736224027,
      "grad_norm": 0.29206183996226404,
      "learning_rate": 4.2704095470216744e-05,
      "loss": 0.5635,
      "step": 160
    },
    {
      "epoch": 0.2908762420957543,
      "grad_norm": 0.28832091136901655,
      "learning_rate": 4.2598154568723626e-05,
      "loss": 0.5716,
      "step": 161
    },
    {
      "epoch": 0.2926829268292683,
      "grad_norm": 0.26053870722630096,
      "learning_rate": 4.249158351283414e-05,
      "loss": 0.5568,
      "step": 162
    },
    {
      "epoch": 0.2944896115627823,
      "grad_norm": 0.30602256840625275,
      "learning_rate": 4.2384386118642694e-05,
      "loss": 0.5661,
      "step": 163
    },
    {
      "epoch": 0.2962962962962963,
      "grad_norm": 0.24580296790912456,
      "learning_rate": 4.227656622467162e-05,
      "loss": 0.5751,
      "step": 164
    },
    {
      "epoch": 0.2981029810298103,
      "grad_norm": 0.29059059202913196,
      "learning_rate": 4.2168127691733706e-05,
      "loss": 0.5662,
      "step": 165
    },
    {
      "epoch": 0.2999096657633243,
      "grad_norm": 0.29414515714153977,
      "learning_rate": 4.205907440279395e-05,
      "loss": 0.5724,
      "step": 166
    },
    {
      "epoch": 0.3017163504968383,
      "grad_norm": 0.29414614437874254,
      "learning_rate": 4.1949410262830525e-05,
      "loss": 0.5717,
      "step": 167
    },
    {
      "epoch": 0.3035230352303523,
      "grad_norm": 0.2971349224474148,
      "learning_rate": 4.1839139198694946e-05,
      "loss": 0.5697,
      "step": 168
    },
    {
      "epoch": 0.3053297199638663,
      "grad_norm": 0.2793813743979144,
      "learning_rate": 4.172826515897146e-05,
      "loss": 0.5617,
      "step": 169
    },
    {
      "epoch": 0.3071364046973803,
      "grad_norm": 0.3023179135068114,
      "learning_rate": 4.161679211383565e-05,
      "loss": 0.5751,
      "step": 170
    },
    {
      "epoch": 0.3089430894308943,
      "grad_norm": 0.2477222878497878,
      "learning_rate": 4.150472405491226e-05,
      "loss": 0.5583,
      "step": 171
    },
    {
      "epoch": 0.3107497741644083,
      "grad_norm": 0.2707854956368433,
      "learning_rate": 4.139206499513231e-05,
      "loss": 0.5658,
      "step": 172
    },
    {
      "epoch": 0.31255645889792233,
      "grad_norm": 0.26805394435378294,
      "learning_rate": 4.127881896858934e-05,
      "loss": 0.5687,
      "step": 173
    },
    {
      "epoch": 0.3143631436314363,
      "grad_norm": 0.24818328664431513,
      "learning_rate": 4.116499003039499e-05,
      "loss": 0.5597,
      "step": 174
    },
    {
      "epoch": 0.31616982836495033,
      "grad_norm": 0.3076486841492656,
      "learning_rate": 4.105058225653381e-05,
      "loss": 0.5638,
      "step": 175
    },
    {
      "epoch": 0.3179765130984643,
      "grad_norm": 0.24484381551700804,
      "learning_rate": 4.093559974371725e-05,
      "loss": 0.578,
      "step": 176
    },
    {
      "epoch": 0.31978319783197834,
      "grad_norm": 0.25779831771912026,
      "learning_rate": 4.082004660923703e-05,
      "loss": 0.5536,
      "step": 177
    },
    {
      "epoch": 0.3215898825654923,
      "grad_norm": 0.2431924544197558,
      "learning_rate": 4.070392699081767e-05,
      "loss": 0.5606,
      "step": 178
    },
    {
      "epoch": 0.32339656729900634,
      "grad_norm": 0.24360453043613553,
      "learning_rate": 4.058724504646834e-05,
      "loss": 0.571,
      "step": 179
    },
    {
      "epoch": 0.3252032520325203,
      "grad_norm": 0.28406098303419186,
      "learning_rate": 4.047000495433397e-05,
      "loss": 0.5781,
      "step": 180
    },
    {
      "epoch": 0.32700993676603435,
      "grad_norm": 0.22058075555118684,
      "learning_rate": 4.035221091254563e-05,
      "loss": 0.5574,
      "step": 181
    },
    {
      "epoch": 0.3288166214995483,
      "grad_norm": 0.3009339432220675,
      "learning_rate": 4.023386713907021e-05,
      "loss": 0.5677,
      "step": 182
    },
    {
      "epoch": 0.33062330623306235,
      "grad_norm": 0.26774508776974565,
      "learning_rate": 4.011497787155938e-05,
      "loss": 0.577,
      "step": 183
    },
    {
      "epoch": 0.3324299909665763,
      "grad_norm": 0.284303170530224,
      "learning_rate": 3.9995547367197845e-05,
      "loss": 0.5735,
      "step": 184
    },
    {
      "epoch": 0.33423667570009036,
      "grad_norm": 0.2614285883935899,
      "learning_rate": 3.987557990255093e-05,
      "loss": 0.5695,
      "step": 185
    },
    {
      "epoch": 0.33604336043360433,
      "grad_norm": 0.28256380914027235,
      "learning_rate": 3.975507977341141e-05,
      "loss": 0.5648,
      "step": 186
    },
    {
      "epoch": 0.33785004516711836,
      "grad_norm": 0.28945389202706384,
      "learning_rate": 3.963405129464569e-05,
      "loss": 0.5618,
      "step": 187
    },
    {
      "epoch": 0.33965672990063234,
      "grad_norm": 0.2654043705467122,
      "learning_rate": 3.9512498800039335e-05,
      "loss": 0.5622,
      "step": 188
    },
    {
      "epoch": 0.34146341463414637,
      "grad_norm": 0.28212350075401693,
      "learning_rate": 3.939042664214184e-05,
      "loss": 0.5691,
      "step": 189
    },
    {
      "epoch": 0.34327009936766034,
      "grad_norm": 0.2666801982334387,
      "learning_rate": 3.92678391921108e-05,
      "loss": 0.5582,
      "step": 190
    },
    {
      "epoch": 0.34507678410117437,
      "grad_norm": 0.2658380267209616,
      "learning_rate": 3.914474083955537e-05,
      "loss": 0.5561,
      "step": 191
    },
    {
      "epoch": 0.34688346883468835,
      "grad_norm": 0.2767810249999182,
      "learning_rate": 3.902113599237911e-05,
      "loss": 0.5637,
      "step": 192
    },
    {
      "epoch": 0.3486901535682023,
      "grad_norm": 0.2686248952050933,
      "learning_rate": 3.8897029076622116e-05,
      "loss": 0.5648,
      "step": 193
    },
    {
      "epoch": 0.35049683830171635,
      "grad_norm": 0.2671044597386809,
      "learning_rate": 3.8772424536302564e-05,
      "loss": 0.5668,
      "step": 194
    },
    {
      "epoch": 0.3523035230352303,
      "grad_norm": 0.28240978604941985,
      "learning_rate": 3.8647326833257545e-05,
      "loss": 0.5503,
      "step": 195
    },
    {
      "epoch": 0.35411020776874436,
      "grad_norm": 0.2545745653651051,
      "learning_rate": 3.852174044698333e-05,
      "loss": 0.564,
      "step": 196
    },
    {
      "epoch": 0.35591689250225833,
      "grad_norm": 0.2788914079559153,
      "learning_rate": 3.8395669874474915e-05,
      "loss": 0.563,
      "step": 197
    },
    {
      "epoch": 0.35772357723577236,
      "grad_norm": 0.27664913837367605,
      "learning_rate": 3.826911963006507e-05,
      "loss": 0.5397,
      "step": 198
    },
    {
      "epoch": 0.35953026196928634,
      "grad_norm": 0.2854592929422416,
      "learning_rate": 3.814209424526262e-05,
      "loss": 0.5646,
      "step": 199
    },
    {
      "epoch": 0.36133694670280037,
      "grad_norm": 0.31461524287063203,
      "learning_rate": 3.801459826859022e-05,
      "loss": 0.5521,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 553,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 231524290789376.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}
|
|