|
{ |
|
"best_global_step": null, |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9033423667570009, |
|
"eval_steps": 500, |
|
"global_step": 500, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0018066847335140017, |
|
"grad_norm": 2.833710603673153, |
|
"learning_rate": 0.0, |
|
"loss": 0.8251, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0036133694670280035, |
|
"grad_norm": 2.881526090787093, |
|
"learning_rate": 1.7857142857142858e-07, |
|
"loss": 0.8284, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.005420054200542005, |
|
"grad_norm": 2.892351545715412, |
|
"learning_rate": 3.5714285714285716e-07, |
|
"loss": 0.8432, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.007226738934056007, |
|
"grad_norm": 2.854997748052911, |
|
"learning_rate": 5.357142857142857e-07, |
|
"loss": 0.8398, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.009033423667570008, |
|
"grad_norm": 2.800194786509123, |
|
"learning_rate": 7.142857142857143e-07, |
|
"loss": 0.8302, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01084010840108401, |
|
"grad_norm": 2.7703722030307465, |
|
"learning_rate": 8.928571428571429e-07, |
|
"loss": 0.8306, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.012646793134598013, |
|
"grad_norm": 2.7419868468578503, |
|
"learning_rate": 1.0714285714285714e-06, |
|
"loss": 0.813, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.014453477868112014, |
|
"grad_norm": 2.782799904203301, |
|
"learning_rate": 1.25e-06, |
|
"loss": 0.8215, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.016260162601626018, |
|
"grad_norm": 2.657957788320578, |
|
"learning_rate": 1.4285714285714286e-06, |
|
"loss": 0.8192, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.018066847335140017, |
|
"grad_norm": 2.599128638584182, |
|
"learning_rate": 1.6071428571428574e-06, |
|
"loss": 0.8146, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01987353206865402, |
|
"grad_norm": 2.2653416227389065, |
|
"learning_rate": 1.7857142857142859e-06, |
|
"loss": 0.8023, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.02168021680216802, |
|
"grad_norm": 2.132529562690701, |
|
"learning_rate": 1.9642857142857144e-06, |
|
"loss": 0.813, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.023486901535682024, |
|
"grad_norm": 2.183869626490225, |
|
"learning_rate": 2.1428571428571427e-06, |
|
"loss": 0.8047, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.025293586269196026, |
|
"grad_norm": 2.038470938805882, |
|
"learning_rate": 2.321428571428572e-06, |
|
"loss": 0.8006, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.02710027100271003, |
|
"grad_norm": 1.4549601698726884, |
|
"learning_rate": 2.5e-06, |
|
"loss": 0.7864, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.028906955736224028, |
|
"grad_norm": 1.4204350596775097, |
|
"learning_rate": 2.6785714285714285e-06, |
|
"loss": 0.7795, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.03071364046973803, |
|
"grad_norm": 1.3806667810715707, |
|
"learning_rate": 2.8571428571428573e-06, |
|
"loss": 0.7872, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.032520325203252036, |
|
"grad_norm": 1.274491626508539, |
|
"learning_rate": 3.0357142857142856e-06, |
|
"loss": 0.7795, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.03432700993676603, |
|
"grad_norm": 1.216777658487583, |
|
"learning_rate": 3.2142857142857147e-06, |
|
"loss": 0.773, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.036133694670280034, |
|
"grad_norm": 1.0495765967682895, |
|
"learning_rate": 3.3928571428571435e-06, |
|
"loss": 0.743, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.037940379403794036, |
|
"grad_norm": 1.1207660747477255, |
|
"learning_rate": 3.5714285714285718e-06, |
|
"loss": 0.7403, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.03974706413730804, |
|
"grad_norm": 1.2817143165080294, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 0.7296, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.04155374887082204, |
|
"grad_norm": 1.3509916465584577, |
|
"learning_rate": 3.928571428571429e-06, |
|
"loss": 0.7302, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.04336043360433604, |
|
"grad_norm": 1.2332702771542092, |
|
"learning_rate": 4.107142857142857e-06, |
|
"loss": 0.7359, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.045167118337850046, |
|
"grad_norm": 0.9471118008485697, |
|
"learning_rate": 4.2857142857142855e-06, |
|
"loss": 0.7129, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.04697380307136405, |
|
"grad_norm": 0.7486583362489228, |
|
"learning_rate": 4.464285714285715e-06, |
|
"loss": 0.7275, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.04878048780487805, |
|
"grad_norm": 0.6717818173905871, |
|
"learning_rate": 4.642857142857144e-06, |
|
"loss": 0.7021, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.05058717253839205, |
|
"grad_norm": 0.7950859243166388, |
|
"learning_rate": 4.821428571428572e-06, |
|
"loss": 0.6997, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.052393857271906055, |
|
"grad_norm": 0.8884910173051872, |
|
"learning_rate": 5e-06, |
|
"loss": 0.7086, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.05420054200542006, |
|
"grad_norm": 0.9673224881715593, |
|
"learning_rate": 4.999955240022903e-06, |
|
"loss": 0.7067, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.05600722673893405, |
|
"grad_norm": 0.9034786964026056, |
|
"learning_rate": 4.999820961694372e-06, |
|
"loss": 0.6936, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.057813911472448055, |
|
"grad_norm": 0.662557988289521, |
|
"learning_rate": 4.999597169822646e-06, |
|
"loss": 0.6811, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.05962059620596206, |
|
"grad_norm": 0.5043328133424003, |
|
"learning_rate": 4.999283872421259e-06, |
|
"loss": 0.6911, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.06142728093947606, |
|
"grad_norm": 0.5806752340113361, |
|
"learning_rate": 4.998881080708759e-06, |
|
"loss": 0.6766, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.06323396567299007, |
|
"grad_norm": 0.572041040601822, |
|
"learning_rate": 4.998388809108304e-06, |
|
"loss": 0.6815, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.06504065040650407, |
|
"grad_norm": 0.49944539615671646, |
|
"learning_rate": 4.997807075247147e-06, |
|
"loss": 0.6752, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.06684733514001806, |
|
"grad_norm": 0.5564450644413174, |
|
"learning_rate": 4.997135899956002e-06, |
|
"loss": 0.6781, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.06865401987353206, |
|
"grad_norm": 0.4421624128151595, |
|
"learning_rate": 4.996375307268303e-06, |
|
"loss": 0.6708, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.07046070460704607, |
|
"grad_norm": 0.4865410430073666, |
|
"learning_rate": 4.995525324419338e-06, |
|
"loss": 0.6706, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.07226738934056007, |
|
"grad_norm": 0.3765775514297677, |
|
"learning_rate": 4.994585981845278e-06, |
|
"loss": 0.659, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.07407407407407407, |
|
"grad_norm": 0.3548944502238461, |
|
"learning_rate": 4.993557313182086e-06, |
|
"loss": 0.6664, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.07588075880758807, |
|
"grad_norm": 0.36293566558445617, |
|
"learning_rate": 4.992439355264308e-06, |
|
"loss": 0.656, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.07768744354110207, |
|
"grad_norm": 0.3975630575488288, |
|
"learning_rate": 4.9912321481237616e-06, |
|
"loss": 0.6566, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.07949412827461608, |
|
"grad_norm": 0.37034479741196286, |
|
"learning_rate": 4.989935734988098e-06, |
|
"loss": 0.6492, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.08130081300813008, |
|
"grad_norm": 0.35041438533218666, |
|
"learning_rate": 4.988550162279255e-06, |
|
"loss": 0.6557, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.08310749774164408, |
|
"grad_norm": 0.3929751461483523, |
|
"learning_rate": 4.9870754796117956e-06, |
|
"loss": 0.658, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.08491418247515808, |
|
"grad_norm": 0.3228131120122744, |
|
"learning_rate": 4.985511739791129e-06, |
|
"loss": 0.6595, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.08672086720867209, |
|
"grad_norm": 0.32669811901998275, |
|
"learning_rate": 4.983858998811622e-06, |
|
"loss": 0.6464, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.08852755194218609, |
|
"grad_norm": 0.2958192972688314, |
|
"learning_rate": 4.982117315854594e-06, |
|
"loss": 0.6508, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.09033423667570009, |
|
"grad_norm": 0.29517256658089447, |
|
"learning_rate": 4.980286753286196e-06, |
|
"loss": 0.6493, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.0921409214092141, |
|
"grad_norm": 0.28707888927411485, |
|
"learning_rate": 4.978367376655177e-06, |
|
"loss": 0.639, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.0939476061427281, |
|
"grad_norm": 0.2839510706729385, |
|
"learning_rate": 4.976359254690543e-06, |
|
"loss": 0.6577, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.0957542908762421, |
|
"grad_norm": 0.2652433955956874, |
|
"learning_rate": 4.974262459299088e-06, |
|
"loss": 0.6408, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.0975609756097561, |
|
"grad_norm": 0.26894761764637504, |
|
"learning_rate": 4.9720770655628216e-06, |
|
"loss": 0.6323, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.0993676603432701, |
|
"grad_norm": 0.3393488396315471, |
|
"learning_rate": 4.969803151736285e-06, |
|
"loss": 0.6443, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.1011743450767841, |
|
"grad_norm": 0.2936280995299213, |
|
"learning_rate": 4.967440799243739e-06, |
|
"loss": 0.6504, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.10298102981029811, |
|
"grad_norm": 0.5048572168975388, |
|
"learning_rate": 4.964990092676263e-06, |
|
"loss": 0.6358, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.10478771454381211, |
|
"grad_norm": 0.253534540909337, |
|
"learning_rate": 4.962451119788709e-06, |
|
"loss": 0.6434, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.10659439927732611, |
|
"grad_norm": 0.24450647410382542, |
|
"learning_rate": 4.959823971496575e-06, |
|
"loss": 0.6287, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.10840108401084012, |
|
"grad_norm": 0.24318659934415682, |
|
"learning_rate": 4.957108741872736e-06, |
|
"loss": 0.6298, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.1102077687443541, |
|
"grad_norm": 0.26159880879938346, |
|
"learning_rate": 4.954305528144085e-06, |
|
"loss": 0.6338, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.1120144534778681, |
|
"grad_norm": 0.25390256208872614, |
|
"learning_rate": 4.9514144306880506e-06, |
|
"loss": 0.6392, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.11382113821138211, |
|
"grad_norm": 0.2557236984724456, |
|
"learning_rate": 4.948435553028994e-06, |
|
"loss": 0.6381, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.11562782294489611, |
|
"grad_norm": 0.24995746272587419, |
|
"learning_rate": 4.9453690018345144e-06, |
|
"loss": 0.6358, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.11743450767841011, |
|
"grad_norm": 0.24266667399842143, |
|
"learning_rate": 4.942214886911619e-06, |
|
"loss": 0.6309, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.11924119241192412, |
|
"grad_norm": 0.2614760212116958, |
|
"learning_rate": 4.938973321202799e-06, |
|
"loss": 0.6324, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.12104787714543812, |
|
"grad_norm": 0.23803026874912583, |
|
"learning_rate": 4.935644420781978e-06, |
|
"loss": 0.6228, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.12285456187895212, |
|
"grad_norm": 0.23755765861041625, |
|
"learning_rate": 4.932228304850363e-06, |
|
"loss": 0.6387, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.12466124661246612, |
|
"grad_norm": 0.23843258528052497, |
|
"learning_rate": 4.9287250957321685e-06, |
|
"loss": 0.6401, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.12646793134598014, |
|
"grad_norm": 0.26210939608771777, |
|
"learning_rate": 4.925134918870245e-06, |
|
"loss": 0.6458, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.12827461607949414, |
|
"grad_norm": 0.32756867637738996, |
|
"learning_rate": 4.921457902821578e-06, |
|
"loss": 0.6164, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.13008130081300814, |
|
"grad_norm": 0.23025066317701987, |
|
"learning_rate": 4.917694179252692e-06, |
|
"loss": 0.6343, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.13188798554652212, |
|
"grad_norm": 0.2342725143215051, |
|
"learning_rate": 4.9138438829349296e-06, |
|
"loss": 0.633, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.13369467028003612, |
|
"grad_norm": 0.25602275015410886, |
|
"learning_rate": 4.909907151739634e-06, |
|
"loss": 0.6247, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.13550135501355012, |
|
"grad_norm": 0.27962135134351895, |
|
"learning_rate": 4.9058841266332005e-06, |
|
"loss": 0.6226, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.13730803974706413, |
|
"grad_norm": 0.2577599322141621, |
|
"learning_rate": 4.901774951672041e-06, |
|
"loss": 0.618, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.13911472448057813, |
|
"grad_norm": 0.25384048802645065, |
|
"learning_rate": 4.897579773997415e-06, |
|
"loss": 0.6122, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.14092140921409213, |
|
"grad_norm": 0.25252662487176686, |
|
"learning_rate": 4.893298743830168e-06, |
|
"loss": 0.625, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.14272809394760613, |
|
"grad_norm": 0.2581525435101277, |
|
"learning_rate": 4.8889320144653525e-06, |
|
"loss": 0.6345, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.14453477868112014, |
|
"grad_norm": 0.2559916652692462, |
|
"learning_rate": 4.884479742266731e-06, |
|
"loss": 0.6432, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.14634146341463414, |
|
"grad_norm": 0.2633785753789179, |
|
"learning_rate": 4.879942086661185e-06, |
|
"loss": 0.625, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.14814814814814814, |
|
"grad_norm": 0.25653911708344745, |
|
"learning_rate": 4.875319210133004e-06, |
|
"loss": 0.6185, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.14995483288166214, |
|
"grad_norm": 0.2508750960975656, |
|
"learning_rate": 4.870611278218066e-06, |
|
"loss": 0.6301, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.15176151761517614, |
|
"grad_norm": 0.2561874287228457, |
|
"learning_rate": 4.865818459497911e-06, |
|
"loss": 0.6222, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.15356820234869015, |
|
"grad_norm": 0.2353115985430909, |
|
"learning_rate": 4.860940925593703e-06, |
|
"loss": 0.6238, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.15537488708220415, |
|
"grad_norm": 0.24543988420919832, |
|
"learning_rate": 4.855978851160088e-06, |
|
"loss": 0.6243, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.15718157181571815, |
|
"grad_norm": 0.23980236271661898, |
|
"learning_rate": 4.850932413878934e-06, |
|
"loss": 0.6264, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.15898825654923215, |
|
"grad_norm": 0.23746967391567278, |
|
"learning_rate": 4.845801794452978e-06, |
|
"loss": 0.6308, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.16079494128274616, |
|
"grad_norm": 0.2438060428432456, |
|
"learning_rate": 4.8405871765993435e-06, |
|
"loss": 0.6256, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.16260162601626016, |
|
"grad_norm": 0.2504401843691212, |
|
"learning_rate": 4.8352887470429726e-06, |
|
"loss": 0.6193, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.16440831074977416, |
|
"grad_norm": 0.22268738858248663, |
|
"learning_rate": 4.829906695509934e-06, |
|
"loss": 0.6175, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.16621499548328816, |
|
"grad_norm": 0.2560623721447496, |
|
"learning_rate": 4.824441214720629e-06, |
|
"loss": 0.6207, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.16802168021680217, |
|
"grad_norm": 0.23139572900738714, |
|
"learning_rate": 4.8188925003828945e-06, |
|
"loss": 0.6276, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.16982836495031617, |
|
"grad_norm": 0.2394430562047215, |
|
"learning_rate": 4.813260751184992e-06, |
|
"loss": 0.6254, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.17163504968383017, |
|
"grad_norm": 0.23489365258879913, |
|
"learning_rate": 4.8075461687884935e-06, |
|
"loss": 0.6184, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.17344173441734417, |
|
"grad_norm": 0.2390908704544338, |
|
"learning_rate": 4.801748957821061e-06, |
|
"loss": 0.6164, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.17524841915085818, |
|
"grad_norm": 0.2250588188582116, |
|
"learning_rate": 4.795869325869117e-06, |
|
"loss": 0.623, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.17705510388437218, |
|
"grad_norm": 0.21860584575831038, |
|
"learning_rate": 4.7899074834704165e-06, |
|
"loss": 0.6173, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.17886178861788618, |
|
"grad_norm": 0.24617787352480855, |
|
"learning_rate": 4.783863644106502e-06, |
|
"loss": 0.6291, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.18066847335140018, |
|
"grad_norm": 0.21802652765301336, |
|
"learning_rate": 4.777738024195065e-06, |
|
"loss": 0.603, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.18247515808491419, |
|
"grad_norm": 0.24612254991135574, |
|
"learning_rate": 4.771530843082187e-06, |
|
"loss": 0.6184, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.1842818428184282, |
|
"grad_norm": 0.2215674593325698, |
|
"learning_rate": 4.765242323034498e-06, |
|
"loss": 0.6373, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.1860885275519422, |
|
"grad_norm": 0.25404300985614986, |
|
"learning_rate": 4.7588726892312085e-06, |
|
"loss": 0.6163, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.1878952122854562, |
|
"grad_norm": 0.23783935115186008, |
|
"learning_rate": 4.752422169756048e-06, |
|
"loss": 0.6147, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.1897018970189702, |
|
"grad_norm": 0.24743986064760354, |
|
"learning_rate": 4.7458909955891015e-06, |
|
"loss": 0.6065, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.1915085817524842, |
|
"grad_norm": 0.23334887533510995, |
|
"learning_rate": 4.7392794005985324e-06, |
|
"loss": 0.6078, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.1933152664859982, |
|
"grad_norm": 0.23602009638229515, |
|
"learning_rate": 4.732587621532214e-06, |
|
"loss": 0.6138, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.1951219512195122, |
|
"grad_norm": 0.24805889543073664, |
|
"learning_rate": 4.7258158980092475e-06, |
|
"loss": 0.6125, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.1969286359530262, |
|
"grad_norm": 0.23498308809668583, |
|
"learning_rate": 4.718964472511386e-06, |
|
"loss": 0.6225, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.1987353206865402, |
|
"grad_norm": 0.23642037798176344, |
|
"learning_rate": 4.712033590374346e-06, |
|
"loss": 0.6039, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.2005420054200542, |
|
"grad_norm": 0.22293626788964602, |
|
"learning_rate": 4.705023499779031e-06, |
|
"loss": 0.6229, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.2023486901535682, |
|
"grad_norm": 0.2760226539747174, |
|
"learning_rate": 4.6979344517426345e-06, |
|
"loss": 0.6157, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.2041553748870822, |
|
"grad_norm": 0.21252834514519925, |
|
"learning_rate": 4.690766700109659e-06, |
|
"loss": 0.6192, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.20596205962059622, |
|
"grad_norm": 0.2445316744832756, |
|
"learning_rate": 4.683520501542825e-06, |
|
"loss": 0.6021, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.20776874435411022, |
|
"grad_norm": 0.24106223998119997, |
|
"learning_rate": 4.676196115513876e-06, |
|
"loss": 0.6184, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.20957542908762422, |
|
"grad_norm": 0.22622127875647777, |
|
"learning_rate": 4.668793804294294e-06, |
|
"loss": 0.6128, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.21138211382113822, |
|
"grad_norm": 0.2438334449683093, |
|
"learning_rate": 4.661313832945904e-06, |
|
"loss": 0.6226, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.21318879855465223, |
|
"grad_norm": 0.22677844575581005, |
|
"learning_rate": 4.653756469311381e-06, |
|
"loss": 0.6103, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.21499548328816623, |
|
"grad_norm": 0.22925833245122096, |
|
"learning_rate": 4.646121984004666e-06, |
|
"loss": 0.6017, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.21680216802168023, |
|
"grad_norm": 0.23651197792785295, |
|
"learning_rate": 4.638410650401267e-06, |
|
"loss": 0.6192, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.2186088527551942, |
|
"grad_norm": 0.23284082311202126, |
|
"learning_rate": 4.630622744628478e-06, |
|
"loss": 0.6156, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.2204155374887082, |
|
"grad_norm": 0.22835537092235955, |
|
"learning_rate": 4.622758545555485e-06, |
|
"loss": 0.6202, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.2222222222222222, |
|
"grad_norm": 0.26232157389806815, |
|
"learning_rate": 4.614818334783384e-06, |
|
"loss": 0.6118, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.2240289069557362, |
|
"grad_norm": 1.0190897748265104, |
|
"learning_rate": 4.606802396635098e-06, |
|
"loss": 0.5974, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.22583559168925021, |
|
"grad_norm": 0.22367646253830858, |
|
"learning_rate": 4.598711018145193e-06, |
|
"loss": 0.6034, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.22764227642276422, |
|
"grad_norm": 0.30477162914891215, |
|
"learning_rate": 4.590544489049602e-06, |
|
"loss": 0.6166, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.22944896115627822, |
|
"grad_norm": 0.24111073356626345, |
|
"learning_rate": 4.582303101775249e-06, |
|
"loss": 0.608, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.23125564588979222, |
|
"grad_norm": 0.22993786809415329, |
|
"learning_rate": 4.573987151429579e-06, |
|
"loss": 0.6075, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.23306233062330622, |
|
"grad_norm": 0.24166169868220508, |
|
"learning_rate": 4.565596935789987e-06, |
|
"loss": 0.6032, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.23486901535682023, |
|
"grad_norm": 0.2817874670342333, |
|
"learning_rate": 4.557132755293164e-06, |
|
"loss": 0.622, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.23667570009033423, |
|
"grad_norm": 0.24069107260810058, |
|
"learning_rate": 4.54859491302433e-06, |
|
"loss": 0.6154, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.23848238482384823, |
|
"grad_norm": 0.22882671727182136, |
|
"learning_rate": 4.539983714706383e-06, |
|
"loss": 0.5943, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.24028906955736223, |
|
"grad_norm": 0.22659848940942331, |
|
"learning_rate": 4.531299468688956e-06, |
|
"loss": 0.61, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.24209575429087624, |
|
"grad_norm": 0.22507834440544988, |
|
"learning_rate": 4.522542485937369e-06, |
|
"loss": 0.6105, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.24390243902439024, |
|
"grad_norm": 0.2337177257871415, |
|
"learning_rate": 4.5137130800215025e-06, |
|
"loss": 0.6068, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.24570912375790424, |
|
"grad_norm": 0.22156146362385887, |
|
"learning_rate": 4.50481156710456e-06, |
|
"loss": 0.6115, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.24751580849141824, |
|
"grad_norm": 0.23698883199253348, |
|
"learning_rate": 4.495838265931754e-06, |
|
"loss": 0.5951, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.24932249322493225, |
|
"grad_norm": 0.2229333193124451, |
|
"learning_rate": 4.486793497818889e-06, |
|
"loss": 0.612, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.25112917795844625, |
|
"grad_norm": 0.22777084728583424, |
|
"learning_rate": 4.477677586640854e-06, |
|
"loss": 0.6156, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.2529358626919603, |
|
"grad_norm": 0.29038768857745073, |
|
"learning_rate": 4.4684908588200305e-06, |
|
"loss": 0.6157, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.25474254742547425, |
|
"grad_norm": 0.2473807343598257, |
|
"learning_rate": 4.4592336433146e-06, |
|
"loss": 0.5953, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.2565492321589883, |
|
"grad_norm": 0.21649386979218563, |
|
"learning_rate": 4.449906271606766e-06, |
|
"loss": 0.6159, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.25835591689250226, |
|
"grad_norm": 0.24042251758250033, |
|
"learning_rate": 4.440509077690883e-06, |
|
"loss": 0.6069, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.2601626016260163, |
|
"grad_norm": 0.36770971559928356, |
|
"learning_rate": 4.431042398061499e-06, |
|
"loss": 0.5967, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.26196928635953026, |
|
"grad_norm": 0.20875517006997382, |
|
"learning_rate": 4.421506571701305e-06, |
|
"loss": 0.6091, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.26377597109304424, |
|
"grad_norm": 0.34540212409106824, |
|
"learning_rate": 4.411901940068997e-06, |
|
"loss": 0.6111, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.26558265582655827, |
|
"grad_norm": 0.2313988977179789, |
|
"learning_rate": 4.402228847087046e-06, |
|
"loss": 0.6036, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.26738934056007224, |
|
"grad_norm": 0.2566389864906536, |
|
"learning_rate": 4.3924876391293915e-06, |
|
"loss": 0.6049, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.26919602529358627, |
|
"grad_norm": 0.22910952216670477, |
|
"learning_rate": 4.382678665009028e-06, |
|
"loss": 0.6016, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.27100271002710025, |
|
"grad_norm": 0.229201881251706, |
|
"learning_rate": 4.372802275965521e-06, |
|
"loss": 0.6102, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.2728093947606143, |
|
"grad_norm": 0.2689283377981859, |
|
"learning_rate": 4.362858825652428e-06, |
|
"loss": 0.595, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.27461607949412825, |
|
"grad_norm": 0.2348399745355362, |
|
"learning_rate": 4.352848670124637e-06, |
|
"loss": 0.5995, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.2764227642276423, |
|
"grad_norm": 0.23839869995063648, |
|
"learning_rate": 4.342772167825613e-06, |
|
"loss": 0.6009, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.27822944896115626, |
|
"grad_norm": 0.22612816420838172, |
|
"learning_rate": 4.332629679574566e-06, |
|
"loss": 0.6063, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.2800361336946703, |
|
"grad_norm": 0.2385731884325847, |
|
"learning_rate": 4.322421568553529e-06, |
|
"loss": 0.5963, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.28184281842818426, |
|
"grad_norm": 0.2069980149679815, |
|
"learning_rate": 4.312148200294355e-06, |
|
"loss": 0.598, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.2836495031616983, |
|
"grad_norm": 0.2443362265256338, |
|
"learning_rate": 4.3018099426656255e-06, |
|
"loss": 0.6106, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.28545618789521227, |
|
"grad_norm": 0.22125798424540696, |
|
"learning_rate": 4.291407165859481e-06, |
|
"loss": 0.6073, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.2872628726287263, |
|
"grad_norm": 0.23484255914212887, |
|
"learning_rate": 4.280940242378363e-06, |
|
"loss": 0.5893, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.28906955736224027, |
|
"grad_norm": 0.2378182470960287, |
|
"learning_rate": 4.2704095470216745e-06, |
|
"loss": 0.5983, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.2908762420957543, |
|
"grad_norm": 0.22837213595253308, |
|
"learning_rate": 4.259815456872363e-06, |
|
"loss": 0.604, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.2926829268292683, |
|
"grad_norm": 0.22929353301353925, |
|
"learning_rate": 4.249158351283414e-06, |
|
"loss": 0.5904, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.2944896115627823, |
|
"grad_norm": 0.23217030236809272, |
|
"learning_rate": 4.2384386118642696e-06, |
|
"loss": 0.6007, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.2962962962962963, |
|
"grad_norm": 0.23822375648362773, |
|
"learning_rate": 4.227656622467162e-06, |
|
"loss": 0.609, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.2981029810298103, |
|
"grad_norm": 0.24124088260796128, |
|
"learning_rate": 4.216812769173371e-06, |
|
"loss": 0.6008, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.2999096657633243, |
|
"grad_norm": 0.22153075465196947, |
|
"learning_rate": 4.205907440279395e-06, |
|
"loss": 0.607, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.3017163504968383, |
|
"grad_norm": 0.21711862309864874, |
|
"learning_rate": 4.194941026283053e-06, |
|
"loss": 0.6063, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.3035230352303523, |
|
"grad_norm": 0.22432330344213172, |
|
"learning_rate": 4.183913919869495e-06, |
|
"loss": 0.6039, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.3053297199638663, |
|
"grad_norm": 0.21572998238848462, |
|
"learning_rate": 4.172826515897146e-06, |
|
"loss": 0.5969, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.3071364046973803, |
|
"grad_norm": 0.22067551805641855, |
|
"learning_rate": 4.161679211383565e-06, |
|
"loss": 0.6116, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.3089430894308943, |
|
"grad_norm": 0.22703585317519762, |
|
"learning_rate": 4.150472405491226e-06, |
|
"loss": 0.5937, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.3107497741644083, |
|
"grad_norm": 0.22250449959205287, |
|
"learning_rate": 4.139206499513231e-06, |
|
"loss": 0.6001, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.31255645889792233, |
|
"grad_norm": 0.20802156234358865, |
|
"learning_rate": 4.127881896858934e-06, |
|
"loss": 0.6037, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.3143631436314363, |
|
"grad_norm": 0.22038948428329844, |
|
"learning_rate": 4.116499003039499e-06, |
|
"loss": 0.5948, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.31616982836495033, |
|
"grad_norm": 0.23649589313584202, |
|
"learning_rate": 4.105058225653381e-06, |
|
"loss": 0.5999, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.3179765130984643, |
|
"grad_norm": 0.2274306926862935, |
|
"learning_rate": 4.093559974371725e-06, |
|
"loss": 0.6134, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.31978319783197834, |
|
"grad_norm": 0.22419208649241737, |
|
"learning_rate": 4.0820046609237026e-06, |
|
"loss": 0.5881, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.3215898825654923, |
|
"grad_norm": 0.21680164152050496, |
|
"learning_rate": 4.070392699081767e-06, |
|
"loss": 0.5947, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.32339656729900634, |
|
"grad_norm": 0.2223339254360786, |
|
"learning_rate": 4.058724504646834e-06, |
|
"loss": 0.6055, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.3252032520325203, |
|
"grad_norm": 0.23331107157838193, |
|
"learning_rate": 4.047000495433397e-06, |
|
"loss": 0.6134, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.32700993676603435, |
|
"grad_norm": 0.21996861718872435, |
|
"learning_rate": 4.035221091254563e-06, |
|
"loss": 0.5902, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.3288166214995483, |
|
"grad_norm": 0.24428874512652157, |
|
"learning_rate": 4.023386713907021e-06, |
|
"loss": 0.6003, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.33062330623306235, |
|
"grad_norm": 0.2305392278811606, |
|
"learning_rate": 4.011497787155938e-06, |
|
"loss": 0.6123, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.3324299909665763, |
|
"grad_norm": 0.2561643139152072, |
|
"learning_rate": 3.999554736719785e-06, |
|
"loss": 0.6095, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.33423667570009036, |
|
"grad_norm": 0.2358012826148262, |
|
"learning_rate": 3.987557990255093e-06, |
|
"loss": 0.6053, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.33604336043360433, |
|
"grad_norm": 0.24287341826766704, |
|
"learning_rate": 3.975507977341141e-06, |
|
"loss": 0.601, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.33785004516711836, |
|
"grad_norm": 0.20879030526781392, |
|
"learning_rate": 3.963405129464569e-06, |
|
"loss": 0.5983, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.33965672990063234, |
|
"grad_norm": 0.28353592150903156, |
|
"learning_rate": 3.951249880003934e-06, |
|
"loss": 0.5974, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.34146341463414637, |
|
"grad_norm": 0.2214488122199161, |
|
"learning_rate": 3.939042664214185e-06, |
|
"loss": 0.6053, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.34327009936766034, |
|
"grad_norm": 0.23803809202081058, |
|
"learning_rate": 3.92678391921108e-06, |
|
"loss": 0.5937, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.34507678410117437, |
|
"grad_norm": 0.22684507950838628, |
|
"learning_rate": 3.914474083955537e-06, |
|
"loss": 0.592, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.34688346883468835, |
|
"grad_norm": 0.20681090678985584, |
|
"learning_rate": 3.902113599237911e-06, |
|
"loss": 0.5986, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.3486901535682023, |
|
"grad_norm": 0.19866617565983444, |
|
"learning_rate": 3.889702907662212e-06, |
|
"loss": 0.6008, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.35049683830171635, |
|
"grad_norm": 0.2302303968413167, |
|
"learning_rate": 3.8772424536302565e-06, |
|
"loss": 0.602, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.3523035230352303, |
|
"grad_norm": 0.2363831434168681, |
|
"learning_rate": 3.864732683325755e-06, |
|
"loss": 0.5857, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.35411020776874436, |
|
"grad_norm": 0.23853047160053847, |
|
"learning_rate": 3.852174044698333e-06, |
|
"loss": 0.5988, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.35591689250225833, |
|
"grad_norm": 0.22998735263346828, |
|
"learning_rate": 3.839566987447492e-06, |
|
"loss": 0.598, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.35772357723577236, |
|
"grad_norm": 0.23941067077880493, |
|
"learning_rate": 3.826911963006508e-06, |
|
"loss": 0.5752, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.35953026196928634, |
|
"grad_norm": 0.2284898801622664, |
|
"learning_rate": 3.8142094245262617e-06, |
|
"loss": 0.5978, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.36133694670280037, |
|
"grad_norm": 0.2310316432666973, |
|
"learning_rate": 3.801459826859022e-06, |
|
"loss": 0.5876, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.36314363143631434, |
|
"grad_norm": 0.22874021440150738, |
|
"learning_rate": 3.788663626542146e-06, |
|
"loss": 0.5822, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.36495031616982837, |
|
"grad_norm": 0.23754593387894263, |
|
"learning_rate": 3.7758212817817406e-06, |
|
"loss": 0.6092, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.36675700090334235, |
|
"grad_norm": 0.23555267453276538, |
|
"learning_rate": 3.7629332524362532e-06, |
|
"loss": 0.5961, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.3685636856368564, |
|
"grad_norm": 0.263996613196296, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 0.6088, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.37037037037037035, |
|
"grad_norm": 0.2220695749412859, |
|
"learning_rate": 3.7370219875866497e-06, |
|
"loss": 0.5985, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.3721770551038844, |
|
"grad_norm": 0.22266417943488925, |
|
"learning_rate": 3.7239996799126315e-06, |
|
"loss": 0.5863, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.37398373983739835, |
|
"grad_norm": 0.22936855717559157, |
|
"learning_rate": 3.7109335432805006e-06, |
|
"loss": 0.5944, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.3757904245709124, |
|
"grad_norm": 0.22850775987856617, |
|
"learning_rate": 3.697824045562238e-06, |
|
"loss": 0.5806, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.37759710930442636, |
|
"grad_norm": 0.24701920957996232, |
|
"learning_rate": 3.684671656182497e-06, |
|
"loss": 0.6027, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.3794037940379404, |
|
"grad_norm": 0.21373080476647208, |
|
"learning_rate": 3.671476846101797e-06, |
|
"loss": 0.5974, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.38121047877145436, |
|
"grad_norm": 0.22435107184170758, |
|
"learning_rate": 3.658240087799655e-06, |
|
"loss": 0.5996, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.3830171635049684, |
|
"grad_norm": 0.22948717204733204, |
|
"learning_rate": 3.644961855257669e-06, |
|
"loss": 0.608, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.38482384823848237, |
|
"grad_norm": 0.24067858739835768, |
|
"learning_rate": 3.6316426239425484e-06, |
|
"loss": 0.6062, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.3866305329719964, |
|
"grad_norm": 0.24899724125737957, |
|
"learning_rate": 3.6182828707890816e-06, |
|
"loss": 0.5924, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.3884372177055104, |
|
"grad_norm": 0.2355151032011817, |
|
"learning_rate": 3.6048830741830678e-06, |
|
"loss": 0.6195, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.3902439024390244, |
|
"grad_norm": 0.23883341592646898, |
|
"learning_rate": 3.5914437139441754e-06, |
|
"loss": 0.6082, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.3920505871725384, |
|
"grad_norm": 0.23501581194194024, |
|
"learning_rate": 3.5779652713087717e-06, |
|
"loss": 0.6082, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.3938572719060524, |
|
"grad_norm": 0.2569106741294839, |
|
"learning_rate": 3.564448228912682e-06, |
|
"loss": 0.5999, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.3956639566395664, |
|
"grad_norm": 0.23451645141530233, |
|
"learning_rate": 3.5508930707739143e-06, |
|
"loss": 0.5942, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.3974706413730804, |
|
"grad_norm": 0.25332615378033807, |
|
"learning_rate": 3.5373002822753217e-06, |
|
"loss": 0.5984, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3992773261065944, |
|
"grad_norm": 0.21573314905985583, |
|
"learning_rate": 3.523670350147227e-06, |
|
"loss": 0.586, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.4010840108401084, |
|
"grad_norm": 0.22402654512077783, |
|
"learning_rate": 3.510003762449988e-06, |
|
"loss": 0.6031, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.4028906955736224, |
|
"grad_norm": 0.23430699042260314, |
|
"learning_rate": 3.496301008556529e-06, |
|
"loss": 0.5861, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.4046973803071364, |
|
"grad_norm": 0.2112766887800661, |
|
"learning_rate": 3.4825625791348093e-06, |
|
"loss": 0.5917, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.4065040650406504, |
|
"grad_norm": 0.2594229676066792, |
|
"learning_rate": 3.4687889661302577e-06, |
|
"loss": 0.5951, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.4083107497741644, |
|
"grad_norm": 0.21977632231714242, |
|
"learning_rate": 3.454980662748156e-06, |
|
"loss": 0.6022, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.4101174345076784, |
|
"grad_norm": 0.24331427531174873, |
|
"learning_rate": 3.44113816343598e-06, |
|
"loss": 0.6074, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.41192411924119243, |
|
"grad_norm": 0.2420730990351626, |
|
"learning_rate": 3.4272619638656914e-06, |
|
"loss": 0.5938, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.4137308039747064, |
|
"grad_norm": 0.2113848379255305, |
|
"learning_rate": 3.4133525609159883e-06, |
|
"loss": 0.5953, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.41553748870822044, |
|
"grad_norm": 0.2278511980807453, |
|
"learning_rate": 3.399410452654518e-06, |
|
"loss": 0.5976, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.4173441734417344, |
|
"grad_norm": 0.22656717412493943, |
|
"learning_rate": 3.3854361383200372e-06, |
|
"loss": 0.5981, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.41915085817524844, |
|
"grad_norm": 0.21257157836910553, |
|
"learning_rate": 3.3714301183045382e-06, |
|
"loss": 0.6008, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.4209575429087624, |
|
"grad_norm": 0.225673129949644, |
|
"learning_rate": 3.357392894135329e-06, |
|
"loss": 0.5818, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.42276422764227645, |
|
"grad_norm": 0.4682554631351164, |
|
"learning_rate": 3.3433249684570757e-06, |
|
"loss": 0.5984, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.4245709123757904, |
|
"grad_norm": 0.21876994996840093, |
|
"learning_rate": 3.329226845013802e-06, |
|
"loss": 0.5929, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.42637759710930445, |
|
"grad_norm": 0.21396549715257288, |
|
"learning_rate": 3.315099028630855e-06, |
|
"loss": 0.605, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.4281842818428184, |
|
"grad_norm": 0.24546725294633087, |
|
"learning_rate": 3.3009420251968245e-06, |
|
"loss": 0.6027, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.42999096657633246, |
|
"grad_norm": 0.21189353474136885, |
|
"learning_rate": 3.28675634164543e-06, |
|
"loss": 0.6123, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.43179765130984643, |
|
"grad_norm": 0.23626902591793353, |
|
"learning_rate": 3.272542485937369e-06, |
|
"loss": 0.5943, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.43360433604336046, |
|
"grad_norm": 0.21589261558067036, |
|
"learning_rate": 3.258300967042125e-06, |
|
"loss": 0.5947, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.43541102077687444, |
|
"grad_norm": 0.24017253917658596, |
|
"learning_rate": 3.2440322949197467e-06, |
|
"loss": 0.5927, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.4372177055103884, |
|
"grad_norm": 0.21325298090224806, |
|
"learning_rate": 3.229736980502584e-06, |
|
"loss": 0.5935, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.43902439024390244, |
|
"grad_norm": 0.23117870895524098, |
|
"learning_rate": 3.2154155356769922e-06, |
|
"loss": 0.5974, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.4408310749774164, |
|
"grad_norm": 0.2206632860171201, |
|
"learning_rate": 3.201068473265007e-06, |
|
"loss": 0.6029, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.44263775971093045, |
|
"grad_norm": 0.22021715956332263, |
|
"learning_rate": 3.186696307005976e-06, |
|
"loss": 0.5962, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.4444444444444444, |
|
"grad_norm": 0.23476509271910456, |
|
"learning_rate": 3.1722995515381644e-06, |
|
"loss": 0.6027, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.44625112917795845, |
|
"grad_norm": 0.22784064428785847, |
|
"learning_rate": 3.1578787223803296e-06, |
|
"loss": 0.5855, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.4480578139114724, |
|
"grad_norm": 0.2235087678808535, |
|
"learning_rate": 3.1434343359132565e-06, |
|
"loss": 0.6036, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.44986449864498645, |
|
"grad_norm": 0.207464881051444, |
|
"learning_rate": 3.128966909361272e-06, |
|
"loss": 0.5866, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.45167118337850043, |
|
"grad_norm": 0.2174732745725859, |
|
"learning_rate": 3.1144769607737204e-06, |
|
"loss": 0.6011, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.45347786811201446, |
|
"grad_norm": 0.21953836324624784, |
|
"learning_rate": 3.099965009006415e-06, |
|
"loss": 0.5894, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.45528455284552843, |
|
"grad_norm": 0.21126348106595308, |
|
"learning_rate": 3.08543157370306e-06, |
|
"loss": 0.5707, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.45709123757904246, |
|
"grad_norm": 0.24650243361345475, |
|
"learning_rate": 3.0708771752766397e-06, |
|
"loss": 0.5935, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.45889792231255644, |
|
"grad_norm": 0.2249746208569381, |
|
"learning_rate": 3.056302334890786e-06, |
|
"loss": 0.5921, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.46070460704607047, |
|
"grad_norm": 0.21497114185767313, |
|
"learning_rate": 3.041707574441118e-06, |
|
"loss": 0.5862, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.46251129177958444, |
|
"grad_norm": 0.22138733157917687, |
|
"learning_rate": 3.027093416536548e-06, |
|
"loss": 0.5867, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.4643179765130985, |
|
"grad_norm": 0.22437622502926427, |
|
"learning_rate": 3.0124603844805767e-06, |
|
"loss": 0.5962, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.46612466124661245, |
|
"grad_norm": 0.2135165286596709, |
|
"learning_rate": 2.9978090022525456e-06, |
|
"loss": 0.5914, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.4679313459801265, |
|
"grad_norm": 0.2182517656574138, |
|
"learning_rate": 2.9831397944888833e-06, |
|
"loss": 0.5955, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.46973803071364045, |
|
"grad_norm": 0.24251723379092627, |
|
"learning_rate": 2.9684532864643123e-06, |
|
"loss": 0.6096, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.4715447154471545, |
|
"grad_norm": 0.30798107619678294, |
|
"learning_rate": 2.953750004073041e-06, |
|
"loss": 0.6009, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.47335140018066846, |
|
"grad_norm": 0.2036561256279162, |
|
"learning_rate": 2.9390304738099385e-06, |
|
"loss": 0.5895, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.4751580849141825, |
|
"grad_norm": 0.20262069143669137, |
|
"learning_rate": 2.9242952227516726e-06, |
|
"loss": 0.5931, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.47696476964769646, |
|
"grad_norm": 0.22115029057191055, |
|
"learning_rate": 2.9095447785378446e-06, |
|
"loss": 0.6007, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.4787714543812105, |
|
"grad_norm": 0.20864586645540997, |
|
"learning_rate": 2.89477966935209e-06, |
|
"loss": 0.581, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.48057813911472447, |
|
"grad_norm": 0.21448437622422867, |
|
"learning_rate": 2.8800004239031687e-06, |
|
"loss": 0.6053, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.4823848238482385, |
|
"grad_norm": 0.22058650439308047, |
|
"learning_rate": 2.8652075714060296e-06, |
|
"loss": 0.5895, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.48419150858175247, |
|
"grad_norm": 0.22504440683675958, |
|
"learning_rate": 2.850401641562865e-06, |
|
"loss": 0.5901, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.4859981933152665, |
|
"grad_norm": 0.2143472148561155, |
|
"learning_rate": 2.835583164544139e-06, |
|
"loss": 0.5957, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.4878048780487805, |
|
"grad_norm": 0.21751502930550626, |
|
"learning_rate": 2.820752670969606e-06, |
|
"loss": 0.5786, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.4896115627822945, |
|
"grad_norm": 0.21691622039430525, |
|
"learning_rate": 2.805910691889307e-06, |
|
"loss": 0.5997, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.4914182475158085, |
|
"grad_norm": 0.2219827422506738, |
|
"learning_rate": 2.791057758764557e-06, |
|
"loss": 0.5942, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.4932249322493225, |
|
"grad_norm": 0.22182577176913804, |
|
"learning_rate": 2.776194403448915e-06, |
|
"loss": 0.6024, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.4950316169828365, |
|
"grad_norm": 0.23456424438625753, |
|
"learning_rate": 2.761321158169134e-06, |
|
"loss": 0.5891, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.4968383017163505, |
|
"grad_norm": 0.20662902731079838, |
|
"learning_rate": 2.7464385555061092e-06, |
|
"loss": 0.5843, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.4986449864498645, |
|
"grad_norm": 0.2042688233772306, |
|
"learning_rate": 2.731547128375804e-06, |
|
"loss": 0.5765, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.5004516711833785, |
|
"grad_norm": 0.2010718475151129, |
|
"learning_rate": 2.7166474100101676e-06, |
|
"loss": 0.5889, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.5022583559168925, |
|
"grad_norm": 0.21780066674743287, |
|
"learning_rate": 2.7017399339380435e-06, |
|
"loss": 0.5753, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.5040650406504065, |
|
"grad_norm": 0.23094036536640075, |
|
"learning_rate": 2.686825233966061e-06, |
|
"loss": 0.5901, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.5058717253839206, |
|
"grad_norm": 0.22149977838891852, |
|
"learning_rate": 2.6719038441595236e-06, |
|
"loss": 0.5863, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.5076784101174345, |
|
"grad_norm": 0.23588829860771496, |
|
"learning_rate": 2.6569762988232838e-06, |
|
"loss": 0.5874, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.5094850948509485, |
|
"grad_norm": 0.20928084741622358, |
|
"learning_rate": 2.642043132482612e-06, |
|
"loss": 0.5806, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.5112917795844625, |
|
"grad_norm": 0.21032848792961903, |
|
"learning_rate": 2.6271048798640547e-06, |
|
"loss": 0.598, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.5130984643179766, |
|
"grad_norm": 0.21154254189290178, |
|
"learning_rate": 2.6121620758762877e-06, |
|
"loss": 0.5855, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.5149051490514905, |
|
"grad_norm": 0.19044363575025977, |
|
"learning_rate": 2.5972152555909625e-06, |
|
"loss": 0.578, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.5167118337850045, |
|
"grad_norm": 0.2164720308872652, |
|
"learning_rate": 2.5822649542235468e-06, |
|
"loss": 0.5785, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.5185185185185185, |
|
"grad_norm": 0.2267479938328202, |
|
"learning_rate": 2.5673117071141574e-06, |
|
"loss": 0.6006, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.5203252032520326, |
|
"grad_norm": 0.2264960086832089, |
|
"learning_rate": 2.5523560497083927e-06, |
|
"loss": 0.5964, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.5221318879855466, |
|
"grad_norm": 0.1985933300834711, |
|
"learning_rate": 2.5373985175381595e-06, |
|
"loss": 0.5806, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.5239385727190605, |
|
"grad_norm": 0.21800556164349963, |
|
"learning_rate": 2.522439646202495e-06, |
|
"loss": 0.5927, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.5257452574525745, |
|
"grad_norm": 0.2307111155167328, |
|
"learning_rate": 2.507479971348391e-06, |
|
"loss": 0.5973, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.5275519421860885, |
|
"grad_norm": 0.19676562110774456, |
|
"learning_rate": 2.49252002865161e-06, |
|
"loss": 0.5999, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.5293586269196026, |
|
"grad_norm": 0.20847492332742104, |
|
"learning_rate": 2.4775603537975055e-06, |
|
"loss": 0.5943, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.5311653116531165, |
|
"grad_norm": 0.21153503973865617, |
|
"learning_rate": 2.4626014824618418e-06, |
|
"loss": 0.5853, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.5329719963866305, |
|
"grad_norm": 0.2031008224582669, |
|
"learning_rate": 2.447643950291608e-06, |
|
"loss": 0.5842, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.5347786811201445, |
|
"grad_norm": 0.21983351954706135, |
|
"learning_rate": 2.4326882928858435e-06, |
|
"loss": 0.5869, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.5365853658536586, |
|
"grad_norm": 0.20906785660797234, |
|
"learning_rate": 2.417735045776453e-06, |
|
"loss": 0.5937, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.5383920505871725, |
|
"grad_norm": 0.20179687852224082, |
|
"learning_rate": 2.4027847444090384e-06, |
|
"loss": 0.5873, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.5401987353206865, |
|
"grad_norm": 0.21814673778220395, |
|
"learning_rate": 2.3878379241237136e-06, |
|
"loss": 0.604, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.5420054200542005, |
|
"grad_norm": 0.20643292380076875, |
|
"learning_rate": 2.372895120135946e-06, |
|
"loss": 0.5917, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.5438121047877146, |
|
"grad_norm": 0.2046393946281201, |
|
"learning_rate": 2.3579568675173894e-06, |
|
"loss": 0.5918, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.5456187895212286, |
|
"grad_norm": 0.19615967291155278, |
|
"learning_rate": 2.3430237011767166e-06, |
|
"loss": 0.6048, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.5474254742547425, |
|
"grad_norm": 0.2000764963063287, |
|
"learning_rate": 2.3280961558404773e-06, |
|
"loss": 0.5862, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.5492321589882565, |
|
"grad_norm": 0.20741775151038053, |
|
"learning_rate": 2.3131747660339396e-06, |
|
"loss": 0.5808, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.5510388437217706, |
|
"grad_norm": 0.22297483996840473, |
|
"learning_rate": 2.2982600660619574e-06, |
|
"loss": 0.5871, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.5528455284552846, |
|
"grad_norm": 0.22522123218884285, |
|
"learning_rate": 2.2833525899898324e-06, |
|
"loss": 0.5851, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.5546522131887985, |
|
"grad_norm": 0.21268387741990938, |
|
"learning_rate": 2.268452871624197e-06, |
|
"loss": 0.6034, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.5564588979223125, |
|
"grad_norm": 0.20654302956803075, |
|
"learning_rate": 2.253561444493891e-06, |
|
"loss": 0.5824, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.5582655826558266, |
|
"grad_norm": 0.21409459284336346, |
|
"learning_rate": 2.238678841830867e-06, |
|
"loss": 0.5917, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.5600722673893406, |
|
"grad_norm": 0.23678847045512513, |
|
"learning_rate": 2.2238055965510853e-06, |
|
"loss": 0.5753, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.5618789521228545, |
|
"grad_norm": 0.20888881522894184, |
|
"learning_rate": 2.2089422412354434e-06, |
|
"loss": 0.5927, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.5636856368563685, |
|
"grad_norm": 0.35491005892326005, |
|
"learning_rate": 2.1940893081106946e-06, |
|
"loss": 0.5922, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.5654923215898826, |
|
"grad_norm": 0.2359074382604886, |
|
"learning_rate": 2.179247329030395e-06, |
|
"loss": 0.5946, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.5672990063233966, |
|
"grad_norm": 0.2006465173455305, |
|
"learning_rate": 2.1644168354558623e-06, |
|
"loss": 0.5858, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.5691056910569106, |
|
"grad_norm": 0.1964655166756819, |
|
"learning_rate": 2.1495983584371354e-06, |
|
"loss": 0.5855, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.5709123757904245, |
|
"grad_norm": 0.25935669261529887, |
|
"learning_rate": 2.134792428593971e-06, |
|
"loss": 0.5832, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.5727190605239386, |
|
"grad_norm": 0.21166676225314787, |
|
"learning_rate": 2.119999576096832e-06, |
|
"loss": 0.5774, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.5745257452574526, |
|
"grad_norm": 0.21034466035167568, |
|
"learning_rate": 2.1052203306479108e-06, |
|
"loss": 0.5894, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.5763324299909666, |
|
"grad_norm": 0.21477089270590338, |
|
"learning_rate": 2.090455221462156e-06, |
|
"loss": 0.5995, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.5781391147244805, |
|
"grad_norm": 0.22654999181566404, |
|
"learning_rate": 2.0757047772483278e-06, |
|
"loss": 0.583, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.5799457994579946, |
|
"grad_norm": 0.210213799395079, |
|
"learning_rate": 2.0609695261900624e-06, |
|
"loss": 0.5786, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.5817524841915086, |
|
"grad_norm": 0.22316241515409585, |
|
"learning_rate": 2.0462499959269596e-06, |
|
"loss": 0.6005, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.5835591689250226, |
|
"grad_norm": 0.21470128847812983, |
|
"learning_rate": 2.031546713535688e-06, |
|
"loss": 0.5856, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.5853658536585366, |
|
"grad_norm": 0.19586563209961527, |
|
"learning_rate": 2.0168602055111175e-06, |
|
"loss": 0.5659, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.5871725383920506, |
|
"grad_norm": 0.20972410141424652, |
|
"learning_rate": 2.0021909977474553e-06, |
|
"loss": 0.5844, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.5889792231255646, |
|
"grad_norm": 0.20568687520267012, |
|
"learning_rate": 1.987539615519424e-06, |
|
"loss": 0.5928, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.5907859078590786, |
|
"grad_norm": 0.22858746561989765, |
|
"learning_rate": 1.9729065834634533e-06, |
|
"loss": 0.5809, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.5925925925925926, |
|
"grad_norm": 0.219066035309145, |
|
"learning_rate": 1.958292425558883e-06, |
|
"loss": 0.5864, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.5943992773261066, |
|
"grad_norm": 0.2226824752237514, |
|
"learning_rate": 1.9436976651092143e-06, |
|
"loss": 0.5777, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.5962059620596206, |
|
"grad_norm": 0.2034245216202459, |
|
"learning_rate": 1.9291228247233607e-06, |
|
"loss": 0.585, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.5980126467931346, |
|
"grad_norm": 0.21339109772232404, |
|
"learning_rate": 1.9145684262969404e-06, |
|
"loss": 0.5903, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.5998193315266486, |
|
"grad_norm": 0.1991853712111239, |
|
"learning_rate": 1.9000349909935852e-06, |
|
"loss": 0.5723, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.6016260162601627, |
|
"grad_norm": 0.2002553566062744, |
|
"learning_rate": 1.8855230392262809e-06, |
|
"loss": 0.5929, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.6034327009936766, |
|
"grad_norm": 0.20662439079944833, |
|
"learning_rate": 1.8710330906387288e-06, |
|
"loss": 0.6059, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.6052393857271906, |
|
"grad_norm": 0.21347474567951025, |
|
"learning_rate": 1.8565656640867448e-06, |
|
"loss": 0.5879, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.6070460704607046, |
|
"grad_norm": 0.2052816900866658, |
|
"learning_rate": 1.8421212776196712e-06, |
|
"loss": 0.5856, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.6088527551942186, |
|
"grad_norm": 0.20270795074379191, |
|
"learning_rate": 1.827700448461836e-06, |
|
"loss": 0.5864, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.6106594399277326, |
|
"grad_norm": 0.20702481129474415, |
|
"learning_rate": 1.813303692994025e-06, |
|
"loss": 0.5822, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.6124661246612466, |
|
"grad_norm": 0.20555781447822888, |
|
"learning_rate": 1.7989315267349936e-06, |
|
"loss": 0.591, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.6142728093947606, |
|
"grad_norm": 0.19378812967196427, |
|
"learning_rate": 1.7845844643230086e-06, |
|
"loss": 0.5866, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.6160794941282746, |
|
"grad_norm": 0.2018027260189549, |
|
"learning_rate": 1.770263019497417e-06, |
|
"loss": 0.5846, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.6178861788617886, |
|
"grad_norm": 0.2034365649959298, |
|
"learning_rate": 1.7559677050802543e-06, |
|
"loss": 0.5883, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.6196928635953026, |
|
"grad_norm": 0.2121729903043798, |
|
"learning_rate": 1.7416990329578753e-06, |
|
"loss": 0.5827, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.6214995483288166, |
|
"grad_norm": 0.1996950776064429, |
|
"learning_rate": 1.7274575140626318e-06, |
|
"loss": 0.5719, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.6233062330623306, |
|
"grad_norm": 0.21273393853952527, |
|
"learning_rate": 1.7132436583545703e-06, |
|
"loss": 0.5867, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.6251129177958447, |
|
"grad_norm": 0.21807600293104162, |
|
"learning_rate": 1.699057974803176e-06, |
|
"loss": 0.5883, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.6269196025293586, |
|
"grad_norm": 0.2645660891129371, |
|
"learning_rate": 1.6849009713691456e-06, |
|
"loss": 0.5732, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.6287262872628726, |
|
"grad_norm": 0.22205298104140844, |
|
"learning_rate": 1.670773154986199e-06, |
|
"loss": 0.5741, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.6305329719963866, |
|
"grad_norm": 0.20314162053665355, |
|
"learning_rate": 1.6566750315429254e-06, |
|
"loss": 0.5766, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.6323396567299007, |
|
"grad_norm": 0.23300059534576428, |
|
"learning_rate": 1.6426071058646718e-06, |
|
"loss": 0.5893, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.6341463414634146, |
|
"grad_norm": 0.22463617554746781, |
|
"learning_rate": 1.6285698816954626e-06, |
|
"loss": 0.5795, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.6359530261969286, |
|
"grad_norm": 0.21871006268327386, |
|
"learning_rate": 1.6145638616799636e-06, |
|
"loss": 0.5932, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.6377597109304426, |
|
"grad_norm": 0.20172551655088927, |
|
"learning_rate": 1.6005895473454836e-06, |
|
"loss": 0.5932, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.6395663956639567, |
|
"grad_norm": 0.20333883984236462, |
|
"learning_rate": 1.5866474390840126e-06, |
|
"loss": 0.5885, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.6413730803974707, |
|
"grad_norm": 0.19927897295532812, |
|
"learning_rate": 1.5727380361343103e-06, |
|
"loss": 0.5778, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.6431797651309846, |
|
"grad_norm": 0.19972091036693457, |
|
"learning_rate": 1.55886183656402e-06, |
|
"loss": 0.5933, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.6449864498644986, |
|
"grad_norm": 0.21662963302788196, |
|
"learning_rate": 1.545019337251844e-06, |
|
"loss": 0.5936, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.6467931345980127, |
|
"grad_norm": 0.1881586687054326, |
|
"learning_rate": 1.5312110338697427e-06, |
|
"loss": 0.5988, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.6485998193315267, |
|
"grad_norm": 0.20352510057073783, |
|
"learning_rate": 1.5174374208651913e-06, |
|
"loss": 0.5715, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.6504065040650406, |
|
"grad_norm": 0.22480023651434194, |
|
"learning_rate": 1.503698991443471e-06, |
|
"loss": 0.5799, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.6522131887985546, |
|
"grad_norm": 0.2190975956485496, |
|
"learning_rate": 1.489996237550012e-06, |
|
"loss": 0.5866, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.6540198735320687, |
|
"grad_norm": 0.2022143060520903, |
|
"learning_rate": 1.4763296498527744e-06, |
|
"loss": 0.5974, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.6558265582655827, |
|
"grad_norm": 0.21608348435965957, |
|
"learning_rate": 1.4626997177246787e-06, |
|
"loss": 0.5879, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.6576332429990966, |
|
"grad_norm": 0.19739502471170703, |
|
"learning_rate": 1.4491069292260867e-06, |
|
"loss": 0.5861, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.6594399277326106, |
|
"grad_norm": 0.19474952415745872, |
|
"learning_rate": 1.4355517710873184e-06, |
|
"loss": 0.5787, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.6612466124661247, |
|
"grad_norm": 0.21091567910860665, |
|
"learning_rate": 1.4220347286912296e-06, |
|
"loss": 0.6009, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.6630532971996387, |
|
"grad_norm": 0.20662747423112274, |
|
"learning_rate": 1.4085562860558256e-06, |
|
"loss": 0.5819, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.6648599819331527, |
|
"grad_norm": 0.2728937498350651, |
|
"learning_rate": 1.395116925816934e-06, |
|
"loss": 0.5939, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 0.20917713778131836, |
|
"learning_rate": 1.3817171292109182e-06, |
|
"loss": 0.5947, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.6684733514001807, |
|
"grad_norm": 0.20216752147153688, |
|
"learning_rate": 1.3683573760574526e-06, |
|
"loss": 0.5932, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.6702800361336947, |
|
"grad_norm": 0.24086639188361567, |
|
"learning_rate": 1.3550381447423317e-06, |
|
"loss": 0.5813, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.6720867208672087, |
|
"grad_norm": 0.2091604026851147, |
|
"learning_rate": 1.3417599122003464e-06, |
|
"loss": 0.5992, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.6738934056007226, |
|
"grad_norm": 0.20808064999420775, |
|
"learning_rate": 1.3285231538982036e-06, |
|
"loss": 0.5848, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.6757000903342367, |
|
"grad_norm": 0.18817236588164815, |
|
"learning_rate": 1.3153283438175036e-06, |
|
"loss": 0.5735, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.6775067750677507, |
|
"grad_norm": 0.19128344320769594, |
|
"learning_rate": 1.3021759544377632e-06, |
|
"loss": 0.5889, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.6793134598012647, |
|
"grad_norm": 0.19834550984118285, |
|
"learning_rate": 1.2890664567195e-06, |
|
"loss": 0.5736, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.6811201445347786, |
|
"grad_norm": 0.2077959887040641, |
|
"learning_rate": 1.27600032008737e-06, |
|
"loss": 0.5892, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.6829268292682927, |
|
"grad_norm": 0.20162867649815305, |
|
"learning_rate": 1.2629780124133511e-06, |
|
"loss": 0.5903, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.6847335140018067, |
|
"grad_norm": 0.21683500604581735, |
|
"learning_rate": 1.2500000000000007e-06, |
|
"loss": 0.592, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.6865401987353207, |
|
"grad_norm": 0.18845292977716074, |
|
"learning_rate": 1.2370667475637474e-06, |
|
"loss": 0.5737, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.6883468834688347, |
|
"grad_norm": 0.19858214832528265, |
|
"learning_rate": 1.2241787182182596e-06, |
|
"loss": 0.5894, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.6901535682023487, |
|
"grad_norm": 0.20192306854242342, |
|
"learning_rate": 1.2113363734578548e-06, |
|
"loss": 0.5773, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.6919602529358627, |
|
"grad_norm": 0.1957085698823615, |
|
"learning_rate": 1.1985401731409793e-06, |
|
"loss": 0.5968, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.6937669376693767, |
|
"grad_norm": 0.2125180828512492, |
|
"learning_rate": 1.185790575473738e-06, |
|
"loss": 0.5952, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.6955736224028907, |
|
"grad_norm": 0.19996798857466866, |
|
"learning_rate": 1.1730880369934933e-06, |
|
"loss": 0.5768, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.6973803071364046, |
|
"grad_norm": 0.19109311362278736, |
|
"learning_rate": 1.160433012552508e-06, |
|
"loss": 0.588, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.6991869918699187, |
|
"grad_norm": 0.2069488322575602, |
|
"learning_rate": 1.1478259553016683e-06, |
|
"loss": 0.5884, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.7009936766034327, |
|
"grad_norm": 0.2063220949153012, |
|
"learning_rate": 1.1352673166742463e-06, |
|
"loss": 0.5879, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.7028003613369467, |
|
"grad_norm": 0.19867402281700364, |
|
"learning_rate": 1.122757546369744e-06, |
|
"loss": 0.5915, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.7046070460704607, |
|
"grad_norm": 0.1954873804514819, |
|
"learning_rate": 1.1102970923377893e-06, |
|
"loss": 0.5836, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.7064137308039747, |
|
"grad_norm": 0.2010207307649061, |
|
"learning_rate": 1.0978864007620896e-06, |
|
"loss": 0.5852, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.7082204155374887, |
|
"grad_norm": 0.2211151863933889, |
|
"learning_rate": 1.085525916044464e-06, |
|
"loss": 0.5911, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.7100271002710027, |
|
"grad_norm": 0.22126252458153034, |
|
"learning_rate": 1.073216080788921e-06, |
|
"loss": 0.5954, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.7118337850045167, |
|
"grad_norm": 0.19413607526264012, |
|
"learning_rate": 1.0609573357858166e-06, |
|
"loss": 0.597, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.7136404697380307, |
|
"grad_norm": 0.20696275357198107, |
|
"learning_rate": 1.048750119996066e-06, |
|
"loss": 0.5937, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.7154471544715447, |
|
"grad_norm": 0.20262348611385844, |
|
"learning_rate": 1.0365948705354309e-06, |
|
"loss": 0.5864, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.7172538392050587, |
|
"grad_norm": 0.20538614507552014, |
|
"learning_rate": 1.0244920226588599e-06, |
|
"loss": 0.5853, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.7190605239385727, |
|
"grad_norm": 0.1905482173846242, |
|
"learning_rate": 1.0124420097449077e-06, |
|
"loss": 0.5873, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.7208672086720868, |
|
"grad_norm": 0.18443281595390557, |
|
"learning_rate": 1.0004452632802158e-06, |
|
"loss": 0.5917, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.7226738934056007, |
|
"grad_norm": 0.20129888302461288, |
|
"learning_rate": 9.88502212844063e-07, |
|
"loss": 0.5874, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.7244805781391147, |
|
"grad_norm": 0.21452153041002034, |
|
"learning_rate": 9.7661328609298e-07, |
|
"loss": 0.59, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.7262872628726287, |
|
"grad_norm": 0.21422138477452932, |
|
"learning_rate": 9.64778908745437e-07, |
|
"loss": 0.5977, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.7280939476061428, |
|
"grad_norm": 0.20274956421616797, |
|
"learning_rate": 9.529995045666041e-07, |
|
"loss": 0.5788, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.7299006323396567, |
|
"grad_norm": 0.2013431410310367, |
|
"learning_rate": 9.412754953531664e-07, |
|
"loss": 0.5828, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.7317073170731707, |
|
"grad_norm": 0.1919758247929078, |
|
"learning_rate": 9.296073009182341e-07, |
|
"loss": 0.5823, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.7335140018066847, |
|
"grad_norm": 0.20154872882177322, |
|
"learning_rate": 9.179953390762977e-07, |
|
"loss": 0.5888, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.7353206865401988, |
|
"grad_norm": 0.20958430246936258, |
|
"learning_rate": 9.064400256282757e-07, |
|
"loss": 0.5806, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.7371273712737128, |
|
"grad_norm": 0.346779642044963, |
|
"learning_rate": 8.949417743466199e-07, |
|
"loss": 0.5754, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.7389340560072267, |
|
"grad_norm": 0.26408423210453413, |
|
"learning_rate": 8.835009969605013e-07, |
|
"loss": 0.5928, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.7407407407407407, |
|
"grad_norm": 0.1961937889808973, |
|
"learning_rate": 8.721181031410661e-07, |
|
"loss": 0.595, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.7425474254742548, |
|
"grad_norm": 0.19801858217452842, |
|
"learning_rate": 8.607935004867693e-07, |
|
"loss": 0.5963, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.7443541102077688, |
|
"grad_norm": 0.20021513556312917, |
|
"learning_rate": 8.495275945087744e-07, |
|
"loss": 0.5694, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.7461607949412827, |
|
"grad_norm": 0.19661429119710955, |
|
"learning_rate": 8.383207886164366e-07, |
|
"loss": 0.5861, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.7479674796747967, |
|
"grad_norm": 0.1928677620579264, |
|
"learning_rate": 8.271734841028553e-07, |
|
"loss": 0.5733, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.7497741644083108, |
|
"grad_norm": 0.1979045441126446, |
|
"learning_rate": 8.16086080130506e-07, |
|
"loss": 0.5951, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.7515808491418248, |
|
"grad_norm": 0.20218794601192208, |
|
"learning_rate": 8.050589737169485e-07, |
|
"loss": 0.5811, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.7533875338753387, |
|
"grad_norm": 0.20344794195772742, |
|
"learning_rate": 7.940925597206053e-07, |
|
"loss": 0.6035, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.7551942186088527, |
|
"grad_norm": 0.21268269017167418, |
|
"learning_rate": 7.831872308266306e-07, |
|
"loss": 0.5857, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.7570009033423668, |
|
"grad_norm": 0.1857432797738388, |
|
"learning_rate": 7.723433775328385e-07, |
|
"loss": 0.5858, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.7588075880758808, |
|
"grad_norm": 0.1968892912670059, |
|
"learning_rate": 7.615613881357315e-07, |
|
"loss": 0.6002, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.7606142728093948, |
|
"grad_norm": 0.19458011717852447, |
|
"learning_rate": 7.508416487165862e-07, |
|
"loss": 0.5805, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.7624209575429087, |
|
"grad_norm": 0.18851481977247553, |
|
"learning_rate": 7.401845431276378e-07, |
|
"loss": 0.5843, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.7642276422764228, |
|
"grad_norm": 0.20293782749771516, |
|
"learning_rate": 7.295904529783265e-07, |
|
"loss": 0.5939, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.7660343270099368, |
|
"grad_norm": 0.1929807120562277, |
|
"learning_rate": 7.190597576216385e-07, |
|
"loss": 0.5974, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.7678410117434508, |
|
"grad_norm": 0.19854186586265798, |
|
"learning_rate": 7.085928341405193e-07, |
|
"loss": 0.5761, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.7696476964769647, |
|
"grad_norm": 0.1914772718420052, |
|
"learning_rate": 6.98190057334375e-07, |
|
"loss": 0.5964, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.7714543812104788, |
|
"grad_norm": 0.19547394087507244, |
|
"learning_rate": 6.878517997056458e-07, |
|
"loss": 0.568, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.7732610659439928, |
|
"grad_norm": 0.19890854360676258, |
|
"learning_rate": 6.775784314464717e-07, |
|
"loss": 0.5885, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.7750677506775068, |
|
"grad_norm": 0.19723288328989222, |
|
"learning_rate": 6.673703204254348e-07, |
|
"loss": 0.5826, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.7768744354110207, |
|
"grad_norm": 0.1917732111792597, |
|
"learning_rate": 6.572278321743871e-07, |
|
"loss": 0.5851, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.7786811201445348, |
|
"grad_norm": 0.2144258319855419, |
|
"learning_rate": 6.471513298753634e-07, |
|
"loss": 0.5938, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.7804878048780488, |
|
"grad_norm": 0.1914954286444934, |
|
"learning_rate": 6.371411743475717e-07, |
|
"loss": 0.5886, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.7822944896115628, |
|
"grad_norm": 0.1915436424852639, |
|
"learning_rate": 6.271977240344795e-07, |
|
"loss": 0.5973, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.7841011743450768, |
|
"grad_norm": 0.1962061264070641, |
|
"learning_rate": 6.17321334990973e-07, |
|
"loss": 0.5964, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.7859078590785907, |
|
"grad_norm": 0.19084926119503218, |
|
"learning_rate": 6.075123608706093e-07, |
|
"loss": 0.5728, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.7877145438121048, |
|
"grad_norm": 0.19050326256104155, |
|
"learning_rate": 5.97771152912954e-07, |
|
"loss": 0.587, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.7895212285456188, |
|
"grad_norm": 0.18836498258283543, |
|
"learning_rate": 5.880980599310041e-07, |
|
"loss": 0.585, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.7913279132791328, |
|
"grad_norm": 0.20435071863480905, |
|
"learning_rate": 5.784934282986956e-07, |
|
"loss": 0.5772, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.7931345980126467, |
|
"grad_norm": 0.2023157209439081, |
|
"learning_rate": 5.689576019385015e-07, |
|
"loss": 0.5941, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.7949412827461608, |
|
"grad_norm": 0.20024639862122715, |
|
"learning_rate": 5.59490922309118e-07, |
|
"loss": 0.5861, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.7967479674796748, |
|
"grad_norm": 0.19423594398341354, |
|
"learning_rate": 5.500937283932348e-07, |
|
"loss": 0.57, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.7985546522131888, |
|
"grad_norm": 0.19433916433983744, |
|
"learning_rate": 5.407663566854008e-07, |
|
"loss": 0.5801, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.8003613369467028, |
|
"grad_norm": 0.20578340689151056, |
|
"learning_rate": 5.3150914117997e-07, |
|
"loss": 0.5879, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.8021680216802168, |
|
"grad_norm": 0.19675195680591387, |
|
"learning_rate": 5.223224133591475e-07, |
|
"loss": 0.5938, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.8039747064137308, |
|
"grad_norm": 0.20725412186119369, |
|
"learning_rate": 5.132065021811123e-07, |
|
"loss": 0.5817, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.8057813911472448, |
|
"grad_norm": 0.20053119388587262, |
|
"learning_rate": 5.041617340682467e-07, |
|
"loss": 0.5786, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.8075880758807588, |
|
"grad_norm": 0.19063636355734778, |
|
"learning_rate": 4.951884328954401e-07, |
|
"loss": 0.584, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.8093947606142728, |
|
"grad_norm": 0.17948151730955053, |
|
"learning_rate": 4.862869199784984e-07, |
|
"loss": 0.5863, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.8112014453477868, |
|
"grad_norm": 0.19768757570053508, |
|
"learning_rate": 4.774575140626317e-07, |
|
"loss": 0.5822, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.8130081300813008, |
|
"grad_norm": 0.1942777228720005, |
|
"learning_rate": 4.687005313110454e-07, |
|
"loss": 0.5921, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.8148148148148148, |
|
"grad_norm": 0.21425291439601463, |
|
"learning_rate": 4.600162852936171e-07, |
|
"loss": 0.5946, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.8166214995483289, |
|
"grad_norm": 0.19819708538024772, |
|
"learning_rate": 4.514050869756703e-07, |
|
"loss": 0.5899, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.8184281842818428, |
|
"grad_norm": 0.19195362842264865, |
|
"learning_rate": 4.4286724470683576e-07, |
|
"loss": 0.5819, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.8202348690153568, |
|
"grad_norm": 0.20338057359461525, |
|
"learning_rate": 4.344030642100133e-07, |
|
"loss": 0.5666, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.8220415537488708, |
|
"grad_norm": 0.20327817658583644, |
|
"learning_rate": 4.2601284857042263e-07, |
|
"loss": 0.5906, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.8238482384823849, |
|
"grad_norm": 0.20744598329087444, |
|
"learning_rate": 4.1769689822475147e-07, |
|
"loss": 0.5819, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.8256549232158988, |
|
"grad_norm": 0.2029409286632038, |
|
"learning_rate": 4.0945551095039837e-07, |
|
"loss": 0.5967, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.8274616079494128, |
|
"grad_norm": 0.19729156914796803, |
|
"learning_rate": 4.012889818548069e-07, |
|
"loss": 0.589, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.8292682926829268, |
|
"grad_norm": 0.1872570351127295, |
|
"learning_rate": 3.931976033649021e-07, |
|
"loss": 0.5783, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.8310749774164409, |
|
"grad_norm": 0.1943816366413793, |
|
"learning_rate": 3.851816652166165e-07, |
|
"loss": 0.5881, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.8328816621499548, |
|
"grad_norm": 0.18999841178320626, |
|
"learning_rate": 3.772414544445163e-07, |
|
"loss": 0.5757, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.8346883468834688, |
|
"grad_norm": 0.23829838305045872, |
|
"learning_rate": 3.6937725537152277e-07, |
|
"loss": 0.583, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.8364950316169828, |
|
"grad_norm": 0.1885927060959385, |
|
"learning_rate": 3.615893495987335e-07, |
|
"loss": 0.5879, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.8383017163504969, |
|
"grad_norm": 0.2015764989007377, |
|
"learning_rate": 3.538780159953348e-07, |
|
"loss": 0.5902, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.8401084010840109, |
|
"grad_norm": 0.2015900600206813, |
|
"learning_rate": 3.462435306886194e-07, |
|
"loss": 0.6, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.8419150858175248, |
|
"grad_norm": 0.2043727279833967, |
|
"learning_rate": 3.3868616705409723e-07, |
|
"loss": 0.5836, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.8437217705510388, |
|
"grad_norm": 0.19666307771397085, |
|
"learning_rate": 3.312061957057061e-07, |
|
"loss": 0.5797, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.8455284552845529, |
|
"grad_norm": 0.19069167880376858, |
|
"learning_rate": 3.2380388448612437e-07, |
|
"loss": 0.5906, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.8473351400180669, |
|
"grad_norm": 0.23586769464721072, |
|
"learning_rate": 3.164794984571759e-07, |
|
"loss": 0.5794, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.8491418247515808, |
|
"grad_norm": 0.2024878857222796, |
|
"learning_rate": 3.092332998903416e-07, |
|
"loss": 0.5748, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.8509485094850948, |
|
"grad_norm": 0.2067579361021575, |
|
"learning_rate": 3.020655482573659e-07, |
|
"loss": 0.6049, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.8527551942186089, |
|
"grad_norm": 0.21935627947112848, |
|
"learning_rate": 2.949765002209698e-07, |
|
"loss": 0.5763, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.8545618789521229, |
|
"grad_norm": 0.1906611720754469, |
|
"learning_rate": 2.8796640962565374e-07, |
|
"loss": 0.5955, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.8563685636856369, |
|
"grad_norm": 0.18330400428801807, |
|
"learning_rate": 2.810355274886148e-07, |
|
"loss": 0.5907, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.8581752484191508, |
|
"grad_norm": 0.2067435257059269, |
|
"learning_rate": 2.7418410199075293e-07, |
|
"loss": 0.5805, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.8599819331526649, |
|
"grad_norm": 0.19000373388890107, |
|
"learning_rate": 2.674123784677868e-07, |
|
"loss": 0.5874, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.8617886178861789, |
|
"grad_norm": 0.18526442128158882, |
|
"learning_rate": 2.6072059940146775e-07, |
|
"loss": 0.5836, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.8635953026196929, |
|
"grad_norm": 0.21478407044446765, |
|
"learning_rate": 2.5410900441089905e-07, |
|
"loss": 0.5954, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.8654019873532068, |
|
"grad_norm": 0.22386509278719685, |
|
"learning_rate": 2.4757783024395244e-07, |
|
"loss": 0.5818, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.8672086720867209, |
|
"grad_norm": 0.18372007023252018, |
|
"learning_rate": 2.4112731076879254e-07, |
|
"loss": 0.5798, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.8690153568202349, |
|
"grad_norm": 0.19040176908950068, |
|
"learning_rate": 2.3475767696550327e-07, |
|
"loss": 0.5894, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.8708220415537489, |
|
"grad_norm": 0.2004862041545315, |
|
"learning_rate": 2.284691569178138e-07, |
|
"loss": 0.5933, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.8726287262872628, |
|
"grad_norm": 0.19654881181561126, |
|
"learning_rate": 2.2226197580493657e-07, |
|
"loss": 0.5781, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.8744354110207768, |
|
"grad_norm": 0.1977337131166601, |
|
"learning_rate": 2.1613635589349756e-07, |
|
"loss": 0.5756, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.8762420957542909, |
|
"grad_norm": 0.19903230308008285, |
|
"learning_rate": 2.100925165295839e-07, |
|
"loss": 0.5849, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.8780487804878049, |
|
"grad_norm": 0.19613496635724054, |
|
"learning_rate": 2.041306741308832e-07, |
|
"loss": 0.5913, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.8798554652213189, |
|
"grad_norm": 0.19251722934409113, |
|
"learning_rate": 1.982510421789402e-07, |
|
"loss": 0.5789, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.8816621499548328, |
|
"grad_norm": 0.1883072319364769, |
|
"learning_rate": 1.9245383121150678e-07, |
|
"loss": 0.5783, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.8834688346883469, |
|
"grad_norm": 0.18855885019091942, |
|
"learning_rate": 1.8673924881500826e-07, |
|
"loss": 0.5877, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.8852755194218609, |
|
"grad_norm": 0.2181375847970685, |
|
"learning_rate": 1.8110749961710582e-07, |
|
"loss": 0.5843, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.8870822041553749, |
|
"grad_norm": 0.1925063200042687, |
|
"learning_rate": 1.7555878527937164e-07, |
|
"loss": 0.5763, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.8888888888888888, |
|
"grad_norm": 0.2021368170109263, |
|
"learning_rate": 1.7009330449006712e-07, |
|
"loss": 0.5781, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.8906955736224029, |
|
"grad_norm": 0.19509047381234865, |
|
"learning_rate": 1.6471125295702773e-07, |
|
"loss": 0.5719, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.8925022583559169, |
|
"grad_norm": 0.1921901997278579, |
|
"learning_rate": 1.59412823400657e-07, |
|
"loss": 0.5895, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.8943089430894309, |
|
"grad_norm": 0.1923785346679999, |
|
"learning_rate": 1.5419820554702313e-07, |
|
"loss": 0.5861, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.8961156278229448, |
|
"grad_norm": 0.19764959276565705, |
|
"learning_rate": 1.4906758612106637e-07, |
|
"loss": 0.599, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.8979223125564589, |
|
"grad_norm": 0.19700462380217873, |
|
"learning_rate": 1.4402114883991318e-07, |
|
"loss": 0.5858, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.8997289972899729, |
|
"grad_norm": 0.20609368769470798, |
|
"learning_rate": 1.3905907440629752e-07, |
|
"loss": 0.5848, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.9015356820234869, |
|
"grad_norm": 0.20327434936015687, |
|
"learning_rate": 1.3418154050208937e-07, |
|
"loss": 0.5946, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.9033423667570009, |
|
"grad_norm": 0.19818277901642153, |
|
"learning_rate": 1.2938872178193395e-07, |
|
"loss": 0.5924, |
|
"step": 500 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 553, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 579500543967232.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |