|
{ |
|
"best_global_step": null, |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.999096657633243, |
|
"eval_steps": 500, |
|
"global_step": 553, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0018066847335140017, |
|
"grad_norm": 2.8337757951637657, |
|
"learning_rate": 0.0, |
|
"loss": 0.8251, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0036133694670280035, |
|
"grad_norm": 2.8815146894381005, |
|
"learning_rate": 1.7857142857142858e-07, |
|
"loss": 0.8284, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.005420054200542005, |
|
"grad_norm": 2.888366333138549, |
|
"learning_rate": 3.5714285714285716e-07, |
|
"loss": 0.8432, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.007226738934056007, |
|
"grad_norm": 2.8554157896663708, |
|
"learning_rate": 5.357142857142857e-07, |
|
"loss": 0.8398, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.009033423667570008, |
|
"grad_norm": 2.7996302090063954, |
|
"learning_rate": 7.142857142857143e-07, |
|
"loss": 0.8303, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01084010840108401, |
|
"grad_norm": 2.758540000006854, |
|
"learning_rate": 8.928571428571429e-07, |
|
"loss": 0.8306, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.012646793134598013, |
|
"grad_norm": 2.7440712027687453, |
|
"learning_rate": 1.0714285714285714e-06, |
|
"loss": 0.8129, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.014453477868112014, |
|
"grad_norm": 2.789670841276354, |
|
"learning_rate": 1.25e-06, |
|
"loss": 0.8217, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.016260162601626018, |
|
"grad_norm": 2.637271873271262, |
|
"learning_rate": 1.4285714285714286e-06, |
|
"loss": 0.8192, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.018066847335140017, |
|
"grad_norm": 2.6274284830887247, |
|
"learning_rate": 1.6071428571428574e-06, |
|
"loss": 0.8146, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01987353206865402, |
|
"grad_norm": 2.2614743320388992, |
|
"learning_rate": 1.7857142857142859e-06, |
|
"loss": 0.8022, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.02168021680216802, |
|
"grad_norm": 2.1218891806221882, |
|
"learning_rate": 1.9642857142857144e-06, |
|
"loss": 0.8131, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.023486901535682024, |
|
"grad_norm": 2.1362019297809636, |
|
"learning_rate": 2.1428571428571427e-06, |
|
"loss": 0.8047, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.025293586269196026, |
|
"grad_norm": 2.043181941163786, |
|
"learning_rate": 2.321428571428572e-06, |
|
"loss": 0.8006, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.02710027100271003, |
|
"grad_norm": 1.4654153096579285, |
|
"learning_rate": 2.5e-06, |
|
"loss": 0.7863, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.028906955736224028, |
|
"grad_norm": 1.424950643183691, |
|
"learning_rate": 2.6785714285714285e-06, |
|
"loss": 0.7794, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.03071364046973803, |
|
"grad_norm": 1.371800886806853, |
|
"learning_rate": 2.8571428571428573e-06, |
|
"loss": 0.7871, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.032520325203252036, |
|
"grad_norm": 1.2910993566284936, |
|
"learning_rate": 3.0357142857142856e-06, |
|
"loss": 0.7792, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.03432700993676603, |
|
"grad_norm": 1.2099658145135412, |
|
"learning_rate": 3.2142857142857147e-06, |
|
"loss": 0.7728, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.036133694670280034, |
|
"grad_norm": 1.0479474035735314, |
|
"learning_rate": 3.3928571428571435e-06, |
|
"loss": 0.743, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.037940379403794036, |
|
"grad_norm": 1.112276534792924, |
|
"learning_rate": 3.5714285714285718e-06, |
|
"loss": 0.7403, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.03974706413730804, |
|
"grad_norm": 1.3035077079579298, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 0.7296, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.04155374887082204, |
|
"grad_norm": 1.3695661288387202, |
|
"learning_rate": 3.928571428571429e-06, |
|
"loss": 0.7302, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.04336043360433604, |
|
"grad_norm": 1.2631605637070566, |
|
"learning_rate": 4.107142857142857e-06, |
|
"loss": 0.7359, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.045167118337850046, |
|
"grad_norm": 0.9577414796019008, |
|
"learning_rate": 4.2857142857142855e-06, |
|
"loss": 0.713, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.04697380307136405, |
|
"grad_norm": 0.7548293526053346, |
|
"learning_rate": 4.464285714285715e-06, |
|
"loss": 0.7275, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.04878048780487805, |
|
"grad_norm": 0.6708116039946823, |
|
"learning_rate": 4.642857142857144e-06, |
|
"loss": 0.7021, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.05058717253839205, |
|
"grad_norm": 0.7960055478682195, |
|
"learning_rate": 4.821428571428572e-06, |
|
"loss": 0.6998, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.052393857271906055, |
|
"grad_norm": 0.8790784871024999, |
|
"learning_rate": 5e-06, |
|
"loss": 0.7086, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.05420054200542006, |
|
"grad_norm": 0.9579979234005123, |
|
"learning_rate": 4.999955240022903e-06, |
|
"loss": 0.7065, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.05600722673893405, |
|
"grad_norm": 1.003933265230055, |
|
"learning_rate": 4.999820961694372e-06, |
|
"loss": 0.6937, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.057813911472448055, |
|
"grad_norm": 0.6710805038763018, |
|
"learning_rate": 4.999597169822646e-06, |
|
"loss": 0.6812, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.05962059620596206, |
|
"grad_norm": 0.5001839964623817, |
|
"learning_rate": 4.999283872421259e-06, |
|
"loss": 0.6911, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.06142728093947606, |
|
"grad_norm": 0.5681434362593133, |
|
"learning_rate": 4.998881080708759e-06, |
|
"loss": 0.6766, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.06323396567299007, |
|
"grad_norm": 0.5752663504650961, |
|
"learning_rate": 4.998388809108304e-06, |
|
"loss": 0.6816, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.06504065040650407, |
|
"grad_norm": 0.4990120714898939, |
|
"learning_rate": 4.997807075247147e-06, |
|
"loss": 0.6754, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.06684733514001806, |
|
"grad_norm": 0.4974519304946596, |
|
"learning_rate": 4.997135899956002e-06, |
|
"loss": 0.6782, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.06865401987353206, |
|
"grad_norm": 0.4349874019011085, |
|
"learning_rate": 4.996375307268303e-06, |
|
"loss": 0.6709, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.07046070460704607, |
|
"grad_norm": 0.48312199739205014, |
|
"learning_rate": 4.995525324419338e-06, |
|
"loss": 0.6707, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.07226738934056007, |
|
"grad_norm": 0.3760455090466998, |
|
"learning_rate": 4.994585981845278e-06, |
|
"loss": 0.6592, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.07407407407407407, |
|
"grad_norm": 0.362711899175025, |
|
"learning_rate": 4.993557313182086e-06, |
|
"loss": 0.6666, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.07588075880758807, |
|
"grad_norm": 0.3527864788229306, |
|
"learning_rate": 4.992439355264308e-06, |
|
"loss": 0.656, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.07768744354110207, |
|
"grad_norm": 0.37039474310454246, |
|
"learning_rate": 4.9912321481237616e-06, |
|
"loss": 0.6567, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.07949412827461608, |
|
"grad_norm": 0.3381704985127657, |
|
"learning_rate": 4.989935734988098e-06, |
|
"loss": 0.6492, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.08130081300813008, |
|
"grad_norm": 0.3368799245864057, |
|
"learning_rate": 4.988550162279255e-06, |
|
"loss": 0.6558, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.08310749774164408, |
|
"grad_norm": 0.33217201284358455, |
|
"learning_rate": 4.9870754796117956e-06, |
|
"loss": 0.658, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.08491418247515808, |
|
"grad_norm": 0.3203999210132069, |
|
"learning_rate": 4.985511739791129e-06, |
|
"loss": 0.6596, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.08672086720867209, |
|
"grad_norm": 0.3108831280349791, |
|
"learning_rate": 4.983858998811622e-06, |
|
"loss": 0.6465, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.08852755194218609, |
|
"grad_norm": 0.28944557461245024, |
|
"learning_rate": 4.982117315854594e-06, |
|
"loss": 0.651, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.09033423667570009, |
|
"grad_norm": 0.2724500694923891, |
|
"learning_rate": 4.980286753286196e-06, |
|
"loss": 0.6493, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.0921409214092141, |
|
"grad_norm": 0.29233908216498683, |
|
"learning_rate": 4.978367376655177e-06, |
|
"loss": 0.6391, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.0939476061427281, |
|
"grad_norm": 0.29050327837368234, |
|
"learning_rate": 4.976359254690543e-06, |
|
"loss": 0.6577, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.0957542908762421, |
|
"grad_norm": 0.26277310012199956, |
|
"learning_rate": 4.974262459299088e-06, |
|
"loss": 0.641, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.0975609756097561, |
|
"grad_norm": 0.2694317681957786, |
|
"learning_rate": 4.9720770655628216e-06, |
|
"loss": 0.6322, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.0993676603432701, |
|
"grad_norm": 0.27643434259676186, |
|
"learning_rate": 4.969803151736285e-06, |
|
"loss": 0.6446, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.1011743450767841, |
|
"grad_norm": 0.27938135134555137, |
|
"learning_rate": 4.967440799243739e-06, |
|
"loss": 0.6505, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.10298102981029811, |
|
"grad_norm": 0.2723254558437683, |
|
"learning_rate": 4.964990092676263e-06, |
|
"loss": 0.6358, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.10478771454381211, |
|
"grad_norm": 0.2476211836425744, |
|
"learning_rate": 4.962451119788709e-06, |
|
"loss": 0.6435, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.10659439927732611, |
|
"grad_norm": 0.2385367032602417, |
|
"learning_rate": 4.959823971496575e-06, |
|
"loss": 0.6289, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.10840108401084012, |
|
"grad_norm": 0.24895994998658083, |
|
"learning_rate": 4.957108741872736e-06, |
|
"loss": 0.6299, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.1102077687443541, |
|
"grad_norm": 0.28129654057596637, |
|
"learning_rate": 4.954305528144085e-06, |
|
"loss": 0.6339, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.1120144534778681, |
|
"grad_norm": 0.25373425749505557, |
|
"learning_rate": 4.9514144306880506e-06, |
|
"loss": 0.6393, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.11382113821138211, |
|
"grad_norm": 0.2583299024885213, |
|
"learning_rate": 4.948435553028994e-06, |
|
"loss": 0.6383, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.11562782294489611, |
|
"grad_norm": 0.24423529044993955, |
|
"learning_rate": 4.9453690018345144e-06, |
|
"loss": 0.6359, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.11743450767841011, |
|
"grad_norm": 0.24605558051142096, |
|
"learning_rate": 4.942214886911619e-06, |
|
"loss": 0.631, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.11924119241192412, |
|
"grad_norm": 0.2548336350045181, |
|
"learning_rate": 4.938973321202799e-06, |
|
"loss": 0.6323, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.12104787714543812, |
|
"grad_norm": 0.32845268505362657, |
|
"learning_rate": 4.935644420781978e-06, |
|
"loss": 0.6229, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.12285456187895212, |
|
"grad_norm": 0.25203625601717533, |
|
"learning_rate": 4.932228304850363e-06, |
|
"loss": 0.6386, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.12466124661246612, |
|
"grad_norm": 0.2467239132093227, |
|
"learning_rate": 4.9287250957321685e-06, |
|
"loss": 0.6401, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.12646793134598014, |
|
"grad_norm": 0.28444822854929847, |
|
"learning_rate": 4.925134918870245e-06, |
|
"loss": 0.646, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.12827461607949414, |
|
"grad_norm": 0.23105312295557145, |
|
"learning_rate": 4.921457902821578e-06, |
|
"loss": 0.6164, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.13008130081300814, |
|
"grad_norm": 0.23048405537510025, |
|
"learning_rate": 4.917694179252692e-06, |
|
"loss": 0.6342, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.13188798554652212, |
|
"grad_norm": 0.24150602333720247, |
|
"learning_rate": 4.9138438829349296e-06, |
|
"loss": 0.6332, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.13369467028003612, |
|
"grad_norm": 0.2462886765781574, |
|
"learning_rate": 4.909907151739634e-06, |
|
"loss": 0.6246, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.13550135501355012, |
|
"grad_norm": 0.2821908512766841, |
|
"learning_rate": 4.9058841266332005e-06, |
|
"loss": 0.6227, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.13730803974706413, |
|
"grad_norm": 0.25172603846639535, |
|
"learning_rate": 4.901774951672041e-06, |
|
"loss": 0.6181, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.13911472448057813, |
|
"grad_norm": 0.23478283786140364, |
|
"learning_rate": 4.897579773997415e-06, |
|
"loss": 0.6123, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.14092140921409213, |
|
"grad_norm": 0.24524464756556244, |
|
"learning_rate": 4.893298743830168e-06, |
|
"loss": 0.625, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.14272809394760613, |
|
"grad_norm": 0.2557156779275681, |
|
"learning_rate": 4.8889320144653525e-06, |
|
"loss": 0.6347, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.14453477868112014, |
|
"grad_norm": 0.2501383477274707, |
|
"learning_rate": 4.884479742266731e-06, |
|
"loss": 0.6435, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.14634146341463414, |
|
"grad_norm": 0.2777905831842133, |
|
"learning_rate": 4.879942086661185e-06, |
|
"loss": 0.6249, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.14814814814814814, |
|
"grad_norm": 0.2311051834333071, |
|
"learning_rate": 4.875319210133004e-06, |
|
"loss": 0.6186, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.14995483288166214, |
|
"grad_norm": 0.24510662627866567, |
|
"learning_rate": 4.870611278218066e-06, |
|
"loss": 0.6301, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.15176151761517614, |
|
"grad_norm": 0.2457506431539522, |
|
"learning_rate": 4.865818459497911e-06, |
|
"loss": 0.6221, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.15356820234869015, |
|
"grad_norm": 0.23750506338642635, |
|
"learning_rate": 4.860940925593703e-06, |
|
"loss": 0.6238, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.15537488708220415, |
|
"grad_norm": 0.24583995522041222, |
|
"learning_rate": 4.855978851160088e-06, |
|
"loss": 0.6245, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.15718157181571815, |
|
"grad_norm": 0.24412234277510458, |
|
"learning_rate": 4.850932413878934e-06, |
|
"loss": 0.6266, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.15898825654923215, |
|
"grad_norm": 0.23692543465028104, |
|
"learning_rate": 4.845801794452978e-06, |
|
"loss": 0.6307, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.16079494128274616, |
|
"grad_norm": 0.24055583410087153, |
|
"learning_rate": 4.8405871765993435e-06, |
|
"loss": 0.6258, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.16260162601626016, |
|
"grad_norm": 0.25682681695753845, |
|
"learning_rate": 4.8352887470429726e-06, |
|
"loss": 0.6193, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.16440831074977416, |
|
"grad_norm": 0.2451987238773762, |
|
"learning_rate": 4.829906695509934e-06, |
|
"loss": 0.6175, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.16621499548328816, |
|
"grad_norm": 0.33308910526145086, |
|
"learning_rate": 4.824441214720629e-06, |
|
"loss": 0.6207, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.16802168021680217, |
|
"grad_norm": 0.2599636527935745, |
|
"learning_rate": 4.8188925003828945e-06, |
|
"loss": 0.6275, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.16982836495031617, |
|
"grad_norm": 0.24262017713570547, |
|
"learning_rate": 4.813260751184992e-06, |
|
"loss": 0.6255, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.17163504968383017, |
|
"grad_norm": 0.24155086390799096, |
|
"learning_rate": 4.8075461687884935e-06, |
|
"loss": 0.6184, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.17344173441734417, |
|
"grad_norm": 0.2399226609650259, |
|
"learning_rate": 4.801748957821061e-06, |
|
"loss": 0.6164, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.17524841915085818, |
|
"grad_norm": 0.22903596103655702, |
|
"learning_rate": 4.795869325869117e-06, |
|
"loss": 0.6229, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.17705510388437218, |
|
"grad_norm": 0.2307755782529314, |
|
"learning_rate": 4.7899074834704165e-06, |
|
"loss": 0.6173, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.17886178861788618, |
|
"grad_norm": 0.24823755288051938, |
|
"learning_rate": 4.783863644106502e-06, |
|
"loss": 0.6292, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.18066847335140018, |
|
"grad_norm": 0.21658543875130712, |
|
"learning_rate": 4.777738024195065e-06, |
|
"loss": 0.6032, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.18247515808491419, |
|
"grad_norm": 0.24020742361139732, |
|
"learning_rate": 4.771530843082187e-06, |
|
"loss": 0.6184, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.1842818428184282, |
|
"grad_norm": 0.23250154874953444, |
|
"learning_rate": 4.765242323034498e-06, |
|
"loss": 0.6373, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.1860885275519422, |
|
"grad_norm": 0.2515794157481243, |
|
"learning_rate": 4.7588726892312085e-06, |
|
"loss": 0.6162, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.1878952122854562, |
|
"grad_norm": 0.2388452484683859, |
|
"learning_rate": 4.752422169756048e-06, |
|
"loss": 0.6147, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.1897018970189702, |
|
"grad_norm": 0.23451396083233195, |
|
"learning_rate": 4.7458909955891015e-06, |
|
"loss": 0.6066, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.1915085817524842, |
|
"grad_norm": 0.23211345055411792, |
|
"learning_rate": 4.7392794005985324e-06, |
|
"loss": 0.6076, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.1933152664859982, |
|
"grad_norm": 0.2405071836183994, |
|
"learning_rate": 4.732587621532214e-06, |
|
"loss": 0.6139, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.1951219512195122, |
|
"grad_norm": 0.238214343050957, |
|
"learning_rate": 4.7258158980092475e-06, |
|
"loss": 0.6125, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.1969286359530262, |
|
"grad_norm": 0.23118377153943317, |
|
"learning_rate": 4.718964472511386e-06, |
|
"loss": 0.6224, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.1987353206865402, |
|
"grad_norm": 0.4227293348441868, |
|
"learning_rate": 4.712033590374346e-06, |
|
"loss": 0.604, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.2005420054200542, |
|
"grad_norm": 0.22706445039114057, |
|
"learning_rate": 4.705023499779031e-06, |
|
"loss": 0.6231, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.2023486901535682, |
|
"grad_norm": 0.21914294121010527, |
|
"learning_rate": 4.6979344517426345e-06, |
|
"loss": 0.6157, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.2041553748870822, |
|
"grad_norm": 0.2160022263814738, |
|
"learning_rate": 4.690766700109659e-06, |
|
"loss": 0.6193, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.20596205962059622, |
|
"grad_norm": 0.2304707253911858, |
|
"learning_rate": 4.683520501542825e-06, |
|
"loss": 0.6021, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.20776874435411022, |
|
"grad_norm": 0.24127382191408644, |
|
"learning_rate": 4.676196115513876e-06, |
|
"loss": 0.6186, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.20957542908762422, |
|
"grad_norm": 0.2406722048256909, |
|
"learning_rate": 4.668793804294294e-06, |
|
"loss": 0.6129, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.21138211382113822, |
|
"grad_norm": 0.24481652156673936, |
|
"learning_rate": 4.661313832945904e-06, |
|
"loss": 0.6228, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.21318879855465223, |
|
"grad_norm": 0.2283120728906989, |
|
"learning_rate": 4.653756469311381e-06, |
|
"loss": 0.6104, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.21499548328816623, |
|
"grad_norm": 0.2191260930120605, |
|
"learning_rate": 4.646121984004666e-06, |
|
"loss": 0.6015, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.21680216802168023, |
|
"grad_norm": 0.24306778049828862, |
|
"learning_rate": 4.638410650401267e-06, |
|
"loss": 0.6193, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.2186088527551942, |
|
"grad_norm": 0.2269775698273598, |
|
"learning_rate": 4.630622744628478e-06, |
|
"loss": 0.6157, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.2204155374887082, |
|
"grad_norm": 0.22557834203825844, |
|
"learning_rate": 4.622758545555485e-06, |
|
"loss": 0.6205, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.2222222222222222, |
|
"grad_norm": 0.2388627744876031, |
|
"learning_rate": 4.614818334783384e-06, |
|
"loss": 0.6117, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.2240289069557362, |
|
"grad_norm": 0.33690846185965356, |
|
"learning_rate": 4.606802396635098e-06, |
|
"loss": 0.5973, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.22583559168925021, |
|
"grad_norm": 0.2559933256711994, |
|
"learning_rate": 4.598711018145193e-06, |
|
"loss": 0.6035, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.22764227642276422, |
|
"grad_norm": 0.23041365241908557, |
|
"learning_rate": 4.590544489049602e-06, |
|
"loss": 0.6167, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.22944896115627822, |
|
"grad_norm": 0.2386416733431471, |
|
"learning_rate": 4.582303101775249e-06, |
|
"loss": 0.6081, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.23125564588979222, |
|
"grad_norm": 0.2253681313258335, |
|
"learning_rate": 4.573987151429579e-06, |
|
"loss": 0.6075, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.23306233062330622, |
|
"grad_norm": 0.21952470403470545, |
|
"learning_rate": 4.565596935789987e-06, |
|
"loss": 0.6031, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.23486901535682023, |
|
"grad_norm": 0.24331315011786012, |
|
"learning_rate": 4.557132755293164e-06, |
|
"loss": 0.6219, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.23667570009033423, |
|
"grad_norm": 0.24835834109503124, |
|
"learning_rate": 4.54859491302433e-06, |
|
"loss": 0.6154, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.23848238482384823, |
|
"grad_norm": 0.22732916433505923, |
|
"learning_rate": 4.539983714706383e-06, |
|
"loss": 0.5945, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.24028906955736223, |
|
"grad_norm": 0.2181618946471109, |
|
"learning_rate": 4.531299468688956e-06, |
|
"loss": 0.61, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.24209575429087624, |
|
"grad_norm": 0.2259645647372355, |
|
"learning_rate": 4.522542485937369e-06, |
|
"loss": 0.6105, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.24390243902439024, |
|
"grad_norm": 0.2176244229453355, |
|
"learning_rate": 4.5137130800215025e-06, |
|
"loss": 0.6069, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.24570912375790424, |
|
"grad_norm": 0.2552195449531324, |
|
"learning_rate": 4.50481156710456e-06, |
|
"loss": 0.6113, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.24751580849141824, |
|
"grad_norm": 0.23421861438168898, |
|
"learning_rate": 4.495838265931754e-06, |
|
"loss": 0.5952, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.24932249322493225, |
|
"grad_norm": 0.21810921222578047, |
|
"learning_rate": 4.486793497818889e-06, |
|
"loss": 0.6121, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.25112917795844625, |
|
"grad_norm": 0.22773114029207175, |
|
"learning_rate": 4.477677586640854e-06, |
|
"loss": 0.6156, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.2529358626919603, |
|
"grad_norm": 0.23705077325515508, |
|
"learning_rate": 4.4684908588200305e-06, |
|
"loss": 0.6156, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.25474254742547425, |
|
"grad_norm": 0.24780177201776643, |
|
"learning_rate": 4.4592336433146e-06, |
|
"loss": 0.5953, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.2565492321589883, |
|
"grad_norm": 0.22862755103918672, |
|
"learning_rate": 4.449906271606766e-06, |
|
"loss": 0.616, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.25835591689250226, |
|
"grad_norm": 0.2404243241200089, |
|
"learning_rate": 4.440509077690883e-06, |
|
"loss": 0.607, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.2601626016260163, |
|
"grad_norm": 0.22293770252772532, |
|
"learning_rate": 4.431042398061499e-06, |
|
"loss": 0.5965, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.26196928635953026, |
|
"grad_norm": 0.20972193775216136, |
|
"learning_rate": 4.421506571701305e-06, |
|
"loss": 0.6093, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.26377597109304424, |
|
"grad_norm": 0.23518600200676912, |
|
"learning_rate": 4.411901940068997e-06, |
|
"loss": 0.6112, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.26558265582655827, |
|
"grad_norm": 0.22746108225760642, |
|
"learning_rate": 4.402228847087046e-06, |
|
"loss": 0.6037, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.26738934056007224, |
|
"grad_norm": 0.24903963512865804, |
|
"learning_rate": 4.3924876391293915e-06, |
|
"loss": 0.6048, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.26919602529358627, |
|
"grad_norm": 0.23472947723463866, |
|
"learning_rate": 4.382678665009028e-06, |
|
"loss": 0.6016, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.27100271002710025, |
|
"grad_norm": 0.23431207097830484, |
|
"learning_rate": 4.372802275965521e-06, |
|
"loss": 0.61, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.2728093947606143, |
|
"grad_norm": 0.22422800322111083, |
|
"learning_rate": 4.362858825652428e-06, |
|
"loss": 0.5949, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.27461607949412825, |
|
"grad_norm": 0.23561711836643753, |
|
"learning_rate": 4.352848670124637e-06, |
|
"loss": 0.5996, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.2764227642276423, |
|
"grad_norm": 0.21652376026883982, |
|
"learning_rate": 4.342772167825613e-06, |
|
"loss": 0.6009, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.27822944896115626, |
|
"grad_norm": 0.22742532543566119, |
|
"learning_rate": 4.332629679574566e-06, |
|
"loss": 0.6063, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.2800361336946703, |
|
"grad_norm": 0.3993451377830147, |
|
"learning_rate": 4.322421568553529e-06, |
|
"loss": 0.5961, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.28184281842818426, |
|
"grad_norm": 0.21468028516819, |
|
"learning_rate": 4.312148200294355e-06, |
|
"loss": 0.598, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.2836495031616983, |
|
"grad_norm": 0.2790705925726011, |
|
"learning_rate": 4.3018099426656255e-06, |
|
"loss": 0.6108, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.28545618789521227, |
|
"grad_norm": 0.22328718703580291, |
|
"learning_rate": 4.291407165859481e-06, |
|
"loss": 0.6073, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.2872628726287263, |
|
"grad_norm": 0.24079715236297133, |
|
"learning_rate": 4.280940242378363e-06, |
|
"loss": 0.5892, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.28906955736224027, |
|
"grad_norm": 0.3349816071529816, |
|
"learning_rate": 4.2704095470216745e-06, |
|
"loss": 0.5985, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.2908762420957543, |
|
"grad_norm": 0.2351483485898414, |
|
"learning_rate": 4.259815456872363e-06, |
|
"loss": 0.6041, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.2926829268292683, |
|
"grad_norm": 0.23385513145667092, |
|
"learning_rate": 4.249158351283414e-06, |
|
"loss": 0.5903, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.2944896115627823, |
|
"grad_norm": 0.22962794810239154, |
|
"learning_rate": 4.2384386118642696e-06, |
|
"loss": 0.6006, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.2962962962962963, |
|
"grad_norm": 0.227041646781334, |
|
"learning_rate": 4.227656622467162e-06, |
|
"loss": 0.609, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.2981029810298103, |
|
"grad_norm": 0.23339515173861125, |
|
"learning_rate": 4.216812769173371e-06, |
|
"loss": 0.6008, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.2999096657633243, |
|
"grad_norm": 0.2279892093631049, |
|
"learning_rate": 4.205907440279395e-06, |
|
"loss": 0.6067, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.3017163504968383, |
|
"grad_norm": 0.24349945347421037, |
|
"learning_rate": 4.194941026283053e-06, |
|
"loss": 0.6064, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.3035230352303523, |
|
"grad_norm": 0.2191845217404836, |
|
"learning_rate": 4.183913919869495e-06, |
|
"loss": 0.6039, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.3053297199638663, |
|
"grad_norm": 0.2180478057517194, |
|
"learning_rate": 4.172826515897146e-06, |
|
"loss": 0.597, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.3071364046973803, |
|
"grad_norm": 0.22148092498016186, |
|
"learning_rate": 4.161679211383565e-06, |
|
"loss": 0.6116, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.3089430894308943, |
|
"grad_norm": 0.21976711391650572, |
|
"learning_rate": 4.150472405491226e-06, |
|
"loss": 0.5937, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.3107497741644083, |
|
"grad_norm": 0.2359272043525047, |
|
"learning_rate": 4.139206499513231e-06, |
|
"loss": 0.6002, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.31255645889792233, |
|
"grad_norm": 0.20743057651505534, |
|
"learning_rate": 4.127881896858934e-06, |
|
"loss": 0.6036, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.3143631436314363, |
|
"grad_norm": 0.2185478761510027, |
|
"learning_rate": 4.116499003039499e-06, |
|
"loss": 0.5947, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.31616982836495033, |
|
"grad_norm": 0.24479063305435536, |
|
"learning_rate": 4.105058225653381e-06, |
|
"loss": 0.6, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.3179765130984643, |
|
"grad_norm": 0.22541665254858562, |
|
"learning_rate": 4.093559974371725e-06, |
|
"loss": 0.6137, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.31978319783197834, |
|
"grad_norm": 0.23354779603092837, |
|
"learning_rate": 4.0820046609237026e-06, |
|
"loss": 0.5881, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.3215898825654923, |
|
"grad_norm": 0.22004453141007924, |
|
"learning_rate": 4.070392699081767e-06, |
|
"loss": 0.5948, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.32339656729900634, |
|
"grad_norm": 0.22301964541218125, |
|
"learning_rate": 4.058724504646834e-06, |
|
"loss": 0.6054, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.3252032520325203, |
|
"grad_norm": 0.23125154097212539, |
|
"learning_rate": 4.047000495433397e-06, |
|
"loss": 0.6135, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.32700993676603435, |
|
"grad_norm": 0.2194814968746533, |
|
"learning_rate": 4.035221091254563e-06, |
|
"loss": 0.5902, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.3288166214995483, |
|
"grad_norm": 0.22257729627885203, |
|
"learning_rate": 4.023386713907021e-06, |
|
"loss": 0.6005, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.33062330623306235, |
|
"grad_norm": 0.22264840616047507, |
|
"learning_rate": 4.011497787155938e-06, |
|
"loss": 0.6123, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.3324299909665763, |
|
"grad_norm": 0.22891861604269678, |
|
"learning_rate": 3.999554736719785e-06, |
|
"loss": 0.6093, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.33423667570009036, |
|
"grad_norm": 0.2368591129219394, |
|
"learning_rate": 3.987557990255093e-06, |
|
"loss": 0.6054, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.33604336043360433, |
|
"grad_norm": 0.2483359395611164, |
|
"learning_rate": 3.975507977341141e-06, |
|
"loss": 0.6011, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.33785004516711836, |
|
"grad_norm": 0.20257786753801169, |
|
"learning_rate": 3.963405129464569e-06, |
|
"loss": 0.5983, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.33965672990063234, |
|
"grad_norm": 0.2186759693689697, |
|
"learning_rate": 3.951249880003934e-06, |
|
"loss": 0.5973, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.34146341463414637, |
|
"grad_norm": 0.21876513682058207, |
|
"learning_rate": 3.939042664214185e-06, |
|
"loss": 0.6052, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.34327009936766034, |
|
"grad_norm": 0.2099916515113305, |
|
"learning_rate": 3.92678391921108e-06, |
|
"loss": 0.5936, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.34507678410117437, |
|
"grad_norm": 0.2343120602972887, |
|
"learning_rate": 3.914474083955537e-06, |
|
"loss": 0.592, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.34688346883468835, |
|
"grad_norm": 0.20255138476243217, |
|
"learning_rate": 3.902113599237911e-06, |
|
"loss": 0.5986, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.3486901535682023, |
|
"grad_norm": 0.20455659875517468, |
|
"learning_rate": 3.889702907662212e-06, |
|
"loss": 0.6009, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.35049683830171635, |
|
"grad_norm": 0.22151992117493785, |
|
"learning_rate": 3.8772424536302565e-06, |
|
"loss": 0.6022, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.3523035230352303, |
|
"grad_norm": 0.23833014531149102, |
|
"learning_rate": 3.864732683325755e-06, |
|
"loss": 0.5856, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.35411020776874436, |
|
"grad_norm": 0.2139415037008631, |
|
"learning_rate": 3.852174044698333e-06, |
|
"loss": 0.5989, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.35591689250225833, |
|
"grad_norm": 0.2194749540116677, |
|
"learning_rate": 3.839566987447492e-06, |
|
"loss": 0.598, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.35772357723577236, |
|
"grad_norm": 0.2381302592790127, |
|
"learning_rate": 3.826911963006508e-06, |
|
"loss": 0.5752, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.35953026196928634, |
|
"grad_norm": 0.23075119894415708, |
|
"learning_rate": 3.8142094245262617e-06, |
|
"loss": 0.5978, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.36133694670280037, |
|
"grad_norm": 0.23286158873769425, |
|
"learning_rate": 3.801459826859022e-06, |
|
"loss": 0.5877, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.36314363143631434, |
|
"grad_norm": 0.22495213943890388, |
|
"learning_rate": 3.788663626542146e-06, |
|
"loss": 0.5822, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.36495031616982837, |
|
"grad_norm": 0.22147751444154135, |
|
"learning_rate": 3.7758212817817406e-06, |
|
"loss": 0.6091, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.36675700090334235, |
|
"grad_norm": 0.23184305404944502, |
|
"learning_rate": 3.7629332524362532e-06, |
|
"loss": 0.5962, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.3685636856368564, |
|
"grad_norm": 0.23109985129728863, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 0.6088, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.37037037037037035, |
|
"grad_norm": 0.22131095632923808, |
|
"learning_rate": 3.7370219875866497e-06, |
|
"loss": 0.5985, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.3721770551038844, |
|
"grad_norm": 0.22292160877295167, |
|
"learning_rate": 3.7239996799126315e-06, |
|
"loss": 0.5862, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.37398373983739835, |
|
"grad_norm": 0.23049662951148336, |
|
"learning_rate": 3.7109335432805006e-06, |
|
"loss": 0.5943, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.3757904245709124, |
|
"grad_norm": 0.22653281643581213, |
|
"learning_rate": 3.697824045562238e-06, |
|
"loss": 0.5807, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.37759710930442636, |
|
"grad_norm": 0.2261566142221293, |
|
"learning_rate": 3.684671656182497e-06, |
|
"loss": 0.6027, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.3794037940379404, |
|
"grad_norm": 0.20736585757802733, |
|
"learning_rate": 3.671476846101797e-06, |
|
"loss": 0.5975, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.38121047877145436, |
|
"grad_norm": 0.26687108119675534, |
|
"learning_rate": 3.658240087799655e-06, |
|
"loss": 0.5995, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.3830171635049684, |
|
"grad_norm": 0.2701090110685354, |
|
"learning_rate": 3.644961855257669e-06, |
|
"loss": 0.608, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.38482384823848237, |
|
"grad_norm": 0.23937840184912948, |
|
"learning_rate": 3.6316426239425484e-06, |
|
"loss": 0.6063, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.3866305329719964, |
|
"grad_norm": 0.26506285623579445, |
|
"learning_rate": 3.6182828707890816e-06, |
|
"loss": 0.5925, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.3884372177055104, |
|
"grad_norm": 0.23169437753599226, |
|
"learning_rate": 3.6048830741830678e-06, |
|
"loss": 0.6193, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.3902439024390244, |
|
"grad_norm": 0.24107575514470772, |
|
"learning_rate": 3.5914437139441754e-06, |
|
"loss": 0.6084, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.3920505871725384, |
|
"grad_norm": 0.24497545746975022, |
|
"learning_rate": 3.5779652713087717e-06, |
|
"loss": 0.6081, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.3938572719060524, |
|
"grad_norm": 0.22341956530539367, |
|
"learning_rate": 3.564448228912682e-06, |
|
"loss": 0.6001, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.3956639566395664, |
|
"grad_norm": 0.23686461833528502, |
|
"learning_rate": 3.5508930707739143e-06, |
|
"loss": 0.5942, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.3974706413730804, |
|
"grad_norm": 0.22220451962583762, |
|
"learning_rate": 3.5373002822753217e-06, |
|
"loss": 0.5982, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3992773261065944, |
|
"grad_norm": 0.20991512264600337, |
|
"learning_rate": 3.523670350147227e-06, |
|
"loss": 0.586, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.4010840108401084, |
|
"grad_norm": 0.2597272978289921, |
|
"learning_rate": 3.510003762449988e-06, |
|
"loss": 0.6031, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.4028906955736224, |
|
"grad_norm": 0.23319091020778485, |
|
"learning_rate": 3.496301008556529e-06, |
|
"loss": 0.5861, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.4046973803071364, |
|
"grad_norm": 0.21740514214818824, |
|
"learning_rate": 3.4825625791348093e-06, |
|
"loss": 0.5917, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.4065040650406504, |
|
"grad_norm": 0.22499201448021908, |
|
"learning_rate": 3.4687889661302577e-06, |
|
"loss": 0.5951, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.4083107497741644, |
|
"grad_norm": 0.21928333843378897, |
|
"learning_rate": 3.454980662748156e-06, |
|
"loss": 0.6022, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.4101174345076784, |
|
"grad_norm": 0.24366650749602084, |
|
"learning_rate": 3.44113816343598e-06, |
|
"loss": 0.6071, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.41192411924119243, |
|
"grad_norm": 0.2475981006923509, |
|
"learning_rate": 3.4272619638656914e-06, |
|
"loss": 0.5936, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.4137308039747064, |
|
"grad_norm": 0.2153288321867736, |
|
"learning_rate": 3.4133525609159883e-06, |
|
"loss": 0.5953, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.41553748870822044, |
|
"grad_norm": 0.22686236074382107, |
|
"learning_rate": 3.399410452654518e-06, |
|
"loss": 0.5977, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.4173441734417344, |
|
"grad_norm": 0.229068504220945, |
|
"learning_rate": 3.3854361383200372e-06, |
|
"loss": 0.5982, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.41915085817524844, |
|
"grad_norm": 0.22116222448509554, |
|
"learning_rate": 3.3714301183045382e-06, |
|
"loss": 0.6009, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.4209575429087624, |
|
"grad_norm": 0.21437564493080197, |
|
"learning_rate": 3.357392894135329e-06, |
|
"loss": 0.5817, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.42276422764227645, |
|
"grad_norm": 0.21046827286991526, |
|
"learning_rate": 3.3433249684570757e-06, |
|
"loss": 0.5983, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.4245709123757904, |
|
"grad_norm": 0.21841757461269223, |
|
"learning_rate": 3.329226845013802e-06, |
|
"loss": 0.593, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.42637759710930445, |
|
"grad_norm": 0.22078792972966105, |
|
"learning_rate": 3.315099028630855e-06, |
|
"loss": 0.6048, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.4281842818428184, |
|
"grad_norm": 0.2198180395201429, |
|
"learning_rate": 3.3009420251968245e-06, |
|
"loss": 0.603, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.42999096657633246, |
|
"grad_norm": 0.22974521057955885, |
|
"learning_rate": 3.28675634164543e-06, |
|
"loss": 0.6122, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.43179765130984643, |
|
"grad_norm": 0.24212813694550406, |
|
"learning_rate": 3.272542485937369e-06, |
|
"loss": 0.5942, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.43360433604336046, |
|
"grad_norm": 0.2151106216359987, |
|
"learning_rate": 3.258300967042125e-06, |
|
"loss": 0.5947, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.43541102077687444, |
|
"grad_norm": 0.2523287732959785, |
|
"learning_rate": 3.2440322949197467e-06, |
|
"loss": 0.5927, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.4372177055103884, |
|
"grad_norm": 0.21564961729801052, |
|
"learning_rate": 3.229736980502584e-06, |
|
"loss": 0.5934, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.43902439024390244, |
|
"grad_norm": 0.2242395897057702, |
|
"learning_rate": 3.2154155356769922e-06, |
|
"loss": 0.5973, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.4408310749774164, |
|
"grad_norm": 0.2232376839835937, |
|
"learning_rate": 3.201068473265007e-06, |
|
"loss": 0.6029, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.44263775971093045, |
|
"grad_norm": 0.2192077460257132, |
|
"learning_rate": 3.186696307005976e-06, |
|
"loss": 0.5962, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.4444444444444444, |
|
"grad_norm": 0.2222570172571517, |
|
"learning_rate": 3.1722995515381644e-06, |
|
"loss": 0.6026, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.44625112917795845, |
|
"grad_norm": 0.21527201790189038, |
|
"learning_rate": 3.1578787223803296e-06, |
|
"loss": 0.5855, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.4480578139114724, |
|
"grad_norm": 0.35449983881422265, |
|
"learning_rate": 3.1434343359132565e-06, |
|
"loss": 0.6037, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.44986449864498645, |
|
"grad_norm": 0.2062475229994345, |
|
"learning_rate": 3.128966909361272e-06, |
|
"loss": 0.5867, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.45167118337850043, |
|
"grad_norm": 0.21960764548448194, |
|
"learning_rate": 3.1144769607737204e-06, |
|
"loss": 0.6011, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.45347786811201446, |
|
"grad_norm": 0.22438900737589454, |
|
"learning_rate": 3.099965009006415e-06, |
|
"loss": 0.5894, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.45528455284552843, |
|
"grad_norm": 0.21781256304179047, |
|
"learning_rate": 3.08543157370306e-06, |
|
"loss": 0.5705, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.45709123757904246, |
|
"grad_norm": 0.23539403534261766, |
|
"learning_rate": 3.0708771752766397e-06, |
|
"loss": 0.5933, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.45889792231255644, |
|
"grad_norm": 0.2329715923142623, |
|
"learning_rate": 3.056302334890786e-06, |
|
"loss": 0.5919, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.46070460704607047, |
|
"grad_norm": 0.20731356483768837, |
|
"learning_rate": 3.041707574441118e-06, |
|
"loss": 0.5864, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.46251129177958444, |
|
"grad_norm": 0.23154663889322297, |
|
"learning_rate": 3.027093416536548e-06, |
|
"loss": 0.5868, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.4643179765130985, |
|
"grad_norm": 0.2206996661813995, |
|
"learning_rate": 3.0124603844805767e-06, |
|
"loss": 0.596, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.46612466124661245, |
|
"grad_norm": 0.2047968733465246, |
|
"learning_rate": 2.9978090022525456e-06, |
|
"loss": 0.5913, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.4679313459801265, |
|
"grad_norm": 0.21949609169081136, |
|
"learning_rate": 2.9831397944888833e-06, |
|
"loss": 0.5954, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.46973803071364045, |
|
"grad_norm": 0.2313444882097326, |
|
"learning_rate": 2.9684532864643123e-06, |
|
"loss": 0.6094, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.4715447154471545, |
|
"grad_norm": 0.23087971694617518, |
|
"learning_rate": 2.953750004073041e-06, |
|
"loss": 0.6008, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.47335140018066846, |
|
"grad_norm": 0.20279190342082626, |
|
"learning_rate": 2.9390304738099385e-06, |
|
"loss": 0.5897, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.4751580849141825, |
|
"grad_norm": 0.20638429907077857, |
|
"learning_rate": 2.9242952227516726e-06, |
|
"loss": 0.593, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.47696476964769646, |
|
"grad_norm": 0.22710491639329788, |
|
"learning_rate": 2.9095447785378446e-06, |
|
"loss": 0.6007, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.4787714543812105, |
|
"grad_norm": 0.21937325130622803, |
|
"learning_rate": 2.89477966935209e-06, |
|
"loss": 0.5811, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.48057813911472447, |
|
"grad_norm": 0.21143254384346213, |
|
"learning_rate": 2.8800004239031687e-06, |
|
"loss": 0.6052, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.4823848238482385, |
|
"grad_norm": 0.21519263645728862, |
|
"learning_rate": 2.8652075714060296e-06, |
|
"loss": 0.5895, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.48419150858175247, |
|
"grad_norm": 0.2241818383841725, |
|
"learning_rate": 2.850401641562865e-06, |
|
"loss": 0.5903, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.4859981933152665, |
|
"grad_norm": 0.2145645757032261, |
|
"learning_rate": 2.835583164544139e-06, |
|
"loss": 0.5958, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.4878048780487805, |
|
"grad_norm": 0.21634063420771613, |
|
"learning_rate": 2.820752670969606e-06, |
|
"loss": 0.5785, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.4896115627822945, |
|
"grad_norm": 0.22267426902820459, |
|
"learning_rate": 2.805910691889307e-06, |
|
"loss": 0.5996, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.4914182475158085, |
|
"grad_norm": 0.2209235755384397, |
|
"learning_rate": 2.791057758764557e-06, |
|
"loss": 0.5941, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.4932249322493225, |
|
"grad_norm": 0.21545901337093543, |
|
"learning_rate": 2.776194403448915e-06, |
|
"loss": 0.6022, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.4950316169828365, |
|
"grad_norm": 0.23880363865262091, |
|
"learning_rate": 2.761321158169134e-06, |
|
"loss": 0.5893, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.4968383017163505, |
|
"grad_norm": 0.20265741752620156, |
|
"learning_rate": 2.7464385555061092e-06, |
|
"loss": 0.5842, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.4986449864498645, |
|
"grad_norm": 0.19957842808608858, |
|
"learning_rate": 2.731547128375804e-06, |
|
"loss": 0.5765, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.5004516711833785, |
|
"grad_norm": 0.24313154793244834, |
|
"learning_rate": 2.7166474100101676e-06, |
|
"loss": 0.5889, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.5022583559168925, |
|
"grad_norm": 0.21578072503135698, |
|
"learning_rate": 2.7017399339380435e-06, |
|
"loss": 0.5752, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.5040650406504065, |
|
"grad_norm": 0.22695165367196, |
|
"learning_rate": 2.686825233966061e-06, |
|
"loss": 0.5901, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.5058717253839206, |
|
"grad_norm": 0.22124104537145198, |
|
"learning_rate": 2.6719038441595236e-06, |
|
"loss": 0.5865, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.5076784101174345, |
|
"grad_norm": 0.20876329166600804, |
|
"learning_rate": 2.6569762988232838e-06, |
|
"loss": 0.5875, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.5094850948509485, |
|
"grad_norm": 0.21423770322129848, |
|
"learning_rate": 2.642043132482612e-06, |
|
"loss": 0.5805, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.5112917795844625, |
|
"grad_norm": 0.21416739364725013, |
|
"learning_rate": 2.6271048798640547e-06, |
|
"loss": 0.5979, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.5130984643179766, |
|
"grad_norm": 0.20856984547372137, |
|
"learning_rate": 2.6121620758762877e-06, |
|
"loss": 0.5854, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.5149051490514905, |
|
"grad_norm": 0.20653823718614767, |
|
"learning_rate": 2.5972152555909625e-06, |
|
"loss": 0.5777, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.5167118337850045, |
|
"grad_norm": 0.20566248008855165, |
|
"learning_rate": 2.5822649542235468e-06, |
|
"loss": 0.5784, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.5185185185185185, |
|
"grad_norm": 0.2256337819483314, |
|
"learning_rate": 2.5673117071141574e-06, |
|
"loss": 0.6006, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.5203252032520326, |
|
"grad_norm": 0.2187486752392677, |
|
"learning_rate": 2.5523560497083927e-06, |
|
"loss": 0.5963, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.5221318879855466, |
|
"grad_norm": 0.2256640463012208, |
|
"learning_rate": 2.5373985175381595e-06, |
|
"loss": 0.5806, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.5239385727190605, |
|
"grad_norm": 0.20558830059411975, |
|
"learning_rate": 2.522439646202495e-06, |
|
"loss": 0.5927, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.5257452574525745, |
|
"grad_norm": 0.21091753896787113, |
|
"learning_rate": 2.507479971348391e-06, |
|
"loss": 0.5974, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.5275519421860885, |
|
"grad_norm": 0.20163177164774673, |
|
"learning_rate": 2.49252002865161e-06, |
|
"loss": 0.5999, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.5293586269196026, |
|
"grad_norm": 0.21301882714221618, |
|
"learning_rate": 2.4775603537975055e-06, |
|
"loss": 0.5943, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.5311653116531165, |
|
"grad_norm": 0.21486758628846855, |
|
"learning_rate": 2.4626014824618418e-06, |
|
"loss": 0.5855, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.5329719963866305, |
|
"grad_norm": 0.20752918302238268, |
|
"learning_rate": 2.447643950291608e-06, |
|
"loss": 0.5843, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.5347786811201445, |
|
"grad_norm": 0.21785997768890877, |
|
"learning_rate": 2.4326882928858435e-06, |
|
"loss": 0.5868, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.5365853658536586, |
|
"grad_norm": 0.21841154134063173, |
|
"learning_rate": 2.417735045776453e-06, |
|
"loss": 0.5935, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.5383920505871725, |
|
"grad_norm": 0.20318141655763375, |
|
"learning_rate": 2.4027847444090384e-06, |
|
"loss": 0.5873, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.5401987353206865, |
|
"grad_norm": 0.3832839441010807, |
|
"learning_rate": 2.3878379241237136e-06, |
|
"loss": 0.604, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.5420054200542005, |
|
"grad_norm": 0.2105425262709393, |
|
"learning_rate": 2.372895120135946e-06, |
|
"loss": 0.5917, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.5438121047877146, |
|
"grad_norm": 0.21755593789778102, |
|
"learning_rate": 2.3579568675173894e-06, |
|
"loss": 0.5917, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.5456187895212286, |
|
"grad_norm": 0.19989127666575213, |
|
"learning_rate": 2.3430237011767166e-06, |
|
"loss": 0.6047, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.5474254742547425, |
|
"grad_norm": 0.2055760488608315, |
|
"learning_rate": 2.3280961558404773e-06, |
|
"loss": 0.5859, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.5492321589882565, |
|
"grad_norm": 0.2121939170236, |
|
"learning_rate": 2.3131747660339396e-06, |
|
"loss": 0.581, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.5510388437217706, |
|
"grad_norm": 0.21164112959432857, |
|
"learning_rate": 2.2982600660619574e-06, |
|
"loss": 0.5872, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.5528455284552846, |
|
"grad_norm": 0.20304982313955747, |
|
"learning_rate": 2.2833525899898324e-06, |
|
"loss": 0.5851, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.5546522131887985, |
|
"grad_norm": 0.2144496646482213, |
|
"learning_rate": 2.268452871624197e-06, |
|
"loss": 0.6032, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.5564588979223125, |
|
"grad_norm": 0.20431153355938347, |
|
"learning_rate": 2.253561444493891e-06, |
|
"loss": 0.5824, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.5582655826558266, |
|
"grad_norm": 0.21664720003578983, |
|
"learning_rate": 2.238678841830867e-06, |
|
"loss": 0.5915, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.5600722673893406, |
|
"grad_norm": 0.20197470495761588, |
|
"learning_rate": 2.2238055965510853e-06, |
|
"loss": 0.5751, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.5618789521228545, |
|
"grad_norm": 0.4074972564492422, |
|
"learning_rate": 2.2089422412354434e-06, |
|
"loss": 0.5929, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.5636856368563685, |
|
"grad_norm": 0.19998974102467304, |
|
"learning_rate": 2.1940893081106946e-06, |
|
"loss": 0.5923, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.5654923215898826, |
|
"grad_norm": 0.2117759167471859, |
|
"learning_rate": 2.179247329030395e-06, |
|
"loss": 0.5946, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.5672990063233966, |
|
"grad_norm": 0.2017003806183738, |
|
"learning_rate": 2.1644168354558623e-06, |
|
"loss": 0.5859, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.5691056910569106, |
|
"grad_norm": 0.20263926965950277, |
|
"learning_rate": 2.1495983584371354e-06, |
|
"loss": 0.5855, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.5709123757904245, |
|
"grad_norm": 0.20684496610144393, |
|
"learning_rate": 2.134792428593971e-06, |
|
"loss": 0.5831, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.5727190605239386, |
|
"grad_norm": 0.2144432554994338, |
|
"learning_rate": 2.119999576096832e-06, |
|
"loss": 0.5774, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.5745257452574526, |
|
"grad_norm": 0.21247466919295185, |
|
"learning_rate": 2.1052203306479108e-06, |
|
"loss": 0.5896, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.5763324299909666, |
|
"grad_norm": 0.21422659294493748, |
|
"learning_rate": 2.090455221462156e-06, |
|
"loss": 0.5993, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.5781391147244805, |
|
"grad_norm": 0.22762142116566894, |
|
"learning_rate": 2.0757047772483278e-06, |
|
"loss": 0.583, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.5799457994579946, |
|
"grad_norm": 0.20743085860376967, |
|
"learning_rate": 2.0609695261900624e-06, |
|
"loss": 0.5784, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.5817524841915086, |
|
"grad_norm": 0.21691881173857994, |
|
"learning_rate": 2.0462499959269596e-06, |
|
"loss": 0.6002, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.5835591689250226, |
|
"grad_norm": 0.22483397993289836, |
|
"learning_rate": 2.031546713535688e-06, |
|
"loss": 0.5857, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.5853658536585366, |
|
"grad_norm": 0.1941832295922758, |
|
"learning_rate": 2.0168602055111175e-06, |
|
"loss": 0.5657, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.5871725383920506, |
|
"grad_norm": 0.2106447854766266, |
|
"learning_rate": 2.0021909977474553e-06, |
|
"loss": 0.5842, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.5889792231255646, |
|
"grad_norm": 0.20623748048616064, |
|
"learning_rate": 1.987539615519424e-06, |
|
"loss": 0.5928, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.5907859078590786, |
|
"grad_norm": 0.24434345791680295, |
|
"learning_rate": 1.9729065834634533e-06, |
|
"loss": 0.5808, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.5925925925925926, |
|
"grad_norm": 0.2091268010269944, |
|
"learning_rate": 1.958292425558883e-06, |
|
"loss": 0.5863, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.5943992773261066, |
|
"grad_norm": 0.2290855141568565, |
|
"learning_rate": 1.9436976651092143e-06, |
|
"loss": 0.5778, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.5962059620596206, |
|
"grad_norm": 0.2326519271370711, |
|
"learning_rate": 1.9291228247233607e-06, |
|
"loss": 0.5847, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.5980126467931346, |
|
"grad_norm": 0.21071088791420517, |
|
"learning_rate": 1.9145684262969404e-06, |
|
"loss": 0.5901, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.5998193315266486, |
|
"grad_norm": 0.19854103741318652, |
|
"learning_rate": 1.9000349909935852e-06, |
|
"loss": 0.5721, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.6016260162601627, |
|
"grad_norm": 0.19972103169844774, |
|
"learning_rate": 1.8855230392262809e-06, |
|
"loss": 0.5929, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.6034327009936766, |
|
"grad_norm": 0.20709957794921252, |
|
"learning_rate": 1.8710330906387288e-06, |
|
"loss": 0.6057, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.6052393857271906, |
|
"grad_norm": 0.21536606128137195, |
|
"learning_rate": 1.8565656640867448e-06, |
|
"loss": 0.5879, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.6070460704607046, |
|
"grad_norm": 0.20178356038933531, |
|
"learning_rate": 1.8421212776196712e-06, |
|
"loss": 0.5854, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.6088527551942186, |
|
"grad_norm": 0.2014990307576943, |
|
"learning_rate": 1.827700448461836e-06, |
|
"loss": 0.5863, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.6106594399277326, |
|
"grad_norm": 0.2136062417554571, |
|
"learning_rate": 1.813303692994025e-06, |
|
"loss": 0.5822, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.6124661246612466, |
|
"grad_norm": 0.2019958187136403, |
|
"learning_rate": 1.7989315267349936e-06, |
|
"loss": 0.591, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.6142728093947606, |
|
"grad_norm": 0.1964885288651704, |
|
"learning_rate": 1.7845844643230086e-06, |
|
"loss": 0.5865, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.6160794941282746, |
|
"grad_norm": 0.1981630352126773, |
|
"learning_rate": 1.770263019497417e-06, |
|
"loss": 0.5845, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.6178861788617886, |
|
"grad_norm": 0.2088515893373291, |
|
"learning_rate": 1.7559677050802543e-06, |
|
"loss": 0.5882, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.6196928635953026, |
|
"grad_norm": 0.20763167733521767, |
|
"learning_rate": 1.7416990329578753e-06, |
|
"loss": 0.5825, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.6214995483288166, |
|
"grad_norm": 0.22467403511881023, |
|
"learning_rate": 1.7274575140626318e-06, |
|
"loss": 0.572, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.6233062330623306, |
|
"grad_norm": 0.21203843083401738, |
|
"learning_rate": 1.7132436583545703e-06, |
|
"loss": 0.5866, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.6251129177958447, |
|
"grad_norm": 0.22023981214278937, |
|
"learning_rate": 1.699057974803176e-06, |
|
"loss": 0.5884, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.6269196025293586, |
|
"grad_norm": 0.20345704072148607, |
|
"learning_rate": 1.6849009713691456e-06, |
|
"loss": 0.5729, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.6287262872628726, |
|
"grad_norm": 0.2203981009854613, |
|
"learning_rate": 1.670773154986199e-06, |
|
"loss": 0.5741, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.6305329719963866, |
|
"grad_norm": 0.2124430311627528, |
|
"learning_rate": 1.6566750315429254e-06, |
|
"loss": 0.5764, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.6323396567299007, |
|
"grad_norm": 0.2167604699186445, |
|
"learning_rate": 1.6426071058646718e-06, |
|
"loss": 0.5892, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.6341463414634146, |
|
"grad_norm": 0.2068136183136913, |
|
"learning_rate": 1.6285698816954626e-06, |
|
"loss": 0.5795, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.6359530261969286, |
|
"grad_norm": 0.22139771703636216, |
|
"learning_rate": 1.6145638616799636e-06, |
|
"loss": 0.5933, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.6377597109304426, |
|
"grad_norm": 0.2169072872024213, |
|
"learning_rate": 1.6005895473454836e-06, |
|
"loss": 0.5931, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.6395663956639567, |
|
"grad_norm": 0.19987512033116334, |
|
"learning_rate": 1.5866474390840126e-06, |
|
"loss": 0.5885, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.6413730803974707, |
|
"grad_norm": 0.1976920115301846, |
|
"learning_rate": 1.5727380361343103e-06, |
|
"loss": 0.5778, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.6431797651309846, |
|
"grad_norm": 0.20307084136655387, |
|
"learning_rate": 1.55886183656402e-06, |
|
"loss": 0.5932, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.6449864498644986, |
|
"grad_norm": 0.22040332423078723, |
|
"learning_rate": 1.545019337251844e-06, |
|
"loss": 0.5935, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.6467931345980127, |
|
"grad_norm": 0.19534686963161454, |
|
"learning_rate": 1.5312110338697427e-06, |
|
"loss": 0.5989, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.6485998193315267, |
|
"grad_norm": 0.2071188241938468, |
|
"learning_rate": 1.5174374208651913e-06, |
|
"loss": 0.5713, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.6504065040650406, |
|
"grad_norm": 0.22023603109435608, |
|
"learning_rate": 1.503698991443471e-06, |
|
"loss": 0.5799, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.6522131887985546, |
|
"grad_norm": 0.23279333269648594, |
|
"learning_rate": 1.489996237550012e-06, |
|
"loss": 0.5864, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.6540198735320687, |
|
"grad_norm": 0.21130573335881214, |
|
"learning_rate": 1.4763296498527744e-06, |
|
"loss": 0.5975, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.6558265582655827, |
|
"grad_norm": 0.21591708899052672, |
|
"learning_rate": 1.4626997177246787e-06, |
|
"loss": 0.5878, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.6576332429990966, |
|
"grad_norm": 0.2371469411885916, |
|
"learning_rate": 1.4491069292260867e-06, |
|
"loss": 0.5862, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.6594399277326106, |
|
"grad_norm": 0.20206187850887294, |
|
"learning_rate": 1.4355517710873184e-06, |
|
"loss": 0.5784, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.6612466124661247, |
|
"grad_norm": 0.20548129735299142, |
|
"learning_rate": 1.4220347286912296e-06, |
|
"loss": 0.6009, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.6630532971996387, |
|
"grad_norm": 0.20462764510073306, |
|
"learning_rate": 1.4085562860558256e-06, |
|
"loss": 0.5818, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.6648599819331527, |
|
"grad_norm": 0.21559927754232822, |
|
"learning_rate": 1.395116925816934e-06, |
|
"loss": 0.5937, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 0.24435009358387672, |
|
"learning_rate": 1.3817171292109182e-06, |
|
"loss": 0.5945, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.6684733514001807, |
|
"grad_norm": 0.20734944097931568, |
|
"learning_rate": 1.3683573760574526e-06, |
|
"loss": 0.5931, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.6702800361336947, |
|
"grad_norm": 0.28137987727489505, |
|
"learning_rate": 1.3550381447423317e-06, |
|
"loss": 0.5813, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.6720867208672087, |
|
"grad_norm": 0.20129121777849085, |
|
"learning_rate": 1.3417599122003464e-06, |
|
"loss": 0.5992, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.6738934056007226, |
|
"grad_norm": 0.207337984886545, |
|
"learning_rate": 1.3285231538982036e-06, |
|
"loss": 0.5848, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.6757000903342367, |
|
"grad_norm": 0.19414987785618634, |
|
"learning_rate": 1.3153283438175036e-06, |
|
"loss": 0.5734, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.6775067750677507, |
|
"grad_norm": 0.19139015126778156, |
|
"learning_rate": 1.3021759544377632e-06, |
|
"loss": 0.5889, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.6793134598012647, |
|
"grad_norm": 0.19455499325414877, |
|
"learning_rate": 1.2890664567195e-06, |
|
"loss": 0.5735, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.6811201445347786, |
|
"grad_norm": 0.20663774065898713, |
|
"learning_rate": 1.27600032008737e-06, |
|
"loss": 0.589, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.6829268292682927, |
|
"grad_norm": 0.19788629496456012, |
|
"learning_rate": 1.2629780124133511e-06, |
|
"loss": 0.5903, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.6847335140018067, |
|
"grad_norm": 0.20175651716363546, |
|
"learning_rate": 1.2500000000000007e-06, |
|
"loss": 0.592, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.6865401987353207, |
|
"grad_norm": 0.18819797058808696, |
|
"learning_rate": 1.2370667475637474e-06, |
|
"loss": 0.5733, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.6883468834688347, |
|
"grad_norm": 0.20251745573990865, |
|
"learning_rate": 1.2241787182182596e-06, |
|
"loss": 0.5893, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.6901535682023487, |
|
"grad_norm": 0.20807680817859822, |
|
"learning_rate": 1.2113363734578548e-06, |
|
"loss": 0.5771, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.6919602529358627, |
|
"grad_norm": 0.19645518245442806, |
|
"learning_rate": 1.1985401731409793e-06, |
|
"loss": 0.5966, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.6937669376693767, |
|
"grad_norm": 0.2079159019479432, |
|
"learning_rate": 1.185790575473738e-06, |
|
"loss": 0.5951, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.6955736224028907, |
|
"grad_norm": 0.19274863118289084, |
|
"learning_rate": 1.1730880369934933e-06, |
|
"loss": 0.5769, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.6973803071364046, |
|
"grad_norm": 0.18698223784520404, |
|
"learning_rate": 1.160433012552508e-06, |
|
"loss": 0.5881, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.6991869918699187, |
|
"grad_norm": 0.20644762996969238, |
|
"learning_rate": 1.1478259553016683e-06, |
|
"loss": 0.5883, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.7009936766034327, |
|
"grad_norm": 0.20354407357487309, |
|
"learning_rate": 1.1352673166742463e-06, |
|
"loss": 0.5878, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.7028003613369467, |
|
"grad_norm": 0.21972699427814146, |
|
"learning_rate": 1.122757546369744e-06, |
|
"loss": 0.5915, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.7046070460704607, |
|
"grad_norm": 0.21740455290435515, |
|
"learning_rate": 1.1102970923377893e-06, |
|
"loss": 0.5837, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.7064137308039747, |
|
"grad_norm": 0.2024883299761826, |
|
"learning_rate": 1.0978864007620896e-06, |
|
"loss": 0.5852, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.7082204155374887, |
|
"grad_norm": 0.2187579171935915, |
|
"learning_rate": 1.085525916044464e-06, |
|
"loss": 0.591, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.7100271002710027, |
|
"grad_norm": 0.20784813623988108, |
|
"learning_rate": 1.073216080788921e-06, |
|
"loss": 0.5954, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.7118337850045167, |
|
"grad_norm": 0.19102198360909103, |
|
"learning_rate": 1.0609573357858166e-06, |
|
"loss": 0.5969, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.7136404697380307, |
|
"grad_norm": 0.22162192671998815, |
|
"learning_rate": 1.048750119996066e-06, |
|
"loss": 0.5937, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.7154471544715447, |
|
"grad_norm": 0.25432100082751324, |
|
"learning_rate": 1.0365948705354309e-06, |
|
"loss": 0.5863, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.7172538392050587, |
|
"grad_norm": 0.24853077784213276, |
|
"learning_rate": 1.0244920226588599e-06, |
|
"loss": 0.5853, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.7190605239385727, |
|
"grad_norm": 0.20385116203175707, |
|
"learning_rate": 1.0124420097449077e-06, |
|
"loss": 0.5871, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.7208672086720868, |
|
"grad_norm": 0.18532845669355508, |
|
"learning_rate": 1.0004452632802158e-06, |
|
"loss": 0.5916, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.7226738934056007, |
|
"grad_norm": 0.20423244886055075, |
|
"learning_rate": 9.88502212844063e-07, |
|
"loss": 0.5872, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.7244805781391147, |
|
"grad_norm": 0.19509657230241026, |
|
"learning_rate": 9.7661328609298e-07, |
|
"loss": 0.5898, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.7262872628726287, |
|
"grad_norm": 0.21353802006223266, |
|
"learning_rate": 9.64778908745437e-07, |
|
"loss": 0.5976, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.7280939476061428, |
|
"grad_norm": 0.20435405015728278, |
|
"learning_rate": 9.529995045666041e-07, |
|
"loss": 0.5788, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.7299006323396567, |
|
"grad_norm": 0.1984529066802218, |
|
"learning_rate": 9.412754953531664e-07, |
|
"loss": 0.5827, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.7317073170731707, |
|
"grad_norm": 0.2167858934267235, |
|
"learning_rate": 9.296073009182341e-07, |
|
"loss": 0.5821, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.7335140018066847, |
|
"grad_norm": 0.1987523835045179, |
|
"learning_rate": 9.179953390762977e-07, |
|
"loss": 0.5889, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.7353206865401988, |
|
"grad_norm": 0.20697438352924746, |
|
"learning_rate": 9.064400256282757e-07, |
|
"loss": 0.5806, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.7371273712737128, |
|
"grad_norm": 0.19116589279660615, |
|
"learning_rate": 8.949417743466199e-07, |
|
"loss": 0.5754, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.7389340560072267, |
|
"grad_norm": 0.211250643585691, |
|
"learning_rate": 8.835009969605013e-07, |
|
"loss": 0.5927, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.7407407407407407, |
|
"grad_norm": 0.20536890873705252, |
|
"learning_rate": 8.721181031410661e-07, |
|
"loss": 0.5949, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.7425474254742548, |
|
"grad_norm": 0.2093940066655208, |
|
"learning_rate": 8.607935004867693e-07, |
|
"loss": 0.5963, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.7443541102077688, |
|
"grad_norm": 0.20232355314663383, |
|
"learning_rate": 8.495275945087744e-07, |
|
"loss": 0.5694, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.7461607949412827, |
|
"grad_norm": 0.19499772252681724, |
|
"learning_rate": 8.383207886164366e-07, |
|
"loss": 0.5859, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.7479674796747967, |
|
"grad_norm": 0.24905986363655266, |
|
"learning_rate": 8.271734841028553e-07, |
|
"loss": 0.5732, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.7497741644083108, |
|
"grad_norm": 0.1976495120915159, |
|
"learning_rate": 8.16086080130506e-07, |
|
"loss": 0.595, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.7515808491418248, |
|
"grad_norm": 0.20289395576214606, |
|
"learning_rate": 8.050589737169485e-07, |
|
"loss": 0.5811, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.7533875338753387, |
|
"grad_norm": 0.2131733098150311, |
|
"learning_rate": 7.940925597206053e-07, |
|
"loss": 0.6034, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.7551942186088527, |
|
"grad_norm": 0.2043801754256928, |
|
"learning_rate": 7.831872308266306e-07, |
|
"loss": 0.5856, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.7570009033423668, |
|
"grad_norm": 0.1875418775915756, |
|
"learning_rate": 7.723433775328385e-07, |
|
"loss": 0.5857, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.7588075880758808, |
|
"grad_norm": 0.1976789303893716, |
|
"learning_rate": 7.615613881357315e-07, |
|
"loss": 0.6002, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.7606142728093948, |
|
"grad_norm": 0.1914286809513274, |
|
"learning_rate": 7.508416487165862e-07, |
|
"loss": 0.5805, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.7624209575429087, |
|
"grad_norm": 0.22294210796830027, |
|
"learning_rate": 7.401845431276378e-07, |
|
"loss": 0.5842, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.7642276422764228, |
|
"grad_norm": 0.22041399450512092, |
|
"learning_rate": 7.295904529783265e-07, |
|
"loss": 0.5935, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.7660343270099368, |
|
"grad_norm": 0.19814565637399076, |
|
"learning_rate": 7.190597576216385e-07, |
|
"loss": 0.5975, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.7678410117434508, |
|
"grad_norm": 0.20032853547884555, |
|
"learning_rate": 7.085928341405193e-07, |
|
"loss": 0.576, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.7696476964769647, |
|
"grad_norm": 0.19451615175607623, |
|
"learning_rate": 6.98190057334375e-07, |
|
"loss": 0.5963, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.7714543812104788, |
|
"grad_norm": 0.24329319131853208, |
|
"learning_rate": 6.878517997056458e-07, |
|
"loss": 0.568, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.7732610659439928, |
|
"grad_norm": 0.21219438141394756, |
|
"learning_rate": 6.775784314464717e-07, |
|
"loss": 0.5886, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.7750677506775068, |
|
"grad_norm": 0.19342036608461366, |
|
"learning_rate": 6.673703204254348e-07, |
|
"loss": 0.5827, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.7768744354110207, |
|
"grad_norm": 0.19432449104766572, |
|
"learning_rate": 6.572278321743871e-07, |
|
"loss": 0.5849, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.7786811201445348, |
|
"grad_norm": 0.20880674647287806, |
|
"learning_rate": 6.471513298753634e-07, |
|
"loss": 0.594, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.7804878048780488, |
|
"grad_norm": 0.1880528634390492, |
|
"learning_rate": 6.371411743475717e-07, |
|
"loss": 0.5885, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.7822944896115628, |
|
"grad_norm": 0.20738544996334468, |
|
"learning_rate": 6.271977240344795e-07, |
|
"loss": 0.5973, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.7841011743450768, |
|
"grad_norm": 0.19621576251799466, |
|
"learning_rate": 6.17321334990973e-07, |
|
"loss": 0.5963, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.7859078590785907, |
|
"grad_norm": 0.19592692847981952, |
|
"learning_rate": 6.075123608706093e-07, |
|
"loss": 0.5728, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.7877145438121048, |
|
"grad_norm": 0.1896894091910412, |
|
"learning_rate": 5.97771152912954e-07, |
|
"loss": 0.587, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.7895212285456188, |
|
"grad_norm": 0.19300077047150074, |
|
"learning_rate": 5.880980599310041e-07, |
|
"loss": 0.585, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.7913279132791328, |
|
"grad_norm": 0.20695396686165002, |
|
"learning_rate": 5.784934282986956e-07, |
|
"loss": 0.5773, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.7931345980126467, |
|
"grad_norm": 0.24669040853690524, |
|
"learning_rate": 5.689576019385015e-07, |
|
"loss": 0.5941, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.7949412827461608, |
|
"grad_norm": 0.2416648334177664, |
|
"learning_rate": 5.59490922309118e-07, |
|
"loss": 0.5861, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.7967479674796748, |
|
"grad_norm": 0.19023570934613537, |
|
"learning_rate": 5.500937283932348e-07, |
|
"loss": 0.57, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.7985546522131888, |
|
"grad_norm": 0.19597698226304042, |
|
"learning_rate": 5.407663566854008e-07, |
|
"loss": 0.5801, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.8003613369467028, |
|
"grad_norm": 0.2132598888952329, |
|
"learning_rate": 5.3150914117997e-07, |
|
"loss": 0.588, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.8021680216802168, |
|
"grad_norm": 0.19785474755013488, |
|
"learning_rate": 5.223224133591475e-07, |
|
"loss": 0.5938, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.8039747064137308, |
|
"grad_norm": 0.20562314317318503, |
|
"learning_rate": 5.132065021811123e-07, |
|
"loss": 0.5816, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.8057813911472448, |
|
"grad_norm": 0.19393590998324234, |
|
"learning_rate": 5.041617340682467e-07, |
|
"loss": 0.5785, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.8075880758807588, |
|
"grad_norm": 0.19561218339143732, |
|
"learning_rate": 4.951884328954401e-07, |
|
"loss": 0.5839, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.8093947606142728, |
|
"grad_norm": 0.19261102405426736, |
|
"learning_rate": 4.862869199784984e-07, |
|
"loss": 0.5864, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.8112014453477868, |
|
"grad_norm": 0.19722138182817417, |
|
"learning_rate": 4.774575140626317e-07, |
|
"loss": 0.582, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.8130081300813008, |
|
"grad_norm": 0.19905614668622285, |
|
"learning_rate": 4.687005313110454e-07, |
|
"loss": 0.5921, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.8148148148148148, |
|
"grad_norm": 0.2192978730560753, |
|
"learning_rate": 4.600162852936171e-07, |
|
"loss": 0.5947, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.8166214995483289, |
|
"grad_norm": 0.19931575730564813, |
|
"learning_rate": 4.514050869756703e-07, |
|
"loss": 0.5898, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.8184281842818428, |
|
"grad_norm": 0.19512336370042827, |
|
"learning_rate": 4.4286724470683576e-07, |
|
"loss": 0.5818, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.8202348690153568, |
|
"grad_norm": 0.19982404753283103, |
|
"learning_rate": 4.344030642100133e-07, |
|
"loss": 0.5664, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.8220415537488708, |
|
"grad_norm": 0.2075498736745964, |
|
"learning_rate": 4.2601284857042263e-07, |
|
"loss": 0.5905, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.8238482384823849, |
|
"grad_norm": 0.19838773426671638, |
|
"learning_rate": 4.1769689822475147e-07, |
|
"loss": 0.5819, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.8256549232158988, |
|
"grad_norm": 0.20470638453793452, |
|
"learning_rate": 4.0945551095039837e-07, |
|
"loss": 0.5965, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.8274616079494128, |
|
"grad_norm": 0.20178722345870734, |
|
"learning_rate": 4.012889818548069e-07, |
|
"loss": 0.5888, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.8292682926829268, |
|
"grad_norm": 0.18729573179360015, |
|
"learning_rate": 3.931976033649021e-07, |
|
"loss": 0.5782, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.8310749774164409, |
|
"grad_norm": 0.1986072934668412, |
|
"learning_rate": 3.851816652166165e-07, |
|
"loss": 0.5881, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.8328816621499548, |
|
"grad_norm": 0.1881841489141097, |
|
"learning_rate": 3.772414544445163e-07, |
|
"loss": 0.5756, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.8346883468834688, |
|
"grad_norm": 0.20510840467823352, |
|
"learning_rate": 3.6937725537152277e-07, |
|
"loss": 0.5831, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.8364950316169828, |
|
"grad_norm": 0.18584350117057763, |
|
"learning_rate": 3.615893495987335e-07, |
|
"loss": 0.5879, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.8383017163504969, |
|
"grad_norm": 0.19674763846332158, |
|
"learning_rate": 3.538780159953348e-07, |
|
"loss": 0.5902, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.8401084010840109, |
|
"grad_norm": 0.20468627530762953, |
|
"learning_rate": 3.462435306886194e-07, |
|
"loss": 0.5997, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.8419150858175248, |
|
"grad_norm": 0.20528912287709225, |
|
"learning_rate": 3.3868616705409723e-07, |
|
"loss": 0.5836, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.8437217705510388, |
|
"grad_norm": 0.27273003628867537, |
|
"learning_rate": 3.312061957057061e-07, |
|
"loss": 0.5799, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.8455284552845529, |
|
"grad_norm": 0.19053099912349022, |
|
"learning_rate": 3.2380388448612437e-07, |
|
"loss": 0.5905, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.8473351400180669, |
|
"grad_norm": 0.19085010418502601, |
|
"learning_rate": 3.164794984571759e-07, |
|
"loss": 0.5793, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.8491418247515808, |
|
"grad_norm": 0.20111905938166816, |
|
"learning_rate": 3.092332998903416e-07, |
|
"loss": 0.5749, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.8509485094850948, |
|
"grad_norm": 0.20358849876251972, |
|
"learning_rate": 3.020655482573659e-07, |
|
"loss": 0.6049, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.8527551942186089, |
|
"grad_norm": 0.2008916336129198, |
|
"learning_rate": 2.949765002209698e-07, |
|
"loss": 0.5761, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.8545618789521229, |
|
"grad_norm": 0.20435420909912377, |
|
"learning_rate": 2.8796640962565374e-07, |
|
"loss": 0.5955, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.8563685636856369, |
|
"grad_norm": 0.18287280066629497, |
|
"learning_rate": 2.810355274886148e-07, |
|
"loss": 0.5907, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.8581752484191508, |
|
"grad_norm": 0.20714481331888074, |
|
"learning_rate": 2.7418410199075293e-07, |
|
"loss": 0.5805, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.8599819331526649, |
|
"grad_norm": 0.19382788901021236, |
|
"learning_rate": 2.674123784677868e-07, |
|
"loss": 0.5874, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.8617886178861789, |
|
"grad_norm": 0.18523438801234704, |
|
"learning_rate": 2.6072059940146775e-07, |
|
"loss": 0.5836, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.8635953026196929, |
|
"grad_norm": 0.2028457298614708, |
|
"learning_rate": 2.5410900441089905e-07, |
|
"loss": 0.5953, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.8654019873532068, |
|
"grad_norm": 0.2078467522572559, |
|
"learning_rate": 2.4757783024395244e-07, |
|
"loss": 0.5816, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.8672086720867209, |
|
"grad_norm": 0.1873967534012521, |
|
"learning_rate": 2.4112731076879254e-07, |
|
"loss": 0.5798, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.8690153568202349, |
|
"grad_norm": 0.1933251862519231, |
|
"learning_rate": 2.3475767696550327e-07, |
|
"loss": 0.5894, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.8708220415537489, |
|
"grad_norm": 0.19064894480648312, |
|
"learning_rate": 2.284691569178138e-07, |
|
"loss": 0.5931, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.8726287262872628, |
|
"grad_norm": 0.2284657581088162, |
|
"learning_rate": 2.2226197580493657e-07, |
|
"loss": 0.5779, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.8744354110207768, |
|
"grad_norm": 0.19935241220998284, |
|
"learning_rate": 2.1613635589349756e-07, |
|
"loss": 0.5757, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.8762420957542909, |
|
"grad_norm": 0.2053137455836231, |
|
"learning_rate": 2.100925165295839e-07, |
|
"loss": 0.5848, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.8780487804878049, |
|
"grad_norm": 0.2010408803052494, |
|
"learning_rate": 2.041306741308832e-07, |
|
"loss": 0.5912, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.8798554652213189, |
|
"grad_norm": 0.19347960149543364, |
|
"learning_rate": 1.982510421789402e-07, |
|
"loss": 0.5787, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.8816621499548328, |
|
"grad_norm": 0.19351069706418417, |
|
"learning_rate": 1.9245383121150678e-07, |
|
"loss": 0.5782, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.8834688346883469, |
|
"grad_norm": 0.18822543167978809, |
|
"learning_rate": 1.8673924881500826e-07, |
|
"loss": 0.5875, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.8852755194218609, |
|
"grad_norm": 0.19257655016560987, |
|
"learning_rate": 1.8110749961710582e-07, |
|
"loss": 0.5843, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.8870822041553749, |
|
"grad_norm": 0.25925387426203955, |
|
"learning_rate": 1.7555878527937164e-07, |
|
"loss": 0.5762, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.8888888888888888, |
|
"grad_norm": 0.19553638949563604, |
|
"learning_rate": 1.7009330449006712e-07, |
|
"loss": 0.5781, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.8906955736224029, |
|
"grad_norm": 0.1945143019656124, |
|
"learning_rate": 1.6471125295702773e-07, |
|
"loss": 0.5721, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.8925022583559169, |
|
"grad_norm": 0.264819060676782, |
|
"learning_rate": 1.59412823400657e-07, |
|
"loss": 0.5894, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.8943089430894309, |
|
"grad_norm": 0.19083307342352715, |
|
"learning_rate": 1.5419820554702313e-07, |
|
"loss": 0.586, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.8961156278229448, |
|
"grad_norm": 0.32477290657900737, |
|
"learning_rate": 1.4906758612106637e-07, |
|
"loss": 0.599, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.8979223125564589, |
|
"grad_norm": 0.2446280764829226, |
|
"learning_rate": 1.4402114883991318e-07, |
|
"loss": 0.5858, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.8997289972899729, |
|
"grad_norm": 0.20615033714545855, |
|
"learning_rate": 1.3905907440629752e-07, |
|
"loss": 0.5847, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.9015356820234869, |
|
"grad_norm": 0.2087451994496376, |
|
"learning_rate": 1.3418154050208937e-07, |
|
"loss": 0.5948, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.9033423667570009, |
|
"grad_norm": 0.3050011108253672, |
|
"learning_rate": 1.2938872178193395e-07, |
|
"loss": 0.5923, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.9051490514905149, |
|
"grad_norm": 0.2832626023760989, |
|
"learning_rate": 1.2468078986699633e-07, |
|
"loss": 0.59, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.9069557362240289, |
|
"grad_norm": 0.1893248345236461, |
|
"learning_rate": 1.200579133388155e-07, |
|
"loss": 0.586, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.9087624209575429, |
|
"grad_norm": 0.18631095112826992, |
|
"learning_rate": 1.1552025773327008e-07, |
|
"loss": 0.5814, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.9105691056910569, |
|
"grad_norm": 0.1825244758133897, |
|
"learning_rate": 1.1106798553464804e-07, |
|
"loss": 0.5969, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.912375790424571, |
|
"grad_norm": 0.19272725378716518, |
|
"learning_rate": 1.067012561698319e-07, |
|
"loss": 0.5681, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.9141824751580849, |
|
"grad_norm": 0.18360862506223993, |
|
"learning_rate": 1.0242022600258611e-07, |
|
"loss": 0.5887, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.9159891598915989, |
|
"grad_norm": 0.21234881666515093, |
|
"learning_rate": 9.822504832796037e-08, |
|
"loss": 0.5876, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.9177958446251129, |
|
"grad_norm": 0.19196260234968993, |
|
"learning_rate": 9.41158733667999e-08, |
|
"loss": 0.58, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.919602529358627, |
|
"grad_norm": 0.1969153672664882, |
|
"learning_rate": 9.00928482603669e-08, |
|
"loss": 0.5997, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.9214092140921409, |
|
"grad_norm": 0.1945863429430772, |
|
"learning_rate": 8.615611706507043e-08, |
|
"loss": 0.5827, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.9232158988256549, |
|
"grad_norm": 0.23778461680575155, |
|
"learning_rate": 8.230582074730902e-08, |
|
"loss": 0.573, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.9250225835591689, |
|
"grad_norm": 0.20332757728437817, |
|
"learning_rate": 7.854209717842231e-08, |
|
"loss": 0.6005, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.926829268292683, |
|
"grad_norm": 0.19387134731372843, |
|
"learning_rate": 7.486508112975549e-08, |
|
"loss": 0.5833, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.928635953026197, |
|
"grad_norm": 0.19782012258453638, |
|
"learning_rate": 7.127490426783124e-08, |
|
"loss": 0.5797, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.9304426377597109, |
|
"grad_norm": 0.19286615430131876, |
|
"learning_rate": 6.777169514963766e-08, |
|
"loss": 0.5844, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.9322493224932249, |
|
"grad_norm": 0.19637869096966293, |
|
"learning_rate": 6.435557921802254e-08, |
|
"loss": 0.5924, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.934056007226739, |
|
"grad_norm": 0.188554049011414, |
|
"learning_rate": 6.102667879720165e-08, |
|
"loss": 0.5799, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.935862691960253, |
|
"grad_norm": 0.20883892103413773, |
|
"learning_rate": 5.778511308838108e-08, |
|
"loss": 0.5829, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.9376693766937669, |
|
"grad_norm": 0.20437240656846004, |
|
"learning_rate": 5.463099816548578e-08, |
|
"loss": 0.5717, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.9394760614272809, |
|
"grad_norm": 0.19852353971692513, |
|
"learning_rate": 5.15644469710061e-08, |
|
"loss": 0.582, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.941282746160795, |
|
"grad_norm": 0.18095323109737302, |
|
"learning_rate": 4.8585569311949966e-08, |
|
"loss": 0.5889, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.943089430894309, |
|
"grad_norm": 0.18174076650377002, |
|
"learning_rate": 4.5694471855914915e-08, |
|
"loss": 0.5838, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.9448961156278229, |
|
"grad_norm": 0.18479803288886815, |
|
"learning_rate": 4.289125812726475e-08, |
|
"loss": 0.5748, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.9467028003613369, |
|
"grad_norm": 0.19193590197592694, |
|
"learning_rate": 4.017602850342584e-08, |
|
"loss": 0.5859, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.948509485094851, |
|
"grad_norm": 0.19470238994455222, |
|
"learning_rate": 3.754888021129083e-08, |
|
"loss": 0.5788, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.950316169828365, |
|
"grad_norm": 0.18966253565903646, |
|
"learning_rate": 3.5009907323737826e-08, |
|
"loss": 0.5839, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.952122854561879, |
|
"grad_norm": 0.20275158347015088, |
|
"learning_rate": 3.2559200756260845e-08, |
|
"loss": 0.5832, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.9539295392953929, |
|
"grad_norm": 0.18676114511717182, |
|
"learning_rate": 3.019684826371633e-08, |
|
"loss": 0.5801, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.955736224028907, |
|
"grad_norm": 0.2163227342300819, |
|
"learning_rate": 2.7922934437178695e-08, |
|
"loss": 0.5883, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.957542908762421, |
|
"grad_norm": 0.21344582125626332, |
|
"learning_rate": 2.5737540700912777e-08, |
|
"loss": 0.5813, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.959349593495935, |
|
"grad_norm": 0.18607944951873648, |
|
"learning_rate": 2.3640745309457258e-08, |
|
"loss": 0.5923, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.9611562782294489, |
|
"grad_norm": 0.18510297216059776, |
|
"learning_rate": 2.16326233448233e-08, |
|
"loss": 0.5807, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.9629629629629629, |
|
"grad_norm": 0.1974938236004036, |
|
"learning_rate": 1.9713246713805588e-08, |
|
"loss": 0.5858, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.964769647696477, |
|
"grad_norm": 0.20144502512514245, |
|
"learning_rate": 1.7882684145406616e-08, |
|
"loss": 0.5825, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.966576332429991, |
|
"grad_norm": 0.1905225782300761, |
|
"learning_rate": 1.6141001188378082e-08, |
|
"loss": 0.5827, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.9683830171635049, |
|
"grad_norm": 0.19016394964817498, |
|
"learning_rate": 1.4488260208871397e-08, |
|
"loss": 0.5929, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.9701897018970189, |
|
"grad_norm": 0.18423294596460907, |
|
"learning_rate": 1.2924520388204465e-08, |
|
"loss": 0.5987, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.971996386630533, |
|
"grad_norm": 0.20028347781639222, |
|
"learning_rate": 1.1449837720745039e-08, |
|
"loss": 0.5817, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.973803071364047, |
|
"grad_norm": 0.19564562368726848, |
|
"learning_rate": 1.006426501190233e-08, |
|
"loss": 0.5784, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.975609756097561, |
|
"grad_norm": 0.18942355645645967, |
|
"learning_rate": 8.767851876239075e-09, |
|
"loss": 0.5796, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.9774164408310749, |
|
"grad_norm": 0.1872475487831209, |
|
"learning_rate": 7.560644735692957e-09, |
|
"loss": 0.5884, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.979223125564589, |
|
"grad_norm": 0.199499510224137, |
|
"learning_rate": 6.442686817914878e-09, |
|
"loss": 0.5805, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.981029810298103, |
|
"grad_norm": 0.18125890965998775, |
|
"learning_rate": 5.414018154721868e-09, |
|
"loss": 0.5935, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.982836495031617, |
|
"grad_norm": 0.21667359886815266, |
|
"learning_rate": 4.474675580662113e-09, |
|
"loss": 0.593, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.9846431797651309, |
|
"grad_norm": 0.19857918487394038, |
|
"learning_rate": 3.6246927316976875e-09, |
|
"loss": 0.5964, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.986449864498645, |
|
"grad_norm": 0.1929371434279703, |
|
"learning_rate": 2.864100043998563e-09, |
|
"loss": 0.5862, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.988256549232159, |
|
"grad_norm": 0.18917095075953314, |
|
"learning_rate": 2.192924752854042e-09, |
|
"loss": 0.5983, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.990063233965673, |
|
"grad_norm": 0.19052822059983585, |
|
"learning_rate": 1.6111908916965902e-09, |
|
"loss": 0.5948, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.991869918699187, |
|
"grad_norm": 0.20501303397618248, |
|
"learning_rate": 1.1189192912416935e-09, |
|
"loss": 0.5892, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.993676603432701, |
|
"grad_norm": 0.19753256710279912, |
|
"learning_rate": 7.161275787415101e-10, |
|
"loss": 0.5795, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.995483288166215, |
|
"grad_norm": 0.1882439395222658, |
|
"learning_rate": 4.0283017735454066e-10, |
|
"loss": 0.5983, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.997289972899729, |
|
"grad_norm": 0.19218850747751368, |
|
"learning_rate": 1.790383056282652e-10, |
|
"loss": 0.5806, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.999096657633243, |
|
"grad_norm": 0.19150393638935695, |
|
"learning_rate": 4.4759977098074445e-11, |
|
"loss": 0.5844, |
|
"step": 553 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 553, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 641050695434240.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |