{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.999096657633243,
  "eval_steps": 500,
  "global_step": 553,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0018066847335140017, "grad_norm": 2.834148994241387, "learning_rate": 0.0, "loss": 0.8251, "step": 1 },
    { "epoch": 0.0036133694670280035, "grad_norm": 2.8814188855606915, "learning_rate": 3.5714285714285716e-07, "loss": 0.8284, "step": 2 },
    { "epoch": 0.005420054200542005, "grad_norm": 2.8877082225775945, "learning_rate": 7.142857142857143e-07, "loss": 0.8432, "step": 3 },
    { "epoch": 0.007226738934056007, "grad_norm": 2.84706824634251, "learning_rate": 1.0714285714285714e-06, "loss": 0.8395, "step": 4 },
    { "epoch": 0.009033423667570008, "grad_norm": 2.763620649396407, "learning_rate": 1.4285714285714286e-06, "loss": 0.8291, "step": 5 },
    { "epoch": 0.01084010840108401, "grad_norm": 2.7250326443499935, "learning_rate": 1.7857142857142859e-06, "loss": 0.8289, "step": 6 },
    { "epoch": 0.012646793134598013, "grad_norm": 2.5921911890865355, "learning_rate": 2.1428571428571427e-06, "loss": 0.8081, "step": 7 },
    { "epoch": 0.014453477868112014, "grad_norm": 2.5976860246328948, "learning_rate": 2.5e-06, "loss": 0.8152, "step": 8 },
    { "epoch": 0.016260162601626018, "grad_norm": 2.1600249112189367, "learning_rate": 2.8571428571428573e-06, "loss": 0.8041, "step": 9 },
    { "epoch": 0.018066847335140017, "grad_norm": 2.05676893822907, "learning_rate": 3.2142857142857147e-06, "loss": 0.7978, "step": 10 },
    { "epoch": 0.01987353206865402, "grad_norm": 1.5227088422406245, "learning_rate": 3.5714285714285718e-06, "loss": 0.7766, "step": 11 },
    { "epoch": 0.02168021680216802, "grad_norm": 1.4305134864471831, "learning_rate": 3.928571428571429e-06, "loss": 0.7865, "step": 12 },
    { "epoch": 0.023486901535682024, "grad_norm": 1.3403916222492462, "learning_rate": 4.2857142857142855e-06, "loss": 0.7734, "step": 13 },
    { "epoch": 0.025293586269196026, "grad_norm": 1.2438356291232484, "learning_rate": 4.642857142857144e-06, "loss": 0.7668, "step": 14 },
    { "epoch": 0.02710027100271003, "grad_norm": 1.4470043220956499, "learning_rate": 5e-06, "loss": 0.7518, "step": 15 },
    { "epoch": 0.028906955736224028, "grad_norm": 1.6508105965347686, "learning_rate": 5.357142857142857e-06, "loss": 0.7428, "step": 16 },
    { "epoch": 0.03071364046973803, "grad_norm": 1.8197388085593902, "learning_rate": 5.7142857142857145e-06, "loss": 0.7504, "step": 17 },
    { "epoch": 0.032520325203252036, "grad_norm": 1.6628024493622566, "learning_rate": 6.071428571428571e-06, "loss": 0.7406, "step": 18 },
    { "epoch": 0.03432700993676603, "grad_norm": 1.3996342649224327, "learning_rate": 6.4285714285714295e-06, "loss": 0.7317, "step": 19 },
    { "epoch": 0.036133694670280034, "grad_norm": 0.8529424931984829, "learning_rate": 6.785714285714287e-06, "loss": 0.7029, "step": 20 },
    { "epoch": 0.037940379403794036, "grad_norm": 1.012307815845279, "learning_rate": 7.1428571428571436e-06, "loss": 0.7052, "step": 21 },
    { "epoch": 0.03974706413730804, "grad_norm": 0.996083810328283, "learning_rate": 7.500000000000001e-06, "loss": 0.6937, "step": 22 },
    { "epoch": 0.04155374887082204, "grad_norm": 0.9027655260544101, "learning_rate": 7.857142857142858e-06, "loss": 0.691, "step": 23 },
    { "epoch": 0.04336043360433604, "grad_norm": 1.9068544177754214, "learning_rate": 8.214285714285714e-06, "loss": 0.697, "step": 24 },
    { "epoch": 0.045167118337850046, "grad_norm": 0.7710305137202722, "learning_rate": 8.571428571428571e-06, "loss": 0.677, "step": 25 },
    { "epoch": 0.04697380307136405, "grad_norm": 0.6673976579110235, "learning_rate": 8.92857142857143e-06, "loss": 0.6911, "step": 26 },
    { "epoch": 0.04878048780487805, "grad_norm": 0.6017238871929963, "learning_rate": 9.285714285714288e-06, "loss": 0.667, "step": 27 },
    { "epoch": 0.05058717253839205, "grad_norm": 0.6176897562760387, "learning_rate": 9.642857142857144e-06, "loss": 0.6685, "step": 28 },
    { "epoch": 0.052393857271906055, "grad_norm": 0.5460140411641373, "learning_rate": 1e-05, "loss": 0.6788, "step": 29 },
    { "epoch": 0.05420054200542006, "grad_norm": 0.5024441002282133, "learning_rate": 9.999910480045805e-06, "loss": 0.6776, "step": 30 },
    { "epoch": 0.05600722673893405, "grad_norm": 0.5327568235598038, "learning_rate": 9.999641923388745e-06, "loss": 0.666, "step": 31 },
    { "epoch": 0.057813911472448055, "grad_norm": 0.45860832293770903, "learning_rate": 9.999194339645292e-06, "loss": 0.6537, "step": 32 },
    { "epoch": 0.05962059620596206, "grad_norm": 0.462200098378075, "learning_rate": 9.998567744842518e-06, "loss": 0.6639, "step": 33 },
    { "epoch": 0.06142728093947606, "grad_norm": 0.44108437546829815, "learning_rate": 9.997762161417517e-06, "loss": 0.6507, "step": 34 },
    { "epoch": 0.06323396567299007, "grad_norm": 0.4056783549136984, "learning_rate": 9.996777618216608e-06, "loss": 0.6559, "step": 35 },
    { "epoch": 0.06504065040650407, "grad_norm": 0.3860777271649062, "learning_rate": 9.995614150494293e-06, "loss": 0.6503, "step": 36 },
    { "epoch": 0.06684733514001806, "grad_norm": 0.434533435588928, "learning_rate": 9.994271799912004e-06, "loss": 0.6541, "step": 37 },
    { "epoch": 0.06865401987353206, "grad_norm": 0.4371779969673632, "learning_rate": 9.992750614536606e-06, "loss": 0.6469, "step": 38 },
    { "epoch": 0.07046070460704607, "grad_norm": 0.36457045763018364, "learning_rate": 9.991050648838676e-06, "loss": 0.6475, "step": 39 },
    { "epoch": 0.07226738934056007, "grad_norm": 0.322635334253873, "learning_rate": 9.989171963690556e-06, "loss": 0.6366, "step": 40 },
    { "epoch": 0.07407407407407407, "grad_norm": 0.35059303076021425, "learning_rate": 9.987114626364172e-06, "loss": 0.6431, "step": 41 },
    { "epoch": 0.07588075880758807, "grad_norm": 0.36612621878509766, "learning_rate": 9.984878710528615e-06, "loss": 0.6339, "step": 42 },
    { "epoch": 0.07768744354110207, "grad_norm": 0.3365894359412745, "learning_rate": 9.982464296247523e-06, "loss": 0.6343, "step": 43 },
    { "epoch": 0.07949412827461608, "grad_norm": 0.3035590675895417, "learning_rate": 9.979871469976197e-06, "loss": 0.6275, "step": 44 },
    { "epoch": 0.08130081300813008, "grad_norm": 0.2858077116353655, "learning_rate": 9.97710032455851e-06, "loss": 0.6344, "step": 45 },
    { "epoch": 0.08310749774164408, "grad_norm": 0.2752507660143639, "learning_rate": 9.974150959223591e-06, "loss": 0.6362, "step": 46 },
    { "epoch": 0.08491418247515808, "grad_norm": 0.3109343353140871, "learning_rate": 9.971023479582258e-06, "loss": 0.6389, "step": 47 },
    { "epoch": 0.08672086720867209, "grad_norm": 0.27841301256018586, "learning_rate": 9.967717997623245e-06, "loss": 0.6256, "step": 48 },
    { "epoch": 0.08852755194218609, "grad_norm": 0.3002752587221535, "learning_rate": 9.964234631709188e-06, "loss": 0.6316, "step": 49 },
    { "epoch": 0.09033423667570009, "grad_norm": 0.24995397436718791, "learning_rate": 9.960573506572391e-06, "loss": 0.6303, "step": 50 },
    { "epoch": 0.0921409214092141, "grad_norm": 0.2592033541282412, "learning_rate": 9.956734753310355e-06, "loss": 0.6199, "step": 51 },
    { "epoch": 0.0939476061427281, "grad_norm": 0.26267469782137715, "learning_rate": 9.952718509381086e-06, "loss": 0.6381, "step": 52 },
    { "epoch": 0.0957542908762421, "grad_norm": 0.2841156370165673, "learning_rate": 9.948524918598175e-06, "loss": 0.6223, "step": 53 },
    { "epoch": 0.0975609756097561, "grad_norm": 0.2602382902911376, "learning_rate": 9.944154131125643e-06, "loss": 0.613, "step": 54 },
    { "epoch": 0.0993676603432701, "grad_norm": 0.2548980055658011, "learning_rate": 9.93960630347257e-06, "loss": 0.6265, "step": 55 },
    { "epoch": 0.1011743450767841, "grad_norm": 0.23755160689461077, "learning_rate": 9.934881598487478e-06, "loss": 0.6318, "step": 56 },
    { "epoch": 0.10298102981029811, "grad_norm": 0.24890312911017654, "learning_rate": 9.929980185352525e-06, "loss": 0.6175, "step": 57 },
    { "epoch": 0.10478771454381211, "grad_norm": 0.2773541758032373, "learning_rate": 9.924902239577419e-06, "loss": 0.6253, "step": 58 },
    { "epoch": 0.10659439927732611, "grad_norm": 0.25242119733836627, "learning_rate": 9.91964794299315e-06, "loss": 0.611, "step": 59 },
    { "epoch": 0.10840108401084012, "grad_norm": 0.2684984552226452, "learning_rate": 9.914217483745472e-06, "loss": 0.6119, "step": 60 },
    { "epoch": 0.1102077687443541, "grad_norm": 0.2574590656590093, "learning_rate": 9.90861105628817e-06, "loss": 0.6159, "step": 61 },
    { "epoch": 0.1120144534778681, "grad_norm": 0.25603682816247697, "learning_rate": 9.902828861376101e-06, "loss": 0.621, "step": 62 },
    { "epoch": 0.11382113821138211, "grad_norm": 0.24731176189318566, "learning_rate": 9.896871106057989e-06, "loss": 0.6205, "step": 63 },
    { "epoch": 0.11562782294489611, "grad_norm": 0.27598163635496337, "learning_rate": 9.890738003669029e-06, "loss": 0.6188, "step": 64 },
    { "epoch": 0.11743450767841011, "grad_norm": 0.26447533420584596, "learning_rate": 9.884429773823238e-06, "loss": 0.6134, "step": 65 },
    { "epoch": 0.11924119241192412, "grad_norm": 0.2764756892440485, "learning_rate": 9.877946642405598e-06, "loss": 0.6153, "step": 66 },
    { "epoch": 0.12104787714543812, "grad_norm": 0.2878202327908946, "learning_rate": 9.871288841563956e-06, "loss": 0.6057, "step": 67 },
    { "epoch": 0.12285456187895212, "grad_norm": 0.25954176321509254, "learning_rate": 9.864456609700726e-06, "loss": 0.6213, "step": 68 },
    { "epoch": 0.12466124661246612, "grad_norm": 0.25912508931378314, "learning_rate": 9.857450191464337e-06, "loss": 0.6233, "step": 69 },
    { "epoch": 0.12646793134598014, "grad_norm": 0.2698506612904357, "learning_rate": 9.85026983774049e-06, "loss": 0.6285, "step": 70 },
    { "epoch": 0.12827461607949414, "grad_norm": 0.23787370000958719, "learning_rate": 9.842915805643156e-06, "loss": 0.5996, "step": 71 },
    { "epoch": 0.13008130081300814, "grad_norm": 0.21973413712743903, "learning_rate": 9.835388358505383e-06, "loss": 0.6171, "step": 72 },
    { "epoch": 0.13188798554652212, "grad_norm": 0.2542097049285353, "learning_rate": 9.827687765869859e-06, "loss": 0.6159, "step": 73 },
    { "epoch": 0.13369467028003612, "grad_norm": 0.28885915694073955, "learning_rate": 9.819814303479268e-06, "loss": 0.6081, "step": 74 },
    { "epoch": 0.13550135501355012, "grad_norm": 0.2732542861288398, "learning_rate": 9.811768253266401e-06, "loss": 0.606, "step": 75 },
    { "epoch": 0.13730803974706413, "grad_norm": 0.2639987596902754, "learning_rate": 9.803549903344081e-06, "loss": 0.6016, "step": 76 },
    { "epoch": 0.13911472448057813, "grad_norm": 0.2625743746400295, "learning_rate": 9.79515954799483e-06, "loss": 0.5963, "step": 77 },
    { "epoch": 0.14092140921409213, "grad_norm": 0.2636110963817243, "learning_rate": 9.786597487660336e-06, "loss": 0.6082, "step": 78 },
    { "epoch": 0.14272809394760613, "grad_norm": 0.2548650343445913, "learning_rate": 9.777864028930705e-06, "loss": 0.6173, "step": 79 },
    { "epoch": 0.14453477868112014, "grad_norm": 0.24198639697590743, "learning_rate": 9.768959484533461e-06, "loss": 0.6262, "step": 80 },
    { "epoch": 0.14634146341463414, "grad_norm": 0.23616390413838728, "learning_rate": 9.75988417332237e-06, "loss": 0.6087, "step": 81 },
    { "epoch": 0.14814814814814814, "grad_norm": 0.24740792444302148, "learning_rate": 9.750638420266008e-06, "loss": 0.6023, "step": 82 },
    { "epoch": 0.14995483288166214, "grad_norm": 0.2708089867070639, "learning_rate": 9.741222556436132e-06, "loss": 0.6133, "step": 83 },
    { "epoch": 0.15176151761517614, "grad_norm": 0.23019955730854466, "learning_rate": 9.731636918995821e-06, "loss": 0.606, "step": 84 },
    { "epoch": 0.15356820234869015, "grad_norm": 0.24984754231513484, "learning_rate": 9.721881851187406e-06, "loss": 0.6082, "step": 85 },
    { "epoch": 0.15537488708220415, "grad_norm": 0.2514290283088365, "learning_rate": 9.711957702320176e-06, "loss": 0.6082, "step": 86 },
    { "epoch": 0.15718157181571815, "grad_norm": 0.25050319285982814, "learning_rate": 9.701864827757868e-06, "loss": 0.6103, "step": 87 },
    { "epoch": 0.15898825654923215, "grad_norm": 0.27127901136367444, "learning_rate": 9.691603588905956e-06, "loss": 0.6146, "step": 88 },
    { "epoch": 0.16079494128274616, "grad_norm": 0.2538064222847297, "learning_rate": 9.681174353198687e-06, "loss": 0.6103, "step": 89 },
    { "epoch": 0.16260162601626016, "grad_norm": 0.23895346723279925, "learning_rate": 9.670577494085945e-06, "loss": 0.6034, "step": 90 },
    { "epoch": 0.16440831074977416, "grad_norm": 0.23015986092903712, "learning_rate": 9.659813391019867e-06, "loss": 0.6013, "step": 91 },
    { "epoch": 0.16621499548328816, "grad_norm": 0.23826711464547476, "learning_rate": 9.648882429441258e-06, "loss": 0.6048, "step": 92 },
    { "epoch": 0.16802168021680217, "grad_norm": 0.27338890022372714, "learning_rate": 9.637785000765789e-06, "loss": 0.6114, "step": 93 },
    { "epoch": 0.16982836495031617, "grad_norm": 0.3426444715164819, "learning_rate": 9.626521502369984e-06, "loss": 0.6104, "step": 94 },
    { "epoch": 0.17163504968383017, "grad_norm": 0.23367032192171455, "learning_rate": 9.615092337576987e-06, "loss": 0.6027, "step": 95 },
    { "epoch": 0.17344173441734417, "grad_norm": 0.25432897179920155, "learning_rate": 9.603497915642122e-06, "loss": 0.6016, "step": 96 },
    { "epoch": 0.17524841915085818, "grad_norm": 0.26462048665974897, "learning_rate": 9.591738651738235e-06, "loss": 0.6073, "step": 97 },
    { "epoch": 0.17705510388437218, "grad_norm": 0.24672431475599113, "learning_rate": 9.579814966940833e-06, "loss": 0.6013, "step": 98 },
    { "epoch": 0.17886178861788618, "grad_norm": 0.23213124494510456, "learning_rate": 9.567727288213005e-06, "loss": 0.6138, "step": 99 },
    { "epoch": 0.18066847335140018, "grad_norm": 0.22763203934971926, "learning_rate": 9.55547604839013e-06, "loss": 0.5873, "step": 100 },
    { "epoch": 0.18247515808491419, "grad_norm": 0.26501816102015613, "learning_rate": 9.543061686164374e-06, "loss": 0.6035, "step": 101 },
    { "epoch": 0.1842818428184282, "grad_norm": 0.2413240831373431, "learning_rate": 9.530484646068996e-06, "loss": 0.6216, "step": 102 },
    { "epoch": 0.1860885275519422, "grad_norm": 0.2455642901112215, "learning_rate": 9.517745378462417e-06, "loss": 0.6003, "step": 103 },
    { "epoch": 0.1878952122854562, "grad_norm": 0.2372588122094204, "learning_rate": 9.504844339512096e-06, "loss": 0.5987, "step": 104 },
    { "epoch": 0.1897018970189702, "grad_norm": 0.2369535320245091, "learning_rate": 9.491781991178203e-06, "loss": 0.5909, "step": 105 },
    { "epoch": 0.1915085817524842, "grad_norm": 0.24116406271180205, "learning_rate": 9.478558801197065e-06, "loss": 0.5927, "step": 106 },
    { "epoch": 0.1933152664859982, "grad_norm": 0.2536103123094775, "learning_rate": 9.465175243064428e-06, "loss": 0.5988, "step": 107 },
    { "epoch": 0.1951219512195122, "grad_norm": 0.23921310478785365, "learning_rate": 9.451631796018495e-06, "loss": 0.597, "step": 108 },
    { "epoch": 0.1969286359530262, "grad_norm": 0.26269878735675534, "learning_rate": 9.437928945022772e-06, "loss": 0.6068, "step": 109 },
    { "epoch": 0.1987353206865402, "grad_norm": 0.24370446927617562, "learning_rate": 9.424067180748692e-06, "loss": 0.588, "step": 110 },
    { "epoch": 0.2005420054200542, "grad_norm": 0.25487924476218216, "learning_rate": 9.410046999558062e-06, "loss": 0.6073, "step": 111 },
    { "epoch": 0.2023486901535682, "grad_norm": 0.22826520458316987, "learning_rate": 9.395868903485269e-06, "loss": 0.6007, "step": 112 },
    { "epoch": 0.2041553748870822, "grad_norm": 0.26322160681669377, "learning_rate": 9.381533400219319e-06, "loss": 0.6043, "step": 113 },
    { "epoch": 0.20596205962059622, "grad_norm": 0.2501950853430307, "learning_rate": 9.36704100308565e-06, "loss": 0.5872, "step": 114 },
    { "epoch": 0.20776874435411022, "grad_norm": 0.23922028420502847, "learning_rate": 9.352392231027752e-06, "loss": 0.6035, "step": 115 },
    { "epoch": 0.20957542908762422, "grad_norm": 0.24040487245272643, "learning_rate": 9.337587608588588e-06, "loss": 0.5975, "step": 116 },
    { "epoch": 0.21138211382113822, "grad_norm": 0.2535195388665429, "learning_rate": 9.322627665891807e-06, "loss": 0.6078, "step": 117 },
    { "epoch": 0.21318879855465223, "grad_norm": 0.25208953663999395, "learning_rate": 9.307512938622762e-06, "loss": 0.5952, "step": 118 },
    { "epoch": 0.21499548328816623, "grad_norm": 0.3012991411883545, "learning_rate": 9.292243968009332e-06, "loss": 0.5865, "step": 119 },
    { "epoch": 0.21680216802168023, "grad_norm": 0.24955916880092432, "learning_rate": 9.276821300802535e-06, "loss": 0.6043, "step": 120 },
    { "epoch": 0.2186088527551942, "grad_norm": 0.25955737667557055, "learning_rate": 9.261245489256956e-06, "loss": 0.6002, "step": 121 },
    { "epoch": 0.2204155374887082, "grad_norm": 0.2713812159916643, "learning_rate": 9.24551709111097e-06, "loss": 0.6047, "step": 122 },
    { "epoch": 0.2222222222222222, "grad_norm": 0.256661948228577, "learning_rate": 9.229636669566769e-06, "loss": 0.5961, "step": 123 },
    { "epoch": 0.2240289069557362, "grad_norm": 0.2570022633057745, "learning_rate": 9.213604793270196e-06, "loss": 0.5821, "step": 124 },
    { "epoch": 0.22583559168925021, "grad_norm": 0.24737028748051598, "learning_rate": 9.197422036290386e-06, "loss": 0.5886, "step": 125 },
    { "epoch": 0.22764227642276422, "grad_norm": 0.23200373796347, "learning_rate": 9.181088978099203e-06, "loss": 0.6013, "step": 126 },
    { "epoch": 0.22944896115627822, "grad_norm": 0.2614594061693726, "learning_rate": 9.164606203550498e-06, "loss": 0.5934, "step": 127 },
    { "epoch": 0.23125564588979222, "grad_norm": 0.24202475192708897, "learning_rate": 9.147974302859158e-06, "loss": 0.5925, "step": 128 },
    { "epoch": 0.23306233062330622, "grad_norm": 0.24504631186486292, "learning_rate": 9.131193871579975e-06, "loss": 0.5883, "step": 129 },
    { "epoch": 0.23486901535682023, "grad_norm": 0.241479245168364, "learning_rate": 9.114265510586329e-06, "loss": 0.6067, "step": 130 },
    { "epoch": 0.23667570009033423, "grad_norm": 0.25269889538291007, "learning_rate": 9.09718982604866e-06, "loss": 0.6005, "step": 131 },
    { "epoch": 0.23848238482384823, "grad_norm": 0.22912564655640813, "learning_rate": 9.079967429412766e-06, "loss": 0.5796, "step": 132 },
    { "epoch": 0.24028906955736223, "grad_norm": 0.31576321467577306, "learning_rate": 9.062598937377911e-06, "loss": 0.5952, "step": 133 },
    { "epoch": 0.24209575429087624, "grad_norm": 0.25569462450215846, "learning_rate": 9.045084971874738e-06, "loss": 0.5959, "step": 134 },
    { "epoch": 0.24390243902439024, "grad_norm": 0.23822844770081567, "learning_rate": 9.027426160043005e-06, "loss": 0.5926, "step": 135 },
    { "epoch": 0.24570912375790424, "grad_norm": 0.24131282447033375, "learning_rate": 9.00962313420912e-06, "loss": 0.5969, "step": 136 },
    { "epoch": 0.24751580849141824, "grad_norm": 0.22227856877632468, "learning_rate": 8.991676531863507e-06, "loss": 0.5801, "step": 137 },
    { "epoch": 0.24932249322493225, "grad_norm": 0.23734800622262067, "learning_rate": 8.973586995637778e-06, "loss": 0.5977, "step": 138 },
    { "epoch": 0.25112917795844625, "grad_norm": 0.23876828633881889, "learning_rate": 8.955355173281709e-06, "loss": 0.6007, "step": 139 },
    { "epoch": 0.2529358626919603, "grad_norm": 0.25712286168873755, "learning_rate": 8.936981717640061e-06, "loss": 0.6003, "step": 140 },
    { "epoch": 0.25474254742547425, "grad_norm": 0.27391107054484026, "learning_rate": 8.9184672866292e-06, "loss": 0.5808, "step": 141 },
    { "epoch": 0.2565492321589883, "grad_norm": 0.24435732405268018, "learning_rate": 8.899812543213532e-06, "loss": 0.601, "step": 142 },
    { "epoch": 0.25835591689250226, "grad_norm": 0.23867286575502863, "learning_rate": 8.881018155381766e-06, "loss": 0.5921, "step": 143 },
    { "epoch": 0.2601626016260163, "grad_norm": 0.2422479510828615, "learning_rate": 8.862084796122998e-06, "loss": 0.5814, "step": 144 },
    { "epoch": 0.26196928635953026, "grad_norm": 0.2576783987177789, "learning_rate": 8.84301314340261e-06, "loss": 0.594, "step": 145 },
    { "epoch": 0.26377597109304424, "grad_norm": 0.23125790193661352, "learning_rate": 8.823803880137993e-06, "loss": 0.597, "step": 146 },
    { "epoch": 0.26558265582655827, "grad_norm": 0.42006165912724247, "learning_rate": 8.804457694174093e-06, "loss": 0.5883, "step": 147 },
    { "epoch": 0.26738934056007224, "grad_norm": 0.26469920234134653, "learning_rate": 8.784975278258783e-06, "loss": 0.5896, "step": 148 },
    { "epoch": 0.26919602529358627, "grad_norm": 0.22003631431564813, "learning_rate": 8.765357330018056e-06, "loss": 0.5868, "step": 149 },
    { "epoch": 0.27100271002710025, "grad_norm": 0.22526003515742166, "learning_rate": 8.745604551931042e-06, "loss": 0.5957, "step": 150 },
    { "epoch": 0.2728093947606143, "grad_norm": 0.2479830093779378, "learning_rate": 8.725717651304856e-06, "loss": 0.5795, "step": 151 },
    { "epoch": 0.27461607949412825, "grad_norm": 0.23742908063794085, "learning_rate": 8.705697340249275e-06, "loss": 0.5851, "step": 152 },
    { "epoch": 0.2764227642276423, "grad_norm": 0.25930775547454743, "learning_rate": 8.685544335651226e-06, "loss": 0.5862, "step": 153 },
    { "epoch": 0.27822944896115626, "grad_norm": 0.23422254555588726, "learning_rate": 8.665259359149132e-06, "loss": 0.5911, "step": 154 },
    { "epoch": 0.2800361336946703, "grad_norm": 0.24216095846438934, "learning_rate": 8.644843137107058e-06, "loss": 0.582, "step": 155 },
    { "epoch": 0.28184281842818426, "grad_norm": 0.24125281024439596, "learning_rate": 8.62429640058871e-06, "loss": 0.5831, "step": 156 },
    { "epoch": 0.2836495031616983, "grad_norm": 0.2793788877010957, "learning_rate": 8.603619885331251e-06, "loss": 0.5958, "step": 157 },
    { "epoch": 0.28545618789521227, "grad_norm": 0.25516033375875935, "learning_rate": 8.582814331718961e-06, "loss": 0.593, "step": 158 },
    { "epoch": 0.2872628726287263, "grad_norm": 0.22739931552418488, "learning_rate": 8.561880484756726e-06, "loss": 0.5743, "step": 159 },
    { "epoch": 0.28906955736224027, "grad_norm": 0.2468158715198243, "learning_rate": 8.540819094043349e-06, "loss": 0.583, "step": 160 },
    { "epoch": 0.2908762420957543, "grad_norm": 0.25128717740643786, "learning_rate": 8.519630913744726e-06, "loss": 0.5899, "step": 161 },
    { "epoch": 0.2926829268292683, "grad_norm": 0.24595325998036194, "learning_rate": 8.498316702566828e-06, "loss": 0.576, "step": 162 },
    { "epoch": 0.2944896115627823, "grad_norm": 0.24724060243158863, "learning_rate": 8.476877223728539e-06, "loss": 0.5857, "step": 163 },
    { "epoch": 0.2962962962962963, "grad_norm": 0.24138093249405998, "learning_rate": 8.455313244934324e-06, "loss": 0.5948, "step": 164 },
    { "epoch": 0.2981029810298103, "grad_norm": 0.23669686589328862, "learning_rate": 8.433625538346742e-06, "loss": 0.586, "step": 165 },
    { "epoch": 0.2999096657633243, "grad_norm": 0.2449082662790083, "learning_rate": 8.41181488055879e-06, "loss": 0.5923, "step": 166 },
    { "epoch": 0.3017163504968383, "grad_norm": 0.2304832467395112, "learning_rate": 8.389882052566106e-06, "loss": 0.5913, "step": 167 },
    { "epoch": 0.3035230352303523, "grad_norm": 0.23899223073070122, "learning_rate": 8.36782783973899e-06, "loss": 0.5895, "step": 168 },
    { "epoch": 0.3053297199638663, "grad_norm": 0.2220823080610669, "learning_rate": 8.345653031794292e-06, "loss": 0.5819, "step": 169 },
    { "epoch": 0.3071364046973803, "grad_norm": 0.22768672377384522, "learning_rate": 8.32335842276713e-06, "loss": 0.5962, "step": 170 },
    { "epoch": 0.3089430894308943, "grad_norm": 0.3100181774645041, "learning_rate": 8.300944810982452e-06, "loss": 0.5788, "step": 171 },
    { "epoch": 0.3107497741644083, "grad_norm": 0.24057356644337244, "learning_rate": 8.278412999026462e-06, "loss": 0.5857, "step": 172 },
    { "epoch": 0.31255645889792233, "grad_norm": 0.21775769040396545, "learning_rate": 8.255763793717868e-06, "loss": 0.5888, "step": 173 },
    { "epoch": 0.3143631436314363, "grad_norm": 0.2356216154767705, "learning_rate": 8.232998006078998e-06, "loss": 0.5801, "step": 174 },
    { "epoch": 0.31616982836495033, "grad_norm": 0.22451312946018775, "learning_rate": 8.210116451306762e-06, "loss": 0.5845, "step": 175 },
    { "epoch": 0.3179765130984643, "grad_norm": 0.24215834930945668, "learning_rate": 8.18711994874345e-06, "loss": 0.5988, "step": 176 },
    { "epoch": 0.31978319783197834, "grad_norm": 0.22096563655138926, "learning_rate": 8.164009321847405e-06, "loss": 0.5739, "step": 177 },
    { "epoch": 0.3215898825654923, "grad_norm": 0.23114887937213074, "learning_rate": 8.140785398163535e-06, "loss": 0.5801, "step": 178 },
    { "epoch": 0.32339656729900634, "grad_norm": 0.24621261902091238, "learning_rate": 8.117449009293668e-06, "loss": 0.5903, "step": 179 },
    { "epoch": 0.3252032520325203, "grad_norm": 0.21755578147304513, "learning_rate": 8.094000990866795e-06, "loss": 0.5983, "step": 180 },
    { "epoch": 0.32700993676603435, "grad_norm": 0.4721115880502492, "learning_rate": 8.070442182509127e-06, "loss": 0.5761, "step": 181 },
    { "epoch": 0.3288166214995483, "grad_norm": 0.23709839720323575, "learning_rate": 8.046773427814043e-06, "loss": 0.5861, "step": 182 },
    { "epoch": 0.33062330623306235, "grad_norm": 0.2368260602274903, "learning_rate": 8.022995574311876e-06, "loss": 0.5975, "step": 183 },
    { "epoch": 0.3324299909665763, "grad_norm": 0.23910801570243506, "learning_rate": 7.99910947343957e-06, "loss": 0.5941, "step": 184 },
    { "epoch": 0.33423667570009036, "grad_norm": 0.23953108399048112, "learning_rate": 7.975115980510187e-06, "loss": 0.5905, "step": 185 },
    { "epoch": 0.33604336043360433, "grad_norm": 0.223257758342428, "learning_rate": 7.951015954682281e-06, "loss": 0.5857, "step": 186 },
    { "epoch": 0.33785004516711836, "grad_norm": 0.23272612825616595, "learning_rate": 7.926810258929138e-06, "loss": 0.5833, "step": 187 },
    { "epoch": 0.33965672990063234, "grad_norm": 0.23180332436404757, "learning_rate": 7.902499760007867e-06, "loss": 0.5829, "step": 188 },
    { "epoch": 0.34146341463414637, "grad_norm": 0.21858853978825646, "learning_rate": 7.87808532842837e-06, "loss": 0.5904, "step": 189 },
    { "epoch": 0.34327009936766034, "grad_norm": 0.2534706895576975, "learning_rate": 7.85356783842216e-06, "loss": 0.579, "step": 190 },
    { "epoch": 0.34507678410117437, "grad_norm": 0.22297606287457467, "learning_rate": 7.828948167911073e-06, "loss": 0.5772, "step": 191 },
    { "epoch": 0.34688346883468835, "grad_norm": 0.21979886538270768, "learning_rate": 7.804227198475823e-06, "loss": 0.5839, "step": 192 },
    { "epoch": 0.3486901535682023, "grad_norm": 0.23142173264795982, "learning_rate": 7.779405815324424e-06, "loss": 0.5862, "step": 193 },
    { "epoch": 0.35049683830171635, "grad_norm": 0.22254439764036268, "learning_rate": 7.754484907260513e-06, "loss": 0.5875, "step": 194 },
    { "epoch": 0.3523035230352303, "grad_norm": 0.21851884135006985, "learning_rate": 7.72946536665151e-06, "loss": 0.5707, "step": 195 },
    { "epoch": 0.35411020776874436, "grad_norm": 0.23339711422683737, "learning_rate": 7.704348089396667e-06, "loss": 0.584, "step": 196 },
    { "epoch": 0.35591689250225833, "grad_norm": 0.22860223954846823, "learning_rate": 7.679133974894984e-06, "loss": 0.5833, "step": 197 },
    { "epoch": 0.35772357723577236, "grad_norm": 0.23431317558060544, "learning_rate": 7.653823926013016e-06, "loss": 0.5605, "step": 198 },
    { "epoch": 0.35953026196928634, "grad_norm": 0.23915573089081735, "learning_rate": 7.628418849052523e-06, "loss": 0.5831, "step": 199 },
    { "epoch": 0.36133694670280037, "grad_norm": 0.24771798767259023, "learning_rate": 7.602919653718044e-06, "loss": 0.573, "step": 200 },
    { "epoch": 0.36314363143631434, "grad_norm": 0.22566217967051752, "learning_rate": 7.577327253084292e-06, "loss": 0.5675, "step": 201 },
    { "epoch": 0.36495031616982837, "grad_norm": 0.25869762451164763, "learning_rate": 7.551642563563481e-06, "loss": 0.5943, "step": 202 },
    { "epoch": 0.36675700090334235, "grad_norm": 0.2436116192126039, "learning_rate": 7.5258665048725065e-06, "loss": 0.5816, "step": 203 },
    { "epoch": 0.3685636856368564, "grad_norm": 0.2534916530296785, "learning_rate": 7.500000000000001e-06, "loss": 0.5939, "step": 204 },
    { "epoch": 0.37037037037037035, "grad_norm": 0.2398855410103261, "learning_rate": 7.4740439751732994e-06, "loss": 0.5842, "step": 205 },
    { "epoch": 0.3721770551038844, "grad_norm": 0.2490207053834905, "learning_rate": 7.447999359825263e-06, "loss": 0.5714, "step": 206 },
    { "epoch": 0.37398373983739835, "grad_norm": 0.2497379506431991, "learning_rate": 7.421867086561001e-06, "loss": 0.5797, "step": 207 },
    { "epoch": 0.3757904245709124, "grad_norm": 0.23639168380972936, "learning_rate": 7.395648091124476e-06, "loss": 0.5669, "step": 208 },
    { "epoch": 0.37759710930442636, "grad_norm": 0.26306682836445616, "learning_rate": 7.369343312364994e-06, "loss": 0.5881, "step": 209 },
    { "epoch": 0.3794037940379404, "grad_norm": 0.23899707885081173, "learning_rate": 7.342953692203594e-06, "loss": 0.5836, "step": 210 },
    { "epoch": 0.38121047877145436, "grad_norm": 0.21917547991525116, "learning_rate": 7.31648017559931e-06, "loss": 0.5845, "step": 211 },
    { "epoch": 0.3830171635049684, "grad_norm": 0.2498595873094313, "learning_rate": 7.289923710515338e-06, "loss": 0.5928, "step": 212 },
    { "epoch": 0.38482384823848237, "grad_norm": 0.23198183299347874, "learning_rate": 7.263285247885097e-06, "loss": 0.5916, "step": 213 },
    { "epoch": 0.3866305329719964, "grad_norm": 0.22870603046354684, "learning_rate": 7.236565741578163e-06, "loss": 0.5779, "step": 214 },
    { "epoch": 0.3884372177055104, "grad_norm": 0.232670901139665, "learning_rate": 7.2097661483661355e-06, "loss": 0.6046, "step": 215 },
    { "epoch": 0.3902439024390244, "grad_norm": 0.26519467596956026, "learning_rate": 7.182887427888351e-06, "loss": 0.5939, "step": 216 },
    { "epoch": 0.3920505871725384, "grad_norm": 0.2361374357204248, "learning_rate": 7.155930542617543e-06, "loss": 0.5935, "step": 217 },
    { "epoch": 0.3938572719060524, "grad_norm": 0.22982603176136704, "learning_rate": 7.128896457825364e-06, "loss": 0.5855, "step": 218 },
    { "epoch": 0.3956639566395664, "grad_norm": 0.24061444470093468, "learning_rate": 7.101786141547829e-06, "loss": 0.5802, "step": 219 },
    { "epoch": 0.3974706413730804, "grad_norm": 0.2657362469174554, "learning_rate": 7.074600564550643e-06, "loss": 0.5833, "step": 220 },
    { "epoch": 0.3992773261065944, "grad_norm": 0.25742709680526865, "learning_rate": 7.047340700294454e-06, "loss": 0.5717, "step": 221 },
    { "epoch": 0.4010840108401084, "grad_norm": 0.24674502467730397, "learning_rate": 7.020007524899976e-06, "loss": 0.5889, "step": 222 },
    { "epoch": 0.4028906955736224, "grad_norm": 0.2171692347779104, "learning_rate": 6.992602017113058e-06, "loss": 0.5713, "step": 223 },
    { "epoch": 0.4046973803071364, "grad_norm": 0.2498429422989435, "learning_rate": 6.965125158269619e-06, "loss": 0.5765, "step": 224 },
    { "epoch": 0.4065040650406504, "grad_norm": 0.33189852838778705, "learning_rate": 6.9375779322605154e-06, "loss": 0.5814, "step": 225 },
    { "epoch": 0.4083107497741644, "grad_norm": 0.26400537575803656, "learning_rate": 6.909961325496312e-06, "loss": 0.5878, "step": 226 },
    { "epoch": 0.4101174345076784, "grad_norm": 0.22155028748374633, "learning_rate": 6.88227632687196e-06, "loss": 0.592, "step": 227 },
    { "epoch": 0.41192411924119243, "grad_norm": 0.23981105169244862, "learning_rate": 6.854523927731383e-06, "loss": 0.5786, "step": 228 },
    { "epoch": 0.4137308039747064, "grad_norm": 0.2425346919111165, "learning_rate": 6.8267051218319766e-06, "loss": 0.5807, "step": 229 },
    { "epoch": 0.41553748870822044, "grad_norm": 0.24436971692539694, "learning_rate": 6.798820905309036e-06, "loss": 0.5831, "step": 230 },
    { "epoch": 0.4173441734417344, "grad_norm": 0.24576247596843034, "learning_rate": 6.7708722766400745e-06, "loss": 0.5832, "step": 231 },
    { "epoch": 0.41915085817524844, "grad_norm": 0.22279415909174483, "learning_rate": 6.7428602366090764e-06, "loss": 0.5859, "step": 232 },
    { "epoch": 0.4209575429087624, "grad_norm": 0.23515720046685648, "learning_rate": 6.714785788270658e-06, "loss": 0.567, "step": 233 },
    { "epoch": 0.42276422764227645, "grad_norm": 0.2249124149357309, "learning_rate": 6.686649936914151e-06, "loss": 0.5834, "step": 234 },
    { "epoch": 0.4245709123757904, "grad_norm": 0.22758860403193296, "learning_rate": 6.658453690027604e-06, "loss": 0.5781, "step": 235 },
    { "epoch": 0.42637759710930445, "grad_norm": 0.2381404018609259, "learning_rate": 6.63019805726171e-06, "loss": 0.5899, "step": 236 },
    { "epoch": 0.4281842818428184, "grad_norm": 0.23902637207284608, "learning_rate": 6.601884050393649e-06, "loss": 0.5885, "step": 237 },
    { "epoch": 0.42999096657633246, "grad_norm": 0.22765927211330927, "learning_rate": 6.57351268329086e-06, "loss": 0.5976, "step": 238 },
    { "epoch": 0.43179765130984643, "grad_norm": 0.22982897410990663, "learning_rate": 6.545084971874738e-06, "loss": 0.579, "step": 239 },
    { "epoch": 0.43360433604336046, "grad_norm": 0.2553936917989646, "learning_rate": 6.51660193408425e-06, "loss": 0.5795, "step": 240 },
    { "epoch": 0.43541102077687444, "grad_norm": 0.22510326077869028, "learning_rate": 6.4880645898394935e-06, "loss": 0.5777, "step": 241 },
    { "epoch": 0.4372177055103884, "grad_norm": 0.23299426062630849, "learning_rate": 6.459473961005168e-06, "loss": 0.5786, "step": 242 },
    { "epoch": 0.43902439024390244, "grad_norm": 0.2424647643940713, "learning_rate": 6.4308310713539845e-06, "loss": 0.5828, "step": 243 },
    { "epoch": 0.4408310749774164, "grad_norm": 0.23272151161384586, "learning_rate": 6.402136946530014e-06, "loss": 0.5881, "step": 244 },
    { "epoch": 0.44263775971093045, "grad_norm": 0.22802783213202982, "learning_rate": 6.373392614011952e-06, "loss": 0.5813, "step": 245 },
    { "epoch": 0.4444444444444444, "grad_norm": 0.2485353426062764, "learning_rate": 6.344599103076329e-06, "loss": 0.588, "step": 246 },
    { "epoch": 0.44625112917795845, "grad_norm": 0.21248935513626893, "learning_rate": 6.315757444760659e-06, "loss": 0.5706, "step": 247 },
    { "epoch": 0.4480578139114724, "grad_norm": 0.23498983453654892, "learning_rate": 6.286868671826513e-06, "loss": 0.5888, "step": 248 },
    { "epoch": 0.44986449864498645, "grad_norm": 0.21846215379927772, "learning_rate": 6.257933818722544e-06, "loss": 0.5719, "step": 249 },
    { "epoch": 0.45167118337850043, "grad_norm": 0.22767296186445063, "learning_rate": 6.228953921547441e-06, "loss": 0.5866, "step": 250 },
    { "epoch": 0.45347786811201446, "grad_norm": 0.22816978626710174, "learning_rate": 6.19993001801283e-06, "loss": 0.5745, "step": 251 },
    { "epoch": 0.45528455284552843, "grad_norm": 0.21916680779440528, "learning_rate": 6.17086314740612e-06, "loss": 0.5559, "step": 252 },
    { "epoch": 0.45709123757904246, "grad_norm": 0.2324362121259239, "learning_rate": 6.141754350553279e-06, "loss": 0.5788, "step": 253 },
    { "epoch": 0.45889792231255644, "grad_norm": 0.22348999950857165, "learning_rate": 6.112604669781572e-06, "loss": 0.5775, "step": 254 },
    { "epoch": 0.46070460704607047, "grad_norm": 0.23054541043455123, "learning_rate": 6.083415148882236e-06, "loss": 0.5715, "step": 255 },
    { "epoch": 0.46251129177958444, "grad_norm": 0.23068402595973864, "learning_rate": 6.054186833073096e-06, "loss": 0.5719, "step": 256 },
    { "epoch": 0.4643179765130985, "grad_norm": 0.23217623204156318, "learning_rate": 6.024920768961153e-06, "loss": 0.581, "step": 257 },
    { "epoch": 0.46612466124661245, "grad_norm": 0.22985330442952737, "learning_rate": 5.995618004505091e-06, "loss": 0.5767, "step": 258 },
    { "epoch": 0.4679313459801265, "grad_norm": 0.22449766670966562, "learning_rate": 5.9662795889777666e-06, "loss": 0.5804, "step": 259 },
    { "epoch": 0.46973803071364045, "grad_norm": 0.23055385442803344, "learning_rate": 5.936906572928625e-06, "loss": 0.5946, "step": 260 },
    { "epoch": 0.4715447154471545, "grad_norm": 0.26457326244915724, "learning_rate": 5.907500008146082e-06, "loss": 0.5857, "step": 261 },
    { "epoch": 0.47335140018066846, "grad_norm": 0.2172516653668416, "learning_rate": 5.878060947619877e-06, "loss": 0.5742, "step": 262 },
    { "epoch": 0.4751580849141825, "grad_norm": 0.2144178920324397, "learning_rate": 5.848590445503345e-06, "loss": 0.5784, "step": 263 },
    { "epoch": 0.47696476964769646, "grad_norm": 0.22867942289400467, "learning_rate": 5.819089557075689e-06, "loss": 0.5851, "step": 264 },
    { "epoch": 0.4787714543812105, "grad_norm": 0.20867595560023927, "learning_rate": 5.78955933870418e-06, "loss": 0.566, "step": 265 },
    { "epoch": 0.48057813911472447, "grad_norm": 0.2753862444054913, "learning_rate": 5.760000847806337e-06, "loss": 0.5902, "step": 266 },
    { "epoch": 0.4823848238482385, "grad_norm": 0.24641932570912456, "learning_rate": 5.730415142812059e-06, "loss": 0.5745, "step": 267 },
    { "epoch": 0.48419150858175247, "grad_norm": 0.23760140818207695, "learning_rate": 5.70080328312573e-06, "loss": 0.5754, "step": 268 },
    { "epoch": 0.4859981933152665, "grad_norm": 0.22025309526282863, "learning_rate": 5.671166329088278e-06, "loss": 0.581, "step": 269 },
    { "epoch": 0.4878048780487805, "grad_norm": 0.20669409641102707, "learning_rate": 5.641505341939212e-06, "loss": 0.5632, "step": 270 },
    { "epoch": 0.4896115627822945, "grad_norm": 0.23448298972119733, "learning_rate": 5.611821383778614e-06, "loss": 0.5848, "step": 271 },
    { "epoch": 0.4914182475158085, "grad_norm": 0.23237120061240638, "learning_rate": 5.582115517529114e-06, "loss": 0.5795, "step": 272 },
    { "epoch": 0.4932249322493225, "grad_norm": 0.23515585901808658, "learning_rate": 5.55238880689783e-06, "loss": 0.5875, "step": 273 },
    { "epoch": 0.4950316169828365, "grad_norm": 0.2527502719115226, "learning_rate": 5.522642316338268e-06, "loss": 0.5746, "step": 274 },
    { "epoch": 0.4968383017163505, "grad_norm": 0.24042048877480285, "learning_rate": 5.4928771110122185e-06, "loss": 0.5691, "step": 275 },
    { "epoch": 0.4986449864498645, "grad_norm": 0.22144199174125812, "learning_rate": 5.463094256751608e-06, "loss": 0.5617, "step": 276 },
    { "epoch": 0.5004516711833785, "grad_norm": 0.22117064677439477, "learning_rate": 5.433294820020335e-06, "loss": 0.5737, "step": 277 },
    { "epoch": 0.5022583559168925, "grad_norm": 0.22810855717519768, "learning_rate": 5.403479867876087e-06, "loss": 0.5602, "step": 278 },
    { "epoch": 0.5040650406504065, "grad_norm": 0.2431102529557088, "learning_rate": 5.373650467932122e-06, "loss": 0.5752, "step": 279 },
    { "epoch": 0.5058717253839206, "grad_norm": 0.21643082283869217, "learning_rate": 5.343807688319047e-06, "loss": 0.5715, "step": 280 },
    { "epoch": 0.5076784101174345, "grad_norm": 0.2106247958442796, "learning_rate": 5.3139525976465675e-06, "loss": 0.5726, "step": 281 },
    { "epoch": 0.5094850948509485, "grad_norm": 0.20535156199197266, "learning_rate": 5.284086264965224e-06, "loss": 0.5663, "step": 282 },
    { "epoch": 0.5112917795844625, "grad_norm": 0.2178927295972816, "learning_rate": 5.2542097597281095e-06, "loss": 0.5825, "step": 283 },
    { "epoch": 0.5130984643179766, "grad_norm": 0.2261778592064716, "learning_rate": 5.224324151752575e-06, "loss": 0.5703, "step": 284 },
    { "epoch": 0.5149051490514905, "grad_norm": 0.22392216246707566, "learning_rate": 5.194430511181925e-06, "loss": 0.5637, "step": 285 },
    { "epoch": 0.5167118337850045, "grad_norm": 0.2404878535442045, "learning_rate": 5.1645299084470936e-06, "loss": 0.563, "step": 286 },
    { "epoch": 0.5185185185185185, "grad_norm": 0.22254146427879729, "learning_rate": 5.134623414228315e-06, "loss": 0.5848, "step": 287 },
    { "epoch": 0.5203252032520326, "grad_norm": 0.21304622454449643, "learning_rate": 5.1047120994167855e-06, "loss": 0.5813, "step": 288 },
    { "epoch": 0.5221318879855466, "grad_norm": 0.22271135560602576, "learning_rate": 5.074797035076319e-06, "loss": 0.5657, "step": 289 },
    { "epoch": 0.5239385727190605, "grad_norm": 0.23005954607679505, "learning_rate": 5.04487929240499e-06, "loss": 0.5778, "step": 290 },
    { "epoch": 0.5257452574525745, "grad_norm": 0.21478376662682697, "learning_rate": 5.014959942696782e-06, "loss": 0.5821, "step": 291 },
    { "epoch": 0.5275519421860885, "grad_norm": 0.21969184987666926, "learning_rate": 4.98504005730322e-06, "loss": 0.5852, "step": 292 },
    { "epoch": 0.5293586269196026, "grad_norm": 0.21975967673073044, "learning_rate": 4.955120707595011e-06, "loss": 0.5788, "step": 293 },
    { "epoch": 0.5311653116531165, "grad_norm": 0.21951856116759466, "learning_rate": 4.9252029649236835e-06, "loss": 0.5708, "step": 294 },
    { "epoch": 0.5329719963866305, "grad_norm": 0.22120179120056116, "learning_rate": 4.895287900583216e-06, "loss": 0.5689, "step": 295 },
    { "epoch": 0.5347786811201445, "grad_norm": 0.22555663183495606, "learning_rate": 4.865376585771687e-06, "loss": 0.5722, "step": 296 },
    { "epoch": 0.5365853658536586, "grad_norm": 0.24465888318717097, "learning_rate": 4.835470091552906e-06, "loss": 0.578, "step": 297 },
    { "epoch": 0.5383920505871725, "grad_norm": 0.21974179994892612, "learning_rate": 4.805569488818077e-06, "loss": 0.5724, "step": 298 },
    { "epoch": 0.5401987353206865, "grad_norm": 0.21884986461240788, "learning_rate": 4.775675848247427e-06, "loss": 0.5885, "step": 299 },
    { "epoch": 0.5420054200542005, "grad_norm": 0.22111504748994093, "learning_rate": 4.745790240271892e-06, "loss": 0.576, "step": 300 },
    { "epoch": 0.5438121047877146, "grad_norm": 0.2120166681445097, "learning_rate": 4.715913735034779e-06, "loss": 0.5773, "step": 301 },
    { "epoch": 0.5456187895212286, "grad_norm": 0.21089341870029393, "learning_rate": 4.686047402353433e-06, "loss": 0.5896, "step": 302 },
    { "epoch": 0.5474254742547425, "grad_norm": 0.2254482836833871, "learning_rate": 4.6561923116809545e-06, "loss": 0.5709, "step": 303 },
    { "epoch": 0.5492321589882565, "grad_norm": 0.2106071545714772, "learning_rate": 4.626349532067879e-06, "loss": 0.5657, "step": 304 },
    { "epoch": 0.5510388437217706, "grad_norm": 0.22207021742532604, "learning_rate": 4.596520132123915e-06, "loss": 0.5723, "step": 305 },
    { "epoch": 0.5528455284552846, "grad_norm": 0.21341260540637924, "learning_rate": 4.566705179979665e-06, "loss": 0.57, "step": 306 },
    { "epoch": 0.5546522131887985, "grad_norm": 0.2085210802540439, "learning_rate": 4.536905743248394e-06, "loss": 0.5877, "step": 307 },
    { "epoch": 0.5564588979223125, "grad_norm": 0.20872893231471018, "learning_rate": 4.507122888987782e-06, "loss": 0.5673, "step": 308 },
    { "epoch": 0.5582655826558266, "grad_norm": 0.21452564527296392, "learning_rate": 4.477357683661734e-06, "loss": 0.5764, "step": 309 },
    { "epoch": 0.5600722673893406, "grad_norm": 0.2220712438136986, "learning_rate": 4.447611193102171e-06, "loss": 0.5594, "step": 310 },
    { "epoch": 0.5618789521228545, "grad_norm": 0.20739127928939716, "learning_rate": 4.417884482470887e-06, "loss": 0.5777, "step": 311 },
    { "epoch": 0.5636856368563685, "grad_norm": 0.20327606439871923, "learning_rate": 4.388178616221389e-06, "loss": 0.577, "step": 312 },
    { "epoch": 0.5654923215898826, "grad_norm": 0.21650542787101193, "learning_rate": 4.35849465806079e-06, "loss": 0.5789, "step": 313 },
    { "epoch": 0.5672990063233966, "grad_norm": 0.20691358553040365, "learning_rate": 4.3288336709117246e-06, "loss": 0.5707, "step": 314 },
    { "epoch": 0.5691056910569106, "grad_norm": 0.23453104931805596, "learning_rate": 4.299196716874271e-06, "loss": 0.5706, "step": 315 },
    { "epoch": 0.5709123757904245, "grad_norm": 0.21953655360969157, "learning_rate": 4.269584857187942e-06, "loss": 0.5676, "step": 316 },
    { "epoch": 0.5727190605239386, "grad_norm": 0.2092430697715485, "learning_rate": 4.239999152193664e-06, "loss": 0.5622, "step": 317 },
    { "epoch": 0.5745257452574526, "grad_norm": 0.20317177561948455, "learning_rate": 4.2104406612958216e-06, "loss": 0.5743, "step": 318 },
    { "epoch": 0.5763324299909666, "grad_norm": 0.21517519346533806, "learning_rate": 4.180910442924312e-06, "loss": 0.5843, "step": 319 },
    { "epoch": 0.5781391147244805, "grad_norm": 0.211034863639269, "learning_rate": 4.1514095544966556e-06, "loss": 0.5671, "step": 320 },
    { "epoch": 0.5799457994579946, "grad_norm": 0.21293791768630807, "learning_rate": 4.121939052380125e-06, "loss": 0.5634, "step": 321 },
    { "epoch": 0.5817524841915086, "grad_norm": 0.23990516473010776, "learning_rate": 4.092499991853919e-06, "loss": 0.5851, "step": 322 },
    { "epoch": 0.5835591689250226, "grad_norm": 0.21305791476970695, "learning_rate": 4.063093427071376e-06, "loss": 0.5708, "step": 323 },
    { "epoch": 0.5853658536585366, "grad_norm": 0.22152034591999475, "learning_rate": 4.033720411022235e-06, "loss": 0.551, "step": 324 },
    { "epoch": 0.5871725383920506, "grad_norm": 0.24741746440422901, "learning_rate": 4.0043819954949105e-06, "loss": 0.5693, "step": 325 },
    { "epoch": 0.5889792231255646, "grad_norm": 0.23319543650728994, "learning_rate": 3.975079231038848e-06, "loss": 0.5781, "step": 326 },
    { "epoch": 0.5907859078590786, "grad_norm": 0.2562877844594374, "learning_rate": 3.9458131669269066e-06, "loss": 0.5656, "step": 327 },
    { "epoch": 0.5925925925925926, "grad_norm": 0.21153036788677582, "learning_rate": 3.916584851117766e-06, "loss": 0.5712, "step": 328 },
    { "epoch": 0.5943992773261066, "grad_norm": 0.2036962689195446, "learning_rate": 3.887395330218429e-06, "loss": 0.5622, "step": 329 },
    { "epoch": 0.5962059620596206, "grad_norm": 0.22556374814001054, "learning_rate": 3.8582456494467214e-06, "loss": 0.5693, "step": 330 },
    { "epoch": 0.5980126467931346, "grad_norm": 0.22342745745182274, "learning_rate": 3.829136852593881e-06, "loss": 0.5742, "step": 331 },
    { "epoch": 0.5998193315266486, "grad_norm": 0.2130691852685183, "learning_rate": 3.8000699819871704e-06, "loss": 0.5569, "step": 332 },
    { "epoch": 0.6016260162601627, "grad_norm": 0.19727091458414242, "learning_rate": 3.7710460784525617e-06, "loss": 0.5776, "step": 333 },
    { "epoch": 0.6034327009936766, "grad_norm": 0.2146233826191007, "learning_rate": 3.7420661812774577e-06, "loss": 0.5904, "step": 334 },
    { "epoch": 0.6052393857271906, "grad_norm": 0.2032389532543255, "learning_rate": 3.7131313281734895e-06, "loss": 0.5728, "step": 335 },
    { "epoch": 0.6070460704607046, "grad_norm": 0.2152966373067959, "learning_rate": 3.6842425552393424e-06, "loss": 0.5701, "step": 336 },
    { "epoch": 0.6088527551942186, "grad_norm": 0.23087382458330968, "learning_rate": 3.655400896923672e-06, "loss": 0.5714, "step": 337 },
    { "epoch": 0.6106594399277326, "grad_norm": 0.20946373615215116, "learning_rate": 3.62660738598805e-06, "loss": 0.5671, "step": 338 },
    { "epoch": 0.6124661246612466, "grad_norm": 0.20810849142259824, "learning_rate": 3.5978630534699873e-06, "loss": 0.5759, "step": 339 },
    { "epoch": 0.6142728093947606, "grad_norm": 0.20530067815462075, "learning_rate": 3.5691689286460172e-06, "loss": 0.5713, "step": 340 },
    { "epoch": 0.6160794941282746, "grad_norm": 0.19818029483030003, "learning_rate": 3.540526038994834e-06, "loss": 0.5698, "step": 341 },
    { "epoch": 0.6178861788617886, "grad_norm": 0.21183226275727388, "learning_rate": 3.5119354101605086e-06, "loss": 0.5731, "step": 342 },
    { "epoch": 0.6196928635953026, "grad_norm": 0.21849196760937517, "learning_rate": 3.4833980659157507e-06, "loss": 0.5672, "step": 343 },
    { "epoch": 0.6214995483288166, "grad_norm": 0.1940531424360125, "learning_rate": 3.4549150281252635e-06, "loss": 0.5568, "step": 344 },
    { "epoch": 0.6233062330623306, "grad_norm": 0.20705949032650917, "learning_rate": 3.4264873167091405e-06, "loss": 0.571, "step": 345 },
    { "epoch": 0.6251129177958447, "grad_norm": 0.2094233428486086, "learning_rate": 3.398115949606352e-06, "loss": 0.5726, "step": 346 },
    { "epoch": 0.6269196025293586, "grad_norm": 0.19121716306533118, "learning_rate": 3.3698019427382912e-06, "loss": 0.5578, "step": 347 },
    { "epoch": 0.6287262872628726, "grad_norm": 0.20426639852726766, "learning_rate": 3.341546309972398e-06, "loss": 0.5589, "step": 348 },
    { "epoch": 0.6305329719963866, "grad_norm": 0.19738122568646607, "learning_rate": 3.3133500630858507e-06, "loss": 0.5618, "step": 349 },
    { "epoch": 0.6323396567299007, "grad_norm": 0.2051543715396433, "learning_rate": 3.2852142117293435e-06, "loss": 0.5743, "step": 350 },
    { "epoch": 0.6341463414634146, "grad_norm": 0.19808519245451528, "learning_rate": 3.2571397633909252e-06, "loss": 0.5642, "step": 351 },
    { "epoch": 0.6359530261969286, "grad_norm": 0.21808756281857763, "learning_rate": 3.229127723359927e-06, "loss": 0.5778, "step": 352 },
    { "epoch": 0.6377597109304426, "grad_norm": 0.19417668983418937, "learning_rate": 3.2011790946909673e-06, "loss": 0.5783, "step": 353 },
    { "epoch": 0.6395663956639567, "grad_norm": 0.2079586248025546, "learning_rate": 3.173294878168025e-06, "loss": 0.5733, "step": 354 },
    { "epoch": 0.6413730803974707, "grad_norm": 0.22055722119887894, "learning_rate": 3.1454760722686206e-06, "loss": 0.5625, "step": 355 },
    { "epoch": 0.6431797651309846, "grad_norm": 0.20910396932981393, "learning_rate": 3.11772367312804e-06, "loss": 0.5773, "step": 356 },
    { "epoch": 0.6449864498644986, "grad_norm": 0.2031776915729197, "learning_rate": 3.090038674503688e-06, "loss": 0.5779, "step": 357 },
    { "epoch": 0.6467931345980127, "grad_norm": 0.2336756362579364, "learning_rate": 3.0624220677394854e-06, "loss": 0.5834, "step": 358 },
    { "epoch": 0.6485998193315267, "grad_norm": 0.23182552557923047, "learning_rate": 3.0348748417303826e-06, "loss": 0.5564, "step": 359 },
    { "epoch": 0.6504065040650406, "grad_norm": 0.23582441085258554, "learning_rate": 3.007397982886942e-06, "loss": 0.5649, "step": 360 },
    { "epoch": 0.6522131887985546, "grad_norm": 0.20272398204237846, "learning_rate": 2.979992475100024e-06, "loss": 0.5707, "step": 361 },
    { "epoch": 0.6540198735320687, "grad_norm": 0.19546234691230657, "learning_rate": 2.9526592997055488e-06, "loss": 0.5818, "step": 362 },
    { "epoch": 0.6558265582655827, "grad_norm": 0.22417157116419753, "learning_rate": 2.9253994354493575e-06, "loss": 0.5726, "step": 363 },
    { "epoch": 0.6576332429990966, "grad_norm": 0.2019836311281834, "learning_rate": 2.8982138584521734e-06, "loss": 0.5713, "step": 364 },
    { "epoch": 0.6594399277326106, "grad_norm": 0.7102203405593134, "learning_rate": 2.871103542174637e-06, "loss": 0.5635, "step": 365 },
    { "epoch": 0.6612466124661247, "grad_norm": 0.20583264395654918, "learning_rate": 2.844069457382459e-06, "loss": 0.5854, "step": 366 },
    { "epoch": 0.6630532971996387, "grad_norm": 0.22941392916103912, "learning_rate": 2.817112572111651e-06, "loss": 0.5664, "step": 367 },
    { "epoch": 0.6648599819331527, "grad_norm": 0.20083466761028634, "learning_rate": 2.790233851633868e-06,
|
"loss": 0.5782, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 0.20836386937730783, |
|
"learning_rate": 2.7634342584218364e-06, |
|
"loss": 0.5791, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.6684733514001807, |
|
"grad_norm": 0.20215809708632768, |
|
"learning_rate": 2.7367147521149052e-06, |
|
"loss": 0.5775, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.6702800361336947, |
|
"grad_norm": 0.20283921573384886, |
|
"learning_rate": 2.7100762894846633e-06, |
|
"loss": 0.5657, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.6720867208672087, |
|
"grad_norm": 0.19940980557913102, |
|
"learning_rate": 2.683519824400693e-06, |
|
"loss": 0.5834, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.6738934056007226, |
|
"grad_norm": 0.1971475171725753, |
|
"learning_rate": 2.657046307796407e-06, |
|
"loss": 0.5691, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.6757000903342367, |
|
"grad_norm": 0.18670858880742722, |
|
"learning_rate": 2.6306566876350072e-06, |
|
"loss": 0.5583, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.6775067750677507, |
|
"grad_norm": 0.19963015977573684, |
|
"learning_rate": 2.6043519088755263e-06, |
|
"loss": 0.5732, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.6793134598012647, |
|
"grad_norm": 0.2022318594566399, |
|
"learning_rate": 2.578132913439e-06, |
|
"loss": 0.5578, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.6811201445347786, |
|
"grad_norm": 0.1889632843204264, |
|
"learning_rate": 2.55200064017474e-06, |
|
"loss": 0.5736, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.6829268292682927, |
|
"grad_norm": 0.1949807928013803, |
|
"learning_rate": 2.5259560248267022e-06, |
|
"loss": 0.5747, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.6847335140018067, |
|
"grad_norm": 0.20295809643739116, |
|
"learning_rate": 2.5000000000000015e-06, |
|
"loss": 0.5771, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.6865401987353207, |
|
"grad_norm": 0.18664865833445815, |
|
"learning_rate": 2.4741334951274948e-06, |
|
"loss": 0.558, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.6883468834688347, |
|
"grad_norm": 0.19667666846949652, |
|
"learning_rate": 2.448357436436519e-06, |
|
"loss": 0.5743, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.6901535682023487, |
|
"grad_norm": 0.19564857934070595, |
|
"learning_rate": 2.4226727469157097e-06, |
|
"loss": 0.5621, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.6919602529358627, |
|
"grad_norm": 0.1979603446322215, |
|
"learning_rate": 2.3970803462819586e-06, |
|
"loss": 0.5812, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.6937669376693767, |
|
"grad_norm": 0.20281766667125065, |
|
"learning_rate": 2.371581150947476e-06, |
|
"loss": 0.5795, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.6955736224028907, |
|
"grad_norm": 0.191510322636999, |
|
"learning_rate": 2.3461760739869865e-06, |
|
"loss": 0.5613, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.6973803071364046, |
|
"grad_norm": 0.19219216588342034, |
|
"learning_rate": 2.320866025105016e-06, |
|
"loss": 0.5728, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.6991869918699187, |
|
"grad_norm": 0.20519661977052045, |
|
"learning_rate": 2.2956519106033366e-06, |
|
"loss": 0.5731, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.7009936766034327, |
|
"grad_norm": 0.25836867497386395, |
|
"learning_rate": 2.2705346333484925e-06, |
|
"loss": 0.5725, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.7028003613369467, |
|
"grad_norm": 0.203911135585441, |
|
"learning_rate": 2.245515092739488e-06, |
|
"loss": 0.5755, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.7046070460704607, |
|
"grad_norm": 0.19395124713713474, |
|
"learning_rate": 2.2205941846755787e-06, |
|
"loss": 0.5687, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.7064137308039747, |
|
"grad_norm": 0.21376707652025245, |
|
"learning_rate": 2.1957728015241793e-06, |
|
"loss": 0.5694, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.7082204155374887, |
|
"grad_norm": 0.2298954337232273, |
|
"learning_rate": 2.171051832088928e-06, |
|
"loss": 0.575, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.7100271002710027, |
|
"grad_norm": 0.19451685337291036, |
|
"learning_rate": 2.146432161577842e-06, |
|
"loss": 0.5797, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.7118337850045167, |
|
"grad_norm": 0.2021060747744258, |
|
"learning_rate": 2.1219146715716332e-06, |
|
"loss": 0.5822, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.7136404697380307, |
|
"grad_norm": 0.20271170144226877, |
|
"learning_rate": 2.097500239992132e-06, |
|
"loss": 0.5775, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.7154471544715447, |
|
"grad_norm": 0.2546574681361855, |
|
"learning_rate": 2.0731897410708618e-06, |
|
"loss": 0.5713, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.7172538392050587, |
|
"grad_norm": 0.19840365539957144, |
|
"learning_rate": 2.0489840453177198e-06, |
|
"loss": 0.5704, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.7190605239385727, |
|
"grad_norm": 0.21939563906369686, |
|
"learning_rate": 2.0248840194898155e-06, |
|
"loss": 0.5718, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.7208672086720868, |
|
"grad_norm": 0.18553818391727428, |
|
"learning_rate": 2.0008905265604316e-06, |
|
"loss": 0.5759, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.7226738934056007, |
|
"grad_norm": 0.19560381575890134, |
|
"learning_rate": 1.977004425688126e-06, |
|
"loss": 0.5719, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.7244805781391147, |
|
"grad_norm": 0.1955704171413575, |
|
"learning_rate": 1.95322657218596e-06, |
|
"loss": 0.5743, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.7262872628726287, |
|
"grad_norm": 0.19385026443586315, |
|
"learning_rate": 1.929557817490874e-06, |
|
"loss": 0.5816, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.7280939476061428, |
|
"grad_norm": 0.2019803721425656, |
|
"learning_rate": 1.9059990091332082e-06, |
|
"loss": 0.563, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.7299006323396567, |
|
"grad_norm": 0.19651420035266637, |
|
"learning_rate": 1.8825509907063328e-06, |
|
"loss": 0.5677, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.7317073170731707, |
|
"grad_norm": 0.2114157269995201, |
|
"learning_rate": 1.8592146018364682e-06, |
|
"loss": 0.5665, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.7335140018066847, |
|
"grad_norm": 0.19769213583005163, |
|
"learning_rate": 1.8359906781525955e-06, |
|
"loss": 0.5729, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.7353206865401988, |
|
"grad_norm": 0.20762643479823137, |
|
"learning_rate": 1.8128800512565514e-06, |
|
"loss": 0.5648, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.7371273712737128, |
|
"grad_norm": 0.18617743174402618, |
|
"learning_rate": 1.7898835486932398e-06, |
|
"loss": 0.5593, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.7389340560072267, |
|
"grad_norm": 0.19762378326385793, |
|
"learning_rate": 1.7670019939210025e-06, |
|
"loss": 0.5775, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.7407407407407407, |
|
"grad_norm": 0.2305864761255618, |
|
"learning_rate": 1.7442362062821323e-06, |
|
"loss": 0.5788, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.7425474254742548, |
|
"grad_norm": 0.21172088224443286, |
|
"learning_rate": 1.7215870009735386e-06, |
|
"loss": 0.5809, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.7443541102077688, |
|
"grad_norm": 0.20643552873991003, |
|
"learning_rate": 1.6990551890175488e-06, |
|
"loss": 0.5542, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.7461607949412827, |
|
"grad_norm": 0.19758864444336163, |
|
"learning_rate": 1.6766415772328732e-06, |
|
"loss": 0.5702, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.7479674796747967, |
|
"grad_norm": 0.19941453180240049, |
|
"learning_rate": 1.6543469682057105e-06, |
|
"loss": 0.5581, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.7497741644083108, |
|
"grad_norm": 0.20479120015754532, |
|
"learning_rate": 1.632172160261012e-06, |
|
"loss": 0.5793, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.7515808491418248, |
|
"grad_norm": 0.19350596328984304, |
|
"learning_rate": 1.610117947433897e-06, |
|
"loss": 0.5656, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.7533875338753387, |
|
"grad_norm": 0.19423471560299505, |
|
"learning_rate": 1.5881851194412106e-06, |
|
"loss": 0.5873, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.7551942186088527, |
|
"grad_norm": 0.20077490007516463, |
|
"learning_rate": 1.5663744616532612e-06, |
|
"loss": 0.5694, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.7570009033423668, |
|
"grad_norm": 0.18838263771434816, |
|
"learning_rate": 1.544686755065677e-06, |
|
"loss": 0.5702, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.7588075880758808, |
|
"grad_norm": 0.21701148053887975, |
|
"learning_rate": 1.523122776271463e-06, |
|
"loss": 0.5847, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.7606142728093948, |
|
"grad_norm": 0.2080916174274085, |
|
"learning_rate": 1.5016832974331725e-06, |
|
"loss": 0.5657, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.7624209575429087, |
|
"grad_norm": 0.21199432876342922, |
|
"learning_rate": 1.4803690862552755e-06, |
|
"loss": 0.5691, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.7642276422764228, |
|
"grad_norm": 0.21079346404578475, |
|
"learning_rate": 1.459180905956653e-06, |
|
"loss": 0.5783, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.7660343270099368, |
|
"grad_norm": 0.19794884095923385, |
|
"learning_rate": 1.438119515243277e-06, |
|
"loss": 0.5818, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.7678410117434508, |
|
"grad_norm": 0.18648321380496394, |
|
"learning_rate": 1.4171856682810386e-06, |
|
"loss": 0.5603, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.7696476964769647, |
|
"grad_norm": 0.19012424528816732, |
|
"learning_rate": 1.39638011466875e-06, |
|
"loss": 0.5807, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.7714543812104788, |
|
"grad_norm": 0.1883015470745895, |
|
"learning_rate": 1.3757035994112915e-06, |
|
"loss": 0.5526, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.7732610659439928, |
|
"grad_norm": 0.2167316937231412, |
|
"learning_rate": 1.3551568628929434e-06, |
|
"loss": 0.5731, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.7750677506775068, |
|
"grad_norm": 0.2028996662499182, |
|
"learning_rate": 1.3347406408508695e-06, |
|
"loss": 0.5676, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.7768744354110207, |
|
"grad_norm": 0.2046126468128694, |
|
"learning_rate": 1.3144556643487743e-06, |
|
"loss": 0.5692, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.7786811201445348, |
|
"grad_norm": 0.20256042727360743, |
|
"learning_rate": 1.2943026597507268e-06, |
|
"loss": 0.578, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.7804878048780488, |
|
"grad_norm": 0.18594150966030543, |
|
"learning_rate": 1.2742823486951434e-06, |
|
"loss": 0.5733, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.7822944896115628, |
|
"grad_norm": 0.18598179155010222, |
|
"learning_rate": 1.254395448068959e-06, |
|
"loss": 0.5814, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.7841011743450768, |
|
"grad_norm": 0.19531950934183231, |
|
"learning_rate": 1.234642669981946e-06, |
|
"loss": 0.58, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.7859078590785907, |
|
"grad_norm": 0.18777486366895885, |
|
"learning_rate": 1.2150247217412186e-06, |
|
"loss": 0.5572, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.7877145438121048, |
|
"grad_norm": 0.19677766481310113, |
|
"learning_rate": 1.195542305825908e-06, |
|
"loss": 0.5713, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.7895212285456188, |
|
"grad_norm": 0.18260137587600686, |
|
"learning_rate": 1.1761961198620081e-06, |
|
"loss": 0.5699, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.7913279132791328, |
|
"grad_norm": 0.19986344346713772, |
|
"learning_rate": 1.1569868565973912e-06, |
|
"loss": 0.5615, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.7931345980126467, |
|
"grad_norm": 0.19605426450561478, |
|
"learning_rate": 1.137915203877003e-06, |
|
"loss": 0.5786, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.7949412827461608, |
|
"grad_norm": 0.1942457012335732, |
|
"learning_rate": 1.118981844618236e-06, |
|
"loss": 0.5701, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.7967479674796748, |
|
"grad_norm": 0.18017956817789316, |
|
"learning_rate": 1.1001874567864696e-06, |
|
"loss": 0.5548, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.7985546522131888, |
|
"grad_norm": 0.19282482451446936, |
|
"learning_rate": 1.0815327133708015e-06, |
|
"loss": 0.5645, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.8003613369467028, |
|
"grad_norm": 0.2073987161742136, |
|
"learning_rate": 1.06301828235994e-06, |
|
"loss": 0.5724, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.8021680216802168, |
|
"grad_norm": 0.19760791073433562, |
|
"learning_rate": 1.044644826718295e-06, |
|
"loss": 0.5779, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.8039747064137308, |
|
"grad_norm": 0.18977162869468303, |
|
"learning_rate": 1.0264130043622245e-06, |
|
"loss": 0.5661, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.8057813911472448, |
|
"grad_norm": 0.22751473565597174, |
|
"learning_rate": 1.0083234681364934e-06, |
|
"loss": 0.5636, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.8075880758807588, |
|
"grad_norm": 0.1866035783785984, |
|
"learning_rate": 9.903768657908803e-07, |
|
"loss": 0.5685, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.8093947606142728, |
|
"grad_norm": 0.18025173354051788, |
|
"learning_rate": 9.725738399569968e-07, |
|
"loss": 0.571, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.8112014453477868, |
|
"grad_norm": 0.25843108031766465, |
|
"learning_rate": 9.549150281252633e-07, |
|
"loss": 0.5662, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.8130081300813008, |
|
"grad_norm": 0.1889222566305079, |
|
"learning_rate": 9.374010626220908e-07, |
|
"loss": 0.5767, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.8148148148148148, |
|
"grad_norm": 0.2079298160547098, |
|
"learning_rate": 9.200325705872342e-07, |
|
"loss": 0.5782, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.8166214995483289, |
|
"grad_norm": 0.19872345058724483, |
|
"learning_rate": 9.028101739513406e-07, |
|
"loss": 0.5743, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.8184281842818428, |
|
"grad_norm": 0.18984627720548802, |
|
"learning_rate": 8.857344894136715e-07, |
|
"loss": 0.5663, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.8202348690153568, |
|
"grad_norm": 0.2171863569377669, |
|
"learning_rate": 8.688061284200266e-07, |
|
"loss": 0.551, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.8220415537488708, |
|
"grad_norm": 0.31205401342756556, |
|
"learning_rate": 8.520256971408453e-07, |
|
"loss": 0.5745, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.8238482384823849, |
|
"grad_norm": 0.19197282383619316, |
|
"learning_rate": 8.353937964495029e-07, |
|
"loss": 0.5666, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.8256549232158988, |
|
"grad_norm": 0.18978462666210455, |
|
"learning_rate": 8.189110219007967e-07, |
|
"loss": 0.5807, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.8274616079494128, |
|
"grad_norm": 0.1881230488368621, |
|
"learning_rate": 8.025779637096138e-07, |
|
"loss": 0.5731, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.8292682926829268, |
|
"grad_norm": 0.18499416483139547, |
|
"learning_rate": 7.863952067298042e-07, |
|
"loss": 0.5629, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.8310749774164409, |
|
"grad_norm": 0.1956610098209481, |
|
"learning_rate": 7.70363330433233e-07, |
|
"loss": 0.5719, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.8328816621499548, |
|
"grad_norm": 0.19237490479516348, |
|
"learning_rate": 7.544829088890326e-07, |
|
"loss": 0.56, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.8346883468834688, |
|
"grad_norm": 0.18443561080860438, |
|
"learning_rate": 7.387545107430455e-07, |
|
"loss": 0.568, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.8364950316169828, |
|
"grad_norm": 0.1787709060352765, |
|
"learning_rate": 7.23178699197467e-07, |
|
"loss": 0.5723, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.8383017163504969, |
|
"grad_norm": 0.19805029001335558, |
|
"learning_rate": 7.077560319906696e-07, |
|
"loss": 0.5747, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.8401084010840109, |
|
"grad_norm": 0.20031602040796653, |
|
"learning_rate": 6.924870613772388e-07, |
|
"loss": 0.5841, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.8419150858175248, |
|
"grad_norm": 0.19620408006336973, |
|
"learning_rate": 6.773723341081945e-07, |
|
"loss": 0.5676, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.8437217705510388, |
|
"grad_norm": 0.18466300633175925, |
|
"learning_rate": 6.624123914114122e-07, |
|
"loss": 0.5643, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.8455284552845529, |
|
"grad_norm": 0.18010825886215534, |
|
"learning_rate": 6.476077689722487e-07, |
|
"loss": 0.575, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.8473351400180669, |
|
"grad_norm": 0.19390872239198673, |
|
"learning_rate": 6.329589969143518e-07, |
|
"loss": 0.564, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.8491418247515808, |
|
"grad_norm": 0.2016995782903763, |
|
"learning_rate": 6.184665997806832e-07, |
|
"loss": 0.5596, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.8509485094850948, |
|
"grad_norm": 0.20130261573098188, |
|
"learning_rate": 6.041310965147318e-07, |
|
"loss": 0.5892, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.8527551942186089, |
|
"grad_norm": 0.1889905749156045, |
|
"learning_rate": 5.899530004419396e-07, |
|
"loss": 0.5605, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.8545618789521229, |
|
"grad_norm": 0.18839588656086062, |
|
"learning_rate": 5.759328192513075e-07, |
|
"loss": 0.5794, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.8563685636856369, |
|
"grad_norm": 0.18265997264247555, |
|
"learning_rate": 5.620710549772295e-07, |
|
"loss": 0.5749, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.8581752484191508, |
|
"grad_norm": 0.2902211914572893, |
|
"learning_rate": 5.483682039815059e-07, |
|
"loss": 0.5646, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.8599819331526649, |
|
"grad_norm": 0.1886242353787816, |
|
"learning_rate": 5.348247569355736e-07, |
|
"loss": 0.5715, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.8617886178861789, |
|
"grad_norm": 0.17985820815334427, |
|
"learning_rate": 5.214411988029355e-07, |
|
"loss": 0.5683, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.8635953026196929, |
|
"grad_norm": 0.1932334144030631, |
|
"learning_rate": 5.082180088217981e-07, |
|
"loss": 0.5793, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.8654019873532068, |
|
"grad_norm": 0.19846804700374185, |
|
"learning_rate": 4.951556604879049e-07, |
|
"loss": 0.5656, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.8672086720867209, |
|
"grad_norm": 0.19097544246883594, |
|
"learning_rate": 4.822546215375851e-07, |
|
"loss": 0.5637, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.8690153568202349, |
|
"grad_norm": 0.20251701539113373, |
|
"learning_rate": 4.6951535393100654e-07, |
|
"loss": 0.5738, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.8708220415537489, |
|
"grad_norm": 0.18387137241320917, |
|
"learning_rate": 4.569383138356276e-07, |
|
"loss": 0.5779, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.8726287262872628, |
|
"grad_norm": 0.18016240078214524, |
|
"learning_rate": 4.4452395160987314e-07, |
|
"loss": 0.5626, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.8744354110207768, |
|
"grad_norm": 0.18401325938899177, |
|
"learning_rate": 4.322727117869951e-07, |
|
"loss": 0.5596, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.8762420957542909, |
|
"grad_norm": 0.1964814901834314, |
|
"learning_rate": 4.201850330591678e-07, |
|
"loss": 0.5693, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.8780487804878049, |
|
"grad_norm": 0.20214900977512928, |
|
"learning_rate": 4.082613482617664e-07, |
|
"loss": 0.5757, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.8798554652213189, |
|
"grad_norm": 0.18568992727418057, |
|
"learning_rate": 3.965020843578804e-07, |
|
"loss": 0.5632, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.8816621499548328, |
|
"grad_norm": 0.2770002675977485, |
|
"learning_rate": 3.8490766242301356e-07, |
|
"loss": 0.5626, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.8834688346883469, |
|
"grad_norm": 0.1824458528913731, |
|
"learning_rate": 3.734784976300165e-07, |
|
"loss": 0.5717, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.8852755194218609, |
|
"grad_norm": 0.19117508774443912, |
|
"learning_rate": 3.6221499923421164e-07, |
|
"loss": 0.5687, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.8870822041553749, |
|
"grad_norm": 0.18365017893423594, |
|
"learning_rate": 3.511175705587433e-07, |
|
"loss": 0.5604, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.8888888888888888, |
|
"grad_norm": 0.18741146999424094, |
|
"learning_rate": 3.4018660898013423e-07, |
|
"loss": 0.5619, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.8906955736224029, |
|
"grad_norm": 0.18938927116387327, |
|
"learning_rate": 3.2942250591405546e-07, |
|
"loss": 0.5562, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.8925022583559169, |
|
"grad_norm": 0.18530272323579683, |
|
"learning_rate": 3.18825646801314e-07, |
|
"loss": 0.5736, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.8943089430894309, |
|
"grad_norm": 0.18343515766026144, |
|
"learning_rate": 3.0839641109404627e-07, |
|
"loss": 0.5703, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.8961156278229448, |
|
"grad_norm": 0.19798495531064736, |
|
"learning_rate": 2.9813517224213274e-07, |
|
"loss": 0.5827, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.8979223125564589, |
|
"grad_norm": 0.1866740069336857, |
|
"learning_rate": 2.8804229767982637e-07, |
|
"loss": 0.5699, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.8997289972899729, |
|
"grad_norm": 0.19720117374324211, |
|
"learning_rate": 2.7811814881259503e-07, |
|
"loss": 0.5692, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.9015356820234869, |
|
"grad_norm": 0.19877461850217384, |
|
"learning_rate": 2.6836308100417874e-07, |
|
"loss": 0.5787, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.9033423667570009, |
|
"grad_norm": 0.18169475905608862, |
|
"learning_rate": 2.587774435638679e-07, |
|
"loss": 0.5767, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.9051490514905149, |
|
"grad_norm": 0.17907733346278654, |
|
"learning_rate": 2.4936157973399266e-07, |
|
"loss": 0.5747, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.9069557362240289, |
|
"grad_norm": 0.1893802003889699, |
|
"learning_rate": 2.40115826677631e-07, |
|
"loss": 0.5697, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.9087624209575429, |
|
"grad_norm": 0.179086296505346, |
|
"learning_rate": 2.3104051546654016e-07, |
|
"loss": 0.5657, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.9105691056910569, |
|
"grad_norm": 0.1743285301118043, |
|
"learning_rate": 2.2213597106929608e-07, |
|
"loss": 0.5811, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.912375790424571, |
|
"grad_norm": 0.18881072074302382, |
|
"learning_rate": 2.134025123396638e-07, |
|
"loss": 0.5525, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.9141824751580849, |
|
"grad_norm": 0.1739559721154075, |
|
"learning_rate": 2.0484045200517222e-07, |
|
"loss": 0.573, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.9159891598915989, |
|
"grad_norm": 0.19468990027144603, |
|
"learning_rate": 1.9645009665592073e-07, |
|
"loss": 0.5718, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.9177958446251129, |
|
"grad_norm": 0.18114240404038176, |
|
"learning_rate": 1.882317467335998e-07, |
|
"loss": 0.5642, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.919602529358627, |
|
"grad_norm": 0.20265384494591884, |
|
"learning_rate": 1.801856965207338e-07, |
|
"loss": 0.5838, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.9214092140921409, |
|
"grad_norm": 0.19308320011002092, |
|
"learning_rate": 1.7231223413014086e-07, |
|
"loss": 0.5669, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.9232158988256549, |
|
"grad_norm": 0.17837143049403117, |
|
"learning_rate": 1.6461164149461805e-07, |
|
"loss": 0.5571, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.9250225835591689, |
|
"grad_norm": 0.20010809681711472, |
|
"learning_rate": 1.5708419435684463e-07, |
|
"loss": 0.5847, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.926829268292683, |
|
"grad_norm": 0.1820109727866399, |
|
"learning_rate": 1.4973016225951097e-07, |
|
"loss": 0.5667, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.928635953026197, |
|
"grad_norm": 0.18754938297151763, |
|
"learning_rate": 1.4254980853566248e-07, |
|
"loss": 0.5636, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.9304426377597109, |
|
"grad_norm": 0.189569959814434, |
|
"learning_rate": 1.3554339029927532e-07, |
|
"loss": 0.5689, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.9322493224932249, |
|
"grad_norm": 0.18079845632481395, |
|
"learning_rate": 1.2871115843604508e-07, |
|
"loss": 0.5766, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.934056007226739, |
|
"grad_norm": 0.1839280509491482, |
|
"learning_rate": 1.220533575944033e-07, |
|
"loss": 0.5646, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.935862691960253, |
|
"grad_norm": 0.18889839481265241, |
|
"learning_rate": 1.1557022617676217e-07, |
|
"loss": 0.567, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.9376693766937669, |
|
"grad_norm": 0.20003542162367108, |
|
"learning_rate": 1.0926199633097156e-07, |
|
"loss": 0.5563, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.9394760614272809, |
|
"grad_norm": 0.1873942422515769, |
|
"learning_rate": 1.031288939420122e-07, |
|
"loss": 0.566, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.941282746160795, |
|
"grad_norm": 0.17403941816338192, |
|
"learning_rate": 9.717113862389993e-08, |
|
"loss": 0.5733, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.943089430894309, |
|
"grad_norm": 0.18080175118175626, |
|
"learning_rate": 9.138894371182983e-08, |
|
"loss": 0.5684, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.9448961156278229, |
|
"grad_norm": 0.18769377355938535, |
|
"learning_rate": 8.57825162545295e-08, |
|
"loss": 0.5592, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.9467028003613369, |
|
"grad_norm": 0.20107798489550263, |
|
"learning_rate": 8.035205700685167e-08, |
|
"loss": 0.5699, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.948509485094851, |
|
"grad_norm": 0.18089485861437263, |
|
"learning_rate": 7.509776042258166e-08, |
|
"loss": 0.5629, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.950316169828365, |
|
"grad_norm": 0.18211106036031965, |
|
"learning_rate": 7.001981464747565e-08, |
|
"loss": 0.5677, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.952122854561879, |
|
"grad_norm": 0.19222610263862472, |
|
"learning_rate": 6.511840151252169e-08, |
|
"loss": 0.5675, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.9539295392953929, |
|
"grad_norm": 0.183560584125327, |
|
"learning_rate": 6.039369652743266e-08, |
|
"loss": 0.5647, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.955736224028907, |
|
"grad_norm": 0.1877668482685949, |
|
"learning_rate": 5.584586887435739e-08, |
|
"loss": 0.5724, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.957542908762421, |
|
"grad_norm": 0.19549336510702425, |
|
"learning_rate": 5.1475081401825553e-08, |
|
"loss": 0.5649, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.959349593495935, |
|
"grad_norm": 0.17632527526062533, |
|
"learning_rate": 4.7281490618914516e-08, |
|
"loss": 0.5766, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.9611562782294489, |
|
"grad_norm": 0.17696039919597048, |
|
"learning_rate": 4.32652466896466e-08, |
|
"loss": 0.5653, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.9629629629629629, |
|
"grad_norm": 0.19198931824326956, |
|
"learning_rate": 3.9426493427611177e-08, |
|
"loss": 0.5702, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.964769647696477, |
|
"grad_norm": 0.17514742553909676, |
|
"learning_rate": 3.576536829081323e-08, |
|
"loss": 0.5664, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.966576332429991, |
|
"grad_norm": 0.1761980199058517, |
|
"learning_rate": 3.2282002376756163e-08, |
|
"loss": 0.5668, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.9683830171635049, |
|
"grad_norm": 0.21019936995115918, |
|
"learning_rate": 2.8976520417742794e-08, |
|
"loss": 0.5766, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.9701897018970189, |
|
"grad_norm": 0.18037841972758342, |
|
"learning_rate": 2.584904077640893e-08, |
|
"loss": 0.5825, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.971996386630533, |
|
"grad_norm": 0.187560981572366, |
|
"learning_rate": 2.2899675441490078e-08, |
|
"loss": 0.566, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.973803071364047, |
|
"grad_norm": 0.19585848587381222, |
|
"learning_rate": 2.012853002380466e-08, |
|
"loss": 0.5628, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.975609756097561, |
|
"grad_norm": 0.18268467840158134, |
|
"learning_rate": 1.753570375247815e-08, |
|
"loss": 0.5636, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.9774164408310749, |
|
"grad_norm": 0.17595450161294304, |
|
"learning_rate": 1.5121289471385915e-08, |
|
"loss": 0.5725, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.979223125564589, |
|
"grad_norm": 0.1889911671042619, |
|
"learning_rate": 1.2885373635829756e-08, |
|
"loss": 0.5645, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.981029810298103, |
|
"grad_norm": 0.1741912607850618, |
|
"learning_rate": 1.0828036309443735e-08, |
|
"loss": 0.578, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.982836495031617, |
|
"grad_norm": 0.19355887998953347, |
|
"learning_rate": 8.949351161324227e-09, |
|
"loss": 0.5773, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.9846431797651309, |
|
"grad_norm": 0.19335384537887662, |
|
"learning_rate": 7.249385463395375e-09, |
|
"loss": 0.5804, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.986449864498645, |
|
"grad_norm": 0.17980975980664549, |
|
"learning_rate": 5.728200087997126e-09, |
|
"loss": 0.5701, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.988256549232159, |
|
"grad_norm": 0.18389260019456813, |
|
"learning_rate": 4.385849505708084e-09, |
|
"loss": 0.5822, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.990063233965673, |
|
"grad_norm": 0.18907380134911314, |
|
"learning_rate": 3.2223817833931803e-09, |
|
"loss": 0.5788, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.991869918699187, |
|
"grad_norm": 0.18419433148589204, |
|
"learning_rate": 2.237838582483387e-09, |
|
"loss": 0.5733, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.993676603432701, |
|
"grad_norm": 0.18807839166321505, |
|
"learning_rate": 1.4322551574830202e-09, |
|
"loss": 0.5633, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.995483288166215, |
|
"grad_norm": 0.23737159678096686, |
|
"learning_rate": 8.056603547090813e-10, |
|
"loss": 0.5822, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.997289972899729, |
|
"grad_norm": 0.17954169667347017, |
|
"learning_rate": 3.580766112565304e-10, |
|
"loss": 0.5642, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.999096657633243, |
|
"grad_norm": 0.1838556783559033, |
|
"learning_rate": 8.951995419614889e-11, |
|
"loss": 0.568, |
|
"step": 553 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 553, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 641050695434240.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|