|
{ |
|
"best_global_step": null, |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.7226738934056007, |
|
"eval_steps": 500, |
|
"global_step": 400, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0018066847335140017, |
|
"grad_norm": 2.833585783869219, |
|
"learning_rate": 0.0, |
|
"loss": 0.8251, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0036133694670280035, |
|
"grad_norm": 2.8814908232125855, |
|
"learning_rate": 1.0714285714285714e-06, |
|
"loss": 0.8284, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.005420054200542005, |
|
"grad_norm": 2.8770896332736204, |
|
"learning_rate": 2.1428571428571427e-06, |
|
"loss": 0.8426, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.007226738934056007, |
|
"grad_norm": 2.7937082319961015, |
|
"learning_rate": 3.2142857142857143e-06, |
|
"loss": 0.8378, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.009033423667570008, |
|
"grad_norm": 2.6064730264267317, |
|
"learning_rate": 4.2857142857142855e-06, |
|
"loss": 0.8226, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01084010840108401, |
|
"grad_norm": 2.08053811254878, |
|
"learning_rate": 5.357142857142857e-06, |
|
"loss": 0.8094, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.012646793134598013, |
|
"grad_norm": 1.4474261934350556, |
|
"learning_rate": 6.428571428571429e-06, |
|
"loss": 0.7737, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.014453477868112014, |
|
"grad_norm": 1.3829973165220981, |
|
"learning_rate": 7.5e-06, |
|
"loss": 0.7732, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.016260162601626018, |
|
"grad_norm": 1.286324724661706, |
|
"learning_rate": 8.571428571428571e-06, |
|
"loss": 0.7653, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.018066847335140017, |
|
"grad_norm": 2.3802925033552484, |
|
"learning_rate": 9.642857142857144e-06, |
|
"loss": 0.7502, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01987353206865402, |
|
"grad_norm": 2.332362112237301, |
|
"learning_rate": 1.0714285714285714e-05, |
|
"loss": 0.7352, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.02168021680216802, |
|
"grad_norm": 2.255275511725211, |
|
"learning_rate": 1.1785714285714286e-05, |
|
"loss": 0.7485, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.023486901535682024, |
|
"grad_norm": 1.862321465187453, |
|
"learning_rate": 1.2857142857142857e-05, |
|
"loss": 0.7252, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.025293586269196026, |
|
"grad_norm": 1.7503296986837806, |
|
"learning_rate": 1.3928571428571429e-05, |
|
"loss": 0.7286, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.02710027100271003, |
|
"grad_norm": 1.3910078564328057, |
|
"learning_rate": 1.5e-05, |
|
"loss": 0.7179, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.028906955736224028, |
|
"grad_norm": 1.1447088267333418, |
|
"learning_rate": 1.6071428571428572e-05, |
|
"loss": 0.7019, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.03071364046973803, |
|
"grad_norm": 0.8309495264632762, |
|
"learning_rate": 1.7142857142857142e-05, |
|
"loss": 0.6998, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.032520325203252036, |
|
"grad_norm": 0.7727537242977915, |
|
"learning_rate": 1.8214285714285712e-05, |
|
"loss": 0.6887, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.03432700993676603, |
|
"grad_norm": 0.8344364990521232, |
|
"learning_rate": 1.928571428571429e-05, |
|
"loss": 0.6834, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.036133694670280034, |
|
"grad_norm": 0.6823059422974896, |
|
"learning_rate": 2.0357142857142858e-05, |
|
"loss": 0.6597, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.037940379403794036, |
|
"grad_norm": 0.6081366218483659, |
|
"learning_rate": 2.1428571428571428e-05, |
|
"loss": 0.666, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.03974706413730804, |
|
"grad_norm": 0.5681526786044694, |
|
"learning_rate": 2.25e-05, |
|
"loss": 0.6549, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.04155374887082204, |
|
"grad_norm": 0.5770791293540624, |
|
"learning_rate": 2.357142857142857e-05, |
|
"loss": 0.6552, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.04336043360433604, |
|
"grad_norm": 0.5162292144131758, |
|
"learning_rate": 2.464285714285714e-05, |
|
"loss": 0.6604, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.045167118337850046, |
|
"grad_norm": 0.5490380070807557, |
|
"learning_rate": 2.5714285714285714e-05, |
|
"loss": 0.6421, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.04697380307136405, |
|
"grad_norm": 0.4844741148097692, |
|
"learning_rate": 2.6785714285714288e-05, |
|
"loss": 0.6566, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.04878048780487805, |
|
"grad_norm": 0.4632816755153851, |
|
"learning_rate": 2.7857142857142858e-05, |
|
"loss": 0.6346, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.05058717253839205, |
|
"grad_norm": 0.47413499783511553, |
|
"learning_rate": 2.892857142857143e-05, |
|
"loss": 0.6372, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.052393857271906055, |
|
"grad_norm": 0.46848340364244656, |
|
"learning_rate": 3e-05, |
|
"loss": 0.6485, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.05420054200542006, |
|
"grad_norm": 0.4249862814563853, |
|
"learning_rate": 2.9999731440137413e-05, |
|
"loss": 0.649, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.05600722673893405, |
|
"grad_norm": 0.3741955197649565, |
|
"learning_rate": 2.9998925770166232e-05, |
|
"loss": 0.6379, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.057813911472448055, |
|
"grad_norm": 0.3881925801129848, |
|
"learning_rate": 2.9997583018935875e-05, |
|
"loss": 0.6262, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.05962059620596206, |
|
"grad_norm": 0.3922238715881848, |
|
"learning_rate": 2.9995703234527553e-05, |
|
"loss": 0.6364, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.06142728093947606, |
|
"grad_norm": 0.33920092930264795, |
|
"learning_rate": 2.999328648425255e-05, |
|
"loss": 0.6233, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.06323396567299007, |
|
"grad_norm": 0.38360312337448227, |
|
"learning_rate": 2.999033285464982e-05, |
|
"loss": 0.6291, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.06504065040650407, |
|
"grad_norm": 0.37438403307661106, |
|
"learning_rate": 2.9986842451482876e-05, |
|
"loss": 0.6248, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.06684733514001806, |
|
"grad_norm": 0.3660585694043342, |
|
"learning_rate": 2.9982815399736008e-05, |
|
"loss": 0.6297, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.06865401987353206, |
|
"grad_norm": 0.3536480766826361, |
|
"learning_rate": 2.9978251843609816e-05, |
|
"loss": 0.6221, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.07046070460704607, |
|
"grad_norm": 0.4002479177979865, |
|
"learning_rate": 2.9973151946516027e-05, |
|
"loss": 0.6245, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.07226738934056007, |
|
"grad_norm": 0.3063169356732776, |
|
"learning_rate": 2.996751589107167e-05, |
|
"loss": 0.6123, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.07407407407407407, |
|
"grad_norm": 0.35680447479317984, |
|
"learning_rate": 2.9961343879092512e-05, |
|
"loss": 0.6186, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.07588075880758807, |
|
"grad_norm": 0.29028093811805317, |
|
"learning_rate": 2.9954636131585845e-05, |
|
"loss": 0.6117, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.07768744354110207, |
|
"grad_norm": 0.327087930172019, |
|
"learning_rate": 2.9947392888742566e-05, |
|
"loss": 0.6116, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.07949412827461608, |
|
"grad_norm": 0.30051645313739944, |
|
"learning_rate": 2.993961440992859e-05, |
|
"loss": 0.6057, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.08130081300813008, |
|
"grad_norm": 0.32276337072694117, |
|
"learning_rate": 2.993130097367553e-05, |
|
"loss": 0.6129, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.08310749774164408, |
|
"grad_norm": 0.29149163779977444, |
|
"learning_rate": 2.9922452877670775e-05, |
|
"loss": 0.6145, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.08491418247515808, |
|
"grad_norm": 0.2848254537728177, |
|
"learning_rate": 2.991307043874677e-05, |
|
"loss": 0.6171, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.08672086720867209, |
|
"grad_norm": 0.2736548047269907, |
|
"learning_rate": 2.9903153992869734e-05, |
|
"loss": 0.6033, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.08852755194218609, |
|
"grad_norm": 0.3041549583089305, |
|
"learning_rate": 2.989270389512756e-05, |
|
"loss": 0.6108, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.09033423667570009, |
|
"grad_norm": 0.28826353495304036, |
|
"learning_rate": 2.988172051971717e-05, |
|
"loss": 0.6101, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.0921409214092141, |
|
"grad_norm": 0.28025059682410447, |
|
"learning_rate": 2.9870204259931062e-05, |
|
"loss": 0.6, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.0939476061427281, |
|
"grad_norm": 0.27249462262783575, |
|
"learning_rate": 2.9858155528143256e-05, |
|
"loss": 0.617, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.0957542908762421, |
|
"grad_norm": 0.3172471705535385, |
|
"learning_rate": 2.9845574755794522e-05, |
|
"loss": 0.6026, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.0975609756097561, |
|
"grad_norm": 0.24824194798422894, |
|
"learning_rate": 2.9832462393376926e-05, |
|
"loss": 0.5921, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.0993676603432701, |
|
"grad_norm": 0.3434388403796833, |
|
"learning_rate": 2.9818818910417706e-05, |
|
"loss": 0.6079, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.1011743450767841, |
|
"grad_norm": 0.30129494072287116, |
|
"learning_rate": 2.9804644795462437e-05, |
|
"loss": 0.6121, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.10298102981029811, |
|
"grad_norm": 0.29313587299072186, |
|
"learning_rate": 2.9789940556057574e-05, |
|
"loss": 0.5974, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.10478771454381211, |
|
"grad_norm": 0.32928460639595075, |
|
"learning_rate": 2.9774706718732255e-05, |
|
"loss": 0.6061, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.10659439927732611, |
|
"grad_norm": 0.2680249584703676, |
|
"learning_rate": 2.9758943828979444e-05, |
|
"loss": 0.592, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.10840108401084012, |
|
"grad_norm": 0.34348625850914644, |
|
"learning_rate": 2.9742652451236414e-05, |
|
"loss": 0.5934, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.1102077687443541, |
|
"grad_norm": 0.28452776028596916, |
|
"learning_rate": 2.972583316886451e-05, |
|
"loss": 0.597, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.1120144534778681, |
|
"grad_norm": 0.3322182395606521, |
|
"learning_rate": 2.9708486584128303e-05, |
|
"loss": 0.6018, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.11382113821138211, |
|
"grad_norm": 0.30021383036006005, |
|
"learning_rate": 2.9690613318173966e-05, |
|
"loss": 0.6017, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.11562782294489611, |
|
"grad_norm": 0.276650237644649, |
|
"learning_rate": 2.9672214011007087e-05, |
|
"loss": 0.6001, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.11743450767841011, |
|
"grad_norm": 0.5862319982197399, |
|
"learning_rate": 2.9653289321469715e-05, |
|
"loss": 0.5941, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.11924119241192412, |
|
"grad_norm": 0.26853876464603094, |
|
"learning_rate": 2.9633839927216793e-05, |
|
"loss": 0.5971, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.12104787714543812, |
|
"grad_norm": 0.2829683425685341, |
|
"learning_rate": 2.9613866524691867e-05, |
|
"loss": 0.5874, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.12285456187895212, |
|
"grad_norm": 0.2819305232851346, |
|
"learning_rate": 2.9593369829102173e-05, |
|
"loss": 0.6031, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.12466124661246612, |
|
"grad_norm": 0.27702734164669873, |
|
"learning_rate": 2.957235057439301e-05, |
|
"loss": 0.6047, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.12646793134598014, |
|
"grad_norm": 0.29063816468584863, |
|
"learning_rate": 2.955080951322147e-05, |
|
"loss": 0.6089, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.12827461607949414, |
|
"grad_norm": 0.3063448907538783, |
|
"learning_rate": 2.9528747416929467e-05, |
|
"loss": 0.582, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.13008130081300814, |
|
"grad_norm": 0.2996205495531954, |
|
"learning_rate": 2.9506165075516148e-05, |
|
"loss": 0.5989, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.13188798554652212, |
|
"grad_norm": 0.3741556351883678, |
|
"learning_rate": 2.9483063297609577e-05, |
|
"loss": 0.5978, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.13369467028003612, |
|
"grad_norm": 0.2987012098136024, |
|
"learning_rate": 2.9459442910437798e-05, |
|
"loss": 0.5894, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.13550135501355012, |
|
"grad_norm": 0.33060763601185644, |
|
"learning_rate": 2.94353047597992e-05, |
|
"loss": 0.5888, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.13730803974706413, |
|
"grad_norm": 0.2858723573255857, |
|
"learning_rate": 2.941064971003224e-05, |
|
"loss": 0.5843, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.13911472448057813, |
|
"grad_norm": 0.30513180176369226, |
|
"learning_rate": 2.9385478643984484e-05, |
|
"loss": 0.58, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.14092140921409213, |
|
"grad_norm": 0.3480721530294818, |
|
"learning_rate": 2.9359792462981007e-05, |
|
"loss": 0.5909, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.14272809394760613, |
|
"grad_norm": 0.3098785903465462, |
|
"learning_rate": 2.9333592086792113e-05, |
|
"loss": 0.5987, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.14453477868112014, |
|
"grad_norm": 0.32360139759712253, |
|
"learning_rate": 2.9306878453600382e-05, |
|
"loss": 0.6078, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.14634146341463414, |
|
"grad_norm": 0.31681100648297805, |
|
"learning_rate": 2.9279652519967105e-05, |
|
"loss": 0.5928, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.14814814814814814, |
|
"grad_norm": 0.33637175342033093, |
|
"learning_rate": 2.9251915260798024e-05, |
|
"loss": 0.5851, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.14995483288166214, |
|
"grad_norm": 0.32126065455334785, |
|
"learning_rate": 2.9223667669308395e-05, |
|
"loss": 0.595, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.15176151761517614, |
|
"grad_norm": 0.28162115413110317, |
|
"learning_rate": 2.9194910756987464e-05, |
|
"loss": 0.5886, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.15356820234869015, |
|
"grad_norm": 0.2927832038468001, |
|
"learning_rate": 2.9165645553562215e-05, |
|
"loss": 0.5919, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.15537488708220415, |
|
"grad_norm": 0.2842368528997179, |
|
"learning_rate": 2.9135873106960525e-05, |
|
"loss": 0.5907, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.15718157181571815, |
|
"grad_norm": 0.3044892933421028, |
|
"learning_rate": 2.9105594483273603e-05, |
|
"loss": 0.5932, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.15898825654923215, |
|
"grad_norm": 0.2787616554815814, |
|
"learning_rate": 2.9074810766717865e-05, |
|
"loss": 0.5972, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.16079494128274616, |
|
"grad_norm": 0.31536931569551113, |
|
"learning_rate": 2.904352305959606e-05, |
|
"loss": 0.5941, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.16260162601626016, |
|
"grad_norm": 0.2812317162219802, |
|
"learning_rate": 2.9011732482257835e-05, |
|
"loss": 0.5858, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.16440831074977416, |
|
"grad_norm": 0.3243626162570077, |
|
"learning_rate": 2.89794401730596e-05, |
|
"loss": 0.5839, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.16621499548328816, |
|
"grad_norm": 0.31149750778219104, |
|
"learning_rate": 2.894664728832377e-05, |
|
"loss": 0.5879, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.16802168021680217, |
|
"grad_norm": 0.2799988189260621, |
|
"learning_rate": 2.8913355002297367e-05, |
|
"loss": 0.5939, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.16982836495031617, |
|
"grad_norm": 0.3659704073921071, |
|
"learning_rate": 2.887956450710995e-05, |
|
"loss": 0.594, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.17163504968383017, |
|
"grad_norm": 0.3155038381999107, |
|
"learning_rate": 2.8845277012730963e-05, |
|
"loss": 0.5857, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.17344173441734417, |
|
"grad_norm": 0.28670310893186607, |
|
"learning_rate": 2.8810493746926364e-05, |
|
"loss": 0.5854, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.17524841915085818, |
|
"grad_norm": 0.3027782704454321, |
|
"learning_rate": 2.87752159552147e-05, |
|
"loss": 0.5913, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.17705510388437218, |
|
"grad_norm": 0.29488509286302705, |
|
"learning_rate": 2.87394449008225e-05, |
|
"loss": 0.5842, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.17886178861788618, |
|
"grad_norm": 0.3023043372178035, |
|
"learning_rate": 2.8703181864639013e-05, |
|
"loss": 0.5971, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.18066847335140018, |
|
"grad_norm": 0.2920343922477137, |
|
"learning_rate": 2.8666428145170385e-05, |
|
"loss": 0.5703, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.18247515808491419, |
|
"grad_norm": 0.26724172739119173, |
|
"learning_rate": 2.8629185058493116e-05, |
|
"loss": 0.5864, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.1842818428184282, |
|
"grad_norm": 0.34770771323943805, |
|
"learning_rate": 2.8591453938206985e-05, |
|
"loss": 0.6044, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.1860885275519422, |
|
"grad_norm": 0.273213628338846, |
|
"learning_rate": 2.8553236135387247e-05, |
|
"loss": 0.5831, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.1878952122854562, |
|
"grad_norm": 0.34065241651987704, |
|
"learning_rate": 2.8514533018536286e-05, |
|
"loss": 0.5812, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.1897018970189702, |
|
"grad_norm": 0.28586917405518736, |
|
"learning_rate": 2.8475345973534605e-05, |
|
"loss": 0.5734, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.1915085817524842, |
|
"grad_norm": 0.2702413811100727, |
|
"learning_rate": 2.8435676403591193e-05, |
|
"loss": 0.5764, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.1933152664859982, |
|
"grad_norm": 0.28403756910607586, |
|
"learning_rate": 2.8395525729193284e-05, |
|
"loss": 0.5826, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.1951219512195122, |
|
"grad_norm": 0.2578481483409949, |
|
"learning_rate": 2.835489538805548e-05, |
|
"loss": 0.5803, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.1969286359530262, |
|
"grad_norm": 0.2757886473056093, |
|
"learning_rate": 2.8313786835068314e-05, |
|
"loss": 0.5893, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.1987353206865402, |
|
"grad_norm": 0.28782114342933773, |
|
"learning_rate": 2.8272201542246077e-05, |
|
"loss": 0.571, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.2005420054200542, |
|
"grad_norm": 0.28817871732654127, |
|
"learning_rate": 2.8230140998674185e-05, |
|
"loss": 0.5907, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.2023486901535682, |
|
"grad_norm": 0.27487749659574895, |
|
"learning_rate": 2.8187606710455807e-05, |
|
"loss": 0.584, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.2041553748870822, |
|
"grad_norm": 0.29745293014063967, |
|
"learning_rate": 2.8144600200657953e-05, |
|
"loss": 0.5881, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.20596205962059622, |
|
"grad_norm": 0.30986472376082147, |
|
"learning_rate": 2.8101123009256946e-05, |
|
"loss": 0.5713, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.20776874435411022, |
|
"grad_norm": 0.258458540249251, |
|
"learning_rate": 2.8057176693083253e-05, |
|
"loss": 0.5869, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.20957542908762422, |
|
"grad_norm": 0.31218243071865426, |
|
"learning_rate": 2.8012762825765763e-05, |
|
"loss": 0.5808, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.21138211382113822, |
|
"grad_norm": 0.3015978973078328, |
|
"learning_rate": 2.7967882997675424e-05, |
|
"loss": 0.5921, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.21318879855465223, |
|
"grad_norm": 0.2906725555402426, |
|
"learning_rate": 2.7922538815868287e-05, |
|
"loss": 0.5793, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.21499548328816623, |
|
"grad_norm": 0.2686708606832387, |
|
"learning_rate": 2.7876731904027994e-05, |
|
"loss": 0.5696, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.21680216802168023, |
|
"grad_norm": 0.306920182031447, |
|
"learning_rate": 2.78304639024076e-05, |
|
"loss": 0.5886, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.2186088527551942, |
|
"grad_norm": 0.2675555748745438, |
|
"learning_rate": 2.7783736467770863e-05, |
|
"loss": 0.5841, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.2204155374887082, |
|
"grad_norm": 0.2961259999734875, |
|
"learning_rate": 2.7736551273332908e-05, |
|
"loss": 0.5888, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.2222222222222222, |
|
"grad_norm": 0.2979054845453111, |
|
"learning_rate": 2.7688910008700305e-05, |
|
"loss": 0.5792, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.2240289069557362, |
|
"grad_norm": 0.2643324547090545, |
|
"learning_rate": 2.764081437981059e-05, |
|
"loss": 0.565, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.22583559168925021, |
|
"grad_norm": 0.27546367306007746, |
|
"learning_rate": 2.7592266108871158e-05, |
|
"loss": 0.5723, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.22764227642276422, |
|
"grad_norm": 0.3046480301612398, |
|
"learning_rate": 2.754326693429761e-05, |
|
"loss": 0.5847, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.22944896115627822, |
|
"grad_norm": 0.2706720967609819, |
|
"learning_rate": 2.7493818610651493e-05, |
|
"loss": 0.5774, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.23125564588979222, |
|
"grad_norm": 0.25419185973706093, |
|
"learning_rate": 2.744392290857747e-05, |
|
"loss": 0.5763, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.23306233062330622, |
|
"grad_norm": 0.27770964690614774, |
|
"learning_rate": 2.7393581614739924e-05, |
|
"loss": 0.5717, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.23486901535682023, |
|
"grad_norm": 0.28692816251688974, |
|
"learning_rate": 2.7342796531758984e-05, |
|
"loss": 0.5901, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.23667570009033423, |
|
"grad_norm": 0.291082485601733, |
|
"learning_rate": 2.729156947814598e-05, |
|
"loss": 0.5849, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.23848238482384823, |
|
"grad_norm": 0.2655019529750113, |
|
"learning_rate": 2.7239902288238297e-05, |
|
"loss": 0.5633, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.24028906955736223, |
|
"grad_norm": 0.2703759360941128, |
|
"learning_rate": 2.7187796812133733e-05, |
|
"loss": 0.5788, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.24209575429087624, |
|
"grad_norm": 0.2975052958330194, |
|
"learning_rate": 2.7135254915624213e-05, |
|
"loss": 0.5795, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.24390243902439024, |
|
"grad_norm": 0.27240132194115707, |
|
"learning_rate": 2.708227848012901e-05, |
|
"loss": 0.5771, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.24570912375790424, |
|
"grad_norm": 0.27811650822381756, |
|
"learning_rate": 2.7028869402627357e-05, |
|
"loss": 0.5803, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.24751580849141824, |
|
"grad_norm": 0.2830474782550724, |
|
"learning_rate": 2.6975029595590523e-05, |
|
"loss": 0.5645, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.24932249322493225, |
|
"grad_norm": 0.2938710437376192, |
|
"learning_rate": 2.6920760986913332e-05, |
|
"loss": 0.5814, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.25112917795844625, |
|
"grad_norm": 0.27314197829276166, |
|
"learning_rate": 2.6866065519845124e-05, |
|
"loss": 0.584, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.2529358626919603, |
|
"grad_norm": 0.2828223996342519, |
|
"learning_rate": 2.681094515292018e-05, |
|
"loss": 0.5834, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.25474254742547425, |
|
"grad_norm": 0.24839151099849252, |
|
"learning_rate": 2.6755401859887598e-05, |
|
"loss": 0.5644, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.2565492321589883, |
|
"grad_norm": 0.32766177067955377, |
|
"learning_rate": 2.6699437629640595e-05, |
|
"loss": 0.5843, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.25835591689250226, |
|
"grad_norm": 0.2953450112150113, |
|
"learning_rate": 2.6643054466145297e-05, |
|
"loss": 0.5763, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.2601626016260163, |
|
"grad_norm": 0.2867043623826392, |
|
"learning_rate": 2.6586254388368995e-05, |
|
"loss": 0.5647, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.26196928635953026, |
|
"grad_norm": 0.28484485619651345, |
|
"learning_rate": 2.652903943020783e-05, |
|
"loss": 0.5772, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.26377597109304424, |
|
"grad_norm": 0.2783119448944388, |
|
"learning_rate": 2.647141164041398e-05, |
|
"loss": 0.582, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.26558265582655827, |
|
"grad_norm": 0.286490896354593, |
|
"learning_rate": 2.641337308252228e-05, |
|
"loss": 0.5715, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.26738934056007224, |
|
"grad_norm": 0.2986882378912008, |
|
"learning_rate": 2.6354925834776346e-05, |
|
"loss": 0.573, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.26919602529358627, |
|
"grad_norm": 0.308835984272729, |
|
"learning_rate": 2.6296071990054167e-05, |
|
"loss": 0.5703, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.27100271002710025, |
|
"grad_norm": 0.2665260387800768, |
|
"learning_rate": 2.6236813655793123e-05, |
|
"loss": 0.58, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.2728093947606143, |
|
"grad_norm": 0.28323735489775853, |
|
"learning_rate": 2.617715295391457e-05, |
|
"loss": 0.562, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.27461607949412825, |
|
"grad_norm": 0.27363701184963624, |
|
"learning_rate": 2.6117092020747824e-05, |
|
"loss": 0.5697, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.2764227642276423, |
|
"grad_norm": 0.2791323579500085, |
|
"learning_rate": 2.6056633006953677e-05, |
|
"loss": 0.5697, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.27822944896115626, |
|
"grad_norm": 0.2742265379635391, |
|
"learning_rate": 2.5995778077447393e-05, |
|
"loss": 0.5736, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.2800361336946703, |
|
"grad_norm": 0.3098153479837776, |
|
"learning_rate": 2.5934529411321174e-05, |
|
"loss": 0.5665, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.28184281842818426, |
|
"grad_norm": 0.25253625108017397, |
|
"learning_rate": 2.587288920176613e-05, |
|
"loss": 0.5661, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.2836495031616983, |
|
"grad_norm": 0.3596999576828901, |
|
"learning_rate": 2.581085965599375e-05, |
|
"loss": 0.5794, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.28545618789521227, |
|
"grad_norm": 0.26441404761712806, |
|
"learning_rate": 2.5748442995156882e-05, |
|
"loss": 0.5767, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.2872628726287263, |
|
"grad_norm": 0.28376911904978996, |
|
"learning_rate": 2.5685641454270172e-05, |
|
"loss": 0.5576, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.28906955736224027, |
|
"grad_norm": 0.31680061742974575, |
|
"learning_rate": 2.5622457282130046e-05, |
|
"loss": 0.5664, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.2908762420957543, |
|
"grad_norm": 0.25774838728867683, |
|
"learning_rate": 2.5558892741234173e-05, |
|
"loss": 0.5746, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.2926829268292683, |
|
"grad_norm": 0.32688394361315487, |
|
"learning_rate": 2.5494950107700482e-05, |
|
"loss": 0.5605, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.2944896115627823, |
|
"grad_norm": 0.25439549403737804, |
|
"learning_rate": 2.5430631671185616e-05, |
|
"loss": 0.5689, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.2962962962962963, |
|
"grad_norm": 0.3068192818856112, |
|
"learning_rate": 2.5365939734802973e-05, |
|
"loss": 0.5784, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.2981029810298103, |
|
"grad_norm": 0.2628542180584984, |
|
"learning_rate": 2.5300876615040223e-05, |
|
"loss": 0.5694, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.2999096657633243, |
|
"grad_norm": 0.3126576916530388, |
|
"learning_rate": 2.523544464167637e-05, |
|
"loss": 0.5756, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.3017163504968383, |
|
"grad_norm": 0.2661513382628665, |
|
"learning_rate": 2.5169646157698313e-05, |
|
"loss": 0.5743, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.3035230352303523, |
|
"grad_norm": 0.2794359843877779, |
|
"learning_rate": 2.5103483519216964e-05, |
|
"loss": 0.5731, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.3053297199638663, |
|
"grad_norm": 0.267337015732144, |
|
"learning_rate": 2.5036959095382875e-05, |
|
"loss": 0.5652, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.3071364046973803, |
|
"grad_norm": 0.283872456782531, |
|
"learning_rate": 2.4970075268301388e-05, |
|
"loss": 0.5787, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.3089430894308943, |
|
"grad_norm": 0.2615383089315913, |
|
"learning_rate": 2.4902834432947353e-05, |
|
"loss": 0.5616, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.3107497741644083, |
|
"grad_norm": 0.27454236304750723, |
|
"learning_rate": 2.4835238997079382e-05, |
|
"loss": 0.569, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.31255645889792233, |
|
"grad_norm": 0.26790606034696746, |
|
"learning_rate": 2.4767291381153603e-05, |
|
"loss": 0.5721, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.3143631436314363, |
|
"grad_norm": 0.24338452694103885, |
|
"learning_rate": 2.4698994018236994e-05, |
|
"loss": 0.5635, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.31616982836495033, |
|
"grad_norm": 0.27640360012160314, |
|
"learning_rate": 2.4630349353920284e-05, |
|
"loss": 0.5676, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.3179765130984643, |
|
"grad_norm": 0.2701056771293538, |
|
"learning_rate": 2.4561359846230346e-05, |
|
"loss": 0.582, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.31978319783197834, |
|
"grad_norm": 0.24753738293952274, |
|
"learning_rate": 2.4492027965542217e-05, |
|
"loss": 0.5571, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.3215898825654923, |
|
"grad_norm": 0.26161253512639976, |
|
"learning_rate": 2.44223561944906e-05, |
|
"loss": 0.5641, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.32339656729900634, |
|
"grad_norm": 0.2570144122258103, |
|
"learning_rate": 2.4352347027881003e-05, |
|
"loss": 0.5738, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.3252032520325203, |
|
"grad_norm": 0.27930694625424896, |
|
"learning_rate": 2.4282002972600382e-05, |
|
"loss": 0.5817, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.32700993676603435, |
|
"grad_norm": 0.23436344914835724, |
|
"learning_rate": 2.4211326547527377e-05, |
|
"loss": 0.5602, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.3288166214995483, |
|
"grad_norm": 0.2729605037353879, |
|
"learning_rate": 2.4140320283442125e-05, |
|
"loss": 0.5704, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.33062330623306235, |
|
"grad_norm": 0.2798695539410275, |
|
"learning_rate": 2.4068986722935625e-05, |
|
"loss": 0.5808, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.3324299909665763, |
|
"grad_norm": 0.2789180229196701, |
|
"learning_rate": 2.3997328420318705e-05, |
|
"loss": 0.5769, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.33423667570009036, |
|
"grad_norm": 0.27131707792538245, |
|
"learning_rate": 2.3925347941530556e-05, |
|
"loss": 0.5733, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.33604336043360433, |
|
"grad_norm": 0.27412770965167466, |
|
"learning_rate": 2.3853047864046843e-05, |
|
"loss": 0.5684, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.33785004516711836, |
|
"grad_norm": 0.2782894711462652, |
|
"learning_rate": 2.3780430776787413e-05, |
|
"loss": 0.5659, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.33965672990063234, |
|
"grad_norm": 0.2430242526823377, |
|
"learning_rate": 2.3707499280023604e-05, |
|
"loss": 0.5658, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.34146341463414637, |
|
"grad_norm": 0.28013753256340473, |
|
"learning_rate": 2.3634255985285104e-05, |
|
"loss": 0.5729, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.34327009936766034, |
|
"grad_norm": 0.2595735350692398, |
|
"learning_rate": 2.356070351526648e-05, |
|
"loss": 0.5621, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.34507678410117437, |
|
"grad_norm": 0.2675696222506633, |
|
"learning_rate": 2.348684450373322e-05, |
|
"loss": 0.5598, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.34688346883468835, |
|
"grad_norm": 0.25005185921177986, |
|
"learning_rate": 2.3412681595427467e-05, |
|
"loss": 0.5674, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.3486901535682023, |
|
"grad_norm": 0.2830143632836114, |
|
"learning_rate": 2.3338217445973268e-05, |
|
"loss": 0.5688, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.35049683830171635, |
|
"grad_norm": 0.2512108464193193, |
|
"learning_rate": 2.3263454721781537e-05, |
|
"loss": 0.5706, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.3523035230352303, |
|
"grad_norm": 0.2768218752940674, |
|
"learning_rate": 2.318839609995453e-05, |
|
"loss": 0.5536, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.35411020776874436, |
|
"grad_norm": 0.25016072504753256, |
|
"learning_rate": 2.3113044268189995e-05, |
|
"loss": 0.5669, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.35591689250225833, |
|
"grad_norm": 0.25210388765899894, |
|
"learning_rate": 2.303740192468495e-05, |
|
"loss": 0.5668, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.35772357723577236, |
|
"grad_norm": 0.28513394233627476, |
|
"learning_rate": 2.2961471778039045e-05, |
|
"loss": 0.5438, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.35953026196928634, |
|
"grad_norm": 0.2548176732782281, |
|
"learning_rate": 2.288525654715757e-05, |
|
"loss": 0.5671, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.36133694670280037, |
|
"grad_norm": 0.27079689306361104, |
|
"learning_rate": 2.280875896115413e-05, |
|
"loss": 0.556, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.36314363143631434, |
|
"grad_norm": 0.24997335486124786, |
|
"learning_rate": 2.2731981759252876e-05, |
|
"loss": 0.5507, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.36495031616982837, |
|
"grad_norm": 0.24175290069909067, |
|
"learning_rate": 2.2654927690690445e-05, |
|
"loss": 0.5774, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.36675700090334235, |
|
"grad_norm": 0.2532933339218424, |
|
"learning_rate": 2.257759951461752e-05, |
|
"loss": 0.5641, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.3685636856368564, |
|
"grad_norm": 0.2621868163907268, |
|
"learning_rate": 2.25e-05, |
|
"loss": 0.577, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.37037037037037035, |
|
"grad_norm": 0.24085524911800554, |
|
"learning_rate": 2.24221319255199e-05, |
|
"loss": 0.5677, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.3721770551038844, |
|
"grad_norm": 0.2632917461430485, |
|
"learning_rate": 2.234399807947579e-05, |
|
"loss": 0.5546, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.37398373983739835, |
|
"grad_norm": 0.23460851905806784, |
|
"learning_rate": 2.2265601259683e-05, |
|
"loss": 0.5626, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.3757904245709124, |
|
"grad_norm": 0.24036262844750986, |
|
"learning_rate": 2.2186944273373426e-05, |
|
"loss": 0.55, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.37759710930442636, |
|
"grad_norm": 0.25653419125069626, |
|
"learning_rate": 2.210802993709498e-05, |
|
"loss": 0.5714, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.3794037940379404, |
|
"grad_norm": 0.25795821587482737, |
|
"learning_rate": 2.202886107661078e-05, |
|
"loss": 0.5674, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.38121047877145436, |
|
"grad_norm": 0.2682406799824342, |
|
"learning_rate": 2.1949440526797928e-05, |
|
"loss": 0.5671, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.3830171635049684, |
|
"grad_norm": 0.25405140008902716, |
|
"learning_rate": 2.1869771131546015e-05, |
|
"loss": 0.5748, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.38482384823848237, |
|
"grad_norm": 0.277498893414283, |
|
"learning_rate": 2.178985574365529e-05, |
|
"loss": 0.5751, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.3866305329719964, |
|
"grad_norm": 0.2621687610495017, |
|
"learning_rate": 2.170969722473449e-05, |
|
"loss": 0.561, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.3884372177055104, |
|
"grad_norm": 0.33519408751709534, |
|
"learning_rate": 2.1629298445098403e-05, |
|
"loss": 0.588, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.3902439024390244, |
|
"grad_norm": 0.25011729377513336, |
|
"learning_rate": 2.154866228366505e-05, |
|
"loss": 0.5765, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.3920505871725384, |
|
"grad_norm": 0.24393104363426, |
|
"learning_rate": 2.146779162785263e-05, |
|
"loss": 0.5767, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.3938572719060524, |
|
"grad_norm": 0.2712254007014316, |
|
"learning_rate": 2.138668937347609e-05, |
|
"loss": 0.5684, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.3956639566395664, |
|
"grad_norm": 0.23805865346251903, |
|
"learning_rate": 2.1305358424643484e-05, |
|
"loss": 0.5634, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.3974706413730804, |
|
"grad_norm": 0.23597914856820135, |
|
"learning_rate": 2.1223801693651927e-05, |
|
"loss": 0.5656, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3992773261065944, |
|
"grad_norm": 0.2606858225008041, |
|
"learning_rate": 2.114202210088336e-05, |
|
"loss": 0.5547, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.4010840108401084, |
|
"grad_norm": 0.5424755323054944, |
|
"learning_rate": 2.106002257469993e-05, |
|
"loss": 0.572, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.4028906955736224, |
|
"grad_norm": 0.2507723535221933, |
|
"learning_rate": 2.0977806051339172e-05, |
|
"loss": 0.555, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.4046973803071364, |
|
"grad_norm": 0.258072944340719, |
|
"learning_rate": 2.0895375474808857e-05, |
|
"loss": 0.5593, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.4065040650406504, |
|
"grad_norm": 0.2872582066072117, |
|
"learning_rate": 2.0812733796781544e-05, |
|
"loss": 0.565, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.4083107497741644, |
|
"grad_norm": 0.2364423377916723, |
|
"learning_rate": 2.0729883976488936e-05, |
|
"loss": 0.5703, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.4101174345076784, |
|
"grad_norm": 0.26270639218601316, |
|
"learning_rate": 2.064682898061588e-05, |
|
"loss": 0.5747, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.41192411924119243, |
|
"grad_norm": 0.2631951060473206, |
|
"learning_rate": 2.0563571783194146e-05, |
|
"loss": 0.5615, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.4137308039747064, |
|
"grad_norm": 0.2317741311806957, |
|
"learning_rate": 2.0480115365495928e-05, |
|
"loss": 0.5639, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.41553748870822044, |
|
"grad_norm": 0.30997167552763305, |
|
"learning_rate": 2.0396462715927107e-05, |
|
"loss": 0.5661, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.4173441734417344, |
|
"grad_norm": 0.2480886169428658, |
|
"learning_rate": 2.0312616829920222e-05, |
|
"loss": 0.5652, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.41915085817524844, |
|
"grad_norm": 0.23699658602720144, |
|
"learning_rate": 2.022858070982723e-05, |
|
"loss": 0.568, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.4209575429087624, |
|
"grad_norm": 0.24827523861823048, |
|
"learning_rate": 2.0144357364811973e-05, |
|
"loss": 0.5504, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.42276422764227645, |
|
"grad_norm": 0.25171738807398075, |
|
"learning_rate": 2.0059949810742452e-05, |
|
"loss": 0.5656, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.4245709123757904, |
|
"grad_norm": 0.2205290140571607, |
|
"learning_rate": 1.997536107008281e-05, |
|
"loss": 0.5603, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.42637759710930445, |
|
"grad_norm": 0.23922227707786625, |
|
"learning_rate": 1.9890594171785128e-05, |
|
"loss": 0.5722, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.4281842818428184, |
|
"grad_norm": 0.24378531634524622, |
|
"learning_rate": 1.9805652151180945e-05, |
|
"loss": 0.5719, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.42999096657633246, |
|
"grad_norm": 0.22020061853610345, |
|
"learning_rate": 1.972053804987258e-05, |
|
"loss": 0.58, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.43179765130984643, |
|
"grad_norm": 0.24680402960694306, |
|
"learning_rate": 1.963525491562421e-05, |
|
"loss": 0.561, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.43360433604336046, |
|
"grad_norm": 0.22284712718565894, |
|
"learning_rate": 1.954980580225275e-05, |
|
"loss": 0.5617, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.43541102077687444, |
|
"grad_norm": 0.2484592670769788, |
|
"learning_rate": 1.946419376951848e-05, |
|
"loss": 0.5603, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.4372177055103884, |
|
"grad_norm": 0.2388978520409849, |
|
"learning_rate": 1.9378421883015505e-05, |
|
"loss": 0.5608, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.43902439024390244, |
|
"grad_norm": 0.2523677374867834, |
|
"learning_rate": 1.9292493214061953e-05, |
|
"loss": 0.5654, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.4408310749774164, |
|
"grad_norm": 0.2383883235220744, |
|
"learning_rate": 1.9206410839590042e-05, |
|
"loss": 0.5719, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.44263775971093045, |
|
"grad_norm": 0.29119882075217696, |
|
"learning_rate": 1.9120177842035853e-05, |
|
"loss": 0.5636, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.4444444444444444, |
|
"grad_norm": 0.24457061677600248, |
|
"learning_rate": 1.9033797309228984e-05, |
|
"loss": 0.5703, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.44625112917795845, |
|
"grad_norm": 0.245042331857765, |
|
"learning_rate": 1.8947272334281977e-05, |
|
"loss": 0.5523, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.4480578139114724, |
|
"grad_norm": 0.28714938771611187, |
|
"learning_rate": 1.8860606015479537e-05, |
|
"loss": 0.5707, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.44986449864498645, |
|
"grad_norm": 0.24121091985031393, |
|
"learning_rate": 1.877380145616763e-05, |
|
"loss": 0.5548, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.45167118337850043, |
|
"grad_norm": 0.2606569077339658, |
|
"learning_rate": 1.868686176464232e-05, |
|
"loss": 0.5697, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.45347786811201446, |
|
"grad_norm": 0.23580420111011996, |
|
"learning_rate": 1.8599790054038487e-05, |
|
"loss": 0.5565, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.45528455284552843, |
|
"grad_norm": 0.23127251461544818, |
|
"learning_rate": 1.8512589442218358e-05, |
|
"loss": 0.5387, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.45709123757904246, |
|
"grad_norm": 0.24560555350844132, |
|
"learning_rate": 1.8425263051659838e-05, |
|
"loss": 0.5618, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.45889792231255644, |
|
"grad_norm": 0.2563321621344126, |
|
"learning_rate": 1.8337814009344716e-05, |
|
"loss": 0.5606, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.46070460704607047, |
|
"grad_norm": 0.2605334576865085, |
|
"learning_rate": 1.8250245446646707e-05, |
|
"loss": 0.5541, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.46251129177958444, |
|
"grad_norm": 0.25888298184378367, |
|
"learning_rate": 1.8162560499219286e-05, |
|
"loss": 0.5539, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.4643179765130985, |
|
"grad_norm": 0.25609617770435344, |
|
"learning_rate": 1.807476230688346e-05, |
|
"loss": 0.5647, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.46612466124661245, |
|
"grad_norm": 0.25864894166343505, |
|
"learning_rate": 1.7986854013515274e-05, |
|
"loss": 0.5591, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.4679313459801265, |
|
"grad_norm": 0.24989575985544257, |
|
"learning_rate": 1.78988387669333e-05, |
|
"loss": 0.5629, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.46973803071364045, |
|
"grad_norm": 0.25850049217533544, |
|
"learning_rate": 1.781071971878587e-05, |
|
"loss": 0.5765, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.4715447154471545, |
|
"grad_norm": 0.26195883131588654, |
|
"learning_rate": 1.7722500024438244e-05, |
|
"loss": 0.5669, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.47335140018066846, |
|
"grad_norm": 0.2643441022284166, |
|
"learning_rate": 1.7634182842859628e-05, |
|
"loss": 0.5559, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.4751580849141825, |
|
"grad_norm": 0.25888645697984375, |
|
"learning_rate": 1.7545771336510033e-05, |
|
"loss": 0.5608, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.47696476964769646, |
|
"grad_norm": 0.25422111952455206, |
|
"learning_rate": 1.7457268671227067e-05, |
|
"loss": 0.566, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.4787714543812105, |
|
"grad_norm": 0.27122044053732625, |
|
"learning_rate": 1.736867801611254e-05, |
|
"loss": 0.5484, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.48057813911472447, |
|
"grad_norm": 0.23792779766126487, |
|
"learning_rate": 1.728000254341901e-05, |
|
"loss": 0.5718, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.4823848238482385, |
|
"grad_norm": 0.22612645512697185, |
|
"learning_rate": 1.7191245428436175e-05, |
|
"loss": 0.5567, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.48419150858175247, |
|
"grad_norm": 0.242856441232987, |
|
"learning_rate": 1.7102409849377188e-05, |
|
"loss": 0.5565, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.4859981933152665, |
|
"grad_norm": 0.22598008818744872, |
|
"learning_rate": 1.7013498987264832e-05, |
|
"loss": 0.5634, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.4878048780487805, |
|
"grad_norm": 0.28177732615064893, |
|
"learning_rate": 1.6924516025817636e-05, |
|
"loss": 0.545, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.4896115627822945, |
|
"grad_norm": 0.2440174064317069, |
|
"learning_rate": 1.683546415133584e-05, |
|
"loss": 0.5667, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.4914182475158085, |
|
"grad_norm": 0.2617297152457466, |
|
"learning_rate": 1.6746346552587342e-05, |
|
"loss": 0.5612, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.4932249322493225, |
|
"grad_norm": 0.25621942967426764, |
|
"learning_rate": 1.665716642069349e-05, |
|
"loss": 0.5692, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.4950316169828365, |
|
"grad_norm": 0.25791288879390334, |
|
"learning_rate": 1.6567926949014805e-05, |
|
"loss": 0.5568, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.4968383017163505, |
|
"grad_norm": 0.2418430510543647, |
|
"learning_rate": 1.6478631333036655e-05, |
|
"loss": 0.5509, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.4986449864498645, |
|
"grad_norm": 0.22083223278151687, |
|
"learning_rate": 1.638928277025482e-05, |
|
"loss": 0.5435, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.5004516711833785, |
|
"grad_norm": 0.2291376855754137, |
|
"learning_rate": 1.6299884460061005e-05, |
|
"loss": 0.5551, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.5022583559168925, |
|
"grad_norm": 0.2302128127786599, |
|
"learning_rate": 1.621043960362826e-05, |
|
"loss": 0.5419, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.5040650406504065, |
|
"grad_norm": 0.228883386803342, |
|
"learning_rate": 1.6120951403796367e-05, |
|
"loss": 0.5571, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.5058717253839206, |
|
"grad_norm": 0.2401265638231575, |
|
"learning_rate": 1.603142306495714e-05, |
|
"loss": 0.5532, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.5076784101174345, |
|
"grad_norm": 0.2114239387262738, |
|
"learning_rate": 1.5941857792939702e-05, |
|
"loss": 0.5541, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.5094850948509485, |
|
"grad_norm": 0.22325184258966826, |
|
"learning_rate": 1.585225879489567e-05, |
|
"loss": 0.5487, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.5112917795844625, |
|
"grad_norm": 0.22702669120714522, |
|
"learning_rate": 1.5762629279184326e-05, |
|
"loss": 0.5635, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.5130984643179766, |
|
"grad_norm": 0.2165975196960249, |
|
"learning_rate": 1.5672972455257726e-05, |
|
"loss": 0.552, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.5149051490514905, |
|
"grad_norm": 0.22658048500397085, |
|
"learning_rate": 1.5583291533545775e-05, |
|
"loss": 0.5465, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.5167118337850045, |
|
"grad_norm": 0.23175050113812948, |
|
"learning_rate": 1.549358972534128e-05, |
|
"loss": 0.5441, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.5185185185185185, |
|
"grad_norm": 0.21294545077390667, |
|
"learning_rate": 1.5403870242684942e-05, |
|
"loss": 0.5654, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.5203252032520326, |
|
"grad_norm": 0.2504268423528941, |
|
"learning_rate": 1.5314136298250355e-05, |
|
"loss": 0.5628, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.5221318879855466, |
|
"grad_norm": 0.2099986397700923, |
|
"learning_rate": 1.5224391105228956e-05, |
|
"loss": 0.5474, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.5239385727190605, |
|
"grad_norm": 0.20317375678953348, |
|
"learning_rate": 1.5134637877214968e-05, |
|
"loss": 0.5588, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.5257452574525745, |
|
"grad_norm": 0.23346942768814213, |
|
"learning_rate": 1.5044879828090346e-05, |
|
"loss": 0.5631, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.5275519421860885, |
|
"grad_norm": 0.23620525056730876, |
|
"learning_rate": 1.4955120171909658e-05, |
|
"loss": 0.5671, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.5293586269196026, |
|
"grad_norm": 0.20358818421382174, |
|
"learning_rate": 1.4865362122785031e-05, |
|
"loss": 0.5598, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.5311653116531165, |
|
"grad_norm": 0.24259950926712837, |
|
"learning_rate": 1.4775608894771048e-05, |
|
"loss": 0.5521, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.5329719963866305, |
|
"grad_norm": 0.2643019881724919, |
|
"learning_rate": 1.4685863701749648e-05, |
|
"loss": 0.5497, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.5347786811201445, |
|
"grad_norm": 0.24143753848243574, |
|
"learning_rate": 1.4596129757315062e-05, |
|
"loss": 0.554, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.5365853658536586, |
|
"grad_norm": 0.23280547368412433, |
|
"learning_rate": 1.4506410274658718e-05, |
|
"loss": 0.5589, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.5383920505871725, |
|
"grad_norm": 0.23707855241324371, |
|
"learning_rate": 1.441670846645423e-05, |
|
"loss": 0.5536, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.5401987353206865, |
|
"grad_norm": 0.23874631751676395, |
|
"learning_rate": 1.4327027544742281e-05, |
|
"loss": 0.569, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.5420054200542005, |
|
"grad_norm": 0.22915529315710134, |
|
"learning_rate": 1.4237370720815675e-05, |
|
"loss": 0.5569, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.5438121047877146, |
|
"grad_norm": 0.2086901981356731, |
|
"learning_rate": 1.4147741205104336e-05, |
|
"loss": 0.5588, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.5456187895212286, |
|
"grad_norm": 0.20402556308618858, |
|
"learning_rate": 1.40581422070603e-05, |
|
"loss": 0.5704, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.5474254742547425, |
|
"grad_norm": 0.2294307429970762, |
|
"learning_rate": 1.3968576935042864e-05, |
|
"loss": 0.5518, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.5492321589882565, |
|
"grad_norm": 0.21919976703992083, |
|
"learning_rate": 1.3879048596203637e-05, |
|
"loss": 0.547, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.5510388437217706, |
|
"grad_norm": 0.219255764255333, |
|
"learning_rate": 1.3789560396371742e-05, |
|
"loss": 0.5536, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.5528455284552846, |
|
"grad_norm": 0.22276402144655247, |
|
"learning_rate": 1.3700115539938995e-05, |
|
"loss": 0.5513, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.5546522131887985, |
|
"grad_norm": 0.24177217240652826, |
|
"learning_rate": 1.3610717229745183e-05, |
|
"loss": 0.5684, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.5564588979223125, |
|
"grad_norm": 0.21972466195098042, |
|
"learning_rate": 1.3521368666963348e-05, |
|
"loss": 0.548, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.5582655826558266, |
|
"grad_norm": 0.23251696445513334, |
|
"learning_rate": 1.3432073050985201e-05, |
|
"loss": 0.5575, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.5600722673893406, |
|
"grad_norm": 0.20890855952451545, |
|
"learning_rate": 1.3342833579306511e-05, |
|
"loss": 0.5399, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.5618789521228545, |
|
"grad_norm": 0.20970022697329482, |
|
"learning_rate": 1.3253653447412658e-05, |
|
"loss": 0.5589, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.5636856368563685, |
|
"grad_norm": 0.22861069585146973, |
|
"learning_rate": 1.3164535848664167e-05, |
|
"loss": 0.5578, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.5654923215898826, |
|
"grad_norm": 0.207477975560217, |
|
"learning_rate": 1.3075483974182368e-05, |
|
"loss": 0.559, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.5672990063233966, |
|
"grad_norm": 0.19459169531094797, |
|
"learning_rate": 1.2986501012735174e-05, |
|
"loss": 0.5512, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.5691056910569106, |
|
"grad_norm": 0.21162983728697593, |
|
"learning_rate": 1.2897590150622812e-05, |
|
"loss": 0.552, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.5709123757904245, |
|
"grad_norm": 0.27521841534463504, |
|
"learning_rate": 1.2808754571563827e-05, |
|
"loss": 0.5481, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.5727190605239386, |
|
"grad_norm": 0.21570526772732707, |
|
"learning_rate": 1.2719997456580993e-05, |
|
"loss": 0.5437, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.5745257452574526, |
|
"grad_norm": 0.2306349309037197, |
|
"learning_rate": 1.2631321983887464e-05, |
|
"loss": 0.556, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.5763324299909666, |
|
"grad_norm": 0.234035655610594, |
|
"learning_rate": 1.2542731328772936e-05, |
|
"loss": 0.5652, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.5781391147244805, |
|
"grad_norm": 0.2257357122157385, |
|
"learning_rate": 1.2454228663489967e-05, |
|
"loss": 0.5473, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.5799457994579946, |
|
"grad_norm": 0.22399687169655194, |
|
"learning_rate": 1.2365817157140373e-05, |
|
"loss": 0.5447, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.5817524841915086, |
|
"grad_norm": 0.2188698587058208, |
|
"learning_rate": 1.2277499975561755e-05, |
|
"loss": 0.5655, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.5835591689250226, |
|
"grad_norm": 0.2201799534590002, |
|
"learning_rate": 1.2189280281214128e-05, |
|
"loss": 0.5518, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.5853658536585366, |
|
"grad_norm": 0.21825927972002432, |
|
"learning_rate": 1.2101161233066704e-05, |
|
"loss": 0.5326, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.5871725383920506, |
|
"grad_norm": 0.2167697132068363, |
|
"learning_rate": 1.201314598648473e-05, |
|
"loss": 0.5502, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.5889792231255646, |
|
"grad_norm": 0.2201723524764833, |
|
"learning_rate": 1.1925237693116546e-05, |
|
"loss": 0.5594, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.5907859078590786, |
|
"grad_norm": 0.21799360608456733, |
|
"learning_rate": 1.1837439500780718e-05, |
|
"loss": 0.5459, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.5925925925925926, |
|
"grad_norm": 0.24466157076749992, |
|
"learning_rate": 1.1749754553353297e-05, |
|
"loss": 0.5524, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.5943992773261066, |
|
"grad_norm": 0.2181076011783831, |
|
"learning_rate": 1.1662185990655285e-05, |
|
"loss": 0.5422, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.5962059620596206, |
|
"grad_norm": 0.21656928944874607, |
|
"learning_rate": 1.1574736948340163e-05, |
|
"loss": 0.5495, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.5980126467931346, |
|
"grad_norm": 0.2165347851980439, |
|
"learning_rate": 1.1487410557781642e-05, |
|
"loss": 0.5537, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.5998193315266486, |
|
"grad_norm": 0.2142744405919634, |
|
"learning_rate": 1.140020994596151e-05, |
|
"loss": 0.5372, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.6016260162601627, |
|
"grad_norm": 0.20765388093747186, |
|
"learning_rate": 1.1313138235357683e-05, |
|
"loss": 0.5579, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.6034327009936766, |
|
"grad_norm": 0.21430397230337211, |
|
"learning_rate": 1.1226198543832373e-05, |
|
"loss": 0.5703, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.6052393857271906, |
|
"grad_norm": 0.2285140304691723, |
|
"learning_rate": 1.1139393984520467e-05, |
|
"loss": 0.5533, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.6070460704607046, |
|
"grad_norm": 0.20230990763758133, |
|
"learning_rate": 1.1052727665718027e-05, |
|
"loss": 0.5503, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.6088527551942186, |
|
"grad_norm": 0.20765339700829402, |
|
"learning_rate": 1.0966202690771015e-05, |
|
"loss": 0.5517, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.6106594399277326, |
|
"grad_norm": 0.230333906040596, |
|
"learning_rate": 1.087982215796415e-05, |
|
"loss": 0.5479, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.6124661246612466, |
|
"grad_norm": 0.21808621426326946, |
|
"learning_rate": 1.079358916040996e-05, |
|
"loss": 0.5563, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.6142728093947606, |
|
"grad_norm": 0.205686948340861, |
|
"learning_rate": 1.0707506785938052e-05, |
|
"loss": 0.5511, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.6160794941282746, |
|
"grad_norm": 0.190927972476689, |
|
"learning_rate": 1.0621578116984501e-05, |
|
"loss": 0.5501, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.6178861788617886, |
|
"grad_norm": 0.20134157638968017, |
|
"learning_rate": 1.0535806230481525e-05, |
|
"loss": 0.5529, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.6196928635953026, |
|
"grad_norm": 0.2105416239709376, |
|
"learning_rate": 1.0450194197747252e-05, |
|
"loss": 0.5475, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.6214995483288166, |
|
"grad_norm": 0.20160538858713717, |
|
"learning_rate": 1.036474508437579e-05, |
|
"loss": 0.5369, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.6233062330623306, |
|
"grad_norm": 0.2031508476910825, |
|
"learning_rate": 1.0279461950127422e-05, |
|
"loss": 0.5511, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.6251129177958447, |
|
"grad_norm": 0.20473911575958653, |
|
"learning_rate": 1.0194347848819055e-05, |
|
"loss": 0.553, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.6269196025293586, |
|
"grad_norm": 0.19859484608101366, |
|
"learning_rate": 1.0109405828214872e-05, |
|
"loss": 0.5385, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.6287262872628726, |
|
"grad_norm": 0.20360585789098407, |
|
"learning_rate": 1.0024638929917192e-05, |
|
"loss": 0.5395, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.6305329719963866, |
|
"grad_norm": 0.19975767712280418, |
|
"learning_rate": 9.940050189257552e-06, |
|
"loss": 0.5429, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.6323396567299007, |
|
"grad_norm": 0.24700377233323906, |
|
"learning_rate": 9.85564263518803e-06, |
|
"loss": 0.5554, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.6341463414634146, |
|
"grad_norm": 0.20041714113991527, |
|
"learning_rate": 9.771419290172776e-06, |
|
"loss": 0.5444, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.6359530261969286, |
|
"grad_norm": 0.21083478243785528, |
|
"learning_rate": 9.68738317007978e-06, |
|
"loss": 0.5581, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.6377597109304426, |
|
"grad_norm": 0.211692060830837, |
|
"learning_rate": 9.6035372840729e-06, |
|
"loss": 0.5587, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.6395663956639567, |
|
"grad_norm": 0.20531075079582653, |
|
"learning_rate": 9.519884634504074e-06, |
|
"loss": 0.5534, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.6413730803974707, |
|
"grad_norm": 0.19690323142552518, |
|
"learning_rate": 9.436428216805862e-06, |
|
"loss": 0.5424, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.6431797651309846, |
|
"grad_norm": 0.18843247974966473, |
|
"learning_rate": 9.35317101938412e-06, |
|
"loss": 0.5563, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.6449864498644986, |
|
"grad_norm": 0.20504004491586353, |
|
"learning_rate": 9.270116023511063e-06, |
|
"loss": 0.5577, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.6467931345980127, |
|
"grad_norm": 0.21734304763898019, |
|
"learning_rate": 9.187266203218457e-06, |
|
"loss": 0.5634, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.6485998193315267, |
|
"grad_norm": 0.22817395445319444, |
|
"learning_rate": 9.104624525191147e-06, |
|
"loss": 0.5365, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.6504065040650406, |
|
"grad_norm": 0.18372627208786557, |
|
"learning_rate": 9.022193948660825e-06, |
|
"loss": 0.5452, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.6522131887985546, |
|
"grad_norm": 0.21615699946170763, |
|
"learning_rate": 8.939977425300072e-06, |
|
"loss": 0.5501, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.6540198735320687, |
|
"grad_norm": 0.20008611102684865, |
|
"learning_rate": 8.857977899116645e-06, |
|
"loss": 0.5616, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.6558265582655827, |
|
"grad_norm": 0.23174705952303656, |
|
"learning_rate": 8.776198306348072e-06, |
|
"loss": 0.5524, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.6576332429990966, |
|
"grad_norm": 0.20382907023733024, |
|
"learning_rate": 8.69464157535652e-06, |
|
"loss": 0.552, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.6594399277326106, |
|
"grad_norm": 0.19478150790759052, |
|
"learning_rate": 8.61331062652391e-06, |
|
"loss": 0.5434, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.6612466124661247, |
|
"grad_norm": 0.2078838874517212, |
|
"learning_rate": 8.532208372147376e-06, |
|
"loss": 0.5644, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.6630532971996387, |
|
"grad_norm": 0.19589046504154586, |
|
"learning_rate": 8.451337716334953e-06, |
|
"loss": 0.5463, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.6648599819331527, |
|
"grad_norm": 0.19502515145119065, |
|
"learning_rate": 8.370701554901603e-06, |
|
"loss": 0.5577, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 0.20983006733253054, |
|
"learning_rate": 8.290302775265509e-06, |
|
"loss": 0.5584, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.6684733514001807, |
|
"grad_norm": 0.2437592642935105, |
|
"learning_rate": 8.210144256344714e-06, |
|
"loss": 0.5571, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.6702800361336947, |
|
"grad_norm": 0.21669009822868343, |
|
"learning_rate": 8.130228868453989e-06, |
|
"loss": 0.5451, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.6720867208672087, |
|
"grad_norm": 0.1979712500001685, |
|
"learning_rate": 8.050559473202078e-06, |
|
"loss": 0.5624, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.6738934056007226, |
|
"grad_norm": 0.198665648334873, |
|
"learning_rate": 7.971138923389221e-06, |
|
"loss": 0.5483, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.6757000903342367, |
|
"grad_norm": 0.20283079234556398, |
|
"learning_rate": 7.89197006290502e-06, |
|
"loss": 0.5385, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.6775067750677507, |
|
"grad_norm": 0.20311321436753843, |
|
"learning_rate": 7.813055726626579e-06, |
|
"loss": 0.5524, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.6793134598012647, |
|
"grad_norm": 0.20230781705475714, |
|
"learning_rate": 7.734398740316999e-06, |
|
"loss": 0.5371, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.6811201445347786, |
|
"grad_norm": 0.20433473196118349, |
|
"learning_rate": 7.65600192052422e-06, |
|
"loss": 0.5534, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.6829268292682927, |
|
"grad_norm": 0.2049802150842661, |
|
"learning_rate": 7.577868074480107e-06, |
|
"loss": 0.5539, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.6847335140018067, |
|
"grad_norm": 0.21549344697790077, |
|
"learning_rate": 7.500000000000004e-06, |
|
"loss": 0.557, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.6865401987353207, |
|
"grad_norm": 0.20686961542523605, |
|
"learning_rate": 7.422400485382484e-06, |
|
"loss": 0.538, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.6883468834688347, |
|
"grad_norm": 0.20513275663992378, |
|
"learning_rate": 7.345072309309557e-06, |
|
"loss": 0.5543, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.6901535682023487, |
|
"grad_norm": 0.2080906491574277, |
|
"learning_rate": 7.268018240747129e-06, |
|
"loss": 0.5417, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.6919602529358627, |
|
"grad_norm": 0.20260548256686983, |
|
"learning_rate": 7.191241038845876e-06, |
|
"loss": 0.5605, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.6937669376693767, |
|
"grad_norm": 0.19848636745559115, |
|
"learning_rate": 7.114743452842428e-06, |
|
"loss": 0.5582, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.6955736224028907, |
|
"grad_norm": 0.17844944293452994, |
|
"learning_rate": 7.038528221960959e-06, |
|
"loss": 0.5405, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.6973803071364046, |
|
"grad_norm": 0.19177131571831604, |
|
"learning_rate": 6.962598075315047e-06, |
|
"loss": 0.5523, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.6991869918699187, |
|
"grad_norm": 0.2285740302085458, |
|
"learning_rate": 6.886955731810009e-06, |
|
"loss": 0.5524, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.7009936766034327, |
|
"grad_norm": 0.20997983991994407, |
|
"learning_rate": 6.811603900045477e-06, |
|
"loss": 0.5512, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.7028003613369467, |
|
"grad_norm": 0.22602951979137562, |
|
"learning_rate": 6.736545278218464e-06, |
|
"loss": 0.5539, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.7046070460704607, |
|
"grad_norm": 0.18285607463364956, |
|
"learning_rate": 6.661782554026735e-06, |
|
"loss": 0.5487, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.7064137308039747, |
|
"grad_norm": 0.20354611498704106, |
|
"learning_rate": 6.587318404572537e-06, |
|
"loss": 0.548, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.7082204155374887, |
|
"grad_norm": 0.19311036930296172, |
|
"learning_rate": 6.513155496266783e-06, |
|
"loss": 0.5542, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.7100271002710027, |
|
"grad_norm": 0.19791496465028627, |
|
"learning_rate": 6.439296484733526e-06, |
|
"loss": 0.559, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.7118337850045167, |
|
"grad_norm": 0.20186541471546574, |
|
"learning_rate": 6.3657440147149e-06, |
|
"loss": 0.5615, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.7136404697380307, |
|
"grad_norm": 0.18247192759402112, |
|
"learning_rate": 6.292500719976397e-06, |
|
"loss": 0.5559, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.7154471544715447, |
|
"grad_norm": 0.19043505810301548, |
|
"learning_rate": 6.219569223212585e-06, |
|
"loss": 0.5502, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.7172538392050587, |
|
"grad_norm": 0.20028185627315484, |
|
"learning_rate": 6.146952135953158e-06, |
|
"loss": 0.5503, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.7190605239385727, |
|
"grad_norm": 0.1965621626117177, |
|
"learning_rate": 6.074652058469447e-06, |
|
"loss": 0.5514, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.7208672086720868, |
|
"grad_norm": 0.2128416316047354, |
|
"learning_rate": 6.0026715796812945e-06, |
|
"loss": 0.5539, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.7226738934056007, |
|
"grad_norm": 0.21681643715767268, |
|
"learning_rate": 5.931013277064377e-06, |
|
"loss": 0.5513, |
|
"step": 400 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 553, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 463977396436992.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|