{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.3656307129798903,
  "eval_steps": 500,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0018281535648994515,
      "grad_norm": 2.089374507496642,
      "learning_rate": 0.0,
      "loss": 0.5957,
      "step": 1
    },
    {
      "epoch": 0.003656307129798903,
      "grad_norm": 2.2205513668741044,
      "learning_rate": 1.0714285714285714e-06,
      "loss": 0.5994,
      "step": 2
    },
    {
      "epoch": 0.005484460694698354,
      "grad_norm": 2.545945852200423,
      "learning_rate": 2.1428571428571427e-06,
      "loss": 0.5948,
      "step": 3
    },
    {
      "epoch": 0.007312614259597806,
      "grad_norm": 2.2330035148383947,
      "learning_rate": 3.2142857142857143e-06,
      "loss": 0.6305,
      "step": 4
    },
    {
      "epoch": 0.009140767824497258,
      "grad_norm": 1.8952582634630641,
      "learning_rate": 4.2857142857142855e-06,
      "loss": 0.5998,
      "step": 5
    },
    {
      "epoch": 0.010968921389396709,
      "grad_norm": 1.570960207475644,
      "learning_rate": 5.357142857142857e-06,
      "loss": 0.5898,
      "step": 6
    },
    {
      "epoch": 0.012797074954296161,
      "grad_norm": 1.2522662311648174,
      "learning_rate": 6.428571428571429e-06,
      "loss": 0.6013,
      "step": 7
    },
    {
      "epoch": 0.014625228519195612,
      "grad_norm": 1.065080174164971,
      "learning_rate": 7.5e-06,
      "loss": 0.5682,
      "step": 8
    },
    {
      "epoch": 0.016453382084095063,
      "grad_norm": 0.9448479764184926,
      "learning_rate": 8.571428571428571e-06,
      "loss": 0.5452,
      "step": 9
    },
    {
      "epoch": 0.018281535648994516,
      "grad_norm": 1.7141233153619826,
      "learning_rate": 9.642857142857144e-06,
      "loss": 0.59,
      "step": 10
    },
    {
      "epoch": 0.02010968921389397,
      "grad_norm": 2.017633207813829,
      "learning_rate": 1.0714285714285714e-05,
      "loss": 0.575,
      "step": 11
    },
    {
      "epoch": 0.021937842778793418,
      "grad_norm": 1.8293000392166587,
      "learning_rate": 1.1785714285714286e-05,
      "loss": 0.5598,
      "step": 12
    },
    {
      "epoch": 0.02376599634369287,
      "grad_norm": 1.4816869854216612,
      "learning_rate": 1.2857142857142857e-05,
      "loss": 0.5495,
      "step": 13
    },
    {
      "epoch": 0.025594149908592323,
      "grad_norm": 1.5033597863132906,
      "learning_rate": 1.3928571428571429e-05,
      "loss": 0.5372,
      "step": 14
    },
    {
      "epoch": 0.027422303473491772,
      "grad_norm": 1.3141514496532496,
      "learning_rate": 1.5e-05,
      "loss": 0.5733,
      "step": 15
    },
    {
      "epoch": 0.029250457038391225,
      "grad_norm": 2.7794219916654233,
      "learning_rate": 1.6071428571428572e-05,
      "loss": 0.5347,
      "step": 16
    },
    {
      "epoch": 0.031078610603290677,
      "grad_norm": 0.9928713947921902,
      "learning_rate": 1.7142857142857142e-05,
      "loss": 0.4908,
      "step": 17
    },
    {
      "epoch": 0.03290676416819013,
      "grad_norm": 0.8852252619423812,
      "learning_rate": 1.8214285714285712e-05,
      "loss": 0.5307,
      "step": 18
    },
    {
      "epoch": 0.03473491773308958,
      "grad_norm": 0.7832318403464806,
      "learning_rate": 1.928571428571429e-05,
      "loss": 0.5025,
      "step": 19
    },
    {
      "epoch": 0.03656307129798903,
      "grad_norm": 0.9796265282565124,
      "learning_rate": 2.0357142857142858e-05,
      "loss": 0.5095,
      "step": 20
    },
    {
      "epoch": 0.038391224862888484,
      "grad_norm": 0.7218139820603727,
      "learning_rate": 2.1428571428571428e-05,
      "loss": 0.4834,
      "step": 21
    },
    {
      "epoch": 0.04021937842778794,
      "grad_norm": 0.6506357909260763,
      "learning_rate": 2.25e-05,
      "loss": 0.4947,
      "step": 22
    },
    {
      "epoch": 0.04204753199268738,
      "grad_norm": 0.5616806424289195,
      "learning_rate": 2.357142857142857e-05,
      "loss": 0.4866,
      "step": 23
    },
    {
      "epoch": 0.043875685557586835,
      "grad_norm": 0.566744323961197,
      "learning_rate": 2.464285714285714e-05,
      "loss": 0.515,
      "step": 24
    },
    {
      "epoch": 0.04570383912248629,
      "grad_norm": 0.6913117297678986,
      "learning_rate": 2.5714285714285714e-05,
      "loss": 0.5227,
      "step": 25
    },
    {
      "epoch": 0.04753199268738574,
      "grad_norm": 0.5881117480751623,
      "learning_rate": 2.6785714285714288e-05,
      "loss": 0.478,
      "step": 26
    },
    {
      "epoch": 0.04936014625228519,
      "grad_norm": 0.5399101198373484,
      "learning_rate": 2.7857142857142858e-05,
      "loss": 0.5039,
      "step": 27
    },
    {
      "epoch": 0.051188299817184646,
      "grad_norm": 0.5359845255880198,
      "learning_rate": 2.892857142857143e-05,
      "loss": 0.5015,
      "step": 28
    },
    {
      "epoch": 0.05301645338208409,
      "grad_norm": 0.4718417677524385,
      "learning_rate": 3e-05,
      "loss": 0.4738,
      "step": 29
    },
    {
      "epoch": 0.054844606946983544,
      "grad_norm": 0.47359316021290165,
      "learning_rate": 2.999972519478696e-05,
      "loss": 0.4993,
      "step": 30
    },
    {
      "epoch": 0.056672760511883,
      "grad_norm": 0.4922109148988566,
      "learning_rate": 2.99989007892169e-05,
      "loss": 0.4888,
      "step": 31
    },
    {
      "epoch": 0.05850091407678245,
      "grad_norm": 0.3579397510509835,
      "learning_rate": 2.9997526813496602e-05,
      "loss": 0.4792,
      "step": 32
    },
    {
      "epoch": 0.0603290676416819,
      "grad_norm": 0.37626393648031903,
      "learning_rate": 2.9995603317969497e-05,
      "loss": 0.4873,
      "step": 33
    },
    {
      "epoch": 0.062157221206581355,
      "grad_norm": 0.41211122381414894,
      "learning_rate": 2.99931303731138e-05,
      "loss": 0.4939,
      "step": 34
    },
    {
      "epoch": 0.06398537477148081,
      "grad_norm": 0.3770284364229165,
      "learning_rate": 2.9990108069539932e-05,
      "loss": 0.479,
      "step": 35
    },
    {
      "epoch": 0.06581352833638025,
      "grad_norm": 0.3709162789872314,
      "learning_rate": 2.9986536517987187e-05,
      "loss": 0.4864,
      "step": 36
    },
    {
      "epoch": 0.06764168190127971,
      "grad_norm": 0.36989388190185846,
      "learning_rate": 2.998241584931971e-05,
      "loss": 0.4727,
      "step": 37
    },
    {
      "epoch": 0.06946983546617916,
      "grad_norm": 0.36339103914080506,
      "learning_rate": 2.9977746214521646e-05,
      "loss": 0.472,
      "step": 38
    },
    {
      "epoch": 0.0712979890310786,
      "grad_norm": 0.3378220732996809,
      "learning_rate": 2.997252778469168e-05,
      "loss": 0.4581,
      "step": 39
    },
    {
      "epoch": 0.07312614259597806,
      "grad_norm": 0.33242480046792755,
      "learning_rate": 2.9966760751036697e-05,
      "loss": 0.4567,
      "step": 40
    },
    {
      "epoch": 0.07495429616087751,
      "grad_norm": 0.3517734032444255,
      "learning_rate": 2.9960445324864815e-05,
      "loss": 0.4674,
      "step": 41
    },
    {
      "epoch": 0.07678244972577697,
      "grad_norm": 0.3558065174543996,
      "learning_rate": 2.995358173757765e-05,
      "loss": 0.4718,
      "step": 42
    },
    {
      "epoch": 0.07861060329067641,
      "grad_norm": 0.34290523065110684,
      "learning_rate": 2.994617024066181e-05,
      "loss": 0.4659,
      "step": 43
    },
    {
      "epoch": 0.08043875685557587,
      "grad_norm": 0.3028572716833668,
      "learning_rate": 2.9938211105679677e-05,
      "loss": 0.4543,
      "step": 44
    },
    {
      "epoch": 0.08226691042047532,
      "grad_norm": 0.36216688731700014,
      "learning_rate": 2.9929704624259508e-05,
      "loss": 0.4724,
      "step": 45
    },
    {
      "epoch": 0.08409506398537477,
      "grad_norm": 0.34546125994172217,
      "learning_rate": 2.992065110808469e-05,
      "loss": 0.4717,
      "step": 46
    },
    {
      "epoch": 0.08592321755027423,
      "grad_norm": 0.3173199655699041,
      "learning_rate": 2.991105088888234e-05,
      "loss": 0.4893,
      "step": 47
    },
    {
      "epoch": 0.08775137111517367,
      "grad_norm": 0.27327168658587364,
      "learning_rate": 2.990090431841117e-05,
      "loss": 0.4877,
      "step": 48
    },
    {
      "epoch": 0.08957952468007313,
      "grad_norm": 0.3207165383004196,
      "learning_rate": 2.9890211768448572e-05,
      "loss": 0.4666,
      "step": 49
    },
    {
      "epoch": 0.09140767824497258,
      "grad_norm": 0.29795651824929764,
      "learning_rate": 2.9878973630777012e-05,
      "loss": 0.4764,
      "step": 50
    },
    {
      "epoch": 0.09323583180987204,
      "grad_norm": 0.3066207186556079,
      "learning_rate": 2.9867190317169665e-05,
      "loss": 0.4793,
      "step": 51
    },
    {
      "epoch": 0.09506398537477148,
      "grad_norm": 0.30152926589089146,
      "learning_rate": 2.9854862259375326e-05,
      "loss": 0.48,
      "step": 52
    },
    {
      "epoch": 0.09689213893967093,
      "grad_norm": 0.31982845164198154,
      "learning_rate": 2.9841989909102607e-05,
      "loss": 0.4827,
      "step": 53
    },
    {
      "epoch": 0.09872029250457039,
      "grad_norm": 0.3226249549846725,
      "learning_rate": 2.982857373800337e-05,
      "loss": 0.4815,
      "step": 54
    },
    {
      "epoch": 0.10054844606946983,
      "grad_norm": 0.29313323454641665,
      "learning_rate": 2.9814614237655445e-05,
      "loss": 0.4624,
      "step": 55
    },
    {
      "epoch": 0.10237659963436929,
      "grad_norm": 0.30450893112709426,
      "learning_rate": 2.9800111919544632e-05,
      "loss": 0.4856,
      "step": 56
    },
    {
      "epoch": 0.10420475319926874,
      "grad_norm": 0.31077892246708605,
      "learning_rate": 2.9785067315045943e-05,
      "loss": 0.4801,
      "step": 57
    },
    {
      "epoch": 0.10603290676416818,
      "grad_norm": 0.30352760576382176,
      "learning_rate": 2.9769480975404143e-05,
      "loss": 0.4462,
      "step": 58
    },
    {
      "epoch": 0.10786106032906764,
      "grad_norm": 0.2820427097517702,
      "learning_rate": 2.975335347171356e-05,
      "loss": 0.4275,
      "step": 59
    },
    {
      "epoch": 0.10968921389396709,
      "grad_norm": 0.3193688120609219,
      "learning_rate": 2.9736685394897123e-05,
      "loss": 0.4585,
      "step": 60
    },
    {
      "epoch": 0.11151736745886655,
      "grad_norm": 0.29309453499656224,
      "learning_rate": 2.9719477355684767e-05,
      "loss": 0.4563,
      "step": 61
    },
    {
      "epoch": 0.113345521023766,
      "grad_norm": 0.291897548964064,
      "learning_rate": 2.9701729984591003e-05,
      "loss": 0.4402,
      "step": 62
    },
    {
      "epoch": 0.11517367458866545,
      "grad_norm": 0.3386425303422288,
      "learning_rate": 2.9683443931891837e-05,
      "loss": 0.4586,
      "step": 63
    },
    {
      "epoch": 0.1170018281535649,
      "grad_norm": 0.28801030329886385,
      "learning_rate": 2.966461986760096e-05,
      "loss": 0.4452,
      "step": 64
    },
    {
      "epoch": 0.11882998171846434,
      "grad_norm": 0.27225571075726074,
      "learning_rate": 2.964525848144517e-05,
      "loss": 0.4219,
      "step": 65
    },
    {
      "epoch": 0.1206581352833638,
      "grad_norm": 0.3150014322578861,
      "learning_rate": 2.9625360482839114e-05,
      "loss": 0.4833,
      "step": 66
    },
    {
      "epoch": 0.12248628884826325,
      "grad_norm": 0.28747617110884083,
      "learning_rate": 2.9604926600859287e-05,
      "loss": 0.4673,
      "step": 67
    },
    {
      "epoch": 0.12431444241316271,
      "grad_norm": 0.2935455145106659,
      "learning_rate": 2.958395758421733e-05,
      "loss": 0.4492,
      "step": 68
    },
    {
      "epoch": 0.12614259597806216,
      "grad_norm": 0.2748773662739781,
      "learning_rate": 2.9562454201232583e-05,
      "loss": 0.4763,
      "step": 69
    },
    {
      "epoch": 0.12797074954296161,
      "grad_norm": 0.3040072222347735,
      "learning_rate": 2.9540417239803954e-05,
      "loss": 0.4658,
      "step": 70
    },
    {
      "epoch": 0.12979890310786105,
      "grad_norm": 0.3401190893101851,
      "learning_rate": 2.9517847507381022e-05,
      "loss": 0.4619,
      "step": 71
    },
    {
      "epoch": 0.1316270566727605,
      "grad_norm": 0.2832439537823879,
      "learning_rate": 2.9494745830934473e-05,
      "loss": 0.4594,
      "step": 72
    },
    {
      "epoch": 0.13345521023765997,
      "grad_norm": 0.4216765622686639,
      "learning_rate": 2.9471113056925786e-05,
      "loss": 0.4939,
      "step": 73
    },
    {
      "epoch": 0.13528336380255943,
      "grad_norm": 0.2884581020061622,
      "learning_rate": 2.944695005127623e-05,
      "loss": 0.458,
      "step": 74
    },
    {
      "epoch": 0.13711151736745886,
      "grad_norm": 0.28039598694507106,
      "learning_rate": 2.942225769933512e-05,
      "loss": 0.4361,
      "step": 75
    },
    {
      "epoch": 0.13893967093235832,
      "grad_norm": 0.27724633932247766,
      "learning_rate": 2.939703690584741e-05,
      "loss": 0.4557,
      "step": 76
    },
    {
      "epoch": 0.14076782449725778,
      "grad_norm": 0.2694165757512015,
      "learning_rate": 2.9371288594920484e-05,
      "loss": 0.4332,
      "step": 77
    },
    {
      "epoch": 0.1425959780621572,
      "grad_norm": 0.25780899472474833,
      "learning_rate": 2.934501370999037e-05,
      "loss": 0.4242,
      "step": 78
    },
    {
      "epoch": 0.14442413162705667,
      "grad_norm": 0.2967403245642137,
      "learning_rate": 2.93182132137871e-05,
      "loss": 0.446,
      "step": 79
    },
    {
      "epoch": 0.14625228519195613,
      "grad_norm": 0.34690556417034824,
      "learning_rate": 2.9290888088299486e-05,
      "loss": 0.4818,
      "step": 80
    },
    {
      "epoch": 0.1480804387568556,
      "grad_norm": 0.3003683124367745,
      "learning_rate": 2.9263039334739127e-05,
      "loss": 0.462,
      "step": 81
    },
    {
      "epoch": 0.14990859232175502,
      "grad_norm": 0.302108544256545,
      "learning_rate": 2.92346679735037e-05,
      "loss": 0.4561,
      "step": 82
    },
    {
      "epoch": 0.15173674588665448,
      "grad_norm": 0.2841043066219529,
      "learning_rate": 2.9205775044139608e-05,
      "loss": 0.4684,
      "step": 83
    },
    {
      "epoch": 0.15356489945155394,
      "grad_norm": 0.2952668737797339,
      "learning_rate": 2.9176361605303867e-05,
      "loss": 0.4693,
      "step": 84
    },
    {
      "epoch": 0.15539305301645337,
      "grad_norm": 0.28847383193289927,
      "learning_rate": 2.914642873472531e-05,
      "loss": 0.452,
      "step": 85
    },
    {
      "epoch": 0.15722120658135283,
      "grad_norm": 0.31050885319522226,
      "learning_rate": 2.9115977529165132e-05,
      "loss": 0.4621,
      "step": 86
    },
    {
      "epoch": 0.1590493601462523,
      "grad_norm": 0.3143842313652269,
      "learning_rate": 2.9085009104376663e-05,
      "loss": 0.4563,
      "step": 87
    },
    {
      "epoch": 0.16087751371115175,
      "grad_norm": 0.33281281256892775,
      "learning_rate": 2.905352459506452e-05,
      "loss": 0.437,
      "step": 88
    },
    {
      "epoch": 0.16270566727605118,
      "grad_norm": 0.30726176715777354,
      "learning_rate": 2.9021525154842998e-05,
      "loss": 0.4614,
      "step": 89
    },
    {
      "epoch": 0.16453382084095064,
      "grad_norm": 0.31519069611911227,
      "learning_rate": 2.8989011956193834e-05,
      "loss": 0.4584,
      "step": 90
    },
    {
      "epoch": 0.1663619744058501,
      "grad_norm": 0.3142796251849267,
      "learning_rate": 2.8955986190423225e-05,
      "loss": 0.4848,
      "step": 91
    },
    {
      "epoch": 0.16819012797074953,
      "grad_norm": 0.2831585707772231,
      "learning_rate": 2.892244906761819e-05,
      "loss": 0.4463,
      "step": 92
    },
    {
      "epoch": 0.170018281535649,
      "grad_norm": 0.3271624604307319,
      "learning_rate": 2.8888401816602207e-05,
      "loss": 0.4496,
      "step": 93
    },
    {
      "epoch": 0.17184643510054845,
      "grad_norm": 0.3265076622565222,
      "learning_rate": 2.885384568489023e-05,
      "loss": 0.434,
      "step": 94
    },
    {
      "epoch": 0.1736745886654479,
      "grad_norm": 0.3384143662294581,
      "learning_rate": 2.881878193864294e-05,
      "loss": 0.4662,
      "step": 95
    },
    {
      "epoch": 0.17550274223034734,
      "grad_norm": 0.29519544921435537,
      "learning_rate": 2.878321186262037e-05,
      "loss": 0.4415,
      "step": 96
    },
    {
      "epoch": 0.1773308957952468,
      "grad_norm": 0.3152573413188835,
      "learning_rate": 2.8747136760134827e-05,
      "loss": 0.4525,
      "step": 97
    },
    {
      "epoch": 0.17915904936014626,
      "grad_norm": 0.2911315525075031,
      "learning_rate": 2.871055795300315e-05,
      "loss": 0.4393,
      "step": 98
    },
    {
      "epoch": 0.1809872029250457,
      "grad_norm": 0.28890139736154113,
      "learning_rate": 2.8673476781498242e-05,
      "loss": 0.4301,
      "step": 99
    },
    {
      "epoch": 0.18281535648994515,
      "grad_norm": 0.29566233008031895,
      "learning_rate": 2.8635894604300018e-05,
      "loss": 0.4477,
      "step": 100
    },
    {
      "epoch": 0.1846435100548446,
      "grad_norm": 0.2759848384654238,
      "learning_rate": 2.859781279844556e-05,
      "loss": 0.4477,
      "step": 101
    },
    {
      "epoch": 0.18647166361974407,
      "grad_norm": 0.3231166719493857,
      "learning_rate": 2.855923275927871e-05,
      "loss": 0.4732,
      "step": 102
    },
    {
      "epoch": 0.1882998171846435,
      "grad_norm": 0.27671665616727253,
      "learning_rate": 2.8520155900398922e-05,
      "loss": 0.4348,
      "step": 103
    },
    {
      "epoch": 0.19012797074954296,
      "grad_norm": 0.3373290606091851,
      "learning_rate": 2.8480583653609457e-05,
      "loss": 0.4772,
      "step": 104
    },
    {
      "epoch": 0.19195612431444242,
      "grad_norm": 0.270804966443481,
      "learning_rate": 2.844051746886495e-05,
      "loss": 0.4522,
      "step": 105
    },
    {
      "epoch": 0.19378427787934185,
      "grad_norm": 0.28232177001780473,
      "learning_rate": 2.8399958814218258e-05,
      "loss": 0.465,
      "step": 106
    },
    {
      "epoch": 0.1956124314442413,
      "grad_norm": 0.32735140894867865,
      "learning_rate": 2.8358909175766674e-05,
      "loss": 0.4665,
      "step": 107
    },
    {
      "epoch": 0.19744058500914077,
      "grad_norm": 0.3535841008181332,
      "learning_rate": 2.831737005759749e-05,
      "loss": 0.4817,
      "step": 108
    },
    {
      "epoch": 0.19926873857404023,
      "grad_norm": 0.31153302714419157,
      "learning_rate": 2.8275342981732868e-05,
      "loss": 0.4774,
      "step": 109
    },
    {
      "epoch": 0.20109689213893966,
      "grad_norm": 0.3414165680532852,
      "learning_rate": 2.8232829488074077e-05,
      "loss": 0.4354,
      "step": 110
    },
    {
      "epoch": 0.20292504570383912,
      "grad_norm": 0.2657136503074252,
      "learning_rate": 2.8189831134345074e-05,
      "loss": 0.4403,
      "step": 111
    },
    {
      "epoch": 0.20475319926873858,
      "grad_norm": 0.2963856605986798,
      "learning_rate": 2.8146349496035426e-05,
      "loss": 0.4262,
      "step": 112
    },
    {
      "epoch": 0.20658135283363802,
      "grad_norm": 0.3119283296440065,
      "learning_rate": 2.8102386166342582e-05,
      "loss": 0.4418,
      "step": 113
    },
    {
      "epoch": 0.20840950639853748,
      "grad_norm": 0.37109957781553604,
      "learning_rate": 2.8057942756113504e-05,
      "loss": 0.4765,
      "step": 114
    },
    {
      "epoch": 0.21023765996343693,
      "grad_norm": 0.2999662023442714,
      "learning_rate": 2.8013020893785635e-05,
      "loss": 0.4539,
      "step": 115
    },
    {
      "epoch": 0.21206581352833637,
      "grad_norm": 0.28970493967104405,
      "learning_rate": 2.796762222532723e-05,
      "loss": 0.4289,
      "step": 116
    },
    {
      "epoch": 0.21389396709323583,
      "grad_norm": 0.2939203180452282,
      "learning_rate": 2.7921748414177063e-05,
      "loss": 0.4689,
      "step": 117
    },
    {
      "epoch": 0.21572212065813529,
      "grad_norm": 0.3201929927655123,
      "learning_rate": 2.787540114118345e-05,
      "loss": 0.4338,
      "step": 118
    },
    {
      "epoch": 0.21755027422303475,
      "grad_norm": 0.2864175962342283,
      "learning_rate": 2.7828582104542694e-05,
      "loss": 0.4464,
      "step": 119
    },
    {
      "epoch": 0.21937842778793418,
      "grad_norm": 0.28592410203121005,
      "learning_rate": 2.7781293019736845e-05,
      "loss": 0.4397,
      "step": 120
    },
    {
      "epoch": 0.22120658135283364,
      "grad_norm": 0.3304180569852307,
      "learning_rate": 2.7733535619470835e-05,
      "loss": 0.4481,
      "step": 121
    },
    {
      "epoch": 0.2230347349177331,
      "grad_norm": 0.3390393526626585,
      "learning_rate": 2.7685311653609004e-05,
      "loss": 0.4432,
      "step": 122
    },
    {
      "epoch": 0.22486288848263253,
      "grad_norm": 0.2940159306891994,
      "learning_rate": 2.7636622889110975e-05,
      "loss": 0.475,
      "step": 123
    },
    {
      "epoch": 0.226691042047532,
      "grad_norm": 0.31229638977483115,
      "learning_rate": 2.758747110996693e-05,
      "loss": 0.4643,
      "step": 124
    },
    {
      "epoch": 0.22851919561243145,
      "grad_norm": 0.285967777532962,
      "learning_rate": 2.7537858117132217e-05,
      "loss": 0.4505,
      "step": 125
    },
    {
      "epoch": 0.2303473491773309,
      "grad_norm": 0.3096207093074659,
      "learning_rate": 2.7487785728461383e-05,
      "loss": 0.4606,
      "step": 126
    },
    {
      "epoch": 0.23217550274223034,
      "grad_norm": 0.3196007759195916,
      "learning_rate": 2.7437255778641548e-05,
      "loss": 0.4688,
      "step": 127
    },
    {
      "epoch": 0.2340036563071298,
      "grad_norm": 0.278093004843679,
      "learning_rate": 2.7386270119125193e-05,
      "loss": 0.4476,
      "step": 128
    },
    {
      "epoch": 0.23583180987202926,
      "grad_norm": 0.28238850493053447,
      "learning_rate": 2.7334830618062327e-05,
      "loss": 0.4328,
      "step": 129
    },
    {
      "epoch": 0.2376599634369287,
      "grad_norm": 0.31893781223089274,
      "learning_rate": 2.728293916023202e-05,
      "loss": 0.4368,
      "step": 130
    },
    {
      "epoch": 0.23948811700182815,
      "grad_norm": 0.3226432084535994,
      "learning_rate": 2.7230597646973355e-05,
      "loss": 0.4693,
      "step": 131
    },
    {
      "epoch": 0.2413162705667276,
      "grad_norm": 0.3098641582546802,
      "learning_rate": 2.717780799611576e-05,
      "loss": 0.4677,
      "step": 132
    },
    {
      "epoch": 0.24314442413162707,
      "grad_norm": 0.316005877064317,
      "learning_rate": 2.7124572141908737e-05,
      "loss": 0.4641,
      "step": 133
    },
    {
      "epoch": 0.2449725776965265,
      "grad_norm": 0.29116768124986503,
      "learning_rate": 2.707089203495098e-05,
      "loss": 0.4504,
      "step": 134
    },
    {
      "epoch": 0.24680073126142596,
      "grad_norm": 0.3312565101776482,
      "learning_rate": 2.701676964211893e-05,
      "loss": 0.4869,
      "step": 135
    },
    {
      "epoch": 0.24862888482632542,
      "grad_norm": 0.3307114085092842,
      "learning_rate": 2.696220694649467e-05,
      "loss": 0.4307,
      "step": 136
    },
    {
      "epoch": 0.25045703839122485,
      "grad_norm": 0.3139451066311321,
      "learning_rate": 2.69072059472933e-05,
      "loss": 0.4593,
      "step": 137
    },
    {
      "epoch": 0.2522851919561243,
      "grad_norm": 0.3298447274769771,
      "learning_rate": 2.685176865978965e-05,
      "loss": 0.4544,
      "step": 138
    },
    {
      "epoch": 0.25411334552102377,
      "grad_norm": 0.3032493627313141,
      "learning_rate": 2.6795897115244478e-05,
      "loss": 0.4279,
      "step": 139
    },
    {
      "epoch": 0.25594149908592323,
      "grad_norm": 0.2658733658787214,
      "learning_rate": 2.6739593360830006e-05,
      "loss": 0.4106,
      "step": 140
    },
    {
      "epoch": 0.2577696526508227,
      "grad_norm": 0.3190884630430669,
      "learning_rate": 2.668285945955493e-05,
      "loss": 0.4662,
      "step": 141
    },
    {
      "epoch": 0.2595978062157221,
      "grad_norm": 0.34411371393720197,
      "learning_rate": 2.6625697490188832e-05,
      "loss": 0.4499,
      "step": 142
    },
    {
      "epoch": 0.26142595978062155,
      "grad_norm": 0.32823365391601855,
      "learning_rate": 2.6568109547185996e-05,
      "loss": 0.4783,
      "step": 143
    },
    {
      "epoch": 0.263254113345521,
      "grad_norm": 0.30268317080382134,
      "learning_rate": 2.651009774060868e-05,
      "loss": 0.4677,
      "step": 144
    },
    {
      "epoch": 0.26508226691042047,
      "grad_norm": 0.290999041829207,
      "learning_rate": 2.6451664196049802e-05,
      "loss": 0.4461,
      "step": 145
    },
    {
      "epoch": 0.26691042047531993,
      "grad_norm": 0.3169422559498593,
      "learning_rate": 2.639281105455505e-05,
      "loss": 0.4503,
      "step": 146
    },
    {
      "epoch": 0.2687385740402194,
      "grad_norm": 0.3189364221479283,
      "learning_rate": 2.6333540472544442e-05,
      "loss": 0.4335,
      "step": 147
    },
    {
      "epoch": 0.27056672760511885,
      "grad_norm": 0.30758294437768796,
      "learning_rate": 2.6273854621733286e-05,
      "loss": 0.4504,
      "step": 148
    },
    {
      "epoch": 0.27239488117001825,
      "grad_norm": 0.2849217195325153,
      "learning_rate": 2.621375568905266e-05,
      "loss": 0.4487,
      "step": 149
    },
    {
      "epoch": 0.2742230347349177,
      "grad_norm": 0.344396845889534,
      "learning_rate": 2.615324587656921e-05,
      "loss": 0.4821,
      "step": 150
    },
    {
      "epoch": 0.2760511882998172,
      "grad_norm": 0.2696584787135129,
      "learning_rate": 2.6092327401404538e-05,
      "loss": 0.475,
      "step": 151
    },
    {
      "epoch": 0.27787934186471663,
      "grad_norm": 0.30241018017889437,
      "learning_rate": 2.6031002495653913e-05,
      "loss": 0.4342,
      "step": 152
    },
    {
      "epoch": 0.2797074954296161,
      "grad_norm": 0.28693150625713587,
      "learning_rate": 2.596927340630451e-05,
      "loss": 0.4359,
      "step": 153
    },
    {
      "epoch": 0.28153564899451555,
      "grad_norm": 0.29754809227022666,
      "learning_rate": 2.590714239515306e-05,
      "loss": 0.4438,
      "step": 154
    },
    {
      "epoch": 0.283363802559415,
      "grad_norm": 0.299567522325594,
      "learning_rate": 2.584461173872301e-05,
      "loss": 0.4532,
      "step": 155
    },
    {
      "epoch": 0.2851919561243144,
      "grad_norm": 0.2813266176963191,
      "learning_rate": 2.5781683728181066e-05,
      "loss": 0.4405,
      "step": 156
    },
    {
      "epoch": 0.2870201096892139,
      "grad_norm": 0.3171717930616808,
      "learning_rate": 2.5718360669253276e-05,
      "loss": 0.454,
      "step": 157
    },
    {
      "epoch": 0.28884826325411334,
      "grad_norm": 0.30159353684035844,
      "learning_rate": 2.565464488214053e-05,
      "loss": 0.4375,
      "step": 158
    },
    {
      "epoch": 0.2906764168190128,
      "grad_norm": 0.2692705044539712,
      "learning_rate": 2.559053870143356e-05,
      "loss": 0.4276,
      "step": 159
    },
    {
      "epoch": 0.29250457038391225,
      "grad_norm": 0.30691595589677334,
      "learning_rate": 2.5526044476027383e-05,
      "loss": 0.4606,
      "step": 160
    },
    {
      "epoch": 0.2943327239488117,
      "grad_norm": 0.31630931258544637,
      "learning_rate": 2.5461164569035244e-05,
      "loss": 0.4406,
      "step": 161
    },
    {
      "epoch": 0.2961608775137112,
      "grad_norm": 0.2440944204924941,
      "learning_rate": 2.5395901357702032e-05,
      "loss": 0.4511,
      "step": 162
    },
    {
      "epoch": 0.2979890310786106,
      "grad_norm": 0.31355225894665767,
      "learning_rate": 2.533025723331718e-05,
      "loss": 0.4384,
      "step": 163
    },
    {
      "epoch": 0.29981718464351004,
      "grad_norm": 0.2828232991554723,
      "learning_rate": 2.526423460112703e-05,
      "loss": 0.4318,
      "step": 164
    },
    {
      "epoch": 0.3016453382084095,
      "grad_norm": 0.2872196769388632,
      "learning_rate": 2.5197835880246702e-05,
      "loss": 0.4508,
      "step": 165
    },
    {
      "epoch": 0.30347349177330896,
      "grad_norm": 0.27755470022103107,
      "learning_rate": 2.51310635035715e-05,
      "loss": 0.4443,
      "step": 166
    },
    {
      "epoch": 0.3053016453382084,
      "grad_norm": 0.29288725811791394,
      "learning_rate": 2.506391991768771e-05,
      "loss": 0.4437,
      "step": 167
    },
    {
      "epoch": 0.3071297989031079,
      "grad_norm": 0.28756847297478805,
      "learning_rate": 2.4996407582782987e-05,
      "loss": 0.4306,
      "step": 168
    },
    {
      "epoch": 0.30895795246800734,
      "grad_norm": 0.3062510638432968,
      "learning_rate": 2.4928528972556207e-05,
      "loss": 0.4401,
      "step": 169
    },
    {
      "epoch": 0.31078610603290674,
      "grad_norm": 0.28844799158530887,
      "learning_rate": 2.486028657412683e-05,
      "loss": 0.465,
      "step": 170
    },
    {
      "epoch": 0.3126142595978062,
      "grad_norm": 0.30254982711927847,
      "learning_rate": 2.479168288794377e-05,
      "loss": 0.4257,
      "step": 171
    },
    {
      "epoch": 0.31444241316270566,
      "grad_norm": 0.29617551569459377,
      "learning_rate": 2.472272042769377e-05,
      "loss": 0.4413,
      "step": 172
    },
    {
      "epoch": 0.3162705667276051,
      "grad_norm": 0.2894388824454487,
      "learning_rate": 2.4653401720209316e-05,
      "loss": 0.4339,
      "step": 173
    },
    {
      "epoch": 0.3180987202925046,
      "grad_norm": 0.31278541819511707,
      "learning_rate": 2.4583729305376014e-05,
      "loss": 0.4433,
      "step": 174
    },
    {
      "epoch": 0.31992687385740404,
      "grad_norm": 0.29889716089103313,
      "learning_rate": 2.451370573603959e-05,
      "loss": 0.4465,
      "step": 175
    },
    {
      "epoch": 0.3217550274223035,
      "grad_norm": 0.30650603689771794,
      "learning_rate": 2.4443333577912285e-05,
      "loss": 0.4497,
      "step": 176
    },
    {
      "epoch": 0.3235831809872029,
      "grad_norm": 0.27739674261767966,
      "learning_rate": 2.437261540947889e-05,
      "loss": 0.4417,
      "step": 177
    },
    {
      "epoch": 0.32541133455210236,
      "grad_norm": 0.29186196118144964,
      "learning_rate": 2.430155382190225e-05,
      "loss": 0.46,
      "step": 178
    },
    {
      "epoch": 0.3272394881170018,
      "grad_norm": 0.28342780349608687,
      "learning_rate": 2.4230151418928326e-05,
      "loss": 0.4414,
      "step": 179
    },
    {
      "epoch": 0.3290676416819013,
      "grad_norm": 0.2751850949398033,
      "learning_rate": 2.415841081679079e-05,
      "loss": 0.4569,
      "step": 180
    },
    {
      "epoch": 0.33089579524680074,
      "grad_norm": 0.2848165003785897,
      "learning_rate": 2.4086334644115176e-05,
      "loss": 0.4167,
      "step": 181
    },
    {
      "epoch": 0.3327239488117002,
      "grad_norm": 0.26869428432353776,
      "learning_rate": 2.4013925541822543e-05,
      "loss": 0.4393,
      "step": 182
    },
    {
      "epoch": 0.33455210237659966,
      "grad_norm": 0.3041074292613331,
      "learning_rate": 2.3941186163032736e-05,
      "loss": 0.443,
      "step": 183
    },
    {
      "epoch": 0.33638025594149906,
      "grad_norm": 0.3054719627730434,
      "learning_rate": 2.3868119172967145e-05,
      "loss": 0.4711,
      "step": 184
    },
    {
      "epoch": 0.3382084095063985,
      "grad_norm": 0.294864775838586,
      "learning_rate": 2.379472724885108e-05,
      "loss": 0.4208,
      "step": 185
    },
    {
      "epoch": 0.340036563071298,
      "grad_norm": 0.2815964015764938,
      "learning_rate": 2.3721013079815645e-05,
      "loss": 0.445,
      "step": 186
    },
    {
      "epoch": 0.34186471663619744,
      "grad_norm": 0.31854040107909304,
      "learning_rate": 2.3646979366799234e-05,
      "loss": 0.4601,
      "step": 187
    },
    {
      "epoch": 0.3436928702010969,
      "grad_norm": 0.28167691281400475,
      "learning_rate": 2.3572628822448546e-05,
      "loss": 0.4506,
      "step": 188
    },
    {
      "epoch": 0.34552102376599636,
      "grad_norm": 0.2771446663943378,
      "learning_rate": 2.3497964171019214e-05,
      "loss": 0.4358,
      "step": 189
    },
    {
      "epoch": 0.3473491773308958,
      "grad_norm": 0.27745556919254083,
      "learning_rate": 2.3422988148275963e-05,
      "loss": 0.4461,
      "step": 190
    },
    {
      "epoch": 0.3491773308957952,
      "grad_norm": 0.26029043603341223,
      "learning_rate": 2.3347703501392373e-05,
      "loss": 0.4538,
      "step": 191
    },
    {
      "epoch": 0.3510054844606947,
      "grad_norm": 0.2744751742780149,
      "learning_rate": 2.3272112988850237e-05,
      "loss": 0.4343,
      "step": 192
    },
    {
      "epoch": 0.35283363802559414,
      "grad_norm": 0.3148709000115808,
      "learning_rate": 2.319621938033848e-05,
      "loss": 0.4421,
      "step": 193
    },
    {
      "epoch": 0.3546617915904936,
      "grad_norm": 0.2802326000313149,
      "learning_rate": 2.3120025456651658e-05,
      "loss": 0.4528,
      "step": 194
    },
    {
      "epoch": 0.35648994515539306,
      "grad_norm": 0.25928215844810953,
      "learning_rate": 2.3043534009588108e-05,
      "loss": 0.4405,
      "step": 195
    },
    {
      "epoch": 0.3583180987202925,
      "grad_norm": 0.2732416188380195,
      "learning_rate": 2.296674784184761e-05,
      "loss": 0.4123,
      "step": 196
    },
    {
      "epoch": 0.360146252285192,
      "grad_norm": 0.2616002327630866,
      "learning_rate": 2.288966976692873e-05,
      "loss": 0.4211,
      "step": 197
    },
    {
      "epoch": 0.3619744058500914,
      "grad_norm": 0.25552504229923495,
      "learning_rate": 2.2812302609025692e-05,
      "loss": 0.4219,
      "step": 198
    },
    {
      "epoch": 0.36380255941499084,
      "grad_norm": 0.26460690701785383,
      "learning_rate": 2.2734649202924955e-05,
      "loss": 0.4519,
      "step": 199
    },
    {
      "epoch": 0.3656307129798903,
      "grad_norm": 0.2703925981362186,
      "learning_rate": 2.265671239390128e-05,
      "loss": 0.4525,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 547,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 200,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 131774435229696.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}