{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 547,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0018281535648994515,
"grad_norm": 2.0932669659411407,
"learning_rate": 0.0,
"loss": 0.5957,
"step": 1
},
{
"epoch": 0.003656307129798903,
"grad_norm": 2.2225871592919857,
"learning_rate": 1.0714285714285714e-06,
"loss": 0.5994,
"step": 2
},
{
"epoch": 0.005484460694698354,
"grad_norm": 2.1034495331981367,
"learning_rate": 2.1428571428571427e-06,
"loss": 0.595,
"step": 3
},
{
"epoch": 0.007312614259597806,
"grad_norm": 2.166865863914735,
"learning_rate": 3.2142857142857143e-06,
"loss": 0.6304,
"step": 4
},
{
"epoch": 0.009140767824497258,
"grad_norm": 1.9888391803618566,
"learning_rate": 4.2857142857142855e-06,
"loss": 0.5996,
"step": 5
},
{
"epoch": 0.010968921389396709,
"grad_norm": 1.574113693444916,
"learning_rate": 5.357142857142857e-06,
"loss": 0.5897,
"step": 6
},
{
"epoch": 0.012797074954296161,
"grad_norm": 1.2137552408963244,
"learning_rate": 6.428571428571429e-06,
"loss": 0.6007,
"step": 7
},
{
"epoch": 0.014625228519195612,
"grad_norm": 1.0350502098616132,
"learning_rate": 7.5e-06,
"loss": 0.568,
"step": 8
},
{
"epoch": 0.016453382084095063,
"grad_norm": 0.894475082603116,
"learning_rate": 8.571428571428571e-06,
"loss": 0.5448,
"step": 9
},
{
"epoch": 0.018281535648994516,
"grad_norm": 1.7395859008823984,
"learning_rate": 9.642857142857144e-06,
"loss": 0.5899,
"step": 10
},
{
"epoch": 0.02010968921389397,
"grad_norm": 1.8244614303880249,
"learning_rate": 1.0714285714285714e-05,
"loss": 0.5746,
"step": 11
},
{
"epoch": 0.021937842778793418,
"grad_norm": 1.5925645409877431,
"learning_rate": 1.1785714285714286e-05,
"loss": 0.5565,
"step": 12
},
{
"epoch": 0.02376599634369287,
"grad_norm": 1.36746956600136,
"learning_rate": 1.2857142857142857e-05,
"loss": 0.5471,
"step": 13
},
{
"epoch": 0.025594149908592323,
"grad_norm": 1.3707686542130966,
"learning_rate": 1.3928571428571429e-05,
"loss": 0.5331,
"step": 14
},
{
"epoch": 0.027422303473491772,
"grad_norm": 1.242238871065678,
"learning_rate": 1.5e-05,
"loss": 0.5699,
"step": 15
},
{
"epoch": 0.029250457038391225,
"grad_norm": 1.0311550418009305,
"learning_rate": 1.6071428571428572e-05,
"loss": 0.5308,
"step": 16
},
{
"epoch": 0.031078610603290677,
"grad_norm": 0.7712626361430579,
"learning_rate": 1.7142857142857142e-05,
"loss": 0.4846,
"step": 17
},
{
"epoch": 0.03290676416819013,
"grad_norm": 0.7368030930782065,
"learning_rate": 1.8214285714285712e-05,
"loss": 0.5252,
"step": 18
},
{
"epoch": 0.03473491773308958,
"grad_norm": 0.7569594066617599,
"learning_rate": 1.928571428571429e-05,
"loss": 0.5004,
"step": 19
},
{
"epoch": 0.03656307129798903,
"grad_norm": 0.775121486961269,
"learning_rate": 2.0357142857142858e-05,
"loss": 0.5081,
"step": 20
},
{
"epoch": 0.038391224862888484,
"grad_norm": 0.6937386237956013,
"learning_rate": 2.1428571428571428e-05,
"loss": 0.4823,
"step": 21
},
{
"epoch": 0.04021937842778794,
"grad_norm": 0.5513659607436744,
"learning_rate": 2.25e-05,
"loss": 0.4939,
"step": 22
},
{
"epoch": 0.04204753199268738,
"grad_norm": 0.5026124919107627,
"learning_rate": 2.357142857142857e-05,
"loss": 0.486,
"step": 23
},
{
"epoch": 0.043875685557586835,
"grad_norm": 0.5955689249641745,
"learning_rate": 2.464285714285714e-05,
"loss": 0.5142,
"step": 24
},
{
"epoch": 0.04570383912248629,
"grad_norm": 0.632394003160485,
"learning_rate": 2.5714285714285714e-05,
"loss": 0.5214,
"step": 25
},
{
"epoch": 0.04753199268738574,
"grad_norm": 0.5309532283880241,
"learning_rate": 2.6785714285714288e-05,
"loss": 0.4774,
"step": 26
},
{
"epoch": 0.04936014625228519,
"grad_norm": 0.4875964236553614,
"learning_rate": 2.7857142857142858e-05,
"loss": 0.5032,
"step": 27
},
{
"epoch": 0.051188299817184646,
"grad_norm": 0.48155522874845014,
"learning_rate": 2.892857142857143e-05,
"loss": 0.5002,
"step": 28
},
{
"epoch": 0.05301645338208409,
"grad_norm": 0.41845545316273386,
"learning_rate": 3e-05,
"loss": 0.4736,
"step": 29
},
{
"epoch": 0.054844606946983544,
"grad_norm": 0.426756665035819,
"learning_rate": 2.999972519478696e-05,
"loss": 0.4988,
"step": 30
},
{
"epoch": 0.056672760511883,
"grad_norm": 0.44740653887205223,
"learning_rate": 2.99989007892169e-05,
"loss": 0.4877,
"step": 31
},
{
"epoch": 0.05850091407678245,
"grad_norm": 0.35965997153522855,
"learning_rate": 2.9997526813496602e-05,
"loss": 0.4791,
"step": 32
},
{
"epoch": 0.0603290676416819,
"grad_norm": 0.3544019526584708,
"learning_rate": 2.9995603317969497e-05,
"loss": 0.4868,
"step": 33
},
{
"epoch": 0.062157221206581355,
"grad_norm": 0.3939085264133643,
"learning_rate": 2.99931303731138e-05,
"loss": 0.4933,
"step": 34
},
{
"epoch": 0.06398537477148081,
"grad_norm": 0.357457478844048,
"learning_rate": 2.9990108069539932e-05,
"loss": 0.4788,
"step": 35
},
{
"epoch": 0.06581352833638025,
"grad_norm": 0.36148014385057126,
"learning_rate": 2.9986536517987187e-05,
"loss": 0.4861,
"step": 36
},
{
"epoch": 0.06764168190127971,
"grad_norm": 0.3891946625957914,
"learning_rate": 2.998241584931971e-05,
"loss": 0.4728,
"step": 37
},
{
"epoch": 0.06946983546617916,
"grad_norm": 0.3498249933538063,
"learning_rate": 2.9977746214521646e-05,
"loss": 0.4714,
"step": 38
},
{
"epoch": 0.0712979890310786,
"grad_norm": 0.32795069985610614,
"learning_rate": 2.997252778469168e-05,
"loss": 0.4582,
"step": 39
},
{
"epoch": 0.07312614259597806,
"grad_norm": 0.3229786426441325,
"learning_rate": 2.9966760751036697e-05,
"loss": 0.4566,
"step": 40
},
{
"epoch": 0.07495429616087751,
"grad_norm": 0.34189242509234885,
"learning_rate": 2.9960445324864815e-05,
"loss": 0.4669,
"step": 41
},
{
"epoch": 0.07678244972577697,
"grad_norm": 0.3346248011539502,
"learning_rate": 2.995358173757765e-05,
"loss": 0.4719,
"step": 42
},
{
"epoch": 0.07861060329067641,
"grad_norm": 0.3445606995846261,
"learning_rate": 2.994617024066181e-05,
"loss": 0.4657,
"step": 43
},
{
"epoch": 0.08043875685557587,
"grad_norm": 0.3084736463888063,
"learning_rate": 2.9938211105679677e-05,
"loss": 0.4545,
"step": 44
},
{
"epoch": 0.08226691042047532,
"grad_norm": 0.363723170271056,
"learning_rate": 2.9929704624259508e-05,
"loss": 0.4734,
"step": 45
},
{
"epoch": 0.08409506398537477,
"grad_norm": 0.3434867460486705,
"learning_rate": 2.992065110808469e-05,
"loss": 0.4716,
"step": 46
},
{
"epoch": 0.08592321755027423,
"grad_norm": 0.3091094518128968,
"learning_rate": 2.991105088888234e-05,
"loss": 0.4892,
"step": 47
},
{
"epoch": 0.08775137111517367,
"grad_norm": 0.28634845553939514,
"learning_rate": 2.990090431841117e-05,
"loss": 0.4877,
"step": 48
},
{
"epoch": 0.08957952468007313,
"grad_norm": 0.32229417127358173,
"learning_rate": 2.9890211768448572e-05,
"loss": 0.4665,
"step": 49
},
{
"epoch": 0.09140767824497258,
"grad_norm": 0.2998997014253872,
"learning_rate": 2.9878973630777012e-05,
"loss": 0.4763,
"step": 50
},
{
"epoch": 0.09323583180987204,
"grad_norm": 0.31761959826728753,
"learning_rate": 2.9867190317169665e-05,
"loss": 0.4796,
"step": 51
},
{
"epoch": 0.09506398537477148,
"grad_norm": 0.31856904774743844,
"learning_rate": 2.9854862259375326e-05,
"loss": 0.4795,
"step": 52
},
{
"epoch": 0.09689213893967093,
"grad_norm": 0.3211537093722151,
"learning_rate": 2.9841989909102607e-05,
"loss": 0.4831,
"step": 53
},
{
"epoch": 0.09872029250457039,
"grad_norm": 0.2759276306734268,
"learning_rate": 2.982857373800337e-05,
"loss": 0.4818,
"step": 54
},
{
"epoch": 0.10054844606946983,
"grad_norm": 0.3157463034106083,
"learning_rate": 2.9814614237655445e-05,
"loss": 0.4623,
"step": 55
},
{
"epoch": 0.10237659963436929,
"grad_norm": 0.3032312590655212,
"learning_rate": 2.9800111919544632e-05,
"loss": 0.4858,
"step": 56
},
{
"epoch": 0.10420475319926874,
"grad_norm": 0.3113927848158391,
"learning_rate": 2.9785067315045943e-05,
"loss": 0.4805,
"step": 57
},
{
"epoch": 0.10603290676416818,
"grad_norm": 0.2744027006341311,
"learning_rate": 2.9769480975404143e-05,
"loss": 0.4461,
"step": 58
},
{
"epoch": 0.10786106032906764,
"grad_norm": 0.29230266010233613,
"learning_rate": 2.975335347171356e-05,
"loss": 0.4276,
"step": 59
},
{
"epoch": 0.10968921389396709,
"grad_norm": 0.2889756002721036,
"learning_rate": 2.9736685394897123e-05,
"loss": 0.4591,
"step": 60
},
{
"epoch": 0.11151736745886655,
"grad_norm": 0.3033109334524834,
"learning_rate": 2.9719477355684767e-05,
"loss": 0.4567,
"step": 61
},
{
"epoch": 0.113345521023766,
"grad_norm": 0.2781677992045063,
"learning_rate": 2.9701729984591003e-05,
"loss": 0.4398,
"step": 62
},
{
"epoch": 0.11517367458866545,
"grad_norm": 0.271555541969376,
"learning_rate": 2.9683443931891837e-05,
"loss": 0.4587,
"step": 63
},
{
"epoch": 0.1170018281535649,
"grad_norm": 0.28462475447605384,
"learning_rate": 2.966461986760096e-05,
"loss": 0.4454,
"step": 64
},
{
"epoch": 0.11882998171846434,
"grad_norm": 0.5503364181994502,
"learning_rate": 2.964525848144517e-05,
"loss": 0.4224,
"step": 65
},
{
"epoch": 0.1206581352833638,
"grad_norm": 0.36156039057533235,
"learning_rate": 2.9625360482839114e-05,
"loss": 0.4832,
"step": 66
},
{
"epoch": 0.12248628884826325,
"grad_norm": 0.28852346241189863,
"learning_rate": 2.9604926600859287e-05,
"loss": 0.4674,
"step": 67
},
{
"epoch": 0.12431444241316271,
"grad_norm": 0.301231419014804,
"learning_rate": 2.958395758421733e-05,
"loss": 0.4497,
"step": 68
},
{
"epoch": 0.12614259597806216,
"grad_norm": 0.2780712443173308,
"learning_rate": 2.9562454201232583e-05,
"loss": 0.477,
"step": 69
},
{
"epoch": 0.12797074954296161,
"grad_norm": 0.32406424984925947,
"learning_rate": 2.9540417239803954e-05,
"loss": 0.4667,
"step": 70
},
{
"epoch": 0.12979890310786105,
"grad_norm": 0.2623604313786073,
"learning_rate": 2.9517847507381022e-05,
"loss": 0.4627,
"step": 71
},
{
"epoch": 0.1316270566727605,
"grad_norm": 0.2977740538239248,
"learning_rate": 2.9494745830934473e-05,
"loss": 0.4599,
"step": 72
},
{
"epoch": 0.13345521023765997,
"grad_norm": 0.3519063323266925,
"learning_rate": 2.9471113056925786e-05,
"loss": 0.4945,
"step": 73
},
{
"epoch": 0.13528336380255943,
"grad_norm": 0.2993177800143668,
"learning_rate": 2.944695005127623e-05,
"loss": 0.4585,
"step": 74
},
{
"epoch": 0.13711151736745886,
"grad_norm": 0.28778657695750315,
"learning_rate": 2.942225769933512e-05,
"loss": 0.4368,
"step": 75
},
{
"epoch": 0.13893967093235832,
"grad_norm": 0.28115223256932165,
"learning_rate": 2.939703690584741e-05,
"loss": 0.4556,
"step": 76
},
{
"epoch": 0.14076782449725778,
"grad_norm": 0.2665269198819317,
"learning_rate": 2.9371288594920484e-05,
"loss": 0.4334,
"step": 77
},
{
"epoch": 0.1425959780621572,
"grad_norm": 0.24969029459114356,
"learning_rate": 2.934501370999037e-05,
"loss": 0.4243,
"step": 78
},
{
"epoch": 0.14442413162705667,
"grad_norm": 0.28019989404552176,
"learning_rate": 2.93182132137871e-05,
"loss": 0.4465,
"step": 79
},
{
"epoch": 0.14625228519195613,
"grad_norm": 0.3262629362699753,
"learning_rate": 2.9290888088299486e-05,
"loss": 0.4824,
"step": 80
},
{
"epoch": 0.1480804387568556,
"grad_norm": 0.3012249689853169,
"learning_rate": 2.9263039334739127e-05,
"loss": 0.4624,
"step": 81
},
{
"epoch": 0.14990859232175502,
"grad_norm": 0.28055708820894665,
"learning_rate": 2.92346679735037e-05,
"loss": 0.4562,
"step": 82
},
{
"epoch": 0.15173674588665448,
"grad_norm": 0.27808074144332834,
"learning_rate": 2.9205775044139608e-05,
"loss": 0.4686,
"step": 83
},
{
"epoch": 0.15356489945155394,
"grad_norm": 0.2806140830160609,
"learning_rate": 2.9176361605303867e-05,
"loss": 0.4695,
"step": 84
},
{
"epoch": 0.15539305301645337,
"grad_norm": 0.2705147511752726,
"learning_rate": 2.914642873472531e-05,
"loss": 0.4524,
"step": 85
},
{
"epoch": 0.15722120658135283,
"grad_norm": 0.32806782334241796,
"learning_rate": 2.9115977529165132e-05,
"loss": 0.4625,
"step": 86
},
{
"epoch": 0.1590493601462523,
"grad_norm": 0.2819528809225014,
"learning_rate": 2.9085009104376663e-05,
"loss": 0.4563,
"step": 87
},
{
"epoch": 0.16087751371115175,
"grad_norm": 0.3080336756223102,
"learning_rate": 2.905352459506452e-05,
"loss": 0.4369,
"step": 88
},
{
"epoch": 0.16270566727605118,
"grad_norm": 0.30703315538546055,
"learning_rate": 2.9021525154842998e-05,
"loss": 0.4617,
"step": 89
},
{
"epoch": 0.16453382084095064,
"grad_norm": 0.30026528997859425,
"learning_rate": 2.8989011956193834e-05,
"loss": 0.4586,
"step": 90
},
{
"epoch": 0.1663619744058501,
"grad_norm": 0.29970528278132424,
"learning_rate": 2.8955986190423225e-05,
"loss": 0.4847,
"step": 91
},
{
"epoch": 0.16819012797074953,
"grad_norm": 0.2943719684747729,
"learning_rate": 2.892244906761819e-05,
"loss": 0.4464,
"step": 92
},
{
"epoch": 0.170018281535649,
"grad_norm": 0.30807210021130066,
"learning_rate": 2.8888401816602207e-05,
"loss": 0.45,
"step": 93
},
{
"epoch": 0.17184643510054845,
"grad_norm": 0.2763940481138354,
"learning_rate": 2.885384568489023e-05,
"loss": 0.4343,
"step": 94
},
{
"epoch": 0.1736745886654479,
"grad_norm": 0.3253203044927249,
"learning_rate": 2.881878193864294e-05,
"loss": 0.4659,
"step": 95
},
{
"epoch": 0.17550274223034734,
"grad_norm": 0.266312604175572,
"learning_rate": 2.878321186262037e-05,
"loss": 0.4417,
"step": 96
},
{
"epoch": 0.1773308957952468,
"grad_norm": 0.3137831393989107,
"learning_rate": 2.8747136760134827e-05,
"loss": 0.4528,
"step": 97
},
{
"epoch": 0.17915904936014626,
"grad_norm": 0.2880929427628198,
"learning_rate": 2.871055795300315e-05,
"loss": 0.4395,
"step": 98
},
{
"epoch": 0.1809872029250457,
"grad_norm": 0.27062645315880496,
"learning_rate": 2.8673476781498242e-05,
"loss": 0.4306,
"step": 99
},
{
"epoch": 0.18281535648994515,
"grad_norm": 0.28008745353310155,
"learning_rate": 2.8635894604300018e-05,
"loss": 0.4477,
"step": 100
},
{
"epoch": 0.1846435100548446,
"grad_norm": 0.25820871027942605,
"learning_rate": 2.859781279844556e-05,
"loss": 0.4478,
"step": 101
},
{
"epoch": 0.18647166361974407,
"grad_norm": 0.30634908739409245,
"learning_rate": 2.855923275927871e-05,
"loss": 0.4732,
"step": 102
},
{
"epoch": 0.1882998171846435,
"grad_norm": 0.27737282375395794,
"learning_rate": 2.8520155900398922e-05,
"loss": 0.4347,
"step": 103
},
{
"epoch": 0.19012797074954296,
"grad_norm": 0.3201019734940999,
"learning_rate": 2.8480583653609457e-05,
"loss": 0.4773,
"step": 104
},
{
"epoch": 0.19195612431444242,
"grad_norm": 0.25763164370530267,
"learning_rate": 2.844051746886495e-05,
"loss": 0.452,
"step": 105
},
{
"epoch": 0.19378427787934185,
"grad_norm": 0.26590404580502985,
"learning_rate": 2.8399958814218258e-05,
"loss": 0.4649,
"step": 106
},
{
"epoch": 0.1956124314442413,
"grad_norm": 0.31104547148499023,
"learning_rate": 2.8358909175766674e-05,
"loss": 0.4669,
"step": 107
},
{
"epoch": 0.19744058500914077,
"grad_norm": 0.3436561078000218,
"learning_rate": 2.831737005759749e-05,
"loss": 0.4818,
"step": 108
},
{
"epoch": 0.19926873857404023,
"grad_norm": 0.2978747020545755,
"learning_rate": 2.8275342981732868e-05,
"loss": 0.4775,
"step": 109
},
{
"epoch": 0.20109689213893966,
"grad_norm": 0.3297773258073724,
"learning_rate": 2.8232829488074077e-05,
"loss": 0.4353,
"step": 110
},
{
"epoch": 0.20292504570383912,
"grad_norm": 0.26736943136912134,
"learning_rate": 2.8189831134345074e-05,
"loss": 0.4402,
"step": 111
},
{
"epoch": 0.20475319926873858,
"grad_norm": 0.46856659250948707,
"learning_rate": 2.8146349496035426e-05,
"loss": 0.4268,
"step": 112
},
{
"epoch": 0.20658135283363802,
"grad_norm": 0.27589234154547465,
"learning_rate": 2.8102386166342582e-05,
"loss": 0.4417,
"step": 113
},
{
"epoch": 0.20840950639853748,
"grad_norm": 0.35079551159665306,
"learning_rate": 2.8057942756113504e-05,
"loss": 0.4772,
"step": 114
},
{
"epoch": 0.21023765996343693,
"grad_norm": 0.2882498554120959,
"learning_rate": 2.8013020893785635e-05,
"loss": 0.454,
"step": 115
},
{
"epoch": 0.21206581352833637,
"grad_norm": 0.2941664738490589,
"learning_rate": 2.796762222532723e-05,
"loss": 0.4293,
"step": 116
},
{
"epoch": 0.21389396709323583,
"grad_norm": 0.2702184421809731,
"learning_rate": 2.7921748414177063e-05,
"loss": 0.4687,
"step": 117
},
{
"epoch": 0.21572212065813529,
"grad_norm": 0.3301109570692598,
"learning_rate": 2.787540114118345e-05,
"loss": 0.4345,
"step": 118
},
{
"epoch": 0.21755027422303475,
"grad_norm": 0.27446129864889174,
"learning_rate": 2.7828582104542694e-05,
"loss": 0.4469,
"step": 119
},
{
"epoch": 0.21937842778793418,
"grad_norm": 0.29213098317806835,
"learning_rate": 2.7781293019736845e-05,
"loss": 0.4395,
"step": 120
},
{
"epoch": 0.22120658135283364,
"grad_norm": 0.29952710363645,
"learning_rate": 2.7733535619470835e-05,
"loss": 0.4482,
"step": 121
},
{
"epoch": 0.2230347349177331,
"grad_norm": 0.33479362279837327,
"learning_rate": 2.7685311653609004e-05,
"loss": 0.4436,
"step": 122
},
{
"epoch": 0.22486288848263253,
"grad_norm": 0.2943017968120905,
"learning_rate": 2.7636622889110975e-05,
"loss": 0.4751,
"step": 123
},
{
"epoch": 0.226691042047532,
"grad_norm": 0.30034787188266204,
"learning_rate": 2.758747110996693e-05,
"loss": 0.4646,
"step": 124
},
{
"epoch": 0.22851919561243145,
"grad_norm": 0.28893394314876153,
"learning_rate": 2.7537858117132217e-05,
"loss": 0.451,
"step": 125
},
{
"epoch": 0.2303473491773309,
"grad_norm": 0.2971868357645203,
"learning_rate": 2.7487785728461383e-05,
"loss": 0.4613,
"step": 126
},
{
"epoch": 0.23217550274223034,
"grad_norm": 0.3404033245887309,
"learning_rate": 2.7437255778641548e-05,
"loss": 0.4685,
"step": 127
},
{
"epoch": 0.2340036563071298,
"grad_norm": 0.27331638590219476,
"learning_rate": 2.7386270119125193e-05,
"loss": 0.448,
"step": 128
},
{
"epoch": 0.23583180987202926,
"grad_norm": 0.272124504795692,
"learning_rate": 2.7334830618062327e-05,
"loss": 0.4333,
"step": 129
},
{
"epoch": 0.2376599634369287,
"grad_norm": 0.29807736925005807,
"learning_rate": 2.728293916023202e-05,
"loss": 0.4372,
"step": 130
},
{
"epoch": 0.23948811700182815,
"grad_norm": 0.31922280763058825,
"learning_rate": 2.7230597646973355e-05,
"loss": 0.4701,
"step": 131
},
{
"epoch": 0.2413162705667276,
"grad_norm": 0.30586172261152206,
"learning_rate": 2.717780799611576e-05,
"loss": 0.4677,
"step": 132
},
{
"epoch": 0.24314442413162707,
"grad_norm": 0.31340471142779364,
"learning_rate": 2.7124572141908737e-05,
"loss": 0.4643,
"step": 133
},
{
"epoch": 0.2449725776965265,
"grad_norm": 0.2816615247738506,
"learning_rate": 2.707089203495098e-05,
"loss": 0.4503,
"step": 134
},
{
"epoch": 0.24680073126142596,
"grad_norm": 0.33804681353834326,
"learning_rate": 2.701676964211893e-05,
"loss": 0.4874,
"step": 135
},
{
"epoch": 0.24862888482632542,
"grad_norm": 0.30236149555278835,
"learning_rate": 2.696220694649467e-05,
"loss": 0.4311,
"step": 136
},
{
"epoch": 0.25045703839122485,
"grad_norm": 0.30621062436628155,
"learning_rate": 2.69072059472933e-05,
"loss": 0.4593,
"step": 137
},
{
"epoch": 0.2522851919561243,
"grad_norm": 0.32266904581704525,
"learning_rate": 2.685176865978965e-05,
"loss": 0.4543,
"step": 138
},
{
"epoch": 0.25411334552102377,
"grad_norm": 0.2880721603734785,
"learning_rate": 2.6795897115244478e-05,
"loss": 0.428,
"step": 139
},
{
"epoch": 0.25594149908592323,
"grad_norm": 0.25365800448080494,
"learning_rate": 2.6739593360830006e-05,
"loss": 0.4113,
"step": 140
},
{
"epoch": 0.2577696526508227,
"grad_norm": 0.3019181957537817,
"learning_rate": 2.668285945955493e-05,
"loss": 0.4664,
"step": 141
},
{
"epoch": 0.2595978062157221,
"grad_norm": 0.3267274278580188,
"learning_rate": 2.6625697490188832e-05,
"loss": 0.4504,
"step": 142
},
{
"epoch": 0.26142595978062155,
"grad_norm": 0.3069320624243545,
"learning_rate": 2.6568109547185996e-05,
"loss": 0.4782,
"step": 143
},
{
"epoch": 0.263254113345521,
"grad_norm": 0.2940302664382109,
"learning_rate": 2.651009774060868e-05,
"loss": 0.4678,
"step": 144
},
{
"epoch": 0.26508226691042047,
"grad_norm": 0.2856321101391868,
"learning_rate": 2.6451664196049802e-05,
"loss": 0.4465,
"step": 145
},
{
"epoch": 0.26691042047531993,
"grad_norm": 0.26787811332236683,
"learning_rate": 2.639281105455505e-05,
"loss": 0.4508,
"step": 146
},
{
"epoch": 0.2687385740402194,
"grad_norm": 0.310781815465313,
"learning_rate": 2.6333540472544442e-05,
"loss": 0.434,
"step": 147
},
{
"epoch": 0.27056672760511885,
"grad_norm": 0.2831519687807007,
"learning_rate": 2.6273854621733286e-05,
"loss": 0.4506,
"step": 148
},
{
"epoch": 0.27239488117001825,
"grad_norm": 0.26444215825774453,
"learning_rate": 2.621375568905266e-05,
"loss": 0.4491,
"step": 149
},
{
"epoch": 0.2742230347349177,
"grad_norm": 0.3318439638760175,
"learning_rate": 2.615324587656921e-05,
"loss": 0.4817,
"step": 150
},
{
"epoch": 0.2760511882998172,
"grad_norm": 0.26186010556255945,
"learning_rate": 2.6092327401404538e-05,
"loss": 0.4749,
"step": 151
},
{
"epoch": 0.27787934186471663,
"grad_norm": 0.28214623332209515,
"learning_rate": 2.6031002495653913e-05,
"loss": 0.4341,
"step": 152
},
{
"epoch": 0.2797074954296161,
"grad_norm": 0.2658749515925076,
"learning_rate": 2.596927340630451e-05,
"loss": 0.4356,
"step": 153
},
{
"epoch": 0.28153564899451555,
"grad_norm": 0.2919462760948431,
"learning_rate": 2.590714239515306e-05,
"loss": 0.4445,
"step": 154
},
{
"epoch": 0.283363802559415,
"grad_norm": 0.2729523412693323,
"learning_rate": 2.584461173872301e-05,
"loss": 0.453,
"step": 155
},
{
"epoch": 0.2851919561243144,
"grad_norm": 0.27376503902302884,
"learning_rate": 2.5781683728181066e-05,
"loss": 0.4408,
"step": 156
},
{
"epoch": 0.2870201096892139,
"grad_norm": 0.29825503051650754,
"learning_rate": 2.5718360669253276e-05,
"loss": 0.4534,
"step": 157
},
{
"epoch": 0.28884826325411334,
"grad_norm": 0.28587305936658125,
"learning_rate": 2.565464488214053e-05,
"loss": 0.4377,
"step": 158
},
{
"epoch": 0.2906764168190128,
"grad_norm": 0.2514941220386994,
"learning_rate": 2.559053870143356e-05,
"loss": 0.4278,
"step": 159
},
{
"epoch": 0.29250457038391225,
"grad_norm": 0.3064320998896908,
"learning_rate": 2.5526044476027383e-05,
"loss": 0.4606,
"step": 160
},
{
"epoch": 0.2943327239488117,
"grad_norm": 0.2996428461412703,
"learning_rate": 2.5461164569035244e-05,
"loss": 0.4406,
"step": 161
},
{
"epoch": 0.2961608775137112,
"grad_norm": 0.24445658979859197,
"learning_rate": 2.5395901357702032e-05,
"loss": 0.4512,
"step": 162
},
{
"epoch": 0.2979890310786106,
"grad_norm": 0.3020928493183503,
"learning_rate": 2.533025723331718e-05,
"loss": 0.4381,
"step": 163
},
{
"epoch": 0.29981718464351004,
"grad_norm": 0.259497736171246,
"learning_rate": 2.526423460112703e-05,
"loss": 0.4317,
"step": 164
},
{
"epoch": 0.3016453382084095,
"grad_norm": 0.2845997617061945,
"learning_rate": 2.5197835880246702e-05,
"loss": 0.4508,
"step": 165
},
{
"epoch": 0.30347349177330896,
"grad_norm": 0.25599143813626735,
"learning_rate": 2.51310635035715e-05,
"loss": 0.4439,
"step": 166
},
{
"epoch": 0.3053016453382084,
"grad_norm": 0.27151337502431655,
"learning_rate": 2.506391991768771e-05,
"loss": 0.4437,
"step": 167
},
{
"epoch": 0.3071297989031079,
"grad_norm": 0.2820431046523887,
"learning_rate": 2.4996407582782987e-05,
"loss": 0.4303,
"step": 168
},
{
"epoch": 0.30895795246800734,
"grad_norm": 0.2722072935751051,
"learning_rate": 2.4928528972556207e-05,
"loss": 0.4404,
"step": 169
},
{
"epoch": 0.31078610603290674,
"grad_norm": 0.27234372659701445,
"learning_rate": 2.486028657412683e-05,
"loss": 0.4651,
"step": 170
},
{
"epoch": 0.3126142595978062,
"grad_norm": 0.2887259505587887,
"learning_rate": 2.479168288794377e-05,
"loss": 0.4258,
"step": 171
},
{
"epoch": 0.31444241316270566,
"grad_norm": 0.2863499537204956,
"learning_rate": 2.472272042769377e-05,
"loss": 0.4417,
"step": 172
},
{
"epoch": 0.3162705667276051,
"grad_norm": 0.28012635605541286,
"learning_rate": 2.4653401720209316e-05,
"loss": 0.4339,
"step": 173
},
{
"epoch": 0.3180987202925046,
"grad_norm": 0.2901255279537208,
"learning_rate": 2.4583729305376014e-05,
"loss": 0.4434,
"step": 174
},
{
"epoch": 0.31992687385740404,
"grad_norm": 0.28670533171458207,
"learning_rate": 2.451370573603959e-05,
"loss": 0.4465,
"step": 175
},
{
"epoch": 0.3217550274223035,
"grad_norm": 0.29099195398037236,
"learning_rate": 2.4443333577912285e-05,
"loss": 0.4502,
"step": 176
},
{
"epoch": 0.3235831809872029,
"grad_norm": 0.2767320359979587,
"learning_rate": 2.437261540947889e-05,
"loss": 0.4419,
"step": 177
},
{
"epoch": 0.32541133455210236,
"grad_norm": 0.27043885704043746,
"learning_rate": 2.430155382190225e-05,
"loss": 0.4599,
"step": 178
},
{
"epoch": 0.3272394881170018,
"grad_norm": 0.2730084443234302,
"learning_rate": 2.4230151418928326e-05,
"loss": 0.4414,
"step": 179
},
{
"epoch": 0.3290676416819013,
"grad_norm": 0.26865234353182316,
"learning_rate": 2.415841081679079e-05,
"loss": 0.4566,
"step": 180
},
{
"epoch": 0.33089579524680074,
"grad_norm": 0.2587412822837069,
"learning_rate": 2.4086334644115176e-05,
"loss": 0.4165,
"step": 181
},
{
"epoch": 0.3327239488117002,
"grad_norm": 0.2576294969452134,
"learning_rate": 2.4013925541822543e-05,
"loss": 0.4389,
"step": 182
},
{
"epoch": 0.33455210237659966,
"grad_norm": 0.28831507664483796,
"learning_rate": 2.3941186163032736e-05,
"loss": 0.4423,
"step": 183
},
{
"epoch": 0.33638025594149906,
"grad_norm": 0.3030948383504952,
"learning_rate": 2.3868119172967145e-05,
"loss": 0.4714,
"step": 184
},
{
"epoch": 0.3382084095063985,
"grad_norm": 0.27032107322162624,
"learning_rate": 2.379472724885108e-05,
"loss": 0.421,
"step": 185
},
{
"epoch": 0.340036563071298,
"grad_norm": 0.2760164862865866,
"learning_rate": 2.3721013079815645e-05,
"loss": 0.4447,
"step": 186
},
{
"epoch": 0.34186471663619744,
"grad_norm": 0.30959882474810846,
"learning_rate": 2.3646979366799234e-05,
"loss": 0.4609,
"step": 187
},
{
"epoch": 0.3436928702010969,
"grad_norm": 0.2714448004833152,
"learning_rate": 2.3572628822448546e-05,
"loss": 0.4507,
"step": 188
},
{
"epoch": 0.34552102376599636,
"grad_norm": 0.27266712502337187,
"learning_rate": 2.3497964171019214e-05,
"loss": 0.4365,
"step": 189
},
{
"epoch": 0.3473491773308958,
"grad_norm": 0.2781660755274924,
"learning_rate": 2.3422988148275963e-05,
"loss": 0.4456,
"step": 190
},
{
"epoch": 0.3491773308957952,
"grad_norm": 0.2555222680884862,
"learning_rate": 2.3347703501392373e-05,
"loss": 0.4539,
"step": 191
},
{
"epoch": 0.3510054844606947,
"grad_norm": 0.2564207916106098,
"learning_rate": 2.3272112988850237e-05,
"loss": 0.4341,
"step": 192
},
{
"epoch": 0.35283363802559414,
"grad_norm": 0.3184883384427764,
"learning_rate": 2.319621938033848e-05,
"loss": 0.442,
"step": 193
},
{
"epoch": 0.3546617915904936,
"grad_norm": 0.26891911220862924,
"learning_rate": 2.3120025456651658e-05,
"loss": 0.4526,
"step": 194
},
{
"epoch": 0.35648994515539306,
"grad_norm": 0.2508425033641723,
"learning_rate": 2.3043534009588108e-05,
"loss": 0.4403,
"step": 195
},
{
"epoch": 0.3583180987202925,
"grad_norm": 0.2844052123597973,
"learning_rate": 2.296674784184761e-05,
"loss": 0.4127,
"step": 196
},
{
"epoch": 0.360146252285192,
"grad_norm": 0.26258111786205446,
"learning_rate": 2.288966976692873e-05,
"loss": 0.4211,
"step": 197
},
{
"epoch": 0.3619744058500914,
"grad_norm": 0.2520374791669725,
"learning_rate": 2.2812302609025692e-05,
"loss": 0.4221,
"step": 198
},
{
"epoch": 0.36380255941499084,
"grad_norm": 0.26337297446508356,
"learning_rate": 2.2734649202924955e-05,
"loss": 0.4522,
"step": 199
},
{
"epoch": 0.3656307129798903,
"grad_norm": 0.2855343544923256,
"learning_rate": 2.265671239390128e-05,
"loss": 0.4523,
"step": 200
},
{
"epoch": 0.36745886654478976,
"grad_norm": 0.28164010246391696,
"learning_rate": 2.2578495037613532e-05,
"loss": 0.4432,
"step": 201
},
{
"epoch": 0.3692870201096892,
"grad_norm": 0.24907385038322546,
"learning_rate": 2.25e-05,
"loss": 0.4553,
"step": 202
},
{
"epoch": 0.3711151736745887,
"grad_norm": 0.2627538166376267,
"learning_rate": 2.242123015717343e-05,
"loss": 0.4349,
"step": 203
},
{
"epoch": 0.37294332723948814,
"grad_norm": 0.31354766743356877,
"learning_rate": 2.2342188395315614e-05,
"loss": 0.4432,
"step": 204
},
{
"epoch": 0.37477148080438755,
"grad_norm": 0.23675269296963747,
"learning_rate": 2.226287761057165e-05,
"loss": 0.418,
"step": 205
},
{
"epoch": 0.376599634369287,
"grad_norm": 0.2891554833178968,
"learning_rate": 2.2183300708943803e-05,
"loss": 0.4489,
"step": 206
},
{
"epoch": 0.37842778793418647,
"grad_norm": 0.26640363819873775,
"learning_rate": 2.2103460606185075e-05,
"loss": 0.4615,
"step": 207
},
{
"epoch": 0.3802559414990859,
"grad_norm": 0.2558864794570339,
"learning_rate": 2.2023360227692323e-05,
"loss": 0.4261,
"step": 208
},
{
"epoch": 0.3820840950639854,
"grad_norm": 0.2699523067538174,
"learning_rate": 2.194300250839908e-05,
"loss": 0.4195,
"step": 209
},
{
"epoch": 0.38391224862888484,
"grad_norm": 0.27340419517313214,
"learning_rate": 2.1862390392668047e-05,
"loss": 0.4346,
"step": 210
},
{
"epoch": 0.3857404021937843,
"grad_norm": 0.27350160128660483,
"learning_rate": 2.1781526834183172e-05,
"loss": 0.4342,
"step": 211
},
{
"epoch": 0.3875685557586837,
"grad_norm": 0.2708971031355086,
"learning_rate": 2.1700414795841442e-05,
"loss": 0.4273,
"step": 212
},
{
"epoch": 0.38939670932358317,
"grad_norm": 0.2550932743091051,
"learning_rate": 2.1619057249644323e-05,
"loss": 0.414,
"step": 213
},
{
"epoch": 0.3912248628884826,
"grad_norm": 0.2803194395165915,
"learning_rate": 2.1537457176588854e-05,
"loss": 0.4552,
"step": 214
},
{
"epoch": 0.3930530164533821,
"grad_norm": 0.3055183656958912,
"learning_rate": 2.145561756655844e-05,
"loss": 0.467,
"step": 215
},
{
"epoch": 0.39488117001828155,
"grad_norm": 0.2559113693108124,
"learning_rate": 2.137354141821326e-05,
"loss": 0.4606,
"step": 216
},
{
"epoch": 0.396709323583181,
"grad_norm": 0.26418557735551346,
"learning_rate": 2.129123173888045e-05,
"loss": 0.4265,
"step": 217
},
{
"epoch": 0.39853747714808047,
"grad_norm": 0.28274527019614765,
"learning_rate": 2.120869154444387e-05,
"loss": 0.4446,
"step": 218
},
{
"epoch": 0.40036563071297987,
"grad_norm": 0.27808247484752874,
"learning_rate": 2.1125923859233613e-05,
"loss": 0.4444,
"step": 219
},
{
"epoch": 0.40219378427787933,
"grad_norm": 0.27782505030038473,
"learning_rate": 2.1042931715915194e-05,
"loss": 0.4549,
"step": 220
},
{
"epoch": 0.4040219378427788,
"grad_norm": 0.27007523589065596,
"learning_rate": 2.0959718155378434e-05,
"loss": 0.4201,
"step": 221
},
{
"epoch": 0.40585009140767825,
"grad_norm": 0.24533996656769982,
"learning_rate": 2.0876286226626027e-05,
"loss": 0.4394,
"step": 222
},
{
"epoch": 0.4076782449725777,
"grad_norm": 0.25493787596449796,
"learning_rate": 2.0792638986661835e-05,
"loss": 0.4361,
"step": 223
},
{
"epoch": 0.40950639853747717,
"grad_norm": 0.24866181426568823,
"learning_rate": 2.0708779500378876e-05,
"loss": 0.422,
"step": 224
},
{
"epoch": 0.4113345521023766,
"grad_norm": 0.2626340677947448,
"learning_rate": 2.0624710840447002e-05,
"loss": 0.4699,
"step": 225
},
{
"epoch": 0.41316270566727603,
"grad_norm": 0.2548747162562123,
"learning_rate": 2.054043608720036e-05,
"loss": 0.4381,
"step": 226
},
{
"epoch": 0.4149908592321755,
"grad_norm": 0.25440840922549496,
"learning_rate": 2.0455958328524484e-05,
"loss": 0.4201,
"step": 227
},
{
"epoch": 0.41681901279707495,
"grad_norm": 0.26449635353916284,
"learning_rate": 2.0371280659743166e-05,
"loss": 0.4554,
"step": 228
},
{
"epoch": 0.4186471663619744,
"grad_norm": 0.24905974740356548,
"learning_rate": 2.0286406183505042e-05,
"loss": 0.4464,
"step": 229
},
{
"epoch": 0.42047531992687387,
"grad_norm": 0.2615769980174596,
"learning_rate": 2.0201338009669928e-05,
"loss": 0.4349,
"step": 230
},
{
"epoch": 0.42230347349177333,
"grad_norm": 0.23359275701455742,
"learning_rate": 2.0116079255194836e-05,
"loss": 0.4081,
"step": 231
},
{
"epoch": 0.42413162705667273,
"grad_norm": 0.24808773148753901,
"learning_rate": 2.0030633044019785e-05,
"loss": 0.4298,
"step": 232
},
{
"epoch": 0.4259597806215722,
"grad_norm": 0.24232976420589095,
"learning_rate": 1.9945002506953354e-05,
"loss": 0.4409,
"step": 233
},
{
"epoch": 0.42778793418647165,
"grad_norm": 0.22567433822272406,
"learning_rate": 1.9859190781557938e-05,
"loss": 0.442,
"step": 234
},
{
"epoch": 0.4296160877513711,
"grad_norm": 0.2311061708408994,
"learning_rate": 1.9773201012034795e-05,
"loss": 0.421,
"step": 235
},
{
"epoch": 0.43144424131627057,
"grad_norm": 0.22816213383251369,
"learning_rate": 1.9687036349108852e-05,
"loss": 0.4158,
"step": 236
},
{
"epoch": 0.43327239488117003,
"grad_norm": 0.2510785001261421,
"learning_rate": 1.9600699949913258e-05,
"loss": 0.4431,
"step": 237
},
{
"epoch": 0.4351005484460695,
"grad_norm": 0.2598409752211816,
"learning_rate": 1.951419497787367e-05,
"loss": 0.4458,
"step": 238
},
{
"epoch": 0.4369287020109689,
"grad_norm": 0.24332508968296312,
"learning_rate": 1.942752460259241e-05,
"loss": 0.4349,
"step": 239
},
{
"epoch": 0.43875685557586835,
"grad_norm": 0.2552207965650322,
"learning_rate": 1.934069199973226e-05,
"loss": 0.451,
"step": 240
},
{
"epoch": 0.4405850091407678,
"grad_norm": 0.3134487399340424,
"learning_rate": 1.9253700350900144e-05,
"loss": 0.4774,
"step": 241
},
{
"epoch": 0.4424131627056673,
"grad_norm": 0.24561994226229733,
"learning_rate": 1.916655284353055e-05,
"loss": 0.4113,
"step": 242
},
{
"epoch": 0.44424131627056673,
"grad_norm": 0.2441778108075136,
"learning_rate": 1.9079252670768712e-05,
"loss": 0.4164,
"step": 243
},
{
"epoch": 0.4460694698354662,
"grad_norm": 0.24667253648402243,
"learning_rate": 1.8991803031353648e-05,
"loss": 0.4667,
"step": 244
},
{
"epoch": 0.44789762340036565,
"grad_norm": 0.2532308170425146,
"learning_rate": 1.8904207129500916e-05,
"loss": 0.4507,
"step": 245
},
{
"epoch": 0.44972577696526506,
"grad_norm": 0.2410145562683754,
"learning_rate": 1.881646817478526e-05,
"loss": 0.4335,
"step": 246
},
{
"epoch": 0.4515539305301645,
"grad_norm": 0.2677011816262894,
"learning_rate": 1.8728589382022958e-05,
"loss": 0.4655,
"step": 247
},
{
"epoch": 0.453382084095064,
"grad_norm": 0.2813593321239021,
"learning_rate": 1.864057397115406e-05,
"loss": 0.404,
"step": 248
},
{
"epoch": 0.45521023765996343,
"grad_norm": 0.2335241250466671,
"learning_rate": 1.8552425167124402e-05,
"loss": 0.4455,
"step": 249
},
{
"epoch": 0.4570383912248629,
"grad_norm": 0.28481078145389616,
"learning_rate": 1.8464146199767426e-05,
"loss": 0.4479,
"step": 250
},
{
"epoch": 0.45886654478976235,
"grad_norm": 0.29084428436925236,
"learning_rate": 1.837574030368586e-05,
"loss": 0.432,
"step": 251
},
{
"epoch": 0.4606946983546618,
"grad_norm": 0.2526474847324583,
"learning_rate": 1.8287210718133185e-05,
"loss": 0.4475,
"step": 252
},
{
"epoch": 0.4625228519195612,
"grad_norm": 0.23451195798066396,
"learning_rate": 1.819856068689495e-05,
"loss": 0.4271,
"step": 253
},
{
"epoch": 0.4643510054844607,
"grad_norm": 0.2857912680872776,
"learning_rate": 1.8109793458169914e-05,
"loss": 0.4127,
"step": 254
},
{
"epoch": 0.46617915904936014,
"grad_norm": 0.2202114882392875,
"learning_rate": 1.8020912284451047e-05,
"loss": 0.4268,
"step": 255
},
{
"epoch": 0.4680073126142596,
"grad_norm": 0.2847281184055229,
"learning_rate": 1.7931920422406323e-05,
"loss": 0.4259,
"step": 256
},
{
"epoch": 0.46983546617915906,
"grad_norm": 0.2598281156255513,
"learning_rate": 1.7842821132759427e-05,
"loss": 0.4514,
"step": 257
},
{
"epoch": 0.4716636197440585,
"grad_norm": 0.2844988917742255,
"learning_rate": 1.775361768017026e-05,
"loss": 0.4615,
"step": 258
},
{
"epoch": 0.473491773308958,
"grad_norm": 0.26617898814153196,
"learning_rate": 1.766431333311533e-05,
"loss": 0.4426,
"step": 259
},
{
"epoch": 0.4753199268738574,
"grad_norm": 0.2428332403956185,
"learning_rate": 1.757491136376799e-05,
"loss": 0.4386,
"step": 260
},
{
"epoch": 0.47714808043875684,
"grad_norm": 0.2502587869532296,
"learning_rate": 1.7485415047878525e-05,
"loss": 0.4371,
"step": 261
},
{
"epoch": 0.4789762340036563,
"grad_norm": 0.25465501530170725,
"learning_rate": 1.7395827664654168e-05,
"loss": 0.404,
"step": 262
},
{
"epoch": 0.48080438756855576,
"grad_norm": 0.27989566357801815,
"learning_rate": 1.7306152496638906e-05,
"loss": 0.4461,
"step": 263
},
{
"epoch": 0.4826325411334552,
"grad_norm": 0.2671213153187633,
"learning_rate": 1.7216392829593225e-05,
"loss": 0.4461,
"step": 264
},
{
"epoch": 0.4844606946983547,
"grad_norm": 0.24388873946187056,
"learning_rate": 1.712655195237371e-05,
"loss": 0.4366,
"step": 265
},
{
"epoch": 0.48628884826325414,
"grad_norm": 0.2621521928469628,
"learning_rate": 1.703663315681256e-05,
"loss": 0.4361,
"step": 266
},
{
"epoch": 0.48811700182815354,
"grad_norm": 0.23355568735312865,
"learning_rate": 1.6946639737596936e-05,
"loss": 0.4547,
"step": 267
},
{
"epoch": 0.489945155393053,
"grad_norm": 0.2433323374022999,
"learning_rate": 1.685657499214827e-05,
"loss": 0.4217,
"step": 268
},
{
"epoch": 0.49177330895795246,
"grad_norm": 0.2835980161687909,
"learning_rate": 1.676644222050144e-05,
"loss": 0.4699,
"step": 269
},
{
"epoch": 0.4936014625228519,
"grad_norm": 0.2332386437859208,
"learning_rate": 1.6676244725183848e-05,
"loss": 0.4312,
"step": 270
},
{
"epoch": 0.4954296160877514,
"grad_norm": 0.21880840810143093,
"learning_rate": 1.6585985811094412e-05,
"loss": 0.4264,
"step": 271
},
{
"epoch": 0.49725776965265084,
"grad_norm": 0.26293014355482136,
"learning_rate": 1.6495668785382482e-05,
"loss": 0.4377,
"step": 272
},
{
"epoch": 0.4990859232175503,
"grad_norm": 0.25742637339428476,
"learning_rate": 1.6405296957326665e-05,
"loss": 0.4443,
"step": 273
},
{
"epoch": 0.5009140767824497,
"grad_norm": 0.2433810121654413,
"learning_rate": 1.631487363821354e-05,
"loss": 0.4496,
"step": 274
},
{
"epoch": 0.5027422303473492,
"grad_norm": 0.25311836103685337,
"learning_rate": 1.6224402141216387e-05,
"loss": 0.4309,
"step": 275
},
{
"epoch": 0.5045703839122486,
"grad_norm": 0.2363827435920569,
"learning_rate": 1.6133885781273724e-05,
"loss": 0.4444,
"step": 276
},
{
"epoch": 0.506398537477148,
"grad_norm": 0.2375498739699462,
"learning_rate": 1.6043327874967907e-05,
"loss": 0.4264,
"step": 277
},
{
"epoch": 0.5082266910420475,
"grad_norm": 0.23971752210791406,
"learning_rate": 1.595273174040357e-05,
"loss": 0.4547,
"step": 278
},
{
"epoch": 0.5100548446069469,
"grad_norm": 0.2413334639110406,
"learning_rate": 1.5862100697086037e-05,
"loss": 0.4327,
"step": 279
},
{
"epoch": 0.5118829981718465,
"grad_norm": 0.26601032650299644,
"learning_rate": 1.5771438065799745e-05,
"loss": 0.4471,
"step": 280
},
{
"epoch": 0.5137111517367459,
"grad_norm": 0.24442841821130334,
"learning_rate": 1.568074716848652e-05,
"loss": 0.4338,
"step": 281
},
{
"epoch": 0.5155393053016454,
"grad_norm": 0.24161461577302404,
"learning_rate": 1.559003132812387e-05,
"loss": 0.4258,
"step": 282
},
{
"epoch": 0.5173674588665448,
"grad_norm": 0.2678319638095495,
"learning_rate": 1.5499293868603244e-05,
"loss": 0.4445,
"step": 283
},
{
"epoch": 0.5191956124314442,
"grad_norm": 0.23988755994528907,
"learning_rate": 1.5408538114608227e-05,
"loss": 0.4708,
"step": 284
},
{
"epoch": 0.5210237659963437,
"grad_norm": 0.23428382288193408,
"learning_rate": 1.531776739149273e-05,
"loss": 0.4623,
"step": 285
},
{
"epoch": 0.5228519195612431,
"grad_norm": 0.2536087816515709,
"learning_rate": 1.522698502515913e-05,
"loss": 0.4261,
"step": 286
},
{
"epoch": 0.5246800731261426,
"grad_norm": 0.24842397791428794,
"learning_rate": 1.513619434193645e-05,
"loss": 0.4366,
"step": 287
},
{
"epoch": 0.526508226691042,
"grad_norm": 0.2250112860332024,
"learning_rate": 1.5045398668458405e-05,
"loss": 0.4089,
"step": 288
},
{
"epoch": 0.5283363802559415,
"grad_norm": 0.37363010526021784,
"learning_rate": 1.4954601331541598e-05,
"loss": 0.4222,
"step": 289
},
{
"epoch": 0.5301645338208409,
"grad_norm": 0.22688923246924197,
"learning_rate": 1.4863805658063553e-05,
"loss": 0.449,
"step": 290
},
{
"epoch": 0.5319926873857403,
"grad_norm": 0.23042126253173695,
"learning_rate": 1.4773014974840868e-05,
"loss": 0.403,
"step": 291
},
{
"epoch": 0.5338208409506399,
"grad_norm": 0.23434077527141906,
"learning_rate": 1.4682232608507276e-05,
"loss": 0.4234,
"step": 292
},
{
"epoch": 0.5356489945155393,
"grad_norm": 0.2533253906652841,
"learning_rate": 1.4591461885391774e-05,
"loss": 0.4707,
"step": 293
},
{
"epoch": 0.5374771480804388,
"grad_norm": 0.2367196781343478,
"learning_rate": 1.4500706131396758e-05,
"loss": 0.4561,
"step": 294
},
{
"epoch": 0.5393053016453382,
"grad_norm": 0.24155540742179146,
"learning_rate": 1.440996867187613e-05,
"loss": 0.4213,
"step": 295
},
{
"epoch": 0.5411334552102377,
"grad_norm": 0.23888649479779048,
"learning_rate": 1.4319252831513483e-05,
"loss": 0.4539,
"step": 296
},
{
"epoch": 0.5429616087751371,
"grad_norm": 0.2399360717612798,
"learning_rate": 1.4228561934200252e-05,
"loss": 0.4294,
"step": 297
},
{
"epoch": 0.5447897623400365,
"grad_norm": 0.21310241182150114,
"learning_rate": 1.4137899302913967e-05,
"loss": 0.4092,
"step": 298
},
{
"epoch": 0.546617915904936,
"grad_norm": 0.24287450467753255,
"learning_rate": 1.4047268259596438e-05,
"loss": 0.4393,
"step": 299
},
{
"epoch": 0.5484460694698354,
"grad_norm": 0.3527758266951121,
"learning_rate": 1.3956672125032095e-05,
"loss": 0.4297,
"step": 300
},
{
"epoch": 0.5502742230347349,
"grad_norm": 0.2587107143122098,
"learning_rate": 1.3866114218726276e-05,
"loss": 0.4397,
"step": 301
},
{
"epoch": 0.5521023765996343,
"grad_norm": 0.21583428645223188,
"learning_rate": 1.3775597858783618e-05,
"loss": 0.4042,
"step": 302
},
{
"epoch": 0.5539305301645339,
"grad_norm": 0.23908697208787028,
"learning_rate": 1.3685126361786459e-05,
"loss": 0.4645,
"step": 303
},
{
"epoch": 0.5557586837294333,
"grad_norm": 0.22752588531186632,
"learning_rate": 1.359470304267334e-05,
"loss": 0.4293,
"step": 304
},
{
"epoch": 0.5575868372943327,
"grad_norm": 0.23542512484036232,
"learning_rate": 1.3504331214617522e-05,
"loss": 0.404,
"step": 305
},
{
"epoch": 0.5594149908592322,
"grad_norm": 0.25330785799275174,
"learning_rate": 1.3414014188905592e-05,
"loss": 0.4272,
"step": 306
},
{
"epoch": 0.5612431444241316,
"grad_norm": 0.234302364995047,
"learning_rate": 1.3323755274816156e-05,
"loss": 0.4218,
"step": 307
},
{
"epoch": 0.5630712979890311,
"grad_norm": 0.21718003102435057,
"learning_rate": 1.323355777949856e-05,
"loss": 0.435,
"step": 308
},
{
"epoch": 0.5648994515539305,
"grad_norm": 0.23771222899292566,
"learning_rate": 1.3143425007851731e-05,
"loss": 0.4027,
"step": 309
},
{
"epoch": 0.56672760511883,
"grad_norm": 0.21581964509667692,
"learning_rate": 1.3053360262403065e-05,
"loss": 0.4275,
"step": 310
},
{
"epoch": 0.5685557586837294,
"grad_norm": 0.23676296954191686,
"learning_rate": 1.2963366843187444e-05,
"loss": 0.4573,
"step": 311
},
{
"epoch": 0.5703839122486288,
"grad_norm": 0.22115404157686464,
"learning_rate": 1.2873448047626292e-05,
"loss": 0.4256,
"step": 312
},
{
"epoch": 0.5722120658135283,
"grad_norm": 0.22618312895939494,
"learning_rate": 1.278360717040678e-05,
"loss": 0.4412,
"step": 313
},
{
"epoch": 0.5740402193784278,
"grad_norm": 0.22530056042606073,
"learning_rate": 1.2693847503361098e-05,
"loss": 0.4396,
"step": 314
},
{
"epoch": 0.5758683729433273,
"grad_norm": 0.23534013353212493,
"learning_rate": 1.2604172335345831e-05,
"loss": 0.4285,
"step": 315
},
{
"epoch": 0.5776965265082267,
"grad_norm": 0.22290585998943552,
"learning_rate": 1.2514584952121476e-05,
"loss": 0.4307,
"step": 316
},
{
"epoch": 0.5795246800731262,
"grad_norm": 0.2175202235328278,
"learning_rate": 1.2425088636232018e-05,
"loss": 0.4242,
"step": 317
},
{
"epoch": 0.5813528336380256,
"grad_norm": 0.23770394090188834,
"learning_rate": 1.233568666688467e-05,
"loss": 0.425,
"step": 318
},
{
"epoch": 0.583180987202925,
"grad_norm": 0.24602097112526436,
"learning_rate": 1.2246382319829742e-05,
"loss": 0.4314,
"step": 319
},
{
"epoch": 0.5850091407678245,
"grad_norm": 0.21380429110162302,
"learning_rate": 1.2157178867240578e-05,
"loss": 0.4124,
"step": 320
},
{
"epoch": 0.5868372943327239,
"grad_norm": 0.2059051087152771,
"learning_rate": 1.2068079577593677e-05,
"loss": 0.434,
"step": 321
},
{
"epoch": 0.5886654478976234,
"grad_norm": 0.20782805102967639,
"learning_rate": 1.1979087715548956e-05,
"loss": 0.426,
"step": 322
},
{
"epoch": 0.5904936014625228,
"grad_norm": 0.2308269667389375,
"learning_rate": 1.1890206541830081e-05,
"loss": 0.4367,
"step": 323
},
{
"epoch": 0.5923217550274223,
"grad_norm": 0.22433989736933327,
"learning_rate": 1.1801439313105053e-05,
"loss": 0.4137,
"step": 324
},
{
"epoch": 0.5941499085923218,
"grad_norm": 0.24818633510952665,
"learning_rate": 1.171278928186682e-05,
"loss": 0.4326,
"step": 325
},
{
"epoch": 0.5959780621572212,
"grad_norm": 0.22641907903537842,
"learning_rate": 1.1624259696314142e-05,
"loss": 0.4452,
"step": 326
},
{
"epoch": 0.5978062157221207,
"grad_norm": 0.2502208733018346,
"learning_rate": 1.1535853800232578e-05,
"loss": 0.4219,
"step": 327
},
{
"epoch": 0.5996343692870201,
"grad_norm": 0.20315718232089036,
"learning_rate": 1.1447574832875602e-05,
"loss": 0.4146,
"step": 328
},
{
"epoch": 0.6014625228519196,
"grad_norm": 0.2704027878651348,
"learning_rate": 1.1359426028845942e-05,
"loss": 0.4388,
"step": 329
},
{
"epoch": 0.603290676416819,
"grad_norm": 0.2474545966686057,
"learning_rate": 1.1271410617977048e-05,
"loss": 0.4316,
"step": 330
},
{
"epoch": 0.6051188299817185,
"grad_norm": 0.23074391315897977,
"learning_rate": 1.1183531825214744e-05,
"loss": 0.4513,
"step": 331
},
{
"epoch": 0.6069469835466179,
"grad_norm": 0.25731515715188485,
"learning_rate": 1.1095792870499087e-05,
"loss": 0.4366,
"step": 332
},
{
"epoch": 0.6087751371115173,
"grad_norm": 0.25634778394355273,
"learning_rate": 1.1008196968646356e-05,
"loss": 0.4718,
"step": 333
},
{
"epoch": 0.6106032906764168,
"grad_norm": 0.26331951561134626,
"learning_rate": 1.0920747329231289e-05,
"loss": 0.4551,
"step": 334
},
{
"epoch": 0.6124314442413162,
"grad_norm": 0.3504082117772083,
"learning_rate": 1.0833447156469448e-05,
"loss": 0.4125,
"step": 335
},
{
"epoch": 0.6142595978062158,
"grad_norm": 0.2574920075665294,
"learning_rate": 1.0746299649099855e-05,
"loss": 0.4539,
"step": 336
},
{
"epoch": 0.6160877513711152,
"grad_norm": 0.2292032218757861,
"learning_rate": 1.0659308000267744e-05,
"loss": 0.4305,
"step": 337
},
{
"epoch": 0.6179159049360147,
"grad_norm": 0.26470754729984625,
"learning_rate": 1.0572475397407594e-05,
"loss": 0.4363,
"step": 338
},
{
"epoch": 0.6197440585009141,
"grad_norm": 0.24478744270497613,
"learning_rate": 1.0485805022126331e-05,
"loss": 0.431,
"step": 339
},
{
"epoch": 0.6215722120658135,
"grad_norm": 0.22868825374949903,
"learning_rate": 1.0399300050086748e-05,
"loss": 0.4127,
"step": 340
},
{
"epoch": 0.623400365630713,
"grad_norm": 0.22844457512096136,
"learning_rate": 1.0312963650891147e-05,
"loss": 0.4248,
"step": 341
},
{
"epoch": 0.6252285191956124,
"grad_norm": 0.20397429178914392,
"learning_rate": 1.0226798987965207e-05,
"loss": 0.397,
"step": 342
},
{
"epoch": 0.6270566727605119,
"grad_norm": 0.24300323205974386,
"learning_rate": 1.014080921844207e-05,
"loss": 0.4192,
"step": 343
},
{
"epoch": 0.6288848263254113,
"grad_norm": 0.23615643791202737,
"learning_rate": 1.0054997493046647e-05,
"loss": 0.4252,
"step": 344
},
{
"epoch": 0.6307129798903108,
"grad_norm": 0.22224915020860084,
"learning_rate": 9.969366955980217e-06,
"loss": 0.442,
"step": 345
},
{
"epoch": 0.6325411334552102,
"grad_norm": 0.21254715342037545,
"learning_rate": 9.883920744805168e-06,
"loss": 0.4269,
"step": 346
},
{
"epoch": 0.6343692870201096,
"grad_norm": 0.2505381298484902,
"learning_rate": 9.798661990330073e-06,
"loss": 0.4491,
"step": 347
},
{
"epoch": 0.6361974405850092,
"grad_norm": 0.222922428077655,
"learning_rate": 9.713593816494955e-06,
"loss": 0.4284,
"step": 348
},
{
"epoch": 0.6380255941499086,
"grad_norm": 0.24435028204482773,
"learning_rate": 9.628719340256842e-06,
"loss": 0.4212,
"step": 349
},
{
"epoch": 0.6398537477148081,
"grad_norm": 0.2141788033954091,
"learning_rate": 9.544041671475522e-06,
"loss": 0.4302,
"step": 350
},
{
"epoch": 0.6416819012797075,
"grad_norm": 0.21973056409704741,
"learning_rate": 9.45956391279964e-06,
"loss": 0.4418,
"step": 351
},
{
"epoch": 0.643510054844607,
"grad_norm": 0.21401712763189235,
"learning_rate": 9.375289159552999e-06,
"loss": 0.4373,
"step": 352
},
{
"epoch": 0.6453382084095064,
"grad_norm": 0.2390813821307003,
"learning_rate": 9.291220499621128e-06,
"loss": 0.4292,
"step": 353
},
{
"epoch": 0.6471663619744058,
"grad_norm": 0.23488978656838674,
"learning_rate": 9.207361013338166e-06,
"loss": 0.4413,
"step": 354
},
{
"epoch": 0.6489945155393053,
"grad_norm": 0.32109686809085475,
"learning_rate": 9.123713773373977e-06,
"loss": 0.444,
"step": 355
},
{
"epoch": 0.6508226691042047,
"grad_norm": 0.23126269385774315,
"learning_rate": 9.040281844621568e-06,
"loss": 0.4485,
"step": 356
},
{
"epoch": 0.6526508226691042,
"grad_norm": 0.2312349995045198,
"learning_rate": 8.95706828408481e-06,
"loss": 0.4656,
"step": 357
},
{
"epoch": 0.6544789762340036,
"grad_norm": 0.22595422455956285,
"learning_rate": 8.874076140766391e-06,
"loss": 0.4322,
"step": 358
},
{
"epoch": 0.6563071297989032,
"grad_norm": 0.2467206671663252,
"learning_rate": 8.791308455556134e-06,
"loss": 0.4647,
"step": 359
},
{
"epoch": 0.6581352833638026,
"grad_norm": 0.20260204616971095,
"learning_rate": 8.708768261119549e-06,
"loss": 0.4034,
"step": 360
},
{
"epoch": 0.659963436928702,
"grad_norm": 0.21644768273281778,
"learning_rate": 8.626458581786741e-06,
"loss": 0.4254,
"step": 361
},
{
"epoch": 0.6617915904936015,
"grad_norm": 0.23345228716575211,
"learning_rate": 8.54438243344157e-06,
"loss": 0.4298,
"step": 362
},
{
"epoch": 0.6636197440585009,
"grad_norm": 0.23761761296603565,
"learning_rate": 8.462542823411145e-06,
"loss": 0.4656,
"step": 363
},
{
"epoch": 0.6654478976234004,
"grad_norm": 0.2024404905038723,
"learning_rate": 8.380942750355678e-06,
"loss": 0.4088,
"step": 364
},
{
"epoch": 0.6672760511882998,
"grad_norm": 0.2250242044317574,
"learning_rate": 8.29958520415856e-06,
"loss": 0.4338,
"step": 365
},
{
"epoch": 0.6691042047531993,
"grad_norm": 0.22826787810679472,
"learning_rate": 8.218473165816832e-06,
"loss": 0.4267,
"step": 366
},
{
"epoch": 0.6709323583180987,
"grad_norm": 0.23447452470751842,
"learning_rate": 8.13760960733195e-06,
"loss": 0.4292,
"step": 367
},
{
"epoch": 0.6727605118829981,
"grad_norm": 0.2340371506899419,
"learning_rate": 8.056997491600925e-06,
"loss": 0.4288,
"step": 368
},
{
"epoch": 0.6745886654478976,
"grad_norm": 0.21118605658197215,
"learning_rate": 7.976639772307686e-06,
"loss": 0.4453,
"step": 369
},
{
"epoch": 0.676416819012797,
"grad_norm": 0.23569144130105643,
"learning_rate": 7.896539393814926e-06,
"loss": 0.4344,
"step": 370
},
{
"epoch": 0.6782449725776966,
"grad_norm": 0.23524067199595933,
"learning_rate": 7.816699291056196e-06,
"loss": 0.4279,
"step": 371
},
{
"epoch": 0.680073126142596,
"grad_norm": 0.23054186478241442,
"learning_rate": 7.737122389428354e-06,
"loss": 0.4535,
"step": 372
},
{
"epoch": 0.6819012797074955,
"grad_norm": 0.22158488037091248,
"learning_rate": 7.657811604684385e-06,
"loss": 0.4406,
"step": 373
},
{
"epoch": 0.6837294332723949,
"grad_norm": 0.21854720152712095,
"learning_rate": 7.578769842826572e-06,
"loss": 0.4169,
"step": 374
},
{
"epoch": 0.6855575868372943,
"grad_norm": 0.24796845622790753,
"learning_rate": 7.500000000000004e-06,
"loss": 0.4397,
"step": 375
},
{
"epoch": 0.6873857404021938,
"grad_norm": 0.21265082674010502,
"learning_rate": 7.421504962386476e-06,
"loss": 0.4197,
"step": 376
},
{
"epoch": 0.6892138939670932,
"grad_norm": 0.20579035323970915,
"learning_rate": 7.343287606098719e-06,
"loss": 0.4361,
"step": 377
},
{
"epoch": 0.6910420475319927,
"grad_norm": 0.26821523496520233,
"learning_rate": 7.265350797075046e-06,
"loss": 0.4271,
"step": 378
},
{
"epoch": 0.6928702010968921,
"grad_norm": 0.24850263668705072,
"learning_rate": 7.187697390974307e-06,
"loss": 0.4394,
"step": 379
},
{
"epoch": 0.6946983546617916,
"grad_norm": 0.2368443493234265,
"learning_rate": 7.110330233071275e-06,
"loss": 0.422,
"step": 380
},
{
"epoch": 0.696526508226691,
"grad_norm": 0.2373300944554724,
"learning_rate": 7.03325215815239e-06,
"loss": 0.4672,
"step": 381
},
{
"epoch": 0.6983546617915904,
"grad_norm": 0.23091696951892882,
"learning_rate": 6.9564659904118925e-06,
"loss": 0.4475,
"step": 382
},
{
"epoch": 0.70018281535649,
"grad_norm": 0.22821762753125135,
"learning_rate": 6.879974543348344e-06,
"loss": 0.4526,
"step": 383
},
{
"epoch": 0.7020109689213894,
"grad_norm": 0.22270221536060336,
"learning_rate": 6.803780619661527e-06,
"loss": 0.4334,
"step": 384
},
{
"epoch": 0.7038391224862889,
"grad_norm": 0.21474970364310128,
"learning_rate": 6.727887011149762e-06,
"loss": 0.41,
"step": 385
},
{
"epoch": 0.7056672760511883,
"grad_norm": 0.20072244573229295,
"learning_rate": 6.652296498607628e-06,
"loss": 0.4242,
"step": 386
},
{
"epoch": 0.7074954296160878,
"grad_norm": 0.22140503089356514,
"learning_rate": 6.577011851724044e-06,
"loss": 0.428,
"step": 387
},
{
"epoch": 0.7093235831809872,
"grad_norm": 0.22728109637185054,
"learning_rate": 6.502035828980786e-06,
"loss": 0.443,
"step": 388
},
{
"epoch": 0.7111517367458866,
"grad_norm": 0.22775217564080383,
"learning_rate": 6.4273711775514545e-06,
"loss": 0.426,
"step": 389
},
{
"epoch": 0.7129798903107861,
"grad_norm": 0.2169525200009275,
"learning_rate": 6.353020633200773e-06,
"loss": 0.4324,
"step": 390
},
{
"epoch": 0.7148080438756855,
"grad_norm": 0.21065282217975448,
"learning_rate": 6.278986920184362e-06,
"loss": 0.4402,
"step": 391
},
{
"epoch": 0.716636197440585,
"grad_norm": 0.23089737840960195,
"learning_rate": 6.2052727511489224e-06,
"loss": 0.435,
"step": 392
},
{
"epoch": 0.7184643510054844,
"grad_norm": 0.20103080266684545,
"learning_rate": 6.1318808270328595e-06,
"loss": 0.4391,
"step": 393
},
{
"epoch": 0.720292504570384,
"grad_norm": 0.1819699544299102,
"learning_rate": 6.058813836967268e-06,
"loss": 0.4234,
"step": 394
},
{
"epoch": 0.7221206581352834,
"grad_norm": 0.1970644497845393,
"learning_rate": 5.986074458177454e-06,
"loss": 0.4023,
"step": 395
},
{
"epoch": 0.7239488117001828,
"grad_norm": 0.2207239084699447,
"learning_rate": 5.913665355884823e-06,
"loss": 0.4536,
"step": 396
},
{
"epoch": 0.7257769652650823,
"grad_norm": 0.210996065737019,
"learning_rate": 5.841589183209211e-06,
"loss": 0.4188,
"step": 397
},
{
"epoch": 0.7276051188299817,
"grad_norm": 0.22498686510864818,
"learning_rate": 5.769848581071677e-06,
"loss": 0.4381,
"step": 398
},
{
"epoch": 0.7294332723948812,
"grad_norm": 0.21270916771515924,
"learning_rate": 5.698446178097748e-06,
"loss": 0.4156,
"step": 399
},
{
"epoch": 0.7312614259597806,
"grad_norm": 0.22221640286229932,
"learning_rate": 5.627384590521113e-06,
"loss": 0.4395,
"step": 400
},
{
"epoch": 0.7330895795246801,
"grad_norm": 0.26239010826295806,
"learning_rate": 5.556666422087718e-06,
"loss": 0.4414,
"step": 401
},
{
"epoch": 0.7349177330895795,
"grad_norm": 0.22826022941736593,
"learning_rate": 5.4862942639604095e-06,
"loss": 0.428,
"step": 402
},
{
"epoch": 0.7367458866544789,
"grad_norm": 0.20636330833730426,
"learning_rate": 5.4162706946239845e-06,
"loss": 0.4263,
"step": 403
},
{
"epoch": 0.7385740402193784,
"grad_norm": 0.22431157145631084,
"learning_rate": 5.346598279790688e-06,
"loss": 0.4044,
"step": 404
},
{
"epoch": 0.7404021937842779,
"grad_norm": 0.5892059212939031,
"learning_rate": 5.277279572306229e-06,
"loss": 0.4153,
"step": 405
},
{
"epoch": 0.7422303473491774,
"grad_norm": 0.21508181389329334,
"learning_rate": 5.208317112056232e-06,
"loss": 0.4232,
"step": 406
},
{
"epoch": 0.7440585009140768,
"grad_norm": 0.21875844269434971,
"learning_rate": 5.1397134258731715e-06,
"loss": 0.4272,
"step": 407
},
{
"epoch": 0.7458866544789763,
"grad_norm": 0.2175895820259891,
"learning_rate": 5.071471027443797e-06,
"loss": 0.438,
"step": 408
},
{
"epoch": 0.7477148080438757,
"grad_norm": 0.2104760636466657,
"learning_rate": 5.003592417217018e-06,
"loss": 0.4369,
"step": 409
},
{
"epoch": 0.7495429616087751,
"grad_norm": 0.20325748365814275,
"learning_rate": 4.93608008231229e-06,
"loss": 0.4231,
"step": 410
},
{
"epoch": 0.7513711151736746,
"grad_norm": 0.2283795847438411,
"learning_rate": 4.868936496428499e-06,
"loss": 0.4278,
"step": 411
},
{
"epoch": 0.753199268738574,
"grad_norm": 0.21598790103934168,
"learning_rate": 4.8021641197533015e-06,
"loss": 0.4751,
"step": 412
},
{
"epoch": 0.7550274223034735,
"grad_norm": 0.21267188419993519,
"learning_rate": 4.735765398872975e-06,
"loss": 0.4218,
"step": 413
},
{
"epoch": 0.7568555758683729,
"grad_norm": 0.21835371602592832,
"learning_rate": 4.669742766682822e-06,
"loss": 0.467,
"step": 414
},
{
"epoch": 0.7586837294332724,
"grad_norm": 0.21431758759649963,
"learning_rate": 4.604098642297967e-06,
"loss": 0.4427,
"step": 415
},
{
"epoch": 0.7605118829981719,
"grad_norm": 0.20394610806752284,
"learning_rate": 4.538835430964761e-06,
"loss": 0.4158,
"step": 416
},
{
"epoch": 0.7623400365630713,
"grad_norm": 0.22118152352329448,
"learning_rate": 4.4739555239726195e-06,
"loss": 0.4126,
"step": 417
},
{
"epoch": 0.7641681901279708,
"grad_norm": 0.19821690630376193,
"learning_rate": 4.409461298566441e-06,
"loss": 0.4136,
"step": 418
},
{
"epoch": 0.7659963436928702,
"grad_norm": 0.20576614680650593,
"learning_rate": 4.345355117859476e-06,
"loss": 0.4275,
"step": 419
},
{
"epoch": 0.7678244972577697,
"grad_norm": 0.21000754849206363,
"learning_rate": 4.281639330746726e-06,
"loss": 0.4313,
"step": 420
},
{
"epoch": 0.7696526508226691,
"grad_norm": 0.21010480215020017,
"learning_rate": 4.218316271818936e-06,
"loss": 0.4218,
"step": 421
},
{
"epoch": 0.7714808043875686,
"grad_norm": 0.20418431773261317,
"learning_rate": 4.155388261276991e-06,
"loss": 0.4248,
"step": 422
},
{
"epoch": 0.773308957952468,
"grad_norm": 0.2199774987466384,
"learning_rate": 4.092857604846938e-06,
"loss": 0.4248,
"step": 423
},
{
"epoch": 0.7751371115173674,
"grad_norm": 0.21214957724197503,
"learning_rate": 4.030726593695488e-06,
"loss": 0.4378,
"step": 424
},
{
"epoch": 0.7769652650822669,
"grad_norm": 0.20578868307896656,
"learning_rate": 3.968997504346089e-06,
"loss": 0.4122,
"step": 425
},
{
"epoch": 0.7787934186471663,
"grad_norm": 0.1886281438392857,
"learning_rate": 3.907672598595466e-06,
"loss": 0.416,
"step": 426
},
{
"epoch": 0.7806215722120659,
"grad_norm": 0.204539336638738,
"learning_rate": 3.846754123430795e-06,
"loss": 0.4601,
"step": 427
},
{
"epoch": 0.7824497257769653,
"grad_norm": 0.20439689596076707,
"learning_rate": 3.786244310947344e-06,
"loss": 0.4663,
"step": 428
},
{
"epoch": 0.7842778793418648,
"grad_norm": 0.19344518153234772,
"learning_rate": 3.7261453782667127e-06,
"loss": 0.4354,
"step": 429
},
{
"epoch": 0.7861060329067642,
"grad_norm": 0.20523878372722695,
"learning_rate": 3.6664595274555616e-06,
"loss": 0.4606,
"step": 430
},
{
"epoch": 0.7879341864716636,
"grad_norm": 0.22004057714254224,
"learning_rate": 3.6071889454449494e-06,
"loss": 0.4174,
"step": 431
},
{
"epoch": 0.7897623400365631,
"grad_norm": 0.2021255803819884,
"learning_rate": 3.5483358039502007e-06,
"loss": 0.4261,
"step": 432
},
{
"epoch": 0.7915904936014625,
"grad_norm": 0.1929633249306559,
"learning_rate": 3.4899022593913255e-06,
"loss": 0.438,
"step": 433
},
{
"epoch": 0.793418647166362,
"grad_norm": 0.20480335838833477,
"learning_rate": 3.431890452814011e-06,
"loss": 0.4561,
"step": 434
},
{
"epoch": 0.7952468007312614,
"grad_norm": 0.20512601271555475,
"learning_rate": 3.374302509811169e-06,
"loss": 0.4209,
"step": 435
},
{
"epoch": 0.7970749542961609,
"grad_norm": 0.21794939868456775,
"learning_rate": 3.3171405404450693e-06,
"loss": 0.4336,
"step": 436
},
{
"epoch": 0.7989031078610603,
"grad_norm": 0.191636319781514,
"learning_rate": 3.2604066391699955e-06,
"loss": 0.4368,
"step": 437
},
{
"epoch": 0.8007312614259597,
"grad_norm": 0.19873019665769706,
"learning_rate": 3.204102884755522e-06,
"loss": 0.4146,
"step": 438
},
{
"epoch": 0.8025594149908593,
"grad_norm": 0.1855317851926338,
"learning_rate": 3.1482313402103496e-06,
"loss": 0.4216,
"step": 439
},
{
"epoch": 0.8043875685557587,
"grad_norm": 0.2020787383524066,
"learning_rate": 3.092794052706705e-06,
"loss": 0.435,
"step": 440
},
{
"epoch": 0.8062157221206582,
"grad_norm": 0.19443105615168507,
"learning_rate": 3.037793053505331e-06,
"loss": 0.4349,
"step": 441
},
{
"epoch": 0.8080438756855576,
"grad_norm": 0.2026227342481353,
"learning_rate": 2.9832303578810684e-06,
"loss": 0.4024,
"step": 442
},
{
"epoch": 0.8098720292504571,
"grad_norm": 0.19573762549365448,
"learning_rate": 2.9291079650490166e-06,
"loss": 0.4412,
"step": 443
},
{
"epoch": 0.8117001828153565,
"grad_norm": 0.20669588995453167,
"learning_rate": 2.875427858091268e-06,
"loss": 0.4334,
"step": 444
},
{
"epoch": 0.8135283363802559,
"grad_norm": 0.21522636326162242,
"learning_rate": 2.822192003884244e-06,
"loss": 0.4408,
"step": 445
},
{
"epoch": 0.8153564899451554,
"grad_norm": 0.19651960546053018,
"learning_rate": 2.769402353026646e-06,
"loss": 0.4332,
"step": 446
},
{
"epoch": 0.8171846435100548,
"grad_norm": 0.20383575785459615,
"learning_rate": 2.7170608397679825e-06,
"loss": 0.4164,
"step": 447
},
{
"epoch": 0.8190127970749543,
"grad_norm": 0.21018717171591206,
"learning_rate": 2.6651693819376753e-06,
"loss": 0.4463,
"step": 448
},
{
"epoch": 0.8208409506398537,
"grad_norm": 0.20937930611080946,
"learning_rate": 2.6137298808748093e-06,
"loss": 0.4188,
"step": 449
},
{
"epoch": 0.8226691042047533,
"grad_norm": 0.22706868702353505,
"learning_rate": 2.5627442213584574e-06,
"loss": 0.4201,
"step": 450
},
{
"epoch": 0.8244972577696527,
"grad_norm": 0.20530598319338253,
"learning_rate": 2.5122142715386203e-06,
"loss": 0.4488,
"step": 451
},
{
"epoch": 0.8263254113345521,
"grad_norm": 0.19894243483138846,
"learning_rate": 2.462141882867785e-06,
"loss": 0.4144,
"step": 452
},
{
"epoch": 0.8281535648994516,
"grad_norm": 0.2037935555659803,
"learning_rate": 2.41252889003307e-06,
"loss": 0.4263,
"step": 453
},
{
"epoch": 0.829981718464351,
"grad_norm": 0.20620169687161713,
"learning_rate": 2.3633771108890274e-06,
"loss": 0.4287,
"step": 454
},
{
"epoch": 0.8318098720292505,
"grad_norm": 0.2014915847909342,
"learning_rate": 2.3146883463910018e-06,
"loss": 0.4583,
"step": 455
},
{
"epoch": 0.8336380255941499,
"grad_norm": 0.19976760209400773,
"learning_rate": 2.266464380529169e-06,
"loss": 0.4402,
"step": 456
},
{
"epoch": 0.8354661791590493,
"grad_norm": 0.2069104803503857,
"learning_rate": 2.218706980263158e-06,
"loss": 0.4584,
"step": 457
},
{
"epoch": 0.8372943327239488,
"grad_norm": 0.20632313603116845,
"learning_rate": 2.171417895457306e-06,
"loss": 0.4429,
"step": 458
},
{
"epoch": 0.8391224862888482,
"grad_norm": 0.18419607677951597,
"learning_rate": 2.124598858816554e-06,
"loss": 0.4227,
"step": 459
},
{
"epoch": 0.8409506398537477,
"grad_norm": 0.18649993832518527,
"learning_rate": 2.0782515858229388e-06,
"loss": 0.4074,
"step": 460
},
{
"epoch": 0.8427787934186471,
"grad_norm": 0.21647144597998413,
"learning_rate": 2.03237777467277e-06,
"loss": 0.4474,
"step": 461
},
{
"epoch": 0.8446069469835467,
"grad_norm": 0.19623227819931613,
"learning_rate": 1.9869791062143663e-06,
"loss": 0.4485,
"step": 462
},
{
"epoch": 0.8464351005484461,
"grad_norm": 0.19921087499030055,
"learning_rate": 1.9420572438864954e-06,
"loss": 0.4252,
"step": 463
},
{
"epoch": 0.8482632541133455,
"grad_norm": 0.18411267626689917,
"learning_rate": 1.8976138336574195e-06,
"loss": 0.4216,
"step": 464
},
{
"epoch": 0.850091407678245,
"grad_norm": 0.20840375502535938,
"learning_rate": 1.8536505039645785e-06,
"loss": 0.4093,
"step": 465
},
{
"epoch": 0.8519195612431444,
"grad_norm": 0.2015956797087723,
"learning_rate": 1.8101688656549298e-06,
"loss": 0.4201,
"step": 466
},
{
"epoch": 0.8537477148080439,
"grad_norm": 0.18741347554534585,
"learning_rate": 1.7671705119259274e-06,
"loss": 0.4161,
"step": 467
},
{
"epoch": 0.8555758683729433,
"grad_norm": 0.21388589426910684,
"learning_rate": 1.7246570182671317e-06,
"loss": 0.4576,
"step": 468
},
{
"epoch": 0.8574040219378428,
"grad_norm": 0.2018135484162476,
"learning_rate": 1.682629942402511e-06,
"loss": 0.4113,
"step": 469
},
{
"epoch": 0.8592321755027422,
"grad_norm": 0.24452792776099322,
"learning_rate": 1.6410908242333284e-06,
"loss": 0.4089,
"step": 470
},
{
"epoch": 0.8610603290676416,
"grad_norm": 0.20178814526816566,
"learning_rate": 1.6000411857817438e-06,
"loss": 0.4498,
"step": 471
},
{
"epoch": 0.8628884826325411,
"grad_norm": 0.20766015798465665,
"learning_rate": 1.559482531135052e-06,
"loss": 0.4308,
"step": 472
},
{
"epoch": 0.8647166361974405,
"grad_norm": 0.19647310981956118,
"learning_rate": 1.5194163463905464e-06,
"loss": 0.4237,
"step": 473
},
{
"epoch": 0.8665447897623401,
"grad_norm": 0.2090092069463731,
"learning_rate": 1.4798440996010832e-06,
"loss": 0.4727,
"step": 474
},
{
"epoch": 0.8683729433272395,
"grad_norm": 0.2035707096977595,
"learning_rate": 1.4407672407212903e-06,
"loss": 0.4249,
"step": 475
},
{
"epoch": 0.870201096892139,
"grad_norm": 0.193673601330511,
"learning_rate": 1.4021872015544436e-06,
"loss": 0.4079,
"step": 476
},
{
"epoch": 0.8720292504570384,
"grad_norm": 0.1860147544164734,
"learning_rate": 1.3641053956999872e-06,
"loss": 0.427,
"step": 477
},
{
"epoch": 0.8738574040219378,
"grad_norm": 0.20106253569921867,
"learning_rate": 1.3265232185017568e-06,
"loss": 0.4403,
"step": 478
},
{
"epoch": 0.8756855575868373,
"grad_norm": 0.20889891898189425,
"learning_rate": 1.2894420469968531e-06,
"loss": 0.442,
"step": 479
},
{
"epoch": 0.8775137111517367,
"grad_norm": 0.21579172543010242,
"learning_rate": 1.2528632398651735e-06,
"loss": 0.4524,
"step": 480
},
{
"epoch": 0.8793418647166362,
"grad_norm": 0.1929721500871047,
"learning_rate": 1.216788137379632e-06,
"loss": 0.4394,
"step": 481
},
{
"epoch": 0.8811700182815356,
"grad_norm": 0.2027520838331581,
"learning_rate": 1.181218061357061e-06,
"loss": 0.4561,
"step": 482
},
{
"epoch": 0.8829981718464351,
"grad_norm": 0.2019924797320044,
"learning_rate": 1.146154315109771e-06,
"loss": 0.4081,
"step": 483
},
{
"epoch": 0.8848263254113345,
"grad_norm": 0.20531685417721204,
"learning_rate": 1.1115981833977929e-06,
"loss": 0.4212,
"step": 484
},
{
"epoch": 0.886654478976234,
"grad_norm": 0.19657067016449098,
"learning_rate": 1.07755093238181e-06,
"loss": 0.4452,
"step": 485
},
{
"epoch": 0.8884826325411335,
"grad_norm": 0.20980179090504925,
"learning_rate": 1.0440138095767721e-06,
"loss": 0.4454,
"step": 486
},
{
"epoch": 0.8903107861060329,
"grad_norm": 0.19666682586019124,
"learning_rate": 1.0109880438061654e-06,
"loss": 0.4234,
"step": 487
},
{
"epoch": 0.8921389396709324,
"grad_norm": 0.20130916027345605,
"learning_rate": 9.784748451570068e-07,
"loss": 0.4392,
"step": 488
},
{
"epoch": 0.8939670932358318,
"grad_norm": 0.20109495721005471,
"learning_rate": 9.46475404935484e-07,
"loss": 0.4335,
"step": 489
},
{
"epoch": 0.8957952468007313,
"grad_norm": 0.20122377375831474,
"learning_rate": 9.149908956233366e-07,
"loss": 0.4108,
"step": 490
},
{
"epoch": 0.8976234003656307,
"grad_norm": 0.201965889207875,
"learning_rate": 8.840224708348687e-07,
"loss": 0.4178,
"step": 491
},
{
"epoch": 0.8994515539305301,
"grad_norm": 0.18609558289052022,
"learning_rate": 8.535712652746891e-07,
"loss": 0.4243,
"step": 492
},
{
"epoch": 0.9012797074954296,
"grad_norm": 0.1806096540849523,
"learning_rate": 8.23638394696134e-07,
"loss": 0.4202,
"step": 493
},
{
"epoch": 0.903107861060329,
"grad_norm": 0.2067319930487612,
"learning_rate": 7.942249558603904e-07,
"loss": 0.4352,
"step": 494
},
{
"epoch": 0.9049360146252285,
"grad_norm": 0.20864885239932834,
"learning_rate": 7.65332026496301e-07,
"loss": 0.4382,
"step": 495
},
{
"epoch": 0.906764168190128,
"grad_norm": 0.3102684122243654,
"learning_rate": 7.369606652608745e-07,
"loss": 0.3962,
"step": 496
},
{
"epoch": 0.9085923217550275,
"grad_norm": 0.1839392468913682,
"learning_rate": 7.091119117005135e-07,
"loss": 0.4,
"step": 497
},
{
"epoch": 0.9104204753199269,
"grad_norm": 0.17959806983392868,
"learning_rate": 6.817867862129024e-07,
"loss": 0.4423,
"step": 498
},
{
"epoch": 0.9122486288848263,
"grad_norm": 0.19595323659927366,
"learning_rate": 6.549862900096332e-07,
"loss": 0.4211,
"step": 499
},
{
"epoch": 0.9140767824497258,
"grad_norm": 0.20816092557758029,
"learning_rate": 6.287114050795129e-07,
"loss": 0.4273,
"step": 500
},
{
"epoch": 0.9159049360146252,
"grad_norm": 0.19203785451952926,
"learning_rate": 6.029630941525938e-07,
"loss": 0.4064,
"step": 501
},
{
"epoch": 0.9177330895795247,
"grad_norm": 0.19627123579900582,
"learning_rate": 5.777423006648785e-07,
"loss": 0.4123,
"step": 502
},
{
"epoch": 0.9195612431444241,
"grad_norm": 0.1795560900383567,
"learning_rate": 5.530499487237711e-07,
"loss": 0.4228,
"step": 503
},
{
"epoch": 0.9213893967093236,
"grad_norm": 1.301897469947339,
"learning_rate": 5.288869430742154e-07,
"loss": 0.4074,
"step": 504
},
{
"epoch": 0.923217550274223,
"grad_norm": 0.1874449977405021,
"learning_rate": 5.052541690655288e-07,
"loss": 0.4587,
"step": 505
},
{
"epoch": 0.9250457038391224,
"grad_norm": 0.19441671824953077,
"learning_rate": 4.821524926189774e-07,
"loss": 0.4145,
"step": 506
},
{
"epoch": 0.926873857404022,
"grad_norm": 0.18780877877666258,
"learning_rate": 4.5958276019604725e-07,
"loss": 0.4175,
"step": 507
},
{
"epoch": 0.9287020109689214,
"grad_norm": 0.18766332816620063,
"learning_rate": 4.37545798767417e-07,
"loss": 0.4092,
"step": 508
},
{
"epoch": 0.9305301645338209,
"grad_norm": 0.20320470210632727,
"learning_rate": 4.160424157826753e-07,
"loss": 0.4312,
"step": 509
},
{
"epoch": 0.9323583180987203,
"grad_norm": 0.21028082478625537,
"learning_rate": 3.9507339914071684e-07,
"loss": 0.4344,
"step": 510
},
{
"epoch": 0.9341864716636198,
"grad_norm": 0.1983417630419607,
"learning_rate": 3.7463951716088795e-07,
"loss": 0.4375,
"step": 511
},
{
"epoch": 0.9360146252285192,
"grad_norm": 0.18791572094068032,
"learning_rate": 3.5474151855482973e-07,
"loss": 0.4302,
"step": 512
},
{
"epoch": 0.9378427787934186,
"grad_norm": 0.17810535619019122,
"learning_rate": 3.3538013239903964e-07,
"loss": 0.4364,
"step": 513
},
{
"epoch": 0.9396709323583181,
"grad_norm": 0.2072249750910098,
"learning_rate": 3.165560681081631e-07,
"loss": 0.4207,
"step": 514
},
{
"epoch": 0.9414990859232175,
"grad_norm": 0.2179401591312887,
"learning_rate": 2.9827001540900234e-07,
"loss": 0.4707,
"step": 515
},
{
"epoch": 0.943327239488117,
"grad_norm": 0.17915440732531465,
"learning_rate": 2.805226443152353e-07,
"loss": 0.4111,
"step": 516
},
{
"epoch": 0.9451553930530164,
"grad_norm": 0.18355262574436557,
"learning_rate": 2.633146051028767e-07,
"loss": 0.4071,
"step": 517
},
{
"epoch": 0.946983546617916,
"grad_norm": 0.2000102181192153,
"learning_rate": 2.466465282864422e-07,
"loss": 0.4285,
"step": 518
},
{
"epoch": 0.9488117001828154,
"grad_norm": 0.20323535358780875,
"learning_rate": 2.3051902459585494e-07,
"loss": 0.4579,
"step": 519
},
{
"epoch": 0.9506398537477148,
"grad_norm": 0.18919147718641008,
"learning_rate": 2.1493268495405892e-07,
"loss": 0.4307,
"step": 520
},
{
"epoch": 0.9524680073126143,
"grad_norm": 0.18769550871377885,
"learning_rate": 1.9988808045536933e-07,
"loss": 0.4121,
"step": 521
},
{
"epoch": 0.9542961608775137,
"grad_norm": 0.2077842654696244,
"learning_rate": 1.853857623445543e-07,
"loss": 0.4324,
"step": 522
},
{
"epoch": 0.9561243144424132,
"grad_norm": 0.17884962771458185,
"learning_rate": 1.7142626199663115e-07,
"loss": 0.4317,
"step": 523
},
{
"epoch": 0.9579524680073126,
"grad_norm": 0.1737483251528809,
"learning_rate": 1.5801009089739195e-07,
"loss": 0.3991,
"step": 524
},
{
"epoch": 0.9597806215722121,
"grad_norm": 0.1824044598603505,
"learning_rate": 1.4513774062467345e-07,
"loss": 0.4398,
"step": 525
},
{
"epoch": 0.9616087751371115,
"grad_norm": 0.17988282890228924,
"learning_rate": 1.3280968283033824e-07,
"loss": 0.4181,
"step": 526
},
{
"epoch": 0.9634369287020109,
"grad_norm": 0.1989158354387293,
"learning_rate": 1.210263692229885e-07,
"loss": 0.4057,
"step": 527
},
{
"epoch": 0.9652650822669104,
"grad_norm": 0.20218804624972406,
"learning_rate": 1.0978823155142759e-07,
"loss": 0.4087,
"step": 528
},
{
"epoch": 0.9670932358318098,
"grad_norm": 0.18753525060847254,
"learning_rate": 9.909568158883108e-08,
"loss": 0.4517,
"step": 529
},
{
"epoch": 0.9689213893967094,
"grad_norm": 0.19964026436486013,
"learning_rate": 8.894911111766213e-08,
"loss": 0.4593,
"step": 530
},
{
"epoch": 0.9707495429616088,
"grad_norm": 0.20199077568456258,
"learning_rate": 7.934889191531458e-08,
"loss": 0.4324,
"step": 531
},
{
"epoch": 0.9725776965265083,
"grad_norm": 0.21136885591694635,
"learning_rate": 7.029537574049228e-08,
"loss": 0.4772,
"step": 532
},
{
"epoch": 0.9744058500914077,
"grad_norm": 0.18831505200310864,
"learning_rate": 6.178889432032265e-08,
"loss": 0.4274,
"step": 533
},
{
"epoch": 0.9762340036563071,
"grad_norm": 0.19856253387529885,
"learning_rate": 5.3829759338194784e-08,
"loss": 0.4208,
"step": 534
},
{
"epoch": 0.9780621572212066,
"grad_norm": 0.18857931820807147,
"learning_rate": 4.641826242234859e-08,
"loss": 0.4169,
"step": 535
},
{
"epoch": 0.979890310786106,
"grad_norm": 0.18542577124245166,
"learning_rate": 3.9554675135183293e-08,
"loss": 0.4103,
"step": 536
},
{
"epoch": 0.9817184643510055,
"grad_norm": 0.21032034132870359,
"learning_rate": 3.3239248963305435e-08,
"loss": 0.47,
"step": 537
},
{
"epoch": 0.9835466179159049,
"grad_norm": 0.20775025613298942,
"learning_rate": 2.7472215308321225e-08,
"loss": 0.4346,
"step": 538
},
{
"epoch": 0.9853747714808044,
"grad_norm": 0.2001834889405841,
"learning_rate": 2.2253785478353327e-08,
"loss": 0.4171,
"step": 539
},
{
"epoch": 0.9872029250457038,
"grad_norm": 0.18559236233208531,
"learning_rate": 1.7584150680295397e-08,
"loss": 0.4002,
"step": 540
},
{
"epoch": 0.9890310786106032,
"grad_norm": 0.20591201286932131,
"learning_rate": 1.3463482012812666e-08,
"loss": 0.4012,
"step": 541
},
{
"epoch": 0.9908592321755028,
"grad_norm": 0.19900543023229678,
"learning_rate": 9.891930460070309e-09,
"loss": 0.4383,
"step": 542
},
{
"epoch": 0.9926873857404022,
"grad_norm": 0.17153790889860102,
"learning_rate": 6.86962688619952e-09,
"loss": 0.384,
"step": 543
},
{
"epoch": 0.9945155393053017,
"grad_norm": 0.1998289006849129,
"learning_rate": 4.3966820305030295e-09,
"loss": 0.3993,
"step": 544
},
{
"epoch": 0.9963436928702011,
"grad_norm": 0.19123526224485105,
"learning_rate": 2.4731865034000047e-09,
"loss": 0.3933,
"step": 545
},
{
"epoch": 0.9981718464351006,
"grad_norm": 0.18564371104552926,
"learning_rate": 1.0992107831037102e-09,
"loss": 0.3916,
"step": 546
},
{
"epoch": 1.0,
"grad_norm": 0.19303916330586324,
"learning_rate": 2.748052130402412e-10,
"loss": 0.42,
"step": 547
}
],
"logging_steps": 1,
"max_steps": 547,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 200,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 361444842012672.0,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}