{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9983633387888707,
  "eval_steps": 500,
  "global_step": 305,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0032733224222585926,
      "grad_norm": 4.5458229969104,
      "learning_rate": 3.3333333333333335e-07,
      "loss": 2.5273,
      "step": 1
    },
    {
      "epoch": 0.006546644844517185,
      "grad_norm": 4.12339411074909,
      "learning_rate": 6.666666666666667e-07,
      "loss": 2.5212,
      "step": 2
    },
    {
      "epoch": 0.009819967266775777,
      "grad_norm": 7.698312038734646,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 2.553,
      "step": 3
    },
    {
      "epoch": 0.01309328968903437,
      "grad_norm": 3.2743265686425325,
      "learning_rate": 1.3333333333333334e-06,
      "loss": 2.5286,
      "step": 4
    },
    {
      "epoch": 0.016366612111292964,
      "grad_norm": 4.882632956145234,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 2.5099,
      "step": 5
    },
    {
      "epoch": 0.019639934533551555,
      "grad_norm": 1.5422971469796205,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 2.5156,
      "step": 6
    },
    {
      "epoch": 0.022913256955810146,
      "grad_norm": 4.3292940294795,
      "learning_rate": 2.3333333333333336e-06,
      "loss": 2.5432,
      "step": 7
    },
    {
      "epoch": 0.02618657937806874,
      "grad_norm": 1.3106542988714809,
      "learning_rate": 2.666666666666667e-06,
      "loss": 2.5001,
      "step": 8
    },
    {
      "epoch": 0.029459901800327332,
      "grad_norm": 1.915309733377902,
      "learning_rate": 3e-06,
      "loss": 2.5289,
      "step": 9
    },
    {
      "epoch": 0.03273322422258593,
      "grad_norm": 1.4811295343620543,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 2.5249,
      "step": 10
    },
    {
      "epoch": 0.03600654664484452,
      "grad_norm": 0.9585352491709347,
      "learning_rate": 3.6666666666666666e-06,
      "loss": 2.5195,
      "step": 11
    },
    {
      "epoch": 0.03927986906710311,
      "grad_norm": 0.6542974173213092,
      "learning_rate": 4.000000000000001e-06,
      "loss": 2.5096,
      "step": 12
    },
    {
      "epoch": 0.0425531914893617,
      "grad_norm": 0.6159788634871752,
      "learning_rate": 4.333333333333334e-06,
      "loss": 2.509,
      "step": 13
    },
    {
      "epoch": 0.04582651391162029,
      "grad_norm": 0.6302127259561674,
      "learning_rate": 4.666666666666667e-06,
      "loss": 2.5159,
      "step": 14
    },
    {
      "epoch": 0.049099836333878884,
      "grad_norm": 0.5116118125911384,
      "learning_rate": 5e-06,
      "loss": 2.4919,
      "step": 15
    },
    {
      "epoch": 0.05237315875613748,
      "grad_norm": 0.42670850345300865,
      "learning_rate": 5.333333333333334e-06,
      "loss": 2.5056,
      "step": 16
    },
    {
      "epoch": 0.05564648117839607,
      "grad_norm": 0.3955257011722223,
      "learning_rate": 5.666666666666667e-06,
      "loss": 2.493,
      "step": 17
    },
    {
      "epoch": 0.058919803600654665,
      "grad_norm": 0.3954725948519232,
      "learning_rate": 6e-06,
      "loss": 2.4947,
      "step": 18
    },
    {
      "epoch": 0.062193126022913256,
      "grad_norm": 0.37175019309525287,
      "learning_rate": 6.333333333333333e-06,
      "loss": 2.5092,
      "step": 19
    },
    {
      "epoch": 0.06546644844517185,
      "grad_norm": 0.39057048150465146,
      "learning_rate": 6.666666666666667e-06,
      "loss": 2.4981,
      "step": 20
    },
    {
      "epoch": 0.06873977086743044,
      "grad_norm": 0.39262665115560863,
      "learning_rate": 7e-06,
      "loss": 2.4854,
      "step": 21
    },
    {
      "epoch": 0.07201309328968904,
      "grad_norm": 0.3951637407824461,
      "learning_rate": 7.333333333333333e-06,
      "loss": 2.5021,
      "step": 22
    },
    {
      "epoch": 0.07528641571194762,
      "grad_norm": 0.40140742815376235,
      "learning_rate": 7.666666666666667e-06,
      "loss": 2.517,
      "step": 23
    },
    {
      "epoch": 0.07855973813420622,
      "grad_norm": 0.36348786250077453,
      "learning_rate": 8.000000000000001e-06,
      "loss": 2.492,
      "step": 24
    },
    {
      "epoch": 0.08183306055646482,
      "grad_norm": 0.3927422984780222,
      "learning_rate": 8.333333333333334e-06,
      "loss": 2.4891,
      "step": 25
    },
    {
      "epoch": 0.0851063829787234,
      "grad_norm": 0.3802256129939313,
      "learning_rate": 8.666666666666668e-06,
      "loss": 2.5241,
      "step": 26
    },
    {
      "epoch": 0.088379705400982,
      "grad_norm": 0.4048560033284272,
      "learning_rate": 9e-06,
      "loss": 2.4996,
      "step": 27
    },
    {
      "epoch": 0.09165302782324058,
      "grad_norm": 0.3938035425880514,
      "learning_rate": 9.333333333333334e-06,
      "loss": 2.51,
      "step": 28
    },
    {
      "epoch": 0.09492635024549918,
      "grad_norm": 0.4318232114640702,
      "learning_rate": 9.666666666666667e-06,
      "loss": 2.5345,
      "step": 29
    },
    {
      "epoch": 0.09819967266775777,
      "grad_norm": 0.3960154579660599,
      "learning_rate": 1e-05,
      "loss": 2.511,
      "step": 30
    },
    {
      "epoch": 0.10147299509001637,
      "grad_norm": 0.4569501622130681,
      "learning_rate": 9.999673735634259e-06,
      "loss": 2.4827,
      "step": 31
    },
    {
      "epoch": 0.10474631751227496,
      "grad_norm": 0.3958075272764396,
      "learning_rate": 9.998694985116406e-06,
      "loss": 2.4884,
      "step": 32
    },
    {
      "epoch": 0.10801963993453355,
      "grad_norm": 0.37500089768123596,
      "learning_rate": 9.997063876179007e-06,
      "loss": 2.5027,
      "step": 33
    },
    {
      "epoch": 0.11129296235679215,
      "grad_norm": 0.3815772878145323,
      "learning_rate": 9.994780621691156e-06,
      "loss": 2.4957,
      "step": 34
    },
    {
      "epoch": 0.11456628477905073,
      "grad_norm": 0.38634081421059385,
      "learning_rate": 9.991845519630679e-06,
      "loss": 2.4937,
      "step": 35
    },
    {
      "epoch": 0.11783960720130933,
      "grad_norm": 0.3671354090721152,
      "learning_rate": 9.988258953045264e-06,
      "loss": 2.4789,
      "step": 36
    },
    {
      "epoch": 0.12111292962356793,
      "grad_norm": 0.36190374114193224,
      "learning_rate": 9.984021390002458e-06,
      "loss": 2.499,
      "step": 37
    },
    {
      "epoch": 0.12438625204582651,
      "grad_norm": 0.38466932485680505,
      "learning_rate": 9.979133383528591e-06,
      "loss": 2.5026,
      "step": 38
    },
    {
      "epoch": 0.1276595744680851,
      "grad_norm": 0.3573078258794223,
      "learning_rate": 9.973595571536593e-06,
      "loss": 2.4787,
      "step": 39
    },
    {
      "epoch": 0.1309328968903437,
      "grad_norm": 0.39354137647528986,
      "learning_rate": 9.96740867674275e-06,
      "loss": 2.5228,
      "step": 40
    },
    {
      "epoch": 0.1342062193126023,
      "grad_norm": 0.3834121962923347,
      "learning_rate": 9.960573506572391e-06,
      "loss": 2.5147,
      "step": 41
    },
    {
      "epoch": 0.13747954173486088,
      "grad_norm": 0.383218733958223,
      "learning_rate": 9.953090953054491e-06,
      "loss": 2.4847,
      "step": 42
    },
    {
      "epoch": 0.1407528641571195,
      "grad_norm": 0.3964524303138896,
      "learning_rate": 9.944961992705288e-06,
      "loss": 2.4953,
      "step": 43
    },
    {
      "epoch": 0.14402618657937807,
      "grad_norm": 0.3907082692537958,
      "learning_rate": 9.936187686400814e-06,
      "loss": 2.5074,
      "step": 44
    },
    {
      "epoch": 0.14729950900163666,
      "grad_norm": 0.42441529396911143,
      "learning_rate": 9.926769179238467e-06,
      "loss": 2.4694,
      "step": 45
    },
    {
      "epoch": 0.15057283142389524,
      "grad_norm": 0.36309886081053155,
      "learning_rate": 9.916707700387546e-06,
      "loss": 2.4816,
      "step": 46
    },
    {
      "epoch": 0.15384615384615385,
      "grad_norm": 0.3745053331706986,
      "learning_rate": 9.906004562928865e-06,
      "loss": 2.4876,
      "step": 47
    },
    {
      "epoch": 0.15711947626841244,
      "grad_norm": 0.3558032864873746,
      "learning_rate": 9.894661163683361e-06,
      "loss": 2.4887,
      "step": 48
    },
    {
      "epoch": 0.16039279869067102,
      "grad_norm": 0.3560175097976504,
      "learning_rate": 9.882678983029819e-06,
      "loss": 2.4817,
      "step": 49
    },
    {
      "epoch": 0.16366612111292964,
      "grad_norm": 0.36618662107001704,
      "learning_rate": 9.870059584711668e-06,
      "loss": 2.4665,
      "step": 50
    },
    {
      "epoch": 0.16693944353518822,
      "grad_norm": 0.3601957684934551,
      "learning_rate": 9.856804615632904e-06,
      "loss": 2.4854,
      "step": 51
    },
    {
      "epoch": 0.1702127659574468,
      "grad_norm": 0.3704954734907262,
      "learning_rate": 9.842915805643156e-06,
      "loss": 2.5347,
      "step": 52
    },
    {
      "epoch": 0.1734860883797054,
      "grad_norm": 0.36959475458440244,
      "learning_rate": 9.82839496731194e-06,
      "loss": 2.4887,
      "step": 53
    },
    {
      "epoch": 0.176759410801964,
      "grad_norm": 0.384552486477283,
      "learning_rate": 9.813243995692097e-06,
      "loss": 2.4753,
      "step": 54
    },
    {
      "epoch": 0.18003273322422259,
      "grad_norm": 0.36249860452124605,
      "learning_rate": 9.797464868072489e-06,
      "loss": 2.469,
      "step": 55
    },
    {
      "epoch": 0.18330605564648117,
      "grad_norm": 0.3648846542235022,
      "learning_rate": 9.781059643719937e-06,
      "loss": 2.4984,
      "step": 56
    },
    {
      "epoch": 0.18657937806873978,
      "grad_norm": 0.37334256629644896,
      "learning_rate": 9.76403046361049e-06,
      "loss": 2.456,
      "step": 57
    },
    {
      "epoch": 0.18985270049099837,
      "grad_norm": 0.36980026599186505,
      "learning_rate": 9.74637955015001e-06,
      "loss": 2.485,
      "step": 58
    },
    {
      "epoch": 0.19312602291325695,
      "grad_norm": 0.40834498386413537,
      "learning_rate": 9.728109206884125e-06,
      "loss": 2.5011,
      "step": 59
    },
    {
      "epoch": 0.19639934533551553,
      "grad_norm": 0.35772757844310776,
      "learning_rate": 9.709221818197626e-06,
      "loss": 2.4915,
      "step": 60
    },
    {
      "epoch": 0.19967266775777415,
      "grad_norm": 0.5331894263162329,
      "learning_rate": 9.689719849003261e-06,
      "loss": 2.4779,
      "step": 61
    },
    {
      "epoch": 0.20294599018003273,
      "grad_norm": 0.5108742054472333,
      "learning_rate": 9.66960584442008e-06,
      "loss": 2.5002,
      "step": 62
    },
    {
      "epoch": 0.20621931260229132,
      "grad_norm": 0.36603499456334015,
      "learning_rate": 9.648882429441258e-06,
      "loss": 2.4605,
      "step": 63
    },
    {
      "epoch": 0.20949263502454993,
      "grad_norm": 0.3858608557671342,
      "learning_rate": 9.627552308591534e-06,
      "loss": 2.484,
      "step": 64
    },
    {
      "epoch": 0.2127659574468085,
      "grad_norm": 0.39024218298879865,
      "learning_rate": 9.60561826557425e-06,
      "loss": 2.5057,
      "step": 65
    },
    {
      "epoch": 0.2160392798690671,
      "grad_norm": 0.38798042222376583,
      "learning_rate": 9.58308316290806e-06,
      "loss": 2.4573,
      "step": 66
    },
    {
      "epoch": 0.2193126022913257,
      "grad_norm": 0.4495343758022552,
      "learning_rate": 9.559949941553351e-06,
      "loss": 2.4751,
      "step": 67
    },
    {
      "epoch": 0.2225859247135843,
      "grad_norm": 0.5550558249858909,
      "learning_rate": 9.536221620528442e-06,
      "loss": 2.4706,
      "step": 68
    },
    {
      "epoch": 0.22585924713584288,
      "grad_norm": 0.7753792348358921,
      "learning_rate": 9.511901296515578e-06,
      "loss": 2.4536,
      "step": 69
    },
    {
      "epoch": 0.22913256955810146,
      "grad_norm": 0.4620597211236889,
      "learning_rate": 9.486992143456792e-06,
      "loss": 2.4966,
      "step": 70
    },
    {
      "epoch": 0.23240589198036007,
      "grad_norm": 2.0294893276083674,
      "learning_rate": 9.461497412139697e-06,
      "loss": 2.4896,
      "step": 71
    },
    {
      "epoch": 0.23567921440261866,
      "grad_norm": 3.9731994256120258,
      "learning_rate": 9.435420429773227e-06,
      "loss": 2.4775,
      "step": 72
    },
    {
      "epoch": 0.23895253682487724,
      "grad_norm": 3.716661029391411,
      "learning_rate": 9.408764599553429e-06,
      "loss": 2.4774,
      "step": 73
    },
    {
      "epoch": 0.24222585924713586,
      "grad_norm": 2.4511968994240396,
      "learning_rate": 9.381533400219319e-06,
      "loss": 2.4545,
      "step": 74
    },
    {
      "epoch": 0.24549918166939444,
      "grad_norm": 0.9224443041076055,
      "learning_rate": 9.353730385598887e-06,
      "loss": 2.4927,
      "step": 75
    },
    {
      "epoch": 0.24877250409165302,
      "grad_norm": 0.4657968594732789,
      "learning_rate": 9.325359184145307e-06,
      "loss": 2.4565,
      "step": 76
    },
    {
      "epoch": 0.2520458265139116,
      "grad_norm": 0.7177608849321568,
      "learning_rate": 9.296423498463396e-06,
      "loss": 2.4825,
      "step": 77
    },
    {
      "epoch": 0.2553191489361702,
      "grad_norm": 0.57974277665311,
      "learning_rate": 9.26692710482641e-06,
      "loss": 2.4789,
      "step": 78
    },
    {
      "epoch": 0.25859247135842883,
      "grad_norm": 0.5116606332293397,
      "learning_rate": 9.236873852683213e-06,
      "loss": 2.4654,
      "step": 79
    },
    {
      "epoch": 0.2618657937806874,
      "grad_norm": 0.4372804881120678,
      "learning_rate": 9.206267664155906e-06,
      "loss": 2.4649,
      "step": 80
    },
    {
      "epoch": 0.265139116202946,
      "grad_norm": 0.4185499524749441,
      "learning_rate": 9.175112533527963e-06,
      "loss": 2.5026,
      "step": 81
    },
    {
      "epoch": 0.2684124386252046,
      "grad_norm": 0.5272610155007883,
      "learning_rate": 9.143412526722958e-06,
      "loss": 2.4975,
      "step": 82
    },
    {
      "epoch": 0.27168576104746317,
      "grad_norm": 0.5340935332042765,
      "learning_rate": 9.111171780773938e-06,
      "loss": 2.4816,
      "step": 83
    },
    {
      "epoch": 0.27495908346972175,
      "grad_norm": 0.3904804241846333,
      "learning_rate": 9.078394503283509e-06,
      "loss": 2.4675,
      "step": 84
    },
    {
      "epoch": 0.27823240589198034,
      "grad_norm": 0.3328349798561506,
      "learning_rate": 9.045084971874738e-06,
      "loss": 2.4633,
      "step": 85
    },
    {
      "epoch": 0.281505728314239,
      "grad_norm": 0.37021724634968695,
      "learning_rate": 9.011247533632876e-06,
      "loss": 2.4508,
      "step": 86
    },
    {
      "epoch": 0.28477905073649756,
      "grad_norm": 0.3729529690097639,
      "learning_rate": 8.976886604538055e-06,
      "loss": 2.489,
      "step": 87
    },
    {
      "epoch": 0.28805237315875615,
      "grad_norm": 0.33761554067347327,
      "learning_rate": 8.942006668888972e-06,
      "loss": 2.4734,
      "step": 88
    },
    {
      "epoch": 0.29132569558101473,
      "grad_norm": 0.3584893895881646,
      "learning_rate": 8.906612278717657e-06,
      "loss": 2.4702,
      "step": 89
    },
    {
      "epoch": 0.2945990180032733,
      "grad_norm": 0.3546249378424476,
      "learning_rate": 8.870708053195414e-06,
      "loss": 2.4891,
      "step": 90
    },
    {
      "epoch": 0.2978723404255319,
      "grad_norm": 0.3472403078226491,
      "learning_rate": 8.834298678029988e-06,
      "loss": 2.4671,
      "step": 91
    },
    {
      "epoch": 0.3011456628477905,
      "grad_norm": 0.3453678794183207,
      "learning_rate": 8.797388904854064e-06,
      "loss": 2.4971,
      "step": 92
    },
    {
      "epoch": 0.3044189852700491,
      "grad_norm": 0.35760827827717245,
      "learning_rate": 8.759983550605132e-06,
      "loss": 2.478,
      "step": 93
    },
    {
      "epoch": 0.3076923076923077,
      "grad_norm": 0.32637961131906473,
      "learning_rate": 8.72208749689686e-06,
      "loss": 2.4519,
      "step": 94
    },
    {
      "epoch": 0.3109656301145663,
      "grad_norm": 0.3451716663724565,
      "learning_rate": 8.683705689382025e-06,
      "loss": 2.4597,
      "step": 95
    },
    {
      "epoch": 0.3142389525368249,
      "grad_norm": 0.3397179170708975,
      "learning_rate": 8.644843137107058e-06,
      "loss": 2.4738,
      "step": 96
    },
    {
      "epoch": 0.31751227495908346,
      "grad_norm": 0.3507051892225581,
      "learning_rate": 8.605504911858347e-06,
      "loss": 2.5,
      "step": 97
    },
    {
      "epoch": 0.32078559738134205,
      "grad_norm": 0.3400362404421244,
      "learning_rate": 8.565696147500338e-06,
      "loss": 2.4768,
      "step": 98
    },
    {
      "epoch": 0.32405891980360063,
      "grad_norm": 0.35484798538066187,
      "learning_rate": 8.525422039305529e-06,
      "loss": 2.4858,
      "step": 99
    },
    {
      "epoch": 0.32733224222585927,
      "grad_norm": 0.3658788522731373,
      "learning_rate": 8.48468784327647e-06,
      "loss": 2.4738,
      "step": 100
    },
    {
      "epoch": 0.33060556464811786,
      "grad_norm": 0.3398517081347872,
      "learning_rate": 8.44349887545981e-06,
      "loss": 2.4877,
      "step": 101
    },
    {
      "epoch": 0.33387888707037644,
      "grad_norm": 0.38013549611094194,
      "learning_rate": 8.401860511252535e-06,
      "loss": 2.4638,
      "step": 102
    },
    {
      "epoch": 0.337152209492635,
      "grad_norm": 0.3484658452958359,
      "learning_rate": 8.35977818470044e-06,
      "loss": 2.4783,
      "step": 103
    },
    {
      "epoch": 0.3404255319148936,
      "grad_norm": 0.3554237638577351,
      "learning_rate": 8.31725738778896e-06,
      "loss": 2.474,
      "step": 104
    },
    {
      "epoch": 0.3436988543371522,
      "grad_norm": 0.3448509168732586,
      "learning_rate": 8.274303669726427e-06,
      "loss": 2.4568,
      "step": 105
    },
    {
      "epoch": 0.3469721767594108,
      "grad_norm": 0.3491512405342342,
      "learning_rate": 8.230922636219872e-06,
      "loss": 2.479,
      "step": 106
    },
    {
      "epoch": 0.3502454991816694,
      "grad_norm": 0.35293167144622156,
      "learning_rate": 8.18711994874345e-06,
      "loss": 2.4889,
      "step": 107
    },
    {
      "epoch": 0.353518821603928,
      "grad_norm": 0.34554222359891335,
      "learning_rate": 8.142901323799578e-06,
      "loss": 2.4948,
      "step": 108
    },
    {
      "epoch": 0.3567921440261866,
      "grad_norm": 0.3433706276706439,
      "learning_rate": 8.098272532172906e-06,
      "loss": 2.4896,
      "step": 109
    },
    {
      "epoch": 0.36006546644844517,
      "grad_norm": 0.32061609152441495,
      "learning_rate": 8.053239398177191e-06,
      "loss": 2.4374,
      "step": 110
    },
    {
      "epoch": 0.36333878887070375,
      "grad_norm": 0.3443744364995488,
      "learning_rate": 8.007807798895195e-06,
      "loss": 2.4754,
      "step": 111
    },
    {
      "epoch": 0.36661211129296234,
      "grad_norm": 0.32691985158573217,
      "learning_rate": 7.961983663411684e-06,
      "loss": 2.4777,
      "step": 112
    },
    {
      "epoch": 0.3698854337152209,
      "grad_norm": 0.33245765943085837,
      "learning_rate": 7.91577297203966e-06,
      "loss": 2.4723,
      "step": 113
    },
    {
      "epoch": 0.37315875613747956,
      "grad_norm": 0.33707956489119834,
      "learning_rate": 7.869181755539888e-06,
      "loss": 2.4709,
      "step": 114
    },
    {
      "epoch": 0.37643207855973815,
      "grad_norm": 0.3334493326179338,
      "learning_rate": 7.822216094333847e-06,
      "loss": 2.5068,
      "step": 115
    },
    {
      "epoch": 0.37970540098199673,
      "grad_norm": 0.3244946372709713,
      "learning_rate": 7.774882117710203e-06,
      "loss": 2.4797,
      "step": 116
    },
    {
      "epoch": 0.3829787234042553,
      "grad_norm": 0.3277650850109358,
      "learning_rate": 7.727186003024902e-06,
      "loss": 2.4618,
      "step": 117
    },
    {
      "epoch": 0.3862520458265139,
      "grad_norm": 0.34009316740947815,
      "learning_rate": 7.679133974894984e-06,
      "loss": 2.4581,
      "step": 118
    },
    {
      "epoch": 0.3895253682487725,
      "grad_norm": 0.3381461847729053,
      "learning_rate": 7.630732304386244e-06,
      "loss": 2.4817,
      "step": 119
    },
    {
      "epoch": 0.39279869067103107,
      "grad_norm": 0.3385097066946103,
      "learning_rate": 7.5819873081948105e-06,
      "loss": 2.4847,
      "step": 120
    },
    {
      "epoch": 0.3960720130932897,
      "grad_norm": 0.3372871065877283,
      "learning_rate": 7.532905347822792e-06,
      "loss": 2.4577,
      "step": 121
    },
    {
      "epoch": 0.3993453355155483,
      "grad_norm": 0.32559796645917616,
      "learning_rate": 7.4834928287480566e-06,
      "loss": 2.4923,
      "step": 122
    },
    {
      "epoch": 0.4026186579378069,
      "grad_norm": 0.3251819544833203,
      "learning_rate": 7.433756199588282e-06,
      "loss": 2.4746,
      "step": 123
    },
    {
      "epoch": 0.40589198036006546,
      "grad_norm": 0.33165207760018856,
      "learning_rate": 7.383701951259375e-06,
      "loss": 2.497,
      "step": 124
    },
    {
      "epoch": 0.40916530278232405,
      "grad_norm": 0.33880444150123695,
      "learning_rate": 7.333336616128369e-06,
      "loss": 2.4362,
      "step": 125
    },
    {
      "epoch": 0.41243862520458263,
      "grad_norm": 0.32428200516815325,
      "learning_rate": 7.282666767160913e-06,
      "loss": 2.4439,
      "step": 126
    },
    {
      "epoch": 0.4157119476268412,
      "grad_norm": 0.3273982952564575,
      "learning_rate": 7.23169901706346e-06,
      "loss": 2.4801,
      "step": 127
    },
    {
      "epoch": 0.41898527004909986,
      "grad_norm": 0.3261565580453942,
      "learning_rate": 7.180440017420277e-06,
      "loss": 2.4903,
      "step": 128
    },
    {
      "epoch": 0.42225859247135844,
      "grad_norm": 0.35528485781883334,
      "learning_rate": 7.128896457825364e-06,
      "loss": 2.4653,
      "step": 129
    },
    {
      "epoch": 0.425531914893617,
      "grad_norm": 0.3185186205453131,
      "learning_rate": 7.0770750650094335e-06,
      "loss": 2.4605,
      "step": 130
    },
    {
      "epoch": 0.4288052373158756,
      "grad_norm": 0.3431153323420711,
      "learning_rate": 7.024982601962027e-06,
      "loss": 2.4627,
      "step": 131
    },
    {
      "epoch": 0.4320785597381342,
      "grad_norm": 0.31409563060102685,
      "learning_rate": 6.972625867048914e-06,
      "loss": 2.457,
      "step": 132
    },
    {
      "epoch": 0.4353518821603928,
      "grad_norm": 0.3352629031883897,
      "learning_rate": 6.9200116931248575e-06,
      "loss": 2.4805,
      "step": 133
    },
    {
      "epoch": 0.4386252045826514,
      "grad_norm": 0.3295764773687296,
      "learning_rate": 6.8671469466418914e-06,
      "loss": 2.465,
      "step": 134
    },
    {
      "epoch": 0.44189852700491,
      "grad_norm": 0.32638616945204396,
      "learning_rate": 6.814038526753205e-06,
      "loss": 2.4317,
      "step": 135
    },
    {
      "epoch": 0.4451718494271686,
      "grad_norm": 0.358055738131873,
      "learning_rate": 6.760693364412776e-06,
      "loss": 2.4459,
      "step": 136
    },
    {
      "epoch": 0.44844517184942717,
      "grad_norm": 0.32782232063682554,
      "learning_rate": 6.707118421470822e-06,
      "loss": 2.4652,
      "step": 137
    },
    {
      "epoch": 0.45171849427168576,
      "grad_norm": 0.3213728199970128,
      "learning_rate": 6.653320689765257e-06,
      "loss": 2.4812,
      "step": 138
    },
    {
      "epoch": 0.45499181669394434,
      "grad_norm": 0.3145652401211947,
      "learning_rate": 6.599307190209206e-06,
      "loss": 2.4622,
      "step": 139
    },
    {
      "epoch": 0.4582651391162029,
      "grad_norm": 0.33107965983633225,
      "learning_rate": 6.545084971874738e-06,
      "loss": 2.4856,
      "step": 140
    },
    {
      "epoch": 0.46153846153846156,
      "grad_norm": 0.33391686920613367,
      "learning_rate": 6.490661111072923e-06,
      "loss": 2.4519,
      "step": 141
    },
    {
      "epoch": 0.46481178396072015,
      "grad_norm": 0.31915606377667516,
      "learning_rate": 6.4360427104303326e-06,
      "loss": 2.458,
      "step": 142
    },
    {
      "epoch": 0.46808510638297873,
      "grad_norm": 0.32619864635648854,
      "learning_rate": 6.381236897962102e-06,
      "loss": 2.48,
      "step": 143
    },
    {
      "epoch": 0.4713584288052373,
      "grad_norm": 0.3344755693427373,
      "learning_rate": 6.326250826141689e-06,
      "loss": 2.4728,
      "step": 144
    },
    {
      "epoch": 0.4746317512274959,
      "grad_norm": 0.3125523770454477,
      "learning_rate": 6.271091670967437e-06,
      "loss": 2.4587,
      "step": 145
    },
    {
      "epoch": 0.4779050736497545,
      "grad_norm": 0.32456956099182205,
      "learning_rate": 6.215766631026049e-06,
      "loss": 2.4694,
      "step": 146
    },
    {
      "epoch": 0.48117839607201307,
      "grad_norm": 0.3183205037717091,
      "learning_rate": 6.1602829265531585e-06,
      "loss": 2.4752,
      "step": 147
    },
    {
      "epoch": 0.4844517184942717,
      "grad_norm": 0.31706883694319504,
      "learning_rate": 6.1046477984910215e-06,
      "loss": 2.4715,
      "step": 148
    },
    {
      "epoch": 0.4877250409165303,
      "grad_norm": 0.32677657711179986,
      "learning_rate": 6.048868507543547e-06,
      "loss": 2.4684,
      "step": 149
    },
    {
      "epoch": 0.4909983633387889,
      "grad_norm": 0.32457051719454905,
      "learning_rate": 5.9929523332287275e-06,
      "loss": 2.471,
      "step": 150
    },
    {
      "epoch": 0.49427168576104746,
      "grad_norm": 0.329167055323462,
      "learning_rate": 5.936906572928625e-06,
      "loss": 2.4696,
      "step": 151
    },
    {
      "epoch": 0.49754500818330605,
      "grad_norm": 0.3233867081897887,
      "learning_rate": 5.880738540937008e-06,
      "loss": 2.4758,
      "step": 152
    },
    {
      "epoch": 0.5008183306055647,
      "grad_norm": 0.310943717613368,
      "learning_rate": 5.824455567504817e-06,
      "loss": 2.4813,
      "step": 153
    },
    {
      "epoch": 0.5040916530278232,
      "grad_norm": 0.35767604251132745,
      "learning_rate": 5.7680649978834976e-06,
      "loss": 2.4628,
      "step": 154
    },
    {
      "epoch": 0.5073649754500819,
      "grad_norm": 0.3066334220540355,
      "learning_rate": 5.711574191366427e-06,
      "loss": 2.4645,
      "step": 155
    },
    {
      "epoch": 0.5106382978723404,
      "grad_norm": 0.3299515321649155,
      "learning_rate": 5.654990520328465e-06,
      "loss": 2.4445,
      "step": 156
    },
    {
      "epoch": 0.513911620294599,
      "grad_norm": 0.31272357276253326,
      "learning_rate": 5.59832136926383e-06,
      "loss": 2.4594,
      "step": 157
    },
    {
      "epoch": 0.5171849427168577,
      "grad_norm": 0.3094045826418548,
      "learning_rate": 5.541574133822374e-06,
      "loss": 2.4481,
      "step": 158
    },
    {
      "epoch": 0.5204582651391162,
      "grad_norm": 0.33143709507448227,
      "learning_rate": 5.484756219844408e-06,
      "loss": 2.4621,
      "step": 159
    },
    {
      "epoch": 0.5237315875613748,
      "grad_norm": 0.30398134753496064,
      "learning_rate": 5.4278750423942e-06,
      "loss": 2.4715,
      "step": 160
    },
    {
      "epoch": 0.5270049099836334,
      "grad_norm": 0.31511994044804714,
      "learning_rate": 5.370938024792262e-06,
      "loss": 2.4609,
      "step": 161
    },
    {
      "epoch": 0.530278232405892,
      "grad_norm": 0.3204336918739602,
      "learning_rate": 5.3139525976465675e-06,
      "loss": 2.4501,
      "step": 162
    },
    {
      "epoch": 0.5335515548281505,
      "grad_norm": 0.3243453056297712,
      "learning_rate": 5.2569261978828155e-06,
      "loss": 2.4634,
      "step": 163
    },
    {
      "epoch": 0.5368248772504092,
      "grad_norm": 0.3131904676215095,
      "learning_rate": 5.199866267773868e-06,
      "loss": 2.4684,
      "step": 164
    },
    {
      "epoch": 0.5400981996726678,
      "grad_norm": 0.3338712281774405,
      "learning_rate": 5.142780253968481e-06,
      "loss": 2.4687,
      "step": 165
    },
    {
      "epoch": 0.5433715220949263,
      "grad_norm": 0.3108967805240922,
      "learning_rate": 5.085675606519496e-06,
      "loss": 2.4443,
      "step": 166
    },
    {
      "epoch": 0.546644844517185,
      "grad_norm": 0.31028829982787426,
      "learning_rate": 5.028559777911543e-06,
      "loss": 2.471,
      "step": 167
    },
    {
      "epoch": 0.5499181669394435,
      "grad_norm": 0.31679487774858733,
      "learning_rate": 4.971440222088459e-06,
      "loss": 2.4654,
      "step": 168
    },
    {
      "epoch": 0.5531914893617021,
      "grad_norm": 0.31296921041810605,
      "learning_rate": 4.914324393480504e-06,
      "loss": 2.4442,
      "step": 169
    },
    {
      "epoch": 0.5564648117839607,
      "grad_norm": 0.3165078522607312,
      "learning_rate": 4.85721974603152e-06,
      "loss": 2.4494,
      "step": 170
    },
    {
      "epoch": 0.5597381342062193,
      "grad_norm": 0.3540062454779396,
      "learning_rate": 4.800133732226135e-06,
      "loss": 2.4664,
      "step": 171
    },
    {
      "epoch": 0.563011456628478,
      "grad_norm": 0.32009750544034943,
      "learning_rate": 4.743073802117185e-06,
      "loss": 2.4809,
      "step": 172
    },
    {
      "epoch": 0.5662847790507365,
      "grad_norm": 0.2983415714217039,
      "learning_rate": 4.686047402353433e-06,
      "loss": 2.4564,
      "step": 173
    },
    {
      "epoch": 0.5695581014729951,
      "grad_norm": 0.2999034797032862,
      "learning_rate": 4.62906197520774e-06,
      "loss": 2.467,
      "step": 174
    },
    {
      "epoch": 0.5728314238952537,
      "grad_norm": 0.3102585042911049,
      "learning_rate": 4.572124957605803e-06,
      "loss": 2.46,
      "step": 175
    },
    {
      "epoch": 0.5761047463175123,
      "grad_norm": 0.31057870076393,
      "learning_rate": 4.515243780155594e-06,
      "loss": 2.4704,
      "step": 176
    },
    {
      "epoch": 0.5793780687397708,
      "grad_norm": 0.3046118860204264,
      "learning_rate": 4.458425866177628e-06,
      "loss": 2.467,
      "step": 177
    },
    {
      "epoch": 0.5826513911620295,
      "grad_norm": 0.30073951913953495,
      "learning_rate": 4.401678630736172e-06,
      "loss": 2.4743,
      "step": 178
    },
    {
      "epoch": 0.5859247135842881,
      "grad_norm": 0.30406830972525584,
      "learning_rate": 4.3450094796715354e-06,
      "loss": 2.4798,
      "step": 179
    },
    {
      "epoch": 0.5891980360065466,
      "grad_norm": 0.3054725070167328,
      "learning_rate": 4.2884258086335755e-06,
      "loss": 2.468,
      "step": 180
    },
    {
      "epoch": 0.5924713584288053,
      "grad_norm": 0.31961569501005616,
      "learning_rate": 4.231935002116504e-06,
      "loss": 2.4853,
      "step": 181
    },
    {
      "epoch": 0.5957446808510638,
      "grad_norm": 0.29332706934686226,
      "learning_rate": 4.175544432495184e-06,
      "loss": 2.4684,
      "step": 182
    },
    {
      "epoch": 0.5990180032733224,
      "grad_norm": 0.3136548340197598,
      "learning_rate": 4.119261459062992e-06,
      "loss": 2.4617,
      "step": 183
    },
    {
      "epoch": 0.602291325695581,
      "grad_norm": 0.29768754171106776,
      "learning_rate": 4.063093427071376e-06,
      "loss": 2.4506,
      "step": 184
    },
    {
      "epoch": 0.6055646481178396,
      "grad_norm": 0.3102339131602259,
      "learning_rate": 4.007047666771274e-06,
      "loss": 2.4692,
      "step": 185
    },
    {
      "epoch": 0.6088379705400983,
      "grad_norm": 0.31512899490271845,
      "learning_rate": 3.951131492456455e-06,
      "loss": 2.4399,
      "step": 186
    },
    {
      "epoch": 0.6121112929623568,
      "grad_norm": 0.29668620197615053,
      "learning_rate": 3.895352201508981e-06,
      "loss": 2.4658,
      "step": 187
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 0.3129729278174446,
      "learning_rate": 3.839717073446842e-06,
      "loss": 2.4296,
      "step": 188
    },
    {
      "epoch": 0.618657937806874,
      "grad_norm": 0.3087654037315499,
      "learning_rate": 3.7842333689739524e-06,
      "loss": 2.4512,
      "step": 189
    },
    {
      "epoch": 0.6219312602291326,
      "grad_norm": 0.29989373586590584,
      "learning_rate": 3.7289083290325668e-06,
      "loss": 2.4543,
      "step": 190
    },
    {
      "epoch": 0.6252045826513911,
      "grad_norm": 0.30200062774615277,
      "learning_rate": 3.673749173858312e-06,
      "loss": 2.4637,
      "step": 191
    },
    {
      "epoch": 0.6284779050736498,
      "grad_norm": 0.3059048859940154,
      "learning_rate": 3.618763102037899e-06,
      "loss": 2.4714,
      "step": 192
    },
    {
      "epoch": 0.6317512274959084,
      "grad_norm": 0.2995831991905797,
      "learning_rate": 3.563957289569669e-06,
      "loss": 2.4581,
      "step": 193
    },
    {
      "epoch": 0.6350245499181669,
      "grad_norm": 0.3058506564330453,
      "learning_rate": 3.509338888927079e-06,
      "loss": 2.4556,
      "step": 194
    },
    {
      "epoch": 0.6382978723404256,
      "grad_norm": 0.2932520015994215,
      "learning_rate": 3.4549150281252635e-06,
      "loss": 2.4536,
      "step": 195
    },
    {
      "epoch": 0.6415711947626841,
      "grad_norm": 0.29644091797619826,
      "learning_rate": 3.400692809790796e-06,
      "loss": 2.4524,
      "step": 196
    },
    {
      "epoch": 0.6448445171849427,
      "grad_norm": 0.2973231753014199,
      "learning_rate": 3.346679310234744e-06,
      "loss": 2.439,
      "step": 197
    },
    {
      "epoch": 0.6481178396072013,
      "grad_norm": 0.2970530949566317,
      "learning_rate": 3.292881578529179e-06,
      "loss": 2.441,
      "step": 198
    },
    {
      "epoch": 0.6513911620294599,
      "grad_norm": 0.29457945074596364,
      "learning_rate": 3.2393066355872264e-06,
      "loss": 2.4393,
      "step": 199
    },
    {
      "epoch": 0.6546644844517185,
      "grad_norm": 0.2959752353295032,
      "learning_rate": 3.1859614732467957e-06,
      "loss": 2.4413,
      "step": 200
    },
    {
      "epoch": 0.6579378068739771,
      "grad_norm": 0.2894024794008193,
      "learning_rate": 3.1328530533581102e-06,
      "loss": 2.4486,
      "step": 201
    },
    {
      "epoch": 0.6612111292962357,
      "grad_norm": 0.30092182634576625,
      "learning_rate": 3.0799883068751433e-06,
      "loss": 2.4523,
      "step": 202
    },
    {
      "epoch": 0.6644844517184942,
      "grad_norm": 0.2957309468597844,
      "learning_rate": 3.0273741329510852e-06,
      "loss": 2.4335,
      "step": 203
    },
    {
      "epoch": 0.6677577741407529,
      "grad_norm": 0.28459255910232356,
      "learning_rate": 2.975017398037974e-06,
      "loss": 2.4875,
      "step": 204
    },
    {
      "epoch": 0.6710310965630114,
      "grad_norm": 0.29271515123042124,
      "learning_rate": 2.9229249349905686e-06,
      "loss": 2.475,
      "step": 205
    },
    {
      "epoch": 0.67430441898527,
      "grad_norm": 0.30277820836046737,
      "learning_rate": 2.871103542174637e-06,
      "loss": 2.4669,
      "step": 206
    },
    {
      "epoch": 0.6775777414075287,
      "grad_norm": 0.2859827817423741,
      "learning_rate": 2.8195599825797233e-06,
      "loss": 2.4514,
      "step": 207
    },
    {
      "epoch": 0.6808510638297872,
      "grad_norm": 0.2968379809937716,
      "learning_rate": 2.7683009829365417e-06,
      "loss": 2.4607,
      "step": 208
    },
    {
      "epoch": 0.6841243862520459,
      "grad_norm": 0.29656383723002844,
      "learning_rate": 2.717333232839088e-06,
      "loss": 2.4905,
      "step": 209
    },
    {
      "epoch": 0.6873977086743044,
      "grad_norm": 0.28883812040540374,
      "learning_rate": 2.6666633838716317e-06,
      "loss": 2.4543,
      "step": 210
    },
    {
      "epoch": 0.690671031096563,
      "grad_norm": 0.29531117152054726,
      "learning_rate": 2.616298048740626e-06,
      "loss": 2.4551,
      "step": 211
    },
    {
      "epoch": 0.6939443535188216,
      "grad_norm": 0.2862187454441366,
      "learning_rate": 2.566243800411719e-06,
      "loss": 2.452,
      "step": 212
    },
    {
      "epoch": 0.6972176759410802,
      "grad_norm": 0.2911286270146174,
      "learning_rate": 2.5165071712519447e-06,
      "loss": 2.4624,
      "step": 213
    },
    {
      "epoch": 0.7004909983633388,
      "grad_norm": 0.2869776645891394,
      "learning_rate": 2.467094652177209e-06,
      "loss": 2.4607,
      "step": 214
    },
    {
      "epoch": 0.7037643207855974,
      "grad_norm": 0.2811233703409396,
      "learning_rate": 2.418012691805191e-06,
      "loss": 2.4739,
      "step": 215
    },
    {
      "epoch": 0.707037643207856,
      "grad_norm": 0.28005066226487846,
      "learning_rate": 2.3692676956137585e-06,
      "loss": 2.457,
      "step": 216
    },
    {
      "epoch": 0.7103109656301145,
      "grad_norm": 0.29229180839794044,
      "learning_rate": 2.320866025105016e-06,
      "loss": 2.4505,
      "step": 217
    },
    {
      "epoch": 0.7135842880523732,
      "grad_norm": 0.28905509654195427,
      "learning_rate": 2.2728139969751005e-06,
      "loss": 2.4497,
      "step": 218
    },
    {
      "epoch": 0.7168576104746317,
      "grad_norm": 0.29091875602679856,
      "learning_rate": 2.225117882289799e-06,
      "loss": 2.4386,
      "step": 219
    },
    {
      "epoch": 0.7201309328968903,
      "grad_norm": 0.28132598389097907,
      "learning_rate": 2.1777839056661555e-06,
      "loss": 2.4688,
      "step": 220
    },
    {
      "epoch": 0.723404255319149,
      "grad_norm": 0.27552653164791197,
      "learning_rate": 2.1308182444601126e-06,
      "loss": 2.4515,
      "step": 221
    },
    {
      "epoch": 0.7266775777414075,
      "grad_norm": 0.285817433950407,
      "learning_rate": 2.0842270279603403e-06,
      "loss": 2.4321,
      "step": 222
    },
    {
      "epoch": 0.7299509001636661,
      "grad_norm": 0.2774842353710076,
      "learning_rate": 2.0380163365883188e-06,
      "loss": 2.4686,
      "step": 223
    },
    {
      "epoch": 0.7332242225859247,
      "grad_norm": 0.2782616476099915,
      "learning_rate": 1.9921922011048065e-06,
      "loss": 2.4438,
      "step": 224
    },
    {
      "epoch": 0.7364975450081833,
      "grad_norm": 0.27697006620218906,
      "learning_rate": 1.946760601822809e-06,
      "loss": 2.4461,
      "step": 225
    },
    {
      "epoch": 0.7397708674304418,
      "grad_norm": 0.2794520367771161,
      "learning_rate": 1.9017274678270948e-06,
      "loss": 2.457,
      "step": 226
    },
    {
      "epoch": 0.7430441898527005,
      "grad_norm": 0.293225655343382,
      "learning_rate": 1.8570986762004246e-06,
      "loss": 2.4339,
      "step": 227
    },
    {
      "epoch": 0.7463175122749591,
      "grad_norm": 0.28410174489532863,
      "learning_rate": 1.8128800512565514e-06,
      "loss": 2.4398,
      "step": 228
    },
    {
      "epoch": 0.7495908346972177,
      "grad_norm": 0.28113616787582135,
      "learning_rate": 1.7690773637801295e-06,
      "loss": 2.463,
      "step": 229
    },
    {
      "epoch": 0.7528641571194763,
      "grad_norm": 0.27128743447878584,
      "learning_rate": 1.7256963302735752e-06,
      "loss": 2.4638,
      "step": 230
    },
    {
      "epoch": 0.7561374795417348,
      "grad_norm": 0.28051379395375337,
      "learning_rate": 1.6827426122110412e-06,
      "loss": 2.4803,
      "step": 231
    },
    {
      "epoch": 0.7594108019639935,
      "grad_norm": 0.2774673111470984,
      "learning_rate": 1.6402218152995609e-06,
      "loss": 2.4255,
      "step": 232
    },
    {
      "epoch": 0.762684124386252,
      "grad_norm": 0.26906487455817846,
      "learning_rate": 1.598139488747467e-06,
      "loss": 2.4478,
      "step": 233
    },
    {
      "epoch": 0.7659574468085106,
      "grad_norm": 0.2764519019580199,
      "learning_rate": 1.5565011245401928e-06,
      "loss": 2.4674,
      "step": 234
    },
    {
      "epoch": 0.7692307692307693,
      "grad_norm": 0.28254452300968197,
      "learning_rate": 1.5153121567235334e-06,
      "loss": 2.4648,
      "step": 235
    },
    {
      "epoch": 0.7725040916530278,
      "grad_norm": 0.28044552819552365,
      "learning_rate": 1.4745779606944716e-06,
      "loss": 2.4634,
      "step": 236
    },
    {
      "epoch": 0.7757774140752864,
      "grad_norm": 0.2883807662161686,
      "learning_rate": 1.4343038524996645e-06,
      "loss": 2.4409,
      "step": 237
    },
    {
      "epoch": 0.779050736497545,
      "grad_norm": 0.2712743023254104,
      "learning_rate": 1.3944950881416541e-06,
      "loss": 2.4749,
      "step": 238
    },
    {
      "epoch": 0.7823240589198036,
      "grad_norm": 0.2844283835642021,
      "learning_rate": 1.3551568628929434e-06,
      "loss": 2.4682,
      "step": 239
    },
    {
      "epoch": 0.7855973813420621,
      "grad_norm": 0.2792759057607264,
      "learning_rate": 1.3162943106179748e-06,
      "loss": 2.4414,
      "step": 240
    },
    {
      "epoch": 0.7888707037643208,
      "grad_norm": 0.2733989225438821,
      "learning_rate": 1.2779125031031413e-06,
      "loss": 2.4257,
      "step": 241
    },
    {
      "epoch": 0.7921440261865794,
      "grad_norm": 0.271099884675861,
      "learning_rate": 1.2400164493948713e-06,
      "loss": 2.4569,
      "step": 242
    },
    {
      "epoch": 0.795417348608838,
      "grad_norm": 0.2745676234993269,
      "learning_rate": 1.2026110951459364e-06,
      "loss": 2.4489,
      "step": 243
    },
    {
      "epoch": 0.7986906710310966,
      "grad_norm": 0.2767138934905047,
      "learning_rate": 1.1657013219700108e-06,
      "loss": 2.4562,
      "step": 244
    },
    {
      "epoch": 0.8019639934533551,
      "grad_norm": 0.2742378409963404,
      "learning_rate": 1.1292919468045876e-06,
      "loss": 2.4333,
      "step": 245
    },
    {
      "epoch": 0.8052373158756138,
      "grad_norm": 0.2585166236299042,
      "learning_rate": 1.0933877212823462e-06,
      "loss": 2.4476,
      "step": 246
    },
    {
      "epoch": 0.8085106382978723,
      "grad_norm": 0.2696183429586083,
      "learning_rate": 1.057993331111029e-06,
      "loss": 2.4402,
      "step": 247
    },
    {
      "epoch": 0.8117839607201309,
      "grad_norm": 0.2742359647860916,
      "learning_rate": 1.0231133954619449e-06,
      "loss": 2.431,
      "step": 248
    },
    {
      "epoch": 0.8150572831423896,
      "grad_norm": 0.2745250582957182,
      "learning_rate": 9.887524663671243e-07,
      "loss": 2.456,
      "step": 249
    },
    {
      "epoch": 0.8183306055646481,
      "grad_norm": 0.2915439246328324,
      "learning_rate": 9.549150281252633e-07,
      "loss": 2.4739,
      "step": 250
    },
    {
      "epoch": 0.8216039279869067,
      "grad_norm": 0.2703132458554063,
      "learning_rate": 9.216054967164916e-07,
      "loss": 2.461,
      "step": 251
    },
    {
      "epoch": 0.8248772504091653,
      "grad_norm": 0.2902377178678097,
      "learning_rate": 8.888282192260645e-07,
      "loss": 2.4499,
      "step": 252
    },
    {
      "epoch": 0.8281505728314239,
      "grad_norm": 0.2732853452830904,
      "learning_rate": 8.565874732770429e-07,
      "loss": 2.4597,
      "step": 253
    },
    {
      "epoch": 0.8314238952536824,
      "grad_norm": 0.2696194963576079,
      "learning_rate": 8.248874664720375e-07,
      "loss": 2.4506,
      "step": 254
    },
    {
      "epoch": 0.8346972176759411,
      "grad_norm": 0.26860289744719607,
      "learning_rate": 7.937323358440935e-07,
      "loss": 2.4469,
      "step": 255
    },
    {
      "epoch": 0.8379705400981997,
      "grad_norm": 0.271233428425454,
      "learning_rate": 7.631261473167878e-07,
      "loss": 2.4532,
      "step": 256
    },
    {
      "epoch": 0.8412438625204582,
      "grad_norm": 0.2725978617713584,
      "learning_rate": 7.330728951735916e-07,
      "loss": 2.4745,
      "step": 257
    },
    {
      "epoch": 0.8445171849427169,
      "grad_norm": 0.26803651743311957,
      "learning_rate": 7.035765015366047e-07,
      "loss": 2.4381,
      "step": 258
    },
    {
      "epoch": 0.8477905073649754,
      "grad_norm": 0.25921598512129085,
      "learning_rate": 6.746408158546947e-07,
      "loss": 2.4533,
      "step": 259
    },
    {
      "epoch": 0.851063829787234,
      "grad_norm": 0.2733847496928928,
      "learning_rate": 6.462696144011149e-07,
      "loss": 2.4568,
      "step": 260
    },
    {
      "epoch": 0.8543371522094927,
      "grad_norm": 0.26178373020648743,
      "learning_rate": 6.184665997806832e-07,
      "loss": 2.4581,
      "step": 261
    },
    {
      "epoch": 0.8576104746317512,
      "grad_norm": 0.26098357909178693,
      "learning_rate": 5.912354004465709e-07,
      "loss": 2.4706,
      "step": 262
    },
    {
      "epoch": 0.8608837970540099,
      "grad_norm": 0.25697853000027987,
      "learning_rate": 5.645795702267731e-07,
      "loss": 2.4477,
      "step": 263
    },
    {
      "epoch": 0.8641571194762684,
      "grad_norm": 0.26192967323861704,
      "learning_rate": 5.385025878603039e-07,
      "loss": 2.4414,
      "step": 264
    },
    {
      "epoch": 0.867430441898527,
      "grad_norm": 0.26218888376989036,
      "learning_rate": 5.130078565432089e-07,
      "loss": 2.4518,
      "step": 265
    },
    {
      "epoch": 0.8707037643207856,
      "grad_norm": 0.26990782878286096,
      "learning_rate": 4.880987034844231e-07,
      "loss": 2.4553,
      "step": 266
    },
    {
      "epoch": 0.8739770867430442,
      "grad_norm": 0.26556563767940716,
      "learning_rate": 4.637783794715589e-07,
      "loss": 2.4513,
      "step": 267
    },
    {
      "epoch": 0.8772504091653028,
      "grad_norm": 0.2653970429292643,
      "learning_rate": 4.400500584466505e-07,
      "loss": 2.4545,
      "step": 268
    },
    {
      "epoch": 0.8805237315875614,
      "grad_norm": 0.26432968619903263,
      "learning_rate": 4.1691683709194184e-07,
      "loss": 2.4707,
      "step": 269
    },
    {
      "epoch": 0.88379705400982,
      "grad_norm": 0.2657171703652861,
      "learning_rate": 3.9438173442575e-07,
      "loss": 2.4651,
      "step": 270
    },
    {
      "epoch": 0.8870703764320785,
      "grad_norm": 0.2538156470759212,
      "learning_rate": 3.724476914084657e-07,
      "loss": 2.4449,
      "step": 271
    },
    {
      "epoch": 0.8903436988543372,
      "grad_norm": 0.27133692461664,
      "learning_rate": 3.511175705587433e-07,
      "loss": 2.473,
      "step": 272
    },
    {
      "epoch": 0.8936170212765957,
      "grad_norm": 0.25858521334517437,
      "learning_rate": 3.303941555799223e-07,
      "loss": 2.4669,
      "step": 273
    },
    {
      "epoch": 0.8968903436988543,
      "grad_norm": 0.2552024596786959,
      "learning_rate": 3.1028015099673957e-07,
      "loss": 2.4784,
      "step": 274
    },
    {
      "epoch": 0.900163666121113,
      "grad_norm": 0.2639761279668147,
      "learning_rate": 2.9077818180237693e-07,
      "loss": 2.4608,
      "step": 275
    },
    {
      "epoch": 0.9034369885433715,
      "grad_norm": 0.26158090716261023,
      "learning_rate": 2.7189079311587596e-07,
      "loss": 2.4317,
      "step": 276
    },
    {
      "epoch": 0.9067103109656302,
      "grad_norm": 0.26420232076269845,
      "learning_rate": 2.536204498499922e-07,
      "loss": 2.4715,
      "step": 277
    },
    {
      "epoch": 0.9099836333878887,
      "grad_norm": 0.2611776184759321,
      "learning_rate": 2.3596953638951093e-07,
      "loss": 2.4496,
      "step": 278
    },
    {
      "epoch": 0.9132569558101473,
      "grad_norm": 0.2566448882353072,
      "learning_rate": 2.1894035628006517e-07,
      "loss": 2.4535,
      "step": 279
    },
    {
      "epoch": 0.9165302782324058,
      "grad_norm": 0.2583889196721565,
      "learning_rate": 2.0253513192751374e-07,
      "loss": 2.4312,
      "step": 280
    },
    {
      "epoch": 0.9198036006546645,
      "grad_norm": 0.25005870928436097,
      "learning_rate": 1.867560043079031e-07,
      "loss": 2.4451,
      "step": 281
    },
    {
      "epoch": 0.9230769230769231,
      "grad_norm": 0.25955023725898285,
      "learning_rate": 1.7160503268806084e-07,
      "loss": 2.4591,
      "step": 282
    },
    {
      "epoch": 0.9263502454991817,
      "grad_norm": 0.25878369029655873,
      "learning_rate": 1.5708419435684463e-07,
      "loss": 2.4613,
      "step": 283
    },
    {
      "epoch": 0.9296235679214403,
      "grad_norm": 0.2559725798090891,
      "learning_rate": 1.4319538436709746e-07,
      "loss": 2.4422,
      "step": 284
    },
    {
      "epoch": 0.9328968903436988,
      "grad_norm": 0.2560397895273692,
      "learning_rate": 1.2994041528833267e-07,
      "loss": 2.4712,
      "step": 285
    },
    {
      "epoch": 0.9361702127659575,
      "grad_norm": 0.26063225688341923,
      "learning_rate": 1.1732101697018161e-07,
      "loss": 2.4452,
      "step": 286
    },
    {
      "epoch": 0.939443535188216,
      "grad_norm": 0.2582518091886246,
      "learning_rate": 1.0533883631663966e-07,
      "loss": 2.4559,
      "step": 287
    },
    {
      "epoch": 0.9427168576104746,
      "grad_norm": 0.26206927755347836,
      "learning_rate": 9.399543707113601e-08,
      "loss": 2.477,
      "step": 288
    },
    {
      "epoch": 0.9459901800327333,
      "grad_norm": 0.26042235837250066,
      "learning_rate": 8.329229961245355e-08,
      "loss": 2.4496,
      "step": 289
    },
    {
      "epoch": 0.9492635024549918,
      "grad_norm": 0.257835901510826,
      "learning_rate": 7.32308207615351e-08,
      "loss": 2.4546,
      "step": 290
    },
    {
      "epoch": 0.9525368248772504,
      "grad_norm": 0.26136373848402616,
      "learning_rate": 6.381231359918638e-08,
      "loss": 2.4491,
      "step": 291
    },
    {
      "epoch": 0.955810147299509,
      "grad_norm": 0.25188481875716506,
      "learning_rate": 5.503800729471376e-08,
      "loss": 2.4425,
      "step": 292
    },
    {
      "epoch": 0.9590834697217676,
      "grad_norm": 0.25523476009318996,
      "learning_rate": 4.690904694550913e-08,
      "loss": 2.4415,
      "step": 293
    },
    {
      "epoch": 0.9623567921440261,
      "grad_norm": 0.2558833671558322,
      "learning_rate": 3.9426493427611177e-08,
      "loss": 2.4591,
      "step": 294
    },
    {
      "epoch": 0.9656301145662848,
      "grad_norm": 0.2607553411892932,
      "learning_rate": 3.25913232572489e-08,
      "loss": 2.4868,
      "step": 295
    },
    {
      "epoch": 0.9689034369885434,
      "grad_norm": 0.262015003675821,
      "learning_rate": 2.640442846340796e-08,
      "loss": 2.4675,
      "step": 296
    },
    {
      "epoch": 0.972176759410802,
      "grad_norm": 0.2578562996530708,
      "learning_rate": 2.0866616471409974e-08,
      "loss": 2.4707,
      "step": 297
    },
    {
      "epoch": 0.9754500818330606,
      "grad_norm": 0.2567381919735001,
      "learning_rate": 1.5978609997542306e-08,
      "loss": 2.4582,
      "step": 298
    },
    {
      "epoch": 0.9787234042553191,
      "grad_norm": 0.2549822571991868,
      "learning_rate": 1.174104695473688e-08,
      "loss": 2.4825,
      "step": 299
    },
    {
      "epoch": 0.9819967266775778,
      "grad_norm": 0.2554549372989953,
      "learning_rate": 8.15448036932176e-09,
      "loss": 2.4719,
      "step": 300
    },
    {
      "epoch": 0.9852700490998363,
      "grad_norm": 0.263381774850731,
      "learning_rate": 5.219378308845558e-09,
      "loss": 2.4653,
      "step": 301
    },
    {
      "epoch": 0.9885433715220949,
      "grad_norm": 0.2528687928806352,
      "learning_rate": 2.9361238209935085e-09,
      "loss": 2.4689,
      "step": 302
    },
    {
      "epoch": 0.9918166939443536,
      "grad_norm": 0.2610005338150273,
      "learning_rate": 1.305014883595801e-09,
      "loss": 2.4555,
      "step": 303
    },
    {
      "epoch": 0.9950900163666121,
      "grad_norm": 0.26402197276145056,
      "learning_rate": 3.262643657425679e-10,
      "loss": 2.4546,
      "step": 304
    },
    {
      "epoch": 0.9983633387888707,
      "grad_norm": 0.25445050680135267,
      "learning_rate": 0.0,
      "loss": 2.4702,
      "step": 305
    }
  ],
  "logging_steps": 1,
  "max_steps": 305,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 7.568140075322573e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}