{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.40210627094303497,
  "eval_steps": 500,
  "global_step": 210,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0019147917663954045,
      "grad_norm": 1.17178213596344,
      "learning_rate": 0.0,
      "loss": 0.5878,
      "step": 1
    },
    {
      "epoch": 0.003829583532790809,
      "grad_norm": 1.167578101158142,
      "learning_rate": 3.846153846153847e-07,
      "loss": 0.5815,
      "step": 2
    },
    {
      "epoch": 0.0057443752991862135,
      "grad_norm": 1.2005791664123535,
      "learning_rate": 7.692307692307694e-07,
      "loss": 0.6327,
      "step": 3
    },
    {
      "epoch": 0.007659167065581618,
      "grad_norm": 1.1366013288497925,
      "learning_rate": 1.153846153846154e-06,
      "loss": 0.5869,
      "step": 4
    },
    {
      "epoch": 0.009573958831977022,
      "grad_norm": 1.0155231952667236,
      "learning_rate": 1.5384615384615387e-06,
      "loss": 0.5233,
      "step": 5
    },
    {
      "epoch": 0.011488750598372427,
      "grad_norm": 1.2220700979232788,
      "learning_rate": 1.9230769230769234e-06,
      "loss": 0.5699,
      "step": 6
    },
    {
      "epoch": 0.013403542364767831,
      "grad_norm": 1.1095085144042969,
      "learning_rate": 2.307692307692308e-06,
      "loss": 0.5378,
      "step": 7
    },
    {
      "epoch": 0.015318334131163236,
      "grad_norm": 1.0518015623092651,
      "learning_rate": 2.6923076923076923e-06,
      "loss": 0.5432,
      "step": 8
    },
    {
      "epoch": 0.01723312589755864,
      "grad_norm": 1.0267938375473022,
      "learning_rate": 3.0769230769230774e-06,
      "loss": 0.5702,
      "step": 9
    },
    {
      "epoch": 0.019147917663954045,
      "grad_norm": 1.0278018712997437,
      "learning_rate": 3.4615384615384617e-06,
      "loss": 0.5411,
      "step": 10
    },
    {
      "epoch": 0.02106270943034945,
      "grad_norm": 0.9818478226661682,
      "learning_rate": 3.846153846153847e-06,
      "loss": 0.5214,
      "step": 11
    },
    {
      "epoch": 0.022977501196744854,
      "grad_norm": 0.9112760424613953,
      "learning_rate": 4.230769230769231e-06,
      "loss": 0.5235,
      "step": 12
    },
    {
      "epoch": 0.02489229296314026,
      "grad_norm": 1.1367770433425903,
      "learning_rate": 4.615384615384616e-06,
      "loss": 0.5689,
      "step": 13
    },
    {
      "epoch": 0.026807084729535663,
      "grad_norm": 0.887293815612793,
      "learning_rate": 5e-06,
      "loss": 0.5062,
      "step": 14
    },
    {
      "epoch": 0.028721876495931067,
      "grad_norm": 0.8234561085700989,
      "learning_rate": 5.384615384615385e-06,
      "loss": 0.4938,
      "step": 15
    },
    {
      "epoch": 0.030636668262326472,
      "grad_norm": 0.8301621079444885,
      "learning_rate": 5.769230769230769e-06,
      "loss": 0.4955,
      "step": 16
    },
    {
      "epoch": 0.032551460028721876,
      "grad_norm": 0.7201998829841614,
      "learning_rate": 6.153846153846155e-06,
      "loss": 0.4481,
      "step": 17
    },
    {
      "epoch": 0.03446625179511728,
      "grad_norm": 0.7492491602897644,
      "learning_rate": 6.538461538461539e-06,
      "loss": 0.4924,
      "step": 18
    },
    {
      "epoch": 0.036381043561512685,
      "grad_norm": 0.8051350712776184,
      "learning_rate": 6.923076923076923e-06,
      "loss": 0.5109,
      "step": 19
    },
    {
      "epoch": 0.03829583532790809,
      "grad_norm": 0.6143934726715088,
      "learning_rate": 7.307692307692308e-06,
      "loss": 0.4846,
      "step": 20
    },
    {
      "epoch": 0.040210627094303494,
      "grad_norm": 0.7769045233726501,
      "learning_rate": 7.692307692307694e-06,
      "loss": 0.4761,
      "step": 21
    },
    {
      "epoch": 0.0421254188606989,
      "grad_norm": 0.5961856842041016,
      "learning_rate": 8.076923076923077e-06,
      "loss": 0.467,
      "step": 22
    },
    {
      "epoch": 0.0440402106270943,
      "grad_norm": 0.683874785900116,
      "learning_rate": 8.461538461538462e-06,
      "loss": 0.4847,
      "step": 23
    },
    {
      "epoch": 0.04595500239348971,
      "grad_norm": 0.5849653482437134,
      "learning_rate": 8.846153846153847e-06,
      "loss": 0.4514,
      "step": 24
    },
    {
      "epoch": 0.04786979415988511,
      "grad_norm": 0.6217619180679321,
      "learning_rate": 9.230769230769232e-06,
      "loss": 0.4733,
      "step": 25
    },
    {
      "epoch": 0.04978458592628052,
      "grad_norm": 0.5796915888786316,
      "learning_rate": 9.615384615384616e-06,
      "loss": 0.4735,
      "step": 26
    },
    {
      "epoch": 0.05169937769267592,
      "grad_norm": 0.5245983600616455,
      "learning_rate": 1e-05,
      "loss": 0.4398,
      "step": 27
    },
    {
      "epoch": 0.053614169459071326,
      "grad_norm": 0.6732852458953857,
      "learning_rate": 9.999899706000774e-06,
      "loss": 0.4907,
      "step": 28
    },
    {
      "epoch": 0.05552896122546673,
      "grad_norm": 0.5512049794197083,
      "learning_rate": 9.999598828026644e-06,
      "loss": 0.4723,
      "step": 29
    },
    {
      "epoch": 0.057443752991862135,
      "grad_norm": 0.6110374331474304,
      "learning_rate": 9.999097378148116e-06,
      "loss": 0.4414,
      "step": 30
    },
    {
      "epoch": 0.05935854475825754,
      "grad_norm": 0.5515976548194885,
      "learning_rate": 9.998395376482152e-06,
      "loss": 0.4314,
      "step": 31
    },
    {
      "epoch": 0.061273336524652944,
      "grad_norm": 0.5074656009674072,
      "learning_rate": 9.99749285119138e-06,
      "loss": 0.4149,
      "step": 32
    },
    {
      "epoch": 0.06318812829104835,
      "grad_norm": 0.6015526056289673,
      "learning_rate": 9.996389838482942e-06,
      "loss": 0.517,
      "step": 33
    },
    {
      "epoch": 0.06510292005744375,
      "grad_norm": 0.5998480916023254,
      "learning_rate": 9.995086382607064e-06,
      "loss": 0.4695,
      "step": 34
    },
    {
      "epoch": 0.06701771182383916,
      "grad_norm": 0.5995810627937317,
      "learning_rate": 9.993582535855265e-06,
      "loss": 0.4453,
      "step": 35
    },
    {
      "epoch": 0.06893250359023456,
      "grad_norm": 0.5346629023551941,
      "learning_rate": 9.991878358558267e-06,
      "loss": 0.4667,
      "step": 36
    },
    {
      "epoch": 0.07084729535662997,
      "grad_norm": 0.5550440549850464,
      "learning_rate": 9.989973919083576e-06,
      "loss": 0.4488,
      "step": 37
    },
    {
      "epoch": 0.07276208712302537,
      "grad_norm": 0.5914222598075867,
      "learning_rate": 9.987869293832727e-06,
      "loss": 0.4473,
      "step": 38
    },
    {
      "epoch": 0.07467687888942078,
      "grad_norm": 0.46610352396965027,
      "learning_rate": 9.985564567238237e-06,
      "loss": 0.4265,
      "step": 39
    },
    {
      "epoch": 0.07659167065581618,
      "grad_norm": 0.6092692017555237,
      "learning_rate": 9.983059831760205e-06,
      "loss": 0.469,
      "step": 40
    },
    {
      "epoch": 0.07850646242221158,
      "grad_norm": 0.48001110553741455,
      "learning_rate": 9.980355187882606e-06,
      "loss": 0.4219,
      "step": 41
    },
    {
      "epoch": 0.08042125418860699,
      "grad_norm": 0.4854871332645416,
      "learning_rate": 9.977450744109258e-06,
      "loss": 0.3991,
      "step": 42
    },
    {
      "epoch": 0.0823360459550024,
      "grad_norm": 0.47407639026641846,
      "learning_rate": 9.974346616959476e-06,
      "loss": 0.4177,
      "step": 43
    },
    {
      "epoch": 0.0842508377213978,
      "grad_norm": 0.599115788936615,
      "learning_rate": 9.97104293096339e-06,
      "loss": 0.4401,
      "step": 44
    },
    {
      "epoch": 0.0861656294877932,
      "grad_norm": 0.5324896574020386,
      "learning_rate": 9.967539818656953e-06,
      "loss": 0.4265,
      "step": 45
    },
    {
      "epoch": 0.0880804212541886,
      "grad_norm": 0.7636860609054565,
      "learning_rate": 9.96383742057662e-06,
      "loss": 0.4957,
      "step": 46
    },
    {
      "epoch": 0.08999521302058401,
      "grad_norm": 0.4660010039806366,
      "learning_rate": 9.959935885253715e-06,
      "loss": 0.4236,
      "step": 47
    },
    {
      "epoch": 0.09191000478697942,
      "grad_norm": 0.4723781645298004,
      "learning_rate": 9.955835369208475e-06,
      "loss": 0.405,
      "step": 48
    },
    {
      "epoch": 0.09382479655337482,
      "grad_norm": 0.473716139793396,
      "learning_rate": 9.951536036943753e-06,
      "loss": 0.4054,
      "step": 49
    },
    {
      "epoch": 0.09573958831977022,
      "grad_norm": 0.8201298713684082,
      "learning_rate": 9.94703806093845e-06,
      "loss": 0.4437,
      "step": 50
    },
    {
      "epoch": 0.09765438008616563,
      "grad_norm": 0.5334436297416687,
      "learning_rate": 9.942341621640558e-06,
      "loss": 0.4112,
      "step": 51
    },
    {
      "epoch": 0.09956917185256103,
      "grad_norm": 0.4933352768421173,
      "learning_rate": 9.937446907459954e-06,
      "loss": 0.4303,
      "step": 52
    },
    {
      "epoch": 0.10148396361895644,
      "grad_norm": 0.44361305236816406,
      "learning_rate": 9.932354114760819e-06,
      "loss": 0.4039,
      "step": 53
    },
    {
      "epoch": 0.10339875538535184,
      "grad_norm": 0.5590924024581909,
      "learning_rate": 9.92706344785377e-06,
      "loss": 0.51,
      "step": 54
    },
    {
      "epoch": 0.10531354715174725,
      "grad_norm": 0.3887651264667511,
      "learning_rate": 9.921575118987672e-06,
      "loss": 0.3709,
      "step": 55
    },
    {
      "epoch": 0.10722833891814265,
      "grad_norm": 0.43217676877975464,
      "learning_rate": 9.915889348341098e-06,
      "loss": 0.4107,
      "step": 56
    },
    {
      "epoch": 0.10914313068453806,
      "grad_norm": 0.46679773926734924,
      "learning_rate": 9.910006364013522e-06,
      "loss": 0.3832,
      "step": 57
    },
    {
      "epoch": 0.11105792245093346,
      "grad_norm": 0.5174672603607178,
      "learning_rate": 9.903926402016153e-06,
      "loss": 0.397,
      "step": 58
    },
    {
      "epoch": 0.11297271421732887,
      "grad_norm": 0.5932629704475403,
      "learning_rate": 9.897649706262474e-06,
      "loss": 0.4584,
      "step": 59
    },
    {
      "epoch": 0.11488750598372427,
      "grad_norm": 0.5002132058143616,
      "learning_rate": 9.891176528558451e-06,
      "loss": 0.4115,
      "step": 60
    },
    {
      "epoch": 0.11680229775011967,
      "grad_norm": 0.44753795862197876,
      "learning_rate": 9.884507128592435e-06,
      "loss": 0.417,
      "step": 61
    },
    {
      "epoch": 0.11871708951651508,
      "grad_norm": 0.43308743834495544,
      "learning_rate": 9.877641773924748e-06,
      "loss": 0.3872,
      "step": 62
    },
    {
      "epoch": 0.12063188128291048,
      "grad_norm": 0.44310009479522705,
      "learning_rate": 9.870580739976936e-06,
      "loss": 0.3877,
      "step": 63
    },
    {
      "epoch": 0.12254667304930589,
      "grad_norm": 0.4362034797668457,
      "learning_rate": 9.863324310020735e-06,
      "loss": 0.3957,
      "step": 64
    },
    {
      "epoch": 0.12446146481570129,
      "grad_norm": 0.46891993284225464,
      "learning_rate": 9.855872775166696e-06,
      "loss": 0.427,
      "step": 65
    },
    {
      "epoch": 0.1263762565820967,
      "grad_norm": 0.47662439942359924,
      "learning_rate": 9.848226434352513e-06,
      "loss": 0.4041,
      "step": 66
    },
    {
      "epoch": 0.12829104834849211,
      "grad_norm": 0.5884003043174744,
      "learning_rate": 9.840385594331022e-06,
      "loss": 0.4487,
      "step": 67
    },
    {
      "epoch": 0.1302058401148875,
      "grad_norm": 0.5149067640304565,
      "learning_rate": 9.83235056965791e-06,
      "loss": 0.4567,
      "step": 68
    },
    {
      "epoch": 0.13212063188128292,
      "grad_norm": 0.4653552174568176,
      "learning_rate": 9.824121682679072e-06,
      "loss": 0.4024,
      "step": 69
    },
    {
      "epoch": 0.13403542364767831,
      "grad_norm": 0.47937148809432983,
      "learning_rate": 9.815699263517712e-06,
      "loss": 0.414,
      "step": 70
    },
    {
      "epoch": 0.13595021541407373,
      "grad_norm": 0.4588964581489563,
      "learning_rate": 9.807083650061063e-06,
      "loss": 0.4199,
      "step": 71
    },
    {
      "epoch": 0.13786500718046912,
      "grad_norm": 0.5369502902030945,
      "learning_rate": 9.798275187946859e-06,
      "loss": 0.4213,
      "step": 72
    },
    {
      "epoch": 0.13977979894686454,
      "grad_norm": 0.48553192615509033,
      "learning_rate": 9.789274230549456e-06,
      "loss": 0.3694,
      "step": 73
    },
    {
      "epoch": 0.14169459071325993,
      "grad_norm": 0.3765530586242676,
      "learning_rate": 9.780081138965663e-06,
      "loss": 0.3445,
      "step": 74
    },
    {
      "epoch": 0.14360938247965535,
      "grad_norm": 0.48994067311286926,
      "learning_rate": 9.770696282000245e-06,
      "loss": 0.4202,
      "step": 75
    },
    {
      "epoch": 0.14552417424605074,
      "grad_norm": 0.45693519711494446,
      "learning_rate": 9.761120036151138e-06,
      "loss": 0.384,
      "step": 76
    },
    {
      "epoch": 0.14743896601244616,
      "grad_norm": 0.4083925485610962,
      "learning_rate": 9.751352785594337e-06,
      "loss": 0.3618,
      "step": 77
    },
    {
      "epoch": 0.14935375777884155,
      "grad_norm": 0.5029312372207642,
      "learning_rate": 9.741394922168495e-06,
      "loss": 0.4446,
      "step": 78
    },
    {
      "epoch": 0.15126854954523697,
      "grad_norm": 0.4641478359699249,
      "learning_rate": 9.731246845359187e-06,
      "loss": 0.3837,
      "step": 79
    },
    {
      "epoch": 0.15318334131163236,
      "grad_norm": 0.42773452401161194,
      "learning_rate": 9.720908962282893e-06,
      "loss": 0.3747,
      "step": 80
    },
    {
      "epoch": 0.15509813307802778,
      "grad_norm": 0.45671483874320984,
      "learning_rate": 9.710381687670675e-06,
      "loss": 0.396,
      "step": 81
    },
    {
      "epoch": 0.15701292484442317,
      "grad_norm": 0.518532931804657,
      "learning_rate": 9.699665443851518e-06,
      "loss": 0.4207,
      "step": 82
    },
    {
      "epoch": 0.1589277166108186,
      "grad_norm": 0.42763078212738037,
      "learning_rate": 9.688760660735403e-06,
      "loss": 0.3617,
      "step": 83
    },
    {
      "epoch": 0.16084250837721398,
      "grad_norm": 0.4328898787498474,
      "learning_rate": 9.677667775796052e-06,
      "loss": 0.3612,
      "step": 84
    },
    {
      "epoch": 0.1627573001436094,
      "grad_norm": 0.5515812635421753,
      "learning_rate": 9.666387234053385e-06,
      "loss": 0.4151,
      "step": 85
    },
    {
      "epoch": 0.1646720919100048,
      "grad_norm": 0.4455530047416687,
      "learning_rate": 9.654919488055656e-06,
      "loss": 0.4048,
      "step": 86
    },
    {
      "epoch": 0.1665868836764002,
      "grad_norm": 0.43045058846473694,
      "learning_rate": 9.643264997861312e-06,
      "loss": 0.3769,
      "step": 87
    },
    {
      "epoch": 0.1685016754427956,
      "grad_norm": 0.44380709528923035,
      "learning_rate": 9.631424231020523e-06,
      "loss": 0.3963,
      "step": 88
    },
    {
      "epoch": 0.170416467209191,
      "grad_norm": 0.5282372832298279,
      "learning_rate": 9.619397662556434e-06,
      "loss": 0.418,
      "step": 89
    },
    {
      "epoch": 0.1723312589755864,
      "grad_norm": 0.5417309403419495,
      "learning_rate": 9.607185774946106e-06,
      "loss": 0.483,
      "step": 90
    },
    {
      "epoch": 0.17424605074198182,
      "grad_norm": 0.48023760318756104,
      "learning_rate": 9.594789058101154e-06,
      "loss": 0.4041,
      "step": 91
    },
    {
      "epoch": 0.1761608425083772,
      "grad_norm": 0.43183737993240356,
      "learning_rate": 9.582208009348104e-06,
      "loss": 0.3724,
      "step": 92
    },
    {
      "epoch": 0.17807563427477263,
      "grad_norm": 0.4684963822364807,
      "learning_rate": 9.569443133408434e-06,
      "loss": 0.4199,
      "step": 93
    },
    {
      "epoch": 0.17999042604116802,
      "grad_norm": 0.5076436400413513,
      "learning_rate": 9.556494942378328e-06,
      "loss": 0.4138,
      "step": 94
    },
    {
      "epoch": 0.18190521780756344,
      "grad_norm": 0.527299165725708,
      "learning_rate": 9.543363955708124e-06,
      "loss": 0.4089,
      "step": 95
    },
    {
      "epoch": 0.18382000957395883,
      "grad_norm": 0.4080921411514282,
      "learning_rate": 9.530050700181499e-06,
      "loss": 0.3337,
      "step": 96
    },
    {
      "epoch": 0.18573480134035425,
      "grad_norm": 0.42223554849624634,
      "learning_rate": 9.5165557098943e-06,
      "loss": 0.3758,
      "step": 97
    },
    {
      "epoch": 0.18764959310674964,
      "grad_norm": 0.4336399435997009,
      "learning_rate": 9.502879526233151e-06,
      "loss": 0.3664,
      "step": 98
    },
    {
      "epoch": 0.18956438487314506,
      "grad_norm": 0.43095213174819946,
      "learning_rate": 9.48902269785371e-06,
      "loss": 0.379,
      "step": 99
    },
    {
      "epoch": 0.19147917663954045,
      "grad_norm": 0.4262320399284363,
      "learning_rate": 9.47498578065867e-06,
      "loss": 0.3666,
      "step": 100
    },
    {
      "epoch": 0.19339396840593587,
      "grad_norm": 0.4407063126564026,
      "learning_rate": 9.460769337775461e-06,
      "loss": 0.3857,
      "step": 101
    },
    {
      "epoch": 0.19530876017233126,
      "grad_norm": 0.39989781379699707,
      "learning_rate": 9.446373939533642e-06,
      "loss": 0.3568,
      "step": 102
    },
    {
      "epoch": 0.19722355193872668,
      "grad_norm": 0.43383434414863586,
      "learning_rate": 9.431800163442043e-06,
      "loss": 0.374,
      "step": 103
    },
    {
      "epoch": 0.19913834370512207,
      "grad_norm": 0.5074713230133057,
      "learning_rate": 9.417048594165572e-06,
      "loss": 0.4137,
      "step": 104
    },
    {
      "epoch": 0.20105313547151749,
      "grad_norm": 0.45141083002090454,
      "learning_rate": 9.402119823501787e-06,
      "loss": 0.3977,
      "step": 105
    },
    {
      "epoch": 0.20296792723791288,
      "grad_norm": 0.44136589765548706,
      "learning_rate": 9.387014450357128e-06,
      "loss": 0.3808,
      "step": 106
    },
    {
      "epoch": 0.2048827190043083,
      "grad_norm": 0.6126969456672668,
      "learning_rate": 9.371733080722911e-06,
      "loss": 0.4527,
      "step": 107
    },
    {
      "epoch": 0.20679751077070369,
      "grad_norm": 0.48484617471694946,
      "learning_rate": 9.356276327651006e-06,
      "loss": 0.4032,
      "step": 108
    },
    {
      "epoch": 0.2087123025370991,
      "grad_norm": 0.4131053388118744,
      "learning_rate": 9.340644811229243e-06,
      "loss": 0.3594,
      "step": 109
    },
    {
      "epoch": 0.2106270943034945,
      "grad_norm": 0.3629656434059143,
      "learning_rate": 9.324839158556542e-06,
      "loss": 0.3337,
      "step": 110
    },
    {
      "epoch": 0.2125418860698899,
      "grad_norm": 0.4247240424156189,
      "learning_rate": 9.308860003717748e-06,
      "loss": 0.3809,
      "step": 111
    },
    {
      "epoch": 0.2144566778362853,
      "grad_norm": 0.4432498812675476,
      "learning_rate": 9.292707987758202e-06,
      "loss": 0.398,
      "step": 112
    },
    {
      "epoch": 0.21637146960268072,
      "grad_norm": 0.45140188932418823,
      "learning_rate": 9.27638375865801e-06,
      "loss": 0.3938,
      "step": 113
    },
    {
      "epoch": 0.2182862613690761,
      "grad_norm": 0.5100187063217163,
      "learning_rate": 9.259887971306064e-06,
      "loss": 0.4427,
      "step": 114
    },
    {
      "epoch": 0.22020105313547153,
      "grad_norm": 0.5069835782051086,
      "learning_rate": 9.243221287473755e-06,
      "loss": 0.4131,
      "step": 115
    },
    {
      "epoch": 0.22211584490186692,
      "grad_norm": 0.4621468782424927,
      "learning_rate": 9.226384375788435e-06,
      "loss": 0.3788,
      "step": 116
    },
    {
      "epoch": 0.22403063666826234,
      "grad_norm": 0.4977872371673584,
      "learning_rate": 9.209377911706585e-06,
      "loss": 0.3719,
      "step": 117
    },
    {
      "epoch": 0.22594542843465773,
      "grad_norm": 0.4791455566883087,
      "learning_rate": 9.192202577486725e-06,
      "loss": 0.3562,
      "step": 118
    },
    {
      "epoch": 0.22786022020105315,
      "grad_norm": 0.4171106219291687,
      "learning_rate": 9.174859062162037e-06,
      "loss": 0.3568,
      "step": 119
    },
    {
      "epoch": 0.22977501196744854,
      "grad_norm": 0.4501950740814209,
      "learning_rate": 9.157348061512728e-06,
      "loss": 0.4015,
      "step": 120
    },
    {
      "epoch": 0.23168980373384396,
      "grad_norm": 0.4359966814517975,
      "learning_rate": 9.139670278038109e-06,
      "loss": 0.4014,
      "step": 121
    },
    {
      "epoch": 0.23360459550023935,
      "grad_norm": 0.3942509889602661,
      "learning_rate": 9.121826420928421e-06,
      "loss": 0.3328,
      "step": 122
    },
    {
      "epoch": 0.23551938726663477,
      "grad_norm": 0.4340513050556183,
      "learning_rate": 9.103817206036383e-06,
      "loss": 0.3628,
      "step": 123
    },
    {
      "epoch": 0.23743417903303016,
      "grad_norm": 0.44619980454444885,
      "learning_rate": 9.085643355848468e-06,
      "loss": 0.4003,
      "step": 124
    },
    {
      "epoch": 0.23934897079942558,
      "grad_norm": 0.43487605452537537,
      "learning_rate": 9.06730559945592e-06,
      "loss": 0.3558,
      "step": 125
    },
    {
      "epoch": 0.24126376256582097,
      "grad_norm": 0.4643803834915161,
      "learning_rate": 9.048804672525513e-06,
      "loss": 0.375,
      "step": 126
    },
    {
      "epoch": 0.24317855433221638,
      "grad_norm": 0.44975554943084717,
      "learning_rate": 9.030141317270026e-06,
      "loss": 0.3602,
      "step": 127
    },
    {
      "epoch": 0.24509334609861178,
      "grad_norm": 0.45412832498550415,
      "learning_rate": 9.011316282418474e-06,
      "loss": 0.3724,
      "step": 128
    },
    {
      "epoch": 0.2470081378650072,
      "grad_norm": 0.4402771592140198,
      "learning_rate": 8.992330323186069e-06,
      "loss": 0.3945,
      "step": 129
    },
    {
      "epoch": 0.24892292963140258,
      "grad_norm": 0.46597278118133545,
      "learning_rate": 8.973184201243922e-06,
      "loss": 0.4407,
      "step": 130
    },
    {
      "epoch": 0.250837721397798,
      "grad_norm": 0.44561654329299927,
      "learning_rate": 8.953878684688492e-06,
      "loss": 0.3814,
      "step": 131
    },
    {
      "epoch": 0.2527525131641934,
      "grad_norm": 0.4216248095035553,
      "learning_rate": 8.934414548010764e-06,
      "loss": 0.3647,
      "step": 132
    },
    {
      "epoch": 0.2546673049305888,
      "grad_norm": 0.4280121326446533,
      "learning_rate": 8.914792572065178e-06,
      "loss": 0.3733,
      "step": 133
    },
    {
      "epoch": 0.25658209669698423,
      "grad_norm": 0.5127595067024231,
      "learning_rate": 8.89501354403831e-06,
      "loss": 0.4195,
      "step": 134
    },
    {
      "epoch": 0.2584968884633796,
      "grad_norm": 0.5074846744537354,
      "learning_rate": 8.875078257417294e-06,
      "loss": 0.4204,
      "step": 135
    },
    {
      "epoch": 0.260411680229775,
      "grad_norm": 0.4372941553592682,
      "learning_rate": 8.854987511957974e-06,
      "loss": 0.3924,
      "step": 136
    },
    {
      "epoch": 0.26232647199617043,
      "grad_norm": 0.482693076133728,
      "learning_rate": 8.834742113652835e-06,
      "loss": 0.3989,
      "step": 137
    },
    {
      "epoch": 0.26424126376256585,
      "grad_norm": 0.40820303559303284,
      "learning_rate": 8.81434287469866e-06,
      "loss": 0.3339,
      "step": 138
    },
    {
      "epoch": 0.2661560555289612,
      "grad_norm": 0.5613487362861633,
      "learning_rate": 8.793790613463956e-06,
      "loss": 0.3689,
      "step": 139
    },
    {
      "epoch": 0.26807084729535663,
      "grad_norm": 0.4296424686908722,
      "learning_rate": 8.773086154456106e-06,
      "loss": 0.3781,
      "step": 140
    },
    {
      "epoch": 0.26998563906175205,
      "grad_norm": 0.4859068989753723,
      "learning_rate": 8.752230328288314e-06,
      "loss": 0.4299,
      "step": 141
    },
    {
      "epoch": 0.27190043082814747,
      "grad_norm": 0.4237840175628662,
      "learning_rate": 8.731223971646261e-06,
      "loss": 0.3507,
      "step": 142
    },
    {
      "epoch": 0.27381522259454283,
      "grad_norm": 0.40383824706077576,
      "learning_rate": 8.710067927254555e-06,
      "loss": 0.344,
      "step": 143
    },
    {
      "epoch": 0.27573001436093825,
      "grad_norm": 0.40671247243881226,
      "learning_rate": 8.688763043842916e-06,
      "loss": 0.361,
      "step": 144
    },
    {
      "epoch": 0.27764480612733367,
      "grad_norm": 0.4422585666179657,
      "learning_rate": 8.66731017611213e-06,
      "loss": 0.3884,
      "step": 145
    },
    {
      "epoch": 0.2795595978937291,
      "grad_norm": 0.4300481081008911,
      "learning_rate": 8.645710184699756e-06,
      "loss": 0.3804,
      "step": 146
    },
    {
      "epoch": 0.28147438966012445,
      "grad_norm": 0.43995732069015503,
      "learning_rate": 8.6239639361456e-06,
      "loss": 0.3715,
      "step": 147
    },
    {
      "epoch": 0.28338918142651986,
      "grad_norm": 0.39543983340263367,
      "learning_rate": 8.602072302856961e-06,
      "loss": 0.3664,
      "step": 148
    },
    {
      "epoch": 0.2853039731929153,
      "grad_norm": 0.4740181863307953,
      "learning_rate": 8.580036163073615e-06,
      "loss": 0.3998,
      "step": 149
    },
    {
      "epoch": 0.2872187649593107,
      "grad_norm": 0.4195634126663208,
      "learning_rate": 8.5578564008326e-06,
      "loss": 0.3514,
      "step": 150
    },
    {
      "epoch": 0.28913355672570606,
      "grad_norm": 0.4382091164588928,
      "learning_rate": 8.535533905932739e-06,
      "loss": 0.3645,
      "step": 151
    },
    {
      "epoch": 0.2910483484921015,
      "grad_norm": 0.47153332829475403,
      "learning_rate": 8.513069573898944e-06,
      "loss": 0.3709,
      "step": 152
    },
    {
      "epoch": 0.2929631402584969,
      "grad_norm": 0.428423672914505,
      "learning_rate": 8.490464305946296e-06,
      "loss": 0.377,
      "step": 153
    },
    {
      "epoch": 0.2948779320248923,
      "grad_norm": 0.4477589428424835,
      "learning_rate": 8.467719008943886e-06,
      "loss": 0.3696,
      "step": 154
    },
    {
      "epoch": 0.2967927237912877,
      "grad_norm": 0.44177618622779846,
      "learning_rate": 8.444834595378434e-06,
      "loss": 0.3793,
      "step": 155
    },
    {
      "epoch": 0.2987075155576831,
      "grad_norm": 0.40658193826675415,
      "learning_rate": 8.421811983317682e-06,
      "loss": 0.3564,
      "step": 156
    },
    {
      "epoch": 0.3006223073240785,
      "grad_norm": 0.40630021691322327,
      "learning_rate": 8.398652096373566e-06,
      "loss": 0.3743,
      "step": 157
    },
    {
      "epoch": 0.30253709909047394,
      "grad_norm": 0.419085294008255,
      "learning_rate": 8.375355863665155e-06,
      "loss": 0.3609,
      "step": 158
    },
    {
      "epoch": 0.3044518908568693,
      "grad_norm": 0.4741552174091339,
      "learning_rate": 8.351924219781393e-06,
      "loss": 0.4016,
      "step": 159
    },
    {
      "epoch": 0.3063666826232647,
      "grad_norm": 0.445846825838089,
      "learning_rate": 8.328358104743588e-06,
      "loss": 0.3714,
      "step": 160
    },
    {
      "epoch": 0.30828147438966014,
      "grad_norm": 0.4190356135368347,
      "learning_rate": 8.304658463967705e-06,
      "loss": 0.3718,
      "step": 161
    },
    {
      "epoch": 0.31019626615605556,
      "grad_norm": 0.4018896222114563,
      "learning_rate": 8.28082624822645e-06,
      "loss": 0.3352,
      "step": 162
    },
    {
      "epoch": 0.3121110579224509,
      "grad_norm": 0.41169753670692444,
      "learning_rate": 8.256862413611113e-06,
      "loss": 0.3439,
      "step": 163
    },
    {
      "epoch": 0.31402584968884634,
      "grad_norm": 0.4585376977920532,
      "learning_rate": 8.232767921493216e-06,
      "loss": 0.3901,
      "step": 164
    },
    {
      "epoch": 0.31594064145524176,
      "grad_norm": 0.4590586721897125,
      "learning_rate": 8.20854373848595e-06,
      "loss": 0.4042,
      "step": 165
    },
    {
      "epoch": 0.3178554332216372,
      "grad_norm": 0.42637982964515686,
      "learning_rate": 8.184190836405394e-06,
      "loss": 0.3295,
      "step": 166
    },
    {
      "epoch": 0.31977022498803254,
      "grad_norm": 0.45910605788230896,
      "learning_rate": 8.15971019223152e-06,
      "loss": 0.3759,
      "step": 167
    },
    {
      "epoch": 0.32168501675442795,
      "grad_norm": 0.3774631917476654,
      "learning_rate": 8.135102788069015e-06,
      "loss": 0.3142,
      "step": 168
    },
    {
      "epoch": 0.3235998085208234,
      "grad_norm": 0.3829216957092285,
      "learning_rate": 8.110369611107869e-06,
      "loss": 0.3288,
      "step": 169
    },
    {
      "epoch": 0.3255146002872188,
      "grad_norm": 0.42439737915992737,
      "learning_rate": 8.085511653583772e-06,
      "loss": 0.3452,
      "step": 170
    },
    {
      "epoch": 0.32742939205361415,
      "grad_norm": 0.4537997543811798,
      "learning_rate": 8.060529912738316e-06,
      "loss": 0.4024,
      "step": 171
    },
    {
      "epoch": 0.3293441838200096,
      "grad_norm": 0.5059738755226135,
      "learning_rate": 8.035425390778975e-06,
      "loss": 0.4093,
      "step": 172
    },
    {
      "epoch": 0.331258975586405,
      "grad_norm": 0.41058847308158875,
      "learning_rate": 8.010199094838915e-06,
      "loss": 0.3704,
      "step": 173
    },
    {
      "epoch": 0.3331737673528004,
      "grad_norm": 0.43019795417785645,
      "learning_rate": 7.984852036936578e-06,
      "loss": 0.3463,
      "step": 174
    },
    {
      "epoch": 0.3350885591191958,
      "grad_norm": 0.4632018506526947,
      "learning_rate": 7.959385233935087e-06,
      "loss": 0.4076,
      "step": 175
    },
    {
      "epoch": 0.3370033508855912,
      "grad_norm": 0.363118976354599,
      "learning_rate": 7.933799707501448e-06,
      "loss": 0.3089,
      "step": 176
    },
    {
      "epoch": 0.3389181426519866,
      "grad_norm": 0.414247065782547,
      "learning_rate": 7.908096484065569e-06,
      "loss": 0.3691,
      "step": 177
    },
    {
      "epoch": 0.340832934418382,
      "grad_norm": 0.4680274724960327,
      "learning_rate": 7.88227659477908e-06,
      "loss": 0.3742,
      "step": 178
    },
    {
      "epoch": 0.3427477261847774,
      "grad_norm": 0.4838680624961853,
      "learning_rate": 7.856341075473963e-06,
      "loss": 0.3509,
      "step": 179
    },
    {
      "epoch": 0.3446625179511728,
      "grad_norm": 0.39406099915504456,
      "learning_rate": 7.830290966620997e-06,
      "loss": 0.327,
      "step": 180
    },
    {
      "epoch": 0.3465773097175682,
      "grad_norm": 0.4498644471168518,
      "learning_rate": 7.804127313288023e-06,
      "loss": 0.3665,
      "step": 181
    },
    {
      "epoch": 0.34849210148396365,
      "grad_norm": 0.42900529503822327,
      "learning_rate": 7.777851165098012e-06,
      "loss": 0.3779,
      "step": 182
    },
    {
      "epoch": 0.350406893250359,
      "grad_norm": 0.3877162039279938,
      "learning_rate": 7.751463576186957e-06,
      "loss": 0.3394,
      "step": 183
    },
    {
      "epoch": 0.3523216850167544,
      "grad_norm": 0.6071410179138184,
      "learning_rate": 7.72496560516159e-06,
      "loss": 0.3813,
      "step": 184
    },
    {
      "epoch": 0.35423647678314985,
      "grad_norm": 0.5272730588912964,
      "learning_rate": 7.6983583150569e-06,
      "loss": 0.3817,
      "step": 185
    },
    {
      "epoch": 0.35615126854954526,
      "grad_norm": 0.4268261194229126,
      "learning_rate": 7.671642773293506e-06,
      "loss": 0.3415,
      "step": 186
    },
    {
      "epoch": 0.3580660603159406,
      "grad_norm": 0.4886492192745209,
      "learning_rate": 7.644820051634813e-06,
      "loss": 0.3846,
      "step": 187
    },
    {
      "epoch": 0.35998085208233604,
      "grad_norm": 0.4869603216648102,
      "learning_rate": 7.617891226144034e-06,
      "loss": 0.4274,
      "step": 188
    },
    {
      "epoch": 0.36189564384873146,
      "grad_norm": 0.42415544390678406,
      "learning_rate": 7.59085737714101e-06,
      "loss": 0.3438,
      "step": 189
    },
    {
      "epoch": 0.3638104356151269,
      "grad_norm": 0.5172827243804932,
      "learning_rate": 7.563719589158874e-06,
      "loss": 0.3836,
      "step": 190
    },
    {
      "epoch": 0.36572522738152224,
      "grad_norm": 0.4153439998626709,
      "learning_rate": 7.536478950900537e-06,
      "loss": 0.3636,
      "step": 191
    },
    {
      "epoch": 0.36764001914791766,
      "grad_norm": 0.42014452815055847,
      "learning_rate": 7.509136555195025e-06,
      "loss": 0.3501,
      "step": 192
    },
    {
      "epoch": 0.3695548109143131,
      "grad_norm": 0.4194999933242798,
      "learning_rate": 7.481693498953621e-06,
      "loss": 0.3745,
      "step": 193
    },
    {
      "epoch": 0.3714696026807085,
      "grad_norm": 0.4361554980278015,
      "learning_rate": 7.4541508831258695e-06,
      "loss": 0.3469,
      "step": 194
    },
    {
      "epoch": 0.37338439444710386,
      "grad_norm": 0.3900616765022278,
      "learning_rate": 7.4265098126554065e-06,
      "loss": 0.3354,
      "step": 195
    },
    {
      "epoch": 0.3752991862134993,
      "grad_norm": 0.47491058707237244,
      "learning_rate": 7.3987713964356335e-06,
      "loss": 0.4077,
      "step": 196
    },
    {
      "epoch": 0.3772139779798947,
      "grad_norm": 0.5020147562026978,
      "learning_rate": 7.370936747265226e-06,
      "loss": 0.4198,
      "step": 197
    },
    {
      "epoch": 0.3791287697462901,
      "grad_norm": 0.4094443619251251,
      "learning_rate": 7.3430069818035e-06,
      "loss": 0.3482,
      "step": 198
    },
    {
      "epoch": 0.3810435615126855,
      "grad_norm": 0.4021066427230835,
      "learning_rate": 7.314983220525604e-06,
      "loss": 0.3561,
      "step": 199
    },
    {
      "epoch": 0.3829583532790809,
      "grad_norm": 0.3932279348373413,
      "learning_rate": 7.286866587677576e-06,
      "loss": 0.3414,
      "step": 200
    },
    {
      "epoch": 0.3848731450454763,
      "grad_norm": 0.466545045375824,
      "learning_rate": 7.2586582112312355e-06,
      "loss": 0.3868,
      "step": 201
    },
    {
      "epoch": 0.38678793681187174,
      "grad_norm": 0.4097374677658081,
      "learning_rate": 7.230359222838939e-06,
      "loss": 0.3468,
      "step": 202
    },
    {
      "epoch": 0.3887027285782671,
      "grad_norm": 0.4792751967906952,
      "learning_rate": 7.201970757788172e-06,
      "loss": 0.4131,
      "step": 203
    },
    {
      "epoch": 0.3906175203446625,
      "grad_norm": 0.40435218811035156,
      "learning_rate": 7.173493954956012e-06,
      "loss": 0.3456,
      "step": 204
    },
    {
      "epoch": 0.39253231211105793,
      "grad_norm": 0.45885634422302246,
      "learning_rate": 7.144929956763438e-06,
      "loss": 0.3672,
      "step": 205
    },
    {
      "epoch": 0.39444710387745335,
      "grad_norm": 0.5055181384086609,
      "learning_rate": 7.116279909129492e-06,
      "loss": 0.4122,
      "step": 206
    },
    {
      "epoch": 0.3963618956438487,
      "grad_norm": 0.42308080196380615,
      "learning_rate": 7.087544961425317e-06,
      "loss": 0.3621,
      "step": 207
    },
    {
      "epoch": 0.39827668741024413,
      "grad_norm": 0.41654130816459656,
      "learning_rate": 7.058726266428042e-06,
      "loss": 0.3623,
      "step": 208
    },
    {
      "epoch": 0.40019147917663955,
      "grad_norm": 0.3884025812149048,
      "learning_rate": 7.029824980274536e-06,
      "loss": 0.3302,
      "step": 209
    },
    {
      "epoch": 0.40210627094303497,
      "grad_norm": 0.43903034925460815,
      "learning_rate": 7.0008422624150285e-06,
      "loss": 0.3728,
      "step": 210
    }
  ],
  "logging_steps": 1,
  "max_steps": 522,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 105,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.1029906388628275e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}