|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.1577182486758373, |
|
"eval_steps": 500, |
|
"global_step": 226500, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.004763174941889266, |
|
"grad_norm": 23.845611572265625, |
|
"learning_rate": 4.976184125290554e-05, |
|
"loss": 1.6947, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.009526349883778531, |
|
"grad_norm": 11.257529258728027, |
|
"learning_rate": 4.952368250581108e-05, |
|
"loss": 1.2773, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.014289524825667798, |
|
"grad_norm": 19.1054630279541, |
|
"learning_rate": 4.9285523758716614e-05, |
|
"loss": 1.1464, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.019052699767557062, |
|
"grad_norm": 8.174609184265137, |
|
"learning_rate": 4.904736501162215e-05, |
|
"loss": 1.0952, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.02381587470944633, |
|
"grad_norm": 22.265609741210938, |
|
"learning_rate": 4.880920626452769e-05, |
|
"loss": 1.0419, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.028579049651335595, |
|
"grad_norm": 3.621635913848877, |
|
"learning_rate": 4.857104751743322e-05, |
|
"loss": 0.9673, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.03334222459322486, |
|
"grad_norm": 9.05103874206543, |
|
"learning_rate": 4.8332888770338755e-05, |
|
"loss": 0.9227, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.038105399535114125, |
|
"grad_norm": 6.0216474533081055, |
|
"learning_rate": 4.809473002324429e-05, |
|
"loss": 0.917, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.042868574477003395, |
|
"grad_norm": 12.011100769042969, |
|
"learning_rate": 4.785657127614983e-05, |
|
"loss": 0.8644, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.04763174941889266, |
|
"grad_norm": 10.062934875488281, |
|
"learning_rate": 4.761841252905537e-05, |
|
"loss": 0.8573, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.05239492436078192, |
|
"grad_norm": 4.252622127532959, |
|
"learning_rate": 4.7380253781960904e-05, |
|
"loss": 0.8198, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.05715809930267119, |
|
"grad_norm": 11.673833847045898, |
|
"learning_rate": 4.714209503486644e-05, |
|
"loss": 0.7742, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.061921274244560454, |
|
"grad_norm": 8.625577926635742, |
|
"learning_rate": 4.690393628777198e-05, |
|
"loss": 0.7513, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.06668444918644972, |
|
"grad_norm": 11.9224853515625, |
|
"learning_rate": 4.666577754067752e-05, |
|
"loss": 0.7376, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.07144762412833898, |
|
"grad_norm": 3.3534774780273438, |
|
"learning_rate": 4.642761879358305e-05, |
|
"loss": 0.733, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.07621079907022825, |
|
"grad_norm": 6.773929595947266, |
|
"learning_rate": 4.618946004648859e-05, |
|
"loss": 0.6743, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.08097397401211752, |
|
"grad_norm": 12.177743911743164, |
|
"learning_rate": 4.595130129939413e-05, |
|
"loss": 0.6837, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.08573714895400679, |
|
"grad_norm": 10.937847137451172, |
|
"learning_rate": 4.5713142552299664e-05, |
|
"loss": 0.6745, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.09050032389589605, |
|
"grad_norm": 9.08661937713623, |
|
"learning_rate": 4.54749838052052e-05, |
|
"loss": 0.6366, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.09526349883778532, |
|
"grad_norm": 14.092183113098145, |
|
"learning_rate": 4.523682505811074e-05, |
|
"loss": 0.6218, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.10002667377967459, |
|
"grad_norm": 9.430176734924316, |
|
"learning_rate": 4.4998666311016275e-05, |
|
"loss": 0.6286, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.10478984872156384, |
|
"grad_norm": 6.812849998474121, |
|
"learning_rate": 4.476050756392181e-05, |
|
"loss": 0.6121, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.10955302366345311, |
|
"grad_norm": 6.120726108551025, |
|
"learning_rate": 4.452234881682735e-05, |
|
"loss": 0.581, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.11431619860534238, |
|
"grad_norm": 13.171046257019043, |
|
"learning_rate": 4.4284190069732887e-05, |
|
"loss": 0.5797, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.11907937354723164, |
|
"grad_norm": 9.89405632019043, |
|
"learning_rate": 4.404603132263842e-05, |
|
"loss": 0.5461, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.12384254848912091, |
|
"grad_norm": 6.620090484619141, |
|
"learning_rate": 4.3807872575543954e-05, |
|
"loss": 0.5583, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.12860572343101018, |
|
"grad_norm": 17.443872451782227, |
|
"learning_rate": 4.356971382844949e-05, |
|
"loss": 0.562, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.13336889837289945, |
|
"grad_norm": 4.9741997718811035, |
|
"learning_rate": 4.333155508135503e-05, |
|
"loss": 0.5448, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 0.13813207331478872, |
|
"grad_norm": 13.39028549194336, |
|
"learning_rate": 4.3093396334260565e-05, |
|
"loss": 0.5166, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 0.14289524825667796, |
|
"grad_norm": 7.8550333976745605, |
|
"learning_rate": 4.28552375871661e-05, |
|
"loss": 0.5071, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.14765842319856723, |
|
"grad_norm": 13.109572410583496, |
|
"learning_rate": 4.261707884007164e-05, |
|
"loss": 0.489, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.1524215981404565, |
|
"grad_norm": 8.581804275512695, |
|
"learning_rate": 4.237892009297718e-05, |
|
"loss": 0.4992, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.15718477308234577, |
|
"grad_norm": 0.8759533166885376, |
|
"learning_rate": 4.2140761345882714e-05, |
|
"loss": 0.5041, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 0.16194794802423504, |
|
"grad_norm": 4.165910243988037, |
|
"learning_rate": 4.190260259878825e-05, |
|
"loss": 0.4923, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 0.1667111229661243, |
|
"grad_norm": 4.400519847869873, |
|
"learning_rate": 4.166444385169379e-05, |
|
"loss": 0.4895, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 0.17147429790801358, |
|
"grad_norm": 3.9758849143981934, |
|
"learning_rate": 4.142628510459932e-05, |
|
"loss": 0.4845, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.17623747284990282, |
|
"grad_norm": 11.77698802947998, |
|
"learning_rate": 4.1188126357504856e-05, |
|
"loss": 0.4523, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 0.1810006477917921, |
|
"grad_norm": 8.056093215942383, |
|
"learning_rate": 4.094996761041039e-05, |
|
"loss": 0.4815, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 0.18576382273368136, |
|
"grad_norm": 3.4172158241271973, |
|
"learning_rate": 4.071180886331593e-05, |
|
"loss": 0.478, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 0.19052699767557063, |
|
"grad_norm": 4.890318393707275, |
|
"learning_rate": 4.0473650116221474e-05, |
|
"loss": 0.4492, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.1952901726174599, |
|
"grad_norm": 9.595198631286621, |
|
"learning_rate": 4.023549136912701e-05, |
|
"loss": 0.4507, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 0.20005334755934917, |
|
"grad_norm": 8.482044219970703, |
|
"learning_rate": 3.999733262203255e-05, |
|
"loss": 0.4436, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.2048165225012384, |
|
"grad_norm": 5.411190509796143, |
|
"learning_rate": 3.9759173874938085e-05, |
|
"loss": 0.4406, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 0.20957969744312768, |
|
"grad_norm": 13.22948932647705, |
|
"learning_rate": 3.952101512784362e-05, |
|
"loss": 0.4222, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 0.21434287238501695, |
|
"grad_norm": 11.69010066986084, |
|
"learning_rate": 3.928285638074915e-05, |
|
"loss": 0.4119, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 0.21910604732690622, |
|
"grad_norm": 11.298418998718262, |
|
"learning_rate": 3.904469763365469e-05, |
|
"loss": 0.4058, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 0.2238692222687955, |
|
"grad_norm": 8.810882568359375, |
|
"learning_rate": 3.880653888656023e-05, |
|
"loss": 0.4201, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 0.22863239721068476, |
|
"grad_norm": 15.669658660888672, |
|
"learning_rate": 3.8568380139465764e-05, |
|
"loss": 0.4098, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.23339557215257403, |
|
"grad_norm": 3.749591112136841, |
|
"learning_rate": 3.83302213923713e-05, |
|
"loss": 0.3972, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 0.23815874709446327, |
|
"grad_norm": 8.824658393859863, |
|
"learning_rate": 3.809206264527684e-05, |
|
"loss": 0.3722, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 0.24292192203635254, |
|
"grad_norm": 13.041702270507812, |
|
"learning_rate": 3.7853903898182375e-05, |
|
"loss": 0.4254, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 0.24768509697824181, |
|
"grad_norm": 15.908291816711426, |
|
"learning_rate": 3.761574515108791e-05, |
|
"loss": 0.3736, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 0.2524482719201311, |
|
"grad_norm": 10.208578109741211, |
|
"learning_rate": 3.737758640399345e-05, |
|
"loss": 0.412, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 0.25721144686202035, |
|
"grad_norm": 5.080774784088135, |
|
"learning_rate": 3.713942765689899e-05, |
|
"loss": 0.3799, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 0.2619746218039096, |
|
"grad_norm": 8.108014106750488, |
|
"learning_rate": 3.6901268909804524e-05, |
|
"loss": 0.3557, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 0.2667377967457989, |
|
"grad_norm": 13.029576301574707, |
|
"learning_rate": 3.6663110162710054e-05, |
|
"loss": 0.3785, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 0.27150097168768816, |
|
"grad_norm": 9.865415573120117, |
|
"learning_rate": 3.642495141561559e-05, |
|
"loss": 0.4013, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 0.27626414662957743, |
|
"grad_norm": 4.568774700164795, |
|
"learning_rate": 3.618679266852113e-05, |
|
"loss": 0.3663, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 0.2810273215714667, |
|
"grad_norm": 3.7032947540283203, |
|
"learning_rate": 3.5948633921426665e-05, |
|
"loss": 0.3728, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 0.2857904965133559, |
|
"grad_norm": 5.560993194580078, |
|
"learning_rate": 3.57104751743322e-05, |
|
"loss": 0.364, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 0.2905536714552452, |
|
"grad_norm": 12.633008003234863, |
|
"learning_rate": 3.547231642723774e-05, |
|
"loss": 0.3738, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 0.29531684639713446, |
|
"grad_norm": 16.694622039794922, |
|
"learning_rate": 3.523415768014328e-05, |
|
"loss": 0.363, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 0.30008002133902373, |
|
"grad_norm": 6.716195583343506, |
|
"learning_rate": 3.4995998933048814e-05, |
|
"loss": 0.3547, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 0.304843196280913, |
|
"grad_norm": 5.371184349060059, |
|
"learning_rate": 3.475784018595435e-05, |
|
"loss": 0.3392, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.30960637122280227, |
|
"grad_norm": 15.001012802124023, |
|
"learning_rate": 3.451968143885989e-05, |
|
"loss": 0.3419, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 0.31436954616469154, |
|
"grad_norm": 14.817428588867188, |
|
"learning_rate": 3.4281522691765425e-05, |
|
"loss": 0.3454, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 0.3191327211065808, |
|
"grad_norm": 8.50633716583252, |
|
"learning_rate": 3.404336394467096e-05, |
|
"loss": 0.3322, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 0.3238958960484701, |
|
"grad_norm": 10.963394165039062, |
|
"learning_rate": 3.38052051975765e-05, |
|
"loss": 0.3507, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 0.32865907099035935, |
|
"grad_norm": 6.286830902099609, |
|
"learning_rate": 3.356704645048204e-05, |
|
"loss": 0.3662, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 0.3334222459322486, |
|
"grad_norm": 7.931427955627441, |
|
"learning_rate": 3.3328887703387574e-05, |
|
"loss": 0.3223, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 0.3381854208741379, |
|
"grad_norm": 0.015142062678933144, |
|
"learning_rate": 3.309072895629311e-05, |
|
"loss": 0.3492, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 0.34294859581602716, |
|
"grad_norm": 6.33823823928833, |
|
"learning_rate": 3.285257020919865e-05, |
|
"loss": 0.3443, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 0.34771177075791637, |
|
"grad_norm": 14.21090030670166, |
|
"learning_rate": 3.2614411462104185e-05, |
|
"loss": 0.3208, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 0.35247494569980564, |
|
"grad_norm": 6.757291793823242, |
|
"learning_rate": 3.237625271500972e-05, |
|
"loss": 0.3283, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 0.3572381206416949, |
|
"grad_norm": 25.391630172729492, |
|
"learning_rate": 3.213809396791525e-05, |
|
"loss": 0.3207, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 0.3620012955835842, |
|
"grad_norm": 4.002015113830566, |
|
"learning_rate": 3.189993522082079e-05, |
|
"loss": 0.3001, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 0.36676447052547345, |
|
"grad_norm": 8.16422176361084, |
|
"learning_rate": 3.166177647372633e-05, |
|
"loss": 0.2973, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 0.3715276454673627, |
|
"grad_norm": 4.861073017120361, |
|
"learning_rate": 3.1423617726631864e-05, |
|
"loss": 0.3104, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 0.376290820409252, |
|
"grad_norm": 9.573932647705078, |
|
"learning_rate": 3.11854589795374e-05, |
|
"loss": 0.2984, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 0.38105399535114126, |
|
"grad_norm": 10.641077041625977, |
|
"learning_rate": 3.094730023244294e-05, |
|
"loss": 0.314, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.38581717029303053, |
|
"grad_norm": 19.601299285888672, |
|
"learning_rate": 3.0709141485348475e-05, |
|
"loss": 0.292, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 0.3905803452349198, |
|
"grad_norm": 1.557869553565979, |
|
"learning_rate": 3.0470982738254012e-05, |
|
"loss": 0.3244, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 0.39534352017680907, |
|
"grad_norm": 9.880559921264648, |
|
"learning_rate": 3.0232823991159546e-05, |
|
"loss": 0.3053, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 0.40010669511869834, |
|
"grad_norm": 16.77007293701172, |
|
"learning_rate": 2.9994665244065083e-05, |
|
"loss": 0.2935, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 0.4048698700605876, |
|
"grad_norm": 15.44863510131836, |
|
"learning_rate": 2.975650649697062e-05, |
|
"loss": 0.2885, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 0.4096330450024768, |
|
"grad_norm": 2.127357006072998, |
|
"learning_rate": 2.9518347749876158e-05, |
|
"loss": 0.2896, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 0.4143962199443661, |
|
"grad_norm": 4.906562328338623, |
|
"learning_rate": 2.9280189002781695e-05, |
|
"loss": 0.2961, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 0.41915939488625537, |
|
"grad_norm": 5.996885299682617, |
|
"learning_rate": 2.9042030255687232e-05, |
|
"loss": 0.283, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 0.42392256982814464, |
|
"grad_norm": 4.349148750305176, |
|
"learning_rate": 2.8803871508592766e-05, |
|
"loss": 0.285, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 0.4286857447700339, |
|
"grad_norm": 11.325324058532715, |
|
"learning_rate": 2.8565712761498303e-05, |
|
"loss": 0.2991, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 0.4334489197119232, |
|
"grad_norm": 29.92687225341797, |
|
"learning_rate": 2.832755401440384e-05, |
|
"loss": 0.2758, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 0.43821209465381245, |
|
"grad_norm": 6.005437850952148, |
|
"learning_rate": 2.8089395267309377e-05, |
|
"loss": 0.285, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 0.4429752695957017, |
|
"grad_norm": 7.799108505249023, |
|
"learning_rate": 2.7851236520214917e-05, |
|
"loss": 0.2851, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 0.447738444537591, |
|
"grad_norm": 7.533286094665527, |
|
"learning_rate": 2.7613077773120455e-05, |
|
"loss": 0.2713, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 0.45250161947948025, |
|
"grad_norm": 16.269254684448242, |
|
"learning_rate": 2.737491902602599e-05, |
|
"loss": 0.2954, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 0.4572647944213695, |
|
"grad_norm": 12.095314025878906, |
|
"learning_rate": 2.713676027893153e-05, |
|
"loss": 0.2733, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 0.4620279693632588, |
|
"grad_norm": 9.375202178955078, |
|
"learning_rate": 2.6898601531837066e-05, |
|
"loss": 0.266, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 0.46679114430514806, |
|
"grad_norm": 18.472373962402344, |
|
"learning_rate": 2.66604427847426e-05, |
|
"loss": 0.2616, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 0.4715543192470373, |
|
"grad_norm": 10.298583984375, |
|
"learning_rate": 2.6422284037648137e-05, |
|
"loss": 0.273, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 0.47631749418892655, |
|
"grad_norm": 5.778076648712158, |
|
"learning_rate": 2.6184125290553674e-05, |
|
"loss": 0.2754, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 0.4810806691308158, |
|
"grad_norm": 3.6435136795043945, |
|
"learning_rate": 2.594596654345921e-05, |
|
"loss": 0.2664, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 0.4858438440727051, |
|
"grad_norm": 21.803163528442383, |
|
"learning_rate": 2.5707807796364748e-05, |
|
"loss": 0.2621, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 0.49060701901459436, |
|
"grad_norm": 11.081718444824219, |
|
"learning_rate": 2.5469649049270282e-05, |
|
"loss": 0.2806, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 0.49537019395648363, |
|
"grad_norm": 3.4980528354644775, |
|
"learning_rate": 2.523149030217582e-05, |
|
"loss": 0.278, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 0.5001333688983729, |
|
"grad_norm": 7.148010730743408, |
|
"learning_rate": 2.4993331555081356e-05, |
|
"loss": 0.2592, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 0.5048965438402622, |
|
"grad_norm": 3.838651657104492, |
|
"learning_rate": 2.4755172807986893e-05, |
|
"loss": 0.2636, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 0.5096597187821514, |
|
"grad_norm": 12.54211711883545, |
|
"learning_rate": 2.451701406089243e-05, |
|
"loss": 0.2483, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 0.5144228937240407, |
|
"grad_norm": 17.774137496948242, |
|
"learning_rate": 2.4278855313797964e-05, |
|
"loss": 0.2442, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 0.51918606866593, |
|
"grad_norm": 10.924212455749512, |
|
"learning_rate": 2.40406965667035e-05, |
|
"loss": 0.2367, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 0.5239492436078192, |
|
"grad_norm": 0.054358381778001785, |
|
"learning_rate": 2.380253781960904e-05, |
|
"loss": 0.2239, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 0.5287124185497085, |
|
"grad_norm": 1.8560116291046143, |
|
"learning_rate": 2.3564379072514575e-05, |
|
"loss": 0.2621, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 0.5334755934915978, |
|
"grad_norm": 0.23067010939121246, |
|
"learning_rate": 2.3326220325420116e-05, |
|
"loss": 0.245, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 0.5382387684334871, |
|
"grad_norm": 9.185586929321289, |
|
"learning_rate": 2.308806157832565e-05, |
|
"loss": 0.2578, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 0.5430019433753763, |
|
"grad_norm": 5.946952819824219, |
|
"learning_rate": 2.2849902831231187e-05, |
|
"loss": 0.2053, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 0.5477651183172656, |
|
"grad_norm": 21.640161514282227, |
|
"learning_rate": 2.2611744084136724e-05, |
|
"loss": 0.2419, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 0.5525282932591549, |
|
"grad_norm": 4.966391563415527, |
|
"learning_rate": 2.237358533704226e-05, |
|
"loss": 0.2472, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 0.5572914682010441, |
|
"grad_norm": 6.430874347686768, |
|
"learning_rate": 2.2135426589947798e-05, |
|
"loss": 0.2176, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 0.5620546431429334, |
|
"grad_norm": 1.1396760940551758, |
|
"learning_rate": 2.1897267842853332e-05, |
|
"loss": 0.2297, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 0.5668178180848226, |
|
"grad_norm": 0.023226283490657806, |
|
"learning_rate": 2.165910909575887e-05, |
|
"loss": 0.2286, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 0.5715809930267118, |
|
"grad_norm": 7.5434770584106445, |
|
"learning_rate": 2.1420950348664406e-05, |
|
"loss": 0.2453, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 0.5763441679686011, |
|
"grad_norm": 20.513235092163086, |
|
"learning_rate": 2.1182791601569943e-05, |
|
"loss": 0.2237, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 0.5811073429104904, |
|
"grad_norm": 0.2174549102783203, |
|
"learning_rate": 2.094463285447548e-05, |
|
"loss": 0.2206, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 0.5858705178523796, |
|
"grad_norm": 5.196407794952393, |
|
"learning_rate": 2.0706474107381018e-05, |
|
"loss": 0.2249, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 0.5906336927942689, |
|
"grad_norm": 6.904590129852295, |
|
"learning_rate": 2.046831536028655e-05, |
|
"loss": 0.2158, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 0.5953968677361582, |
|
"grad_norm": 4.865005970001221, |
|
"learning_rate": 2.0230156613192092e-05, |
|
"loss": 0.2369, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 0.6001600426780475, |
|
"grad_norm": 8.591822624206543, |
|
"learning_rate": 1.999199786609763e-05, |
|
"loss": 0.2186, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 0.6049232176199367, |
|
"grad_norm": 11.895712852478027, |
|
"learning_rate": 1.9753839119003166e-05, |
|
"loss": 0.2175, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 0.609686392561826, |
|
"grad_norm": 29.44073486328125, |
|
"learning_rate": 1.95156803719087e-05, |
|
"loss": 0.239, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.6144495675037153, |
|
"grad_norm": 0.14603924751281738, |
|
"learning_rate": 1.9277521624814237e-05, |
|
"loss": 0.2342, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 0.6192127424456045, |
|
"grad_norm": 8.721657752990723, |
|
"learning_rate": 1.9039362877719774e-05, |
|
"loss": 0.2072, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 0.6239759173874938, |
|
"grad_norm": 7.2579755783081055, |
|
"learning_rate": 1.880120413062531e-05, |
|
"loss": 0.2176, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 0.6287390923293831, |
|
"grad_norm": 10.267131805419922, |
|
"learning_rate": 1.8563045383530848e-05, |
|
"loss": 0.2118, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 0.6335022672712723, |
|
"grad_norm": 12.264985084533691, |
|
"learning_rate": 1.8324886636436382e-05, |
|
"loss": 0.2215, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 0.6382654422131616, |
|
"grad_norm": 15.631693840026855, |
|
"learning_rate": 1.808672788934192e-05, |
|
"loss": 0.2213, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 0.6430286171550509, |
|
"grad_norm": 9.132320404052734, |
|
"learning_rate": 1.7848569142247456e-05, |
|
"loss": 0.2007, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 0.6477917920969402, |
|
"grad_norm": 2.836796283721924, |
|
"learning_rate": 1.7610410395152993e-05, |
|
"loss": 0.2197, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 0.6525549670388294, |
|
"grad_norm": 1.20395028591156, |
|
"learning_rate": 1.737225164805853e-05, |
|
"loss": 0.2092, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 0.6573181419807187, |
|
"grad_norm": 4.42557954788208, |
|
"learning_rate": 1.7134092900964068e-05, |
|
"loss": 0.2228, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 0.662081316922608, |
|
"grad_norm": 7.136639595031738, |
|
"learning_rate": 1.6895934153869605e-05, |
|
"loss": 0.2159, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 0.6668444918644972, |
|
"grad_norm": 12.695110321044922, |
|
"learning_rate": 1.6657775406775142e-05, |
|
"loss": 0.1984, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 0.6716076668063865, |
|
"grad_norm": 11.841693878173828, |
|
"learning_rate": 1.641961665968068e-05, |
|
"loss": 0.1913, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 0.6763708417482758, |
|
"grad_norm": 0.7839029431343079, |
|
"learning_rate": 1.6181457912586216e-05, |
|
"loss": 0.1911, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 0.681134016690165, |
|
"grad_norm": 6.188957691192627, |
|
"learning_rate": 1.594329916549175e-05, |
|
"loss": 0.2164, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 0.6858971916320543, |
|
"grad_norm": 11.414396286010742, |
|
"learning_rate": 1.5705140418397287e-05, |
|
"loss": 0.2026, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 0.6906603665739435, |
|
"grad_norm": 21.34324836730957, |
|
"learning_rate": 1.5466981671302824e-05, |
|
"loss": 0.1877, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 0.6954235415158327, |
|
"grad_norm": 10.534087181091309, |
|
"learning_rate": 1.5228822924208361e-05, |
|
"loss": 0.1889, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 0.700186716457722, |
|
"grad_norm": 0.4472333788871765, |
|
"learning_rate": 1.4990664177113897e-05, |
|
"loss": 0.1902, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 0.7049498913996113, |
|
"grad_norm": 27.742639541625977, |
|
"learning_rate": 1.4752505430019434e-05, |
|
"loss": 0.1929, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 0.7097130663415006, |
|
"grad_norm": 12.095074653625488, |
|
"learning_rate": 1.451434668292497e-05, |
|
"loss": 0.1754, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 0.7144762412833898, |
|
"grad_norm": 10.153932571411133, |
|
"learning_rate": 1.4276187935830506e-05, |
|
"loss": 0.1866, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 0.7192394162252791, |
|
"grad_norm": 9.065908432006836, |
|
"learning_rate": 1.4038029188736043e-05, |
|
"loss": 0.2004, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 0.7240025911671684, |
|
"grad_norm": 1.7107542753219604, |
|
"learning_rate": 1.3799870441641582e-05, |
|
"loss": 0.175, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 0.7287657661090576, |
|
"grad_norm": 1.4907644987106323, |
|
"learning_rate": 1.356171169454712e-05, |
|
"loss": 0.1865, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 0.7335289410509469, |
|
"grad_norm": 0.11667291074991226, |
|
"learning_rate": 1.3323552947452655e-05, |
|
"loss": 0.1694, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 0.7382921159928362, |
|
"grad_norm": 29.062976837158203, |
|
"learning_rate": 1.3085394200358192e-05, |
|
"loss": 0.1821, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 0.7430552909347254, |
|
"grad_norm": 7.0561113357543945, |
|
"learning_rate": 1.2847235453263729e-05, |
|
"loss": 0.1692, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 0.7478184658766147, |
|
"grad_norm": 7.916496753692627, |
|
"learning_rate": 1.2609076706169264e-05, |
|
"loss": 0.1711, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 0.752581640818504, |
|
"grad_norm": 0.009064608253538609, |
|
"learning_rate": 1.2370917959074802e-05, |
|
"loss": 0.1795, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 0.7573448157603933, |
|
"grad_norm": 16.94624900817871, |
|
"learning_rate": 1.2132759211980339e-05, |
|
"loss": 0.1732, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 0.7621079907022825, |
|
"grad_norm": 24.09770965576172, |
|
"learning_rate": 1.1894600464885874e-05, |
|
"loss": 0.1599, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 0.7668711656441718, |
|
"grad_norm": 17.340219497680664, |
|
"learning_rate": 1.1656441717791411e-05, |
|
"loss": 0.1702, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 0.7716343405860611, |
|
"grad_norm": 0.015018216334283352, |
|
"learning_rate": 1.1418282970696948e-05, |
|
"loss": 0.1679, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 0.7763975155279503, |
|
"grad_norm": 16.005643844604492, |
|
"learning_rate": 1.1180124223602485e-05, |
|
"loss": 0.1832, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 0.7811606904698396, |
|
"grad_norm": 0.0013979446375742555, |
|
"learning_rate": 1.0941965476508023e-05, |
|
"loss": 0.1505, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 0.7859238654117289, |
|
"grad_norm": 1.049574375152588, |
|
"learning_rate": 1.0703806729413558e-05, |
|
"loss": 0.1663, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 0.7906870403536181, |
|
"grad_norm": 5.492803573608398, |
|
"learning_rate": 1.0465647982319095e-05, |
|
"loss": 0.1561, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 0.7954502152955074, |
|
"grad_norm": 14.530741691589355, |
|
"learning_rate": 1.022748923522463e-05, |
|
"loss": 0.1876, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 0.8002133902373967, |
|
"grad_norm": 0.0616171695291996, |
|
"learning_rate": 9.98933048813017e-06, |
|
"loss": 0.1524, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 0.804976565179286, |
|
"grad_norm": 15.610015869140625, |
|
"learning_rate": 9.751171741035706e-06, |
|
"loss": 0.1588, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 0.8097397401211752, |
|
"grad_norm": 7.8352885246276855, |
|
"learning_rate": 9.513012993941242e-06, |
|
"loss": 0.1779, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 0.8145029150630644, |
|
"grad_norm": 21.532123565673828, |
|
"learning_rate": 9.274854246846779e-06, |
|
"loss": 0.1436, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 0.8192660900049537, |
|
"grad_norm": 0.025519462302327156, |
|
"learning_rate": 9.036695499752314e-06, |
|
"loss": 0.1472, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 0.8240292649468429, |
|
"grad_norm": 0.09057486802339554, |
|
"learning_rate": 8.798536752657852e-06, |
|
"loss": 0.1525, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 0.8287924398887322, |
|
"grad_norm": 0.9066371917724609, |
|
"learning_rate": 8.560378005563389e-06, |
|
"loss": 0.1614, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 0.8335556148306215, |
|
"grad_norm": 2.612293004989624, |
|
"learning_rate": 8.322219258468926e-06, |
|
"loss": 0.1492, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 0.8383187897725107, |
|
"grad_norm": 6.420555114746094, |
|
"learning_rate": 8.084060511374463e-06, |
|
"loss": 0.1518, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 0.8430819647144, |
|
"grad_norm": 0.04058153182268143, |
|
"learning_rate": 7.845901764279998e-06, |
|
"loss": 0.1581, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 0.8478451396562893, |
|
"grad_norm": 17.142908096313477, |
|
"learning_rate": 7.6077430171855355e-06, |
|
"loss": 0.166, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 0.8526083145981785, |
|
"grad_norm": 17.988386154174805, |
|
"learning_rate": 7.369584270091072e-06, |
|
"loss": 0.1613, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 0.8573714895400678, |
|
"grad_norm": 0.28535395860671997, |
|
"learning_rate": 7.131425522996608e-06, |
|
"loss": 0.1448, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 0.8621346644819571, |
|
"grad_norm": 0.02261945605278015, |
|
"learning_rate": 6.893266775902146e-06, |
|
"loss": 0.1397, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 0.8668978394238464, |
|
"grad_norm": 15.836788177490234, |
|
"learning_rate": 6.655108028807683e-06, |
|
"loss": 0.146, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 0.8716610143657356, |
|
"grad_norm": 0.6049064993858337, |
|
"learning_rate": 6.416949281713219e-06, |
|
"loss": 0.1405, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 0.8764241893076249, |
|
"grad_norm": 0.08131851255893707, |
|
"learning_rate": 6.178790534618756e-06, |
|
"loss": 0.1466, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 0.8811873642495142, |
|
"grad_norm": 16.24254608154297, |
|
"learning_rate": 5.940631787524292e-06, |
|
"loss": 0.1398, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 0.8859505391914034, |
|
"grad_norm": 8.061177504714578e-05, |
|
"learning_rate": 5.702473040429829e-06, |
|
"loss": 0.124, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 0.8907137141332927, |
|
"grad_norm": 2.3429534435272217, |
|
"learning_rate": 5.464314293335366e-06, |
|
"loss": 0.1448, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 0.895476889075182, |
|
"grad_norm": 21.22195053100586, |
|
"learning_rate": 5.2261555462409025e-06, |
|
"loss": 0.1304, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 0.9002400640170712, |
|
"grad_norm": 22.701601028442383, |
|
"learning_rate": 4.9879967991464396e-06, |
|
"loss": 0.1211, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 0.9050032389589605, |
|
"grad_norm": 15.280872344970703, |
|
"learning_rate": 4.749838052051976e-06, |
|
"loss": 0.1514, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 0.9097664139008498, |
|
"grad_norm": 0.01960950717329979, |
|
"learning_rate": 4.511679304957513e-06, |
|
"loss": 0.1115, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 0.914529588842739, |
|
"grad_norm": 0.0017501560505479574, |
|
"learning_rate": 4.273520557863049e-06, |
|
"loss": 0.1571, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 0.9192927637846283, |
|
"grad_norm": 0.05184149742126465, |
|
"learning_rate": 4.035361810768586e-06, |
|
"loss": 0.1499, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 0.9240559387265176, |
|
"grad_norm": 6.8865180015563965, |
|
"learning_rate": 3.7972030636741226e-06, |
|
"loss": 0.1272, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 0.9288191136684069, |
|
"grad_norm": 0.10639504343271255, |
|
"learning_rate": 3.5590443165796593e-06, |
|
"loss": 0.1263, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 0.9335822886102961, |
|
"grad_norm": 0.9005939960479736, |
|
"learning_rate": 3.3208855694851965e-06, |
|
"loss": 0.133, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 0.9383454635521854, |
|
"grad_norm": 1.1748387813568115, |
|
"learning_rate": 3.082726822390733e-06, |
|
"loss": 0.1325, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 0.9431086384940746, |
|
"grad_norm": 28.960805892944336, |
|
"learning_rate": 2.8445680752962694e-06, |
|
"loss": 0.1319, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 0.9478718134359638, |
|
"grad_norm": 8.118181228637695, |
|
"learning_rate": 2.606409328201806e-06, |
|
"loss": 0.1531, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 0.9526349883778531, |
|
"grad_norm": 0.004353045951575041, |
|
"learning_rate": 2.3682505811073433e-06, |
|
"loss": 0.1275, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 0.9573981633197424, |
|
"grad_norm": 2.1794090270996094, |
|
"learning_rate": 2.1300918340128795e-06, |
|
"loss": 0.1428, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 0.9621613382616316, |
|
"grad_norm": 8.74696159362793, |
|
"learning_rate": 1.8919330869184164e-06, |
|
"loss": 0.1135, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 0.9669245132035209, |
|
"grad_norm": 31.47783851623535, |
|
"learning_rate": 1.6537743398239533e-06, |
|
"loss": 0.1391, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 0.9716876881454102, |
|
"grad_norm": 1.8234331607818604, |
|
"learning_rate": 1.4156155927294898e-06, |
|
"loss": 0.12, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 0.9764508630872994, |
|
"grad_norm": 5.736881732940674, |
|
"learning_rate": 1.1774568456350265e-06, |
|
"loss": 0.1253, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 0.9812140380291887, |
|
"grad_norm": 0.919281005859375, |
|
"learning_rate": 9.392980985405633e-07, |
|
"loss": 0.1216, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 0.985977212971078, |
|
"grad_norm": 6.265872001647949, |
|
"learning_rate": 7.011393514460999e-07, |
|
"loss": 0.1245, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 0.9907403879129673, |
|
"grad_norm": 19.050594329833984, |
|
"learning_rate": 4.629806043516366e-07, |
|
"loss": 0.1298, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 0.9955035628548565, |
|
"grad_norm": 9.125242233276367, |
|
"learning_rate": 2.2482185725717335e-07, |
|
"loss": 0.1177, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 104972, |
|
"total_flos": 1.5105749507715635e+17, |
|
"train_loss": 0.05896880445613715, |
|
"train_runtime": 27901.1518, |
|
"train_samples_per_second": 30.098, |
|
"train_steps_per_second": 3.762 |
|
}, |
|
{ |
|
"epoch": 1.0002667377967458, |
|
"grad_norm": 14.302581787109375, |
|
"learning_rate": 2.2480186776155165e-07, |
|
"loss": 0.0893, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 1.005029912738635, |
|
"grad_norm": 2.5177512168884277, |
|
"learning_rate": 2.2444491248259286e-07, |
|
"loss": 0.1055, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 1.0097930876805243, |
|
"grad_norm": 2.1406190395355225, |
|
"learning_rate": 2.2408795720363407e-07, |
|
"loss": 0.1254, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 1.0145562626224136, |
|
"grad_norm": 1.1113516092300415, |
|
"learning_rate": 2.2373100192467528e-07, |
|
"loss": 0.0951, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 1.0193194375643029, |
|
"grad_norm": 35.0811653137207, |
|
"learning_rate": 2.233740466457165e-07, |
|
"loss": 0.11, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 1.0240826125061921, |
|
"grad_norm": 11.133783340454102, |
|
"learning_rate": 2.230170913667577e-07, |
|
"loss": 0.1132, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 1.0288457874480814, |
|
"grad_norm": 4.060661315917969, |
|
"learning_rate": 2.226601360877989e-07, |
|
"loss": 0.1202, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 1.0336089623899707, |
|
"grad_norm": 0.13336028158664703, |
|
"learning_rate": 2.2230318080884012e-07, |
|
"loss": 0.1139, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 1.03837213733186, |
|
"grad_norm": 1.6817339658737183, |
|
"learning_rate": 2.2194622552988133e-07, |
|
"loss": 0.1093, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 1.0431353122737492, |
|
"grad_norm": 7.226846694946289, |
|
"learning_rate": 2.215892702509225e-07, |
|
"loss": 0.1019, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 1.0478984872156385, |
|
"grad_norm": 20.337854385375977, |
|
"learning_rate": 2.2123231497196372e-07, |
|
"loss": 0.1159, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 1.0526616621575278, |
|
"grad_norm": 12.429189682006836, |
|
"learning_rate": 2.2087535969300493e-07, |
|
"loss": 0.1058, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 1.057424837099417, |
|
"grad_norm": 2.344127655029297, |
|
"learning_rate": 2.2051840441404614e-07, |
|
"loss": 0.1252, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 1.0621880120413063, |
|
"grad_norm": 3.0213356018066406, |
|
"learning_rate": 2.2016144913508735e-07, |
|
"loss": 0.1091, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 1.0669511869831956, |
|
"grad_norm": 8.44406509399414, |
|
"learning_rate": 2.198044938561286e-07, |
|
"loss": 0.1084, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 1.0717143619250848, |
|
"grad_norm": 5.177192211151123, |
|
"learning_rate": 2.194475385771698e-07, |
|
"loss": 0.111, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 1.0764775368669741, |
|
"grad_norm": 0.7378814816474915, |
|
"learning_rate": 2.19090583298211e-07, |
|
"loss": 0.1123, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 1.0812407118088634, |
|
"grad_norm": 11.683990478515625, |
|
"learning_rate": 2.1873362801925222e-07, |
|
"loss": 0.1049, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 1.0860038867507527, |
|
"grad_norm": 1.845595121383667, |
|
"learning_rate": 2.1837667274029343e-07, |
|
"loss": 0.1359, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 1.090767061692642, |
|
"grad_norm": 18.56471824645996, |
|
"learning_rate": 2.1801971746133464e-07, |
|
"loss": 0.1101, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 1.0955302366345312, |
|
"grad_norm": 2.1949081420898438, |
|
"learning_rate": 2.1766276218237585e-07, |
|
"loss": 0.1213, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 1.1002934115764205, |
|
"grad_norm": 8.824433326721191, |
|
"learning_rate": 2.1730580690341706e-07, |
|
"loss": 0.0972, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 1.1050565865183097, |
|
"grad_norm": 8.95617389678955, |
|
"learning_rate": 2.1694885162445827e-07, |
|
"loss": 0.1088, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 1.109819761460199, |
|
"grad_norm": 16.236719131469727, |
|
"learning_rate": 2.1659189634549948e-07, |
|
"loss": 0.126, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 1.1145829364020883, |
|
"grad_norm": 0.8696116805076599, |
|
"learning_rate": 2.1623494106654069e-07, |
|
"loss": 0.111, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 1.1193461113439775, |
|
"grad_norm": 22.61142349243164, |
|
"learning_rate": 2.158779857875819e-07, |
|
"loss": 0.1128, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 1.1241092862858668, |
|
"grad_norm": 9.516005516052246, |
|
"learning_rate": 2.155210305086231e-07, |
|
"loss": 0.1262, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 1.1288724612277559, |
|
"grad_norm": 2.8408925533294678, |
|
"learning_rate": 2.1516407522966432e-07, |
|
"loss": 0.1254, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 1.1336356361696454, |
|
"grad_norm": 0.08307074010372162, |
|
"learning_rate": 2.148071199507055e-07, |
|
"loss": 0.1128, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 1.1383988111115344, |
|
"grad_norm": 0.07671812921762466, |
|
"learning_rate": 2.144501646717467e-07, |
|
"loss": 0.1105, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 1.1431619860534237, |
|
"grad_norm": 23.30893325805664, |
|
"learning_rate": 2.1409320939278792e-07, |
|
"loss": 0.1016, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 1.147925160995313, |
|
"grad_norm": 2.4784510135650635, |
|
"learning_rate": 2.1373625411382913e-07, |
|
"loss": 0.121, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 1.1526883359372022, |
|
"grad_norm": 6.876108169555664, |
|
"learning_rate": 2.1337929883487034e-07, |
|
"loss": 0.1191, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 1.1574515108790915, |
|
"grad_norm": 0.040010467171669006, |
|
"learning_rate": 2.1302234355591155e-07, |
|
"loss": 0.107, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 1.1622146858209808, |
|
"grad_norm": 0.3573683202266693, |
|
"learning_rate": 2.1266538827695276e-07, |
|
"loss": 0.1226, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 1.16697786076287, |
|
"grad_norm": 5.653687477111816, |
|
"learning_rate": 2.1230843299799397e-07, |
|
"loss": 0.1171, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 1.1717410357047593, |
|
"grad_norm": 0.8669099807739258, |
|
"learning_rate": 2.1195147771903518e-07, |
|
"loss": 0.1194, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 1.1765042106466486, |
|
"grad_norm": 4.056451320648193, |
|
"learning_rate": 2.115945224400764e-07, |
|
"loss": 0.111, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 1.1812673855885378, |
|
"grad_norm": 20.773231506347656, |
|
"learning_rate": 2.112375671611176e-07, |
|
"loss": 0.1009, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 1.186030560530427, |
|
"grad_norm": 0.029131252318620682, |
|
"learning_rate": 2.108806118821588e-07, |
|
"loss": 0.1076, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 1.1907937354723164, |
|
"grad_norm": 0.8366307020187378, |
|
"learning_rate": 2.1052365660320002e-07, |
|
"loss": 0.1112, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 1.1955569104142056, |
|
"grad_norm": 9.74376106262207, |
|
"learning_rate": 2.1016670132424123e-07, |
|
"loss": 0.1101, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 1.200320085356095, |
|
"grad_norm": 4.660519123077393, |
|
"learning_rate": 2.0980974604528244e-07, |
|
"loss": 0.1084, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 1.2050832602979842, |
|
"grad_norm": 14.676050186157227, |
|
"learning_rate": 2.0945279076632365e-07, |
|
"loss": 0.1186, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 1.2098464352398735, |
|
"grad_norm": 20.924720764160156, |
|
"learning_rate": 2.0909583548736486e-07, |
|
"loss": 0.1106, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 1.2146096101817627, |
|
"grad_norm": 0.0010393880074843764, |
|
"learning_rate": 2.0873888020840607e-07, |
|
"loss": 0.1225, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 1.219372785123652, |
|
"grad_norm": 11.950651168823242, |
|
"learning_rate": 2.0838192492944728e-07, |
|
"loss": 0.1122, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 1.2241359600655413, |
|
"grad_norm": 2.3444035053253174, |
|
"learning_rate": 2.0802496965048849e-07, |
|
"loss": 0.116, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 1.2288991350074305, |
|
"grad_norm": 18.723051071166992, |
|
"learning_rate": 2.0766801437152967e-07, |
|
"loss": 0.1091, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 1.2336623099493198, |
|
"grad_norm": 13.435932159423828, |
|
"learning_rate": 2.0731105909257088e-07, |
|
"loss": 0.1068, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 1.238425484891209, |
|
"grad_norm": 1.0864711999893188, |
|
"learning_rate": 2.069541038136121e-07, |
|
"loss": 0.1266, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 1.2431886598330983, |
|
"grad_norm": 8.090726852416992, |
|
"learning_rate": 2.065971485346533e-07, |
|
"loss": 0.1256, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 1.2479518347749876, |
|
"grad_norm": 3.7775135040283203, |
|
"learning_rate": 2.062401932556945e-07, |
|
"loss": 0.1112, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 1.2527150097168769, |
|
"grad_norm": 0.0022473677527159452, |
|
"learning_rate": 2.0588323797673572e-07, |
|
"loss": 0.12, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 1.2574781846587662, |
|
"grad_norm": 1.232242465019226, |
|
"learning_rate": 2.0552628269777693e-07, |
|
"loss": 0.1023, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 1.2622413596006554, |
|
"grad_norm": 0.16098114848136902, |
|
"learning_rate": 2.0516932741881814e-07, |
|
"loss": 0.1346, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 1.2670045345425447, |
|
"grad_norm": 12.452221870422363, |
|
"learning_rate": 2.0481237213985935e-07, |
|
"loss": 0.1215, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 1.271767709484434, |
|
"grad_norm": 10.727972984313965, |
|
"learning_rate": 2.0445541686090056e-07, |
|
"loss": 0.1062, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 1.2765308844263232, |
|
"grad_norm": 0.5605247020721436, |
|
"learning_rate": 2.0409846158194177e-07, |
|
"loss": 0.1082, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 1.2812940593682125, |
|
"grad_norm": 0.9301961660385132, |
|
"learning_rate": 2.0374150630298298e-07, |
|
"loss": 0.1273, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 1.2860572343101018, |
|
"grad_norm": 0.5364490747451782, |
|
"learning_rate": 2.033845510240242e-07, |
|
"loss": 0.117, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 1.290820409251991, |
|
"grad_norm": 11.421608924865723, |
|
"learning_rate": 2.030275957450654e-07, |
|
"loss": 0.119, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 1.2955835841938803, |
|
"grad_norm": 29.850894927978516, |
|
"learning_rate": 2.026706404661066e-07, |
|
"loss": 0.0931, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 1.3003467591357696, |
|
"grad_norm": 1.1624155044555664, |
|
"learning_rate": 2.0231368518714782e-07, |
|
"loss": 0.1055, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 1.3051099340776589, |
|
"grad_norm": 8.889257431030273, |
|
"learning_rate": 2.0195672990818903e-07, |
|
"loss": 0.1174, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 1.3098731090195481, |
|
"grad_norm": 0.10517348349094391, |
|
"learning_rate": 2.0159977462923024e-07, |
|
"loss": 0.0913, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 1.3146362839614374, |
|
"grad_norm": 3.1619289075024426e-05, |
|
"learning_rate": 2.0124281935027145e-07, |
|
"loss": 0.1053, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 1.3193994589033267, |
|
"grad_norm": 8.190518379211426, |
|
"learning_rate": 2.0088586407131266e-07, |
|
"loss": 0.1017, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 1.324162633845216, |
|
"grad_norm": 8.892984390258789, |
|
"learning_rate": 2.0052890879235387e-07, |
|
"loss": 0.1225, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 1.3289258087871052, |
|
"grad_norm": 0.019881388172507286, |
|
"learning_rate": 2.0017195351339505e-07, |
|
"loss": 0.1109, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 1.3336889837289945, |
|
"grad_norm": 0.39558929204940796, |
|
"learning_rate": 1.9981499823443626e-07, |
|
"loss": 0.1113, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 1.3384521586708837, |
|
"grad_norm": 0.0027440183330327272, |
|
"learning_rate": 1.9945804295547747e-07, |
|
"loss": 0.1111, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 1.343215333612773, |
|
"grad_norm": 0.051865462213754654, |
|
"learning_rate": 1.9910108767651868e-07, |
|
"loss": 0.1001, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 1.3479785085546623, |
|
"grad_norm": 17.542285919189453, |
|
"learning_rate": 1.987441323975599e-07, |
|
"loss": 0.1093, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 1.3527416834965513, |
|
"grad_norm": 1.5649853944778442, |
|
"learning_rate": 1.983871771186011e-07, |
|
"loss": 0.1108, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 1.3575048584384408, |
|
"grad_norm": 3.2729179859161377, |
|
"learning_rate": 1.980302218396423e-07, |
|
"loss": 0.1094, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 1.3622680333803299, |
|
"grad_norm": 19.61039161682129, |
|
"learning_rate": 1.9767326656068352e-07, |
|
"loss": 0.1232, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 1.3670312083222194, |
|
"grad_norm": 0.48026251792907715, |
|
"learning_rate": 1.9731631128172473e-07, |
|
"loss": 0.1179, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 1.3717943832641084, |
|
"grad_norm": 0.011149807833135128, |
|
"learning_rate": 1.9695935600276594e-07, |
|
"loss": 0.1205, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 1.376557558205998, |
|
"grad_norm": 0.02112853154540062, |
|
"learning_rate": 1.9660240072380715e-07, |
|
"loss": 0.1124, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 1.381320733147887, |
|
"grad_norm": 34.17774200439453, |
|
"learning_rate": 1.9624544544484836e-07, |
|
"loss": 0.096, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 1.3860839080897764, |
|
"grad_norm": 0.002524700714275241, |
|
"learning_rate": 1.9588849016588957e-07, |
|
"loss": 0.1279, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 1.3908470830316655, |
|
"grad_norm": 0.4319082498550415, |
|
"learning_rate": 1.9553153488693078e-07, |
|
"loss": 0.1096, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 1.395610257973555, |
|
"grad_norm": 17.499792098999023, |
|
"learning_rate": 1.95174579607972e-07, |
|
"loss": 0.1137, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 1.400373432915444, |
|
"grad_norm": 16.145008087158203, |
|
"learning_rate": 1.948176243290132e-07, |
|
"loss": 0.1051, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 1.4051366078573335, |
|
"grad_norm": 0.8132957816123962, |
|
"learning_rate": 1.944606690500544e-07, |
|
"loss": 0.1063, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 1.4098997827992226, |
|
"grad_norm": 0.05169007182121277, |
|
"learning_rate": 1.9410371377109562e-07, |
|
"loss": 0.1047, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 1.4146629577411118, |
|
"grad_norm": 3.716925859451294, |
|
"learning_rate": 1.9374675849213683e-07, |
|
"loss": 0.1081, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 1.419426132683001, |
|
"grad_norm": 16.134973526000977, |
|
"learning_rate": 1.9338980321317804e-07, |
|
"loss": 0.127, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 1.4241893076248904, |
|
"grad_norm": 0.8836007118225098, |
|
"learning_rate": 1.9303284793421922e-07, |
|
"loss": 0.1116, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 1.4289524825667796, |
|
"grad_norm": 4.612446308135986, |
|
"learning_rate": 1.9267589265526043e-07, |
|
"loss": 0.1263, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 1.433715657508669, |
|
"grad_norm": 0.24257691204547882, |
|
"learning_rate": 1.9231893737630164e-07, |
|
"loss": 0.1067, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 1.4384788324505582, |
|
"grad_norm": 18.72231101989746, |
|
"learning_rate": 1.9196198209734285e-07, |
|
"loss": 0.1174, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 1.4432420073924475, |
|
"grad_norm": 0.09871924668550491, |
|
"learning_rate": 1.9160502681838409e-07, |
|
"loss": 0.1096, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 1.4480051823343367, |
|
"grad_norm": 1.2534486055374146, |
|
"learning_rate": 1.912480715394253e-07, |
|
"loss": 0.1155, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 1.452768357276226, |
|
"grad_norm": 14.395082473754883, |
|
"learning_rate": 1.908911162604665e-07, |
|
"loss": 0.1047, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 1.4575315322181153, |
|
"grad_norm": 2.9728424549102783, |
|
"learning_rate": 1.9053416098150771e-07, |
|
"loss": 0.0942, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 1.4622947071600045, |
|
"grad_norm": 10.138772964477539, |
|
"learning_rate": 1.9017720570254892e-07, |
|
"loss": 0.1219, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 1.4670578821018938, |
|
"grad_norm": 16.113460540771484, |
|
"learning_rate": 1.8982025042359013e-07, |
|
"loss": 0.1094, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 1.471821057043783, |
|
"grad_norm": 1.4605754613876343, |
|
"learning_rate": 1.8946329514463134e-07, |
|
"loss": 0.1227, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 1.4765842319856723, |
|
"grad_norm": 19.34916877746582, |
|
"learning_rate": 1.8910633986567255e-07, |
|
"loss": 0.1032, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 1.4813474069275616, |
|
"grad_norm": 5.430410861968994, |
|
"learning_rate": 1.8874938458671376e-07, |
|
"loss": 0.1107, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 1.4861105818694509, |
|
"grad_norm": 0.01834196224808693, |
|
"learning_rate": 1.8839242930775497e-07, |
|
"loss": 0.1243, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 1.4908737568113402, |
|
"grad_norm": 22.170907974243164, |
|
"learning_rate": 1.8803547402879618e-07, |
|
"loss": 0.1019, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 1.4956369317532294, |
|
"grad_norm": 0.28510582447052, |
|
"learning_rate": 1.876785187498374e-07, |
|
"loss": 0.1354, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 1.5004001066951187, |
|
"grad_norm": 8.662985801696777, |
|
"learning_rate": 1.873215634708786e-07, |
|
"loss": 0.1043, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 1.505163281637008, |
|
"grad_norm": 5.042104721069336, |
|
"learning_rate": 1.869646081919198e-07, |
|
"loss": 0.1102, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 1.5099264565788972, |
|
"grad_norm": 0.07364173978567123, |
|
"learning_rate": 1.8660765291296102e-07, |
|
"loss": 0.129, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 1.5146896315207865, |
|
"grad_norm": 3.405261278152466, |
|
"learning_rate": 1.862506976340022e-07, |
|
"loss": 0.111, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 1.5194528064626758, |
|
"grad_norm": 3.3657171726226807, |
|
"learning_rate": 1.8589374235504342e-07, |
|
"loss": 0.1089, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 1.524215981404565, |
|
"grad_norm": 0.028935715556144714, |
|
"learning_rate": 1.8553678707608463e-07, |
|
"loss": 0.1038, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 1.5289791563464543, |
|
"grad_norm": 1.048443078994751, |
|
"learning_rate": 1.8517983179712584e-07, |
|
"loss": 0.0938, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 1.5337423312883436, |
|
"grad_norm": 15.945822715759277, |
|
"learning_rate": 1.8482287651816705e-07, |
|
"loss": 0.1138, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 1.5385055062302329, |
|
"grad_norm": 0.36497658491134644, |
|
"learning_rate": 1.8446592123920826e-07, |
|
"loss": 0.107, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 1.5432686811721221, |
|
"grad_norm": 0.7298380732536316, |
|
"learning_rate": 1.8410896596024947e-07, |
|
"loss": 0.1012, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 1.5480318561140114, |
|
"grad_norm": 0.041379645466804504, |
|
"learning_rate": 1.8375201068129068e-07, |
|
"loss": 0.1171, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 1.5527950310559007, |
|
"grad_norm": 1.361636996269226, |
|
"learning_rate": 1.8339505540233188e-07, |
|
"loss": 0.0978, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 1.55755820599779, |
|
"grad_norm": 0.0006419995916076005, |
|
"learning_rate": 1.830381001233731e-07, |
|
"loss": 0.1152, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 1.5623213809396792, |
|
"grad_norm": 29.9415340423584, |
|
"learning_rate": 1.826811448444143e-07, |
|
"loss": 0.1124, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 1.5670845558815683, |
|
"grad_norm": 14.25601577758789, |
|
"learning_rate": 1.8232418956545551e-07, |
|
"loss": 0.1096, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 1.5718477308234577, |
|
"grad_norm": 10.357718467712402, |
|
"learning_rate": 1.8196723428649672e-07, |
|
"loss": 0.1068, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 1.5766109057653468, |
|
"grad_norm": 2.6006975173950195, |
|
"learning_rate": 1.8161027900753793e-07, |
|
"loss": 0.112, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 1.5813740807072363, |
|
"grad_norm": 0.013517569750547409, |
|
"learning_rate": 1.8125332372857914e-07, |
|
"loss": 0.1046, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 1.5861372556491253, |
|
"grad_norm": 8.826921463012695, |
|
"learning_rate": 1.8089636844962035e-07, |
|
"loss": 0.1148, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 1.5909004305910148, |
|
"grad_norm": 30.500505447387695, |
|
"learning_rate": 1.8053941317066156e-07, |
|
"loss": 0.1022, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 1.5956636055329039, |
|
"grad_norm": 0.0015779563691467047, |
|
"learning_rate": 1.8018245789170277e-07, |
|
"loss": 0.0967, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 1.6004267804747934, |
|
"grad_norm": 5.478053092956543, |
|
"learning_rate": 1.7982550261274398e-07, |
|
"loss": 0.1134, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 1.6051899554166824, |
|
"grad_norm": 0.11672214418649673, |
|
"learning_rate": 1.794685473337852e-07, |
|
"loss": 0.1, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 1.609953130358572, |
|
"grad_norm": 1.3032927513122559, |
|
"learning_rate": 1.791115920548264e-07, |
|
"loss": 0.1165, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 1.614716305300461, |
|
"grad_norm": 0.20952224731445312, |
|
"learning_rate": 1.7875463677586759e-07, |
|
"loss": 0.112, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 1.6194794802423504, |
|
"grad_norm": 5.338772296905518, |
|
"learning_rate": 1.783976814969088e-07, |
|
"loss": 0.1151, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 1.6242426551842395, |
|
"grad_norm": 14.308218955993652, |
|
"learning_rate": 1.7804072621795e-07, |
|
"loss": 0.1188, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 1.629005830126129, |
|
"grad_norm": 2.5902771949768066, |
|
"learning_rate": 1.7768377093899122e-07, |
|
"loss": 0.1095, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 1.633769005068018, |
|
"grad_norm": 1.145963191986084, |
|
"learning_rate": 1.7732681566003243e-07, |
|
"loss": 0.1081, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 1.6385321800099075, |
|
"grad_norm": 0.019109368324279785, |
|
"learning_rate": 1.7696986038107364e-07, |
|
"loss": 0.1037, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 1.6432953549517966, |
|
"grad_norm": 0.381287544965744, |
|
"learning_rate": 1.7661290510211485e-07, |
|
"loss": 0.1024, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 1.648058529893686, |
|
"grad_norm": 0.28823208808898926, |
|
"learning_rate": 1.7625594982315606e-07, |
|
"loss": 0.1112, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 1.6528217048355751, |
|
"grad_norm": 7.33282995223999, |
|
"learning_rate": 1.7589899454419726e-07, |
|
"loss": 0.1037, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 1.6575848797774646, |
|
"grad_norm": 18.685562133789062, |
|
"learning_rate": 1.7554203926523847e-07, |
|
"loss": 0.1096, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 1.6623480547193537, |
|
"grad_norm": 5.129574298858643, |
|
"learning_rate": 1.7518508398627968e-07, |
|
"loss": 0.1028, |
|
"step": 174500 |
|
}, |
|
{ |
|
"epoch": 1.6671112296612431, |
|
"grad_norm": 1.4392452239990234, |
|
"learning_rate": 1.748281287073209e-07, |
|
"loss": 0.1106, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 1.6718744046031322, |
|
"grad_norm": 3.51692795753479, |
|
"learning_rate": 1.744711734283621e-07, |
|
"loss": 0.1142, |
|
"step": 175500 |
|
}, |
|
{ |
|
"epoch": 1.6766375795450217, |
|
"grad_norm": 0.026211030781269073, |
|
"learning_rate": 1.7411421814940331e-07, |
|
"loss": 0.1083, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 1.6814007544869107, |
|
"grad_norm": 4.911788463592529, |
|
"learning_rate": 1.7375726287044452e-07, |
|
"loss": 0.1053, |
|
"step": 176500 |
|
}, |
|
{ |
|
"epoch": 1.6861639294288002, |
|
"grad_norm": 5.630278587341309, |
|
"learning_rate": 1.7340030759148573e-07, |
|
"loss": 0.1087, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 1.6909271043706893, |
|
"grad_norm": 1.8357000350952148, |
|
"learning_rate": 1.7304335231252694e-07, |
|
"loss": 0.104, |
|
"step": 177500 |
|
}, |
|
{ |
|
"epoch": 1.6956902793125785, |
|
"grad_norm": 2.734142780303955, |
|
"learning_rate": 1.7268639703356815e-07, |
|
"loss": 0.1339, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 1.7004534542544678, |
|
"grad_norm": 27.017423629760742, |
|
"learning_rate": 1.7232944175460936e-07, |
|
"loss": 0.1085, |
|
"step": 178500 |
|
}, |
|
{ |
|
"epoch": 1.705216629196357, |
|
"grad_norm": 0.3949691653251648, |
|
"learning_rate": 1.7197248647565057e-07, |
|
"loss": 0.1074, |
|
"step": 179000 |
|
}, |
|
{ |
|
"epoch": 1.7099798041382464, |
|
"grad_norm": 14.20468807220459, |
|
"learning_rate": 1.7161553119669176e-07, |
|
"loss": 0.099, |
|
"step": 179500 |
|
}, |
|
{ |
|
"epoch": 1.7147429790801356, |
|
"grad_norm": 0.0762014091014862, |
|
"learning_rate": 1.7125857591773297e-07, |
|
"loss": 0.1108, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 1.719506154022025, |
|
"grad_norm": 19.510847091674805, |
|
"learning_rate": 1.7090162063877418e-07, |
|
"loss": 0.1225, |
|
"step": 180500 |
|
}, |
|
{ |
|
"epoch": 1.7242693289639142, |
|
"grad_norm": 0.08469968289136887, |
|
"learning_rate": 1.7054466535981539e-07, |
|
"loss": 0.1015, |
|
"step": 181000 |
|
}, |
|
{ |
|
"epoch": 1.7290325039058034, |
|
"grad_norm": 30.91697120666504, |
|
"learning_rate": 1.701877100808566e-07, |
|
"loss": 0.108, |
|
"step": 181500 |
|
}, |
|
{ |
|
"epoch": 1.7337956788476927, |
|
"grad_norm": 5.243447780609131, |
|
"learning_rate": 1.698307548018978e-07, |
|
"loss": 0.1279, |
|
"step": 182000 |
|
}, |
|
{ |
|
"epoch": 1.738558853789582, |
|
"grad_norm": 19.558759689331055, |
|
"learning_rate": 1.6947379952293902e-07, |
|
"loss": 0.1043, |
|
"step": 182500 |
|
}, |
|
{ |
|
"epoch": 1.7433220287314712, |
|
"grad_norm": 18.18353271484375, |
|
"learning_rate": 1.6911684424398023e-07, |
|
"loss": 0.1005, |
|
"step": 183000 |
|
}, |
|
{ |
|
"epoch": 1.7480852036733605, |
|
"grad_norm": 0.06688349694013596, |
|
"learning_rate": 1.6875988896502144e-07, |
|
"loss": 0.1021, |
|
"step": 183500 |
|
}, |
|
{ |
|
"epoch": 1.7528483786152498, |
|
"grad_norm": 3.162032127380371, |
|
"learning_rate": 1.6840293368606264e-07, |
|
"loss": 0.1244, |
|
"step": 184000 |
|
}, |
|
{ |
|
"epoch": 1.757611553557139, |
|
"grad_norm": 0.6052823662757874, |
|
"learning_rate": 1.6804597840710385e-07, |
|
"loss": 0.1145, |
|
"step": 184500 |
|
}, |
|
{ |
|
"epoch": 1.7623747284990283, |
|
"grad_norm": 12.853279113769531, |
|
"learning_rate": 1.6768902312814506e-07, |
|
"loss": 0.1004, |
|
"step": 185000 |
|
}, |
|
{ |
|
"epoch": 1.7671379034409176, |
|
"grad_norm": 0.010185870341956615, |
|
"learning_rate": 1.6733206784918627e-07, |
|
"loss": 0.1048, |
|
"step": 185500 |
|
}, |
|
{ |
|
"epoch": 1.7719010783828069, |
|
"grad_norm": 7.452764839399606e-05, |
|
"learning_rate": 1.6697511257022748e-07, |
|
"loss": 0.1, |
|
"step": 186000 |
|
}, |
|
{ |
|
"epoch": 1.7766642533246961, |
|
"grad_norm": 17.169069290161133, |
|
"learning_rate": 1.666181572912687e-07, |
|
"loss": 0.1176, |
|
"step": 186500 |
|
}, |
|
{ |
|
"epoch": 1.7814274282665854, |
|
"grad_norm": 12.43346118927002, |
|
"learning_rate": 1.662612020123099e-07, |
|
"loss": 0.111, |
|
"step": 187000 |
|
}, |
|
{ |
|
"epoch": 1.7861906032084747, |
|
"grad_norm": 13.447138786315918, |
|
"learning_rate": 1.6590424673335111e-07, |
|
"loss": 0.1198, |
|
"step": 187500 |
|
}, |
|
{ |
|
"epoch": 1.790953778150364, |
|
"grad_norm": 2.679323673248291, |
|
"learning_rate": 1.6554729145439232e-07, |
|
"loss": 0.1083, |
|
"step": 188000 |
|
}, |
|
{ |
|
"epoch": 1.7957169530922532, |
|
"grad_norm": 12.469381332397461, |
|
"learning_rate": 1.6519033617543353e-07, |
|
"loss": 0.0948, |
|
"step": 188500 |
|
}, |
|
{ |
|
"epoch": 1.8004801280341425, |
|
"grad_norm": 5.650728225708008, |
|
"learning_rate": 1.6483338089647474e-07, |
|
"loss": 0.1329, |
|
"step": 189000 |
|
}, |
|
{ |
|
"epoch": 1.8052433029760317, |
|
"grad_norm": 0.8955023884773254, |
|
"learning_rate": 1.6447642561751593e-07, |
|
"loss": 0.0933, |
|
"step": 189500 |
|
}, |
|
{ |
|
"epoch": 1.810006477917921, |
|
"grad_norm": 26.87681770324707, |
|
"learning_rate": 1.6411947033855714e-07, |
|
"loss": 0.1015, |
|
"step": 190000 |
|
}, |
|
{ |
|
"epoch": 1.8147696528598103, |
|
"grad_norm": 4.95468282699585, |
|
"learning_rate": 1.6376251505959835e-07, |
|
"loss": 0.1067, |
|
"step": 190500 |
|
}, |
|
{ |
|
"epoch": 1.8195328278016993, |
|
"grad_norm": 0.931701123714447, |
|
"learning_rate": 1.6340555978063958e-07, |
|
"loss": 0.1282, |
|
"step": 191000 |
|
}, |
|
{ |
|
"epoch": 1.8242960027435888, |
|
"grad_norm": 7.3879780769348145, |
|
"learning_rate": 1.630486045016808e-07, |
|
"loss": 0.0935, |
|
"step": 191500 |
|
}, |
|
{ |
|
"epoch": 1.8290591776854779, |
|
"grad_norm": 0.022659489884972572, |
|
"learning_rate": 1.62691649222722e-07, |
|
"loss": 0.1086, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 1.8338223526273674, |
|
"grad_norm": 0.1140231192111969, |
|
"learning_rate": 1.623346939437632e-07, |
|
"loss": 0.1005, |
|
"step": 192500 |
|
}, |
|
{ |
|
"epoch": 1.8385855275692564, |
|
"grad_norm": 0.6795700192451477, |
|
"learning_rate": 1.6197773866480442e-07, |
|
"loss": 0.11, |
|
"step": 193000 |
|
}, |
|
{ |
|
"epoch": 1.843348702511146, |
|
"grad_norm": 9.37308406829834, |
|
"learning_rate": 1.6162078338584563e-07, |
|
"loss": 0.1224, |
|
"step": 193500 |
|
}, |
|
{ |
|
"epoch": 1.848111877453035, |
|
"grad_norm": 33.167701721191406, |
|
"learning_rate": 1.6126382810688684e-07, |
|
"loss": 0.1122, |
|
"step": 194000 |
|
}, |
|
{ |
|
"epoch": 1.8528750523949244, |
|
"grad_norm": 0.8446443676948547, |
|
"learning_rate": 1.6090687282792805e-07, |
|
"loss": 0.1098, |
|
"step": 194500 |
|
}, |
|
{ |
|
"epoch": 1.8576382273368135, |
|
"grad_norm": 30.448610305786133, |
|
"learning_rate": 1.6054991754896926e-07, |
|
"loss": 0.1041, |
|
"step": 195000 |
|
}, |
|
{ |
|
"epoch": 1.862401402278703, |
|
"grad_norm": 21.807527542114258, |
|
"learning_rate": 1.6019296227001047e-07, |
|
"loss": 0.097, |
|
"step": 195500 |
|
}, |
|
{ |
|
"epoch": 1.867164577220592, |
|
"grad_norm": 0.030451184138655663, |
|
"learning_rate": 1.5983600699105168e-07, |
|
"loss": 0.1073, |
|
"step": 196000 |
|
}, |
|
{ |
|
"epoch": 1.8719277521624815, |
|
"grad_norm": 0.03133384510874748, |
|
"learning_rate": 1.594790517120929e-07, |
|
"loss": 0.1123, |
|
"step": 196500 |
|
}, |
|
{ |
|
"epoch": 1.8766909271043706, |
|
"grad_norm": 2.491389513015747, |
|
"learning_rate": 1.591220964331341e-07, |
|
"loss": 0.0996, |
|
"step": 197000 |
|
}, |
|
{ |
|
"epoch": 1.88145410204626, |
|
"grad_norm": 29.67350959777832, |
|
"learning_rate": 1.587651411541753e-07, |
|
"loss": 0.0885, |
|
"step": 197500 |
|
}, |
|
{ |
|
"epoch": 1.8862172769881491, |
|
"grad_norm": 0.28274720907211304, |
|
"learning_rate": 1.5840818587521652e-07, |
|
"loss": 0.1085, |
|
"step": 198000 |
|
}, |
|
{ |
|
"epoch": 1.8909804519300386, |
|
"grad_norm": 0.04003256559371948, |
|
"learning_rate": 1.5805123059625773e-07, |
|
"loss": 0.1044, |
|
"step": 198500 |
|
}, |
|
{ |
|
"epoch": 1.8957436268719277, |
|
"grad_norm": 44.06182861328125, |
|
"learning_rate": 1.5769427531729894e-07, |
|
"loss": 0.1161, |
|
"step": 199000 |
|
}, |
|
{ |
|
"epoch": 1.9005068018138171, |
|
"grad_norm": 3.635102621046826e-05, |
|
"learning_rate": 1.5733732003834012e-07, |
|
"loss": 0.112, |
|
"step": 199500 |
|
}, |
|
{ |
|
"epoch": 1.9052699767557062, |
|
"grad_norm": 0.03733281418681145, |
|
"learning_rate": 1.5698036475938133e-07, |
|
"loss": 0.1241, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 1.9100331516975957, |
|
"grad_norm": 0.009642825461924076, |
|
"learning_rate": 1.5662340948042254e-07, |
|
"loss": 0.1116, |
|
"step": 200500 |
|
}, |
|
{ |
|
"epoch": 1.9147963266394847, |
|
"grad_norm": 0.04384785518050194, |
|
"learning_rate": 1.5626645420146375e-07, |
|
"loss": 0.0975, |
|
"step": 201000 |
|
}, |
|
{ |
|
"epoch": 1.9195595015813742, |
|
"grad_norm": 28.704349517822266, |
|
"learning_rate": 1.5590949892250496e-07, |
|
"loss": 0.1376, |
|
"step": 201500 |
|
}, |
|
{ |
|
"epoch": 1.9243226765232633, |
|
"grad_norm": 0.0023307004012167454, |
|
"learning_rate": 1.5555254364354617e-07, |
|
"loss": 0.1216, |
|
"step": 202000 |
|
}, |
|
{ |
|
"epoch": 1.9290858514651528, |
|
"grad_norm": 3.073564291000366, |
|
"learning_rate": 1.5519558836458738e-07, |
|
"loss": 0.1139, |
|
"step": 202500 |
|
}, |
|
{ |
|
"epoch": 1.9338490264070418, |
|
"grad_norm": 0.6808690428733826, |
|
"learning_rate": 1.548386330856286e-07, |
|
"loss": 0.0976, |
|
"step": 203000 |
|
}, |
|
{ |
|
"epoch": 1.9386122013489313, |
|
"grad_norm": 5.560125350952148, |
|
"learning_rate": 1.544816778066698e-07, |
|
"loss": 0.1022, |
|
"step": 203500 |
|
}, |
|
{ |
|
"epoch": 1.9433753762908204, |
|
"grad_norm": 0.019307024776935577, |
|
"learning_rate": 1.54124722527711e-07, |
|
"loss": 0.1042, |
|
"step": 204000 |
|
}, |
|
{ |
|
"epoch": 1.9481385512327096, |
|
"grad_norm": 0.23428542912006378, |
|
"learning_rate": 1.5376776724875222e-07, |
|
"loss": 0.1173, |
|
"step": 204500 |
|
}, |
|
{ |
|
"epoch": 1.952901726174599, |
|
"grad_norm": 5.58413553237915, |
|
"learning_rate": 1.5341081196979343e-07, |
|
"loss": 0.1216, |
|
"step": 205000 |
|
}, |
|
{ |
|
"epoch": 1.9576649011164882, |
|
"grad_norm": 2.8649184703826904, |
|
"learning_rate": 1.5305385669083464e-07, |
|
"loss": 0.1034, |
|
"step": 205500 |
|
}, |
|
{ |
|
"epoch": 1.9624280760583774, |
|
"grad_norm": 0.037286777049303055, |
|
"learning_rate": 1.5269690141187585e-07, |
|
"loss": 0.1011, |
|
"step": 206000 |
|
}, |
|
{ |
|
"epoch": 1.9671912510002667, |
|
"grad_norm": 0.007163808681070805, |
|
"learning_rate": 1.5233994613291706e-07, |
|
"loss": 0.1087, |
|
"step": 206500 |
|
}, |
|
{ |
|
"epoch": 1.971954425942156, |
|
"grad_norm": 24.465059280395508, |
|
"learning_rate": 1.5198299085395827e-07, |
|
"loss": 0.1122, |
|
"step": 207000 |
|
}, |
|
{ |
|
"epoch": 1.9767176008840452, |
|
"grad_norm": 6.201001167297363, |
|
"learning_rate": 1.5162603557499948e-07, |
|
"loss": 0.0882, |
|
"step": 207500 |
|
}, |
|
{ |
|
"epoch": 1.9814807758259345, |
|
"grad_norm": 0.19351300597190857, |
|
"learning_rate": 1.512690802960407e-07, |
|
"loss": 0.1109, |
|
"step": 208000 |
|
}, |
|
{ |
|
"epoch": 1.9862439507678238, |
|
"grad_norm": 0.0022748825140297413, |
|
"learning_rate": 1.509121250170819e-07, |
|
"loss": 0.1045, |
|
"step": 208500 |
|
}, |
|
{ |
|
"epoch": 1.991007125709713, |
|
"grad_norm": 0.00041236350079998374, |
|
"learning_rate": 1.505551697381231e-07, |
|
"loss": 0.1095, |
|
"step": 209000 |
|
}, |
|
{ |
|
"epoch": 1.9957703006516023, |
|
"grad_norm": 11.131386756896973, |
|
"learning_rate": 1.501982144591643e-07, |
|
"loss": 0.1293, |
|
"step": 209500 |
|
}, |
|
{ |
|
"epoch": 2.0005334755934916, |
|
"grad_norm": 0.002583843655884266, |
|
"learning_rate": 1.498412591802055e-07, |
|
"loss": 0.1142, |
|
"step": 210000 |
|
}, |
|
{ |
|
"epoch": 2.0052966505353806, |
|
"grad_norm": 14.562299728393555, |
|
"learning_rate": 1.494843039012467e-07, |
|
"loss": 0.1048, |
|
"step": 210500 |
|
}, |
|
{ |
|
"epoch": 2.01005982547727, |
|
"grad_norm": 4.4846510887146, |
|
"learning_rate": 1.4912734862228792e-07, |
|
"loss": 0.1019, |
|
"step": 211000 |
|
}, |
|
{ |
|
"epoch": 2.014823000419159, |
|
"grad_norm": 0.0017532992642372847, |
|
"learning_rate": 1.4877039334332913e-07, |
|
"loss": 0.1092, |
|
"step": 211500 |
|
}, |
|
{ |
|
"epoch": 2.0195861753610487, |
|
"grad_norm": 0.005973345600068569, |
|
"learning_rate": 1.4841343806437034e-07, |
|
"loss": 0.099, |
|
"step": 212000 |
|
}, |
|
{ |
|
"epoch": 2.0243493503029377, |
|
"grad_norm": 25.91645622253418, |
|
"learning_rate": 1.4805648278541155e-07, |
|
"loss": 0.1197, |
|
"step": 212500 |
|
}, |
|
{ |
|
"epoch": 2.029112525244827, |
|
"grad_norm": 0.009553452022373676, |
|
"learning_rate": 1.4769952750645276e-07, |
|
"loss": 0.1058, |
|
"step": 213000 |
|
}, |
|
{ |
|
"epoch": 2.0338757001867163, |
|
"grad_norm": 2.3231234550476074, |
|
"learning_rate": 1.4734257222749397e-07, |
|
"loss": 0.1098, |
|
"step": 213500 |
|
}, |
|
{ |
|
"epoch": 2.0386388751286058, |
|
"grad_norm": 7.085731506347656, |
|
"learning_rate": 1.4698561694853518e-07, |
|
"loss": 0.0992, |
|
"step": 214000 |
|
}, |
|
{ |
|
"epoch": 2.043402050070495, |
|
"grad_norm": 0.2953038215637207, |
|
"learning_rate": 1.466286616695764e-07, |
|
"loss": 0.1126, |
|
"step": 214500 |
|
}, |
|
{ |
|
"epoch": 2.0481652250123843, |
|
"grad_norm": 0.0010392339900135994, |
|
"learning_rate": 1.462717063906176e-07, |
|
"loss": 0.1034, |
|
"step": 215000 |
|
}, |
|
{ |
|
"epoch": 2.0529283999542733, |
|
"grad_norm": 0.07149825990200043, |
|
"learning_rate": 1.459147511116588e-07, |
|
"loss": 0.1008, |
|
"step": 215500 |
|
}, |
|
{ |
|
"epoch": 2.057691574896163, |
|
"grad_norm": 0.594185471534729, |
|
"learning_rate": 1.4555779583270002e-07, |
|
"loss": 0.1004, |
|
"step": 216000 |
|
}, |
|
{ |
|
"epoch": 2.062454749838052, |
|
"grad_norm": 2.347266912460327, |
|
"learning_rate": 1.4520084055374123e-07, |
|
"loss": 0.1087, |
|
"step": 216500 |
|
}, |
|
{ |
|
"epoch": 2.0672179247799414, |
|
"grad_norm": 0.16203026473522186, |
|
"learning_rate": 1.4484388527478244e-07, |
|
"loss": 0.1065, |
|
"step": 217000 |
|
}, |
|
{ |
|
"epoch": 2.0719810997218304, |
|
"grad_norm": 0.03992275521159172, |
|
"learning_rate": 1.4448692999582365e-07, |
|
"loss": 0.1247, |
|
"step": 217500 |
|
}, |
|
{ |
|
"epoch": 2.07674427466372, |
|
"grad_norm": 0.5468718409538269, |
|
"learning_rate": 1.4412997471686486e-07, |
|
"loss": 0.1078, |
|
"step": 218000 |
|
}, |
|
{ |
|
"epoch": 2.081507449605609, |
|
"grad_norm": 7.58856201171875, |
|
"learning_rate": 1.4377301943790607e-07, |
|
"loss": 0.0921, |
|
"step": 218500 |
|
}, |
|
{ |
|
"epoch": 2.0862706245474985, |
|
"grad_norm": 17.591888427734375, |
|
"learning_rate": 1.4341606415894728e-07, |
|
"loss": 0.1105, |
|
"step": 219000 |
|
}, |
|
{ |
|
"epoch": 2.0910337994893875, |
|
"grad_norm": 9.395910263061523, |
|
"learning_rate": 1.4305910887998846e-07, |
|
"loss": 0.1021, |
|
"step": 219500 |
|
}, |
|
{ |
|
"epoch": 2.095796974431277, |
|
"grad_norm": 0.0014383163070306182, |
|
"learning_rate": 1.4270215360102967e-07, |
|
"loss": 0.1089, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 2.100560149373166, |
|
"grad_norm": 0.11439737677574158, |
|
"learning_rate": 1.4234519832207088e-07, |
|
"loss": 0.1108, |
|
"step": 220500 |
|
}, |
|
{ |
|
"epoch": 2.1053233243150555, |
|
"grad_norm": 1.5022146701812744, |
|
"learning_rate": 1.419882430431121e-07, |
|
"loss": 0.1098, |
|
"step": 221000 |
|
}, |
|
{ |
|
"epoch": 2.1100864992569446, |
|
"grad_norm": 0.004490518942475319, |
|
"learning_rate": 1.416312877641533e-07, |
|
"loss": 0.1063, |
|
"step": 221500 |
|
}, |
|
{ |
|
"epoch": 2.114849674198834, |
|
"grad_norm": 5.6877360343933105, |
|
"learning_rate": 1.412743324851945e-07, |
|
"loss": 0.1107, |
|
"step": 222000 |
|
}, |
|
{ |
|
"epoch": 2.119612849140723, |
|
"grad_norm": 1.135617733001709, |
|
"learning_rate": 1.4091737720623572e-07, |
|
"loss": 0.115, |
|
"step": 222500 |
|
}, |
|
{ |
|
"epoch": 2.1243760240826126, |
|
"grad_norm": 2.0897531509399414, |
|
"learning_rate": 1.4056042192727693e-07, |
|
"loss": 0.1242, |
|
"step": 223000 |
|
}, |
|
{ |
|
"epoch": 2.1291391990245017, |
|
"grad_norm": 12.82266902923584, |
|
"learning_rate": 1.4020346664831814e-07, |
|
"loss": 0.1066, |
|
"step": 223500 |
|
}, |
|
{ |
|
"epoch": 2.133902373966391, |
|
"grad_norm": 0.15470212697982788, |
|
"learning_rate": 1.3984651136935935e-07, |
|
"loss": 0.0952, |
|
"step": 224000 |
|
}, |
|
{ |
|
"epoch": 2.13866554890828, |
|
"grad_norm": 7.721962174400687e-05, |
|
"learning_rate": 1.3948955609040056e-07, |
|
"loss": 0.1237, |
|
"step": 224500 |
|
}, |
|
{ |
|
"epoch": 2.1434287238501697, |
|
"grad_norm": 0.023135853931307793, |
|
"learning_rate": 1.3913260081144177e-07, |
|
"loss": 0.1118, |
|
"step": 225000 |
|
}, |
|
{ |
|
"epoch": 2.1481918987920587, |
|
"grad_norm": 1.2089338302612305, |
|
"learning_rate": 1.3877564553248298e-07, |
|
"loss": 0.0944, |
|
"step": 225500 |
|
}, |
|
{ |
|
"epoch": 2.1529550737339482, |
|
"grad_norm": 7.6081109046936035, |
|
"learning_rate": 1.384186902535242e-07, |
|
"loss": 0.1025, |
|
"step": 226000 |
|
}, |
|
{ |
|
"epoch": 2.1577182486758373, |
|
"grad_norm": 11.101881980895996, |
|
"learning_rate": 1.380617349745654e-07, |
|
"loss": 0.081, |
|
"step": 226500 |
|
} |
|
], |
|
"logging_steps": 500, |
|
"max_steps": 314916, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 3.2593962720815494e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |