|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.3645320197044333,
  "eval_steps": 500,
  "global_step": 1500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.011034482758620689,
      "grad_norm": 0.2625392973423004,
      "learning_rate": 7.000000000000001e-06,
      "loss": 1.4548,
      "step": 7
    },
    {
      "epoch": 0.022068965517241378,
      "grad_norm": 0.255765438079834,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 1.3014,
      "step": 14
    },
    {
      "epoch": 0.03310344827586207,
      "grad_norm": 0.20604930818080902,
      "learning_rate": 2.1e-05,
      "loss": 1.1312,
      "step": 21
    },
    {
      "epoch": 0.044137931034482755,
      "grad_norm": 0.20505264401435852,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 1.0359,
      "step": 28
    },
    {
      "epoch": 0.05517241379310345,
      "grad_norm": 0.21282243728637695,
      "learning_rate": 3.5e-05,
      "loss": 0.9636,
      "step": 35
    },
    {
      "epoch": 0.06620689655172414,
      "grad_norm": 0.3072029948234558,
      "learning_rate": 4.2e-05,
      "loss": 0.9305,
      "step": 42
    },
    {
      "epoch": 0.07724137931034483,
      "grad_norm": 0.23971162736415863,
      "learning_rate": 4.9e-05,
      "loss": 0.9174,
      "step": 49
    },
    {
      "epoch": 0.08827586206896551,
      "grad_norm": 0.2698459327220917,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 0.8612,
      "step": 56
    },
    {
      "epoch": 0.0993103448275862,
      "grad_norm": 0.3638197183609009,
      "learning_rate": 6.3e-05,
      "loss": 0.8511,
      "step": 63
    },
    {
      "epoch": 0.1103448275862069,
      "grad_norm": 0.27103668451309204,
      "learning_rate": 7e-05,
      "loss": 0.8312,
      "step": 70
    },
    {
      "epoch": 0.12137931034482759,
      "grad_norm": 0.3014815151691437,
      "learning_rate": 7.7e-05,
      "loss": 0.7998,
      "step": 77
    },
    {
      "epoch": 0.13241379310344828,
      "grad_norm": 0.2660246789455414,
      "learning_rate": 8.4e-05,
      "loss": 0.8015,
      "step": 84
    },
    {
      "epoch": 0.14344827586206896,
      "grad_norm": 0.30547192692756653,
      "learning_rate": 9.1e-05,
      "loss": 0.8076,
      "step": 91
    },
    {
      "epoch": 0.15448275862068966,
      "grad_norm": 0.3411053717136383,
      "learning_rate": 9.8e-05,
      "loss": 0.7944,
      "step": 98
    },
    {
      "epoch": 0.16551724137931034,
      "grad_norm": 0.31632518768310547,
      "learning_rate": 9.999685283773504e-05,
      "loss": 0.7816,
      "step": 105
    },
    {
      "epoch": 0.17655172413793102,
      "grad_norm": 0.36275714635849,
      "learning_rate": 9.998187325055106e-05,
      "loss": 0.7829,
      "step": 112
    },
    {
      "epoch": 0.18758620689655173,
      "grad_norm": 0.32636329531669617,
      "learning_rate": 9.995456138403733e-05,
      "loss": 0.7791,
      "step": 119
    },
    {
      "epoch": 0.1986206896551724,
      "grad_norm": 0.3593531847000122,
      "learning_rate": 9.991492397698826e-05,
      "loss": 0.7709,
      "step": 126
    },
    {
      "epoch": 0.2096551724137931,
      "grad_norm": 0.3396188020706177,
      "learning_rate": 9.986297080934089e-05,
      "loss": 0.7686,
      "step": 133
    },
    {
      "epoch": 0.2206896551724138,
      "grad_norm": 0.37299859523773193,
      "learning_rate": 9.979871469976196e-05,
      "loss": 0.7603,
      "step": 140
    },
    {
      "epoch": 0.2317241379310345,
      "grad_norm": 0.34510111808776855,
      "learning_rate": 9.972217150248503e-05,
      "loss": 0.7712,
      "step": 147
    },
    {
      "epoch": 0.24275862068965517,
      "grad_norm": 0.3415180444717407,
      "learning_rate": 9.963336010339868e-05,
      "loss": 0.7608,
      "step": 154
    },
    {
      "epoch": 0.2537931034482759,
      "grad_norm": 0.38360491394996643,
      "learning_rate": 9.953230241538674e-05,
      "loss": 0.777,
      "step": 161
    },
    {
      "epoch": 0.26482758620689656,
      "grad_norm": 0.3310436010360718,
      "learning_rate": 9.941902337292155e-05,
      "loss": 0.7498,
      "step": 168
    },
    {
      "epoch": 0.27586206896551724,
      "grad_norm": 0.34418851137161255,
      "learning_rate": 9.92935509259118e-05,
      "loss": 0.7456,
      "step": 175
    },
    {
      "epoch": 0.2868965517241379,
      "grad_norm": 0.35113954544067383,
      "learning_rate": 9.915591603280631e-05,
      "loss": 0.7452,
      "step": 182
    },
    {
      "epoch": 0.2979310344827586,
      "grad_norm": 0.3386627435684204,
      "learning_rate": 9.900615265295552e-05,
      "loss": 0.7377,
      "step": 189
    },
    {
      "epoch": 0.30896551724137933,
      "grad_norm": 0.3772924542427063,
      "learning_rate": 9.884429773823239e-05,
      "loss": 0.736,
      "step": 196
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.36270612478256226,
      "learning_rate": 9.867039122391527e-05,
      "loss": 0.7195,
      "step": 203
    },
    {
      "epoch": 0.3310344827586207,
      "grad_norm": 0.33858415484428406,
      "learning_rate": 9.848447601883435e-05,
      "loss": 0.716,
      "step": 210
    },
    {
      "epoch": 0.34206896551724136,
      "grad_norm": 0.3483560085296631,
      "learning_rate": 9.828659799478456e-05,
      "loss": 0.7287,
      "step": 217
    },
    {
      "epoch": 0.35310344827586204,
      "grad_norm": 0.3399845063686371,
      "learning_rate": 9.807680597520746e-05,
      "loss": 0.7073,
      "step": 224
    },
    {
      "epoch": 0.3641379310344828,
      "grad_norm": 0.3727072477340698,
      "learning_rate": 9.785515172314463e-05,
      "loss": 0.7112,
      "step": 231
    },
    {
      "epoch": 0.37517241379310345,
      "grad_norm": 0.32936757802963257,
      "learning_rate": 9.762168992846614e-05,
      "loss": 0.7263,
      "step": 238
    },
    {
      "epoch": 0.38620689655172413,
      "grad_norm": 0.3701590299606323,
      "learning_rate": 9.737647819437645e-05,
      "loss": 0.7145,
      "step": 245
    },
    {
      "epoch": 0.3972413793103448,
      "grad_norm": 0.3669060468673706,
      "learning_rate": 9.711957702320175e-05,
      "loss": 0.7127,
      "step": 252
    },
    {
      "epoch": 0.40827586206896554,
      "grad_norm": 0.3532595634460449,
      "learning_rate": 9.685104980146193e-05,
      "loss": 0.7064,
      "step": 259
    },
    {
      "epoch": 0.4193103448275862,
      "grad_norm": 0.37261611223220825,
      "learning_rate": 9.657096278423093e-05,
      "loss": 0.697,
      "step": 266
    },
    {
      "epoch": 0.4303448275862069,
      "grad_norm": 0.37206652760505676,
      "learning_rate": 9.627938507878917e-05,
      "loss": 0.709,
      "step": 273
    },
    {
      "epoch": 0.4413793103448276,
      "grad_norm": 0.3685338497161865,
      "learning_rate": 9.597638862757255e-05,
      "loss": 0.6987,
      "step": 280
    },
    {
      "epoch": 0.45241379310344826,
      "grad_norm": 0.36724820733070374,
      "learning_rate": 9.566204819042152e-05,
      "loss": 0.711,
      "step": 287
    },
    {
      "epoch": 0.463448275862069,
      "grad_norm": 0.36288705468177795,
      "learning_rate": 9.533644132613541e-05,
      "loss": 0.712,
      "step": 294
    },
    {
      "epoch": 0.47448275862068967,
      "grad_norm": 0.3663863241672516,
      "learning_rate": 9.49996483733358e-05,
      "loss": 0.6997,
      "step": 301
    },
    {
      "epoch": 0.48551724137931035,
      "grad_norm": 0.3941881060600281,
      "learning_rate": 9.465175243064428e-05,
      "loss": 0.6958,
      "step": 308
    },
    {
      "epoch": 0.496551724137931,
      "grad_norm": 0.3758893311023712,
      "learning_rate": 9.4292839336179e-05,
      "loss": 0.7035,
      "step": 315
    },
    {
      "epoch": 0.5075862068965518,
      "grad_norm": 0.3466153144836426,
      "learning_rate": 9.39229976463755e-05,
      "loss": 0.6833,
      "step": 322
    },
    {
      "epoch": 0.5186206896551724,
      "grad_norm": 0.3633442521095276,
      "learning_rate": 9.354231861413668e-05,
      "loss": 0.6893,
      "step": 329
    },
    {
      "epoch": 0.5296551724137931,
      "grad_norm": 0.3545131981372833,
      "learning_rate": 9.315089616631752e-05,
      "loss": 0.6967,
      "step": 336
    },
    {
      "epoch": 0.5406896551724137,
      "grad_norm": 0.3482682406902313,
      "learning_rate": 9.274882688055005e-05,
      "loss": 0.6953,
      "step": 343
    },
    {
      "epoch": 0.5517241379310345,
      "grad_norm": 0.36485904455184937,
      "learning_rate": 9.233620996141421e-05,
      "loss": 0.6942,
      "step": 350
    },
    {
      "epoch": 0.5627586206896552,
      "grad_norm": 0.35146597027778625,
      "learning_rate": 9.191314721596072e-05,
      "loss": 0.6749,
      "step": 357
    },
    {
      "epoch": 0.5737931034482758,
      "grad_norm": 0.4116743206977844,
      "learning_rate": 9.147974302859157e-05,
      "loss": 0.6892,
      "step": 364
    },
    {
      "epoch": 0.5848275862068966,
      "grad_norm": 0.40801239013671875,
      "learning_rate": 9.103610433530483e-05,
      "loss": 0.6774,
      "step": 371
    },
    {
      "epoch": 0.5958620689655172,
      "grad_norm": 0.3636874258518219,
      "learning_rate": 9.058234059730976e-05,
      "loss": 0.6794,
      "step": 378
    },
    {
      "epoch": 0.6068965517241379,
      "grad_norm": 0.40341806411743164,
      "learning_rate": 9.01185637740189e-05,
      "loss": 0.6923,
      "step": 385
    },
    {
      "epoch": 0.6179310344827587,
      "grad_norm": 0.35487258434295654,
      "learning_rate": 8.964488829542377e-05,
      "loss": 0.6863,
      "step": 392
    },
    {
      "epoch": 0.6289655172413793,
      "grad_norm": 0.38306641578674316,
      "learning_rate": 8.916143103386093e-05,
      "loss": 0.6935,
      "step": 399
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.3719683587551117,
      "learning_rate": 8.866831127517557e-05,
      "loss": 0.6848,
      "step": 406
    },
    {
      "epoch": 0.6510344827586206,
      "grad_norm": 0.36525729298591614,
      "learning_rate": 8.81656506892894e-05,
      "loss": 0.6723,
      "step": 413
    },
    {
      "epoch": 0.6620689655172414,
      "grad_norm": 0.3747231662273407,
      "learning_rate": 8.765357330018056e-05,
      "loss": 0.6684,
      "step": 420
    },
    {
      "epoch": 0.6731034482758621,
      "grad_norm": 0.3755883574485779,
      "learning_rate": 8.71322054552824e-05,
      "loss": 0.6671,
      "step": 427
    },
    {
      "epoch": 0.6841379310344827,
      "grad_norm": 0.3866102397441864,
      "learning_rate": 8.660167579430927e-05,
      "loss": 0.6721,
      "step": 434
    },
    {
      "epoch": 0.6951724137931035,
      "grad_norm": 0.3862460255622864,
      "learning_rate": 8.606211521751652e-05,
      "loss": 0.6806,
      "step": 441
    },
    {
      "epoch": 0.7062068965517241,
      "grad_norm": 0.3793068528175354,
      "learning_rate": 8.551365685340285e-05,
      "loss": 0.6919,
      "step": 448
    },
    {
      "epoch": 0.7172413793103448,
      "grad_norm": 0.3663445711135864,
      "learning_rate": 8.495643602586287e-05,
      "loss": 0.6767,
      "step": 455
    },
    {
      "epoch": 0.7282758620689656,
      "grad_norm": 0.365164190530777,
      "learning_rate": 8.439059022079789e-05,
      "loss": 0.6528,
      "step": 462
    },
    {
      "epoch": 0.7393103448275862,
      "grad_norm": 0.37421566247940063,
      "learning_rate": 8.381625905219339e-05,
      "loss": 0.6528,
      "step": 469
    },
    {
      "epoch": 0.7503448275862069,
      "grad_norm": 0.4169900715351105,
      "learning_rate": 8.32335842276713e-05,
      "loss": 0.6619,
      "step": 476
    },
    {
      "epoch": 0.7613793103448275,
      "grad_norm": 0.3822152316570282,
      "learning_rate": 8.264270951352581e-05,
      "loss": 0.6685,
      "step": 483
    },
    {
      "epoch": 0.7724137931034483,
      "grad_norm": 0.37115615606307983,
      "learning_rate": 8.20437806992512e-05,
      "loss": 0.6634,
      "step": 490
    },
    {
      "epoch": 0.783448275862069,
      "grad_norm": 0.40461844205856323,
      "learning_rate": 8.143694556157046e-05,
      "loss": 0.6688,
      "step": 497
    },
    {
      "epoch": 0.7944827586206896,
      "grad_norm": 0.3837352693080902,
      "learning_rate": 8.082235382797349e-05,
      "loss": 0.6707,
      "step": 504
    },
    {
      "epoch": 0.8055172413793104,
      "grad_norm": 0.3932512104511261,
      "learning_rate": 8.020015713977427e-05,
      "loss": 0.6666,
      "step": 511
    },
    {
      "epoch": 0.8165517241379311,
      "grad_norm": 0.3982972204685211,
      "learning_rate": 7.957050901469545e-05,
      "loss": 0.6764,
      "step": 518
    },
    {
      "epoch": 0.8275862068965517,
      "grad_norm": 0.3910225033760071,
      "learning_rate": 7.89335648089903e-05,
      "loss": 0.6721,
      "step": 525
    },
    {
      "epoch": 0.8386206896551724,
      "grad_norm": 0.4260442852973938,
      "learning_rate": 7.828948167911074e-05,
      "loss": 0.6595,
      "step": 532
    },
    {
      "epoch": 0.8496551724137931,
      "grad_norm": 0.37996965646743774,
      "learning_rate": 7.763841854293145e-05,
      "loss": 0.6551,
      "step": 539
    },
    {
      "epoch": 0.8606896551724138,
      "grad_norm": 0.38492435216903687,
      "learning_rate": 7.698053604053922e-05,
      "loss": 0.6623,
      "step": 546
    },
    {
      "epoch": 0.8717241379310345,
      "grad_norm": 0.3707030117511749,
      "learning_rate": 7.631599649459744e-05,
      "loss": 0.6457,
      "step": 553
    },
    {
      "epoch": 0.8827586206896552,
      "grad_norm": 0.37289130687713623,
      "learning_rate": 7.564496387029532e-05,
      "loss": 0.6529,
      "step": 560
    },
    {
      "epoch": 0.8937931034482759,
      "grad_norm": 0.39539724588394165,
      "learning_rate": 7.496760373489202e-05,
      "loss": 0.6442,
      "step": 567
    },
    {
      "epoch": 0.9048275862068965,
      "grad_norm": 0.3812501132488251,
      "learning_rate": 7.428408321686541e-05,
      "loss": 0.6484,
      "step": 574
    },
    {
      "epoch": 0.9158620689655173,
      "grad_norm": 0.3905741572380066,
      "learning_rate": 7.35945709646756e-05,
      "loss": 0.6479,
      "step": 581
    },
    {
      "epoch": 0.926896551724138,
      "grad_norm": 0.3890450596809387,
      "learning_rate": 7.289923710515339e-05,
      "loss": 0.6569,
      "step": 588
    },
    {
      "epoch": 0.9379310344827586,
      "grad_norm": 0.3712526559829712,
      "learning_rate": 7.219825320152411e-05,
      "loss": 0.6443,
      "step": 595
    },
    {
      "epoch": 0.9489655172413793,
      "grad_norm": 0.3965970277786255,
      "learning_rate": 7.149179221107694e-05,
      "loss": 0.6515,
      "step": 602
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.37230828404426575,
      "learning_rate": 7.078002844249032e-05,
      "loss": 0.6394,
      "step": 609
    },
    {
      "epoch": 0.9710344827586207,
      "grad_norm": 0.4104078710079193,
      "learning_rate": 7.006313751282372e-05,
      "loss": 0.6424,
      "step": 616
    },
    {
      "epoch": 0.9820689655172414,
      "grad_norm": 0.39799052476882935,
      "learning_rate": 6.934129630418701e-05,
      "loss": 0.655,
      "step": 623
    },
    {
      "epoch": 0.993103448275862,
      "grad_norm": 0.3745689392089844,
      "learning_rate": 6.861468292009727e-05,
      "loss": 0.646,
      "step": 630
    },
    {
      "epoch": 1.0041379310344827,
      "grad_norm": 0.4002785384654999,
      "learning_rate": 6.788347664153447e-05,
      "loss": 0.6665,
      "step": 637
    },
    {
      "epoch": 1.0151724137931035,
      "grad_norm": 0.42168787121772766,
      "learning_rate": 6.714785788270658e-05,
      "loss": 0.5686,
      "step": 644
    },
    {
      "epoch": 1.0262068965517241,
      "grad_norm": 0.3978714644908905,
      "learning_rate": 6.640800814653503e-05,
      "loss": 0.5379,
      "step": 651
    },
    {
      "epoch": 1.0372413793103448,
      "grad_norm": 0.4157312214374542,
      "learning_rate": 6.566410997987163e-05,
      "loss": 0.5554,
      "step": 658
    },
    {
      "epoch": 1.0482758620689656,
      "grad_norm": 0.3982793092727661,
      "learning_rate": 6.49163469284578e-05,
      "loss": 0.5526,
      "step": 665
    },
    {
      "epoch": 1.0593103448275862,
      "grad_norm": 0.41753092408180237,
      "learning_rate": 6.416490349163748e-05,
      "loss": 0.5535,
      "step": 672
    },
    {
      "epoch": 1.0703448275862069,
      "grad_norm": 0.4113280475139618,
      "learning_rate": 6.340996507683458e-05,
      "loss": 0.5645,
      "step": 679
    },
    {
      "epoch": 1.0813793103448275,
      "grad_norm": 0.4069710671901703,
      "learning_rate": 6.265171795380659e-05,
      "loss": 0.536,
      "step": 686
    },
    {
      "epoch": 1.0924137931034483,
      "grad_norm": 0.41753774881362915,
      "learning_rate": 6.189034920868522e-05,
      "loss": 0.5474,
      "step": 693
    },
    {
      "epoch": 1.103448275862069,
      "grad_norm": 0.3889447748661041,
      "learning_rate": 6.112604669781572e-05,
      "loss": 0.5237,
      "step": 700
    },
    {
      "epoch": 1.1144827586206896,
      "grad_norm": 0.41418376564979553,
      "learning_rate": 6.0358999001406156e-05,
      "loss": 0.5591,
      "step": 707
    },
    {
      "epoch": 1.1255172413793104,
      "grad_norm": 0.4075161814689636,
      "learning_rate": 5.9589395376998e-05,
      "loss": 0.5473,
      "step": 714
    },
    {
      "epoch": 1.136551724137931,
      "grad_norm": 0.38883715867996216,
      "learning_rate": 5.8817425712769794e-05,
      "loss": 0.5345,
      "step": 721
    },
    {
      "epoch": 1.1475862068965517,
      "grad_norm": 0.40612930059432983,
      "learning_rate": 5.804328048068492e-05,
      "loss": 0.5512,
      "step": 728
    },
    {
      "epoch": 1.1586206896551725,
      "grad_norm": 0.4203000068664551,
      "learning_rate": 5.7267150689495644e-05,
      "loss": 0.546,
      "step": 735
    },
    {
      "epoch": 1.1696551724137931,
      "grad_norm": 0.40659859776496887,
      "learning_rate": 5.648922783761443e-05,
      "loss": 0.57,
      "step": 742
    },
    {
      "epoch": 1.1806896551724138,
      "grad_norm": 0.3896077871322632,
      "learning_rate": 5.570970386586469e-05,
      "loss": 0.5565,
      "step": 749
    },
    {
      "epoch": 1.1917241379310344,
      "grad_norm": 0.4040294587612152,
      "learning_rate": 5.492877111012218e-05,
      "loss": 0.5489,
      "step": 756
    },
    {
      "epoch": 1.2027586206896552,
      "grad_norm": 0.41124486923217773,
      "learning_rate": 5.414662225385903e-05,
      "loss": 0.5467,
      "step": 763
    },
    {
      "epoch": 1.2137931034482758,
      "grad_norm": 0.4190484583377838,
      "learning_rate": 5.336345028060199e-05,
      "loss": 0.5508,
      "step": 770
    },
    {
      "epoch": 1.2248275862068965,
      "grad_norm": 0.4511507749557495,
      "learning_rate": 5.257944842631658e-05,
      "loss": 0.5538,
      "step": 777
    },
    {
      "epoch": 1.2358620689655173,
      "grad_norm": 0.4155430495738983,
      "learning_rate": 5.179481013172912e-05,
      "loss": 0.5503,
      "step": 784
    },
    {
      "epoch": 1.246896551724138,
      "grad_norm": 0.40163281559944153,
      "learning_rate": 5.100972899459796e-05,
      "loss": 0.5406,
      "step": 791
    },
    {
      "epoch": 1.2579310344827586,
      "grad_norm": 0.4043784737586975,
      "learning_rate": 5.022439872194629e-05,
      "loss": 0.5494,
      "step": 798
    },
    {
      "epoch": 1.2689655172413792,
      "grad_norm": 0.4234471917152405,
      "learning_rate": 4.943901308226771e-05,
      "loss": 0.5424,
      "step": 805
    },
    {
      "epoch": 1.28,
      "grad_norm": 0.41634508967399597,
      "learning_rate": 4.865376585771687e-05,
      "loss": 0.5385,
      "step": 812
    },
    {
      "epoch": 1.2910344827586206,
      "grad_norm": 0.3989020586013794,
      "learning_rate": 4.7868850796296495e-05,
      "loss": 0.5596,
      "step": 819
    },
    {
      "epoch": 1.3020689655172415,
      "grad_norm": 0.4220573902130127,
      "learning_rate": 4.708446156405307e-05,
      "loss": 0.5546,
      "step": 826
    },
    {
      "epoch": 1.3131034482758621,
      "grad_norm": 0.4011528789997101,
      "learning_rate": 4.630079169729257e-05,
      "loss": 0.549,
      "step": 833
    },
    {
      "epoch": 1.3241379310344827,
      "grad_norm": 0.3943440318107605,
      "learning_rate": 4.551803455482833e-05,
      "loss": 0.5369,
      "step": 840
    },
    {
      "epoch": 1.3351724137931034,
      "grad_norm": 0.3866331875324249,
      "learning_rate": 4.473638327027259e-05,
      "loss": 0.5369,
      "step": 847
    },
    {
      "epoch": 1.3462068965517242,
      "grad_norm": 0.4149519205093384,
      "learning_rate": 4.395603070438373e-05,
      "loss": 0.5462,
      "step": 854
    },
    {
      "epoch": 1.3572413793103448,
      "grad_norm": 0.395431786775589,
      "learning_rate": 4.31771693974807e-05,
      "loss": 0.5453,
      "step": 861
    },
    {
      "epoch": 1.3682758620689655,
      "grad_norm": 0.38319146633148193,
      "learning_rate": 4.239999152193664e-05,
      "loss": 0.5441,
      "step": 868
    },
    {
      "epoch": 1.3793103448275863,
      "grad_norm": 0.4112420082092285,
      "learning_rate": 4.162468883476319e-05,
      "loss": 0.5348,
      "step": 875
    },
    {
      "epoch": 1.390344827586207,
      "grad_norm": 0.40384915471076965,
      "learning_rate": 4.085145263029726e-05,
      "loss": 0.5532,
      "step": 882
    },
    {
      "epoch": 1.4013793103448275,
      "grad_norm": 0.4221806228160858,
      "learning_rate": 4.008047369300218e-05,
      "loss": 0.5474,
      "step": 889
    },
    {
      "epoch": 1.4124137931034482,
      "grad_norm": 0.432949960231781,
      "learning_rate": 3.9311942250394276e-05,
      "loss": 0.5206,
      "step": 896
    },
    {
      "epoch": 1.423448275862069,
      "grad_norm": 0.4182206988334656,
      "learning_rate": 3.8546047926107256e-05,
      "loss": 0.544,
      "step": 903
    },
    {
      "epoch": 1.4344827586206896,
      "grad_norm": 0.4261842370033264,
      "learning_rate": 3.778297969310529e-05,
      "loss": 0.5511,
      "step": 910
    },
    {
      "epoch": 1.4455172413793105,
      "grad_norm": 0.4241354465484619,
      "learning_rate": 3.7022925827056884e-05,
      "loss": 0.5392,
      "step": 917
    },
    {
      "epoch": 1.456551724137931,
      "grad_norm": 0.3890000283718109,
      "learning_rate": 3.62660738598805e-05,
      "loss": 0.5457,
      "step": 924
    },
    {
      "epoch": 1.4675862068965517,
      "grad_norm": 0.41937190294265747,
      "learning_rate": 3.551261053347404e-05,
      "loss": 0.531,
      "step": 931
    },
    {
      "epoch": 1.4786206896551723,
      "grad_norm": 0.40612250566482544,
      "learning_rate": 3.4762721753638995e-05,
      "loss": 0.5333,
      "step": 938
    },
    {
      "epoch": 1.489655172413793,
      "grad_norm": 0.41920045018196106,
      "learning_rate": 3.401659254421094e-05,
      "loss": 0.547,
      "step": 945
    },
    {
      "epoch": 1.5006896551724138,
      "grad_norm": 0.42577028274536133,
      "learning_rate": 3.3274407001407735e-05,
      "loss": 0.532,
      "step": 952
    },
    {
      "epoch": 1.5117241379310344,
      "grad_norm": 0.40364742279052734,
      "learning_rate": 3.2536348248406534e-05,
      "loss": 0.5337,
      "step": 959
    },
    {
      "epoch": 1.5227586206896553,
      "grad_norm": 0.41058188676834106,
      "learning_rate": 3.1802598390160784e-05,
      "loss": 0.5301,
      "step": 966
    },
    {
      "epoch": 1.533793103448276,
      "grad_norm": 0.4022769629955292,
      "learning_rate": 3.107333846846872e-05,
      "loss": 0.5341,
      "step": 973
    },
    {
      "epoch": 1.5448275862068965,
      "grad_norm": 0.39046093821525574,
      "learning_rate": 3.0348748417303823e-05,
      "loss": 0.5298,
      "step": 980
    },
    {
      "epoch": 1.5558620689655172,
      "grad_norm": 0.40701109170913696,
      "learning_rate": 2.9629007018418985e-05,
      "loss": 0.5344,
      "step": 987
    },
    {
      "epoch": 1.5668965517241378,
      "grad_norm": 0.4146254360675812,
      "learning_rate": 2.8914291857234636e-05,
      "loss": 0.533,
      "step": 994
    },
    {
      "epoch": 1.5779310344827586,
      "grad_norm": 0.408590704202652,
      "learning_rate": 2.8204779279022276e-05,
      "loss": 0.5247,
      "step": 1001
    },
    {
      "epoch": 1.5889655172413795,
      "grad_norm": 0.3929610848426819,
      "learning_rate": 2.7500644345393943e-05,
      "loss": 0.5412,
      "step": 1008
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.40061962604522705,
      "learning_rate": 2.68020607911083e-05,
      "loss": 0.5167,
      "step": 1015
    },
    {
      "epoch": 1.6110344827586207,
      "grad_norm": 0.4084263741970062,
      "learning_rate": 2.610920098120424e-05,
      "loss": 0.5524,
      "step": 1022
    },
    {
      "epoch": 1.6220689655172413,
      "grad_norm": 0.4147377014160156,
      "learning_rate": 2.5422235868472345e-05,
      "loss": 0.5328,
      "step": 1029
    },
    {
      "epoch": 1.633103448275862,
      "grad_norm": 0.4028978645801544,
      "learning_rate": 2.4741334951274947e-05,
      "loss": 0.5218,
      "step": 1036
    },
    {
      "epoch": 1.6441379310344828,
      "grad_norm": 0.4164421856403351,
      "learning_rate": 2.40666662317248e-05,
      "loss": 0.5282,
      "step": 1043
    },
    {
      "epoch": 1.6551724137931034,
      "grad_norm": 0.4045639932155609,
      "learning_rate": 2.3398396174233178e-05,
      "loss": 0.5347,
      "step": 1050
    },
    {
      "epoch": 1.6662068965517243,
      "grad_norm": 0.4201739728450775,
      "learning_rate": 2.2736689664437217e-05,
      "loss": 0.5364,
      "step": 1057
    },
    {
      "epoch": 1.677241379310345,
      "grad_norm": 0.39254507422447205,
      "learning_rate": 2.2081709968516866e-05,
      "loss": 0.5143,
      "step": 1064
    },
    {
      "epoch": 1.6882758620689655,
      "grad_norm": 0.42642974853515625,
      "learning_rate": 2.1433618692911467e-05,
      "loss": 0.5268,
      "step": 1071
    },
    {
      "epoch": 1.6993103448275861,
      "grad_norm": 0.41387197375297546,
      "learning_rate": 2.0792575744445653e-05,
      "loss": 0.5213,
      "step": 1078
    },
    {
      "epoch": 1.7103448275862068,
      "grad_norm": 0.4264724552631378,
      "learning_rate": 2.015873929087482e-05,
      "loss": 0.5262,
      "step": 1085
    },
    {
      "epoch": 1.7213793103448276,
      "grad_norm": 0.40214499831199646,
      "learning_rate": 1.95322657218596e-05,
      "loss": 0.5169,
      "step": 1092
    },
    {
      "epoch": 1.7324137931034482,
      "grad_norm": 0.4083193838596344,
      "learning_rate": 1.8913309610379015e-05,
      "loss": 0.5298,
      "step": 1099
    },
    {
      "epoch": 1.743448275862069,
      "grad_norm": 0.39298340678215027,
      "learning_rate": 1.8302023674591935e-05,
      "loss": 0.5167,
      "step": 1106
    },
    {
      "epoch": 1.7544827586206897,
      "grad_norm": 0.42638927698135376,
      "learning_rate": 1.7698558740156135e-05,
      "loss": 0.5215,
      "step": 1113
    },
    {
      "epoch": 1.7655172413793103,
      "grad_norm": 0.40725937485694885,
      "learning_rate": 1.7103063703014372e-05,
      "loss": 0.5297,
      "step": 1120
    },
    {
      "epoch": 1.776551724137931,
      "grad_norm": 0.3900083005428314,
      "learning_rate": 1.6515685492656467e-05,
      "loss": 0.5179,
      "step": 1127
    },
    {
      "epoch": 1.7875862068965516,
      "grad_norm": 0.4194444715976715,
      "learning_rate": 1.59365690358667e-05,
      "loss": 0.5181,
      "step": 1134
    },
    {
      "epoch": 1.7986206896551724,
      "grad_norm": 0.39986348152160645,
      "learning_rate": 1.5365857220965275e-05,
      "loss": 0.5225,
      "step": 1141
    },
    {
      "epoch": 1.8096551724137933,
      "grad_norm": 0.4116763472557068,
      "learning_rate": 1.4803690862552755e-05,
      "loss": 0.5203,
      "step": 1148
    },
    {
      "epoch": 1.8206896551724139,
      "grad_norm": 0.4018745422363281,
      "learning_rate": 1.4250208666766235e-05,
      "loss": 0.5181,
      "step": 1155
    },
    {
      "epoch": 1.8317241379310345,
      "grad_norm": 0.3829161822795868,
      "learning_rate": 1.3705547197055584e-05,
      "loss": 0.5179,
      "step": 1162
    },
    {
      "epoch": 1.8427586206896551,
      "grad_norm": 0.40862053632736206,
      "learning_rate": 1.3169840840488501e-05,
      "loss": 0.5214,
      "step": 1169
    },
    {
      "epoch": 1.8537931034482757,
      "grad_norm": 0.43748798966407776,
      "learning_rate": 1.2643221774592518e-05,
      "loss": 0.5231,
      "step": 1176
    },
    {
      "epoch": 1.8648275862068966,
      "grad_norm": 0.41041648387908936,
      "learning_rate": 1.2125819934742188e-05,
      "loss": 0.5161,
      "step": 1183
    },
    {
      "epoch": 1.8758620689655172,
      "grad_norm": 0.3941383957862854,
      "learning_rate": 1.1617762982099446e-05,
      "loss": 0.5152,
      "step": 1190
    },
    {
      "epoch": 1.886896551724138,
      "grad_norm": 0.422385036945343,
      "learning_rate": 1.1119176272115128e-05,
      "loss": 0.5124,
      "step": 1197
    },
    {
      "epoch": 1.8979310344827587,
      "grad_norm": 0.4116532504558563,
      "learning_rate": 1.0630182823599399e-05,
      "loss": 0.5263,
      "step": 1204
    },
    {
      "epoch": 1.9089655172413793,
      "grad_norm": 0.4027996361255646,
      "learning_rate": 1.0150903288368741e-05,
      "loss": 0.5172,
      "step": 1211
    },
    {
      "epoch": 1.92,
      "grad_norm": 0.40005895495414734,
      "learning_rate": 9.681455921476839e-06,
      "loss": 0.5136,
      "step": 1218
    },
    {
      "epoch": 1.9310344827586206,
      "grad_norm": 0.4146558940410614,
      "learning_rate": 9.221956552036992e-06,
      "loss": 0.531,
      "step": 1225
    },
    {
      "epoch": 1.9420689655172414,
      "grad_norm": 0.4217384457588196,
      "learning_rate": 8.772518554642973e-06,
      "loss": 0.5333,
      "step": 1232
    },
    {
      "epoch": 1.953103448275862,
      "grad_norm": 0.4167080819606781,
      "learning_rate": 8.333252821395526e-06,
      "loss": 0.5138,
      "step": 1239
    },
    {
      "epoch": 1.9641379310344829,
      "grad_norm": 0.40761256217956543,
      "learning_rate": 7.904267734541498e-06,
      "loss": 0.5215,
      "step": 1246
    },
    {
      "epoch": 1.9751724137931035,
      "grad_norm": 0.3934479057788849,
      "learning_rate": 7.485669139732004e-06,
      "loss": 0.5095,
      "step": 1253
    },
    {
      "epoch": 1.986206896551724,
      "grad_norm": 0.4060850739479065,
      "learning_rate": 7.077560319906695e-06,
      "loss": 0.5129,
      "step": 1260
    },
    {
      "epoch": 1.9972413793103447,
      "grad_norm": 0.39918309450149536,
      "learning_rate": 6.680041969810203e-06,
      "loss": 0.5018,
      "step": 1267
    },
    {
      "epoch": 2.0082758620689654,
      "grad_norm": 0.39984405040740967,
      "learning_rate": 6.293212171147206e-06,
      "loss": 0.5029,
      "step": 1274
    },
    {
      "epoch": 2.0193103448275864,
      "grad_norm": 0.409271776676178,
      "learning_rate": 5.917166368382277e-06,
      "loss": 0.4303,
      "step": 1281
    },
    {
      "epoch": 2.030344827586207,
      "grad_norm": 0.4450385570526123,
      "learning_rate": 5.5519973451903405e-06,
      "loss": 0.4304,
      "step": 1288
    },
    {
      "epoch": 2.0413793103448277,
      "grad_norm": 0.4327320456504822,
      "learning_rate": 5.197795201563743e-06,
      "loss": 0.4258,
      "step": 1295
    },
    {
      "epoch": 2.0524137931034483,
      "grad_norm": 0.42840683460235596,
      "learning_rate": 4.8546473315813856e-06,
      "loss": 0.4298,
      "step": 1302
    },
    {
      "epoch": 2.063448275862069,
      "grad_norm": 0.41301754117012024,
      "learning_rate": 4.522638401845547e-06,
      "loss": 0.4287,
      "step": 1309
    },
    {
      "epoch": 2.0744827586206895,
      "grad_norm": 0.4132091701030731,
      "learning_rate": 4.2018503305916775e-06,
      "loss": 0.4211,
      "step": 1316
    },
    {
      "epoch": 2.08551724137931,
      "grad_norm": 0.43061113357543945,
      "learning_rate": 3.892362267476313e-06,
      "loss": 0.429,
      "step": 1323
    },
    {
      "epoch": 2.0965517241379312,
      "grad_norm": 0.41242024302482605,
      "learning_rate": 3.5942505740480582e-06,
      "loss": 0.4254,
      "step": 1330
    },
    {
      "epoch": 2.107586206896552,
      "grad_norm": 0.4244064390659332,
      "learning_rate": 3.3075888049065196e-06,
      "loss": 0.4263,
      "step": 1337
    },
    {
      "epoch": 2.1186206896551725,
      "grad_norm": 0.4177404046058655,
      "learning_rate": 3.03244768955383e-06,
      "loss": 0.431,
      "step": 1344
    },
    {
      "epoch": 2.129655172413793,
      "grad_norm": 0.42242953181266785,
      "learning_rate": 2.7688951149431595e-06,
      "loss": 0.4312,
      "step": 1351
    },
    {
      "epoch": 2.1406896551724137,
      "grad_norm": 0.43014487624168396,
      "learning_rate": 2.5169961087286974e-06,
      "loss": 0.4265,
      "step": 1358
    },
    {
      "epoch": 2.1517241379310343,
      "grad_norm": 0.43356141448020935,
      "learning_rate": 2.276812823220964e-06,
      "loss": 0.4384,
      "step": 1365
    },
    {
      "epoch": 2.162758620689655,
      "grad_norm": 0.4384445548057556,
      "learning_rate": 2.048404520051722e-06,
      "loss": 0.4187,
      "step": 1372
    },
    {
      "epoch": 2.173793103448276,
      "grad_norm": 0.4111485183238983,
      "learning_rate": 1.8318275555520237e-06,
      "loss": 0.4313,
      "step": 1379
    },
    {
      "epoch": 2.1848275862068967,
      "grad_norm": 0.4494364261627197,
      "learning_rate": 1.6271353668471655e-06,
      "loss": 0.4305,
      "step": 1386
    },
    {
      "epoch": 2.1958620689655173,
      "grad_norm": 0.422455757856369,
      "learning_rate": 1.4343784586718311e-06,
      "loss": 0.4174,
      "step": 1393
    },
    {
      "epoch": 2.206896551724138,
      "grad_norm": 0.4379701614379883,
      "learning_rate": 1.2536043909088191e-06,
      "loss": 0.4177,
      "step": 1400
    },
    {
      "epoch": 2.2179310344827585,
      "grad_norm": 0.4343680143356323,
      "learning_rate": 1.0848577668543802e-06,
      "loss": 0.4176,
      "step": 1407
    },
    {
      "epoch": 2.228965517241379,
      "grad_norm": 0.42176929116249084,
      "learning_rate": 9.281802222129765e-07,
      "loss": 0.417,
      "step": 1414
    },
    {
      "epoch": 2.24,
      "grad_norm": 0.4197026491165161,
      "learning_rate": 7.836104148243484e-07,
      "loss": 0.4201,
      "step": 1421
    },
    {
      "epoch": 2.251034482758621,
      "grad_norm": 0.42568454146385193,
      "learning_rate": 6.511840151252169e-07,
      "loss": 0.4164,
      "step": 1428
    },
    {
      "epoch": 2.2620689655172415,
      "grad_norm": 0.4492688775062561,
      "learning_rate": 5.309336973481683e-07,
      "loss": 0.4289,
      "step": 1435
    },
    {
      "epoch": 2.273103448275862,
      "grad_norm": 0.43771427869796753,
      "learning_rate": 4.228891314597694e-07,
      "loss": 0.4362,
      "step": 1442
    },
    {
      "epoch": 2.2841379310344827,
      "grad_norm": 0.4285426139831543,
      "learning_rate": 3.2707697583995167e-07,
      "loss": 0.4161,
      "step": 1449
    },
    {
      "epoch": 2.2951724137931033,
      "grad_norm": 0.433178573846817,
      "learning_rate": 2.4352087070443895e-07,
      "loss": 0.4252,
      "step": 1456
    },
    {
      "epoch": 2.306206896551724,
      "grad_norm": 0.43515661358833313,
      "learning_rate": 1.7224143227190236e-07,
      "loss": 0.4155,
      "step": 1463
    },
    {
      "epoch": 2.317241379310345,
      "grad_norm": 0.45461907982826233,
      "learning_rate": 1.132562476771959e-07,
      "loss": 0.4276,
      "step": 1470
    },
    {
      "epoch": 2.3282758620689656,
      "grad_norm": 0.42468348145484924,
      "learning_rate": 6.657987063200533e-08,
      "loss": 0.4256,
      "step": 1477
    },
    {
      "epoch": 2.3393103448275863,
      "grad_norm": 0.4367210865020752,
      "learning_rate": 3.2223817833931805e-08,
      "loss": 0.429,
      "step": 1484
    },
    {
      "epoch": 2.350344827586207,
      "grad_norm": 0.43067917227745056,
      "learning_rate": 1.019656612492592e-08,
      "loss": 0.4242,
      "step": 1491
    },
    {
      "epoch": 2.3613793103448275,
      "grad_norm": 0.44099918007850647,
      "learning_rate": 5.035503997385949e-10,
      "loss": 0.424,
      "step": 1498
    }
  ],
  "logging_steps": 7,
  "max_steps": 1500,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.238031356493824e+18,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}
|
|