{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 5.994876571960876,
"eval_steps": 500,
"global_step": 6438,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.004657661853749418,
"grad_norm": 14.037870104148775,
"learning_rate": 3.8819875776397516e-07,
"loss": 2.6559,
"step": 5
},
{
"epoch": 0.009315323707498836,
"grad_norm": 14.018507695640166,
"learning_rate": 7.763975155279503e-07,
"loss": 2.6439,
"step": 10
},
{
"epoch": 0.013972985561248253,
"grad_norm": 13.74585203517481,
"learning_rate": 1.1645962732919254e-06,
"loss": 2.6109,
"step": 15
},
{
"epoch": 0.018630647414997672,
"grad_norm": 12.26602342821938,
"learning_rate": 1.5527950310559006e-06,
"loss": 2.541,
"step": 20
},
{
"epoch": 0.02328830926874709,
"grad_norm": 8.985034614318803,
"learning_rate": 1.940993788819876e-06,
"loss": 2.365,
"step": 25
},
{
"epoch": 0.027945971122496506,
"grad_norm": 8.518106704276775,
"learning_rate": 2.329192546583851e-06,
"loss": 2.2655,
"step": 30
},
{
"epoch": 0.032603632976245925,
"grad_norm": 7.692662664956911,
"learning_rate": 2.7173913043478263e-06,
"loss": 2.2177,
"step": 35
},
{
"epoch": 0.037261294829995344,
"grad_norm": 2.2695875679058966,
"learning_rate": 3.1055900621118013e-06,
"loss": 2.1003,
"step": 40
},
{
"epoch": 0.04191895668374476,
"grad_norm": 2.7959358163596217,
"learning_rate": 3.4937888198757763e-06,
"loss": 1.9482,
"step": 45
},
{
"epoch": 0.04657661853749418,
"grad_norm": 2.1750434470602715,
"learning_rate": 3.881987577639752e-06,
"loss": 1.9068,
"step": 50
},
{
"epoch": 0.05123428039124359,
"grad_norm": 1.1232980728931912,
"learning_rate": 4.270186335403727e-06,
"loss": 1.8378,
"step": 55
},
{
"epoch": 0.05589194224499301,
"grad_norm": 0.8825405994938238,
"learning_rate": 4.658385093167702e-06,
"loss": 1.7925,
"step": 60
},
{
"epoch": 0.06054960409874243,
"grad_norm": 0.8987047049592403,
"learning_rate": 5.046583850931677e-06,
"loss": 1.7318,
"step": 65
},
{
"epoch": 0.06520726595249185,
"grad_norm": 0.7265996503150173,
"learning_rate": 5.4347826086956525e-06,
"loss": 1.65,
"step": 70
},
{
"epoch": 0.06986492780624126,
"grad_norm": 0.6528101497449033,
"learning_rate": 5.8229813664596275e-06,
"loss": 1.6243,
"step": 75
},
{
"epoch": 0.07452258965999069,
"grad_norm": 0.7224211410313209,
"learning_rate": 6.2111801242236025e-06,
"loss": 1.6201,
"step": 80
},
{
"epoch": 0.0791802515137401,
"grad_norm": 0.5806690237524301,
"learning_rate": 6.5993788819875775e-06,
"loss": 1.5475,
"step": 85
},
{
"epoch": 0.08383791336748952,
"grad_norm": 0.597058825325402,
"learning_rate": 6.9875776397515525e-06,
"loss": 1.5384,
"step": 90
},
{
"epoch": 0.08849557522123894,
"grad_norm": 0.55670574011478,
"learning_rate": 7.375776397515528e-06,
"loss": 1.5362,
"step": 95
},
{
"epoch": 0.09315323707498836,
"grad_norm": 0.5393976696535505,
"learning_rate": 7.763975155279503e-06,
"loss": 1.4973,
"step": 100
},
{
"epoch": 0.09781089892873777,
"grad_norm": 0.5204227077686031,
"learning_rate": 8.15217391304348e-06,
"loss": 1.4845,
"step": 105
},
{
"epoch": 0.10246856078248719,
"grad_norm": 0.5563822350418148,
"learning_rate": 8.540372670807453e-06,
"loss": 1.4439,
"step": 110
},
{
"epoch": 0.10712622263623661,
"grad_norm": 0.5181030795462002,
"learning_rate": 8.92857142857143e-06,
"loss": 1.4305,
"step": 115
},
{
"epoch": 0.11178388448998602,
"grad_norm": 0.5731923823330213,
"learning_rate": 9.316770186335403e-06,
"loss": 1.439,
"step": 120
},
{
"epoch": 0.11644154634373545,
"grad_norm": 0.582681646921711,
"learning_rate": 9.70496894409938e-06,
"loss": 1.3811,
"step": 125
},
{
"epoch": 0.12109920819748486,
"grad_norm": 0.5474901747641734,
"learning_rate": 1.0093167701863353e-05,
"loss": 1.3784,
"step": 130
},
{
"epoch": 0.1257568700512343,
"grad_norm": 0.5932031228323433,
"learning_rate": 1.048136645962733e-05,
"loss": 1.3829,
"step": 135
},
{
"epoch": 0.1304145319049837,
"grad_norm": 0.5513551481294792,
"learning_rate": 1.0869565217391305e-05,
"loss": 1.3528,
"step": 140
},
{
"epoch": 0.1350721937587331,
"grad_norm": 0.5778299522858029,
"learning_rate": 1.1257763975155281e-05,
"loss": 1.3657,
"step": 145
},
{
"epoch": 0.13972985561248252,
"grad_norm": 0.6252635575293012,
"learning_rate": 1.1645962732919255e-05,
"loss": 1.3365,
"step": 150
},
{
"epoch": 0.14438751746623196,
"grad_norm": 0.6131491406232994,
"learning_rate": 1.2034161490683231e-05,
"loss": 1.2873,
"step": 155
},
{
"epoch": 0.14904517931998137,
"grad_norm": 0.6853390178591717,
"learning_rate": 1.2422360248447205e-05,
"loss": 1.2928,
"step": 160
},
{
"epoch": 0.1537028411737308,
"grad_norm": 0.6954345622753815,
"learning_rate": 1.2810559006211181e-05,
"loss": 1.2843,
"step": 165
},
{
"epoch": 0.1583605030274802,
"grad_norm": 0.7535183287534551,
"learning_rate": 1.3198757763975155e-05,
"loss": 1.2709,
"step": 170
},
{
"epoch": 0.1630181648812296,
"grad_norm": 0.6598685647547688,
"learning_rate": 1.3586956521739131e-05,
"loss": 1.2856,
"step": 175
},
{
"epoch": 0.16767582673497905,
"grad_norm": 0.6918935778616571,
"learning_rate": 1.3975155279503105e-05,
"loss": 1.2668,
"step": 180
},
{
"epoch": 0.17233348858872846,
"grad_norm": 0.6301889269605553,
"learning_rate": 1.4363354037267083e-05,
"loss": 1.2513,
"step": 185
},
{
"epoch": 0.17699115044247787,
"grad_norm": 0.8764303684748792,
"learning_rate": 1.4751552795031057e-05,
"loss": 1.2342,
"step": 190
},
{
"epoch": 0.18164881229622729,
"grad_norm": 0.8939970938191784,
"learning_rate": 1.5139751552795031e-05,
"loss": 1.2355,
"step": 195
},
{
"epoch": 0.18630647414997673,
"grad_norm": 0.6866794293645567,
"learning_rate": 1.5527950310559007e-05,
"loss": 1.1672,
"step": 200
},
{
"epoch": 0.19096413600372614,
"grad_norm": 0.649144682338704,
"learning_rate": 1.5916149068322984e-05,
"loss": 1.2104,
"step": 205
},
{
"epoch": 0.19562179785747555,
"grad_norm": 0.8218693043165641,
"learning_rate": 1.630434782608696e-05,
"loss": 1.1818,
"step": 210
},
{
"epoch": 0.20027945971122496,
"grad_norm": 0.7430490952040086,
"learning_rate": 1.6692546583850933e-05,
"loss": 1.1996,
"step": 215
},
{
"epoch": 0.20493712156497437,
"grad_norm": 0.8125678825371954,
"learning_rate": 1.7080745341614907e-05,
"loss": 1.1879,
"step": 220
},
{
"epoch": 0.2095947834187238,
"grad_norm": 0.676480033060189,
"learning_rate": 1.7468944099378884e-05,
"loss": 1.1874,
"step": 225
},
{
"epoch": 0.21425244527247322,
"grad_norm": 0.8515544361485938,
"learning_rate": 1.785714285714286e-05,
"loss": 1.1789,
"step": 230
},
{
"epoch": 0.21891010712622264,
"grad_norm": 0.6704690025578643,
"learning_rate": 1.8245341614906833e-05,
"loss": 1.1653,
"step": 235
},
{
"epoch": 0.22356776897997205,
"grad_norm": 0.8706610468125755,
"learning_rate": 1.8633540372670807e-05,
"loss": 1.1587,
"step": 240
},
{
"epoch": 0.22822543083372146,
"grad_norm": 0.5898112084526719,
"learning_rate": 1.9021739130434784e-05,
"loss": 1.133,
"step": 245
},
{
"epoch": 0.2328830926874709,
"grad_norm": 1.106503362987722,
"learning_rate": 1.940993788819876e-05,
"loss": 1.1191,
"step": 250
},
{
"epoch": 0.2375407545412203,
"grad_norm": 1.0286327080054072,
"learning_rate": 1.9798136645962733e-05,
"loss": 1.1433,
"step": 255
},
{
"epoch": 0.24219841639496972,
"grad_norm": 0.8407919099301676,
"learning_rate": 2.0186335403726707e-05,
"loss": 1.1506,
"step": 260
},
{
"epoch": 0.24685607824871914,
"grad_norm": 0.8654728309548365,
"learning_rate": 2.0574534161490684e-05,
"loss": 1.1308,
"step": 265
},
{
"epoch": 0.2515137401024686,
"grad_norm": 0.7695850121818572,
"learning_rate": 2.096273291925466e-05,
"loss": 1.1041,
"step": 270
},
{
"epoch": 0.25617140195621796,
"grad_norm": 0.7910585274047307,
"learning_rate": 2.1350931677018636e-05,
"loss": 1.1376,
"step": 275
},
{
"epoch": 0.2608290638099674,
"grad_norm": 0.7458734894767046,
"learning_rate": 2.173913043478261e-05,
"loss": 1.1132,
"step": 280
},
{
"epoch": 0.26548672566371684,
"grad_norm": 0.8187339847365924,
"learning_rate": 2.2127329192546584e-05,
"loss": 1.1015,
"step": 285
},
{
"epoch": 0.2701443875174662,
"grad_norm": 0.763447142888627,
"learning_rate": 2.2515527950310562e-05,
"loss": 1.096,
"step": 290
},
{
"epoch": 0.27480204937121566,
"grad_norm": 0.7934188486978877,
"learning_rate": 2.2903726708074536e-05,
"loss": 1.0807,
"step": 295
},
{
"epoch": 0.27945971122496505,
"grad_norm": 0.788453952770843,
"learning_rate": 2.329192546583851e-05,
"loss": 1.1043,
"step": 300
},
{
"epoch": 0.2841173730787145,
"grad_norm": 0.7047893933708144,
"learning_rate": 2.3680124223602484e-05,
"loss": 1.1136,
"step": 305
},
{
"epoch": 0.2887750349324639,
"grad_norm": 0.795257449558012,
"learning_rate": 2.4068322981366462e-05,
"loss": 1.0977,
"step": 310
},
{
"epoch": 0.2934326967862133,
"grad_norm": 0.6915051544535833,
"learning_rate": 2.4456521739130436e-05,
"loss": 1.0722,
"step": 315
},
{
"epoch": 0.29809035863996275,
"grad_norm": 0.7265299997807345,
"learning_rate": 2.484472049689441e-05,
"loss": 1.0911,
"step": 320
},
{
"epoch": 0.30274802049371213,
"grad_norm": 0.7601977613919204,
"learning_rate": 2.5232919254658388e-05,
"loss": 1.0928,
"step": 325
},
{
"epoch": 0.3074056823474616,
"grad_norm": 0.8099690160950118,
"learning_rate": 2.5621118012422362e-05,
"loss": 1.0879,
"step": 330
},
{
"epoch": 0.312063344201211,
"grad_norm": 0.6886893057959771,
"learning_rate": 2.6009316770186336e-05,
"loss": 1.0453,
"step": 335
},
{
"epoch": 0.3167210060549604,
"grad_norm": 0.7367664406007456,
"learning_rate": 2.639751552795031e-05,
"loss": 1.0748,
"step": 340
},
{
"epoch": 0.32137866790870984,
"grad_norm": 0.6783866452713414,
"learning_rate": 2.6785714285714288e-05,
"loss": 1.0507,
"step": 345
},
{
"epoch": 0.3260363297624592,
"grad_norm": 0.9161119691045607,
"learning_rate": 2.7173913043478262e-05,
"loss": 1.054,
"step": 350
},
{
"epoch": 0.33069399161620866,
"grad_norm": 0.7644762711765951,
"learning_rate": 2.7562111801242236e-05,
"loss": 1.0447,
"step": 355
},
{
"epoch": 0.3353516534699581,
"grad_norm": 0.7491094719090783,
"learning_rate": 2.795031055900621e-05,
"loss": 1.0648,
"step": 360
},
{
"epoch": 0.3400093153237075,
"grad_norm": 0.7464676647377744,
"learning_rate": 2.833850931677019e-05,
"loss": 1.0619,
"step": 365
},
{
"epoch": 0.3446669771774569,
"grad_norm": 0.8302793649617403,
"learning_rate": 2.8726708074534165e-05,
"loss": 1.0842,
"step": 370
},
{
"epoch": 0.3493246390312063,
"grad_norm": 0.7553615087579988,
"learning_rate": 2.911490683229814e-05,
"loss": 1.0676,
"step": 375
},
{
"epoch": 0.35398230088495575,
"grad_norm": 0.7122417970022175,
"learning_rate": 2.9503105590062114e-05,
"loss": 1.04,
"step": 380
},
{
"epoch": 0.3586399627387052,
"grad_norm": 0.7963778926214264,
"learning_rate": 2.9891304347826088e-05,
"loss": 1.0258,
"step": 385
},
{
"epoch": 0.36329762459245457,
"grad_norm": 0.8039906902669227,
"learning_rate": 3.0279503105590062e-05,
"loss": 1.0318,
"step": 390
},
{
"epoch": 0.367955286446204,
"grad_norm": 0.828586879177044,
"learning_rate": 3.066770186335404e-05,
"loss": 1.0176,
"step": 395
},
{
"epoch": 0.37261294829995345,
"grad_norm": 0.7741728323429156,
"learning_rate": 3.1055900621118014e-05,
"loss": 1.0147,
"step": 400
},
{
"epoch": 0.37727061015370283,
"grad_norm": 0.7854840295592591,
"learning_rate": 3.1444099378881995e-05,
"loss": 1.0193,
"step": 405
},
{
"epoch": 0.3819282720074523,
"grad_norm": 0.7952699696224765,
"learning_rate": 3.183229813664597e-05,
"loss": 1.0299,
"step": 410
},
{
"epoch": 0.38658593386120166,
"grad_norm": 0.8433374305657526,
"learning_rate": 3.222049689440994e-05,
"loss": 1.0284,
"step": 415
},
{
"epoch": 0.3912435957149511,
"grad_norm": 0.8256286197983435,
"learning_rate": 3.260869565217392e-05,
"loss": 1.0079,
"step": 420
},
{
"epoch": 0.39590125756870054,
"grad_norm": 0.7671147431831079,
"learning_rate": 3.299689440993789e-05,
"loss": 1.037,
"step": 425
},
{
"epoch": 0.4005589194224499,
"grad_norm": 0.6726151565927365,
"learning_rate": 3.3385093167701865e-05,
"loss": 1.0252,
"step": 430
},
{
"epoch": 0.40521658127619936,
"grad_norm": 0.912517511617104,
"learning_rate": 3.377329192546584e-05,
"loss": 1.0278,
"step": 435
},
{
"epoch": 0.40987424312994875,
"grad_norm": 0.8685516813400533,
"learning_rate": 3.4161490683229814e-05,
"loss": 1.0481,
"step": 440
},
{
"epoch": 0.4145319049836982,
"grad_norm": 0.7671909964824053,
"learning_rate": 3.4549689440993795e-05,
"loss": 0.9964,
"step": 445
},
{
"epoch": 0.4191895668374476,
"grad_norm": 1.0135652385996252,
"learning_rate": 3.493788819875777e-05,
"loss": 1.0028,
"step": 450
},
{
"epoch": 0.423847228691197,
"grad_norm": 0.899973975312835,
"learning_rate": 3.532608695652174e-05,
"loss": 1.0448,
"step": 455
},
{
"epoch": 0.42850489054494645,
"grad_norm": 0.9156532713655958,
"learning_rate": 3.571428571428572e-05,
"loss": 1.0024,
"step": 460
},
{
"epoch": 0.43316255239869583,
"grad_norm": 1.1365454340055587,
"learning_rate": 3.610248447204969e-05,
"loss": 1.0213,
"step": 465
},
{
"epoch": 0.43782021425244527,
"grad_norm": 0.9918128476101238,
"learning_rate": 3.6490683229813665e-05,
"loss": 1.002,
"step": 470
},
{
"epoch": 0.4424778761061947,
"grad_norm": 0.9151718575356949,
"learning_rate": 3.687888198757764e-05,
"loss": 1.0194,
"step": 475
},
{
"epoch": 0.4471355379599441,
"grad_norm": 0.742255103116907,
"learning_rate": 3.7267080745341614e-05,
"loss": 0.9891,
"step": 480
},
{
"epoch": 0.45179319981369354,
"grad_norm": 0.8586376934224837,
"learning_rate": 3.765527950310559e-05,
"loss": 0.9955,
"step": 485
},
{
"epoch": 0.4564508616674429,
"grad_norm": 0.9103531228057,
"learning_rate": 3.804347826086957e-05,
"loss": 1.0018,
"step": 490
},
{
"epoch": 0.46110852352119236,
"grad_norm": 0.7448983584328042,
"learning_rate": 3.843167701863354e-05,
"loss": 0.9823,
"step": 495
},
{
"epoch": 0.4657661853749418,
"grad_norm": 0.7349324323567711,
"learning_rate": 3.881987577639752e-05,
"loss": 0.9967,
"step": 500
},
{
"epoch": 0.4704238472286912,
"grad_norm": 0.695545445845715,
"learning_rate": 3.920807453416149e-05,
"loss": 0.983,
"step": 505
},
{
"epoch": 0.4750815090824406,
"grad_norm": 0.9785822896697502,
"learning_rate": 3.9596273291925465e-05,
"loss": 0.9726,
"step": 510
},
{
"epoch": 0.47973917093619,
"grad_norm": 1.0555748680465804,
"learning_rate": 3.998447204968944e-05,
"loss": 0.9996,
"step": 515
},
{
"epoch": 0.48439683278993945,
"grad_norm": 0.966587096450481,
"learning_rate": 4.0372670807453414e-05,
"loss": 0.9898,
"step": 520
},
{
"epoch": 0.4890544946436889,
"grad_norm": 0.910161503630037,
"learning_rate": 4.076086956521739e-05,
"loss": 1.0064,
"step": 525
},
{
"epoch": 0.49371215649743827,
"grad_norm": 0.9152897977721635,
"learning_rate": 4.114906832298137e-05,
"loss": 0.9759,
"step": 530
},
{
"epoch": 0.4983698183511877,
"grad_norm": 0.8668603504226737,
"learning_rate": 4.153726708074534e-05,
"loss": 0.9891,
"step": 535
},
{
"epoch": 0.5030274802049371,
"grad_norm": 0.8468939833128593,
"learning_rate": 4.192546583850932e-05,
"loss": 0.9987,
"step": 540
},
{
"epoch": 0.5076851420586865,
"grad_norm": 0.731218604929643,
"learning_rate": 4.23136645962733e-05,
"loss": 0.9713,
"step": 545
},
{
"epoch": 0.5123428039124359,
"grad_norm": 0.9843894198247464,
"learning_rate": 4.270186335403727e-05,
"loss": 1.0025,
"step": 550
},
{
"epoch": 0.5170004657661854,
"grad_norm": 0.7631176615609254,
"learning_rate": 4.3090062111801246e-05,
"loss": 0.9754,
"step": 555
},
{
"epoch": 0.5216581276199348,
"grad_norm": 0.9494172654241818,
"learning_rate": 4.347826086956522e-05,
"loss": 0.9847,
"step": 560
},
{
"epoch": 0.5263157894736842,
"grad_norm": 0.758600818741894,
"learning_rate": 4.3866459627329195e-05,
"loss": 0.981,
"step": 565
},
{
"epoch": 0.5309734513274337,
"grad_norm": 0.6523938368399681,
"learning_rate": 4.425465838509317e-05,
"loss": 0.9766,
"step": 570
},
{
"epoch": 0.5356311131811831,
"grad_norm": 0.8196799702305156,
"learning_rate": 4.464285714285715e-05,
"loss": 0.9726,
"step": 575
},
{
"epoch": 0.5402887750349324,
"grad_norm": 0.7875301746371101,
"learning_rate": 4.5031055900621124e-05,
"loss": 0.9716,
"step": 580
},
{
"epoch": 0.5449464368886818,
"grad_norm": 0.9366890391534561,
"learning_rate": 4.54192546583851e-05,
"loss": 0.9639,
"step": 585
},
{
"epoch": 0.5496040987424313,
"grad_norm": 0.8159290575760685,
"learning_rate": 4.580745341614907e-05,
"loss": 0.9959,
"step": 590
},
{
"epoch": 0.5542617605961807,
"grad_norm": 0.7904038693901743,
"learning_rate": 4.6195652173913046e-05,
"loss": 0.9777,
"step": 595
},
{
"epoch": 0.5589194224499301,
"grad_norm": 0.8820141458513885,
"learning_rate": 4.658385093167702e-05,
"loss": 0.987,
"step": 600
},
{
"epoch": 0.5635770843036796,
"grad_norm": 0.6581899070095203,
"learning_rate": 4.6972049689440995e-05,
"loss": 0.9313,
"step": 605
},
{
"epoch": 0.568234746157429,
"grad_norm": 0.6921366906956813,
"learning_rate": 4.736024844720497e-05,
"loss": 0.9711,
"step": 610
},
{
"epoch": 0.5728924080111784,
"grad_norm": 1.0400083726029987,
"learning_rate": 4.774844720496895e-05,
"loss": 0.9567,
"step": 615
},
{
"epoch": 0.5775500698649279,
"grad_norm": 1.0259646153662887,
"learning_rate": 4.8136645962732924e-05,
"loss": 0.9636,
"step": 620
},
{
"epoch": 0.5822077317186772,
"grad_norm": 0.8898990676451768,
"learning_rate": 4.85248447204969e-05,
"loss": 0.9842,
"step": 625
},
{
"epoch": 0.5868653935724266,
"grad_norm": 0.9603886153911195,
"learning_rate": 4.891304347826087e-05,
"loss": 0.9552,
"step": 630
},
{
"epoch": 0.5915230554261761,
"grad_norm": 0.7643568136993506,
"learning_rate": 4.9301242236024846e-05,
"loss": 0.951,
"step": 635
},
{
"epoch": 0.5961807172799255,
"grad_norm": 0.7613193066594093,
"learning_rate": 4.968944099378882e-05,
"loss": 0.9648,
"step": 640
},
{
"epoch": 0.6008383791336749,
"grad_norm": 0.8024613228030518,
"learning_rate": 4.999137038315499e-05,
"loss": 0.963,
"step": 645
},
{
"epoch": 0.6054960409874243,
"grad_norm": 0.8661787138644403,
"learning_rate": 4.994822229892993e-05,
"loss": 0.9544,
"step": 650
},
{
"epoch": 0.6101537028411738,
"grad_norm": 0.9059824800026692,
"learning_rate": 4.9905074214704875e-05,
"loss": 0.9679,
"step": 655
},
{
"epoch": 0.6148113646949231,
"grad_norm": 0.8439746120342664,
"learning_rate": 4.986192613047981e-05,
"loss": 0.9739,
"step": 660
},
{
"epoch": 0.6194690265486725,
"grad_norm": 1.0683925183560257,
"learning_rate": 4.9818778046254746e-05,
"loss": 0.9585,
"step": 665
},
{
"epoch": 0.624126688402422,
"grad_norm": 0.6613910069447578,
"learning_rate": 4.977562996202969e-05,
"loss": 0.9645,
"step": 670
},
{
"epoch": 0.6287843502561714,
"grad_norm": 0.7575403562844513,
"learning_rate": 4.9732481877804624e-05,
"loss": 0.9441,
"step": 675
},
{
"epoch": 0.6334420121099208,
"grad_norm": 0.7124161020635701,
"learning_rate": 4.968933379357957e-05,
"loss": 0.9522,
"step": 680
},
{
"epoch": 0.6380996739636703,
"grad_norm": 0.7797416220084484,
"learning_rate": 4.964618570935451e-05,
"loss": 0.9767,
"step": 685
},
{
"epoch": 0.6427573358174197,
"grad_norm": 0.7611566850882846,
"learning_rate": 4.9603037625129445e-05,
"loss": 0.9508,
"step": 690
},
{
"epoch": 0.6474149976711691,
"grad_norm": 0.6799999335018179,
"learning_rate": 4.955988954090439e-05,
"loss": 0.9512,
"step": 695
},
{
"epoch": 0.6520726595249184,
"grad_norm": 0.8631080619144671,
"learning_rate": 4.951674145667933e-05,
"loss": 0.9445,
"step": 700
},
{
"epoch": 0.6567303213786679,
"grad_norm": 0.7799699034483604,
"learning_rate": 4.9473593372454266e-05,
"loss": 0.9364,
"step": 705
},
{
"epoch": 0.6613879832324173,
"grad_norm": 0.8550493933043184,
"learning_rate": 4.94304452882292e-05,
"loss": 0.9609,
"step": 710
},
{
"epoch": 0.6660456450861667,
"grad_norm": 0.7299471562792063,
"learning_rate": 4.9387297204004144e-05,
"loss": 0.9489,
"step": 715
},
{
"epoch": 0.6707033069399162,
"grad_norm": 0.8935263071636578,
"learning_rate": 4.934414911977908e-05,
"loss": 0.9283,
"step": 720
},
{
"epoch": 0.6753609687936656,
"grad_norm": 0.6888098427802909,
"learning_rate": 4.930100103555402e-05,
"loss": 0.939,
"step": 725
},
{
"epoch": 0.680018630647415,
"grad_norm": 0.7995262285128696,
"learning_rate": 4.9257852951328965e-05,
"loss": 0.9539,
"step": 730
},
{
"epoch": 0.6846762925011645,
"grad_norm": 0.7947129512554866,
"learning_rate": 4.921470486710391e-05,
"loss": 0.9293,
"step": 735
},
{
"epoch": 0.6893339543549138,
"grad_norm": 0.779585099199785,
"learning_rate": 4.917155678287884e-05,
"loss": 0.9405,
"step": 740
},
{
"epoch": 0.6939916162086632,
"grad_norm": 0.8303446372378965,
"learning_rate": 4.9128408698653785e-05,
"loss": 0.9314,
"step": 745
},
{
"epoch": 0.6986492780624126,
"grad_norm": 1.139024040895227,
"learning_rate": 4.908526061442872e-05,
"loss": 0.9389,
"step": 750
},
{
"epoch": 0.7033069399161621,
"grad_norm": 0.7605700848959173,
"learning_rate": 4.904211253020366e-05,
"loss": 0.9178,
"step": 755
},
{
"epoch": 0.7079646017699115,
"grad_norm": 0.688563607175027,
"learning_rate": 4.89989644459786e-05,
"loss": 0.9215,
"step": 760
},
{
"epoch": 0.7126222636236609,
"grad_norm": 0.6181695501552628,
"learning_rate": 4.895581636175354e-05,
"loss": 0.9419,
"step": 765
},
{
"epoch": 0.7172799254774104,
"grad_norm": 0.6622209131129708,
"learning_rate": 4.891266827752848e-05,
"loss": 0.9408,
"step": 770
},
{
"epoch": 0.7219375873311598,
"grad_norm": 0.6582361407570896,
"learning_rate": 4.886952019330342e-05,
"loss": 0.915,
"step": 775
},
{
"epoch": 0.7265952491849091,
"grad_norm": 0.6803939066291419,
"learning_rate": 4.882637210907836e-05,
"loss": 0.9487,
"step": 780
},
{
"epoch": 0.7312529110386586,
"grad_norm": 0.6720553599662832,
"learning_rate": 4.87832240248533e-05,
"loss": 0.9369,
"step": 785
},
{
"epoch": 0.735910572892408,
"grad_norm": 0.7316521366862686,
"learning_rate": 4.874007594062824e-05,
"loss": 0.9198,
"step": 790
},
{
"epoch": 0.7405682347461574,
"grad_norm": 0.5950973796307311,
"learning_rate": 4.8696927856403176e-05,
"loss": 0.9336,
"step": 795
},
{
"epoch": 0.7452258965999069,
"grad_norm": 0.733870802001813,
"learning_rate": 4.865377977217811e-05,
"loss": 0.9235,
"step": 800
},
{
"epoch": 0.7498835584536563,
"grad_norm": 0.6129379811403421,
"learning_rate": 4.8610631687953055e-05,
"loss": 0.9569,
"step": 805
},
{
"epoch": 0.7545412203074057,
"grad_norm": 0.591475313672276,
"learning_rate": 4.8567483603728e-05,
"loss": 0.9244,
"step": 810
},
{
"epoch": 0.759198882161155,
"grad_norm": 0.6044956006930611,
"learning_rate": 4.852433551950293e-05,
"loss": 0.9314,
"step": 815
},
{
"epoch": 0.7638565440149045,
"grad_norm": 0.7664178521354441,
"learning_rate": 4.8481187435277875e-05,
"loss": 0.9293,
"step": 820
},
{
"epoch": 0.7685142058686539,
"grad_norm": 0.7107416917500197,
"learning_rate": 4.843803935105282e-05,
"loss": 0.9055,
"step": 825
},
{
"epoch": 0.7731718677224033,
"grad_norm": 0.7618528783533417,
"learning_rate": 4.839489126682776e-05,
"loss": 0.9209,
"step": 830
},
{
"epoch": 0.7778295295761528,
"grad_norm": 0.7258800949018822,
"learning_rate": 4.8351743182602696e-05,
"loss": 0.9345,
"step": 835
},
{
"epoch": 0.7824871914299022,
"grad_norm": 0.6783953124506613,
"learning_rate": 4.830859509837763e-05,
"loss": 0.904,
"step": 840
},
{
"epoch": 0.7871448532836516,
"grad_norm": 0.6836122062349896,
"learning_rate": 4.8265447014152574e-05,
"loss": 0.918,
"step": 845
},
{
"epoch": 0.7918025151374011,
"grad_norm": 0.6507985445046874,
"learning_rate": 4.822229892992751e-05,
"loss": 0.9203,
"step": 850
},
{
"epoch": 0.7964601769911505,
"grad_norm": 0.6643585008530555,
"learning_rate": 4.817915084570245e-05,
"loss": 0.9002,
"step": 855
},
{
"epoch": 0.8011178388448998,
"grad_norm": 0.6054726374838411,
"learning_rate": 4.8136002761477395e-05,
"loss": 0.9094,
"step": 860
},
{
"epoch": 0.8057755006986492,
"grad_norm": 0.650341215526472,
"learning_rate": 4.809285467725233e-05,
"loss": 0.9123,
"step": 865
},
{
"epoch": 0.8104331625523987,
"grad_norm": 0.6506499441912863,
"learning_rate": 4.804970659302727e-05,
"loss": 0.8989,
"step": 870
},
{
"epoch": 0.8150908244061481,
"grad_norm": 0.8580048917270087,
"learning_rate": 4.8006558508802216e-05,
"loss": 0.9226,
"step": 875
},
{
"epoch": 0.8197484862598975,
"grad_norm": 0.6055892662420432,
"learning_rate": 4.796341042457715e-05,
"loss": 0.8859,
"step": 880
},
{
"epoch": 0.824406148113647,
"grad_norm": 0.7139422196799956,
"learning_rate": 4.792026234035209e-05,
"loss": 0.9232,
"step": 885
},
{
"epoch": 0.8290638099673964,
"grad_norm": 0.6089661735907969,
"learning_rate": 4.787711425612703e-05,
"loss": 0.919,
"step": 890
},
{
"epoch": 0.8337214718211458,
"grad_norm": 0.580454247375901,
"learning_rate": 4.7833966171901966e-05,
"loss": 0.9171,
"step": 895
},
{
"epoch": 0.8383791336748952,
"grad_norm": 0.5849797012971689,
"learning_rate": 4.779081808767691e-05,
"loss": 0.9055,
"step": 900
},
{
"epoch": 0.8430367955286446,
"grad_norm": 0.6008467270294285,
"learning_rate": 4.774767000345185e-05,
"loss": 0.914,
"step": 905
},
{
"epoch": 0.847694457382394,
"grad_norm": 0.5537296928970568,
"learning_rate": 4.770452191922679e-05,
"loss": 0.9286,
"step": 910
},
{
"epoch": 0.8523521192361434,
"grad_norm": 0.6492423617592114,
"learning_rate": 4.766137383500173e-05,
"loss": 0.9057,
"step": 915
},
{
"epoch": 0.8570097810898929,
"grad_norm": 0.62560716037305,
"learning_rate": 4.761822575077667e-05,
"loss": 0.9245,
"step": 920
},
{
"epoch": 0.8616674429436423,
"grad_norm": 0.7082200597011241,
"learning_rate": 4.757507766655161e-05,
"loss": 0.9103,
"step": 925
},
{
"epoch": 0.8663251047973917,
"grad_norm": 0.6957809388430001,
"learning_rate": 4.753192958232654e-05,
"loss": 0.9158,
"step": 930
},
{
"epoch": 0.8709827666511412,
"grad_norm": 0.5595395190034976,
"learning_rate": 4.7488781498101485e-05,
"loss": 0.8892,
"step": 935
},
{
"epoch": 0.8756404285048905,
"grad_norm": 0.7398962569929148,
"learning_rate": 4.744563341387643e-05,
"loss": 0.8897,
"step": 940
},
{
"epoch": 0.8802980903586399,
"grad_norm": 0.6298362220627544,
"learning_rate": 4.7402485329651363e-05,
"loss": 0.8979,
"step": 945
},
{
"epoch": 0.8849557522123894,
"grad_norm": 0.6392904649774634,
"learning_rate": 4.7359337245426306e-05,
"loss": 0.8749,
"step": 950
},
{
"epoch": 0.8896134140661388,
"grad_norm": 0.5910452010752443,
"learning_rate": 4.731618916120125e-05,
"loss": 0.9037,
"step": 955
},
{
"epoch": 0.8942710759198882,
"grad_norm": 0.5724046276347944,
"learning_rate": 4.7273041076976184e-05,
"loss": 0.9139,
"step": 960
},
{
"epoch": 0.8989287377736377,
"grad_norm": 0.6169462890930449,
"learning_rate": 4.722989299275113e-05,
"loss": 0.903,
"step": 965
},
{
"epoch": 0.9035863996273871,
"grad_norm": 0.5556544386361997,
"learning_rate": 4.718674490852606e-05,
"loss": 0.9093,
"step": 970
},
{
"epoch": 0.9082440614811365,
"grad_norm": 0.6590333956524478,
"learning_rate": 4.7143596824301e-05,
"loss": 0.8983,
"step": 975
},
{
"epoch": 0.9129017233348858,
"grad_norm": 0.748488058996119,
"learning_rate": 4.710044874007594e-05,
"loss": 0.9043,
"step": 980
},
{
"epoch": 0.9175593851886353,
"grad_norm": 0.606400629600308,
"learning_rate": 4.705730065585088e-05,
"loss": 0.9128,
"step": 985
},
{
"epoch": 0.9222170470423847,
"grad_norm": 0.5852937118094048,
"learning_rate": 4.7014152571625826e-05,
"loss": 0.8554,
"step": 990
},
{
"epoch": 0.9268747088961341,
"grad_norm": 0.5476277605999371,
"learning_rate": 4.697100448740076e-05,
"loss": 0.8878,
"step": 995
},
{
"epoch": 0.9315323707498836,
"grad_norm": 0.6330430492588804,
"learning_rate": 4.6927856403175704e-05,
"loss": 0.8924,
"step": 1000
},
{
"epoch": 0.936190032603633,
"grad_norm": 0.7023864398254552,
"learning_rate": 4.6884708318950646e-05,
"loss": 0.9133,
"step": 1005
},
{
"epoch": 0.9408476944573824,
"grad_norm": 0.5921577951396615,
"learning_rate": 4.684156023472558e-05,
"loss": 0.8913,
"step": 1010
},
{
"epoch": 0.9455053563111319,
"grad_norm": 0.6151573442083585,
"learning_rate": 4.679841215050052e-05,
"loss": 0.8973,
"step": 1015
},
{
"epoch": 0.9501630181648812,
"grad_norm": 0.5967475819345346,
"learning_rate": 4.675526406627546e-05,
"loss": 0.9138,
"step": 1020
},
{
"epoch": 0.9548206800186306,
"grad_norm": 0.6013234118124504,
"learning_rate": 4.6712115982050396e-05,
"loss": 0.8966,
"step": 1025
},
{
"epoch": 0.95947834187238,
"grad_norm": 0.6608553067855343,
"learning_rate": 4.666896789782534e-05,
"loss": 0.88,
"step": 1030
},
{
"epoch": 0.9641360037261295,
"grad_norm": 0.6695319084789703,
"learning_rate": 4.662581981360028e-05,
"loss": 0.887,
"step": 1035
},
{
"epoch": 0.9687936655798789,
"grad_norm": 0.5744412033836418,
"learning_rate": 4.658267172937522e-05,
"loss": 0.8807,
"step": 1040
},
{
"epoch": 0.9734513274336283,
"grad_norm": 0.5550351398683305,
"learning_rate": 4.653952364515016e-05,
"loss": 0.8856,
"step": 1045
},
{
"epoch": 0.9781089892873778,
"grad_norm": 0.5022448639615988,
"learning_rate": 4.64963755609251e-05,
"loss": 0.8831,
"step": 1050
},
{
"epoch": 0.9827666511411272,
"grad_norm": 0.5621280358732187,
"learning_rate": 4.645322747670004e-05,
"loss": 0.8882,
"step": 1055
},
{
"epoch": 0.9874243129948765,
"grad_norm": 0.5680029674592303,
"learning_rate": 4.641007939247497e-05,
"loss": 0.8915,
"step": 1060
},
{
"epoch": 0.992081974848626,
"grad_norm": 0.6056783632384961,
"learning_rate": 4.6366931308249916e-05,
"loss": 0.8898,
"step": 1065
},
{
"epoch": 0.9967396367023754,
"grad_norm": 0.5456430939985069,
"learning_rate": 4.632378322402486e-05,
"loss": 0.87,
"step": 1070
},
{
"epoch": 1.00093153237075,
"grad_norm": 0.9221112959963783,
"learning_rate": 4.6280635139799794e-05,
"loss": 0.9038,
"step": 1075
},
{
"epoch": 1.0055891942244992,
"grad_norm": 0.6363659450908413,
"learning_rate": 4.6237487055574736e-05,
"loss": 0.8434,
"step": 1080
},
{
"epoch": 1.0102468560782487,
"grad_norm": 0.6126175571817937,
"learning_rate": 4.619433897134968e-05,
"loss": 0.8165,
"step": 1085
},
{
"epoch": 1.0149045179319982,
"grad_norm": 0.7222816091274098,
"learning_rate": 4.6151190887124615e-05,
"loss": 0.8534,
"step": 1090
},
{
"epoch": 1.0195621797857475,
"grad_norm": 0.5536745759404794,
"learning_rate": 4.610804280289955e-05,
"loss": 0.8433,
"step": 1095
},
{
"epoch": 1.024219841639497,
"grad_norm": 0.6558844604399796,
"learning_rate": 4.606489471867449e-05,
"loss": 0.8482,
"step": 1100
},
{
"epoch": 1.0288775034932465,
"grad_norm": 0.7025611932544967,
"learning_rate": 4.602174663444943e-05,
"loss": 0.8425,
"step": 1105
},
{
"epoch": 1.0335351653469957,
"grad_norm": 0.5360081102807046,
"learning_rate": 4.597859855022437e-05,
"loss": 0.8374,
"step": 1110
},
{
"epoch": 1.0381928272007452,
"grad_norm": 0.5624535132798797,
"learning_rate": 4.5935450465999314e-05,
"loss": 0.8244,
"step": 1115
},
{
"epoch": 1.0428504890544947,
"grad_norm": 0.6292956541367164,
"learning_rate": 4.589230238177425e-05,
"loss": 0.8411,
"step": 1120
},
{
"epoch": 1.047508150908244,
"grad_norm": 0.5677787694167703,
"learning_rate": 4.584915429754919e-05,
"loss": 0.8507,
"step": 1125
},
{
"epoch": 1.0521658127619935,
"grad_norm": 0.5852407402420233,
"learning_rate": 4.5806006213324134e-05,
"loss": 0.8293,
"step": 1130
},
{
"epoch": 1.056823474615743,
"grad_norm": 0.5286627972800246,
"learning_rate": 4.576285812909907e-05,
"loss": 0.8402,
"step": 1135
},
{
"epoch": 1.0614811364694923,
"grad_norm": 0.565583073571462,
"learning_rate": 4.5719710044874006e-05,
"loss": 0.842,
"step": 1140
},
{
"epoch": 1.0661387983232418,
"grad_norm": 0.6235025356308691,
"learning_rate": 4.567656196064895e-05,
"loss": 0.8384,
"step": 1145
},
{
"epoch": 1.0707964601769913,
"grad_norm": 0.5676116085646312,
"learning_rate": 4.563341387642389e-05,
"loss": 0.838,
"step": 1150
},
{
"epoch": 1.0754541220307405,
"grad_norm": 0.7437050533342857,
"learning_rate": 4.5590265792198827e-05,
"loss": 0.8434,
"step": 1155
},
{
"epoch": 1.08011178388449,
"grad_norm": 0.7179223471862046,
"learning_rate": 4.554711770797377e-05,
"loss": 0.8587,
"step": 1160
},
{
"epoch": 1.0847694457382393,
"grad_norm": 0.5402283961364768,
"learning_rate": 4.550396962374871e-05,
"loss": 0.8469,
"step": 1165
},
{
"epoch": 1.0894271075919888,
"grad_norm": 0.602625129953774,
"learning_rate": 4.546082153952365e-05,
"loss": 0.8432,
"step": 1170
},
{
"epoch": 1.0940847694457383,
"grad_norm": 0.5631553474157106,
"learning_rate": 4.541767345529859e-05,
"loss": 0.8477,
"step": 1175
},
{
"epoch": 1.0987424312994876,
"grad_norm": 0.6162837263832036,
"learning_rate": 4.5374525371073526e-05,
"loss": 0.8314,
"step": 1180
},
{
"epoch": 1.103400093153237,
"grad_norm": 0.5696876350925066,
"learning_rate": 4.533137728684846e-05,
"loss": 0.8268,
"step": 1185
},
{
"epoch": 1.1080577550069866,
"grad_norm": 0.5741552121659874,
"learning_rate": 4.5288229202623404e-05,
"loss": 0.797,
"step": 1190
},
{
"epoch": 1.1127154168607358,
"grad_norm": 0.5868300402624936,
"learning_rate": 4.5245081118398346e-05,
"loss": 0.8177,
"step": 1195
},
{
"epoch": 1.1173730787144853,
"grad_norm": 0.575094872638148,
"learning_rate": 4.520193303417328e-05,
"loss": 0.8297,
"step": 1200
},
{
"epoch": 1.1220307405682348,
"grad_norm": 0.6424040008443889,
"learning_rate": 4.5158784949948224e-05,
"loss": 0.8511,
"step": 1205
},
{
"epoch": 1.126688402421984,
"grad_norm": 0.5915975477381874,
"learning_rate": 4.511563686572317e-05,
"loss": 0.8421,
"step": 1210
},
{
"epoch": 1.1313460642757336,
"grad_norm": 0.6551217680153546,
"learning_rate": 4.50724887814981e-05,
"loss": 0.8318,
"step": 1215
},
{
"epoch": 1.136003726129483,
"grad_norm": 0.606747965007546,
"learning_rate": 4.5029340697273045e-05,
"loss": 0.8586,
"step": 1220
},
{
"epoch": 1.1406613879832324,
"grad_norm": 0.5836907149894681,
"learning_rate": 4.498619261304798e-05,
"loss": 0.8292,
"step": 1225
},
{
"epoch": 1.1453190498369819,
"grad_norm": 0.5472296611267455,
"learning_rate": 4.4943044528822923e-05,
"loss": 0.8288,
"step": 1230
},
{
"epoch": 1.1499767116907313,
"grad_norm": 0.6576237995587761,
"learning_rate": 4.489989644459786e-05,
"loss": 0.8345,
"step": 1235
},
{
"epoch": 1.1546343735444806,
"grad_norm": 0.6057108499428023,
"learning_rate": 4.48567483603728e-05,
"loss": 0.8506,
"step": 1240
},
{
"epoch": 1.1592920353982301,
"grad_norm": 0.6564626329492939,
"learning_rate": 4.4813600276147744e-05,
"loss": 0.8415,
"step": 1245
},
{
"epoch": 1.1639496972519794,
"grad_norm": 0.6820004841637497,
"learning_rate": 4.477045219192268e-05,
"loss": 0.8611,
"step": 1250
},
{
"epoch": 1.1686073591057289,
"grad_norm": 0.6171342695838625,
"learning_rate": 4.472730410769762e-05,
"loss": 0.8402,
"step": 1255
},
{
"epoch": 1.1732650209594784,
"grad_norm": 0.5688972110628895,
"learning_rate": 4.4684156023472565e-05,
"loss": 0.8505,
"step": 1260
},
{
"epoch": 1.1779226828132279,
"grad_norm": 0.5891759900853567,
"learning_rate": 4.46410079392475e-05,
"loss": 0.8278,
"step": 1265
},
{
"epoch": 1.1825803446669771,
"grad_norm": 0.6977683673147888,
"learning_rate": 4.4597859855022436e-05,
"loss": 0.8168,
"step": 1270
},
{
"epoch": 1.1872380065207266,
"grad_norm": 0.6546497320070066,
"learning_rate": 4.455471177079738e-05,
"loss": 0.8269,
"step": 1275
},
{
"epoch": 1.191895668374476,
"grad_norm": 0.6048800604194604,
"learning_rate": 4.4511563686572315e-05,
"loss": 0.843,
"step": 1280
},
{
"epoch": 1.1965533302282254,
"grad_norm": 0.6978477920880507,
"learning_rate": 4.446841560234726e-05,
"loss": 0.8457,
"step": 1285
},
{
"epoch": 1.201210992081975,
"grad_norm": 0.815572653341471,
"learning_rate": 4.44252675181222e-05,
"loss": 0.8208,
"step": 1290
},
{
"epoch": 1.2058686539357242,
"grad_norm": 0.6579935666327247,
"learning_rate": 4.4382119433897135e-05,
"loss": 0.8394,
"step": 1295
},
{
"epoch": 1.2105263157894737,
"grad_norm": 0.5995063193473495,
"learning_rate": 4.433897134967208e-05,
"loss": 0.8275,
"step": 1300
},
{
"epoch": 1.2151839776432232,
"grad_norm": 0.6609879496434774,
"learning_rate": 4.429582326544702e-05,
"loss": 0.846,
"step": 1305
},
{
"epoch": 1.2198416394969724,
"grad_norm": 0.5789903731200582,
"learning_rate": 4.4252675181221956e-05,
"loss": 0.8503,
"step": 1310
},
{
"epoch": 1.224499301350722,
"grad_norm": 0.5432938262471018,
"learning_rate": 4.420952709699689e-05,
"loss": 0.8335,
"step": 1315
},
{
"epoch": 1.2291569632044714,
"grad_norm": 0.9738307220223945,
"learning_rate": 4.4166379012771834e-05,
"loss": 0.8536,
"step": 1320
},
{
"epoch": 1.2338146250582207,
"grad_norm": 0.6601401078017379,
"learning_rate": 4.412323092854678e-05,
"loss": 0.8287,
"step": 1325
},
{
"epoch": 1.2384722869119702,
"grad_norm": 0.8454838556235085,
"learning_rate": 4.408008284432171e-05,
"loss": 0.847,
"step": 1330
},
{
"epoch": 1.2431299487657197,
"grad_norm": 0.7189640931574706,
"learning_rate": 4.4036934760096655e-05,
"loss": 0.8341,
"step": 1335
},
{
"epoch": 1.247787610619469,
"grad_norm": 0.65499438366459,
"learning_rate": 4.39937866758716e-05,
"loss": 0.8296,
"step": 1340
},
{
"epoch": 1.2524452724732185,
"grad_norm": 0.5293078036494245,
"learning_rate": 4.395063859164653e-05,
"loss": 0.8123,
"step": 1345
},
{
"epoch": 1.257102934326968,
"grad_norm": 0.4988808471368861,
"learning_rate": 4.3907490507421476e-05,
"loss": 0.8299,
"step": 1350
},
{
"epoch": 1.2617605961807172,
"grad_norm": 1.0174215574294028,
"learning_rate": 4.386434242319641e-05,
"loss": 0.8237,
"step": 1355
},
{
"epoch": 1.2664182580344667,
"grad_norm": 0.5529007964457375,
"learning_rate": 4.382119433897135e-05,
"loss": 0.8091,
"step": 1360
},
{
"epoch": 1.271075919888216,
"grad_norm": 0.5429714398441454,
"learning_rate": 4.377804625474629e-05,
"loss": 0.8399,
"step": 1365
},
{
"epoch": 1.2757335817419655,
"grad_norm": 0.5963112183610055,
"learning_rate": 4.373489817052123e-05,
"loss": 0.8297,
"step": 1370
},
{
"epoch": 1.280391243595715,
"grad_norm": 0.6012562114727978,
"learning_rate": 4.369175008629617e-05,
"loss": 0.8292,
"step": 1375
},
{
"epoch": 1.2850489054494645,
"grad_norm": 0.5118548369236212,
"learning_rate": 4.364860200207111e-05,
"loss": 0.8291,
"step": 1380
},
{
"epoch": 1.2897065673032138,
"grad_norm": 0.558834446477445,
"learning_rate": 4.360545391784605e-05,
"loss": 0.829,
"step": 1385
},
{
"epoch": 1.2943642291569633,
"grad_norm": 0.7487565636309964,
"learning_rate": 4.356230583362099e-05,
"loss": 0.8453,
"step": 1390
},
{
"epoch": 1.2990218910107125,
"grad_norm": 0.5866609927345416,
"learning_rate": 4.351915774939593e-05,
"loss": 0.8177,
"step": 1395
},
{
"epoch": 1.303679552864462,
"grad_norm": 0.7832341341720332,
"learning_rate": 4.347600966517087e-05,
"loss": 0.8437,
"step": 1400
},
{
"epoch": 1.3083372147182115,
"grad_norm": 0.5539507604974554,
"learning_rate": 4.343286158094581e-05,
"loss": 0.8251,
"step": 1405
},
{
"epoch": 1.312994876571961,
"grad_norm": 0.48034968028596836,
"learning_rate": 4.3389713496720745e-05,
"loss": 0.8157,
"step": 1410
},
{
"epoch": 1.3176525384257103,
"grad_norm": 0.5146231089706201,
"learning_rate": 4.334656541249569e-05,
"loss": 0.851,
"step": 1415
},
{
"epoch": 1.3223102002794598,
"grad_norm": 0.5596868182553039,
"learning_rate": 4.330341732827063e-05,
"loss": 0.8336,
"step": 1420
},
{
"epoch": 1.326967862133209,
"grad_norm": 0.6983008027531773,
"learning_rate": 4.3260269244045566e-05,
"loss": 0.82,
"step": 1425
},
{
"epoch": 1.3316255239869585,
"grad_norm": 0.744496091683945,
"learning_rate": 4.321712115982051e-05,
"loss": 0.8193,
"step": 1430
},
{
"epoch": 1.336283185840708,
"grad_norm": 0.5150047650943387,
"learning_rate": 4.317397307559545e-05,
"loss": 0.8248,
"step": 1435
},
{
"epoch": 1.3409408476944573,
"grad_norm": 0.5484843193419154,
"learning_rate": 4.3130824991370387e-05,
"loss": 0.8301,
"step": 1440
},
{
"epoch": 1.3455985095482068,
"grad_norm": 0.5344563616800695,
"learning_rate": 4.308767690714532e-05,
"loss": 0.846,
"step": 1445
},
{
"epoch": 1.350256171401956,
"grad_norm": 0.5727837145848953,
"learning_rate": 4.3044528822920265e-05,
"loss": 0.8254,
"step": 1450
},
{
"epoch": 1.3549138332557056,
"grad_norm": 0.636733485984348,
"learning_rate": 4.30013807386952e-05,
"loss": 0.8556,
"step": 1455
},
{
"epoch": 1.359571495109455,
"grad_norm": 1.9749022217888352,
"learning_rate": 4.295823265447014e-05,
"loss": 0.8287,
"step": 1460
},
{
"epoch": 1.3642291569632046,
"grad_norm": 0.6137468932762344,
"learning_rate": 4.2915084570245085e-05,
"loss": 0.8247,
"step": 1465
},
{
"epoch": 1.3688868188169538,
"grad_norm": 0.5057907698773128,
"learning_rate": 4.287193648602002e-05,
"loss": 0.8304,
"step": 1470
},
{
"epoch": 1.3735444806707033,
"grad_norm": 0.7818389700842573,
"learning_rate": 4.2828788401794964e-05,
"loss": 0.8176,
"step": 1475
},
{
"epoch": 1.3782021425244526,
"grad_norm": 0.5323352228644667,
"learning_rate": 4.27856403175699e-05,
"loss": 0.8183,
"step": 1480
},
{
"epoch": 1.382859804378202,
"grad_norm": 0.637670077434287,
"learning_rate": 4.274249223334484e-05,
"loss": 0.8269,
"step": 1485
},
{
"epoch": 1.3875174662319516,
"grad_norm": 0.7043428559259748,
"learning_rate": 4.269934414911978e-05,
"loss": 0.837,
"step": 1490
},
{
"epoch": 1.392175128085701,
"grad_norm": 0.5064589289765138,
"learning_rate": 4.265619606489472e-05,
"loss": 0.8351,
"step": 1495
},
{
"epoch": 1.3968327899394504,
"grad_norm": 0.6623292044979259,
"learning_rate": 4.261304798066966e-05,
"loss": 0.9041,
"step": 1500
},
{
"epoch": 1.4014904517931999,
"grad_norm": 0.5186037454741974,
"learning_rate": 4.25698998964446e-05,
"loss": 0.8478,
"step": 1505
},
{
"epoch": 1.4061481136469491,
"grad_norm": 4.197835799315802,
"learning_rate": 4.252675181221954e-05,
"loss": 0.8281,
"step": 1510
},
{
"epoch": 1.4108057755006986,
"grad_norm": 0.7873855530523827,
"learning_rate": 4.248360372799448e-05,
"loss": 0.8473,
"step": 1515
},
{
"epoch": 1.4154634373544481,
"grad_norm": 0.6809163327185851,
"learning_rate": 4.244045564376942e-05,
"loss": 0.8224,
"step": 1520
},
{
"epoch": 1.4201210992081974,
"grad_norm": 0.6235694245025729,
"learning_rate": 4.2397307559544355e-05,
"loss": 0.8207,
"step": 1525
},
{
"epoch": 1.424778761061947,
"grad_norm": 0.6402875688149763,
"learning_rate": 4.23541594753193e-05,
"loss": 0.8077,
"step": 1530
},
{
"epoch": 1.4294364229156964,
"grad_norm": 0.4740272476367251,
"learning_rate": 4.231101139109423e-05,
"loss": 0.8356,
"step": 1535
},
{
"epoch": 1.4340940847694457,
"grad_norm": 0.6099287192468218,
"learning_rate": 4.2267863306869176e-05,
"loss": 0.8182,
"step": 1540
},
{
"epoch": 1.4387517466231952,
"grad_norm": 0.5545835645045659,
"learning_rate": 4.222471522264412e-05,
"loss": 0.8192,
"step": 1545
},
{
"epoch": 1.4434094084769447,
"grad_norm": 0.5409026812594355,
"learning_rate": 4.2181567138419054e-05,
"loss": 0.8085,
"step": 1550
},
{
"epoch": 1.448067070330694,
"grad_norm": 0.5436612116893749,
"learning_rate": 4.2138419054193996e-05,
"loss": 0.8024,
"step": 1555
},
{
"epoch": 1.4527247321844434,
"grad_norm": 0.5495238333319105,
"learning_rate": 4.209527096996894e-05,
"loss": 0.8157,
"step": 1560
},
{
"epoch": 1.4573823940381927,
"grad_norm": 0.5624828440338228,
"learning_rate": 4.2052122885743875e-05,
"loss": 0.8337,
"step": 1565
},
{
"epoch": 1.4620400558919422,
"grad_norm": 0.5658622352956784,
"learning_rate": 4.200897480151881e-05,
"loss": 0.8604,
"step": 1570
},
{
"epoch": 1.4666977177456917,
"grad_norm": 0.49705242407659034,
"learning_rate": 4.196582671729375e-05,
"loss": 0.8203,
"step": 1575
},
{
"epoch": 1.4713553795994412,
"grad_norm": 0.4961979991263778,
"learning_rate": 4.1922678633068695e-05,
"loss": 0.8002,
"step": 1580
},
{
"epoch": 1.4760130414531905,
"grad_norm": 0.4834846803784741,
"learning_rate": 4.187953054884363e-05,
"loss": 0.8162,
"step": 1585
},
{
"epoch": 1.48067070330694,
"grad_norm": 0.525816662994698,
"learning_rate": 4.1836382464618573e-05,
"loss": 0.8156,
"step": 1590
},
{
"epoch": 1.4853283651606892,
"grad_norm": 0.5214766244167187,
"learning_rate": 4.1793234380393516e-05,
"loss": 0.8091,
"step": 1595
},
{
"epoch": 1.4899860270144387,
"grad_norm": 0.4556881067346665,
"learning_rate": 4.175008629616845e-05,
"loss": 0.8226,
"step": 1600
},
{
"epoch": 1.4946436888681882,
"grad_norm": 0.4621437528857733,
"learning_rate": 4.1706938211943394e-05,
"loss": 0.8068,
"step": 1605
},
{
"epoch": 1.4993013507219377,
"grad_norm": 0.5285179894725354,
"learning_rate": 4.166379012771833e-05,
"loss": 0.8173,
"step": 1610
},
{
"epoch": 1.503959012575687,
"grad_norm": 0.5260089002965593,
"learning_rate": 4.1620642043493266e-05,
"loss": 0.8198,
"step": 1615
},
{
"epoch": 1.5086166744294365,
"grad_norm": 0.5295370263008858,
"learning_rate": 4.157749395926821e-05,
"loss": 0.8154,
"step": 1620
},
{
"epoch": 1.5132743362831858,
"grad_norm": 0.4869530399329456,
"learning_rate": 4.153434587504315e-05,
"loss": 0.8249,
"step": 1625
},
{
"epoch": 1.5179319981369352,
"grad_norm": 0.6067836521311675,
"learning_rate": 4.1491197790818086e-05,
"loss": 0.8197,
"step": 1630
},
{
"epoch": 1.5225896599906847,
"grad_norm": 0.5303238410825005,
"learning_rate": 4.144804970659303e-05,
"loss": 0.8411,
"step": 1635
},
{
"epoch": 1.5272473218444342,
"grad_norm": 0.5262674711845159,
"learning_rate": 4.140490162236797e-05,
"loss": 0.8248,
"step": 1640
},
{
"epoch": 1.5319049836981835,
"grad_norm": 0.4789682783106058,
"learning_rate": 4.1361753538142914e-05,
"loss": 0.8351,
"step": 1645
},
{
"epoch": 1.5365626455519328,
"grad_norm": 0.5368803341047521,
"learning_rate": 4.131860545391785e-05,
"loss": 0.8064,
"step": 1650
},
{
"epoch": 1.5412203074056823,
"grad_norm": 0.5179417938574834,
"learning_rate": 4.1275457369692785e-05,
"loss": 0.802,
"step": 1655
},
{
"epoch": 1.5458779692594318,
"grad_norm": 0.5668282815058873,
"learning_rate": 4.123230928546773e-05,
"loss": 0.7992,
"step": 1660
},
{
"epoch": 1.5505356311131813,
"grad_norm": 0.5678868939249553,
"learning_rate": 4.1189161201242664e-05,
"loss": 0.8255,
"step": 1665
},
{
"epoch": 1.5551932929669308,
"grad_norm": 2.284864352356659,
"learning_rate": 4.1146013117017606e-05,
"loss": 0.8147,
"step": 1670
},
{
"epoch": 1.55985095482068,
"grad_norm": 0.5386569746518188,
"learning_rate": 4.110286503279255e-05,
"loss": 0.8085,
"step": 1675
},
{
"epoch": 1.5645086166744293,
"grad_norm": 0.5364020323012693,
"learning_rate": 4.1059716948567484e-05,
"loss": 0.8253,
"step": 1680
},
{
"epoch": 1.5691662785281788,
"grad_norm": 0.6168960795213244,
"learning_rate": 4.101656886434243e-05,
"loss": 0.8035,
"step": 1685
},
{
"epoch": 1.5738239403819283,
"grad_norm": 0.4846106072381326,
"learning_rate": 4.097342078011737e-05,
"loss": 0.8179,
"step": 1690
},
{
"epoch": 1.5784816022356778,
"grad_norm": 0.5532441305001439,
"learning_rate": 4.0930272695892305e-05,
"loss": 0.8117,
"step": 1695
},
{
"epoch": 1.583139264089427,
"grad_norm": 0.4839401425043646,
"learning_rate": 4.088712461166724e-05,
"loss": 0.8208,
"step": 1700
},
{
"epoch": 1.5877969259431766,
"grad_norm": 0.4659417284258822,
"learning_rate": 4.084397652744218e-05,
"loss": 0.8101,
"step": 1705
},
{
"epoch": 1.5924545877969258,
"grad_norm": 0.5536360542493969,
"learning_rate": 4.080082844321712e-05,
"loss": 0.8183,
"step": 1710
},
{
"epoch": 1.5971122496506753,
"grad_norm": 0.613015733329207,
"learning_rate": 4.075768035899206e-05,
"loss": 0.8345,
"step": 1715
},
{
"epoch": 1.6017699115044248,
"grad_norm": 0.5037047505496149,
"learning_rate": 4.0714532274767004e-05,
"loss": 0.8043,
"step": 1720
},
{
"epoch": 1.6064275733581743,
"grad_norm": 0.5314368843138948,
"learning_rate": 4.0671384190541946e-05,
"loss": 0.8093,
"step": 1725
},
{
"epoch": 1.6110852352119236,
"grad_norm": 0.5472689278490457,
"learning_rate": 4.062823610631688e-05,
"loss": 0.8339,
"step": 1730
},
{
"epoch": 1.6157428970656729,
"grad_norm": 0.6018155674442411,
"learning_rate": 4.0585088022091825e-05,
"loss": 0.8264,
"step": 1735
},
{
"epoch": 1.6204005589194224,
"grad_norm": 0.49294301703177756,
"learning_rate": 4.054193993786676e-05,
"loss": 0.7908,
"step": 1740
},
{
"epoch": 1.6250582207731719,
"grad_norm": 0.4867952107412176,
"learning_rate": 4.0498791853641696e-05,
"loss": 0.8535,
"step": 1745
},
{
"epoch": 1.6297158826269214,
"grad_norm": 0.5458001464255956,
"learning_rate": 4.045564376941664e-05,
"loss": 0.8033,
"step": 1750
},
{
"epoch": 1.6343735444806708,
"grad_norm": 0.4903678189049089,
"learning_rate": 4.041249568519158e-05,
"loss": 0.8336,
"step": 1755
},
{
"epoch": 1.6390312063344201,
"grad_norm": 0.48052156111025546,
"learning_rate": 4.036934760096652e-05,
"loss": 0.7971,
"step": 1760
},
{
"epoch": 1.6436888681881694,
"grad_norm": 0.4775423173877248,
"learning_rate": 4.032619951674146e-05,
"loss": 0.8074,
"step": 1765
},
{
"epoch": 1.648346530041919,
"grad_norm": 0.5512406872892543,
"learning_rate": 4.02830514325164e-05,
"loss": 0.7981,
"step": 1770
},
{
"epoch": 1.6530041918956684,
"grad_norm": 0.47524257875404746,
"learning_rate": 4.023990334829134e-05,
"loss": 0.8297,
"step": 1775
},
{
"epoch": 1.6576618537494179,
"grad_norm": 0.4729453416458736,
"learning_rate": 4.019675526406628e-05,
"loss": 0.7991,
"step": 1780
},
{
"epoch": 1.6623195156031674,
"grad_norm": 0.5069795573310173,
"learning_rate": 4.0153607179841216e-05,
"loss": 0.8202,
"step": 1785
},
{
"epoch": 1.6669771774569166,
"grad_norm": 0.4950494671876146,
"learning_rate": 4.011045909561615e-05,
"loss": 0.8024,
"step": 1790
},
{
"epoch": 1.671634839310666,
"grad_norm": 0.581784405111925,
"learning_rate": 4.0067311011391094e-05,
"loss": 0.7972,
"step": 1795
},
{
"epoch": 1.6762925011644154,
"grad_norm": 0.4427443809477199,
"learning_rate": 4.0024162927166037e-05,
"loss": 0.798,
"step": 1800
},
{
"epoch": 1.680950163018165,
"grad_norm": 0.5630812899708987,
"learning_rate": 3.998101484294098e-05,
"loss": 0.8084,
"step": 1805
},
{
"epoch": 1.6856078248719144,
"grad_norm": 0.47170620633552024,
"learning_rate": 3.9937866758715915e-05,
"loss": 0.8061,
"step": 1810
},
{
"epoch": 1.6902654867256637,
"grad_norm": 0.484745803492533,
"learning_rate": 3.989471867449086e-05,
"loss": 0.8085,
"step": 1815
},
{
"epoch": 1.6949231485794132,
"grad_norm": 0.5256720082001222,
"learning_rate": 3.98515705902658e-05,
"loss": 0.7787,
"step": 1820
},
{
"epoch": 1.6995808104331624,
"grad_norm": 0.5203870132810984,
"learning_rate": 3.9808422506040736e-05,
"loss": 0.7886,
"step": 1825
},
{
"epoch": 1.704238472286912,
"grad_norm": 0.5045427269257229,
"learning_rate": 3.976527442181567e-05,
"loss": 0.7939,
"step": 1830
},
{
"epoch": 1.7088961341406614,
"grad_norm": 0.5169331146428379,
"learning_rate": 3.9722126337590614e-05,
"loss": 0.81,
"step": 1835
},
{
"epoch": 1.713553795994411,
"grad_norm": 0.5431794591639149,
"learning_rate": 3.967897825336555e-05,
"loss": 0.8029,
"step": 1840
},
{
"epoch": 1.7182114578481602,
|
"grad_norm": 0.5477208936261382, |
|
"learning_rate": 3.963583016914049e-05, |
|
"loss": 0.8069, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 1.7228691197019095, |
|
"grad_norm": 0.6041641324030878, |
|
"learning_rate": 3.9592682084915434e-05, |
|
"loss": 0.8258, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.727526781555659, |
|
"grad_norm": 0.5090806504384892, |
|
"learning_rate": 3.954953400069037e-05, |
|
"loss": 0.7881, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 1.7321844434094085, |
|
"grad_norm": 0.49889036648991425, |
|
"learning_rate": 3.950638591646531e-05, |
|
"loss": 0.8397, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.736842105263158, |
|
"grad_norm": 0.4935177448464897, |
|
"learning_rate": 3.9463237832240255e-05, |
|
"loss": 0.7969, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 1.7414997671169075, |
|
"grad_norm": 0.47140321745349795, |
|
"learning_rate": 3.942008974801519e-05, |
|
"loss": 0.8106, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.7461574289706567, |
|
"grad_norm": 0.469571803119941, |
|
"learning_rate": 3.937694166379013e-05, |
|
"loss": 0.8384, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 1.750815090824406, |
|
"grad_norm": 0.5050432985953494, |
|
"learning_rate": 3.933379357956507e-05, |
|
"loss": 0.8378, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.7554727526781555, |
|
"grad_norm": 0.563820523969095, |
|
"learning_rate": 3.9290645495340005e-05, |
|
"loss": 0.8234, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 1.760130414531905, |
|
"grad_norm": 0.5849722918534452, |
|
"learning_rate": 3.924749741111495e-05, |
|
"loss": 0.8135, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.7647880763856545, |
|
"grad_norm": 0.5619437429386521, |
|
"learning_rate": 3.920434932688989e-05, |
|
"loss": 0.8321, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 1.7694457382394038, |
|
"grad_norm": 0.5541650724131253, |
|
"learning_rate": 3.916120124266483e-05, |
|
"loss": 0.8122, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.7741034000931533, |
|
"grad_norm": 0.4790373788753262, |
|
"learning_rate": 3.911805315843977e-05, |
|
"loss": 0.8107, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 1.7787610619469025, |
|
"grad_norm": 0.48571446107425204, |
|
"learning_rate": 3.9074905074214704e-05, |
|
"loss": 0.7954, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.783418723800652, |
|
"grad_norm": 0.4554059775758542, |
|
"learning_rate": 3.9031756989989646e-05, |
|
"loss": 0.7812, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 1.7880763856544015, |
|
"grad_norm": 0.5130980660028385, |
|
"learning_rate": 3.898860890576458e-05, |
|
"loss": 0.7971, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.792734047508151, |
|
"grad_norm": 0.5185882487044102, |
|
"learning_rate": 3.8945460821539525e-05, |
|
"loss": 0.8086, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 1.7973917093619003, |
|
"grad_norm": 0.5103912287550575, |
|
"learning_rate": 3.890231273731447e-05, |
|
"loss": 0.7979, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 1.8020493712156498, |
|
"grad_norm": 0.462819612155367, |
|
"learning_rate": 3.88591646530894e-05, |
|
"loss": 0.7848, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 1.806707033069399, |
|
"grad_norm": 0.4664370966792569, |
|
"learning_rate": 3.8816016568864345e-05, |
|
"loss": 0.7909, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.8113646949231486, |
|
"grad_norm": 0.4543630912158898, |
|
"learning_rate": 3.877286848463929e-05, |
|
"loss": 0.8142, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 1.816022356776898, |
|
"grad_norm": 0.46997582522664094, |
|
"learning_rate": 3.8729720400414224e-05, |
|
"loss": 0.7964, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.8206800186306475, |
|
"grad_norm": 0.4608432519022556, |
|
"learning_rate": 3.868657231618916e-05, |
|
"loss": 0.7993, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 1.8253376804843968, |
|
"grad_norm": 0.48943547745302063, |
|
"learning_rate": 3.86434242319641e-05, |
|
"loss": 0.8045, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.829995342338146, |
|
"grad_norm": 0.5034658417649208, |
|
"learning_rate": 3.860027614773904e-05, |
|
"loss": 0.8074, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 1.8346530041918956, |
|
"grad_norm": 0.5079636831645327, |
|
"learning_rate": 3.855712806351398e-05, |
|
"loss": 0.8014, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.839310666045645, |
|
"grad_norm": 0.5350620373119227, |
|
"learning_rate": 3.851397997928892e-05, |
|
"loss": 0.8108, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 1.8439683278993946, |
|
"grad_norm": 0.4876171067090009, |
|
"learning_rate": 3.8470831895063865e-05, |
|
"loss": 0.7971, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.848625989753144, |
|
"grad_norm": 0.5792114882681738, |
|
"learning_rate": 3.84276838108388e-05, |
|
"loss": 0.8266, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 1.8532836516068933, |
|
"grad_norm": 0.5633248691401386, |
|
"learning_rate": 3.838453572661374e-05, |
|
"loss": 0.8094, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 1.8579413134606426, |
|
"grad_norm": 0.4635065065769153, |
|
"learning_rate": 3.834138764238868e-05, |
|
"loss": 0.7795, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 1.8625989753143921, |
|
"grad_norm": 0.5211367454249431, |
|
"learning_rate": 3.8298239558163615e-05, |
|
"loss": 0.803, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.8672566371681416, |
|
"grad_norm": 0.5244552884735643, |
|
"learning_rate": 3.825509147393856e-05, |
|
"loss": 0.8218, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 1.871914299021891, |
|
"grad_norm": 0.4367703254486583, |
|
"learning_rate": 3.82119433897135e-05, |
|
"loss": 0.7895, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 1.8765719608756404, |
|
"grad_norm": 0.4365293713310002, |
|
"learning_rate": 3.8168795305488435e-05, |
|
"loss": 0.8254, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 1.8812296227293899, |
|
"grad_norm": 0.4401620684419659, |
|
"learning_rate": 3.812564722126338e-05, |
|
"loss": 0.7918, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 1.8858872845831391, |
|
"grad_norm": 0.47779920006319, |
|
"learning_rate": 3.808249913703832e-05, |
|
"loss": 0.8029, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 1.8905449464368886, |
|
"grad_norm": 0.5555355624639677, |
|
"learning_rate": 3.8039351052813256e-05, |
|
"loss": 0.796, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 1.8952026082906381, |
|
"grad_norm": 0.49458336406404724, |
|
"learning_rate": 3.79962029685882e-05, |
|
"loss": 0.789, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 1.8998602701443876, |
|
"grad_norm": 0.5146735090214605, |
|
"learning_rate": 3.7953054884363134e-05, |
|
"loss": 0.8121, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 1.904517931998137, |
|
"grad_norm": 0.4494321925050035, |
|
"learning_rate": 3.790990680013807e-05, |
|
"loss": 0.8214, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 1.9091755938518864, |
|
"grad_norm": 0.5050066179121853, |
|
"learning_rate": 3.786675871591301e-05, |
|
"loss": 0.7987, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.9138332557056357, |
|
"grad_norm": 0.47989526540598243, |
|
"learning_rate": 3.7823610631687955e-05, |
|
"loss": 0.8003, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 1.9184909175593852, |
|
"grad_norm": 0.48483535452469545, |
|
"learning_rate": 3.77804625474629e-05, |
|
"loss": 0.7898, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 1.9231485794131347, |
|
"grad_norm": 0.4687766430461443, |
|
"learning_rate": 3.773731446323783e-05, |
|
"loss": 0.7986, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 1.9278062412668842, |
|
"grad_norm": 0.4752511382396736, |
|
"learning_rate": 3.7694166379012776e-05, |
|
"loss": 0.8032, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 1.9324639031206334, |
|
"grad_norm": 0.5009445413481693, |
|
"learning_rate": 3.765101829478772e-05, |
|
"loss": 0.7997, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 1.9371215649743827, |
|
"grad_norm": 0.4787984334128784, |
|
"learning_rate": 3.7607870210562654e-05, |
|
"loss": 0.7837, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 1.9417792268281322, |
|
"grad_norm": 0.49842411743772097, |
|
"learning_rate": 3.756472212633759e-05, |
|
"loss": 0.8119, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 1.9464368886818817, |
|
"grad_norm": 0.5129102375544244, |
|
"learning_rate": 3.752157404211253e-05, |
|
"loss": 0.7717, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 1.9510945505356312, |
|
"grad_norm": 0.4983269320057428, |
|
"learning_rate": 3.747842595788747e-05, |
|
"loss": 0.7932, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 1.9557522123893807, |
|
"grad_norm": 0.5694242293113918, |
|
"learning_rate": 3.743527787366241e-05, |
|
"loss": 0.7911, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.96040987424313, |
|
"grad_norm": 0.47125598634785504, |
|
"learning_rate": 3.739212978943735e-05, |
|
"loss": 0.8082, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 1.9650675360968792, |
|
"grad_norm": 0.4686243867209378, |
|
"learning_rate": 3.734898170521229e-05, |
|
"loss": 0.7781, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 1.9697251979506287, |
|
"grad_norm": 0.4900196821966246, |
|
"learning_rate": 3.730583362098723e-05, |
|
"loss": 0.7998, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 1.9743828598043782, |
|
"grad_norm": 0.5097124271224043, |
|
"learning_rate": 3.7262685536762174e-05, |
|
"loss": 0.8051, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 1.9790405216581277, |
|
"grad_norm": 0.4957311238066013, |
|
"learning_rate": 3.721953745253711e-05, |
|
"loss": 0.7859, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 1.983698183511877, |
|
"grad_norm": 0.4697294046892151, |
|
"learning_rate": 3.7176389368312045e-05, |
|
"loss": 0.7934, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 1.9883558453656265, |
|
"grad_norm": 0.49474241334656344, |
|
"learning_rate": 3.713324128408699e-05, |
|
"loss": 0.8048, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 1.9930135072193758, |
|
"grad_norm": 0.46310710184268955, |
|
"learning_rate": 3.709009319986193e-05, |
|
"loss": 0.8022, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 1.9976711690731253, |
|
"grad_norm": 0.4756968698386488, |
|
"learning_rate": 3.7046945115636866e-05, |
|
"loss": 0.8023, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 2.0018630647415, |
|
"grad_norm": 0.527343779742018, |
|
"learning_rate": 3.700379703141181e-05, |
|
"loss": 0.7902, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 2.0065207265952494, |
|
"grad_norm": 0.5306054657790139, |
|
"learning_rate": 3.696064894718675e-05, |
|
"loss": 0.7417, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 2.0111783884489984, |
|
"grad_norm": 0.4980162319421436, |
|
"learning_rate": 3.6917500862961687e-05, |
|
"loss": 0.7281, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 2.015836050302748, |
|
"grad_norm": 0.49663031109205596, |
|
"learning_rate": 3.687435277873663e-05, |
|
"loss": 0.7391, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 2.0204937121564974, |
|
"grad_norm": 0.5451837034273049, |
|
"learning_rate": 3.6831204694511565e-05, |
|
"loss": 0.7146, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 2.025151374010247, |
|
"grad_norm": 0.509552114169026, |
|
"learning_rate": 3.67880566102865e-05, |
|
"loss": 0.7549, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 2.0298090358639964, |
|
"grad_norm": 0.47803133974312734, |
|
"learning_rate": 3.674490852606144e-05, |
|
"loss": 0.7273, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 2.034466697717746, |
|
"grad_norm": 0.5043879767130786, |
|
"learning_rate": 3.6701760441836386e-05, |
|
"loss": 0.7476, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 2.039124359571495, |
|
"grad_norm": 0.5383186227544483, |
|
"learning_rate": 3.665861235761132e-05, |
|
"loss": 0.739, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 2.0437820214252445, |
|
"grad_norm": 0.49461761719203784, |
|
"learning_rate": 3.6615464273386264e-05, |
|
"loss": 0.7376, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 2.048439683278994, |
|
"grad_norm": 0.5533513553704322, |
|
"learning_rate": 3.6572316189161206e-05, |
|
"loss": 0.7344, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 2.0530973451327434, |
|
"grad_norm": 0.48201139067056653, |
|
"learning_rate": 3.652916810493614e-05, |
|
"loss": 0.7452, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 2.057755006986493, |
|
"grad_norm": 0.4956778283399664, |
|
"learning_rate": 3.6486020020711085e-05, |
|
"loss": 0.7213, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 2.062412668840242, |
|
"grad_norm": 0.5511345017727285, |
|
"learning_rate": 3.644287193648602e-05, |
|
"loss": 0.7346, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 2.0670703306939915, |
|
"grad_norm": 0.4657556929449439, |
|
"learning_rate": 3.639972385226096e-05, |
|
"loss": 0.7404, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 2.071727992547741, |
|
"grad_norm": 0.5089880530513546, |
|
"learning_rate": 3.63565757680359e-05, |
|
"loss": 0.7359, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 2.0763856544014905, |
|
"grad_norm": 0.5058181267132648, |
|
"learning_rate": 3.631342768381084e-05, |
|
"loss": 0.7377, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 2.08104331625524, |
|
"grad_norm": 0.507478279733912, |
|
"learning_rate": 3.6270279599585783e-05, |
|
"loss": 0.7287, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 2.0857009781089895, |
|
"grad_norm": 0.49894629085867603, |
|
"learning_rate": 3.622713151536072e-05, |
|
"loss": 0.7436, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 2.0903586399627385, |
|
"grad_norm": 0.47534602756485905, |
|
"learning_rate": 3.618398343113566e-05, |
|
"loss": 0.7434, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 2.095016301816488, |
|
"grad_norm": 0.48030281583814866, |
|
"learning_rate": 3.6140835346910604e-05, |
|
"loss": 0.7289, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 2.0996739636702375, |
|
"grad_norm": 0.5258046969040029, |
|
"learning_rate": 3.609768726268554e-05, |
|
"loss": 0.7533, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 2.104331625523987, |
|
"grad_norm": 0.5136907462435186, |
|
"learning_rate": 3.6054539178460476e-05, |
|
"loss": 0.7276, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 2.1089892873777365, |
|
"grad_norm": 0.4810156623610355, |
|
"learning_rate": 3.601139109423542e-05, |
|
"loss": 0.756, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 2.113646949231486, |
|
"grad_norm": 0.47960283436866896, |
|
"learning_rate": 3.5968243010010354e-05, |
|
"loss": 0.7346, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 2.118304611085235, |
|
"grad_norm": 0.45763532355865844, |
|
"learning_rate": 3.5925094925785296e-05, |
|
"loss": 0.7377, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 2.1229622729389845, |
|
"grad_norm": 0.504308557124316, |
|
"learning_rate": 3.588194684156024e-05, |
|
"loss": 0.7372, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 2.127619934792734, |
|
"grad_norm": 0.5033911076958377, |
|
"learning_rate": 3.5838798757335175e-05, |
|
"loss": 0.7439, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 2.1322775966464835, |
|
"grad_norm": 0.5592257193363861, |
|
"learning_rate": 3.579565067311012e-05, |
|
"loss": 0.7429, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 2.136935258500233, |
|
"grad_norm": 0.5060165047760968, |
|
"learning_rate": 3.575250258888505e-05, |
|
"loss": 0.7538, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 2.1415929203539825, |
|
"grad_norm": 0.4365692283959372, |
|
"learning_rate": 3.5709354504659995e-05, |
|
"loss": 0.7556, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 2.1462505822077316, |
|
"grad_norm": 0.487649630360977, |
|
"learning_rate": 3.566620642043493e-05, |
|
"loss": 0.7397, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 2.150908244061481, |
|
"grad_norm": 0.4475405819347315, |
|
"learning_rate": 3.5623058336209874e-05, |
|
"loss": 0.7112, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 2.1555659059152306, |
|
"grad_norm": 0.4722891130226953, |
|
"learning_rate": 3.5579910251984816e-05, |
|
"loss": 0.7258, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 2.16022356776898, |
|
"grad_norm": 0.4888175714369976, |
|
"learning_rate": 3.553676216775975e-05, |
|
"loss": 0.7291, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 2.1648812296227296, |
|
"grad_norm": 0.5149403178372757, |
|
"learning_rate": 3.5493614083534694e-05, |
|
"loss": 0.7481, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 2.1695388914764786, |
|
"grad_norm": 0.4656812561283848, |
|
"learning_rate": 3.545046599930964e-05, |
|
"loss": 0.7508, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 2.174196553330228, |
|
"grad_norm": 0.5137790034694885, |
|
"learning_rate": 3.540731791508457e-05, |
|
"loss": 0.7233, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 2.1788542151839776, |
|
"grad_norm": 0.4553249699224624, |
|
"learning_rate": 3.536416983085951e-05, |
|
"loss": 0.7452, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 2.183511877037727, |
|
"grad_norm": 0.5039975669698809, |
|
"learning_rate": 3.532102174663445e-05, |
|
"loss": 0.7418, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 2.1881695388914766, |
|
"grad_norm": 0.5019742558189558, |
|
"learning_rate": 3.5277873662409386e-05, |
|
"loss": 0.7434, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 2.192827200745226, |
|
"grad_norm": 0.4976202334401095, |
|
"learning_rate": 3.523472557818433e-05, |
|
"loss": 0.7286, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 2.197484862598975, |
|
"grad_norm": 0.46894087915981364, |
|
"learning_rate": 3.519157749395927e-05, |
|
"loss": 0.7463, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 2.2021425244527246, |
|
"grad_norm": 0.43387707760639765, |
|
"learning_rate": 3.514842940973421e-05, |
|
"loss": 0.7411, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 2.206800186306474, |
|
"grad_norm": 0.49150140111118595, |
|
"learning_rate": 3.510528132550915e-05, |
|
"loss": 0.7496, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 2.2114578481602236, |
|
"grad_norm": 0.5585531969734789, |
|
"learning_rate": 3.506213324128409e-05, |
|
"loss": 0.7495, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 2.216115510013973, |
|
"grad_norm": 0.5153195977847673, |
|
"learning_rate": 3.501898515705903e-05, |
|
"loss": 0.7333, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 2.2207731718677226, |
|
"grad_norm": 0.4492901284094745, |
|
"learning_rate": 3.4975837072833964e-05, |
|
"loss": 0.7596, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 2.2254308337214717, |
|
"grad_norm": 0.44696866001250674, |
|
"learning_rate": 3.4932688988608906e-05, |
|
"loss": 0.74, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 2.230088495575221, |
|
"grad_norm": 0.5220507714567176, |
|
"learning_rate": 3.488954090438385e-05, |
|
"loss": 0.7391, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 2.2347461574289706, |
|
"grad_norm": 0.47887225464445543, |
|
"learning_rate": 3.4846392820158784e-05, |
|
"loss": 0.7243, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 2.23940381928272, |
|
"grad_norm": 0.4811847900886404, |
|
"learning_rate": 3.480324473593373e-05, |
|
"loss": 0.7418, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 2.2440614811364696, |
|
"grad_norm": 0.5272801320352328, |
|
"learning_rate": 3.476009665170867e-05, |
|
"loss": 0.74, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 2.248719142990219, |
|
"grad_norm": 0.5307464586993595, |
|
"learning_rate": 3.4716948567483605e-05, |
|
"loss": 0.7324, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 2.253376804843968, |
|
"grad_norm": 0.49901292595440055, |
|
"learning_rate": 3.467380048325855e-05, |
|
"loss": 0.7113, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 2.2580344666977177, |
|
"grad_norm": 0.5070314821761425, |
|
"learning_rate": 3.463065239903348e-05, |
|
"loss": 0.7562, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 2.262692128551467, |
|
"grad_norm": 0.44427974244795587, |
|
"learning_rate": 3.458750431480842e-05, |
|
"loss": 0.7242, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 2.2673497904052167, |
|
"grad_norm": 0.4771303989768548, |
|
"learning_rate": 3.454435623058336e-05, |
|
"loss": 0.7391, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 2.272007452258966, |
|
"grad_norm": 0.49142003038405735, |
|
"learning_rate": 3.4501208146358304e-05, |
|
"loss": 0.7359, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 2.276665114112715, |
|
"grad_norm": 0.5101425248743833, |
|
"learning_rate": 3.445806006213324e-05, |
|
"loss": 0.7337, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 2.2813227759664647, |
|
"grad_norm": 0.4469242564135119, |
|
"learning_rate": 3.441491197790818e-05, |
|
"loss": 0.7328, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 2.285980437820214, |
|
"grad_norm": 0.5151260716573384, |
|
"learning_rate": 3.4371763893683125e-05, |
|
"loss": 0.7285, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 2.2906380996739637, |
|
"grad_norm": 0.6197779956105796, |
|
"learning_rate": 3.432861580945806e-05, |
|
"loss": 0.7195, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 2.295295761527713, |
|
"grad_norm": 0.5338226806005612, |
|
"learning_rate": 3.4285467725233e-05, |
|
"loss": 0.7581, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 2.2999534233814627, |
|
"grad_norm": 0.47245608705220976, |
|
"learning_rate": 3.424231964100794e-05, |
|
"loss": 0.7431, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 2.3046110852352117, |
|
"grad_norm": 0.5201411523481256, |
|
"learning_rate": 3.419917155678288e-05, |
|
"loss": 0.7482, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 2.3092687470889612, |
|
"grad_norm": 0.4908970760918655, |
|
"learning_rate": 3.415602347255782e-05, |
|
"loss": 0.7355, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 2.3139264089427107, |
|
"grad_norm": 0.4931573032699934, |
|
"learning_rate": 3.411287538833276e-05, |
|
"loss": 0.7569, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 2.3185840707964602, |
|
"grad_norm": 0.47470883095660726, |
|
"learning_rate": 3.40697273041077e-05, |
|
"loss": 0.756, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 2.3232417326502097, |
|
"grad_norm": 0.4878991939271055, |
|
"learning_rate": 3.402657921988264e-05, |
|
"loss": 0.7369, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 2.3278993945039588, |
|
"grad_norm": 0.4594200319210018, |
|
"learning_rate": 3.398343113565758e-05, |
|
"loss": 0.7142, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 2.3325570563577083, |
|
"grad_norm": 0.46387141485413524, |
|
"learning_rate": 3.394028305143252e-05, |
|
"loss": 0.7345, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 2.3372147182114578, |
|
"grad_norm": 0.531841628390487, |
|
"learning_rate": 3.389713496720746e-05, |
|
"loss": 0.7383, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 2.3418723800652073, |
|
"grad_norm": 0.46190990050373143, |
|
"learning_rate": 3.3853986882982394e-05, |
|
"loss": 0.7381, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 2.3465300419189568, |
|
"grad_norm": 0.4862616056924088, |
|
"learning_rate": 3.381083879875734e-05, |
|
"loss": 0.7447, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 2.3511877037727063, |
|
"grad_norm": 0.46877636975068077, |
|
"learning_rate": 3.376769071453227e-05, |
|
"loss": 0.7409, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 2.3558453656264557, |
|
"grad_norm": 0.47736005453793845, |
|
"learning_rate": 3.3724542630307215e-05, |
|
"loss": 0.74, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 2.360503027480205, |
|
"grad_norm": 0.45506496994603135, |
|
"learning_rate": 3.368139454608216e-05, |
|
"loss": 0.7189, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 2.3651606893339543, |
|
"grad_norm": 0.43838587525237144, |
|
"learning_rate": 3.363824646185709e-05, |
|
"loss": 0.7168, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 2.369818351187704, |
|
"grad_norm": 0.44812953360079566, |
|
"learning_rate": 3.3595098377632036e-05, |
|
"loss": 0.7436, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 2.3744760130414533, |
|
"grad_norm": 0.45612583541921564, |
|
"learning_rate": 3.355195029340698e-05, |
|
"loss": 0.7422, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 2.3791336748952028, |
|
"grad_norm": 0.47178312549781826, |
|
"learning_rate": 3.3508802209181914e-05, |
|
"loss": 0.7269, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 2.383791336748952, |
|
"grad_norm": 0.48551060780875127, |
|
"learning_rate": 3.346565412495685e-05, |
|
"loss": 0.7337, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 2.3884489986027013, |
|
"grad_norm": 0.44976638729567375, |
|
"learning_rate": 3.342250604073179e-05, |
|
"loss": 0.7372, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 2.393106660456451, |
|
"grad_norm": 0.43474437953643713, |
|
"learning_rate": 3.3379357956506735e-05, |
|
"loss": 0.7377, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 2.3977643223102003, |
|
"grad_norm": 0.49975414042600347, |
|
"learning_rate": 3.333620987228167e-05, |
|
"loss": 0.7386, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 2.40242198416395, |
|
"grad_norm": 0.5001674134152022, |
|
"learning_rate": 3.329306178805661e-05, |
|
"loss": 0.7241, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 2.4070796460176993, |
|
"grad_norm": 0.5029118589595981, |
|
"learning_rate": 3.3249913703831555e-05, |
|
"loss": 0.7256, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 2.4117373078714484, |
|
"grad_norm": 0.46960690178551306, |
|
"learning_rate": 3.320676561960649e-05, |
|
"loss": 0.7154, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 2.416394969725198, |
|
"grad_norm": 0.4531885637486837, |
|
"learning_rate": 3.3163617535381434e-05, |
|
"loss": 0.7499, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 2.4210526315789473, |
|
"grad_norm": 0.47476110228618335, |
|
"learning_rate": 3.312046945115637e-05, |
|
"loss": 0.7428, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 2.425710293432697, |
|
"grad_norm": 0.4523064808621614, |
|
"learning_rate": 3.3077321366931305e-05, |
|
"loss": 0.7277, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 2.4303679552864463, |
|
"grad_norm": 0.45178488039892994, |
|
"learning_rate": 3.303417328270625e-05, |
|
"loss": 0.7188, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 2.4350256171401954, |
|
"grad_norm": 0.49014947823403127, |
|
"learning_rate": 3.299102519848119e-05, |
|
"loss": 0.7252, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 2.439683278993945, |
|
"grad_norm": 0.4948237045149303, |
|
"learning_rate": 3.2947877114256126e-05, |
|
"loss": 0.7345, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 2.4443409408476944, |
|
"grad_norm": 0.5370877028222926, |
|
"learning_rate": 3.290472903003107e-05, |
|
"loss": 0.7313, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 2.448998602701444, |
|
"grad_norm": 0.46235224775820505, |
|
"learning_rate": 3.286158094580601e-05, |
|
"loss": 0.7203, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 2.4536562645551934, |
|
"grad_norm": 0.44349107580473884, |
|
"learning_rate": 3.281843286158095e-05, |
|
"loss": 0.7427, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 2.458313926408943, |
|
"grad_norm": 0.5208697307440874, |
|
"learning_rate": 3.277528477735589e-05, |
|
"loss": 0.7405, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 2.4629715882626924, |
|
"grad_norm": 0.5024334402123977, |
|
"learning_rate": 3.2732136693130825e-05, |
|
"loss": 0.7398, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 2.4676292501164414, |
|
"grad_norm": 0.4790235899407453, |
|
"learning_rate": 3.268898860890577e-05, |
|
"loss": 0.7254, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 2.472286911970191, |
|
"grad_norm": 0.5322015962737839, |
|
"learning_rate": 3.26458405246807e-05, |
|
"loss": 0.738, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 2.4769445738239404, |
|
"grad_norm": 0.4569166889476487, |
|
"learning_rate": 3.2602692440455645e-05, |
|
"loss": 0.7265, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 2.48160223567769, |
|
"grad_norm": 0.48054196038218905, |
|
"learning_rate": 3.255954435623059e-05, |
|
"loss": 0.7266, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 2.4862598975314394, |
|
"grad_norm": 0.5641082189572171, |
|
"learning_rate": 3.2516396272005524e-05, |
|
"loss": 0.7388, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 2.4909175593851884, |
|
"grad_norm": 0.5662978894220593, |
|
"learning_rate": 3.2473248187780466e-05, |
|
"loss": 0.7235, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 2.495575221238938, |
|
"grad_norm": 0.5391118159361664, |
|
"learning_rate": 3.243010010355541e-05, |
|
"loss": 0.7273, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 2.5002328830926874, |
|
"grad_norm": 0.5362127590103279, |
|
"learning_rate": 3.2386952019330344e-05, |
|
"loss": 0.7336, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 2.504890544946437, |
|
"grad_norm": 0.4748435027401365, |
|
"learning_rate": 3.234380393510528e-05, |
|
"loss": 0.7096, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 2.5095482068001864, |
|
"grad_norm": 0.5033991418235945, |
|
"learning_rate": 3.230065585088022e-05, |
|
"loss": 0.7303, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 2.514205868653936, |
|
"grad_norm": 0.43440064872955475, |
|
"learning_rate": 3.225750776665516e-05, |
|
"loss": 0.7216, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 2.5188635305076854, |
|
"grad_norm": 0.5577535793990844, |
|
"learning_rate": 3.22143596824301e-05, |
|
"loss": 0.7426, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 2.5235211923614345, |
|
"grad_norm": 0.48251183034179124, |
|
"learning_rate": 3.217121159820504e-05, |
|
"loss": 0.7272, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 2.528178854215184, |
|
"grad_norm": 0.46412438688592356, |
|
"learning_rate": 3.2128063513979986e-05, |
|
"loss": 0.7425, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 2.5328365160689335, |
|
"grad_norm": 0.4495342474637197, |
|
"learning_rate": 3.208491542975492e-05, |
|
"loss": 0.762, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 2.537494177922683, |
|
"grad_norm": 0.44955454953760104, |
|
"learning_rate": 3.204176734552986e-05, |
|
"loss": 0.7301, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 2.542151839776432, |
|
"grad_norm": 0.4726584773035974, |
|
"learning_rate": 3.19986192613048e-05, |
|
"loss": 0.7504, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 2.5468095016301815, |
|
"grad_norm": 0.45736686524629533, |
|
"learning_rate": 3.1955471177079735e-05, |
|
"loss": 0.7263, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 2.551467163483931, |
|
"grad_norm": 0.44559915699792074, |
|
"learning_rate": 3.191232309285468e-05, |
|
"loss": 0.7257, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 2.5561248253376805, |
|
"grad_norm": 0.4487038810087417, |
|
"learning_rate": 3.186917500862962e-05, |
|
"loss": 0.7237, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 2.56078248719143, |
|
"grad_norm": 0.41389475679174176, |
|
"learning_rate": 3.1826026924404556e-05, |
|
"loss": 0.7238, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 2.5654401490451795, |
|
"grad_norm": 0.4786476587851291, |
|
"learning_rate": 3.17828788401795e-05, |
|
"loss": 0.7155, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 2.570097810898929, |
|
"grad_norm": 0.489442857141447, |
|
"learning_rate": 3.173973075595444e-05, |
|
"loss": 0.7372, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 2.574755472752678, |
|
"grad_norm": 0.5094148417556145, |
|
"learning_rate": 3.169658267172938e-05, |
|
"loss": 0.724, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 2.5794131346064275, |
|
"grad_norm": 0.5051284189770395, |
|
"learning_rate": 3.165343458750431e-05, |
|
"loss": 0.7293, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 2.584070796460177, |
|
"grad_norm": 0.5091129444887145, |
|
"learning_rate": 3.1610286503279255e-05, |
|
"loss": 0.7239, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 2.5887284583139265, |
|
"grad_norm": 0.47972755418083146, |
|
"learning_rate": 3.156713841905419e-05, |
|
"loss": 0.7344, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 2.5933861201676756, |
|
"grad_norm": 0.4345209631870362, |
|
"learning_rate": 3.152399033482913e-05, |
|
"loss": 0.7455, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 2.598043782021425, |
|
"grad_norm": 0.4646896409021501, |
|
"learning_rate": 3.1480842250604076e-05, |
|
"loss": 0.7449, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 2.6027014438751745, |
|
"grad_norm": 0.4536688273152399, |
|
"learning_rate": 3.143769416637902e-05, |
|
"loss": 0.7289, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 2.607359105728924, |
|
"grad_norm": 0.47474413012505823, |
|
"learning_rate": 3.1394546082153954e-05, |
|
"loss": 0.7296, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 2.6120167675826735, |
|
"grad_norm": 0.4493379121694987, |
|
"learning_rate": 3.13513979979289e-05, |
|
"loss": 0.7222, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 2.616674429436423, |
|
"grad_norm": 0.4800703688735423, |
|
"learning_rate": 3.130824991370383e-05, |
|
"loss": 0.736, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 2.6213320912901725, |
|
"grad_norm": 0.45499876944819995, |
|
"learning_rate": 3.126510182947877e-05, |
|
"loss": 0.724, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 2.625989753143922, |
|
"grad_norm": 0.47590616696249266, |
|
"learning_rate": 3.122195374525371e-05, |
|
"loss": 0.7423, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 2.630647414997671, |
|
"grad_norm": 0.4607628535501758, |
|
"learning_rate": 3.117880566102865e-05, |
|
"loss": 0.743, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 2.6353050768514206, |
|
"grad_norm": 0.6908597333736273, |
|
"learning_rate": 3.113565757680359e-05, |
|
"loss": 0.725, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 2.63996273870517, |
|
"grad_norm": 0.4523479184212291, |
|
"learning_rate": 3.109250949257853e-05, |
|
"loss": 0.7449, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 2.6446204005589196, |
|
"grad_norm": 0.46655610331143293, |
|
"learning_rate": 3.1049361408353474e-05, |
|
"loss": 0.7503, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 2.6492780624126686, |
|
"grad_norm": 0.495804001874504, |
|
"learning_rate": 3.100621332412841e-05, |
|
"loss": 0.7399, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 2.653935724266418, |
|
"grad_norm": 0.4861510474965587, |
|
"learning_rate": 3.096306523990335e-05, |
|
"loss": 0.7416, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 2.6585933861201676, |
|
"grad_norm": 0.43741153602282423, |
|
"learning_rate": 3.091991715567829e-05, |
|
"loss": 0.7293, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 2.663251047973917, |
|
"grad_norm": 0.48413889421676215, |
|
"learning_rate": 3.0876769071453223e-05, |
|
"loss": 0.7422, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 2.6679087098276666, |
|
"grad_norm": 0.4977917714734477, |
|
"learning_rate": 3.0833620987228166e-05, |
|
"loss": 0.7391, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 2.672566371681416, |
|
"grad_norm": 0.4931812204297124, |
|
"learning_rate": 3.079047290300311e-05, |
|
"loss": 0.7235, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 2.6772240335351656, |
|
"grad_norm": 0.42330639560301353, |
|
"learning_rate": 3.074732481877805e-05, |
|
"loss": 0.7322, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 2.6818816953889146, |
|
"grad_norm": 0.4580606866066788, |
|
"learning_rate": 3.070417673455299e-05, |
|
"loss": 0.7199, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 2.686539357242664, |
|
"grad_norm": 0.4685523411063327, |
|
"learning_rate": 3.066102865032793e-05, |
|
"loss": 0.7497, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 2.6911970190964136, |
|
"grad_norm": 0.43758724338416044, |
|
"learning_rate": 3.061788056610287e-05, |
|
"loss": 0.7241, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 2.695854680950163, |
|
"grad_norm": 0.5139581505046792, |
|
"learning_rate": 3.057473248187781e-05, |
|
"loss": 0.7387, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 2.700512342803912, |
|
"grad_norm": 0.4868622670145385, |
|
"learning_rate": 3.053158439765274e-05, |
|
"loss": 0.7288, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 2.7051700046576617, |
|
"grad_norm": 0.472071906864403, |
|
"learning_rate": 3.048843631342769e-05, |
|
"loss": 0.72, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 2.709827666511411, |
|
"grad_norm": 0.461171899159895, |
|
"learning_rate": 3.0445288229202625e-05, |
|
"loss": 0.7256, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 2.7144853283651607, |
|
"grad_norm": 0.4381661896304001, |
|
"learning_rate": 3.0402140144977564e-05, |
|
"loss": 0.7412, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 2.71914299021891, |
|
"grad_norm": 0.4671330057430109, |
|
"learning_rate": 3.0358992060752506e-05, |
|
"loss": 0.7253, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 2.7238006520726596, |
|
"grad_norm": 0.5346735807031436, |
|
"learning_rate": 3.0315843976527442e-05, |
|
"loss": 0.7486, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 2.728458313926409, |
|
"grad_norm": 0.4354706109435107, |
|
"learning_rate": 3.027269589230238e-05, |
|
"loss": 0.7472, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 2.7331159757801586, |
|
"grad_norm": 0.4437288684321577, |
|
"learning_rate": 3.0229547808077324e-05, |
|
"loss": 0.7269, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 2.7377736376339077, |
|
"grad_norm": 0.5218710146843476, |
|
"learning_rate": 3.018639972385226e-05, |
|
"loss": 0.7287, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 2.742431299487657, |
|
"grad_norm": 0.4459758133871711, |
|
"learning_rate": 3.0143251639627202e-05, |
|
"loss": 0.7414, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 2.7470889613414067, |
|
"grad_norm": 0.46373131985846017, |
|
"learning_rate": 3.010010355540214e-05, |
|
"loss": 0.7273, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 2.751746623195156, |
|
"grad_norm": 0.45258002027378524, |
|
"learning_rate": 3.0056955471177077e-05, |
|
"loss": 0.7207, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 2.7564042850489052, |
|
"grad_norm": 0.4742833473816359, |
|
"learning_rate": 3.001380738695202e-05, |
|
"loss": 0.7327, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 2.7610619469026547, |
|
"grad_norm": 0.4403546844429926, |
|
"learning_rate": 2.9970659302726962e-05, |
|
"loss": 0.73, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 2.765719608756404, |
|
"grad_norm": 0.442379107371057, |
|
"learning_rate": 2.99275112185019e-05, |
|
"loss": 0.7343, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 2.7703772706101537, |
|
"grad_norm": 0.4789864727290059, |
|
"learning_rate": 2.9884363134276837e-05, |
|
"loss": 0.7193, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 2.775034932463903, |
|
"grad_norm": 0.488186649733138, |
|
"learning_rate": 2.984121505005178e-05, |
|
"loss": 0.7224, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 2.7796925943176527, |
|
"grad_norm": 0.518256732525424, |
|
"learning_rate": 2.979806696582672e-05, |
|
"loss": 0.7403, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 2.784350256171402, |
|
"grad_norm": 0.4382664967456426, |
|
"learning_rate": 2.9754918881601657e-05, |
|
"loss": 0.7272, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 2.7890079180251512, |
|
"grad_norm": 0.47112665618864485, |
|
"learning_rate": 2.9711770797376596e-05, |
|
"loss": 0.7242, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 2.7936655798789007, |
|
"grad_norm": 0.4338350589114092, |
|
"learning_rate": 2.966862271315154e-05, |
|
"loss": 0.7427, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 2.7983232417326502, |
|
"grad_norm": 0.42042818529701553, |
|
"learning_rate": 2.9625474628926475e-05, |
|
"loss": 0.7185, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 2.8029809035863997, |
|
"grad_norm": 0.4517788331872833, |
|
"learning_rate": 2.9582326544701417e-05, |
|
"loss": 0.7448, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 2.807638565440149, |
|
"grad_norm": 0.46640580860154485, |
|
"learning_rate": 2.9539178460476356e-05, |
|
"loss": 0.7069, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 2.8122962272938983, |
|
"grad_norm": 0.4720403640791126, |
|
"learning_rate": 2.9496030376251292e-05, |
|
"loss": 0.7383, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 2.8169538891476478, |
|
"grad_norm": 0.45321007730262314, |
|
"learning_rate": 2.9452882292026235e-05, |
|
"loss": 0.7308, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 2.8216115510013973, |
|
"grad_norm": 0.5182778932168417, |
|
"learning_rate": 2.9409734207801177e-05, |
|
"loss": 0.7372, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 2.8262692128551468, |
|
"grad_norm": 0.4939735503668823, |
|
"learning_rate": 2.9366586123576113e-05, |
|
"loss": 0.7349, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 2.8309268747088963, |
|
"grad_norm": 0.4955354642242852, |
|
"learning_rate": 2.9323438039351052e-05, |
|
"loss": 0.7522, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 2.8355845365626458, |
|
"grad_norm": 0.4389893298550623, |
|
"learning_rate": 2.9280289955125994e-05, |
|
"loss": 0.7277, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 2.840242198416395, |
|
"grad_norm": 0.4555715150024362, |
|
"learning_rate": 2.9237141870900937e-05, |
|
"loss": 0.7364, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 2.8448998602701443, |
|
"grad_norm": 0.4906466706375227, |
|
"learning_rate": 2.9193993786675873e-05, |
|
"loss": 0.7401, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 2.849557522123894, |
|
"grad_norm": 0.4743288708851488, |
|
"learning_rate": 2.9150845702450812e-05, |
|
"loss": 0.7424, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 2.8542151839776433, |
|
"grad_norm": 0.4720591375600582, |
|
"learning_rate": 2.9107697618225754e-05, |
|
"loss": 0.7303, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 2.858872845831393, |
|
"grad_norm": 0.4538021780231351, |
|
"learning_rate": 2.906454953400069e-05, |
|
"loss": 0.7142, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 2.863530507685142, |
|
"grad_norm": 0.4659413361395233, |
|
"learning_rate": 2.9021401449775632e-05, |
|
"loss": 0.7336, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 2.8681881695388913, |
|
"grad_norm": 0.46579867620172544, |
|
"learning_rate": 2.897825336555057e-05, |
|
"loss": 0.7139, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 2.872845831392641, |
|
"grad_norm": 0.4558032880562939, |
|
"learning_rate": 2.8935105281325507e-05, |
|
"loss": 0.7329, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 2.8775034932463903, |
|
"grad_norm": 0.43377128416649396, |
|
"learning_rate": 2.889195719710045e-05, |
|
"loss": 0.7429, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 2.88216115510014, |
|
"grad_norm": 0.5083131527805567, |
|
"learning_rate": 2.8848809112875392e-05, |
|
"loss": 0.7187, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 2.8868188169538893, |
|
"grad_norm": 0.4500221460901065, |
|
"learning_rate": 2.8805661028650328e-05, |
|
"loss": 0.7313, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 2.891476478807639, |
|
"grad_norm": 0.4792267346394915, |
|
"learning_rate": 2.8762512944425267e-05, |
|
"loss": 0.7422, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 2.896134140661388, |
|
"grad_norm": 0.4756004802811119, |
|
"learning_rate": 2.871936486020021e-05, |
|
"loss": 0.7208, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 2.9007918025151374, |
|
"grad_norm": 0.5367158641267008, |
|
"learning_rate": 2.8676216775975145e-05, |
|
"loss": 0.7139, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 2.905449464368887, |
|
"grad_norm": 0.48984639642406336, |
|
"learning_rate": 2.8633068691750088e-05, |
|
"loss": 0.7287, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 2.9101071262226363, |
|
"grad_norm": 0.44180556724784487, |
|
"learning_rate": 2.8589920607525027e-05, |
|
"loss": 0.7391, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 2.9147647880763854, |
|
"grad_norm": 0.4036112417048904, |
|
"learning_rate": 2.854677252329997e-05, |
|
"loss": 0.708, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 2.919422449930135, |
|
"grad_norm": 0.4257374710769896, |
|
"learning_rate": 2.8503624439074905e-05, |
|
"loss": 0.718, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 2.9240801117838844, |
|
"grad_norm": 0.4129638150505555, |
|
"learning_rate": 2.8460476354849848e-05, |
|
"loss": 0.7401, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 2.928737773637634, |
|
"grad_norm": 0.45338592438503983, |
|
"learning_rate": 2.8417328270624787e-05, |
|
"loss": 0.7442, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 2.9333954354913834, |
|
"grad_norm": 0.4527231664247476, |
|
"learning_rate": 2.8374180186399723e-05, |
|
"loss": 0.7401, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 2.938053097345133, |
|
"grad_norm": 0.5703778818207271, |
|
"learning_rate": 2.8331032102174665e-05, |
|
"loss": 0.713, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 2.9427107591988824, |
|
"grad_norm": 0.46786631290538294, |
|
"learning_rate": 2.8287884017949608e-05, |
|
"loss": 0.7337, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 2.9473684210526314, |
|
"grad_norm": 0.44571913627760557, |
|
"learning_rate": 2.8244735933724543e-05, |
|
"loss": 0.7358, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 2.952026082906381, |
|
"grad_norm": 0.42610446275618197, |
|
"learning_rate": 2.8201587849499482e-05, |
|
"loss": 0.7281, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 2.9566837447601304, |
|
"grad_norm": 0.41660557764054273, |
|
"learning_rate": 2.8158439765274425e-05, |
|
"loss": 0.7393, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 2.96134140661388, |
|
"grad_norm": 0.4224639364977561, |
|
"learning_rate": 2.811529168104936e-05, |
|
"loss": 0.7211, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 2.9659990684676294, |
|
"grad_norm": 0.46634643211770355, |
|
"learning_rate": 2.8072143596824303e-05, |
|
"loss": 0.7325, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 2.9706567303213784, |
|
"grad_norm": 0.4519644958202247, |
|
"learning_rate": 2.8028995512599242e-05, |
|
"loss": 0.7122, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 2.975314392175128, |
|
"grad_norm": 0.4138426548649053, |
|
"learning_rate": 2.7985847428374178e-05, |
|
"loss": 0.7405, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 2.9799720540288774, |
|
"grad_norm": 0.44704044906136875, |
|
"learning_rate": 2.794269934414912e-05, |
|
"loss": 0.7371, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 2.984629715882627, |
|
"grad_norm": 0.4577403564712834, |
|
"learning_rate": 2.7899551259924063e-05, |
|
"loss": 0.748, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 2.9892873777363764, |
|
"grad_norm": 0.44734870917193764, |
|
"learning_rate": 2.7856403175699002e-05, |
|
"loss": 0.7026, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 2.993945039590126, |
|
"grad_norm": 0.44948584194107366, |
|
"learning_rate": 2.7813255091473938e-05, |
|
"loss": 0.7045, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 2.9986027014438754, |
|
"grad_norm": 0.44929280385567383, |
|
"learning_rate": 2.777010700724888e-05, |
|
"loss": 0.737, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 3.0027945971122496, |
|
"grad_norm": 0.5649620824470394, |
|
"learning_rate": 2.7726958923023823e-05, |
|
"loss": 0.6761, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 3.007452258965999, |
|
"grad_norm": 0.4955694235403784, |
|
"learning_rate": 2.768381083879876e-05, |
|
"loss": 0.655, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 3.0121099208197486, |
|
"grad_norm": 0.5520902971013796, |
|
"learning_rate": 2.7640662754573698e-05, |
|
"loss": 0.6628, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 3.016767582673498, |
|
"grad_norm": 0.4274420618363629, |
|
"learning_rate": 2.759751467034864e-05, |
|
"loss": 0.6445, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 3.021425244527247, |
|
"grad_norm": 0.5411731386852289, |
|
"learning_rate": 2.7554366586123576e-05, |
|
"loss": 0.6606, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 3.0260829063809966, |
|
"grad_norm": 0.44698573945453757, |
|
"learning_rate": 2.751121850189852e-05, |
|
"loss": 0.6648, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 3.030740568234746, |
|
"grad_norm": 0.438420990691833, |
|
"learning_rate": 2.7468070417673457e-05, |
|
"loss": 0.6663, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 3.0353982300884956, |
|
"grad_norm": 0.455229642854329, |
|
"learning_rate": 2.7424922333448393e-05, |
|
"loss": 0.6683, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 3.040055891942245, |
|
"grad_norm": 0.49190199547309743, |
|
"learning_rate": 2.7381774249223336e-05, |
|
"loss": 0.6731, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 3.0447135537959946, |
|
"grad_norm": 0.4487013040424457, |
|
"learning_rate": 2.7338626164998278e-05, |
|
"loss": 0.6528, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 3.0493712156497437, |
|
"grad_norm": 0.43437068343719426, |
|
"learning_rate": 2.7295478080773214e-05, |
|
"loss": 0.6607, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 3.054028877503493, |
|
"grad_norm": 0.4582623419182831, |
|
"learning_rate": 2.7252329996548153e-05, |
|
"loss": 0.65, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 3.0586865393572427, |
|
"grad_norm": 0.46784851131740257, |
|
"learning_rate": 2.7209181912323096e-05, |
|
"loss": 0.6817, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 3.063344201210992, |
|
"grad_norm": 0.4618365438998452, |
|
"learning_rate": 2.7166033828098038e-05, |
|
"loss": 0.659, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 3.0680018630647417, |
|
"grad_norm": 0.5051196818431409, |
|
"learning_rate": 2.7122885743872974e-05, |
|
"loss": 0.6664, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 3.0726595249184907, |
|
"grad_norm": 0.5278805351509813, |
|
"learning_rate": 2.7079737659647913e-05, |
|
"loss": 0.655, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 3.07731718677224, |
|
"grad_norm": 0.5238205105124322, |
|
"learning_rate": 2.7036589575422855e-05, |
|
"loss": 0.6501, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 3.0819748486259897, |
|
"grad_norm": 0.5238389473428904, |
|
"learning_rate": 2.699344149119779e-05, |
|
"loss": 0.6587, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 3.086632510479739, |
|
"grad_norm": 0.4956542427920923, |
|
"learning_rate": 2.6950293406972734e-05, |
|
"loss": 0.652, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 3.0912901723334887, |
|
"grad_norm": 0.4323544530467572, |
|
"learning_rate": 2.6907145322747673e-05, |
|
"loss": 0.6423, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 3.095947834187238, |
|
"grad_norm": 0.4501450597084916, |
|
"learning_rate": 2.686399723852261e-05, |
|
"loss": 0.6727, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 3.1006054960409872, |
|
"grad_norm": 0.5342968111952526, |
|
"learning_rate": 2.682084915429755e-05, |
|
"loss": 0.6613, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 3.1052631578947367, |
|
"grad_norm": 0.4952428719045549, |
|
"learning_rate": 2.677770107007249e-05, |
|
"loss": 0.6539, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 3.109920819748486, |
|
"grad_norm": 0.4464148435601403, |
|
"learning_rate": 2.673455298584743e-05, |
|
"loss": 0.6496, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 3.1145784816022357, |
|
"grad_norm": 0.42423733730735846, |
|
"learning_rate": 2.6691404901622368e-05, |
|
"loss": 0.662, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 3.119236143455985, |
|
"grad_norm": 0.4526284930230991, |
|
"learning_rate": 2.664825681739731e-05, |
|
"loss": 0.6862, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 3.1238938053097347, |
|
"grad_norm": 0.4327570355006767, |
|
"learning_rate": 2.6605108733172247e-05, |
|
"loss": 0.6616, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 3.1285514671634838, |
|
"grad_norm": 0.4180247578911309, |
|
"learning_rate": 2.6561960648947186e-05, |
|
"loss": 0.6613, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 3.1332091290172333, |
|
"grad_norm": 0.48989064474286764, |
|
"learning_rate": 2.6518812564722128e-05, |
|
"loss": 0.673, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 3.1378667908709827, |
|
"grad_norm": 0.45013987334870337, |
|
"learning_rate": 2.647566448049707e-05, |
|
"loss": 0.6647, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 3.1425244527247322, |
|
"grad_norm": 0.4306897032201952, |
|
"learning_rate": 2.6432516396272006e-05, |
|
"loss": 0.663, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 3.1471821145784817, |
|
"grad_norm": 0.45218980570674416, |
|
"learning_rate": 2.6389368312046945e-05, |
|
"loss": 0.6626, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 3.1518397764322312, |
|
"grad_norm": 0.4269558776822659, |
|
"learning_rate": 2.6346220227821888e-05, |
|
"loss": 0.6587, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 3.1564974382859803, |
|
"grad_norm": 0.4570131615052829, |
|
"learning_rate": 2.6303072143596824e-05, |
|
"loss": 0.6681, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 3.16115510013973, |
|
"grad_norm": 0.41909205537848343, |
|
"learning_rate": 2.6259924059371766e-05, |
|
"loss": 0.6512, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 3.1658127619934793, |
|
"grad_norm": 0.45070558039326897, |
|
"learning_rate": 2.6216775975146705e-05, |
|
"loss": 0.6688, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 3.1704704238472288, |
|
"grad_norm": 0.4418832834353741, |
|
"learning_rate": 2.617362789092164e-05, |
|
"loss": 0.6742, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 3.1751280857009783, |
|
"grad_norm": 0.43144075729659026, |
|
"learning_rate": 2.6130479806696584e-05, |
|
"loss": 0.6819, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 3.1797857475547273, |
|
"grad_norm": 0.4461048208336212, |
|
"learning_rate": 2.6087331722471526e-05, |
|
"loss": 0.6578, |
|
"step": 3415 |
|
}, |
|
{ |
|
"epoch": 3.184443409408477, |
|
"grad_norm": 0.495460849711094, |
|
"learning_rate": 2.6044183638246462e-05, |
|
"loss": 0.6808, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 3.1891010712622263, |
|
"grad_norm": 0.5193560885811147, |
|
"learning_rate": 2.60010355540214e-05, |
|
"loss": 0.6629, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 3.193758733115976, |
|
"grad_norm": 0.520924441326943, |
|
"learning_rate": 2.5957887469796343e-05, |
|
"loss": 0.6854, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 3.1984163949697253, |
|
"grad_norm": 0.4557258318037281, |
|
"learning_rate": 2.591473938557128e-05, |
|
"loss": 0.6797, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 3.203074056823475, |
|
"grad_norm": 0.48141994845392755, |
|
"learning_rate": 2.587159130134622e-05, |
|
"loss": 0.6728, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 3.207731718677224, |
|
"grad_norm": 0.4813706001238264, |
|
"learning_rate": 2.582844321712116e-05, |
|
"loss": 0.6529, |
|
"step": 3445 |
|
}, |
|
{ |
|
"epoch": 3.2123893805309733, |
|
"grad_norm": 0.4510461852751938, |
|
"learning_rate": 2.5785295132896096e-05, |
|
"loss": 0.6669, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 3.217047042384723, |
|
"grad_norm": 0.4602528355138011, |
|
"learning_rate": 2.574214704867104e-05, |
|
"loss": 0.6725, |
|
"step": 3455 |
|
}, |
|
{ |
|
"epoch": 3.2217047042384723, |
|
"grad_norm": 0.4633154565224799, |
|
"learning_rate": 2.569899896444598e-05, |
|
"loss": 0.6692, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 3.226362366092222, |
|
"grad_norm": 0.47600755891130275, |
|
"learning_rate": 2.565585088022092e-05, |
|
"loss": 0.6613, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 3.2310200279459713, |
|
"grad_norm": 0.4882939190293464, |
|
"learning_rate": 2.5612702795995856e-05, |
|
"loss": 0.674, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 3.2356776897997204, |
|
"grad_norm": 0.4532071802883846, |
|
"learning_rate": 2.55695547117708e-05, |
|
"loss": 0.6606, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 3.24033535165347, |
|
"grad_norm": 0.4296635507904013, |
|
"learning_rate": 2.552640662754574e-05, |
|
"loss": 0.67, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 3.2449930135072194, |
|
"grad_norm": 0.5017255136543564, |
|
"learning_rate": 2.5483258543320677e-05, |
|
"loss": 0.6871, |
|
"step": 3485 |
|
}, |
|
{ |
|
"epoch": 3.249650675360969, |
|
"grad_norm": 0.4948112197113389, |
|
"learning_rate": 2.5440110459095616e-05, |
|
"loss": 0.693, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 3.2543083372147183, |
|
"grad_norm": 0.49247917585392487, |
|
"learning_rate": 2.539696237487056e-05, |
|
"loss": 0.6876, |
|
"step": 3495 |
|
}, |
|
{ |
|
"epoch": 3.258965999068468, |
|
"grad_norm": 0.5347369741382454, |
|
"learning_rate": 2.5353814290645494e-05, |
|
"loss": 0.6849, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 3.263623660922217, |
|
"grad_norm": 0.495732345471852, |
|
"learning_rate": 2.5310666206420437e-05, |
|
"loss": 0.6908, |
|
"step": 3505 |
|
}, |
|
{ |
|
"epoch": 3.2682813227759664, |
|
"grad_norm": 0.45392823221334166, |
|
"learning_rate": 2.5267518122195376e-05, |
|
"loss": 0.6901, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 3.272938984629716, |
|
"grad_norm": 0.44957677688338854, |
|
"learning_rate": 2.5224370037970312e-05, |
|
"loss": 0.6776, |
|
"step": 3515 |
|
}, |
|
{ |
|
"epoch": 3.2775966464834654, |
|
"grad_norm": 0.448342787281423, |
|
"learning_rate": 2.5181221953745254e-05, |
|
"loss": 0.6751, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 3.282254308337215, |
|
"grad_norm": 0.4753362927286903, |
|
"learning_rate": 2.5138073869520197e-05, |
|
"loss": 0.7008, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 3.286911970190964, |
|
"grad_norm": 0.4303570706933123, |
|
"learning_rate": 2.5094925785295132e-05, |
|
"loss": 0.6564, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 3.2915696320447134, |
|
"grad_norm": 0.4566321018828311, |
|
"learning_rate": 2.505177770107007e-05, |
|
"loss": 0.6548, |
|
"step": 3535 |
|
}, |
|
{ |
|
"epoch": 3.296227293898463, |
|
"grad_norm": 0.5013218048356842, |
|
"learning_rate": 2.5008629616845014e-05, |
|
"loss": 0.6602, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 3.3008849557522124, |
|
"grad_norm": 0.48919607850045277, |
|
"learning_rate": 2.4965481532619953e-05, |
|
"loss": 0.6775, |
|
"step": 3545 |
|
}, |
|
{ |
|
"epoch": 3.305542617605962, |
|
"grad_norm": 0.48623977714687, |
|
"learning_rate": 2.4922333448394892e-05, |
|
"loss": 0.6658, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 3.3102002794597114, |
|
"grad_norm": 0.49148464778598894, |
|
"learning_rate": 2.487918536416983e-05, |
|
"loss": 0.6521, |
|
"step": 3555 |
|
}, |
|
{ |
|
"epoch": 3.3148579413134605, |
|
"grad_norm": 0.4893622152875232, |
|
"learning_rate": 2.483603727994477e-05, |
|
"loss": 0.6684, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 3.31951560316721, |
|
"grad_norm": 0.4536686442042023, |
|
"learning_rate": 2.4792889195719713e-05, |
|
"loss": 0.6689, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 3.3241732650209594, |
|
"grad_norm": 0.4736127984225089, |
|
"learning_rate": 2.4749741111494652e-05, |
|
"loss": 0.6622, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 3.328830926874709, |
|
"grad_norm": 0.46628026872899403, |
|
"learning_rate": 2.4706593027269588e-05, |
|
"loss": 0.6813, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 3.3334885887284584, |
|
"grad_norm": 0.4999633681973708, |
|
"learning_rate": 2.466344494304453e-05, |
|
"loss": 0.6731, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 3.3381462505822075, |
|
"grad_norm": 0.5641043771334887, |
|
"learning_rate": 2.462029685881947e-05, |
|
"loss": 0.6767, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 3.342803912435957, |
|
"grad_norm": 0.45714731895877286, |
|
"learning_rate": 2.457714877459441e-05, |
|
"loss": 0.6643, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 3.3474615742897065, |
|
"grad_norm": 0.4334973778339274, |
|
"learning_rate": 2.4534000690369348e-05, |
|
"loss": 0.6614, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 3.352119236143456, |
|
"grad_norm": 0.4886669385610417, |
|
"learning_rate": 2.4490852606144287e-05, |
|
"loss": 0.664, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 3.3567768979972055, |
|
"grad_norm": 0.45151461746127364, |
|
"learning_rate": 2.444770452191923e-05, |
|
"loss": 0.6492, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 3.361434559850955, |
|
"grad_norm": 0.4532080460532112, |
|
"learning_rate": 2.440455643769417e-05, |
|
"loss": 0.6684, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 3.3660922217047045, |
|
"grad_norm": 0.4607883027240199, |
|
"learning_rate": 2.4361408353469108e-05, |
|
"loss": 0.6627, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 3.3707498835584535, |
|
"grad_norm": 0.5184420407083233, |
|
"learning_rate": 2.4318260269244047e-05, |
|
"loss": 0.6633, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 3.375407545412203, |
|
"grad_norm": 0.42981832962189226, |
|
"learning_rate": 2.4275112185018986e-05, |
|
"loss": 0.6794, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 3.3800652072659525, |
|
"grad_norm": 0.4696743898481451, |
|
"learning_rate": 2.4231964100793925e-05, |
|
"loss": 0.6665, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 3.384722869119702, |
|
"grad_norm": 0.4471999488266161, |
|
"learning_rate": 2.4188816016568867e-05, |
|
"loss": 0.6842, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 3.3893805309734515, |
|
"grad_norm": 0.42037921982570414, |
|
"learning_rate": 2.4145667932343803e-05, |
|
"loss": 0.6449, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 3.3940381928272005, |
|
"grad_norm": 0.452619450283994, |
|
"learning_rate": 2.4102519848118746e-05, |
|
"loss": 0.6728, |
|
"step": 3645 |
|
}, |
|
{ |
|
"epoch": 3.39869585468095, |
|
"grad_norm": 0.42839450229764453, |
|
"learning_rate": 2.4059371763893685e-05, |
|
"loss": 0.6549, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 3.4033535165346995, |
|
"grad_norm": 0.4358545473409289, |
|
"learning_rate": 2.4016223679668624e-05, |
|
"loss": 0.6831, |
|
"step": 3655 |
|
}, |
|
{ |
|
"epoch": 3.408011178388449, |
|
"grad_norm": 0.44463467097834486, |
|
"learning_rate": 2.3973075595443563e-05, |
|
"loss": 0.6588, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 3.4126688402421985, |
|
"grad_norm": 0.44918393535421824, |
|
"learning_rate": 2.3929927511218502e-05, |
|
"loss": 0.6592, |
|
"step": 3665 |
|
}, |
|
{ |
|
"epoch": 3.417326502095948, |
|
"grad_norm": 0.45572870889698214, |
|
"learning_rate": 2.388677942699344e-05, |
|
"loss": 0.6847, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 3.421984163949697, |
|
"grad_norm": 0.5312757889979894, |
|
"learning_rate": 2.3843631342768384e-05, |
|
"loss": 0.6639, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 3.4266418258034466, |
|
"grad_norm": 0.5266603067472917, |
|
"learning_rate": 2.3800483258543323e-05, |
|
"loss": 0.6482, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 3.431299487657196, |
|
"grad_norm": 0.4584084291790257, |
|
"learning_rate": 2.3757335174318262e-05, |
|
"loss": 0.6757, |
|
"step": 3685 |
|
}, |
|
{ |
|
"epoch": 3.4359571495109456, |
|
"grad_norm": 0.4304692509027962, |
|
"learning_rate": 2.37141870900932e-05, |
|
"loss": 0.6596, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 3.440614811364695, |
|
"grad_norm": 0.4655682089191918, |
|
"learning_rate": 2.367103900586814e-05, |
|
"loss": 0.6664, |
|
"step": 3695 |
|
}, |
|
{ |
|
"epoch": 3.445272473218444, |
|
"grad_norm": 0.4473286471345806, |
|
"learning_rate": 2.3627890921643083e-05, |
|
"loss": 0.6814, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 3.4499301350721936, |
|
"grad_norm": 0.4342461882548441, |
|
"learning_rate": 2.358474283741802e-05, |
|
"loss": 0.6859, |
|
"step": 3705 |
|
}, |
|
{ |
|
"epoch": 3.454587796925943, |
|
"grad_norm": 0.4634466813676635, |
|
"learning_rate": 2.3541594753192957e-05, |
|
"loss": 0.675, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 3.4592454587796926, |
|
"grad_norm": 0.463462899087277, |
|
"learning_rate": 2.34984466689679e-05, |
|
"loss": 0.6581, |
|
"step": 3715 |
|
}, |
|
{ |
|
"epoch": 3.463903120633442, |
|
"grad_norm": 0.4718492791389515, |
|
"learning_rate": 2.345529858474284e-05, |
|
"loss": 0.6633, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 3.4685607824871916, |
|
"grad_norm": 0.46408016382949474, |
|
"learning_rate": 2.3412150500517778e-05, |
|
"loss": 0.6803, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 3.473218444340941, |
|
"grad_norm": 0.4515706039077529, |
|
"learning_rate": 2.3369002416292717e-05, |
|
"loss": 0.672, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 3.47787610619469, |
|
"grad_norm": 0.4546481906424658, |
|
"learning_rate": 2.3325854332067656e-05, |
|
"loss": 0.6788, |
|
"step": 3735 |
|
}, |
|
{ |
|
"epoch": 3.4825337680484396, |
|
"grad_norm": 0.45260779635281945, |
|
"learning_rate": 2.32827062478426e-05, |
|
"loss": 0.6621, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 3.487191429902189, |
|
"grad_norm": 0.4465723090136659, |
|
"learning_rate": 2.3239558163617538e-05, |
|
"loss": 0.6711, |
|
"step": 3745 |
|
}, |
|
{ |
|
"epoch": 3.4918490917559386, |
|
"grad_norm": 0.4503456117275582, |
|
"learning_rate": 2.3196410079392474e-05, |
|
"loss": 0.6787, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 3.496506753609688, |
|
"grad_norm": 0.4488480782408311, |
|
"learning_rate": 2.3153261995167416e-05, |
|
"loss": 0.6715, |
|
"step": 3755 |
|
}, |
|
{ |
|
"epoch": 3.501164415463437, |
|
"grad_norm": 0.4807688539808926, |
|
"learning_rate": 2.3110113910942355e-05, |
|
"loss": 0.6691, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 3.5058220773171866, |
|
"grad_norm": 0.5167283934755129, |
|
"learning_rate": 2.3066965826717294e-05, |
|
"loss": 0.6556, |
|
"step": 3765 |
|
}, |
|
{ |
|
"epoch": 3.510479739170936, |
|
"grad_norm": 0.5540283763976265, |
|
"learning_rate": 2.3023817742492234e-05, |
|
"loss": 0.6796, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 3.5151374010246856, |
|
"grad_norm": 0.42946681800570363, |
|
"learning_rate": 2.2980669658267173e-05, |
|
"loss": 0.659, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 3.519795062878435, |
|
"grad_norm": 0.42051099266667324, |
|
"learning_rate": 2.2937521574042115e-05, |
|
"loss": 0.6409, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 3.5244527247321846, |
|
"grad_norm": 22.372062876041408, |
|
"learning_rate": 2.2894373489817054e-05, |
|
"loss": 0.8085, |
|
"step": 3785 |
|
}, |
|
{ |
|
"epoch": 3.529110386585934, |
|
"grad_norm": 0.4376085550930596, |
|
"learning_rate": 2.285122540559199e-05, |
|
"loss": 0.6663, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 3.533768048439683, |
|
"grad_norm": 0.4721290842606934, |
|
"learning_rate": 2.2808077321366933e-05, |
|
"loss": 0.6597, |
|
"step": 3795 |
|
}, |
|
{ |
|
"epoch": 3.5384257102934327, |
|
"grad_norm": 0.4524333273382029, |
|
"learning_rate": 2.276492923714187e-05, |
|
"loss": 0.6813, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 3.543083372147182, |
|
"grad_norm": 0.4683136917340155, |
|
"learning_rate": 2.2721781152916814e-05, |
|
"loss": 0.6913, |
|
"step": 3805 |
|
}, |
|
{ |
|
"epoch": 3.5477410340009317, |
|
"grad_norm": 0.44767173362613494, |
|
"learning_rate": 2.267863306869175e-05, |
|
"loss": 0.6795, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 3.5523986958546807, |
|
"grad_norm": 0.42108561845735404, |
|
"learning_rate": 2.263548498446669e-05, |
|
"loss": 0.678, |
|
"step": 3815 |
|
}, |
|
{ |
|
"epoch": 3.55705635770843, |
|
"grad_norm": 0.42298340047101063, |
|
"learning_rate": 2.259233690024163e-05, |
|
"loss": 0.6593, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 3.5617140195621797, |
|
"grad_norm": 0.4416808046947552, |
|
"learning_rate": 2.254918881601657e-05, |
|
"loss": 0.6868, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 3.566371681415929, |
|
"grad_norm": 0.4488088247207295, |
|
"learning_rate": 2.250604073179151e-05, |
|
"loss": 0.6741, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 3.5710293432696787, |
|
"grad_norm": 0.4212039178070685, |
|
"learning_rate": 2.246289264756645e-05, |
|
"loss": 0.6811, |
|
"step": 3835 |
|
}, |
|
{ |
|
"epoch": 3.575687005123428, |
|
"grad_norm": 0.44260327570897895, |
|
"learning_rate": 2.2419744563341388e-05, |
|
"loss": 0.6686, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 3.5803446669771777, |
|
"grad_norm": 0.4438396142882864, |
|
"learning_rate": 2.237659647911633e-05, |
|
"loss": 0.659, |
|
"step": 3845 |
|
}, |
|
{ |
|
"epoch": 3.5850023288309267, |
|
"grad_norm": 0.44951766788920505, |
|
"learning_rate": 2.233344839489127e-05, |
|
"loss": 0.6702, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 3.5896599906846762, |
|
"grad_norm": 0.4313373929106834, |
|
"learning_rate": 2.2290300310666205e-05, |
|
"loss": 0.6851, |
|
"step": 3855 |
|
}, |
|
{ |
|
"epoch": 3.5943176525384257, |
|
"grad_norm": 0.4271227216658829, |
|
"learning_rate": 2.2247152226441148e-05, |
|
"loss": 0.6717, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 3.598975314392175, |
|
"grad_norm": 0.4648664337839032, |
|
"learning_rate": 2.2204004142216087e-05, |
|
"loss": 0.6823, |
|
"step": 3865 |
|
}, |
|
{ |
|
"epoch": 3.6036329762459243, |
|
"grad_norm": 0.4473468080782318, |
|
"learning_rate": 2.2160856057991026e-05, |
|
"loss": 0.6735, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 3.6082906380996738, |
|
"grad_norm": 0.43996342525349236, |
|
"learning_rate": 2.2117707973765965e-05, |
|
"loss": 0.6734, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 3.6129482999534233, |
|
"grad_norm": 0.4291942243921852, |
|
"learning_rate": 2.2074559889540904e-05, |
|
"loss": 0.6789, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 3.6176059618071728, |
|
"grad_norm": 0.4765745278898558, |
|
"learning_rate": 2.2031411805315847e-05, |
|
"loss": 0.668, |
|
"step": 3885 |
|
}, |
|
{ |
|
"epoch": 3.6222636236609222, |
|
"grad_norm": 0.41815398973713275, |
|
"learning_rate": 2.1988263721090786e-05, |
|
"loss": 0.6682, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 3.6269212855146717, |
|
"grad_norm": 0.43719379830239535, |
|
"learning_rate": 2.1945115636865725e-05, |
|
"loss": 0.6753, |
|
"step": 3895 |
|
}, |
|
{ |
|
"epoch": 3.6315789473684212, |
|
"grad_norm": 0.4006880656840204, |
|
"learning_rate": 2.1901967552640664e-05, |
|
"loss": 0.6505, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 3.6362366092221707, |
|
"grad_norm": 0.4121265038250903, |
|
"learning_rate": 2.1858819468415603e-05, |
|
"loss": 0.6857, |
|
"step": 3905 |
|
}, |
|
{ |
|
"epoch": 3.64089427107592, |
|
"grad_norm": 0.4326970184912615, |
|
"learning_rate": 2.1815671384190542e-05, |
|
"loss": 0.6683, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 3.6455519329296693, |
|
"grad_norm": 0.4201491695924619, |
|
"learning_rate": 2.1772523299965485e-05, |
|
"loss": 0.6771, |
|
"step": 3915 |
|
}, |
|
{ |
|
"epoch": 3.6502095947834188, |
|
"grad_norm": 0.43258507023687753, |
|
"learning_rate": 2.172937521574042e-05, |
|
"loss": 0.6584, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 3.6548672566371683, |
|
"grad_norm": 0.4585906560215873, |
|
"learning_rate": 2.1686227131515363e-05, |
|
"loss": 0.6373, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 3.6595249184909173, |
|
"grad_norm": 0.43713677266751094, |
|
"learning_rate": 2.1643079047290302e-05, |
|
"loss": 0.6701, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 3.664182580344667, |
|
"grad_norm": 0.452764864256112, |
|
"learning_rate": 2.159993096306524e-05, |
|
"loss": 0.6669, |
|
"step": 3935 |
|
}, |
|
{ |
|
"epoch": 3.6688402421984163, |
|
"grad_norm": 0.41850536874968525, |
|
"learning_rate": 2.155678287884018e-05, |
|
"loss": 0.6703, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 3.673497904052166, |
|
"grad_norm": 0.4785367330909523, |
|
"learning_rate": 2.151363479461512e-05, |
|
"loss": 0.6653, |
|
"step": 3945 |
|
}, |
|
{ |
|
"epoch": 3.6781555659059153, |
|
"grad_norm": 0.42672653887627465, |
|
"learning_rate": 2.147048671039006e-05, |
|
"loss": 0.6523, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 3.682813227759665, |
|
"grad_norm": 0.38619011006304327, |
|
"learning_rate": 2.1427338626165e-05, |
|
"loss": 0.6766, |
|
"step": 3955 |
|
}, |
|
{ |
|
"epoch": 3.6874708896134143, |
|
"grad_norm": 0.48329034915930996, |
|
"learning_rate": 2.1384190541939937e-05, |
|
"loss": 0.6794, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 3.6921285514671633, |
|
"grad_norm": 0.41716760703725536, |
|
"learning_rate": 2.134104245771488e-05, |
|
"loss": 0.6674, |
|
"step": 3965 |
|
}, |
|
{ |
|
"epoch": 3.696786213320913, |
|
"grad_norm": 0.46039645375421206, |
|
"learning_rate": 2.129789437348982e-05, |
|
"loss": 0.6829, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 3.7014438751746623, |
|
"grad_norm": 0.41626180642112476, |
|
"learning_rate": 2.1254746289264758e-05, |
|
"loss": 0.648, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 3.706101537028412, |
|
"grad_norm": 0.447684709770321, |
|
"learning_rate": 2.1211598205039697e-05, |
|
"loss": 0.6604, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 3.710759198882161, |
|
"grad_norm": 0.41527686884250226, |
|
"learning_rate": 2.1168450120814636e-05, |
|
"loss": 0.6944, |
|
"step": 3985 |
|
}, |
|
{ |
|
"epoch": 3.7154168607359104, |
|
"grad_norm": 0.4673707506990223, |
|
"learning_rate": 2.1125302036589575e-05, |
|
"loss": 0.6715, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 3.72007452258966, |
|
"grad_norm": 0.4259681640946891, |
|
"learning_rate": 2.1082153952364517e-05, |
|
"loss": 0.6609, |
|
"step": 3995 |
|
}, |
|
{ |
|
"epoch": 3.7247321844434094, |
|
"grad_norm": 0.44578301736933307, |
|
"learning_rate": 2.1039005868139457e-05, |
|
"loss": 0.6669, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 3.729389846297159, |
|
"grad_norm": 0.4201611550934968, |
|
"learning_rate": 2.0995857783914392e-05, |
|
"loss": 0.6676, |
|
"step": 4005 |
|
}, |
|
{ |
|
"epoch": 3.7340475081509084, |
|
"grad_norm": 0.4451294722121708, |
|
"learning_rate": 2.0952709699689335e-05, |
|
"loss": 0.6807, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 3.738705170004658, |
|
"grad_norm": 0.39293811743802215, |
|
"learning_rate": 2.0909561615464274e-05, |
|
"loss": 0.6537, |
|
"step": 4015 |
|
}, |
|
{ |
|
"epoch": 3.7433628318584073, |
|
"grad_norm": 0.425727834151418, |
|
"learning_rate": 2.0866413531239216e-05, |
|
"loss": 0.6812, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 3.7480204937121564, |
|
"grad_norm": 0.43571368169120256, |
|
"learning_rate": 2.0823265447014152e-05, |
|
"loss": 0.6749, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 3.752678155565906, |
|
"grad_norm": 0.40642489831038664, |
|
"learning_rate": 2.078011736278909e-05, |
|
"loss": 0.6701, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 3.7573358174196554, |
|
"grad_norm": 0.46216496870944207, |
|
"learning_rate": 2.0736969278564034e-05, |
|
"loss": 0.6635, |
|
"step": 4035 |
|
}, |
|
{ |
|
"epoch": 3.761993479273405, |
|
"grad_norm": 0.4365114089567727, |
|
"learning_rate": 2.0693821194338973e-05, |
|
"loss": 0.6608, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 3.766651141127154, |
|
"grad_norm": 0.43741874072599796, |
|
"learning_rate": 2.0650673110113912e-05, |
|
"loss": 0.6885, |
|
"step": 4045 |
|
}, |
|
{ |
|
"epoch": 3.7713088029809034, |
|
"grad_norm": 0.42096705297972964, |
|
"learning_rate": 2.060752502588885e-05, |
|
"loss": 0.6802, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 3.775966464834653, |
|
"grad_norm": 0.49406108041678903, |
|
"learning_rate": 2.056437694166379e-05, |
|
"loss": 0.6843, |
|
"step": 4055 |
|
}, |
|
{ |
|
"epoch": 3.7806241266884024, |
|
"grad_norm": 0.42050323757733654, |
|
"learning_rate": 2.0521228857438733e-05, |
|
"loss": 0.6659, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 3.785281788542152, |
|
"grad_norm": 0.43875536709277346, |
|
"learning_rate": 2.0478080773213672e-05, |
|
"loss": 0.6593, |
|
"step": 4065 |
|
}, |
|
{ |
|
"epoch": 3.7899394503959014, |
|
"grad_norm": 0.4551758764550099, |
|
"learning_rate": 2.0434932688988608e-05, |
|
"loss": 0.6676, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 3.794597112249651, |
|
"grad_norm": 0.43875906331559933, |
|
"learning_rate": 2.039178460476355e-05, |
|
"loss": 0.6633, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 3.7992547741034, |
|
"grad_norm": 0.49522270487134434, |
|
"learning_rate": 2.034863652053849e-05, |
|
"loss": 0.6723, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 3.8039124359571495, |
|
"grad_norm": 0.4255138431861227, |
|
"learning_rate": 2.0305488436313428e-05, |
|
"loss": 0.6707, |
|
"step": 4085 |
|
}, |
|
{ |
|
"epoch": 3.808570097810899, |
|
"grad_norm": 0.4249333456739798, |
|
"learning_rate": 2.0262340352088367e-05, |
|
"loss": 0.6672, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 3.8132277596646484, |
|
"grad_norm": 0.4222583096651918, |
|
"learning_rate": 2.0219192267863306e-05, |
|
"loss": 0.6578, |
|
"step": 4095 |
|
}, |
|
{ |
|
"epoch": 3.8178854215183975, |
|
"grad_norm": 0.46110284243808286, |
|
"learning_rate": 2.017604418363825e-05, |
|
"loss": 0.665, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 3.822543083372147, |
|
"grad_norm": 0.439490642784066, |
|
"learning_rate": 2.0132896099413188e-05, |
|
"loss": 0.6601, |
|
"step": 4105 |
|
}, |
|
{ |
|
"epoch": 3.8272007452258965, |
|
"grad_norm": 0.44224347985960144, |
|
"learning_rate": 2.0089748015188127e-05, |
|
"loss": 0.6647, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 3.831858407079646, |
|
"grad_norm": 0.43734391875827633, |
|
"learning_rate": 2.0046599930963066e-05, |
|
"loss": 0.6789, |
|
"step": 4115 |
|
}, |
|
{ |
|
"epoch": 3.8365160689333955, |
|
"grad_norm": 0.44541776084227985, |
|
"learning_rate": 2.0003451846738005e-05, |
|
"loss": 0.6567, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 3.841173730787145, |
|
"grad_norm": 0.4351663828609302, |
|
"learning_rate": 1.9960303762512945e-05, |
|
"loss": 0.6774, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 3.8458313926408945, |
|
"grad_norm": 0.46212604143724856, |
|
"learning_rate": 1.9917155678287887e-05, |
|
"loss": 0.6816, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 3.850489054494644, |
|
"grad_norm": 0.4827514548611152, |
|
"learning_rate": 1.9874007594062823e-05, |
|
"loss": 0.6771, |
|
"step": 4135 |
|
}, |
|
{ |
|
"epoch": 3.855146716348393, |
|
"grad_norm": 0.466359350965105, |
|
"learning_rate": 1.9830859509837765e-05, |
|
"loss": 0.6727, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 3.8598043782021425, |
|
"grad_norm": 0.4288376838447238, |
|
"learning_rate": 1.9787711425612704e-05, |
|
"loss": 0.6782, |
|
"step": 4145 |
|
}, |
|
{ |
|
"epoch": 3.864462040055892, |
|
"grad_norm": 0.4221479927446133, |
|
"learning_rate": 1.9744563341387643e-05, |
|
"loss": 0.6582, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 3.8691197019096415, |
|
"grad_norm": 0.42808299688301105, |
|
"learning_rate": 1.9701415257162583e-05, |
|
"loss": 0.6633, |
|
"step": 4155 |
|
}, |
|
{ |
|
"epoch": 3.8737773637633905, |
|
"grad_norm": 0.4385868446482319, |
|
"learning_rate": 1.9658267172937522e-05, |
|
"loss": 0.6957, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 3.87843502561714, |
|
"grad_norm": 0.46664060467404894, |
|
"learning_rate": 1.961511908871246e-05, |
|
"loss": 0.6815, |
|
"step": 4165 |
|
}, |
|
{ |
|
"epoch": 3.8830926874708895, |
|
"grad_norm": 0.4415348584856722, |
|
"learning_rate": 1.9571971004487403e-05, |
|
"loss": 0.6647, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 3.887750349324639, |
|
"grad_norm": 0.449942040491185, |
|
"learning_rate": 1.952882292026234e-05, |
|
"loss": 0.68, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 3.8924080111783885, |
|
"grad_norm": 0.4414098811625752, |
|
"learning_rate": 1.948567483603728e-05, |
|
"loss": 0.671, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 3.897065673032138, |
|
"grad_norm": 0.42891467747530376, |
|
"learning_rate": 1.944252675181222e-05, |
|
"loss": 0.6732, |
|
"step": 4185 |
|
}, |
|
{ |
|
"epoch": 3.9017233348858875, |
|
"grad_norm": 0.41584348048954695, |
|
"learning_rate": 1.939937866758716e-05, |
|
"loss": 0.6689, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 3.9063809967396366, |
|
"grad_norm": 0.43294634671450094, |
|
"learning_rate": 1.93562305833621e-05, |
|
"loss": 0.6562, |
|
"step": 4195 |
|
}, |
|
{ |
|
"epoch": 3.911038658593386, |
|
"grad_norm": 0.43238591716188074, |
|
"learning_rate": 1.9313082499137038e-05, |
|
"loss": 0.6619, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 3.9156963204471356, |
|
"grad_norm": 0.417950730256557, |
|
"learning_rate": 1.9269934414911977e-05, |
|
"loss": 0.6711, |
|
"step": 4205 |
|
}, |
|
{ |
|
"epoch": 3.920353982300885, |
|
"grad_norm": 0.40509888786084697, |
|
"learning_rate": 1.922678633068692e-05, |
|
"loss": 0.6623, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 3.925011644154634, |
|
"grad_norm": 0.4243068617748856, |
|
"learning_rate": 1.918363824646186e-05, |
|
"loss": 0.6583, |
|
"step": 4215 |
|
}, |
|
{ |
|
"epoch": 3.9296693060083836, |
|
"grad_norm": 0.41822911394479817, |
|
"learning_rate": 1.9140490162236798e-05, |
|
"loss": 0.6645, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 3.934326967862133, |
|
"grad_norm": 0.4377285415892218, |
|
"learning_rate": 1.9097342078011737e-05, |
|
"loss": 0.6603, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 3.9389846297158826, |
|
"grad_norm": 0.4141650537612767, |
|
"learning_rate": 1.9054193993786676e-05, |
|
"loss": 0.6673, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 3.943642291569632, |
|
"grad_norm": 0.4382605865395, |
|
"learning_rate": 1.901104590956162e-05, |
|
"loss": 0.6643, |
|
"step": 4235 |
|
}, |
|
{ |
|
"epoch": 3.9482999534233816, |
|
"grad_norm": 0.4202680156252619, |
|
"learning_rate": 1.8967897825336554e-05, |
|
"loss": 0.6587, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 3.952957615277131, |
|
"grad_norm": 0.40387812732388845, |
|
"learning_rate": 1.8924749741111493e-05, |
|
"loss": 0.6601, |
|
"step": 4245 |
|
}, |
|
{ |
|
"epoch": 3.9576152771308806, |
|
"grad_norm": 0.4156926899477138, |
|
"learning_rate": 1.8881601656886436e-05, |
|
"loss": 0.6676, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 3.9622729389846296, |
|
"grad_norm": 0.42946870450766167, |
|
"learning_rate": 1.8838453572661375e-05, |
|
"loss": 0.6683, |
|
"step": 4255 |
|
}, |
|
{ |
|
"epoch": 3.966930600838379, |
|
"grad_norm": 0.43212939224797026, |
|
"learning_rate": 1.8795305488436314e-05, |
|
"loss": 0.6792, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 3.9715882626921286, |
|
"grad_norm": 0.43345652576381805, |
|
"learning_rate": 1.8752157404211253e-05, |
|
"loss": 0.678, |
|
"step": 4265 |
|
}, |
|
{ |
|
"epoch": 3.976245924545878, |
|
"grad_norm": 0.40339649642482517, |
|
"learning_rate": 1.8709009319986192e-05, |
|
"loss": 0.6699, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 3.980903586399627, |
|
"grad_norm": 0.4169258845494642, |
|
"learning_rate": 1.8665861235761135e-05, |
|
"loss": 0.6817, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 3.9855612482533767, |
|
"grad_norm": 0.42456347826037305, |
|
"learning_rate": 1.8622713151536074e-05, |
|
"loss": 0.6763, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 3.990218910107126, |
|
"grad_norm": 0.4181412977181726, |
|
"learning_rate": 1.857956506731101e-05, |
|
"loss": 0.6579, |
|
"step": 4285 |
|
}, |
|
{ |
|
"epoch": 3.9948765719608756, |
|
"grad_norm": 0.41303621027879367, |
|
"learning_rate": 1.8536416983085952e-05, |
|
"loss": 0.6697, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 3.999534233814625, |
|
"grad_norm": 0.4264209763127758, |
|
"learning_rate": 1.849326889886089e-05, |
|
"loss": 0.673, |
|
"step": 4295 |
|
}, |
|
{ |
|
"epoch": 4.003726129483, |
|
"grad_norm": 0.4806819361610478, |
|
"learning_rate": 1.8450120814635834e-05, |
|
"loss": 0.612, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 4.008383791336749, |
|
"grad_norm": 0.4636853968999321, |
|
"learning_rate": 1.840697273041077e-05, |
|
"loss": 0.5965, |
|
"step": 4305 |
|
}, |
|
{ |
|
"epoch": 4.013041453190499, |
|
"grad_norm": 0.47840553026490606, |
|
"learning_rate": 1.836382464618571e-05, |
|
"loss": 0.605, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 4.017699115044247, |
|
"grad_norm": 0.4368148682865373, |
|
"learning_rate": 1.832067656196065e-05, |
|
"loss": 0.5943, |
|
"step": 4315 |
|
}, |
|
{ |
|
"epoch": 4.022356776897997, |
|
"grad_norm": 0.4507347734680539, |
|
"learning_rate": 1.827752847773559e-05, |
|
"loss": 0.5999, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 4.027014438751746, |
|
"grad_norm": 0.4473462336632657, |
|
"learning_rate": 1.823438039351053e-05, |
|
"loss": 0.5837, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 4.031672100605496, |
|
"grad_norm": 0.45940433656549884, |
|
"learning_rate": 1.819123230928547e-05, |
|
"loss": 0.6025, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 4.036329762459245, |
|
"grad_norm": 0.4373918323359047, |
|
"learning_rate": 1.8148084225060408e-05, |
|
"loss": 0.6083, |
|
"step": 4335 |
|
}, |
|
{ |
|
"epoch": 4.040987424312995, |
|
"grad_norm": 0.45627386533438385, |
|
"learning_rate": 1.810493614083535e-05, |
|
"loss": 0.6057, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 4.045645086166744, |
|
"grad_norm": 0.4158899609247656, |
|
"learning_rate": 1.806178805661029e-05, |
|
"loss": 0.5976, |
|
"step": 4345 |
|
}, |
|
{ |
|
"epoch": 4.050302748020494, |
|
"grad_norm": 0.44858142598667394, |
|
"learning_rate": 1.8018639972385225e-05, |
|
"loss": 0.6072, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 4.054960409874243, |
|
"grad_norm": 0.4319843019181772, |
|
"learning_rate": 1.7975491888160167e-05, |
|
"loss": 0.6168, |
|
"step": 4355 |
|
}, |
|
{ |
|
"epoch": 4.059618071727993, |
|
"grad_norm": 0.45788706950464525, |
|
"learning_rate": 1.7932343803935107e-05, |
|
"loss": 0.6216, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 4.064275733581742, |
|
"grad_norm": 0.44601975328923293, |
|
"learning_rate": 1.7889195719710046e-05, |
|
"loss": 0.6102, |
|
"step": 4365 |
|
}, |
|
{ |
|
"epoch": 4.068933395435492, |
|
"grad_norm": 0.4264885542380311, |
|
"learning_rate": 1.7846047635484985e-05, |
|
"loss": 0.6101, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 4.07359105728924, |
|
"grad_norm": 0.484392380924622, |
|
"learning_rate": 1.7802899551259924e-05, |
|
"loss": 0.6034, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 4.07824871914299, |
|
"grad_norm": 0.4616445369154263, |
|
"learning_rate": 1.7759751467034866e-05, |
|
"loss": 0.601, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 4.082906380996739, |
|
"grad_norm": 0.44094231922096266, |
|
"learning_rate": 1.7716603382809806e-05, |
|
"loss": 0.5995, |
|
"step": 4385 |
|
}, |
|
{ |
|
"epoch": 4.087564042850489, |
|
"grad_norm": 0.42668516125422923, |
|
"learning_rate": 1.767345529858474e-05, |
|
"loss": 0.6134, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 4.092221704704238, |
|
"grad_norm": 0.43374717210175096, |
|
"learning_rate": 1.7630307214359684e-05, |
|
"loss": 0.6061, |
|
"step": 4395 |
|
}, |
|
{ |
|
"epoch": 4.096879366557988, |
|
"grad_norm": 0.46240607909102194, |
|
"learning_rate": 1.7587159130134623e-05, |
|
"loss": 0.6074, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 4.101537028411737, |
|
"grad_norm": 0.4541018188451341, |
|
"learning_rate": 1.7544011045909562e-05, |
|
"loss": 0.6072, |
|
"step": 4405 |
|
}, |
|
{ |
|
"epoch": 4.106194690265487, |
|
"grad_norm": 0.458832120215879, |
|
"learning_rate": 1.75008629616845e-05, |
|
"loss": 0.6118, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 4.110852352119236, |
|
"grad_norm": 0.4540320628873728, |
|
"learning_rate": 1.745771487745944e-05, |
|
"loss": 0.6022, |
|
"step": 4415 |
|
}, |
|
{ |
|
"epoch": 4.115510013972986, |
|
"grad_norm": 0.4961044448323278, |
|
"learning_rate": 1.7414566793234383e-05, |
|
"loss": 0.6226, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 4.120167675826735, |
|
"grad_norm": 0.4214996889905062, |
|
"learning_rate": 1.7371418709009322e-05, |
|
"loss": 0.611, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 4.124825337680484, |
|
"grad_norm": 0.4336548232531617, |
|
"learning_rate": 1.732827062478426e-05, |
|
"loss": 0.6198, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 4.1294829995342335, |
|
"grad_norm": 0.46168529407914377, |
|
"learning_rate": 1.72851225405592e-05, |
|
"loss": 0.5939, |
|
"step": 4435 |
|
}, |
|
{ |
|
"epoch": 4.134140661387983, |
|
"grad_norm": 0.47290688120512087, |
|
"learning_rate": 1.724197445633414e-05, |
|
"loss": 0.6097, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 4.1387983232417325, |
|
"grad_norm": 0.4755058837401803, |
|
"learning_rate": 1.7198826372109078e-05, |
|
"loss": 0.6138, |
|
"step": 4445 |
|
}, |
|
{ |
|
"epoch": 4.143455985095482, |
|
"grad_norm": 0.41589445408296166, |
|
"learning_rate": 1.715567828788402e-05, |
|
"loss": 0.6109, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 4.1481136469492315, |
|
"grad_norm": 0.4498672135606939, |
|
"learning_rate": 1.7112530203658957e-05, |
|
"loss": 0.6083, |
|
"step": 4455 |
|
}, |
|
{ |
|
"epoch": 4.152771308802981, |
|
"grad_norm": 0.4766526068840867, |
|
"learning_rate": 1.70693821194339e-05, |
|
"loss": 0.6085, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 4.1574289706567304, |
|
"grad_norm": 0.4295528412479077, |
|
"learning_rate": 1.7026234035208838e-05, |
|
"loss": 0.6225, |
|
"step": 4465 |
|
}, |
|
{ |
|
"epoch": 4.16208663251048, |
|
"grad_norm": 0.4298764761727921, |
|
"learning_rate": 1.6983085950983777e-05, |
|
"loss": 0.6221, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 4.166744294364229, |
|
"grad_norm": 0.4591338614641095, |
|
"learning_rate": 1.6939937866758716e-05, |
|
"loss": 0.6085, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 4.171401956217979, |
|
"grad_norm": 0.44174712200018607, |
|
"learning_rate": 1.6896789782533655e-05, |
|
"loss": 0.6066, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 4.1760596180717275, |
|
"grad_norm": 0.41101833546266026, |
|
"learning_rate": 1.6853641698308595e-05, |
|
"loss": 0.5892, |
|
"step": 4485 |
|
}, |
|
{ |
|
"epoch": 4.180717279925477, |
|
"grad_norm": 0.4256851452409376, |
|
"learning_rate": 1.6810493614083537e-05, |
|
"loss": 0.5994, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 4.1853749417792265, |
|
"grad_norm": 0.42254805258229255, |
|
"learning_rate": 1.6767345529858476e-05, |
|
"loss": 0.6137, |
|
"step": 4495 |
|
}, |
|
{ |
|
"epoch": 4.190032603632976, |
|
"grad_norm": 0.42742224570110604, |
|
"learning_rate": 1.6724197445633415e-05, |
|
"loss": 0.6209, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 4.1946902654867255, |
|
"grad_norm": 0.45649771343928924, |
|
"learning_rate": 1.6681049361408354e-05, |
|
"loss": 0.5993, |
|
"step": 4505 |
|
}, |
|
{ |
|
"epoch": 4.199347927340475, |
|
"grad_norm": 0.49288033709364676, |
|
"learning_rate": 1.6637901277183294e-05, |
|
"loss": 0.6159, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 4.2040055891942245, |
|
"grad_norm": 0.4503495359867892, |
|
"learning_rate": 1.6594753192958236e-05, |
|
"loss": 0.6108, |
|
"step": 4515 |
|
}, |
|
{ |
|
"epoch": 4.208663251047974, |
|
"grad_norm": 0.4808981005373163, |
|
"learning_rate": 1.6551605108733172e-05, |
|
"loss": 0.6112, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 4.2133209129017235, |
|
"grad_norm": 0.42439860467905544, |
|
"learning_rate": 1.650845702450811e-05, |
|
"loss": 0.6039, |
|
"step": 4525 |
|
}, |
|
{ |
|
"epoch": 4.217978574755473, |
|
"grad_norm": 0.4993795950407368, |
|
"learning_rate": 1.6465308940283053e-05, |
|
"loss": 0.6071, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 4.2226362366092225, |
|
"grad_norm": 0.43029625292079626, |
|
"learning_rate": 1.6422160856057992e-05, |
|
"loss": 0.6083, |
|
"step": 4535 |
|
}, |
|
{ |
|
"epoch": 4.227293898462972, |
|
"grad_norm": 0.4423811846646929, |
|
"learning_rate": 1.637901277183293e-05, |
|
"loss": 0.6143, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 4.231951560316721, |
|
"grad_norm": 0.49407362582852016, |
|
"learning_rate": 1.633586468760787e-05, |
|
"loss": 0.6145, |
|
"step": 4545 |
|
}, |
|
{ |
|
"epoch": 4.23660922217047, |
|
"grad_norm": 0.45352381389441976, |
|
"learning_rate": 1.629271660338281e-05, |
|
"loss": 0.6092, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 4.24126688402422, |
|
"grad_norm": 0.4867561002947084, |
|
"learning_rate": 1.6249568519157752e-05, |
|
"loss": 0.6188, |
|
"step": 4555 |
|
}, |
|
{ |
|
"epoch": 4.245924545877969, |
|
"grad_norm": 0.47489599095147444, |
|
"learning_rate": 1.620642043493269e-05, |
|
"loss": 0.6247, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 4.250582207731719, |
|
"grad_norm": 0.4838388077146676, |
|
"learning_rate": 1.6163272350707627e-05, |
|
"loss": 0.6098, |
|
"step": 4565 |
|
}, |
|
{ |
|
"epoch": 4.255239869585468, |
|
"grad_norm": 0.49923583496165636, |
|
"learning_rate": 1.612012426648257e-05, |
|
"loss": 0.6239, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 4.259897531439218, |
|
"grad_norm": 0.4203945221613941, |
|
"learning_rate": 1.607697618225751e-05, |
|
"loss": 0.6216, |
|
"step": 4575 |
|
}, |
|
{ |
|
"epoch": 4.264555193292967, |
|
"grad_norm": 0.446133559000005, |
|
"learning_rate": 1.6033828098032448e-05, |
|
"loss": 0.6192, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 4.269212855146717, |
|
"grad_norm": 0.4180265005539548, |
|
"learning_rate": 1.5990680013807387e-05, |
|
"loss": 0.6143, |
|
"step": 4585 |
|
}, |
|
{ |
|
"epoch": 4.273870517000466, |
|
"grad_norm": 0.4525922240195107, |
|
"learning_rate": 1.5947531929582326e-05, |
|
"loss": 0.6081, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 4.2785281788542155, |
|
"grad_norm": 0.45234994843255383, |
|
"learning_rate": 1.590438384535727e-05, |
|
"loss": 0.6178, |
|
"step": 4595 |
|
}, |
|
{ |
|
"epoch": 4.283185840707965, |
|
"grad_norm": 0.4494375930375664, |
|
"learning_rate": 1.5861235761132208e-05, |
|
"loss": 0.6198, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 4.287843502561714, |
|
"grad_norm": 0.4225621793308862, |
|
"learning_rate": 1.5818087676907143e-05, |
|
"loss": 0.6271, |
|
"step": 4605 |
|
}, |
|
{ |
|
"epoch": 4.292501164415463, |
|
"grad_norm": 0.46236415865283903, |
|
"learning_rate": 1.5774939592682086e-05, |
|
"loss": 0.6107, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 4.297158826269213, |
|
"grad_norm": 0.46052115118679277, |
|
"learning_rate": 1.5731791508457025e-05, |
|
"loss": 0.6102, |
|
"step": 4615 |
|
}, |
|
{ |
|
"epoch": 4.301816488122962, |
|
"grad_norm": 0.46117671507013813, |
|
"learning_rate": 1.5688643424231964e-05, |
|
"loss": 0.6049, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 4.306474149976712, |
|
"grad_norm": 0.4407496966658314, |
|
"learning_rate": 1.5645495340006903e-05, |
|
"loss": 0.6126, |
|
"step": 4625 |
|
}, |
|
{ |
|
"epoch": 4.311131811830461, |
|
"grad_norm": 0.4457616926070442, |
|
"learning_rate": 1.5602347255781842e-05, |
|
"loss": 0.619, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 4.315789473684211, |
|
"grad_norm": 0.45136738284407385, |
|
"learning_rate": 1.5559199171556785e-05, |
|
"loss": 0.6102, |
|
"step": 4635 |
|
}, |
|
{ |
|
"epoch": 4.32044713553796, |
|
"grad_norm": 0.4380548820490039, |
|
"learning_rate": 1.5516051087331724e-05, |
|
"loss": 0.6095, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 4.32510479739171, |
|
"grad_norm": 0.44183287095699714, |
|
"learning_rate": 1.5472903003106663e-05, |
|
"loss": 0.6067, |
|
"step": 4645 |
|
}, |
|
{ |
|
"epoch": 4.329762459245459, |
|
"grad_norm": 0.43123650201876623, |
|
"learning_rate": 1.5429754918881602e-05, |
|
"loss": 0.6329, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 4.334420121099209, |
|
"grad_norm": 0.4177619149141171, |
|
"learning_rate": 1.538660683465654e-05, |
|
"loss": 0.6097, |
|
"step": 4655 |
|
}, |
|
{ |
|
"epoch": 4.339077782952957, |
|
"grad_norm": 0.4352353479215684, |
|
"learning_rate": 1.534345875043148e-05, |
|
"loss": 0.6161, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 4.343735444806707, |
|
"grad_norm": 0.4605489056353871, |
|
"learning_rate": 1.5300310666206423e-05, |
|
"loss": 0.6272, |
|
"step": 4665 |
|
}, |
|
{ |
|
"epoch": 4.348393106660456, |
|
"grad_norm": 0.43609413400101865, |
|
"learning_rate": 1.525716258198136e-05, |
|
"loss": 0.6194, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 4.353050768514206, |
|
"grad_norm": 0.43918268795029974, |
|
"learning_rate": 1.5214014497756301e-05, |
|
"loss": 0.6012, |
|
"step": 4675 |
|
}, |
|
{ |
|
"epoch": 4.357708430367955, |
|
"grad_norm": 0.4554666556635843, |
|
"learning_rate": 1.517086641353124e-05, |
|
"loss": 0.6248, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 4.362366092221705, |
|
"grad_norm": 0.44745527897814913, |
|
"learning_rate": 1.5127718329306178e-05, |
|
"loss": 0.6052, |
|
"step": 4685 |
|
}, |
|
{ |
|
"epoch": 4.367023754075454, |
|
"grad_norm": 0.4566195590924305, |
|
"learning_rate": 1.508457024508112e-05, |
|
"loss": 0.6147, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 4.371681415929204, |
|
"grad_norm": 0.4295277655025628, |
|
"learning_rate": 1.5041422160856058e-05, |
|
"loss": 0.6056, |
|
"step": 4695 |
|
}, |
|
{ |
|
"epoch": 4.376339077782953, |
|
"grad_norm": 0.4762038397304181, |
|
"learning_rate": 1.4998274076630997e-05, |
|
"loss": 0.6267, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 4.380996739636703, |
|
"grad_norm": 0.43161468066172953, |
|
"learning_rate": 1.4955125992405938e-05, |
|
"loss": 0.6093, |
|
"step": 4705 |
|
}, |
|
{ |
|
"epoch": 4.385654401490452, |
|
"grad_norm": 0.4537790846607464, |
|
"learning_rate": 1.4911977908180877e-05, |
|
"loss": 0.627, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 4.390312063344201, |
|
"grad_norm": 0.47628975099139476, |
|
"learning_rate": 1.4868829823955818e-05, |
|
"loss": 0.599, |
|
"step": 4715 |
|
}, |
|
{ |
|
"epoch": 4.39496972519795, |
|
"grad_norm": 0.4284210913770137, |
|
"learning_rate": 1.4825681739730757e-05, |
|
"loss": 0.6047, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 4.3996273870517, |
|
"grad_norm": 0.4623777997484651, |
|
"learning_rate": 1.4782533655505696e-05, |
|
"loss": 0.5996, |
|
"step": 4725 |
|
}, |
|
{ |
|
"epoch": 4.404285048905449, |
|
"grad_norm": 0.44782456058740255, |
|
"learning_rate": 1.4739385571280637e-05, |
|
"loss": 0.6242, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 4.408942710759199, |
|
"grad_norm": 0.42865704978519364, |
|
"learning_rate": 1.4696237487055576e-05, |
|
"loss": 0.6216, |
|
"step": 4735 |
|
}, |
|
{ |
|
"epoch": 4.413600372612948, |
|
"grad_norm": 0.42258284119468553, |
|
"learning_rate": 1.4653089402830513e-05, |
|
"loss": 0.5949, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 4.418258034466698, |
|
"grad_norm": 0.4404360437172725, |
|
"learning_rate": 1.4609941318605456e-05, |
|
"loss": 0.6142, |
|
"step": 4745 |
|
}, |
|
{ |
|
"epoch": 4.422915696320447, |
|
"grad_norm": 0.4471745958531887, |
|
"learning_rate": 1.4566793234380393e-05, |
|
"loss": 0.6046, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 4.427573358174197, |
|
"grad_norm": 0.45193754607906805, |
|
"learning_rate": 1.4523645150155336e-05, |
|
"loss": 0.6301, |
|
"step": 4755 |
|
}, |
|
{ |
|
"epoch": 4.432231020027946, |
|
"grad_norm": 0.4378890697332401, |
|
"learning_rate": 1.4480497065930273e-05, |
|
"loss": 0.613, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 4.436888681881696, |
|
"grad_norm": 0.40172229970174256, |
|
"learning_rate": 1.4437348981705212e-05, |
|
"loss": 0.6126, |
|
"step": 4765 |
|
}, |
|
{ |
|
"epoch": 4.441546343735445, |
|
"grad_norm": 0.44671360931171006, |
|
"learning_rate": 1.4394200897480153e-05, |
|
"loss": 0.5993, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 4.446204005589194, |
|
"grad_norm": 0.4208530812149307, |
|
"learning_rate": 1.4351052813255092e-05, |
|
"loss": 0.6117, |
|
"step": 4775 |
|
}, |
|
{ |
|
"epoch": 4.450861667442943, |
|
"grad_norm": 0.4566343802538676, |
|
"learning_rate": 1.4307904729030031e-05, |
|
"loss": 0.6165, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 4.455519329296693, |
|
"grad_norm": 0.43920444604920234, |
|
"learning_rate": 1.4264756644804972e-05, |
|
"loss": 0.6244, |
|
"step": 4785 |
|
}, |
|
{ |
|
"epoch": 4.460176991150442, |
|
"grad_norm": 0.4410914601766885, |
|
"learning_rate": 1.4221608560579911e-05, |
|
"loss": 0.6096, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 4.464834653004192, |
|
"grad_norm": 0.47815149847379673, |
|
"learning_rate": 1.4178460476354852e-05, |
|
"loss": 0.6144, |
|
"step": 4795 |
|
}, |
|
{ |
|
"epoch": 4.469492314857941, |
|
"grad_norm": 0.4393143547044593, |
|
"learning_rate": 1.4135312392129791e-05, |
|
"loss": 0.6212, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 4.474149976711691, |
|
"grad_norm": 0.4326176715876059, |
|
"learning_rate": 1.4092164307904728e-05, |
|
"loss": 0.6182, |
|
"step": 4805 |
|
}, |
|
{ |
|
"epoch": 4.47880763856544, |
|
"grad_norm": 0.45343036326683683, |
|
"learning_rate": 1.404901622367967e-05, |
|
"loss": 0.6116, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 4.48346530041919, |
|
"grad_norm": 0.41682266432299153, |
|
"learning_rate": 1.4005868139454608e-05, |
|
"loss": 0.6026, |
|
"step": 4815 |
|
}, |
|
{ |
|
"epoch": 4.488122962272939, |
|
"grad_norm": 0.4493369452130179, |
|
"learning_rate": 1.3962720055229547e-05, |
|
"loss": 0.6237, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 4.492780624126689, |
|
"grad_norm": 0.4337623071265652, |
|
"learning_rate": 1.3919571971004488e-05, |
|
"loss": 0.6161, |
|
"step": 4825 |
|
}, |
|
{ |
|
"epoch": 4.497438285980438, |
|
"grad_norm": 0.43658371794237966, |
|
"learning_rate": 1.3876423886779427e-05, |
|
"loss": 0.6259, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 4.502095947834187, |
|
"grad_norm": 0.43087708114430684, |
|
"learning_rate": 1.3833275802554368e-05, |
|
"loss": 0.615, |
|
"step": 4835 |
|
}, |
|
{ |
|
"epoch": 4.506753609687936, |
|
"grad_norm": 0.4308877230714715, |
|
"learning_rate": 1.3790127718329307e-05, |
|
"loss": 0.6178, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 4.511411271541686, |
|
"grad_norm": 0.4345512713590378, |
|
"learning_rate": 1.3746979634104246e-05, |
|
"loss": 0.6135, |
|
"step": 4845 |
|
}, |
|
{ |
|
"epoch": 4.516068933395435, |
|
"grad_norm": 0.42499223546672094, |
|
"learning_rate": 1.3703831549879187e-05, |
|
"loss": 0.6011, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 4.520726595249185, |
|
"grad_norm": 0.4297293994576531, |
|
"learning_rate": 1.3660683465654126e-05, |
|
"loss": 0.6155, |
|
"step": 4855 |
|
}, |
|
{ |
|
"epoch": 4.525384257102934, |
|
"grad_norm": 0.44676211626706, |
|
"learning_rate": 1.3617535381429064e-05, |
|
"loss": 0.6026, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 4.530041918956684, |
|
"grad_norm": 0.41451280302413623, |
|
"learning_rate": 1.3574387297204006e-05, |
|
"loss": 0.6135, |
|
"step": 4865 |
|
}, |
|
{ |
|
"epoch": 4.534699580810433, |
|
"grad_norm": 0.41024444397996657, |
|
"learning_rate": 1.3531239212978944e-05, |
|
"loss": 0.6172, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 4.539357242664183, |
|
"grad_norm": 0.4249317938403838, |
|
"learning_rate": 1.3488091128753886e-05, |
|
"loss": 0.6206, |
|
"step": 4875 |
|
}, |
|
{ |
|
"epoch": 4.544014904517932, |
|
"grad_norm": 0.4288584553452124, |
|
"learning_rate": 1.3444943044528824e-05, |
|
"loss": 0.6187, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 4.548672566371682, |
|
"grad_norm": 0.42815344099306313, |
|
"learning_rate": 1.3401794960303763e-05, |
|
"loss": 0.6162, |
|
"step": 4885 |
|
}, |
|
{ |
|
"epoch": 4.55333022822543, |
|
"grad_norm": 0.4476784897625909, |
|
"learning_rate": 1.3358646876078703e-05, |
|
"loss": 0.612, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 4.55798789007918, |
|
"grad_norm": 0.4146360781064623, |
|
"learning_rate": 1.3315498791853643e-05, |
|
"loss": 0.608, |
|
"step": 4895 |
|
}, |
|
{ |
|
"epoch": 4.562645551932929, |
|
"grad_norm": 0.42180653622806297, |
|
"learning_rate": 1.327235070762858e-05, |
|
"loss": 0.5984, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 4.567303213786679, |
|
"grad_norm": 0.42786143566290896, |
|
"learning_rate": 1.3229202623403522e-05, |
|
"loss": 0.616, |
|
"step": 4905 |
|
}, |
|
{ |
|
"epoch": 4.571960875640428, |
|
"grad_norm": 0.4337556272151318, |
|
"learning_rate": 1.318605453917846e-05, |
|
"loss": 0.6104, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 4.576618537494178, |
|
"grad_norm": 0.43813645125024153, |
|
"learning_rate": 1.3142906454953402e-05, |
|
"loss": 0.6059, |
|
"step": 4915 |
|
}, |
|
{ |
|
"epoch": 4.581276199347927, |
|
"grad_norm": 0.44397103218263223, |
|
"learning_rate": 1.309975837072834e-05, |
|
"loss": 0.6067, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 4.585933861201677, |
|
"grad_norm": 0.4206021176834172, |
|
"learning_rate": 1.3056610286503279e-05, |
|
"loss": 0.6188, |
|
"step": 4925 |
|
}, |
|
{ |
|
"epoch": 4.590591523055426, |
|
"grad_norm": 0.4155111249198103, |
|
"learning_rate": 1.301346220227822e-05, |
|
"loss": 0.6044, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 4.595249184909176, |
|
"grad_norm": 0.4077942507751532, |
|
"learning_rate": 1.2970314118053159e-05, |
|
"loss": 0.6195, |
|
"step": 4935 |
|
}, |
|
{ |
|
"epoch": 4.599906846762925, |
|
"grad_norm": 0.40937985817451483, |
|
"learning_rate": 1.2927166033828098e-05, |
|
"loss": 0.6046, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 4.604564508616674, |
|
"grad_norm": 0.42556174213849096, |
|
"learning_rate": 1.2884017949603039e-05, |
|
"loss": 0.6312, |
|
"step": 4945 |
|
}, |
|
{ |
|
"epoch": 4.6092221704704235, |
|
"grad_norm": 0.42998849734417294, |
|
"learning_rate": 1.2840869865377978e-05, |
|
"loss": 0.6057, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 4.613879832324173, |
|
"grad_norm": 0.41004193899090896, |
|
"learning_rate": 1.2797721781152919e-05, |
|
"loss": 0.6198, |
|
"step": 4955 |
|
}, |
|
{ |
|
"epoch": 4.6185374941779225, |
|
"grad_norm": 0.42096090582338275, |
|
"learning_rate": 1.2754573696927858e-05, |
|
"loss": 0.6039, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 4.623195156031672, |
|
"grad_norm": 0.42400995011780857, |
|
"learning_rate": 1.2711425612702795e-05, |
|
"loss": 0.6266, |
|
"step": 4965 |
|
}, |
|
{ |
|
"epoch": 4.6278528178854215, |
|
"grad_norm": 0.42040078255527125, |
|
"learning_rate": 1.2668277528477738e-05, |
|
"loss": 0.6228, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 4.632510479739171, |
|
"grad_norm": 0.4431900153893484, |
|
"learning_rate": 1.2625129444252675e-05, |
|
"loss": 0.5979, |
|
"step": 4975 |
|
}, |
|
{ |
|
"epoch": 4.6371681415929205, |
|
"grad_norm": 0.4212544728790483, |
|
"learning_rate": 1.2581981360027614e-05, |
|
"loss": 0.603, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 4.64182580344667, |
|
"grad_norm": 0.437403098140137, |
|
"learning_rate": 1.2538833275802555e-05, |
|
"loss": 0.6282, |
|
"step": 4985 |
|
}, |
|
{ |
|
"epoch": 4.6464834653004194, |
|
"grad_norm": 0.42245841231912035, |
|
"learning_rate": 1.2495685191577494e-05, |
|
"loss": 0.6025, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 4.651141127154169, |
|
"grad_norm": 0.4195991173380262, |
|
"learning_rate": 1.2452537107352433e-05, |
|
"loss": 0.6155, |
|
"step": 4995 |
|
}, |
|
{ |
|
"epoch": 4.6557987890079175, |
|
"grad_norm": 0.42896342530484133, |
|
"learning_rate": 1.2409389023127374e-05, |
|
"loss": 0.6172, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 4.660456450861668, |
|
"grad_norm": 0.4519927520866544, |
|
"learning_rate": 1.2366240938902313e-05, |
|
"loss": 0.6151, |
|
"step": 5005 |
|
}, |
|
{ |
|
"epoch": 4.6651141127154165, |
|
"grad_norm": 0.4154772218254218, |
|
"learning_rate": 1.2323092854677252e-05, |
|
"loss": 0.6231, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 4.669771774569166, |
|
"grad_norm": 0.41189543902219217, |
|
"learning_rate": 1.2279944770452193e-05, |
|
"loss": 0.6129, |
|
"step": 5015 |
|
}, |
|
{ |
|
"epoch": 4.6744294364229155, |
|
"grad_norm": 0.41834462910543274, |
|
"learning_rate": 1.2236796686227132e-05, |
|
"loss": 0.6144, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 4.679087098276665, |
|
"grad_norm": 0.4276803315518868, |
|
"learning_rate": 1.2193648602002073e-05, |
|
"loss": 0.6193, |
|
"step": 5025 |
|
}, |
|
{ |
|
"epoch": 4.6837447601304145, |
|
"grad_norm": 0.4471592285620113, |
|
"learning_rate": 1.215050051777701e-05, |
|
"loss": 0.6115, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 4.688402421984164, |
|
"grad_norm": 0.4280179038134522, |
|
"learning_rate": 1.2107352433551951e-05, |
|
"loss": 0.6118, |
|
"step": 5035 |
|
}, |
|
{ |
|
"epoch": 4.6930600838379135, |
|
"grad_norm": 0.4156278571195452, |
|
"learning_rate": 1.206420434932689e-05, |
|
"loss": 0.6085, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 4.697717745691663, |
|
"grad_norm": 0.4389936054467469, |
|
"learning_rate": 1.202105626510183e-05, |
|
"loss": 0.6016, |
|
"step": 5045 |
|
}, |
|
{ |
|
"epoch": 4.7023754075454125, |
|
"grad_norm": 0.4398668606342934, |
|
"learning_rate": 1.1977908180876769e-05, |
|
"loss": 0.6053, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 4.707033069399162, |
|
"grad_norm": 0.4152682167746281, |
|
"learning_rate": 1.193476009665171e-05, |
|
"loss": 0.6069, |
|
"step": 5055 |
|
}, |
|
{ |
|
"epoch": 4.7116907312529115, |
|
"grad_norm": 0.42222398978193676, |
|
"learning_rate": 1.1891612012426649e-05, |
|
"loss": 0.6076, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 4.71634839310666, |
|
"grad_norm": 0.43524199728540414, |
|
"learning_rate": 1.1848463928201588e-05, |
|
"loss": 0.6008, |
|
"step": 5065 |
|
}, |
|
{ |
|
"epoch": 4.72100605496041, |
|
"grad_norm": 0.4110464798198264, |
|
"learning_rate": 1.1805315843976528e-05, |
|
"loss": 0.6073, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 4.725663716814159, |
|
"grad_norm": 0.43391534282606625, |
|
"learning_rate": 1.1762167759751468e-05, |
|
"loss": 0.6187, |
|
"step": 5075 |
|
}, |
|
{ |
|
"epoch": 4.730321378667909, |
|
"grad_norm": 0.4344268574647356, |
|
"learning_rate": 1.1719019675526408e-05, |
|
"loss": 0.6183, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 4.734979040521658, |
|
"grad_norm": 0.46079886577181006, |
|
"learning_rate": 1.1675871591301346e-05, |
|
"loss": 0.6032, |
|
"step": 5085 |
|
}, |
|
{ |
|
"epoch": 4.739636702375408, |
|
"grad_norm": 0.4095438317917713, |
|
"learning_rate": 1.1632723507076287e-05, |
|
"loss": 0.6108, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 4.744294364229157, |
|
"grad_norm": 0.4192945945524116, |
|
"learning_rate": 1.1589575422851226e-05, |
|
"loss": 0.6014, |
|
"step": 5095 |
|
}, |
|
    {
      "epoch": 4.748952026082907,
      "grad_norm": 0.425187657232121,
      "learning_rate": 1.1546427338626167e-05,
      "loss": 0.6149,
      "step": 5100
    },
    {
      "epoch": 4.753609687936656,
      "grad_norm": 0.4314640586451845,
      "learning_rate": 1.1503279254401104e-05,
      "loss": 0.617,
      "step": 5105
    },
    {
      "epoch": 4.7582673497904056,
      "grad_norm": 0.4226989429033583,
      "learning_rate": 1.1460131170176045e-05,
      "loss": 0.6161,
      "step": 5110
    },
    {
      "epoch": 4.762925011644155,
      "grad_norm": 0.4609333444144553,
      "learning_rate": 1.1416983085950984e-05,
      "loss": 0.6114,
      "step": 5115
    },
    {
      "epoch": 4.767582673497904,
      "grad_norm": 0.41768650972615995,
      "learning_rate": 1.1373835001725925e-05,
      "loss": 0.6002,
      "step": 5120
    },
    {
      "epoch": 4.772240335351653,
      "grad_norm": 0.43194965243676026,
      "learning_rate": 1.1330686917500862e-05,
      "loss": 0.6113,
      "step": 5125
    },
    {
      "epoch": 4.776897997205403,
      "grad_norm": 0.4095581873851778,
      "learning_rate": 1.1287538833275803e-05,
      "loss": 0.6164,
      "step": 5130
    },
    {
      "epoch": 4.781555659059152,
      "grad_norm": 0.4313289478900628,
      "learning_rate": 1.1244390749050742e-05,
      "loss": 0.634,
      "step": 5135
    },
    {
      "epoch": 4.786213320912902,
      "grad_norm": 0.4127155972701948,
      "learning_rate": 1.1201242664825683e-05,
      "loss": 0.6017,
      "step": 5140
    },
    {
      "epoch": 4.790870982766651,
      "grad_norm": 0.39603431972121866,
      "learning_rate": 1.1158094580600622e-05,
      "loss": 0.6125,
      "step": 5145
    },
    {
      "epoch": 4.795528644620401,
      "grad_norm": 0.4342284527499743,
      "learning_rate": 1.1114946496375561e-05,
      "loss": 0.5879,
      "step": 5150
    },
    {
      "epoch": 4.80018630647415,
      "grad_norm": 0.41941974920430475,
      "learning_rate": 1.1071798412150502e-05,
      "loss": 0.5973,
      "step": 5155
    },
    {
      "epoch": 4.8048439683279,
      "grad_norm": 0.47802974385560876,
      "learning_rate": 1.1028650327925441e-05,
      "loss": 0.5995,
      "step": 5160
    },
    {
      "epoch": 4.809501630181649,
      "grad_norm": 0.3965798387548902,
      "learning_rate": 1.098550224370038e-05,
      "loss": 0.6184,
      "step": 5165
    },
    {
      "epoch": 4.814159292035399,
      "grad_norm": 0.43801160852058696,
      "learning_rate": 1.094235415947532e-05,
      "loss": 0.6203,
      "step": 5170
    },
    {
      "epoch": 4.818816953889147,
      "grad_norm": 0.4241260906344107,
      "learning_rate": 1.089920607525026e-05,
      "loss": 0.6272,
      "step": 5175
    },
    {
      "epoch": 4.823474615742897,
      "grad_norm": 0.4105124110466843,
      "learning_rate": 1.0856057991025199e-05,
      "loss": 0.6052,
      "step": 5180
    },
    {
      "epoch": 4.828132277596646,
      "grad_norm": 0.425415723767141,
      "learning_rate": 1.0812909906800138e-05,
      "loss": 0.6038,
      "step": 5185
    },
    {
      "epoch": 4.832789939450396,
      "grad_norm": 0.47310761948520047,
      "learning_rate": 1.0769761822575077e-05,
      "loss": 0.629,
      "step": 5190
    },
    {
      "epoch": 4.837447601304145,
      "grad_norm": 0.4083624743588117,
      "learning_rate": 1.0726613738350018e-05,
      "loss": 0.6096,
      "step": 5195
    },
    {
      "epoch": 4.842105263157895,
      "grad_norm": 0.3993868337304555,
      "learning_rate": 1.0683465654124957e-05,
      "loss": 0.6014,
      "step": 5200
    },
    {
      "epoch": 4.846762925011644,
      "grad_norm": 0.4206135006510575,
      "learning_rate": 1.0640317569899896e-05,
      "loss": 0.6206,
      "step": 5205
    },
    {
      "epoch": 4.851420586865394,
      "grad_norm": 0.43557861630634326,
      "learning_rate": 1.0597169485674835e-05,
      "loss": 0.6261,
      "step": 5210
    },
    {
      "epoch": 4.856078248719143,
      "grad_norm": 0.4317307534235413,
      "learning_rate": 1.0554021401449776e-05,
      "loss": 0.6129,
      "step": 5215
    },
    {
      "epoch": 4.860735910572893,
      "grad_norm": 0.41079339176887714,
      "learning_rate": 1.0510873317224715e-05,
      "loss": 0.6159,
      "step": 5220
    },
    {
      "epoch": 4.865393572426642,
      "grad_norm": 0.41800514370168124,
      "learning_rate": 1.0467725232999655e-05,
      "loss": 0.6007,
      "step": 5225
    },
    {
      "epoch": 4.870051234280391,
      "grad_norm": 0.42937334307178815,
      "learning_rate": 1.0424577148774595e-05,
      "loss": 0.6134,
      "step": 5230
    },
    {
      "epoch": 4.874708896134141,
      "grad_norm": 0.45733900734235955,
      "learning_rate": 1.0381429064549534e-05,
      "loss": 0.6238,
      "step": 5235
    },
    {
      "epoch": 4.87936655798789,
      "grad_norm": 0.4418166298473757,
      "learning_rate": 1.0338280980324475e-05,
      "loss": 0.6091,
      "step": 5240
    },
    {
      "epoch": 4.884024219841639,
      "grad_norm": 0.4176075023708318,
      "learning_rate": 1.0295132896099413e-05,
      "loss": 0.6327,
      "step": 5245
    },
    {
      "epoch": 4.888681881695389,
      "grad_norm": 0.45132383481251026,
      "learning_rate": 1.0251984811874353e-05,
      "loss": 0.6271,
      "step": 5250
    },
    {
      "epoch": 4.893339543549138,
      "grad_norm": 0.4093678308993761,
      "learning_rate": 1.0208836727649293e-05,
      "loss": 0.6118,
      "step": 5255
    },
    {
      "epoch": 4.897997205402888,
      "grad_norm": 0.4358137402583084,
      "learning_rate": 1.0165688643424233e-05,
      "loss": 0.5982,
      "step": 5260
    },
    {
      "epoch": 4.902654867256637,
      "grad_norm": 0.40661293305643104,
      "learning_rate": 1.012254055919917e-05,
      "loss": 0.6342,
      "step": 5265
    },
    {
      "epoch": 4.907312529110387,
      "grad_norm": 0.42806269456221924,
      "learning_rate": 1.0079392474974112e-05,
      "loss": 0.6172,
      "step": 5270
    },
    {
      "epoch": 4.911970190964136,
      "grad_norm": 0.41622182265496577,
      "learning_rate": 1.003624439074905e-05,
      "loss": 0.6149,
      "step": 5275
    },
    {
      "epoch": 4.916627852817886,
      "grad_norm": 0.4183378435998186,
      "learning_rate": 9.993096306523992e-06,
      "loss": 0.6153,
      "step": 5280
    },
    {
      "epoch": 4.921285514671635,
      "grad_norm": 0.422784188546255,
      "learning_rate": 9.94994822229893e-06,
      "loss": 0.6084,
      "step": 5285
    },
    {
      "epoch": 4.925943176525385,
      "grad_norm": 0.428353990517149,
      "learning_rate": 9.90680013807387e-06,
      "loss": 0.6241,
      "step": 5290
    },
    {
      "epoch": 4.930600838379133,
      "grad_norm": 0.41604173789317267,
      "learning_rate": 9.86365205384881e-06,
      "loss": 0.605,
      "step": 5295
    },
    {
      "epoch": 4.935258500232883,
      "grad_norm": 0.39758808242607707,
      "learning_rate": 9.82050396962375e-06,
      "loss": 0.6116,
      "step": 5300
    },
    {
      "epoch": 4.939916162086632,
      "grad_norm": 0.4253286290481327,
      "learning_rate": 9.777355885398689e-06,
      "loss": 0.6197,
      "step": 5305
    },
    {
      "epoch": 4.944573823940382,
      "grad_norm": 0.42181425213043905,
      "learning_rate": 9.734207801173628e-06,
      "loss": 0.6117,
      "step": 5310
    },
    {
      "epoch": 4.949231485794131,
      "grad_norm": 0.4134798514571126,
      "learning_rate": 9.691059716948569e-06,
      "loss": 0.5979,
      "step": 5315
    },
    {
      "epoch": 4.953889147647881,
      "grad_norm": 0.4098416893467789,
      "learning_rate": 9.647911632723508e-06,
      "loss": 0.6119,
      "step": 5320
    },
    {
      "epoch": 4.95854680950163,
      "grad_norm": 0.45216033261906713,
      "learning_rate": 9.604763548498447e-06,
      "loss": 0.6189,
      "step": 5325
    },
    {
      "epoch": 4.96320447135538,
      "grad_norm": 0.44225564927802763,
      "learning_rate": 9.561615464273386e-06,
      "loss": 0.6181,
      "step": 5330
    },
    {
      "epoch": 4.967862133209129,
      "grad_norm": 0.4078319297335572,
      "learning_rate": 9.518467380048327e-06,
      "loss": 0.5954,
      "step": 5335
    },
    {
      "epoch": 4.972519795062879,
      "grad_norm": 0.4118776896593466,
      "learning_rate": 9.475319295823266e-06,
      "loss": 0.6138,
      "step": 5340
    },
    {
      "epoch": 4.977177456916628,
      "grad_norm": 0.4211790875068119,
      "learning_rate": 9.432171211598205e-06,
      "loss": 0.6049,
      "step": 5345
    },
    {
      "epoch": 4.981835118770377,
      "grad_norm": 0.40842310789076874,
      "learning_rate": 9.389023127373144e-06,
      "loss": 0.5949,
      "step": 5350
    },
    {
      "epoch": 4.986492780624126,
      "grad_norm": 0.40580311588149054,
      "learning_rate": 9.345875043148085e-06,
      "loss": 0.6134,
      "step": 5355
    },
    {
      "epoch": 4.991150442477876,
      "grad_norm": 0.4323753404756848,
      "learning_rate": 9.302726958923024e-06,
      "loss": 0.5883,
      "step": 5360
    },
    {
      "epoch": 4.995808104331625,
      "grad_norm": 0.4064246433001648,
      "learning_rate": 9.259578874697963e-06,
      "loss": 0.6183,
      "step": 5365
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.4189595295526227,
      "learning_rate": 9.216430790472904e-06,
      "loss": 0.5964,
      "step": 5370
    },
    {
      "epoch": 5.0046576618537495,
      "grad_norm": 0.45660893906293887,
      "learning_rate": 9.173282706247843e-06,
      "loss": 0.5727,
      "step": 5375
    },
    {
      "epoch": 5.009315323707499,
      "grad_norm": 0.48950393937721143,
      "learning_rate": 9.130134622022784e-06,
      "loss": 0.5716,
      "step": 5380
    },
    {
      "epoch": 5.0139729855612485,
      "grad_norm": 0.46305435866792793,
      "learning_rate": 9.086986537797721e-06,
      "loss": 0.5652,
      "step": 5385
    },
    {
      "epoch": 5.018630647414998,
      "grad_norm": 0.46351578346695144,
      "learning_rate": 9.043838453572662e-06,
      "loss": 0.5831,
      "step": 5390
    },
    {
      "epoch": 5.0232883092687475,
      "grad_norm": 0.43383882806850615,
      "learning_rate": 9.000690369347601e-06,
      "loss": 0.5556,
      "step": 5395
    },
    {
      "epoch": 5.027945971122496,
      "grad_norm": 0.4283268226889008,
      "learning_rate": 8.957542285122542e-06,
      "loss": 0.5523,
      "step": 5400
    },
    {
      "epoch": 5.032603632976246,
      "grad_norm": 0.429645710696869,
      "learning_rate": 8.91439420089748e-06,
      "loss": 0.5574,
      "step": 5405
    },
    {
      "epoch": 5.037261294829995,
      "grad_norm": 0.4094166465952819,
      "learning_rate": 8.87124611667242e-06,
      "loss": 0.5597,
      "step": 5410
    },
    {
      "epoch": 5.041918956683745,
      "grad_norm": 0.4712850595020832,
      "learning_rate": 8.82809803244736e-06,
      "loss": 0.5609,
      "step": 5415
    },
    {
      "epoch": 5.046576618537494,
      "grad_norm": 0.4246892863915132,
      "learning_rate": 8.7849499482223e-06,
      "loss": 0.5457,
      "step": 5420
    },
    {
      "epoch": 5.051234280391244,
      "grad_norm": 0.4187158662633382,
      "learning_rate": 8.741801863997238e-06,
      "loss": 0.5606,
      "step": 5425
    },
    {
      "epoch": 5.055891942244993,
      "grad_norm": 0.41213110304851663,
      "learning_rate": 8.698653779772179e-06,
      "loss": 0.5716,
      "step": 5430
    },
    {
      "epoch": 5.0605496040987425,
      "grad_norm": 0.4153843626149877,
      "learning_rate": 8.655505695547118e-06,
      "loss": 0.5389,
      "step": 5435
    },
    {
      "epoch": 5.065207265952492,
      "grad_norm": 0.40899362039500287,
      "learning_rate": 8.612357611322058e-06,
      "loss": 0.5449,
      "step": 5440
    },
    {
      "epoch": 5.0698649278062415,
      "grad_norm": 0.4246192080492623,
      "learning_rate": 8.569209527096998e-06,
      "loss": 0.5508,
      "step": 5445
    },
    {
      "epoch": 5.074522589659991,
      "grad_norm": 0.40779228236908316,
      "learning_rate": 8.526061442871937e-06,
      "loss": 0.5756,
      "step": 5450
    },
    {
      "epoch": 5.0791802515137405,
      "grad_norm": 0.45516682878989534,
      "learning_rate": 8.482913358646877e-06,
      "loss": 0.5734,
      "step": 5455
    },
    {
      "epoch": 5.083837913367489,
      "grad_norm": 0.3998404619097546,
      "learning_rate": 8.439765274421817e-06,
      "loss": 0.5394,
      "step": 5460
    },
    {
      "epoch": 5.088495575221239,
      "grad_norm": 0.4340401362574142,
      "learning_rate": 8.396617190196756e-06,
      "loss": 0.5634,
      "step": 5465
    },
    {
      "epoch": 5.093153237074988,
      "grad_norm": 0.44887014864072095,
      "learning_rate": 8.353469105971695e-06,
      "loss": 0.5702,
      "step": 5470
    },
    {
      "epoch": 5.097810898928738,
      "grad_norm": 0.42103357271725655,
      "learning_rate": 8.310321021746636e-06,
      "loss": 0.569,
      "step": 5475
    },
    {
      "epoch": 5.102468560782487,
      "grad_norm": 0.4425626777811027,
      "learning_rate": 8.267172937521575e-06,
      "loss": 0.5801,
      "step": 5480
    },
    {
      "epoch": 5.107126222636237,
      "grad_norm": 0.43103556729424025,
      "learning_rate": 8.224024853296514e-06,
      "loss": 0.562,
      "step": 5485
    },
    {
      "epoch": 5.111783884489986,
      "grad_norm": 0.4288766363689286,
      "learning_rate": 8.180876769071453e-06,
      "loss": 0.5549,
      "step": 5490
    },
    {
      "epoch": 5.116441546343736,
      "grad_norm": 0.4411211103107296,
      "learning_rate": 8.137728684846394e-06,
      "loss": 0.5782,
      "step": 5495
    },
    {
      "epoch": 5.121099208197485,
      "grad_norm": 0.44590845105225113,
      "learning_rate": 8.094580600621333e-06,
      "loss": 0.5677,
      "step": 5500
    },
    {
      "epoch": 5.125756870051235,
      "grad_norm": 0.42044193491024917,
      "learning_rate": 8.051432516396272e-06,
      "loss": 0.5647,
      "step": 5505
    },
    {
      "epoch": 5.130414531904984,
      "grad_norm": 0.4220746510226187,
      "learning_rate": 8.008284432171211e-06,
      "loss": 0.5794,
      "step": 5510
    },
    {
      "epoch": 5.135072193758733,
      "grad_norm": 0.41475894763305793,
      "learning_rate": 7.965136347946152e-06,
      "loss": 0.5517,
      "step": 5515
    },
    {
      "epoch": 5.139729855612482,
      "grad_norm": 0.42827780055854336,
      "learning_rate": 7.921988263721091e-06,
      "loss": 0.5737,
      "step": 5520
    },
    {
      "epoch": 5.144387517466232,
      "grad_norm": 0.41403286591032884,
      "learning_rate": 7.87884017949603e-06,
      "loss": 0.563,
      "step": 5525
    },
    {
      "epoch": 5.149045179319981,
      "grad_norm": 0.42984527591895905,
      "learning_rate": 7.835692095270971e-06,
      "loss": 0.5561,
      "step": 5530
    },
    {
      "epoch": 5.153702841173731,
      "grad_norm": 0.42945380057609706,
      "learning_rate": 7.79254401104591e-06,
      "loss": 0.5666,
      "step": 5535
    },
    {
      "epoch": 5.15836050302748,
      "grad_norm": 0.4432460499019077,
      "learning_rate": 7.749395926820851e-06,
      "loss": 0.5615,
      "step": 5540
    },
    {
      "epoch": 5.16301816488123,
      "grad_norm": 0.42159801759717264,
      "learning_rate": 7.706247842595788e-06,
      "loss": 0.5604,
      "step": 5545
    },
    {
      "epoch": 5.167675826734979,
      "grad_norm": 0.4375171715557641,
      "learning_rate": 7.663099758370729e-06,
      "loss": 0.5708,
      "step": 5550
    },
    {
      "epoch": 5.172333488588729,
      "grad_norm": 0.42874893445419604,
      "learning_rate": 7.619951674145669e-06,
      "loss": 0.5676,
      "step": 5555
    },
    {
      "epoch": 5.176991150442478,
      "grad_norm": 0.42541474226000026,
      "learning_rate": 7.576803589920608e-06,
      "loss": 0.5902,
      "step": 5560
    },
    {
      "epoch": 5.181648812296228,
      "grad_norm": 0.4302044467345254,
      "learning_rate": 7.533655505695547e-06,
      "loss": 0.5623,
      "step": 5565
    },
    {
      "epoch": 5.186306474149977,
      "grad_norm": 0.4124995234442213,
      "learning_rate": 7.490507421470487e-06,
      "loss": 0.5612,
      "step": 5570
    },
    {
      "epoch": 5.190964136003726,
      "grad_norm": 0.40565576085511523,
      "learning_rate": 7.447359337245427e-06,
      "loss": 0.5681,
      "step": 5575
    },
    {
      "epoch": 5.195621797857475,
      "grad_norm": 0.41713465634842206,
      "learning_rate": 7.4042112530203655e-06,
      "loss": 0.5571,
      "step": 5580
    },
    {
      "epoch": 5.200279459711225,
      "grad_norm": 0.41967547106733516,
      "learning_rate": 7.361063168795305e-06,
      "loss": 0.5639,
      "step": 5585
    },
    {
      "epoch": 5.204937121564974,
      "grad_norm": 0.4506892995367639,
      "learning_rate": 7.317915084570245e-06,
      "loss": 0.5677,
      "step": 5590
    },
    {
      "epoch": 5.209594783418724,
      "grad_norm": 0.4258131849898496,
      "learning_rate": 7.274767000345185e-06,
      "loss": 0.5683,
      "step": 5595
    },
    {
      "epoch": 5.214252445272473,
      "grad_norm": 0.4204658172936071,
      "learning_rate": 7.231618916120124e-06,
      "loss": 0.5745,
      "step": 5600
    },
    {
      "epoch": 5.218910107126223,
      "grad_norm": 0.4336757745199986,
      "learning_rate": 7.1884708318950636e-06,
      "loss": 0.5739,
      "step": 5605
    },
    {
      "epoch": 5.223567768979972,
      "grad_norm": 0.42950896150032264,
      "learning_rate": 7.1453227476700035e-06,
      "loss": 0.5782,
      "step": 5610
    },
    {
      "epoch": 5.228225430833722,
      "grad_norm": 0.4374368910234369,
      "learning_rate": 7.1021746634449435e-06,
      "loss": 0.5584,
      "step": 5615
    },
    {
      "epoch": 5.232883092687471,
      "grad_norm": 0.42566321904544496,
      "learning_rate": 7.059026579219883e-06,
      "loss": 0.5612,
      "step": 5620
    },
    {
      "epoch": 5.237540754541221,
      "grad_norm": 0.42008279382378816,
      "learning_rate": 7.0158784949948226e-06,
      "loss": 0.5476,
      "step": 5625
    },
    {
      "epoch": 5.242198416394969,
      "grad_norm": 0.449770356710987,
      "learning_rate": 6.9727304107697625e-06,
      "loss": 0.5578,
      "step": 5630
    },
    {
      "epoch": 5.246856078248719,
      "grad_norm": 0.4445174977744607,
      "learning_rate": 6.9295823265447025e-06,
      "loss": 0.5685,
      "step": 5635
    },
    {
      "epoch": 5.251513740102468,
      "grad_norm": 0.4117334382190548,
      "learning_rate": 6.886434242319641e-06,
      "loss": 0.5547,
      "step": 5640
    },
    {
      "epoch": 5.256171401956218,
      "grad_norm": 0.4016495989823376,
      "learning_rate": 6.843286158094581e-06,
      "loss": 0.5613,
      "step": 5645
    },
    {
      "epoch": 5.260829063809967,
      "grad_norm": 0.44309336309542724,
      "learning_rate": 6.800138073869521e-06,
      "loss": 0.5637,
      "step": 5650
    },
    {
      "epoch": 5.265486725663717,
      "grad_norm": 0.4278304988266007,
      "learning_rate": 6.756989989644461e-06,
      "loss": 0.5587,
      "step": 5655
    },
    {
      "epoch": 5.270144387517466,
      "grad_norm": 0.4277346963877531,
      "learning_rate": 6.713841905419399e-06,
      "loss": 0.5575,
      "step": 5660
    },
    {
      "epoch": 5.274802049371216,
      "grad_norm": 0.40625036184582464,
      "learning_rate": 6.670693821194339e-06,
      "loss": 0.5679,
      "step": 5665
    },
    {
      "epoch": 5.279459711224965,
      "grad_norm": 0.4213874882571437,
      "learning_rate": 6.627545736969279e-06,
      "loss": 0.5592,
      "step": 5670
    },
    {
      "epoch": 5.284117373078715,
      "grad_norm": 0.41154248379143205,
      "learning_rate": 6.584397652744219e-06,
      "loss": 0.5511,
      "step": 5675
    },
    {
      "epoch": 5.288775034932464,
      "grad_norm": 0.44036501495565056,
      "learning_rate": 6.541249568519157e-06,
      "loss": 0.5765,
      "step": 5680
    },
    {
      "epoch": 5.293432696786214,
      "grad_norm": 0.41669603634270275,
      "learning_rate": 6.498101484294097e-06,
      "loss": 0.5635,
      "step": 5685
    },
    {
      "epoch": 5.298090358639962,
      "grad_norm": 0.4035117456098896,
      "learning_rate": 6.454953400069037e-06,
      "loss": 0.5676,
      "step": 5690
    },
    {
      "epoch": 5.302748020493712,
      "grad_norm": 0.4105214591482102,
      "learning_rate": 6.411805315843977e-06,
      "loss": 0.5653,
      "step": 5695
    },
    {
      "epoch": 5.307405682347461,
      "grad_norm": 0.40962030817642703,
      "learning_rate": 6.368657231618916e-06,
      "loss": 0.5576,
      "step": 5700
    },
    {
      "epoch": 5.312063344201211,
      "grad_norm": 0.41377230715840563,
      "learning_rate": 6.325509147393856e-06,
      "loss": 0.5571,
      "step": 5705
    },
    {
      "epoch": 5.31672100605496,
      "grad_norm": 0.4333285787168063,
      "learning_rate": 6.282361063168796e-06,
      "loss": 0.5671,
      "step": 5710
    },
    {
      "epoch": 5.32137866790871,
      "grad_norm": 0.42261550816877297,
      "learning_rate": 6.239212978943735e-06,
      "loss": 0.5661,
      "step": 5715
    },
    {
      "epoch": 5.326036329762459,
      "grad_norm": 0.4288493148116201,
      "learning_rate": 6.196064894718675e-06,
      "loss": 0.565,
      "step": 5720
    },
    {
      "epoch": 5.330693991616209,
      "grad_norm": 0.44420344734015876,
      "learning_rate": 6.152916810493614e-06,
      "loss": 0.5701,
      "step": 5725
    },
    {
      "epoch": 5.335351653469958,
      "grad_norm": 0.438366969446799,
      "learning_rate": 6.109768726268554e-06,
      "loss": 0.5734,
      "step": 5730
    },
    {
      "epoch": 5.340009315323708,
      "grad_norm": 0.41601777699539805,
      "learning_rate": 6.066620642043493e-06,
      "loss": 0.5668,
      "step": 5735
    },
    {
      "epoch": 5.344666977177457,
      "grad_norm": 0.42093441496169604,
      "learning_rate": 6.023472557818433e-06,
      "loss": 0.5707,
      "step": 5740
    },
    {
      "epoch": 5.349324639031206,
      "grad_norm": 0.44402461374564856,
      "learning_rate": 5.980324473593372e-06,
      "loss": 0.5611,
      "step": 5745
    },
    {
      "epoch": 5.353982300884955,
      "grad_norm": 0.41520612597240397,
      "learning_rate": 5.937176389368312e-06,
      "loss": 0.5575,
      "step": 5750
    },
    {
      "epoch": 5.358639962738705,
      "grad_norm": 0.429765461012515,
      "learning_rate": 5.894028305143251e-06,
      "loss": 0.56,
      "step": 5755
    },
    {
      "epoch": 5.363297624592454,
      "grad_norm": 0.4122117030859129,
      "learning_rate": 5.850880220918191e-06,
      "loss": 0.5616,
      "step": 5760
    },
    {
      "epoch": 5.367955286446204,
      "grad_norm": 0.42948434368378274,
      "learning_rate": 5.807732136693131e-06,
      "loss": 0.5609,
      "step": 5765
    },
    {
      "epoch": 5.372612948299953,
      "grad_norm": 0.42924647584016984,
      "learning_rate": 5.76458405246807e-06,
      "loss": 0.5665,
      "step": 5770
    },
    {
      "epoch": 5.377270610153703,
      "grad_norm": 0.4065232953187284,
      "learning_rate": 5.72143596824301e-06,
      "loss": 0.5611,
      "step": 5775
    },
    {
      "epoch": 5.381928272007452,
      "grad_norm": 0.4269979309846978,
      "learning_rate": 5.67828788401795e-06,
      "loss": 0.5615,
      "step": 5780
    },
    {
      "epoch": 5.386585933861202,
      "grad_norm": 0.43683618580909445,
      "learning_rate": 5.6351397997928894e-06,
      "loss": 0.5653,
      "step": 5785
    },
    {
      "epoch": 5.391243595714951,
      "grad_norm": 0.4138395029946517,
      "learning_rate": 5.591991715567829e-06,
      "loss": 0.5802,
      "step": 5790
    },
    {
      "epoch": 5.395901257568701,
      "grad_norm": 0.39958490348240855,
      "learning_rate": 5.5488436313427685e-06,
      "loss": 0.573,
      "step": 5795
    },
    {
      "epoch": 5.4005589194224495,
      "grad_norm": 0.4213641587451733,
      "learning_rate": 5.5056955471177085e-06,
      "loss": 0.561,
      "step": 5800
    },
    {
      "epoch": 5.405216581276199,
      "grad_norm": 0.4301783875024671,
      "learning_rate": 5.462547462892648e-06,
      "loss": 0.5911,
      "step": 5805
    },
    {
      "epoch": 5.4098742431299485,
      "grad_norm": 0.42552211116782834,
      "learning_rate": 5.4193993786675876e-06,
      "loss": 0.5581,
      "step": 5810
    },
    {
      "epoch": 5.414531904983698,
      "grad_norm": 0.4178598551406305,
      "learning_rate": 5.376251294442527e-06,
      "loss": 0.5568,
      "step": 5815
    },
    {
      "epoch": 5.4191895668374475,
      "grad_norm": 0.39499266804796795,
      "learning_rate": 5.333103210217467e-06,
      "loss": 0.5573,
      "step": 5820
    },
    {
      "epoch": 5.423847228691197,
      "grad_norm": 0.4119408264424358,
      "learning_rate": 5.289955125992406e-06,
      "loss": 0.5637,
      "step": 5825
    },
    {
      "epoch": 5.4285048905449464,
      "grad_norm": 0.4459547056976113,
      "learning_rate": 5.246807041767346e-06,
      "loss": 0.5585,
      "step": 5830
    },
    {
      "epoch": 5.433162552398696,
      "grad_norm": 0.41872986179375277,
      "learning_rate": 5.203658957542285e-06,
      "loss": 0.5613,
      "step": 5835
    },
    {
      "epoch": 5.437820214252445,
      "grad_norm": 0.4264565493001382,
      "learning_rate": 5.160510873317225e-06,
      "loss": 0.5826,
      "step": 5840
    },
    {
      "epoch": 5.442477876106195,
      "grad_norm": 0.422871863759466,
      "learning_rate": 5.117362789092165e-06,
      "loss": 0.5572,
      "step": 5845
    },
    {
      "epoch": 5.447135537959944,
      "grad_norm": 0.42489006212816244,
      "learning_rate": 5.074214704867105e-06,
      "loss": 0.5717,
      "step": 5850
    },
    {
      "epoch": 5.451793199813694,
      "grad_norm": 0.42662171969491214,
      "learning_rate": 5.031066620642044e-06,
      "loss": 0.5692,
      "step": 5855
    },
    {
      "epoch": 5.4564508616674425,
      "grad_norm": 0.408395244062005,
      "learning_rate": 4.987918536416984e-06,
      "loss": 0.5663,
      "step": 5860
    },
    {
      "epoch": 5.461108523521192,
      "grad_norm": 0.44099787038065397,
      "learning_rate": 4.944770452191923e-06,
      "loss": 0.5857,
      "step": 5865
    },
    {
      "epoch": 5.4657661853749415,
      "grad_norm": 0.42041261750553266,
      "learning_rate": 4.901622367966863e-06,
      "loss": 0.5652,
      "step": 5870
    },
    {
      "epoch": 5.470423847228691,
      "grad_norm": 0.45645506662310636,
      "learning_rate": 4.858474283741802e-06,
      "loss": 0.5386,
      "step": 5875
    },
    {
      "epoch": 5.4750815090824405,
      "grad_norm": 0.41592940114800914,
      "learning_rate": 4.815326199516742e-06,
      "loss": 0.5596,
      "step": 5880
    },
    {
      "epoch": 5.47973917093619,
      "grad_norm": 0.4294223606896892,
      "learning_rate": 4.772178115291681e-06,
      "loss": 0.5634,
      "step": 5885
    },
    {
      "epoch": 5.4843968327899395,
      "grad_norm": 0.40597268155861205,
      "learning_rate": 4.729030031066621e-06,
      "loss": 0.5719,
      "step": 5890
    },
    {
      "epoch": 5.489054494643689,
      "grad_norm": 0.4028982988870574,
      "learning_rate": 4.68588194684156e-06,
      "loss": 0.5736,
      "step": 5895
    },
    {
      "epoch": 5.4937121564974385,
      "grad_norm": 0.40186298121389796,
      "learning_rate": 4.6427338626165e-06,
      "loss": 0.5599,
      "step": 5900
    },
    {
      "epoch": 5.498369818351188,
      "grad_norm": 0.42303591732933404,
      "learning_rate": 4.599585778391439e-06,
      "loss": 0.5818,
      "step": 5905
    },
    {
      "epoch": 5.5030274802049375,
      "grad_norm": 0.4234758874100327,
      "learning_rate": 4.556437694166379e-06,
      "loss": 0.5542,
      "step": 5910
    },
    {
      "epoch": 5.507685142058687,
      "grad_norm": 0.43047017308758134,
      "learning_rate": 4.513289609941319e-06,
      "loss": 0.5635,
      "step": 5915
    },
    {
      "epoch": 5.512342803912436,
      "grad_norm": 0.40987555865691677,
      "learning_rate": 4.470141525716258e-06,
      "loss": 0.5559,
      "step": 5920
    },
    {
      "epoch": 5.517000465766185,
      "grad_norm": 0.3863516212833326,
      "learning_rate": 4.426993441491198e-06,
      "loss": 0.5685,
      "step": 5925
    },
    {
      "epoch": 5.521658127619935,
      "grad_norm": 0.4105942607404805,
      "learning_rate": 4.383845357266138e-06,
      "loss": 0.559,
      "step": 5930
    },
    {
      "epoch": 5.526315789473684,
      "grad_norm": 0.39860381357892277,
      "learning_rate": 4.340697273041077e-06,
      "loss": 0.5617,
      "step": 5935
    },
    {
      "epoch": 5.530973451327434,
      "grad_norm": 0.41665052343649345,
      "learning_rate": 4.297549188816017e-06,
      "loss": 0.5626,
      "step": 5940
    },
    {
      "epoch": 5.535631113181183,
      "grad_norm": 0.4095867788678644,
      "learning_rate": 4.254401104590956e-06,
      "loss": 0.5653,
      "step": 5945
    },
    {
      "epoch": 5.5402887750349326,
      "grad_norm": 0.4195303349878415,
      "learning_rate": 4.211253020365896e-06,
      "loss": 0.5839,
      "step": 5950
    },
    {
      "epoch": 5.544946436888682,
      "grad_norm": 0.4120068956230269,
      "learning_rate": 4.168104936140835e-06,
      "loss": 0.5438,
      "step": 5955
    },
    {
      "epoch": 5.5496040987424315,
      "grad_norm": 0.4196848830981198,
      "learning_rate": 4.124956851915775e-06,
      "loss": 0.5646,
      "step": 5960
    },
    {
      "epoch": 5.554261760596181,
      "grad_norm": 0.40960812480651215,
      "learning_rate": 4.0818087676907145e-06,
      "loss": 0.5621,
      "step": 5965
    },
    {
      "epoch": 5.5589194224499305,
      "grad_norm": 0.4173392960919226,
      "learning_rate": 4.0386606834656544e-06,
      "loss": 0.5564,
      "step": 5970
    },
    {
      "epoch": 5.563577084303679,
      "grad_norm": 0.4212717258387222,
      "learning_rate": 3.9955125992405935e-06,
      "loss": 0.5678,
      "step": 5975
    },
    {
      "epoch": 5.568234746157429,
      "grad_norm": 0.42550419511681836,
      "learning_rate": 3.9523645150155335e-06,
      "loss": 0.5662,
      "step": 5980
    },
    {
      "epoch": 5.572892408011178,
      "grad_norm": 0.41623348722575887,
      "learning_rate": 3.909216430790473e-06,
      "loss": 0.5643,
      "step": 5985
    },
    {
      "epoch": 5.577550069864928,
      "grad_norm": 0.41015141306127173,
      "learning_rate": 3.8660683465654126e-06,
      "loss": 0.5576,
      "step": 5990
    },
    {
      "epoch": 5.582207731718677,
      "grad_norm": 0.42500858113635887,
      "learning_rate": 3.8229202623403525e-06,
      "loss": 0.5587,
      "step": 5995
    },
    {
      "epoch": 5.586865393572427,
      "grad_norm": 0.4320183407766433,
      "learning_rate": 3.779772178115292e-06,
      "loss": 0.5761,
      "step": 6000
    },
    {
      "epoch": 5.591523055426176,
      "grad_norm": 0.40912841983889053,
      "learning_rate": 3.736624093890231e-06,
      "loss": 0.5715,
      "step": 6005
    },
    {
      "epoch": 5.596180717279926,
      "grad_norm": 0.40709797406945314,
      "learning_rate": 3.693476009665171e-06,
      "loss": 0.5675,
      "step": 6010
    },
    {
      "epoch": 5.600838379133675,
      "grad_norm": 0.41347940747913237,
      "learning_rate": 3.6503279254401107e-06,
      "loss": 0.5723,
      "step": 6015
    },
    {
      "epoch": 5.605496040987425,
      "grad_norm": 0.4169211842077042,
      "learning_rate": 3.6071798412150506e-06,
      "loss": 0.5585,
      "step": 6020
    },
    {
      "epoch": 5.610153702841174,
      "grad_norm": 0.42537558386566227,
      "learning_rate": 3.5640317569899898e-06,
      "loss": 0.5531,
      "step": 6025
    },
    {
      "epoch": 5.614811364694923,
      "grad_norm": 0.4305352425559143,
      "learning_rate": 3.5208836727649297e-06,
      "loss": 0.5604,
      "step": 6030
    },
    {
      "epoch": 5.619469026548672,
      "grad_norm": 0.4036448018189107,
      "learning_rate": 3.477735588539869e-06,
      "loss": 0.5539,
      "step": 6035
    },
    {
      "epoch": 5.624126688402422,
      "grad_norm": 0.41741840345951775,
      "learning_rate": 3.434587504314809e-06,
      "loss": 0.5563,
      "step": 6040
    },
    {
      "epoch": 5.628784350256171,
      "grad_norm": 0.4033377591192777,
      "learning_rate": 3.3914394200897483e-06,
      "loss": 0.5645,
      "step": 6045
    },
    {
      "epoch": 5.633442012109921,
      "grad_norm": 0.4171782665393845,
      "learning_rate": 3.348291335864688e-06,
      "loss": 0.5797,
      "step": 6050
    },
    {
      "epoch": 5.63809967396367,
      "grad_norm": 0.39433610011537246,
      "learning_rate": 3.3051432516396274e-06,
      "loss": 0.5583,
      "step": 6055
    },
    {
      "epoch": 5.64275733581742,
      "grad_norm": 0.4103448842108257,
      "learning_rate": 3.2619951674145674e-06,
      "loss": 0.5625,
      "step": 6060
    },
    {
      "epoch": 5.647414997671169,
      "grad_norm": 0.3889964864956959,
      "learning_rate": 3.2188470831895065e-06,
      "loss": 0.5574,
      "step": 6065
    },
    {
      "epoch": 5.652072659524919,
      "grad_norm": 0.39757072994654735,
      "learning_rate": 3.1756989989644464e-06,
      "loss": 0.5617,
      "step": 6070
    },
    {
      "epoch": 5.656730321378668,
      "grad_norm": 0.40262036958192626,
      "learning_rate": 3.1325509147393856e-06,
      "loss": 0.5668,
      "step": 6075
    },
    {
      "epoch": 5.661387983232418,
      "grad_norm": 0.39979293172348945,
      "learning_rate": 3.089402830514325e-06,
      "loss": 0.5684,
      "step": 6080
    },
    {
      "epoch": 5.666045645086166,
      "grad_norm": 0.42639437886086473,
      "learning_rate": 3.046254746289265e-06,
      "loss": 0.5749,
      "step": 6085
    },
    {
      "epoch": 5.670703306939917,
      "grad_norm": 0.4038998352505134,
      "learning_rate": 3.0031066620642046e-06,
      "loss": 0.5505,
      "step": 6090
    },
    {
      "epoch": 5.675360968793665,
      "grad_norm": 0.42105952397990837,
      "learning_rate": 2.959958577839144e-06,
      "loss": 0.5506,
      "step": 6095
    },
    {
      "epoch": 5.680018630647415,
      "grad_norm": 0.3907174988280262,
      "learning_rate": 2.9168104936140837e-06,
      "loss": 0.5632,
      "step": 6100
    },
    {
      "epoch": 5.684676292501164,
      "grad_norm": 0.4142483675415474,
      "learning_rate": 2.873662409389023e-06,
      "loss": 0.5695,
      "step": 6105
    },
    {
      "epoch": 5.689333954354914,
      "grad_norm": 0.41194771682758324,
      "learning_rate": 2.8305143251639627e-06,
      "loss": 0.5645,
      "step": 6110
    },
    {
      "epoch": 5.693991616208663,
      "grad_norm": 0.41453421686895764,
      "learning_rate": 2.7873662409389023e-06,
      "loss": 0.5572,
      "step": 6115
    },
    {
      "epoch": 5.698649278062413,
      "grad_norm": 0.4065816283780963,
      "learning_rate": 2.7442181567138422e-06,
      "loss": 0.5704,
      "step": 6120
    },
    {
      "epoch": 5.703306939916162,
      "grad_norm": 0.42769337257365975,
      "learning_rate": 2.7010700724887818e-06,
      "loss": 0.5698,
      "step": 6125
    },
    {
      "epoch": 5.707964601769912,
      "grad_norm": 0.3743277260802833,
      "learning_rate": 2.6579219882637213e-06,
      "loss": 0.5562,
      "step": 6130
    },
    {
      "epoch": 5.712622263623661,
      "grad_norm": 0.4035379866637785,
      "learning_rate": 2.614773904038661e-06,
      "loss": 0.5655,
      "step": 6135
    },
    {
      "epoch": 5.717279925477411,
      "grad_norm": 0.41488838089367225,
      "learning_rate": 2.5716258198136004e-06,
      "loss": 0.568,
      "step": 6140
    },
    {
      "epoch": 5.72193758733116,
      "grad_norm": 0.41383472645868846,
      "learning_rate": 2.52847773558854e-06,
      "loss": 0.5565,
      "step": 6145
    },
    {
      "epoch": 5.726595249184909,
      "grad_norm": 0.4054906747465409,
      "learning_rate": 2.4853296513634795e-06,
      "loss": 0.5666,
      "step": 6150
    },
    {
      "epoch": 5.731252911038658,
      "grad_norm": 0.40137154610973613,
      "learning_rate": 2.442181567138419e-06,
      "loss": 0.5759,
      "step": 6155
    },
    {
      "epoch": 5.735910572892408,
      "grad_norm": 0.4070737264766523,
      "learning_rate": 2.399033482913359e-06,
      "loss": 0.5726,
      "step": 6160
    },
    {
      "epoch": 5.740568234746157,
      "grad_norm": 0.4007959321109693,
      "learning_rate": 2.3558853986882985e-06,
      "loss": 0.5746,
      "step": 6165
    },
    {
      "epoch": 5.745225896599907,
      "grad_norm": 0.40888165957386624,
      "learning_rate": 2.312737314463238e-06,
      "loss": 0.5524,
      "step": 6170
    },
    {
      "epoch": 5.749883558453656,
      "grad_norm": 0.4035306077364339,
      "learning_rate": 2.2695892302381776e-06,
      "loss": 0.5673,
      "step": 6175
    },
    {
      "epoch": 5.754541220307406,
      "grad_norm": 0.39502013880626413,
      "learning_rate": 2.226441146013117e-06,
      "loss": 0.5655,
      "step": 6180
    },
    {
      "epoch": 5.759198882161155,
      "grad_norm": 0.43395568549444674,
      "learning_rate": 2.1832930617880566e-06,
      "loss": 0.5632,
      "step": 6185
    },
    {
      "epoch": 5.763856544014905,
      "grad_norm": 0.39420045671889875,
      "learning_rate": 2.140144977562996e-06,
      "loss": 0.5551,
      "step": 6190
    },
    {
      "epoch": 5.768514205868654,
      "grad_norm": 0.39867593169108606,
      "learning_rate": 2.096996893337936e-06,
      "loss": 0.5742,
      "step": 6195
    },
    {
      "epoch": 5.773171867722404,
      "grad_norm": 0.40156836437794047,
      "learning_rate": 2.0538488091128757e-06,
      "loss": 0.5699,
      "step": 6200
    },
    {
      "epoch": 5.777829529576152,
      "grad_norm": 0.4048010544032807,
      "learning_rate": 2.010700724887815e-06,
      "loss": 0.5756,
      "step": 6205
    },
    {
      "epoch": 5.782487191429902,
      "grad_norm": 0.38549911867624526,
      "learning_rate": 1.9675526406627547e-06,
      "loss": 0.5579,
      "step": 6210
    },
    {
      "epoch": 5.787144853283651,
      "grad_norm": 0.39742374128807434,
      "learning_rate": 1.9244045564376943e-06,
      "loss": 0.557,
      "step": 6215
    },
    {
      "epoch": 5.791802515137401,
      "grad_norm": 0.40433000457181545,
      "learning_rate": 1.8812564722126338e-06,
      "loss": 0.5574,
      "step": 6220
    },
    {
      "epoch": 5.79646017699115,
      "grad_norm": 0.3941441139360801,
      "learning_rate": 1.8381083879875736e-06,
      "loss": 0.5682,
      "step": 6225
    },
    {
      "epoch": 5.8011178388449,
      "grad_norm": 0.3992145826472066,
      "learning_rate": 1.794960303762513e-06,
      "loss": 0.5807,
      "step": 6230
    },
    {
      "epoch": 5.805775500698649,
      "grad_norm": 0.40959722802929466,
      "learning_rate": 1.7518122195374526e-06,
      "loss": 0.5724,
      "step": 6235
    },
    {
      "epoch": 5.810433162552399,
      "grad_norm": 0.40172842565861966,
      "learning_rate": 1.7086641353123924e-06,
      "loss": 0.5643,
      "step": 6240
    },
    {
      "epoch": 5.815090824406148,
      "grad_norm": 0.394640153858509,
      "learning_rate": 1.665516051087332e-06,
      "loss": 0.5621,
      "step": 6245
    },
    {
      "epoch": 5.819748486259898,
      "grad_norm": 0.4033965017648463,
      "learning_rate": 1.6223679668622715e-06,
      "loss": 0.5673,
      "step": 6250
    },
    {
      "epoch": 5.824406148113647,
      "grad_norm": 0.398166753004289,
      "learning_rate": 1.579219882637211e-06,
      "loss": 0.5815,
      "step": 6255
    },
    {
      "epoch": 5.829063809967396,
      "grad_norm": 0.40136934344268105,
      "learning_rate": 1.5360717984121505e-06,
      "loss": 0.5542,
      "step": 6260
    },
    {
      "epoch": 5.833721471821145,
      "grad_norm": 0.42153262160447147,
      "learning_rate": 1.49292371418709e-06,
      "loss": 0.5644,
      "step": 6265
    },
    {
      "epoch": 5.838379133674895,
      "grad_norm": 0.4013430168188573,
      "learning_rate": 1.4497756299620296e-06,
      "loss": 0.5807,
      "step": 6270
    },
    {
      "epoch": 5.843036795528644,
      "grad_norm": 0.40144159950620084,
      "learning_rate": 1.4066275457369694e-06,
      "loss": 0.5672,
      "step": 6275
    },
    {
      "epoch": 5.847694457382394,
      "grad_norm": 0.3905704278699857,
      "learning_rate": 1.363479461511909e-06,
      "loss": 0.5643,
      "step": 6280
    },
    {
      "epoch": 5.852352119236143,
      "grad_norm": 0.38944035369021035,
      "learning_rate": 1.3203313772868484e-06,
      "loss": 0.5686,
      "step": 6285
    },
    {
      "epoch": 5.857009781089893,
      "grad_norm": 0.3916599156858741,
      "learning_rate": 1.277183293061788e-06,
      "loss": 0.5562,
      "step": 6290
    },
    {
      "epoch": 5.861667442943642,
      "grad_norm": 0.3736920720767908,
      "learning_rate": 1.2340352088367277e-06,
      "loss": 0.5545,
      "step": 6295
    },
    {
      "epoch": 5.866325104797392,
      "grad_norm": 0.3921475156350814,
      "learning_rate": 1.1908871246116673e-06,
      "loss": 0.5569,
      "step": 6300
    },
    {
      "epoch": 5.870982766651141,
      "grad_norm": 0.39718217346161483,
      "learning_rate": 1.1477390403866068e-06,
      "loss": 0.5473,
      "step": 6305
    },
    {
      "epoch": 5.875640428504891,
      "grad_norm": 0.3994935700401086,
      "learning_rate": 1.1045909561615463e-06,
      "loss": 0.5736,
      "step": 6310
    },
    {
      "epoch": 5.8802980903586395,
      "grad_norm": 0.40030906526793253,
      "learning_rate": 1.061442871936486e-06,
      "loss": 0.5441,
      "step": 6315
    },
    {
      "epoch": 5.88495575221239,
      "grad_norm": 0.38817914033940354,
      "learning_rate": 1.0182947877114256e-06,
      "loss": 0.5584,
      "step": 6320
    },
    {
      "epoch": 5.8896134140661385,
      "grad_norm": 0.39461128334067946,
      "learning_rate": 9.751467034863652e-07,
      "loss": 0.5598,
      "step": 6325
    },
    {
      "epoch": 5.894271075919888,
      "grad_norm": 0.4000216002515271,
      "learning_rate": 9.319986192613048e-07,
      "loss": 0.5767,
      "step": 6330
    },
    {
      "epoch": 5.8989287377736375,
      "grad_norm": 0.405724680285917,
      "learning_rate": 8.888505350362444e-07,
      "loss": 0.5532,
      "step": 6335
    },
    {
      "epoch": 5.903586399627387,
      "grad_norm": 0.3966245206781564,
      "learning_rate": 8.45702450811184e-07,
      "loss": 0.5548,
      "step": 6340
    },
    {
      "epoch": 5.9082440614811365,
      "grad_norm": 0.3985605889076508,
      "learning_rate": 8.025543665861236e-07,
      "loss": 0.5435,
      "step": 6345
    },
    {
      "epoch": 5.912901723334886,
      "grad_norm": 0.3911890676900607,
      "learning_rate": 7.594062823610632e-07,
      "loss": 0.563,
      "step": 6350
    },
    {
      "epoch": 5.9175593851886354,
      "grad_norm": 0.401965871420791,
      "learning_rate": 7.162581981360028e-07,
      "loss": 0.5627,
      "step": 6355
    },
    {
      "epoch": 5.922217047042385,
      "grad_norm": 0.39202956971696346,
      "learning_rate": 6.731101139109423e-07,
      "loss": 0.5494,
      "step": 6360
    },
    {
      "epoch": 5.926874708896134,
      "grad_norm": 0.40217414438035676,
      "learning_rate": 6.29962029685882e-07,
      "loss": 0.5716,
      "step": 6365
    },
    {
      "epoch": 5.931532370749884,
      "grad_norm": 0.39499893054659513,
      "learning_rate": 5.868139454608215e-07,
      "loss": 0.5495,
      "step": 6370
    },
    {
      "epoch": 5.936190032603633,
      "grad_norm": 0.4016719588732879,
      "learning_rate": 5.436658612357612e-07,
      "loss": 0.5802,
      "step": 6375
    },
    {
      "epoch": 5.940847694457382,
      "grad_norm": 0.39553277802822207,
      "learning_rate": 5.005177770107007e-07,
      "loss": 0.5606,
      "step": 6380
    },
    {
      "epoch": 5.9455053563111315,
      "grad_norm": 0.39850656808167323,
      "learning_rate": 4.5736969278564034e-07,
      "loss": 0.5679,
      "step": 6385
    },
    {
      "epoch": 5.950163018164881,
      "grad_norm": 0.401297459288811,
      "learning_rate": 4.142216085605799e-07,
      "loss": 0.5821,
      "step": 6390
    },
    {
      "epoch": 5.9548206800186305,
      "grad_norm": 0.3972992243477489,
      "learning_rate": 3.710735243355195e-07,
      "loss": 0.5721,
      "step": 6395
    },
    {
      "epoch": 5.95947834187238,
      "grad_norm": 0.3958166758264783,
      "learning_rate": 3.279254401104591e-07,
      "loss": 0.5678,
      "step": 6400
    },
    {
      "epoch": 5.9641360037261295,
      "grad_norm": 0.3834453135346943,
      "learning_rate": 2.847773558853987e-07,
      "loss": 0.5566,
      "step": 6405
    },
    {
      "epoch": 5.968793665579879,
      "grad_norm": 0.39085792149862253,
      "learning_rate": 2.416292716603383e-07,
      "loss": 0.5616,
      "step": 6410
    },
    {
      "epoch": 5.9734513274336285,
      "grad_norm": 0.3816402087868799,
      "learning_rate": 1.984811874352779e-07,
      "loss": 0.5426,
      "step": 6415
    },
    {
      "epoch": 5.978108989287378,
      "grad_norm": 0.3949736427696376,
      "learning_rate": 1.5533310321021747e-07,
      "loss": 0.549,
      "step": 6420
    },
    {
      "epoch": 5.9827666511411275,
      "grad_norm": 0.38007476087612035,
      "learning_rate": 1.1218501898515707e-07,
      "loss": 0.5606,
      "step": 6425
    },
    {
      "epoch": 5.987424312994877,
      "grad_norm": 0.3925924080479119,
      "learning_rate": 6.903693476009665e-08,
      "loss": 0.5742,
      "step": 6430
    },
    {
      "epoch": 5.992081974848626,
      "grad_norm": 0.38285900233997977,
      "learning_rate": 2.5888850535036245e-08,
      "loss": 0.5667,
      "step": 6435
    },
    {
      "epoch": 5.994876571960876,
      "step": 6438,
      "total_flos": 3121889937063936.0,
      "train_loss": 0.7511046439751812,
      "train_runtime": 81013.2052,
      "train_samples_per_second": 1.272,
      "train_steps_per_second": 0.079
    }
  ],
  "logging_steps": 5,
  "max_steps": 6438,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3121889937063936.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}