|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9868043602983362, |
|
"eval_steps": 500, |
|
"global_step": 4300, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.00022948938611589215, |
|
"grad_norm": 0.2832958400249481, |
|
"learning_rate": 4.587155963302753e-07, |
|
"loss": 1.1374, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0011474469305794606, |
|
"grad_norm": 0.3270454704761505, |
|
"learning_rate": 2.2935779816513764e-06, |
|
"loss": 1.1705, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.002294893861158921, |
|
"grad_norm": 0.27594515681266785, |
|
"learning_rate": 4.587155963302753e-06, |
|
"loss": 1.1193, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0034423407917383822, |
|
"grad_norm": 0.2871370315551758, |
|
"learning_rate": 6.880733944954129e-06, |
|
"loss": 1.1392, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.004589787722317842, |
|
"grad_norm": 0.2703236937522888, |
|
"learning_rate": 9.174311926605506e-06, |
|
"loss": 1.1746, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.005737234652897304, |
|
"grad_norm": 0.2527748644351959, |
|
"learning_rate": 1.1467889908256882e-05, |
|
"loss": 1.129, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.0068846815834767644, |
|
"grad_norm": 0.1984025090932846, |
|
"learning_rate": 1.3761467889908258e-05, |
|
"loss": 1.0819, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.008032128514056224, |
|
"grad_norm": 0.21797247231006622, |
|
"learning_rate": 1.6055045871559634e-05, |
|
"loss": 1.0604, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.009179575444635685, |
|
"grad_norm": 0.1990118771791458, |
|
"learning_rate": 1.834862385321101e-05, |
|
"loss": 1.0285, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.010327022375215147, |
|
"grad_norm": 0.19321753084659576, |
|
"learning_rate": 2.0642201834862388e-05, |
|
"loss": 1.0389, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.011474469305794608, |
|
"grad_norm": 0.18540368974208832, |
|
"learning_rate": 2.2935779816513765e-05, |
|
"loss": 1.0454, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.012621916236374068, |
|
"grad_norm": 0.18497976660728455, |
|
"learning_rate": 2.5229357798165138e-05, |
|
"loss": 1.0578, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.013769363166953529, |
|
"grad_norm": 0.14606232941150665, |
|
"learning_rate": 2.7522935779816515e-05, |
|
"loss": 1.064, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.01491681009753299, |
|
"grad_norm": 0.1588371992111206, |
|
"learning_rate": 2.9816513761467892e-05, |
|
"loss": 1.0231, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.01606425702811245, |
|
"grad_norm": 0.1489713490009308, |
|
"learning_rate": 3.211009174311927e-05, |
|
"loss": 0.9939, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.01721170395869191, |
|
"grad_norm": 0.1601668745279312, |
|
"learning_rate": 3.4403669724770645e-05, |
|
"loss": 1.04, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.01835915088927137, |
|
"grad_norm": 0.17821019887924194, |
|
"learning_rate": 3.669724770642202e-05, |
|
"loss": 1.0147, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.019506597819850834, |
|
"grad_norm": 0.15738487243652344, |
|
"learning_rate": 3.89908256880734e-05, |
|
"loss": 1.0404, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.020654044750430294, |
|
"grad_norm": 0.16155536472797394, |
|
"learning_rate": 4.1284403669724776e-05, |
|
"loss": 1.0388, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.021801491681009755, |
|
"grad_norm": 0.1788385957479477, |
|
"learning_rate": 4.3577981651376146e-05, |
|
"loss": 0.9844, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.022948938611589215, |
|
"grad_norm": 0.2024046927690506, |
|
"learning_rate": 4.587155963302753e-05, |
|
"loss": 0.9918, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.024096385542168676, |
|
"grad_norm": 0.16764964163303375, |
|
"learning_rate": 4.81651376146789e-05, |
|
"loss": 0.9971, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.025243832472748137, |
|
"grad_norm": 0.17267030477523804, |
|
"learning_rate": 5.0458715596330276e-05, |
|
"loss": 1.0152, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.026391279403327597, |
|
"grad_norm": 0.1706738919019699, |
|
"learning_rate": 5.2752293577981646e-05, |
|
"loss": 1.0135, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.027538726333907058, |
|
"grad_norm": 0.1741032898426056, |
|
"learning_rate": 5.504587155963303e-05, |
|
"loss": 0.9995, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.02868617326448652, |
|
"grad_norm": 0.17674313485622406, |
|
"learning_rate": 5.733944954128441e-05, |
|
"loss": 0.9853, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.02983362019506598, |
|
"grad_norm": 0.16848498582839966, |
|
"learning_rate": 5.9633027522935784e-05, |
|
"loss": 1.0349, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.03098106712564544, |
|
"grad_norm": 0.17360493540763855, |
|
"learning_rate": 6.192660550458716e-05, |
|
"loss": 0.9893, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.0321285140562249, |
|
"grad_norm": 0.191062331199646, |
|
"learning_rate": 6.422018348623854e-05, |
|
"loss": 0.9922, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.03327596098680436, |
|
"grad_norm": 0.1799137443304062, |
|
"learning_rate": 6.651376146788991e-05, |
|
"loss": 0.9998, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.03442340791738382, |
|
"grad_norm": 0.19225718080997467, |
|
"learning_rate": 6.880733944954129e-05, |
|
"loss": 1.0426, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.03557085484796328, |
|
"grad_norm": 0.20365940034389496, |
|
"learning_rate": 7.110091743119265e-05, |
|
"loss": 0.9869, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.03671830177854274, |
|
"grad_norm": 0.18115347623825073, |
|
"learning_rate": 7.339449541284404e-05, |
|
"loss": 0.9939, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.0378657487091222, |
|
"grad_norm": 0.16979213058948517, |
|
"learning_rate": 7.568807339449542e-05, |
|
"loss": 0.9788, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.03901319563970167, |
|
"grad_norm": 0.16331952810287476, |
|
"learning_rate": 7.79816513761468e-05, |
|
"loss": 1.0014, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.040160642570281124, |
|
"grad_norm": 0.20406360924243927, |
|
"learning_rate": 8.027522935779816e-05, |
|
"loss": 0.988, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.04130808950086059, |
|
"grad_norm": 0.17396636307239532, |
|
"learning_rate": 8.256880733944955e-05, |
|
"loss": 0.9756, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.042455536431440045, |
|
"grad_norm": 0.19402877986431122, |
|
"learning_rate": 8.486238532110093e-05, |
|
"loss": 1.0017, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.04360298336201951, |
|
"grad_norm": 0.16275173425674438, |
|
"learning_rate": 8.715596330275229e-05, |
|
"loss": 0.9881, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.04475043029259897, |
|
"grad_norm": 0.16075727343559265, |
|
"learning_rate": 8.944954128440367e-05, |
|
"loss": 0.9818, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.04589787722317843, |
|
"grad_norm": 0.17031803727149963, |
|
"learning_rate": 9.174311926605506e-05, |
|
"loss": 0.9813, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.04704532415375789, |
|
"grad_norm": 0.16039007902145386, |
|
"learning_rate": 9.403669724770642e-05, |
|
"loss": 0.9655, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.04819277108433735, |
|
"grad_norm": 0.17182576656341553, |
|
"learning_rate": 9.63302752293578e-05, |
|
"loss": 1.0034, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.04934021801491681, |
|
"grad_norm": 0.16478177905082703, |
|
"learning_rate": 9.862385321100918e-05, |
|
"loss": 0.9993, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.05048766494549627, |
|
"grad_norm": 0.16542178392410278, |
|
"learning_rate": 0.00010091743119266055, |
|
"loss": 1.005, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.05163511187607573, |
|
"grad_norm": 0.1568165123462677, |
|
"learning_rate": 0.00010321100917431193, |
|
"loss": 1.0148, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.052782558806655194, |
|
"grad_norm": 0.15992042422294617, |
|
"learning_rate": 0.00010550458715596329, |
|
"loss": 1.0084, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.05393000573723465, |
|
"grad_norm": 0.16262054443359375, |
|
"learning_rate": 0.0001077981651376147, |
|
"loss": 0.9871, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.055077452667814115, |
|
"grad_norm": 0.15893304347991943, |
|
"learning_rate": 0.00011009174311926606, |
|
"loss": 0.9609, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.05622489959839357, |
|
"grad_norm": 0.1627625972032547, |
|
"learning_rate": 0.00011238532110091744, |
|
"loss": 0.9571, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.05737234652897304, |
|
"grad_norm": 0.15538789331912994, |
|
"learning_rate": 0.00011467889908256881, |
|
"loss": 0.9632, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.058519793459552494, |
|
"grad_norm": 0.14804205298423767, |
|
"learning_rate": 0.00011697247706422019, |
|
"loss": 1.0001, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.05966724039013196, |
|
"grad_norm": 0.15267077088356018, |
|
"learning_rate": 0.00011926605504587157, |
|
"loss": 0.9697, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.060814687320711415, |
|
"grad_norm": 0.14952810108661652, |
|
"learning_rate": 0.00012155963302752293, |
|
"loss": 1.0034, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.06196213425129088, |
|
"grad_norm": 0.15380804240703583, |
|
"learning_rate": 0.00012385321100917432, |
|
"loss": 0.9614, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.06310958118187034, |
|
"grad_norm": 0.144377201795578, |
|
"learning_rate": 0.0001261467889908257, |
|
"loss": 0.965, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.0642570281124498, |
|
"grad_norm": 0.13880601525306702, |
|
"learning_rate": 0.00012844036697247707, |
|
"loss": 0.9586, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.06540447504302926, |
|
"grad_norm": 0.1473158597946167, |
|
"learning_rate": 0.00013073394495412844, |
|
"loss": 0.994, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.06655192197360872, |
|
"grad_norm": 0.14506657421588898, |
|
"learning_rate": 0.00013302752293577983, |
|
"loss": 1.0082, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.06769936890418818, |
|
"grad_norm": 0.1545286476612091, |
|
"learning_rate": 0.0001353211009174312, |
|
"loss": 0.9639, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.06884681583476764, |
|
"grad_norm": 0.14149315655231476, |
|
"learning_rate": 0.00013761467889908258, |
|
"loss": 0.9682, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.0699942627653471, |
|
"grad_norm": 0.1382177472114563, |
|
"learning_rate": 0.00013990825688073395, |
|
"loss": 0.9534, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.07114170969592656, |
|
"grad_norm": 0.1412287950515747, |
|
"learning_rate": 0.0001422018348623853, |
|
"loss": 0.9463, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.07228915662650602, |
|
"grad_norm": 0.13654330372810364, |
|
"learning_rate": 0.00014449541284403673, |
|
"loss": 0.9851, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.07343660355708548, |
|
"grad_norm": 0.13656587898731232, |
|
"learning_rate": 0.0001467889908256881, |
|
"loss": 0.9938, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.07458405048766495, |
|
"grad_norm": 0.13453873991966248, |
|
"learning_rate": 0.00014908256880733945, |
|
"loss": 0.9847, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.0757314974182444, |
|
"grad_norm": 0.13527260720729828, |
|
"learning_rate": 0.00015137614678899084, |
|
"loss": 0.9436, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.07687894434882386, |
|
"grad_norm": 0.13411866128444672, |
|
"learning_rate": 0.0001536697247706422, |
|
"loss": 0.9737, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.07802639127940333, |
|
"grad_norm": 0.13352230191230774, |
|
"learning_rate": 0.0001559633027522936, |
|
"loss": 0.977, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.07917383820998279, |
|
"grad_norm": 0.14445261657238007, |
|
"learning_rate": 0.00015825688073394496, |
|
"loss": 0.9802, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.08032128514056225, |
|
"grad_norm": 0.13070940971374512, |
|
"learning_rate": 0.00016055045871559632, |
|
"loss": 0.9509, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.0814687320711417, |
|
"grad_norm": 0.13417434692382812, |
|
"learning_rate": 0.0001628440366972477, |
|
"loss": 1.0253, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.08261617900172118, |
|
"grad_norm": 0.15762361884117126, |
|
"learning_rate": 0.0001651376146788991, |
|
"loss": 0.9936, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.08376362593230063, |
|
"grad_norm": 0.13492801785469055, |
|
"learning_rate": 0.00016743119266055047, |
|
"loss": 0.9601, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.08491107286288009, |
|
"grad_norm": 0.1378079354763031, |
|
"learning_rate": 0.00016972477064220186, |
|
"loss": 1.0292, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.08605851979345955, |
|
"grad_norm": 0.1303025484085083, |
|
"learning_rate": 0.00017201834862385322, |
|
"loss": 0.9491, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.08720596672403902, |
|
"grad_norm": 0.13195742666721344, |
|
"learning_rate": 0.00017431192660550458, |
|
"loss": 1.0134, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.08835341365461848, |
|
"grad_norm": 0.13493070006370544, |
|
"learning_rate": 0.00017660550458715597, |
|
"loss": 0.9563, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.08950086058519793, |
|
"grad_norm": 0.12674984335899353, |
|
"learning_rate": 0.00017889908256880734, |
|
"loss": 0.9772, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.09064830751577739, |
|
"grad_norm": 0.12925055623054504, |
|
"learning_rate": 0.00018119266055045873, |
|
"loss": 0.9445, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.09179575444635686, |
|
"grad_norm": 0.13387279212474823, |
|
"learning_rate": 0.00018348623853211012, |
|
"loss": 0.9724, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.09294320137693632, |
|
"grad_norm": 0.13336487114429474, |
|
"learning_rate": 0.00018577981651376148, |
|
"loss": 0.9938, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.09409064830751578, |
|
"grad_norm": 0.13736997544765472, |
|
"learning_rate": 0.00018807339449541284, |
|
"loss": 0.9719, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.09523809523809523, |
|
"grad_norm": 0.13675136864185333, |
|
"learning_rate": 0.00019036697247706424, |
|
"loss": 0.9648, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.0963855421686747, |
|
"grad_norm": 0.13273541629314423, |
|
"learning_rate": 0.0001926605504587156, |
|
"loss": 0.9717, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.09753298909925416, |
|
"grad_norm": 0.13400444388389587, |
|
"learning_rate": 0.000194954128440367, |
|
"loss": 0.9819, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.09868043602983362, |
|
"grad_norm": 0.1314323991537094, |
|
"learning_rate": 0.00019724770642201835, |
|
"loss": 0.984, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.09982788296041308, |
|
"grad_norm": 0.13610613346099854, |
|
"learning_rate": 0.00019954128440366972, |
|
"loss": 0.9681, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.10097532989099255, |
|
"grad_norm": 0.13332229852676392, |
|
"learning_rate": 0.00019999948643469536, |
|
"loss": 0.9801, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.102122776821572, |
|
"grad_norm": 0.12949495017528534, |
|
"learning_rate": 0.00019999740008468594, |
|
"loss": 0.9846, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.10327022375215146, |
|
"grad_norm": 0.13086913526058197, |
|
"learning_rate": 0.00019999370888559804, |
|
"loss": 0.923, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.10441767068273092, |
|
"grad_norm": 0.1352614462375641, |
|
"learning_rate": 0.0001999884128966714, |
|
"loss": 0.9459, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.10556511761331039, |
|
"grad_norm": 0.13356110453605652, |
|
"learning_rate": 0.00019998151220290082, |
|
"loss": 0.9859, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.10671256454388985, |
|
"grad_norm": 0.13900211453437805, |
|
"learning_rate": 0.00019997300691503497, |
|
"loss": 0.9691, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.1078600114744693, |
|
"grad_norm": 0.12718099355697632, |
|
"learning_rate": 0.0001999628971695744, |
|
"loss": 0.9618, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.10900745840504876, |
|
"grad_norm": 0.12601250410079956, |
|
"learning_rate": 0.00019995118312876944, |
|
"loss": 0.9603, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.11015490533562823, |
|
"grad_norm": 0.13364988565444946, |
|
"learning_rate": 0.0001999378649806177, |
|
"loss": 0.9366, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.11130235226620769, |
|
"grad_norm": 0.13241152465343475, |
|
"learning_rate": 0.00019992294293886095, |
|
"loss": 0.9565, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.11244979919678715, |
|
"grad_norm": 0.1273089498281479, |
|
"learning_rate": 0.00019990641724298156, |
|
"loss": 0.96, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.11359724612736662, |
|
"grad_norm": 0.13082469999790192, |
|
"learning_rate": 0.000199888288158199, |
|
"loss": 0.9767, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.11474469305794607, |
|
"grad_norm": 0.14526137709617615, |
|
"learning_rate": 0.00019986855597546526, |
|
"loss": 0.9641, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.11589213998852553, |
|
"grad_norm": 0.13247860968112946, |
|
"learning_rate": 0.00019984722101146029, |
|
"loss": 0.9676, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.11703958691910499, |
|
"grad_norm": 0.14177852869033813, |
|
"learning_rate": 0.000199824283608587, |
|
"loss": 0.9406, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.11818703384968446, |
|
"grad_norm": 0.1286071240901947, |
|
"learning_rate": 0.00019979974413496566, |
|
"loss": 0.9689, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.11933448078026392, |
|
"grad_norm": 0.1299976259469986, |
|
"learning_rate": 0.00019977360298442803, |
|
"loss": 0.9623, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.12048192771084337, |
|
"grad_norm": 0.12755008041858673, |
|
"learning_rate": 0.00019974586057651102, |
|
"loss": 0.9139, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.12162937464142283, |
|
"grad_norm": 0.13445819914340973, |
|
"learning_rate": 0.00019971651735644995, |
|
"loss": 0.9513, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.1227768215720023, |
|
"grad_norm": 0.13108594715595245, |
|
"learning_rate": 0.00019968557379517152, |
|
"loss": 0.9492, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.12392426850258176, |
|
"grad_norm": 0.124118372797966, |
|
"learning_rate": 0.00019965303038928608, |
|
"loss": 0.9411, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.12507171543316123, |
|
"grad_norm": 0.12611638009548187, |
|
"learning_rate": 0.00019961888766107972, |
|
"loss": 0.976, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.12621916236374067, |
|
"grad_norm": 0.13510450720787048, |
|
"learning_rate": 0.00019958314615850598, |
|
"loss": 0.9632, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.12736660929432014, |
|
"grad_norm": 0.12858641147613525, |
|
"learning_rate": 0.00019954580645517697, |
|
"loss": 0.9491, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.1285140562248996, |
|
"grad_norm": 0.13497839868068695, |
|
"learning_rate": 0.0001995068691503541, |
|
"loss": 0.9604, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.12966150315547906, |
|
"grad_norm": 0.12632782757282257, |
|
"learning_rate": 0.00019946633486893865, |
|
"loss": 0.9363, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.13080895008605853, |
|
"grad_norm": 0.12541595101356506, |
|
"learning_rate": 0.00019942420426146153, |
|
"loss": 0.9897, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.13195639701663797, |
|
"grad_norm": 0.1276572197675705, |
|
"learning_rate": 0.00019938047800407302, |
|
"loss": 0.9535, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.13310384394721744, |
|
"grad_norm": 0.12992243468761444, |
|
"learning_rate": 0.00019933515679853182, |
|
"loss": 0.9879, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.1342512908777969, |
|
"grad_norm": 0.1361103504896164, |
|
"learning_rate": 0.0001992882413721937, |
|
"loss": 0.9684, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.13539873780837636, |
|
"grad_norm": 0.13442201912403107, |
|
"learning_rate": 0.0001992397324780001, |
|
"loss": 0.9427, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.13654618473895583, |
|
"grad_norm": 0.1316874623298645, |
|
"learning_rate": 0.00019918963089446577, |
|
"loss": 0.9795, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.13769363166953527, |
|
"grad_norm": 0.12683913111686707, |
|
"learning_rate": 0.00019913793742566647, |
|
"loss": 0.9517, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.13884107860011474, |
|
"grad_norm": 0.12805037200450897, |
|
"learning_rate": 0.00019908465290122585, |
|
"loss": 0.9728, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.1399885255306942, |
|
"grad_norm": 0.13088442385196686, |
|
"learning_rate": 0.00019902977817630243, |
|
"loss": 0.9695, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.14113597246127366, |
|
"grad_norm": 0.1324809193611145, |
|
"learning_rate": 0.00019897331413157548, |
|
"loss": 0.9996, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.14228341939185313, |
|
"grad_norm": 0.12211659550666809, |
|
"learning_rate": 0.00019891526167323145, |
|
"loss": 0.989, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.1434308663224326, |
|
"grad_norm": 0.13171370327472687, |
|
"learning_rate": 0.0001988556217329488, |
|
"loss": 0.9714, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.14457831325301204, |
|
"grad_norm": 0.12398378551006317, |
|
"learning_rate": 0.00019879439526788341, |
|
"loss": 0.9717, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.1457257601835915, |
|
"grad_norm": 0.12855181097984314, |
|
"learning_rate": 0.00019873158326065327, |
|
"loss": 0.9635, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.14687320711417096, |
|
"grad_norm": 0.13017693161964417, |
|
"learning_rate": 0.00019866718671932249, |
|
"loss": 0.9804, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.14802065404475043, |
|
"grad_norm": 0.12396004796028137, |
|
"learning_rate": 0.00019860120667738516, |
|
"loss": 0.9985, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.1491681009753299, |
|
"grad_norm": 0.12556292116641998, |
|
"learning_rate": 0.00019853364419374902, |
|
"loss": 0.9498, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.15031554790590934, |
|
"grad_norm": 0.12642225623130798, |
|
"learning_rate": 0.00019846450035271808, |
|
"loss": 1.0224, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.1514629948364888, |
|
"grad_norm": 0.12983497977256775, |
|
"learning_rate": 0.00019839377626397554, |
|
"loss": 0.9604, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.15261044176706828, |
|
"grad_norm": 0.13211478292942047, |
|
"learning_rate": 0.00019832147306256576, |
|
"loss": 0.9864, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.15375788869764773, |
|
"grad_norm": 0.1218995749950409, |
|
"learning_rate": 0.00019824759190887622, |
|
"loss": 0.937, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.1549053356282272, |
|
"grad_norm": 0.13377314805984497, |
|
"learning_rate": 0.00019817213398861866, |
|
"loss": 0.9704, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.15605278255880667, |
|
"grad_norm": 0.12591040134429932, |
|
"learning_rate": 0.0001980951005128104, |
|
"loss": 0.9874, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.1572002294893861, |
|
"grad_norm": 0.12856173515319824, |
|
"learning_rate": 0.00019801649271775459, |
|
"loss": 0.9469, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.15834767641996558, |
|
"grad_norm": 0.1214781105518341, |
|
"learning_rate": 0.00019793631186502047, |
|
"loss": 0.9739, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.15949512335054503, |
|
"grad_norm": 0.12588472664356232, |
|
"learning_rate": 0.00019785455924142318, |
|
"loss": 0.9629, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.1606425702811245, |
|
"grad_norm": 0.12644240260124207, |
|
"learning_rate": 0.0001977712361590031, |
|
"loss": 1.0156, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.16179001721170397, |
|
"grad_norm": 0.12597164511680603, |
|
"learning_rate": 0.00019768634395500465, |
|
"loss": 0.9728, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.1629374641422834, |
|
"grad_norm": 0.12837018072605133, |
|
"learning_rate": 0.00019759988399185505, |
|
"loss": 0.9771, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.16408491107286288, |
|
"grad_norm": 0.13782522082328796, |
|
"learning_rate": 0.00019751185765714234, |
|
"loss": 0.9618, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.16523235800344235, |
|
"grad_norm": 0.12514767050743103, |
|
"learning_rate": 0.00019742226636359296, |
|
"loss": 0.9594, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.1663798049340218, |
|
"grad_norm": 0.1280808001756668, |
|
"learning_rate": 0.00019733111154904943, |
|
"loss": 0.9856, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.16752725186460127, |
|
"grad_norm": 0.12929755449295044, |
|
"learning_rate": 0.00019723839467644699, |
|
"loss": 0.9382, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.1686746987951807, |
|
"grad_norm": 0.5807048678398132, |
|
"learning_rate": 0.00019714411723379015, |
|
"loss": 0.9602, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.16982214572576018, |
|
"grad_norm": 0.13152436912059784, |
|
"learning_rate": 0.0001970482807341289, |
|
"loss": 1.003, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.17096959265633965, |
|
"grad_norm": 0.12607312202453613, |
|
"learning_rate": 0.0001969508867155345, |
|
"loss": 0.953, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.1721170395869191, |
|
"grad_norm": 0.13689285516738892, |
|
"learning_rate": 0.00019685193674107452, |
|
"loss": 0.974, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.17326448651749857, |
|
"grad_norm": 0.1324600875377655, |
|
"learning_rate": 0.00019675143239878805, |
|
"loss": 0.9953, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.17441193344807804, |
|
"grad_norm": 0.13404534757137299, |
|
"learning_rate": 0.00019664937530166002, |
|
"loss": 0.9812, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.17555938037865748, |
|
"grad_norm": 0.13384674489498138, |
|
"learning_rate": 0.00019654576708759538, |
|
"loss": 0.9632, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.17670682730923695, |
|
"grad_norm": 0.1277248114347458, |
|
"learning_rate": 0.00019644060941939286, |
|
"loss": 0.9361, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.1778542742398164, |
|
"grad_norm": 0.13291795551776886, |
|
"learning_rate": 0.00019633390398471817, |
|
"loss": 0.9762, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.17900172117039587, |
|
"grad_norm": 0.13213355839252472, |
|
"learning_rate": 0.00019622565249607704, |
|
"loss": 0.9474, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.18014916810097534, |
|
"grad_norm": 0.12764747440814972, |
|
"learning_rate": 0.0001961158566907877, |
|
"loss": 0.9329, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.18129661503155478, |
|
"grad_norm": 0.13760751485824585, |
|
"learning_rate": 0.00019600451833095287, |
|
"loss": 0.9512, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.18244406196213425, |
|
"grad_norm": 0.13257338106632233, |
|
"learning_rate": 0.00019589163920343163, |
|
"loss": 0.944, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.18359150889271372, |
|
"grad_norm": 0.13907872140407562, |
|
"learning_rate": 0.00019577722111981078, |
|
"loss": 0.9839, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.18473895582329317, |
|
"grad_norm": 0.1304904818534851, |
|
"learning_rate": 0.00019566126591637558, |
|
"loss": 0.9763, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.18588640275387264, |
|
"grad_norm": 0.13839825987815857, |
|
"learning_rate": 0.0001955437754540805, |
|
"loss": 0.96, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.18703384968445208, |
|
"grad_norm": 0.12638252973556519, |
|
"learning_rate": 0.00019542475161851906, |
|
"loss": 0.9421, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.18818129661503155, |
|
"grad_norm": 0.12756413221359253, |
|
"learning_rate": 0.00019530419631989392, |
|
"loss": 0.9593, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.18932874354561102, |
|
"grad_norm": 0.12539435923099518, |
|
"learning_rate": 0.00019518211149298595, |
|
"loss": 0.9643, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.19047619047619047, |
|
"grad_norm": 0.12664757668972015, |
|
"learning_rate": 0.00019505849909712332, |
|
"loss": 0.9575, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.19162363740676994, |
|
"grad_norm": 0.12583203613758087, |
|
"learning_rate": 0.00019493336111615003, |
|
"loss": 0.9277, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.1927710843373494, |
|
"grad_norm": 0.1292329877614975, |
|
"learning_rate": 0.00019480669955839402, |
|
"loss": 0.9797, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.19391853126792885, |
|
"grad_norm": 0.12693488597869873, |
|
"learning_rate": 0.00019467851645663494, |
|
"loss": 0.9749, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.19506597819850832, |
|
"grad_norm": 0.13166065514087677, |
|
"learning_rate": 0.00019454881386807163, |
|
"loss": 0.9645, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.1962134251290878, |
|
"grad_norm": 0.12855538725852966, |
|
"learning_rate": 0.00019441759387428903, |
|
"loss": 0.9463, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.19736087205966724, |
|
"grad_norm": 0.12849357724189758, |
|
"learning_rate": 0.00019428485858122472, |
|
"loss": 0.988, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.1985083189902467, |
|
"grad_norm": 0.12299315631389618, |
|
"learning_rate": 0.00019415061011913523, |
|
"loss": 0.9733, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.19965576592082615, |
|
"grad_norm": 0.13390155136585236, |
|
"learning_rate": 0.00019401485064256176, |
|
"loss": 0.9502, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.20080321285140562, |
|
"grad_norm": 0.13177447021007538, |
|
"learning_rate": 0.0001938775823302957, |
|
"loss": 0.9706, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.2019506597819851, |
|
"grad_norm": 0.13233035802841187, |
|
"learning_rate": 0.00019373880738534358, |
|
"loss": 1.0028, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.20309810671256454, |
|
"grad_norm": 0.12969984114170074, |
|
"learning_rate": 0.00019359852803489168, |
|
"loss": 0.9423, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.204245553643144, |
|
"grad_norm": 0.13504785299301147, |
|
"learning_rate": 0.0001934567465302704, |
|
"loss": 0.9708, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.20539300057372348, |
|
"grad_norm": 0.12698131799697876, |
|
"learning_rate": 0.00019331346514691813, |
|
"loss": 0.9548, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.20654044750430292, |
|
"grad_norm": 0.1307503879070282, |
|
"learning_rate": 0.00019316868618434455, |
|
"loss": 0.9595, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.2076878944348824, |
|
"grad_norm": 0.12671291828155518, |
|
"learning_rate": 0.00019302241196609397, |
|
"loss": 0.9456, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.20883534136546184, |
|
"grad_norm": 0.1356162577867508, |
|
"learning_rate": 0.0001928746448397078, |
|
"loss": 0.9805, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.2099827882960413, |
|
"grad_norm": 0.1294623613357544, |
|
"learning_rate": 0.00019272538717668715, |
|
"loss": 0.9831, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.21113023522662078, |
|
"grad_norm": 0.13363958895206451, |
|
"learning_rate": 0.00019257464137245446, |
|
"loss": 0.9971, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.21227768215720022, |
|
"grad_norm": 0.1361180692911148, |
|
"learning_rate": 0.0001924224098463153, |
|
"loss": 0.9393, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.2134251290877797, |
|
"grad_norm": 0.1273190975189209, |
|
"learning_rate": 0.00019226869504141943, |
|
"loss": 0.9613, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.21457257601835916, |
|
"grad_norm": 0.1308612823486328, |
|
"learning_rate": 0.00019211349942472165, |
|
"loss": 0.9703, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.2157200229489386, |
|
"grad_norm": 0.13174669444561005, |
|
"learning_rate": 0.00019195682548694208, |
|
"loss": 0.9693, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.21686746987951808, |
|
"grad_norm": 0.1349845975637436, |
|
"learning_rate": 0.00019179867574252638, |
|
"loss": 0.986, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.21801491681009752, |
|
"grad_norm": 0.13183628022670746, |
|
"learning_rate": 0.00019163905272960528, |
|
"loss": 0.9804, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.219162363740677, |
|
"grad_norm": 0.1328561156988144, |
|
"learning_rate": 0.0001914779590099538, |
|
"loss": 0.9582, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.22030981067125646, |
|
"grad_norm": 0.1274995058774948, |
|
"learning_rate": 0.00019131539716895024, |
|
"loss": 0.9245, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.2214572576018359, |
|
"grad_norm": 0.13776043057441711, |
|
"learning_rate": 0.00019115136981553464, |
|
"loss": 0.9683, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.22260470453241538, |
|
"grad_norm": 0.1258002072572708, |
|
"learning_rate": 0.00019098587958216688, |
|
"loss": 0.968, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.22375215146299485, |
|
"grad_norm": 0.13454443216323853, |
|
"learning_rate": 0.00019081892912478456, |
|
"loss": 0.9407, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.2248995983935743, |
|
"grad_norm": 0.13280132412910461, |
|
"learning_rate": 0.00019065052112276018, |
|
"loss": 0.9714, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.22604704532415376, |
|
"grad_norm": 0.1270975023508072, |
|
"learning_rate": 0.00019048065827885827, |
|
"loss": 0.942, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.22719449225473323, |
|
"grad_norm": 0.13367241621017456, |
|
"learning_rate": 0.000190309343319192, |
|
"loss": 0.981, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.22834193918531268, |
|
"grad_norm": 0.13318173587322235, |
|
"learning_rate": 0.00019013657899317942, |
|
"loss": 0.9668, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.22948938611589215, |
|
"grad_norm": 0.1365250200033188, |
|
"learning_rate": 0.0001899623680734993, |
|
"loss": 0.939, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.2306368330464716, |
|
"grad_norm": 0.13613171875476837, |
|
"learning_rate": 0.00018978671335604665, |
|
"loss": 0.9375, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.23178427997705106, |
|
"grad_norm": 0.12359146773815155, |
|
"learning_rate": 0.00018960961765988792, |
|
"loss": 0.9686, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.23293172690763053, |
|
"grad_norm": 0.12440812587738037, |
|
"learning_rate": 0.00018943108382721562, |
|
"loss": 0.9284, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.23407917383820998, |
|
"grad_norm": 0.13303610682487488, |
|
"learning_rate": 0.00018925111472330283, |
|
"loss": 0.9631, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.23522662076878945, |
|
"grad_norm": 0.13873472809791565, |
|
"learning_rate": 0.00018906971323645713, |
|
"loss": 0.9854, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.23637406769936892, |
|
"grad_norm": 0.13052476942539215, |
|
"learning_rate": 0.00018888688227797432, |
|
"loss": 0.9422, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.23752151462994836, |
|
"grad_norm": 0.1390550136566162, |
|
"learning_rate": 0.00018870262478209163, |
|
"loss": 0.9847, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.23866896156052783, |
|
"grad_norm": 0.13032066822052002, |
|
"learning_rate": 0.00018851694370594069, |
|
"loss": 0.9539, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.23981640849110727, |
|
"grad_norm": 0.13494379818439484, |
|
"learning_rate": 0.00018832984202949996, |
|
"loss": 0.9427, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.24096385542168675, |
|
"grad_norm": 0.13439466059207916, |
|
"learning_rate": 0.00018814132275554713, |
|
"loss": 0.992, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.24211130235226622, |
|
"grad_norm": 0.1290155053138733, |
|
"learning_rate": 0.0001879513889096106, |
|
"loss": 0.9436, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.24325874928284566, |
|
"grad_norm": 0.13683564960956573, |
|
"learning_rate": 0.00018776004353992124, |
|
"loss": 0.9595, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.24440619621342513, |
|
"grad_norm": 0.12635931372642517, |
|
"learning_rate": 0.00018756728971736327, |
|
"loss": 0.9596, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.2455536431440046, |
|
"grad_norm": 0.12940926849842072, |
|
"learning_rate": 0.00018737313053542512, |
|
"loss": 0.976, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.24670109007458405, |
|
"grad_norm": 0.13413465023040771, |
|
"learning_rate": 0.0001871775691101496, |
|
"loss": 0.9374, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.24784853700516352, |
|
"grad_norm": 0.14798474311828613, |
|
"learning_rate": 0.00018698060858008403, |
|
"loss": 0.9331, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.24899598393574296, |
|
"grad_norm": 0.13452093303203583, |
|
"learning_rate": 0.00018678225210622986, |
|
"loss": 0.9893, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.25014343086632246, |
|
"grad_norm": 0.13061241805553436, |
|
"learning_rate": 0.00018658250287199196, |
|
"loss": 0.9793, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.2512908777969019, |
|
"grad_norm": 0.13494077324867249, |
|
"learning_rate": 0.00018638136408312728, |
|
"loss": 0.9793, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.25243832472748134, |
|
"grad_norm": 0.12755267322063446, |
|
"learning_rate": 0.0001861788389676939, |
|
"loss": 0.957, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.2535857716580608, |
|
"grad_norm": 0.13325680792331696, |
|
"learning_rate": 0.00018597493077599867, |
|
"loss": 0.9778, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.2547332185886403, |
|
"grad_norm": 0.13293135166168213, |
|
"learning_rate": 0.00018576964278054544, |
|
"loss": 0.9538, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.25588066551921973, |
|
"grad_norm": 0.13516201078891754, |
|
"learning_rate": 0.00018556297827598242, |
|
"loss": 0.9815, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.2570281124497992, |
|
"grad_norm": 0.12776044011116028, |
|
"learning_rate": 0.00018535494057904915, |
|
"loss": 0.9513, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.25817555938037867, |
|
"grad_norm": 0.12933199107646942, |
|
"learning_rate": 0.00018514553302852356, |
|
"loss": 0.9749, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.2593230063109581, |
|
"grad_norm": 0.13189588487148285, |
|
"learning_rate": 0.00018493475898516813, |
|
"loss": 0.9604, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.26047045324153756, |
|
"grad_norm": 0.12965457141399384, |
|
"learning_rate": 0.00018472262183167614, |
|
"loss": 0.9882, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.26161790017211706, |
|
"grad_norm": 0.13817626237869263, |
|
"learning_rate": 0.00018450912497261723, |
|
"loss": 0.9508, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.2627653471026965, |
|
"grad_norm": 0.12874102592468262, |
|
"learning_rate": 0.00018429427183438288, |
|
"loss": 0.9871, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.26391279403327594, |
|
"grad_norm": 0.12571579217910767, |
|
"learning_rate": 0.00018407806586513134, |
|
"loss": 0.9739, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.26506024096385544, |
|
"grad_norm": 0.13456568121910095, |
|
"learning_rate": 0.00018386051053473232, |
|
"loss": 0.9752, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.2662076878944349, |
|
"grad_norm": 0.1293318271636963, |
|
"learning_rate": 0.00018364160933471134, |
|
"loss": 0.9483, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.26735513482501433, |
|
"grad_norm": 0.13392923772335052, |
|
"learning_rate": 0.0001834213657781936, |
|
"loss": 0.9443, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.2685025817555938, |
|
"grad_norm": 0.1313287764787674, |
|
"learning_rate": 0.00018319978339984767, |
|
"loss": 0.9485, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.26965002868617327, |
|
"grad_norm": 0.13383394479751587, |
|
"learning_rate": 0.0001829768657558288, |
|
"loss": 0.9647, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.2707974756167527, |
|
"grad_norm": 0.12766633927822113, |
|
"learning_rate": 0.00018275261642372175, |
|
"loss": 0.9627, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.2719449225473322, |
|
"grad_norm": 0.12958547472953796, |
|
"learning_rate": 0.0001825270390024834, |
|
"loss": 0.9628, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.27309236947791166, |
|
"grad_norm": 0.1323728859424591, |
|
"learning_rate": 0.00018230013711238513, |
|
"loss": 0.9905, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.2742398164084911, |
|
"grad_norm": 0.13171570003032684, |
|
"learning_rate": 0.00018207191439495438, |
|
"loss": 0.9514, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.27538726333907054, |
|
"grad_norm": 0.13061773777008057, |
|
"learning_rate": 0.00018184237451291665, |
|
"loss": 0.9644, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.27653471026965004, |
|
"grad_norm": 0.1268092542886734, |
|
"learning_rate": 0.00018161152115013637, |
|
"loss": 0.9399, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.2776821572002295, |
|
"grad_norm": 0.12714898586273193, |
|
"learning_rate": 0.00018137935801155794, |
|
"loss": 1.0103, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.27882960413080893, |
|
"grad_norm": 0.16152754426002502, |
|
"learning_rate": 0.0001811458888231462, |
|
"loss": 0.9428, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.2799770510613884, |
|
"grad_norm": 0.13298989832401276, |
|
"learning_rate": 0.0001809111173318267, |
|
"loss": 0.9532, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.28112449799196787, |
|
"grad_norm": 0.1261643022298813, |
|
"learning_rate": 0.00018067504730542551, |
|
"loss": 1.0076, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.2822719449225473, |
|
"grad_norm": 0.13440310955047607, |
|
"learning_rate": 0.0001804376825326088, |
|
"loss": 0.962, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.2834193918531268, |
|
"grad_norm": 0.13871833682060242, |
|
"learning_rate": 0.00018019902682282193, |
|
"loss": 0.9581, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.28456683878370626, |
|
"grad_norm": 0.12701858580112457, |
|
"learning_rate": 0.0001799590840062285, |
|
"loss": 0.9703, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.2857142857142857, |
|
"grad_norm": 0.13460960984230042, |
|
"learning_rate": 0.00017971785793364866, |
|
"loss": 0.9483, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.2868617326448652, |
|
"grad_norm": 0.12756766378879547, |
|
"learning_rate": 0.0001794753524764975, |
|
"loss": 0.9693, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.28800917957544464, |
|
"grad_norm": 0.12904788553714752, |
|
"learning_rate": 0.00017923157152672278, |
|
"loss": 0.9874, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.2891566265060241, |
|
"grad_norm": 0.1294935941696167, |
|
"learning_rate": 0.00017898651899674254, |
|
"loss": 0.9746, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.2903040734366036, |
|
"grad_norm": 0.13519194722175598, |
|
"learning_rate": 0.00017874019881938233, |
|
"loss": 0.9489, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.291451520367183, |
|
"grad_norm": 0.1270766407251358, |
|
"learning_rate": 0.000178492614947812, |
|
"loss": 0.9573, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.29259896729776247, |
|
"grad_norm": 0.1287229061126709, |
|
"learning_rate": 0.00017824377135548236, |
|
"loss": 0.9544, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.2937464142283419, |
|
"grad_norm": 0.1388266533613205, |
|
"learning_rate": 0.00017799367203606128, |
|
"loss": 0.9781, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.2948938611589214, |
|
"grad_norm": 0.1323292851448059, |
|
"learning_rate": 0.00017774232100336982, |
|
"loss": 0.9783, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.29604130808950085, |
|
"grad_norm": 0.12682774662971497, |
|
"learning_rate": 0.00017748972229131757, |
|
"loss": 0.9363, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.2971887550200803, |
|
"grad_norm": 0.12563958764076233, |
|
"learning_rate": 0.000177235879953838, |
|
"loss": 0.972, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.2983362019506598, |
|
"grad_norm": 0.13384099304676056, |
|
"learning_rate": 0.00017698079806482343, |
|
"loss": 0.9991, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.29948364888123924, |
|
"grad_norm": 0.12890778481960297, |
|
"learning_rate": 0.0001767244807180597, |
|
"loss": 0.9437, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.3006310958118187, |
|
"grad_norm": 0.13081791996955872, |
|
"learning_rate": 0.00017646693202716033, |
|
"loss": 0.9583, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.3017785427423982, |
|
"grad_norm": 0.13217350840568542, |
|
"learning_rate": 0.0001762081561255005, |
|
"loss": 0.9599, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.3029259896729776, |
|
"grad_norm": 0.1300850510597229, |
|
"learning_rate": 0.00017594815716615093, |
|
"loss": 0.96, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.30407343660355707, |
|
"grad_norm": 0.13618028163909912, |
|
"learning_rate": 0.000175686939321811, |
|
"loss": 0.9466, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.30522088353413657, |
|
"grad_norm": 0.13294175267219543, |
|
"learning_rate": 0.00017542450678474184, |
|
"loss": 0.9794, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.306368330464716, |
|
"grad_norm": 0.12827391922473907, |
|
"learning_rate": 0.00017516086376669917, |
|
"loss": 0.9388, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.30751577739529545, |
|
"grad_norm": 0.1285424679517746, |
|
"learning_rate": 0.00017489601449886547, |
|
"loss": 0.9533, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.30866322432587495, |
|
"grad_norm": 0.13624240458011627, |
|
"learning_rate": 0.00017462996323178235, |
|
"loss": 0.93, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.3098106712564544, |
|
"grad_norm": 0.13076668977737427, |
|
"learning_rate": 0.00017436271423528206, |
|
"loss": 0.9547, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.31095811818703384, |
|
"grad_norm": 0.12973956763744354, |
|
"learning_rate": 0.0001740942717984192, |
|
"loss": 1.0114, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.31210556511761334, |
|
"grad_norm": 0.13649657368659973, |
|
"learning_rate": 0.00017382464022940182, |
|
"loss": 0.9735, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.3132530120481928, |
|
"grad_norm": 0.13023148477077484, |
|
"learning_rate": 0.00017355382385552206, |
|
"loss": 0.9856, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.3144004589787722, |
|
"grad_norm": 0.1284298449754715, |
|
"learning_rate": 0.0001732818270230871, |
|
"loss": 0.966, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.31554790590935167, |
|
"grad_norm": 0.1332443803548813, |
|
"learning_rate": 0.000173008654097349, |
|
"loss": 0.9429, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.31669535283993117, |
|
"grad_norm": 0.13395999372005463, |
|
"learning_rate": 0.000172734309462435, |
|
"loss": 0.9748, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.3178427997705106, |
|
"grad_norm": 0.12902694940567017, |
|
"learning_rate": 0.00017245879752127692, |
|
"loss": 0.9386, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.31899024670109005, |
|
"grad_norm": 0.12902621924877167, |
|
"learning_rate": 0.0001721821226955405, |
|
"loss": 0.9216, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.32013769363166955, |
|
"grad_norm": 0.1328728049993515, |
|
"learning_rate": 0.00017190428942555463, |
|
"loss": 0.943, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.321285140562249, |
|
"grad_norm": 0.12963657081127167, |
|
"learning_rate": 0.0001716253021702399, |
|
"loss": 0.9254, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.32243258749282844, |
|
"grad_norm": 0.12427925318479538, |
|
"learning_rate": 0.0001713451654070371, |
|
"loss": 0.9665, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.32358003442340794, |
|
"grad_norm": 0.12566518783569336, |
|
"learning_rate": 0.0001710638836318354, |
|
"loss": 0.9778, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.3247274813539874, |
|
"grad_norm": 0.1280212700366974, |
|
"learning_rate": 0.00017078146135890014, |
|
"loss": 0.9761, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.3258749282845668, |
|
"grad_norm": 0.14105786383152008, |
|
"learning_rate": 0.0001704979031208004, |
|
"loss": 0.9658, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.3270223752151463, |
|
"grad_norm": 0.1319873183965683, |
|
"learning_rate": 0.0001702132134683363, |
|
"loss": 0.96, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.32816982214572576, |
|
"grad_norm": 0.12675578892230988, |
|
"learning_rate": 0.00016992739697046586, |
|
"loss": 0.9478, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.3293172690763052, |
|
"grad_norm": 0.1259857565164566, |
|
"learning_rate": 0.00016964045821423178, |
|
"loss": 0.9258, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.3304647160068847, |
|
"grad_norm": 0.1366124451160431, |
|
"learning_rate": 0.00016935240180468775, |
|
"loss": 0.9556, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.33161216293746415, |
|
"grad_norm": 0.13446134328842163, |
|
"learning_rate": 0.00016906323236482465, |
|
"loss": 0.9675, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.3327596098680436, |
|
"grad_norm": 0.13119171559810638, |
|
"learning_rate": 0.00016877295453549614, |
|
"loss": 0.9208, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.33390705679862304, |
|
"grad_norm": 0.13426977396011353, |
|
"learning_rate": 0.00016848157297534453, |
|
"loss": 0.9755, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.33505450372920254, |
|
"grad_norm": 0.1286066770553589, |
|
"learning_rate": 0.0001681890923607256, |
|
"loss": 0.9624, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.336201950659782, |
|
"grad_norm": 0.1409013718366623, |
|
"learning_rate": 0.00016789551738563384, |
|
"loss": 0.9616, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.3373493975903614, |
|
"grad_norm": 0.1301148682832718, |
|
"learning_rate": 0.00016760085276162708, |
|
"loss": 0.9543, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.3384968445209409, |
|
"grad_norm": 0.12759418785572052, |
|
"learning_rate": 0.00016730510321775075, |
|
"loss": 0.9395, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.33964429145152036, |
|
"grad_norm": 0.12628066539764404, |
|
"learning_rate": 0.00016700827350046206, |
|
"loss": 0.951, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.3407917383820998, |
|
"grad_norm": 0.13049326837062836, |
|
"learning_rate": 0.00016671036837355386, |
|
"loss": 0.9897, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.3419391853126793, |
|
"grad_norm": 0.13148042559623718, |
|
"learning_rate": 0.00016641139261807818, |
|
"loss": 0.9477, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.34308663224325875, |
|
"grad_norm": 0.1383514553308487, |
|
"learning_rate": 0.00016611135103226937, |
|
"loss": 0.9836, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.3442340791738382, |
|
"grad_norm": 0.13166822493076324, |
|
"learning_rate": 0.00016581024843146725, |
|
"loss": 0.9857, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.3453815261044177, |
|
"grad_norm": 0.1284855753183365, |
|
"learning_rate": 0.00016550808964803978, |
|
"loss": 0.9568, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.34652897303499713, |
|
"grad_norm": 0.13784931600093842, |
|
"learning_rate": 0.00016520487953130552, |
|
"loss": 0.9829, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.3476764199655766, |
|
"grad_norm": 0.1401350200176239, |
|
"learning_rate": 0.00016490062294745571, |
|
"loss": 0.9468, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.3488238668961561, |
|
"grad_norm": 0.13035733997821808, |
|
"learning_rate": 0.00016459532477947634, |
|
"loss": 0.9681, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.3499713138267355, |
|
"grad_norm": 0.12921980023384094, |
|
"learning_rate": 0.00016428898992706955, |
|
"loss": 0.9839, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.35111876075731496, |
|
"grad_norm": 0.12521222233772278, |
|
"learning_rate": 0.00016398162330657533, |
|
"loss": 0.97, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.35226620768789446, |
|
"grad_norm": 0.13276994228363037, |
|
"learning_rate": 0.0001636732298508922, |
|
"loss": 0.9453, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.3534136546184739, |
|
"grad_norm": 0.1314193159341812, |
|
"learning_rate": 0.0001633638145093984, |
|
"loss": 0.9544, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.35456110154905335, |
|
"grad_norm": 0.13540154695510864, |
|
"learning_rate": 0.00016305338224787235, |
|
"loss": 0.9958, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.3557085484796328, |
|
"grad_norm": 0.13043038547039032, |
|
"learning_rate": 0.0001627419380484128, |
|
"loss": 0.9166, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.3568559954102123, |
|
"grad_norm": 0.13303428888320923, |
|
"learning_rate": 0.00016242948690935912, |
|
"loss": 0.9615, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.35800344234079173, |
|
"grad_norm": 0.13199162483215332, |
|
"learning_rate": 0.00016211603384521083, |
|
"loss": 0.9632, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.3591508892713712, |
|
"grad_norm": 0.1341785341501236, |
|
"learning_rate": 0.00016180158388654742, |
|
"loss": 0.9744, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.3602983362019507, |
|
"grad_norm": 0.13043268024921417, |
|
"learning_rate": 0.00016148614207994735, |
|
"loss": 0.9494, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.3614457831325301, |
|
"grad_norm": 0.13018083572387695, |
|
"learning_rate": 0.00016116971348790712, |
|
"loss": 0.9418, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.36259323006310956, |
|
"grad_norm": 0.1259637027978897, |
|
"learning_rate": 0.0001608523031887601, |
|
"loss": 0.9399, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.36374067699368906, |
|
"grad_norm": 0.13221842050552368, |
|
"learning_rate": 0.00016053391627659505, |
|
"loss": 0.9549, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.3648881239242685, |
|
"grad_norm": 0.14202679693698883, |
|
"learning_rate": 0.0001602145578611742, |
|
"loss": 0.9279, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.36603557085484795, |
|
"grad_norm": 0.13095086812973022, |
|
"learning_rate": 0.00015989423306785142, |
|
"loss": 0.9788, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.36718301778542745, |
|
"grad_norm": 0.12878695130348206, |
|
"learning_rate": 0.00015957294703748982, |
|
"loss": 0.9564, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.3683304647160069, |
|
"grad_norm": 0.12988603115081787, |
|
"learning_rate": 0.00015925070492637944, |
|
"loss": 0.9672, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.36947791164658633, |
|
"grad_norm": 0.13579297065734863, |
|
"learning_rate": 0.0001589275119061542, |
|
"loss": 0.9381, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.37062535857716583, |
|
"grad_norm": 0.12680459022521973, |
|
"learning_rate": 0.00015860337316370916, |
|
"loss": 0.9512, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.3717728055077453, |
|
"grad_norm": 0.12983225286006927, |
|
"learning_rate": 0.0001582782939011173, |
|
"loss": 0.9539, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.3729202524383247, |
|
"grad_norm": 0.13091208040714264, |
|
"learning_rate": 0.00015795227933554568, |
|
"loss": 0.9675, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.37406769936890416, |
|
"grad_norm": 0.13072653114795685, |
|
"learning_rate": 0.00015762533469917216, |
|
"loss": 0.9327, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.37521514629948366, |
|
"grad_norm": 0.13025379180908203, |
|
"learning_rate": 0.00015729746523910113, |
|
"loss": 0.9623, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.3763625932300631, |
|
"grad_norm": 0.14888666570186615, |
|
"learning_rate": 0.00015696867621727942, |
|
"loss": 0.9436, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.37751004016064255, |
|
"grad_norm": 0.1301380842924118, |
|
"learning_rate": 0.00015663897291041175, |
|
"loss": 0.9613, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.37865748709122204, |
|
"grad_norm": 0.12864987552165985, |
|
"learning_rate": 0.00015630836060987624, |
|
"loss": 0.9701, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.3798049340218015, |
|
"grad_norm": 0.13158966600894928, |
|
"learning_rate": 0.00015597684462163923, |
|
"loss": 1.0041, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.38095238095238093, |
|
"grad_norm": 0.12787610292434692, |
|
"learning_rate": 0.0001556444302661704, |
|
"loss": 0.942, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.38209982788296043, |
|
"grad_norm": 0.1329614371061325, |
|
"learning_rate": 0.00015531112287835717, |
|
"loss": 0.9607, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.3832472748135399, |
|
"grad_norm": 0.13437345623970032, |
|
"learning_rate": 0.00015497692780741908, |
|
"loss": 0.9657, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.3843947217441193, |
|
"grad_norm": 0.13434943556785583, |
|
"learning_rate": 0.0001546418504168222, |
|
"loss": 0.9635, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.3855421686746988, |
|
"grad_norm": 0.129192054271698, |
|
"learning_rate": 0.00015430589608419264, |
|
"loss": 0.9527, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.38668961560527826, |
|
"grad_norm": 0.1313447207212448, |
|
"learning_rate": 0.00015396907020123068, |
|
"loss": 0.942, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.3878370625358577, |
|
"grad_norm": 0.13218559324741364, |
|
"learning_rate": 0.00015363137817362392, |
|
"loss": 0.9602, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.3889845094664372, |
|
"grad_norm": 0.14416509866714478, |
|
"learning_rate": 0.00015329282542096064, |
|
"loss": 0.9419, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.39013195639701664, |
|
"grad_norm": 0.14515794813632965, |
|
"learning_rate": 0.00015295341737664285, |
|
"loss": 0.9381, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.3912794033275961, |
|
"grad_norm": 0.13562296330928802, |
|
"learning_rate": 0.000152613159487799, |
|
"loss": 0.9594, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.3924268502581756, |
|
"grad_norm": 0.13049104809761047, |
|
"learning_rate": 0.00015227205721519675, |
|
"loss": 0.9472, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.39357429718875503, |
|
"grad_norm": 0.13842852413654327, |
|
"learning_rate": 0.00015193011603315503, |
|
"loss": 0.995, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.3947217441193345, |
|
"grad_norm": 0.14135673642158508, |
|
"learning_rate": 0.00015158734142945644, |
|
"loss": 0.9992, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.3958691910499139, |
|
"grad_norm": 0.13362735509872437, |
|
"learning_rate": 0.0001512437389052591, |
|
"loss": 0.9279, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.3970166379804934, |
|
"grad_norm": 0.1268196702003479, |
|
"learning_rate": 0.0001508993139750083, |
|
"loss": 0.9369, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.39816408491107286, |
|
"grad_norm": 0.12576265633106232, |
|
"learning_rate": 0.0001505540721663481, |
|
"loss": 0.9777, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.3993115318416523, |
|
"grad_norm": 0.1377476304769516, |
|
"learning_rate": 0.0001502080190200325, |
|
"loss": 0.96, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.4004589787722318, |
|
"grad_norm": 0.12954863905906677, |
|
"learning_rate": 0.00014986116008983664, |
|
"loss": 0.9612, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.40160642570281124, |
|
"grad_norm": 0.13743706047534943, |
|
"learning_rate": 0.00014951350094246762, |
|
"loss": 0.977, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.4027538726333907, |
|
"grad_norm": 0.1272636353969574, |
|
"learning_rate": 0.0001491650471574751, |
|
"loss": 0.9603, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.4039013195639702, |
|
"grad_norm": 0.13247370719909668, |
|
"learning_rate": 0.00014881580432716182, |
|
"loss": 0.9251, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.40504876649454963, |
|
"grad_norm": 0.13983896374702454, |
|
"learning_rate": 0.00014846577805649388, |
|
"loss": 0.9556, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.40619621342512907, |
|
"grad_norm": 0.1289101541042328, |
|
"learning_rate": 0.00014811497396301072, |
|
"loss": 0.9391, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.40734366035570857, |
|
"grad_norm": 0.12569357454776764, |
|
"learning_rate": 0.00014776339767673491, |
|
"loss": 0.9301, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.408491107286288, |
|
"grad_norm": 0.1362708956003189, |
|
"learning_rate": 0.000147411054840082, |
|
"loss": 0.9802, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.40963855421686746, |
|
"grad_norm": 0.13561737537384033, |
|
"learning_rate": 0.00014705795110776974, |
|
"loss": 0.9624, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.41078600114744696, |
|
"grad_norm": 0.13607259094715118, |
|
"learning_rate": 0.0001467040921467275, |
|
"loss": 0.9543, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.4119334480780264, |
|
"grad_norm": 0.13344360888004303, |
|
"learning_rate": 0.00014634948363600518, |
|
"loss": 0.959, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.41308089500860584, |
|
"grad_norm": 0.14161522686481476, |
|
"learning_rate": 0.00014599413126668213, |
|
"loss": 0.9432, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.4142283419391853, |
|
"grad_norm": 0.13088949024677277, |
|
"learning_rate": 0.00014563804074177588, |
|
"loss": 0.9647, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.4153757888697648, |
|
"grad_norm": 0.13591496646404266, |
|
"learning_rate": 0.00014528121777615058, |
|
"loss": 0.9531, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.4165232358003442, |
|
"grad_norm": 0.1339530646800995, |
|
"learning_rate": 0.0001449236680964251, |
|
"loss": 0.9549, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.41767068273092367, |
|
"grad_norm": 0.12666349112987518, |
|
"learning_rate": 0.00014456539744088138, |
|
"loss": 0.9424, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.41881812966150317, |
|
"grad_norm": 0.12611252069473267, |
|
"learning_rate": 0.00014420641155937224, |
|
"loss": 0.9359, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.4199655765920826, |
|
"grad_norm": 0.13051684200763702, |
|
"learning_rate": 0.00014384671621322915, |
|
"loss": 0.9488, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.42111302352266206, |
|
"grad_norm": 0.13898026943206787, |
|
"learning_rate": 0.00014348631717516953, |
|
"loss": 0.9748, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.42226047045324155, |
|
"grad_norm": 0.13012805581092834, |
|
"learning_rate": 0.00014312522022920444, |
|
"loss": 0.9631, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.423407917383821, |
|
"grad_norm": 0.12887822091579437, |
|
"learning_rate": 0.00014276343117054563, |
|
"loss": 0.9532, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.42455536431440044, |
|
"grad_norm": 0.14254815876483917, |
|
"learning_rate": 0.00014240095580551234, |
|
"loss": 1.0004, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.42570281124497994, |
|
"grad_norm": 0.13232548534870148, |
|
"learning_rate": 0.0001420377999514384, |
|
"loss": 0.9644, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.4268502581755594, |
|
"grad_norm": 0.13256525993347168, |
|
"learning_rate": 0.00014167396943657874, |
|
"loss": 0.9671, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.4279977051061388, |
|
"grad_norm": 0.13868069648742676, |
|
"learning_rate": 0.0001413094701000158, |
|
"loss": 0.924, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.4291451520367183, |
|
"grad_norm": 0.13542011380195618, |
|
"learning_rate": 0.00014094430779156597, |
|
"loss": 0.9751, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.43029259896729777, |
|
"grad_norm": 0.15172503888607025, |
|
"learning_rate": 0.00014057848837168547, |
|
"loss": 0.9811, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.4314400458978772, |
|
"grad_norm": 0.13170887529850006, |
|
"learning_rate": 0.00014021201771137663, |
|
"loss": 0.9598, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.4325874928284567, |
|
"grad_norm": 0.13413281738758087, |
|
"learning_rate": 0.00013984490169209333, |
|
"loss": 0.9251, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.43373493975903615, |
|
"grad_norm": 0.13315904140472412, |
|
"learning_rate": 0.00013947714620564678, |
|
"loss": 0.9983, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.4348823866896156, |
|
"grad_norm": 0.131989523768425, |
|
"learning_rate": 0.00013910875715411098, |
|
"loss": 0.9411, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.43602983362019504, |
|
"grad_norm": 0.1380765587091446, |
|
"learning_rate": 0.000138739740449728, |
|
"loss": 0.9292, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.43717728055077454, |
|
"grad_norm": 0.13661834597587585, |
|
"learning_rate": 0.0001383701020148129, |
|
"loss": 0.9477, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 0.438324727481354, |
|
"grad_norm": 0.13486482203006744, |
|
"learning_rate": 0.000137999847781659, |
|
"loss": 1.0169, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.4394721744119334, |
|
"grad_norm": 0.13214510679244995, |
|
"learning_rate": 0.00013762898369244238, |
|
"loss": 0.9418, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 0.4406196213425129, |
|
"grad_norm": 0.12652263045310974, |
|
"learning_rate": 0.00013725751569912682, |
|
"loss": 0.9679, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.44176706827309237, |
|
"grad_norm": 0.13453719019889832, |
|
"learning_rate": 0.00013688544976336783, |
|
"loss": 0.9318, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.4429145152036718, |
|
"grad_norm": 0.14246058464050293, |
|
"learning_rate": 0.00013651279185641752, |
|
"loss": 0.9544, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.4440619621342513, |
|
"grad_norm": 0.13308945298194885, |
|
"learning_rate": 0.0001361395479590283, |
|
"loss": 0.9817, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 0.44520940906483075, |
|
"grad_norm": 0.12958864867687225, |
|
"learning_rate": 0.00013576572406135722, |
|
"loss": 0.9555, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.4463568559954102, |
|
"grad_norm": 0.1342882663011551, |
|
"learning_rate": 0.00013539132616286956, |
|
"loss": 0.9723, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 0.4475043029259897, |
|
"grad_norm": 0.12933650612831116, |
|
"learning_rate": 0.00013501636027224282, |
|
"loss": 0.9516, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.44865174985656914, |
|
"grad_norm": 0.1319791078567505, |
|
"learning_rate": 0.0001346408324072701, |
|
"loss": 0.9619, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 0.4497991967871486, |
|
"grad_norm": 0.13945066928863525, |
|
"learning_rate": 0.0001342647485947635, |
|
"loss": 0.9109, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.4509466437177281, |
|
"grad_norm": 0.13085277378559113, |
|
"learning_rate": 0.00013388811487045766, |
|
"loss": 0.968, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 0.4520940906483075, |
|
"grad_norm": 0.12879082560539246, |
|
"learning_rate": 0.0001335109372789125, |
|
"loss": 0.9263, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.45324153757888697, |
|
"grad_norm": 0.13467159867286682, |
|
"learning_rate": 0.00013313322187341652, |
|
"loss": 0.9529, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.45438898450946646, |
|
"grad_norm": 0.12812353670597076, |
|
"learning_rate": 0.00013275497471588953, |
|
"loss": 0.9455, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.4555364314400459, |
|
"grad_norm": 0.1305771768093109, |
|
"learning_rate": 0.0001323762018767854, |
|
"loss": 0.9627, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 0.45668387837062535, |
|
"grad_norm": 0.14161469042301178, |
|
"learning_rate": 0.00013199690943499457, |
|
"loss": 0.9981, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.4578313253012048, |
|
"grad_norm": 0.13396026194095612, |
|
"learning_rate": 0.0001316171034777466, |
|
"loss": 0.9786, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 0.4589787722317843, |
|
"grad_norm": 0.18823152780532837, |
|
"learning_rate": 0.00013123679010051232, |
|
"loss": 0.9294, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.46012621916236374, |
|
"grad_norm": 0.137470543384552, |
|
"learning_rate": 0.00013085597540690618, |
|
"loss": 0.9393, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 0.4612736660929432, |
|
"grad_norm": 0.1302119791507721, |
|
"learning_rate": 0.00013047466550858812, |
|
"loss": 0.9666, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.4624211130235227, |
|
"grad_norm": 0.13366107642650604, |
|
"learning_rate": 0.00013009286652516575, |
|
"loss": 0.9243, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 0.4635685599541021, |
|
"grad_norm": 0.14005546271800995, |
|
"learning_rate": 0.00012971058458409576, |
|
"loss": 0.9415, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.46471600688468157, |
|
"grad_norm": 0.1359815001487732, |
|
"learning_rate": 0.00012932782582058584, |
|
"loss": 0.9681, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 0.46586345381526106, |
|
"grad_norm": 0.12984395027160645, |
|
"learning_rate": 0.00012894459637749627, |
|
"loss": 0.9475, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.4670109007458405, |
|
"grad_norm": 0.12904886901378632, |
|
"learning_rate": 0.0001285609024052411, |
|
"loss": 0.9584, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 0.46815834767641995, |
|
"grad_norm": 0.13644804060459137, |
|
"learning_rate": 0.00012817675006168963, |
|
"loss": 0.9757, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.46930579460699945, |
|
"grad_norm": 0.1333729475736618, |
|
"learning_rate": 0.00012779214551206746, |
|
"loss": 0.9733, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 0.4704532415375789, |
|
"grad_norm": 0.128279909491539, |
|
"learning_rate": 0.0001274070949288577, |
|
"loss": 0.9772, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.47160068846815834, |
|
"grad_norm": 0.15798795223236084, |
|
"learning_rate": 0.00012702160449170165, |
|
"loss": 0.9499, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 0.47274813539873783, |
|
"grad_norm": 0.12967512011528015, |
|
"learning_rate": 0.0001266356803873, |
|
"loss": 0.9532, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.4738955823293173, |
|
"grad_norm": 0.1309663951396942, |
|
"learning_rate": 0.0001262493288093131, |
|
"loss": 0.9517, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 0.4750430292598967, |
|
"grad_norm": 0.12817169725894928, |
|
"learning_rate": 0.000125862555958262, |
|
"loss": 0.98, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.47619047619047616, |
|
"grad_norm": 0.1262216567993164, |
|
"learning_rate": 0.00012547536804142858, |
|
"loss": 0.9459, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.47733792312105566, |
|
"grad_norm": 0.13227002322673798, |
|
"learning_rate": 0.0001250877712727561, |
|
"loss": 0.9733, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.4784853700516351, |
|
"grad_norm": 0.13167516887187958, |
|
"learning_rate": 0.00012469977187274945, |
|
"loss": 0.9611, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 0.47963281698221455, |
|
"grad_norm": 0.12410388141870499, |
|
"learning_rate": 0.00012431137606837538, |
|
"loss": 0.9346, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.48078026391279405, |
|
"grad_norm": 0.13433882594108582, |
|
"learning_rate": 0.0001239225900929624, |
|
"loss": 0.9528, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 0.4819277108433735, |
|
"grad_norm": 0.13042834401130676, |
|
"learning_rate": 0.00012353342018610084, |
|
"loss": 0.958, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.48307515777395293, |
|
"grad_norm": 0.1367100179195404, |
|
"learning_rate": 0.00012314387259354282, |
|
"loss": 0.9665, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 0.48422260470453243, |
|
"grad_norm": 0.13082900643348694, |
|
"learning_rate": 0.00012275395356710177, |
|
"loss": 0.9706, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.4853700516351119, |
|
"grad_norm": 0.13011516630649567, |
|
"learning_rate": 0.0001223636693645523, |
|
"loss": 1.0014, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 0.4865174985656913, |
|
"grad_norm": 0.12624427676200867, |
|
"learning_rate": 0.00012197302624952971, |
|
"loss": 0.9607, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.4876649454962708, |
|
"grad_norm": 0.13805273175239563, |
|
"learning_rate": 0.00012158203049142947, |
|
"loss": 0.9245, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.48881239242685026, |
|
"grad_norm": 0.125767320394516, |
|
"learning_rate": 0.00012119068836530644, |
|
"loss": 0.9598, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.4899598393574297, |
|
"grad_norm": 0.12957511842250824, |
|
"learning_rate": 0.00012079900615177449, |
|
"loss": 0.9658, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 0.4911072862880092, |
|
"grad_norm": 0.13400690257549286, |
|
"learning_rate": 0.00012040699013690543, |
|
"loss": 0.9033, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.49225473321858865, |
|
"grad_norm": 0.1384160965681076, |
|
"learning_rate": 0.00012001464661212827, |
|
"loss": 0.9111, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 0.4934021801491681, |
|
"grad_norm": 0.13281968235969543, |
|
"learning_rate": 0.0001196219818741281, |
|
"loss": 0.9509, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.4945496270797476, |
|
"grad_norm": 0.12926004827022552, |
|
"learning_rate": 0.00011922900222474523, |
|
"loss": 0.9744, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 0.49569707401032703, |
|
"grad_norm": 0.13690362870693207, |
|
"learning_rate": 0.00011883571397087387, |
|
"loss": 0.9519, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.4968445209409065, |
|
"grad_norm": 0.13583113253116608, |
|
"learning_rate": 0.00011844212342436112, |
|
"loss": 0.9651, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 0.4979919678714859, |
|
"grad_norm": 0.13219110667705536, |
|
"learning_rate": 0.00011804823690190538, |
|
"loss": 0.9724, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.4991394148020654, |
|
"grad_norm": 0.13287144899368286, |
|
"learning_rate": 0.00011765406072495528, |
|
"loss": 0.95, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.5002868617326449, |
|
"grad_norm": 0.12971337139606476, |
|
"learning_rate": 0.00011725960121960806, |
|
"loss": 0.933, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.5014343086632244, |
|
"grad_norm": 0.13551461696624756, |
|
"learning_rate": 0.00011686486471650798, |
|
"loss": 0.9417, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 0.5025817555938038, |
|
"grad_norm": 0.12743504345417023, |
|
"learning_rate": 0.0001164698575507449, |
|
"loss": 0.9396, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.5037292025243832, |
|
"grad_norm": 0.13198213279247284, |
|
"learning_rate": 0.00011607458606175253, |
|
"loss": 0.9139, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 0.5048766494549627, |
|
"grad_norm": 0.13129118084907532, |
|
"learning_rate": 0.00011567905659320663, |
|
"loss": 0.94, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.5060240963855421, |
|
"grad_norm": 0.13837940990924835, |
|
"learning_rate": 0.00011528327549292326, |
|
"loss": 0.9357, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 0.5071715433161216, |
|
"grad_norm": 0.13046391308307648, |
|
"learning_rate": 0.00011488724911275694, |
|
"loss": 0.9488, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.5083189902467011, |
|
"grad_norm": 0.12679821252822876, |
|
"learning_rate": 0.00011449098380849858, |
|
"loss": 0.9627, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 0.5094664371772806, |
|
"grad_norm": 0.12639085948467255, |
|
"learning_rate": 0.00011409448593977363, |
|
"loss": 0.9246, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.51061388410786, |
|
"grad_norm": 0.13501042127609253, |
|
"learning_rate": 0.00011369776186993994, |
|
"loss": 0.9439, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 0.5117613310384395, |
|
"grad_norm": 0.12938356399536133, |
|
"learning_rate": 0.0001133008179659856, |
|
"loss": 0.9551, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.5129087779690189, |
|
"grad_norm": 0.13302691280841827, |
|
"learning_rate": 0.00011290366059842683, |
|
"loss": 0.9475, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 0.5140562248995983, |
|
"grad_norm": 0.13080324232578278, |
|
"learning_rate": 0.00011250629614120571, |
|
"loss": 0.9586, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.5152036718301779, |
|
"grad_norm": 0.12532858550548553, |
|
"learning_rate": 0.00011210873097158786, |
|
"loss": 0.9296, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 0.5163511187607573, |
|
"grad_norm": 0.13357555866241455, |
|
"learning_rate": 0.00011171097147006013, |
|
"loss": 0.9692, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.5174985656913368, |
|
"grad_norm": 0.13192251324653625, |
|
"learning_rate": 0.00011131302402022821, |
|
"loss": 0.9678, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 0.5186460126219162, |
|
"grad_norm": 0.12825413048267365, |
|
"learning_rate": 0.00011091489500871408, |
|
"loss": 0.9521, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.5197934595524957, |
|
"grad_norm": 0.1338176429271698, |
|
"learning_rate": 0.00011051659082505366, |
|
"loss": 0.9664, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 0.5209409064830751, |
|
"grad_norm": 0.13753551244735718, |
|
"learning_rate": 0.00011011811786159416, |
|
"loss": 0.9608, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.5220883534136547, |
|
"grad_norm": 0.1290527880191803, |
|
"learning_rate": 0.00010971948251339157, |
|
"loss": 0.9334, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 0.5232358003442341, |
|
"grad_norm": 0.13250134885311127, |
|
"learning_rate": 0.00010932069117810787, |
|
"loss": 0.9512, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.5243832472748136, |
|
"grad_norm": 0.13204516470432281, |
|
"learning_rate": 0.00010892175025590856, |
|
"loss": 0.9711, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 0.525530694205393, |
|
"grad_norm": 0.12426961213350296, |
|
"learning_rate": 0.00010852266614935982, |
|
"loss": 0.9634, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.5266781411359724, |
|
"grad_norm": 0.13476422429084778, |
|
"learning_rate": 0.00010812344526332578, |
|
"loss": 0.9345, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 0.5278255880665519, |
|
"grad_norm": 0.1305239349603653, |
|
"learning_rate": 0.00010772409400486571, |
|
"loss": 0.973, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.5289730349971313, |
|
"grad_norm": 0.12918445467948914, |
|
"learning_rate": 0.00010732461878313125, |
|
"loss": 0.9602, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 0.5301204819277109, |
|
"grad_norm": 0.13843993842601776, |
|
"learning_rate": 0.00010692502600926348, |
|
"loss": 0.9528, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.5312679288582903, |
|
"grad_norm": 0.1355164349079132, |
|
"learning_rate": 0.00010652532209629011, |
|
"loss": 0.9423, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 0.5324153757888698, |
|
"grad_norm": 0.1288328468799591, |
|
"learning_rate": 0.00010612551345902244, |
|
"loss": 0.9223, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.5335628227194492, |
|
"grad_norm": 0.13057994842529297, |
|
"learning_rate": 0.00010572560651395258, |
|
"loss": 0.9117, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.5347102696500287, |
|
"grad_norm": 0.12813064455986023, |
|
"learning_rate": 0.0001053256076791503, |
|
"loss": 0.953, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.5358577165806081, |
|
"grad_norm": 0.13439275324344635, |
|
"learning_rate": 0.00010492552337416007, |
|
"loss": 0.9552, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 0.5370051635111877, |
|
"grad_norm": 0.12726064026355743, |
|
"learning_rate": 0.00010452536001989815, |
|
"loss": 0.9556, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.5381526104417671, |
|
"grad_norm": 0.1329938769340515, |
|
"learning_rate": 0.00010412512403854942, |
|
"loss": 0.9806, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 0.5393000573723465, |
|
"grad_norm": 0.13323380053043365, |
|
"learning_rate": 0.00010372482185346435, |
|
"loss": 0.9542, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.540447504302926, |
|
"grad_norm": 0.12690098583698273, |
|
"learning_rate": 0.00010332445988905586, |
|
"loss": 0.9443, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 0.5415949512335054, |
|
"grad_norm": 0.12987449765205383, |
|
"learning_rate": 0.00010292404457069631, |
|
"loss": 0.9617, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.5427423981640849, |
|
"grad_norm": 0.12815946340560913, |
|
"learning_rate": 0.00010252358232461436, |
|
"loss": 0.9741, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 0.5438898450946644, |
|
"grad_norm": 0.13437722623348236, |
|
"learning_rate": 0.00010212307957779173, |
|
"loss": 0.9798, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.5450372920252439, |
|
"grad_norm": 0.14197693765163422, |
|
"learning_rate": 0.00010172254275786017, |
|
"loss": 0.9112, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 0.5461847389558233, |
|
"grad_norm": 0.12714983522891998, |
|
"learning_rate": 0.0001013219782929983, |
|
"loss": 0.9103, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.5473321858864028, |
|
"grad_norm": 0.13637425005435944, |
|
"learning_rate": 0.0001009213926118284, |
|
"loss": 0.9564, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 0.5484796328169822, |
|
"grad_norm": 0.13670361042022705, |
|
"learning_rate": 0.00010052079214331318, |
|
"loss": 0.9295, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.5496270797475616, |
|
"grad_norm": 0.12989582121372223, |
|
"learning_rate": 0.00010012018331665272, |
|
"loss": 0.9428, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 0.5507745266781411, |
|
"grad_norm": 0.12808671593666077, |
|
"learning_rate": 9.971957256118129e-05, |
|
"loss": 0.9216, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.5519219736087206, |
|
"grad_norm": 0.1358453780412674, |
|
"learning_rate": 9.931896630626402e-05, |
|
"loss": 0.9711, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 0.5530694205393001, |
|
"grad_norm": 0.1230422705411911, |
|
"learning_rate": 9.891837098119389e-05, |
|
"loss": 0.9282, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.5542168674698795, |
|
"grad_norm": 0.13326068222522736, |
|
"learning_rate": 9.851779301508842e-05, |
|
"loss": 0.9906, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 0.555364314400459, |
|
"grad_norm": 0.12692315876483917, |
|
"learning_rate": 9.811723883678654e-05, |
|
"loss": 0.9537, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.5565117613310384, |
|
"grad_norm": 0.13576091825962067, |
|
"learning_rate": 9.771671487474546e-05, |
|
"loss": 0.9393, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 0.5576592082616179, |
|
"grad_norm": 0.12827935814857483, |
|
"learning_rate": 9.731622755693737e-05, |
|
"loss": 0.9014, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.5588066551921974, |
|
"grad_norm": 0.1317823976278305, |
|
"learning_rate": 9.691578331074643e-05, |
|
"loss": 0.9424, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 0.5599541021227769, |
|
"grad_norm": 0.13604985177516937, |
|
"learning_rate": 9.651538856286551e-05, |
|
"loss": 0.9395, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.5611015490533563, |
|
"grad_norm": 0.1410413533449173, |
|
"learning_rate": 9.611504973919311e-05, |
|
"loss": 0.9711, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 0.5622489959839357, |
|
"grad_norm": 0.1319737732410431, |
|
"learning_rate": 9.571477326473021e-05, |
|
"loss": 0.944, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.5633964429145152, |
|
"grad_norm": 0.1291595995426178, |
|
"learning_rate": 9.531456556347712e-05, |
|
"loss": 0.9621, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 0.5645438898450946, |
|
"grad_norm": 0.12568049132823944, |
|
"learning_rate": 9.491443305833043e-05, |
|
"loss": 0.923, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.5656913367756741, |
|
"grad_norm": 0.13689230382442474, |
|
"learning_rate": 9.451438217097994e-05, |
|
"loss": 0.9335, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 0.5668387837062536, |
|
"grad_norm": 0.12956008315086365, |
|
"learning_rate": 9.411441932180554e-05, |
|
"loss": 0.9757, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.5679862306368331, |
|
"grad_norm": 0.13027647137641907, |
|
"learning_rate": 9.371455092977423e-05, |
|
"loss": 0.9225, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 0.5691336775674125, |
|
"grad_norm": 0.12894025444984436, |
|
"learning_rate": 9.331478341233706e-05, |
|
"loss": 0.9508, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.570281124497992, |
|
"grad_norm": 0.13095425069332123, |
|
"learning_rate": 9.291512318532614e-05, |
|
"loss": 0.9388, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 0.5714285714285714, |
|
"grad_norm": 0.1269254833459854, |
|
"learning_rate": 9.251557666285174e-05, |
|
"loss": 0.9682, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.5725760183591508, |
|
"grad_norm": 0.1342654824256897, |
|
"learning_rate": 9.211615025719919e-05, |
|
"loss": 0.9229, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 0.5737234652897304, |
|
"grad_norm": 0.1338120847940445, |
|
"learning_rate": 9.17168503787262e-05, |
|
"loss": 0.9593, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.5748709122203098, |
|
"grad_norm": 0.1292128562927246, |
|
"learning_rate": 9.131768343575979e-05, |
|
"loss": 0.9353, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 0.5760183591508893, |
|
"grad_norm": 0.12859606742858887, |
|
"learning_rate": 9.091865583449351e-05, |
|
"loss": 0.9279, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.5771658060814687, |
|
"grad_norm": 0.12240682542324066, |
|
"learning_rate": 9.051977397888464e-05, |
|
"loss": 0.9242, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 0.5783132530120482, |
|
"grad_norm": 0.13055679202079773, |
|
"learning_rate": 9.01210442705514e-05, |
|
"loss": 0.9374, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.5794606999426276, |
|
"grad_norm": 0.1301288604736328, |
|
"learning_rate": 8.972247310867027e-05, |
|
"loss": 0.9403, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 0.5806081468732072, |
|
"grad_norm": 0.1294988989830017, |
|
"learning_rate": 8.932406688987309e-05, |
|
"loss": 0.9511, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.5817555938037866, |
|
"grad_norm": 0.1301121711730957, |
|
"learning_rate": 8.892583200814466e-05, |
|
"loss": 0.9553, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 0.582903040734366, |
|
"grad_norm": 0.12820468842983246, |
|
"learning_rate": 8.852777485471997e-05, |
|
"loss": 0.938, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.5840504876649455, |
|
"grad_norm": 0.12704700231552124, |
|
"learning_rate": 8.81299018179817e-05, |
|
"loss": 0.9188, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 0.5851979345955249, |
|
"grad_norm": 0.12919333577156067, |
|
"learning_rate": 8.773221928335759e-05, |
|
"loss": 0.9601, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.5863453815261044, |
|
"grad_norm": 0.13542434573173523, |
|
"learning_rate": 8.73347336332181e-05, |
|
"loss": 0.9476, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 0.5874928284566838, |
|
"grad_norm": 0.12929943203926086, |
|
"learning_rate": 8.693745124677386e-05, |
|
"loss": 0.9397, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.5886402753872634, |
|
"grad_norm": 0.130234032869339, |
|
"learning_rate": 8.654037849997342e-05, |
|
"loss": 0.9313, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 0.5897877223178428, |
|
"grad_norm": 0.13515686988830566, |
|
"learning_rate": 8.614352176540067e-05, |
|
"loss": 0.9503, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.5909351692484223, |
|
"grad_norm": 0.1329420804977417, |
|
"learning_rate": 8.57468874121729e-05, |
|
"loss": 0.9606, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 0.5920826161790017, |
|
"grad_norm": 0.1323956400156021, |
|
"learning_rate": 8.535048180583838e-05, |
|
"loss": 0.9512, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.5932300631095812, |
|
"grad_norm": 0.13518303632736206, |
|
"learning_rate": 8.495431130827422e-05, |
|
"loss": 0.9377, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 0.5943775100401606, |
|
"grad_norm": 0.13215835392475128, |
|
"learning_rate": 8.455838227758432e-05, |
|
"loss": 0.9984, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.5955249569707401, |
|
"grad_norm": 0.13005711138248444, |
|
"learning_rate": 8.416270106799726e-05, |
|
"loss": 0.9556, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 0.5966724039013196, |
|
"grad_norm": 0.1329335868358612, |
|
"learning_rate": 8.376727402976447e-05, |
|
"loss": 0.9351, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.597819850831899, |
|
"grad_norm": 0.1298844963312149, |
|
"learning_rate": 8.3372107509058e-05, |
|
"loss": 0.9492, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 0.5989672977624785, |
|
"grad_norm": 0.13211120665073395, |
|
"learning_rate": 8.297720784786906e-05, |
|
"loss": 0.9495, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.6001147446930579, |
|
"grad_norm": 0.12725351750850677, |
|
"learning_rate": 8.2582581383906e-05, |
|
"loss": 0.953, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 0.6012621916236374, |
|
"grad_norm": 0.12866829335689545, |
|
"learning_rate": 8.218823445049265e-05, |
|
"loss": 0.9536, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.6024096385542169, |
|
"grad_norm": 0.1358039677143097, |
|
"learning_rate": 8.179417337646669e-05, |
|
"loss": 0.9731, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 0.6035570854847964, |
|
"grad_norm": 0.13068200647830963, |
|
"learning_rate": 8.140040448607804e-05, |
|
"loss": 0.9592, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.6047045324153758, |
|
"grad_norm": 0.1265789270401001, |
|
"learning_rate": 8.100693409888748e-05, |
|
"loss": 0.97, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 0.6058519793459552, |
|
"grad_norm": 0.13472406566143036, |
|
"learning_rate": 8.061376852966495e-05, |
|
"loss": 0.9648, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.6069994262765347, |
|
"grad_norm": 0.13039621710777283, |
|
"learning_rate": 8.02209140882886e-05, |
|
"loss": 0.926, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 0.6081468732071141, |
|
"grad_norm": 0.1332240104675293, |
|
"learning_rate": 7.982837707964321e-05, |
|
"loss": 0.9637, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.6092943201376936, |
|
"grad_norm": 0.1377446949481964, |
|
"learning_rate": 7.943616380351913e-05, |
|
"loss": 0.9556, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 0.6104417670682731, |
|
"grad_norm": 0.12704016268253326, |
|
"learning_rate": 7.904428055451118e-05, |
|
"loss": 0.9323, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.6115892139988526, |
|
"grad_norm": 0.12996916472911835, |
|
"learning_rate": 7.865273362191759e-05, |
|
"loss": 0.9376, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 0.612736660929432, |
|
"grad_norm": 0.13720089197158813, |
|
"learning_rate": 7.826152928963904e-05, |
|
"loss": 0.9469, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.6138841078600115, |
|
"grad_norm": 0.13306473195552826, |
|
"learning_rate": 7.787067383607796e-05, |
|
"loss": 0.9643, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 0.6150315547905909, |
|
"grad_norm": 0.1308506578207016, |
|
"learning_rate": 7.748017353403748e-05, |
|
"loss": 0.9476, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.6161790017211703, |
|
"grad_norm": 0.12719397246837616, |
|
"learning_rate": 7.70900346506211e-05, |
|
"loss": 0.949, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 0.6173264486517499, |
|
"grad_norm": 0.12891539931297302, |
|
"learning_rate": 7.670026344713189e-05, |
|
"loss": 0.9486, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.6184738955823293, |
|
"grad_norm": 0.12883388996124268, |
|
"learning_rate": 7.631086617897203e-05, |
|
"loss": 0.9422, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 0.6196213425129088, |
|
"grad_norm": 0.128887340426445, |
|
"learning_rate": 7.592184909554245e-05, |
|
"loss": 0.9712, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.6207687894434882, |
|
"grad_norm": 0.12608595192432404, |
|
"learning_rate": 7.553321844014258e-05, |
|
"loss": 0.9674, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 0.6219162363740677, |
|
"grad_norm": 0.12851373851299286, |
|
"learning_rate": 7.514498044987009e-05, |
|
"loss": 0.9229, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.6230636833046471, |
|
"grad_norm": 0.1275022029876709, |
|
"learning_rate": 7.475714135552074e-05, |
|
"loss": 0.9108, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 0.6242111302352267, |
|
"grad_norm": 0.13182856142520905, |
|
"learning_rate": 7.43697073814885e-05, |
|
"loss": 0.9536, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.6253585771658061, |
|
"grad_norm": 0.12938585877418518, |
|
"learning_rate": 7.39826847456656e-05, |
|
"loss": 0.9425, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 0.6265060240963856, |
|
"grad_norm": 0.13789427280426025, |
|
"learning_rate": 7.359607965934274e-05, |
|
"loss": 0.9692, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.627653471026965, |
|
"grad_norm": 0.13062715530395508, |
|
"learning_rate": 7.32098983271094e-05, |
|
"loss": 0.9383, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 0.6288009179575444, |
|
"grad_norm": 0.13577692210674286, |
|
"learning_rate": 7.282414694675426e-05, |
|
"loss": 0.9281, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.6299483648881239, |
|
"grad_norm": 0.13397711515426636, |
|
"learning_rate": 7.243883170916574e-05, |
|
"loss": 0.9881, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 0.6310958118187033, |
|
"grad_norm": 0.13154800236225128, |
|
"learning_rate": 7.205395879823271e-05, |
|
"loss": 0.9107, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.6322432587492829, |
|
"grad_norm": 0.12707136571407318, |
|
"learning_rate": 7.166953439074504e-05, |
|
"loss": 0.9728, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 0.6333907056798623, |
|
"grad_norm": 0.13293033838272095, |
|
"learning_rate": 7.128556465629475e-05, |
|
"loss": 0.9652, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.6345381526104418, |
|
"grad_norm": 0.1299448162317276, |
|
"learning_rate": 7.090205575717678e-05, |
|
"loss": 0.9455, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 0.6356855995410212, |
|
"grad_norm": 0.12795931100845337, |
|
"learning_rate": 7.051901384829021e-05, |
|
"loss": 0.9289, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.6368330464716007, |
|
"grad_norm": 0.13310518860816956, |
|
"learning_rate": 7.013644507703937e-05, |
|
"loss": 0.9203, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 0.6379804934021801, |
|
"grad_norm": 0.1404452919960022, |
|
"learning_rate": 6.975435558323532e-05, |
|
"loss": 0.9364, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.6391279403327597, |
|
"grad_norm": 0.1289965659379959, |
|
"learning_rate": 6.937275149899725e-05, |
|
"loss": 0.9313, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 0.6402753872633391, |
|
"grad_norm": 0.12998373806476593, |
|
"learning_rate": 6.899163894865395e-05, |
|
"loss": 0.9575, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.6414228341939185, |
|
"grad_norm": 0.13112759590148926, |
|
"learning_rate": 6.86110240486457e-05, |
|
"loss": 0.9648, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 0.642570281124498, |
|
"grad_norm": 0.12994885444641113, |
|
"learning_rate": 6.823091290742602e-05, |
|
"loss": 0.961, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.6437177280550774, |
|
"grad_norm": 0.1322287768125534, |
|
"learning_rate": 6.785131162536374e-05, |
|
"loss": 0.9878, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 0.6448651749856569, |
|
"grad_norm": 0.12845389544963837, |
|
"learning_rate": 6.747222629464484e-05, |
|
"loss": 0.9455, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.6460126219162364, |
|
"grad_norm": 0.13705497980117798, |
|
"learning_rate": 6.709366299917497e-05, |
|
"loss": 0.9537, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 0.6471600688468159, |
|
"grad_norm": 0.1286800056695938, |
|
"learning_rate": 6.671562781448166e-05, |
|
"loss": 0.9477, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.6483075157773953, |
|
"grad_norm": 0.13222958147525787, |
|
"learning_rate": 6.633812680761684e-05, |
|
"loss": 0.9348, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 0.6494549627079748, |
|
"grad_norm": 0.12600712478160858, |
|
"learning_rate": 6.59611660370594e-05, |
|
"loss": 0.9627, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.6506024096385542, |
|
"grad_norm": 0.130609929561615, |
|
"learning_rate": 6.558475155261811e-05, |
|
"loss": 0.9506, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 0.6517498565691336, |
|
"grad_norm": 0.1283220797777176, |
|
"learning_rate": 6.52088893953344e-05, |
|
"loss": 0.9468, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.6528973034997131, |
|
"grad_norm": 0.13281899690628052, |
|
"learning_rate": 6.48335855973855e-05, |
|
"loss": 0.95, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 0.6540447504302926, |
|
"grad_norm": 0.12522880733013153, |
|
"learning_rate": 6.445884618198754e-05, |
|
"loss": 0.9691, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.6551921973608721, |
|
"grad_norm": 0.13211198151111603, |
|
"learning_rate": 6.408467716329894e-05, |
|
"loss": 0.9636, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 0.6563396442914515, |
|
"grad_norm": 0.13233083486557007, |
|
"learning_rate": 6.371108454632391e-05, |
|
"loss": 0.9484, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.657487091222031, |
|
"grad_norm": 0.13322043418884277, |
|
"learning_rate": 6.33380743268159e-05, |
|
"loss": 0.925, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 0.6586345381526104, |
|
"grad_norm": 0.1293569952249527, |
|
"learning_rate": 6.29656524911817e-05, |
|
"loss": 0.9651, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.6597819850831899, |
|
"grad_norm": 0.15199431777000427, |
|
"learning_rate": 6.259382501638509e-05, |
|
"loss": 0.9831, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 0.6609294320137694, |
|
"grad_norm": 0.1290358006954193, |
|
"learning_rate": 6.222259786985101e-05, |
|
"loss": 0.9603, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.6620768789443489, |
|
"grad_norm": 0.1347620040178299, |
|
"learning_rate": 6.185197700936982e-05, |
|
"loss": 0.9258, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 0.6632243258749283, |
|
"grad_norm": 0.13369546830654144, |
|
"learning_rate": 6.14819683830016e-05, |
|
"loss": 0.9471, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.6643717728055077, |
|
"grad_norm": 0.1245836541056633, |
|
"learning_rate": 6.111257792898082e-05, |
|
"loss": 0.9626, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 0.6655192197360872, |
|
"grad_norm": 0.1284221112728119, |
|
"learning_rate": 6.0743811575620846e-05, |
|
"loss": 0.9219, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 0.1400459110736847, |
|
"learning_rate": 6.0375675241219e-05, |
|
"loss": 0.9495, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 0.6678141135972461, |
|
"grad_norm": 0.13489268720149994, |
|
"learning_rate": 6.000817483396148e-05, |
|
"loss": 0.9763, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.6689615605278256, |
|
"grad_norm": 0.13535645604133606, |
|
"learning_rate": 5.96413162518285e-05, |
|
"loss": 0.9263, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 0.6701090074584051, |
|
"grad_norm": 0.1278652399778366, |
|
"learning_rate": 5.9275105382499694e-05, |
|
"loss": 0.9649, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.6712564543889845, |
|
"grad_norm": 0.13380366563796997, |
|
"learning_rate": 5.890954810325966e-05, |
|
"loss": 0.9073, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 0.672403901319564, |
|
"grad_norm": 0.12865275144577026, |
|
"learning_rate": 5.854465028090355e-05, |
|
"loss": 0.9642, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.6735513482501434, |
|
"grad_norm": 0.13362818956375122, |
|
"learning_rate": 5.8180417771643006e-05, |
|
"loss": 0.9398, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 0.6746987951807228, |
|
"grad_norm": 0.1288289725780487, |
|
"learning_rate": 5.781685642101196e-05, |
|
"loss": 0.9917, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.6758462421113024, |
|
"grad_norm": 0.1349886655807495, |
|
"learning_rate": 5.7453972063773184e-05, |
|
"loss": 0.9158, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 0.6769936890418818, |
|
"grad_norm": 0.12944377958774567, |
|
"learning_rate": 5.7091770523824317e-05, |
|
"loss": 0.9787, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.6781411359724613, |
|
"grad_norm": 0.1282043606042862, |
|
"learning_rate": 5.673025761410462e-05, |
|
"loss": 0.9396, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 0.6792885829030407, |
|
"grad_norm": 0.12658511102199554, |
|
"learning_rate": 5.636943913650147e-05, |
|
"loss": 0.9507, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.6804360298336202, |
|
"grad_norm": 0.13132619857788086, |
|
"learning_rate": 5.60093208817575e-05, |
|
"loss": 0.9011, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 0.6815834767641996, |
|
"grad_norm": 0.12832701206207275, |
|
"learning_rate": 5.564990862937744e-05, |
|
"loss": 0.9602, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.6827309236947792, |
|
"grad_norm": 0.13031287491321564, |
|
"learning_rate": 5.5291208147535466e-05, |
|
"loss": 0.934, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 0.6838783706253586, |
|
"grad_norm": 0.1344255656003952, |
|
"learning_rate": 5.4933225192982586e-05, |
|
"loss": 0.9709, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.685025817555938, |
|
"grad_norm": 0.1263609677553177, |
|
"learning_rate": 5.457596551095441e-05, |
|
"loss": 0.9742, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 0.6861732644865175, |
|
"grad_norm": 0.13262014091014862, |
|
"learning_rate": 5.421943483507863e-05, |
|
"loss": 0.9751, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.6873207114170969, |
|
"grad_norm": 0.12867961823940277, |
|
"learning_rate": 5.3863638887283364e-05, |
|
"loss": 0.9876, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 0.6884681583476764, |
|
"grad_norm": 0.12955667078495026, |
|
"learning_rate": 5.350858337770498e-05, |
|
"loss": 0.9609, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.6896156052782558, |
|
"grad_norm": 0.13022245466709137, |
|
"learning_rate": 5.315427400459678e-05, |
|
"loss": 0.9605, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 0.6907630522088354, |
|
"grad_norm": 0.1346377581357956, |
|
"learning_rate": 5.280071645423726e-05, |
|
"loss": 0.9811, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.6919104991394148, |
|
"grad_norm": 0.1265023946762085, |
|
"learning_rate": 5.244791640083906e-05, |
|
"loss": 0.9523, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 0.6930579460699943, |
|
"grad_norm": 0.12777413427829742, |
|
"learning_rate": 5.2095879506457736e-05, |
|
"loss": 0.9318, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.6942053930005737, |
|
"grad_norm": 0.12877587974071503, |
|
"learning_rate": 5.174461142090111e-05, |
|
"loss": 0.911, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 0.6953528399311532, |
|
"grad_norm": 0.1300651729106903, |
|
"learning_rate": 5.139411778163832e-05, |
|
"loss": 0.8949, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.6965002868617326, |
|
"grad_norm": 0.13519862294197083, |
|
"learning_rate": 5.104440421370962e-05, |
|
"loss": 0.9456, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 0.6976477337923122, |
|
"grad_norm": 0.12986309826374054, |
|
"learning_rate": 5.0695476329635825e-05, |
|
"loss": 0.9451, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.6987951807228916, |
|
"grad_norm": 0.12880775332450867, |
|
"learning_rate": 5.034733972932855e-05, |
|
"loss": 0.9474, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 0.699942627653471, |
|
"grad_norm": 0.13083116710186005, |
|
"learning_rate": 5.000000000000002e-05, |
|
"loss": 0.9568, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.7010900745840505, |
|
"grad_norm": 0.12505587935447693, |
|
"learning_rate": 4.9653462716073594e-05, |
|
"loss": 0.9103, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 0.7022375215146299, |
|
"grad_norm": 0.12554022669792175, |
|
"learning_rate": 4.930773343909434e-05, |
|
"loss": 0.9495, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.7033849684452094, |
|
"grad_norm": 0.12707488238811493, |
|
"learning_rate": 4.8962817717639555e-05, |
|
"loss": 0.9487, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 0.7045324153757889, |
|
"grad_norm": 0.127935528755188, |
|
"learning_rate": 4.8618721087230014e-05, |
|
"loss": 0.9821, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.7056798623063684, |
|
"grad_norm": 0.132330060005188, |
|
"learning_rate": 4.8275449070240854e-05, |
|
"loss": 0.9869, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 0.7068273092369478, |
|
"grad_norm": 0.1311495006084442, |
|
"learning_rate": 4.793300717581308e-05, |
|
"loss": 0.9842, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.7079747561675273, |
|
"grad_norm": 0.13313139975070953, |
|
"learning_rate": 4.7591400899765234e-05, |
|
"loss": 0.9559, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 0.7091222030981067, |
|
"grad_norm": 0.13822641968727112, |
|
"learning_rate": 4.7250635724505e-05, |
|
"loss": 0.9324, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.7102696500286861, |
|
"grad_norm": 0.13737522065639496, |
|
"learning_rate": 4.6910717118941286e-05, |
|
"loss": 0.9403, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 0.7114170969592656, |
|
"grad_norm": 0.12806421518325806, |
|
"learning_rate": 4.6571650538396615e-05, |
|
"loss": 0.9148, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.7125645438898451, |
|
"grad_norm": 0.13433298468589783, |
|
"learning_rate": 4.6233441424519295e-05, |
|
"loss": 0.9628, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 0.7137119908204246, |
|
"grad_norm": 0.13459208607673645, |
|
"learning_rate": 4.5896095205196356e-05, |
|
"loss": 0.9604, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.714859437751004, |
|
"grad_norm": 0.14309802651405334, |
|
"learning_rate": 4.5559617294466176e-05, |
|
"loss": 0.943, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 0.7160068846815835, |
|
"grad_norm": 0.1299637407064438, |
|
"learning_rate": 4.522401309243193e-05, |
|
"loss": 0.9355, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.7171543316121629, |
|
"grad_norm": 0.12573669850826263, |
|
"learning_rate": 4.488928798517442e-05, |
|
"loss": 0.9422, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 0.7183017785427424, |
|
"grad_norm": 0.13579024374485016, |
|
"learning_rate": 4.455544734466624e-05, |
|
"loss": 0.9349, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.7194492254733219, |
|
"grad_norm": 0.12755471467971802, |
|
"learning_rate": 4.422249652868506e-05, |
|
"loss": 0.9367, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 0.7205966724039014, |
|
"grad_norm": 0.12943492829799652, |
|
"learning_rate": 4.389044088072798e-05, |
|
"loss": 0.9717, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.7217441193344808, |
|
"grad_norm": 0.13614961504936218, |
|
"learning_rate": 4.355928572992547e-05, |
|
"loss": 0.9643, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 0.7228915662650602, |
|
"grad_norm": 0.12964297831058502, |
|
"learning_rate": 4.322903639095619e-05, |
|
"loss": 0.9397, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.7240390131956397, |
|
"grad_norm": 0.12502126395702362, |
|
"learning_rate": 4.289969816396132e-05, |
|
"loss": 0.9278, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 0.7251864601262191, |
|
"grad_norm": 0.12723030149936676, |
|
"learning_rate": 4.2571276334459895e-05, |
|
"loss": 0.9721, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.7263339070567987, |
|
"grad_norm": 0.12903666496276855, |
|
"learning_rate": 4.224377617326353e-05, |
|
"loss": 0.9325, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 0.7274813539873781, |
|
"grad_norm": 0.12750981748104095, |
|
"learning_rate": 4.1917202936392265e-05, |
|
"loss": 0.965, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.7286288009179576, |
|
"grad_norm": 0.1284424364566803, |
|
"learning_rate": 4.15915618649899e-05, |
|
"loss": 0.9264, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 0.729776247848537, |
|
"grad_norm": 0.12181597203016281, |
|
"learning_rate": 4.126685818524013e-05, |
|
"loss": 0.9283, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.7309236947791165, |
|
"grad_norm": 0.13070735335350037, |
|
"learning_rate": 4.094309710828236e-05, |
|
"loss": 0.9313, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 0.7320711417096959, |
|
"grad_norm": 0.12752236425876617, |
|
"learning_rate": 4.0620283830128414e-05, |
|
"loss": 0.9366, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.7332185886402753, |
|
"grad_norm": 0.2004683017730713, |
|
"learning_rate": 4.029842353157888e-05, |
|
"loss": 0.9166, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 0.7343660355708549, |
|
"grad_norm": 0.12685176730155945, |
|
"learning_rate": 3.9977521378140084e-05, |
|
"loss": 0.9164, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.7355134825014343, |
|
"grad_norm": 0.1288214921951294, |
|
"learning_rate": 3.965758251994115e-05, |
|
"loss": 0.9292, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 0.7366609294320138, |
|
"grad_norm": 0.13338162004947662, |
|
"learning_rate": 3.933861209165146e-05, |
|
"loss": 0.9872, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.7378083763625932, |
|
"grad_norm": 0.12942497432231903, |
|
"learning_rate": 3.9020615212398016e-05, |
|
"loss": 0.9797, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 0.7389558232931727, |
|
"grad_norm": 0.13232511281967163, |
|
"learning_rate": 3.8703596985683556e-05, |
|
"loss": 0.9463, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.7401032702237521, |
|
"grad_norm": 0.13070105016231537, |
|
"learning_rate": 3.838756249930439e-05, |
|
"loss": 0.9215, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 0.7412507171543317, |
|
"grad_norm": 0.12770217657089233, |
|
"learning_rate": 3.807251682526902e-05, |
|
"loss": 0.9534, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.7423981640849111, |
|
"grad_norm": 0.1268807053565979, |
|
"learning_rate": 3.775846501971636e-05, |
|
"loss": 0.9179, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 0.7435456110154905, |
|
"grad_norm": 0.12574085593223572, |
|
"learning_rate": 3.7445412122835077e-05, |
|
"loss": 0.9214, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.74469305794607, |
|
"grad_norm": 0.13617047667503357, |
|
"learning_rate": 3.713336315878224e-05, |
|
"loss": 0.9603, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 0.7458405048766494, |
|
"grad_norm": 0.1332123875617981, |
|
"learning_rate": 3.6822323135603054e-05, |
|
"loss": 0.9363, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.7469879518072289, |
|
"grad_norm": 0.13324113190174103, |
|
"learning_rate": 3.651229704515018e-05, |
|
"loss": 0.9128, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 0.7481353987378083, |
|
"grad_norm": 0.1259755641222, |
|
"learning_rate": 3.6203289863003905e-05, |
|
"loss": 0.9431, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.7492828456683879, |
|
"grad_norm": 0.12775766849517822, |
|
"learning_rate": 3.5895306548392005e-05, |
|
"loss": 0.9707, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 0.7504302925989673, |
|
"grad_norm": 0.130545511841774, |
|
"learning_rate": 3.558835204411044e-05, |
|
"loss": 0.946, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.7515777395295468, |
|
"grad_norm": 0.13085825741291046, |
|
"learning_rate": 3.52824312764438e-05, |
|
"loss": 0.9029, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 0.7527251864601262, |
|
"grad_norm": 0.12603677809238434, |
|
"learning_rate": 3.497754915508632e-05, |
|
"loss": 0.9256, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.7538726333907056, |
|
"grad_norm": 0.12976112961769104, |
|
"learning_rate": 3.467371057306318e-05, |
|
"loss": 0.9324, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 0.7550200803212851, |
|
"grad_norm": 0.13229593634605408, |
|
"learning_rate": 3.437092040665183e-05, |
|
"loss": 0.9497, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.7561675272518646, |
|
"grad_norm": 0.13209198415279388, |
|
"learning_rate": 3.406918351530376e-05, |
|
"loss": 0.9345, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 0.7573149741824441, |
|
"grad_norm": 0.1314823031425476, |
|
"learning_rate": 3.3768504741566664e-05, |
|
"loss": 0.9272, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.7584624211130235, |
|
"grad_norm": 0.13039475679397583, |
|
"learning_rate": 3.346888891100649e-05, |
|
"loss": 0.9358, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 0.759609868043603, |
|
"grad_norm": 0.12750479578971863, |
|
"learning_rate": 3.3170340832130134e-05, |
|
"loss": 0.9454, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.7607573149741824, |
|
"grad_norm": 0.12613527476787567, |
|
"learning_rate": 3.287286529630832e-05, |
|
"loss": 0.9337, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 0.7619047619047619, |
|
"grad_norm": 0.12861678004264832, |
|
"learning_rate": 3.2576467077698493e-05, |
|
"loss": 0.9045, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.7630522088353414, |
|
"grad_norm": 0.12556509673595428, |
|
"learning_rate": 3.228115093316848e-05, |
|
"loss": 0.9095, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 0.7641996557659209, |
|
"grad_norm": 0.13398994505405426, |
|
"learning_rate": 3.198692160221987e-05, |
|
"loss": 0.966, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.7653471026965003, |
|
"grad_norm": 0.13073302805423737, |
|
"learning_rate": 3.169378380691218e-05, |
|
"loss": 0.9506, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 0.7664945496270797, |
|
"grad_norm": 0.13139608502388, |
|
"learning_rate": 3.140174225178692e-05, |
|
"loss": 0.9285, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.7676419965576592, |
|
"grad_norm": 0.13152596354484558, |
|
"learning_rate": 3.111080162379215e-05, |
|
"loss": 0.9519, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 0.7687894434882386, |
|
"grad_norm": 0.12944857776165009, |
|
"learning_rate": 3.082096659220722e-05, |
|
"loss": 0.9108, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.7699368904188181, |
|
"grad_norm": 0.12854111194610596, |
|
"learning_rate": 3.0532241808567966e-05, |
|
"loss": 0.9315, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 0.7710843373493976, |
|
"grad_norm": 0.12585926055908203, |
|
"learning_rate": 3.0244631906591825e-05, |
|
"loss": 0.9585, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.7722317842799771, |
|
"grad_norm": 0.12900350987911224, |
|
"learning_rate": 2.9958141502103722e-05, |
|
"loss": 0.9725, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 0.7733792312105565, |
|
"grad_norm": 0.13046613335609436, |
|
"learning_rate": 2.9672775192961756e-05, |
|
"loss": 0.9677, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.774526678141136, |
|
"grad_norm": 0.1273292899131775, |
|
"learning_rate": 2.938853755898364e-05, |
|
"loss": 0.9494, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 0.7756741250717154, |
|
"grad_norm": 0.13062521815299988, |
|
"learning_rate": 2.910543316187301e-05, |
|
"loss": 0.9134, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.7768215720022948, |
|
"grad_norm": 0.12806479632854462, |
|
"learning_rate": 2.882346654514627e-05, |
|
"loss": 0.9374, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 0.7779690189328744, |
|
"grad_norm": 0.1296064406633377, |
|
"learning_rate": 2.8542642234059725e-05, |
|
"loss": 0.9424, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.7791164658634538, |
|
"grad_norm": 0.12994979321956635, |
|
"learning_rate": 2.826296473553697e-05, |
|
"loss": 0.9855, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 0.7802639127940333, |
|
"grad_norm": 0.13433627784252167, |
|
"learning_rate": 2.7984438538096392e-05, |
|
"loss": 0.8971, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.7814113597246127, |
|
"grad_norm": 0.13279375433921814, |
|
"learning_rate": 2.7707068111779377e-05, |
|
"loss": 0.9418, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 0.7825588066551922, |
|
"grad_norm": 0.14554573595523834, |
|
"learning_rate": 2.7430857908078345e-05, |
|
"loss": 0.9549, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.7837062535857716, |
|
"grad_norm": 0.12296421080827713, |
|
"learning_rate": 2.7155812359865517e-05, |
|
"loss": 0.9126, |
|
"step": 3415 |
|
}, |
|
{ |
|
"epoch": 0.7848537005163512, |
|
"grad_norm": 0.12731173634529114, |
|
"learning_rate": 2.6881935881321563e-05, |
|
"loss": 0.9698, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.7860011474469306, |
|
"grad_norm": 0.13245446979999542, |
|
"learning_rate": 2.6609232867864896e-05, |
|
"loss": 0.9658, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 0.7871485943775101, |
|
"grad_norm": 0.1442078948020935, |
|
"learning_rate": 2.6337707696081094e-05, |
|
"loss": 0.9532, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.7882960413080895, |
|
"grad_norm": 0.12503184378147125, |
|
"learning_rate": 2.606736472365272e-05, |
|
"loss": 0.9534, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 0.789443488238669, |
|
"grad_norm": 0.13164812326431274, |
|
"learning_rate": 2.5798208289289204e-05, |
|
"loss": 0.9451, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.7905909351692484, |
|
"grad_norm": 0.13405010104179382, |
|
"learning_rate": 2.5530242712657492e-05, |
|
"loss": 0.9547, |
|
"step": 3445 |
|
}, |
|
{ |
|
"epoch": 0.7917383820998278, |
|
"grad_norm": 0.13217557966709137, |
|
"learning_rate": 2.526347229431242e-05, |
|
"loss": 0.9647, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.7928858290304074, |
|
"grad_norm": 0.13415786623954773, |
|
"learning_rate": 2.499790131562797e-05, |
|
"loss": 0.9719, |
|
"step": 3455 |
|
}, |
|
{ |
|
"epoch": 0.7940332759609868, |
|
"grad_norm": 0.12753376364707947, |
|
"learning_rate": 2.4733534038728257e-05, |
|
"loss": 0.9217, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.7951807228915663, |
|
"grad_norm": 0.1291799694299698, |
|
"learning_rate": 2.4470374706419485e-05, |
|
"loss": 0.9598, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 0.7963281698221457, |
|
"grad_norm": 0.1334388107061386, |
|
"learning_rate": 2.4208427542121504e-05, |
|
"loss": 0.9298, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.7974756167527252, |
|
"grad_norm": 0.1315917819738388, |
|
"learning_rate": 2.394769674980035e-05, |
|
"loss": 0.9623, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 0.7986230636833046, |
|
"grad_norm": 0.1384865790605545, |
|
"learning_rate": 2.3688186513900455e-05, |
|
"loss": 0.9543, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.7997705106138842, |
|
"grad_norm": 0.12887543439865112, |
|
"learning_rate": 2.34299009992778e-05, |
|
"loss": 0.9742, |
|
"step": 3485 |
|
}, |
|
{ |
|
"epoch": 0.8009179575444636, |
|
"grad_norm": 0.12744690477848053, |
|
"learning_rate": 2.317284435113278e-05, |
|
"loss": 0.954, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.802065404475043, |
|
"grad_norm": 0.12910710275173187, |
|
"learning_rate": 2.2917020694944023e-05, |
|
"loss": 0.9107, |
|
"step": 3495 |
|
}, |
|
{ |
|
"epoch": 0.8032128514056225, |
|
"grad_norm": 0.1282537430524826, |
|
"learning_rate": 2.2662434136401722e-05, |
|
"loss": 0.9795, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.8043602983362019, |
|
"grad_norm": 0.13550768792629242, |
|
"learning_rate": 2.2409088761342235e-05, |
|
"loss": 0.9415, |
|
"step": 3505 |
|
}, |
|
{ |
|
"epoch": 0.8055077452667814, |
|
"grad_norm": 0.12883414328098297, |
|
"learning_rate": 2.215698863568213e-05, |
|
"loss": 0.9445, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.8066551921973609, |
|
"grad_norm": 0.13625557720661163, |
|
"learning_rate": 2.1906137805353212e-05, |
|
"loss": 0.9726, |
|
"step": 3515 |
|
}, |
|
{ |
|
"epoch": 0.8078026391279404, |
|
"grad_norm": 0.12835553288459778, |
|
"learning_rate": 2.1656540296237316e-05, |
|
"loss": 0.9235, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.8089500860585198, |
|
"grad_norm": 0.13299131393432617, |
|
"learning_rate": 2.1408200114101985e-05, |
|
"loss": 0.9173, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 0.8100975329890993, |
|
"grad_norm": 0.13075728714466095, |
|
"learning_rate": 2.116112124453592e-05, |
|
"loss": 0.9613, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.8112449799196787, |
|
"grad_norm": 0.1347227394580841, |
|
"learning_rate": 2.0915307652885164e-05, |
|
"loss": 0.9611, |
|
"step": 3535 |
|
}, |
|
{ |
|
"epoch": 0.8123924268502581, |
|
"grad_norm": 0.13222533464431763, |
|
"learning_rate": 2.067076328418949e-05, |
|
"loss": 0.9492, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.8135398737808376, |
|
"grad_norm": 0.12829838693141937, |
|
"learning_rate": 2.0427492063118935e-05, |
|
"loss": 0.9843, |
|
"step": 3545 |
|
}, |
|
{ |
|
"epoch": 0.8146873207114171, |
|
"grad_norm": 0.13268935680389404, |
|
"learning_rate": 2.018549789391102e-05, |
|
"loss": 0.9879, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.8158347676419966, |
|
"grad_norm": 0.12710270285606384, |
|
"learning_rate": 1.994478466030787e-05, |
|
"loss": 0.8988, |
|
"step": 3555 |
|
}, |
|
{ |
|
"epoch": 0.816982214572576, |
|
"grad_norm": 0.13554465770721436, |
|
"learning_rate": 1.970535622549401e-05, |
|
"loss": 0.948, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.8181296615031555, |
|
"grad_norm": 0.12602022290229797, |
|
"learning_rate": 1.946721643203443e-05, |
|
"loss": 0.9437, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 0.8192771084337349, |
|
"grad_norm": 0.1312330663204193, |
|
"learning_rate": 1.923036910181275e-05, |
|
"loss": 0.9544, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.8204245553643144, |
|
"grad_norm": 0.12853941321372986, |
|
"learning_rate": 1.8994818035969975e-05, |
|
"loss": 0.9798, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 0.8215720022948939, |
|
"grad_norm": 0.13181468844413757, |
|
"learning_rate": 1.8760567014843545e-05, |
|
"loss": 0.944, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.8227194492254734, |
|
"grad_norm": 0.12790626287460327, |
|
"learning_rate": 1.8527619797906494e-05, |
|
"loss": 0.901, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 0.8238668961560528, |
|
"grad_norm": 0.12571856379508972, |
|
"learning_rate": 1.8295980123707357e-05, |
|
"loss": 0.955, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.8250143430866322, |
|
"grad_norm": 0.1319160759449005, |
|
"learning_rate": 1.8065651709809905e-05, |
|
"loss": 0.9046, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 0.8261617900172117, |
|
"grad_norm": 0.1291453242301941, |
|
"learning_rate": 1.783663825273372e-05, |
|
"loss": 0.9337, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.8273092369477911, |
|
"grad_norm": 0.1331816017627716, |
|
"learning_rate": 1.7608943427894686e-05, |
|
"loss": 0.9563, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 0.8284566838783706, |
|
"grad_norm": 0.14010034501552582, |
|
"learning_rate": 1.7382570889546124e-05, |
|
"loss": 0.9518, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.8296041308089501, |
|
"grad_norm": 0.1268058568239212, |
|
"learning_rate": 1.7157524270720036e-05, |
|
"loss": 0.912, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 0.8307515777395296, |
|
"grad_norm": 0.1255045235157013, |
|
"learning_rate": 1.6933807183168994e-05, |
|
"loss": 0.9146, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.831899024670109, |
|
"grad_norm": 0.14171355962753296, |
|
"learning_rate": 1.6711423217307885e-05, |
|
"loss": 0.9269, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 0.8330464716006885, |
|
"grad_norm": 0.12760847806930542, |
|
"learning_rate": 1.64903759421566e-05, |
|
"loss": 0.9143, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.8341939185312679, |
|
"grad_norm": 0.1354866921901703, |
|
"learning_rate": 1.627066890528247e-05, |
|
"loss": 0.9634, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 0.8353413654618473, |
|
"grad_norm": 0.1262219399213791, |
|
"learning_rate": 1.6052305632743592e-05, |
|
"loss": 0.9785, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.8364888123924269, |
|
"grad_norm": 0.1341947317123413, |
|
"learning_rate": 1.583528962903197e-05, |
|
"loss": 0.9685, |
|
"step": 3645 |
|
}, |
|
{ |
|
"epoch": 0.8376362593230063, |
|
"grad_norm": 0.12864305078983307, |
|
"learning_rate": 1.5619624377017537e-05, |
|
"loss": 0.9519, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.8387837062535858, |
|
"grad_norm": 0.15601038932800293, |
|
"learning_rate": 1.540531333789207e-05, |
|
"loss": 0.9302, |
|
"step": 3655 |
|
}, |
|
{ |
|
"epoch": 0.8399311531841652, |
|
"grad_norm": 0.13577106595039368, |
|
"learning_rate": 1.5192359951113755e-05, |
|
"loss": 0.9682, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.8410786001147447, |
|
"grad_norm": 0.1369991898536682, |
|
"learning_rate": 1.4980767634351877e-05, |
|
"loss": 0.9267, |
|
"step": 3665 |
|
}, |
|
{ |
|
"epoch": 0.8422260470453241, |
|
"grad_norm": 0.12964287400245667, |
|
"learning_rate": 1.4770539783432113e-05, |
|
"loss": 0.9279, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.8433734939759037, |
|
"grad_norm": 0.1294836550951004, |
|
"learning_rate": 1.4561679772281877e-05, |
|
"loss": 0.9176, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 0.8445209409064831, |
|
"grad_norm": 0.1307843178510666, |
|
"learning_rate": 1.4354190952876334e-05, |
|
"loss": 0.944, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.8456683878370626, |
|
"grad_norm": 0.1265002340078354, |
|
"learning_rate": 1.4148076655184373e-05, |
|
"loss": 0.9434, |
|
"step": 3685 |
|
}, |
|
{ |
|
"epoch": 0.846815834767642, |
|
"grad_norm": 0.13340280950069427, |
|
"learning_rate": 1.3943340187115494e-05, |
|
"loss": 0.9606, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.8479632816982214, |
|
"grad_norm": 0.12884503602981567, |
|
"learning_rate": 1.373998483446638e-05, |
|
"loss": 0.9117, |
|
"step": 3695 |
|
}, |
|
{ |
|
"epoch": 0.8491107286288009, |
|
"grad_norm": 0.13453038036823273, |
|
"learning_rate": 1.3538013860868436e-05, |
|
"loss": 0.9357, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.8502581755593803, |
|
"grad_norm": 0.12499819695949554, |
|
"learning_rate": 1.3337430507735205e-05, |
|
"loss": 0.949, |
|
"step": 3705 |
|
}, |
|
{ |
|
"epoch": 0.8514056224899599, |
|
"grad_norm": 0.12760350108146667, |
|
"learning_rate": 1.313823799421051e-05, |
|
"loss": 0.9245, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.8525530694205393, |
|
"grad_norm": 0.12545782327651978, |
|
"learning_rate": 1.2940439517116676e-05, |
|
"loss": 0.9163, |
|
"step": 3715 |
|
}, |
|
{ |
|
"epoch": 0.8537005163511188, |
|
"grad_norm": 0.14146262407302856, |
|
"learning_rate": 1.2744038250903267e-05, |
|
"loss": 0.9344, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.8548479632816982, |
|
"grad_norm": 0.1278744488954544, |
|
"learning_rate": 1.2549037347596115e-05, |
|
"loss": 0.9252, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 0.8559954102122777, |
|
"grad_norm": 0.1335706263780594, |
|
"learning_rate": 1.2355439936746827e-05, |
|
"loss": 0.9753, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.8571428571428571, |
|
"grad_norm": 0.12698203325271606, |
|
"learning_rate": 1.2163249125382426e-05, |
|
"loss": 0.9356, |
|
"step": 3735 |
|
}, |
|
{ |
|
"epoch": 0.8582903040734366, |
|
"grad_norm": 0.129505917429924, |
|
"learning_rate": 1.1972467997955595e-05, |
|
"loss": 0.9424, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.8594377510040161, |
|
"grad_norm": 0.13229629397392273, |
|
"learning_rate": 1.1783099616295056e-05, |
|
"loss": 0.9503, |
|
"step": 3745 |
|
}, |
|
{ |
|
"epoch": 0.8605851979345955, |
|
"grad_norm": 0.13013221323490143, |
|
"learning_rate": 1.1595147019556607e-05, |
|
"loss": 0.9307, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.861732644865175, |
|
"grad_norm": 0.1345113217830658, |
|
"learning_rate": 1.140861322417417e-05, |
|
"loss": 0.9276, |
|
"step": 3755 |
|
}, |
|
{ |
|
"epoch": 0.8628800917957544, |
|
"grad_norm": 0.1310335397720337, |
|
"learning_rate": 1.1223501223811451e-05, |
|
"loss": 0.9732, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.8640275387263339, |
|
"grad_norm": 0.12777724862098694, |
|
"learning_rate": 1.1039813989313951e-05, |
|
"loss": 0.9675, |
|
"step": 3765 |
|
}, |
|
{ |
|
"epoch": 0.8651749856569134, |
|
"grad_norm": 0.13015612959861755, |
|
"learning_rate": 1.085755446866119e-05, |
|
"loss": 0.9165, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.8663224325874929, |
|
"grad_norm": 0.1256059855222702, |
|
"learning_rate": 1.0676725586919457e-05, |
|
"loss": 0.9824, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 0.8674698795180723, |
|
"grad_norm": 0.13032986223697662, |
|
"learning_rate": 1.0497330246194848e-05, |
|
"loss": 0.9506, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.8686173264486517, |
|
"grad_norm": 0.12943556904792786, |
|
"learning_rate": 1.031937132558668e-05, |
|
"loss": 0.9085, |
|
"step": 3785 |
|
}, |
|
{ |
|
"epoch": 0.8697647733792312, |
|
"grad_norm": 0.13594165444374084, |
|
"learning_rate": 1.014285168114133e-05, |
|
"loss": 0.9401, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.8709122203098106, |
|
"grad_norm": 0.12672476470470428, |
|
"learning_rate": 9.96777414580633e-06, |
|
"loss": 0.9781, |
|
"step": 3795 |
|
}, |
|
{ |
|
"epoch": 0.8720596672403901, |
|
"grad_norm": 0.13382181525230408, |
|
"learning_rate": 9.794141529384915e-06, |
|
"loss": 0.9456, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.8732071141709696, |
|
"grad_norm": 0.12753736972808838, |
|
"learning_rate": 9.621956618491024e-06, |
|
"loss": 0.9561, |
|
"step": 3805 |
|
}, |
|
{ |
|
"epoch": 0.8743545611015491, |
|
"grad_norm": 0.1312447190284729, |
|
"learning_rate": 9.451222176504414e-06, |
|
"loss": 0.966, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.8755020080321285, |
|
"grad_norm": 0.1302998960018158, |
|
"learning_rate": 9.281940943526491e-06, |
|
"loss": 0.936, |
|
"step": 3815 |
|
}, |
|
{ |
|
"epoch": 0.876649454962708, |
|
"grad_norm": 0.12688572704792023, |
|
"learning_rate": 9.114115636336152e-06, |
|
"loss": 0.9623, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.8777969018932874, |
|
"grad_norm": 0.1292993426322937, |
|
"learning_rate": 8.947748948346357e-06, |
|
"loss": 0.9251, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 0.8789443488238669, |
|
"grad_norm": 0.12286306172609329, |
|
"learning_rate": 8.782843549560771e-06, |
|
"loss": 0.9388, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.8800917957544464, |
|
"grad_norm": 0.12918008863925934, |
|
"learning_rate": 8.61940208653097e-06, |
|
"loss": 0.9499, |
|
"step": 3835 |
|
}, |
|
{ |
|
"epoch": 0.8812392426850258, |
|
"grad_norm": 0.13565728068351746, |
|
"learning_rate": 8.457427182313937e-06, |
|
"loss": 0.9287, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.8823866896156053, |
|
"grad_norm": 0.137693852186203, |
|
"learning_rate": 8.296921436430071e-06, |
|
"loss": 0.966, |
|
"step": 3845 |
|
}, |
|
{ |
|
"epoch": 0.8835341365461847, |
|
"grad_norm": 0.12873981893062592, |
|
"learning_rate": 8.137887424821277e-06, |
|
"loss": 0.9276, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.8846815834767642, |
|
"grad_norm": 0.13087016344070435, |
|
"learning_rate": 7.980327699809832e-06, |
|
"loss": 0.9506, |
|
"step": 3855 |
|
}, |
|
{ |
|
"epoch": 0.8858290304073436, |
|
"grad_norm": 0.13923919200897217, |
|
"learning_rate": 7.824244790057223e-06, |
|
"loss": 0.9048, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.8869764773379232, |
|
"grad_norm": 0.12863433361053467, |
|
"learning_rate": 7.66964120052377e-06, |
|
"loss": 0.9634, |
|
"step": 3865 |
|
}, |
|
{ |
|
"epoch": 0.8881239242685026, |
|
"grad_norm": 0.12779028713703156, |
|
"learning_rate": 7.516519412428203e-06, |
|
"loss": 0.9463, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.8892713711990821, |
|
"grad_norm": 0.13034799695014954, |
|
"learning_rate": 7.3648818832080745e-06, |
|
"loss": 0.979, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 0.8904188181296615, |
|
"grad_norm": 0.13406170904636383, |
|
"learning_rate": 7.214731046480094e-06, |
|
"loss": 0.9234, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.891566265060241, |
|
"grad_norm": 0.13078339397907257, |
|
"learning_rate": 7.066069312001289e-06, |
|
"loss": 0.9578, |
|
"step": 3885 |
|
}, |
|
{ |
|
"epoch": 0.8927137119908204, |
|
"grad_norm": 0.13037657737731934, |
|
"learning_rate": 6.918899065630113e-06, |
|
"loss": 0.933, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.8938611589213998, |
|
"grad_norm": 0.12971769273281097, |
|
"learning_rate": 6.773222669288359e-06, |
|
"loss": 0.9478, |
|
"step": 3895 |
|
}, |
|
{ |
|
"epoch": 0.8950086058519794, |
|
"grad_norm": 0.13167798519134521, |
|
"learning_rate": 6.629042460923096e-06, |
|
"loss": 0.9555, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.8961560527825588, |
|
"grad_norm": 0.13504882156848907, |
|
"learning_rate": 6.486360754469234e-06, |
|
"loss": 0.9192, |
|
"step": 3905 |
|
}, |
|
{ |
|
"epoch": 0.8973034997131383, |
|
"grad_norm": 0.1290195882320404, |
|
"learning_rate": 6.345179839812343e-06, |
|
"loss": 0.9263, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.8984509466437177, |
|
"grad_norm": 0.127256378531456, |
|
"learning_rate": 6.205501982751971e-06, |
|
"loss": 0.9283, |
|
"step": 3915 |
|
}, |
|
{ |
|
"epoch": 0.8995983935742972, |
|
"grad_norm": 0.1287613958120346, |
|
"learning_rate": 6.067329424965162e-06, |
|
"loss": 0.9485, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.9007458405048766, |
|
"grad_norm": 0.13389694690704346, |
|
"learning_rate": 5.930664383970641e-06, |
|
"loss": 0.9235, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 0.9018932874354562, |
|
"grad_norm": 0.13568086922168732, |
|
"learning_rate": 5.795509053093029e-06, |
|
"loss": 0.9484, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.9030407343660356, |
|
"grad_norm": 0.12701798975467682, |
|
"learning_rate": 5.6618656014278406e-06, |
|
"loss": 0.9161, |
|
"step": 3935 |
|
}, |
|
{ |
|
"epoch": 0.904188181296615, |
|
"grad_norm": 0.13050422072410583, |
|
"learning_rate": 5.5297361738065325e-06, |
|
"loss": 0.9688, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.9053356282271945, |
|
"grad_norm": 0.13256116211414337, |
|
"learning_rate": 5.399122890762143e-06, |
|
"loss": 0.9124, |
|
"step": 3945 |
|
}, |
|
{ |
|
"epoch": 0.9064830751577739, |
|
"grad_norm": 0.1276332139968872, |
|
"learning_rate": 5.270027848495207e-06, |
|
"loss": 0.9692, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.9076305220883534, |
|
"grad_norm": 0.140411376953125, |
|
"learning_rate": 5.1424531188402405e-06, |
|
"loss": 0.9739, |
|
"step": 3955 |
|
}, |
|
{ |
|
"epoch": 0.9087779690189329, |
|
"grad_norm": 0.12793396413326263, |
|
"learning_rate": 5.016400749232297e-06, |
|
"loss": 0.9482, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.9099254159495124, |
|
"grad_norm": 0.12200927734375, |
|
"learning_rate": 4.89187276267431e-06, |
|
"loss": 0.95, |
|
"step": 3965 |
|
}, |
|
{ |
|
"epoch": 0.9110728628800918, |
|
"grad_norm": 0.13150322437286377, |
|
"learning_rate": 4.7688711577044354e-06, |
|
"loss": 0.9651, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.9122203098106713, |
|
"grad_norm": 0.13082684576511383, |
|
"learning_rate": 4.647397908364182e-06, |
|
"loss": 0.9028, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 0.9133677567412507, |
|
"grad_norm": 0.12200944870710373, |
|
"learning_rate": 4.5274549641665105e-06, |
|
"loss": 0.9135, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.9145152036718301, |
|
"grad_norm": 0.1254609227180481, |
|
"learning_rate": 4.40904425006472e-06, |
|
"loss": 0.9437, |
|
"step": 3985 |
|
}, |
|
{ |
|
"epoch": 0.9156626506024096, |
|
"grad_norm": 0.12539255619049072, |
|
"learning_rate": 4.2921676664214535e-06, |
|
"loss": 0.8961, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.9168100975329891, |
|
"grad_norm": 0.12549173831939697, |
|
"learning_rate": 4.176827088978297e-06, |
|
"loss": 0.9377, |
|
"step": 3995 |
|
}, |
|
{ |
|
"epoch": 0.9179575444635686, |
|
"grad_norm": 0.1272364854812622, |
|
"learning_rate": 4.0630243688255185e-06, |
|
"loss": 0.952, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.919104991394148, |
|
"grad_norm": 0.12669628858566284, |
|
"learning_rate": 3.950761332372543e-06, |
|
"loss": 0.9593, |
|
"step": 4005 |
|
}, |
|
{ |
|
"epoch": 0.9202524383247275, |
|
"grad_norm": 0.13297000527381897, |
|
"learning_rate": 3.8400397813185054e-06, |
|
"loss": 0.9559, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 0.9213998852553069, |
|
"grad_norm": 0.1260041445493698, |
|
"learning_rate": 3.7308614926234165e-06, |
|
"loss": 0.9277, |
|
"step": 4015 |
|
}, |
|
{ |
|
"epoch": 0.9225473321858864, |
|
"grad_norm": 0.12770813703536987, |
|
"learning_rate": 3.6232282184795794e-06, |
|
"loss": 0.9448, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.9236947791164659, |
|
"grad_norm": 0.13022121787071228, |
|
"learning_rate": 3.517141686283498e-06, |
|
"loss": 0.9415, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 0.9248422260470454, |
|
"grad_norm": 0.12520062923431396, |
|
"learning_rate": 3.412603598608188e-06, |
|
"loss": 0.9453, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.9259896729776248, |
|
"grad_norm": 0.12913860380649567, |
|
"learning_rate": 3.3096156331758e-06, |
|
"loss": 0.9492, |
|
"step": 4035 |
|
}, |
|
{ |
|
"epoch": 0.9271371199082042, |
|
"grad_norm": 0.12940272688865662, |
|
"learning_rate": 3.2081794428307278e-06, |
|
"loss": 0.949, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.9282845668387837, |
|
"grad_norm": 0.12925906479358673, |
|
"learning_rate": 3.1082966555130654e-06, |
|
"loss": 0.9143, |
|
"step": 4045 |
|
}, |
|
{ |
|
"epoch": 0.9294320137693631, |
|
"grad_norm": 0.1461576372385025, |
|
"learning_rate": 3.0099688742324715e-06, |
|
"loss": 0.9194, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.9305794606999426, |
|
"grad_norm": 0.1329105645418167, |
|
"learning_rate": 2.913197677042456e-06, |
|
"loss": 0.9007, |
|
"step": 4055 |
|
}, |
|
{ |
|
"epoch": 0.9317269076305221, |
|
"grad_norm": 0.13174231350421906, |
|
"learning_rate": 2.8179846170150903e-06, |
|
"loss": 0.9247, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.9328743545611016, |
|
"grad_norm": 0.12422098964452744, |
|
"learning_rate": 2.7243312222159924e-06, |
|
"loss": 0.9283, |
|
"step": 4065 |
|
}, |
|
{ |
|
"epoch": 0.934021801491681, |
|
"grad_norm": 0.13059760630130768, |
|
"learning_rate": 2.6322389956799143e-06, |
|
"loss": 0.9783, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.9351692484222605, |
|
"grad_norm": 0.1337437480688095, |
|
"learning_rate": 2.541709415386495e-06, |
|
"loss": 0.9643, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 0.9363166953528399, |
|
"grad_norm": 0.13037900626659393, |
|
"learning_rate": 2.4527439342366785e-06, |
|
"loss": 0.9529, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.9374641422834193, |
|
"grad_norm": 0.13122588396072388, |
|
"learning_rate": 2.3653439800292556e-06, |
|
"loss": 0.9504, |
|
"step": 4085 |
|
}, |
|
{ |
|
"epoch": 0.9386115892139989, |
|
"grad_norm": 0.12561407685279846, |
|
"learning_rate": 2.2795109554381024e-06, |
|
"loss": 0.9457, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.9397590361445783, |
|
"grad_norm": 0.13426658511161804, |
|
"learning_rate": 2.195246237989479e-06, |
|
"loss": 0.9089, |
|
"step": 4095 |
|
}, |
|
{ |
|
"epoch": 0.9409064830751578, |
|
"grad_norm": 0.12589140236377716, |
|
"learning_rate": 2.1125511800401234e-06, |
|
"loss": 0.8939, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.9420539300057372, |
|
"grad_norm": 0.13555549085140228, |
|
"learning_rate": 2.0314271087554126e-06, |
|
"loss": 0.9647, |
|
"step": 4105 |
|
}, |
|
{ |
|
"epoch": 0.9432013769363167, |
|
"grad_norm": 0.1271609365940094, |
|
"learning_rate": 1.951875326088104e-06, |
|
"loss": 0.9331, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.9443488238668961, |
|
"grad_norm": 0.14045216143131256, |
|
"learning_rate": 1.8738971087574275e-06, |
|
"loss": 0.9597, |
|
"step": 4115 |
|
}, |
|
{ |
|
"epoch": 0.9454962707974757, |
|
"grad_norm": 0.12654034793376923, |
|
"learning_rate": 1.797493708228659e-06, |
|
"loss": 0.9267, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.9466437177280551, |
|
"grad_norm": 0.1343478262424469, |
|
"learning_rate": 1.7226663506929142e-06, |
|
"loss": 0.9373, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 0.9477911646586346, |
|
"grad_norm": 0.1329105794429779, |
|
"learning_rate": 1.6494162370475852e-06, |
|
"loss": 0.9255, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.948938611589214, |
|
"grad_norm": 0.1293703317642212, |
|
"learning_rate": 1.5777445428770022e-06, |
|
"loss": 0.96, |
|
"step": 4135 |
|
}, |
|
{ |
|
"epoch": 0.9500860585197934, |
|
"grad_norm": 0.13163906335830688, |
|
"learning_rate": 1.5076524184336027e-06, |
|
"loss": 0.924, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.9512335054503729, |
|
"grad_norm": 0.12938107550144196, |
|
"learning_rate": 1.4391409886194474e-06, |
|
"loss": 0.9338, |
|
"step": 4145 |
|
}, |
|
{ |
|
"epoch": 0.9523809523809523, |
|
"grad_norm": 0.13095106184482574, |
|
"learning_rate": 1.3722113529681668e-06, |
|
"loss": 0.9538, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.9535283993115319, |
|
"grad_norm": 0.13413451611995697, |
|
"learning_rate": 1.306864585627332e-06, |
|
"loss": 0.9812, |
|
"step": 4155 |
|
}, |
|
{ |
|
"epoch": 0.9546758462421113, |
|
"grad_norm": 0.1287609338760376, |
|
"learning_rate": 1.2431017353412233e-06, |
|
"loss": 0.9374, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.9558232931726908, |
|
"grad_norm": 0.12516798079013824, |
|
"learning_rate": 1.1809238254339105e-06, |
|
"loss": 0.9607, |
|
"step": 4165 |
|
}, |
|
{ |
|
"epoch": 0.9569707401032702, |
|
"grad_norm": 0.13837283849716187, |
|
"learning_rate": 1.1203318537929996e-06, |
|
"loss": 0.9585, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.9581181870338497, |
|
"grad_norm": 0.13161715865135193, |
|
"learning_rate": 1.0613267928534453e-06, |
|
"loss": 0.9289, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 0.9592656339644291, |
|
"grad_norm": 0.12652412056922913, |
|
"learning_rate": 1.0039095895820639e-06, |
|
"loss": 0.9477, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.9604130808950087, |
|
"grad_norm": 0.12420324981212616, |
|
"learning_rate": 9.480811654622557e-07, |
|
"loss": 0.9562, |
|
"step": 4185 |
|
}, |
|
{ |
|
"epoch": 0.9615605278255881, |
|
"grad_norm": 0.12834912538528442, |
|
"learning_rate": 8.938424164792736e-07, |
|
"loss": 0.9724, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.9627079747561675, |
|
"grad_norm": 0.12531572580337524, |
|
"learning_rate": 8.411942131058115e-07, |
|
"loss": 0.9464, |
|
"step": 4195 |
|
}, |
|
{ |
|
"epoch": 0.963855421686747, |
|
"grad_norm": 0.12882524728775024, |
|
"learning_rate": 7.90137400288049e-07, |
|
"loss": 0.9373, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.9650028686173264, |
|
"grad_norm": 0.13834188878536224, |
|
"learning_rate": 7.406727974320627e-07, |
|
"loss": 0.937, |
|
"step": 4205 |
|
}, |
|
{ |
|
"epoch": 0.9661503155479059, |
|
"grad_norm": 0.13418422639369965, |
|
"learning_rate": 6.928011983907245e-07, |
|
"loss": 0.9551, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.9672977624784854, |
|
"grad_norm": 0.1264306902885437, |
|
"learning_rate": 6.465233714509245e-07, |
|
"loss": 0.9582, |
|
"step": 4215 |
|
}, |
|
{ |
|
"epoch": 0.9684452094090649, |
|
"grad_norm": 0.1280186027288437, |
|
"learning_rate": 6.01840059321257e-07, |
|
"loss": 0.9523, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.9695926563396443, |
|
"grad_norm": 0.12469854950904846, |
|
"learning_rate": 5.587519791200869e-07, |
|
"loss": 0.9511, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 0.9707401032702238, |
|
"grad_norm": 0.13365860283374786, |
|
"learning_rate": 5.172598223640468e-07, |
|
"loss": 0.9195, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.9718875502008032, |
|
"grad_norm": 0.1352444291114807, |
|
"learning_rate": 4.773642549569579e-07, |
|
"loss": 0.9651, |
|
"step": 4235 |
|
}, |
|
{ |
|
"epoch": 0.9730349971313826, |
|
"grad_norm": 0.12593533098697662, |
|
"learning_rate": 4.390659171790934e-07, |
|
"loss": 0.8991, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.9741824440619621, |
|
"grad_norm": 0.13229170441627502, |
|
"learning_rate": 4.023654236769647e-07, |
|
"loss": 0.9233, |
|
"step": 4245 |
|
}, |
|
{ |
|
"epoch": 0.9753298909925416, |
|
"grad_norm": 0.13042078912258148, |
|
"learning_rate": 3.672633634534295e-07, |
|
"loss": 0.9276, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.9764773379231211, |
|
"grad_norm": 0.12525303661823273, |
|
"learning_rate": 3.3376029985819903e-07, |
|
"loss": 0.9373, |
|
"step": 4255 |
|
}, |
|
{ |
|
"epoch": 0.9776247848537005, |
|
"grad_norm": 0.12787853181362152, |
|
"learning_rate": 3.0185677057887885e-07, |
|
"loss": 0.9313, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.97877223178428, |
|
"grad_norm": 0.12966030836105347, |
|
"learning_rate": 2.715532876322646e-07, |
|
"loss": 0.9288, |
|
"step": 4265 |
|
}, |
|
{ |
|
"epoch": 0.9799196787148594, |
|
"grad_norm": 0.1332397758960724, |
|
"learning_rate": 2.428503373561708e-07, |
|
"loss": 0.9362, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.9810671256454389, |
|
"grad_norm": 0.13498224318027496, |
|
"learning_rate": 2.1574838040161473e-07, |
|
"loss": 0.9411, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 0.9822145725760184, |
|
"grad_norm": 0.12744614481925964, |
|
"learning_rate": 1.9024785172541136e-07, |
|
"loss": 0.9343, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.9833620195065979, |
|
"grad_norm": 0.1275186389684677, |
|
"learning_rate": 1.6634916058319018e-07, |
|
"loss": 0.9256, |
|
"step": 4285 |
|
}, |
|
{ |
|
"epoch": 0.9845094664371773, |
|
"grad_norm": 0.12919220328330994, |
|
"learning_rate": 1.4405269052284455e-07, |
|
"loss": 0.9333, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.9856569133677567, |
|
"grad_norm": 0.13401302695274353, |
|
"learning_rate": 1.2335879937839246e-07, |
|
"loss": 0.8878, |
|
"step": 4295 |
|
}, |
|
{ |
|
"epoch": 0.9868043602983362, |
|
"grad_norm": 0.1339481770992279, |
|
"learning_rate": 1.0426781926416996e-07, |
|
"loss": 0.9369, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"step": 4300, |
|
"total_flos": 1.209382783849123e+19, |
|
"train_loss": 0.0, |
|
"train_runtime": 0.0433, |
|
"train_samples_per_second": 31910.696, |
|
"train_steps_per_second": 993.599 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 43, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"total_flos": 1.209382783849123e+19, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|