qwen30b_alpacafarm_bs64 / trainer_state.json
{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.984025559105431,
"eval_steps": 500,
"global_step": 468,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.006389776357827476,
"grad_norm": 31.704153653628946,
"learning_rate": 0.0,
"loss": 2.5768,
"step": 1
},
{
"epoch": 0.012779552715654952,
"grad_norm": 31.58442561735789,
"learning_rate": 2.1276595744680852e-07,
"loss": 2.7208,
"step": 2
},
{
"epoch": 0.019169329073482427,
"grad_norm": 31.41195342659084,
"learning_rate": 4.2553191489361704e-07,
"loss": 2.5583,
"step": 3
},
{
"epoch": 0.025559105431309903,
"grad_norm": 33.9099531804883,
"learning_rate": 6.382978723404255e-07,
"loss": 2.6238,
"step": 4
},
{
"epoch": 0.03194888178913738,
"grad_norm": 27.556054406511105,
"learning_rate": 8.510638297872341e-07,
"loss": 2.4049,
"step": 5
},
{
"epoch": 0.038338658146964855,
"grad_norm": 31.768418102023194,
"learning_rate": 1.0638297872340427e-06,
"loss": 2.6021,
"step": 6
},
{
"epoch": 0.04472843450479233,
"grad_norm": 32.283847840303004,
"learning_rate": 1.276595744680851e-06,
"loss": 2.743,
"step": 7
},
{
"epoch": 0.051118210862619806,
"grad_norm": 29.455083723950487,
"learning_rate": 1.4893617021276596e-06,
"loss": 2.4725,
"step": 8
},
{
"epoch": 0.05750798722044728,
"grad_norm": 27.236601491186654,
"learning_rate": 1.7021276595744682e-06,
"loss": 2.3754,
"step": 9
},
{
"epoch": 0.06389776357827476,
"grad_norm": 29.533151207286714,
"learning_rate": 1.9148936170212767e-06,
"loss": 2.3779,
"step": 10
},
{
"epoch": 0.07028753993610223,
"grad_norm": 23.426628166758984,
"learning_rate": 2.1276595744680853e-06,
"loss": 2.2612,
"step": 11
},
{
"epoch": 0.07667731629392971,
"grad_norm": 24.604043292573415,
"learning_rate": 2.340425531914894e-06,
"loss": 2.0661,
"step": 12
},
{
"epoch": 0.08306709265175719,
"grad_norm": 24.655783466838816,
"learning_rate": 2.553191489361702e-06,
"loss": 2.2323,
"step": 13
},
{
"epoch": 0.08945686900958466,
"grad_norm": 15.84080580636176,
"learning_rate": 2.765957446808511e-06,
"loss": 1.7864,
"step": 14
},
{
"epoch": 0.09584664536741214,
"grad_norm": 12.05049966753011,
"learning_rate": 2.978723404255319e-06,
"loss": 1.6959,
"step": 15
},
{
"epoch": 0.10223642172523961,
"grad_norm": 12.020085870345559,
"learning_rate": 3.191489361702128e-06,
"loss": 1.5898,
"step": 16
},
{
"epoch": 0.10862619808306709,
"grad_norm": 12.3340962254883,
"learning_rate": 3.4042553191489363e-06,
"loss": 1.6419,
"step": 17
},
{
"epoch": 0.11501597444089456,
"grad_norm": 8.611078574621429,
"learning_rate": 3.6170212765957453e-06,
"loss": 1.3988,
"step": 18
},
{
"epoch": 0.12140575079872204,
"grad_norm": 4.680603958774463,
"learning_rate": 3.8297872340425535e-06,
"loss": 1.2925,
"step": 19
},
{
"epoch": 0.12779552715654952,
"grad_norm": 6.743115476232132,
"learning_rate": 4.042553191489362e-06,
"loss": 1.3818,
"step": 20
},
{
"epoch": 0.134185303514377,
"grad_norm": 4.157596387709073,
"learning_rate": 4.255319148936171e-06,
"loss": 1.3793,
"step": 21
},
{
"epoch": 0.14057507987220447,
"grad_norm": 2.9340588329022443,
"learning_rate": 4.468085106382979e-06,
"loss": 1.2788,
"step": 22
},
{
"epoch": 0.14696485623003194,
"grad_norm": 2.818255229706057,
"learning_rate": 4.680851063829788e-06,
"loss": 1.1308,
"step": 23
},
{
"epoch": 0.15335463258785942,
"grad_norm": 3.117187687433535,
"learning_rate": 4.893617021276596e-06,
"loss": 1.3701,
"step": 24
},
{
"epoch": 0.1597444089456869,
"grad_norm": 2.8101978209724914,
"learning_rate": 5.106382978723404e-06,
"loss": 1.2065,
"step": 25
},
{
"epoch": 0.16613418530351437,
"grad_norm": 2.559531586573765,
"learning_rate": 5.319148936170213e-06,
"loss": 1.2604,
"step": 26
},
{
"epoch": 0.17252396166134185,
"grad_norm": 2.5602544421302627,
"learning_rate": 5.531914893617022e-06,
"loss": 1.1226,
"step": 27
},
{
"epoch": 0.17891373801916932,
"grad_norm": 2.78567947341407,
"learning_rate": 5.744680851063831e-06,
"loss": 1.1317,
"step": 28
},
{
"epoch": 0.1853035143769968,
"grad_norm": 2.32140520760061,
"learning_rate": 5.957446808510638e-06,
"loss": 1.2707,
"step": 29
},
{
"epoch": 0.19169329073482427,
"grad_norm": 2.637245330226456,
"learning_rate": 6.170212765957447e-06,
"loss": 1.1876,
"step": 30
},
{
"epoch": 0.19808306709265175,
"grad_norm": 2.655115239155347,
"learning_rate": 6.382978723404256e-06,
"loss": 1.1477,
"step": 31
},
{
"epoch": 0.20447284345047922,
"grad_norm": 2.1291613777910037,
"learning_rate": 6.595744680851064e-06,
"loss": 1.2044,
"step": 32
},
{
"epoch": 0.2108626198083067,
"grad_norm": 2.2821733814301273,
"learning_rate": 6.808510638297873e-06,
"loss": 1.2596,
"step": 33
},
{
"epoch": 0.21725239616613418,
"grad_norm": 2.1391452019594435,
"learning_rate": 7.021276595744682e-06,
"loss": 1.1544,
"step": 34
},
{
"epoch": 0.22364217252396165,
"grad_norm": 2.634505180459112,
"learning_rate": 7.234042553191491e-06,
"loss": 1.1743,
"step": 35
},
{
"epoch": 0.23003194888178913,
"grad_norm": 2.4479669174854695,
"learning_rate": 7.446808510638298e-06,
"loss": 1.2329,
"step": 36
},
{
"epoch": 0.2364217252396166,
"grad_norm": 2.4211752635431045,
"learning_rate": 7.659574468085107e-06,
"loss": 1.2652,
"step": 37
},
{
"epoch": 0.24281150159744408,
"grad_norm": 2.4226771286938864,
"learning_rate": 7.872340425531916e-06,
"loss": 1.2498,
"step": 38
},
{
"epoch": 0.24920127795527156,
"grad_norm": 2.243174790855972,
"learning_rate": 8.085106382978723e-06,
"loss": 1.0903,
"step": 39
},
{
"epoch": 0.25559105431309903,
"grad_norm": 2.194169145672455,
"learning_rate": 8.297872340425532e-06,
"loss": 1.2425,
"step": 40
},
{
"epoch": 0.26198083067092653,
"grad_norm": 2.3929823585508028,
"learning_rate": 8.510638297872341e-06,
"loss": 1.2134,
"step": 41
},
{
"epoch": 0.268370607028754,
"grad_norm": 2.192126133315745,
"learning_rate": 8.72340425531915e-06,
"loss": 1.2291,
"step": 42
},
{
"epoch": 0.2747603833865815,
"grad_norm": 2.076981829205724,
"learning_rate": 8.936170212765958e-06,
"loss": 1.0894,
"step": 43
},
{
"epoch": 0.28115015974440893,
"grad_norm": 2.159545856122453,
"learning_rate": 9.148936170212767e-06,
"loss": 1.1815,
"step": 44
},
{
"epoch": 0.28753993610223644,
"grad_norm": 1.9857699802597364,
"learning_rate": 9.361702127659576e-06,
"loss": 1.0501,
"step": 45
},
{
"epoch": 0.2939297124600639,
"grad_norm": 2.0870694200309265,
"learning_rate": 9.574468085106385e-06,
"loss": 1.1938,
"step": 46
},
{
"epoch": 0.3003194888178914,
"grad_norm": 2.3350367723863084,
"learning_rate": 9.787234042553192e-06,
"loss": 1.0923,
"step": 47
},
{
"epoch": 0.30670926517571884,
"grad_norm": 2.455931252583481,
"learning_rate": 1e-05,
"loss": 1.1516,
"step": 48
},
{
"epoch": 0.31309904153354634,
"grad_norm": 2.1368969102758424,
"learning_rate": 9.999860789001947e-06,
"loss": 0.9968,
"step": 49
},
{
"epoch": 0.3194888178913738,
"grad_norm": 2.3688426509121596,
"learning_rate": 9.999443163759669e-06,
"loss": 1.1361,
"step": 50
},
{
"epoch": 0.3258785942492013,
"grad_norm": 2.3264607493113325,
"learning_rate": 9.998747147528375e-06,
"loss": 1.1678,
"step": 51
},
{
"epoch": 0.33226837060702874,
"grad_norm": 2.0130202409916786,
"learning_rate": 9.997772779065312e-06,
"loss": 1.1882,
"step": 52
},
{
"epoch": 0.33865814696485624,
"grad_norm": 2.39774596578351,
"learning_rate": 9.996520112627602e-06,
"loss": 1.2381,
"step": 53
},
{
"epoch": 0.3450479233226837,
"grad_norm": 2.3378114420187295,
"learning_rate": 9.994989217969224e-06,
"loss": 1.292,
"step": 54
},
{
"epoch": 0.3514376996805112,
"grad_norm": 2.4669226231480037,
"learning_rate": 9.993180180337126e-06,
"loss": 1.2083,
"step": 55
},
{
"epoch": 0.35782747603833864,
"grad_norm": 2.5723057747722446,
"learning_rate": 9.991093100466482e-06,
"loss": 1.2001,
"step": 56
},
{
"epoch": 0.36421725239616615,
"grad_norm": 2.1243722512989254,
"learning_rate": 9.988728094575082e-06,
"loss": 1.213,
"step": 57
},
{
"epoch": 0.3706070287539936,
"grad_norm": 2.4033633557830343,
"learning_rate": 9.986085294356858e-06,
"loss": 1.1875,
"step": 58
},
{
"epoch": 0.3769968051118211,
"grad_norm": 2.3474389927950385,
"learning_rate": 9.983164846974549e-06,
"loss": 1.2322,
"step": 59
},
{
"epoch": 0.38338658146964855,
"grad_norm": 2.452909045917012,
"learning_rate": 9.979966915051517e-06,
"loss": 1.1646,
"step": 60
},
{
"epoch": 0.38977635782747605,
"grad_norm": 2.4723875025133837,
"learning_rate": 9.976491676662679e-06,
"loss": 1.3015,
"step": 61
},
{
"epoch": 0.3961661341853035,
"grad_norm": 2.2634946397130347,
"learning_rate": 9.972739325324596e-06,
"loss": 1.2803,
"step": 62
},
{
"epoch": 0.402555910543131,
"grad_norm": 2.403155802895676,
"learning_rate": 9.968710069984699e-06,
"loss": 1.2175,
"step": 63
},
{
"epoch": 0.40894568690095845,
"grad_norm": 2.1295330822670127,
"learning_rate": 9.964404135009649e-06,
"loss": 1.2292,
"step": 64
},
{
"epoch": 0.41533546325878595,
"grad_norm": 2.1414114768880808,
"learning_rate": 9.959821760172849e-06,
"loss": 1.2264,
"step": 65
},
{
"epoch": 0.4217252396166134,
"grad_norm": 2.2853864319073787,
"learning_rate": 9.95496320064109e-06,
"loss": 1.1838,
"step": 66
},
{
"epoch": 0.4281150159744409,
"grad_norm": 2.2370120573822874,
"learning_rate": 9.94982872696034e-06,
"loss": 1.291,
"step": 67
},
{
"epoch": 0.43450479233226835,
"grad_norm": 2.2059962759749063,
"learning_rate": 9.94441862504068e-06,
"loss": 1.165,
"step": 68
},
{
"epoch": 0.44089456869009586,
"grad_norm": 2.5409369250481957,
"learning_rate": 9.938733196140386e-06,
"loss": 1.2124,
"step": 69
},
{
"epoch": 0.4472843450479233,
"grad_norm": 2.0755145142571996,
"learning_rate": 9.932772756849152e-06,
"loss": 1.3153,
"step": 70
},
{
"epoch": 0.4536741214057508,
"grad_norm": 2.111481371806788,
"learning_rate": 9.926537639070457e-06,
"loss": 1.208,
"step": 71
},
{
"epoch": 0.46006389776357826,
"grad_norm": 2.309594198706378,
"learning_rate": 9.92002819000309e-06,
"loss": 1.1227,
"step": 72
},
{
"epoch": 0.46645367412140576,
"grad_norm": 1.812782826112667,
"learning_rate": 9.913244772121811e-06,
"loss": 1.2081,
"step": 73
},
{
"epoch": 0.4728434504792332,
"grad_norm": 2.265672430816926,
"learning_rate": 9.90618776315717e-06,
"loss": 1.3102,
"step": 74
},
{
"epoch": 0.4792332268370607,
"grad_norm": 2.141190910110161,
"learning_rate": 9.898857556074469e-06,
"loss": 1.151,
"step": 75
},
{
"epoch": 0.48562300319488816,
"grad_norm": 2.1944205784706474,
"learning_rate": 9.891254559051886e-06,
"loss": 1.236,
"step": 76
},
{
"epoch": 0.49201277955271566,
"grad_norm": 2.2423059756988923,
"learning_rate": 9.883379195457747e-06,
"loss": 1.1508,
"step": 77
},
{
"epoch": 0.4984025559105431,
"grad_norm": 2.3279349540321492,
"learning_rate": 9.875231903826936e-06,
"loss": 1.0754,
"step": 78
},
{
"epoch": 0.5047923322683706,
"grad_norm": 2.053366178339911,
"learning_rate": 9.8668131378365e-06,
"loss": 1.2044,
"step": 79
},
{
"epoch": 0.5111821086261981,
"grad_norm": 2.126844691744352,
"learning_rate": 9.858123366280358e-06,
"loss": 1.1222,
"step": 80
},
{
"epoch": 0.5175718849840255,
"grad_norm": 2.3371921606203365,
"learning_rate": 9.849163073043223e-06,
"loss": 1.2579,
"step": 81
},
{
"epoch": 0.5239616613418531,
"grad_norm": 1.9862725064766693,
"learning_rate": 9.83993275707364e-06,
"loss": 1.128,
"step": 82
},
{
"epoch": 0.5303514376996805,
"grad_norm": 2.0299267603970046,
"learning_rate": 9.830432932356207e-06,
"loss": 1.06,
"step": 83
},
{
"epoch": 0.536741214057508,
"grad_norm": 1.967123761472139,
"learning_rate": 9.820664127882958e-06,
"loss": 1.1022,
"step": 84
},
{
"epoch": 0.5431309904153354,
"grad_norm": 2.0479944136804407,
"learning_rate": 9.8106268876239e-06,
"loss": 1.3196,
"step": 85
},
{
"epoch": 0.549520766773163,
"grad_norm": 2.0899371871761714,
"learning_rate": 9.800321770496726e-06,
"loss": 1.2082,
"step": 86
},
{
"epoch": 0.5559105431309904,
"grad_norm": 1.9264654398492607,
"learning_rate": 9.789749350335693e-06,
"loss": 1.0647,
"step": 87
},
{
"epoch": 0.5623003194888179,
"grad_norm": 2.404003249982565,
"learning_rate": 9.778910215859666e-06,
"loss": 1.1614,
"step": 88
},
{
"epoch": 0.5686900958466453,
"grad_norm": 2.2535640052562123,
"learning_rate": 9.767804970639338e-06,
"loss": 1.29,
"step": 89
},
{
"epoch": 0.5750798722044729,
"grad_norm": 1.7435884422501948,
"learning_rate": 9.756434233063616e-06,
"loss": 1.1508,
"step": 90
},
{
"epoch": 0.5814696485623003,
"grad_norm": 2.1089410317587602,
"learning_rate": 9.744798636305189e-06,
"loss": 1.0881,
"step": 91
},
{
"epoch": 0.5878594249201278,
"grad_norm": 1.998448289395409,
"learning_rate": 9.732898828285273e-06,
"loss": 1.2313,
"step": 92
},
{
"epoch": 0.5942492012779552,
"grad_norm": 2.167555925222082,
"learning_rate": 9.72073547163753e-06,
"loss": 1.1593,
"step": 93
},
{
"epoch": 0.6006389776357828,
"grad_norm": 2.057046306061941,
"learning_rate": 9.708309243671167e-06,
"loss": 1.1188,
"step": 94
},
{
"epoch": 0.6070287539936102,
"grad_norm": 2.0592362759109073,
"learning_rate": 9.695620836333219e-06,
"loss": 1.2007,
"step": 95
},
{
"epoch": 0.6134185303514377,
"grad_norm": 2.212221040092753,
"learning_rate": 9.68267095617003e-06,
"loss": 1.2359,
"step": 96
},
{
"epoch": 0.6198083067092651,
"grad_norm": 2.0841357313666706,
"learning_rate": 9.669460324287899e-06,
"loss": 1.0926,
"step": 97
},
{
"epoch": 0.6261980830670927,
"grad_norm": 2.076701412548829,
"learning_rate": 9.655989676312918e-06,
"loss": 1.3485,
"step": 98
},
{
"epoch": 0.6325878594249201,
"grad_norm": 1.9517624724174427,
"learning_rate": 9.642259762350034e-06,
"loss": 1.149,
"step": 99
},
{
"epoch": 0.6389776357827476,
"grad_norm": 2.2261739182237834,
"learning_rate": 9.628271346941252e-06,
"loss": 1.1836,
"step": 100
},
{
"epoch": 0.645367412140575,
"grad_norm": 1.92940622703802,
"learning_rate": 9.614025209023084e-06,
"loss": 1.0385,
"step": 101
},
{
"epoch": 0.6517571884984026,
"grad_norm": 2.136053016131896,
"learning_rate": 9.59952214188316e-06,
"loss": 1.1348,
"step": 102
},
{
"epoch": 0.65814696485623,
"grad_norm": 2.3240551796672286,
"learning_rate": 9.58476295311606e-06,
"loss": 1.1955,
"step": 103
},
{
"epoch": 0.6645367412140575,
"grad_norm": 1.9948022400284593,
"learning_rate": 9.569748464578343e-06,
"loss": 1.3491,
"step": 104
},
{
"epoch": 0.670926517571885,
"grad_norm": 2.298108412072366,
"learning_rate": 9.554479512342785e-06,
"loss": 1.1793,
"step": 105
},
{
"epoch": 0.6773162939297125,
"grad_norm": 2.231720080665685,
"learning_rate": 9.538956946651816e-06,
"loss": 1.1294,
"step": 106
},
{
"epoch": 0.6837060702875399,
"grad_norm": 1.9603128234576142,
"learning_rate": 9.52318163187018e-06,
"loss": 1.0876,
"step": 107
},
{
"epoch": 0.6900958466453674,
"grad_norm": 2.1392812148029736,
"learning_rate": 9.507154446436806e-06,
"loss": 1.1633,
"step": 108
},
{
"epoch": 0.6964856230031949,
"grad_norm": 2.272410499944869,
"learning_rate": 9.490876282815884e-06,
"loss": 1.2247,
"step": 109
},
{
"epoch": 0.7028753993610224,
"grad_norm": 2.1469318052505706,
"learning_rate": 9.474348047447177e-06,
"loss": 1.0897,
"step": 110
},
{
"epoch": 0.7092651757188498,
"grad_norm": 2.159780969055711,
"learning_rate": 9.457570660695542e-06,
"loss": 1.2255,
"step": 111
},
{
"epoch": 0.7156549520766773,
"grad_norm": 2.4686968074205726,
"learning_rate": 9.440545056799677e-06,
"loss": 1.1641,
"step": 112
},
{
"epoch": 0.7220447284345048,
"grad_norm": 2.2547862843159776,
"learning_rate": 9.423272183820109e-06,
"loss": 1.1979,
"step": 113
},
{
"epoch": 0.7284345047923323,
"grad_norm": 2.27724473444956,
"learning_rate": 9.405753003586396e-06,
"loss": 1.2557,
"step": 114
},
{
"epoch": 0.7348242811501597,
"grad_norm": 2.5456321939585216,
"learning_rate": 9.387988491643558e-06,
"loss": 1.1468,
"step": 115
},
{
"epoch": 0.7412140575079872,
"grad_norm": 2.3676784879006836,
"learning_rate": 9.369979637197774e-06,
"loss": 1.2817,
"step": 116
},
{
"epoch": 0.7476038338658147,
"grad_norm": 2.071452107560449,
"learning_rate": 9.351727443061284e-06,
"loss": 1.2346,
"step": 117
},
{
"epoch": 0.7539936102236422,
"grad_norm": 1.9877025274939035,
"learning_rate": 9.33323292559655e-06,
"loss": 1.1517,
"step": 118
},
{
"epoch": 0.7603833865814696,
"grad_norm": 2.193988573947218,
"learning_rate": 9.31449711465967e-06,
"loss": 1.1592,
"step": 119
},
{
"epoch": 0.7667731629392971,
"grad_norm": 1.8767489607648484,
"learning_rate": 9.29552105354302e-06,
"loss": 1.237,
"step": 120
},
{
"epoch": 0.7731629392971247,
"grad_norm": 2.1030635199225407,
"learning_rate": 9.27630579891716e-06,
"loss": 1.2034,
"step": 121
},
{
"epoch": 0.7795527156549521,
"grad_norm": 2.027092141423695,
"learning_rate": 9.256852420771999e-06,
"loss": 1.0951,
"step": 122
},
{
"epoch": 0.7859424920127795,
"grad_norm": 2.1632209630049224,
"learning_rate": 9.237162002357214e-06,
"loss": 1.1327,
"step": 123
},
{
"epoch": 0.792332268370607,
"grad_norm": 2.121172839950111,
"learning_rate": 9.217235640121927e-06,
"loss": 1.2099,
"step": 124
},
{
"epoch": 0.7987220447284346,
"grad_norm": 2.2253932467304867,
"learning_rate": 9.197074443653643e-06,
"loss": 1.1512,
"step": 125
},
{
"epoch": 0.805111821086262,
"grad_norm": 2.0054498620669405,
"learning_rate": 9.176679535616477e-06,
"loss": 1.1289,
"step": 126
},
{
"epoch": 0.8115015974440895,
"grad_norm": 2.141568588127354,
"learning_rate": 9.156052051688633e-06,
"loss": 1.1781,
"step": 127
},
{
"epoch": 0.8178913738019169,
"grad_norm": 1.9917103164782524,
"learning_rate": 9.135193140499155e-06,
"loss": 1.303,
"step": 128
},
{
"epoch": 0.8242811501597445,
"grad_norm": 2.109393580631002,
"learning_rate": 9.114103963563986e-06,
"loss": 1.218,
"step": 129
},
{
"epoch": 0.8306709265175719,
"grad_norm": 1.9608467965910812,
"learning_rate": 9.092785695221271e-06,
"loss": 1.1861,
"step": 130
},
{
"epoch": 0.8370607028753994,
"grad_norm": 2.2527371323982286,
"learning_rate": 9.071239522565978e-06,
"loss": 1.1471,
"step": 131
},
{
"epoch": 0.8434504792332268,
"grad_norm": 2.2882037789195935,
"learning_rate": 9.049466645383785e-06,
"loss": 1.2044,
"step": 132
},
{
"epoch": 0.8498402555910544,
"grad_norm": 1.8783108140741698,
"learning_rate": 9.027468276084274e-06,
"loss": 1.0654,
"step": 133
},
{
"epoch": 0.8562300319488818,
"grad_norm": 2.181556195900633,
"learning_rate": 9.00524563963343e-06,
"loss": 1.1876,
"step": 134
},
{
"epoch": 0.8626198083067093,
"grad_norm": 2.1118568627241214,
"learning_rate": 8.982799973485407e-06,
"loss": 1.149,
"step": 135
},
{
"epoch": 0.8690095846645367,
"grad_norm": 1.9977631103473594,
"learning_rate": 8.960132527513642e-06,
"loss": 1.241,
"step": 136
},
{
"epoch": 0.8753993610223643,
"grad_norm": 2.175594875693007,
"learning_rate": 8.937244563941248e-06,
"loss": 1.1798,
"step": 137
},
{
"epoch": 0.8817891373801917,
"grad_norm": 1.957109658159744,
"learning_rate": 8.914137357270723e-06,
"loss": 1.1644,
"step": 138
},
{
"epoch": 0.8881789137380192,
"grad_norm": 2.2821449104582467,
"learning_rate": 8.890812194212987e-06,
"loss": 1.3045,
"step": 139
},
{
"epoch": 0.8945686900958466,
"grad_norm": 2.0225222189950713,
"learning_rate": 8.867270373615735e-06,
"loss": 1.252,
"step": 140
},
{
"epoch": 0.9009584664536742,
"grad_norm": 2.2060119696696066,
"learning_rate": 8.8435132063911e-06,
"loss": 1.1079,
"step": 141
},
{
"epoch": 0.9073482428115016,
"grad_norm": 2.268401775396252,
"learning_rate": 8.81954201544267e-06,
"loss": 1.1864,
"step": 142
},
{
"epoch": 0.9137380191693291,
"grad_norm": 2.237757757221249,
"learning_rate": 8.79535813559181e-06,
"loss": 1.1418,
"step": 143
},
{
"epoch": 0.9201277955271565,
"grad_norm": 2.365685901904334,
"learning_rate": 8.77096291350334e-06,
"loss": 1.3222,
"step": 144
},
{
"epoch": 0.9265175718849841,
"grad_norm": 2.141967663619419,
"learning_rate": 8.746357707610544e-06,
"loss": 1.1195,
"step": 145
},
{
"epoch": 0.9329073482428115,
"grad_norm": 2.0672229679280503,
"learning_rate": 8.721543888039534e-06,
"loss": 1.1288,
"step": 146
},
{
"epoch": 0.939297124600639,
"grad_norm": 2.0223211301618003,
"learning_rate": 8.69652283653294e-06,
"loss": 1.2964,
"step": 147
},
{
"epoch": 0.9456869009584664,
"grad_norm": 2.288439921668011,
"learning_rate": 8.671295946372989e-06,
"loss": 1.0529,
"step": 148
},
{
"epoch": 0.952076677316294,
"grad_norm": 2.094215089439829,
"learning_rate": 8.6458646223039e-06,
"loss": 1.1668,
"step": 149
},
{
"epoch": 0.9584664536741214,
"grad_norm": 2.036960202702411,
"learning_rate": 8.620230280453672e-06,
"loss": 1.1601,
"step": 150
},
{
"epoch": 0.9648562300319489,
"grad_norm": 2.308069594448892,
"learning_rate": 8.594394348255239e-06,
"loss": 1.2524,
"step": 151
},
{
"epoch": 0.9712460063897763,
"grad_norm": 2.073184647781397,
"learning_rate": 8.568358264366958e-06,
"loss": 1.3667,
"step": 152
},
{
"epoch": 0.9776357827476039,
"grad_norm": 2.6124688635819275,
"learning_rate": 8.542123478592518e-06,
"loss": 1.0973,
"step": 153
},
{
"epoch": 0.9840255591054313,
"grad_norm": 2.023174608703274,
"learning_rate": 8.515691451800206e-06,
"loss": 1.2798,
"step": 154
},
{
"epoch": 0.9904153354632588,
"grad_norm": 2.1839751935320963,
"learning_rate": 8.489063655841552e-06,
"loss": 1.1375,
"step": 155
},
{
"epoch": 0.9968051118210862,
"grad_norm": 2.114621211140617,
"learning_rate": 8.462241573469378e-06,
"loss": 0.9936,
"step": 156
},
{
"epoch": 1.0,
"grad_norm": 2.114621211140617,
"learning_rate": 8.435226698255228e-06,
"loss": 1.137,
"step": 157
},
{
"epoch": 1.0063897763578276,
"grad_norm": 3.4867497812189288,
"learning_rate": 8.408020534506195e-06,
"loss": 0.8376,
"step": 158
},
{
"epoch": 1.012779552715655,
"grad_norm": 2.5440510855222676,
"learning_rate": 8.380624597181165e-06,
"loss": 0.8312,
"step": 159
},
{
"epoch": 1.0191693290734825,
"grad_norm": 2.2596612524017186,
"learning_rate": 8.353040411806449e-06,
"loss": 0.7729,
"step": 160
},
{
"epoch": 1.0255591054313098,
"grad_norm": 1.8463551184455353,
"learning_rate": 8.325269514390835e-06,
"loss": 0.8626,
"step": 161
},
{
"epoch": 1.0319488817891374,
"grad_norm": 2.0882217693894813,
"learning_rate": 8.297313451340064e-06,
"loss": 0.7676,
"step": 162
},
{
"epoch": 1.038338658146965,
"grad_norm": 2.0883744868398226,
"learning_rate": 8.269173779370712e-06,
"loss": 0.9419,
"step": 163
},
{
"epoch": 1.0447284345047922,
"grad_norm": 1.7943635404247467,
"learning_rate": 8.240852065423507e-06,
"loss": 0.7972,
"step": 164
},
{
"epoch": 1.0511182108626198,
"grad_norm": 1.9220993590643272,
"learning_rate": 8.21234988657607e-06,
"loss": 0.8412,
"step": 165
},
{
"epoch": 1.0575079872204474,
"grad_norm": 1.8676951285327783,
"learning_rate": 8.183668829955111e-06,
"loss": 0.8471,
"step": 166
},
{
"epoch": 1.0638977635782747,
"grad_norm": 1.9115531535231205,
"learning_rate": 8.154810492648038e-06,
"loss": 0.8021,
"step": 167
},
{
"epoch": 1.0702875399361023,
"grad_norm": 1.732608310176797,
"learning_rate": 8.125776481614025e-06,
"loss": 0.6606,
"step": 168
},
{
"epoch": 1.0766773162939298,
"grad_norm": 1.9131580562412362,
"learning_rate": 8.096568413594533e-06,
"loss": 0.7635,
"step": 169
},
{
"epoch": 1.0830670926517572,
"grad_norm": 1.8959582550313192,
"learning_rate": 8.067187915023283e-06,
"loss": 0.6924,
"step": 170
},
{
"epoch": 1.0894568690095847,
"grad_norm": 2.162226106303363,
"learning_rate": 8.037636621935686e-06,
"loss": 0.7722,
"step": 171
},
{
"epoch": 1.095846645367412,
"grad_norm": 2.387269781570661,
"learning_rate": 8.007916179877742e-06,
"loss": 0.7872,
"step": 172
},
{
"epoch": 1.1022364217252396,
"grad_norm": 1.8369220466875913,
"learning_rate": 7.978028243814416e-06,
"loss": 0.7746,
"step": 173
},
{
"epoch": 1.1086261980830672,
"grad_norm": 2.3362355498895853,
"learning_rate": 7.947974478037468e-06,
"loss": 0.7845,
"step": 174
},
{
"epoch": 1.1150159744408945,
"grad_norm": 2.0503755528528553,
"learning_rate": 7.917756556072792e-06,
"loss": 0.7205,
"step": 175
},
{
"epoch": 1.121405750798722,
"grad_norm": 2.165347300861018,
"learning_rate": 7.887376160587214e-06,
"loss": 0.8152,
"step": 176
},
{
"epoch": 1.1277955271565494,
"grad_norm": 2.2635558547519747,
"learning_rate": 7.85683498329481e-06,
"loss": 0.6753,
"step": 177
},
{
"epoch": 1.134185303514377,
"grad_norm": 1.9110658352060157,
"learning_rate": 7.826134724862687e-06,
"loss": 0.825,
"step": 178
},
{
"epoch": 1.1405750798722045,
"grad_norm": 2.2263812309151825,
"learning_rate": 7.795277094816292e-06,
"loss": 0.8166,
"step": 179
},
{
"epoch": 1.1469648562300319,
"grad_norm": 1.8575499012395842,
"learning_rate": 7.764263811444214e-06,
"loss": 0.831,
"step": 180
},
{
"epoch": 1.1533546325878594,
"grad_norm": 2.360397566704423,
"learning_rate": 7.733096601702508e-06,
"loss": 0.8137,
"step": 181
},
{
"epoch": 1.159744408945687,
"grad_norm": 2.2444203392924895,
"learning_rate": 7.70177720111852e-06,
"loss": 0.6924,
"step": 182
},
{
"epoch": 1.1661341853035143,
"grad_norm": 1.6246406063388128,
"learning_rate": 7.67030735369426e-06,
"loss": 0.7463,
"step": 183
},
{
"epoch": 1.1725239616613419,
"grad_norm": 2.173100756669063,
"learning_rate": 7.638688811809274e-06,
"loss": 0.7659,
"step": 184
},
{
"epoch": 1.1789137380191694,
"grad_norm": 2.0758535951181325,
"learning_rate": 7.6069233361230696e-06,
"loss": 0.8334,
"step": 185
},
{
"epoch": 1.1853035143769968,
"grad_norm": 2.0584161403900088,
"learning_rate": 7.575012695477076e-06,
"loss": 0.7935,
"step": 186
},
{
"epoch": 1.1916932907348243,
"grad_norm": 2.1564370807060524,
"learning_rate": 7.542958666796149e-06,
"loss": 0.7672,
"step": 187
},
{
"epoch": 1.1980830670926517,
"grad_norm": 1.8307453253127797,
"learning_rate": 7.510763034989616e-06,
"loss": 0.6783,
"step": 188
},
{
"epoch": 1.2044728434504792,
"grad_norm": 1.8290500329133041,
"learning_rate": 7.478427592851894e-06,
"loss": 0.6824,
"step": 189
},
{
"epoch": 1.2108626198083068,
"grad_norm": 2.0174580703448037,
"learning_rate": 7.44595414096265e-06,
"loss": 0.7652,
"step": 190
},
{
"epoch": 1.2172523961661341,
"grad_norm": 1.9899638513663098,
"learning_rate": 7.413344487586542e-06,
"loss": 0.7497,
"step": 191
},
{
"epoch": 1.2236421725239617,
"grad_norm": 2.4723485730809243,
"learning_rate": 7.380600448572532e-06,
"loss": 0.8049,
"step": 192
},
{
"epoch": 1.230031948881789,
"grad_norm": 2.034861515675997,
"learning_rate": 7.347723847252756e-06,
"loss": 0.8709,
"step": 193
},
{
"epoch": 1.2364217252396166,
"grad_norm": 2.138975439198192,
"learning_rate": 7.314716514341007e-06,
"loss": 0.6953,
"step": 194
},
{
"epoch": 1.2428115015974441,
"grad_norm": 1.9632002426077662,
"learning_rate": 7.28158028783079e-06,
"loss": 0.7643,
"step": 195
},
{
"epoch": 1.2492012779552715,
"grad_norm": 2.1518741495487625,
"learning_rate": 7.248317012892969e-06,
"loss": 0.7793,
"step": 196
},
{
"epoch": 1.255591054313099,
"grad_norm": 2.449964832983734,
"learning_rate": 7.214928541773027e-06,
"loss": 0.7526,
"step": 197
},
{
"epoch": 1.2619808306709266,
"grad_norm": 2.069692679407715,
"learning_rate": 7.1814167336879195e-06,
"loss": 0.6993,
"step": 198
},
{
"epoch": 1.268370607028754,
"grad_norm": 2.1802916449943734,
"learning_rate": 7.147783454722545e-06,
"loss": 0.5996,
"step": 199
},
{
"epoch": 1.2747603833865815,
"grad_norm": 2.1168439959277086,
"learning_rate": 7.1140305777258355e-06,
"loss": 0.8294,
"step": 200
},
{
"epoch": 1.281150159744409,
"grad_norm": 1.8184694551749518,
"learning_rate": 7.080159982206471e-06,
"loss": 0.6787,
"step": 201
},
{
"epoch": 1.2875399361022364,
"grad_norm": 1.902418537487257,
"learning_rate": 7.046173554228213e-06,
"loss": 0.8076,
"step": 202
},
{
"epoch": 1.293929712460064,
"grad_norm": 1.9892137836089137,
"learning_rate": 7.012073186304885e-06,
"loss": 0.8541,
"step": 203
},
{
"epoch": 1.3003194888178915,
"grad_norm": 2.3335747045667903,
"learning_rate": 6.9778607772949894e-06,
"loss": 0.7712,
"step": 204
},
{
"epoch": 1.3067092651757188,
"grad_norm": 1.983789823335996,
"learning_rate": 6.943538232295965e-06,
"loss": 0.7885,
"step": 205
},
{
"epoch": 1.3130990415335464,
"grad_norm": 2.149089239306387,
"learning_rate": 6.909107462538113e-06,
"loss": 0.786,
"step": 206
},
{
"epoch": 1.3194888178913737,
"grad_norm": 2.3740320043022356,
"learning_rate": 6.874570385278161e-06,
"loss": 0.6849,
"step": 207
},
{
"epoch": 1.3258785942492013,
"grad_norm": 2.0274327595584727,
"learning_rate": 6.839928923692505e-06,
"loss": 0.6331,
"step": 208
},
{
"epoch": 1.3322683706070286,
"grad_norm": 1.9834070538746025,
"learning_rate": 6.805185006770125e-06,
"loss": 0.7026,
"step": 209
},
{
"epoch": 1.3386581469648562,
"grad_norm": 1.8332634885192054,
"learning_rate": 6.7703405692051585e-06,
"loss": 0.7521,
"step": 210
},
{
"epoch": 1.3450479233226837,
"grad_norm": 2.3224931980911303,
"learning_rate": 6.735397551289179e-06,
"loss": 0.8352,
"step": 211
},
{
"epoch": 1.351437699680511,
"grad_norm": 2.5988749953217383,
"learning_rate": 6.700357898803146e-06,
"loss": 0.7261,
"step": 212
},
{
"epoch": 1.3578274760383386,
"grad_norm": 2.0025941890434518,
"learning_rate": 6.665223562909058e-06,
"loss": 0.752,
"step": 213
},
{
"epoch": 1.3642172523961662,
"grad_norm": 1.8512272814617676,
"learning_rate": 6.629996500041299e-06,
"loss": 0.7175,
"step": 214
},
{
"epoch": 1.3706070287539935,
"grad_norm": 2.221397828681688,
"learning_rate": 6.5946786717977026e-06,
"loss": 0.7518,
"step": 215
},
{
"epoch": 1.376996805111821,
"grad_norm": 2.0249123351492155,
"learning_rate": 6.5592720448303174e-06,
"loss": 0.8235,
"step": 216
},
{
"epoch": 1.3833865814696487,
"grad_norm": 2.18979667251631,
"learning_rate": 6.523778590735892e-06,
"loss": 0.9315,
"step": 217
},
{
"epoch": 1.389776357827476,
"grad_norm": 1.8960015883246881,
"learning_rate": 6.488200285946094e-06,
"loss": 0.7076,
"step": 218
},
{
"epoch": 1.3961661341853036,
"grad_norm": 1.8352798094679064,
"learning_rate": 6.452539111617454e-06,
"loss": 0.7501,
"step": 219
},
{
"epoch": 1.4025559105431311,
"grad_norm": 2.097097886969805,
"learning_rate": 6.416797053521039e-06,
"loss": 0.7881,
"step": 220
},
{
"epoch": 1.4089456869009584,
"grad_norm": 1.9059659683647745,
"learning_rate": 6.380976101931879e-06,
"loss": 0.7922,
"step": 221
},
{
"epoch": 1.415335463258786,
"grad_norm": 2.1954958084121965,
"learning_rate": 6.345078251518144e-06,
"loss": 0.8997,
"step": 222
},
{
"epoch": 1.4217252396166133,
"grad_norm": 1.966127388072899,
"learning_rate": 6.3091055012300675e-06,
"loss": 0.7675,
"step": 223
},
{
"epoch": 1.428115015974441,
"grad_norm": 2.3361083036828094,
"learning_rate": 6.273059854188636e-06,
"loss": 0.8397,
"step": 224
},
{
"epoch": 1.4345047923322682,
"grad_norm": 2.139956577956786,
"learning_rate": 6.236943317574054e-06,
"loss": 0.7714,
"step": 225
},
{
"epoch": 1.4408945686900958,
"grad_norm": 1.9866600216210257,
"learning_rate": 6.200757902513962e-06,
"loss": 0.8059,
"step": 226
},
{
"epoch": 1.4472843450479234,
"grad_norm": 2.259805416277339,
"learning_rate": 6.164505623971458e-06,
"loss": 0.802,
"step": 227
},
{
"epoch": 1.4536741214057507,
"grad_norm": 1.8782576150494317,
"learning_rate": 6.128188500632892e-06,
"loss": 0.7065,
"step": 228
},
{
"epoch": 1.4600638977635783,
"grad_norm": 2.1548044609256314,
"learning_rate": 6.091808554795462e-06,
"loss": 0.853,
"step": 229
},
{
"epoch": 1.4664536741214058,
"grad_norm": 2.1364715239439978,
"learning_rate": 6.055367812254592e-06,
"loss": 0.8676,
"step": 230
},
{
"epoch": 1.4728434504792332,
"grad_norm": 1.9045652390431531,
"learning_rate": 6.0188683021911394e-06,
"loss": 0.8338,
"step": 231
},
{
"epoch": 1.4792332268370607,
"grad_norm": 2.10885335410803,
"learning_rate": 5.982312057058392e-06,
"loss": 0.829,
"step": 232
},
{
"epoch": 1.4856230031948883,
"grad_norm": 2.1881709571010486,
"learning_rate": 5.9457011124689025e-06,
"loss": 0.9104,
"step": 233
},
{
"epoch": 1.4920127795527156,
"grad_norm": 2.3768700262231093,
"learning_rate": 5.9090375070811215e-06,
"loss": 0.6876,
"step": 234
},
{
"epoch": 1.4984025559105432,
"grad_norm": 2.261550292871675,
"learning_rate": 5.872323282485889e-06,
"loss": 0.7453,
"step": 235
},
{
"epoch": 1.5047923322683707,
"grad_norm": 1.9988487840521534,
"learning_rate": 5.835560483092743e-06,
"loss": 0.8449,
"step": 236
},
{
"epoch": 1.511182108626198,
"grad_norm": 2.0224359380963652,
"learning_rate": 5.798751156016085e-06,
"loss": 0.8223,
"step": 237
},
{
"epoch": 1.5175718849840254,
"grad_norm": 2.062039968011777,
"learning_rate": 5.7618973509611755e-06,
"loss": 0.8195,
"step": 238
},
{
"epoch": 1.5239616613418532,
"grad_norm": 1.6157234928139612,
"learning_rate": 5.72500112011001e-06,
"loss": 0.7592,
"step": 239
},
{
"epoch": 1.5303514376996805,
"grad_norm": 1.9080273993370855,
"learning_rate": 5.688064518007036e-06,
"loss": 0.7381,
"step": 240
},
{
"epoch": 1.5367412140575079,
"grad_norm": 2.2625716403086344,
"learning_rate": 5.651089601444752e-06,
"loss": 0.8195,
"step": 241
},
{
"epoch": 1.5431309904153354,
"grad_norm": 2.0895666610527126,
"learning_rate": 5.614078429349172e-06,
"loss": 0.7222,
"step": 242
},
{
"epoch": 1.549520766773163,
"grad_norm": 1.99029735584039,
"learning_rate": 5.577033062665179e-06,
"loss": 0.838,
"step": 243
},
{
"epoch": 1.5559105431309903,
"grad_norm": 2.1194831420040985,
"learning_rate": 5.53995556424176e-06,
"loss": 0.707,
"step": 244
},
{
"epoch": 1.5623003194888179,
"grad_norm": 2.3137301949010984,
"learning_rate": 5.50284799871714e-06,
"loss": 0.8003,
"step": 245
},
{
"epoch": 1.5686900958466454,
"grad_norm": 2.2464503074012674,
"learning_rate": 5.465712432403812e-06,
"loss": 0.8576,
"step": 246
},
{
"epoch": 1.5750798722044728,
"grad_norm": 2.320906889466704,
"learning_rate": 5.428550933173476e-06,
"loss": 0.916,
"step": 247
},
{
"epoch": 1.5814696485623003,
"grad_norm": 2.099438841370686,
"learning_rate": 5.391365570341893e-06,
"loss": 0.7929,
"step": 248
},
{
"epoch": 1.5878594249201279,
"grad_norm": 1.859084494736742,
"learning_rate": 5.3541584145536475e-06,
"loss": 0.6533,
"step": 249
},
{
"epoch": 1.5942492012779552,
"grad_norm": 2.278505423549853,
"learning_rate": 5.3169315376668566e-06,
"loss": 0.7552,
"step": 250
},
{
"epoch": 1.6006389776357828,
"grad_norm": 1.6860670526764372,
"learning_rate": 5.279687012637798e-06,
"loss": 0.8399,
"step": 251
},
{
"epoch": 1.6070287539936103,
"grad_norm": 2.335897193410633,
"learning_rate": 5.242426913405471e-06,
"loss": 0.7687,
"step": 252
},
{
"epoch": 1.6134185303514377,
"grad_norm": 2.060267209744552,
"learning_rate": 5.2051533147761155e-06,
"loss": 0.7508,
"step": 253
},
{
"epoch": 1.619808306709265,
"grad_norm": 2.1497401104875373,
"learning_rate": 5.167868292307679e-06,
"loss": 0.7592,
"step": 254
},
{
"epoch": 1.6261980830670928,
"grad_norm": 1.7903715915769907,
"learning_rate": 5.130573922194236e-06,
"loss": 0.801,
"step": 255
},
{
"epoch": 1.6325878594249201,
"grad_norm": 1.994091783861481,
"learning_rate": 5.093272281150383e-06,
"loss": 0.7861,
"step": 256
},
{
"epoch": 1.6389776357827475,
"grad_norm": 2.131683372343815,
"learning_rate": 5.05596544629559e-06,
"loss": 0.8001,
"step": 257
},
{
"epoch": 1.645367412140575,
"grad_norm": 2.1302879758896855,
"learning_rate": 5.018655495038542e-06,
"loss": 0.8043,
"step": 258
},
{
"epoch": 1.6517571884984026,
"grad_norm": 1.8253111170736016,
"learning_rate": 4.981344504961459e-06,
"loss": 0.8891,
"step": 259
},
{
"epoch": 1.65814696485623,
"grad_norm": 1.9749425046329303,
"learning_rate": 4.944034553704412e-06,
"loss": 0.8087,
"step": 260
},
{
"epoch": 1.6645367412140575,
"grad_norm": 2.193811659404884,
"learning_rate": 4.906727718849619e-06,
"loss": 0.7552,
"step": 261
},
{
"epoch": 1.670926517571885,
"grad_norm": 2.1765708924053575,
"learning_rate": 4.8694260778057655e-06,
"loss": 0.7897,
"step": 262
},
{
"epoch": 1.6773162939297124,
"grad_norm": 2.0608183940458398,
"learning_rate": 4.832131707692322e-06,
"loss": 0.7122,
"step": 263
},
{
"epoch": 1.68370607028754,
"grad_norm": 2.1978305930011555,
"learning_rate": 4.7948466852238844e-06,
"loss": 0.8197,
"step": 264
},
{
"epoch": 1.6900958466453675,
"grad_norm": 2.299856637633407,
"learning_rate": 4.757573086594529e-06,
"loss": 0.9011,
"step": 265
},
{
"epoch": 1.6964856230031948,
"grad_norm": 2.1505800913123707,
"learning_rate": 4.720312987362204e-06,
"loss": 0.784,
"step": 266
},
{
"epoch": 1.7028753993610224,
"grad_norm": 2.1562863338865714,
"learning_rate": 4.683068462333144e-06,
"loss": 0.8057,
"step": 267
},
{
"epoch": 1.70926517571885,
"grad_norm": 2.284446615571358,
"learning_rate": 4.645841585446356e-06,
"loss": 0.8029,
"step": 268
},
{
"epoch": 1.7156549520766773,
"grad_norm": 2.0510265573141413,
"learning_rate": 4.6086344296581095e-06,
"loss": 0.7218,
"step": 269
},
{
"epoch": 1.7220447284345048,
"grad_norm": 2.1687340523353353,
"learning_rate": 4.5714490668265245e-06,
"loss": 0.9969,
"step": 270
},
{
"epoch": 1.7284345047923324,
"grad_norm": 2.0736960494924923,
"learning_rate": 4.534287567596189e-06,
"loss": 0.85,
"step": 271
},
{
"epoch": 1.7348242811501597,
"grad_norm": 1.9770333120267458,
"learning_rate": 4.497152001282861e-06,
"loss": 0.8745,
"step": 272
},
{
"epoch": 1.741214057507987,
"grad_norm": 2.084503899334248,
"learning_rate": 4.460044435758241e-06,
"loss": 0.788,
"step": 273
},
{
"epoch": 1.7476038338658149,
"grad_norm": 2.0686608595640084,
"learning_rate": 4.4229669373348225e-06,
"loss": 0.7217,
"step": 274
},
{
"epoch": 1.7539936102236422,
"grad_norm": 1.8726775800589226,
"learning_rate": 4.3859215706508295e-06,
"loss": 0.9332,
"step": 275
},
{
"epoch": 1.7603833865814695,
"grad_norm": 2.321014760523366,
"learning_rate": 4.348910398555249e-06,
"loss": 0.848,
"step": 276
},
{
"epoch": 1.766773162939297,
"grad_norm": 2.1582714047829903,
"learning_rate": 4.311935481992965e-06,
"loss": 0.6687,
"step": 277
},
{
"epoch": 1.7731629392971247,
"grad_norm": 2.0830550177571796,
"learning_rate": 4.274998879889991e-06,
"loss": 0.7575,
"step": 278
},
{
"epoch": 1.779552715654952,
"grad_norm": 2.1028578941021845,
"learning_rate": 4.238102649038825e-06,
"loss": 0.7409,
"step": 279
},
{
"epoch": 1.7859424920127795,
"grad_norm": 1.8878312811111115,
"learning_rate": 4.2012488439839185e-06,
"loss": 0.8274,
"step": 280
},
{
"epoch": 1.792332268370607,
"grad_norm": 2.291366102734256,
"learning_rate": 4.164439516907258e-06,
"loss": 0.7611,
"step": 281
},
{
"epoch": 1.7987220447284344,
"grad_norm": 1.916507380045079,
"learning_rate": 4.127676717514114e-06,
"loss": 0.5988,
"step": 282
},
{
"epoch": 1.805111821086262,
"grad_norm": 2.2720293658938027,
"learning_rate": 4.090962492918881e-06,
"loss": 0.8268,
"step": 283
},
{
"epoch": 1.8115015974440896,
"grad_norm": 1.9518349825587402,
"learning_rate": 4.054298887531099e-06,
"loss": 0.7334,
"step": 284
},
{
"epoch": 1.817891373801917,
"grad_norm": 2.140363550906506,
"learning_rate": 4.017687942941609e-06,
"loss": 0.7335,
"step": 285
},
{
"epoch": 1.8242811501597445,
"grad_norm": 1.9883426651308305,
"learning_rate": 3.981131697808862e-06,
"loss": 0.828,
"step": 286
},
{
"epoch": 1.830670926517572,
"grad_norm": 1.951493400947361,
"learning_rate": 3.94463218774541e-06,
"loss": 0.6701,
"step": 287
},
{
"epoch": 1.8370607028753994,
"grad_norm": 1.714247520762039,
"learning_rate": 3.90819144520454e-06,
"loss": 0.7093,
"step": 288
},
{
"epoch": 1.8434504792332267,
"grad_norm": 2.0546468817069505,
"learning_rate": 3.8718114993671086e-06,
"loss": 0.777,
"step": 289
},
{
"epoch": 1.8498402555910545,
"grad_norm": 2.0974753085677222,
"learning_rate": 3.835494376028544e-06,
"loss": 0.7093,
"step": 290
},
{
"epoch": 1.8562300319488818,
"grad_norm": 2.286892348662737,
"learning_rate": 3.799242097486038e-06,
"loss": 0.8641,
"step": 291
},
{
"epoch": 1.8626198083067091,
"grad_norm": 2.1470027841795614,
"learning_rate": 3.7630566824259456e-06,
"loss": 0.7596,
"step": 292
},
{
"epoch": 1.8690095846645367,
"grad_norm": 2.1876379291519887,
"learning_rate": 3.726940145811363e-06,
"loss": 0.9124,
"step": 293
},
{
"epoch": 1.8753993610223643,
"grad_norm": 2.073070287437658,
"learning_rate": 3.6908944987699346e-06,
"loss": 0.7598,
"step": 294
},
{
"epoch": 1.8817891373801916,
"grad_norm": 2.1569863741791466,
"learning_rate": 3.6549217484818576e-06,
"loss": 0.7232,
"step": 295
},
{
"epoch": 1.8881789137380192,
"grad_norm": 2.041941562515545,
"learning_rate": 3.6190238980681235e-06,
"loss": 0.6949,
"step": 296
},
{
"epoch": 1.8945686900958467,
"grad_norm": 2.30271960588349,
"learning_rate": 3.583202946478963e-06,
"loss": 0.8439,
"step": 297
},
{
"epoch": 1.900958466453674,
"grad_norm": 2.0264437896681207,
"learning_rate": 3.5474608883825475e-06,
"loss": 0.7475,
"step": 298
},
{
"epoch": 1.9073482428115016,
"grad_norm": 1.9457581984726628,
"learning_rate": 3.5117997140539073e-06,
"loss": 0.7066,
"step": 299
},
{
"epoch": 1.9137380191693292,
"grad_norm": 1.9462959642151252,
"learning_rate": 3.47622140926411e-06,
"loss": 0.7308,
"step": 300
},
{
"epoch": 1.9201277955271565,
"grad_norm": 2.092610478552592,
"learning_rate": 3.4407279551696846e-06,
"loss": 0.7872,
"step": 301
},
{
"epoch": 1.926517571884984,
"grad_norm": 1.8472874883400652,
"learning_rate": 3.4053213282022983e-06,
"loss": 0.7524,
"step": 302
},
{
"epoch": 1.9329073482428116,
"grad_norm": 1.901162640597834,
"learning_rate": 3.370003499958703e-06,
"loss": 0.7164,
"step": 303
},
{
"epoch": 1.939297124600639,
"grad_norm": 1.8688778106140063,
"learning_rate": 3.334776437090944e-06,
"loss": 0.7011,
"step": 304
},
{
"epoch": 1.9456869009584663,
"grad_norm": 2.356897701654345,
"learning_rate": 3.2996421011968546e-06,
"loss": 0.6435,
"step": 305
},
{
"epoch": 1.952076677316294,
"grad_norm": 2.523583795757241,
"learning_rate": 3.264602448710822e-06,
"loss": 0.76,
"step": 306
},
{
"epoch": 1.9584664536741214,
"grad_norm": 1.9574404195601653,
"learning_rate": 3.2296594307948428e-06,
"loss": 0.8161,
"step": 307
},
{
"epoch": 1.9648562300319488,
"grad_norm": 2.15492898863906,
"learning_rate": 3.194814993229878e-06,
"loss": 0.7746,
"step": 308
},
{
"epoch": 1.9712460063897763,
"grad_norm": 2.010578131919025,
"learning_rate": 3.1600710763074972e-06,
"loss": 0.8067,
"step": 309
},
{
"epoch": 1.9776357827476039,
"grad_norm": 1.991937348455583,
"learning_rate": 3.125429614721842e-06,
"loss": 0.6662,
"step": 310
},
{
"epoch": 1.9840255591054312,
"grad_norm": 2.124227953910074,
"learning_rate": 3.090892537461889e-06,
"loss": 0.8373,
"step": 311
},
{
"epoch": 1.9904153354632588,
"grad_norm": 1.9267525903621767,
"learning_rate": 3.056461767704037e-06,
"loss": 0.7735,
"step": 312
},
{
"epoch": 1.9968051118210863,
"grad_norm": 2.176290715664279,
"learning_rate": 3.0221392227050126e-06,
"loss": 0.83,
"step": 313
},
{
"epoch": 2.0,
"grad_norm": 3.1455065522489942,
"learning_rate": 2.9879268136951163e-06,
"loss": 0.6453,
"step": 314
},
{
"epoch": 2.0063897763578273,
"grad_norm": 3.1822765413878225,
"learning_rate": 2.953826445771788e-06,
"loss": 0.437,
"step": 315
},
{
"epoch": 2.012779552715655,
"grad_norm": 3.366770013317489,
"learning_rate": 2.9198400177935303e-06,
"loss": 0.4245,
"step": 316
},
{
"epoch": 2.0191693290734825,
"grad_norm": 3.26466588175627,
"learning_rate": 2.8859694222741653e-06,
"loss": 0.4096,
"step": 317
},
{
"epoch": 2.02555910543131,
"grad_norm": 3.1916614763418494,
"learning_rate": 2.852216545277456e-06,
"loss": 0.5049,
"step": 318
},
{
"epoch": 2.0319488817891376,
"grad_norm": 2.4873488239851227,
"learning_rate": 2.8185832663120817e-06,
"loss": 0.491,
"step": 319
},
{
"epoch": 2.038338658146965,
"grad_norm": 2.4295793465904474,
"learning_rate": 2.785071458226972e-06,
"loss": 0.4575,
"step": 320
},
{
"epoch": 2.0447284345047922,
"grad_norm": 2.264908078075383,
"learning_rate": 2.7516829871070295e-06,
"loss": 0.5299,
"step": 321
},
{
"epoch": 2.0511182108626196,
"grad_norm": 2.115450964357911,
"learning_rate": 2.718419712169213e-06,
"loss": 0.5705,
"step": 322
},
{
"epoch": 2.0575079872204474,
"grad_norm": 2.1328168895257384,
"learning_rate": 2.685283485658995e-06,
"loss": 0.4552,
"step": 323
},
{
"epoch": 2.0638977635782747,
"grad_norm": 2.522885900225875,
"learning_rate": 2.6522761527472464e-06,
"loss": 0.423,
"step": 324
},
{
"epoch": 2.070287539936102,
"grad_norm": 3.677486298025955,
"learning_rate": 2.6193995514274705e-06,
"loss": 0.5114,
"step": 325
},
{
"epoch": 2.07667731629393,
"grad_norm": 3.147438831845241,
"learning_rate": 2.586655512413458e-06,
"loss": 0.5074,
"step": 326
},
{
"epoch": 2.083067092651757,
"grad_norm": 3.454909400135248,
"learning_rate": 2.554045859037353e-06,
"loss": 0.5319,
"step": 327
},
{
"epoch": 2.0894568690095845,
"grad_norm": 3.2027788217049293,
"learning_rate": 2.521572407148107e-06,
"loss": 0.4018,
"step": 328
},
{
"epoch": 2.0958466453674123,
"grad_norm": 2.82713510067431,
"learning_rate": 2.4892369650103837e-06,
"loss": 0.4096,
"step": 329
},
{
"epoch": 2.1022364217252396,
"grad_norm": 3.3775502635423846,
"learning_rate": 2.4570413332038523e-06,
"loss": 0.4084,
"step": 330
},
{
"epoch": 2.108626198083067,
"grad_norm": 2.5912870135939854,
"learning_rate": 2.4249873045229244e-06,
"loss": 0.5413,
"step": 331
},
{
"epoch": 2.1150159744408947,
"grad_norm": 2.6901578233118477,
"learning_rate": 2.3930766638769325e-06,
"loss": 0.3965,
"step": 332
},
{
"epoch": 2.121405750798722,
"grad_norm": 2.2464354768206967,
"learning_rate": 2.3613111881907273e-06,
"loss": 0.4243,
"step": 333
},
{
"epoch": 2.1277955271565494,
"grad_norm": 2.3073725414205857,
"learning_rate": 2.3296926463057396e-06,
"loss": 0.3954,
"step": 334
},
{
"epoch": 2.134185303514377,
"grad_norm": 2.317628398664997,
"learning_rate": 2.29822279888148e-06,
"loss": 0.4155,
"step": 335
},
{
"epoch": 2.1405750798722045,
"grad_norm": 1.9196452650507594,
"learning_rate": 2.2669033982974946e-06,
"loss": 0.3985,
"step": 336
},
{
"epoch": 2.146964856230032,
"grad_norm": 2.5155514594882864,
"learning_rate": 2.235736188555787e-06,
"loss": 0.3367,
"step": 337
},
{
"epoch": 2.1533546325878596,
"grad_norm": 2.2857947303762924,
"learning_rate": 2.2047229051837107e-06,
"loss": 0.4553,
"step": 338
},
{
"epoch": 2.159744408945687,
"grad_norm": 2.1456084476870556,
"learning_rate": 2.173865275137314e-06,
"loss": 0.4547,
"step": 339
},
{
"epoch": 2.1661341853035143,
"grad_norm": 2.240945856540863,
"learning_rate": 2.143165016705192e-06,
"loss": 0.4202,
"step": 340
},
{
"epoch": 2.1725239616613417,
"grad_norm": 1.9832713189435116,
"learning_rate": 2.1126238394127868e-06,
"loss": 0.4712,
"step": 341
},
{
"epoch": 2.1789137380191694,
"grad_norm": 1.9446053940500647,
"learning_rate": 2.082243443927212e-06,
"loss": 0.4358,
"step": 342
},
{
"epoch": 2.1853035143769968,
"grad_norm": 1.9350192214446225,
"learning_rate": 2.052025521962534e-06,
"loss": 0.5005,
"step": 343
},
{
"epoch": 2.191693290734824,
"grad_norm": 2.1868911275031033,
"learning_rate": 2.0219717561855857e-06,
"loss": 0.5543,
"step": 344
},
{
"epoch": 2.198083067092652,
"grad_norm": 2.221575157438669,
"learning_rate": 1.992083820122259e-06,
"loss": 0.3309,
"step": 345
},
{
"epoch": 2.2044728434504792,
"grad_norm": 2.144845801717813,
"learning_rate": 1.962363378064316e-06,
"loss": 0.3916,
"step": 346
},
{
"epoch": 2.2108626198083066,
"grad_norm": 2.11978548855504,
"learning_rate": 1.9328120849767198e-06,
"loss": 0.4164,
"step": 347
},
{
"epoch": 2.2172523961661343,
"grad_norm": 2.0385952191232155,
"learning_rate": 1.9034315864054682e-06,
"loss": 0.3998,
"step": 348
},
{
"epoch": 2.2236421725239617,
"grad_norm": 2.0754621683125207,
"learning_rate": 1.8742235183859747e-06,
"loss": 0.4357,
"step": 349
},
{
"epoch": 2.230031948881789,
"grad_norm": 2.0029050959618995,
"learning_rate": 1.8451895073519643e-06,
"loss": 0.3365,
"step": 350
},
{
"epoch": 2.236421725239617,
"grad_norm": 2.57807232565894,
"learning_rate": 1.8163311700448899e-06,
"loss": 0.3779,
"step": 351
},
{
"epoch": 2.242811501597444,
"grad_norm": 2.0445037247222464,
"learning_rate": 1.7876501134239316e-06,
"loss": 0.4552,
"step": 352
},
{
"epoch": 2.2492012779552715,
"grad_norm": 2.450940653052245,
"learning_rate": 1.7591479345764972e-06,
"loss": 0.4706,
"step": 353
},
{
"epoch": 2.255591054313099,
"grad_norm": 2.199833323100329,
"learning_rate": 1.7308262206292898e-06,
"loss": 0.4632,
"step": 354
},
{
"epoch": 2.2619808306709266,
"grad_norm": 2.653570092929433,
"learning_rate": 1.7026865486599375e-06,
"loss": 0.4422,
"step": 355
},
{
"epoch": 2.268370607028754,
"grad_norm": 2.3108018018174548,
"learning_rate": 1.6747304856091662e-06,
"loss": 0.4071,
"step": 356
},
{
"epoch": 2.2747603833865817,
"grad_norm": 2.2760801804733237,
"learning_rate": 1.6469595881935523e-06,
"loss": 0.4219,
"step": 357
},
{
"epoch": 2.281150159744409,
"grad_norm": 2.2663277836691185,
"learning_rate": 1.6193754028188363e-06,
"loss": 0.431,
"step": 358
},
{
"epoch": 2.2875399361022364,
"grad_norm": 2.1298943253664664,
"learning_rate": 1.591979465493806e-06,
"loss": 0.5151,
"step": 359
},
{
"epoch": 2.2939297124600637,
"grad_norm": 2.477752948045151,
"learning_rate": 1.5647733017447741e-06,
"loss": 0.5818,
"step": 360
},
{
"epoch": 2.3003194888178915,
"grad_norm": 2.219026126840176,
"learning_rate": 1.5377584265306222e-06,
"loss": 0.3346,
"step": 361
},
{
"epoch": 2.306709265175719,
"grad_norm": 2.0948183941547254,
"learning_rate": 1.510936344158448e-06,
"loss": 0.4208,
"step": 362
},
{
"epoch": 2.313099041533546,
"grad_norm": 2.4004015642072454,
"learning_rate": 1.484308548199796e-06,
"loss": 0.4439,
"step": 363
},
{
"epoch": 2.319488817891374,
"grad_norm": 2.0838816889076663,
"learning_rate": 1.4578765214074842e-06,
"loss": 0.3523,
"step": 364
},
{
"epoch": 2.3258785942492013,
"grad_norm": 2.429279030759271,
"learning_rate": 1.4316417356330441e-06,
"loss": 0.527,
"step": 365
},
{
"epoch": 2.3322683706070286,
"grad_norm": 2.2937409202550016,
"learning_rate": 1.4056056517447637e-06,
"loss": 0.4359,
"step": 366
},
{
"epoch": 2.3386581469648564,
"grad_norm": 2.335568744595556,
"learning_rate": 1.3797697195463278e-06,
"loss": 0.4426,
"step": 367
},
{
"epoch": 2.3450479233226837,
"grad_norm": 2.517760575107532,
"learning_rate": 1.3541353776961035e-06,
"loss": 0.4537,
"step": 368
},
{
"epoch": 2.351437699680511,
"grad_norm": 2.1836169125425493,
"learning_rate": 1.3287040536270135e-06,
"loss": 0.4155,
"step": 369
},
{
"epoch": 2.357827476038339,
"grad_norm": 2.206711961133346,
"learning_rate": 1.30347716346706e-06,
"loss": 0.5064,
"step": 370
},
{
"epoch": 2.364217252396166,
"grad_norm": 2.090805605064075,
"learning_rate": 1.2784561119604683e-06,
"loss": 0.3935,
"step": 371
},
{
"epoch": 2.3706070287539935,
"grad_norm": 2.245994580087271,
"learning_rate": 1.2536422923894565e-06,
"loss": 0.4418,
"step": 372
},
{
"epoch": 2.376996805111821,
"grad_norm": 1.7578744893397948,
"learning_rate": 1.2290370864966623e-06,
"loss": 0.3432,
"step": 373
},
{
"epoch": 2.3833865814696487,
"grad_norm": 2.0933614656607897,
"learning_rate": 1.2046418644081904e-06,
"loss": 0.3342,
"step": 374
},
{
"epoch": 2.389776357827476,
"grad_norm": 2.0808115537211695,
"learning_rate": 1.1804579845573288e-06,
"loss": 0.5301,
"step": 375
},
{
"epoch": 2.3961661341853033,
"grad_norm": 1.9927356398390765,
"learning_rate": 1.156486793608899e-06,
"loss": 0.4528,
"step": 376
},
{
"epoch": 2.402555910543131,
"grad_norm": 1.9488088695501393,
"learning_rate": 1.1327296263842653e-06,
"loss": 0.5109,
"step": 377
},
{
"epoch": 2.4089456869009584,
"grad_norm": 2.363258143968508,
"learning_rate": 1.1091878057870137e-06,
"loss": 0.4615,
"step": 378
},
{
"epoch": 2.415335463258786,
"grad_norm": 2.214690728832497,
"learning_rate": 1.0858626427292796e-06,
"loss": 0.3428,
"step": 379
},
{
"epoch": 2.4217252396166136,
"grad_norm": 2.4024816832009157,
"learning_rate": 1.0627554360587533e-06,
"loss": 0.4131,
"step": 380
},
{
"epoch": 2.428115015974441,
"grad_norm": 1.7023192512400525,
"learning_rate": 1.0398674724863584e-06,
"loss": 0.3013,
"step": 381
},
{
"epoch": 2.4345047923322682,
"grad_norm": 2.5554590548546408,
"learning_rate": 1.0172000265145938e-06,
"loss": 0.429,
"step": 382
},
{
"epoch": 2.440894568690096,
"grad_norm": 2.4406043120968146,
"learning_rate": 9.947543603665711e-07,
"loss": 0.4446,
"step": 383
},
{
"epoch": 2.4472843450479234,
"grad_norm": 2.355132900119845,
"learning_rate": 9.72531723915726e-07,
"loss": 0.4686,
"step": 384
},
{
"epoch": 2.4536741214057507,
"grad_norm": 2.009745480448959,
"learning_rate": 9.505333546162171e-07,
"loss": 0.4943,
"step": 385
},
{
"epoch": 2.460063897763578,
"grad_norm": 2.319627194265683,
"learning_rate": 9.287604774340236e-07,
"loss": 0.4987,
"step": 386
},
{
"epoch": 2.466453674121406,
"grad_norm": 2.434761883812788,
"learning_rate": 9.07214304778729e-07,
"loss": 0.4593,
"step": 387
},
{
"epoch": 2.472843450479233,
"grad_norm": 1.95171692263905,
"learning_rate": 8.858960364360142e-07,
"loss": 0.3701,
"step": 388
},
{
"epoch": 2.479233226837061,
"grad_norm": 2.2046486182188065,
"learning_rate": 8.648068595008458e-07,
"loss": 0.4211,
"step": 389
},
{
"epoch": 2.4856230031948883,
"grad_norm": 2.6042374898410787,
"learning_rate": 8.439479483113683e-07,
"loss": 0.4559,
"step": 390
},
{
"epoch": 2.4920127795527156,
"grad_norm": 2.0963323630173014,
"learning_rate": 8.233204643835235e-07,
"loss": 0.3564,
"step": 391
},
{
"epoch": 2.498402555910543,
"grad_norm": 2.4092444760160077,
"learning_rate": 8.029255563463589e-07,
"loss": 0.4075,
"step": 392
},
{
"epoch": 2.5047923322683707,
"grad_norm": 2.4649195547395064,
"learning_rate": 7.827643598780748e-07,
"loss": 0.4616,
"step": 393
},
{
"epoch": 2.511182108626198,
"grad_norm": 2.0786671044545595,
"learning_rate": 7.628379976427868e-07,
"loss": 0.4258,
"step": 394
},
{
"epoch": 2.5175718849840254,
"grad_norm": 2.346266411020929,
"learning_rate": 7.431475792280018e-07,
"loss": 0.4013,
"step": 395
},
{
"epoch": 2.523961661341853,
"grad_norm": 2.0883565799461197,
"learning_rate": 7.23694201082843e-07,
"loss": 0.4058,
"step": 396
},
{
"epoch": 2.5303514376996805,
"grad_norm": 1.998904610700326,
"learning_rate": 7.044789464569817e-07,
"loss": 0.3783,
"step": 397
},
{
"epoch": 2.536741214057508,
"grad_norm": 2.230046009669308,
"learning_rate": 6.855028853403295e-07,
"loss": 0.4122,
"step": 398
},
{
"epoch": 2.543130990415335,
"grad_norm": 1.9887522365789791,
"learning_rate": 6.667670744034498e-07,
"loss": 0.4234,
"step": 399
},
{
"epoch": 2.549520766773163,
"grad_norm": 2.577199407242772,
"learning_rate": 6.482725569387171e-07,
"loss": 0.3888,
"step": 400
},
{
"epoch": 2.5559105431309903,
"grad_norm": 2.402143623444451,
"learning_rate": 6.300203628022272e-07,
"loss": 0.4289,
"step": 401
},
{
"epoch": 2.562300319488818,
"grad_norm": 2.1944070015219888,
"learning_rate": 6.120115083564432e-07,
"loss": 0.4336,
"step": 402
},
{
"epoch": 2.5686900958466454,
"grad_norm": 2.0589970878526844,
"learning_rate": 5.942469964136055e-07,
"loss": 0.3676,
"step": 403
},
{
"epoch": 2.5750798722044728,
"grad_norm": 2.502670968119668,
"learning_rate": 5.767278161798912e-07,
"loss": 0.3946,
"step": 404
},
{
"epoch": 2.5814696485623,
"grad_norm": 2.234483396791303,
"learning_rate": 5.594549432003244e-07,
"loss": 0.4159,
"step": 405
},
{
"epoch": 2.587859424920128,
"grad_norm": 2.508675316184731,
"learning_rate": 5.42429339304461e-07,
"loss": 0.3766,
"step": 406
},
{
"epoch": 2.594249201277955,
"grad_norm": 2.2388652923811097,
"learning_rate": 5.256519525528254e-07,
"loss": 0.3741,
"step": 407
},
{
"epoch": 2.600638977635783,
"grad_norm": 2.1367021327758926,
"learning_rate": 5.091237171841173e-07,
"loss": 0.509,
"step": 408
},
{
"epoch": 2.6070287539936103,
"grad_norm": 2.1883188048010505,
"learning_rate": 4.92845553563196e-07,
"loss": 0.3475,
"step": 409
},
{
"epoch": 2.6134185303514377,
"grad_norm": 2.6958069924361903,
"learning_rate": 4.768183681298211e-07,
"loss": 0.5423,
"step": 410
},
{
"epoch": 2.619808306709265,
"grad_norm": 2.3649443474581617,
"learning_rate": 4.6104305334818577e-07,
"loss": 0.4518,
"step": 411
},
{
"epoch": 2.626198083067093,
"grad_norm": 2.7579147997887103,
"learning_rate": 4.455204876572172e-07,
"loss": 0.3223,
"step": 412
},
{
"epoch": 2.63258785942492,
"grad_norm": 2.322240849008944,
"learning_rate": 4.3025153542165744e-07,
"loss": 0.3889,
"step": 413
},
{
"epoch": 2.6389776357827475,
"grad_norm": 2.2585577552913327,
"learning_rate": 4.1523704688394176e-07,
"loss": 0.4524,
"step": 414
},
{
"epoch": 2.6453674121405752,
"grad_norm": 2.21502448661817,
"learning_rate": 4.0047785811684116e-07,
"loss": 0.4312,
"step": 415
},
{
"epoch": 2.6517571884984026,
"grad_norm": 2.355337885891376,
"learning_rate": 3.8597479097691626e-07,
"loss": 0.4265,
"step": 416
},
{
"epoch": 2.65814696485623,
"grad_norm": 2.2244869149661306,
"learning_rate": 3.717286530587483e-07,
"loss": 0.5291,
"step": 417
},
{
"epoch": 2.6645367412140573,
"grad_norm": 2.3569227035622013,
"learning_rate": 3.577402376499672e-07,
"loss": 0.4332,
"step": 418
},
{
"epoch": 2.670926517571885,
"grad_norm": 2.133768443895229,
"learning_rate": 3.440103236870823e-07,
"loss": 0.4609,
"step": 419
},
{
"epoch": 2.6773162939297124,
"grad_norm": 2.3164553548368323,
"learning_rate": 3.3053967571210375e-07,
"loss": 0.479,
"step": 420
},
{
"epoch": 2.68370607028754,
"grad_norm": 2.3702331769156735,
"learning_rate": 3.1732904382996975e-07,
"loss": 0.4552,
"step": 421
},
{
"epoch": 2.6900958466453675,
"grad_norm": 2.1694566517033724,
"learning_rate": 3.04379163666782e-07,
"loss": 0.3982,
"step": 422
},
{
"epoch": 2.696485623003195,
"grad_norm": 2.507080592863833,
"learning_rate": 2.916907563288357e-07,
"loss": 0.495,
"step": 423
},
{
"epoch": 2.702875399361022,
"grad_norm": 1.9030589659154709,
"learning_rate": 2.792645283624712e-07,
"loss": 0.355,
"step": 424
},
{
"epoch": 2.70926517571885,
"grad_norm": 2.486303979910709,
"learning_rate": 2.671011717147276e-07,
"loss": 0.4208,
"step": 425
},
{
"epoch": 2.7156549520766773,
"grad_norm": 2.6926299813084595,
"learning_rate": 2.5520136369481194e-07,
"loss": 0.4193,
"step": 426
},
{
"epoch": 2.722044728434505,
"grad_norm": 2.4101859848112404,
"learning_rate": 2.4356576693638555e-07,
"loss": 0.4106,
"step": 427
},
{
"epoch": 2.7284345047923324,
"grad_norm": 2.1092564449268303,
"learning_rate": 2.3219502936066228e-07,
"loss": 0.4649,
"step": 428
},
{
"epoch": 2.7348242811501597,
"grad_norm": 2.409939316965895,
"learning_rate": 2.210897841403331e-07,
"loss": 0.4726,
"step": 429
},
{
"epoch": 2.741214057507987,
"grad_norm": 2.9923889890507893,
"learning_rate": 2.1025064966430697e-07,
"loss": 0.4357,
"step": 430
},
{
"epoch": 2.747603833865815,
"grad_norm": 1.9116249904508271,
"learning_rate": 1.9967822950327453e-07,
"loss": 0.3906,
"step": 431
},
{
"epoch": 2.753993610223642,
"grad_norm": 2.363072327163832,
"learning_rate": 1.8937311237610168e-07,
"loss": 0.4384,
"step": 432
},
{
"epoch": 2.7603833865814695,
"grad_norm": 2.111336234633767,
"learning_rate": 1.793358721170435e-07,
"loss": 0.3841,
"step": 433
},
{
"epoch": 2.7667731629392973,
"grad_norm": 2.4061012547366913,
"learning_rate": 1.6956706764379438e-07,
"loss": 0.413,
"step": 434
},
{
"epoch": 2.7731629392971247,
"grad_norm": 2.3834000028849007,
"learning_rate": 1.6006724292636166e-07,
"loss": 0.4789,
"step": 435
},
{
"epoch": 2.779552715654952,
"grad_norm": 2.2642059928473066,
"learning_rate": 1.508369269567783e-07,
"loss": 0.4806,
"step": 436
},
{
"epoch": 2.7859424920127793,
"grad_norm": 2.1091265211005017,
"learning_rate": 1.418766337196431e-07,
"loss": 0.3913,
"step": 437
},
{
"epoch": 2.792332268370607,
"grad_norm": 2.0974258225125464,
"learning_rate": 1.3318686216350241e-07,
"loss": 0.401,
"step": 438
},
{
"epoch": 2.7987220447284344,
"grad_norm": 2.725505526625615,
"learning_rate": 1.2476809617306408e-07,
"loss": 0.6157,
"step": 439
},
{
"epoch": 2.8051118210862622,
"grad_norm": 2.3394680432484787,
"learning_rate": 1.166208045422551e-07,
"loss": 0.4105,
"step": 440
},
{
"epoch": 2.8115015974440896,
"grad_norm": 2.4705712014808996,
"learning_rate": 1.0874544094811424e-07,
"loss": 0.3823,
"step": 441
},
{
"epoch": 2.817891373801917,
"grad_norm": 2.0392789957555753,
"learning_rate": 1.0114244392553318e-07,
"loss": 0.44,
"step": 442
},
{
"epoch": 2.8242811501597442,
"grad_norm": 2.348950535667093,
"learning_rate": 9.381223684283291e-08,
"loss": 0.4619,
"step": 443
},
{
"epoch": 2.830670926517572,
"grad_norm": 2.1136302016617683,
"learning_rate": 8.675522787819023e-08,
"loss": 0.371,
"step": 444
},
{
"epoch": 2.8370607028753994,
"grad_norm": 2.3859707399616203,
"learning_rate": 7.997180999691101e-08,
"loss": 0.4676,
"step": 445
},
{
"epoch": 2.8434504792332267,
"grad_norm": 2.3594281771562993,
"learning_rate": 7.346236092954318e-08,
"loss": 0.496,
"step": 446
},
{
"epoch": 2.8498402555910545,
"grad_norm": 2.289174122732648,
"learning_rate": 6.722724315084805e-08,
"loss": 0.4047,
"step": 447
},
{
"epoch": 2.856230031948882,
"grad_norm": 2.378937591165076,
"learning_rate": 6.12668038596137e-08,
"loss": 0.4415,
"step": 448
},
{
"epoch": 2.862619808306709,
"grad_norm": 2.0870909731738823,
"learning_rate": 5.5581374959320366e-08,
"loss": 0.4469,
"step": 449
},
{
"epoch": 2.8690095846645365,
"grad_norm": 2.2446199520479344,
"learning_rate": 5.017127303966085e-08,
"loss": 0.4002,
"step": 450
},
{
"epoch": 2.8753993610223643,
"grad_norm": 2.2471332517670275,
"learning_rate": 4.50367993589107e-08,
"loss": 0.4317,
"step": 451
},
{
"epoch": 2.8817891373801916,
"grad_norm": 2.0864772616550673,
"learning_rate": 4.0178239827151077e-08,
"loss": 0.3966,
"step": 452
},
{
"epoch": 2.8881789137380194,
"grad_norm": 2.2798143023681794,
"learning_rate": 3.559586499035206e-08,
"loss": 0.3958,
"step": 453
},
{
"epoch": 2.8945686900958467,
"grad_norm": 2.1208404230304145,
"learning_rate": 3.128993001530245e-08,
"loss": 0.4001,
"step": 454
},
{
"epoch": 2.900958466453674,
"grad_norm": 2.1876514381127716,
"learning_rate": 2.7260674675404498e-08,
"loss": 0.3963,
"step": 455
},
{
"epoch": 2.9073482428115014,
"grad_norm": 2.2369393890230924,
"learning_rate": 2.3508323337321225e-08,
"loss": 0.412,
"step": 456
},
{
"epoch": 2.913738019169329,
"grad_norm": 1.9309934320730493,
"learning_rate": 2.0033084948483104e-08,
"loss": 0.3299,
"step": 457
},
{
"epoch": 2.9201277955271565,
"grad_norm": 2.265789089300665,
"learning_rate": 1.6835153025451246e-08,
"loss": 0.4279,
"step": 458
},
{
"epoch": 2.9265175718849843,
"grad_norm": 2.1236508228489885,
"learning_rate": 1.3914705643143788e-08,
"loss": 0.4297,
"step": 459
},
{
"epoch": 2.9329073482428116,
"grad_norm": 2.5202420167871775,
"learning_rate": 1.1271905424918294e-08,
"loss": 0.4684,
"step": 460
},
{
"epoch": 2.939297124600639,
"grad_norm": 2.223482125500666,
"learning_rate": 8.906899533517866e-09,
"loss": 0.4636,
"step": 461
},
{
"epoch": 2.9456869009584663,
"grad_norm": 1.9511784007469755,
"learning_rate": 6.819819662874372e-09,
"loss": 0.3447,
"step": 462
},
{
"epoch": 2.952076677316294,
"grad_norm": 2.484026001564462,
"learning_rate": 5.0107820307770945e-09,
"loss": 0.5442,
"step": 463
},
{
"epoch": 2.9584664536741214,
"grad_norm": 2.703733619110161,
"learning_rate": 3.4798873723984604e-09,
"loss": 0.4034,
"step": 464
},
{
"epoch": 2.9648562300319488,
"grad_norm": 2.1404889485604897,
"learning_rate": 2.2272209346885233e-09,
"loss": 0.4759,
"step": 465
},
{
"epoch": 2.9712460063897765,
"grad_norm": 2.0827067956874137,
"learning_rate": 1.2528524716259872e-09,
"loss": 0.4849,
"step": 466
},
{
"epoch": 2.977635782747604,
"grad_norm": 2.339494396070975,
"learning_rate": 5.568362403318706e-10,
"loss": 0.4075,
"step": 467
},
{
"epoch": 2.984025559105431,
"grad_norm": 2.2851445627861593,
"learning_rate": 1.3921099805302985e-10,
"loss": 0.3724,
"step": 468
}
],
"logging_steps": 1,
"max_steps": 468,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 5655918280704.0,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}
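
For reference, this layout matches the trainer_state.json that the Hugging Face transformers Trainer saves with each checkpoint: every entry in log_history records epoch, grad_norm, learning_rate, loss, and step. A minimal sketch, assuming the file above is saved locally as trainer_state.json, of pulling the loss and learning-rate curves back out of it:

import json

# Load the checkpoint's trainer state (reading from the working directory is an assumption).
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only entries that logged a training loss (all of them here, since logging_steps is 1).
train_logs = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in train_logs]
losses = [e["loss"] for e in train_logs]
lrs = [e["learning_rate"] for e in train_logs]

print(f"{state['global_step']} optimizer steps over {state['epoch']:.3f} epochs")
print(f"loss: {losses[0]:.3f} at step {steps[0]} -> {losses[-1]:.3f} at step {steps[-1]}")
print(f"peak learning rate: {max(lrs):.2e}")

Printed or plotted this way, the schedule is consistent with a linear warmup to 1e-05 around step 48 followed by a cosine-style decay toward zero by the final step 468.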