{
"best_metric": 0.25790610909461975,
"best_model_checkpoint": "miner_id_24/checkpoint-200",
"epoch": 1.0027726432532347,
"eval_steps": 100,
"global_step": 271,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0036968576709796672,
"grad_norm": 6.672558307647705,
"learning_rate": 5e-06,
"loss": 1.8315,
"step": 1
},
{
"epoch": 0.0036968576709796672,
"eval_loss": 0.4404342472553253,
"eval_runtime": 67.183,
"eval_samples_per_second": 6.787,
"eval_steps_per_second": 1.697,
"step": 1
},
{
"epoch": 0.0073937153419593345,
"grad_norm": 6.42157506942749,
"learning_rate": 1e-05,
"loss": 1.6851,
"step": 2
},
{
"epoch": 0.011090573012939002,
"grad_norm": 5.945183753967285,
"learning_rate": 1.5e-05,
"loss": 1.5755,
"step": 3
},
{
"epoch": 0.014787430683918669,
"grad_norm": 4.450586318969727,
"learning_rate": 2e-05,
"loss": 1.3689,
"step": 4
},
{
"epoch": 0.018484288354898338,
"grad_norm": 3.1546850204467773,
"learning_rate": 2.5e-05,
"loss": 1.1569,
"step": 5
},
{
"epoch": 0.022181146025878003,
"grad_norm": 2.7391388416290283,
"learning_rate": 3e-05,
"loss": 1.4513,
"step": 6
},
{
"epoch": 0.025878003696857672,
"grad_norm": 2.3573246002197266,
"learning_rate": 3.5e-05,
"loss": 1.3315,
"step": 7
},
{
"epoch": 0.029574861367837338,
"grad_norm": 2.5408079624176025,
"learning_rate": 4e-05,
"loss": 1.3033,
"step": 8
},
{
"epoch": 0.033271719038817,
"grad_norm": 2.607274293899536,
"learning_rate": 4.5e-05,
"loss": 1.4326,
"step": 9
},
{
"epoch": 0.036968576709796676,
"grad_norm": 2.4289093017578125,
"learning_rate": 5e-05,
"loss": 1.3362,
"step": 10
},
{
"epoch": 0.04066543438077634,
"grad_norm": 2.145063877105713,
"learning_rate": 5.500000000000001e-05,
"loss": 1.3975,
"step": 11
},
{
"epoch": 0.04436229205175601,
"grad_norm": 1.970733404159546,
"learning_rate": 6e-05,
"loss": 1.1519,
"step": 12
},
{
"epoch": 0.04805914972273567,
"grad_norm": 1.9928206205368042,
"learning_rate": 6.500000000000001e-05,
"loss": 1.1993,
"step": 13
},
{
"epoch": 0.051756007393715345,
"grad_norm": 1.8372405767440796,
"learning_rate": 7e-05,
"loss": 1.1659,
"step": 14
},
{
"epoch": 0.05545286506469501,
"grad_norm": 1.7766002416610718,
"learning_rate": 7.500000000000001e-05,
"loss": 1.1062,
"step": 15
},
{
"epoch": 0.059149722735674676,
"grad_norm": 2.722572088241577,
"learning_rate": 8e-05,
"loss": 1.0653,
"step": 16
},
{
"epoch": 0.06284658040665435,
"grad_norm": 1.769493818283081,
"learning_rate": 8.5e-05,
"loss": 1.0896,
"step": 17
},
{
"epoch": 0.066543438077634,
"grad_norm": 1.829550862312317,
"learning_rate": 9e-05,
"loss": 1.0492,
"step": 18
},
{
"epoch": 0.07024029574861368,
"grad_norm": 1.9306679964065552,
"learning_rate": 9.5e-05,
"loss": 1.2088,
"step": 19
},
{
"epoch": 0.07393715341959335,
"grad_norm": 1.6777886152267456,
"learning_rate": 0.0001,
"loss": 1.0575,
"step": 20
},
{
"epoch": 0.07763401109057301,
"grad_norm": 1.731042742729187,
"learning_rate": 9.999608360361113e-05,
"loss": 1.1284,
"step": 21
},
{
"epoch": 0.08133086876155268,
"grad_norm": 1.768632173538208,
"learning_rate": 9.998433502797095e-05,
"loss": 1.2378,
"step": 22
},
{
"epoch": 0.08502772643253234,
"grad_norm": 1.6279608011245728,
"learning_rate": 9.996475611356264e-05,
"loss": 1.0409,
"step": 23
},
{
"epoch": 0.08872458410351201,
"grad_norm": 1.7023952007293701,
"learning_rate": 9.993734992753777e-05,
"loss": 1.0546,
"step": 24
},
{
"epoch": 0.09242144177449169,
"grad_norm": 1.719754934310913,
"learning_rate": 9.990212076323586e-05,
"loss": 1.0298,
"step": 25
},
{
"epoch": 0.09611829944547134,
"grad_norm": 1.7114758491516113,
"learning_rate": 9.98590741395118e-05,
"loss": 1.0207,
"step": 26
},
{
"epoch": 0.09981515711645102,
"grad_norm": 1.8310364484786987,
"learning_rate": 9.980821679987125e-05,
"loss": 1.2518,
"step": 27
},
{
"epoch": 0.10351201478743069,
"grad_norm": 1.6704847812652588,
"learning_rate": 9.974955671141424e-05,
"loss": 1.0702,
"step": 28
},
{
"epoch": 0.10720887245841035,
"grad_norm": 1.610925555229187,
"learning_rate": 9.968310306358715e-05,
"loss": 1.0744,
"step": 29
},
{
"epoch": 0.11090573012939002,
"grad_norm": 1.8064525127410889,
"learning_rate": 9.960886626674302e-05,
"loss": 1.1032,
"step": 30
},
{
"epoch": 0.11460258780036968,
"grad_norm": 1.691179871559143,
"learning_rate": 9.952685795051077e-05,
"loss": 0.9404,
"step": 31
},
{
"epoch": 0.11829944547134935,
"grad_norm": 1.6878719329833984,
"learning_rate": 9.943709096197335e-05,
"loss": 1.0445,
"step": 32
},
{
"epoch": 0.12199630314232902,
"grad_norm": 1.616434931755066,
"learning_rate": 9.933957936365515e-05,
"loss": 1.0305,
"step": 33
},
{
"epoch": 0.1256931608133087,
"grad_norm": 1.6706738471984863,
"learning_rate": 9.923433843131901e-05,
"loss": 1.0578,
"step": 34
},
{
"epoch": 0.12939001848428835,
"grad_norm": 1.7515959739685059,
"learning_rate": 9.912138465157325e-05,
"loss": 1.0298,
"step": 35
},
{
"epoch": 0.133086876155268,
"grad_norm": 1.8046168088912964,
"learning_rate": 9.900073571928886e-05,
"loss": 1.1877,
"step": 36
},
{
"epoch": 0.1367837338262477,
"grad_norm": 1.6414388418197632,
"learning_rate": 9.887241053482757e-05,
"loss": 0.9156,
"step": 37
},
{
"epoch": 0.14048059149722736,
"grad_norm": 1.7848786115646362,
"learning_rate": 9.873642920108091e-05,
"loss": 1.0424,
"step": 38
},
{
"epoch": 0.14417744916820702,
"grad_norm": 1.8037313222885132,
"learning_rate": 9.859281302032106e-05,
"loss": 0.9362,
"step": 39
},
{
"epoch": 0.1478743068391867,
"grad_norm": 1.642606258392334,
"learning_rate": 9.844158449086371e-05,
"loss": 0.9545,
"step": 40
},
{
"epoch": 0.15157116451016636,
"grad_norm": 1.6696778535842896,
"learning_rate": 9.828276730354353e-05,
"loss": 0.9483,
"step": 41
},
{
"epoch": 0.15526802218114602,
"grad_norm": 1.7426838874816895,
"learning_rate": 9.811638633800287e-05,
"loss": 0.9966,
"step": 42
},
{
"epoch": 0.1589648798521257,
"grad_norm": 1.6507072448730469,
"learning_rate": 9.79424676587942e-05,
"loss": 0.8627,
"step": 43
},
{
"epoch": 0.16266173752310537,
"grad_norm": 1.8130640983581543,
"learning_rate": 9.776103851129706e-05,
"loss": 1.0294,
"step": 44
},
{
"epoch": 0.16635859519408502,
"grad_norm": 1.9146381616592407,
"learning_rate": 9.757212731744974e-05,
"loss": 1.1166,
"step": 45
},
{
"epoch": 0.17005545286506468,
"grad_norm": 1.9425106048583984,
"learning_rate": 9.737576367129694e-05,
"loss": 1.031,
"step": 46
},
{
"epoch": 0.17375231053604437,
"grad_norm": 1.9324619770050049,
"learning_rate": 9.717197833435367e-05,
"loss": 0.9654,
"step": 47
},
{
"epoch": 0.17744916820702403,
"grad_norm": 1.8076626062393188,
"learning_rate": 9.696080323078621e-05,
"loss": 0.7715,
"step": 48
},
{
"epoch": 0.18114602587800369,
"grad_norm": 2.016756057739258,
"learning_rate": 9.67422714424111e-05,
"loss": 0.9425,
"step": 49
},
{
"epoch": 0.18484288354898337,
"grad_norm": 2.1553258895874023,
"learning_rate": 9.65164172035126e-05,
"loss": 1.0886,
"step": 50
},
{
"epoch": 0.18853974121996303,
"grad_norm": 1.7580000162124634,
"learning_rate": 9.628327589547976e-05,
"loss": 1.1776,
"step": 51
},
{
"epoch": 0.1922365988909427,
"grad_norm": 1.4649349451065063,
"learning_rate": 9.604288404126363e-05,
"loss": 1.1552,
"step": 52
},
{
"epoch": 0.19593345656192238,
"grad_norm": 1.3294196128845215,
"learning_rate": 9.579527929965582e-05,
"loss": 0.9731,
"step": 53
},
{
"epoch": 0.19963031423290203,
"grad_norm": 1.3747055530548096,
"learning_rate": 9.554050045938893e-05,
"loss": 1.2221,
"step": 54
},
{
"epoch": 0.2033271719038817,
"grad_norm": 1.3121620416641235,
"learning_rate": 9.52785874330602e-05,
"loss": 1.059,
"step": 55
},
{
"epoch": 0.20702402957486138,
"grad_norm": 1.3893884420394897,
"learning_rate": 9.500958125087882e-05,
"loss": 1.1146,
"step": 56
},
{
"epoch": 0.21072088724584104,
"grad_norm": 1.3223919868469238,
"learning_rate": 9.473352405423844e-05,
"loss": 1.0077,
"step": 57
},
{
"epoch": 0.2144177449168207,
"grad_norm": 1.3322027921676636,
"learning_rate": 9.445045908911536e-05,
"loss": 1.0703,
"step": 58
},
{
"epoch": 0.21811460258780038,
"grad_norm": 1.4537849426269531,
"learning_rate": 9.416043069929388e-05,
"loss": 1.2211,
"step": 59
},
{
"epoch": 0.22181146025878004,
"grad_norm": 1.342868447303772,
"learning_rate": 9.386348431941952e-05,
"loss": 1.0331,
"step": 60
},
{
"epoch": 0.2255083179297597,
"grad_norm": 1.5239349603652954,
"learning_rate": 9.355966646788151e-05,
"loss": 1.1679,
"step": 61
},
{
"epoch": 0.22920517560073936,
"grad_norm": 1.3578258752822876,
"learning_rate": 9.324902473952528e-05,
"loss": 0.9844,
"step": 62
},
{
"epoch": 0.23290203327171904,
"grad_norm": 1.3773984909057617,
"learning_rate": 9.293160779819659e-05,
"loss": 1.0466,
"step": 63
},
{
"epoch": 0.2365988909426987,
"grad_norm": 1.3751522302627563,
"learning_rate": 9.26074653691179e-05,
"loss": 1.119,
"step": 64
},
{
"epoch": 0.24029574861367836,
"grad_norm": 1.328293800354004,
"learning_rate": 9.227664823109883e-05,
"loss": 1.0232,
"step": 65
},
{
"epoch": 0.24399260628465805,
"grad_norm": 1.310364007949829,
"learning_rate": 9.193920820858112e-05,
"loss": 0.9279,
"step": 66
},
{
"epoch": 0.2476894639556377,
"grad_norm": 1.508972406387329,
"learning_rate": 9.15951981635202e-05,
"loss": 1.0623,
"step": 67
},
{
"epoch": 0.2513863216266174,
"grad_norm": 1.4712318181991577,
"learning_rate": 9.1244671987104e-05,
"loss": 1.0572,
"step": 68
},
{
"epoch": 0.25508317929759705,
"grad_norm": 1.4435091018676758,
"learning_rate": 9.08876845913106e-05,
"loss": 1.087,
"step": 69
},
{
"epoch": 0.2587800369685767,
"grad_norm": 1.3524105548858643,
"learning_rate": 9.052429190030589e-05,
"loss": 0.9821,
"step": 70
},
{
"epoch": 0.26247689463955637,
"grad_norm": 1.4055194854736328,
"learning_rate": 9.015455084168278e-05,
"loss": 1.1205,
"step": 71
},
{
"epoch": 0.266173752310536,
"grad_norm": 1.4329872131347656,
"learning_rate": 8.977851933754316e-05,
"loss": 1.1594,
"step": 72
},
{
"epoch": 0.2698706099815157,
"grad_norm": 1.494642972946167,
"learning_rate": 8.939625629542402e-05,
"loss": 1.1341,
"step": 73
},
{
"epoch": 0.2735674676524954,
"grad_norm": 1.3670973777770996,
"learning_rate": 8.900782159906927e-05,
"loss": 0.8418,
"step": 74
},
{
"epoch": 0.27726432532347506,
"grad_norm": 1.4169329404830933,
"learning_rate": 8.861327609904859e-05,
"loss": 0.9392,
"step": 75
},
{
"epoch": 0.2809611829944547,
"grad_norm": 1.3964389562606812,
"learning_rate": 8.821268160322482e-05,
"loss": 1.0313,
"step": 76
},
{
"epoch": 0.2846580406654344,
"grad_norm": 1.298718810081482,
"learning_rate": 8.780610086707148e-05,
"loss": 0.8463,
"step": 77
},
{
"epoch": 0.28835489833641403,
"grad_norm": 1.4148890972137451,
"learning_rate": 8.739359758384162e-05,
"loss": 0.931,
"step": 78
},
{
"epoch": 0.2920517560073937,
"grad_norm": 1.4295769929885864,
"learning_rate": 8.697523637458997e-05,
"loss": 0.9263,
"step": 79
},
{
"epoch": 0.2957486136783734,
"grad_norm": 1.2890899181365967,
"learning_rate": 8.655108277804975e-05,
"loss": 0.8196,
"step": 80
},
{
"epoch": 0.29944547134935307,
"grad_norm": 1.439947247505188,
"learning_rate": 8.612120324036548e-05,
"loss": 0.9164,
"step": 81
},
{
"epoch": 0.3031423290203327,
"grad_norm": 1.3879884481430054,
"learning_rate": 8.568566510468391e-05,
"loss": 0.8937,
"step": 82
},
{
"epoch": 0.3068391866913124,
"grad_norm": 1.5276659727096558,
"learning_rate": 8.524453660060434e-05,
"loss": 1.1198,
"step": 83
},
{
"epoch": 0.31053604436229204,
"grad_norm": 1.4229848384857178,
"learning_rate": 8.479788683348995e-05,
"loss": 0.8978,
"step": 84
},
{
"epoch": 0.3142329020332717,
"grad_norm": 1.614538550376892,
"learning_rate": 8.434578577364218e-05,
"loss": 1.22,
"step": 85
},
{
"epoch": 0.3179297597042514,
"grad_norm": 1.6977165937423706,
"learning_rate": 8.388830424533934e-05,
"loss": 1.0567,
"step": 86
},
{
"epoch": 0.32162661737523107,
"grad_norm": 1.510601282119751,
"learning_rate": 8.342551391574165e-05,
"loss": 0.9535,
"step": 87
},
{
"epoch": 0.32532347504621073,
"grad_norm": 1.423045039176941,
"learning_rate": 8.295748728366413e-05,
"loss": 0.9619,
"step": 88
},
{
"epoch": 0.3290203327171904,
"grad_norm": 1.5929685831069946,
"learning_rate": 8.248429766821926e-05,
"loss": 1.0364,
"step": 89
},
{
"epoch": 0.33271719038817005,
"grad_norm": 1.4277887344360352,
"learning_rate": 8.200601919733106e-05,
"loss": 0.7981,
"step": 90
},
{
"epoch": 0.3364140480591497,
"grad_norm": 1.663561224937439,
"learning_rate": 8.15227267961226e-05,
"loss": 1.019,
"step": 91
},
{
"epoch": 0.34011090573012936,
"grad_norm": 1.484633445739746,
"learning_rate": 8.103449617517851e-05,
"loss": 0.9798,
"step": 92
},
{
"epoch": 0.3438077634011091,
"grad_norm": 1.6405881643295288,
"learning_rate": 8.054140381868436e-05,
"loss": 1.0093,
"step": 93
},
{
"epoch": 0.34750462107208874,
"grad_norm": 1.6323596239089966,
"learning_rate": 8.004352697244517e-05,
"loss": 0.9985,
"step": 94
},
{
"epoch": 0.3512014787430684,
"grad_norm": 1.4849313497543335,
"learning_rate": 7.954094363178422e-05,
"loss": 0.7756,
"step": 95
},
{
"epoch": 0.35489833641404805,
"grad_norm": 1.7660150527954102,
"learning_rate": 7.903373252932473e-05,
"loss": 1.0436,
"step": 96
},
{
"epoch": 0.3585951940850277,
"grad_norm": 1.62890625,
"learning_rate": 7.852197312265592e-05,
"loss": 0.8442,
"step": 97
},
{
"epoch": 0.36229205175600737,
"grad_norm": 1.7414716482162476,
"learning_rate": 7.800574558188547e-05,
"loss": 0.9646,
"step": 98
},
{
"epoch": 0.3659889094269871,
"grad_norm": 1.6968168020248413,
"learning_rate": 7.748513077708044e-05,
"loss": 0.8914,
"step": 99
},
{
"epoch": 0.36968576709796674,
"grad_norm": 1.9534008502960205,
"learning_rate": 7.696021026559849e-05,
"loss": 0.9362,
"step": 100
},
{
"epoch": 0.36968576709796674,
"eval_loss": 0.27733513712882996,
"eval_runtime": 68.0174,
"eval_samples_per_second": 6.704,
"eval_steps_per_second": 1.676,
"step": 100
},
{
"epoch": 0.3733826247689464,
"grad_norm": 1.5832418203353882,
"learning_rate": 7.643106627931147e-05,
"loss": 1.1038,
"step": 101
},
{
"epoch": 0.37707948243992606,
"grad_norm": 1.5433467626571655,
"learning_rate": 7.589778171172322e-05,
"loss": 1.2129,
"step": 102
},
{
"epoch": 0.3807763401109057,
"grad_norm": 1.270788311958313,
"learning_rate": 7.536044010498395e-05,
"loss": 1.0397,
"step": 103
},
{
"epoch": 0.3844731977818854,
"grad_norm": 1.1377924680709839,
"learning_rate": 7.48191256368028e-05,
"loss": 0.9277,
"step": 104
},
{
"epoch": 0.38817005545286504,
"grad_norm": 1.2180787324905396,
"learning_rate": 7.427392310726088e-05,
"loss": 1.1243,
"step": 105
},
{
"epoch": 0.39186691312384475,
"grad_norm": 1.5344997644424438,
"learning_rate": 7.372491792552693e-05,
"loss": 1.1278,
"step": 106
},
{
"epoch": 0.3955637707948244,
"grad_norm": 1.2985904216766357,
"learning_rate": 7.317219609647739e-05,
"loss": 1.1904,
"step": 107
},
{
"epoch": 0.39926062846580407,
"grad_norm": 1.1818190813064575,
"learning_rate": 7.261584420722329e-05,
"loss": 1.027,
"step": 108
},
{
"epoch": 0.4029574861367837,
"grad_norm": 1.2055531740188599,
"learning_rate": 7.20559494135458e-05,
"loss": 1.374,
"step": 109
},
{
"epoch": 0.4066543438077634,
"grad_norm": 1.2185813188552856,
"learning_rate": 7.149259942624287e-05,
"loss": 1.0247,
"step": 110
},
{
"epoch": 0.41035120147874304,
"grad_norm": 1.2887550592422485,
"learning_rate": 7.092588249738871e-05,
"loss": 1.0531,
"step": 111
},
{
"epoch": 0.41404805914972276,
"grad_norm": 1.2477582693099976,
"learning_rate": 7.03558874065087e-05,
"loss": 1.1258,
"step": 112
},
{
"epoch": 0.4177449168207024,
"grad_norm": 1.2704106569290161,
"learning_rate": 6.978270344667143e-05,
"loss": 1.0419,
"step": 113
},
{
"epoch": 0.4214417744916821,
"grad_norm": 1.257286548614502,
"learning_rate": 6.920642041050055e-05,
"loss": 1.0382,
"step": 114
},
{
"epoch": 0.42513863216266173,
"grad_norm": 1.2782636880874634,
"learning_rate": 6.862712857610813e-05,
"loss": 0.833,
"step": 115
},
{
"epoch": 0.4288354898336414,
"grad_norm": 1.3618171215057373,
"learning_rate": 6.804491869295206e-05,
"loss": 1.0615,
"step": 116
},
{
"epoch": 0.43253234750462105,
"grad_norm": 1.2854083776474,
"learning_rate": 6.745988196761975e-05,
"loss": 1.0245,
"step": 117
},
{
"epoch": 0.43622920517560076,
"grad_norm": 1.2890700101852417,
"learning_rate": 6.687211004953992e-05,
"loss": 1.0267,
"step": 118
},
{
"epoch": 0.4399260628465804,
"grad_norm": 1.2263938188552856,
"learning_rate": 6.628169501662526e-05,
"loss": 0.9602,
"step": 119
},
{
"epoch": 0.4436229205175601,
"grad_norm": 1.3618099689483643,
"learning_rate": 6.56887293608479e-05,
"loss": 1.167,
"step": 120
},
{
"epoch": 0.44731977818853974,
"grad_norm": 1.3458164930343628,
"learning_rate": 6.509330597374993e-05,
"loss": 1.1031,
"step": 121
},
{
"epoch": 0.4510166358595194,
"grad_norm": 1.395233392715454,
"learning_rate": 6.44955181318915e-05,
"loss": 1.0788,
"step": 122
},
{
"epoch": 0.45471349353049906,
"grad_norm": 1.3283040523529053,
"learning_rate": 6.38954594822384e-05,
"loss": 1.025,
"step": 123
},
{
"epoch": 0.4584103512014787,
"grad_norm": 1.4176121950149536,
"learning_rate": 6.329322402749181e-05,
"loss": 1.0014,
"step": 124
},
{
"epoch": 0.46210720887245843,
"grad_norm": 1.350151777267456,
"learning_rate": 6.268890611136211e-05,
"loss": 0.9543,
"step": 125
},
{
"epoch": 0.4658040665434381,
"grad_norm": 1.4662792682647705,
"learning_rate": 6.208260040378946e-05,
"loss": 1.1333,
"step": 126
},
{
"epoch": 0.46950092421441775,
"grad_norm": 1.4703161716461182,
"learning_rate": 6.147440188611324e-05,
"loss": 1.0494,
"step": 127
},
{
"epoch": 0.4731977818853974,
"grad_norm": 1.3620432615280151,
"learning_rate": 6.086440583619257e-05,
"loss": 0.9709,
"step": 128
},
{
"epoch": 0.47689463955637706,
"grad_norm": 1.547058343887329,
"learning_rate": 6.025270781348055e-05,
"loss": 1.0412,
"step": 129
},
{
"epoch": 0.4805914972273567,
"grad_norm": 1.2925318479537964,
"learning_rate": 5.9639403644054246e-05,
"loss": 0.8893,
"step": 130
},
{
"epoch": 0.48428835489833644,
"grad_norm": 1.461341381072998,
"learning_rate": 5.9024589405603035e-05,
"loss": 1.1617,
"step": 131
},
{
"epoch": 0.4879852125693161,
"grad_norm": 1.3552955389022827,
"learning_rate": 5.840836141237748e-05,
"loss": 0.9199,
"step": 132
},
{
"epoch": 0.49168207024029575,
"grad_norm": 1.3925375938415527,
"learning_rate": 5.779081620010104e-05,
"loss": 0.9524,
"step": 133
},
{
"epoch": 0.4953789279112754,
"grad_norm": 1.4026466608047485,
"learning_rate": 5.717205051084731e-05,
"loss": 1.0116,
"step": 134
},
{
"epoch": 0.49907578558225507,
"grad_norm": 1.488108515739441,
"learning_rate": 5.655216127788472e-05,
"loss": 1.0833,
"step": 135
},
{
"epoch": 0.5027726432532348,
"grad_norm": 1.3740253448486328,
"learning_rate": 5.5931245610491415e-05,
"loss": 0.8804,
"step": 136
},
{
"epoch": 0.5064695009242144,
"grad_norm": 1.4298604726791382,
"learning_rate": 5.5309400778742484e-05,
"loss": 0.9091,
"step": 137
},
{
"epoch": 0.5101663585951941,
"grad_norm": 1.3947893381118774,
"learning_rate": 5.4686724198272075e-05,
"loss": 0.8541,
"step": 138
},
{
"epoch": 0.5138632162661737,
"grad_norm": 1.494073510169983,
"learning_rate": 5.406331341501264e-05,
"loss": 0.9325,
"step": 139
},
{
"epoch": 0.5175600739371534,
"grad_norm": 1.4765574932098389,
"learning_rate": 5.3439266089913796e-05,
"loss": 0.863,
"step": 140
},
{
"epoch": 0.5212569316081331,
"grad_norm": 1.4526939392089844,
"learning_rate": 5.281467998364314e-05,
"loss": 0.8982,
"step": 141
},
{
"epoch": 0.5249537892791127,
"grad_norm": 1.6596426963806152,
"learning_rate": 5.218965294127155e-05,
"loss": 1.0352,
"step": 142
},
{
"epoch": 0.5286506469500925,
"grad_norm": 1.5690279006958008,
"learning_rate": 5.156428287694508e-05,
"loss": 0.871,
"step": 143
},
{
"epoch": 0.532347504621072,
"grad_norm": 1.5764353275299072,
"learning_rate": 5.093866775854618e-05,
"loss": 0.8132,
"step": 144
},
{
"epoch": 0.5360443622920518,
"grad_norm": 1.6206929683685303,
"learning_rate": 5.0312905592346496e-05,
"loss": 0.8991,
"step": 145
},
{
"epoch": 0.5397412199630314,
"grad_norm": 1.5904308557510376,
"learning_rate": 4.9687094407653516e-05,
"loss": 0.9002,
"step": 146
},
{
"epoch": 0.5434380776340111,
"grad_norm": 1.5194886922836304,
"learning_rate": 4.9061332241453835e-05,
"loss": 0.9315,
"step": 147
},
{
"epoch": 0.5471349353049908,
"grad_norm": 1.5949432849884033,
"learning_rate": 4.843571712305493e-05,
"loss": 0.8933,
"step": 148
},
{
"epoch": 0.5508317929759704,
"grad_norm": 1.750752568244934,
"learning_rate": 4.7810347058728454e-05,
"loss": 0.9813,
"step": 149
},
{
"epoch": 0.5545286506469501,
"grad_norm": 1.9377819299697876,
"learning_rate": 4.718532001635687e-05,
"loss": 0.9949,
"step": 150
},
{
"epoch": 0.5582255083179297,
"grad_norm": 1.2233893871307373,
"learning_rate": 4.6560733910086215e-05,
"loss": 0.9573,
"step": 151
},
{
"epoch": 0.5619223659889094,
"grad_norm": 1.3002984523773193,
"learning_rate": 4.593668658498738e-05,
"loss": 1.0886,
"step": 152
},
{
"epoch": 0.5656192236598891,
"grad_norm": 1.234018087387085,
"learning_rate": 4.531327580172794e-05,
"loss": 0.9641,
"step": 153
},
{
"epoch": 0.5693160813308688,
"grad_norm": 1.2332717180252075,
"learning_rate": 4.4690599221257534e-05,
"loss": 1.0655,
"step": 154
},
{
"epoch": 0.5730129390018485,
"grad_norm": 1.1721649169921875,
"learning_rate": 4.406875438950862e-05,
"loss": 1.0076,
"step": 155
},
{
"epoch": 0.5767097966728281,
"grad_norm": 1.2083436250686646,
"learning_rate": 4.34478387221153e-05,
"loss": 1.1572,
"step": 156
},
{
"epoch": 0.5804066543438078,
"grad_norm": 1.1613600254058838,
"learning_rate": 4.2827949489152716e-05,
"loss": 1.0262,
"step": 157
},
{
"epoch": 0.5841035120147874,
"grad_norm": 1.1954678297042847,
"learning_rate": 4.2209183799898975e-05,
"loss": 1.0363,
"step": 158
},
{
"epoch": 0.5878003696857671,
"grad_norm": 1.1671994924545288,
"learning_rate": 4.159163858762254e-05,
"loss": 1.0286,
"step": 159
},
{
"epoch": 0.5914972273567468,
"grad_norm": 1.1261061429977417,
"learning_rate": 4.097541059439698e-05,
"loss": 0.9091,
"step": 160
},
{
"epoch": 0.5951940850277264,
"grad_norm": 1.2204991579055786,
"learning_rate": 4.036059635594578e-05,
"loss": 1.1134,
"step": 161
},
{
"epoch": 0.5988909426987061,
"grad_norm": 1.129346489906311,
"learning_rate": 3.9747292186519456e-05,
"loss": 0.9158,
"step": 162
},
{
"epoch": 0.6025878003696857,
"grad_norm": 1.196499228477478,
"learning_rate": 3.913559416380743e-05,
"loss": 1.0781,
"step": 163
},
{
"epoch": 0.6062846580406654,
"grad_norm": 1.2142738103866577,
"learning_rate": 3.8525598113886755e-05,
"loss": 1.0086,
"step": 164
},
{
"epoch": 0.609981515711645,
"grad_norm": 1.1598280668258667,
"learning_rate": 3.791739959621054e-05,
"loss": 0.8981,
"step": 165
},
{
"epoch": 0.6136783733826248,
"grad_norm": 1.2825883626937866,
"learning_rate": 3.73110938886379e-05,
"loss": 1.0979,
"step": 166
},
{
"epoch": 0.6173752310536045,
"grad_norm": 1.2017385959625244,
"learning_rate": 3.670677597250819e-05,
"loss": 0.9349,
"step": 167
},
{
"epoch": 0.6210720887245841,
"grad_norm": 1.2944085597991943,
"learning_rate": 3.610454051776159e-05,
"loss": 1.028,
"step": 168
},
{
"epoch": 0.6247689463955638,
"grad_norm": 1.2570650577545166,
"learning_rate": 3.5504481868108496e-05,
"loss": 0.9435,
"step": 169
},
{
"epoch": 0.6284658040665434,
"grad_norm": 1.2900683879852295,
"learning_rate": 3.490669402625007e-05,
"loss": 1.0209,
"step": 170
},
{
"epoch": 0.6321626617375231,
"grad_norm": 1.3370497226715088,
"learning_rate": 3.4311270639152125e-05,
"loss": 1.0565,
"step": 171
},
{
"epoch": 0.6358595194085028,
"grad_norm": 1.3096612691879272,
"learning_rate": 3.371830498337475e-05,
"loss": 1.0703,
"step": 172
},
{
"epoch": 0.6395563770794824,
"grad_norm": 1.2441438436508179,
"learning_rate": 3.31278899504601e-05,
"loss": 0.9292,
"step": 173
},
{
"epoch": 0.6432532347504621,
"grad_norm": 1.3445340394973755,
"learning_rate": 3.254011803238026e-05,
"loss": 1.1239,
"step": 174
},
{
"epoch": 0.6469500924214417,
"grad_norm": 1.2935254573822021,
"learning_rate": 3.195508130704795e-05,
"loss": 0.9887,
"step": 175
},
{
"epoch": 0.6506469500924215,
"grad_norm": 1.3003994226455688,
"learning_rate": 3.137287142389189e-05,
"loss": 0.9951,
"step": 176
},
{
"epoch": 0.6543438077634011,
"grad_norm": 1.2665036916732788,
"learning_rate": 3.079357958949946e-05,
"loss": 0.9178,
"step": 177
},
{
"epoch": 0.6580406654343808,
"grad_norm": 1.3071720600128174,
"learning_rate": 3.0217296553328578e-05,
"loss": 0.9441,
"step": 178
},
{
"epoch": 0.6617375231053605,
"grad_norm": 1.3040660619735718,
"learning_rate": 2.9644112593491313e-05,
"loss": 0.9668,
"step": 179
},
{
"epoch": 0.6654343807763401,
"grad_norm": 1.3203717470169067,
"learning_rate": 2.90741175026113e-05,
"loss": 1.0138,
"step": 180
},
{
"epoch": 0.6691312384473198,
"grad_norm": 1.258781909942627,
"learning_rate": 2.8507400573757158e-05,
"loss": 0.8963,
"step": 181
},
{
"epoch": 0.6728280961182994,
"grad_norm": 1.2872486114501953,
"learning_rate": 2.7944050586454214e-05,
"loss": 0.8442,
"step": 182
},
{
"epoch": 0.6765249537892791,
"grad_norm": 1.318333387374878,
"learning_rate": 2.738415579277672e-05,
"loss": 0.9776,
"step": 183
},
{
"epoch": 0.6802218114602587,
"grad_norm": 1.4056696891784668,
"learning_rate": 2.682780390352262e-05,
"loss": 0.9503,
"step": 184
},
{
"epoch": 0.6839186691312384,
"grad_norm": 1.371435284614563,
"learning_rate": 2.6275082074473077e-05,
"loss": 0.8886,
"step": 185
},
{
"epoch": 0.6876155268022182,
"grad_norm": 1.3588190078735352,
"learning_rate": 2.5726076892739125e-05,
"loss": 0.8327,
"step": 186
},
{
"epoch": 0.6913123844731978,
"grad_norm": 1.4449481964111328,
"learning_rate": 2.5180874363197215e-05,
"loss": 1.0157,
"step": 187
},
{
"epoch": 0.6950092421441775,
"grad_norm": 1.4662877321243286,
"learning_rate": 2.4639559895016068e-05,
"loss": 0.9795,
"step": 188
},
{
"epoch": 0.6987060998151571,
"grad_norm": 1.4447919130325317,
"learning_rate": 2.41022182882768e-05,
"loss": 0.935,
"step": 189
},
{
"epoch": 0.7024029574861368,
"grad_norm": 1.4664056301116943,
"learning_rate": 2.3568933720688545e-05,
"loss": 0.9556,
"step": 190
},
{
"epoch": 0.7060998151571165,
"grad_norm": 1.4930390119552612,
"learning_rate": 2.3039789734401522e-05,
"loss": 0.9217,
"step": 191
},
{
"epoch": 0.7097966728280961,
"grad_norm": 1.5717326402664185,
"learning_rate": 2.2514869222919572e-05,
"loss": 1.0939,
"step": 192
},
{
"epoch": 0.7134935304990758,
"grad_norm": 1.5687856674194336,
"learning_rate": 2.1994254418114522e-05,
"loss": 1.0533,
"step": 193
},
{
"epoch": 0.7171903881700554,
"grad_norm": 1.4364888668060303,
"learning_rate": 2.1478026877344087e-05,
"loss": 0.8697,
"step": 194
},
{
"epoch": 0.7208872458410351,
"grad_norm": 1.6093854904174805,
"learning_rate": 2.0966267470675273e-05,
"loss": 1.062,
"step": 195
},
{
"epoch": 0.7245841035120147,
"grad_norm": 1.6295666694641113,
"learning_rate": 2.0459056368215785e-05,
"loss": 0.852,
"step": 196
},
{
"epoch": 0.7282809611829945,
"grad_norm": 1.6727817058563232,
"learning_rate": 1.9956473027554846e-05,
"loss": 0.9043,
"step": 197
},
{
"epoch": 0.7319778188539742,
"grad_norm": 1.708413004875183,
"learning_rate": 1.945859618131564e-05,
"loss": 0.8341,
"step": 198
},
{
"epoch": 0.7356746765249538,
"grad_norm": 1.630466341972351,
"learning_rate": 1.8965503824821495e-05,
"loss": 0.8544,
"step": 199
},
{
"epoch": 0.7393715341959335,
"grad_norm": 1.9450691938400269,
"learning_rate": 1.8477273203877398e-05,
"loss": 1.0403,
"step": 200
},
{
"epoch": 0.7393715341959335,
"eval_loss": 0.25790610909461975,
"eval_runtime": 68.0375,
"eval_samples_per_second": 6.702,
"eval_steps_per_second": 1.676,
"step": 200
},
{
"epoch": 0.7430683918669131,
"grad_norm": 1.1571354866027832,
"learning_rate": 1.7993980802668946e-05,
"loss": 1.0016,
"step": 201
},
{
"epoch": 0.7467652495378928,
"grad_norm": 1.2866359949111938,
"learning_rate": 1.7515702331780753e-05,
"loss": 1.1153,
"step": 202
},
{
"epoch": 0.7504621072088724,
"grad_norm": 1.2495567798614502,
"learning_rate": 1.7042512716335873e-05,
"loss": 1.6495,
"step": 203
},
{
"epoch": 0.7541589648798521,
"grad_norm": 1.184030294418335,
"learning_rate": 1.6574486084258366e-05,
"loss": 0.9913,
"step": 204
},
{
"epoch": 0.7578558225508318,
"grad_norm": 1.1039363145828247,
"learning_rate": 1.6111695754660667e-05,
"loss": 0.9402,
"step": 205
},
{
"epoch": 0.7615526802218114,
"grad_norm": 1.1745617389678955,
"learning_rate": 1.565421422635782e-05,
"loss": 1.0854,
"step": 206
},
{
"epoch": 0.7652495378927912,
"grad_norm": 1.162819743156433,
"learning_rate": 1.5202113166510057e-05,
"loss": 1.1295,
"step": 207
},
{
"epoch": 0.7689463955637708,
"grad_norm": 1.0997729301452637,
"learning_rate": 1.475546339939568e-05,
"loss": 0.9278,
"step": 208
},
{
"epoch": 0.7726432532347505,
"grad_norm": 1.1672478914260864,
"learning_rate": 1.4314334895316094e-05,
"loss": 1.1381,
"step": 209
},
{
"epoch": 0.7763401109057301,
"grad_norm": 1.097519874572754,
"learning_rate": 1.3878796759634544e-05,
"loss": 0.9361,
"step": 210
},
{
"epoch": 0.7800369685767098,
"grad_norm": 1.0834153890609741,
"learning_rate": 1.3448917221950263e-05,
"loss": 0.8876,
"step": 211
},
{
"epoch": 0.7837338262476895,
"grad_norm": 1.1496264934539795,
"learning_rate": 1.3024763625410024e-05,
"loss": 0.9791,
"step": 212
},
{
"epoch": 0.7874306839186691,
"grad_norm": 1.1406208276748657,
"learning_rate": 1.2606402416158392e-05,
"loss": 0.964,
"step": 213
},
{
"epoch": 0.7911275415896488,
"grad_norm": 1.2033501863479614,
"learning_rate": 1.2193899132928537e-05,
"loss": 1.0953,
"step": 214
},
{
"epoch": 0.7948243992606284,
"grad_norm": 1.1828855276107788,
"learning_rate": 1.1787318396775188e-05,
"loss": 0.9958,
"step": 215
},
{
"epoch": 0.7985212569316081,
"grad_norm": 1.1799291372299194,
"learning_rate": 1.138672390095143e-05,
"loss": 1.0501,
"step": 216
},
{
"epoch": 0.8022181146025879,
"grad_norm": 1.232038974761963,
"learning_rate": 1.0992178400930753e-05,
"loss": 1.0139,
"step": 217
},
{
"epoch": 0.8059149722735675,
"grad_norm": 1.1742757558822632,
"learning_rate": 1.060374370457599e-05,
"loss": 0.8714,
"step": 218
},
{
"epoch": 0.8096118299445472,
"grad_norm": 1.2701990604400635,
"learning_rate": 1.0221480662456845e-05,
"loss": 0.9587,
"step": 219
},
{
"epoch": 0.8133086876155268,
"grad_norm": 1.295379400253296,
"learning_rate": 9.845449158317215e-06,
"loss": 1.1038,
"step": 220
},
{
"epoch": 0.8170055452865065,
"grad_norm": 1.2289679050445557,
"learning_rate": 9.475708099694124e-06,
"loss": 0.9174,
"step": 221
},
{
"epoch": 0.8207024029574861,
"grad_norm": 1.3198155164718628,
"learning_rate": 9.112315408689414e-06,
"loss": 1.0472,
"step": 222
},
{
"epoch": 0.8243992606284658,
"grad_norm": 1.1874481439590454,
"learning_rate": 8.755328012896003e-06,
"loss": 0.8252,
"step": 223
},
{
"epoch": 0.8280961182994455,
"grad_norm": 1.2251365184783936,
"learning_rate": 8.404801836479808e-06,
"loss": 0.8606,
"step": 224
},
{
"epoch": 0.8317929759704251,
"grad_norm": 1.3227380514144897,
"learning_rate": 8.060791791418886e-06,
"loss": 1.0696,
"step": 225
},
{
"epoch": 0.8354898336414048,
"grad_norm": 1.2983530759811401,
"learning_rate": 7.723351768901172e-06,
"loss": 0.9411,
"step": 226
},
{
"epoch": 0.8391866913123844,
"grad_norm": 1.256066083908081,
"learning_rate": 7.392534630882092e-06,
"loss": 0.8509,
"step": 227
},
{
"epoch": 0.8428835489833642,
"grad_norm": 1.2773510217666626,
"learning_rate": 7.06839220180342e-06,
"loss": 0.8596,
"step": 228
},
{
"epoch": 0.8465804066543438,
"grad_norm": 1.3085066080093384,
"learning_rate": 6.750975260474718e-06,
"loss": 0.885,
"step": 229
},
{
"epoch": 0.8502772643253235,
"grad_norm": 1.2915611267089844,
"learning_rate": 6.440333532118503e-06,
"loss": 0.9502,
"step": 230
},
{
"epoch": 0.8539741219963032,
"grad_norm": 1.2121251821517944,
"learning_rate": 6.136515680580479e-06,
"loss": 0.8245,
"step": 231
},
{
"epoch": 0.8576709796672828,
"grad_norm": 1.3207285404205322,
"learning_rate": 5.839569300706127e-06,
"loss": 0.8947,
"step": 232
},
{
"epoch": 0.8613678373382625,
"grad_norm": 1.4474835395812988,
"learning_rate": 5.549540910884648e-06,
"loss": 1.1095,
"step": 233
},
{
"epoch": 0.8650646950092421,
"grad_norm": 1.3800991773605347,
"learning_rate": 5.266475945761562e-06,
"loss": 0.8926,
"step": 234
},
{
"epoch": 0.8687615526802218,
"grad_norm": 1.2967448234558105,
"learning_rate": 4.990418749121178e-06,
"loss": 0.8284,
"step": 235
},
{
"epoch": 0.8724584103512015,
"grad_norm": 1.3374454975128174,
"learning_rate": 4.721412566939804e-06,
"loss": 0.9239,
"step": 236
},
{
"epoch": 0.8761552680221811,
"grad_norm": 1.404314637184143,
"learning_rate": 4.459499540611078e-06,
"loss": 0.9797,
"step": 237
},
{
"epoch": 0.8798521256931608,
"grad_norm": 1.361304759979248,
"learning_rate": 4.2047207003442e-06,
"loss": 0.8175,
"step": 238
},
{
"epoch": 0.8835489833641405,
"grad_norm": 1.464787244796753,
"learning_rate": 3.9571159587363734e-06,
"loss": 0.912,
"step": 239
},
{
"epoch": 0.8872458410351202,
"grad_norm": 1.3126565217971802,
"learning_rate": 3.7167241045202473e-06,
"loss": 0.8109,
"step": 240
},
{
"epoch": 0.8909426987060998,
"grad_norm": 1.3885515928268433,
"learning_rate": 3.4835827964873945e-06,
"loss": 0.8852,
"step": 241
},
{
"epoch": 0.8946395563770795,
"grad_norm": 1.48948335647583,
"learning_rate": 3.2577285575889017e-06,
"loss": 0.9546,
"step": 242
},
{
"epoch": 0.8983364140480592,
"grad_norm": 1.467980146408081,
"learning_rate": 3.039196769213787e-06,
"loss": 0.8802,
"step": 243
},
{
"epoch": 0.9020332717190388,
"grad_norm": 1.5345256328582764,
"learning_rate": 2.8280216656463408e-06,
"loss": 0.835,
"step": 244
},
{
"epoch": 0.9057301293900185,
"grad_norm": 1.5734572410583496,
"learning_rate": 2.6242363287030613e-06,
"loss": 1.0006,
"step": 245
},
{
"epoch": 0.9094269870609981,
"grad_norm": 1.4659297466278076,
"learning_rate": 2.4278726825502697e-06,
"loss": 0.8125,
"step": 246
},
{
"epoch": 0.9131238447319778,
"grad_norm": 1.6777136325836182,
"learning_rate": 2.2389614887029566e-06,
"loss": 0.9509,
"step": 247
},
{
"epoch": 0.9168207024029574,
"grad_norm": 1.7119040489196777,
"learning_rate": 2.0575323412058033e-06,
"loss": 0.8874,
"step": 248
},
{
"epoch": 0.9205175600739371,
"grad_norm": 1.8174391984939575,
"learning_rate": 1.8836136619971466e-06,
"loss": 0.9305,
"step": 249
},
{
"epoch": 0.9242144177449169,
"grad_norm": 1.8792368173599243,
"learning_rate": 1.7172326964564778e-06,
"loss": 0.9589,
"step": 250
},
{
"epoch": 0.9279112754158965,
"grad_norm": 1.1916697025299072,
"learning_rate": 1.5584155091362906e-06,
"loss": 1.0869,
"step": 251
},
{
"epoch": 0.9316081330868762,
"grad_norm": 1.109428882598877,
"learning_rate": 1.4071869796789428e-06,
"loss": 0.9649,
"step": 252
},
{
"epoch": 0.9353049907578558,
"grad_norm": 1.1600779294967651,
"learning_rate": 1.263570798919106e-06,
"loss": 1.0584,
"step": 253
},
{
"epoch": 0.9390018484288355,
"grad_norm": 1.027513027191162,
"learning_rate": 1.1275894651724517e-06,
"loss": 0.8654,
"step": 254
},
{
"epoch": 0.9426987060998152,
"grad_norm": 1.1321669816970825,
"learning_rate": 9.992642807111485e-07,
"loss": 0.9776,
"step": 255
},
{
"epoch": 0.9463955637707948,
"grad_norm": 1.2419346570968628,
"learning_rate": 8.786153484267589e-07,
"loss": 1.0648,
"step": 256
},
{
"epoch": 0.9500924214417745,
"grad_norm": 1.159534215927124,
"learning_rate": 7.656615686809976e-07,
"loss": 0.9293,
"step": 257
},
{
"epoch": 0.9537892791127541,
"grad_norm": 1.2147092819213867,
"learning_rate": 6.604206363448661e-07,
"loss": 0.9849,
"step": 258
},
{
"epoch": 0.9574861367837338,
"grad_norm": 1.2285771369934082,
"learning_rate": 5.629090380266544e-07,
"loss": 0.9584,
"step": 259
},
{
"epoch": 0.9611829944547134,
"grad_norm": 1.2933838367462158,
"learning_rate": 4.7314204948923356e-07,
"loss": 0.9436,
"step": 260
},
{
"epoch": 0.9648798521256932,
"grad_norm": 1.2568949460983276,
"learning_rate": 3.9113373325698754e-07,
"loss": 0.9507,
"step": 261
},
{
"epoch": 0.9685767097966729,
"grad_norm": 1.251380443572998,
"learning_rate": 3.168969364128527e-07,
"loss": 0.9798,
"step": 262
},
{
"epoch": 0.9722735674676525,
"grad_norm": 1.3915915489196777,
"learning_rate": 2.5044328858576106e-07,
"loss": 1.047,
"step": 263
},
{
"epoch": 0.9759704251386322,
"grad_norm": 1.353722333908081,
"learning_rate": 1.917832001287645e-07,
"loss": 0.8804,
"step": 264
},
{
"epoch": 0.9796672828096118,
"grad_norm": 1.3146427869796753,
"learning_rate": 1.4092586048820578e-07,
"loss": 0.9416,
"step": 265
},
{
"epoch": 0.9833641404805915,
"grad_norm": 1.4524424076080322,
"learning_rate": 9.787923676414235e-08,
"loss": 1.0211,
"step": 266
},
{
"epoch": 0.9870609981515711,
"grad_norm": 1.499550461769104,
"learning_rate": 6.265007246223364e-08,
"loss": 0.9393,
"step": 267
},
{
"epoch": 0.9907578558225508,
"grad_norm": 1.4518331289291382,
"learning_rate": 3.524388643736387e-08,
"loss": 0.8188,
"step": 268
},
{
"epoch": 0.9944547134935305,
"grad_norm": 1.5743730068206787,
"learning_rate": 1.566497202904471e-08,
"loss": 0.8282,
"step": 269
},
{
"epoch": 0.9981515711645101,
"grad_norm": 1.7006173133850098,
"learning_rate": 3.91639638886998e-09,
"loss": 0.7826,
"step": 270
},
{
"epoch": 1.0027726432532347,
"grad_norm": 1.2868958711624146,
"learning_rate": 0.0,
"loss": 0.9401,
"step": 271
}
],
"logging_steps": 1,
"max_steps": 271,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 100,
"stateful_callbacks": {
"EarlyStoppingCallback": {
"args": {
"early_stopping_patience": 5,
"early_stopping_threshold": 0.0
},
"attributes": {
"early_stopping_patience_counter": 0
}
},
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 7.756009208713052e+17,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}