{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 471,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0021231422505307855,
"grad_norm": 6.233692311689662,
"learning_rate": 0.0,
"loss": 1.3677,
"step": 1
},
{
"epoch": 0.004246284501061571,
"grad_norm": 6.0360948097932825,
"learning_rate": 4.1666666666666667e-07,
"loss": 1.4092,
"step": 2
},
{
"epoch": 0.006369426751592357,
"grad_norm": 5.936790765452034,
"learning_rate": 8.333333333333333e-07,
"loss": 1.4005,
"step": 3
},
{
"epoch": 0.008492569002123142,
"grad_norm": 6.107653491301953,
"learning_rate": 1.25e-06,
"loss": 1.4024,
"step": 4
},
{
"epoch": 0.010615711252653927,
"grad_norm": 5.6416859423179195,
"learning_rate": 1.6666666666666667e-06,
"loss": 1.3824,
"step": 5
},
{
"epoch": 0.012738853503184714,
"grad_norm": 5.386232950715365,
"learning_rate": 2.0833333333333334e-06,
"loss": 1.4027,
"step": 6
},
{
"epoch": 0.014861995753715499,
"grad_norm": 5.260462158475368,
"learning_rate": 2.5e-06,
"loss": 1.2939,
"step": 7
},
{
"epoch": 0.016985138004246284,
"grad_norm": 4.361483321471081,
"learning_rate": 2.916666666666667e-06,
"loss": 1.3433,
"step": 8
},
{
"epoch": 0.01910828025477707,
"grad_norm": 4.258300193735322,
"learning_rate": 3.3333333333333333e-06,
"loss": 1.3352,
"step": 9
},
{
"epoch": 0.021231422505307854,
"grad_norm": 4.06506696391315,
"learning_rate": 3.7500000000000005e-06,
"loss": 1.3413,
"step": 10
},
{
"epoch": 0.02335456475583864,
"grad_norm": 2.9631037995819964,
"learning_rate": 4.166666666666667e-06,
"loss": 1.2789,
"step": 11
},
{
"epoch": 0.025477707006369428,
"grad_norm": 2.967044465905521,
"learning_rate": 4.583333333333333e-06,
"loss": 1.2588,
"step": 12
},
{
"epoch": 0.027600849256900213,
"grad_norm": 2.9160761923797542,
"learning_rate": 5e-06,
"loss": 1.3127,
"step": 13
},
{
"epoch": 0.029723991507430998,
"grad_norm": 2.3291890524964733,
"learning_rate": 5.416666666666667e-06,
"loss": 1.2187,
"step": 14
},
{
"epoch": 0.03184713375796178,
"grad_norm": 2.7317141874955646,
"learning_rate": 5.833333333333334e-06,
"loss": 1.2349,
"step": 15
},
{
"epoch": 0.03397027600849257,
"grad_norm": 2.6034871595090623,
"learning_rate": 6.25e-06,
"loss": 1.1956,
"step": 16
},
{
"epoch": 0.036093418259023353,
"grad_norm": 5.248341696418897,
"learning_rate": 6.666666666666667e-06,
"loss": 1.1591,
"step": 17
},
{
"epoch": 0.03821656050955414,
"grad_norm": 2.7756655727491086,
"learning_rate": 7.083333333333335e-06,
"loss": 1.1315,
"step": 18
},
{
"epoch": 0.040339702760084924,
"grad_norm": 1.8984519497623262,
"learning_rate": 7.500000000000001e-06,
"loss": 1.1172,
"step": 19
},
{
"epoch": 0.04246284501061571,
"grad_norm": 2.0903589430903784,
"learning_rate": 7.916666666666667e-06,
"loss": 1.0286,
"step": 20
},
{
"epoch": 0.044585987261146494,
"grad_norm": 2.3441882371051936,
"learning_rate": 8.333333333333334e-06,
"loss": 1.1418,
"step": 21
},
{
"epoch": 0.04670912951167728,
"grad_norm": 1.8672005663239521,
"learning_rate": 8.750000000000001e-06,
"loss": 1.1111,
"step": 22
},
{
"epoch": 0.04883227176220807,
"grad_norm": 2.4191480386142583,
"learning_rate": 9.166666666666666e-06,
"loss": 1.0186,
"step": 23
},
{
"epoch": 0.050955414012738856,
"grad_norm": 1.7753789195215919,
"learning_rate": 9.583333333333335e-06,
"loss": 1.0289,
"step": 24
},
{
"epoch": 0.05307855626326964,
"grad_norm": 1.2273993769357439,
"learning_rate": 1e-05,
"loss": 1.0566,
"step": 25
},
{
"epoch": 0.055201698513800426,
"grad_norm": 1.0344250013340117,
"learning_rate": 9.999876512522269e-06,
"loss": 1.0465,
"step": 26
},
{
"epoch": 0.05732484076433121,
"grad_norm": 1.1494263728238339,
"learning_rate": 9.999506056188736e-06,
"loss": 1.087,
"step": 27
},
{
"epoch": 0.059447983014861996,
"grad_norm": 1.1301602633871957,
"learning_rate": 9.99888864929809e-06,
"loss": 1.0851,
"step": 28
},
{
"epoch": 0.06157112526539278,
"grad_norm": 1.1626122948756041,
"learning_rate": 9.99802432234714e-06,
"loss": 1.0547,
"step": 29
},
{
"epoch": 0.06369426751592357,
"grad_norm": 1.0163867469393915,
"learning_rate": 9.996913118029306e-06,
"loss": 1.0567,
"step": 30
},
{
"epoch": 0.06581740976645435,
"grad_norm": 1.1007621470115974,
"learning_rate": 9.995555091232516e-06,
"loss": 1.0389,
"step": 31
},
{
"epoch": 0.06794055201698514,
"grad_norm": 1.063026640935385,
"learning_rate": 9.99395030903649e-06,
"loss": 1.0391,
"step": 32
},
{
"epoch": 0.07006369426751592,
"grad_norm": 0.8808959073050446,
"learning_rate": 9.992098850709434e-06,
"loss": 0.9964,
"step": 33
},
{
"epoch": 0.07218683651804671,
"grad_norm": 0.892414136911897,
"learning_rate": 9.990000807704114e-06,
"loss": 0.9936,
"step": 34
},
{
"epoch": 0.07430997876857749,
"grad_norm": 1.0929906845130524,
"learning_rate": 9.987656283653344e-06,
"loss": 0.9981,
"step": 35
},
{
"epoch": 0.07643312101910828,
"grad_norm": 1.470611000535341,
"learning_rate": 9.985065394364869e-06,
"loss": 1.0313,
"step": 36
},
{
"epoch": 0.07855626326963906,
"grad_norm": 0.9257456902237308,
"learning_rate": 9.982228267815644e-06,
"loss": 0.9702,
"step": 37
},
{
"epoch": 0.08067940552016985,
"grad_norm": 0.9953711624405185,
"learning_rate": 9.979145044145506e-06,
"loss": 0.9635,
"step": 38
},
{
"epoch": 0.08280254777070063,
"grad_norm": 1.0617273624322012,
"learning_rate": 9.975815875650265e-06,
"loss": 0.9483,
"step": 39
},
{
"epoch": 0.08492569002123142,
"grad_norm": 1.0473143836003516,
"learning_rate": 9.972240926774167e-06,
"loss": 0.9986,
"step": 40
},
{
"epoch": 0.0870488322717622,
"grad_norm": 0.8457919738818875,
"learning_rate": 9.968420374101782e-06,
"loss": 0.9587,
"step": 41
},
{
"epoch": 0.08917197452229299,
"grad_norm": 1.3767525767731605,
"learning_rate": 9.964354406349272e-06,
"loss": 0.9625,
"step": 42
},
{
"epoch": 0.09129511677282377,
"grad_norm": 0.8914617384358068,
"learning_rate": 9.960043224355081e-06,
"loss": 0.9615,
"step": 43
},
{
"epoch": 0.09341825902335456,
"grad_norm": 0.7787343722669178,
"learning_rate": 9.955487041070003e-06,
"loss": 1.0288,
"step": 44
},
{
"epoch": 0.09554140127388536,
"grad_norm": 0.8359830251689131,
"learning_rate": 9.95068608154667e-06,
"loss": 1.0315,
"step": 45
},
{
"epoch": 0.09766454352441614,
"grad_norm": 0.923320510217848,
"learning_rate": 9.945640582928438e-06,
"loss": 1.0166,
"step": 46
},
{
"epoch": 0.09978768577494693,
"grad_norm": 0.8371546373170314,
"learning_rate": 9.940350794437663e-06,
"loss": 0.9798,
"step": 47
},
{
"epoch": 0.10191082802547771,
"grad_norm": 0.9837185052909639,
"learning_rate": 9.934816977363404e-06,
"loss": 0.9823,
"step": 48
},
{
"epoch": 0.1040339702760085,
"grad_norm": 0.7240963389932448,
"learning_rate": 9.929039405048502e-06,
"loss": 0.9676,
"step": 49
},
{
"epoch": 0.10615711252653928,
"grad_norm": 0.8361769763648854,
"learning_rate": 9.923018362876093e-06,
"loss": 1.0051,
"step": 50
},
{
"epoch": 0.10828025477707007,
"grad_norm": 0.7854708795752825,
"learning_rate": 9.916754148255501e-06,
"loss": 0.9925,
"step": 51
},
{
"epoch": 0.11040339702760085,
"grad_norm": 0.7406975656834304,
"learning_rate": 9.91024707060755e-06,
"loss": 1.0003,
"step": 52
},
{
"epoch": 0.11252653927813164,
"grad_norm": 0.7421357632855965,
"learning_rate": 9.903497451349286e-06,
"loss": 0.9285,
"step": 53
},
{
"epoch": 0.11464968152866242,
"grad_norm": 0.7102816852549364,
"learning_rate": 9.896505623878088e-06,
"loss": 0.9876,
"step": 54
},
{
"epoch": 0.11677282377919321,
"grad_norm": 0.7777264555968619,
"learning_rate": 9.889271933555214e-06,
"loss": 1.0259,
"step": 55
},
{
"epoch": 0.11889596602972399,
"grad_norm": 0.7349378770101916,
"learning_rate": 9.881796737688732e-06,
"loss": 1.0533,
"step": 56
},
{
"epoch": 0.12101910828025478,
"grad_norm": 0.6987463564433962,
"learning_rate": 9.874080405515874e-06,
"loss": 0.9906,
"step": 57
},
{
"epoch": 0.12314225053078556,
"grad_norm": 0.8065087328611837,
"learning_rate": 9.866123318184803e-06,
"loss": 0.992,
"step": 58
},
{
"epoch": 0.12526539278131635,
"grad_norm": 0.8411662078974967,
"learning_rate": 9.857925868735774e-06,
"loss": 0.9868,
"step": 59
},
{
"epoch": 0.12738853503184713,
"grad_norm": 0.7703519212712204,
"learning_rate": 9.84948846208173e-06,
"loss": 0.9797,
"step": 60
},
{
"epoch": 0.12951167728237792,
"grad_norm": 0.8160023784995357,
"learning_rate": 9.840811514988294e-06,
"loss": 0.9056,
"step": 61
},
{
"epoch": 0.1316348195329087,
"grad_norm": 0.7403495030999158,
"learning_rate": 9.831895456053197e-06,
"loss": 0.968,
"step": 62
},
{
"epoch": 0.1337579617834395,
"grad_norm": 0.8368650004711656,
"learning_rate": 9.822740725685087e-06,
"loss": 0.991,
"step": 63
},
{
"epoch": 0.13588110403397027,
"grad_norm": 0.7631095210629854,
"learning_rate": 9.81334777608179e-06,
"loss": 1.0121,
"step": 64
},
{
"epoch": 0.13800424628450106,
"grad_norm": 0.7855099822305834,
"learning_rate": 9.803717071207965e-06,
"loss": 1.0218,
"step": 65
},
{
"epoch": 0.14012738853503184,
"grad_norm": 0.7211799308460066,
"learning_rate": 9.793849086772198e-06,
"loss": 0.9605,
"step": 66
},
{
"epoch": 0.14225053078556263,
"grad_norm": 0.7158533314697694,
"learning_rate": 9.783744310203492e-06,
"loss": 0.9648,
"step": 67
},
{
"epoch": 0.14437367303609341,
"grad_norm": 0.8116378282708514,
"learning_rate": 9.77340324062719e-06,
"loss": 0.9627,
"step": 68
},
{
"epoch": 0.1464968152866242,
"grad_norm": 0.6729942687368299,
"learning_rate": 9.76282638884034e-06,
"loss": 0.9398,
"step": 69
},
{
"epoch": 0.14861995753715498,
"grad_norm": 0.7925561163372088,
"learning_rate": 9.752014277286433e-06,
"loss": 1.0496,
"step": 70
},
{
"epoch": 0.15074309978768577,
"grad_norm": 0.8168812732930426,
"learning_rate": 9.740967440029628e-06,
"loss": 0.9914,
"step": 71
},
{
"epoch": 0.15286624203821655,
"grad_norm": 0.6727362552724726,
"learning_rate": 9.729686422728353e-06,
"loss": 1.0114,
"step": 72
},
{
"epoch": 0.15498938428874734,
"grad_norm": 0.7119941049177072,
"learning_rate": 9.718171782608355e-06,
"loss": 0.959,
"step": 73
},
{
"epoch": 0.15711252653927812,
"grad_norm": 0.8057011161059664,
"learning_rate": 9.706424088435183e-06,
"loss": 0.9365,
"step": 74
},
{
"epoch": 0.1592356687898089,
"grad_norm": 0.6871742220004184,
"learning_rate": 9.694443920486083e-06,
"loss": 0.9802,
"step": 75
},
{
"epoch": 0.1613588110403397,
"grad_norm": 0.7191162102363715,
"learning_rate": 9.682231870521347e-06,
"loss": 0.8989,
"step": 76
},
{
"epoch": 0.16348195329087048,
"grad_norm": 1.8088460965549538,
"learning_rate": 9.669788541755072e-06,
"loss": 0.9524,
"step": 77
},
{
"epoch": 0.16560509554140126,
"grad_norm": 0.7467517862627305,
"learning_rate": 9.657114548825372e-06,
"loss": 0.969,
"step": 78
},
{
"epoch": 0.16772823779193205,
"grad_norm": 0.6507183031203463,
"learning_rate": 9.644210517764014e-06,
"loss": 0.9671,
"step": 79
},
{
"epoch": 0.16985138004246284,
"grad_norm": 0.7623621079641432,
"learning_rate": 9.631077085965501e-06,
"loss": 1.0642,
"step": 80
},
{
"epoch": 0.17197452229299362,
"grad_norm": 0.7822356119042599,
"learning_rate": 9.617714902155576e-06,
"loss": 0.9911,
"step": 81
},
{
"epoch": 0.1740976645435244,
"grad_norm": 0.7827100099968081,
"learning_rate": 9.60412462635919e-06,
"loss": 1.0467,
"step": 82
},
{
"epoch": 0.1762208067940552,
"grad_norm": 0.7379573454971106,
"learning_rate": 9.590306929867896e-06,
"loss": 0.9685,
"step": 83
},
{
"epoch": 0.17834394904458598,
"grad_norm": 0.659254589941974,
"learning_rate": 9.576262495206689e-06,
"loss": 0.9991,
"step": 84
},
{
"epoch": 0.18046709129511676,
"grad_norm": 0.6623198264855394,
"learning_rate": 9.561992016100293e-06,
"loss": 0.9784,
"step": 85
},
{
"epoch": 0.18259023354564755,
"grad_norm": 0.6847143494642806,
"learning_rate": 9.547496197438896e-06,
"loss": 1.0074,
"step": 86
},
{
"epoch": 0.18471337579617833,
"grad_norm": 0.7114741953342526,
"learning_rate": 9.532775755243334e-06,
"loss": 1.0078,
"step": 87
},
{
"epoch": 0.18683651804670912,
"grad_norm": 0.7322798670773077,
"learning_rate": 9.517831416629717e-06,
"loss": 0.9434,
"step": 88
},
{
"epoch": 0.18895966029723993,
"grad_norm": 0.7355300151781767,
"learning_rate": 9.502663919773516e-06,
"loss": 0.9226,
"step": 89
},
{
"epoch": 0.1910828025477707,
"grad_norm": 0.6728390445271868,
"learning_rate": 9.487274013873104e-06,
"loss": 0.9917,
"step": 90
},
{
"epoch": 0.1932059447983015,
"grad_norm": 0.7065250007964868,
"learning_rate": 9.471662459112747e-06,
"loss": 0.9152,
"step": 91
},
{
"epoch": 0.19532908704883228,
"grad_norm": 0.7619468736243152,
"learning_rate": 9.455830026625053e-06,
"loss": 0.9537,
"step": 92
},
{
"epoch": 0.19745222929936307,
"grad_norm": 0.7903432346339875,
"learning_rate": 9.439777498452883e-06,
"loss": 0.9674,
"step": 93
},
{
"epoch": 0.19957537154989385,
"grad_norm": 0.7446836330388329,
"learning_rate": 9.423505667510724e-06,
"loss": 0.9492,
"step": 94
},
{
"epoch": 0.20169851380042464,
"grad_norm": 0.6684138178927302,
"learning_rate": 9.40701533754552e-06,
"loss": 1.0012,
"step": 95
},
{
"epoch": 0.20382165605095542,
"grad_norm": 0.7232014896126353,
"learning_rate": 9.390307323096972e-06,
"loss": 0.9491,
"step": 96
},
{
"epoch": 0.2059447983014862,
"grad_norm": 0.7677419037215932,
"learning_rate": 9.373382449457305e-06,
"loss": 0.9484,
"step": 97
},
{
"epoch": 0.208067940552017,
"grad_norm": 0.7553292492265486,
"learning_rate": 9.356241552630503e-06,
"loss": 0.9329,
"step": 98
},
{
"epoch": 0.21019108280254778,
"grad_norm": 0.731417101445953,
"learning_rate": 9.338885479291012e-06,
"loss": 1.002,
"step": 99
},
{
"epoch": 0.21231422505307856,
"grad_norm": 0.7218309778871758,
"learning_rate": 9.321315086741916e-06,
"loss": 1.0027,
"step": 100
},
{
"epoch": 0.21443736730360935,
"grad_norm": 0.7064792931836008,
"learning_rate": 9.303531242872606e-06,
"loss": 1.0383,
"step": 101
},
{
"epoch": 0.21656050955414013,
"grad_norm": 0.7789379913410127,
"learning_rate": 9.285534826115884e-06,
"loss": 0.9571,
"step": 102
},
{
"epoch": 0.21868365180467092,
"grad_norm": 0.7300691645038767,
"learning_rate": 9.2673267254046e-06,
"loss": 0.9049,
"step": 103
},
{
"epoch": 0.2208067940552017,
"grad_norm": 0.6966548166340003,
"learning_rate": 9.248907840127726e-06,
"loss": 0.9682,
"step": 104
},
{
"epoch": 0.2229299363057325,
"grad_norm": 0.6855280784617488,
"learning_rate": 9.230279080085933e-06,
"loss": 0.9296,
"step": 105
},
{
"epoch": 0.22505307855626328,
"grad_norm": 0.7528839990687416,
"learning_rate": 9.211441365446661e-06,
"loss": 0.9324,
"step": 106
},
{
"epoch": 0.22717622080679406,
"grad_norm": 0.7238165951427732,
"learning_rate": 9.192395626698656e-06,
"loss": 0.9746,
"step": 107
},
{
"epoch": 0.22929936305732485,
"grad_norm": 0.7323285515771358,
"learning_rate": 9.173142804606012e-06,
"loss": 0.9424,
"step": 108
},
{
"epoch": 0.23142250530785563,
"grad_norm": 0.7312649696483972,
"learning_rate": 9.153683850161706e-06,
"loss": 0.9893,
"step": 109
},
{
"epoch": 0.23354564755838642,
"grad_norm": 0.6801476689460046,
"learning_rate": 9.13401972454062e-06,
"loss": 1.0064,
"step": 110
},
{
"epoch": 0.2356687898089172,
"grad_norm": 0.6569930110967293,
"learning_rate": 9.114151399052064e-06,
"loss": 0.9498,
"step": 111
},
{
"epoch": 0.23779193205944799,
"grad_norm": 0.6834181446007654,
"learning_rate": 9.094079855091797e-06,
"loss": 0.9686,
"step": 112
},
{
"epoch": 0.23991507430997877,
"grad_norm": 0.7531028590215507,
"learning_rate": 9.073806084093556e-06,
"loss": 0.941,
"step": 113
},
{
"epoch": 0.24203821656050956,
"grad_norm": 0.7710859996596717,
"learning_rate": 9.053331087480075e-06,
"loss": 0.947,
"step": 114
},
{
"epoch": 0.24416135881104034,
"grad_norm": 0.7181827447543798,
"learning_rate": 9.032655876613636e-06,
"loss": 0.9993,
"step": 115
},
{
"epoch": 0.24628450106157113,
"grad_norm": 0.8046003364408971,
"learning_rate": 9.01178147274609e-06,
"loss": 0.9677,
"step": 116
},
{
"epoch": 0.2484076433121019,
"grad_norm": 0.7504304096891474,
"learning_rate": 8.990708906968431e-06,
"loss": 0.9713,
"step": 117
},
{
"epoch": 0.2505307855626327,
"grad_norm": 0.7331303659304669,
"learning_rate": 8.969439220159861e-06,
"loss": 0.9716,
"step": 118
},
{
"epoch": 0.2526539278131635,
"grad_norm": 0.784369916505515,
"learning_rate": 8.947973462936366e-06,
"loss": 1.0014,
"step": 119
},
{
"epoch": 0.25477707006369427,
"grad_norm": 0.7610760319401311,
"learning_rate": 8.926312695598837e-06,
"loss": 1.068,
"step": 120
},
{
"epoch": 0.25690021231422505,
"grad_norm": 0.7919075855161128,
"learning_rate": 8.904457988080682e-06,
"loss": 0.9523,
"step": 121
},
{
"epoch": 0.25902335456475584,
"grad_norm": 0.7293682697898348,
"learning_rate": 8.882410419894983e-06,
"loss": 0.9536,
"step": 122
},
{
"epoch": 0.2611464968152866,
"grad_norm": 0.6770519934059125,
"learning_rate": 8.860171080081174e-06,
"loss": 1.0067,
"step": 123
},
{
"epoch": 0.2632696390658174,
"grad_norm": 0.7668379033306872,
"learning_rate": 8.837741067151251e-06,
"loss": 0.973,
"step": 124
},
{
"epoch": 0.2653927813163482,
"grad_norm": 0.7139714751876862,
"learning_rate": 8.8151214890355e-06,
"loss": 0.9314,
"step": 125
},
{
"epoch": 0.267515923566879,
"grad_norm": 0.6814693095633286,
"learning_rate": 8.792313463027777e-06,
"loss": 1.0245,
"step": 126
},
{
"epoch": 0.26963906581740976,
"grad_norm": 0.6924465193860749,
"learning_rate": 8.76931811573033e-06,
"loss": 0.9371,
"step": 127
},
{
"epoch": 0.27176220806794055,
"grad_norm": 0.642073845716499,
"learning_rate": 8.74613658299813e-06,
"loss": 0.8808,
"step": 128
},
{
"epoch": 0.27388535031847133,
"grad_norm": 0.7174917070331621,
"learning_rate": 8.72277000988278e-06,
"loss": 0.9637,
"step": 129
},
{
"epoch": 0.2760084925690021,
"grad_norm": 0.6982926048873913,
"learning_rate": 8.699219550575954e-06,
"loss": 0.9757,
"step": 130
},
{
"epoch": 0.2781316348195329,
"grad_norm": 0.6567002214778425,
"learning_rate": 8.675486368352376e-06,
"loss": 0.9503,
"step": 131
},
{
"epoch": 0.2802547770700637,
"grad_norm": 0.6794958936952806,
"learning_rate": 8.651571635512372e-06,
"loss": 0.9736,
"step": 132
},
{
"epoch": 0.2823779193205945,
"grad_norm": 0.6646146029012326,
"learning_rate": 8.627476533323957e-06,
"loss": 0.9239,
"step": 133
},
{
"epoch": 0.28450106157112526,
"grad_norm": 0.6632510333242262,
"learning_rate": 8.603202251964492e-06,
"loss": 0.9977,
"step": 134
},
{
"epoch": 0.28662420382165604,
"grad_norm": 0.7248035608452511,
"learning_rate": 8.578749990461884e-06,
"loss": 1.0044,
"step": 135
},
{
"epoch": 0.28874734607218683,
"grad_norm": 0.7145181226027357,
"learning_rate": 8.554120956635375e-06,
"loss": 0.9909,
"step": 136
},
{
"epoch": 0.2908704883227176,
"grad_norm": 0.6810283606365963,
"learning_rate": 8.52931636703587e-06,
"loss": 0.9278,
"step": 137
},
{
"epoch": 0.2929936305732484,
"grad_norm": 0.6390994316344762,
"learning_rate": 8.504337446885854e-06,
"loss": 0.9401,
"step": 138
},
{
"epoch": 0.2951167728237792,
"grad_norm": 0.68090912832389,
"learning_rate": 8.47918543001886e-06,
"loss": 0.9274,
"step": 139
},
{
"epoch": 0.29723991507430997,
"grad_norm": 0.7071789703121685,
"learning_rate": 8.453861558818542e-06,
"loss": 0.9887,
"step": 140
},
{
"epoch": 0.29936305732484075,
"grad_norm": 0.7352751295567939,
"learning_rate": 8.428367084157292e-06,
"loss": 0.9288,
"step": 141
},
{
"epoch": 0.30148619957537154,
"grad_norm": 0.6595005202531524,
"learning_rate": 8.402703265334455e-06,
"loss": 1.0007,
"step": 142
},
{
"epoch": 0.3036093418259023,
"grad_norm": 0.6735904870127594,
"learning_rate": 8.376871370014139e-06,
"loss": 1.0216,
"step": 143
},
{
"epoch": 0.3057324840764331,
"grad_norm": 0.6975929380086214,
"learning_rate": 8.350872674162578e-06,
"loss": 0.8988,
"step": 144
},
{
"epoch": 0.3078556263269639,
"grad_norm": 0.7224054080414788,
"learning_rate": 8.324708461985124e-06,
"loss": 0.9669,
"step": 145
},
{
"epoch": 0.3099787685774947,
"grad_norm": 0.7281952438832896,
"learning_rate": 8.298380025862805e-06,
"loss": 0.9126,
"step": 146
},
{
"epoch": 0.31210191082802546,
"grad_norm": 0.7548928188615618,
"learning_rate": 8.271888666288488e-06,
"loss": 0.9399,
"step": 147
},
{
"epoch": 0.31422505307855625,
"grad_norm": 0.6597939741370626,
"learning_rate": 8.245235691802644e-06,
"loss": 1.0276,
"step": 148
},
{
"epoch": 0.31634819532908703,
"grad_norm": 0.7311599924151362,
"learning_rate": 8.218422418928709e-06,
"loss": 1.0213,
"step": 149
},
{
"epoch": 0.3184713375796178,
"grad_norm": 0.6841346093353339,
"learning_rate": 8.191450172108058e-06,
"loss": 0.9467,
"step": 150
},
{
"epoch": 0.3205944798301486,
"grad_norm": 0.7131209569882775,
"learning_rate": 8.164320283634585e-06,
"loss": 0.9844,
"step": 151
},
{
"epoch": 0.3227176220806794,
"grad_norm": 0.6987651677449753,
"learning_rate": 8.137034093588885e-06,
"loss": 0.9849,
"step": 152
},
{
"epoch": 0.3248407643312102,
"grad_norm": 0.705165665042376,
"learning_rate": 8.109592949772076e-06,
"loss": 1.0129,
"step": 153
},
{
"epoch": 0.32696390658174096,
"grad_norm": 0.7169171021772854,
"learning_rate": 8.081998207639212e-06,
"loss": 1.0058,
"step": 154
},
{
"epoch": 0.32908704883227174,
"grad_norm": 0.6104059708044309,
"learning_rate": 8.054251230232333e-06,
"loss": 0.9988,
"step": 155
},
{
"epoch": 0.33121019108280253,
"grad_norm": 0.6934560008115752,
"learning_rate": 8.026353388113142e-06,
"loss": 0.979,
"step": 156
},
{
"epoch": 0.3333333333333333,
"grad_norm": 0.6725043581752848,
"learning_rate": 7.998306059295302e-06,
"loss": 1.0304,
"step": 157
},
{
"epoch": 0.3354564755838641,
"grad_norm": 0.6223984704621439,
"learning_rate": 7.97011062917637e-06,
"loss": 0.9363,
"step": 158
},
{
"epoch": 0.3375796178343949,
"grad_norm": 0.706091085660486,
"learning_rate": 7.941768490469368e-06,
"loss": 1.0168,
"step": 159
},
{
"epoch": 0.33970276008492567,
"grad_norm": 0.7327606621828687,
"learning_rate": 7.913281043133978e-06,
"loss": 0.9492,
"step": 160
},
{
"epoch": 0.34182590233545646,
"grad_norm": 0.6772432140445678,
"learning_rate": 7.884649694307413e-06,
"loss": 0.9167,
"step": 161
},
{
"epoch": 0.34394904458598724,
"grad_norm": 0.6801663140277004,
"learning_rate": 7.855875858234894e-06,
"loss": 0.9572,
"step": 162
},
{
"epoch": 0.346072186836518,
"grad_norm": 0.6863907703405828,
"learning_rate": 7.826960956199796e-06,
"loss": 0.9643,
"step": 163
},
{
"epoch": 0.3481953290870488,
"grad_norm": 0.6932142414615767,
"learning_rate": 7.797906416453445e-06,
"loss": 1.0056,
"step": 164
},
{
"epoch": 0.3503184713375796,
"grad_norm": 0.6590692919655603,
"learning_rate": 7.768713674144578e-06,
"loss": 1.0103,
"step": 165
},
{
"epoch": 0.3524416135881104,
"grad_norm": 0.7043688709406302,
"learning_rate": 7.739384171248436e-06,
"loss": 0.9201,
"step": 166
},
{
"epoch": 0.35456475583864117,
"grad_norm": 0.7342438164454753,
"learning_rate": 7.709919356495555e-06,
"loss": 0.9547,
"step": 167
},
{
"epoch": 0.35668789808917195,
"grad_norm": 0.6051801915416819,
"learning_rate": 7.6803206853002e-06,
"loss": 1.0027,
"step": 168
},
{
"epoch": 0.35881104033970274,
"grad_norm": 0.7036658747712503,
"learning_rate": 7.650589619688468e-06,
"loss": 0.9752,
"step": 169
},
{
"epoch": 0.3609341825902335,
"grad_norm": 0.6579507052321748,
"learning_rate": 7.620727628226081e-06,
"loss": 0.9079,
"step": 170
},
{
"epoch": 0.3630573248407643,
"grad_norm": 0.6196419289472935,
"learning_rate": 7.590736185945843e-06,
"loss": 0.9451,
"step": 171
},
{
"epoch": 0.3651804670912951,
"grad_norm": 0.7001997909038171,
"learning_rate": 7.560616774274775e-06,
"loss": 0.9347,
"step": 172
},
{
"epoch": 0.3673036093418259,
"grad_norm": 0.6351437976372771,
"learning_rate": 7.5303708809609514e-06,
"loss": 0.9299,
"step": 173
},
{
"epoch": 0.36942675159235666,
"grad_norm": 0.664760060599698,
"learning_rate": 7.500000000000001e-06,
"loss": 0.9716,
"step": 174
},
{
"epoch": 0.37154989384288745,
"grad_norm": 0.6524778235549052,
"learning_rate": 7.469505631561318e-06,
"loss": 0.9444,
"step": 175
},
{
"epoch": 0.37367303609341823,
"grad_norm": 0.7096570464027714,
"learning_rate": 7.4388892819139625e-06,
"loss": 0.981,
"step": 176
},
{
"epoch": 0.37579617834394907,
"grad_norm": 0.701946788192956,
"learning_rate": 7.408152463352249e-06,
"loss": 0.9394,
"step": 177
},
{
"epoch": 0.37791932059447986,
"grad_norm": 0.7062701533777472,
"learning_rate": 7.3772966941210585e-06,
"loss": 0.9795,
"step": 178
},
{
"epoch": 0.38004246284501064,
"grad_norm": 0.7371591165235177,
"learning_rate": 7.346323498340839e-06,
"loss": 0.9207,
"step": 179
},
{
"epoch": 0.3821656050955414,
"grad_norm": 0.732545654535989,
"learning_rate": 7.3152344059323165e-06,
"loss": 1.0082,
"step": 180
},
{
"epoch": 0.3842887473460722,
"grad_norm": 0.7128716261827475,
"learning_rate": 7.284030952540937e-06,
"loss": 0.9764,
"step": 181
},
{
"epoch": 0.386411889596603,
"grad_norm": 0.6497530527994643,
"learning_rate": 7.252714679461001e-06,
"loss": 0.9936,
"step": 182
},
{
"epoch": 0.3885350318471338,
"grad_norm": 0.7225290801130503,
"learning_rate": 7.221287133559537e-06,
"loss": 0.9304,
"step": 183
},
{
"epoch": 0.39065817409766457,
"grad_norm": 0.6765347357945422,
"learning_rate": 7.189749867199899e-06,
"loss": 0.9094,
"step": 184
},
{
"epoch": 0.39278131634819535,
"grad_norm": 0.7174562419985759,
"learning_rate": 7.1581044381650735e-06,
"loss": 0.9801,
"step": 185
},
{
"epoch": 0.39490445859872614,
"grad_norm": 0.7472567362872526,
"learning_rate": 7.126352409580749e-06,
"loss": 1.0142,
"step": 186
},
{
"epoch": 0.3970276008492569,
"grad_norm": 0.6407928497506329,
"learning_rate": 7.094495349838093e-06,
"loss": 1.0047,
"step": 187
},
{
"epoch": 0.3991507430997877,
"grad_norm": 0.7053895517849673,
"learning_rate": 7.062534832516288e-06,
"loss": 1.0126,
"step": 188
},
{
"epoch": 0.4012738853503185,
"grad_norm": 0.7061208624812129,
"learning_rate": 7.0304724363048025e-06,
"loss": 0.9254,
"step": 189
},
{
"epoch": 0.4033970276008493,
"grad_norm": 0.6892392457356276,
"learning_rate": 6.998309744925411e-06,
"loss": 0.8733,
"step": 190
},
{
"epoch": 0.40552016985138006,
"grad_norm": 0.6824843754838162,
"learning_rate": 6.9660483470539704e-06,
"loss": 0.9369,
"step": 191
},
{
"epoch": 0.40764331210191085,
"grad_norm": 0.7207059177507389,
"learning_rate": 6.933689836241939e-06,
"loss": 0.9403,
"step": 192
},
{
"epoch": 0.40976645435244163,
"grad_norm": 0.651073565486602,
"learning_rate": 6.901235810837668e-06,
"loss": 0.9733,
"step": 193
},
{
"epoch": 0.4118895966029724,
"grad_norm": 0.6694293166716937,
"learning_rate": 6.868687873907458e-06,
"loss": 0.9576,
"step": 194
},
{
"epoch": 0.4140127388535032,
"grad_norm": 0.6526111024271177,
"learning_rate": 6.836047633156361e-06,
"loss": 0.9173,
"step": 195
},
{
"epoch": 0.416135881104034,
"grad_norm": 0.670504287795627,
"learning_rate": 6.8033167008487784e-06,
"loss": 0.9082,
"step": 196
},
{
"epoch": 0.4182590233545648,
"grad_norm": 0.6649654248466491,
"learning_rate": 6.77049669372882e-06,
"loss": 0.9684,
"step": 197
},
{
"epoch": 0.42038216560509556,
"grad_norm": 0.6971039334316886,
"learning_rate": 6.737589232940445e-06,
"loss": 0.998,
"step": 198
},
{
"epoch": 0.42250530785562634,
"grad_norm": 0.6625856942226671,
"learning_rate": 6.704595943947385e-06,
"loss": 0.9324,
"step": 199
},
{
"epoch": 0.42462845010615713,
"grad_norm": 0.6467812509098547,
"learning_rate": 6.671518456452859e-06,
"loss": 0.9503,
"step": 200
},
{
"epoch": 0.4267515923566879,
"grad_norm": 0.6751623960134163,
"learning_rate": 6.638358404319064e-06,
"loss": 0.969,
"step": 201
},
{
"epoch": 0.4288747346072187,
"grad_norm": 0.6538986247088467,
"learning_rate": 6.605117425486483e-06,
"loss": 0.9637,
"step": 202
},
{
"epoch": 0.4309978768577495,
"grad_norm": 0.6283492223913785,
"learning_rate": 6.571797161892965e-06,
"loss": 0.919,
"step": 203
},
{
"epoch": 0.43312101910828027,
"grad_norm": 0.6754135565342441,
"learning_rate": 6.538399259392637e-06,
"loss": 0.9515,
"step": 204
},
{
"epoch": 0.43524416135881105,
"grad_norm": 0.6200332586740341,
"learning_rate": 6.504925367674595e-06,
"loss": 1.0323,
"step": 205
},
{
"epoch": 0.43736730360934184,
"grad_norm": 0.6424305197480263,
"learning_rate": 6.471377140181419e-06,
"loss": 0.9402,
"step": 206
},
{
"epoch": 0.4394904458598726,
"grad_norm": 0.709344876257473,
"learning_rate": 6.437756234027512e-06,
"loss": 0.9472,
"step": 207
},
{
"epoch": 0.4416135881104034,
"grad_norm": 0.6954520680490753,
"learning_rate": 6.40406430991723e-06,
"loss": 0.9094,
"step": 208
},
{
"epoch": 0.4437367303609342,
"grad_norm": 0.6630556744494306,
"learning_rate": 6.370303032062869e-06,
"loss": 0.9094,
"step": 209
},
{
"epoch": 0.445859872611465,
"grad_norm": 0.6422331097269288,
"learning_rate": 6.336474068102444e-06,
"loss": 0.9728,
"step": 210
},
{
"epoch": 0.44798301486199577,
"grad_norm": 0.7033435384287802,
"learning_rate": 6.302579089017328e-06,
"loss": 0.9465,
"step": 211
},
{
"epoch": 0.45010615711252655,
"grad_norm": 0.7227177031033113,
"learning_rate": 6.268619769049713e-06,
"loss": 0.9227,
"step": 212
},
{
"epoch": 0.45222929936305734,
"grad_norm": 0.7229370830386173,
"learning_rate": 6.234597785619906e-06,
"loss": 0.9222,
"step": 213
},
{
"epoch": 0.4543524416135881,
"grad_norm": 0.6798315368100751,
"learning_rate": 6.200514819243476e-06,
"loss": 0.9065,
"step": 214
},
{
"epoch": 0.4564755838641189,
"grad_norm": 0.679227834446727,
"learning_rate": 6.166372553448241e-06,
"loss": 0.9163,
"step": 215
},
{
"epoch": 0.4585987261146497,
"grad_norm": 0.7284520215353752,
"learning_rate": 6.132172674691119e-06,
"loss": 0.9645,
"step": 216
},
{
"epoch": 0.4607218683651805,
"grad_norm": 0.6536710419909815,
"learning_rate": 6.097916872274815e-06,
"loss": 1.0051,
"step": 217
},
{
"epoch": 0.46284501061571126,
"grad_norm": 0.6349521879178258,
"learning_rate": 6.063606838264384e-06,
"loss": 0.9397,
"step": 218
},
{
"epoch": 0.46496815286624205,
"grad_norm": 0.64300216518759,
"learning_rate": 6.029244267403652e-06,
"loss": 0.921,
"step": 219
},
{
"epoch": 0.46709129511677283,
"grad_norm": 0.6577888839915914,
"learning_rate": 5.9948308570315e-06,
"loss": 0.9415,
"step": 220
},
{
"epoch": 0.4692144373673036,
"grad_norm": 0.6441869969003124,
"learning_rate": 5.960368306998023e-06,
"loss": 1.0097,
"step": 221
},
{
"epoch": 0.4713375796178344,
"grad_norm": 0.6164144128466511,
"learning_rate": 5.92585831958058e-06,
"loss": 0.884,
"step": 222
},
{
"epoch": 0.4734607218683652,
"grad_norm": 0.6862129278542842,
"learning_rate": 5.891302599399686e-06,
"loss": 0.9178,
"step": 223
},
{
"epoch": 0.47558386411889597,
"grad_norm": 0.648139964743113,
"learning_rate": 5.856702853334833e-06,
"loss": 0.9932,
"step": 224
},
{
"epoch": 0.47770700636942676,
"grad_norm": 0.6854865636704949,
"learning_rate": 5.8220607904401725e-06,
"loss": 0.9235,
"step": 225
},
{
"epoch": 0.47983014861995754,
"grad_norm": 0.6711248584344208,
"learning_rate": 5.78737812186009e-06,
"loss": 0.9212,
"step": 226
},
{
"epoch": 0.4819532908704883,
"grad_norm": 0.6302334515166729,
"learning_rate": 5.752656560744692e-06,
"loss": 0.9562,
"step": 227
},
{
"epoch": 0.4840764331210191,
"grad_norm": 0.6752405762911042,
"learning_rate": 5.717897822165179e-06,
"loss": 0.9556,
"step": 228
},
{
"epoch": 0.4861995753715499,
"grad_norm": 0.7007828217751222,
"learning_rate": 5.6831036230291345e-06,
"loss": 0.976,
"step": 229
},
{
"epoch": 0.4883227176220807,
"grad_norm": 0.7096600330939049,
"learning_rate": 5.648275681995716e-06,
"loss": 0.9477,
"step": 230
},
{
"epoch": 0.49044585987261147,
"grad_norm": 0.7034615904822891,
"learning_rate": 5.613415719390759e-06,
"loss": 0.906,
"step": 231
},
{
"epoch": 0.49256900212314225,
"grad_norm": 0.735843820490175,
"learning_rate": 5.578525457121807e-06,
"loss": 1.0386,
"step": 232
},
{
"epoch": 0.49469214437367304,
"grad_norm": 0.72350491411306,
"learning_rate": 5.543606618593053e-06,
"loss": 0.9102,
"step": 233
},
{
"epoch": 0.4968152866242038,
"grad_norm": 0.6865757038798469,
"learning_rate": 5.508660928620216e-06,
"loss": 0.8942,
"step": 234
},
{
"epoch": 0.4989384288747346,
"grad_norm": 0.6961697220230768,
"learning_rate": 5.473690113345343e-06,
"loss": 0.9593,
"step": 235
},
{
"epoch": 0.5010615711252654,
"grad_norm": 0.683780839923492,
"learning_rate": 5.438695900151537e-06,
"loss": 0.8619,
"step": 236
},
{
"epoch": 0.5031847133757962,
"grad_norm": 0.8892028839705228,
"learning_rate": 5.403680017577653e-06,
"loss": 0.9516,
"step": 237
},
{
"epoch": 0.505307855626327,
"grad_norm": 0.6485125727265503,
"learning_rate": 5.368644195232896e-06,
"loss": 0.9792,
"step": 238
},
{
"epoch": 0.5074309978768577,
"grad_norm": 0.6745197820597727,
"learning_rate": 5.3335901637113985e-06,
"loss": 0.9247,
"step": 239
},
{
"epoch": 0.5095541401273885,
"grad_norm": 0.6594590567939217,
"learning_rate": 5.298519654506736e-06,
"loss": 0.999,
"step": 240
},
{
"epoch": 0.5116772823779193,
"grad_norm": 0.7018679426711577,
"learning_rate": 5.2634343999263985e-06,
"loss": 0.9351,
"step": 241
},
{
"epoch": 0.5138004246284501,
"grad_norm": 0.6909133067899488,
"learning_rate": 5.228336133006223e-06,
"loss": 0.8968,
"step": 242
},
{
"epoch": 0.5159235668789809,
"grad_norm": 0.6709376382865763,
"learning_rate": 5.193226587424793e-06,
"loss": 0.9038,
"step": 243
},
{
"epoch": 0.5180467091295117,
"grad_norm": 0.7111428217652355,
"learning_rate": 5.158107497417795e-06,
"loss": 0.983,
"step": 244
},
{
"epoch": 0.5201698513800425,
"grad_norm": 0.6351568108537181,
"learning_rate": 5.122980597692372e-06,
"loss": 0.9938,
"step": 245
},
{
"epoch": 0.5222929936305732,
"grad_norm": 0.6817936890017572,
"learning_rate": 5.087847623341421e-06,
"loss": 0.9761,
"step": 246
},
{
"epoch": 0.524416135881104,
"grad_norm": 0.6900540530544701,
"learning_rate": 5.052710309757899e-06,
"loss": 0.9552,
"step": 247
},
{
"epoch": 0.5265392781316348,
"grad_norm": 0.6719177167300503,
"learning_rate": 5.0175703925490936e-06,
"loss": 0.9249,
"step": 248
},
{
"epoch": 0.5286624203821656,
"grad_norm": 0.6998669576753951,
"learning_rate": 4.982429607450907e-06,
"loss": 0.9643,
"step": 249
},
{
"epoch": 0.5307855626326964,
"grad_norm": 0.6424511913080388,
"learning_rate": 4.947289690242103e-06,
"loss": 0.9772,
"step": 250
},
{
"epoch": 0.5329087048832272,
"grad_norm": 0.7159365178289718,
"learning_rate": 4.91215237665858e-06,
"loss": 0.9305,
"step": 251
},
{
"epoch": 0.535031847133758,
"grad_norm": 0.7475463786009823,
"learning_rate": 4.877019402307629e-06,
"loss": 0.936,
"step": 252
},
{
"epoch": 0.5371549893842887,
"grad_norm": 0.7136691758831674,
"learning_rate": 4.841892502582206e-06,
"loss": 0.9603,
"step": 253
},
{
"epoch": 0.5392781316348195,
"grad_norm": 0.6203396388829158,
"learning_rate": 4.806773412575211e-06,
"loss": 1.0014,
"step": 254
},
{
"epoch": 0.5414012738853503,
"grad_norm": 0.658243565597372,
"learning_rate": 4.7716638669937784e-06,
"loss": 0.923,
"step": 255
},
{
"epoch": 0.5435244161358811,
"grad_norm": 0.6728067153766357,
"learning_rate": 4.736565600073602e-06,
"loss": 0.9161,
"step": 256
},
{
"epoch": 0.5456475583864119,
"grad_norm": 0.7214951443210661,
"learning_rate": 4.701480345493266e-06,
"loss": 0.9259,
"step": 257
},
{
"epoch": 0.5477707006369427,
"grad_norm": 0.6619249080654683,
"learning_rate": 4.666409836288603e-06,
"loss": 0.9654,
"step": 258
},
{
"epoch": 0.5498938428874734,
"grad_norm": 0.6714573866349525,
"learning_rate": 4.631355804767106e-06,
"loss": 0.9374,
"step": 259
},
{
"epoch": 0.5520169851380042,
"grad_norm": 0.6592147190515532,
"learning_rate": 4.596319982422348e-06,
"loss": 0.9083,
"step": 260
},
{
"epoch": 0.554140127388535,
"grad_norm": 0.6942501349295557,
"learning_rate": 4.561304099848464e-06,
"loss": 0.9506,
"step": 261
},
{
"epoch": 0.5562632696390658,
"grad_norm": 0.6988620238717118,
"learning_rate": 4.526309886654659e-06,
"loss": 0.9977,
"step": 262
},
{
"epoch": 0.5583864118895966,
"grad_norm": 0.6921429983455063,
"learning_rate": 4.491339071379783e-06,
"loss": 0.8757,
"step": 263
},
{
"epoch": 0.5605095541401274,
"grad_norm": 0.6771212073875934,
"learning_rate": 4.4563933814069475e-06,
"loss": 0.932,
"step": 264
},
{
"epoch": 0.5626326963906582,
"grad_norm": 0.6420633068956142,
"learning_rate": 4.4214745428781946e-06,
"loss": 0.9849,
"step": 265
},
{
"epoch": 0.564755838641189,
"grad_norm": 0.7089424922243647,
"learning_rate": 4.386584280609242e-06,
"loss": 0.9816,
"step": 266
},
{
"epoch": 0.5668789808917197,
"grad_norm": 0.6662051267433905,
"learning_rate": 4.351724318004286e-06,
"loss": 0.9407,
"step": 267
},
{
"epoch": 0.5690021231422505,
"grad_norm": 0.6301712452097127,
"learning_rate": 4.316896376970866e-06,
"loss": 1.0318,
"step": 268
},
{
"epoch": 0.5711252653927813,
"grad_norm": 0.6314890663243107,
"learning_rate": 4.282102177834822e-06,
"loss": 0.9311,
"step": 269
},
{
"epoch": 0.5732484076433121,
"grad_norm": 0.6419289651169521,
"learning_rate": 4.2473434392553115e-06,
"loss": 0.9442,
"step": 270
},
{
"epoch": 0.5753715498938429,
"grad_norm": 0.6177620934633588,
"learning_rate": 4.212621878139912e-06,
"loss": 0.9336,
"step": 271
},
{
"epoch": 0.5774946921443737,
"grad_norm": 0.6596837531543023,
"learning_rate": 4.177939209559828e-06,
"loss": 0.9878,
"step": 272
},
{
"epoch": 0.5796178343949044,
"grad_norm": 0.6453367239167705,
"learning_rate": 4.143297146665167e-06,
"loss": 0.9079,
"step": 273
},
{
"epoch": 0.5817409766454352,
"grad_norm": 0.6522513374450648,
"learning_rate": 4.108697400600316e-06,
"loss": 0.9854,
"step": 274
},
{
"epoch": 0.583864118895966,
"grad_norm": 0.6732938314131642,
"learning_rate": 4.074141680419422e-06,
"loss": 0.9926,
"step": 275
},
{
"epoch": 0.5859872611464968,
"grad_norm": 0.7092550415636378,
"learning_rate": 4.039631693001976e-06,
"loss": 0.967,
"step": 276
},
{
"epoch": 0.5881104033970276,
"grad_norm": 0.6524896445355799,
"learning_rate": 4.005169142968503e-06,
"loss": 0.9368,
"step": 277
},
{
"epoch": 0.5902335456475584,
"grad_norm": 0.644883665492233,
"learning_rate": 3.970755732596349e-06,
"loss": 0.9341,
"step": 278
},
{
"epoch": 0.5923566878980892,
"grad_norm": 0.6736291474419323,
"learning_rate": 3.936393161735616e-06,
"loss": 0.9744,
"step": 279
},
{
"epoch": 0.5944798301486199,
"grad_norm": 0.680611909030509,
"learning_rate": 3.902083127725186e-06,
"loss": 0.9889,
"step": 280
},
{
"epoch": 0.5966029723991507,
"grad_norm": 0.6811819729687757,
"learning_rate": 3.867827325308882e-06,
"loss": 0.9783,
"step": 281
},
{
"epoch": 0.5987261146496815,
"grad_norm": 0.6365141720169178,
"learning_rate": 3.83362744655176e-06,
"loss": 0.9082,
"step": 282
},
{
"epoch": 0.6008492569002123,
"grad_norm": 0.6483027093614826,
"learning_rate": 3.799485180756526e-06,
"loss": 0.9702,
"step": 283
},
{
"epoch": 0.6029723991507431,
"grad_norm": 0.6558888618128743,
"learning_rate": 3.765402214380095e-06,
"loss": 0.9624,
"step": 284
},
{
"epoch": 0.6050955414012739,
"grad_norm": 0.7423269688448214,
"learning_rate": 3.731380230950288e-06,
"loss": 0.9803,
"step": 285
},
{
"epoch": 0.6072186836518046,
"grad_norm": 0.665408242644521,
"learning_rate": 3.6974209109826724e-06,
"loss": 0.92,
"step": 286
},
{
"epoch": 0.6093418259023354,
"grad_norm": 0.6082795254064975,
"learning_rate": 3.663525931897559e-06,
"loss": 0.916,
"step": 287
},
{
"epoch": 0.6114649681528662,
"grad_norm": 0.6554479295173997,
"learning_rate": 3.6296969679371325e-06,
"loss": 0.9609,
"step": 288
},
{
"epoch": 0.613588110403397,
"grad_norm": 0.6737780075210532,
"learning_rate": 3.595935690082769e-06,
"loss": 0.8926,
"step": 289
},
{
"epoch": 0.6157112526539278,
"grad_norm": 0.6081469745516901,
"learning_rate": 3.56224376597249e-06,
"loss": 0.9514,
"step": 290
},
{
"epoch": 0.6178343949044586,
"grad_norm": 0.6581642589602584,
"learning_rate": 3.528622859818582e-06,
"loss": 0.9565,
"step": 291
},
{
"epoch": 0.6199575371549894,
"grad_norm": 0.5923692426637494,
"learning_rate": 3.495074632325407e-06,
"loss": 0.9684,
"step": 292
},
{
"epoch": 0.6220806794055201,
"grad_norm": 0.6409823550988945,
"learning_rate": 3.461600740607366e-06,
"loss": 0.9835,
"step": 293
},
{
"epoch": 0.6242038216560509,
"grad_norm": 0.6477471386159979,
"learning_rate": 3.4282028381070366e-06,
"loss": 0.8795,
"step": 294
},
{
"epoch": 0.6263269639065817,
"grad_norm": 0.6456450294718449,
"learning_rate": 3.3948825745135196e-06,
"loss": 0.939,
"step": 295
},
{
"epoch": 0.6284501061571125,
"grad_norm": 0.7315407037525199,
"learning_rate": 3.361641595680937e-06,
"loss": 0.9801,
"step": 296
},
{
"epoch": 0.6305732484076433,
"grad_norm": 0.6452110148561251,
"learning_rate": 3.3284815435471423e-06,
"loss": 0.9615,
"step": 297
},
{
"epoch": 0.6326963906581741,
"grad_norm": 0.6253835621122691,
"learning_rate": 3.295404056052616e-06,
"loss": 0.927,
"step": 298
},
{
"epoch": 0.6348195329087049,
"grad_norm": 0.6363789821603231,
"learning_rate": 3.2624107670595567e-06,
"loss": 0.9705,
"step": 299
},
{
"epoch": 0.6369426751592356,
"grad_norm": 0.6677508958368374,
"learning_rate": 3.2295033062711823e-06,
"loss": 0.9087,
"step": 300
},
{
"epoch": 0.6390658174097664,
"grad_norm": 0.7037995039835674,
"learning_rate": 3.1966832991512232e-06,
"loss": 0.9857,
"step": 301
},
{
"epoch": 0.6411889596602972,
"grad_norm": 0.629654116611617,
"learning_rate": 3.16395236684364e-06,
"loss": 0.9507,
"step": 302
},
{
"epoch": 0.643312101910828,
"grad_norm": 0.6785540515594622,
"learning_rate": 3.131312126092544e-06,
"loss": 0.9398,
"step": 303
},
{
"epoch": 0.6454352441613588,
"grad_norm": 0.6407691844743428,
"learning_rate": 3.098764189162332e-06,
"loss": 0.9039,
"step": 304
},
{
"epoch": 0.6475583864118896,
"grad_norm": 0.6258454088461756,
"learning_rate": 3.0663101637580626e-06,
"loss": 0.9242,
"step": 305
},
{
"epoch": 0.6496815286624203,
"grad_norm": 0.745199786640981,
"learning_rate": 3.03395165294603e-06,
"loss": 0.9536,
"step": 306
},
{
"epoch": 0.6518046709129511,
"grad_norm": 0.6324017551337827,
"learning_rate": 3.0016902550745896e-06,
"loss": 0.9527,
"step": 307
},
{
"epoch": 0.6539278131634819,
"grad_norm": 0.6241906193592306,
"learning_rate": 2.9695275636951983e-06,
"loss": 0.9568,
"step": 308
},
{
"epoch": 0.6560509554140127,
"grad_norm": 0.6229924568908893,
"learning_rate": 2.9374651674837128e-06,
"loss": 1.0229,
"step": 309
},
{
"epoch": 0.6581740976645435,
"grad_norm": 0.6204870651207804,
"learning_rate": 2.9055046501619088e-06,
"loss": 0.9789,
"step": 310
},
{
"epoch": 0.6602972399150743,
"grad_norm": 0.7361247533813169,
"learning_rate": 2.8736475904192516e-06,
"loss": 0.9306,
"step": 311
},
{
"epoch": 0.6624203821656051,
"grad_norm": 0.6469024579110451,
"learning_rate": 2.841895561834927e-06,
"loss": 0.8995,
"step": 312
},
{
"epoch": 0.6645435244161358,
"grad_norm": 0.6798475871777178,
"learning_rate": 2.810250132800103e-06,
"loss": 0.9457,
"step": 313
},
{
"epoch": 0.6666666666666666,
"grad_norm": 0.5974407421450565,
"learning_rate": 2.778712866440464e-06,
"loss": 0.964,
"step": 314
},
{
"epoch": 0.6687898089171974,
"grad_norm": 0.584100105339634,
"learning_rate": 2.7472853205389997e-06,
"loss": 0.9053,
"step": 315
},
{
"epoch": 0.6709129511677282,
"grad_norm": 0.6342401081454511,
"learning_rate": 2.715969047459066e-06,
"loss": 0.9826,
"step": 316
},
{
"epoch": 0.673036093418259,
"grad_norm": 0.5786828455381722,
"learning_rate": 2.6847655940676843e-06,
"loss": 0.9468,
"step": 317
},
{
"epoch": 0.6751592356687898,
"grad_norm": 0.6091300911407583,
"learning_rate": 2.6536765016591626e-06,
"loss": 0.9259,
"step": 318
},
{
"epoch": 0.6772823779193206,
"grad_norm": 0.6539407752728442,
"learning_rate": 2.622703305878941e-06,
"loss": 0.9335,
"step": 319
},
{
"epoch": 0.6794055201698513,
"grad_norm": 0.6733794364508308,
"learning_rate": 2.5918475366477536e-06,
"loss": 0.8735,
"step": 320
},
{
"epoch": 0.6815286624203821,
"grad_norm": 0.6270883315392012,
"learning_rate": 2.5611107180860395e-06,
"loss": 0.979,
"step": 321
},
{
"epoch": 0.6836518046709129,
"grad_norm": 0.6498694158555346,
"learning_rate": 2.530494368438683e-06,
"loss": 0.8834,
"step": 322
},
{
"epoch": 0.6857749469214437,
"grad_norm": 0.6869641678831084,
"learning_rate": 2.5000000000000015e-06,
"loss": 0.9647,
"step": 323
},
{
"epoch": 0.6878980891719745,
"grad_norm": 0.6239790954969553,
"learning_rate": 2.4696291190390494e-06,
"loss": 0.9792,
"step": 324
},
{
"epoch": 0.6900212314225053,
"grad_norm": 0.6134158027251899,
"learning_rate": 2.4393832257252253e-06,
"loss": 0.9696,
"step": 325
},
{
"epoch": 0.692144373673036,
"grad_norm": 0.6086160560392221,
"learning_rate": 2.4092638140541586e-06,
"loss": 0.9228,
"step": 326
},
{
"epoch": 0.6942675159235668,
"grad_norm": 0.6498216593510738,
"learning_rate": 2.3792723717739197e-06,
"loss": 0.958,
"step": 327
},
{
"epoch": 0.6963906581740976,
"grad_norm": 0.625686536550573,
"learning_rate": 2.349410380311532e-06,
"loss": 0.8942,
"step": 328
},
{
"epoch": 0.6985138004246284,
"grad_norm": 0.6531306674626144,
"learning_rate": 2.319679314699801e-06,
"loss": 0.9158,
"step": 329
},
{
"epoch": 0.7006369426751592,
"grad_norm": 0.6484034467819224,
"learning_rate": 2.290080643504446e-06,
"loss": 0.976,
"step": 330
},
{
"epoch": 0.70276008492569,
"grad_norm": 0.6097583458280551,
"learning_rate": 2.2606158287515662e-06,
"loss": 0.927,
"step": 331
},
{
"epoch": 0.7048832271762208,
"grad_norm": 0.6910687359695524,
"learning_rate": 2.2312863258554236e-06,
"loss": 0.9503,
"step": 332
},
{
"epoch": 0.7070063694267515,
"grad_norm": 0.6169296505248961,
"learning_rate": 2.2020935835465567e-06,
"loss": 0.9448,
"step": 333
},
{
"epoch": 0.7091295116772823,
"grad_norm": 0.6206102236780358,
"learning_rate": 2.1730390438002056e-06,
"loss": 0.9367,
"step": 334
},
{
"epoch": 0.7112526539278131,
"grad_norm": 0.5882669175278411,
"learning_rate": 2.1441241417651072e-06,
"loss": 0.9246,
"step": 335
},
{
"epoch": 0.7133757961783439,
"grad_norm": 0.6593545038938035,
"learning_rate": 2.1153503056925872e-06,
"loss": 0.9597,
"step": 336
},
{
"epoch": 0.7154989384288747,
"grad_norm": 0.6187836424308805,
"learning_rate": 2.086718956866024e-06,
"loss": 0.9823,
"step": 337
},
{
"epoch": 0.7176220806794055,
"grad_norm": 0.6622306313226891,
"learning_rate": 2.0582315095306343e-06,
"loss": 0.8998,
"step": 338
},
{
"epoch": 0.7197452229299363,
"grad_norm": 0.6064995088828125,
"learning_rate": 2.0298893708236307e-06,
"loss": 0.9494,
"step": 339
},
{
"epoch": 0.721868365180467,
"grad_norm": 0.6315356893553601,
"learning_rate": 2.0016939407046987e-06,
"loss": 0.934,
"step": 340
},
{
"epoch": 0.7239915074309978,
"grad_norm": 0.6132141895487265,
"learning_rate": 1.9736466118868573e-06,
"loss": 0.9662,
"step": 341
},
{
"epoch": 0.7261146496815286,
"grad_norm": 0.5913159522920567,
"learning_rate": 1.945748769767667e-06,
"loss": 0.9388,
"step": 342
},
{
"epoch": 0.7282377919320594,
"grad_norm": 0.5911001882219563,
"learning_rate": 1.9180017923607884e-06,
"loss": 0.9959,
"step": 343
},
{
"epoch": 0.7303609341825902,
"grad_norm": 0.5964411898925175,
"learning_rate": 1.8904070502279242e-06,
"loss": 0.9609,
"step": 344
},
{
"epoch": 0.732484076433121,
"grad_norm": 0.5956188683958897,
"learning_rate": 1.8629659064111138e-06,
"loss": 0.9042,
"step": 345
},
{
"epoch": 0.7346072186836518,
"grad_norm": 0.6429639213975425,
"learning_rate": 1.8356797163654172e-06,
"loss": 0.9719,
"step": 346
},
{
"epoch": 0.7367303609341825,
"grad_norm": 0.625453067786892,
"learning_rate": 1.8085498278919421e-06,
"loss": 1.0156,
"step": 347
},
{
"epoch": 0.7388535031847133,
"grad_norm": 0.6474903500210286,
"learning_rate": 1.7815775810712921e-06,
"loss": 0.9304,
"step": 348
},
{
"epoch": 0.7409766454352441,
"grad_norm": 0.643384296734223,
"learning_rate": 1.754764308197358e-06,
"loss": 0.9429,
"step": 349
},
{
"epoch": 0.7430997876857749,
"grad_norm": 0.6488250778766241,
"learning_rate": 1.728111333711514e-06,
"loss": 0.8978,
"step": 350
},
{
"epoch": 0.7452229299363057,
"grad_norm": 0.6609986780324201,
"learning_rate": 1.7016199741371958e-06,
"loss": 0.918,
"step": 351
},
{
"epoch": 0.7473460721868365,
"grad_norm": 0.6128799955400698,
"learning_rate": 1.6752915380148772e-06,
"loss": 0.953,
"step": 352
},
{
"epoch": 0.7494692144373672,
"grad_norm": 0.6122929864573905,
"learning_rate": 1.6491273258374241e-06,
"loss": 0.9491,
"step": 353
},
{
"epoch": 0.7515923566878981,
"grad_norm": 0.581921090261184,
"learning_rate": 1.6231286299858635e-06,
"loss": 1.0149,
"step": 354
},
{
"epoch": 0.7537154989384289,
"grad_norm": 0.6499572625124347,
"learning_rate": 1.5972967346655449e-06,
"loss": 0.9035,
"step": 355
},
{
"epoch": 0.7558386411889597,
"grad_norm": 0.6018564846421318,
"learning_rate": 1.5716329158427097e-06,
"loss": 0.959,
"step": 356
},
{
"epoch": 0.7579617834394905,
"grad_norm": 0.6192065507123931,
"learning_rate": 1.546138441181459e-06,
"loss": 0.9242,
"step": 357
},
{
"epoch": 0.7600849256900213,
"grad_norm": 0.6004306254969125,
"learning_rate": 1.5208145699811417e-06,
"loss": 0.9603,
"step": 358
},
{
"epoch": 0.7622080679405521,
"grad_norm": 0.6569862768052468,
"learning_rate": 1.4956625531141495e-06,
"loss": 0.9735,
"step": 359
},
{
"epoch": 0.7643312101910829,
"grad_norm": 0.6439568810696925,
"learning_rate": 1.470683632964131e-06,
"loss": 0.9389,
"step": 360
},
{
"epoch": 0.7664543524416136,
"grad_norm": 0.6145728176506001,
"learning_rate": 1.4458790433646264e-06,
"loss": 0.8859,
"step": 361
},
{
"epoch": 0.7685774946921444,
"grad_norm": 0.6326061250773658,
"learning_rate": 1.4212500095381176e-06,
"loss": 0.9676,
"step": 362
},
{
"epoch": 0.7707006369426752,
"grad_norm": 0.6123548283214884,
"learning_rate": 1.3967977480355106e-06,
"loss": 0.9067,
"step": 363
},
{
"epoch": 0.772823779193206,
"grad_norm": 0.6475017566755511,
"learning_rate": 1.3725234666760428e-06,
"loss": 0.9761,
"step": 364
},
{
"epoch": 0.7749469214437368,
"grad_norm": 0.5905257224026246,
"learning_rate": 1.3484283644876289e-06,
"loss": 0.9567,
"step": 365
},
{
"epoch": 0.7770700636942676,
"grad_norm": 0.6176210891568339,
"learning_rate": 1.3245136316476253e-06,
"loss": 0.8845,
"step": 366
},
{
"epoch": 0.7791932059447984,
"grad_norm": 0.6212881415222462,
"learning_rate": 1.3007804494240478e-06,
"loss": 0.9558,
"step": 367
},
{
"epoch": 0.7813163481953291,
"grad_norm": 0.6268225582403189,
"learning_rate": 1.2772299901172198e-06,
"loss": 0.9741,
"step": 368
},
{
"epoch": 0.7834394904458599,
"grad_norm": 0.6290633940922796,
"learning_rate": 1.2538634170018727e-06,
"loss": 0.905,
"step": 369
},
{
"epoch": 0.7855626326963907,
"grad_norm": 0.6106693917571974,
"learning_rate": 1.2306818842696716e-06,
"loss": 0.9158,
"step": 370
},
{
"epoch": 0.7876857749469215,
"grad_norm": 0.6589407432757999,
"learning_rate": 1.2076865369722246e-06,
"loss": 0.9218,
"step": 371
},
{
"epoch": 0.7898089171974523,
"grad_norm": 0.6181774095804418,
"learning_rate": 1.184878510964504e-06,
"loss": 0.9282,
"step": 372
},
{
"epoch": 0.7919320594479831,
"grad_norm": 0.634843246523155,
"learning_rate": 1.1622589328487505e-06,
"loss": 0.8704,
"step": 373
},
{
"epoch": 0.7940552016985138,
"grad_norm": 0.6588681055604763,
"learning_rate": 1.1398289199188262e-06,
"loss": 0.9072,
"step": 374
},
{
"epoch": 0.7961783439490446,
"grad_norm": 0.6496256026060344,
"learning_rate": 1.1175895801050185e-06,
"loss": 0.9369,
"step": 375
},
{
"epoch": 0.7983014861995754,
"grad_norm": 0.6157885136577669,
"learning_rate": 1.09554201191932e-06,
"loss": 0.9857,
"step": 376
},
{
"epoch": 0.8004246284501062,
"grad_norm": 0.693915421413349,
"learning_rate": 1.0736873044011632e-06,
"loss": 0.9195,
"step": 377
},
{
"epoch": 0.802547770700637,
"grad_norm": 0.6352811715121436,
"learning_rate": 1.052026537063634e-06,
"loss": 0.9494,
"step": 378
},
{
"epoch": 0.8046709129511678,
"grad_norm": 0.6314980398127243,
"learning_rate": 1.03056077984014e-06,
"loss": 0.9829,
"step": 379
},
{
"epoch": 0.8067940552016986,
"grad_norm": 0.6171720030728264,
"learning_rate": 1.0092910930315698e-06,
"loss": 0.9196,
"step": 380
},
{
"epoch": 0.8089171974522293,
"grad_norm": 0.6111752995592951,
"learning_rate": 9.882185272539107e-07,
"loss": 1.0179,
"step": 381
},
{
"epoch": 0.8110403397027601,
"grad_norm": 0.6063600459587173,
"learning_rate": 9.673441233863661e-07,
"loss": 0.9778,
"step": 382
},
{
"epoch": 0.8131634819532909,
"grad_norm": 0.6219984593584619,
"learning_rate": 9.466689125199247e-07,
"loss": 0.9517,
"step": 383
},
{
"epoch": 0.8152866242038217,
"grad_norm": 0.5884991058391071,
"learning_rate": 9.261939159064465e-07,
"loss": 0.9125,
"step": 384
},
{
"epoch": 0.8174097664543525,
"grad_norm": 0.6336855501453481,
"learning_rate": 9.059201449082045e-07,
"loss": 0.9055,
"step": 385
},
{
"epoch": 0.8195329087048833,
"grad_norm": 0.6438487189876951,
"learning_rate": 8.858486009479384e-07,
"loss": 0.8816,
"step": 386
},
{
"epoch": 0.821656050955414,
"grad_norm": 0.6165977153524274,
"learning_rate": 8.659802754593805e-07,
"loss": 0.9708,
"step": 387
},
{
"epoch": 0.8237791932059448,
"grad_norm": 0.6192150411773142,
"learning_rate": 8.463161498382949e-07,
"loss": 0.9328,
"step": 388
},
{
"epoch": 0.8259023354564756,
"grad_norm": 0.6252080211561564,
"learning_rate": 8.268571953939897e-07,
"loss": 1.0164,
"step": 389
},
{
"epoch": 0.8280254777070064,
"grad_norm": 0.6174239147729979,
"learning_rate": 8.07604373301345e-07,
"loss": 0.9278,
"step": 390
},
{
"epoch": 0.8301486199575372,
"grad_norm": 0.5934957686287227,
"learning_rate": 7.885586345533397e-07,
"loss": 0.998,
"step": 391
},
{
"epoch": 0.832271762208068,
"grad_norm": 0.6821446761372294,
"learning_rate": 7.697209199140676e-07,
"loss": 0.9244,
"step": 392
},
{
"epoch": 0.8343949044585988,
"grad_norm": 0.6293471158429464,
"learning_rate": 7.510921598722765e-07,
"loss": 0.93,
"step": 393
},
{
"epoch": 0.8365180467091295,
"grad_norm": 0.6756858812445671,
"learning_rate": 7.326732745954001e-07,
"loss": 0.9123,
"step": 394
},
{
"epoch": 0.8386411889596603,
"grad_norm": 0.6596572053271065,
"learning_rate": 7.144651738841174e-07,
"loss": 0.9155,
"step": 395
},
{
"epoch": 0.8407643312101911,
"grad_norm": 0.6341753704135638,
"learning_rate": 6.96468757127396e-07,
"loss": 0.9352,
"step": 396
},
{
"epoch": 0.8428874734607219,
"grad_norm": 0.641019320177676,
"learning_rate": 6.786849132580841e-07,
"loss": 0.9704,
"step": 397
},
{
"epoch": 0.8450106157112527,
"grad_norm": 0.6502762949418774,
"learning_rate": 6.611145207089897e-07,
"loss": 1.0109,
"step": 398
},
{
"epoch": 0.8471337579617835,
"grad_norm": 0.5883498984829939,
"learning_rate": 6.437584473694991e-07,
"loss": 0.937,
"step": 399
},
{
"epoch": 0.8492569002123143,
"grad_norm": 0.6044650989839885,
"learning_rate": 6.266175505426958e-07,
"loss": 0.9899,
"step": 400
},
{
"epoch": 0.851380042462845,
"grad_norm": 0.7032208468983784,
"learning_rate": 6.096926769030298e-07,
"loss": 0.977,
"step": 401
},
{
"epoch": 0.8535031847133758,
"grad_norm": 0.6599095174918433,
"learning_rate": 5.929846624544821e-07,
"loss": 0.8938,
"step": 402
},
{
"epoch": 0.8556263269639066,
"grad_norm": 0.6326742176715536,
"learning_rate": 5.76494332489278e-07,
"loss": 0.9807,
"step": 403
},
{
"epoch": 0.8577494692144374,
"grad_norm": 0.5864803282744446,
"learning_rate": 5.602225015471175e-07,
"loss": 0.9364,
"step": 404
},
{
"epoch": 0.8598726114649682,
"grad_norm": 0.6074843091694057,
"learning_rate": 5.441699733749479e-07,
"loss": 0.9128,
"step": 405
},
{
"epoch": 0.861995753715499,
"grad_norm": 0.6249418223588977,
"learning_rate": 5.283375408872538e-07,
"loss": 0.9691,
"step": 406
},
{
"epoch": 0.8641188959660298,
"grad_norm": 0.6168009522971204,
"learning_rate": 5.127259861268974e-07,
"loss": 0.9434,
"step": 407
},
{
"epoch": 0.8662420382165605,
"grad_norm": 0.6190463715943864,
"learning_rate": 4.973360802264859e-07,
"loss": 0.9756,
"step": 408
},
{
"epoch": 0.8683651804670913,
"grad_norm": 0.622941478104092,
"learning_rate": 4.82168583370285e-07,
"loss": 0.9562,
"step": 409
},
{
"epoch": 0.8704883227176221,
"grad_norm": 0.6538118577546125,
"learning_rate": 4.6722424475666715e-07,
"loss": 0.9785,
"step": 410
},
{
"epoch": 0.8726114649681529,
"grad_norm": 0.5955630313016704,
"learning_rate": 4.5250380256110335e-07,
"loss": 0.9444,
"step": 411
},
{
"epoch": 0.8747346072186837,
"grad_norm": 0.6354711324766207,
"learning_rate": 4.380079838997087e-07,
"loss": 0.9588,
"step": 412
},
{
"epoch": 0.8768577494692145,
"grad_norm": 0.615805033371163,
"learning_rate": 4.237375047933118e-07,
"loss": 0.9514,
"step": 413
},
{
"epoch": 0.8789808917197452,
"grad_norm": 0.6114715882888384,
"learning_rate": 4.0969307013210445e-07,
"loss": 0.9463,
"step": 414
},
{
"epoch": 0.881104033970276,
"grad_norm": 0.6095867459156437,
"learning_rate": 3.958753736408105e-07,
"loss": 0.9843,
"step": 415
},
{
"epoch": 0.8832271762208068,
"grad_norm": 0.6123640433423114,
"learning_rate": 3.822850978444254e-07,
"loss": 0.979,
"step": 416
},
{
"epoch": 0.8853503184713376,
"grad_norm": 0.6037382407098268,
"learning_rate": 3.6892291403449963e-07,
"loss": 0.9593,
"step": 417
},
{
"epoch": 0.8874734607218684,
"grad_norm": 0.6421465549828673,
"learning_rate": 3.557894822359864e-07,
"loss": 0.9743,
"step": 418
},
{
"epoch": 0.8895966029723992,
"grad_norm": 0.6523016163798167,
"learning_rate": 3.428854511746293e-07,
"loss": 0.894,
"step": 419
},
{
"epoch": 0.89171974522293,
"grad_norm": 0.6131799852928372,
"learning_rate": 3.302114582449295e-07,
"loss": 0.9001,
"step": 420
},
{
"epoch": 0.8938428874734607,
"grad_norm": 0.592786282667123,
"learning_rate": 3.177681294786539e-07,
"loss": 0.9109,
"step": 421
},
{
"epoch": 0.8959660297239915,
"grad_norm": 0.6308395651339358,
"learning_rate": 3.055560795139173e-07,
"loss": 0.9416,
"step": 422
},
{
"epoch": 0.8980891719745223,
"grad_norm": 0.5733021722650089,
"learning_rate": 2.9357591156481793e-07,
"loss": 0.9304,
"step": 423
},
{
"epoch": 0.9002123142250531,
"grad_norm": 0.6601790561924576,
"learning_rate": 2.8182821739164534e-07,
"loss": 0.9506,
"step": 424
},
{
"epoch": 0.9023354564755839,
"grad_norm": 0.6448366971138038,
"learning_rate": 2.7031357727164865e-07,
"loss": 0.9662,
"step": 425
},
{
"epoch": 0.9044585987261147,
"grad_norm": 0.6353764277416116,
"learning_rate": 2.5903255997037246e-07,
"loss": 0.9452,
"step": 426
},
{
"epoch": 0.9065817409766455,
"grad_norm": 0.6215966155589953,
"learning_rate": 2.479857227135685e-07,
"loss": 0.9373,
"step": 427
},
{
"epoch": 0.9087048832271762,
"grad_norm": 0.6071667900895601,
"learning_rate": 2.3717361115966343e-07,
"loss": 0.9365,
"step": 428
},
{
"epoch": 0.910828025477707,
"grad_norm": 0.5662505726716307,
"learning_rate": 2.2659675937281078e-07,
"loss": 0.9449,
"step": 429
},
{
"epoch": 0.9129511677282378,
"grad_norm": 0.6103475320116462,
"learning_rate": 2.1625568979651012e-07,
"loss": 0.9148,
"step": 430
},
{
"epoch": 0.9150743099787686,
"grad_norm": 0.6021851948761284,
"learning_rate": 2.061509132278028e-07,
"loss": 0.9571,
"step": 431
},
{
"epoch": 0.9171974522292994,
"grad_norm": 0.6160276194809644,
"learning_rate": 1.9628292879203482e-07,
"loss": 0.9542,
"step": 432
},
{
"epoch": 0.9193205944798302,
"grad_norm": 0.5793322886001591,
"learning_rate": 1.866522239182117e-07,
"loss": 0.9498,
"step": 433
},
{
"epoch": 0.921443736730361,
"grad_norm": 0.6436803146727256,
"learning_rate": 1.7725927431491375e-07,
"loss": 0.9164,
"step": 434
},
{
"epoch": 0.9235668789808917,
"grad_norm": 0.5802384555049214,
"learning_rate": 1.6810454394680431e-07,
"loss": 0.8862,
"step": 435
},
{
"epoch": 0.9256900212314225,
"grad_norm": 0.5991103996578487,
"learning_rate": 1.5918848501170647e-07,
"loss": 0.9835,
"step": 436
},
{
"epoch": 0.9278131634819533,
"grad_norm": 0.6140911819745668,
"learning_rate": 1.505115379182731e-07,
"loss": 0.9125,
"step": 437
},
{
"epoch": 0.9299363057324841,
"grad_norm": 0.6569732944090182,
"learning_rate": 1.420741312642282e-07,
"loss": 0.8964,
"step": 438
},
{
"epoch": 0.9320594479830149,
"grad_norm": 0.6189932685484231,
"learning_rate": 1.338766818151982e-07,
"loss": 0.9251,
"step": 439
},
{
"epoch": 0.9341825902335457,
"grad_norm": 0.641464843833588,
"learning_rate": 1.2591959448412628e-07,
"loss": 0.9644,
"step": 440
},
{
"epoch": 0.9363057324840764,
"grad_norm": 0.5982347753497826,
"learning_rate": 1.1820326231126944e-07,
"loss": 0.9263,
"step": 441
},
{
"epoch": 0.9384288747346072,
"grad_norm": 0.5823643996812994,
"learning_rate": 1.107280664447874e-07,
"loss": 0.8708,
"step": 442
},
{
"epoch": 0.940552016985138,
"grad_norm": 0.6134197078158699,
"learning_rate": 1.0349437612191259e-07,
"loss": 0.928,
"step": 443
},
{
"epoch": 0.9426751592356688,
"grad_norm": 0.6237353280448422,
"learning_rate": 9.650254865071428e-08,
"loss": 0.9015,
"step": 444
},
{
"epoch": 0.9447983014861996,
"grad_norm": 0.5971219836614964,
"learning_rate": 8.975292939244928e-08,
"loss": 0.938,
"step": 445
},
{
"epoch": 0.9469214437367304,
"grad_norm": 0.6774232363303238,
"learning_rate": 8.324585174449895e-08,
"loss": 0.9284,
"step": 446
},
{
"epoch": 0.9490445859872612,
"grad_norm": 0.6403835297664868,
"learning_rate": 7.698163712390683e-08,
"loss": 0.9299,
"step": 447
},
{
"epoch": 0.9511677282377919,
"grad_norm": 0.5942848027014522,
"learning_rate": 7.096059495149855e-08,
"loss": 0.9471,
"step": 448
},
{
"epoch": 0.9532908704883227,
"grad_norm": 0.5872025609513564,
"learning_rate": 6.518302263659737e-08,
"loss": 0.9717,
"step": 449
},
{
"epoch": 0.9554140127388535,
"grad_norm": 0.6294814684278404,
"learning_rate": 5.964920556233767e-08,
"loss": 0.9498,
"step": 450
},
{
"epoch": 0.9575371549893843,
"grad_norm": 0.6148892781592927,
"learning_rate": 5.435941707156389e-08,
"loss": 0.955,
"step": 451
},
{
"epoch": 0.9596602972399151,
"grad_norm": 0.6509786720651843,
"learning_rate": 4.931391845333089e-08,
"loss": 0.9547,
"step": 452
},
{
"epoch": 0.9617834394904459,
"grad_norm": 0.6501016558558563,
"learning_rate": 4.451295892999863e-08,
"loss": 0.9612,
"step": 453
},
{
"epoch": 0.9639065817409767,
"grad_norm": 0.5677280542511052,
"learning_rate": 3.99567756449204e-08,
"loss": 0.9536,
"step": 454
},
{
"epoch": 0.9660297239915074,
"grad_norm": 0.6270377760292427,
"learning_rate": 3.5645593650728284e-08,
"loss": 0.9415,
"step": 455
},
{
"epoch": 0.9681528662420382,
"grad_norm": 0.5936095931176947,
"learning_rate": 3.157962589821872e-08,
"loss": 0.9123,
"step": 456
},
{
"epoch": 0.970276008492569,
"grad_norm": 0.5753275305003565,
"learning_rate": 2.77590732258326e-08,
"loss": 0.8992,
"step": 457
},
{
"epoch": 0.9723991507430998,
"grad_norm": 0.6161560451020908,
"learning_rate": 2.4184124349734828e-08,
"loss": 0.9011,
"step": 458
},
{
"epoch": 0.9745222929936306,
"grad_norm": 0.5986199087825571,
"learning_rate": 2.085495585449404e-08,
"loss": 0.9366,
"step": 459
},
{
"epoch": 0.9766454352441614,
"grad_norm": 0.6144363258091632,
"learning_rate": 1.7771732184357905e-08,
"loss": 0.9702,
"step": 460
},
{
"epoch": 0.9787685774946921,
"grad_norm": 0.6159493368932808,
"learning_rate": 1.4934605635132383e-08,
"loss": 0.9388,
"step": 461
},
{
"epoch": 0.9808917197452229,
"grad_norm": 0.6326721912348251,
"learning_rate": 1.2343716346657209e-08,
"loss": 0.9672,
"step": 462
},
{
"epoch": 0.9830148619957537,
"grad_norm": 0.6286300913967682,
"learning_rate": 9.999192295886973e-09,
"loss": 0.8955,
"step": 463
},
{
"epoch": 0.9851380042462845,
"grad_norm": 0.6469478397795659,
"learning_rate": 7.90114929056618e-09,
"loss": 0.969,
"step": 464
},
{
"epoch": 0.9872611464968153,
"grad_norm": 0.6109488636303972,
"learning_rate": 6.04969096350938e-09,
"loss": 1.0043,
"step": 465
},
{
"epoch": 0.9893842887473461,
"grad_norm": 0.672241938807974,
"learning_rate": 4.444908767484712e-09,
"loss": 0.9447,
"step": 466
},
{
"epoch": 0.9915074309978769,
"grad_norm": 0.6327761576127415,
"learning_rate": 3.0868819706947327e-09,
"loss": 0.9163,
"step": 467
},
{
"epoch": 0.9936305732484076,
"grad_norm": 0.5827977793629625,
"learning_rate": 1.9756776528601085e-09,
"loss": 0.93,
"step": 468
},
{
"epoch": 0.9957537154989384,
"grad_norm": 0.5656174378230285,
"learning_rate": 1.111350701909486e-09,
"loss": 0.9092,
"step": 469
},
{
"epoch": 0.9978768577494692,
"grad_norm": 0.6248670179846091,
"learning_rate": 4.939438112638861e-10,
"loss": 0.9335,
"step": 470
},
{
"epoch": 1.0,
"grad_norm": 0.584849627731381,
"learning_rate": 1.2348747773172075e-10,
"loss": 0.9356,
"step": 471
}
],
"logging_steps": 1,
"max_steps": 471,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 157,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 70004199849984.0,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}