gpt2-sft-port / trainer_state.json
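Below is the raw trainer state logged during fine-tuning. As a minimal sketch (assuming a local copy of this file named trainer_state.json, plus the standard json module and matplotlib), this is one way to load the log_history records and plot the training and eval loss curves; the field names match the entries shown below.

import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# log_history mixes two kinds of records: training steps
# (loss, learning_rate, grad_norm, step) and eval checkpoints (eval_loss, ...).
train = [e for e in state["log_history"] if "loss" in e]
evals = [e for e in state["log_history"] if "eval_loss" in e]

plt.plot([e["step"] for e in train], [e["loss"] for e in train], label="train loss")
if evals:
    plt.scatter([e["step"] for e in evals], [e["eval_loss"] for e in evals],
                color="red", label="eval loss")
plt.xlabel("step")
plt.ylabel("loss")
plt.legend()
plt.show()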
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.0,
"eval_steps": 500,
"global_step": 4516,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"grad_norm": 5.9302967105245665,
"learning_rate": 4.424778761061947e-08,
"loss": 3.0935,
"step": 1
},
{
"epoch": 0.0,
"grad_norm": 7.305779726596656,
"learning_rate": 2.2123893805309737e-07,
"loss": 3.1375,
"step": 5
},
{
"epoch": 0.0,
"grad_norm": 6.427298566220218,
"learning_rate": 4.4247787610619474e-07,
"loss": 3.1374,
"step": 10
},
{
"epoch": 0.01,
"grad_norm": 6.687251930634824,
"learning_rate": 6.637168141592922e-07,
"loss": 3.1479,
"step": 15
},
{
"epoch": 0.01,
"grad_norm": 7.0233329966869835,
"learning_rate": 8.849557522123895e-07,
"loss": 3.1188,
"step": 20
},
{
"epoch": 0.01,
"grad_norm": 6.172490978104657,
"learning_rate": 1.106194690265487e-06,
"loss": 3.105,
"step": 25
},
{
"epoch": 0.01,
"grad_norm": 6.460858990532256,
"learning_rate": 1.3274336283185843e-06,
"loss": 3.0866,
"step": 30
},
{
"epoch": 0.02,
"grad_norm": 5.767457611472164,
"learning_rate": 1.5486725663716816e-06,
"loss": 3.1042,
"step": 35
},
{
"epoch": 0.02,
"grad_norm": 5.378546026321239,
"learning_rate": 1.769911504424779e-06,
"loss": 3.0784,
"step": 40
},
{
"epoch": 0.02,
"grad_norm": 4.894944575178905,
"learning_rate": 1.991150442477876e-06,
"loss": 3.0142,
"step": 45
},
{
"epoch": 0.02,
"grad_norm": 4.703701387840793,
"learning_rate": 2.212389380530974e-06,
"loss": 3.0014,
"step": 50
},
{
"epoch": 0.02,
"grad_norm": 4.756160505527388,
"learning_rate": 2.433628318584071e-06,
"loss": 2.9575,
"step": 55
},
{
"epoch": 0.03,
"grad_norm": 3.2723458689011595,
"learning_rate": 2.6548672566371687e-06,
"loss": 2.8693,
"step": 60
},
{
"epoch": 0.03,
"grad_norm": 2.8756470842968977,
"learning_rate": 2.876106194690266e-06,
"loss": 2.8033,
"step": 65
},
{
"epoch": 0.03,
"grad_norm": 2.8122729226412173,
"learning_rate": 3.097345132743363e-06,
"loss": 2.8074,
"step": 70
},
{
"epoch": 0.03,
"grad_norm": 2.832709793973122,
"learning_rate": 3.3185840707964607e-06,
"loss": 2.7999,
"step": 75
},
{
"epoch": 0.04,
"grad_norm": 2.341031342704307,
"learning_rate": 3.539823008849558e-06,
"loss": 2.7517,
"step": 80
},
{
"epoch": 0.04,
"grad_norm": 1.8508187477955544,
"learning_rate": 3.7610619469026547e-06,
"loss": 2.704,
"step": 85
},
{
"epoch": 0.04,
"grad_norm": 1.8655311029300103,
"learning_rate": 3.982300884955752e-06,
"loss": 2.7002,
"step": 90
},
{
"epoch": 0.04,
"grad_norm": 1.6568183071451184,
"learning_rate": 4.20353982300885e-06,
"loss": 2.7038,
"step": 95
},
{
"epoch": 0.04,
"grad_norm": 1.5391772419575054,
"learning_rate": 4.424778761061948e-06,
"loss": 2.6747,
"step": 100
},
{
"epoch": 0.05,
"grad_norm": 1.4790975313018557,
"learning_rate": 4.646017699115045e-06,
"loss": 2.6395,
"step": 105
},
{
"epoch": 0.05,
"grad_norm": 1.3348163527490156,
"learning_rate": 4.867256637168142e-06,
"loss": 2.5909,
"step": 110
},
{
"epoch": 0.05,
"grad_norm": 1.4498623041693914,
"learning_rate": 5.088495575221239e-06,
"loss": 2.576,
"step": 115
},
{
"epoch": 0.05,
"grad_norm": 1.2968840174935734,
"learning_rate": 5.309734513274337e-06,
"loss": 2.5948,
"step": 120
},
{
"epoch": 0.06,
"grad_norm": 1.2121548854159094,
"learning_rate": 5.530973451327434e-06,
"loss": 2.5535,
"step": 125
},
{
"epoch": 0.06,
"grad_norm": 1.2268841707077334,
"learning_rate": 5.752212389380532e-06,
"loss": 2.5465,
"step": 130
},
{
"epoch": 0.06,
"grad_norm": 1.0602113758593934,
"learning_rate": 5.973451327433629e-06,
"loss": 2.521,
"step": 135
},
{
"epoch": 0.06,
"grad_norm": 1.2016563647195058,
"learning_rate": 6.194690265486726e-06,
"loss": 2.5316,
"step": 140
},
{
"epoch": 0.06,
"grad_norm": 1.03754488496934,
"learning_rate": 6.415929203539823e-06,
"loss": 2.5198,
"step": 145
},
{
"epoch": 0.07,
"grad_norm": 0.9732577868645889,
"learning_rate": 6.6371681415929215e-06,
"loss": 2.5058,
"step": 150
},
{
"epoch": 0.07,
"grad_norm": 0.9256037173077092,
"learning_rate": 6.858407079646018e-06,
"loss": 2.4892,
"step": 155
},
{
"epoch": 0.07,
"grad_norm": 1.0094753274438697,
"learning_rate": 7.079646017699116e-06,
"loss": 2.4458,
"step": 160
},
{
"epoch": 0.07,
"grad_norm": 0.9707942710408031,
"learning_rate": 7.300884955752213e-06,
"loss": 2.4591,
"step": 165
},
{
"epoch": 0.08,
"grad_norm": 0.9063440030760096,
"learning_rate": 7.5221238938053095e-06,
"loss": 2.4617,
"step": 170
},
{
"epoch": 0.08,
"grad_norm": 0.8163610990239183,
"learning_rate": 7.743362831858407e-06,
"loss": 2.4446,
"step": 175
},
{
"epoch": 0.08,
"grad_norm": 0.7963916852493695,
"learning_rate": 7.964601769911505e-06,
"loss": 2.4411,
"step": 180
},
{
"epoch": 0.08,
"grad_norm": 0.8345527529986408,
"learning_rate": 8.185840707964603e-06,
"loss": 2.4079,
"step": 185
},
{
"epoch": 0.08,
"grad_norm": 0.8382354358581183,
"learning_rate": 8.4070796460177e-06,
"loss": 2.3985,
"step": 190
},
{
"epoch": 0.09,
"grad_norm": 0.7309733141651356,
"learning_rate": 8.628318584070797e-06,
"loss": 2.4109,
"step": 195
},
{
"epoch": 0.09,
"grad_norm": 0.7498808709324232,
"learning_rate": 8.849557522123895e-06,
"loss": 2.3921,
"step": 200
},
{
"epoch": 0.09,
"grad_norm": 0.7366091357132286,
"learning_rate": 9.070796460176992e-06,
"loss": 2.4085,
"step": 205
},
{
"epoch": 0.09,
"grad_norm": 0.6856312033309606,
"learning_rate": 9.29203539823009e-06,
"loss": 2.3502,
"step": 210
},
{
"epoch": 0.1,
"grad_norm": 0.6568988193036476,
"learning_rate": 9.513274336283188e-06,
"loss": 2.3686,
"step": 215
},
{
"epoch": 0.1,
"grad_norm": 0.7438738093990327,
"learning_rate": 9.734513274336284e-06,
"loss": 2.3479,
"step": 220
},
{
"epoch": 0.1,
"grad_norm": 0.7679624092732862,
"learning_rate": 9.95575221238938e-06,
"loss": 2.3609,
"step": 225
},
{
"epoch": 0.1,
"grad_norm": 0.681662469567693,
"learning_rate": 1.0176991150442479e-05,
"loss": 2.3764,
"step": 230
},
{
"epoch": 0.1,
"grad_norm": 0.6935069551930801,
"learning_rate": 1.0398230088495575e-05,
"loss": 2.3426,
"step": 235
},
{
"epoch": 0.11,
"grad_norm": 0.6584555790452863,
"learning_rate": 1.0619469026548675e-05,
"loss": 2.3571,
"step": 240
},
{
"epoch": 0.11,
"grad_norm": 0.6565728726289145,
"learning_rate": 1.0840707964601771e-05,
"loss": 2.3274,
"step": 245
},
{
"epoch": 0.11,
"grad_norm": 0.6654454114545979,
"learning_rate": 1.1061946902654867e-05,
"loss": 2.3728,
"step": 250
},
{
"epoch": 0.11,
"grad_norm": 0.7201783946163706,
"learning_rate": 1.1283185840707967e-05,
"loss": 2.3253,
"step": 255
},
{
"epoch": 0.12,
"grad_norm": 0.6572739841569043,
"learning_rate": 1.1504424778761064e-05,
"loss": 2.3483,
"step": 260
},
{
"epoch": 0.12,
"grad_norm": 0.6466947511887401,
"learning_rate": 1.172566371681416e-05,
"loss": 2.3386,
"step": 265
},
{
"epoch": 0.12,
"grad_norm": 0.6495008736871682,
"learning_rate": 1.1946902654867258e-05,
"loss": 2.3333,
"step": 270
},
{
"epoch": 0.12,
"grad_norm": 0.7273542958739748,
"learning_rate": 1.2168141592920354e-05,
"loss": 2.3242,
"step": 275
},
{
"epoch": 0.12,
"grad_norm": 0.7619726163224912,
"learning_rate": 1.2389380530973452e-05,
"loss": 2.3029,
"step": 280
},
{
"epoch": 0.13,
"grad_norm": 0.6238656932775041,
"learning_rate": 1.261061946902655e-05,
"loss": 2.3114,
"step": 285
},
{
"epoch": 0.13,
"grad_norm": 0.73160147791502,
"learning_rate": 1.2831858407079647e-05,
"loss": 2.3265,
"step": 290
},
{
"epoch": 0.13,
"grad_norm": 0.6518704683290121,
"learning_rate": 1.3053097345132743e-05,
"loss": 2.3083,
"step": 295
},
{
"epoch": 0.13,
"grad_norm": 0.6720312554501874,
"learning_rate": 1.3274336283185843e-05,
"loss": 2.3307,
"step": 300
},
{
"epoch": 0.14,
"grad_norm": 0.612726939147247,
"learning_rate": 1.349557522123894e-05,
"loss": 2.3077,
"step": 305
},
{
"epoch": 0.14,
"grad_norm": 0.7020134439508396,
"learning_rate": 1.3716814159292036e-05,
"loss": 2.3018,
"step": 310
},
{
"epoch": 0.14,
"grad_norm": 0.6970071128988303,
"learning_rate": 1.3938053097345134e-05,
"loss": 2.2979,
"step": 315
},
{
"epoch": 0.14,
"grad_norm": 0.650935356346603,
"learning_rate": 1.4159292035398232e-05,
"loss": 2.2785,
"step": 320
},
{
"epoch": 0.14,
"grad_norm": 0.6181473844614722,
"learning_rate": 1.4380530973451328e-05,
"loss": 2.2816,
"step": 325
},
{
"epoch": 0.15,
"grad_norm": 0.6745455450076417,
"learning_rate": 1.4601769911504426e-05,
"loss": 2.2708,
"step": 330
},
{
"epoch": 0.15,
"grad_norm": 0.6091358189180341,
"learning_rate": 1.4823008849557523e-05,
"loss": 2.2486,
"step": 335
},
{
"epoch": 0.15,
"grad_norm": 0.6586949292605638,
"learning_rate": 1.5044247787610619e-05,
"loss": 2.2581,
"step": 340
},
{
"epoch": 0.15,
"grad_norm": 0.6104848129446266,
"learning_rate": 1.5265486725663717e-05,
"loss": 2.2852,
"step": 345
},
{
"epoch": 0.16,
"grad_norm": 0.7566033459169071,
"learning_rate": 1.5486725663716813e-05,
"loss": 2.2963,
"step": 350
},
{
"epoch": 0.16,
"grad_norm": 0.6691634764911771,
"learning_rate": 1.5707964601769913e-05,
"loss": 2.3026,
"step": 355
},
{
"epoch": 0.16,
"grad_norm": 0.6135980809816669,
"learning_rate": 1.592920353982301e-05,
"loss": 2.3014,
"step": 360
},
{
"epoch": 0.16,
"grad_norm": 0.5783542858805288,
"learning_rate": 1.6150442477876106e-05,
"loss": 2.2925,
"step": 365
},
{
"epoch": 0.16,
"grad_norm": 0.6428582491133481,
"learning_rate": 1.6371681415929206e-05,
"loss": 2.3076,
"step": 370
},
{
"epoch": 0.17,
"grad_norm": 0.5766461871952834,
"learning_rate": 1.6592920353982302e-05,
"loss": 2.2738,
"step": 375
},
{
"epoch": 0.17,
"grad_norm": 0.673912365153494,
"learning_rate": 1.68141592920354e-05,
"loss": 2.2745,
"step": 380
},
{
"epoch": 0.17,
"grad_norm": 0.6452557939005914,
"learning_rate": 1.7035398230088498e-05,
"loss": 2.2566,
"step": 385
},
{
"epoch": 0.17,
"grad_norm": 0.6304903547787424,
"learning_rate": 1.7256637168141594e-05,
"loss": 2.2899,
"step": 390
},
{
"epoch": 0.17,
"grad_norm": 0.6245152937180848,
"learning_rate": 1.747787610619469e-05,
"loss": 2.2648,
"step": 395
},
{
"epoch": 0.18,
"grad_norm": 0.6059507946790185,
"learning_rate": 1.769911504424779e-05,
"loss": 2.2776,
"step": 400
},
{
"epoch": 0.18,
"grad_norm": 0.6080298884037114,
"learning_rate": 1.7920353982300887e-05,
"loss": 2.2638,
"step": 405
},
{
"epoch": 0.18,
"grad_norm": 0.6874992312190951,
"learning_rate": 1.8141592920353983e-05,
"loss": 2.2793,
"step": 410
},
{
"epoch": 0.18,
"grad_norm": 0.6407606610375601,
"learning_rate": 1.8362831858407083e-05,
"loss": 2.2792,
"step": 415
},
{
"epoch": 0.19,
"grad_norm": 0.6527060043840156,
"learning_rate": 1.858407079646018e-05,
"loss": 2.2583,
"step": 420
},
{
"epoch": 0.19,
"grad_norm": 0.6215713645435603,
"learning_rate": 1.8805309734513276e-05,
"loss": 2.25,
"step": 425
},
{
"epoch": 0.19,
"grad_norm": 0.6545273421699539,
"learning_rate": 1.9026548672566376e-05,
"loss": 2.2336,
"step": 430
},
{
"epoch": 0.19,
"grad_norm": 0.635355881256667,
"learning_rate": 1.9247787610619472e-05,
"loss": 2.2512,
"step": 435
},
{
"epoch": 0.19,
"grad_norm": 0.603683700236946,
"learning_rate": 1.946902654867257e-05,
"loss": 2.2465,
"step": 440
},
{
"epoch": 0.2,
"grad_norm": 0.6317792101148252,
"learning_rate": 1.9690265486725665e-05,
"loss": 2.241,
"step": 445
},
{
"epoch": 0.2,
"grad_norm": 0.6460480846393403,
"learning_rate": 1.991150442477876e-05,
"loss": 2.2354,
"step": 450
},
{
"epoch": 0.2,
"grad_norm": 0.6370059094397664,
"learning_rate": 1.9999973109141592e-05,
"loss": 2.2479,
"step": 455
},
{
"epoch": 0.2,
"grad_norm": 0.6096867001987225,
"learning_rate": 1.9999808776641724e-05,
"loss": 2.248,
"step": 460
},
{
"epoch": 0.21,
"grad_norm": 0.656813030007061,
"learning_rate": 1.9999495053459817e-05,
"loss": 2.2447,
"step": 465
},
{
"epoch": 0.21,
"grad_norm": 0.6082215741015847,
"learning_rate": 1.999903194428269e-05,
"loss": 2.2385,
"step": 470
},
{
"epoch": 0.21,
"grad_norm": 0.6460027196916125,
"learning_rate": 1.99984194560289e-05,
"loss": 2.2643,
"step": 475
},
{
"epoch": 0.21,
"grad_norm": 0.6585288760609864,
"learning_rate": 1.999765759784862e-05,
"loss": 2.2333,
"step": 480
},
{
"epoch": 0.21,
"grad_norm": 0.6731268216505186,
"learning_rate": 1.9996746381123522e-05,
"loss": 2.2227,
"step": 485
},
{
"epoch": 0.22,
"grad_norm": 0.5886714983751958,
"learning_rate": 1.9995685819466593e-05,
"loss": 2.2416,
"step": 490
},
{
"epoch": 0.22,
"grad_norm": 0.6397149952247406,
"learning_rate": 1.999447592872194e-05,
"loss": 2.2344,
"step": 495
},
{
"epoch": 0.22,
"grad_norm": 0.6741928937540266,
"learning_rate": 1.9993116726964554e-05,
"loss": 2.2323,
"step": 500
},
{
"epoch": 0.22,
"grad_norm": 0.6033214250699673,
"learning_rate": 1.9991608234500023e-05,
"loss": 2.2146,
"step": 505
},
{
"epoch": 0.23,
"grad_norm": 0.6007334713273825,
"learning_rate": 1.9989950473864254e-05,
"loss": 2.2536,
"step": 510
},
{
"epoch": 0.23,
"grad_norm": 0.5823176838115881,
"learning_rate": 1.998814346982312e-05,
"loss": 2.2377,
"step": 515
},
{
"epoch": 0.23,
"grad_norm": 0.5938190318245832,
"learning_rate": 1.998618724937209e-05,
"loss": 2.2105,
"step": 520
},
{
"epoch": 0.23,
"grad_norm": 0.5823007300508676,
"learning_rate": 1.998408184173584e-05,
"loss": 2.2325,
"step": 525
},
{
"epoch": 0.23,
"grad_norm": 0.6818321102920244,
"learning_rate": 1.9981827278367796e-05,
"loss": 2.2351,
"step": 530
},
{
"epoch": 0.24,
"grad_norm": 0.6146088694935211,
"learning_rate": 1.9979423592949677e-05,
"loss": 2.218,
"step": 535
},
{
"epoch": 0.24,
"grad_norm": 0.6384715124393877,
"learning_rate": 1.997687082139099e-05,
"loss": 2.1971,
"step": 540
},
{
"epoch": 0.24,
"grad_norm": 0.6323313222418419,
"learning_rate": 1.9974169001828495e-05,
"loss": 2.2321,
"step": 545
},
{
"epoch": 0.24,
"grad_norm": 0.6358980691112853,
"learning_rate": 1.9971318174625633e-05,
"loss": 2.1898,
"step": 550
},
{
"epoch": 0.25,
"grad_norm": 0.6566561266360453,
"learning_rate": 1.9968318382371912e-05,
"loss": 2.198,
"step": 555
},
{
"epoch": 0.25,
"grad_norm": 0.6712425478211881,
"learning_rate": 1.9965169669882293e-05,
"loss": 2.1999,
"step": 560
},
{
"epoch": 0.25,
"grad_norm": 0.5910209377681687,
"learning_rate": 1.9961872084196514e-05,
"loss": 2.1923,
"step": 565
},
{
"epoch": 0.25,
"grad_norm": 0.6723896281713607,
"learning_rate": 1.9958425674578364e-05,
"loss": 2.2195,
"step": 570
},
{
"epoch": 0.25,
"grad_norm": 0.7088328095561311,
"learning_rate": 1.9954830492514984e-05,
"loss": 2.24,
"step": 575
},
{
"epoch": 0.26,
"grad_norm": 0.6619093017943884,
"learning_rate": 1.995108659171607e-05,
"loss": 2.2219,
"step": 580
},
{
"epoch": 0.26,
"grad_norm": 0.6126782889076011,
"learning_rate": 1.9947194028113072e-05,
"loss": 2.2008,
"step": 585
},
{
"epoch": 0.26,
"grad_norm": 0.6487754552128511,
"learning_rate": 1.9943152859858386e-05,
"loss": 2.1934,
"step": 590
},
{
"epoch": 0.26,
"grad_norm": 0.635765822618984,
"learning_rate": 1.993896314732445e-05,
"loss": 2.2234,
"step": 595
},
{
"epoch": 0.27,
"grad_norm": 0.617804258883185,
"learning_rate": 1.9934624953102858e-05,
"loss": 2.2019,
"step": 600
},
{
"epoch": 0.27,
"grad_norm": 0.5957065495027865,
"learning_rate": 1.993013834200344e-05,
"loss": 2.2067,
"step": 605
},
{
"epoch": 0.27,
"grad_norm": 0.5774308761603595,
"learning_rate": 1.9925503381053258e-05,
"loss": 2.2027,
"step": 610
},
{
"epoch": 0.27,
"grad_norm": 0.6095916580233893,
"learning_rate": 1.9920720139495632e-05,
"loss": 2.2157,
"step": 615
},
{
"epoch": 0.27,
"grad_norm": 0.6416588744818001,
"learning_rate": 1.9915788688789107e-05,
"loss": 2.2121,
"step": 620
},
{
"epoch": 0.28,
"grad_norm": 0.6920840037124111,
"learning_rate": 1.9910709102606373e-05,
"loss": 2.2127,
"step": 625
},
{
"epoch": 0.28,
"grad_norm": 0.6254080862305184,
"learning_rate": 1.990548145683315e-05,
"loss": 2.1888,
"step": 630
},
{
"epoch": 0.28,
"grad_norm": 0.6087010509698974,
"learning_rate": 1.9900105829567107e-05,
"loss": 2.1966,
"step": 635
},
{
"epoch": 0.28,
"grad_norm": 0.5994498166043192,
"learning_rate": 1.9894582301116633e-05,
"loss": 2.2013,
"step": 640
},
{
"epoch": 0.29,
"grad_norm": 0.5760812712064796,
"learning_rate": 1.988891095399967e-05,
"loss": 2.1973,
"step": 645
},
{
"epoch": 0.29,
"grad_norm": 0.5583773669670165,
"learning_rate": 1.9883091872942484e-05,
"loss": 2.1958,
"step": 650
},
{
"epoch": 0.29,
"grad_norm": 0.5775094919923649,
"learning_rate": 1.9877125144878387e-05,
"loss": 2.1927,
"step": 655
},
{
"epoch": 0.29,
"grad_norm": 0.6234933136336159,
"learning_rate": 1.9871010858946443e-05,
"loss": 2.1995,
"step": 660
},
{
"epoch": 0.29,
"grad_norm": 0.600439198598081,
"learning_rate": 1.9864749106490128e-05,
"loss": 2.1717,
"step": 665
},
{
"epoch": 0.3,
"grad_norm": 0.6492321115547286,
"learning_rate": 1.985833998105598e-05,
"loss": 2.1831,
"step": 670
},
{
"epoch": 0.3,
"grad_norm": 0.5940344782086231,
"learning_rate": 1.9851783578392198e-05,
"loss": 2.1655,
"step": 675
},
{
"epoch": 0.3,
"grad_norm": 0.607805681100338,
"learning_rate": 1.984507999644719e-05,
"loss": 2.2024,
"step": 680
},
{
"epoch": 0.3,
"grad_norm": 0.6037241662861466,
"learning_rate": 1.9838229335368145e-05,
"loss": 2.1866,
"step": 685
},
{
"epoch": 0.31,
"grad_norm": 0.6879418746347693,
"learning_rate": 1.9831231697499515e-05,
"loss": 2.1905,
"step": 690
},
{
"epoch": 0.31,
"grad_norm": 0.6576572500858463,
"learning_rate": 1.9824087187381486e-05,
"loss": 2.229,
"step": 695
},
{
"epoch": 0.31,
"grad_norm": 0.6108811627403412,
"learning_rate": 1.9816795911748422e-05,
"loss": 2.1914,
"step": 700
},
{
"epoch": 0.31,
"grad_norm": 0.633230001706751,
"learning_rate": 1.9809357979527274e-05,
"loss": 2.1996,
"step": 705
},
{
"epoch": 0.31,
"grad_norm": 0.629052235839188,
"learning_rate": 1.980177350183594e-05,
"loss": 2.1871,
"step": 710
},
{
"epoch": 0.32,
"grad_norm": 0.6058148638787542,
"learning_rate": 1.9794042591981615e-05,
"loss": 2.1985,
"step": 715
},
{
"epoch": 0.32,
"grad_norm": 0.5777139620091096,
"learning_rate": 1.9786165365459102e-05,
"loss": 2.1976,
"step": 720
},
{
"epoch": 0.32,
"grad_norm": 0.6060414204553223,
"learning_rate": 1.977814193994907e-05,
"loss": 2.1817,
"step": 725
},
{
"epoch": 0.32,
"grad_norm": 0.5942177905374783,
"learning_rate": 1.976997243531632e-05,
"loss": 2.1791,
"step": 730
},
{
"epoch": 0.33,
"grad_norm": 0.6096899630406291,
"learning_rate": 1.976165697360796e-05,
"loss": 2.1914,
"step": 735
},
{
"epoch": 0.33,
"grad_norm": 0.5911266066502062,
"learning_rate": 1.975319567905163e-05,
"loss": 2.1777,
"step": 740
},
{
"epoch": 0.33,
"grad_norm": 0.5924780064544021,
"learning_rate": 1.9744588678053592e-05,
"loss": 2.1854,
"step": 745
},
{
"epoch": 0.33,
"grad_norm": 0.6106853528904048,
"learning_rate": 1.9735836099196882e-05,
"loss": 2.1848,
"step": 750
},
{
"epoch": 0.33,
"grad_norm": 0.6070900980484122,
"learning_rate": 1.972693807323938e-05,
"loss": 2.1741,
"step": 755
},
{
"epoch": 0.34,
"grad_norm": 0.580301202464717,
"learning_rate": 1.971789473311184e-05,
"loss": 2.1786,
"step": 760
},
{
"epoch": 0.34,
"grad_norm": 0.7069385951588528,
"learning_rate": 1.9708706213915917e-05,
"loss": 2.1804,
"step": 765
},
{
"epoch": 0.34,
"grad_norm": 0.6374517101396402,
"learning_rate": 1.9699372652922154e-05,
"loss": 2.2118,
"step": 770
},
{
"epoch": 0.34,
"grad_norm": 0.6411072334997316,
"learning_rate": 1.968989418956792e-05,
"loss": 2.2087,
"step": 775
},
{
"epoch": 0.35,
"grad_norm": 0.6293125724155817,
"learning_rate": 1.9680270965455343e-05,
"loss": 2.1823,
"step": 780
},
{
"epoch": 0.35,
"grad_norm": 0.6299946058211658,
"learning_rate": 1.967050312434916e-05,
"loss": 2.1961,
"step": 785
},
{
"epoch": 0.35,
"grad_norm": 0.5980393367124074,
"learning_rate": 1.966059081217461e-05,
"loss": 2.1674,
"step": 790
},
{
"epoch": 0.35,
"grad_norm": 0.5904902647161646,
"learning_rate": 1.9650534177015233e-05,
"loss": 2.1945,
"step": 795
},
{
"epoch": 0.35,
"grad_norm": 0.617066596158311,
"learning_rate": 1.9640333369110662e-05,
"loss": 2.1648,
"step": 800
},
{
"epoch": 0.36,
"grad_norm": 0.5873598785066696,
"learning_rate": 1.9629988540854373e-05,
"loss": 2.1747,
"step": 805
},
{
"epoch": 0.36,
"grad_norm": 0.6769394638444185,
"learning_rate": 1.9619499846791426e-05,
"loss": 2.2016,
"step": 810
},
{
"epoch": 0.36,
"grad_norm": 0.649391086183006,
"learning_rate": 1.960886744361612e-05,
"loss": 2.1655,
"step": 815
},
{
"epoch": 0.36,
"grad_norm": 0.6260155698476166,
"learning_rate": 1.9598091490169696e-05,
"loss": 2.1665,
"step": 820
},
{
"epoch": 0.37,
"grad_norm": 0.6025903584553611,
"learning_rate": 1.958717214743793e-05,
"loss": 2.1787,
"step": 825
},
{
"epoch": 0.37,
"grad_norm": 0.6479435626863317,
"learning_rate": 1.9576109578548757e-05,
"loss": 2.1755,
"step": 830
},
{
"epoch": 0.37,
"grad_norm": 0.6001308047773649,
"learning_rate": 1.95649039487698e-05,
"loss": 2.1597,
"step": 835
},
{
"epoch": 0.37,
"grad_norm": 0.6121729500468993,
"learning_rate": 1.9553555425505933e-05,
"loss": 2.1377,
"step": 840
},
{
"epoch": 0.37,
"grad_norm": 0.6293518963498421,
"learning_rate": 1.9542064178296755e-05,
"loss": 2.1779,
"step": 845
},
{
"epoch": 0.38,
"grad_norm": 0.5786233151451842,
"learning_rate": 1.953043037881408e-05,
"loss": 2.1717,
"step": 850
},
{
"epoch": 0.38,
"grad_norm": 0.6376918407619234,
"learning_rate": 1.9518654200859356e-05,
"loss": 2.2029,
"step": 855
},
{
"epoch": 0.38,
"grad_norm": 0.6883166190013751,
"learning_rate": 1.9506735820361065e-05,
"loss": 2.1664,
"step": 860
},
{
"epoch": 0.38,
"grad_norm": 0.6763591542046004,
"learning_rate": 1.9494675415372123e-05,
"loss": 2.1375,
"step": 865
},
{
"epoch": 0.39,
"grad_norm": 0.6131017330885689,
"learning_rate": 1.9482473166067177e-05,
"loss": 2.2097,
"step": 870
},
{
"epoch": 0.39,
"grad_norm": 0.5928993924259061,
"learning_rate": 1.9470129254739952e-05,
"loss": 2.1817,
"step": 875
},
{
"epoch": 0.39,
"grad_norm": 0.6042994297919558,
"learning_rate": 1.945764386580051e-05,
"loss": 2.1827,
"step": 880
},
{
"epoch": 0.39,
"grad_norm": 0.619514725162525,
"learning_rate": 1.9445017185772493e-05,
"loss": 2.1687,
"step": 885
},
{
"epoch": 0.39,
"grad_norm": 0.6219430648039243,
"learning_rate": 1.9432249403290337e-05,
"loss": 2.1888,
"step": 890
},
{
"epoch": 0.4,
"grad_norm": 0.6212259457376662,
"learning_rate": 1.941934070909647e-05,
"loss": 2.1607,
"step": 895
},
{
"epoch": 0.4,
"grad_norm": 0.6092603199508801,
"learning_rate": 1.940629129603844e-05,
"loss": 2.1697,
"step": 900
},
{
"epoch": 0.4,
"grad_norm": 0.6679671465632506,
"learning_rate": 1.9393101359066047e-05,
"loss": 2.1714,
"step": 905
},
{
"epoch": 0.4,
"grad_norm": 0.6036880757024928,
"learning_rate": 1.9379771095228426e-05,
"loss": 2.1476,
"step": 910
},
{
"epoch": 0.41,
"grad_norm": 0.5844666301272554,
"learning_rate": 1.9366300703671104e-05,
"loss": 2.1676,
"step": 915
},
{
"epoch": 0.41,
"grad_norm": 0.6070951741547145,
"learning_rate": 1.935269038563303e-05,
"loss": 2.1469,
"step": 920
},
{
"epoch": 0.41,
"grad_norm": 0.6187770141760972,
"learning_rate": 1.9338940344443564e-05,
"loss": 2.1884,
"step": 925
},
{
"epoch": 0.41,
"grad_norm": 0.5753743321085817,
"learning_rate": 1.9325050785519438e-05,
"loss": 2.1652,
"step": 930
},
{
"epoch": 0.41,
"grad_norm": 0.560773330856025,
"learning_rate": 1.9311021916361675e-05,
"loss": 2.1603,
"step": 935
},
{
"epoch": 0.42,
"grad_norm": 0.6129991062970265,
"learning_rate": 1.9296853946552532e-05,
"loss": 2.1566,
"step": 940
},
{
"epoch": 0.42,
"grad_norm": 0.6051351557467172,
"learning_rate": 1.9282547087752314e-05,
"loss": 2.1827,
"step": 945
},
{
"epoch": 0.42,
"grad_norm": 0.7791426311648226,
"learning_rate": 1.9268101553696255e-05,
"loss": 2.1464,
"step": 950
},
{
"epoch": 0.42,
"grad_norm": 0.5750460561337694,
"learning_rate": 1.9253517560191292e-05,
"loss": 2.1649,
"step": 955
},
{
"epoch": 0.43,
"grad_norm": 0.6203389682475162,
"learning_rate": 1.9238795325112867e-05,
"loss": 2.1461,
"step": 960
},
{
"epoch": 0.43,
"grad_norm": 0.6595989849245582,
"learning_rate": 1.9223935068401668e-05,
"loss": 2.1623,
"step": 965
},
{
"epoch": 0.43,
"grad_norm": 0.592876478084952,
"learning_rate": 1.9208937012060316e-05,
"loss": 2.1582,
"step": 970
},
{
"epoch": 0.43,
"grad_norm": 0.5747650650373775,
"learning_rate": 1.9193801380150093e-05,
"loss": 2.1575,
"step": 975
},
{
"epoch": 0.43,
"grad_norm": 0.5938831018718109,
"learning_rate": 1.9178528398787553e-05,
"loss": 2.1806,
"step": 980
},
{
"epoch": 0.44,
"grad_norm": 0.598622840584413,
"learning_rate": 1.9163118296141172e-05,
"loss": 2.1476,
"step": 985
},
{
"epoch": 0.44,
"grad_norm": 0.6143064756465099,
"learning_rate": 1.9147571302427927e-05,
"loss": 2.166,
"step": 990
},
{
"epoch": 0.44,
"grad_norm": 0.5860879849217064,
"learning_rate": 1.913188764990986e-05,
"loss": 2.1389,
"step": 995
},
{
"epoch": 0.44,
"grad_norm": 0.6089076085399399,
"learning_rate": 1.9116067572890603e-05,
"loss": 2.1516,
"step": 1000
},
{
"epoch": 0.45,
"grad_norm": 0.6209536243968873,
"learning_rate": 1.9100111307711888e-05,
"loss": 2.1326,
"step": 1005
},
{
"epoch": 0.45,
"grad_norm": 0.6021689077706888,
"learning_rate": 1.9084019092750007e-05,
"loss": 2.1406,
"step": 1010
},
{
"epoch": 0.45,
"grad_norm": 0.6701022642049254,
"learning_rate": 1.906779116841225e-05,
"loss": 2.1525,
"step": 1015
},
{
"epoch": 0.45,
"grad_norm": 0.6670330060662382,
"learning_rate": 1.9051427777133328e-05,
"loss": 2.1341,
"step": 1020
},
{
"epoch": 0.45,
"grad_norm": 0.6094971005743538,
"learning_rate": 1.9034929163371726e-05,
"loss": 2.15,
"step": 1025
},
{
"epoch": 0.46,
"grad_norm": 0.5831056098168329,
"learning_rate": 1.901829557360608e-05,
"loss": 2.1383,
"step": 1030
},
{
"epoch": 0.46,
"grad_norm": 0.582116206786792,
"learning_rate": 1.9001527256331474e-05,
"loss": 2.1409,
"step": 1035
},
{
"epoch": 0.46,
"grad_norm": 0.5942348619446532,
"learning_rate": 1.8984624462055724e-05,
"loss": 2.1409,
"step": 1040
},
{
"epoch": 0.46,
"grad_norm": 0.611978662259581,
"learning_rate": 1.896758744329567e-05,
"loss": 2.1599,
"step": 1045
},
{
"epoch": 0.47,
"grad_norm": 0.617886326822044,
"learning_rate": 1.895041645457335e-05,
"loss": 2.1219,
"step": 1050
},
{
"epoch": 0.47,
"grad_norm": 0.5918397139933461,
"learning_rate": 1.8933111752412255e-05,
"loss": 2.1318,
"step": 1055
},
{
"epoch": 0.47,
"grad_norm": 0.5866014206593895,
"learning_rate": 1.8915673595333443e-05,
"loss": 2.1739,
"step": 1060
},
{
"epoch": 0.47,
"grad_norm": 0.6707841568636259,
"learning_rate": 1.8898102243851722e-05,
"loss": 2.1532,
"step": 1065
},
{
"epoch": 0.47,
"grad_norm": 0.591718916996025,
"learning_rate": 1.8880397960471724e-05,
"loss": 2.1286,
"step": 1070
},
{
"epoch": 0.48,
"grad_norm": 0.5718916077497437,
"learning_rate": 1.8862561009684e-05,
"loss": 2.1261,
"step": 1075
},
{
"epoch": 0.48,
"grad_norm": 0.5789578449104208,
"learning_rate": 1.8844591657961083e-05,
"loss": 2.136,
"step": 1080
},
{
"epoch": 0.48,
"grad_norm": 0.6364532900696288,
"learning_rate": 1.8826490173753464e-05,
"loss": 2.1503,
"step": 1085
},
{
"epoch": 0.48,
"grad_norm": 0.5762908205088724,
"learning_rate": 1.880825682748563e-05,
"loss": 2.1672,
"step": 1090
},
{
"epoch": 0.48,
"grad_norm": 0.5658591198632481,
"learning_rate": 1.878989189155199e-05,
"loss": 2.1479,
"step": 1095
},
{
"epoch": 0.49,
"grad_norm": 0.6559083324834301,
"learning_rate": 1.877139564031282e-05,
"loss": 2.1605,
"step": 1100
},
{
"epoch": 0.49,
"grad_norm": 0.5820095371678126,
"learning_rate": 1.8752768350090162e-05,
"loss": 2.1256,
"step": 1105
},
{
"epoch": 0.49,
"grad_norm": 0.6020665324033978,
"learning_rate": 1.87340102991637e-05,
"loss": 2.1178,
"step": 1110
},
{
"epoch": 0.49,
"grad_norm": 0.5705675157100321,
"learning_rate": 1.871512176776659e-05,
"loss": 2.1402,
"step": 1115
},
{
"epoch": 0.5,
"grad_norm": 0.6002920559565138,
"learning_rate": 1.8696103038081297e-05,
"loss": 2.1362,
"step": 1120
},
{
"epoch": 0.5,
"grad_norm": 0.6321117346398434,
"learning_rate": 1.8676954394235346e-05,
"loss": 2.1117,
"step": 1125
},
{
"epoch": 0.5,
"grad_norm": 0.6046421610702791,
"learning_rate": 1.86576761222971e-05,
"loss": 2.1415,
"step": 1130
},
{
"epoch": 0.5,
"grad_norm": 0.6012321769150669,
"learning_rate": 1.8638268510271492e-05,
"loss": 2.1308,
"step": 1135
},
{
"epoch": 0.5,
"grad_norm": 0.6182984319401944,
"learning_rate": 1.8618731848095706e-05,
"loss": 2.135,
"step": 1140
},
{
"epoch": 0.51,
"grad_norm": 0.6331454709115083,
"learning_rate": 1.859906642763485e-05,
"loss": 2.125,
"step": 1145
},
{
"epoch": 0.51,
"grad_norm": 0.6370344124714803,
"learning_rate": 1.8579272542677597e-05,
"loss": 2.1594,
"step": 1150
},
{
"epoch": 0.51,
"grad_norm": 0.6402170851372364,
"learning_rate": 1.8559350488931805e-05,
"loss": 2.1387,
"step": 1155
},
{
"epoch": 0.51,
"grad_norm": 0.6195070820775775,
"learning_rate": 1.853930056402008e-05,
"loss": 2.1495,
"step": 1160
},
{
"epoch": 0.52,
"grad_norm": 0.6140394376953083,
"learning_rate": 1.851912306747535e-05,
"loss": 2.1208,
"step": 1165
},
{
"epoch": 0.52,
"grad_norm": 0.583710103289087,
"learning_rate": 1.849881830073637e-05,
"loss": 2.1357,
"step": 1170
},
{
"epoch": 0.52,
"grad_norm": 0.5860072444462401,
"learning_rate": 1.847838656714324e-05,
"loss": 2.1197,
"step": 1175
},
{
"epoch": 0.52,
"grad_norm": 0.6100517580158695,
"learning_rate": 1.845782817193286e-05,
"loss": 2.1211,
"step": 1180
},
{
"epoch": 0.52,
"grad_norm": 0.588731031328821,
"learning_rate": 1.843714342223437e-05,
"loss": 2.1381,
"step": 1185
},
{
"epoch": 0.53,
"grad_norm": 0.5677390905318522,
"learning_rate": 1.841633262706456e-05,
"loss": 2.1435,
"step": 1190
},
{
"epoch": 0.53,
"grad_norm": 0.5933859169199024,
"learning_rate": 1.8395396097323268e-05,
"loss": 2.1369,
"step": 1195
},
{
"epoch": 0.53,
"grad_norm": 0.5789177308450136,
"learning_rate": 1.8374334145788723e-05,
"loss": 2.1283,
"step": 1200
},
{
"epoch": 0.53,
"grad_norm": 0.6279275335466069,
"learning_rate": 1.835314708711287e-05,
"loss": 2.1633,
"step": 1205
},
{
"epoch": 0.54,
"grad_norm": 0.6449084994619112,
"learning_rate": 1.833183523781668e-05,
"loss": 2.1421,
"step": 1210
},
{
"epoch": 0.54,
"grad_norm": 0.5890354990670971,
"learning_rate": 1.8310398916285403e-05,
"loss": 2.1269,
"step": 1215
},
{
"epoch": 0.54,
"grad_norm": 0.5709381880728748,
"learning_rate": 1.8288838442763838e-05,
"loss": 2.1086,
"step": 1220
},
{
"epoch": 0.54,
"grad_norm": 0.5821619152762381,
"learning_rate": 1.826715413935153e-05,
"loss": 2.1406,
"step": 1225
},
{
"epoch": 0.54,
"grad_norm": 0.5954153100778483,
"learning_rate": 1.824534632999796e-05,
"loss": 2.1201,
"step": 1230
},
{
"epoch": 0.55,
"grad_norm": 0.6251165164809961,
"learning_rate": 1.8223415340497707e-05,
"loss": 2.1202,
"step": 1235
},
{
"epoch": 0.55,
"grad_norm": 0.5867538775812078,
"learning_rate": 1.820136149848559e-05,
"loss": 2.1327,
"step": 1240
},
{
"epoch": 0.55,
"grad_norm": 0.5957636727076394,
"learning_rate": 1.8179185133431748e-05,
"loss": 2.138,
"step": 1245
},
{
"epoch": 0.55,
"grad_norm": 0.5786576101773259,
"learning_rate": 1.8156886576636758e-05,
"loss": 2.155,
"step": 1250
},
{
"epoch": 0.56,
"grad_norm": 0.6018519598608473,
"learning_rate": 1.8134466161226644e-05,
"loss": 2.138,
"step": 1255
},
{
"epoch": 0.56,
"grad_norm": 0.6212909757844403,
"learning_rate": 1.8111924222147927e-05,
"loss": 2.1264,
"step": 1260
},
{
"epoch": 0.56,
"grad_norm": 0.6266882301461152,
"learning_rate": 1.8089261096162617e-05,
"loss": 2.1183,
"step": 1265
},
{
"epoch": 0.56,
"grad_norm": 0.6107956724955865,
"learning_rate": 1.8066477121843163e-05,
"loss": 2.1242,
"step": 1270
},
{
"epoch": 0.56,
"grad_norm": 0.6220591205173086,
"learning_rate": 1.8043572639567434e-05,
"loss": 2.1436,
"step": 1275
},
{
"epoch": 0.57,
"grad_norm": 0.6244763954116979,
"learning_rate": 1.8020547991513583e-05,
"loss": 2.122,
"step": 1280
},
{
"epoch": 0.57,
"grad_norm": 0.5991420400814244,
"learning_rate": 1.799740352165498e-05,
"loss": 2.1374,
"step": 1285
},
{
"epoch": 0.57,
"grad_norm": 0.5702402002395458,
"learning_rate": 1.7974139575755055e-05,
"loss": 2.1165,
"step": 1290
},
{
"epoch": 0.57,
"grad_norm": 0.6517132859823608,
"learning_rate": 1.7950756501362122e-05,
"loss": 2.1448,
"step": 1295
},
{
"epoch": 0.58,
"grad_norm": 0.6420330760513209,
"learning_rate": 1.792725464780421e-05,
"loss": 2.1299,
"step": 1300
},
{
"epoch": 0.58,
"grad_norm": 0.5836293955377506,
"learning_rate": 1.790363436618382e-05,
"loss": 2.1335,
"step": 1305
},
{
"epoch": 0.58,
"grad_norm": 0.5677666710204392,
"learning_rate": 1.7879896009372698e-05,
"loss": 2.1292,
"step": 1310
},
{
"epoch": 0.58,
"grad_norm": 0.5703219593604331,
"learning_rate": 1.7856039932006567e-05,
"loss": 2.1572,
"step": 1315
},
{
"epoch": 0.58,
"grad_norm": 0.5707439602786741,
"learning_rate": 1.7832066490479797e-05,
"loss": 2.1366,
"step": 1320
},
{
"epoch": 0.59,
"grad_norm": 0.5586601849306312,
"learning_rate": 1.780797604294012e-05,
"loss": 2.1414,
"step": 1325
},
{
"epoch": 0.59,
"grad_norm": 0.6027432680139834,
"learning_rate": 1.7783768949283258e-05,
"loss": 2.1201,
"step": 1330
},
{
"epoch": 0.59,
"grad_norm": 0.6102963696348916,
"learning_rate": 1.7759445571147548e-05,
"loss": 2.1435,
"step": 1335
},
{
"epoch": 0.59,
"grad_norm": 0.7640331677774994,
"learning_rate": 1.773500627190854e-05,
"loss": 2.1322,
"step": 1340
},
{
"epoch": 0.6,
"grad_norm": 0.6539122461437712,
"learning_rate": 1.771045141667358e-05,
"loss": 2.1328,
"step": 1345
},
{
"epoch": 0.6,
"grad_norm": 0.623093025184874,
"learning_rate": 1.7685781372276338e-05,
"loss": 2.1392,
"step": 1350
},
{
"epoch": 0.6,
"grad_norm": 0.5965170495787665,
"learning_rate": 1.7660996507271334e-05,
"loss": 2.1527,
"step": 1355
},
{
"epoch": 0.6,
"grad_norm": 0.6105967976099729,
"learning_rate": 1.7636097191928437e-05,
"loss": 2.0988,
"step": 1360
},
{
"epoch": 0.6,
"grad_norm": 0.5850559463854867,
"learning_rate": 1.7611083798227334e-05,
"loss": 2.1438,
"step": 1365
},
{
"epoch": 0.61,
"grad_norm": 0.6238708376664477,
"learning_rate": 1.758595669985197e-05,
"loss": 2.1295,
"step": 1370
},
{
"epoch": 0.61,
"grad_norm": 0.6135409056345271,
"learning_rate": 1.7560716272184947e-05,
"loss": 2.1325,
"step": 1375
},
{
"epoch": 0.61,
"grad_norm": 0.6074415191617366,
"learning_rate": 1.7535362892301953e-05,
"loss": 2.1421,
"step": 1380
},
{
"epoch": 0.61,
"grad_norm": 0.6479097268032581,
"learning_rate": 1.7509896938966108e-05,
"loss": 2.1286,
"step": 1385
},
{
"epoch": 0.62,
"grad_norm": 0.6130335438655646,
"learning_rate": 1.748431879262229e-05,
"loss": 2.1361,
"step": 1390
},
{
"epoch": 0.62,
"grad_norm": 0.5930951169289039,
"learning_rate": 1.7458628835391485e-05,
"loss": 2.1454,
"step": 1395
},
{
"epoch": 0.62,
"grad_norm": 0.6063189634308347,
"learning_rate": 1.7432827451065052e-05,
"loss": 2.1338,
"step": 1400
},
{
"epoch": 0.62,
"grad_norm": 0.5927334440647005,
"learning_rate": 1.7406915025099005e-05,
"loss": 2.1062,
"step": 1405
},
{
"epoch": 0.62,
"grad_norm": 0.606160487889161,
"learning_rate": 1.7380891944608243e-05,
"loss": 2.1378,
"step": 1410
},
{
"epoch": 0.63,
"grad_norm": 0.6191090048019862,
"learning_rate": 1.7354758598360778e-05,
"loss": 2.1005,
"step": 1415
},
{
"epoch": 0.63,
"grad_norm": 0.5896149956170946,
"learning_rate": 1.732851537677191e-05,
"loss": 2.1119,
"step": 1420
},
{
"epoch": 0.63,
"grad_norm": 0.5761124135554752,
"learning_rate": 1.730216267189842e-05,
"loss": 2.1233,
"step": 1425
},
{
"epoch": 0.63,
"grad_norm": 0.5874913924437228,
"learning_rate": 1.7275700877432693e-05,
"loss": 2.0843,
"step": 1430
},
{
"epoch": 0.64,
"grad_norm": 0.5827467654305647,
"learning_rate": 1.7249130388696836e-05,
"loss": 2.1335,
"step": 1435
},
{
"epoch": 0.64,
"grad_norm": 0.589493063052537,
"learning_rate": 1.7222451602636785e-05,
"loss": 2.1021,
"step": 1440
},
{
"epoch": 0.64,
"grad_norm": 0.5820133034108264,
"learning_rate": 1.7195664917816367e-05,
"loss": 2.1126,
"step": 1445
},
{
"epoch": 0.64,
"grad_norm": 0.5766548384043362,
"learning_rate": 1.7168770734411344e-05,
"loss": 2.1167,
"step": 1450
},
{
"epoch": 0.64,
"grad_norm": 0.5939890619581065,
"learning_rate": 1.7141769454203438e-05,
"loss": 2.1273,
"step": 1455
},
{
"epoch": 0.65,
"grad_norm": 0.6298139198794492,
"learning_rate": 1.711466148057433e-05,
"loss": 2.1016,
"step": 1460
},
{
"epoch": 0.65,
"grad_norm": 0.6548342649331558,
"learning_rate": 1.7087447218499637e-05,
"loss": 2.1226,
"step": 1465
},
{
"epoch": 0.65,
"grad_norm": 0.6362324655032655,
"learning_rate": 1.7060127074542847e-05,
"loss": 2.0911,
"step": 1470
},
{
"epoch": 0.65,
"grad_norm": 0.5739192391017053,
"learning_rate": 1.7032701456849253e-05,
"loss": 2.1274,
"step": 1475
},
{
"epoch": 0.66,
"grad_norm": 0.5838401324471546,
"learning_rate": 1.700517077513987e-05,
"loss": 2.0943,
"step": 1480
},
{
"epoch": 0.66,
"grad_norm": 0.5916781269895333,
"learning_rate": 1.697753544070529e-05,
"loss": 2.11,
"step": 1485
},
{
"epoch": 0.66,
"grad_norm": 0.6017064541925474,
"learning_rate": 1.6949795866399554e-05,
"loss": 2.1309,
"step": 1490
},
{
"epoch": 0.66,
"grad_norm": 0.606808348273972,
"learning_rate": 1.6921952466633985e-05,
"loss": 2.1319,
"step": 1495
},
{
"epoch": 0.66,
"grad_norm": 0.5927345732335689,
"learning_rate": 1.689400565737098e-05,
"loss": 2.142,
"step": 1500
},
{
"epoch": 0.67,
"grad_norm": 0.6087685157474721,
"learning_rate": 1.6865955856117814e-05,
"loss": 2.131,
"step": 1505
},
{
"epoch": 0.67,
"grad_norm": 0.5898629245702609,
"learning_rate": 1.6837803481920393e-05,
"loss": 2.1328,
"step": 1510
},
{
"epoch": 0.67,
"grad_norm": 0.5605598205304891,
"learning_rate": 1.6809548955357e-05,
"loss": 2.1301,
"step": 1515
},
{
"epoch": 0.67,
"grad_norm": 0.5935434149140855,
"learning_rate": 1.6781192698532e-05,
"loss": 2.0945,
"step": 1520
},
{
"epoch": 0.68,
"grad_norm": 0.6300374892262115,
"learning_rate": 1.6752735135069556e-05,
"loss": 2.1335,
"step": 1525
},
{
"epoch": 0.68,
"grad_norm": 0.6321353944380523,
"learning_rate": 1.6724176690107272e-05,
"loss": 2.1359,
"step": 1530
},
{
"epoch": 0.68,
"grad_norm": 0.6532222332896985,
"learning_rate": 1.669551779028987e-05,
"loss": 2.1327,
"step": 1535
},
{
"epoch": 0.68,
"grad_norm": 0.6041341768443478,
"learning_rate": 1.6666758863762796e-05,
"loss": 2.1123,
"step": 1540
},
{
"epoch": 0.68,
"grad_norm": 0.5940592875659951,
"learning_rate": 1.6637900340165825e-05,
"loss": 2.1025,
"step": 1545
},
{
"epoch": 0.69,
"grad_norm": 0.6226686385577421,
"learning_rate": 1.6608942650626655e-05,
"loss": 2.129,
"step": 1550
},
{
"epoch": 0.69,
"grad_norm": 0.5921470111334243,
"learning_rate": 1.6579886227754466e-05,
"loss": 2.1204,
"step": 1555
},
{
"epoch": 0.69,
"grad_norm": 0.5653246172999827,
"learning_rate": 1.655073150563343e-05,
"loss": 2.084,
"step": 1560
},
{
"epoch": 0.69,
"grad_norm": 0.6102376529373957,
"learning_rate": 1.6521478919816263e-05,
"loss": 2.1256,
"step": 1565
},
{
"epoch": 0.7,
"grad_norm": 0.6042532710837394,
"learning_rate": 1.6492128907317696e-05,
"loss": 2.0956,
"step": 1570
},
{
"epoch": 0.7,
"grad_norm": 0.5582366988639466,
"learning_rate": 1.6462681906607955e-05,
"loss": 2.0889,
"step": 1575
},
{
"epoch": 0.7,
"grad_norm": 0.591023477781566,
"learning_rate": 1.6433138357606198e-05,
"loss": 2.122,
"step": 1580
},
{
"epoch": 0.7,
"grad_norm": 0.6078096870142705,
"learning_rate": 1.6403498701673966e-05,
"loss": 2.1259,
"step": 1585
},
{
"epoch": 0.7,
"grad_norm": 0.6313936903487295,
"learning_rate": 1.637376338160856e-05,
"loss": 2.1077,
"step": 1590
},
{
"epoch": 0.71,
"grad_norm": 0.6658101995369113,
"learning_rate": 1.6343932841636455e-05,
"loss": 2.1106,
"step": 1595
},
{
"epoch": 0.71,
"grad_norm": 0.5727992352049234,
"learning_rate": 1.6314007527406643e-05,
"loss": 2.1177,
"step": 1600
},
{
"epoch": 0.71,
"grad_norm": 0.5719330937764739,
"learning_rate": 1.6283987885983984e-05,
"loss": 2.1314,
"step": 1605
},
{
"epoch": 0.71,
"grad_norm": 0.6139618466804949,
"learning_rate": 1.6253874365842518e-05,
"loss": 2.123,
"step": 1610
},
{
"epoch": 0.72,
"grad_norm": 0.563163072074932,
"learning_rate": 1.6223667416858786e-05,
"loss": 2.1092,
"step": 1615
},
{
"epoch": 0.72,
"grad_norm": 0.5814354573440118,
"learning_rate": 1.619336749030509e-05,
"loss": 2.1252,
"step": 1620
},
{
"epoch": 0.72,
"grad_norm": 0.5944046976107267,
"learning_rate": 1.6162975038842748e-05,
"loss": 2.0941,
"step": 1625
},
{
"epoch": 0.72,
"grad_norm": 0.601284807954241,
"learning_rate": 1.613249051651535e-05,
"loss": 2.1156,
"step": 1630
},
{
"epoch": 0.72,
"grad_norm": 0.5677221501087967,
"learning_rate": 1.6101914378741964e-05,
"loss": 2.1084,
"step": 1635
},
{
"epoch": 0.73,
"grad_norm": 0.6129844032300016,
"learning_rate": 1.6071247082310337e-05,
"loss": 2.1117,
"step": 1640
},
{
"epoch": 0.73,
"grad_norm": 0.565471507607964,
"learning_rate": 1.6040489085370055e-05,
"loss": 2.1271,
"step": 1645
},
{
"epoch": 0.73,
"grad_norm": 0.6607436156849118,
"learning_rate": 1.6009640847425726e-05,
"loss": 2.11,
"step": 1650
},
{
"epoch": 0.73,
"grad_norm": 0.5721985531058912,
"learning_rate": 1.5978702829330086e-05,
"loss": 2.1007,
"step": 1655
},
{
"epoch": 0.74,
"grad_norm": 0.5894756540523463,
"learning_rate": 1.594767549327714e-05,
"loss": 2.1035,
"step": 1660
},
{
"epoch": 0.74,
"grad_norm": 0.5603298057388202,
"learning_rate": 1.591655930279524e-05,
"loss": 2.1084,
"step": 1665
},
{
"epoch": 0.74,
"grad_norm": 0.5874312868346593,
"learning_rate": 1.588535472274017e-05,
"loss": 2.106,
"step": 1670
},
{
"epoch": 0.74,
"grad_norm": 0.5667256235728622,
"learning_rate": 1.5854062219288188e-05,
"loss": 2.0911,
"step": 1675
},
{
"epoch": 0.74,
"grad_norm": 0.6368360313970768,
"learning_rate": 1.5822682259929086e-05,
"loss": 2.1471,
"step": 1680
},
{
"epoch": 0.75,
"grad_norm": 0.6114210523000054,
"learning_rate": 1.5791215313459172e-05,
"loss": 2.1012,
"step": 1685
},
{
"epoch": 0.75,
"grad_norm": 0.5790566627709446,
"learning_rate": 1.57596618499743e-05,
"loss": 2.0984,
"step": 1690
},
{
"epoch": 0.75,
"grad_norm": 0.6050924777949711,
"learning_rate": 1.572802234086283e-05,
"loss": 2.0894,
"step": 1695
},
{
"epoch": 0.75,
"grad_norm": 0.5884103557299183,
"learning_rate": 1.5696297258798573e-05,
"loss": 2.108,
"step": 1700
},
{
"epoch": 0.76,
"grad_norm": 0.6150704113212703,
"learning_rate": 1.566448707773377e-05,
"loss": 2.0898,
"step": 1705
},
{
"epoch": 0.76,
"grad_norm": 0.5721375101056374,
"learning_rate": 1.5632592272891964e-05,
"loss": 2.0974,
"step": 1710
},
{
"epoch": 0.76,
"grad_norm": 0.6413838096049399,
"learning_rate": 1.560061332076094e-05,
"loss": 2.1075,
"step": 1715
},
{
"epoch": 0.76,
"grad_norm": 0.6415438169158745,
"learning_rate": 1.5568550699085574e-05,
"loss": 2.0941,
"step": 1720
},
{
"epoch": 0.76,
"grad_norm": 0.5965083134764531,
"learning_rate": 1.5536404886860718e-05,
"loss": 2.0895,
"step": 1725
},
{
"epoch": 0.77,
"grad_norm": 0.568547826263572,
"learning_rate": 1.550417636432404e-05,
"loss": 2.1053,
"step": 1730
},
{
"epoch": 0.77,
"grad_norm": 0.5816491064752859,
"learning_rate": 1.547186561294884e-05,
"loss": 2.0995,
"step": 1735
},
{
"epoch": 0.77,
"grad_norm": 0.5944585744625376,
"learning_rate": 1.5439473115436872e-05,
"loss": 2.1043,
"step": 1740
},
{
"epoch": 0.77,
"grad_norm": 0.6412352720575958,
"learning_rate": 1.540699935571111e-05,
"loss": 2.0972,
"step": 1745
},
{
"epoch": 0.78,
"grad_norm": 0.5810756596824816,
"learning_rate": 1.5374444818908553e-05,
"loss": 2.0666,
"step": 1750
},
{
"epoch": 0.78,
"grad_norm": 0.5688785958864064,
"learning_rate": 1.5341809991372936e-05,
"loss": 2.101,
"step": 1755
},
{
"epoch": 0.78,
"grad_norm": 0.598615678112906,
"learning_rate": 1.5309095360647505e-05,
"loss": 2.114,
"step": 1760
},
{
"epoch": 0.78,
"grad_norm": 0.5856103482818028,
"learning_rate": 1.5276301415467703e-05,
"loss": 2.1113,
"step": 1765
},
{
"epoch": 0.78,
"grad_norm": 0.5902611334275555,
"learning_rate": 1.5243428645753877e-05,
"loss": 2.0956,
"step": 1770
},
{
"epoch": 0.79,
"grad_norm": 0.6140486714782962,
"learning_rate": 1.5210477542603976e-05,
"loss": 2.1055,
"step": 1775
},
{
"epoch": 0.79,
"grad_norm": 0.5911777901152846,
"learning_rate": 1.5177448598286182e-05,
"loss": 2.1295,
"step": 1780
},
{
"epoch": 0.79,
"grad_norm": 0.6342170806585613,
"learning_rate": 1.5144342306231587e-05,
"loss": 2.0823,
"step": 1785
},
{
"epoch": 0.79,
"grad_norm": 0.6042322845026318,
"learning_rate": 1.5111159161026802e-05,
"loss": 2.1014,
"step": 1790
},
{
"epoch": 0.79,
"grad_norm": 0.6477372611930943,
"learning_rate": 1.5077899658406581e-05,
"loss": 2.0999,
"step": 1795
},
{
"epoch": 0.8,
"grad_norm": 0.559863497958933,
"learning_rate": 1.5044564295246395e-05,
"loss": 2.091,
"step": 1800
},
{
"epoch": 0.8,
"grad_norm": 0.5694200093836167,
"learning_rate": 1.501115356955504e-05,
"loss": 2.1139,
"step": 1805
},
{
"epoch": 0.8,
"grad_norm": 0.5808418484030595,
"learning_rate": 1.4977667980467162e-05,
"loss": 2.1088,
"step": 1810
},
{
"epoch": 0.8,
"grad_norm": 0.5992841845727279,
"learning_rate": 1.4944108028235831e-05,
"loss": 2.0941,
"step": 1815
},
{
"epoch": 0.81,
"grad_norm": 0.5834803000229792,
"learning_rate": 1.491047421422505e-05,
"loss": 2.1296,
"step": 1820
},
{
"epoch": 0.81,
"grad_norm": 0.5579026297072871,
"learning_rate": 1.4876767040902267e-05,
"loss": 2.0896,
"step": 1825
},
{
"epoch": 0.81,
"grad_norm": 0.5737395309156446,
"learning_rate": 1.4842987011830871e-05,
"loss": 2.1355,
"step": 1830
},
{
"epoch": 0.81,
"grad_norm": 0.5867681094218473,
"learning_rate": 1.4809134631662672e-05,
"loss": 2.0505,
"step": 1835
},
{
"epoch": 0.81,
"grad_norm": 0.6502572192087762,
"learning_rate": 1.4775210406130358e-05,
"loss": 2.1313,
"step": 1840
},
{
"epoch": 0.82,
"grad_norm": 0.5979532970809924,
"learning_rate": 1.4741214842039939e-05,
"loss": 2.0781,
"step": 1845
},
{
"epoch": 0.82,
"grad_norm": 0.6123366454038003,
"learning_rate": 1.4707148447263178e-05,
"loss": 2.0912,
"step": 1850
},
{
"epoch": 0.82,
"grad_norm": 0.5747784492066287,
"learning_rate": 1.4673011730730001e-05,
"loss": 2.1221,
"step": 1855
},
{
"epoch": 0.82,
"grad_norm": 0.6380115971888696,
"learning_rate": 1.4638805202420896e-05,
"loss": 2.1201,
"step": 1860
},
{
"epoch": 0.83,
"grad_norm": 0.619383176173491,
"learning_rate": 1.4604529373359294e-05,
"loss": 2.107,
"step": 1865
},
{
"epoch": 0.83,
"grad_norm": 0.5805934810535641,
"learning_rate": 1.4570184755603936e-05,
"loss": 2.0975,
"step": 1870
},
{
"epoch": 0.83,
"grad_norm": 0.5760959289559535,
"learning_rate": 1.4535771862241218e-05,
"loss": 2.0824,
"step": 1875
},
{
"epoch": 0.83,
"grad_norm": 0.5949202997647781,
"learning_rate": 1.4501291207377537e-05,
"loss": 2.1218,
"step": 1880
},
{
"epoch": 0.83,
"grad_norm": 0.5892815463115585,
"learning_rate": 1.4466743306131594e-05,
"loss": 2.113,
"step": 1885
},
{
"epoch": 0.84,
"grad_norm": 0.6085186520596105,
"learning_rate": 1.4432128674626713e-05,
"loss": 2.1016,
"step": 1890
},
{
"epoch": 0.84,
"grad_norm": 0.6062422049370045,
"learning_rate": 1.4397447829983122e-05,
"loss": 2.1137,
"step": 1895
},
{
"epoch": 0.84,
"grad_norm": 0.6117045905800429,
"learning_rate": 1.4362701290310234e-05,
"loss": 2.0894,
"step": 1900
},
{
"epoch": 0.84,
"grad_norm": 0.6025066189988738,
"learning_rate": 1.43278895746989e-05,
"loss": 2.0885,
"step": 1905
},
{
"epoch": 0.85,
"grad_norm": 0.5904328269035394,
"learning_rate": 1.4293013203213662e-05,
"loss": 2.0893,
"step": 1910
},
{
"epoch": 0.85,
"grad_norm": 0.5788752082022146,
"learning_rate": 1.4258072696884966e-05,
"loss": 2.0891,
"step": 1915
},
{
"epoch": 0.85,
"grad_norm": 0.587026462476973,
"learning_rate": 1.422306857770141e-05,
"loss": 2.0996,
"step": 1920
},
{
"epoch": 0.85,
"grad_norm": 0.604266447885042,
"learning_rate": 1.4188001368601918e-05,
"loss": 2.0923,
"step": 1925
},
{
"epoch": 0.85,
"grad_norm": 0.5553259205464641,
"learning_rate": 1.415287159346793e-05,
"loss": 2.1059,
"step": 1930
},
{
"epoch": 0.86,
"grad_norm": 0.6081465234564575,
"learning_rate": 1.4117679777115593e-05,
"loss": 2.1104,
"step": 1935
},
{
"epoch": 0.86,
"grad_norm": 0.5753232236325628,
"learning_rate": 1.4082426445287904e-05,
"loss": 2.0742,
"step": 1940
},
{
"epoch": 0.86,
"grad_norm": 0.5488428092465065,
"learning_rate": 1.4047112124646864e-05,
"loss": 2.0926,
"step": 1945
},
{
"epoch": 0.86,
"grad_norm": 0.5796671352799692,
"learning_rate": 1.4011737342765604e-05,
"loss": 2.1266,
"step": 1950
},
{
"epoch": 0.87,
"grad_norm": 0.6026406897059337,
"learning_rate": 1.3976302628120508e-05,
"loss": 2.1287,
"step": 1955
},
{
"epoch": 0.87,
"grad_norm": 0.607723771894169,
"learning_rate": 1.3940808510083321e-05,
"loss": 2.1067,
"step": 1960
},
{
"epoch": 0.87,
"grad_norm": 0.5815255977044049,
"learning_rate": 1.390525551891323e-05,
"loss": 2.0899,
"step": 1965
},
{
"epoch": 0.87,
"grad_norm": 0.5842632091981558,
"learning_rate": 1.3869644185748954e-05,
"loss": 2.1011,
"step": 1970
},
{
"epoch": 0.87,
"grad_norm": 0.5907559959809014,
"learning_rate": 1.3833975042600799e-05,
"loss": 2.1267,
"step": 1975
},
{
"epoch": 0.88,
"grad_norm": 0.5695393885249783,
"learning_rate": 1.3798248622342719e-05,
"loss": 2.077,
"step": 1980
},
{
"epoch": 0.88,
"grad_norm": 0.5850267157805066,
"learning_rate": 1.3762465458704347e-05,
"loss": 2.0954,
"step": 1985
},
{
"epoch": 0.88,
"grad_norm": 0.5838769180250784,
"learning_rate": 1.3726626086263029e-05,
"loss": 2.0905,
"step": 1990
},
{
"epoch": 0.88,
"grad_norm": 0.5660259210439721,
"learning_rate": 1.3690731040435832e-05,
"loss": 2.0911,
"step": 1995
},
{
"epoch": 0.89,
"grad_norm": 0.6037055172383834,
"learning_rate": 1.3654780857471548e-05,
"loss": 2.0839,
"step": 2000
},
{
"epoch": 0.89,
"grad_norm": 0.5666079927329899,
"learning_rate": 1.3618776074442685e-05,
"loss": 2.1215,
"step": 2005
},
{
"epoch": 0.89,
"grad_norm": 0.5882227876138398,
"learning_rate": 1.3582717229237434e-05,
"loss": 2.0892,
"step": 2010
},
{
"epoch": 0.89,
"grad_norm": 0.6110760662981108,
"learning_rate": 1.3546604860551648e-05,
"loss": 2.1151,
"step": 2015
},
{
"epoch": 0.89,
"grad_norm": 0.6032760791023238,
"learning_rate": 1.3510439507880778e-05,
"loss": 2.1162,
"step": 2020
},
{
"epoch": 0.9,
"grad_norm": 0.6167438803725591,
"learning_rate": 1.3474221711511827e-05,
"loss": 2.0917,
"step": 2025
},
{
"epoch": 0.9,
"grad_norm": 0.5742837147705944,
"learning_rate": 1.3437952012515275e-05,
"loss": 2.1046,
"step": 2030
},
{
"epoch": 0.9,
"grad_norm": 0.5832191277289328,
"learning_rate": 1.3401630952736988e-05,
"loss": 2.0981,
"step": 2035
},
{
"epoch": 0.9,
"grad_norm": 0.59596518598508,
"learning_rate": 1.336525907479013e-05,
"loss": 2.1068,
"step": 2040
},
{
"epoch": 0.91,
"grad_norm": 0.5988247471380881,
"learning_rate": 1.3328836922047058e-05,
"loss": 2.1004,
"step": 2045
},
{
"epoch": 0.91,
"grad_norm": 0.5809982280121736,
"learning_rate": 1.32923650386312e-05,
"loss": 2.0883,
"step": 2050
},
{
"epoch": 0.91,
"grad_norm": 0.5872996199654297,
"learning_rate": 1.3255843969408932e-05,
"loss": 2.1057,
"step": 2055
},
{
"epoch": 0.91,
"grad_norm": 0.557633995302922,
"learning_rate": 1.321927425998143e-05,
"loss": 2.113,
"step": 2060
},
{
"epoch": 0.91,
"grad_norm": 0.5789693558950831,
"learning_rate": 1.318265645667652e-05,
"loss": 2.1021,
"step": 2065
},
{
"epoch": 0.92,
"grad_norm": 0.6015964775269415,
"learning_rate": 1.314599110654053e-05,
"loss": 2.0902,
"step": 2070
},
{
"epoch": 0.92,
"grad_norm": 0.5697258341638807,
"learning_rate": 1.3109278757330098e-05,
"loss": 2.1167,
"step": 2075
},
{
"epoch": 0.92,
"grad_norm": 0.6148231323006887,
"learning_rate": 1.3072519957504e-05,
"loss": 2.0815,
"step": 2080
},
{
"epoch": 0.92,
"grad_norm": 0.599800276574819,
"learning_rate": 1.3035715256214956e-05,
"loss": 2.0756,
"step": 2085
},
{
"epoch": 0.93,
"grad_norm": 0.5850789165731404,
"learning_rate": 1.2998865203301424e-05,
"loss": 2.1034,
"step": 2090
},
{
"epoch": 0.93,
"grad_norm": 0.6058004662692358,
"learning_rate": 1.296197034927938e-05,
"loss": 2.0959,
"step": 2095
},
{
"epoch": 0.93,
"grad_norm": 0.5779686330565179,
"learning_rate": 1.2925031245334112e-05,
"loss": 2.1,
"step": 2100
},
{
"epoch": 0.93,
"grad_norm": 0.6144628857265712,
"learning_rate": 1.288804844331196e-05,
"loss": 2.1017,
"step": 2105
},
{
"epoch": 0.93,
"grad_norm": 0.5780997131283874,
"learning_rate": 1.2851022495712092e-05,
"loss": 2.094,
"step": 2110
},
{
"epoch": 0.94,
"grad_norm": 0.6273183986107495,
"learning_rate": 1.2813953955678243e-05,
"loss": 2.0904,
"step": 2115
},
{
"epoch": 0.94,
"grad_norm": 0.581192517769629,
"learning_rate": 1.2776843376990448e-05,
"loss": 2.083,
"step": 2120
},
{
"epoch": 0.94,
"grad_norm": 0.5696271372299205,
"learning_rate": 1.273969131405678e-05,
"loss": 2.1046,
"step": 2125
},
{
"epoch": 0.94,
"grad_norm": 0.5507047535977257,
"learning_rate": 1.270249832190505e-05,
"loss": 2.0659,
"step": 2130
},
{
"epoch": 0.95,
"grad_norm": 0.5401936540736131,
"learning_rate": 1.2665264956174532e-05,
"loss": 2.0749,
"step": 2135
},
{
"epoch": 0.95,
"grad_norm": 0.5657656745772869,
"learning_rate": 1.2627991773107651e-05,
"loss": 2.0913,
"step": 2140
},
{
"epoch": 0.95,
"grad_norm": 0.5990306239523064,
"learning_rate": 1.259067932954168e-05,
"loss": 2.1091,
"step": 2145
},
{
"epoch": 0.95,
"grad_norm": 0.6339668095833608,
"learning_rate": 1.2553328182900414e-05,
"loss": 2.1121,
"step": 2150
},
{
"epoch": 0.95,
"grad_norm": 0.5849660907370856,
"learning_rate": 1.2515938891185856e-05,
"loss": 2.0834,
"step": 2155
},
{
"epoch": 0.96,
"grad_norm": 0.5764268543596228,
"learning_rate": 1.2478512012969864e-05,
"loss": 2.0738,
"step": 2160
},
{
"epoch": 0.96,
"grad_norm": 0.5922521284810875,
"learning_rate": 1.2441048107385815e-05,
"loss": 2.0881,
"step": 2165
},
{
"epoch": 0.96,
"grad_norm": 0.5965524164054886,
"learning_rate": 1.2403547734120253e-05,
"loss": 2.0963,
"step": 2170
},
{
"epoch": 0.96,
"grad_norm": 0.5711338828486805,
"learning_rate": 1.2366011453404527e-05,
"loss": 2.0882,
"step": 2175
},
{
"epoch": 0.97,
"grad_norm": 0.5727206638082731,
"learning_rate": 1.2328439826006415e-05,
"loss": 2.1036,
"step": 2180
},
{
"epoch": 0.97,
"grad_norm": 0.5864339394553177,
"learning_rate": 1.2290833413221757e-05,
"loss": 2.0878,
"step": 2185
},
{
"epoch": 0.97,
"grad_norm": 0.575229714224447,
"learning_rate": 1.2253192776866059e-05,
"loss": 2.0888,
"step": 2190
},
{
"epoch": 0.97,
"grad_norm": 0.5558301266934436,
"learning_rate": 1.2215518479266108e-05,
"loss": 2.0679,
"step": 2195
},
{
"epoch": 0.97,
"grad_norm": 0.5479254735303103,
"learning_rate": 1.2177811083251572e-05,
"loss": 2.0563,
"step": 2200
},
{
"epoch": 0.98,
"grad_norm": 0.5671335128265081,
"learning_rate": 1.214007115214658e-05,
"loss": 2.0647,
"step": 2205
},
{
"epoch": 0.98,
"grad_norm": 0.6144685734465458,
"learning_rate": 1.2102299249761315e-05,
"loss": 2.1117,
"step": 2210
},
{
"epoch": 0.98,
"grad_norm": 0.5929383455526278,
"learning_rate": 1.2064495940383602e-05,
"loss": 2.0879,
"step": 2215
},
{
"epoch": 0.98,
"grad_norm": 0.6014716837274918,
"learning_rate": 1.2026661788770453e-05,
"loss": 2.0864,
"step": 2220
},
{
"epoch": 0.99,
"grad_norm": 0.6418585018662625,
"learning_rate": 1.1988797360139649e-05,
"loss": 2.0883,
"step": 2225
},
{
"epoch": 0.99,
"grad_norm": 0.5633931868859651,
"learning_rate": 1.1950903220161286e-05,
"loss": 2.0837,
"step": 2230
},
{
"epoch": 0.99,
"grad_norm": 0.5922686756579777,
"learning_rate": 1.1912979934949331e-05,
"loss": 2.1281,
"step": 2235
},
{
"epoch": 0.99,
"grad_norm": 0.574086322879612,
"learning_rate": 1.1875028071053165e-05,
"loss": 2.0805,
"step": 2240
},
{
"epoch": 0.99,
"grad_norm": 0.5930291155115571,
"learning_rate": 1.1837048195449112e-05,
"loss": 2.0962,
"step": 2245
},
{
"epoch": 1.0,
"grad_norm": 0.5737004327203209,
"learning_rate": 1.1799040875531975e-05,
"loss": 2.0799,
"step": 2250
},
{
"epoch": 1.0,
"grad_norm": 0.5587037518815292,
"learning_rate": 1.1761006679106552e-05,
"loss": 2.1085,
"step": 2255
},
{
"epoch": 1.0,
"eval_loss": 2.094348430633545,
"eval_runtime": 26.3442,
"eval_samples_per_second": 1213.889,
"eval_steps_per_second": 37.959,
"step": 2258
},
{
"epoch": 1.0,
"grad_norm": 0.5476890206718826,
"learning_rate": 1.1722946174379168e-05,
"loss": 2.0989,
"step": 2260
},
{
"epoch": 1.0,
"grad_norm": 0.5992432378901903,
"learning_rate": 1.168485992994917e-05,
"loss": 2.0687,
"step": 2265
},
{
"epoch": 1.01,
"grad_norm": 0.5765822572446867,
"learning_rate": 1.1646748514800441e-05,
"loss": 2.0853,
"step": 2270
},
{
"epoch": 1.01,
"grad_norm": 0.588023877495434,
"learning_rate": 1.16086124982929e-05,
"loss": 2.0823,
"step": 2275
},
{
"epoch": 1.01,
"grad_norm": 0.5763304755946497,
"learning_rate": 1.1570452450153992e-05,
"loss": 2.0653,
"step": 2280
},
{
"epoch": 1.01,
"grad_norm": 0.5736446993524645,
"learning_rate": 1.1532268940470182e-05,
"loss": 2.0796,
"step": 2285
},
{
"epoch": 1.01,
"grad_norm": 0.595296283977219,
"learning_rate": 1.149406253967843e-05,
"loss": 2.0913,
"step": 2290
},
{
"epoch": 1.02,
"grad_norm": 0.5481984084152803,
"learning_rate": 1.1455833818557678e-05,
"loss": 2.0837,
"step": 2295
},
{
"epoch": 1.02,
"grad_norm": 0.5657607989936749,
"learning_rate": 1.1417583348220322e-05,
"loss": 2.07,
"step": 2300
},
{
"epoch": 1.02,
"grad_norm": 0.5559252423535721,
"learning_rate": 1.1379311700103673e-05,
"loss": 2.0816,
"step": 2305
},
{
"epoch": 1.02,
"grad_norm": 0.5658511458616021,
"learning_rate": 1.134101944596143e-05,
"loss": 2.0897,
"step": 2310
},
{
"epoch": 1.03,
"grad_norm": 0.5706047931406015,
"learning_rate": 1.1302707157855122e-05,
"loss": 2.093,
"step": 2315
},
{
"epoch": 1.03,
"grad_norm": 0.5621110094778881,
"learning_rate": 1.1264375408145582e-05,
"loss": 2.0919,
"step": 2320
},
{
"epoch": 1.03,
"grad_norm": 0.5742867083742238,
"learning_rate": 1.1226024769484385e-05,
"loss": 2.0647,
"step": 2325
},
{
"epoch": 1.03,
"grad_norm": 0.5865948946749304,
"learning_rate": 1.118765581480529e-05,
"loss": 2.0657,
"step": 2330
},
{
"epoch": 1.03,
"grad_norm": 0.5668604954635388,
"learning_rate": 1.1149269117315693e-05,
"loss": 2.0939,
"step": 2335
},
{
"epoch": 1.04,
"grad_norm": 0.5689216983451802,
"learning_rate": 1.1110865250488047e-05,
"loss": 2.0886,
"step": 2340
},
{
"epoch": 1.04,
"grad_norm": 0.5535818889995111,
"learning_rate": 1.1072444788051314e-05,
"loss": 2.1043,
"step": 2345
},
{
"epoch": 1.04,
"grad_norm": 0.6672484688919184,
"learning_rate": 1.1034008303982373e-05,
"loss": 2.074,
"step": 2350
},
{
"epoch": 1.04,
"grad_norm": 0.6076874546984099,
"learning_rate": 1.0995556372497467e-05,
"loss": 2.0722,
"step": 2355
},
{
"epoch": 1.05,
"grad_norm": 0.6221805162492368,
"learning_rate": 1.0957089568043607e-05,
"loss": 2.0775,
"step": 2360
},
{
"epoch": 1.05,
"grad_norm": 0.5713400475949109,
"learning_rate": 1.0918608465289993e-05,
"loss": 2.0722,
"step": 2365
},
{
"epoch": 1.05,
"grad_norm": 0.5711194665330696,
"learning_rate": 1.088011363911944e-05,
"loss": 2.0665,
"step": 2370
},
{
"epoch": 1.05,
"grad_norm": 0.5622068256678303,
"learning_rate": 1.084160566461978e-05,
"loss": 2.0796,
"step": 2375
},
{
"epoch": 1.05,
"grad_norm": 0.5517104309237052,
"learning_rate": 1.080308511707527e-05,
"loss": 2.0846,
"step": 2380
},
{
"epoch": 1.06,
"grad_norm": 0.5691869608893864,
"learning_rate": 1.0764552571957999e-05,
"loss": 2.0705,
"step": 2385
},
{
"epoch": 1.06,
"grad_norm": 0.5906122598465869,
"learning_rate": 1.0726008604919296e-05,
"loss": 2.0716,
"step": 2390
},
{
"epoch": 1.06,
"grad_norm": 0.6040600949683979,
"learning_rate": 1.0687453791781122e-05,
"loss": 2.0883,
"step": 2395
},
{
"epoch": 1.06,
"grad_norm": 0.5486232408039229,
"learning_rate": 1.0648888708527481e-05,
"loss": 2.085,
"step": 2400
},
{
"epoch": 1.07,
"grad_norm": 0.5949932847671062,
"learning_rate": 1.0610313931295793e-05,
"loss": 2.0815,
"step": 2405
},
{
"epoch": 1.07,
"grad_norm": 0.6081774979729646,
"learning_rate": 1.0571730036368308e-05,
"loss": 2.0563,
"step": 2410
},
{
"epoch": 1.07,
"grad_norm": 0.6017182128552543,
"learning_rate": 1.0533137600163488e-05,
"loss": 2.0588,
"step": 2415
},
{
"epoch": 1.07,
"grad_norm": 0.5694418009932043,
"learning_rate": 1.0494537199227393e-05,
"loss": 2.0751,
"step": 2420
},
{
"epoch": 1.07,
"grad_norm": 0.6067470145298162,
"learning_rate": 1.045592941022507e-05,
"loss": 2.0769,
"step": 2425
},
{
"epoch": 1.08,
"grad_norm": 0.5806994961992088,
"learning_rate": 1.0417314809931945e-05,
"loss": 2.0591,
"step": 2430
},
{
"epoch": 1.08,
"grad_norm": 0.6212343107425653,
"learning_rate": 1.0378693975225194e-05,
"loss": 2.0534,
"step": 2435
},
{
"epoch": 1.08,
"grad_norm": 0.5769468166281586,
"learning_rate": 1.0340067483075135e-05,
"loss": 2.0673,
"step": 2440
},
{
"epoch": 1.08,
"grad_norm": 0.5920465709161002,
"learning_rate": 1.0301435910536603e-05,
"loss": 2.1176,
"step": 2445
},
{
"epoch": 1.09,
"grad_norm": 0.5559893615735687,
"learning_rate": 1.0262799834740334e-05,
"loss": 2.0678,
"step": 2450
},
{
"epoch": 1.09,
"grad_norm": 0.5972270297844814,
"learning_rate": 1.0224159832884335e-05,
"loss": 2.1061,
"step": 2455
},
{
"epoch": 1.09,
"grad_norm": 0.5714795009191994,
"learning_rate": 1.0185516482225264e-05,
"loss": 2.0747,
"step": 2460
},
{
"epoch": 1.09,
"grad_norm": 0.56942096378165,
"learning_rate": 1.0146870360069819e-05,
"loss": 2.0946,
"step": 2465
},
{
"epoch": 1.09,
"grad_norm": 0.5795532742140115,
"learning_rate": 1.0108222043766087e-05,
"loss": 2.0746,
"step": 2470
},
{
"epoch": 1.1,
"grad_norm": 0.5673003371764763,
"learning_rate": 1.0069572110694946e-05,
"loss": 2.0384,
"step": 2475
},
{
"epoch": 1.1,
"grad_norm": 0.5660961848513918,
"learning_rate": 1.0030921138261422e-05,
"loss": 2.0569,
"step": 2480
},
{
"epoch": 1.1,
"grad_norm": 0.6185753115925553,
"learning_rate": 9.992269703886073e-06,
"loss": 2.086,
"step": 2485
},
{
"epoch": 1.1,
"grad_norm": 0.5881572329702438,
"learning_rate": 9.953618384996353e-06,
"loss": 2.0678,
"step": 2490
},
{
"epoch": 1.1,
"grad_norm": 0.6571726406062589,
"learning_rate": 9.914967759017993e-06,
"loss": 2.055,
"step": 2495
},
{
"epoch": 1.11,
"grad_norm": 0.5715335900934512,
"learning_rate": 9.876318403366371e-06,
"loss": 2.0677,
"step": 2500
},
{
"epoch": 1.11,
"grad_norm": 0.548347975342686,
"learning_rate": 9.83767089543789e-06,
"loss": 2.0935,
"step": 2505
},
{
"epoch": 1.11,
"grad_norm": 0.5744290577974958,
"learning_rate": 9.79902581260135e-06,
"loss": 2.0547,
"step": 2510
},
{
"epoch": 1.11,
"grad_norm": 0.6125826975457744,
"learning_rate": 9.76038373218931e-06,
"loss": 2.1096,
"step": 2515
},
{
"epoch": 1.12,
"grad_norm": 0.5512598583665123,
"learning_rate": 9.721745231489499e-06,
"loss": 2.0465,
"step": 2520
},
{
"epoch": 1.12,
"grad_norm": 0.5774283360356032,
"learning_rate": 9.683110887736134e-06,
"loss": 2.0941,
"step": 2525
},
{
"epoch": 1.12,
"grad_norm": 0.5815933223982568,
"learning_rate": 9.644481278101366e-06,
"loss": 2.096,
"step": 2530
},
{
"epoch": 1.12,
"grad_norm": 0.5942345003772158,
"learning_rate": 9.60585697968659e-06,
"loss": 2.0782,
"step": 2535
},
{
"epoch": 1.12,
"grad_norm": 0.5766163061927374,
"learning_rate": 9.567238569513872e-06,
"loss": 2.0514,
"step": 2540
},
{
"epoch": 1.13,
"grad_norm": 0.5720509875085317,
"learning_rate": 9.52862662451731e-06,
"loss": 2.074,
"step": 2545
},
{
"epoch": 1.13,
"grad_norm": 0.6062895087250578,
"learning_rate": 9.49002172153442e-06,
"loss": 2.0826,
"step": 2550
},
{
"epoch": 1.13,
"grad_norm": 0.5464835664934657,
"learning_rate": 9.451424437297494e-06,
"loss": 2.0715,
"step": 2555
},
{
"epoch": 1.13,
"grad_norm": 0.58010721708805,
"learning_rate": 9.412835348425038e-06,
"loss": 2.0848,
"step": 2560
},
{
"epoch": 1.14,
"grad_norm": 0.6095257828645734,
"learning_rate": 9.374255031413089e-06,
"loss": 2.0787,
"step": 2565
},
{
"epoch": 1.14,
"grad_norm": 0.5738051006138174,
"learning_rate": 9.335684062626669e-06,
"loss": 2.0726,
"step": 2570
},
{
"epoch": 1.14,
"grad_norm": 0.5801595979674933,
"learning_rate": 9.297123018291122e-06,
"loss": 2.0532,
"step": 2575
},
{
"epoch": 1.14,
"grad_norm": 0.5864959588907394,
"learning_rate": 9.25857247448354e-06,
"loss": 2.0759,
"step": 2580
},
{
"epoch": 1.14,
"grad_norm": 0.5835867474432922,
"learning_rate": 9.220033007124135e-06,
"loss": 2.0682,
"step": 2585
},
{
"epoch": 1.15,
"grad_norm": 0.582672601519007,
"learning_rate": 9.181505191967656e-06,
"loss": 2.0666,
"step": 2590
},
{
"epoch": 1.15,
"grad_norm": 0.5589435147744359,
"learning_rate": 9.142989604594757e-06,
"loss": 2.0635,
"step": 2595
},
{
"epoch": 1.15,
"grad_norm": 0.5980401873723482,
"learning_rate": 9.104486820403438e-06,
"loss": 2.0918,
"step": 2600
},
{
"epoch": 1.15,
"grad_norm": 0.5659083301199509,
"learning_rate": 9.06599741460041e-06,
"loss": 2.0836,
"step": 2605
},
{
"epoch": 1.16,
"grad_norm": 0.5443037882725329,
"learning_rate": 9.027521962192532e-06,
"loss": 2.0775,
"step": 2610
},
{
"epoch": 1.16,
"grad_norm": 0.569427600638047,
"learning_rate": 8.989061037978196e-06,
"loss": 2.0858,
"step": 2615
},
{
"epoch": 1.16,
"grad_norm": 0.5639422904028256,
"learning_rate": 8.950615216538765e-06,
"loss": 2.0996,
"step": 2620
},
{
"epoch": 1.16,
"grad_norm": 0.559083877603007,
"learning_rate": 8.912185072229974e-06,
"loss": 2.0802,
"step": 2625
},
{
"epoch": 1.16,
"grad_norm": 0.587323788912347,
"learning_rate": 8.873771179173339e-06,
"loss": 2.057,
"step": 2630
},
{
"epoch": 1.17,
"grad_norm": 0.6299496008271722,
"learning_rate": 8.83537411124761e-06,
"loss": 2.081,
"step": 2635
},
{
"epoch": 1.17,
"grad_norm": 0.5555576270966408,
"learning_rate": 8.796994442080167e-06,
"loss": 2.0803,
"step": 2640
},
{
"epoch": 1.17,
"grad_norm": 0.5881760155478749,
"learning_rate": 8.758632745038478e-06,
"loss": 2.0818,
"step": 2645
},
{
"epoch": 1.17,
"grad_norm": 0.5870501138118053,
"learning_rate": 8.720289593221502e-06,
"loss": 2.0756,
"step": 2650
},
{
"epoch": 1.18,
"grad_norm": 0.5778813704774673,
"learning_rate": 8.681965559451159e-06,
"loss": 2.0318,
"step": 2655
},
{
"epoch": 1.18,
"grad_norm": 0.5772671107773564,
"learning_rate": 8.643661216263744e-06,
"loss": 2.0614,
"step": 2660
},
{
"epoch": 1.18,
"grad_norm": 0.5522481417043467,
"learning_rate": 8.605377135901404e-06,
"loss": 2.0923,
"step": 2665
},
{
"epoch": 1.18,
"grad_norm": 0.57308361221774,
"learning_rate": 8.567113890303554e-06,
"loss": 2.0794,
"step": 2670
},
{
"epoch": 1.18,
"grad_norm": 0.5603004739786827,
"learning_rate": 8.52887205109837e-06,
"loss": 2.083,
"step": 2675
},
{
"epoch": 1.19,
"grad_norm": 0.5668872008035165,
"learning_rate": 8.490652189594212e-06,
"loss": 2.0607,
"step": 2680
},
{
"epoch": 1.19,
"grad_norm": 0.6151475257151516,
"learning_rate": 8.452454876771124e-06,
"loss": 2.066,
"step": 2685
},
{
"epoch": 1.19,
"grad_norm": 0.6039043271893284,
"learning_rate": 8.414280683272273e-06,
"loss": 2.1106,
"step": 2690
},
{
"epoch": 1.19,
"grad_norm": 0.5748765249393106,
"learning_rate": 8.376130179395452e-06,
"loss": 2.0774,
"step": 2695
},
{
"epoch": 1.2,
"grad_norm": 0.5345583316596325,
"learning_rate": 8.338003935084531e-06,
"loss": 2.0777,
"step": 2700
},
{
"epoch": 1.2,
"grad_norm": 0.5800033990462752,
"learning_rate": 8.299902519920977e-06,
"loss": 2.0762,
"step": 2705
},
{
"epoch": 1.2,
"grad_norm": 0.5531899855929184,
"learning_rate": 8.2618265031153e-06,
"loss": 2.0781,
"step": 2710
},
{
"epoch": 1.2,
"grad_norm": 0.5497023431897217,
"learning_rate": 8.223776453498599e-06,
"loss": 2.0712,
"step": 2715
},
{
"epoch": 1.2,
"grad_norm": 0.5690625479261334,
"learning_rate": 8.185752939514026e-06,
"loss": 2.0506,
"step": 2720
},
{
"epoch": 1.21,
"grad_norm": 0.5685550133194737,
"learning_rate": 8.147756529208318e-06,
"loss": 2.0707,
"step": 2725
},
{
"epoch": 1.21,
"grad_norm": 0.5803465434238907,
"learning_rate": 8.109787790223285e-06,
"loss": 2.0554,
"step": 2730
},
{
"epoch": 1.21,
"grad_norm": 0.5868948643598184,
"learning_rate": 8.071847289787367e-06,
"loss": 2.0603,
"step": 2735
},
{
"epoch": 1.21,
"grad_norm": 0.6337770079221292,
"learning_rate": 8.033935594707116e-06,
"loss": 2.0704,
"step": 2740
},
{
"epoch": 1.22,
"grad_norm": 0.5980841457342921,
"learning_rate": 7.996053271358764e-06,
"loss": 2.086,
"step": 2745
},
{
"epoch": 1.22,
"grad_norm": 0.5968842386328096,
"learning_rate": 7.958200885679752e-06,
"loss": 2.0863,
"step": 2750
},
{
"epoch": 1.22,
"grad_norm": 0.5626357387598965,
"learning_rate": 7.920379003160255e-06,
"loss": 2.1123,
"step": 2755
},
{
"epoch": 1.22,
"grad_norm": 0.5574742734858402,
"learning_rate": 7.88258818883477e-06,
"loss": 2.0788,
"step": 2760
},
{
"epoch": 1.22,
"grad_norm": 0.5511295833037257,
"learning_rate": 7.844829007273634e-06,
"loss": 2.0744,
"step": 2765
},
{
"epoch": 1.23,
"grad_norm": 0.5869356163120296,
"learning_rate": 7.807102022574631e-06,
"loss": 2.0545,
"step": 2770
},
{
"epoch": 1.23,
"grad_norm": 0.5767351032155685,
"learning_rate": 7.769407798354536e-06,
"loss": 2.0541,
"step": 2775
},
{
"epoch": 1.23,
"grad_norm": 0.5791442235745009,
"learning_rate": 7.7317468977407e-06,
"loss": 2.0827,
"step": 2780
},
{
"epoch": 1.23,
"grad_norm": 0.5588237086312414,
"learning_rate": 7.694119883362644e-06,
"loss": 2.0618,
"step": 2785
},
{
"epoch": 1.24,
"grad_norm": 0.5800923971564081,
"learning_rate": 7.65652731734366e-06,
"loss": 2.0487,
"step": 2790
},
{
"epoch": 1.24,
"grad_norm": 0.5628124232395267,
"learning_rate": 7.618969761292383e-06,
"loss": 2.0837,
"step": 2795
},
{
"epoch": 1.24,
"grad_norm": 0.5978500412812867,
"learning_rate": 7.5814477762944435e-06,
"loss": 2.0985,
"step": 2800
},
{
"epoch": 1.24,
"grad_norm": 0.6001457925270012,
"learning_rate": 7.5439619229040466e-06,
"loss": 2.0687,
"step": 2805
},
{
"epoch": 1.24,
"grad_norm": 0.6079053638123386,
"learning_rate": 7.506512761135627e-06,
"loss": 2.066,
"step": 2810
},
{
"epoch": 1.25,
"grad_norm": 0.5764835459443592,
"learning_rate": 7.4691008504554595e-06,
"loss": 2.0445,
"step": 2815
},
{
"epoch": 1.25,
"grad_norm": 0.563531023838365,
"learning_rate": 7.431726749773322e-06,
"loss": 2.047,
"step": 2820
},
{
"epoch": 1.25,
"grad_norm": 0.5416767146538698,
"learning_rate": 7.394391017434126e-06,
"loss": 2.0826,
"step": 2825
},
{
"epoch": 1.25,
"grad_norm": 0.582913848168042,
"learning_rate": 7.3570942112095955e-06,
"loss": 2.0826,
"step": 2830
},
{
"epoch": 1.26,
"grad_norm": 0.5641936744206886,
"learning_rate": 7.3198368882899095e-06,
"loss": 2.0537,
"step": 2835
},
{
"epoch": 1.26,
"grad_norm": 0.5848179390465393,
"learning_rate": 7.282619605275409e-06,
"loss": 2.0893,
"step": 2840
},
{
"epoch": 1.26,
"grad_norm": 0.6025041914415676,
"learning_rate": 7.245442918168244e-06,
"loss": 2.0556,
"step": 2845
},
{
"epoch": 1.26,
"grad_norm": 0.5684984410452509,
"learning_rate": 7.208307382364111e-06,
"loss": 2.0654,
"step": 2850
},
{
"epoch": 1.26,
"grad_norm": 0.546217043377342,
"learning_rate": 7.1712135526439094e-06,
"loss": 2.0826,
"step": 2855
},
{
"epoch": 1.27,
"grad_norm": 0.5922228462089781,
"learning_rate": 7.134161983165498e-06,
"loss": 2.0731,
"step": 2860
},
{
"epoch": 1.27,
"grad_norm": 0.5561336189629581,
"learning_rate": 7.097153227455379e-06,
"loss": 2.0739,
"step": 2865
},
{
"epoch": 1.27,
"grad_norm": 0.5847418024280782,
"learning_rate": 7.060187838400451e-06,
"loss": 2.0833,
"step": 2870
},
{
"epoch": 1.27,
"grad_norm": 0.561092981787862,
"learning_rate": 7.023266368239745e-06,
"loss": 2.0526,
"step": 2875
},
{
"epoch": 1.28,
"grad_norm": 0.5699358236742911,
"learning_rate": 6.986389368556168e-06,
"loss": 2.093,
"step": 2880
},
{
"epoch": 1.28,
"grad_norm": 0.5725191755205183,
"learning_rate": 6.949557390268272e-06,
"loss": 2.0886,
"step": 2885
},
{
"epoch": 1.28,
"grad_norm": 0.5747295606431873,
"learning_rate": 6.912770983622008e-06,
"loss": 2.0729,
"step": 2890
},
{
"epoch": 1.28,
"grad_norm": 0.5633640605422751,
"learning_rate": 6.87603069818253e-06,
"loss": 2.093,
"step": 2895
},
{
"epoch": 1.28,
"grad_norm": 0.5502893147107966,
"learning_rate": 6.839337082825954e-06,
"loss": 2.0696,
"step": 2900
},
{
"epoch": 1.29,
"grad_norm": 0.5796500181690738,
"learning_rate": 6.802690685731197e-06,
"loss": 2.0582,
"step": 2905
},
{
"epoch": 1.29,
"grad_norm": 0.581876910507113,
"learning_rate": 6.766092054371744e-06,
"loss": 2.0834,
"step": 2910
},
{
"epoch": 1.29,
"grad_norm": 0.5793416737037347,
"learning_rate": 6.729541735507503e-06,
"loss": 2.0992,
"step": 2915
},
{
"epoch": 1.29,
"grad_norm": 0.5561259183840469,
"learning_rate": 6.693040275176623e-06,
"loss": 2.0877,
"step": 2920
},
{
"epoch": 1.3,
"grad_norm": 0.5754576663833085,
"learning_rate": 6.656588218687341e-06,
"loss": 2.0878,
"step": 2925
},
{
"epoch": 1.3,
"grad_norm": 0.5697074500556285,
"learning_rate": 6.62018611060982e-06,
"loss": 2.0462,
"step": 2930
},
{
"epoch": 1.3,
"grad_norm": 0.5366439976587066,
"learning_rate": 6.583834494768042e-06,
"loss": 2.082,
"step": 2935
},
{
"epoch": 1.3,
"grad_norm": 0.6182439537995539,
"learning_rate": 6.547533914231654e-06,
"loss": 2.0679,
"step": 2940
},
{
"epoch": 1.3,
"grad_norm": 0.5944124890251387,
"learning_rate": 6.511284911307883e-06,
"loss": 2.0928,
"step": 2945
},
{
"epoch": 1.31,
"grad_norm": 0.5546584590408465,
"learning_rate": 6.475088027533399e-06,
"loss": 2.0858,
"step": 2950
},
{
"epoch": 1.31,
"grad_norm": 0.5686443398440841,
"learning_rate": 6.4389438036662686e-06,
"loss": 2.0803,
"step": 2955
},
{
"epoch": 1.31,
"grad_norm": 0.5961831366482591,
"learning_rate": 6.40285277967784e-06,
"loss": 2.0833,
"step": 2960
},
{
"epoch": 1.31,
"grad_norm": 0.5651841799240455,
"learning_rate": 6.3668154947446905e-06,
"loss": 2.0445,
"step": 2965
},
{
"epoch": 1.32,
"grad_norm": 0.6123054895183868,
"learning_rate": 6.330832487240573e-06,
"loss": 2.0647,
"step": 2970
},
{
"epoch": 1.32,
"grad_norm": 0.5806081174528559,
"learning_rate": 6.294904294728375e-06,
"loss": 2.0718,
"step": 2975
},
{
"epoch": 1.32,
"grad_norm": 0.5521295304759998,
"learning_rate": 6.2590314539520695e-06,
"loss": 2.0932,
"step": 2980
},
{
"epoch": 1.32,
"grad_norm": 0.5668162463635292,
"learning_rate": 6.223214500828729e-06,
"loss": 2.0654,
"step": 2985
},
{
"epoch": 1.32,
"grad_norm": 0.5965775515194636,
"learning_rate": 6.187453970440484e-06,
"loss": 2.063,
"step": 2990
},
{
"epoch": 1.33,
"grad_norm": 0.5939935322366416,
"learning_rate": 6.151750397026556e-06,
"loss": 2.1052,
"step": 2995
},
{
"epoch": 1.33,
"grad_norm": 0.5951691024025886,
"learning_rate": 6.116104313975267e-06,
"loss": 2.068,
"step": 3000
},
{
"epoch": 1.33,
"grad_norm": 0.5594255289433913,
"learning_rate": 6.080516253816055e-06,
"loss": 2.0449,
"step": 3005
},
{
"epoch": 1.33,
"grad_norm": 0.5556128864666243,
"learning_rate": 6.044986748211556e-06,
"loss": 2.0636,
"step": 3010
},
{
"epoch": 1.34,
"grad_norm": 0.5716761487716175,
"learning_rate": 6.009516327949621e-06,
"loss": 2.0581,
"step": 3015
},
{
"epoch": 1.34,
"grad_norm": 0.5527433996020571,
"learning_rate": 5.974105522935416e-06,
"loss": 2.0743,
"step": 3020
},
{
"epoch": 1.34,
"grad_norm": 0.5648449810320595,
"learning_rate": 5.93875486218348e-06,
"loss": 2.0513,
"step": 3025
},
{
"epoch": 1.34,
"grad_norm": 0.5805232845619748,
"learning_rate": 5.903464873809854e-06,
"loss": 2.0575,
"step": 3030
},
{
"epoch": 1.34,
"grad_norm": 0.6068918356420056,
"learning_rate": 5.868236085024153e-06,
"loss": 2.05,
"step": 3035
},
{
"epoch": 1.35,
"grad_norm": 0.6034817535792762,
"learning_rate": 5.833069022121727e-06,
"loss": 2.094,
"step": 3040
},
{
"epoch": 1.35,
"grad_norm": 0.5595943720875732,
"learning_rate": 5.797964210475766e-06,
"loss": 2.0336,
"step": 3045
},
{
"epoch": 1.35,
"grad_norm": 0.5759026105454746,
"learning_rate": 5.762922174529482e-06,
"loss": 2.0562,
"step": 3050
},
{
"epoch": 1.35,
"grad_norm": 0.5872052488648046,
"learning_rate": 5.7279434377882435e-06,
"loss": 2.0935,
"step": 3055
},
{
"epoch": 1.36,
"grad_norm": 0.5834686046944266,
"learning_rate": 5.693028522811783e-06,
"loss": 2.0347,
"step": 3060
},
{
"epoch": 1.36,
"grad_norm": 0.5588014570770753,
"learning_rate": 5.658177951206367e-06,
"loss": 2.0714,
"step": 3065
},
{
"epoch": 1.36,
"grad_norm": 0.5728435150971173,
"learning_rate": 5.6233922436170205e-06,
"loss": 2.0538,
"step": 3070
},
{
"epoch": 1.36,
"grad_norm": 0.5682184169928125,
"learning_rate": 5.588671919719735e-06,
"loss": 2.0957,
"step": 3075
},
{
"epoch": 1.36,
"grad_norm": 0.5818792890840907,
"learning_rate": 5.5540174982137185e-06,
"loss": 2.0547,
"step": 3080
},
{
"epoch": 1.37,
"grad_norm": 0.5691595531739335,
"learning_rate": 5.519429496813637e-06,
"loss": 2.0713,
"step": 3085
},
{
"epoch": 1.37,
"grad_norm": 0.5508929901268959,
"learning_rate": 5.484908432241889e-06,
"loss": 2.0676,
"step": 3090
},
{
"epoch": 1.37,
"grad_norm": 0.623727381666533,
"learning_rate": 5.4504548202208644e-06,
"loss": 2.0541,
"step": 3095
},
{
"epoch": 1.37,
"grad_norm": 0.5839742138195066,
"learning_rate": 5.416069175465274e-06,
"loss": 2.0829,
"step": 3100
},
{
"epoch": 1.38,
"grad_norm": 0.5784419367187174,
"learning_rate": 5.381752011674426e-06,
"loss": 2.1057,
"step": 3105
},
{
"epoch": 1.38,
"grad_norm": 0.5896287507439384,
"learning_rate": 5.347503841524582e-06,
"loss": 2.0712,
"step": 3110
},
{
"epoch": 1.38,
"grad_norm": 0.5589846377178616,
"learning_rate": 5.313325176661268e-06,
"loss": 2.0614,
"step": 3115
},
{
"epoch": 1.38,
"grad_norm": 0.5853751801118957,
"learning_rate": 5.279216527691657e-06,
"loss": 2.0914,
"step": 3120
},
{
"epoch": 1.38,
"grad_norm": 0.5558295198949219,
"learning_rate": 5.24517840417693e-06,
"loss": 2.0465,
"step": 3125
},
{
"epoch": 1.39,
"grad_norm": 0.5745361537739155,
"learning_rate": 5.211211314624653e-06,
"loss": 2.0819,
"step": 3130
},
{
"epoch": 1.39,
"grad_norm": 0.6007449897126004,
"learning_rate": 5.177315766481204e-06,
"loss": 2.0512,
"step": 3135
},
{
"epoch": 1.39,
"grad_norm": 0.5767872462026846,
"learning_rate": 5.143492266124164e-06,
"loss": 2.0962,
"step": 3140
},
{
"epoch": 1.39,
"grad_norm": 0.5407472022959849,
"learning_rate": 5.1097413188547805e-06,
"loss": 2.0655,
"step": 3145
},
{
"epoch": 1.4,
"grad_norm": 0.5506224595695842,
"learning_rate": 5.076063428890393e-06,
"loss": 2.0726,
"step": 3150
},
{
"epoch": 1.4,
"grad_norm": 0.5562871589781871,
"learning_rate": 5.042459099356925e-06,
"loss": 2.064,
"step": 3155
},
{
"epoch": 1.4,
"grad_norm": 0.575851586919859,
"learning_rate": 5.008928832281339e-06,
"loss": 2.0561,
"step": 3160
},
{
"epoch": 1.4,
"grad_norm": 0.5532207076708013,
"learning_rate": 4.975473128584167e-06,
"loss": 2.0736,
"step": 3165
},
{
"epoch": 1.4,
"grad_norm": 0.5837078474313633,
"learning_rate": 4.942092488072e-06,
"loss": 2.0689,
"step": 3170
},
{
"epoch": 1.41,
"grad_norm": 0.6041638225252354,
"learning_rate": 4.908787409430044e-06,
"loss": 2.0547,
"step": 3175
},
{
"epoch": 1.41,
"grad_norm": 0.5427583510220225,
"learning_rate": 4.875558390214652e-06,
"loss": 2.0631,
"step": 3180
},
{
"epoch": 1.41,
"grad_norm": 0.5602056997077802,
"learning_rate": 4.842405926845906e-06,
"loss": 2.0715,
"step": 3185
},
{
"epoch": 1.41,
"grad_norm": 0.5730490717846765,
"learning_rate": 4.8093305146001815e-06,
"loss": 2.0466,
"step": 3190
},
{
"epoch": 1.41,
"grad_norm": 0.5773554022909284,
"learning_rate": 4.776332647602774e-06,
"loss": 2.0901,
"step": 3195
},
{
"epoch": 1.42,
"grad_norm": 0.5569725713844008,
"learning_rate": 4.743412818820488e-06,
"loss": 2.0482,
"step": 3200
},
{
"epoch": 1.42,
"grad_norm": 0.5841173326271132,
"learning_rate": 4.710571520054302e-06,
"loss": 2.0814,
"step": 3205
},
{
"epoch": 1.42,
"grad_norm": 0.5402992291442881,
"learning_rate": 4.677809241931994e-06,
"loss": 2.058,
"step": 3210
},
{
"epoch": 1.42,
"grad_norm": 0.553440874328099,
"learning_rate": 4.645126473900839e-06,
"loss": 2.0671,
"step": 3215
},
{
"epoch": 1.43,
"grad_norm": 0.545373214820827,
"learning_rate": 4.612523704220264e-06,
"loss": 2.0669,
"step": 3220
},
{
"epoch": 1.43,
"grad_norm": 0.6049521826842714,
"learning_rate": 4.580001419954593e-06,
"loss": 2.0619,
"step": 3225
},
{
"epoch": 1.43,
"grad_norm": 0.5753870878962354,
"learning_rate": 4.5475601069657304e-06,
"loss": 2.0721,
"step": 3230
},
{
"epoch": 1.43,
"grad_norm": 0.5802969500313303,
"learning_rate": 4.51520024990594e-06,
"loss": 2.0501,
"step": 3235
},
{
"epoch": 1.43,
"grad_norm": 0.6097750692153683,
"learning_rate": 4.482922332210569e-06,
"loss": 2.0671,
"step": 3240
},
{
"epoch": 1.44,
"grad_norm": 0.5663413800097361,
"learning_rate": 4.45072683609086e-06,
"loss": 2.022,
"step": 3245
},
{
"epoch": 1.44,
"grad_norm": 0.5642205445629019,
"learning_rate": 4.418614242526717e-06,
"loss": 2.0846,
"step": 3250
},
{
"epoch": 1.44,
"grad_norm": 0.5683716856188827,
"learning_rate": 4.386585031259541e-06,
"loss": 2.0524,
"step": 3255
},
{
"epoch": 1.44,
"grad_norm": 0.5612207538426751,
"learning_rate": 4.354639680785059e-06,
"loss": 2.0855,
"step": 3260
},
{
"epoch": 1.45,
"grad_norm": 0.5589735070253288,
"learning_rate": 4.322778668346158e-06,
"loss": 2.0993,
"step": 3265
},
{
"epoch": 1.45,
"grad_norm": 0.5961736194374415,
"learning_rate": 4.291002469925782e-06,
"loss": 2.0652,
"step": 3270
},
{
"epoch": 1.45,
"grad_norm": 0.5468862260839046,
"learning_rate": 4.259311560239804e-06,
"loss": 2.0663,
"step": 3275
},
{
"epoch": 1.45,
"grad_norm": 0.5626783907010559,
"learning_rate": 4.227706412729943e-06,
"loss": 2.0654,
"step": 3280
},
{
"epoch": 1.45,
"grad_norm": 0.5518710687381316,
"learning_rate": 4.196187499556672e-06,
"loss": 2.0689,
"step": 3285
},
{
"epoch": 1.46,
"grad_norm": 0.5474120579829457,
"learning_rate": 4.1647552915922e-06,
"loss": 2.0608,
"step": 3290
},
{
"epoch": 1.46,
"grad_norm": 0.5607320844944361,
"learning_rate": 4.133410258413394e-06,
"loss": 2.067,
"step": 3295
},
{
"epoch": 1.46,
"grad_norm": 0.5597309185021546,
"learning_rate": 4.1021528682948064e-06,
"loss": 2.0355,
"step": 3300
},
{
"epoch": 1.46,
"grad_norm": 0.5609703086845756,
"learning_rate": 4.070983588201643e-06,
"loss": 2.0601,
"step": 3305
},
{
"epoch": 1.47,
"grad_norm": 0.5445535283201641,
"learning_rate": 4.039902883782814e-06,
"loss": 2.089,
"step": 3310
},
{
"epoch": 1.47,
"grad_norm": 0.5540723722716393,
"learning_rate": 4.008911219363956e-06,
"loss": 2.0984,
"step": 3315
},
{
"epoch": 1.47,
"grad_norm": 0.5506545037691054,
"learning_rate": 3.978009057940518e-06,
"loss": 2.0436,
"step": 3320
},
{
"epoch": 1.47,
"grad_norm": 0.5574714008910903,
"learning_rate": 3.947196861170818e-06,
"loss": 2.0586,
"step": 3325
},
{
"epoch": 1.47,
"grad_norm": 0.5561911064425956,
"learning_rate": 3.916475089369175e-06,
"loss": 2.0691,
"step": 3330
},
{
"epoch": 1.48,
"grad_norm": 0.5396289101406276,
"learning_rate": 3.8858442014990005e-06,
"loss": 2.0677,
"step": 3335
},
{
"epoch": 1.48,
"grad_norm": 0.5523532279306679,
"learning_rate": 3.855304655165978e-06,
"loss": 2.0833,
"step": 3340
},
{
"epoch": 1.48,
"grad_norm": 0.5539896811345815,
"learning_rate": 3.824856906611188e-06,
"loss": 2.0725,
"step": 3345
},
{
"epoch": 1.48,
"grad_norm": 0.5622707232991089,
"learning_rate": 3.794501410704331e-06,
"loss": 2.0606,
"step": 3350
},
{
"epoch": 1.49,
"grad_norm": 0.5678976381599128,
"learning_rate": 3.764238620936892e-06,
"loss": 2.0515,
"step": 3355
},
{
"epoch": 1.49,
"grad_norm": 0.5365268136609684,
"learning_rate": 3.7340689894154023e-06,
"loss": 2.04,
"step": 3360
},
{
"epoch": 1.49,
"grad_norm": 0.5710396071781995,
"learning_rate": 3.7039929668546636e-06,
"loss": 2.0666,
"step": 3365
},
{
"epoch": 1.49,
"grad_norm": 0.5360724949830544,
"learning_rate": 3.674011002571022e-06,
"loss": 2.0497,
"step": 3370
},
{
"epoch": 1.49,
"grad_norm": 0.5958390666640244,
"learning_rate": 3.6441235444756474e-06,
"loss": 2.0623,
"step": 3375
},
{
"epoch": 1.5,
"grad_norm": 0.553389164628562,
"learning_rate": 3.6143310390678544e-06,
"loss": 2.0507,
"step": 3380
},
{
"epoch": 1.5,
"grad_norm": 0.5592986136765083,
"learning_rate": 3.5846339314284283e-06,
"loss": 2.0778,
"step": 3385
},
{
"epoch": 1.5,
"grad_norm": 0.5691172389434093,
"learning_rate": 3.555032665212964e-06,
"loss": 2.0992,
"step": 3390
},
{
"epoch": 1.5,
"grad_norm": 0.5585433675229039,
"learning_rate": 3.5255276826452568e-06,
"loss": 2.0719,
"step": 3395
},
{
"epoch": 1.51,
"grad_norm": 0.5636915672997965,
"learning_rate": 3.496119424510678e-06,
"loss": 2.0416,
"step": 3400
},
{
"epoch": 1.51,
"grad_norm": 0.583439274066355,
"learning_rate": 3.466808330149607e-06,
"loss": 2.0235,
"step": 3405
},
{
"epoch": 1.51,
"grad_norm": 0.5827842848360779,
"learning_rate": 3.4375948374508516e-06,
"loss": 2.0517,
"step": 3410
},
{
"epoch": 1.51,
"grad_norm": 0.5496112187259576,
"learning_rate": 3.4084793828451212e-06,
"loss": 2.0551,
"step": 3415
},
{
"epoch": 1.51,
"grad_norm": 0.5633755810039756,
"learning_rate": 3.3794624012984913e-06,
"loss": 2.0731,
"step": 3420
},
{
"epoch": 1.52,
"grad_norm": 0.5575765211654279,
"learning_rate": 3.3505443263059225e-06,
"loss": 2.0528,
"step": 3425
},
{
"epoch": 1.52,
"grad_norm": 0.5745688262162019,
"learning_rate": 3.3217255898847635e-06,
"loss": 2.0758,
"step": 3430
},
{
"epoch": 1.52,
"grad_norm": 0.5662710330228317,
"learning_rate": 3.2930066225683245e-06,
"loss": 2.0781,
"step": 3435
},
{
"epoch": 1.52,
"grad_norm": 0.6041542971781206,
"learning_rate": 3.2643878533994145e-06,
"loss": 2.0621,
"step": 3440
},
{
"epoch": 1.53,
"grad_norm": 0.5748186592886745,
"learning_rate": 3.2358697099239587e-06,
"loss": 2.0384,
"step": 3445
},
{
"epoch": 1.53,
"grad_norm": 0.5493610513139873,
"learning_rate": 3.20745261818459e-06,
"loss": 2.0663,
"step": 3450
},
{
"epoch": 1.53,
"grad_norm": 0.5590458103327943,
"learning_rate": 3.1791370027143e-06,
"loss": 2.0628,
"step": 3455
},
{
"epoch": 1.53,
"grad_norm": 0.5593257111047194,
"learning_rate": 3.1509232865300886e-06,
"loss": 2.0592,
"step": 3460
},
{
"epoch": 1.53,
"grad_norm": 0.5512023020989867,
"learning_rate": 3.1228118911266492e-06,
"loss": 2.0619,
"step": 3465
},
{
"epoch": 1.54,
"grad_norm": 0.5391486301602383,
"learning_rate": 3.09480323647006e-06,
"loss": 2.0498,
"step": 3470
},
{
"epoch": 1.54,
"grad_norm": 0.622933527268558,
"learning_rate": 3.0668977409915313e-06,
"loss": 2.0695,
"step": 3475
},
{
"epoch": 1.54,
"grad_norm": 0.5450395068456455,
"learning_rate": 3.039095821581127e-06,
"loss": 2.0656,
"step": 3480
},
{
"epoch": 1.54,
"grad_norm": 0.5407141382317134,
"learning_rate": 3.011397893581568e-06,
"loss": 2.0679,
"step": 3485
},
{
"epoch": 1.55,
"grad_norm": 0.5541764677624216,
"learning_rate": 2.983804370781996e-06,
"loss": 2.063,
"step": 3490
},
{
"epoch": 1.55,
"grad_norm": 0.5688327056067299,
"learning_rate": 2.9563156654118185e-06,
"loss": 2.0924,
"step": 3495
},
{
"epoch": 1.55,
"grad_norm": 0.5522737481894859,
"learning_rate": 2.9289321881345257e-06,
"loss": 2.0597,
"step": 3500
},
{
"epoch": 1.55,
"grad_norm": 0.5722648782278515,
"learning_rate": 2.9016543480415792e-06,
"loss": 2.0453,
"step": 3505
},
{
"epoch": 1.55,
"grad_norm": 0.5743765534496285,
"learning_rate": 2.8744825526462882e-06,
"loss": 2.0877,
"step": 3510
},
{
"epoch": 1.56,
"grad_norm": 0.544940930796575,
"learning_rate": 2.847417207877714e-06,
"loss": 2.0659,
"step": 3515
},
{
"epoch": 1.56,
"grad_norm": 0.5274078516290224,
"learning_rate": 2.8204587180746256e-06,
"loss": 2.069,
"step": 3520
},
{
"epoch": 1.56,
"grad_norm": 0.5784755902482993,
"learning_rate": 2.793607485979435e-06,
"loss": 2.0918,
"step": 3525
},
{
"epoch": 1.56,
"grad_norm": 0.5522058014066434,
"learning_rate": 2.7668639127322084e-06,
"loss": 2.0692,
"step": 3530
},
{
"epoch": 1.57,
"grad_norm": 0.5504005007969354,
"learning_rate": 2.7402283978646436e-06,
"loss": 2.0503,
"step": 3535
},
{
"epoch": 1.57,
"grad_norm": 0.5611105448692179,
"learning_rate": 2.713701339294129e-06,
"loss": 2.0657,
"step": 3540
},
{
"epoch": 1.57,
"grad_norm": 0.5317527100697321,
"learning_rate": 2.687283133317774e-06,
"loss": 2.0568,
"step": 3545
},
{
"epoch": 1.57,
"grad_norm": 0.5959022387317062,
"learning_rate": 2.66097417460651e-06,
"loss": 2.0666,
"step": 3550
},
{
"epoch": 1.57,
"grad_norm": 0.5632200379233703,
"learning_rate": 2.6347748561991815e-06,
"loss": 2.0527,
"step": 3555
},
{
"epoch": 1.58,
"grad_norm": 0.5627985046203124,
"learning_rate": 2.6086855694966795e-06,
"loss": 2.0393,
"step": 3560
},
{
"epoch": 1.58,
"grad_norm": 0.5567175923283644,
"learning_rate": 2.5827067042560848e-06,
"loss": 2.0573,
"step": 3565
},
{
"epoch": 1.58,
"grad_norm": 0.5624238851932674,
"learning_rate": 2.5568386485848663e-06,
"loss": 2.0781,
"step": 3570
},
{
"epoch": 1.58,
"grad_norm": 0.5899818620849797,
"learning_rate": 2.5310817889350526e-06,
"loss": 2.0802,
"step": 3575
},
{
"epoch": 1.59,
"grad_norm": 0.565808049439973,
"learning_rate": 2.505436510097494e-06,
"loss": 2.0763,
"step": 3580
},
{
"epoch": 1.59,
"grad_norm": 0.5451813179224835,
"learning_rate": 2.4799031951960784e-06,
"loss": 2.0757,
"step": 3585
},
{
"epoch": 1.59,
"grad_norm": 0.6298493519361762,
"learning_rate": 2.45448222568204e-06,
"loss": 2.0677,
"step": 3590
},
{
"epoch": 1.59,
"grad_norm": 0.5672795408096756,
"learning_rate": 2.4291739813282324e-06,
"loss": 2.0611,
"step": 3595
},
{
"epoch": 1.59,
"grad_norm": 0.5898832484255829,
"learning_rate": 2.4039788402234787e-06,
"loss": 2.0285,
"step": 3600
},
{
"epoch": 1.6,
"grad_norm": 0.5530865907568642,
"learning_rate": 2.3788971787669023e-06,
"loss": 2.0706,
"step": 3605
},
{
"epoch": 1.6,
"grad_norm": 0.5488385468386288,
"learning_rate": 2.3539293716623268e-06,
"loss": 2.0903,
"step": 3610
},
{
"epoch": 1.6,
"grad_norm": 0.5379371223828762,
"learning_rate": 2.3290757919126516e-06,
"loss": 2.063,
"step": 3615
},
{
"epoch": 1.6,
"grad_norm": 0.5541299096624074,
"learning_rate": 2.304336810814305e-06,
"loss": 2.0659,
"step": 3620
},
{
"epoch": 1.61,
"grad_norm": 0.558959634492535,
"learning_rate": 2.2797127979516742e-06,
"loss": 2.0731,
"step": 3625
},
{
"epoch": 1.61,
"grad_norm": 0.5442632662343785,
"learning_rate": 2.2552041211916052e-06,
"loss": 2.0491,
"step": 3630
},
{
"epoch": 1.61,
"grad_norm": 0.6002594288436192,
"learning_rate": 2.230811146677896e-06,
"loss": 2.0719,
"step": 3635
},
{
"epoch": 1.61,
"grad_norm": 0.5566122271362351,
"learning_rate": 2.2065342388258193e-06,
"loss": 2.0308,
"step": 3640
},
{
"epoch": 1.61,
"grad_norm": 0.5634666610963083,
"learning_rate": 2.182373760316694e-06,
"loss": 2.0256,
"step": 3645
},
{
"epoch": 1.62,
"grad_norm": 0.5627666820890318,
"learning_rate": 2.1583300720924604e-06,
"loss": 2.0509,
"step": 3650
},
{
"epoch": 1.62,
"grad_norm": 0.5514703816145297,
"learning_rate": 2.1344035333502878e-06,
"loss": 2.0331,
"step": 3655
},
{
"epoch": 1.62,
"grad_norm": 0.5962117091888667,
"learning_rate": 2.1105945015371985e-06,
"loss": 2.0807,
"step": 3660
},
{
"epoch": 1.62,
"grad_norm": 0.5533663276007398,
"learning_rate": 2.086903332344752e-06,
"loss": 2.0603,
"step": 3665
},
{
"epoch": 1.63,
"grad_norm": 0.5599260944032467,
"learning_rate": 2.063330379703702e-06,
"loss": 2.0796,
"step": 3670
},
{
"epoch": 1.63,
"grad_norm": 0.5385163353261295,
"learning_rate": 2.039875995778735e-06,
"loss": 2.0516,
"step": 3675
},
{
"epoch": 1.63,
"grad_norm": 0.544344728664808,
"learning_rate": 2.016540530963188e-06,
"loss": 2.0581,
"step": 3680
},
{
"epoch": 1.63,
"grad_norm": 0.6042023278839296,
"learning_rate": 1.9933243338738328e-06,
"loss": 2.0652,
"step": 3685
},
{
"epoch": 1.63,
"grad_norm": 0.5763170998502684,
"learning_rate": 1.9702277513456493e-06,
"loss": 2.0673,
"step": 3690
},
{
"epoch": 1.64,
"grad_norm": 0.5513807845552491,
"learning_rate": 1.9472511284266604e-06,
"loss": 2.0851,
"step": 3695
},
{
"epoch": 1.64,
"grad_norm": 0.546481823674631,
"learning_rate": 1.9243948083727626e-06,
"loss": 2.0581,
"step": 3700
},
{
"epoch": 1.64,
"grad_norm": 0.5819777815282389,
"learning_rate": 1.9016591326426148e-06,
"loss": 2.0764,
"step": 3705
},
{
"epoch": 1.64,
"grad_norm": 0.5336652665195354,
"learning_rate": 1.879044440892517e-06,
"loss": 2.0623,
"step": 3710
},
{
"epoch": 1.65,
"grad_norm": 0.541957281094874,
"learning_rate": 1.8565510709713574e-06,
"loss": 2.0968,
"step": 3715
},
{
"epoch": 1.65,
"grad_norm": 0.5960558781920652,
"learning_rate": 1.8341793589155444e-06,
"loss": 2.0207,
"step": 3720
},
{
"epoch": 1.65,
"grad_norm": 0.5658813139193043,
"learning_rate": 1.8119296389440067e-06,
"loss": 2.0409,
"step": 3725
},
{
"epoch": 1.65,
"grad_norm": 0.5600956993728589,
"learning_rate": 1.789802243453178e-06,
"loss": 2.08,
"step": 3730
},
{
"epoch": 1.65,
"grad_norm": 0.5444257311699127,
"learning_rate": 1.7677975030120554e-06,
"loss": 2.0782,
"step": 3735
},
{
"epoch": 1.66,
"grad_norm": 0.6064353942649628,
"learning_rate": 1.7459157463572396e-06,
"loss": 2.0871,
"step": 3740
},
{
"epoch": 1.66,
"grad_norm": 0.541350200855235,
"learning_rate": 1.724157300388042e-06,
"loss": 2.0629,
"step": 3745
},
{
"epoch": 1.66,
"grad_norm": 0.5750807849757769,
"learning_rate": 1.7025224901615811e-06,
"loss": 2.0394,
"step": 3750
},
{
"epoch": 1.66,
"grad_norm": 0.5449169136101253,
"learning_rate": 1.681011638887946e-06,
"loss": 2.0631,
"step": 3755
},
{
"epoch": 1.67,
"grad_norm": 0.5608519842326459,
"learning_rate": 1.6596250679253568e-06,
"loss": 2.0747,
"step": 3760
},
{
"epoch": 1.67,
"grad_norm": 0.5751587370767709,
"learning_rate": 1.6383630967753628e-06,
"loss": 2.046,
"step": 3765
},
{
"epoch": 1.67,
"grad_norm": 0.5717931887732176,
"learning_rate": 1.6172260430780772e-06,
"loss": 2.0608,
"step": 3770
},
{
"epoch": 1.67,
"grad_norm": 0.6075450124795357,
"learning_rate": 1.596214222607424e-06,
"loss": 2.0667,
"step": 3775
},
{
"epoch": 1.67,
"grad_norm": 0.5553340528171433,
"learning_rate": 1.5753279492664264e-06,
"loss": 2.0234,
"step": 3780
},
{
"epoch": 1.68,
"grad_norm": 0.5621318283400624,
"learning_rate": 1.5545675350825097e-06,
"loss": 2.0801,
"step": 3785
},
{
"epoch": 1.68,
"grad_norm": 0.5540463623606954,
"learning_rate": 1.5339332902028537e-06,
"loss": 2.0719,
"step": 3790
},
{
"epoch": 1.68,
"grad_norm": 0.5289814584964564,
"learning_rate": 1.5134255228897376e-06,
"loss": 2.0582,
"step": 3795
},
{
"epoch": 1.68,
"grad_norm": 0.5818834582074843,
"learning_rate": 1.493044539515961e-06,
"loss": 2.0781,
"step": 3800
},
{
"epoch": 1.69,
"grad_norm": 0.5486030112867849,
"learning_rate": 1.4727906445602425e-06,
"loss": 2.067,
"step": 3805
},
{
"epoch": 1.69,
"grad_norm": 0.5692514339180118,
"learning_rate": 1.4526641406026898e-06,
"loss": 2.0709,
"step": 3810
},
{
"epoch": 1.69,
"grad_norm": 0.559159661882281,
"learning_rate": 1.432665328320263e-06,
"loss": 2.0646,
"step": 3815
},
{
"epoch": 1.69,
"grad_norm": 0.550050485618119,
"learning_rate": 1.4127945064823023e-06,
"loss": 2.045,
"step": 3820
},
{
"epoch": 1.69,
"grad_norm": 0.5752186827670506,
"learning_rate": 1.3930519719460411e-06,
"loss": 2.0315,
"step": 3825
},
{
"epoch": 1.7,
"grad_norm": 0.558580536094069,
"learning_rate": 1.3734380196521923e-06,
"loss": 2.0563,
"step": 3830
},
{
"epoch": 1.7,
"grad_norm": 0.5842852752097953,
"learning_rate": 1.35395294262053e-06,
"loss": 2.0516,
"step": 3835
},
{
"epoch": 1.7,
"grad_norm": 0.5465431273811305,
"learning_rate": 1.334597031945517e-06,
"loss": 2.0676,
"step": 3840
},
{
"epoch": 1.7,
"grad_norm": 0.5389258241998429,
"learning_rate": 1.3153705767919478e-06,
"loss": 2.0359,
"step": 3845
},
{
"epoch": 1.71,
"grad_norm": 0.5743075515523858,
"learning_rate": 1.296273864390646e-06,
"loss": 2.0785,
"step": 3850
},
{
"epoch": 1.71,
"grad_norm": 0.5423528477467442,
"learning_rate": 1.2773071800341497e-06,
"loss": 2.0472,
"step": 3855
},
{
"epoch": 1.71,
"grad_norm": 0.569665340468426,
"learning_rate": 1.2584708070724738e-06,
"loss": 2.0546,
"step": 3860
},
{
"epoch": 1.71,
"grad_norm": 0.5524520025544936,
"learning_rate": 1.2397650269088557e-06,
"loss": 2.0555,
"step": 3865
},
{
"epoch": 1.71,
"grad_norm": 0.566991554527505,
"learning_rate": 1.2211901189955689e-06,
"loss": 2.042,
"step": 3870
},
{
"epoch": 1.72,
"grad_norm": 0.5520387470342608,
"learning_rate": 1.2027463608297308e-06,
"loss": 2.0508,
"step": 3875
},
{
"epoch": 1.72,
"grad_norm": 0.5680711247160345,
"learning_rate": 1.1844340279491772e-06,
"loss": 2.059,
"step": 3880
},
{
"epoch": 1.72,
"grad_norm": 0.562855569790018,
"learning_rate": 1.166253393928325e-06,
"loss": 2.0519,
"step": 3885
},
{
"epoch": 1.72,
"grad_norm": 0.5499250883393519,
"learning_rate": 1.1482047303740996e-06,
"loss": 2.09,
"step": 3890
},
{
"epoch": 1.72,
"grad_norm": 0.573673981837146,
"learning_rate": 1.1302883069218773e-06,
"loss": 2.0418,
"step": 3895
},
{
"epoch": 1.73,
"grad_norm": 0.5372551256171191,
"learning_rate": 1.1125043912314438e-06,
"loss": 2.0444,
"step": 3900
},
{
"epoch": 1.73,
"grad_norm": 0.5413809755195805,
"learning_rate": 1.0948532489830121e-06,
"loss": 2.0631,
"step": 3905
},
{
"epoch": 1.73,
"grad_norm": 0.5496442303584832,
"learning_rate": 1.0773351438732392e-06,
"loss": 2.0554,
"step": 3910
},
{
"epoch": 1.73,
"grad_norm": 0.5634011960081305,
"learning_rate": 1.0599503376113017e-06,
"loss": 2.0596,
"step": 3915
},
{
"epoch": 1.74,
"grad_norm": 0.562809002296741,
"learning_rate": 1.0426990899149658e-06,
"loss": 2.0897,
"step": 3920
},
{
"epoch": 1.74,
"grad_norm": 0.5444434242238521,
"learning_rate": 1.0255816585067302e-06,
"loss": 2.0669,
"step": 3925
},
{
"epoch": 1.74,
"grad_norm": 0.5884898402856641,
"learning_rate": 1.0085982991099585e-06,
"loss": 2.0297,
"step": 3930
},
{
"epoch": 1.74,
"grad_norm": 0.548203890186792,
"learning_rate": 9.9174926544507e-07,
"loss": 2.0884,
"step": 3935
},
{
"epoch": 1.74,
"grad_norm": 0.5811570784957567,
"learning_rate": 9.750348092257368e-07,
"loss": 2.0676,
"step": 3940
},
{
"epoch": 1.75,
"grad_norm": 0.5566013170155961,
"learning_rate": 9.58455180155139e-07,
"loss": 2.0728,
"step": 3945
},
{
"epoch": 1.75,
"grad_norm": 0.5575934399314477,
"learning_rate": 9.420106259222184e-07,
"loss": 2.0549,
"step": 3950
},
{
"epoch": 1.75,
"grad_norm": 0.5462642114919164,
"learning_rate": 9.25701392197994e-07,
"loss": 2.0693,
"step": 3955
},
{
"epoch": 1.75,
"grad_norm": 0.5397235708075381,
"learning_rate": 9.095277226318766e-07,
"loss": 2.0735,
"step": 3960
},
{
"epoch": 1.76,
"grad_norm": 0.5500604287292675,
"learning_rate": 8.934898588480434e-07,
"loss": 2.073,
"step": 3965
},
{
"epoch": 1.76,
"grad_norm": 0.5401720371663663,
"learning_rate": 8.775880404418113e-07,
"loss": 2.0711,
"step": 3970
},
{
"epoch": 1.76,
"grad_norm": 0.563182485838835,
"learning_rate": 8.618225049760787e-07,
"loss": 2.0661,
"step": 3975
},
{
"epoch": 1.76,
"grad_norm": 0.5381368192097733,
"learning_rate": 8.461934879777545e-07,
"loss": 2.0778,
"step": 3980
},
{
"epoch": 1.76,
"grad_norm": 0.5452234358683491,
"learning_rate": 8.307012229342581e-07,
"loss": 2.0673,
"step": 3985
},
{
"epoch": 1.77,
"grad_norm": 0.5390788072629571,
"learning_rate": 8.153459412900156e-07,
"loss": 2.0731,
"step": 3990
},
{
"epoch": 1.77,
"grad_norm": 0.5696422344578307,
"learning_rate": 8.001278724430173e-07,
"loss": 2.051,
"step": 3995
},
{
"epoch": 1.77,
"grad_norm": 0.5555940065413444,
"learning_rate": 7.850472437413748e-07,
"loss": 2.0528,
"step": 4000
},
{
"epoch": 1.77,
"grad_norm": 0.5481143741603102,
"learning_rate": 7.701042804799419e-07,
"loss": 2.0632,
"step": 4005
},
{
"epoch": 1.78,
"grad_norm": 0.5601547104472069,
"learning_rate": 7.552992058969299e-07,
"loss": 2.0564,
"step": 4010
},
{
"epoch": 1.78,
"grad_norm": 0.5378597360108794,
"learning_rate": 7.406322411705891e-07,
"loss": 2.0618,
"step": 4015
},
{
"epoch": 1.78,
"grad_norm": 0.5631349577009135,
"learning_rate": 7.261036054158965e-07,
"loss": 2.0532,
"step": 4020
},
{
"epoch": 1.78,
"grad_norm": 0.556114883145545,
"learning_rate": 7.117135156812849e-07,
"loss": 2.0642,
"step": 4025
},
{
"epoch": 1.78,
"grad_norm": 0.5519883301821252,
"learning_rate": 6.974621869453924e-07,
"loss": 2.0771,
"step": 4030
},
{
"epoch": 1.79,
"grad_norm": 0.5851271038601582,
"learning_rate": 6.833498321138665e-07,
"loss": 2.0427,
"step": 4035
},
{
"epoch": 1.79,
"grad_norm": 0.5842915535785638,
"learning_rate": 6.693766620161691e-07,
"loss": 2.0312,
"step": 4040
},
{
"epoch": 1.79,
"grad_norm": 0.5582334401948562,
"learning_rate": 6.555428854024304e-07,
"loss": 2.0787,
"step": 4045
},
{
"epoch": 1.79,
"grad_norm": 0.5453229055820809,
"learning_rate": 6.418487089403392e-07,
"loss": 2.0448,
"step": 4050
},
{
"epoch": 1.8,
"grad_norm": 0.548593006958361,
"learning_rate": 6.282943372120399e-07,
"loss": 2.1003,
"step": 4055
},
{
"epoch": 1.8,
"grad_norm": 0.5643134151736231,
"learning_rate": 6.148799727110911e-07,
"loss": 2.0555,
"step": 4060
},
{
"epoch": 1.8,
"grad_norm": 0.5943245058190392,
"learning_rate": 6.016058158394278e-07,
"loss": 2.0696,
"step": 4065
},
{
"epoch": 1.8,
"grad_norm": 0.564516856076164,
"learning_rate": 5.884720649043807e-07,
"loss": 2.0581,
"step": 4070
},
{
"epoch": 1.8,
"grad_norm": 0.5716904464013306,
"learning_rate": 5.754789161157004e-07,
"loss": 2.0655,
"step": 4075
},
{
"epoch": 1.81,
"grad_norm": 0.560907477710711,
"learning_rate": 5.626265635826367e-07,
"loss": 2.0346,
"step": 4080
},
{
"epoch": 1.81,
"grad_norm": 0.5345268343756697,
"learning_rate": 5.499151993110286e-07,
"loss": 2.0554,
"step": 4085
},
{
"epoch": 1.81,
"grad_norm": 0.5662972439866378,
"learning_rate": 5.373450132004499e-07,
"loss": 2.0556,
"step": 4090
},
{
"epoch": 1.81,
"grad_norm": 0.5335363875259758,
"learning_rate": 5.249161930413549e-07,
"loss": 2.0715,
"step": 4095
},
{
"epoch": 1.82,
"grad_norm": 0.545874715460961,
"learning_rate": 5.126289245122906e-07,
"loss": 2.0442,
"step": 4100
},
{
"epoch": 1.82,
"grad_norm": 0.5794363522287975,
"learning_rate": 5.004833911771045e-07,
"loss": 2.0529,
"step": 4105
},
{
"epoch": 1.82,
"grad_norm": 0.5546164734815353,
"learning_rate": 4.884797744822212e-07,
"loss": 2.0727,
"step": 4110
},
{
"epoch": 1.82,
"grad_norm": 0.5782244959634447,
"learning_rate": 4.7661825375391767e-07,
"loss": 2.0368,
"step": 4115
},
{
"epoch": 1.82,
"grad_norm": 0.5423132781529818,
"learning_rate": 4.648990061956493e-07,
"loss": 2.0318,
"step": 4120
},
{
"epoch": 1.83,
"grad_norm": 0.5472433600199033,
"learning_rate": 4.5332220688540263e-07,
"loss": 2.0793,
"step": 4125
},
{
"epoch": 1.83,
"grad_norm": 0.5506109376831969,
"learning_rate": 4.418880287730798e-07,
"loss": 2.0665,
"step": 4130
},
{
"epoch": 1.83,
"grad_norm": 0.5531218647564758,
"learning_rate": 4.305966426779118e-07,
"loss": 2.0599,
"step": 4135
},
{
"epoch": 1.83,
"grad_norm": 0.5507412153279745,
"learning_rate": 4.194482172859127e-07,
"loss": 2.09,
"step": 4140
},
{
"epoch": 1.84,
"grad_norm": 0.5465574653418939,
"learning_rate": 4.08442919147356e-07,
"loss": 2.0697,
"step": 4145
},
{
"epoch": 1.84,
"grad_norm": 0.5682607962131835,
"learning_rate": 3.9758091267428245e-07,
"loss": 2.0707,
"step": 4150
},
{
"epoch": 1.84,
"grad_norm": 0.5543305685804978,
"learning_rate": 3.8686236013805387e-07,
"loss": 2.0486,
"step": 4155
},
{
"epoch": 1.84,
"grad_norm": 0.5408528113324927,
"learning_rate": 3.762874216669166e-07,
"loss": 2.0752,
"step": 4160
},
{
"epoch": 1.84,
"grad_norm": 0.564300205091926,
"learning_rate": 3.658562552436207e-07,
"loss": 2.0573,
"step": 4165
},
{
"epoch": 1.85,
"grad_norm": 0.5566909591043121,
"learning_rate": 3.555690167030512e-07,
"loss": 2.0601,
"step": 4170
},
{
"epoch": 1.85,
"grad_norm": 0.5721321360782805,
"learning_rate": 3.454258597299065e-07,
"loss": 2.0462,
"step": 4175
},
{
"epoch": 1.85,
"grad_norm": 0.5484268772007359,
"learning_rate": 3.354269358563966e-07,
"loss": 2.0328,
"step": 4180
},
{
"epoch": 1.85,
"grad_norm": 0.5569596172271336,
"learning_rate": 3.2557239445998534e-07,
"loss": 2.0371,
"step": 4185
},
{
"epoch": 1.86,
"grad_norm": 0.5682622284990055,
"learning_rate": 3.158623827611529e-07,
"loss": 2.0563,
"step": 4190
},
{
"epoch": 1.86,
"grad_norm": 0.5477643211494992,
"learning_rate": 3.062970458212e-07,
"loss": 2.079,
"step": 4195
},
{
"epoch": 1.86,
"grad_norm": 0.5617676773559073,
"learning_rate": 2.968765265400808e-07,
"loss": 2.0688,
"step": 4200
},
{
"epoch": 1.86,
"grad_norm": 0.5608020878059345,
"learning_rate": 2.876009656542655e-07,
"loss": 2.0645,
"step": 4205
},
{
"epoch": 1.86,
"grad_norm": 0.5586401028328312,
"learning_rate": 2.784705017346423e-07,
"loss": 2.0631,
"step": 4210
},
{
"epoch": 1.87,
"grad_norm": 0.552746620005558,
"learning_rate": 2.6948527118444313e-07,
"loss": 2.0816,
"step": 4215
},
{
"epoch": 1.87,
"grad_norm": 0.5511657086345679,
"learning_rate": 2.606454082372045e-07,
"loss": 2.0857,
"step": 4220
},
{
"epoch": 1.87,
"grad_norm": 0.5594235230943233,
"learning_rate": 2.519510449547691e-07,
"loss": 2.0356,
"step": 4225
},
{
"epoch": 1.87,
"grad_norm": 0.5716867070357035,
"learning_rate": 2.4340231122530477e-07,
"loss": 2.0743,
"step": 4230
},
{
"epoch": 1.88,
"grad_norm": 0.5423931882277669,
"learning_rate": 2.3499933476137215e-07,
"loss": 2.0517,
"step": 4235
},
{
"epoch": 1.88,
"grad_norm": 0.5599736800041696,
"learning_rate": 2.2674224109800913e-07,
"loss": 2.0999,
"step": 4240
},
{
"epoch": 1.88,
"grad_norm": 0.5717714786006198,
"learning_rate": 2.186311535908603e-07,
"loss": 2.0648,
"step": 4245
},
{
"epoch": 1.88,
"grad_norm": 0.569629853540159,
"learning_rate": 2.106661934143317e-07,
"loss": 2.0183,
"step": 4250
},
{
"epoch": 1.88,
"grad_norm": 0.5684922650306118,
"learning_rate": 2.0284747955978346e-07,
"loss": 2.0482,
"step": 4255
},
{
"epoch": 1.89,
"grad_norm": 0.5492401424747482,
"learning_rate": 1.9517512883374667e-07,
"loss": 2.0332,
"step": 4260
},
{
"epoch": 1.89,
"grad_norm": 0.5542531880317354,
"learning_rate": 1.87649255856186e-07,
"loss": 2.0623,
"step": 4265
},
{
"epoch": 1.89,
"grad_norm": 0.5473376655943359,
"learning_rate": 1.802699730587798e-07,
"loss": 2.0585,
"step": 4270
},
{
"epoch": 1.89,
"grad_norm": 0.5545686168014152,
"learning_rate": 1.73037390683245e-07,
"loss": 2.0273,
"step": 4275
},
{
"epoch": 1.9,
"grad_norm": 0.5294235962192092,
"learning_rate": 1.659516167796904e-07,
"loss": 2.0505,
"step": 4280
},
{
"epoch": 1.9,
"grad_norm": 0.5488385219678276,
"learning_rate": 1.5901275720499821e-07,
"loss": 2.0591,
"step": 4285
},
{
"epoch": 1.9,
"grad_norm": 0.5538840522828491,
"learning_rate": 1.522209156212484e-07,
"loss": 2.0607,
"step": 4290
},
{
"epoch": 1.9,
"grad_norm": 0.5397829320095424,
"learning_rate": 1.4557619349416574e-07,
"loss": 2.0596,
"step": 4295
},
{
"epoch": 1.9,
"grad_norm": 0.5530090877468173,
"learning_rate": 1.3907869009160525e-07,
"loss": 2.0583,
"step": 4300
},
{
"epoch": 1.91,
"grad_norm": 0.5344060062052084,
"learning_rate": 1.3272850248206905e-07,
"loss": 2.0403,
"step": 4305
},
{
"epoch": 1.91,
"grad_norm": 0.572022476601045,
"learning_rate": 1.265257255332586e-07,
"loss": 2.0484,
"step": 4310
},
{
"epoch": 1.91,
"grad_norm": 0.5389725504782829,
"learning_rate": 1.2047045191065144e-07,
"loss": 2.07,
"step": 4315
},
{
"epoch": 1.91,
"grad_norm": 0.5814674775977551,
"learning_rate": 1.1456277207612554e-07,
"loss": 2.0605,
"step": 4320
},
{
"epoch": 1.92,
"grad_norm": 0.5881935121326394,
"learning_rate": 1.0880277428659935e-07,
"loss": 2.0902,
"step": 4325
},
{
"epoch": 1.92,
"grad_norm": 0.5444873759948782,
"learning_rate": 1.0319054459271837e-07,
"loss": 2.0557,
"step": 4330
},
{
"epoch": 1.92,
"grad_norm": 0.545131184923368,
"learning_rate": 9.77261668375673e-08,
"loss": 2.074,
"step": 4335
},
{
"epoch": 1.92,
"grad_norm": 0.5610707777097982,
"learning_rate": 9.240972265541992e-08,
"loss": 2.056,
"step": 4340
},
{
"epoch": 1.92,
"grad_norm": 0.5675217958637914,
"learning_rate": 8.724129147051786e-08,
"loss": 2.0451,
"step": 4345
},
{
"epoch": 1.93,
"grad_norm": 0.5741070110149166,
"learning_rate": 8.222095049588264e-08,
"loss": 2.0638,
"step": 4350
},
{
"epoch": 1.93,
"grad_norm": 0.5621379206974747,
"learning_rate": 7.734877473216329e-08,
"loss": 2.0717,
"step": 4355
},
{
"epoch": 1.93,
"grad_norm": 0.565503066692675,
"learning_rate": 7.262483696652167e-08,
"loss": 2.0698,
"step": 4360
},
{
"epoch": 1.93,
"grad_norm": 0.5454143105228505,
"learning_rate": 6.804920777153112e-08,
"loss": 2.0388,
"step": 4365
},
{
"epoch": 1.94,
"grad_norm": 0.5504174721252638,
"learning_rate": 6.362195550413953e-08,
"loss": 2.0624,
"step": 4370
},
{
"epoch": 1.94,
"grad_norm": 0.5315790172920036,
"learning_rate": 5.934314630463234e-08,
"loss": 2.063,
"step": 4375
},
{
"epoch": 1.94,
"grad_norm": 0.5467456755247602,
"learning_rate": 5.521284409565675e-08,
"loss": 2.0502,
"step": 4380
},
{
"epoch": 1.94,
"grad_norm": 0.5510849335903089,
"learning_rate": 5.123111058125574e-08,
"loss": 2.0838,
"step": 4385
},
{
"epoch": 1.94,
"grad_norm": 0.5548419386886653,
"learning_rate": 4.739800524595884e-08,
"loss": 2.0612,
"step": 4390
},
{
"epoch": 1.95,
"grad_norm": 0.5836011008917115,
"learning_rate": 4.371358535388059e-08,
"loss": 2.0555,
"step": 4395
},
{
"epoch": 1.95,
"grad_norm": 0.5638108386766991,
"learning_rate": 4.017790594787574e-08,
"loss": 2.0649,
"step": 4400
},
{
"epoch": 1.95,
"grad_norm": 0.5757515165038948,
"learning_rate": 3.679101984870759e-08,
"loss": 2.0529,
"step": 4405
},
{
"epoch": 1.95,
"grad_norm": 0.5541134255339705,
"learning_rate": 3.355297765426868e-08,
"loss": 2.0679,
"step": 4410
},
{
"epoch": 1.96,
"grad_norm": 0.5886107345177329,
"learning_rate": 3.046382773881584e-08,
"loss": 2.0597,
"step": 4415
},
{
"epoch": 1.96,
"grad_norm": 0.5641428201729245,
"learning_rate": 2.7523616252252972e-08,
"loss": 2.0323,
"step": 4420
},
{
"epoch": 1.96,
"grad_norm": 0.531356767390016,
"learning_rate": 2.4732387119440483e-08,
"loss": 2.0498,
"step": 4425
},
{
"epoch": 1.96,
"grad_norm": 0.5267926881975364,
"learning_rate": 2.2090182039538055e-08,
"loss": 2.0411,
"step": 4430
},
{
"epoch": 1.96,
"grad_norm": 0.5879618739864849,
"learning_rate": 1.9597040485380692e-08,
"loss": 2.058,
"step": 4435
},
{
"epoch": 1.97,
"grad_norm": 0.543086042306532,
"learning_rate": 1.7252999702894736e-08,
"loss": 2.0496,
"step": 4440
},
{
"epoch": 1.97,
"grad_norm": 0.5586291441651783,
"learning_rate": 1.5058094710533877e-08,
"loss": 2.0761,
"step": 4445
},
{
"epoch": 1.97,
"grad_norm": 0.549955876597547,
"learning_rate": 1.3012358298760686e-08,
"loss": 2.05,
"step": 4450
},
{
"epoch": 1.97,
"grad_norm": 0.5261240170191214,
"learning_rate": 1.1115821029555884e-08,
"loss": 2.0725,
"step": 4455
},
{
"epoch": 1.98,
"grad_norm": 0.5468628553497797,
"learning_rate": 9.368511235958722e-09,
"loss": 2.0722,
"step": 4460
},
{
"epoch": 1.98,
"grad_norm": 0.5285437238383451,
"learning_rate": 7.770455021651746e-09,
"loss": 2.0526,
"step": 4465
},
{
"epoch": 1.98,
"grad_norm": 0.5701019573813519,
"learning_rate": 6.3216762605589064e-09,
"loss": 2.0526,
"step": 4470
},
{
"epoch": 1.98,
"grad_norm": 0.547520554686719,
"learning_rate": 5.022196596501383e-09,
"loss": 2.0554,
"step": 4475
},
{
"epoch": 1.98,
"grad_norm": 0.5396174951709908,
"learning_rate": 3.87203544286563e-09,
"loss": 2.0853,
"step": 4480
},
{
"epoch": 1.99,
"grad_norm": 0.5570769894252682,
"learning_rate": 2.8712099823147156e-09,
"loss": 2.0439,
"step": 4485
},
{
"epoch": 1.99,
"grad_norm": 0.5744431871050352,
"learning_rate": 2.019735166534087e-09,
"loss": 2.0522,
"step": 4490
},
{
"epoch": 1.99,
"grad_norm": 0.5394366461108568,
"learning_rate": 1.3176237160095195e-09,
"loss": 2.0773,
"step": 4495
},
{
"epoch": 1.99,
"grad_norm": 0.5461433058380382,
"learning_rate": 7.648861198306101e-10,
"loss": 2.0764,
"step": 4500
},
{
"epoch": 2.0,
"grad_norm": 0.5267222777965351,
"learning_rate": 3.6153063554089653e-10,
"loss": 2.0462,
"step": 4505
},
{
"epoch": 2.0,
"grad_norm": 0.5664405565317054,
"learning_rate": 1.0756328901018188e-10,
"loss": 2.0701,
"step": 4510
},
{
"epoch": 2.0,
"grad_norm": 0.5554391262667828,
"learning_rate": 2.987874346827013e-12,
"loss": 2.0651,
"step": 4515
},
{
"epoch": 2.0,
"eval_loss": 2.073899507522583,
"eval_runtime": 26.2602,
"eval_samples_per_second": 1217.772,
"eval_steps_per_second": 38.08,
"step": 4516
},
{
"epoch": 2.0,
"step": 4516,
"total_flos": 13637863342080.0,
"train_loss": 2.136630948372284,
"train_runtime": 1210.2491,
"train_samples_per_second": 477.469,
"train_steps_per_second": 3.731
}
],
"logging_steps": 5,
"max_steps": 4516,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 100,
"total_flos": 13637863342080.0,
"train_batch_size": 32,
"trial_name": null,
"trial_params": null
}